diff --git a/README.md b/README.md index 2a1bd7ce2..54cd80503 100644 --- a/README.md +++ b/README.md @@ -62,7 +62,7 @@ You can choose the latest stable release : ```html ``` @@ -72,7 +72,7 @@ with the [available stdlib](https://github.com/brython-dev/brython/tree/master/w ```html ``` diff --git a/npm/package.json b/npm/package.json index 2bb77c2f0..a375f12a5 100644 --- a/npm/package.json +++ b/npm/package.json @@ -1,6 +1,6 @@ { "name": "brython", - "version": "3.12.1", + "version": "3.12.3", "description": "Python 3 in the browser", "main": "brython.js", "scripts": { diff --git a/releases/index.html b/releases/index.html index 2bf81b500..e640077d9 100644 --- a/releases/index.html +++ b/releases/index.html @@ -7,7 +7,7 @@ - + - + - + @@ -40,8 +40,8 @@ - + @@ -49,8 +49,16 @@ - - + + + + + + + + + + diff --git a/www/demo.html b/www/demo.html index bad269c02..714456a14 100644 --- a/www/demo.html +++ b/www/demo.html @@ -1277,6 +1277,8 @@ + + @@ -1307,6 +1309,15 @@ + + + + + + + + + diff --git a/www/doc/doc_brython.css b/www/doc/doc_brython.css index 9fe1150b6..5259870f7 100644 --- a/www/doc/doc_brython.css +++ b/www/doc/doc_brython.css @@ -146,7 +146,7 @@ select.language { border-color:#888; border-radius: 10px; width:auto; - font-family: "Consolas"; + font-family: "Consolas", monospace; } .xml{ @@ -176,7 +176,7 @@ li { } pre{ - font-family: Consolas; + font-family: Consolas, monospace; } pre.marked { color:var(--header-color); @@ -200,7 +200,7 @@ span.python-builtin{ code { color:var(--special-text-color); - font-family: Consolas; + font-family: Consolas, monospace; } em { diff --git a/www/doc/en/install.md b/www/doc/en/install.md index eee163e9e..fe7b7f672 100644 --- a/www/doc/en/install.md +++ b/www/doc/en/install.md @@ -11,14 +11,14 @@ Brython scripts from a CDN: ``` The minor version can be specified with `brython@3.12` and the micro version -with `brython@3.12.1`. +with `brython@3.12.3`. Brython is also available with cdnjs: ```xml - - ``` diff --git a/www/doc/fr/install.md b/www/doc/fr/install.md index 8ca8fa00e..18a7d8d1f 100644 --- a/www/doc/fr/install.md +++ b/www/doc/fr/install.md @@ -11,14 +11,14 @@ d'inclure les scripts Brython dans la page HTML depuis un CDN. ``` On peut aussi préciser la version mineure avec `brython@3.12` et la version -micro avec `brython@3.12.1`. +micro avec `brython@3.12.3`. 
Brython est également disponible sur cdnjs: ```xml - - ``` diff --git a/www/gallery/ajax.html b/www/gallery/ajax.html index efb7913d5..2fad94164 100644 --- a/www/gallery/ajax.html +++ b/www/gallery/ajax.html @@ -14,6 +14,8 @@ + + @@ -35,8 +37,8 @@ - + @@ -45,6 +47,16 @@ + + + + + + + + + + diff --git a/www/gallery/indexedDB_cache.html b/www/gallery/indexedDB_cache.html index 59ae256c0..bc91e05e9 100644 --- a/www/gallery/indexedDB_cache.html +++ b/www/gallery/indexedDB_cache.html @@ -17,6 +17,8 @@ + + @@ -47,6 +49,15 @@ + + + + + + + + + diff --git a/www/gallery/inject_scripts.html b/www/gallery/inject_scripts.html index 992ffc200..2ec4cd6ea 100644 --- a/www/gallery/inject_scripts.html +++ b/www/gallery/inject_scripts.html @@ -16,6 +16,8 @@ + + @@ -29,7 +31,6 @@ - @@ -38,6 +39,7 @@ + @@ -47,11 +49,15 @@ + + - - - + + + + + diff --git a/www/gallery/music/drum_score.py b/www/gallery/music/drum_score.py index 002840013..633f8a2dd 100644 --- a/www/gallery/music/drum_score.py +++ b/www/gallery/music/drum_score.py @@ -11,7 +11,7 @@ def __init__(self, message): class NoteStyle: checked = '#666' - unchecked = '#ddd' + unchecked = {0: '#bbb', 1: '#ddd'} class NoteCell(html.TD): @@ -22,10 +22,13 @@ def __init__(self, text=""): class Note(NoteCell): - def __init__(self, bar, instrument): + def __init__(self, bar, instrument, pos): super().__init__() self.instrument = instrument self.bar = bar + nb = bar.score.notes_per_bar + div = nb / 4 + self.unchecked_color = NoteStyle.unchecked[(pos // div) % 2] self.bind('click', self.click) self.uncheck() @@ -45,14 +48,9 @@ def check(self): self.style.backgroundColor = NoteStyle.checked def uncheck(self): - self.style.backgroundColor = NoteStyle.unchecked + self.style.backgroundColor = self.unchecked_color -def checked(elt): - return elt.style.backgroundColor != NoteStyle.unchecked - -seq = [] - class Tab(html.TD): def __init__(self, score, num): @@ -68,41 +66,36 @@ def __init__(self, score, num): self.add_close_button() def add_close_button(self): - if self.num > 1: - self.row <= self.close_button + self.row <= self.close_button def remove_close_button(self): - if self.num > 1: - self.close_button.remove() + self.close_button.remove() def select(self): - index = self.num - 1 - bar = self.score.bars[index] if self.score.selected_tab is not None: self.score.selected_tab.unselect() + index = self.score.tabs.index(self) self.score.bar_cell <= self.score.bars[index] self.score.selected_tab = self self.className = 'selected_tab' + self.add_close_button() def unselect(self): - bar = self.score.bars[self.num - 1] self.score.bar_cell.clear() self.className = 'unselected_tab' def close(self, ev): ev.stopPropagation() - self.score.tabs[-2].add_close_button() - self.score.tabs[-2].select() - del self.score.tabs[-1] - del self.score.bars[-1] - self.remove() + self.score.remove_tab(self) + class Score(html.TABLE): - def __init__(self, instruments): + def __init__(self, instruments, notes_per_bar=12): super().__init__(cellpadding=0, cellspacing=0, Class='score') self.instruments = instruments + self.notes_per_bar = notes_per_bar self.plus_tab = html.TD('+', Class='plus unselected_tab') self.tabs = [] @@ -135,46 +128,76 @@ def new_tab(self, ev=None, notes=None): tab.select() self.selected_tab = tab + def get_tab(self, tab_num): + for tab in self.tabs: + if tab.num == tab_num: + return tab + def show_pattern(self, pattern_num): - selected = self.tabs[pattern_num] - if selected is self.selected_tab: - return - selected.select() + for tab in self.tabs: + if tab.num == 
pattern_num: + selected = tab + if selected is self.selected_tab: + return + selected.select() + + def play_pattern(self, ev): + pass def flash(self, cell): cell.style.backgroundColor = 'black' timer.set_timeout(lambda: cell.check(), 100) def select_tab(self, ev): - self.show_pattern(int(ev.target.text) - 1) - - def get_seq(self, bpm): + tab = ev.target.parentNode.closest('TD') + while not hasattr(tab, 'num'): + tab = tab.parentNode.closest('TD') + self.show_pattern(tab.num) + + def remove_tab(self, tab): + ix = self.tabs.index(tab) + if len(self.tabs) > 1: + next_tab = self.tabs[ix + 1] if ix < len(self.tabs) - 1 \ + else self.tabs[ix - 1] + self.tabs.remove(tab) + del self.bars[ix] + tab.remove() + if self.tabs: + self.show_pattern(next_tab.num) + + def get_seq(self, pattern_num=None): seq = [] patterns = [] - for pattern in self.patterns.value.split(): - repeat = pattern.split('x') - if len(repeat) == 2: - patterns += [int(repeat[0]) - 1] * int(repeat[1]) - elif len(repeat) > 2: - raise PatternError(f'invalid pattern: {pattern}') - else: - patterns.append(int(pattern) - 1) - #patterns = [int(x.strip()) - 1 for x in self.patterns.value.split()] + if pattern_num is None: + for pattern in self.patterns.value.split(): + repeat = pattern.split('x') + if len(repeat) == 2: + patterns += [int(repeat[0])] * int(repeat[1]) + elif len(repeat) > 2: + raise PatternError(f'invalid pattern: {pattern}') + else: + patterns.append(int(pattern)) + else: + patterns = [pattern_num + 1] nb_bars = len(patterns) # there are bpm quarter notes per minute # each quarter note lasts 60/bpm second + quarter_len = 60 / self.bpm # a bar has 4 quarter notes, so a bar lasts 240/bpm seconds + bar_len = 4 * quarter_len + # number of notes between 2 quarter notes # dt is the interval between 16th notes (1/4 of a quarter) - dt = 15 / bpm + dt = bar_len / self.notes_per_bar t0 = 0 for pattern in patterns: - bar = self.bars[pattern] + tab = self.get_tab(pattern) + bar = self.bars[self.tabs.index(tab)] notes = bar.notes for line_num, instrument in enumerate(notes): for pos in notes[instrument]: cell = bar.lines[line_num].children[pos + 1] seq.append((line_num, t0 + pos * dt, pattern, cell)) - t0 += 240 / bpm + t0 += bar_len seq.sort(key=lambda x: x[1]) return seq, nb_bars @@ -186,16 +209,22 @@ def __init__(self, score, notes=None): self.score = score if notes is None: notes = {} - top = html.TR(html.TD(' ')) - top <= [NoteCell(x) for x in '1 2 3 4 '] + play_button = html.BUTTON('⏵') + play_button.bind('click', self.score.play_pattern) + top = html.TR(html.TD(play_button)) + nb = score.notes_per_bar + if nb == 16: + top <= [NoteCell(x) for x in '1 2 3 4 '] + elif nb == 13: + top <= [NoteCell(x) for x in '1 2 3 4 '] self <= top self.lines = [] self.notes = {} for instrument in score.instruments: self.notes[instrument] = notes.get(instrument.__name__, []) line = html.TR(html.TD(instrument.__name__)) - for pos in range(16): - note = Note(self, instrument) + for pos in range(nb): + note = Note(self, instrument, pos) line <= note if pos in self.notes[instrument]: note.check() diff --git a/www/gallery/music/synth_drums.html b/www/gallery/music/synth_drums.html index 5b4f0cc57..c2165c6ad 100644 --- a/www/gallery/music/synth_drums.html +++ b/www/gallery/music/synth_drums.html @@ -10,23 +10,30 @@ - Drum synthesis by chrislo + Drum machine - + + - +
[synth_drums.html hunks: the page markup was lost in extraction; the surviving text shows a "Notes per bar" control being added, the "Kick frequency" slider being removed, and the LOAD / SAVE controls being reworked (SAVE is now created from Python in synth_drums.py below).]
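As context for the drum-machine changes, a minimal, self-contained sketch of the scheduling arithmetic that `drum_score.Score.get_seq` and `synth_drums.play_score` rely on (quarter note = 60/bpm s, bar = 240/bpm s, grid step = bar length / notes_per_bar). The helper name `note_times` and the sample values are illustrative, not part of the patch:

```python
# Standalone sketch of the bar-timing arithmetic used by Score.get_seq and
# play_score in this diff; bpm, notes_per_bar and the note positions below
# are illustrative values, not taken from the patch.
def note_times(bpm, notes_per_bar, bars):
    """Yield (bar_index, position, start_time_in_seconds) for every note."""
    quarter_len = 60 / bpm        # one quarter note lasts 60/bpm seconds
    bar_len = 4 * quarter_len     # 4 quarter notes per bar -> 240/bpm seconds
    dt = bar_len / notes_per_bar  # spacing between consecutive grid positions
    t0 = 0.0
    for bar_index, positions in enumerate(bars):
        for pos in positions:
            yield bar_index, pos, t0 + pos * dt
        t0 += bar_len             # the next bar starts one bar length later

if __name__ == "__main__":
    # 120 bpm, 16 notes per bar: dt = 0.125 s and each bar lasts 2 s
    for bar, pos, t in note_times(120, 16, [[0, 4, 8, 12], [0, 8]]):
        print(f"bar {bar} pos {pos:2d} -> {t:.3f} s")
```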
diff --git a/www/gallery/music/synth_drums.py b/www/gallery/music/synth_drums.py index ea6686169..b2e6d6e34 100644 --- a/www/gallery/music/synth_drums.py +++ b/www/gallery/music/synth_drums.py @@ -8,150 +8,10 @@ from browser import bind, console, document, html, timer, window import drum_score +import player -class Config: - context = None - - -def setup(): - if Config.context is None: - Config.context = window.AudioContext.new() - -kick_freq = document['kick_freq'] - -class Kick: - - checked = 'o' - - def __init__(self): - setup() - - def setup(self): - self.osc = Config.context.createOscillator() - self.gain = Config.context.createGain() - self.osc.connect(self.gain) - self.gain.connect(Config.context.destination) - - def trigger(self, time=None): - time = time or Config.context.currentTime - self.setup() - - self.osc.frequency.setValueAtTime(int(kick_freq.value), time) - self.gain.gain.setValueAtTime(1, time) - - self.osc.frequency.exponentialRampToValueAtTime(0.01, time + 0.5) - self.gain.gain.exponentialRampToValueAtTime(0.01, time + 0.5) - - self.osc.start(time) - - self.osc.stop(time + 0.5) - - -class Snare: - - checked = 'o' - - def __init__(self): - setup() - self.setup() - - def setup(self): - self.noise = Config.context.createBufferSource() - self.noise.buffer = self.noiseBuffer() - - noiseFilter = Config.context.createBiquadFilter() - noiseFilter.type = 'highpass' - noiseFilter.frequency.value = 1000 - self.noise.connect(noiseFilter) - - self.noiseEnvelope = Config.context.createGain() - noiseFilter.connect(self.noiseEnvelope) - - self.noiseEnvelope.connect(Config.context.destination) - - def noiseBuffer(self): - bufferSize = Config.context.sampleRate - buffer = Config.context.createBuffer(1, bufferSize, - Config.context.sampleRate) - output = buffer.getChannelData(0) - - for i in range(bufferSize): - output[i] = random.random() * 2 - 1 - - return buffer - - def trigger(self, time=None): - - time = time or Config.context.currentTime - self.osc = Config.context.createOscillator() - self.osc.type = 'triangle' - - self.oscEnvelope = Config.context.createGain() - self.osc.connect(self.oscEnvelope) - self.oscEnvelope.connect(Config.context.destination) - - self.noiseEnvelope.gain.cancelScheduledValues(time) - - self.noiseEnvelope.gain.setValueAtTime(1, time) - self.noiseEnvelope.gain.exponentialRampToValueAtTime(0.01, time + 0.2) - self.noise.start(time) - - self.osc.frequency.setValueAtTime(100, time) - self.oscEnvelope.gain.setValueAtTime(0.7, time) - self.oscEnvelope.gain.exponentialRampToValueAtTime(0.01, time + 0.1) - self.osc.start(time) - - self.osc.stop(time + 0.2) - self.noise.stop(time + 0.2) - -class HiHat: - - buffer = None - checked = 'x' - - def setup(self, time): - self.source = Config.context.createBufferSource() - self.source.buffer = self.buffer - self.source.connect(Config.context.destination) - self.play(time) - - def trigger(self, time=None): - if self.buffer is None: - Config.context = window.AudioContext.new() - time = time or Config.context.currentTime - sampleLoader('samples/hihat.wav', HiHat, lambda: self.setup(time)) - else: - time = time or Config.context.currentTime - self.setup(time) - - def play(self, time): - time = Config.context.currentTime if time is None else time - self.source.start(time) - -instruments = [HiHat, Snare, Kick] - -score = drum_score.Score(instruments) -document['score'] <= html.DIV('Patterns') - -document['score'] <= score -score.new_tab() - - -def sampleLoader(url, cls, callback): - request = window.XMLHttpRequest.new() - 
request.open("GET", url, True) - request.responseType = "arraybuffer" - - def f(buffer): - cls.buffer = buffer - callback() - - @bind(request, 'load') - def load(ev): - Config.context.decodeAudioData(request.response, f) - - request.send() +instruments = player.instruments load_button = document['load_score'] @@ -163,16 +23,19 @@ def onload(event): event.target. The file content, as text, is the FileReader instance's "result" attribute.""" - global score data = json.loads(event.target.result) - score = drum_score.Score(instruments) + print('data', data) document['score'].clear() + score = create_score(data.get('notes_per_bar', 16)) document['score'] <= score + while score.tabs: + score.remove_tab(score.tabs[-1]) score.patterns.value = data['patterns'] for i, notes in enumerate(data['bars']): score.new_tab(notes=notes) + document["bpm_control"].value = document["bpm_value"].text = data['bpm'] # set attribute "download" to file name - save_button.attrs["download"] = file.name + ev.target.attrs["download"] = file.name # Get the selected file as a DOM File object file = load_button.files[0] @@ -182,10 +45,10 @@ def onload(event): reader.readAsText(file) reader.bind("load", onload) -save_button = document['save_score'] +#save_button = document['save_score'] -@bind(save_button, "mousedown") -def mousedown(evt): +#@bind(save_button, "mousedown") +def save_score(ev, score): """Create a "data URI" to set the downloaded file content Cf. https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs """ @@ -197,73 +60,78 @@ def mousedown(evt): sbar[instrument.__name__] = bar.notes[instrument] bars.append(sbar) - data = json.dumps({'patterns': score.patterns.value, 'bars': bars}) + data = json.dumps({'patterns': score.patterns.value, + 'bars': bars, + 'bpm': score.bpm, + 'notes_per_bar': score.notes_per_bar + }) content = window.encodeURIComponent(data) # set attribute "href" of save link + save_button = ev.target save_button.attrs["download"] = 'drum_score.json' save_button.attrs["href"] = "data:text/json," + content +def create_score(notes_per_bar): -look_ahead = 0.1 -schedule_period = 1000 * 0.05 # milliseconds + document["notes_per_bar"].clear() + document["score"].clear() -bpm_control = document['bpm'] + score = drum_score.Score(instruments, notes_per_bar) -@bind('#bpm', 'input') -def change_bpm(ev): - Sequencer.read_sequence() + document['score'] <= html.DIV('Patterns') -def get_bpm(): - return int(bpm_control.value) + document['score'] <= score + score.new_tab() -class Sequencer: + bpm = document["bpm"] + bpm.clear() - running = False - pattern = None + document['load'].clear() - @classmethod - def read_sequence(cls): - cls.seq, cls.nb_bars = score.get_seq(get_bpm()) + play_control = html.BUTTON("⏵", + id="play_score", Class="pure-button start_loop") + play_control.bind('click', lambda ev: play_score(ev, score)) + bpm <= play_control + bpm_control = html.INPUT(id="bpm_control", + type="range", min=30, max=300, step=1, value=120) + bpm <= bpm_control + bpm_control.bind('input', lambda ev: change_bpm(ev, score)) + bpm_value = html.SPAN(id="bpm_value") + bpm <= bpm_value -@bind('#start_loop', 'click') -def start_loop(ev): - setup() - if Sequencer.running: - return - Sequencer.read_sequence() - if not Sequencer.seq: - return - Sequencer.running = True - Sequencer.pattern = None - loop(Config.context.currentTime, 0) + score.bpm = bpm_control.value -@bind('#end_loop', 'click') -def end_loop(ev): - Sequencer.running = False + save_control = html.A("SAVE", href="#", id="save_score", + 
download=True, Class="pure-button") + save_control.bind('click', lambda ev: save_score(ev, score)) + document["score"] <= save_control -def loop(t0, i): - dt = Config.context.currentTime - t0 + return score - if not Sequencer.running: - return +@bind("#notes_per_bar button", "click") +def create_12_16(ev): + create_score(int(ev.target.text)) - while dt > Sequencer.seq[i][1] - look_ahead: - line_num, t, pattern, cell = Sequencer.seq[i] - instrument = score.instruments[line_num]() - if pattern != Sequencer.pattern: - score.show_pattern(pattern) - Sequencer.pattern = pattern - score.flash(cell) - start = t0 + t - instrument.trigger(start + 0.1) - i += 1 - if i >= len(Sequencer.seq): - i = 0 - bpm = get_bpm() - t0 = t0 + Sequencer.nb_bars * 240 / bpm # bar duration (4 quarter notes) - Sequencer.read_sequence() - break - timer.set_timeout(loop, schedule_period, t0, i) +def change_bpm(ev, score): + document["bpm_value"].text = ev.target.value + score.bpm = int(ev.target.value) + +def end_play(ev): + print('END PALY') + ev.target.html = "⏵" + +def play_score(ev, score): + if player.Sequencer.running: + ev.target.html = '⏵' + player.Sequencer.running = False + return + player.Sequencer.running = True + ev.target.html = '⏹' + score.bpm = int(document['bpm_control'].value) + seq, nb_bars = score.get_seq() + duration = nb_bars * 240 / score.bpm + timer.set_timeout(end_play, duration * 1000, ev) + player.start_loop(seq, nb_bars, score, None) diff --git a/www/gallery/music.py b/www/gallery/notes.py similarity index 100% rename from www/gallery/music.py rename to www/gallery/notes.py diff --git a/www/gallery/sort_table.html b/www/gallery/sort_table.html index 1cef21c20..f219dad69 100644 --- a/www/gallery/sort_table.html +++ b/www/gallery/sort_table.html @@ -37,6 +37,8 @@ + + @@ -58,8 +60,8 @@ - + @@ -67,6 +69,15 @@ + + + + + + + + + diff --git a/www/gallery/synth_keyboard.html b/www/gallery/synth_keyboard.html index 2e1e72c7c..bcf98a2c5 100644 --- a/www/gallery/synth_keyboard.html +++ b/www/gallery/synth_keyboard.html @@ -6,43 +6,7 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + diff --git a/www/gallery/synth_keyboard.py b/www/gallery/synth_keyboard.py index 8ca3af541..834d86fcb 100644 --- a/www/gallery/synth_keyboard.py +++ b/www/gallery/synth_keyboard.py @@ -2,7 +2,7 @@ from browser import document, window, html, console, bind, timer, alert -import music +import notes import synthesizer import widgets @@ -296,7 +296,7 @@ class Sound: def play(octave, note, time=None): global audioContext - freq = music.note_freqs[octave][note] + freq = notes.note_freqs[octave][note] if audioContext is None: audioContext = synthesizer.make_context() @@ -489,7 +489,7 @@ def noteReleased(event): def setup(): base = tone_value.text octave = int(octave_value.text) - scale = music.create_major_scale(base, octave)[:3 * 7] + scale = notes.create_major_scale(base, octave)[:3 * 7] keyboard = document.select_one(".keyboard") @@ -504,7 +504,7 @@ def setup(): keyboard.insertBefore(line := html.DIV(), keyboard.firstChild) line_num += 1 octaveElem = html.DIV(Class="octave") - octaveElem <= createKey(note, octave, music.note_freqs[octave][note]) + octaveElem <= createKey(note, octave, notes.note_freqs[octave][note]) line <= octaveElem key_mapping[keys[i]] = octave, note diff --git a/www/gallery/webcomponent.html b/www/gallery/webcomponent.html index c29be940a..544e69732 100644 --- a/www/gallery/webcomponent.html +++ b/www/gallery/webcomponent.html @@ -27,6 +27,8 @@ + + @@ -48,8 +50,8 @@ - + @@ -58,6 
+60,16 @@ + + + + + + + + + + diff --git a/www/src/Lib/interpreter.py b/www/src/Lib/interpreter.py index df2926424..3cc845443 100644 --- a/www/src/Lib/interpreter.py +++ b/www/src/Lib/interpreter.py @@ -354,7 +354,7 @@ def handle_line(self, event=None): src = self.get_content().strip() if self._status == "main": currentLine = src[src.rfind('\n>>>') + 5:] - elif self._status == "3string": + elif self._status in ["3string", "parenth_expr"]: currentLine = src[src.rfind('\n>>>') + 5:] currentLine = currentLine.replace('\n... ', '\n') else: @@ -368,11 +368,14 @@ def handle_line(self, event=None): return self.add_to_history(currentLine) self.current = len(self.history) - if self._status in ["main", "3string"]: + if self._status in ["main", "3string", "parenth_expr"]: # special case if currentLine == "help": + self.insert_cr() self.write(_help) + self.insert_cr() self.insert_prompt() + self.cursor_to_end() if event is not None: event.preventDefault() return @@ -392,14 +395,26 @@ def handle_line(self, event=None): self.insert_continuation() self._status = "parenth_expr" else: - self.insert_cr() try: code = compile(currentLine, '', 'exec') exec(code, self.globals, self.locals) + except SyntaxError as exc: + if exc.args[0].startswith('expected an indented block'): + self.insert_continuation() + self._status = "block" + else: + self.insert_cr() + self.print_tb(exc) + self.insert_prompt() except Exception as exc: + self.insert_cr() self.print_tb(msg) - self.insert_prompt() - self._status = "main" + self.insert_prompt() + self._status = "main" + else: + self.insert_cr() + self.insert_prompt() + self._status = "main" except Exception as exc: # the full traceback includes the call to eval(); to # remove it, it is stored in a buffer and the 2nd and 3rd diff --git a/www/src/Lib/pydoc_data/__init__.py b/www/src/Lib/pydoc_data/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/www/src/Lib/pydoc_data/_pydoc.css b/www/src/Lib/pydoc_data/_pydoc.css deleted file mode 100644 index f036ef37a..000000000 --- a/www/src/Lib/pydoc_data/_pydoc.css +++ /dev/null @@ -1,6 +0,0 @@ -/* - CSS file for pydoc. - - Contents of this file are subject to change without notice. - -*/ diff --git a/www/src/Lib/pydoc_data/topics.py b/www/src/Lib/pydoc_data/topics.py deleted file mode 100644 index 35668514f..000000000 --- a/www/src/Lib/pydoc_data/topics.py +++ /dev/null @@ -1,15182 +0,0 @@ -# -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Mon Aug 2 20:07:41 2021 -topics = {'assert': 'The "assert" statement\n' - '**********************\n' - '\n' - 'Assert statements are a convenient way to insert debugging ' - 'assertions\n' - 'into a program:\n' - '\n' - ' assert_stmt ::= "assert" expression ["," expression]\n' - '\n' - 'The simple form, "assert expression", is equivalent to\n' - '\n' - ' if __debug__:\n' - ' if not expression: raise AssertionError\n' - '\n' - 'The extended form, "assert expression1, expression2", is ' - 'equivalent to\n' - '\n' - ' if __debug__:\n' - ' if not expression1: raise AssertionError(expression2)\n' - '\n' - 'These equivalences assume that "__debug__" and "AssertionError" ' - 'refer\n' - 'to the built-in variables with those names. In the current\n' - 'implementation, the built-in variable "__debug__" is "True" under\n' - 'normal circumstances, "False" when optimization is requested ' - '(command\n' - 'line option "-O"). The current code generator emits no code for ' - 'an\n' - 'assert statement when optimization is requested at compile time. 
' - 'Note\n' - 'that it is unnecessary to include the source code for the ' - 'expression\n' - 'that failed in the error message; it will be displayed as part of ' - 'the\n' - 'stack trace.\n' - '\n' - 'Assignments to "__debug__" are illegal. The value for the ' - 'built-in\n' - 'variable is determined when the interpreter starts.\n', - 'assignment': 'Assignment statements\n' - '*********************\n' - '\n' - 'Assignment statements are used to (re)bind names to values and ' - 'to\n' - 'modify attributes or items of mutable objects:\n' - '\n' - ' assignment_stmt ::= (target_list "=")+ (starred_expression ' - '| yield_expression)\n' - ' target_list ::= target ("," target)* [","]\n' - ' target ::= identifier\n' - ' | "(" [target_list] ")"\n' - ' | "[" [target_list] "]"\n' - ' | attributeref\n' - ' | subscription\n' - ' | slicing\n' - ' | "*" target\n' - '\n' - '(See section Primaries for the syntax definitions for ' - '*attributeref*,\n' - '*subscription*, and *slicing*.)\n' - '\n' - 'An assignment statement evaluates the expression list ' - '(remember that\n' - 'this can be a single expression or a comma-separated list, the ' - 'latter\n' - 'yielding a tuple) and assigns the single resulting object to ' - 'each of\n' - 'the target lists, from left to right.\n' - '\n' - 'Assignment is defined recursively depending on the form of the ' - 'target\n' - '(list). When a target is part of a mutable object (an ' - 'attribute\n' - 'reference, subscription or slicing), the mutable object must\n' - 'ultimately perform the assignment and decide about its ' - 'validity, and\n' - 'may raise an exception if the assignment is unacceptable. The ' - 'rules\n' - 'observed by various types and the exceptions raised are given ' - 'with the\n' - 'definition of the object types (see section The standard type\n' - 'hierarchy).\n' - '\n' - 'Assignment of an object to a target list, optionally enclosed ' - 'in\n' - 'parentheses or square brackets, is recursively defined as ' - 'follows.\n' - '\n' - '* If the target list is a single target with no trailing ' - 'comma,\n' - ' optionally in parentheses, the object is assigned to that ' - 'target.\n' - '\n' - '* Else: The object must be an iterable with the same number of ' - 'items\n' - ' as there are targets in the target list, and the items are ' - 'assigned,\n' - ' from left to right, to the corresponding targets.\n' - '\n' - ' * If the target list contains one target prefixed with an ' - 'asterisk,\n' - ' called a “starred” target: The object must be an iterable ' - 'with at\n' - ' least as many items as there are targets in the target ' - 'list, minus\n' - ' one. The first items of the iterable are assigned, from ' - 'left to\n' - ' right, to the targets before the starred target. The ' - 'final items\n' - ' of the iterable are assigned to the targets after the ' - 'starred\n' - ' target. 
A list of the remaining items in the iterable is ' - 'then\n' - ' assigned to the starred target (the list can be empty).\n' - '\n' - ' * Else: The object must be an iterable with the same number ' - 'of items\n' - ' as there are targets in the target list, and the items ' - 'are\n' - ' assigned, from left to right, to the corresponding ' - 'targets.\n' - '\n' - 'Assignment of an object to a single target is recursively ' - 'defined as\n' - 'follows.\n' - '\n' - '* If the target is an identifier (name):\n' - '\n' - ' * If the name does not occur in a "global" or "nonlocal" ' - 'statement\n' - ' in the current code block: the name is bound to the object ' - 'in the\n' - ' current local namespace.\n' - '\n' - ' * Otherwise: the name is bound to the object in the global ' - 'namespace\n' - ' or the outer namespace determined by "nonlocal", ' - 'respectively.\n' - '\n' - ' The name is rebound if it was already bound. This may cause ' - 'the\n' - ' reference count for the object previously bound to the name ' - 'to reach\n' - ' zero, causing the object to be deallocated and its ' - 'destructor (if it\n' - ' has one) to be called.\n' - '\n' - '* If the target is an attribute reference: The primary ' - 'expression in\n' - ' the reference is evaluated. It should yield an object with\n' - ' assignable attributes; if this is not the case, "TypeError" ' - 'is\n' - ' raised. That object is then asked to assign the assigned ' - 'object to\n' - ' the given attribute; if it cannot perform the assignment, it ' - 'raises\n' - ' an exception (usually but not necessarily ' - '"AttributeError").\n' - '\n' - ' Note: If the object is a class instance and the attribute ' - 'reference\n' - ' occurs on both sides of the assignment operator, the ' - 'right-hand side\n' - ' expression, "a.x" can access either an instance attribute or ' - '(if no\n' - ' instance attribute exists) a class attribute. The left-hand ' - 'side\n' - ' target "a.x" is always set as an instance attribute, ' - 'creating it if\n' - ' necessary. Thus, the two occurrences of "a.x" do not ' - 'necessarily\n' - ' refer to the same attribute: if the right-hand side ' - 'expression\n' - ' refers to a class attribute, the left-hand side creates a ' - 'new\n' - ' instance attribute as the target of the assignment:\n' - '\n' - ' class Cls:\n' - ' x = 3 # class variable\n' - ' inst = Cls()\n' - ' inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x ' - 'as 3\n' - '\n' - ' This description does not necessarily apply to descriptor\n' - ' attributes, such as properties created with "property()".\n' - '\n' - '* If the target is a subscription: The primary expression in ' - 'the\n' - ' reference is evaluated. It should yield either a mutable ' - 'sequence\n' - ' object (such as a list) or a mapping object (such as a ' - 'dictionary).\n' - ' Next, the subscript expression is evaluated.\n' - '\n' - ' If the primary is a mutable sequence object (such as a ' - 'list), the\n' - ' subscript must yield an integer. If it is negative, the ' - 'sequence’s\n' - ' length is added to it. The resulting value must be a ' - 'nonnegative\n' - ' integer less than the sequence’s length, and the sequence is ' - 'asked\n' - ' to assign the assigned object to its item with that index. 
' - 'If the\n' - ' index is out of range, "IndexError" is raised (assignment to ' - 'a\n' - ' subscripted sequence cannot add new items to a list).\n' - '\n' - ' If the primary is a mapping object (such as a dictionary), ' - 'the\n' - ' subscript must have a type compatible with the mapping’s key ' - 'type,\n' - ' and the mapping is then asked to create a key/datum pair ' - 'which maps\n' - ' the subscript to the assigned object. This can either ' - 'replace an\n' - ' existing key/value pair with the same key value, or insert a ' - 'new\n' - ' key/value pair (if no key with the same value existed).\n' - '\n' - ' For user-defined objects, the "__setitem__()" method is ' - 'called with\n' - ' appropriate arguments.\n' - '\n' - '* If the target is a slicing: The primary expression in the ' - 'reference\n' - ' is evaluated. It should yield a mutable sequence object ' - '(such as a\n' - ' list). The assigned object should be a sequence object of ' - 'the same\n' - ' type. Next, the lower and upper bound expressions are ' - 'evaluated,\n' - ' insofar they are present; defaults are zero and the ' - 'sequence’s\n' - ' length. The bounds should evaluate to integers. If either ' - 'bound is\n' - ' negative, the sequence’s length is added to it. The ' - 'resulting\n' - ' bounds are clipped to lie between zero and the sequence’s ' - 'length,\n' - ' inclusive. Finally, the sequence object is asked to replace ' - 'the\n' - ' slice with the items of the assigned sequence. The length ' - 'of the\n' - ' slice may be different from the length of the assigned ' - 'sequence,\n' - ' thus changing the length of the target sequence, if the ' - 'target\n' - ' sequence allows it.\n' - '\n' - '**CPython implementation detail:** In the current ' - 'implementation, the\n' - 'syntax for targets is taken to be the same as for expressions, ' - 'and\n' - 'invalid syntax is rejected during the code generation phase, ' - 'causing\n' - 'less detailed error messages.\n' - '\n' - 'Although the definition of assignment implies that overlaps ' - 'between\n' - 'the left-hand side and the right-hand side are ‘simultaneous’ ' - '(for\n' - 'example "a, b = b, a" swaps two variables), overlaps *within* ' - 'the\n' - 'collection of assigned-to variables occur left-to-right, ' - 'sometimes\n' - 'resulting in confusion. 
For instance, the following program ' - 'prints\n' - '"[0, 2]":\n' - '\n' - ' x = [0, 1]\n' - ' i = 0\n' - ' i, x[i] = 1, 2 # i is updated, then x[i] is ' - 'updated\n' - ' print(x)\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3132** - Extended Iterable Unpacking\n' - ' The specification for the "*target" feature.\n' - '\n' - '\n' - 'Augmented assignment statements\n' - '===============================\n' - '\n' - 'Augmented assignment is the combination, in a single ' - 'statement, of a\n' - 'binary operation and an assignment statement:\n' - '\n' - ' augmented_assignment_stmt ::= augtarget augop ' - '(expression_list | yield_expression)\n' - ' augtarget ::= identifier | attributeref | ' - 'subscription | slicing\n' - ' augop ::= "+=" | "-=" | "*=" | "@=" | ' - '"/=" | "//=" | "%=" | "**="\n' - ' | ">>=" | "<<=" | "&=" | "^=" | "|="\n' - '\n' - '(See section Primaries for the syntax definitions of the last ' - 'three\n' - 'symbols.)\n' - '\n' - 'An augmented assignment evaluates the target (which, unlike ' - 'normal\n' - 'assignment statements, cannot be an unpacking) and the ' - 'expression\n' - 'list, performs the binary operation specific to the type of ' - 'assignment\n' - 'on the two operands, and assigns the result to the original ' - 'target.\n' - 'The target is only evaluated once.\n' - '\n' - 'An augmented assignment expression like "x += 1" can be ' - 'rewritten as\n' - '"x = x + 1" to achieve a similar, but not exactly equal ' - 'effect. In the\n' - 'augmented version, "x" is only evaluated once. Also, when ' - 'possible,\n' - 'the actual operation is performed *in-place*, meaning that ' - 'rather than\n' - 'creating a new object and assigning that to the target, the ' - 'old object\n' - 'is modified instead.\n' - '\n' - 'Unlike normal assignments, augmented assignments evaluate the ' - 'left-\n' - 'hand side *before* evaluating the right-hand side. For ' - 'example, "a[i]\n' - '+= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and ' - 'performs\n' - 'the addition, and lastly, it writes the result back to ' - '"a[i]".\n' - '\n' - 'With the exception of assigning to tuples and multiple targets ' - 'in a\n' - 'single statement, the assignment done by augmented assignment\n' - 'statements is handled the same way as normal assignments. ' - 'Similarly,\n' - 'with the exception of the possible *in-place* behavior, the ' - 'binary\n' - 'operation performed by augmented assignment is the same as the ' - 'normal\n' - 'binary operations.\n' - '\n' - 'For targets which are attribute references, the same caveat ' - 'about\n' - 'class and instance attributes applies as for regular ' - 'assignments.\n' - '\n' - '\n' - 'Annotated assignment statements\n' - '===============================\n' - '\n' - '*Annotation* assignment is the combination, in a single ' - 'statement, of\n' - 'a variable or attribute annotation and an optional assignment\n' - 'statement:\n' - '\n' - ' annotated_assignment_stmt ::= augtarget ":" expression\n' - ' ["=" (starred_expression | ' - 'yield_expression)]\n' - '\n' - 'The difference from normal Assignment statements is that only ' - 'single\n' - 'target is allowed.\n' - '\n' - 'For simple names as assignment targets, if in class or module ' - 'scope,\n' - 'the annotations are evaluated and stored in a special class or ' - 'module\n' - 'attribute "__annotations__" that is a dictionary mapping from ' - 'variable\n' - 'names (mangled if private) to evaluated annotations. 
This ' - 'attribute is\n' - 'writable and is automatically created at the start of class or ' - 'module\n' - 'body execution, if annotations are found statically.\n' - '\n' - 'For expressions as assignment targets, the annotations are ' - 'evaluated\n' - 'if in class or module scope, but not stored.\n' - '\n' - 'If a name is annotated in a function scope, then this name is ' - 'local\n' - 'for that scope. Annotations are never evaluated and stored in ' - 'function\n' - 'scopes.\n' - '\n' - 'If the right hand side is present, an annotated assignment ' - 'performs\n' - 'the actual assignment before evaluating annotations (where\n' - 'applicable). If the right hand side is not present for an ' - 'expression\n' - 'target, then the interpreter evaluates the target except for ' - 'the last\n' - '"__setitem__()" or "__setattr__()" call.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 526** - Syntax for Variable Annotations\n' - ' The proposal that added syntax for annotating the types ' - 'of\n' - ' variables (including class variables and instance ' - 'variables),\n' - ' instead of expressing them through comments.\n' - '\n' - ' **PEP 484** - Type hints\n' - ' The proposal that added the "typing" module to provide a ' - 'standard\n' - ' syntax for type annotations that can be used in static ' - 'analysis\n' - ' tools and IDEs.\n' - '\n' - 'Changed in version 3.8: Now annotated assignments allow same\n' - 'expressions in the right hand side as the regular ' - 'assignments.\n' - 'Previously, some expressions (like un-parenthesized tuple ' - 'expressions)\n' - 'caused a syntax error.\n', - 'async': 'Coroutines\n' - '**********\n' - '\n' - 'New in version 3.5.\n' - '\n' - '\n' - 'Coroutine function definition\n' - '=============================\n' - '\n' - ' async_funcdef ::= [decorators] "async" "def" funcname "(" ' - '[parameter_list] ")"\n' - ' ["->" expression] ":" suite\n' - '\n' - 'Execution of Python coroutines can be suspended and resumed at ' - 'many\n' - 'points (see *coroutine*). 
"await" expressions, "async for" and ' - '"async\n' - 'with" can only be used in the body of a coroutine function.\n' - '\n' - 'Functions defined with "async def" syntax are always coroutine\n' - 'functions, even if they do not contain "await" or "async" ' - 'keywords.\n' - '\n' - 'It is a "SyntaxError" to use a "yield from" expression inside the ' - 'body\n' - 'of a coroutine function.\n' - '\n' - 'An example of a coroutine function:\n' - '\n' - ' async def func(param1, param2):\n' - ' do_stuff()\n' - ' await some_coroutine()\n' - '\n' - 'Changed in version 3.7: "await" and "async" are now keywords;\n' - 'previously they were only treated as such inside the body of a\n' - 'coroutine function.\n' - '\n' - '\n' - 'The "async for" statement\n' - '=========================\n' - '\n' - ' async_for_stmt ::= "async" for_stmt\n' - '\n' - 'An *asynchronous iterable* provides an "__aiter__" method that\n' - 'directly returns an *asynchronous iterator*, which can call\n' - 'asynchronous code in its "__anext__" method.\n' - '\n' - 'The "async for" statement allows convenient iteration over\n' - 'asynchronous iterables.\n' - '\n' - 'The following code:\n' - '\n' - ' async for TARGET in ITER:\n' - ' SUITE\n' - ' else:\n' - ' SUITE2\n' - '\n' - 'Is semantically equivalent to:\n' - '\n' - ' iter = (ITER)\n' - ' iter = type(iter).__aiter__(iter)\n' - ' running = True\n' - '\n' - ' while running:\n' - ' try:\n' - ' TARGET = await type(iter).__anext__(iter)\n' - ' except StopAsyncIteration:\n' - ' running = False\n' - ' else:\n' - ' SUITE\n' - ' else:\n' - ' SUITE2\n' - '\n' - 'See also "__aiter__()" and "__anext__()" for details.\n' - '\n' - 'It is a "SyntaxError" to use an "async for" statement outside the ' - 'body\n' - 'of a coroutine function.\n' - '\n' - '\n' - 'The "async with" statement\n' - '==========================\n' - '\n' - ' async_with_stmt ::= "async" with_stmt\n' - '\n' - 'An *asynchronous context manager* is a *context manager* that is ' - 'able\n' - 'to suspend execution in its *enter* and *exit* methods.\n' - '\n' - 'The following code:\n' - '\n' - ' async with EXPRESSION as TARGET:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' manager = (EXPRESSION)\n' - ' aenter = type(manager).__aenter__\n' - ' aexit = type(manager).__aexit__\n' - ' value = await aenter(manager)\n' - ' hit_except = False\n' - '\n' - ' try:\n' - ' TARGET = value\n' - ' SUITE\n' - ' except:\n' - ' hit_except = True\n' - ' if not await aexit(manager, *sys.exc_info()):\n' - ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' await aexit(manager, None, None, None)\n' - '\n' - 'See also "__aenter__()" and "__aexit__()" for details.\n' - '\n' - 'It is a "SyntaxError" to use an "async with" statement outside the\n' - 'body of a coroutine function.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 492** - Coroutines with async and await syntax\n' - ' The proposal that made coroutines a proper standalone concept ' - 'in\n' - ' Python, and added supporting syntax.\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] The exception is propagated to the invocation stack unless ' - 'there\n' - ' is a "finally" clause which happens to raise another ' - 'exception.\n' - ' That new exception causes the old one to be lost.\n' - '\n' - '[2] In pattern matching, a sequence is defined as one of the\n' - ' following:\n' - '\n' - ' * a class that inherits from "collections.abc.Sequence"\n' - '\n' - ' * a Python class that has been registered as\n' - ' "collections.abc.Sequence"\n' - '\n' - ' * a builtin class that has its 
(CPython) ' - '"Py_TPFLAGS_SEQUENCE"\n' - ' bit set\n' - '\n' - ' * a class that inherits from any of the above\n' - '\n' - ' The following standard library classes are sequences:\n' - '\n' - ' * "array.array"\n' - '\n' - ' * "collections.deque"\n' - '\n' - ' * "list"\n' - '\n' - ' * "memoryview"\n' - '\n' - ' * "range"\n' - '\n' - ' * "tuple"\n' - '\n' - ' Note:\n' - '\n' - ' Subject values of type "str", "bytes", and "bytearray" do ' - 'not\n' - ' match sequence patterns.\n' - '\n' - '[3] In pattern matching, a mapping is defined as one of the ' - 'following:\n' - '\n' - ' * a class that inherits from "collections.abc.Mapping"\n' - '\n' - ' * a Python class that has been registered as\n' - ' "collections.abc.Mapping"\n' - '\n' - ' * a builtin class that has its (CPython) ' - '"Py_TPFLAGS_MAPPING"\n' - ' bit set\n' - '\n' - ' * a class that inherits from any of the above\n' - '\n' - ' The standard library classes "dict" and ' - '"types.MappingProxyType"\n' - ' are mappings.\n' - '\n' - '[4] A string literal appearing as the first statement in the ' - 'function\n' - ' body is transformed into the function’s "__doc__" attribute ' - 'and\n' - ' therefore the function’s *docstring*.\n' - '\n' - '[5] A string literal appearing as the first statement in the class\n' - ' body is transformed into the namespace’s "__doc__" item and\n' - ' therefore the class’s *docstring*.\n', - 'atom-identifiers': 'Identifiers (Names)\n' - '*******************\n' - '\n' - 'An identifier occurring as an atom is a name. See ' - 'section Identifiers\n' - 'and keywords for lexical definition and section Naming ' - 'and binding for\n' - 'documentation of naming and binding.\n' - '\n' - 'When the name is bound to an object, evaluation of the ' - 'atom yields\n' - 'that object. When a name is not bound, an attempt to ' - 'evaluate it\n' - 'raises a "NameError" exception.\n' - '\n' - '**Private name mangling:** When an identifier that ' - 'textually occurs in\n' - 'a class definition begins with two or more underscore ' - 'characters and\n' - 'does not end in two or more underscores, it is ' - 'considered a *private\n' - 'name* of that class. Private names are transformed to a ' - 'longer form\n' - 'before code is generated for them. The transformation ' - 'inserts the\n' - 'class name, with leading underscores removed and a ' - 'single underscore\n' - 'inserted, in front of the name. For example, the ' - 'identifier "__spam"\n' - 'occurring in a class named "Ham" will be transformed to ' - '"_Ham__spam".\n' - 'This transformation is independent of the syntactical ' - 'context in which\n' - 'the identifier is used. If the transformed name is ' - 'extremely long\n' - '(longer than 255 characters), implementation defined ' - 'truncation may\n' - 'happen. If the class name consists only of underscores, ' - 'no\n' - 'transformation is done.\n', - 'atom-literals': 'Literals\n' - '********\n' - '\n' - 'Python supports string and bytes literals and various ' - 'numeric\n' - 'literals:\n' - '\n' - ' literal ::= stringliteral | bytesliteral\n' - ' | integer | floatnumber | imagnumber\n' - '\n' - 'Evaluation of a literal yields an object of the given type ' - '(string,\n' - 'bytes, integer, floating point number, complex number) with ' - 'the given\n' - 'value. The value may be approximated in the case of ' - 'floating point\n' - 'and imaginary (complex) literals. 
See section Literals for ' - 'details.\n' - '\n' - 'All literals correspond to immutable data types, and hence ' - 'the\n' - 'object’s identity is less important than its value. ' - 'Multiple\n' - 'evaluations of literals with the same value (either the ' - 'same\n' - 'occurrence in the program text or a different occurrence) ' - 'may obtain\n' - 'the same object or a different object with the same ' - 'value.\n', - 'attribute-access': 'Customizing attribute access\n' - '****************************\n' - '\n' - 'The following methods can be defined to customize the ' - 'meaning of\n' - 'attribute access (use of, assignment to, or deletion of ' - '"x.name") for\n' - 'class instances.\n' - '\n' - 'object.__getattr__(self, name)\n' - '\n' - ' Called when the default attribute access fails with ' - 'an\n' - ' "AttributeError" (either "__getattribute__()" raises ' - 'an\n' - ' "AttributeError" because *name* is not an instance ' - 'attribute or an\n' - ' attribute in the class tree for "self"; or ' - '"__get__()" of a *name*\n' - ' property raises "AttributeError"). This method ' - 'should either\n' - ' return the (computed) attribute value or raise an ' - '"AttributeError"\n' - ' exception.\n' - '\n' - ' Note that if the attribute is found through the ' - 'normal mechanism,\n' - ' "__getattr__()" is not called. (This is an ' - 'intentional asymmetry\n' - ' between "__getattr__()" and "__setattr__()".) This is ' - 'done both for\n' - ' efficiency reasons and because otherwise ' - '"__getattr__()" would have\n' - ' no way to access other attributes of the instance. ' - 'Note that at\n' - ' least for instance variables, you can fake total ' - 'control by not\n' - ' inserting any values in the instance attribute ' - 'dictionary (but\n' - ' instead inserting them in another object). See the\n' - ' "__getattribute__()" method below for a way to ' - 'actually get total\n' - ' control over attribute access.\n' - '\n' - 'object.__getattribute__(self, name)\n' - '\n' - ' Called unconditionally to implement attribute ' - 'accesses for\n' - ' instances of the class. If the class also defines ' - '"__getattr__()",\n' - ' the latter will not be called unless ' - '"__getattribute__()" either\n' - ' calls it explicitly or raises an "AttributeError". ' - 'This method\n' - ' should return the (computed) attribute value or raise ' - 'an\n' - ' "AttributeError" exception. In order to avoid ' - 'infinite recursion in\n' - ' this method, its implementation should always call ' - 'the base class\n' - ' method with the same name to access any attributes it ' - 'needs, for\n' - ' example, "object.__getattribute__(self, name)".\n' - '\n' - ' Note:\n' - '\n' - ' This method may still be bypassed when looking up ' - 'special methods\n' - ' as the result of implicit invocation via language ' - 'syntax or\n' - ' built-in functions. See Special method lookup.\n' - '\n' - ' For certain sensitive attribute accesses, raises an ' - 'auditing event\n' - ' "object.__getattr__" with arguments "obj" and ' - '"name".\n' - '\n' - 'object.__setattr__(self, name, value)\n' - '\n' - ' Called when an attribute assignment is attempted. ' - 'This is called\n' - ' instead of the normal mechanism (i.e. store the value ' - 'in the\n' - ' instance dictionary). 
*name* is the attribute name, ' - '*value* is the\n' - ' value to be assigned to it.\n' - '\n' - ' If "__setattr__()" wants to assign to an instance ' - 'attribute, it\n' - ' should call the base class method with the same name, ' - 'for example,\n' - ' "object.__setattr__(self, name, value)".\n' - '\n' - ' For certain sensitive attribute assignments, raises ' - 'an auditing\n' - ' event "object.__setattr__" with arguments "obj", ' - '"name", "value".\n' - '\n' - 'object.__delattr__(self, name)\n' - '\n' - ' Like "__setattr__()" but for attribute deletion ' - 'instead of\n' - ' assignment. This should only be implemented if "del ' - 'obj.name" is\n' - ' meaningful for the object.\n' - '\n' - ' For certain sensitive attribute deletions, raises an ' - 'auditing event\n' - ' "object.__delattr__" with arguments "obj" and ' - '"name".\n' - '\n' - 'object.__dir__(self)\n' - '\n' - ' Called when "dir()" is called on the object. A ' - 'sequence must be\n' - ' returned. "dir()" converts the returned sequence to a ' - 'list and\n' - ' sorts it.\n' - '\n' - '\n' - 'Customizing module attribute access\n' - '===================================\n' - '\n' - 'Special names "__getattr__" and "__dir__" can be also ' - 'used to\n' - 'customize access to module attributes. The "__getattr__" ' - 'function at\n' - 'the module level should accept one argument which is the ' - 'name of an\n' - 'attribute and return the computed value or raise an ' - '"AttributeError".\n' - 'If an attribute is not found on a module object through ' - 'the normal\n' - 'lookup, i.e. "object.__getattribute__()", then ' - '"__getattr__" is\n' - 'searched in the module "__dict__" before raising an ' - '"AttributeError".\n' - 'If found, it is called with the attribute name and the ' - 'result is\n' - 'returned.\n' - '\n' - 'The "__dir__" function should accept no arguments, and ' - 'return a\n' - 'sequence of strings that represents the names accessible ' - 'on module. If\n' - 'present, this function overrides the standard "dir()" ' - 'search on a\n' - 'module.\n' - '\n' - 'For a more fine grained customization of the module ' - 'behavior (setting\n' - 'attributes, properties, etc.), one can set the ' - '"__class__" attribute\n' - 'of a module object to a subclass of "types.ModuleType". 
' - 'For example:\n' - '\n' - ' import sys\n' - ' from types import ModuleType\n' - '\n' - ' class VerboseModule(ModuleType):\n' - ' def __repr__(self):\n' - " return f'Verbose {self.__name__}'\n" - '\n' - ' def __setattr__(self, attr, value):\n' - " print(f'Setting {attr}...')\n" - ' super().__setattr__(attr, value)\n' - '\n' - ' sys.modules[__name__].__class__ = VerboseModule\n' - '\n' - 'Note:\n' - '\n' - ' Defining module "__getattr__" and setting module ' - '"__class__" only\n' - ' affect lookups made using the attribute access syntax ' - '– directly\n' - ' accessing the module globals (whether by code within ' - 'the module, or\n' - ' via a reference to the module’s globals dictionary) is ' - 'unaffected.\n' - '\n' - 'Changed in version 3.5: "__class__" module attribute is ' - 'now writable.\n' - '\n' - 'New in version 3.7: "__getattr__" and "__dir__" module ' - 'attributes.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 562** - Module __getattr__ and __dir__\n' - ' Describes the "__getattr__" and "__dir__" functions ' - 'on modules.\n' - '\n' - '\n' - 'Implementing Descriptors\n' - '========================\n' - '\n' - 'The following methods only apply when an instance of the ' - 'class\n' - 'containing the method (a so-called *descriptor* class) ' - 'appears in an\n' - '*owner* class (the descriptor must be in either the ' - 'owner’s class\n' - 'dictionary or in the class dictionary for one of its ' - 'parents). In the\n' - 'examples below, “the attribute” refers to the attribute ' - 'whose name is\n' - 'the key of the property in the owner class’ "__dict__".\n' - '\n' - 'object.__get__(self, instance, owner=None)\n' - '\n' - ' Called to get the attribute of the owner class (class ' - 'attribute\n' - ' access) or of an instance of that class (instance ' - 'attribute\n' - ' access). The optional *owner* argument is the owner ' - 'class, while\n' - ' *instance* is the instance that the attribute was ' - 'accessed through,\n' - ' or "None" when the attribute is accessed through the ' - '*owner*.\n' - '\n' - ' This method should return the computed attribute ' - 'value or raise an\n' - ' "AttributeError" exception.\n' - '\n' - ' **PEP 252** specifies that "__get__()" is callable ' - 'with one or two\n' - ' arguments. Python’s own built-in descriptors support ' - 'this\n' - ' specification; however, it is likely that some ' - 'third-party tools\n' - ' have descriptors that require both arguments. ' - 'Python’s own\n' - ' "__getattribute__()" implementation always passes in ' - 'both arguments\n' - ' whether they are required or not.\n' - '\n' - 'object.__set__(self, instance, value)\n' - '\n' - ' Called to set the attribute on an instance *instance* ' - 'of the owner\n' - ' class to a new value, *value*.\n' - '\n' - ' Note, adding "__set__()" or "__delete__()" changes ' - 'the kind of\n' - ' descriptor to a “data descriptor”. See Invoking ' - 'Descriptors for\n' - ' more details.\n' - '\n' - 'object.__delete__(self, instance)\n' - '\n' - ' Called to delete the attribute on an instance ' - '*instance* of the\n' - ' owner class.\n' - '\n' - 'object.__set_name__(self, owner, name)\n' - '\n' - ' Called at the time the owning class *owner* is ' - 'created. 
The\n' - ' descriptor has been assigned to *name*.\n' - '\n' - ' Note:\n' - '\n' - ' "__set_name__()" is only called implicitly as part ' - 'of the "type"\n' - ' constructor, so it will need to be called ' - 'explicitly with the\n' - ' appropriate parameters when a descriptor is added ' - 'to a class\n' - ' after initial creation:\n' - '\n' - ' class A:\n' - ' pass\n' - ' descr = custom_descriptor()\n' - ' A.attr = descr\n' - " descr.__set_name__(A, 'attr')\n" - '\n' - ' See Creating the class object for more details.\n' - '\n' - ' New in version 3.6.\n' - '\n' - 'The attribute "__objclass__" is interpreted by the ' - '"inspect" module as\n' - 'specifying the class where this object was defined ' - '(setting this\n' - 'appropriately can assist in runtime introspection of ' - 'dynamic class\n' - 'attributes). For callables, it may indicate that an ' - 'instance of the\n' - 'given type (or a subclass) is expected or required as ' - 'the first\n' - 'positional argument (for example, CPython sets this ' - 'attribute for\n' - 'unbound methods that are implemented in C).\n' - '\n' - '\n' - 'Invoking Descriptors\n' - '====================\n' - '\n' - 'In general, a descriptor is an object attribute with ' - '“binding\n' - 'behavior”, one whose attribute access has been ' - 'overridden by methods\n' - 'in the descriptor protocol: "__get__()", "__set__()", ' - 'and\n' - '"__delete__()". If any of those methods are defined for ' - 'an object, it\n' - 'is said to be a descriptor.\n' - '\n' - 'The default behavior for attribute access is to get, ' - 'set, or delete\n' - 'the attribute from an object’s dictionary. For instance, ' - '"a.x" has a\n' - 'lookup chain starting with "a.__dict__[\'x\']", then\n' - '"type(a).__dict__[\'x\']", and continuing through the ' - 'base classes of\n' - '"type(a)" excluding metaclasses.\n' - '\n' - 'However, if the looked-up value is an object defining ' - 'one of the\n' - 'descriptor methods, then Python may override the default ' - 'behavior and\n' - 'invoke the descriptor method instead. Where this occurs ' - 'in the\n' - 'precedence chain depends on which descriptor methods ' - 'were defined and\n' - 'how they were called.\n' - '\n' - 'The starting point for descriptor invocation is a ' - 'binding, "a.x". How\n' - 'the arguments are assembled depends on "a":\n' - '\n' - 'Direct Call\n' - ' The simplest and least common call is when user code ' - 'directly\n' - ' invokes a descriptor method: "x.__get__(a)".\n' - '\n' - 'Instance Binding\n' - ' If binding to an object instance, "a.x" is ' - 'transformed into the\n' - ' call: "type(a).__dict__[\'x\'].__get__(a, type(a))".\n' - '\n' - 'Class Binding\n' - ' If binding to a class, "A.x" is transformed into the ' - 'call:\n' - ' "A.__dict__[\'x\'].__get__(None, A)".\n' - '\n' - 'Super Binding\n' - ' If "a" is an instance of "super", then the binding ' - '"super(B,\n' - ' obj).m()" searches "obj.__class__.__mro__" for the ' - 'base class "A"\n' - ' immediately preceding "B" and then invokes the ' - 'descriptor with the\n' - ' call: "A.__dict__[\'m\'].__get__(obj, ' - 'obj.__class__)".\n' - '\n' - 'For instance bindings, the precedence of descriptor ' - 'invocation depends\n' - 'on which descriptor methods are defined. A descriptor ' - 'can define any\n' - 'combination of "__get__()", "__set__()" and ' - '"__delete__()". If it\n' - 'does not define "__get__()", then accessing the ' - 'attribute will return\n' - 'the descriptor object itself unless there is a value in ' - 'the object’s\n' - 'instance dictionary. 
If the descriptor defines ' - '"__set__()" and/or\n' - '"__delete__()", it is a data descriptor; if it defines ' - 'neither, it is\n' - 'a non-data descriptor. Normally, data descriptors ' - 'define both\n' - '"__get__()" and "__set__()", while non-data descriptors ' - 'have just the\n' - '"__get__()" method. Data descriptors with "__get__()" ' - 'and "__set__()"\n' - '(and/or "__delete__()") defined always override a ' - 'redefinition in an\n' - 'instance dictionary. In contrast, non-data descriptors ' - 'can be\n' - 'overridden by instances.\n' - '\n' - 'Python methods (including "staticmethod()" and ' - '"classmethod()") are\n' - 'implemented as non-data descriptors. Accordingly, ' - 'instances can\n' - 'redefine and override methods. This allows individual ' - 'instances to\n' - 'acquire behaviors that differ from other instances of ' - 'the same class.\n' - '\n' - 'The "property()" function is implemented as a data ' - 'descriptor.\n' - 'Accordingly, instances cannot override the behavior of a ' - 'property.\n' - '\n' - '\n' - '__slots__\n' - '=========\n' - '\n' - '*__slots__* allow us to explicitly declare data members ' - '(like\n' - 'properties) and deny the creation of *__dict__* and ' - '*__weakref__*\n' - '(unless explicitly declared in *__slots__* or available ' - 'in a parent.)\n' - '\n' - 'The space saved over using *__dict__* can be ' - 'significant. Attribute\n' - 'lookup speed can be significantly improved as well.\n' - '\n' - 'object.__slots__\n' - '\n' - ' This class variable can be assigned a string, ' - 'iterable, or sequence\n' - ' of strings with variable names used by instances. ' - '*__slots__*\n' - ' reserves space for the declared variables and ' - 'prevents the\n' - ' automatic creation of *__dict__* and *__weakref__* ' - 'for each\n' - ' instance.\n' - '\n' - '\n' - 'Notes on using *__slots__*\n' - '--------------------------\n' - '\n' - '* When inheriting from a class without *__slots__*, the ' - '*__dict__* and\n' - ' *__weakref__* attribute of the instances will always ' - 'be accessible.\n' - '\n' - '* Without a *__dict__* variable, instances cannot be ' - 'assigned new\n' - ' variables not listed in the *__slots__* definition. ' - 'Attempts to\n' - ' assign to an unlisted variable name raises ' - '"AttributeError". If\n' - ' dynamic assignment of new variables is desired, then ' - 'add\n' - ' "\'__dict__\'" to the sequence of strings in the ' - '*__slots__*\n' - ' declaration.\n' - '\n' - '* Without a *__weakref__* variable for each instance, ' - 'classes defining\n' - ' *__slots__* do not support weak references to its ' - 'instances. If weak\n' - ' reference support is needed, then add ' - '"\'__weakref__\'" to the\n' - ' sequence of strings in the *__slots__* declaration.\n' - '\n' - '* *__slots__* are implemented at the class level by ' - 'creating\n' - ' descriptors (Implementing Descriptors) for each ' - 'variable name. As a\n' - ' result, class attributes cannot be used to set default ' - 'values for\n' - ' instance variables defined by *__slots__*; otherwise, ' - 'the class\n' - ' attribute would overwrite the descriptor assignment.\n' - '\n' - '* The action of a *__slots__* declaration is not limited ' - 'to the class\n' - ' where it is defined. *__slots__* declared in parents ' - 'are available\n' - ' in child classes. 
However, child subclasses will get a ' - '*__dict__*\n' - ' and *__weakref__* unless they also define *__slots__* ' - '(which should\n' - ' only contain names of any *additional* slots).\n' - '\n' - '* If a class defines a slot also defined in a base ' - 'class, the instance\n' - ' variable defined by the base class slot is ' - 'inaccessible (except by\n' - ' retrieving its descriptor directly from the base ' - 'class). This\n' - ' renders the meaning of the program undefined. In the ' - 'future, a\n' - ' check may be added to prevent this.\n' - '\n' - '* Nonempty *__slots__* does not work for classes derived ' - 'from\n' - ' “variable-length” built-in types such as "int", ' - '"bytes" and "tuple".\n' - '\n' - '* Any non-string iterable may be assigned to ' - '*__slots__*. Mappings may\n' - ' also be used; however, in the future, special meaning ' - 'may be\n' - ' assigned to the values corresponding to each key.\n' - '\n' - '* *__class__* assignment works only if both classes have ' - 'the same\n' - ' *__slots__*.\n' - '\n' - '* Multiple inheritance with multiple slotted parent ' - 'classes can be\n' - ' used, but only one parent is allowed to have ' - 'attributes created by\n' - ' slots (the other bases must have empty slot layouts) - ' - 'violations\n' - ' raise "TypeError".\n' - '\n' - '* If an iterator is used for *__slots__* then a ' - 'descriptor is created\n' - ' for each of the iterator’s values. However, the ' - '*__slots__*\n' - ' attribute will be an empty iterator.\n', - 'attribute-references': 'Attribute references\n' - '********************\n' - '\n' - 'An attribute reference is a primary followed by a ' - 'period and a name:\n' - '\n' - ' attributeref ::= primary "." identifier\n' - '\n' - 'The primary must evaluate to an object of a type ' - 'that supports\n' - 'attribute references, which most objects do. This ' - 'object is then\n' - 'asked to produce the attribute whose name is the ' - 'identifier. This\n' - 'production can be customized by overriding the ' - '"__getattr__()" method.\n' - 'If this attribute is not available, the exception ' - '"AttributeError" is\n' - 'raised. Otherwise, the type and value of the object ' - 'produced is\n' - 'determined by the object. Multiple evaluations of ' - 'the same attribute\n' - 'reference may yield different objects.\n', - 'augassign': 'Augmented assignment statements\n' - '*******************************\n' - '\n' - 'Augmented assignment is the combination, in a single statement, ' - 'of a\n' - 'binary operation and an assignment statement:\n' - '\n' - ' augmented_assignment_stmt ::= augtarget augop ' - '(expression_list | yield_expression)\n' - ' augtarget ::= identifier | attributeref | ' - 'subscription | slicing\n' - ' augop ::= "+=" | "-=" | "*=" | "@=" | ' - '"/=" | "//=" | "%=" | "**="\n' - ' | ">>=" | "<<=" | "&=" | "^=" | "|="\n' - '\n' - '(See section Primaries for the syntax definitions of the last ' - 'three\n' - 'symbols.)\n' - '\n' - 'An augmented assignment evaluates the target (which, unlike ' - 'normal\n' - 'assignment statements, cannot be an unpacking) and the ' - 'expression\n' - 'list, performs the binary operation specific to the type of ' - 'assignment\n' - 'on the two operands, and assigns the result to the original ' - 'target.\n' - 'The target is only evaluated once.\n' - '\n' - 'An augmented assignment expression like "x += 1" can be ' - 'rewritten as\n' - '"x = x + 1" to achieve a similar, but not exactly equal effect. ' - 'In the\n' - 'augmented version, "x" is only evaluated once. 
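The in-place behavior of augmented assignment mentioned above can be observed directly; a small sketch:

```python
a = [1, 2]
b = a
a += [3]             # lists implement __iadd__, so "a" is modified in place
print(b, a is b)     # [1, 2, 3] True -- "b" sees the change

c = (1, 2)
d = c
c += (3,)            # tuples are immutable: a new object is bound to "c"
print(d, c is d)     # (1, 2) False
```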
Also, when ' - 'possible,\n' - 'the actual operation is performed *in-place*, meaning that ' - 'rather than\n' - 'creating a new object and assigning that to the target, the old ' - 'object\n' - 'is modified instead.\n' - '\n' - 'Unlike normal assignments, augmented assignments evaluate the ' - 'left-\n' - 'hand side *before* evaluating the right-hand side. For ' - 'example, "a[i]\n' - '+= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and ' - 'performs\n' - 'the addition, and lastly, it writes the result back to "a[i]".\n' - '\n' - 'With the exception of assigning to tuples and multiple targets ' - 'in a\n' - 'single statement, the assignment done by augmented assignment\n' - 'statements is handled the same way as normal assignments. ' - 'Similarly,\n' - 'with the exception of the possible *in-place* behavior, the ' - 'binary\n' - 'operation performed by augmented assignment is the same as the ' - 'normal\n' - 'binary operations.\n' - '\n' - 'For targets which are attribute references, the same caveat ' - 'about\n' - 'class and instance attributes applies as for regular ' - 'assignments.\n', - 'await': 'Await expression\n' - '****************\n' - '\n' - 'Suspend the execution of *coroutine* on an *awaitable* object. Can\n' - 'only be used inside a *coroutine function*.\n' - '\n' - ' await_expr ::= "await" primary\n' - '\n' - 'New in version 3.5.\n', - 'binary': 'Binary arithmetic operations\n' - '****************************\n' - '\n' - 'The binary arithmetic operations have the conventional priority\n' - 'levels. Note that some of these operations also apply to certain ' - 'non-\n' - 'numeric types. Apart from the power operator, there are only two\n' - 'levels, one for multiplicative operators and one for additive\n' - 'operators:\n' - '\n' - ' m_expr ::= u_expr | m_expr "*" u_expr | m_expr "@" m_expr |\n' - ' m_expr "//" u_expr | m_expr "/" u_expr |\n' - ' m_expr "%" u_expr\n' - ' a_expr ::= m_expr | a_expr "+" m_expr | a_expr "-" m_expr\n' - '\n' - 'The "*" (multiplication) operator yields the product of its ' - 'arguments.\n' - 'The arguments must either both be numbers, or one argument must be ' - 'an\n' - 'integer and the other must be a sequence. In the former case, the\n' - 'numbers are converted to a common type and then multiplied ' - 'together.\n' - 'In the latter case, sequence repetition is performed; a negative\n' - 'repetition factor yields an empty sequence.\n' - '\n' - 'This operation can be customized using the special "__mul__()" ' - 'and\n' - '"__rmul__()" methods.\n' - '\n' - 'The "@" (at) operator is intended to be used for matrix\n' - 'multiplication. No builtin Python types implement this operator.\n' - '\n' - 'New in version 3.5.\n' - '\n' - 'The "/" (division) and "//" (floor division) operators yield the\n' - 'quotient of their arguments. The numeric arguments are first\n' - 'converted to a common type. Division of integers yields a float, ' - 'while\n' - 'floor division of integers results in an integer; the result is ' - 'that\n' - 'of mathematical division with the ‘floor’ function applied to the\n' - 'result. Division by zero raises the "ZeroDivisionError" ' - 'exception.\n' - '\n' - 'This operation can be customized using the special "__div__()" ' - 'and\n' - '"__floordiv__()" methods.\n' - '\n' - 'The "%" (modulo) operator yields the remainder from the division ' - 'of\n' - 'the first argument by the second. The numeric arguments are ' - 'first\n' - 'converted to a common type. A zero right argument raises the\n' - '"ZeroDivisionError" exception. 
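For illustration, a few sample results of the division and modulo rules just described:

```python
print(7 / 2)       # 3.5 -- true division of integers yields a float
                   #        (customized via __truediv__ in Python 3)
print(7 // 2)      # 3   -- floor division yields an integer
print(-7 // 2)     # -4  -- floored, not truncated toward zero
print(-7 % 3)      # 2   -- the result takes the sign of the second operand
print(3.14 % 0.7)  # approximately 0.34
```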
The arguments may be floating ' - 'point\n' - 'numbers, e.g., "3.14%0.7" equals "0.34" (since "3.14" equals ' - '"4*0.7 +\n' - '0.34".) The modulo operator always yields a result with the same ' - 'sign\n' - 'as its second operand (or zero); the absolute value of the result ' - 'is\n' - 'strictly smaller than the absolute value of the second operand ' - '[1].\n' - '\n' - 'The floor division and modulo operators are connected by the ' - 'following\n' - 'identity: "x == (x//y)*y + (x%y)". Floor division and modulo are ' - 'also\n' - 'connected with the built-in function "divmod()": "divmod(x, y) ==\n' - '(x//y, x%y)". [2].\n' - '\n' - 'In addition to performing the modulo operation on numbers, the ' - '"%"\n' - 'operator is also overloaded by string objects to perform ' - 'old-style\n' - 'string formatting (also known as interpolation). The syntax for\n' - 'string formatting is described in the Python Library Reference,\n' - 'section printf-style String Formatting.\n' - '\n' - 'The *modulo* operation can be customized using the special ' - '"__mod__()"\n' - 'method.\n' - '\n' - 'The floor division operator, the modulo operator, and the ' - '"divmod()"\n' - 'function are not defined for complex numbers. Instead, convert to ' - 'a\n' - 'floating point number using the "abs()" function if appropriate.\n' - '\n' - 'The "+" (addition) operator yields the sum of its arguments. The\n' - 'arguments must either both be numbers or both be sequences of the ' - 'same\n' - 'type. In the former case, the numbers are converted to a common ' - 'type\n' - 'and then added together. In the latter case, the sequences are\n' - 'concatenated.\n' - '\n' - 'This operation can be customized using the special "__add__()" ' - 'and\n' - '"__radd__()" methods.\n' - '\n' - 'The "-" (subtraction) operator yields the difference of its ' - 'arguments.\n' - 'The numeric arguments are first converted to a common type.\n' - '\n' - 'This operation can be customized using the special "__sub__()" ' - 'method.\n', - 'bitwise': 'Binary bitwise operations\n' - '*************************\n' - '\n' - 'Each of the three bitwise operations has a different priority ' - 'level:\n' - '\n' - ' and_expr ::= shift_expr | and_expr "&" shift_expr\n' - ' xor_expr ::= and_expr | xor_expr "^" and_expr\n' - ' or_expr ::= xor_expr | or_expr "|" xor_expr\n' - '\n' - 'The "&" operator yields the bitwise AND of its arguments, which ' - 'must\n' - 'be integers or one of them must be a custom object overriding\n' - '"__and__()" or "__rand__()" special methods.\n' - '\n' - 'The "^" operator yields the bitwise XOR (exclusive OR) of its\n' - 'arguments, which must be integers or one of them must be a ' - 'custom\n' - 'object overriding "__xor__()" or "__rxor__()" special methods.\n' - '\n' - 'The "|" operator yields the bitwise (inclusive) OR of its ' - 'arguments,\n' - 'which must be integers or one of them must be a custom object\n' - 'overriding "__or__()" or "__ror__()" special methods.\n', - 'bltin-code-objects': 'Code Objects\n' - '************\n' - '\n' - 'Code objects are used by the implementation to ' - 'represent “pseudo-\n' - 'compiled” executable Python code such as a function ' - 'body. They differ\n' - 'from function objects because they don’t contain a ' - 'reference to their\n' - 'global execution environment. Code objects are ' - 'returned by the built-\n' - 'in "compile()" function and can be extracted from ' - 'function objects\n' - 'through their "__code__" attribute. 
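The identity relating floor division, modulo and `divmod()` can be checked on a few sample values; this is only a sketch, and the float case is compared with a small tolerance because of rounding:

```python
for x, y in [(17, 5), (-17, 5), (17, -5), (3.14, 0.7)]:
    q, r = divmod(x, y)
    assert q == x // y
    assert abs(r - x % y) < 1e-12
    assert abs((x // y) * y + (x % y) - x) < 1e-12   # x == (x//y)*y + (x%y)
print("the identity holds for the samples above")
```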
See also the ' - '"code" module.\n' - '\n' - 'Accessing "__code__" raises an auditing event ' - '"object.__getattr__"\n' - 'with arguments "obj" and ""__code__"".\n' - '\n' - 'A code object can be executed or evaluated by passing ' - 'it (instead of a\n' - 'source string) to the "exec()" or "eval()" built-in ' - 'functions.\n' - '\n' - 'See The standard type hierarchy for more ' - 'information.\n', - 'bltin-ellipsis-object': 'The Ellipsis Object\n' - '*******************\n' - '\n' - 'This object is commonly used by slicing (see ' - 'Slicings). It supports\n' - 'no special operations. There is exactly one ' - 'ellipsis object, named\n' - '"Ellipsis" (a built-in name). "type(Ellipsis)()" ' - 'produces the\n' - '"Ellipsis" singleton.\n' - '\n' - 'It is written as "Ellipsis" or "...".\n', - 'bltin-null-object': 'The Null Object\n' - '***************\n' - '\n' - 'This object is returned by functions that don’t ' - 'explicitly return a\n' - 'value. It supports no special operations. There is ' - 'exactly one null\n' - 'object, named "None" (a built-in name). "type(None)()" ' - 'produces the\n' - 'same singleton.\n' - '\n' - 'It is written as "None".\n', - 'bltin-type-objects': 'Type Objects\n' - '************\n' - '\n' - 'Type objects represent the various object types. An ' - 'object’s type is\n' - 'accessed by the built-in function "type()". There are ' - 'no special\n' - 'operations on types. The standard module "types" ' - 'defines names for\n' - 'all standard built-in types.\n' - '\n' - 'Types are written like this: "".\n', - 'booleans': 'Boolean operations\n' - '******************\n' - '\n' - ' or_test ::= and_test | or_test "or" and_test\n' - ' and_test ::= not_test | and_test "and" not_test\n' - ' not_test ::= comparison | "not" not_test\n' - '\n' - 'In the context of Boolean operations, and also when expressions ' - 'are\n' - 'used by control flow statements, the following values are ' - 'interpreted\n' - 'as false: "False", "None", numeric zero of all types, and empty\n' - 'strings and containers (including strings, tuples, lists,\n' - 'dictionaries, sets and frozensets). All other values are ' - 'interpreted\n' - 'as true. User-defined objects can customize their truth value ' - 'by\n' - 'providing a "__bool__()" method.\n' - '\n' - 'The operator "not" yields "True" if its argument is false, ' - '"False"\n' - 'otherwise.\n' - '\n' - 'The expression "x and y" first evaluates *x*; if *x* is false, ' - 'its\n' - 'value is returned; otherwise, *y* is evaluated and the resulting ' - 'value\n' - 'is returned.\n' - '\n' - 'The expression "x or y" first evaluates *x*; if *x* is true, its ' - 'value\n' - 'is returned; otherwise, *y* is evaluated and the resulting value ' - 'is\n' - 'returned.\n' - '\n' - 'Note that neither "and" nor "or" restrict the value and type ' - 'they\n' - 'return to "False" and "True", but rather return the last ' - 'evaluated\n' - 'argument. This is sometimes useful, e.g., if "s" is a string ' - 'that\n' - 'should be replaced by a default value if it is empty, the ' - 'expression\n' - '"s or \'foo\'" yields the desired value. 
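A small sketch of the short-circuiting behavior described above:

```python
s = ""
print(s or "default")      # 'default' -- "or" returns the last evaluated operand
print(0 and "ignored")     # 0         -- "and" stops at the first false operand
print(not "foo")           # False     -- "not" always returns a bool
```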
Because "not" has to ' - 'create a\n' - 'new value, it returns a boolean value regardless of the type of ' - 'its\n' - 'argument (for example, "not \'foo\'" produces "False" rather ' - 'than "\'\'".)\n', - 'break': 'The "break" statement\n' - '*********************\n' - '\n' - ' break_stmt ::= "break"\n' - '\n' - '"break" may only occur syntactically nested in a "for" or "while"\n' - 'loop, but not nested in a function or class definition within that\n' - 'loop.\n' - '\n' - 'It terminates the nearest enclosing loop, skipping the optional ' - '"else"\n' - 'clause if the loop has one.\n' - '\n' - 'If a "for" loop is terminated by "break", the loop control target\n' - 'keeps its current value.\n' - '\n' - 'When "break" passes control out of a "try" statement with a ' - '"finally"\n' - 'clause, that "finally" clause is executed before really leaving ' - 'the\n' - 'loop.\n', - 'callable-types': 'Emulating callable objects\n' - '**************************\n' - '\n' - 'object.__call__(self[, args...])\n' - '\n' - ' Called when the instance is “called” as a function; if ' - 'this method\n' - ' is defined, "x(arg1, arg2, ...)" roughly translates to\n' - ' "type(x).__call__(x, arg1, ...)".\n', - 'calls': 'Calls\n' - '*****\n' - '\n' - 'A call calls a callable object (e.g., a *function*) with a ' - 'possibly\n' - 'empty series of *arguments*:\n' - '\n' - ' call ::= primary "(" [argument_list [","] | ' - 'comprehension] ")"\n' - ' argument_list ::= positional_arguments ["," ' - 'starred_and_keywords]\n' - ' ["," keywords_arguments]\n' - ' | starred_and_keywords ["," ' - 'keywords_arguments]\n' - ' | keywords_arguments\n' - ' positional_arguments ::= positional_item ("," positional_item)*\n' - ' positional_item ::= assignment_expression | "*" expression\n' - ' starred_and_keywords ::= ("*" expression | keyword_item)\n' - ' ("," "*" expression | "," ' - 'keyword_item)*\n' - ' keywords_arguments ::= (keyword_item | "**" expression)\n' - ' ("," keyword_item | "," "**" ' - 'expression)*\n' - ' keyword_item ::= identifier "=" expression\n' - '\n' - 'An optional trailing comma may be present after the positional and\n' - 'keyword arguments but does not affect the semantics.\n' - '\n' - 'The primary must evaluate to a callable object (user-defined\n' - 'functions, built-in functions, methods of built-in objects, class\n' - 'objects, methods of class instances, and all objects having a\n' - '"__call__()" method are callable). All argument expressions are\n' - 'evaluated before the call is attempted. Please refer to section\n' - 'Function definitions for the syntax of formal *parameter* lists.\n' - '\n' - 'If keyword arguments are present, they are first converted to\n' - 'positional arguments, as follows. First, a list of unfilled slots ' - 'is\n' - 'created for the formal parameters. If there are N positional\n' - 'arguments, they are placed in the first N slots. Next, for each\n' - 'keyword argument, the identifier is used to determine the\n' - 'corresponding slot (if the identifier is the same as the first ' - 'formal\n' - 'parameter name, the first slot is used, and so on). If the slot ' - 'is\n' - 'already filled, a "TypeError" exception is raised. Otherwise, the\n' - 'value of the argument is placed in the slot, filling it (even if ' - 'the\n' - 'expression is "None", it fills the slot). When all arguments have\n' - 'been processed, the slots that are still unfilled are filled with ' - 'the\n' - 'corresponding default value from the function definition. 
' - '(Default\n' - 'values are calculated, once, when the function is defined; thus, a\n' - 'mutable object such as a list or dictionary used as default value ' - 'will\n' - 'be shared by all calls that don’t specify an argument value for ' - 'the\n' - 'corresponding slot; this should usually be avoided.) If there are ' - 'any\n' - 'unfilled slots for which no default value is specified, a ' - '"TypeError"\n' - 'exception is raised. Otherwise, the list of filled slots is used ' - 'as\n' - 'the argument list for the call.\n' - '\n' - '**CPython implementation detail:** An implementation may provide\n' - 'built-in functions whose positional parameters do not have names, ' - 'even\n' - 'if they are ‘named’ for the purpose of documentation, and which\n' - 'therefore cannot be supplied by keyword. In CPython, this is the ' - 'case\n' - 'for functions implemented in C that use "PyArg_ParseTuple()" to ' - 'parse\n' - 'their arguments.\n' - '\n' - 'If there are more positional arguments than there are formal ' - 'parameter\n' - 'slots, a "TypeError" exception is raised, unless a formal ' - 'parameter\n' - 'using the syntax "*identifier" is present; in this case, that ' - 'formal\n' - 'parameter receives a tuple containing the excess positional ' - 'arguments\n' - '(or an empty tuple if there were no excess positional arguments).\n' - '\n' - 'If any keyword argument does not correspond to a formal parameter\n' - 'name, a "TypeError" exception is raised, unless a formal parameter\n' - 'using the syntax "**identifier" is present; in this case, that ' - 'formal\n' - 'parameter receives a dictionary containing the excess keyword\n' - 'arguments (using the keywords as keys and the argument values as\n' - 'corresponding values), or a (new) empty dictionary if there were ' - 'no\n' - 'excess keyword arguments.\n' - '\n' - 'If the syntax "*expression" appears in the function call, ' - '"expression"\n' - 'must evaluate to an *iterable*. Elements from these iterables are\n' - 'treated as if they were additional positional arguments. For the ' - 'call\n' - '"f(x1, x2, *y, x3, x4)", if *y* evaluates to a sequence *y1*, …, ' - '*yM*,\n' - 'this is equivalent to a call with M+4 positional arguments *x1*, ' - '*x2*,\n' - '*y1*, …, *yM*, *x3*, *x4*.\n' - '\n' - 'A consequence of this is that although the "*expression" syntax ' - 'may\n' - 'appear *after* explicit keyword arguments, it is processed ' - '*before*\n' - 'the keyword arguments (and any "**expression" arguments – see ' - 'below).\n' - 'So:\n' - '\n' - ' >>> def f(a, b):\n' - ' ... print(a, b)\n' - ' ...\n' - ' >>> f(b=1, *(2,))\n' - ' 2 1\n' - ' >>> f(a=1, *(2,))\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - " TypeError: f() got multiple values for keyword argument 'a'\n" - ' >>> f(1, *(2,))\n' - ' 1 2\n' - '\n' - 'It is unusual for both keyword arguments and the "*expression" ' - 'syntax\n' - 'to be used in the same call, so in practice this confusion does ' - 'not\n' - 'arise.\n' - '\n' - 'If the syntax "**expression" appears in the function call,\n' - '"expression" must evaluate to a *mapping*, the contents of which ' - 'are\n' - 'treated as additional keyword arguments. 
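The caveat about mutable default values, sketched with invented helper functions:

```python
def append_to(item, bucket=[]):       # the default list is created once
    bucket.append(item)
    return bucket

print(append_to(1))    # [1]
print(append_to(2))    # [1, 2] -- the same list was reused between calls

def append_to_safe(item, bucket=None):
    if bucket is None:                # create a fresh list on each call instead
        bucket = []
    bucket.append(item)
    return bucket

print(append_to_safe(1))  # [1]
print(append_to_safe(2))  # [2]
```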
If a keyword is already\n' - 'present (as an explicit keyword argument, or from another ' - 'unpacking),\n' - 'a "TypeError" exception is raised.\n' - '\n' - 'Formal parameters using the syntax "*identifier" or "**identifier"\n' - 'cannot be used as positional argument slots or as keyword argument\n' - 'names.\n' - '\n' - 'Changed in version 3.5: Function calls accept any number of "*" ' - 'and\n' - '"**" unpackings, positional arguments may follow iterable ' - 'unpackings\n' - '("*"), and keyword arguments may follow dictionary unpackings ' - '("**").\n' - 'Originally proposed by **PEP 448**.\n' - '\n' - 'A call always returns some value, possibly "None", unless it raises ' - 'an\n' - 'exception. How this value is computed depends on the type of the\n' - 'callable object.\n' - '\n' - 'If it is—\n' - '\n' - 'a user-defined function:\n' - ' The code block for the function is executed, passing it the\n' - ' argument list. The first thing the code block will do is bind ' - 'the\n' - ' formal parameters to the arguments; this is described in ' - 'section\n' - ' Function definitions. When the code block executes a "return"\n' - ' statement, this specifies the return value of the function ' - 'call.\n' - '\n' - 'a built-in function or method:\n' - ' The result is up to the interpreter; see Built-in Functions for ' - 'the\n' - ' descriptions of built-in functions and methods.\n' - '\n' - 'a class object:\n' - ' A new instance of that class is returned.\n' - '\n' - 'a class instance method:\n' - ' The corresponding user-defined function is called, with an ' - 'argument\n' - ' list that is one longer than the argument list of the call: the\n' - ' instance becomes the first argument.\n' - '\n' - 'a class instance:\n' - ' The class must define a "__call__()" method; the effect is then ' - 'the\n' - ' same as if that method was called.\n', - 'class': 'Class definitions\n' - '*****************\n' - '\n' - 'A class definition defines a class object (see section The ' - 'standard\n' - 'type hierarchy):\n' - '\n' - ' classdef ::= [decorators] "class" classname [inheritance] ":" ' - 'suite\n' - ' inheritance ::= "(" [argument_list] ")"\n' - ' classname ::= identifier\n' - '\n' - 'A class definition is an executable statement. The inheritance ' - 'list\n' - 'usually gives a list of base classes (see Metaclasses for more\n' - 'advanced uses), so each item in the list should evaluate to a ' - 'class\n' - 'object which allows subclassing. Classes without an inheritance ' - 'list\n' - 'inherit, by default, from the base class "object"; hence,\n' - '\n' - ' class Foo:\n' - ' pass\n' - '\n' - 'is equivalent to\n' - '\n' - ' class Foo(object):\n' - ' pass\n' - '\n' - 'The class’s suite is then executed in a new execution frame (see\n' - 'Naming and binding), using a newly created local namespace and the\n' - 'original global namespace. (Usually, the suite contains mostly\n' - 'function definitions.) When the class’s suite finishes execution, ' - 'its\n' - 'execution frame is discarded but its local namespace is saved. [5] ' - 'A\n' - 'class object is then created using the inheritance list for the ' - 'base\n' - 'classes and the saved local namespace for the attribute ' - 'dictionary.\n' - 'The class name is bound to this class object in the original local\n' - 'namespace.\n' - '\n' - 'The order in which attributes are defined in the class body is\n' - 'preserved in the new class’s "__dict__". 
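A short sketch of iterable and mapping unpacking in calls, including the duplicate-keyword error described above; the function `f` is invented for the example:

```python
def f(a, b, c, **extra):
    return a, b, c, extra

args = (2, 3)
options = {'d': 5}
print(f(1, *args, **options))    # (1, 2, 3, {'d': 5})

try:
    f(1, 2, c=3, **{'c': 4})     # 'c' is supplied twice
except TypeError as exc:
    print(exc)
```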
Note that this is ' - 'reliable\n' - 'only right after the class is created and only for classes that ' - 'were\n' - 'defined using the definition syntax.\n' - '\n' - 'Class creation can be customized heavily using metaclasses.\n' - '\n' - 'Classes can also be decorated: just like when decorating ' - 'functions,\n' - '\n' - ' @f1(arg)\n' - ' @f2\n' - ' class Foo: pass\n' - '\n' - 'is roughly equivalent to\n' - '\n' - ' class Foo: pass\n' - ' Foo = f1(arg)(f2(Foo))\n' - '\n' - 'The evaluation rules for the decorator expressions are the same as ' - 'for\n' - 'function decorators. The result is then bound to the class name.\n' - '\n' - 'Changed in version 3.9: Classes may be decorated with any valid\n' - '"assignment_expression". Previously, the grammar was much more\n' - 'restrictive; see **PEP 614** for details.\n' - '\n' - '**Programmer’s note:** Variables defined in the class definition ' - 'are\n' - 'class attributes; they are shared by instances. Instance ' - 'attributes\n' - 'can be set in a method with "self.name = value". Both class and\n' - 'instance attributes are accessible through the notation ' - '“"self.name"”,\n' - 'and an instance attribute hides a class attribute with the same ' - 'name\n' - 'when accessed in this way. Class attributes can be used as ' - 'defaults\n' - 'for instance attributes, but using mutable values there can lead ' - 'to\n' - 'unexpected results. Descriptors can be used to create instance\n' - 'variables with different implementation details.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3115** - Metaclasses in Python 3000\n' - ' The proposal that changed the declaration of metaclasses to ' - 'the\n' - ' current syntax, and the semantics for how classes with\n' - ' metaclasses are constructed.\n' - '\n' - ' **PEP 3129** - Class Decorators\n' - ' The proposal that added class decorators. Function and ' - 'method\n' - ' decorators were introduced in **PEP 318**.\n', - 'comparisons': 'Comparisons\n' - '***********\n' - '\n' - 'Unlike C, all comparison operations in Python have the same ' - 'priority,\n' - 'which is lower than that of any arithmetic, shifting or ' - 'bitwise\n' - 'operation. Also unlike C, expressions like "a < b < c" have ' - 'the\n' - 'interpretation that is conventional in mathematics:\n' - '\n' - ' comparison ::= or_expr (comp_operator or_expr)*\n' - ' comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n' - ' | "is" ["not"] | ["not"] "in"\n' - '\n' - 'Comparisons yield boolean values: "True" or "False". Custom ' - '*rich\n' - 'comparison methods* may return non-boolean values. In this ' - 'case Python\n' - 'will call "bool()" on such value in boolean contexts.\n' - '\n' - 'Comparisons can be chained arbitrarily, e.g., "x < y <= z" ' - 'is\n' - 'equivalent to "x < y and y <= z", except that "y" is ' - 'evaluated only\n' - 'once (but in both cases "z" is not evaluated at all when "x < ' - 'y" is\n' - 'found to be false).\n' - '\n' - 'Formally, if *a*, *b*, *c*, …, *y*, *z* are expressions and ' - '*op1*,\n' - '*op2*, …, *opN* are comparison operators, then "a op1 b op2 c ' - '... y\n' - 'opN z" is equivalent to "a op1 b and b op2 c and ... 
y opN ' - 'z", except\n' - 'that each expression is evaluated at most once.\n' - '\n' - 'Note that "a op1 b op2 c" doesn’t imply any kind of ' - 'comparison between\n' - '*a* and *c*, so that, e.g., "x < y > z" is perfectly legal ' - '(though\n' - 'perhaps not pretty).\n' - '\n' - '\n' - 'Value comparisons\n' - '=================\n' - '\n' - 'The operators "<", ">", "==", ">=", "<=", and "!=" compare ' - 'the values\n' - 'of two objects. The objects do not need to have the same ' - 'type.\n' - '\n' - 'Chapter Objects, values and types states that objects have a ' - 'value (in\n' - 'addition to type and identity). The value of an object is a ' - 'rather\n' - 'abstract notion in Python: For example, there is no canonical ' - 'access\n' - 'method for an object’s value. Also, there is no requirement ' - 'that the\n' - 'value of an object should be constructed in a particular way, ' - 'e.g.\n' - 'comprised of all its data attributes. Comparison operators ' - 'implement a\n' - 'particular notion of what the value of an object is. One can ' - 'think of\n' - 'them as defining the value of an object indirectly, by means ' - 'of their\n' - 'comparison implementation.\n' - '\n' - 'Because all types are (direct or indirect) subtypes of ' - '"object", they\n' - 'inherit the default comparison behavior from "object". Types ' - 'can\n' - 'customize their comparison behavior by implementing *rich ' - 'comparison\n' - 'methods* like "__lt__()", described in Basic customization.\n' - '\n' - 'The default behavior for equality comparison ("==" and "!=") ' - 'is based\n' - 'on the identity of the objects. Hence, equality comparison ' - 'of\n' - 'instances with the same identity results in equality, and ' - 'equality\n' - 'comparison of instances with different identities results in\n' - 'inequality. A motivation for this default behavior is the ' - 'desire that\n' - 'all objects should be reflexive (i.e. "x is y" implies "x == ' - 'y").\n' - '\n' - 'A default order comparison ("<", ">", "<=", and ">=") is not ' - 'provided;\n' - 'an attempt raises "TypeError". A motivation for this default ' - 'behavior\n' - 'is the lack of a similar invariant as for equality.\n' - '\n' - 'The behavior of the default equality comparison, that ' - 'instances with\n' - 'different identities are always unequal, may be in contrast ' - 'to what\n' - 'types will need that have a sensible definition of object ' - 'value and\n' - 'value-based equality. Such types will need to customize ' - 'their\n' - 'comparison behavior, and in fact, a number of built-in types ' - 'have done\n' - 'that.\n' - '\n' - 'The following list describes the comparison behavior of the ' - 'most\n' - 'important built-in types.\n' - '\n' - '* Numbers of built-in numeric types (Numeric Types — int, ' - 'float,\n' - ' complex) and of the standard library types ' - '"fractions.Fraction" and\n' - ' "decimal.Decimal" can be compared within and across their ' - 'types,\n' - ' with the restriction that complex numbers do not support ' - 'order\n' - ' comparison. Within the limits of the types involved, they ' - 'compare\n' - ' mathematically (algorithmically) correct without loss of ' - 'precision.\n' - '\n' - ' The not-a-number values "float(\'NaN\')" and ' - '"decimal.Decimal(\'NaN\')"\n' - ' are special. Any ordered comparison of a number to a ' - 'not-a-number\n' - ' value is false. A counter-intuitive implication is that ' - 'not-a-number\n' - ' values are not equal to themselves. 
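The default comparison behavior described above, sketched with an invented class:

```python
class Box:
    def __init__(self, value):
        self.value = value

a, b = Box(1), Box(1)
print(a == b, a == a)    # False True -- default equality is based on identity
try:
    a < b                # no default order comparison is provided
except TypeError as exc:
    print(exc)
```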
For example, if "x =\n' - ' float(\'NaN\')", "3 < x", "x < 3" and "x == x" are all ' - 'false, while "x\n' - ' != x" is true. This behavior is compliant with IEEE 754.\n' - '\n' - '* "None" and "NotImplemented" are singletons. **PEP 8** ' - 'advises that\n' - ' comparisons for singletons should always be done with "is" ' - 'or "is\n' - ' not", never the equality operators.\n' - '\n' - '* Binary sequences (instances of "bytes" or "bytearray") can ' - 'be\n' - ' compared within and across their types. They compare\n' - ' lexicographically using the numeric values of their ' - 'elements.\n' - '\n' - '* Strings (instances of "str") compare lexicographically ' - 'using the\n' - ' numerical Unicode code points (the result of the built-in ' - 'function\n' - ' "ord()") of their characters. [3]\n' - '\n' - ' Strings and binary sequences cannot be directly compared.\n' - '\n' - '* Sequences (instances of "tuple", "list", or "range") can be ' - 'compared\n' - ' only within each of their types, with the restriction that ' - 'ranges do\n' - ' not support order comparison. Equality comparison across ' - 'these\n' - ' types results in inequality, and ordering comparison across ' - 'these\n' - ' types raises "TypeError".\n' - '\n' - ' Sequences compare lexicographically using comparison of\n' - ' corresponding elements. The built-in containers typically ' - 'assume\n' - ' identical objects are equal to themselves. That lets them ' - 'bypass\n' - ' equality tests for identical objects to improve performance ' - 'and to\n' - ' maintain their internal invariants.\n' - '\n' - ' Lexicographical comparison between built-in collections ' - 'works as\n' - ' follows:\n' - '\n' - ' * For two collections to compare equal, they must be of the ' - 'same\n' - ' type, have the same length, and each pair of ' - 'corresponding\n' - ' elements must compare equal (for example, "[1,2] == ' - '(1,2)" is\n' - ' false because the type is not the same).\n' - '\n' - ' * Collections that support order comparison are ordered the ' - 'same as\n' - ' their first unequal elements (for example, "[1,2,x] <= ' - '[1,2,y]"\n' - ' has the same value as "x <= y"). If a corresponding ' - 'element does\n' - ' not exist, the shorter collection is ordered first (for ' - 'example,\n' - ' "[1,2] < [1,2,3]" is true).\n' - '\n' - '* Mappings (instances of "dict") compare equal if and only if ' - 'they\n' - ' have equal *(key, value)* pairs. Equality comparison of the ' - 'keys and\n' - ' values enforces reflexivity.\n' - '\n' - ' Order comparisons ("<", ">", "<=", and ">=") raise ' - '"TypeError".\n' - '\n' - '* Sets (instances of "set" or "frozenset") can be compared ' - 'within and\n' - ' across their types.\n' - '\n' - ' They define order comparison operators to mean subset and ' - 'superset\n' - ' tests. Those relations do not define total orderings (for ' - 'example,\n' - ' the two sets "{1,2}" and "{2,3}" are not equal, nor subsets ' - 'of one\n' - ' another, nor supersets of one another). 
Accordingly, sets ' - 'are not\n' - ' appropriate arguments for functions which depend on total ' - 'ordering\n' - ' (for example, "min()", "max()", and "sorted()" produce ' - 'undefined\n' - ' results given a list of sets as inputs).\n' - '\n' - ' Comparison of sets enforces reflexivity of its elements.\n' - '\n' - '* Most other built-in types have no comparison methods ' - 'implemented, so\n' - ' they inherit the default comparison behavior.\n' - '\n' - 'User-defined classes that customize their comparison behavior ' - 'should\n' - 'follow some consistency rules, if possible:\n' - '\n' - '* Equality comparison should be reflexive. In other words, ' - 'identical\n' - ' objects should compare equal:\n' - '\n' - ' "x is y" implies "x == y"\n' - '\n' - '* Comparison should be symmetric. In other words, the ' - 'following\n' - ' expressions should have the same result:\n' - '\n' - ' "x == y" and "y == x"\n' - '\n' - ' "x != y" and "y != x"\n' - '\n' - ' "x < y" and "y > x"\n' - '\n' - ' "x <= y" and "y >= x"\n' - '\n' - '* Comparison should be transitive. The following ' - '(non-exhaustive)\n' - ' examples illustrate that:\n' - '\n' - ' "x > y and y > z" implies "x > z"\n' - '\n' - ' "x < y and y <= z" implies "x < z"\n' - '\n' - '* Inverse comparison should result in the boolean negation. ' - 'In other\n' - ' words, the following expressions should have the same ' - 'result:\n' - '\n' - ' "x == y" and "not x != y"\n' - '\n' - ' "x < y" and "not x >= y" (for total ordering)\n' - '\n' - ' "x > y" and "not x <= y" (for total ordering)\n' - '\n' - ' The last two expressions apply to totally ordered ' - 'collections (e.g.\n' - ' to sequences, but not to sets or mappings). See also the\n' - ' "total_ordering()" decorator.\n' - '\n' - '* The "hash()" result should be consistent with equality. ' - 'Objects that\n' - ' are equal should either have the same hash value, or be ' - 'marked as\n' - ' unhashable.\n' - '\n' - 'Python does not enforce these consistency rules. In fact, ' - 'the\n' - 'not-a-number values are an example for not following these ' - 'rules.\n' - '\n' - '\n' - 'Membership test operations\n' - '==========================\n' - '\n' - 'The operators "in" and "not in" test for membership. "x in ' - 's"\n' - 'evaluates to "True" if *x* is a member of *s*, and "False" ' - 'otherwise.\n' - '"x not in s" returns the negation of "x in s". All built-in ' - 'sequences\n' - 'and set types support this as well as dictionary, for which ' - '"in" tests\n' - 'whether the dictionary has a given key. For container types ' - 'such as\n' - 'list, tuple, set, frozenset, dict, or collections.deque, the\n' - 'expression "x in y" is equivalent to "any(x is e or x == e ' - 'for e in\n' - 'y)".\n' - '\n' - 'For the string and bytes types, "x in y" is "True" if and ' - 'only if *x*\n' - 'is a substring of *y*. An equivalent test is "y.find(x) != ' - '-1".\n' - 'Empty strings are always considered to be a substring of any ' - 'other\n' - 'string, so """ in "abc"" will return "True".\n' - '\n' - 'For user-defined classes which define the "__contains__()" ' - 'method, "x\n' - 'in y" returns "True" if "y.__contains__(x)" returns a true ' - 'value, and\n' - '"False" otherwise.\n' - '\n' - 'For user-defined classes which do not define "__contains__()" ' - 'but do\n' - 'define "__iter__()", "x in y" is "True" if some value "z", ' - 'for which\n' - 'the expression "x is z or x == z" is true, is produced while ' - 'iterating\n' - 'over "y". 
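Set comparison and membership tests, sketched briefly:

```python
a, b = {1, 2}, {2, 3}
print(a < b, a > b, a == b)   # False False False -- subset tests, not a total order
print({1} < {1, 2})           # True -- proper subset

print(2 in (1, 2, 3))         # True
print('' in 'abc')            # True -- the empty string is a substring of any string
```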
If an exception is raised during the iteration, it ' - 'is as if\n' - '"in" raised that exception.\n' - '\n' - 'Lastly, the old-style iteration protocol is tried: if a class ' - 'defines\n' - '"__getitem__()", "x in y" is "True" if and only if there is a ' - 'non-\n' - 'negative integer index *i* such that "x is y[i] or x == ' - 'y[i]", and no\n' - 'lower integer index raises the "IndexError" exception. (If ' - 'any other\n' - 'exception is raised, it is as if "in" raised that ' - 'exception).\n' - '\n' - 'The operator "not in" is defined to have the inverse truth ' - 'value of\n' - '"in".\n' - '\n' - '\n' - 'Identity comparisons\n' - '====================\n' - '\n' - 'The operators "is" and "is not" test for an object’s ' - 'identity: "x is\n' - 'y" is true if and only if *x* and *y* are the same object. ' - 'An\n' - 'Object’s identity is determined using the "id()" function. ' - '"x is not\n' - 'y" yields the inverse truth value. [4]\n', - 'compound': 'Compound statements\n' - '*******************\n' - '\n' - 'Compound statements contain (groups of) other statements; they ' - 'affect\n' - 'or control the execution of those other statements in some way. ' - 'In\n' - 'general, compound statements span multiple lines, although in ' - 'simple\n' - 'incarnations a whole compound statement may be contained in one ' - 'line.\n' - '\n' - 'The "if", "while" and "for" statements implement traditional ' - 'control\n' - 'flow constructs. "try" specifies exception handlers and/or ' - 'cleanup\n' - 'code for a group of statements, while the "with" statement ' - 'allows the\n' - 'execution of initialization and finalization code around a block ' - 'of\n' - 'code. Function and class definitions are also syntactically ' - 'compound\n' - 'statements.\n' - '\n' - 'A compound statement consists of one or more ‘clauses.’ A ' - 'clause\n' - 'consists of a header and a ‘suite.’ The clause headers of a\n' - 'particular compound statement are all at the same indentation ' - 'level.\n' - 'Each clause header begins with a uniquely identifying keyword ' - 'and ends\n' - 'with a colon. A suite is a group of statements controlled by a\n' - 'clause. A suite can be one or more semicolon-separated simple\n' - 'statements on the same line as the header, following the ' - 'header’s\n' - 'colon, or it can be one or more indented statements on ' - 'subsequent\n' - 'lines. Only the latter form of a suite can contain nested ' - 'compound\n' - 'statements; the following is illegal, mostly because it wouldn’t ' - 'be\n' - 'clear to which "if" clause a following "else" clause would ' - 'belong:\n' - '\n' - ' if test1: if test2: print(x)\n' - '\n' - 'Also note that the semicolon binds tighter than the colon in ' - 'this\n' - 'context, so that in the following example, either all or none of ' - 'the\n' - '"print()" calls are executed:\n' - '\n' - ' if x < y < z: print(x); print(y); print(z)\n' - '\n' - 'Summarizing:\n' - '\n' - ' compound_stmt ::= if_stmt\n' - ' | while_stmt\n' - ' | for_stmt\n' - ' | try_stmt\n' - ' | with_stmt\n' - ' | match_stmt\n' - ' | funcdef\n' - ' | classdef\n' - ' | async_with_stmt\n' - ' | async_for_stmt\n' - ' | async_funcdef\n' - ' suite ::= stmt_list NEWLINE | NEWLINE INDENT ' - 'statement+ DEDENT\n' - ' statement ::= stmt_list NEWLINE | compound_stmt\n' - ' stmt_list ::= simple_stmt (";" simple_stmt)* [";"]\n' - '\n' - 'Note that statements always end in a "NEWLINE" possibly followed ' - 'by a\n' - '"DEDENT". 
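The membership-test hooks described above, sketched with invented classes:

```python
class Evens:
    def __contains__(self, x):        # preferred hook for "in"
        return x % 2 == 0

class Countdown:
    def __getitem__(self, i):         # old-style fallback used by "in"
        if i > 3:
            raise IndexError
        return 3 - i

print(4 in Evens(), 5 in Evens())     # True False
print(2 in Countdown())               # True -- found at index 1
```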
Also note that optional continuation clauses always ' - 'begin\n' - 'with a keyword that cannot start a statement, thus there are no\n' - 'ambiguities (the ‘dangling "else"’ problem is solved in Python ' - 'by\n' - 'requiring nested "if" statements to be indented).\n' - '\n' - 'The formatting of the grammar rules in the following sections ' - 'places\n' - 'each clause on a separate line for clarity.\n' - '\n' - '\n' - 'The "if" statement\n' - '==================\n' - '\n' - 'The "if" statement is used for conditional execution:\n' - '\n' - ' if_stmt ::= "if" assignment_expression ":" suite\n' - ' ("elif" assignment_expression ":" suite)*\n' - ' ["else" ":" suite]\n' - '\n' - 'It selects exactly one of the suites by evaluating the ' - 'expressions one\n' - 'by one until one is found to be true (see section Boolean ' - 'operations\n' - 'for the definition of true and false); then that suite is ' - 'executed\n' - '(and no other part of the "if" statement is executed or ' - 'evaluated).\n' - 'If all expressions are false, the suite of the "else" clause, ' - 'if\n' - 'present, is executed.\n' - '\n' - '\n' - 'The "while" statement\n' - '=====================\n' - '\n' - 'The "while" statement is used for repeated execution as long as ' - 'an\n' - 'expression is true:\n' - '\n' - ' while_stmt ::= "while" assignment_expression ":" suite\n' - ' ["else" ":" suite]\n' - '\n' - 'This repeatedly tests the expression and, if it is true, ' - 'executes the\n' - 'first suite; if the expression is false (which may be the first ' - 'time\n' - 'it is tested) the suite of the "else" clause, if present, is ' - 'executed\n' - 'and the loop terminates.\n' - '\n' - 'A "break" statement executed in the first suite terminates the ' - 'loop\n' - 'without executing the "else" clause’s suite. A "continue" ' - 'statement\n' - 'executed in the first suite skips the rest of the suite and goes ' - 'back\n' - 'to testing the expression.\n' - '\n' - '\n' - 'The "for" statement\n' - '===================\n' - '\n' - 'The "for" statement is used to iterate over the elements of a ' - 'sequence\n' - '(such as a string, tuple or list) or other iterable object:\n' - '\n' - ' for_stmt ::= "for" target_list "in" expression_list ":" ' - 'suite\n' - ' ["else" ":" suite]\n' - '\n' - 'The expression list is evaluated once; it should yield an ' - 'iterable\n' - 'object. An iterator is created for the result of the\n' - '"expression_list". The suite is then executed once for each ' - 'item\n' - 'provided by the iterator, in the order returned by the ' - 'iterator. Each\n' - 'item in turn is assigned to the target list using the standard ' - 'rules\n' - 'for assignments (see Assignment statements), and then the suite ' - 'is\n' - 'executed. When the items are exhausted (which is immediately ' - 'when the\n' - 'sequence is empty or an iterator raises a "StopIteration" ' - 'exception),\n' - 'the suite in the "else" clause, if present, is executed, and the ' - 'loop\n' - 'terminates.\n' - '\n' - 'A "break" statement executed in the first suite terminates the ' - 'loop\n' - 'without executing the "else" clause’s suite. 
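The loop `else` clause described above runs only when the loop was not ended by `break`; a small sketch:

```python
for n in (2, 3, 5, 7):
    if n % 4 == 0:
        print("found a multiple of 4:", n)
        break
else:
    print("no multiple of 4 found")   # printed, because the loop was not broken
```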
A "continue" ' - 'statement\n' - 'executed in the first suite skips the rest of the suite and ' - 'continues\n' - 'with the next item, or with the "else" clause if there is no ' - 'next\n' - 'item.\n' - '\n' - 'The for-loop makes assignments to the variables in the target ' - 'list.\n' - 'This overwrites all previous assignments to those variables ' - 'including\n' - 'those made in the suite of the for-loop:\n' - '\n' - ' for i in range(10):\n' - ' print(i)\n' - ' i = 5 # this will not affect the for-loop\n' - ' # because i will be overwritten with ' - 'the next\n' - ' # index in the range\n' - '\n' - 'Names in the target list are not deleted when the loop is ' - 'finished,\n' - 'but if the sequence is empty, they will not have been assigned ' - 'to at\n' - 'all by the loop. Hint: the built-in function "range()" returns ' - 'an\n' - 'iterator of integers suitable to emulate the effect of Pascal’s ' - '"for i\n' - ':= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, ' - '2]".\n' - '\n' - 'Note:\n' - '\n' - ' There is a subtlety when the sequence is being modified by the ' - 'loop\n' - ' (this can only occur for mutable sequences, e.g. lists). An\n' - ' internal counter is used to keep track of which item is used ' - 'next,\n' - ' and this is incremented on each iteration. When this counter ' - 'has\n' - ' reached the length of the sequence the loop terminates. This ' - 'means\n' - ' that if the suite deletes the current (or a previous) item ' - 'from the\n' - ' sequence, the next item will be skipped (since it gets the ' - 'index of\n' - ' the current item which has already been treated). Likewise, ' - 'if the\n' - ' suite inserts an item in the sequence before the current item, ' - 'the\n' - ' current item will be treated again the next time through the ' - 'loop.\n' - ' This can lead to nasty bugs that can be avoided by making a\n' - ' temporary copy using a slice of the whole sequence, e.g.,\n' - '\n' - ' for x in a[:]:\n' - ' if x < 0: a.remove(x)\n' - '\n' - '\n' - 'The "try" statement\n' - '===================\n' - '\n' - 'The "try" statement specifies exception handlers and/or cleanup ' - 'code\n' - 'for a group of statements:\n' - '\n' - ' try_stmt ::= try1_stmt | try2_stmt\n' - ' try1_stmt ::= "try" ":" suite\n' - ' ("except" [expression ["as" identifier]] ":" ' - 'suite)+\n' - ' ["else" ":" suite]\n' - ' ["finally" ":" suite]\n' - ' try2_stmt ::= "try" ":" suite\n' - ' "finally" ":" suite\n' - '\n' - 'The "except" clause(s) specify one or more exception handlers. ' - 'When no\n' - 'exception occurs in the "try" clause, no exception handler is\n' - 'executed. When an exception occurs in the "try" suite, a search ' - 'for an\n' - 'exception handler is started. This search inspects the except ' - 'clauses\n' - 'in turn until one is found that matches the exception. An ' - 'expression-\n' - 'less except clause, if present, must be last; it matches any\n' - 'exception. For an except clause with an expression, that ' - 'expression\n' - 'is evaluated, and the clause matches the exception if the ' - 'resulting\n' - 'object is “compatible” with the exception. 
An object is ' - 'compatible\n' - 'with an exception if it is the class or a base class of the ' - 'exception\n' - 'object, or a tuple containing an item that is the class or a ' - 'base\n' - 'class of the exception object.\n' - '\n' - 'If no except clause matches the exception, the search for an ' - 'exception\n' - 'handler continues in the surrounding code and on the invocation ' - 'stack.\n' - '[1]\n' - '\n' - 'If the evaluation of an expression in the header of an except ' - 'clause\n' - 'raises an exception, the original search for a handler is ' - 'canceled and\n' - 'a search starts for the new exception in the surrounding code ' - 'and on\n' - 'the call stack (it is treated as if the entire "try" statement ' - 'raised\n' - 'the exception).\n' - '\n' - 'When a matching except clause is found, the exception is ' - 'assigned to\n' - 'the target specified after the "as" keyword in that except ' - 'clause, if\n' - 'present, and the except clause’s suite is executed. All except\n' - 'clauses must have an executable block. When the end of this ' - 'block is\n' - 'reached, execution continues normally after the entire try ' - 'statement.\n' - '(This means that if two nested handlers exist for the same ' - 'exception,\n' - 'and the exception occurs in the try clause of the inner handler, ' - 'the\n' - 'outer handler will not handle the exception.)\n' - '\n' - 'When an exception has been assigned using "as target", it is ' - 'cleared\n' - 'at the end of the except clause. This is as if\n' - '\n' - ' except E as N:\n' - ' foo\n' - '\n' - 'was translated to\n' - '\n' - ' except E as N:\n' - ' try:\n' - ' foo\n' - ' finally:\n' - ' del N\n' - '\n' - 'This means the exception must be assigned to a different name to ' - 'be\n' - 'able to refer to it after the except clause. Exceptions are ' - 'cleared\n' - 'because with the traceback attached to them, they form a ' - 'reference\n' - 'cycle with the stack frame, keeping all locals in that frame ' - 'alive\n' - 'until the next garbage collection occurs.\n' - '\n' - 'Before an except clause’s suite is executed, details about the\n' - 'exception are stored in the "sys" module and can be accessed ' - 'via\n' - '"sys.exc_info()". "sys.exc_info()" returns a 3-tuple consisting ' - 'of the\n' - 'exception class, the exception instance and a traceback object ' - '(see\n' - 'section The standard type hierarchy) identifying the point in ' - 'the\n' - 'program where the exception occurred. The details about the ' - 'exception\n' - 'accessed via "sys.exc_info()" are restored to their previous ' - 'values\n' - 'when leaving an exception handler:\n' - '\n' - ' >>> print(sys.exc_info())\n' - ' (None, None, None)\n' - ' >>> try:\n' - ' ... raise TypeError\n' - ' ... except:\n' - ' ... print(sys.exc_info())\n' - ' ... try:\n' - ' ... raise ValueError\n' - ' ... except:\n' - ' ... print(sys.exc_info())\n' - ' ... print(sys.exc_info())\n' - ' ...\n' - " (, TypeError(), )\n' - " (, ValueError(), )\n' - " (, TypeError(), )\n' - ' >>> print(sys.exc_info())\n' - ' (None, None, None)\n' - '\n' - 'The optional "else" clause is executed if the control flow ' - 'leaves the\n' - '"try" suite, no exception was raised, and no "return", ' - '"continue", or\n' - '"break" statement was executed. Exceptions in the "else" clause ' - 'are\n' - 'not handled by the preceding "except" clauses.\n' - '\n' - 'If "finally" is present, it specifies a ‘cleanup’ handler. The ' - '"try"\n' - 'clause is executed, including any "except" and "else" clauses. 
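The clearing of the name bound with `as`, sketched at module level:

```python
try:
    raise ValueError("boom")
except ValueError as err:
    saved = err            # keep a reference under a different name
print(saved)               # boom
try:
    err                    # the "as" name was deleted at the end of the clause
except NameError:
    print("err is gone")
```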
' - 'If an\n' - 'exception occurs in any of the clauses and is not handled, the\n' - 'exception is temporarily saved. The "finally" clause is ' - 'executed. If\n' - 'there is a saved exception it is re-raised at the end of the ' - '"finally"\n' - 'clause. If the "finally" clause raises another exception, the ' - 'saved\n' - 'exception is set as the context of the new exception. If the ' - '"finally"\n' - 'clause executes a "return", "break" or "continue" statement, the ' - 'saved\n' - 'exception is discarded:\n' - '\n' - ' >>> def f():\n' - ' ... try:\n' - ' ... 1/0\n' - ' ... finally:\n' - ' ... return 42\n' - ' ...\n' - ' >>> f()\n' - ' 42\n' - '\n' - 'The exception information is not available to the program ' - 'during\n' - 'execution of the "finally" clause.\n' - '\n' - 'When a "return", "break" or "continue" statement is executed in ' - 'the\n' - '"try" suite of a "try"…"finally" statement, the "finally" clause ' - 'is\n' - 'also executed ‘on the way out.’\n' - '\n' - 'The return value of a function is determined by the last ' - '"return"\n' - 'statement executed. Since the "finally" clause always executes, ' - 'a\n' - '"return" statement executed in the "finally" clause will always ' - 'be the\n' - 'last one executed:\n' - '\n' - ' >>> def foo():\n' - ' ... try:\n' - " ... return 'try'\n" - ' ... finally:\n' - " ... return 'finally'\n" - ' ...\n' - ' >>> foo()\n' - " 'finally'\n" - '\n' - 'Additional information on exceptions can be found in section\n' - 'Exceptions, and information on using the "raise" statement to ' - 'generate\n' - 'exceptions may be found in section The raise statement.\n' - '\n' - 'Changed in version 3.8: Prior to Python 3.8, a "continue" ' - 'statement\n' - 'was illegal in the "finally" clause due to a problem with the\n' - 'implementation.\n' - '\n' - '\n' - 'The "with" statement\n' - '====================\n' - '\n' - 'The "with" statement is used to wrap the execution of a block ' - 'with\n' - 'methods defined by a context manager (see section With ' - 'Statement\n' - 'Context Managers). This allows common "try"…"except"…"finally" ' - 'usage\n' - 'patterns to be encapsulated for convenient reuse.\n' - '\n' - ' with_stmt ::= "with" ( "(" with_stmt_contents ","? ' - '")" | with_stmt_contents ) ":" suite\n' - ' with_stmt_contents ::= with_item ("," with_item)*\n' - ' with_item ::= expression ["as" target]\n' - '\n' - 'The execution of the "with" statement with one “item” proceeds ' - 'as\n' - 'follows:\n' - '\n' - '1. The context expression (the expression given in the ' - '"with_item") is\n' - ' evaluated to obtain a context manager.\n' - '\n' - '2. The context manager’s "__enter__()" is loaded for later use.\n' - '\n' - '3. The context manager’s "__exit__()" is loaded for later use.\n' - '\n' - '4. The context manager’s "__enter__()" method is invoked.\n' - '\n' - '5. If a target was included in the "with" statement, the return ' - 'value\n' - ' from "__enter__()" is assigned to it.\n' - '\n' - ' Note:\n' - '\n' - ' The "with" statement guarantees that if the "__enter__()" ' - 'method\n' - ' returns without an error, then "__exit__()" will always be\n' - ' called. Thus, if an error occurs during the assignment to ' - 'the\n' - ' target list, it will be treated the same as an error ' - 'occurring\n' - ' within the suite would be. See step 6 below.\n' - '\n' - '6. The suite is executed.\n' - '\n' - '7. The context manager’s "__exit__()" method is invoked. 
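The `__enter__()`/`__exit__()` steps listed above, sketched with an invented `Resource` class:

```python
class Resource:
    def __enter__(self):
        print("enter")
        return self                    # bound to the "as" target (step 5)

    def __exit__(self, exc_type, exc, tb):
        print("exit", exc_type)        # called on the way out (step 7)
        return False                   # do not suppress exceptions


with Resource() as r:
    print("inside", r is not None)
# prints: enter / inside True / exit None
```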
If an\n' - ' exception caused the suite to be exited, its type, value, ' - 'and\n' - ' traceback are passed as arguments to "__exit__()". Otherwise, ' - 'three\n' - ' "None" arguments are supplied.\n' - '\n' - ' If the suite was exited due to an exception, and the return ' - 'value\n' - ' from the "__exit__()" method was false, the exception is ' - 'reraised.\n' - ' If the return value was true, the exception is suppressed, ' - 'and\n' - ' execution continues with the statement following the "with"\n' - ' statement.\n' - '\n' - ' If the suite was exited for any reason other than an ' - 'exception, the\n' - ' return value from "__exit__()" is ignored, and execution ' - 'proceeds\n' - ' at the normal location for the kind of exit that was taken.\n' - '\n' - 'The following code:\n' - '\n' - ' with EXPRESSION as TARGET:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' manager = (EXPRESSION)\n' - ' enter = type(manager).__enter__\n' - ' exit = type(manager).__exit__\n' - ' value = enter(manager)\n' - ' hit_except = False\n' - '\n' - ' try:\n' - ' TARGET = value\n' - ' SUITE\n' - ' except:\n' - ' hit_except = True\n' - ' if not exit(manager, *sys.exc_info()):\n' - ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' exit(manager, None, None, None)\n' - '\n' - 'With more than one item, the context managers are processed as ' - 'if\n' - 'multiple "with" statements were nested:\n' - '\n' - ' with A() as a, B() as b:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' with A() as a:\n' - ' with B() as b:\n' - ' SUITE\n' - '\n' - 'You can also write multi-item context managers in multiple lines ' - 'if\n' - 'the items are surrounded by parentheses. For example:\n' - '\n' - ' with (\n' - ' A() as a,\n' - ' B() as b,\n' - ' ):\n' - ' SUITE\n' - '\n' - 'Changed in version 3.1: Support for multiple context ' - 'expressions.\n' - '\n' - 'Changed in version 3.10: Support for using grouping parentheses ' - 'to\n' - 'break the statement in multiple lines.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 343** - The “with” statement\n' - ' The specification, background, and examples for the Python ' - '"with"\n' - ' statement.\n' - '\n' - '\n' - 'The "match" statement\n' - '=====================\n' - '\n' - 'New in version 3.10.\n' - '\n' - 'The match statement is used for pattern matching. Syntax:\n' - '\n' - ' match_stmt ::= \'match\' subject_expr ":" NEWLINE INDENT ' - 'case_block+ DEDENT\n' - ' subject_expr ::= star_named_expression "," ' - 'star_named_expressions?\n' - ' | named_expression\n' - ' case_block ::= \'case\' patterns [guard] ":" block\n' - '\n' - 'Note:\n' - '\n' - ' This section uses single quotes to denote soft keywords.\n' - '\n' - 'Pattern matching takes a pattern as input (following "case") and ' - 'a\n' - 'subject value (following "match"). The pattern (which may ' - 'contain\n' - 'subpatterns) is matched against the subject value. The outcomes ' - 'are:\n' - '\n' - '* A match success or failure (also termed a pattern success or\n' - ' failure).\n' - '\n' - '* Possible binding of matched values to a name. The ' - 'prerequisites for\n' - ' this are further discussed below.\n' - '\n' - 'The "match" and "case" keywords are soft keywords.\n' - '\n' - 'See also:\n' - '\n' - ' * **PEP 634** – Structural Pattern Matching: Specification\n' - '\n' - ' * **PEP 636** – Structural Pattern Matching: Tutorial\n' - '\n' - '\n' - 'Overview\n' - '--------\n' - '\n' - 'Here’s an overview of the logical flow of a match statement:\n' - '\n' - '1. 
The subject expression "subject_expr" is evaluated and a ' - 'resulting\n' - ' subject value obtained. If the subject expression contains a ' - 'comma,\n' - ' a tuple is constructed using the standard rules.\n' - '\n' - '2. Each pattern in a "case_block" is attempted to match with ' - 'the\n' - ' subject value. The specific rules for success or failure are\n' - ' described below. The match attempt can also bind some or all ' - 'of the\n' - ' standalone names within the pattern. The precise pattern ' - 'binding\n' - ' rules vary per pattern type and are specified below. **Name\n' - ' bindings made during a successful pattern match outlive the\n' - ' executed block and can be used after the match statement**.\n' - '\n' - ' Note:\n' - '\n' - ' During failed pattern matches, some subpatterns may ' - 'succeed.\n' - ' Do not rely on bindings being made for a failed match.\n' - ' Conversely, do not rely on variables remaining unchanged ' - 'after\n' - ' a failed match. The exact behavior is dependent on\n' - ' implementation and may vary. This is an intentional ' - 'decision\n' - ' made to allow different implementations to add ' - 'optimizations.\n' - '\n' - '3. If the pattern succeeds, the corresponding guard (if present) ' - 'is\n' - ' evaluated. In this case all name bindings are guaranteed to ' - 'have\n' - ' happened.\n' - '\n' - ' * If the guard evaluates as truthy or missing, the "block" ' - 'inside\n' - ' "case_block" is executed.\n' - '\n' - ' * Otherwise, the next "case_block" is attempted as described ' - 'above.\n' - '\n' - ' * If there are no further case blocks, the match statement ' - 'is\n' - ' completed.\n' - '\n' - 'Note:\n' - '\n' - ' Users should generally never rely on a pattern being ' - 'evaluated.\n' - ' Depending on implementation, the interpreter may cache values ' - 'or use\n' - ' other optimizations which skip repeated evaluations.\n' - '\n' - 'A sample match statement:\n' - '\n' - ' >>> flag = False\n' - ' >>> match (100, 200):\n' - ' ... case (100, 300): # Mismatch: 200 != 300\n' - " ... print('Case 1')\n" - ' ... case (100, 200) if flag: # Successful match, but ' - 'guard fails\n' - " ... print('Case 2')\n" - ' ... case (100, y): # Matches and binds y to 200\n' - " ... print(f'Case 3, y: {y}')\n" - ' ... case _: # Pattern not attempted\n' - " ... print('Case 4, I match anything!')\n" - ' ...\n' - ' Case 3, y: 200\n' - '\n' - 'In this case, "if flag" is a guard. Read more about that in the ' - 'next\n' - 'section.\n' - '\n' - '\n' - 'Guards\n' - '------\n' - '\n' - ' guard ::= "if" named_expression\n' - '\n' - 'A "guard" (which is part of the "case") must succeed for code ' - 'inside\n' - 'the "case" block to execute. It takes the form: "if" followed ' - 'by an\n' - 'expression.\n' - '\n' - 'The logical flow of a "case" block with a "guard" follows:\n' - '\n' - '1. Check that the pattern in the "case" block succeeded. If ' - 'the\n' - ' pattern failed, the "guard" is not evaluated and the next ' - '"case"\n' - ' block is checked.\n' - '\n' - '2. 
If the pattern succeeded, evaluate the "guard".\n' - '\n' - ' * If the "guard" condition evaluates to “truthy”, the case ' - 'block is\n' - ' selected.\n' - '\n' - ' * If the "guard" condition evaluates to “falsy”, the case ' - 'block is\n' - ' not selected.\n' - '\n' - ' * If the "guard" raises an exception during evaluation, the\n' - ' exception bubbles up.\n' - '\n' - 'Guards are allowed to have side effects as they are ' - 'expressions.\n' - 'Guard evaluation must proceed from the first to the last case ' - 'block,\n' - 'one at a time, skipping case blocks whose pattern(s) don’t all\n' - 'succeed. (I.e., guard evaluation must happen in order.) Guard\n' - 'evaluation must stop once a case block is selected.\n' - '\n' - '\n' - 'Irrefutable Case Blocks\n' - '-----------------------\n' - '\n' - 'An irrefutable case block is a match-all case block. A match\n' - 'statement may have at most one irrefutable case block, and it ' - 'must be\n' - 'last.\n' - '\n' - 'A case block is considered irrefutable if it has no guard and ' - 'its\n' - 'pattern is irrefutable. A pattern is considered irrefutable if ' - 'we can\n' - 'prove from its syntax alone that it will always succeed. Only ' - 'the\n' - 'following patterns are irrefutable:\n' - '\n' - '* AS Patterns whose left-hand side is irrefutable\n' - '\n' - '* OR Patterns containing at least one irrefutable pattern\n' - '\n' - '* Capture Patterns\n' - '\n' - '* Wildcard Patterns\n' - '\n' - '* parenthesized irrefutable patterns\n' - '\n' - '\n' - 'Patterns\n' - '--------\n' - '\n' - 'Note:\n' - '\n' - ' This section uses grammar notations beyond standard EBNF:\n' - '\n' - ' * the notation "SEP.RULE+" is shorthand for "RULE (SEP ' - 'RULE)*"\n' - '\n' - ' * the notation "!RULE" is shorthand for a negative lookahead\n' - ' assertion\n' - '\n' - 'The top-level syntax for "patterns" is:\n' - '\n' - ' patterns ::= open_sequence_pattern | pattern\n' - ' pattern ::= as_pattern | or_pattern\n' - ' closed_pattern ::= | literal_pattern\n' - ' | capture_pattern\n' - ' | wildcard_pattern\n' - ' | value_pattern\n' - ' | group_pattern\n' - ' | sequence_pattern\n' - ' | mapping_pattern\n' - ' | class_pattern\n' - '\n' - 'The descriptions below will include a description “in simple ' - 'terms” of\n' - 'what a pattern does for illustration purposes (credits to ' - 'Raymond\n' - 'Hettinger for a document that inspired most of the ' - 'descriptions). Note\n' - 'that these descriptions are purely for illustration purposes and ' - '**may\n' - 'not** reflect the underlying implementation. Furthermore, they ' - 'do not\n' - 'cover all valid forms.\n' - '\n' - '\n' - 'OR Patterns\n' - '~~~~~~~~~~~\n' - '\n' - 'An OR pattern is two or more patterns separated by vertical bars ' - '"|".\n' - 'Syntax:\n' - '\n' - ' or_pattern ::= "|".closed_pattern+\n' - '\n' - 'Only the final subpattern may be irrefutable, and each ' - 'subpattern must\n' - 'bind the same set of names to avoid ambiguity.\n' - '\n' - 'An OR pattern matches each of its subpatterns in turn to the ' - 'subject\n' - 'value, until one succeeds. The OR pattern is then considered\n' - 'successful. Otherwise, if none of the subpatterns succeed, the ' - 'OR\n' - 'pattern fails.\n' - '\n' - 'In simple terms, "P1 | P2 | ..." 
will try to match "P1", if it ' - 'fails\n' - 'it will try to match "P2", succeeding immediately if any ' - 'succeeds,\n' - 'failing otherwise.\n' - '\n' - '\n' - 'AS Patterns\n' - '~~~~~~~~~~~\n' - '\n' - 'An AS pattern matches an OR pattern on the left of the "as" ' - 'keyword\n' - 'against a subject. Syntax:\n' - '\n' - ' as_pattern ::= or_pattern "as" capture_pattern\n' - '\n' - 'If the OR pattern fails, the AS pattern fails. Otherwise, the ' - 'AS\n' - 'pattern binds the subject to the name on the right of the as ' - 'keyword\n' - 'and succeeds. "capture_pattern" cannot be a a "_".\n' - '\n' - 'In simple terms "P as NAME" will match with "P", and on success ' - 'it\n' - 'will set "NAME = ".\n' - '\n' - '\n' - 'Literal Patterns\n' - '~~~~~~~~~~~~~~~~\n' - '\n' - 'A literal pattern corresponds to most literals in Python. ' - 'Syntax:\n' - '\n' - ' literal_pattern ::= signed_number\n' - ' | signed_number "+" NUMBER\n' - ' | signed_number "-" NUMBER\n' - ' | strings\n' - ' | "None"\n' - ' | "True"\n' - ' | "False"\n' - ' | signed_number: NUMBER | "-" NUMBER\n' - '\n' - 'The rule "strings" and the token "NUMBER" are defined in the ' - 'standard\n' - 'Python grammar. Triple-quoted strings are supported. Raw ' - 'strings and\n' - 'byte strings are supported. Formatted string literals are not\n' - 'supported.\n' - '\n' - 'The forms "signed_number \'+\' NUMBER" and "signed_number \'-\' ' - 'NUMBER"\n' - 'are for expressing complex numbers; they require a real number ' - 'on the\n' - 'left and an imaginary number on the right. E.g. "3 + 4j".\n' - '\n' - 'In simple terms, "LITERAL" will succeed only if " ==\n' - 'LITERAL". For the singletons "None", "True" and "False", the ' - '"is"\n' - 'operator is used.\n' - '\n' - '\n' - 'Capture Patterns\n' - '~~~~~~~~~~~~~~~~\n' - '\n' - 'A capture pattern binds the subject value to a name. Syntax:\n' - '\n' - " capture_pattern ::= !'_' NAME\n" - '\n' - 'A single underscore "_" is not a capture pattern (this is what ' - '"!\'_\'"\n' - 'expresses). It is instead treated as a "wildcard_pattern".\n' - '\n' - 'In a given pattern, a given name can only be bound once. E.g. ' - '"case\n' - 'x, x: ..." is invalid while "case [x] | x: ..." is allowed.\n' - '\n' - 'Capture patterns always succeed. The binding follows scoping ' - 'rules\n' - 'established by the assignment expression operator in **PEP ' - '572**; the\n' - 'name becomes a local variable in the closest containing function ' - 'scope\n' - 'unless there’s an applicable "global" or "nonlocal" statement.\n' - '\n' - 'In simple terms "NAME" will always succeed and it will set "NAME ' - '=\n' - '".\n' - '\n' - '\n' - 'Wildcard Patterns\n' - '~~~~~~~~~~~~~~~~~\n' - '\n' - 'A wildcard pattern always succeeds (matches anything) and binds ' - 'no\n' - 'name. Syntax:\n' - '\n' - " wildcard_pattern ::= '_'\n" - '\n' - '"_" is a soft keyword within any pattern, but only within ' - 'patterns.\n' - 'It is an identifier, as usual, even within "match" subject\n' - 'expressions, "guard"s, and "case" blocks.\n' - '\n' - 'In simple terms, "_" will always succeed.\n' - '\n' - '\n' - 'Value Patterns\n' - '~~~~~~~~~~~~~~\n' - '\n' - 'A value pattern represents a named value in Python. Syntax:\n' - '\n' - ' value_pattern ::= attr\n' - ' attr ::= name_or_attr "." NAME\n' - ' name_or_attr ::= attr | NAME\n' - '\n' - 'The dotted name in the pattern is looked up using standard ' - 'Python name\n' - 'resolution rules. 
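Before the value-pattern rules continue below, here is a small illustrative sketch (not part of this patch, requires Python 3.10+) that combines the OR, AS, literal, capture and wildcard patterns described above; the function and status codes are hypothetical.

```python
# Illustrative sketch: OR, AS, literal, capture and wildcard patterns
# (plus a guard) combined in one match statement.
def classify(status):
    match status:
        case 200 | 201 | 204:               # OR pattern of literal patterns
            return "success"
        case 301 | 302 as code:             # AS pattern: binds the matched literal
            return f"redirect ({code})"
        case code if isinstance(code, int) and code >= 500:
            return f"server error ({code})"  # capture pattern + guard
        case _:                              # wildcard: matches anything, binds nothing
            return "unhandled"

print(classify(204))   # success
print(classify(302))   # redirect (302)
print(classify(503))   # server error (503)
print(classify("?"))   # unhandled
```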
The pattern succeeds if the value found ' - 'compares\n' - 'equal to the subject value (using the "==" equality operator).\n' - '\n' - 'In simple terms "NAME1.NAME2" will succeed only if " ' - '==\n' - 'NAME1.NAME2"\n' - '\n' - 'Note:\n' - '\n' - ' If the same value occurs multiple times in the same match ' - 'statement,\n' - ' the interpreter may cache the first value found and reuse it ' - 'rather\n' - ' than repeat the same lookup. This cache is strictly tied to a ' - 'given\n' - ' execution of a given match statement.\n' - '\n' - '\n' - 'Group Patterns\n' - '~~~~~~~~~~~~~~\n' - '\n' - 'A group pattern allows users to add parentheses around patterns ' - 'to\n' - 'emphasize the intended grouping. Otherwise, it has no ' - 'additional\n' - 'syntax. Syntax:\n' - '\n' - ' group_pattern ::= "(" pattern ")"\n' - '\n' - 'In simple terms "(P)" has the same effect as "P".\n' - '\n' - '\n' - 'Sequence Patterns\n' - '~~~~~~~~~~~~~~~~~\n' - '\n' - 'A sequence pattern contains several subpatterns to be matched ' - 'against\n' - 'sequence elements. The syntax is similar to the unpacking of a ' - 'list or\n' - 'tuple.\n' - '\n' - ' sequence_pattern ::= "[" [maybe_sequence_pattern] "]"\n' - ' | "(" [open_sequence_pattern] ")"\n' - ' open_sequence_pattern ::= maybe_star_pattern "," ' - '[maybe_sequence_pattern]\n' - ' maybe_sequence_pattern ::= ",".maybe_star_pattern+ ","?\n' - ' maybe_star_pattern ::= star_pattern | pattern\n' - ' star_pattern ::= "*" (capture_pattern | ' - 'wildcard_pattern)\n' - '\n' - 'There is no difference if parentheses or square brackets are ' - 'used for\n' - 'sequence patterns (i.e. "(...)" vs "[...]" ).\n' - '\n' - 'Note:\n' - '\n' - ' A single pattern enclosed in parentheses without a trailing ' - 'comma\n' - ' (e.g. "(3 | 4)") is a group pattern. While a single pattern ' - 'enclosed\n' - ' in square brackets (e.g. "[3 | 4]") is still a sequence ' - 'pattern.\n' - '\n' - 'At most one star subpattern may be in a sequence pattern. The ' - 'star\n' - 'subpattern may occur in any position. If no star subpattern is\n' - 'present, the sequence pattern is a fixed-length sequence ' - 'pattern;\n' - 'otherwise it is a variable-length sequence pattern.\n' - '\n' - 'The following is the logical flow for matching a sequence ' - 'pattern\n' - 'against a subject value:\n' - '\n' - '1. If the subject value is not a sequence [2], the sequence ' - 'pattern\n' - ' fails.\n' - '\n' - '2. If the subject value is an instance of "str", "bytes" or\n' - ' "bytearray" the sequence pattern fails.\n' - '\n' - '3. The subsequent steps depend on whether the sequence pattern ' - 'is\n' - ' fixed or variable-length.\n' - '\n' - ' If the sequence pattern is fixed-length:\n' - '\n' - ' 1. If the length of the subject sequence is not equal to the ' - 'number\n' - ' of subpatterns, the sequence pattern fails\n' - '\n' - ' 2. Subpatterns in the sequence pattern are matched to their\n' - ' corresponding items in the subject sequence from left to ' - 'right.\n' - ' Matching stops as soon as a subpattern fails. If all\n' - ' subpatterns succeed in matching their corresponding item, ' - 'the\n' - ' sequence pattern succeeds.\n' - '\n' - ' Otherwise, if the sequence pattern is variable-length:\n' - '\n' - ' 1. If the length of the subject sequence is less than the ' - 'number of\n' - ' non-star subpatterns, the sequence pattern fails.\n' - '\n' - ' 2. The leading non-star subpatterns are matched to their\n' - ' corresponding items as for fixed-length sequences.\n' - '\n' - ' 3. 
If the previous step succeeds, the star subpattern matches ' - 'a\n' - ' list formed of the remaining subject items, excluding the\n' - ' remaining items corresponding to non-star subpatterns ' - 'following\n' - ' the star subpattern.\n' - '\n' - ' 4. Remaining non-star subpatterns are matched to their\n' - ' corresponding subject items, as for a fixed-length ' - 'sequence.\n' - '\n' - ' Note:\n' - '\n' - ' The length of the subject sequence is obtained via "len()" ' - '(i.e.\n' - ' via the "__len__()" protocol). This length may be cached ' - 'by the\n' - ' interpreter in a similar manner as value patterns.\n' - '\n' - 'In simple terms "[P1, P2, P3," … ", P]" matches only if all ' - 'the\n' - 'following happens:\n' - '\n' - '* check "" is a sequence\n' - '\n' - '* "len(subject) == "\n' - '\n' - '* "P1" matches "[0]" (note that this match can also ' - 'bind\n' - ' names)\n' - '\n' - '* "P2" matches "[1]" (note that this match can also ' - 'bind\n' - ' names)\n' - '\n' - '* … and so on for the corresponding pattern/element.\n' - '\n' - '\n' - 'Mapping Patterns\n' - '~~~~~~~~~~~~~~~~\n' - '\n' - 'A mapping pattern contains one or more key-value patterns. The ' - 'syntax\n' - 'is similar to the construction of a dictionary. Syntax:\n' - '\n' - ' mapping_pattern ::= "{" [items_pattern] "}"\n' - ' items_pattern ::= ",".key_value_pattern+ ","?\n' - ' key_value_pattern ::= (literal_pattern | value_pattern) ":" ' - 'pattern\n' - ' | double_star_pattern\n' - ' double_star_pattern ::= "**" capture_pattern\n' - '\n' - 'At most one double star pattern may be in a mapping pattern. ' - 'The\n' - 'double star pattern must be the last subpattern in the mapping\n' - 'pattern.\n' - '\n' - 'Duplicate keys in mapping patterns are disallowed. Duplicate ' - 'literal\n' - 'keys will raise a "SyntaxError". Two keys that otherwise have ' - 'the same\n' - 'value will raise a "ValueError" at runtime.\n' - '\n' - 'The following is the logical flow for matching a mapping ' - 'pattern\n' - 'against a subject value:\n' - '\n' - '1. If the subject value is not a mapping [3],the mapping ' - 'pattern\n' - ' fails.\n' - '\n' - '2. If every key given in the mapping pattern is present in the ' - 'subject\n' - ' mapping, and the pattern for each key matches the ' - 'corresponding\n' - ' item of the subject mapping, the mapping pattern succeeds.\n' - '\n' - '3. If duplicate keys are detected in the mapping pattern, the ' - 'pattern\n' - ' is considered invalid. A "SyntaxError" is raised for ' - 'duplicate\n' - ' literal values; or a "ValueError" for named keys of the same ' - 'value.\n' - '\n' - 'Note:\n' - '\n' - ' Key-value pairs are matched using the two-argument form of ' - 'the\n' - ' mapping subject’s "get()" method. Matched key-value pairs ' - 'must\n' - ' already be present in the mapping, and not created on-the-fly ' - 'via\n' - ' "__missing__()" or "__getitem__()".\n' - '\n' - 'In simple terms "{KEY1: P1, KEY2: P2, ... }" matches only if all ' - 'the\n' - 'following happens:\n' - '\n' - '* check "" is a mapping\n' - '\n' - '* "KEY1 in "\n' - '\n' - '* "P1" matches "[KEY1]"\n' - '\n' - '* … and so on for the corresponding KEY/pattern pair.\n' - '\n' - '\n' - 'Class Patterns\n' - '~~~~~~~~~~~~~~\n' - '\n' - 'A class pattern represents a class and its positional and ' - 'keyword\n' - 'arguments (if any). Syntax:\n' - '\n' - ' class_pattern ::= name_or_attr "(" [pattern_arguments ' - '","?] 
")"\n' - ' pattern_arguments ::= positional_patterns ["," ' - 'keyword_patterns]\n' - ' | keyword_patterns\n' - ' positional_patterns ::= ",".pattern+\n' - ' keyword_patterns ::= ",".keyword_pattern+\n' - ' keyword_pattern ::= NAME "=" pattern\n' - '\n' - 'The same keyword should not be repeated in class patterns.\n' - '\n' - 'The following is the logical flow for matching a mapping ' - 'pattern\n' - 'against a subject value:\n' - '\n' - '1. If "name_or_attr" is not an instance of the builtin "type" , ' - 'raise\n' - ' "TypeError".\n' - '\n' - '2. If the subject value is not an instance of "name_or_attr" ' - '(tested\n' - ' via "isinstance()"), the class pattern fails.\n' - '\n' - '3. If no pattern arguments are present, the pattern succeeds.\n' - ' Otherwise, the subsequent steps depend on whether keyword or\n' - ' positional argument patterns are present.\n' - '\n' - ' For a number of built-in types (specified below), a single\n' - ' positional subpattern is accepted which will match the ' - 'entire\n' - ' subject; for these types keyword patterns also work as for ' - 'other\n' - ' types.\n' - '\n' - ' If only keyword patterns are present, they are processed as\n' - ' follows, one by one:\n' - '\n' - ' I. The keyword is looked up as an attribute on the subject.\n' - '\n' - ' * If this raises an exception other than "AttributeError", ' - 'the\n' - ' exception bubbles up.\n' - '\n' - ' * If this raises "AttributeError", the class pattern has ' - 'failed.\n' - '\n' - ' * Else, the subpattern associated with the keyword pattern ' - 'is\n' - ' matched against the subject’s attribute value. If this ' - 'fails,\n' - ' the class pattern fails; if this succeeds, the match ' - 'proceeds\n' - ' to the next keyword.\n' - '\n' - ' II. If all keyword patterns succeed, the class pattern ' - 'succeeds.\n' - '\n' - ' If any positional patterns are present, they are converted ' - 'to\n' - ' keyword patterns using the "__match_args__" attribute on the ' - 'class\n' - ' "name_or_attr" before matching:\n' - '\n' - ' I. The equivalent of "getattr(cls, "__match_args__", ()))" ' - 'is\n' - ' called.\n' - '\n' - ' * If this raises an exception, the exception bubbles up.\n' - '\n' - ' * If the returned value is not a tuple, the conversion ' - 'fails and\n' - ' "TypeError" is raised.\n' - '\n' - ' * If there are more positional patterns than\n' - ' "len(cls.__match_args__)", "TypeError" is raised.\n' - '\n' - ' * Otherwise, positional pattern "i" is converted to a ' - 'keyword\n' - ' pattern using "__match_args__[i]" as the keyword.\n' - ' "__match_args__[i]" must be a string; if not "TypeError" ' - 'is\n' - ' raised.\n' - '\n' - ' * If there are duplicate keywords, "TypeError" is raised.\n' - '\n' - ' See also:\n' - '\n' - ' Customizing positional arguments in class pattern ' - 'matching\n' - '\n' - ' II. 
Once all positional patterns have been converted to ' - 'keyword\n' - ' patterns,\n' - ' the match proceeds as if there were only keyword ' - 'patterns.\n' - '\n' - ' For the following built-in types the handling of positional\n' - ' subpatterns is different:\n' - '\n' - ' * "bool"\n' - '\n' - ' * "bytearray"\n' - '\n' - ' * "bytes"\n' - '\n' - ' * "dict"\n' - '\n' - ' * "float"\n' - '\n' - ' * "frozenset"\n' - '\n' - ' * "int"\n' - '\n' - ' * "list"\n' - '\n' - ' * "set"\n' - '\n' - ' * "str"\n' - '\n' - ' * "tuple"\n' - '\n' - ' These classes accept a single positional argument, and the ' - 'pattern\n' - ' there is matched against the whole object rather than an ' - 'attribute.\n' - ' For example "int(0|1)" matches the value "0", but not the ' - 'values\n' - ' "0.0" or "False".\n' - '\n' - 'In simple terms "CLS(P1, attr=P2)" matches only if the ' - 'following\n' - 'happens:\n' - '\n' - '* "isinstance(, CLS)"\n' - '\n' - '* convert "P1" to a keyword pattern using "CLS.__match_args__"\n' - '\n' - '* For each keyword argument "attr=P2":\n' - ' * "hasattr(, "attr")"\n' - '\n' - ' * "P2" matches ".attr"\n' - '\n' - '* … and so on for the corresponding keyword argument/pattern ' - 'pair.\n' - '\n' - 'See also:\n' - '\n' - ' * **PEP 634** – Structural Pattern Matching: Specification\n' - '\n' - ' * **PEP 636** – Structural Pattern Matching: Tutorial\n' - '\n' - '\n' - 'Function definitions\n' - '====================\n' - '\n' - 'A function definition defines a user-defined function object ' - '(see\n' - 'section The standard type hierarchy):\n' - '\n' - ' funcdef ::= [decorators] "def" funcname "(" ' - '[parameter_list] ")"\n' - ' ["->" expression] ":" suite\n' - ' decorators ::= decorator+\n' - ' decorator ::= "@" assignment_expression ' - 'NEWLINE\n' - ' parameter_list ::= defparameter ("," ' - 'defparameter)* "," "/" ["," [parameter_list_no_posonly]]\n' - ' | parameter_list_no_posonly\n' - ' parameter_list_no_posonly ::= defparameter ("," ' - 'defparameter)* ["," [parameter_list_starargs]]\n' - ' | parameter_list_starargs\n' - ' parameter_list_starargs ::= "*" [parameter] ("," ' - 'defparameter)* ["," ["**" parameter [","]]]\n' - ' | "**" parameter [","]\n' - ' parameter ::= identifier [":" expression]\n' - ' defparameter ::= parameter ["=" expression]\n' - ' funcname ::= identifier\n' - '\n' - 'A function definition is an executable statement. Its execution ' - 'binds\n' - 'the function name in the current local namespace to a function ' - 'object\n' - '(a wrapper around the executable code for the function). This\n' - 'function object contains a reference to the current global ' - 'namespace\n' - 'as the global namespace to be used when the function is called.\n' - '\n' - 'The function definition does not execute the function body; this ' - 'gets\n' - 'executed only when the function is called. [4]\n' - '\n' - 'A function definition may be wrapped by one or more *decorator*\n' - 'expressions. Decorator expressions are evaluated when the ' - 'function is\n' - 'defined, in the scope that contains the function definition. ' - 'The\n' - 'result must be a callable, which is invoked with the function ' - 'object\n' - 'as the only argument. The returned value is bound to the ' - 'function name\n' - 'instead of the function object. Multiple decorators are applied ' - 'in\n' - 'nested fashion. 
For example, the following code\n' - '\n' - ' @f1(arg)\n' - ' @f2\n' - ' def func(): pass\n' - '\n' - 'is roughly equivalent to\n' - '\n' - ' def func(): pass\n' - ' func = f1(arg)(f2(func))\n' - '\n' - 'except that the original function is not temporarily bound to ' - 'the name\n' - '"func".\n' - '\n' - 'Changed in version 3.9: Functions may be decorated with any ' - 'valid\n' - '"assignment_expression". Previously, the grammar was much more\n' - 'restrictive; see **PEP 614** for details.\n' - '\n' - 'When one or more *parameters* have the form *parameter* "="\n' - '*expression*, the function is said to have “default parameter ' - 'values.”\n' - 'For a parameter with a default value, the corresponding ' - '*argument* may\n' - 'be omitted from a call, in which case the parameter’s default ' - 'value is\n' - 'substituted. If a parameter has a default value, all following\n' - 'parameters up until the “"*"” must also have a default value — ' - 'this is\n' - 'a syntactic restriction that is not expressed by the grammar.\n' - '\n' - '**Default parameter values are evaluated from left to right when ' - 'the\n' - 'function definition is executed.** This means that the ' - 'expression is\n' - 'evaluated once, when the function is defined, and that the same ' - '“pre-\n' - 'computed” value is used for each call. This is especially ' - 'important\n' - 'to understand when a default parameter value is a mutable ' - 'object, such\n' - 'as a list or a dictionary: if the function modifies the object ' - '(e.g.\n' - 'by appending an item to a list), the default parameter value is ' - 'in\n' - 'effect modified. This is generally not what was intended. A ' - 'way\n' - 'around this is to use "None" as the default, and explicitly test ' - 'for\n' - 'it in the body of the function, e.g.:\n' - '\n' - ' def whats_on_the_telly(penguin=None):\n' - ' if penguin is None:\n' - ' penguin = []\n' - ' penguin.append("property of the zoo")\n' - ' return penguin\n' - '\n' - 'Function call semantics are described in more detail in section ' - 'Calls.\n' - 'A function call always assigns values to all parameters ' - 'mentioned in\n' - 'the parameter list, either from positional arguments, from ' - 'keyword\n' - 'arguments, or from default values. If the form “"*identifier"” ' - 'is\n' - 'present, it is initialized to a tuple receiving any excess ' - 'positional\n' - 'parameters, defaulting to the empty tuple. If the form\n' - '“"**identifier"” is present, it is initialized to a new ordered\n' - 'mapping receiving any excess keyword arguments, defaulting to a ' - 'new\n' - 'empty mapping of the same type. Parameters after “"*"” or\n' - '“"*identifier"” are keyword-only parameters and may only be ' - 'passed by\n' - 'keyword arguments. Parameters before “"/"” are positional-only\n' - 'parameters and may only be passed by positional arguments.\n' - '\n' - 'Changed in version 3.8: The "/" function parameter syntax may be ' - 'used\n' - 'to indicate positional-only parameters. See **PEP 570** for ' - 'details.\n' - '\n' - 'Parameters may have an *annotation* of the form “": ' - 'expression"”\n' - 'following the parameter name. Any parameter may have an ' - 'annotation,\n' - 'even those of the form "*identifier" or "**identifier". ' - 'Functions may\n' - 'have “return” annotation of the form “"-> expression"” after ' - 'the\n' - 'parameter list. These annotations can be any valid Python ' - 'expression.\n' - 'The presence of annotations does not change the semantics of a\n' - 'function. 
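A short sketch (names are hypothetical, Python 3.8+) of the "/" and "*" parameter markers described above: parameters before "/" are positional-only, parameters after "*" are keyword-only.

```python
# 'x' is positional-only, 'y' may be passed either way, 'scale' is keyword-only.
def combine(x, /, y, *, scale=1):
    return (x + y) * scale

print(combine(1, 2))              # 3  -- x and y passed positionally
print(combine(1, y=2, scale=10))  # 30 -- y by keyword, scale must be keyword
# combine(x=1, y=2)   -> TypeError: x is positional-only
# combine(1, 2, 3)    -> TypeError: scale is keyword-only
```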
The annotation values are available as values of a\n' - 'dictionary keyed by the parameters’ names in the ' - '"__annotations__"\n' - 'attribute of the function object. If the "annotations" import ' - 'from\n' - '"__future__" is used, annotations are preserved as strings at ' - 'runtime\n' - 'which enables postponed evaluation. Otherwise, they are ' - 'evaluated\n' - 'when the function definition is executed. In this case ' - 'annotations\n' - 'may be evaluated in a different order than they appear in the ' - 'source\n' - 'code.\n' - '\n' - 'It is also possible to create anonymous functions (functions not ' - 'bound\n' - 'to a name), for immediate use in expressions. This uses lambda\n' - 'expressions, described in section Lambdas. Note that the ' - 'lambda\n' - 'expression is merely a shorthand for a simplified function ' - 'definition;\n' - 'a function defined in a “"def"” statement can be passed around ' - 'or\n' - 'assigned to another name just like a function defined by a ' - 'lambda\n' - 'expression. The “"def"” form is actually more powerful since ' - 'it\n' - 'allows the execution of multiple statements and annotations.\n' - '\n' - '**Programmer’s note:** Functions are first-class objects. A ' - '“"def"”\n' - 'statement executed inside a function definition defines a local\n' - 'function that can be returned or passed around. Free variables ' - 'used\n' - 'in the nested function can access the local variables of the ' - 'function\n' - 'containing the def. See section Naming and binding for ' - 'details.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3107** - Function Annotations\n' - ' The original specification for function annotations.\n' - '\n' - ' **PEP 484** - Type Hints\n' - ' Definition of a standard meaning for annotations: type ' - 'hints.\n' - '\n' - ' **PEP 526** - Syntax for Variable Annotations\n' - ' Ability to type hint variable declarations, including ' - 'class\n' - ' variables and instance variables\n' - '\n' - ' **PEP 563** - Postponed Evaluation of Annotations\n' - ' Support for forward references within annotations by ' - 'preserving\n' - ' annotations in a string form at runtime instead of eager\n' - ' evaluation.\n' - '\n' - '\n' - 'Class definitions\n' - '=================\n' - '\n' - 'A class definition defines a class object (see section The ' - 'standard\n' - 'type hierarchy):\n' - '\n' - ' classdef ::= [decorators] "class" classname [inheritance] ' - '":" suite\n' - ' inheritance ::= "(" [argument_list] ")"\n' - ' classname ::= identifier\n' - '\n' - 'A class definition is an executable statement. The inheritance ' - 'list\n' - 'usually gives a list of base classes (see Metaclasses for more\n' - 'advanced uses), so each item in the list should evaluate to a ' - 'class\n' - 'object which allows subclassing. Classes without an inheritance ' - 'list\n' - 'inherit, by default, from the base class "object"; hence,\n' - '\n' - ' class Foo:\n' - ' pass\n' - '\n' - 'is equivalent to\n' - '\n' - ' class Foo(object):\n' - ' pass\n' - '\n' - 'The class’s suite is then executed in a new execution frame ' - '(see\n' - 'Naming and binding), using a newly created local namespace and ' - 'the\n' - 'original global namespace. (Usually, the suite contains mostly\n' - 'function definitions.) When the class’s suite finishes ' - 'execution, its\n' - 'execution frame is discarded but its local namespace is saved. 
' - '[5] A\n' - 'class object is then created using the inheritance list for the ' - 'base\n' - 'classes and the saved local namespace for the attribute ' - 'dictionary.\n' - 'The class name is bound to this class object in the original ' - 'local\n' - 'namespace.\n' - '\n' - 'The order in which attributes are defined in the class body is\n' - 'preserved in the new class’s "__dict__". Note that this is ' - 'reliable\n' - 'only right after the class is created and only for classes that ' - 'were\n' - 'defined using the definition syntax.\n' - '\n' - 'Class creation can be customized heavily using metaclasses.\n' - '\n' - 'Classes can also be decorated: just like when decorating ' - 'functions,\n' - '\n' - ' @f1(arg)\n' - ' @f2\n' - ' class Foo: pass\n' - '\n' - 'is roughly equivalent to\n' - '\n' - ' class Foo: pass\n' - ' Foo = f1(arg)(f2(Foo))\n' - '\n' - 'The evaluation rules for the decorator expressions are the same ' - 'as for\n' - 'function decorators. The result is then bound to the class ' - 'name.\n' - '\n' - 'Changed in version 3.9: Classes may be decorated with any valid\n' - '"assignment_expression". Previously, the grammar was much more\n' - 'restrictive; see **PEP 614** for details.\n' - '\n' - '**Programmer’s note:** Variables defined in the class definition ' - 'are\n' - 'class attributes; they are shared by instances. Instance ' - 'attributes\n' - 'can be set in a method with "self.name = value". Both class ' - 'and\n' - 'instance attributes are accessible through the notation ' - '“"self.name"”,\n' - 'and an instance attribute hides a class attribute with the same ' - 'name\n' - 'when accessed in this way. Class attributes can be used as ' - 'defaults\n' - 'for instance attributes, but using mutable values there can lead ' - 'to\n' - 'unexpected results. Descriptors can be used to create instance\n' - 'variables with different implementation details.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3115** - Metaclasses in Python 3000\n' - ' The proposal that changed the declaration of metaclasses to ' - 'the\n' - ' current syntax, and the semantics for how classes with\n' - ' metaclasses are constructed.\n' - '\n' - ' **PEP 3129** - Class Decorators\n' - ' The proposal that added class decorators. Function and ' - 'method\n' - ' decorators were introduced in **PEP 318**.\n' - '\n' - '\n' - 'Coroutines\n' - '==========\n' - '\n' - 'New in version 3.5.\n' - '\n' - '\n' - 'Coroutine function definition\n' - '-----------------------------\n' - '\n' - ' async_funcdef ::= [decorators] "async" "def" funcname "(" ' - '[parameter_list] ")"\n' - ' ["->" expression] ":" suite\n' - '\n' - 'Execution of Python coroutines can be suspended and resumed at ' - 'many\n' - 'points (see *coroutine*). 
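A minimal, hedged sketch of driving such a coroutine function; the asyncio event loop used here is only one possible driver and is not prescribed by the text above.

```python
# Running coroutine functions with asyncio (an illustration, not the only driver).
import asyncio

async def fetch(delay, value):
    await asyncio.sleep(delay)   # execution of this coroutine is suspended here
    return value

async def main():
    # Both coroutines are scheduled concurrently; total time is ~0.2s, not 0.3s.
    results = await asyncio.gather(fetch(0.1, "a"), fetch(0.2, "b"))
    print(results)               # ['a', 'b']

asyncio.run(main())
```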
"await" expressions, "async for" and ' - '"async\n' - 'with" can only be used in the body of a coroutine function.\n' - '\n' - 'Functions defined with "async def" syntax are always coroutine\n' - 'functions, even if they do not contain "await" or "async" ' - 'keywords.\n' - '\n' - 'It is a "SyntaxError" to use a "yield from" expression inside ' - 'the body\n' - 'of a coroutine function.\n' - '\n' - 'An example of a coroutine function:\n' - '\n' - ' async def func(param1, param2):\n' - ' do_stuff()\n' - ' await some_coroutine()\n' - '\n' - 'Changed in version 3.7: "await" and "async" are now keywords;\n' - 'previously they were only treated as such inside the body of a\n' - 'coroutine function.\n' - '\n' - '\n' - 'The "async for" statement\n' - '-------------------------\n' - '\n' - ' async_for_stmt ::= "async" for_stmt\n' - '\n' - 'An *asynchronous iterable* provides an "__aiter__" method that\n' - 'directly returns an *asynchronous iterator*, which can call\n' - 'asynchronous code in its "__anext__" method.\n' - '\n' - 'The "async for" statement allows convenient iteration over\n' - 'asynchronous iterables.\n' - '\n' - 'The following code:\n' - '\n' - ' async for TARGET in ITER:\n' - ' SUITE\n' - ' else:\n' - ' SUITE2\n' - '\n' - 'Is semantically equivalent to:\n' - '\n' - ' iter = (ITER)\n' - ' iter = type(iter).__aiter__(iter)\n' - ' running = True\n' - '\n' - ' while running:\n' - ' try:\n' - ' TARGET = await type(iter).__anext__(iter)\n' - ' except StopAsyncIteration:\n' - ' running = False\n' - ' else:\n' - ' SUITE\n' - ' else:\n' - ' SUITE2\n' - '\n' - 'See also "__aiter__()" and "__anext__()" for details.\n' - '\n' - 'It is a "SyntaxError" to use an "async for" statement outside ' - 'the body\n' - 'of a coroutine function.\n' - '\n' - '\n' - 'The "async with" statement\n' - '--------------------------\n' - '\n' - ' async_with_stmt ::= "async" with_stmt\n' - '\n' - 'An *asynchronous context manager* is a *context manager* that is ' - 'able\n' - 'to suspend execution in its *enter* and *exit* methods.\n' - '\n' - 'The following code:\n' - '\n' - ' async with EXPRESSION as TARGET:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' manager = (EXPRESSION)\n' - ' aenter = type(manager).__aenter__\n' - ' aexit = type(manager).__aexit__\n' - ' value = await aenter(manager)\n' - ' hit_except = False\n' - '\n' - ' try:\n' - ' TARGET = value\n' - ' SUITE\n' - ' except:\n' - ' hit_except = True\n' - ' if not await aexit(manager, *sys.exc_info()):\n' - ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' await aexit(manager, None, None, None)\n' - '\n' - 'See also "__aenter__()" and "__aexit__()" for details.\n' - '\n' - 'It is a "SyntaxError" to use an "async with" statement outside ' - 'the\n' - 'body of a coroutine function.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 492** - Coroutines with async and await syntax\n' - ' The proposal that made coroutines a proper standalone ' - 'concept in\n' - ' Python, and added supporting syntax.\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] The exception is propagated to the invocation stack unless ' - 'there\n' - ' is a "finally" clause which happens to raise another ' - 'exception.\n' - ' That new exception causes the old one to be lost.\n' - '\n' - '[2] In pattern matching, a sequence is defined as one of the\n' - ' following:\n' - '\n' - ' * a class that inherits from "collections.abc.Sequence"\n' - '\n' - ' * a Python class that has been registered as\n' - ' "collections.abc.Sequence"\n' - '\n' - ' * a builtin class that has 
its (CPython) ' - '"Py_TPFLAGS_SEQUENCE"\n' - ' bit set\n' - '\n' - ' * a class that inherits from any of the above\n' - '\n' - ' The following standard library classes are sequences:\n' - '\n' - ' * "array.array"\n' - '\n' - ' * "collections.deque"\n' - '\n' - ' * "list"\n' - '\n' - ' * "memoryview"\n' - '\n' - ' * "range"\n' - '\n' - ' * "tuple"\n' - '\n' - ' Note:\n' - '\n' - ' Subject values of type "str", "bytes", and "bytearray" do ' - 'not\n' - ' match sequence patterns.\n' - '\n' - '[3] In pattern matching, a mapping is defined as one of the ' - 'following:\n' - '\n' - ' * a class that inherits from "collections.abc.Mapping"\n' - '\n' - ' * a Python class that has been registered as\n' - ' "collections.abc.Mapping"\n' - '\n' - ' * a builtin class that has its (CPython) ' - '"Py_TPFLAGS_MAPPING"\n' - ' bit set\n' - '\n' - ' * a class that inherits from any of the above\n' - '\n' - ' The standard library classes "dict" and ' - '"types.MappingProxyType"\n' - ' are mappings.\n' - '\n' - '[4] A string literal appearing as the first statement in the ' - 'function\n' - ' body is transformed into the function’s "__doc__" attribute ' - 'and\n' - ' therefore the function’s *docstring*.\n' - '\n' - '[5] A string literal appearing as the first statement in the ' - 'class\n' - ' body is transformed into the namespace’s "__doc__" item and\n' - ' therefore the class’s *docstring*.\n', - 'context-managers': 'With Statement Context Managers\n' - '*******************************\n' - '\n' - 'A *context manager* is an object that defines the ' - 'runtime context to\n' - 'be established when executing a "with" statement. The ' - 'context manager\n' - 'handles the entry into, and the exit from, the desired ' - 'runtime context\n' - 'for the execution of the block of code. Context ' - 'managers are normally\n' - 'invoked using the "with" statement (described in section ' - 'The with\n' - 'statement), but can also be used by directly invoking ' - 'their methods.\n' - '\n' - 'Typical uses of context managers include saving and ' - 'restoring various\n' - 'kinds of global state, locking and unlocking resources, ' - 'closing opened\n' - 'files, etc.\n' - '\n' - 'For more information on context managers, see Context ' - 'Manager Types.\n' - '\n' - 'object.__enter__(self)\n' - '\n' - ' Enter the runtime context related to this object. The ' - '"with"\n' - ' statement will bind this method’s return value to the ' - 'target(s)\n' - ' specified in the "as" clause of the statement, if ' - 'any.\n' - '\n' - 'object.__exit__(self, exc_type, exc_value, traceback)\n' - '\n' - ' Exit the runtime context related to this object. The ' - 'parameters\n' - ' describe the exception that caused the context to be ' - 'exited. If the\n' - ' context was exited without an exception, all three ' - 'arguments will\n' - ' be "None".\n' - '\n' - ' If an exception is supplied, and the method wishes to ' - 'suppress the\n' - ' exception (i.e., prevent it from being propagated), ' - 'it should\n' - ' return a true value. 
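A short sketch of a class-based context manager along the lines described here; the class name is hypothetical, and only "KeyError" is suppressed by its "__exit__()".

```python
# A context manager that suppresses KeyError but lets other exceptions propagate.
class suppress_keyerror:
    def __enter__(self):
        return self                      # bound to the "as" target, if any

    def __exit__(self, exc_type, exc_value, traceback):
        # Returning a true value suppresses the exception; a false value re-raises it.
        return exc_type is not None and issubclass(exc_type, KeyError)

with suppress_keyerror():
    {}["missing"]                        # KeyError is swallowed by __exit__
print("still running")

try:
    with suppress_keyerror():
        1 / 0                            # not a KeyError: __exit__ returns False
except ZeroDivisionError:
    print("other exceptions still propagate")
```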
Otherwise, the exception will be ' - 'processed\n' - ' normally upon exit from this method.\n' - '\n' - ' Note that "__exit__()" methods should not reraise the ' - 'passed-in\n' - ' exception; this is the caller’s responsibility.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 343** - The “with” statement\n' - ' The specification, background, and examples for the ' - 'Python "with"\n' - ' statement.\n', - 'continue': 'The "continue" statement\n' - '************************\n' - '\n' - ' continue_stmt ::= "continue"\n' - '\n' - '"continue" may only occur syntactically nested in a "for" or ' - '"while"\n' - 'loop, but not nested in a function or class definition within ' - 'that\n' - 'loop. It continues with the next cycle of the nearest enclosing ' - 'loop.\n' - '\n' - 'When "continue" passes control out of a "try" statement with a\n' - '"finally" clause, that "finally" clause is executed before ' - 'really\n' - 'starting the next loop cycle.\n', - 'conversions': 'Arithmetic conversions\n' - '**********************\n' - '\n' - 'When a description of an arithmetic operator below uses the ' - 'phrase\n' - '“the numeric arguments are converted to a common type”, this ' - 'means\n' - 'that the operator implementation for built-in types works as ' - 'follows:\n' - '\n' - '* If either argument is a complex number, the other is ' - 'converted to\n' - ' complex;\n' - '\n' - '* otherwise, if either argument is a floating point number, ' - 'the other\n' - ' is converted to floating point;\n' - '\n' - '* otherwise, both must be integers and no conversion is ' - 'necessary.\n' - '\n' - 'Some additional rules apply for certain operators (e.g., a ' - 'string as a\n' - 'left argument to the ‘%’ operator). Extensions must define ' - 'their own\n' - 'conversion behavior.\n', - 'customization': 'Basic customization\n' - '*******************\n' - '\n' - 'object.__new__(cls[, ...])\n' - '\n' - ' Called to create a new instance of class *cls*. ' - '"__new__()" is a\n' - ' static method (special-cased so you need not declare it ' - 'as such)\n' - ' that takes the class of which an instance was requested ' - 'as its\n' - ' first argument. The remaining arguments are those ' - 'passed to the\n' - ' object constructor expression (the call to the class). ' - 'The return\n' - ' value of "__new__()" should be the new object instance ' - '(usually an\n' - ' instance of *cls*).\n' - '\n' - ' Typical implementations create a new instance of the ' - 'class by\n' - ' invoking the superclass’s "__new__()" method using\n' - ' "super().__new__(cls[, ...])" with appropriate arguments ' - 'and then\n' - ' modifying the newly-created instance as necessary before ' - 'returning\n' - ' it.\n' - '\n' - ' If "__new__()" is invoked during object construction and ' - 'it returns\n' - ' an instance or subclass of *cls*, then the new ' - 'instance’s\n' - ' "__init__()" method will be invoked like ' - '"__init__(self[, ...])",\n' - ' where *self* is the new instance and the remaining ' - 'arguments are\n' - ' the same as were passed to the object constructor.\n' - '\n' - ' If "__new__()" does not return an instance of *cls*, ' - 'then the new\n' - ' instance’s "__init__()" method will not be invoked.\n' - '\n' - ' "__new__()" is intended mainly to allow subclasses of ' - 'immutable\n' - ' types (like int, str, or tuple) to customize instance ' - 'creation. 
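For instance, a minimal sketch of that immutable-subclass use of "__new__()" (the "Point" class is hypothetical): because "tuple" is immutable, the value has to be fixed at creation time, before "__init__()" would run.

```python
# Subclassing an immutable type: the payload is supplied via __new__().
class Point(tuple):
    def __new__(cls, x, y):
        return super().__new__(cls, (x, y))

    @property
    def x(self):
        return self[0]

    @property
    def y(self):
        return self[1]

p = Point(3, 4)
print(p, p.x, p.y)           # (3, 4) 3 4
print(isinstance(p, tuple))  # True
```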
It\n' - ' is also commonly overridden in custom metaclasses in ' - 'order to\n' - ' customize class creation.\n' - '\n' - 'object.__init__(self[, ...])\n' - '\n' - ' Called after the instance has been created (by ' - '"__new__()"), but\n' - ' before it is returned to the caller. The arguments are ' - 'those\n' - ' passed to the class constructor expression. If a base ' - 'class has an\n' - ' "__init__()" method, the derived class’s "__init__()" ' - 'method, if\n' - ' any, must explicitly call it to ensure proper ' - 'initialization of the\n' - ' base class part of the instance; for example:\n' - ' "super().__init__([args...])".\n' - '\n' - ' Because "__new__()" and "__init__()" work together in ' - 'constructing\n' - ' objects ("__new__()" to create it, and "__init__()" to ' - 'customize\n' - ' it), no non-"None" value may be returned by ' - '"__init__()"; doing so\n' - ' will cause a "TypeError" to be raised at runtime.\n' - '\n' - 'object.__del__(self)\n' - '\n' - ' Called when the instance is about to be destroyed. This ' - 'is also\n' - ' called a finalizer or (improperly) a destructor. If a ' - 'base class\n' - ' has a "__del__()" method, the derived class’s ' - '"__del__()" method,\n' - ' if any, must explicitly call it to ensure proper ' - 'deletion of the\n' - ' base class part of the instance.\n' - '\n' - ' It is possible (though not recommended!) for the ' - '"__del__()" method\n' - ' to postpone destruction of the instance by creating a ' - 'new reference\n' - ' to it. This is called object *resurrection*. It is\n' - ' implementation-dependent whether "__del__()" is called a ' - 'second\n' - ' time when a resurrected object is about to be destroyed; ' - 'the\n' - ' current *CPython* implementation only calls it once.\n' - '\n' - ' It is not guaranteed that "__del__()" methods are called ' - 'for\n' - ' objects that still exist when the interpreter exits.\n' - '\n' - ' Note:\n' - '\n' - ' "del x" doesn’t directly call "x.__del__()" — the ' - 'former\n' - ' decrements the reference count for "x" by one, and the ' - 'latter is\n' - ' only called when "x"’s reference count reaches zero.\n' - '\n' - ' **CPython implementation detail:** It is possible for a ' - 'reference\n' - ' cycle to prevent the reference count of an object from ' - 'going to\n' - ' zero. In this case, the cycle will be later detected ' - 'and deleted\n' - ' by the *cyclic garbage collector*. A common cause of ' - 'reference\n' - ' cycles is when an exception has been caught in a local ' - 'variable.\n' - ' The frame’s locals then reference the exception, which ' - 'references\n' - ' its own traceback, which references the locals of all ' - 'frames caught\n' - ' in the traceback.\n' - '\n' - ' See also: Documentation for the "gc" module.\n' - '\n' - ' Warning:\n' - '\n' - ' Due to the precarious circumstances under which ' - '"__del__()"\n' - ' methods are invoked, exceptions that occur during ' - 'their execution\n' - ' are ignored, and a warning is printed to "sys.stderr" ' - 'instead.\n' - ' In particular:\n' - '\n' - ' * "__del__()" can be invoked when arbitrary code is ' - 'being\n' - ' executed, including from any arbitrary thread. If ' - '"__del__()"\n' - ' needs to take a lock or invoke any other blocking ' - 'resource, it\n' - ' may deadlock as the resource may already be taken by ' - 'the code\n' - ' that gets interrupted to execute "__del__()".\n' - '\n' - ' * "__del__()" can be executed during interpreter ' - 'shutdown. 
As a\n' - ' consequence, the global variables it needs to access ' - '(including\n' - ' other modules) may already have been deleted or set ' - 'to "None".\n' - ' Python guarantees that globals whose name begins ' - 'with a single\n' - ' underscore are deleted from their module before ' - 'other globals\n' - ' are deleted; if no other references to such globals ' - 'exist, this\n' - ' may help in assuring that imported modules are still ' - 'available\n' - ' at the time when the "__del__()" method is called.\n' - '\n' - 'object.__repr__(self)\n' - '\n' - ' Called by the "repr()" built-in function to compute the ' - '“official”\n' - ' string representation of an object. If at all possible, ' - 'this\n' - ' should look like a valid Python expression that could be ' - 'used to\n' - ' recreate an object with the same value (given an ' - 'appropriate\n' - ' environment). If this is not possible, a string of the ' - 'form\n' - ' "<...some useful description...>" should be returned. ' - 'The return\n' - ' value must be a string object. If a class defines ' - '"__repr__()" but\n' - ' not "__str__()", then "__repr__()" is also used when an ' - '“informal”\n' - ' string representation of instances of that class is ' - 'required.\n' - '\n' - ' This is typically used for debugging, so it is important ' - 'that the\n' - ' representation is information-rich and unambiguous.\n' - '\n' - 'object.__str__(self)\n' - '\n' - ' Called by "str(object)" and the built-in functions ' - '"format()" and\n' - ' "print()" to compute the “informal” or nicely printable ' - 'string\n' - ' representation of an object. The return value must be a ' - 'string\n' - ' object.\n' - '\n' - ' This method differs from "object.__repr__()" in that ' - 'there is no\n' - ' expectation that "__str__()" return a valid Python ' - 'expression: a\n' - ' more convenient or concise representation can be used.\n' - '\n' - ' The default implementation defined by the built-in type ' - '"object"\n' - ' calls "object.__repr__()".\n' - '\n' - 'object.__bytes__(self)\n' - '\n' - ' Called by bytes to compute a byte-string representation ' - 'of an\n' - ' object. This should return a "bytes" object.\n' - '\n' - 'object.__format__(self, format_spec)\n' - '\n' - ' Called by the "format()" built-in function, and by ' - 'extension,\n' - ' evaluation of formatted string literals and the ' - '"str.format()"\n' - ' method, to produce a “formatted” string representation ' - 'of an\n' - ' object. The *format_spec* argument is a string that ' - 'contains a\n' - ' description of the formatting options desired. 
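A brief sketch of the "__repr__()"/"__str__()" split described above (the "Ratio" class is hypothetical): "repr()" aims at an unambiguous, ideally re-evaluable form, while "str()" aims at a readable one.

```python
class Ratio:
    def __init__(self, num, den):
        self.num, self.den = num, den

    def __repr__(self):
        return f"Ratio({self.num!r}, {self.den!r})"

    def __str__(self):
        return f"{self.num}/{self.den}"

r = Ratio(1, 3)
print(repr(r))   # Ratio(1, 3)
print(str(r))    # 1/3
print(f"{r}")    # 1/3 -- format() with an empty spec falls back to __str__
print([r])       # [Ratio(1, 3)] -- containers use __repr__ for their items
```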
The ' - 'interpretation\n' - ' of the *format_spec* argument is up to the type ' - 'implementing\n' - ' "__format__()", however most classes will either ' - 'delegate\n' - ' formatting to one of the built-in types, or use a ' - 'similar\n' - ' formatting option syntax.\n' - '\n' - ' See Format Specification Mini-Language for a description ' - 'of the\n' - ' standard formatting syntax.\n' - '\n' - ' The return value must be a string object.\n' - '\n' - ' Changed in version 3.4: The __format__ method of ' - '"object" itself\n' - ' raises a "TypeError" if passed any non-empty string.\n' - '\n' - ' Changed in version 3.7: "object.__format__(x, \'\')" is ' - 'now\n' - ' equivalent to "str(x)" rather than "format(str(x), ' - '\'\')".\n' - '\n' - 'object.__lt__(self, other)\n' - 'object.__le__(self, other)\n' - 'object.__eq__(self, other)\n' - 'object.__ne__(self, other)\n' - 'object.__gt__(self, other)\n' - 'object.__ge__(self, other)\n' - '\n' - ' These are the so-called “rich comparison” methods. The\n' - ' correspondence between operator symbols and method names ' - 'is as\n' - ' follows: "xy" calls\n' - ' "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n' - '\n' - ' A rich comparison method may return the singleton ' - '"NotImplemented"\n' - ' if it does not implement the operation for a given pair ' - 'of\n' - ' arguments. By convention, "False" and "True" are ' - 'returned for a\n' - ' successful comparison. However, these methods can return ' - 'any value,\n' - ' so if the comparison operator is used in a Boolean ' - 'context (e.g.,\n' - ' in the condition of an "if" statement), Python will call ' - '"bool()"\n' - ' on the value to determine if the result is true or ' - 'false.\n' - '\n' - ' By default, "object" implements "__eq__()" by using ' - '"is", returning\n' - ' "NotImplemented" in the case of a false comparison: ' - '"True if x is y\n' - ' else NotImplemented". For "__ne__()", by default it ' - 'delegates to\n' - ' "__eq__()" and inverts the result unless it is ' - '"NotImplemented".\n' - ' There are no other implied relationships among the ' - 'comparison\n' - ' operators or default implementations; for example, the ' - 'truth of\n' - ' "(x.__hash__".\n' - '\n' - ' If a class that does not override "__eq__()" wishes to ' - 'suppress\n' - ' hash support, it should include "__hash__ = None" in the ' - 'class\n' - ' definition. A class which defines its own "__hash__()" ' - 'that\n' - ' explicitly raises a "TypeError" would be incorrectly ' - 'identified as\n' - ' hashable by an "isinstance(obj, ' - 'collections.abc.Hashable)" call.\n' - '\n' - ' Note:\n' - '\n' - ' By default, the "__hash__()" values of str and bytes ' - 'objects are\n' - ' “salted” with an unpredictable random value. Although ' - 'they\n' - ' remain constant within an individual Python process, ' - 'they are not\n' - ' predictable between repeated invocations of ' - 'Python.This is\n' - ' intended to provide protection against a ' - 'denial-of-service caused\n' - ' by carefully-chosen inputs that exploit the worst ' - 'case\n' - ' performance of a dict insertion, O(n^2) complexity. 
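A minimal sketch of keeping "__eq__()" and "__hash__()" consistent, as the preceding paragraphs require (the "Card" class is hypothetical): objects that compare equal must hash equal so they collapse in sets and dictionary keys.

```python
class Card:
    def __init__(self, rank, suit):
        self.rank, self.suit = rank, suit

    def __eq__(self, other):
        if not isinstance(other, Card):
            return NotImplemented          # let the other operand's method try
        return (self.rank, self.suit) == (other.rank, other.suit)

    def __hash__(self):
        return hash((self.rank, self.suit))

deck = {Card("A", "spades"), Card("A", "spades")}
print(len(deck))                           # 1 -- equal cards collapse to one entry
```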
' - 'See\n' - ' http://www.ocert.org/advisories/ocert-2011-003.html ' - 'for\n' - ' details.Changing hash values affects the iteration ' - 'order of sets.\n' - ' Python has never made guarantees about this ordering ' - '(and it\n' - ' typically varies between 32-bit and 64-bit builds).See ' - 'also\n' - ' "PYTHONHASHSEED".\n' - '\n' - ' Changed in version 3.3: Hash randomization is enabled by ' - 'default.\n' - '\n' - 'object.__bool__(self)\n' - '\n' - ' Called to implement truth value testing and the built-in ' - 'operation\n' - ' "bool()"; should return "False" or "True". When this ' - 'method is not\n' - ' defined, "__len__()" is called, if it is defined, and ' - 'the object is\n' - ' considered true if its result is nonzero. If a class ' - 'defines\n' - ' neither "__len__()" nor "__bool__()", all its instances ' - 'are\n' - ' considered true.\n', - 'debugger': '"pdb" — The Python Debugger\n' - '***************************\n' - '\n' - '**Source code:** Lib/pdb.py\n' - '\n' - '======================================================================\n' - '\n' - 'The module "pdb" defines an interactive source code debugger ' - 'for\n' - 'Python programs. It supports setting (conditional) breakpoints ' - 'and\n' - 'single stepping at the source line level, inspection of stack ' - 'frames,\n' - 'source code listing, and evaluation of arbitrary Python code in ' - 'the\n' - 'context of any stack frame. It also supports post-mortem ' - 'debugging\n' - 'and can be called under program control.\n' - '\n' - 'The debugger is extensible – it is actually defined as the ' - 'class\n' - '"Pdb". This is currently undocumented but easily understood by ' - 'reading\n' - 'the source. The extension interface uses the modules "bdb" and ' - '"cmd".\n' - '\n' - 'The debugger’s prompt is "(Pdb)". Typical usage to run a program ' - 'under\n' - 'control of the debugger is:\n' - '\n' - ' >>> import pdb\n' - ' >>> import mymodule\n' - " >>> pdb.run('mymodule.test()')\n" - ' > (0)?()\n' - ' (Pdb) continue\n' - ' > (1)?()\n' - ' (Pdb) continue\n' - " NameError: 'spam'\n" - ' > (1)?()\n' - ' (Pdb)\n' - '\n' - 'Changed in version 3.3: Tab-completion via the "readline" module ' - 'is\n' - 'available for commands and command arguments, e.g. the current ' - 'global\n' - 'and local names are offered as arguments of the "p" command.\n' - '\n' - '"pdb.py" can also be invoked as a script to debug other ' - 'scripts. For\n' - 'example:\n' - '\n' - ' python3 -m pdb myscript.py\n' - '\n' - 'When invoked as a script, pdb will automatically enter ' - 'post-mortem\n' - 'debugging if the program being debugged exits abnormally. After ' - 'post-\n' - 'mortem debugging (or after normal exit of the program), pdb ' - 'will\n' - 'restart the program. Automatic restarting preserves pdb’s state ' - '(such\n' - 'as breakpoints) and in most cases is more useful than quitting ' - 'the\n' - 'debugger upon program’s exit.\n' - '\n' - 'New in version 3.2: "pdb.py" now accepts a "-c" option that ' - 'executes\n' - 'commands as if given in a ".pdbrc" file, see Debugger Commands.\n' - '\n' - 'New in version 3.7: "pdb.py" now accepts a "-m" option that ' - 'execute\n' - 'modules similar to the way "python3 -m" does. As with a script, ' - 'the\n' - 'debugger will pause execution just before the first line of the\n' - 'module.\n' - '\n' - 'The typical usage to break into the debugger from a running ' - 'program is\n' - 'to insert\n' - '\n' - ' import pdb; pdb.set_trace()\n' - '\n' - 'at the location you want to break into the debugger. 
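For example, in a small hypothetical script; "breakpoint()" with default settings is equivalent to the "import pdb; pdb.set_trace()" idiom shown above.

```python
def average(values):
    total = sum(values)
    breakpoint()              # execution pauses here with a (Pdb) prompt
    return total / len(values)

print(average([2, 4, 6]))
# At the (Pdb) prompt you might type:
#   p total     -> 12
#   n           -> execute the return statement
#   c           -> continue running without the debugger
```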
You can ' - 'then\n' - 'step through the code following this statement, and continue ' - 'running\n' - 'without the debugger using the "continue" command.\n' - '\n' - 'New in version 3.7: The built-in "breakpoint()", when called ' - 'with\n' - 'defaults, can be used instead of "import pdb; pdb.set_trace()".\n' - '\n' - 'The typical usage to inspect a crashed program is:\n' - '\n' - ' >>> import pdb\n' - ' >>> import mymodule\n' - ' >>> mymodule.test()\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - ' File "./mymodule.py", line 4, in test\n' - ' test2()\n' - ' File "./mymodule.py", line 3, in test2\n' - ' print(spam)\n' - ' NameError: spam\n' - ' >>> pdb.pm()\n' - ' > ./mymodule.py(3)test2()\n' - ' -> print(spam)\n' - ' (Pdb)\n' - '\n' - 'The module defines the following functions; each enters the ' - 'debugger\n' - 'in a slightly different way:\n' - '\n' - 'pdb.run(statement, globals=None, locals=None)\n' - '\n' - ' Execute the *statement* (given as a string or a code object) ' - 'under\n' - ' debugger control. The debugger prompt appears before any ' - 'code is\n' - ' executed; you can set breakpoints and type "continue", or you ' - 'can\n' - ' step through the statement using "step" or "next" (all these\n' - ' commands are explained below). The optional *globals* and ' - '*locals*\n' - ' arguments specify the environment in which the code is ' - 'executed; by\n' - ' default the dictionary of the module "__main__" is used. ' - '(See the\n' - ' explanation of the built-in "exec()" or "eval()" functions.)\n' - '\n' - 'pdb.runeval(expression, globals=None, locals=None)\n' - '\n' - ' Evaluate the *expression* (given as a string or a code ' - 'object)\n' - ' under debugger control. When "runeval()" returns, it returns ' - 'the\n' - ' value of the expression. Otherwise this function is similar ' - 'to\n' - ' "run()".\n' - '\n' - 'pdb.runcall(function, *args, **kwds)\n' - '\n' - ' Call the *function* (a function or method object, not a ' - 'string)\n' - ' with the given arguments. When "runcall()" returns, it ' - 'returns\n' - ' whatever the function call returned. The debugger prompt ' - 'appears\n' - ' as soon as the function is entered.\n' - '\n' - 'pdb.set_trace(*, header=None)\n' - '\n' - ' Enter the debugger at the calling stack frame. This is ' - 'useful to\n' - ' hard-code a breakpoint at a given point in a program, even if ' - 'the\n' - ' code is not otherwise being debugged (e.g. when an assertion\n' - ' fails). If given, *header* is printed to the console just ' - 'before\n' - ' debugging begins.\n' - '\n' - ' Changed in version 3.7: The keyword-only argument *header*.\n' - '\n' - 'pdb.post_mortem(traceback=None)\n' - '\n' - ' Enter post-mortem debugging of the given *traceback* object. ' - 'If no\n' - ' *traceback* is given, it uses the one of the exception that ' - 'is\n' - ' currently being handled (an exception must be being handled ' - 'if the\n' - ' default is to be used).\n' - '\n' - 'pdb.pm()\n' - '\n' - ' Enter post-mortem debugging of the traceback found in\n' - ' "sys.last_traceback".\n' - '\n' - 'The "run*" functions and "set_trace()" are aliases for ' - 'instantiating\n' - 'the "Pdb" class and calling the method of the same name. 
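A short sketch using the module-level helpers documented above (the "divide" function is hypothetical; both calls are interactive and open a "(Pdb)" prompt).

```python
import pdb

def divide(a, b):
    return a / b

pdb.runcall(divide, 6, 3)       # prompt appears as soon as divide() is entered

try:
    divide(1, 0)
except ZeroDivisionError:
    pdb.post_mortem()           # inspect the frame where the error occurred
```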
If you ' - 'want\n' - 'to access further features, you have to do this yourself:\n' - '\n' - "class pdb.Pdb(completekey='tab', stdin=None, stdout=None, " - 'skip=None, nosigint=False, readrc=True)\n' - '\n' - ' "Pdb" is the debugger class.\n' - '\n' - ' The *completekey*, *stdin* and *stdout* arguments are passed ' - 'to the\n' - ' underlying "cmd.Cmd" class; see the description there.\n' - '\n' - ' The *skip* argument, if given, must be an iterable of ' - 'glob-style\n' - ' module name patterns. The debugger will not step into frames ' - 'that\n' - ' originate in a module that matches one of these patterns. ' - '[1]\n' - '\n' - ' By default, Pdb sets a handler for the SIGINT signal (which ' - 'is sent\n' - ' when the user presses "Ctrl-C" on the console) when you give ' - 'a\n' - ' "continue" command. This allows you to break into the ' - 'debugger\n' - ' again by pressing "Ctrl-C". If you want Pdb not to touch ' - 'the\n' - ' SIGINT handler, set *nosigint* to true.\n' - '\n' - ' The *readrc* argument defaults to true and controls whether ' - 'Pdb\n' - ' will load .pdbrc files from the filesystem.\n' - '\n' - ' Example call to enable tracing with *skip*:\n' - '\n' - " import pdb; pdb.Pdb(skip=['django.*']).set_trace()\n" - '\n' - ' Raises an auditing event "pdb.Pdb" with no arguments.\n' - '\n' - ' New in version 3.1: The *skip* argument.\n' - '\n' - ' New in version 3.2: The *nosigint* argument. Previously, a ' - 'SIGINT\n' - ' handler was never set by Pdb.\n' - '\n' - ' Changed in version 3.6: The *readrc* argument.\n' - '\n' - ' run(statement, globals=None, locals=None)\n' - ' runeval(expression, globals=None, locals=None)\n' - ' runcall(function, *args, **kwds)\n' - ' set_trace()\n' - '\n' - ' See the documentation for the functions explained above.\n' - '\n' - '\n' - 'Debugger Commands\n' - '=================\n' - '\n' - 'The commands recognized by the debugger are listed below. Most\n' - 'commands can be abbreviated to one or two letters as indicated; ' - 'e.g.\n' - '"h(elp)" means that either "h" or "help" can be used to enter ' - 'the help\n' - 'command (but not "he" or "hel", nor "H" or "Help" or "HELP").\n' - 'Arguments to commands must be separated by whitespace (spaces ' - 'or\n' - 'tabs). Optional arguments are enclosed in square brackets ' - '("[]") in\n' - 'the command syntax; the square brackets must not be typed.\n' - 'Alternatives in the command syntax are separated by a vertical ' - 'bar\n' - '("|").\n' - '\n' - 'Entering a blank line repeats the last command entered. ' - 'Exception: if\n' - 'the last command was a "list" command, the next 11 lines are ' - 'listed.\n' - '\n' - 'Commands that the debugger doesn’t recognize are assumed to be ' - 'Python\n' - 'statements and are executed in the context of the program being\n' - 'debugged. Python statements can also be prefixed with an ' - 'exclamation\n' - 'point ("!"). This is a powerful way to inspect the program ' - 'being\n' - 'debugged; it is even possible to change a variable or call a ' - 'function.\n' - 'When an exception occurs in such a statement, the exception name ' - 'is\n' - 'printed but the debugger’s state is not changed.\n' - '\n' - 'The debugger supports aliases. 
Aliases can have parameters ' - 'which\n' - 'allows one a certain level of adaptability to the context under\n' - 'examination.\n' - '\n' - 'Multiple commands may be entered on a single line, separated by ' - '";;".\n' - '(A single ";" is not used as it is the separator for multiple ' - 'commands\n' - 'in a line that is passed to the Python parser.) No intelligence ' - 'is\n' - 'applied to separating the commands; the input is split at the ' - 'first\n' - '";;" pair, even if it is in the middle of a quoted string.\n' - '\n' - 'If a file ".pdbrc" exists in the user’s home directory or in ' - 'the\n' - 'current directory, it is read in and executed as if it had been ' - 'typed\n' - 'at the debugger prompt. This is particularly useful for ' - 'aliases. If\n' - 'both files exist, the one in the home directory is read first ' - 'and\n' - 'aliases defined there can be overridden by the local file.\n' - '\n' - 'Changed in version 3.2: ".pdbrc" can now contain commands that\n' - 'continue debugging, such as "continue" or "next". Previously, ' - 'these\n' - 'commands had no effect.\n' - '\n' - 'h(elp) [command]\n' - '\n' - ' Without argument, print the list of available commands. With ' - 'a\n' - ' *command* as argument, print help about that command. "help ' - 'pdb"\n' - ' displays the full documentation (the docstring of the "pdb"\n' - ' module). Since the *command* argument must be an identifier, ' - '"help\n' - ' exec" must be entered to get help on the "!" command.\n' - '\n' - 'w(here)\n' - '\n' - ' Print a stack trace, with the most recent frame at the ' - 'bottom. An\n' - ' arrow indicates the current frame, which determines the ' - 'context of\n' - ' most commands.\n' - '\n' - 'd(own) [count]\n' - '\n' - ' Move the current frame *count* (default one) levels down in ' - 'the\n' - ' stack trace (to a newer frame).\n' - '\n' - 'u(p) [count]\n' - '\n' - ' Move the current frame *count* (default one) levels up in the ' - 'stack\n' - ' trace (to an older frame).\n' - '\n' - 'b(reak) [([filename:]lineno | function) [, condition]]\n' - '\n' - ' With a *lineno* argument, set a break there in the current ' - 'file.\n' - ' With a *function* argument, set a break at the first ' - 'executable\n' - ' statement within that function. The line number may be ' - 'prefixed\n' - ' with a filename and a colon, to specify a breakpoint in ' - 'another\n' - ' file (probably one that hasn’t been loaded yet). The file ' - 'is\n' - ' searched on "sys.path". Note that each breakpoint is ' - 'assigned a\n' - ' number to which all the other breakpoint commands refer.\n' - '\n' - ' If a second argument is present, it is an expression which ' - 'must\n' - ' evaluate to true before the breakpoint is honored.\n' - '\n' - ' Without argument, list all breaks, including for each ' - 'breakpoint,\n' - ' the number of times that breakpoint has been hit, the ' - 'current\n' - ' ignore count, and the associated condition if any.\n' - '\n' - 'tbreak [([filename:]lineno | function) [, condition]]\n' - '\n' - ' Temporary breakpoint, which is removed automatically when it ' - 'is\n' - ' first hit. The arguments are the same as for "break".\n' - '\n' - 'cl(ear) [filename:lineno | bpnumber ...]\n' - '\n' - ' With a *filename:lineno* argument, clear all the breakpoints ' - 'at\n' - ' this line. With a space separated list of breakpoint numbers, ' - 'clear\n' - ' those breakpoints. 
Without argument, clear all breaks (but ' - 'first\n' - ' ask confirmation).\n' - '\n' - 'disable [bpnumber ...]\n' - '\n' - ' Disable the breakpoints given as a space separated list of\n' - ' breakpoint numbers. Disabling a breakpoint means it cannot ' - 'cause\n' - ' the program to stop execution, but unlike clearing a ' - 'breakpoint, it\n' - ' remains in the list of breakpoints and can be (re-)enabled.\n' - '\n' - 'enable [bpnumber ...]\n' - '\n' - ' Enable the breakpoints specified.\n' - '\n' - 'ignore bpnumber [count]\n' - '\n' - ' Set the ignore count for the given breakpoint number. If ' - 'count is\n' - ' omitted, the ignore count is set to 0. A breakpoint becomes ' - 'active\n' - ' when the ignore count is zero. When non-zero, the count is\n' - ' decremented each time the breakpoint is reached and the ' - 'breakpoint\n' - ' is not disabled and any associated condition evaluates to ' - 'true.\n' - '\n' - 'condition bpnumber [condition]\n' - '\n' - ' Set a new *condition* for the breakpoint, an expression which ' - 'must\n' - ' evaluate to true before the breakpoint is honored. If ' - '*condition*\n' - ' is absent, any existing condition is removed; i.e., the ' - 'breakpoint\n' - ' is made unconditional.\n' - '\n' - 'commands [bpnumber]\n' - '\n' - ' Specify a list of commands for breakpoint number *bpnumber*. ' - 'The\n' - ' commands themselves appear on the following lines. Type a ' - 'line\n' - ' containing just "end" to terminate the commands. An example:\n' - '\n' - ' (Pdb) commands 1\n' - ' (com) p some_variable\n' - ' (com) end\n' - ' (Pdb)\n' - '\n' - ' To remove all commands from a breakpoint, type "commands" ' - 'and\n' - ' follow it immediately with "end"; that is, give no commands.\n' - '\n' - ' With no *bpnumber* argument, "commands" refers to the last\n' - ' breakpoint set.\n' - '\n' - ' You can use breakpoint commands to start your program up ' - 'again.\n' - ' Simply use the "continue" command, or "step", or any other ' - 'command\n' - ' that resumes execution.\n' - '\n' - ' Specifying any command resuming execution (currently ' - '"continue",\n' - ' "step", "next", "return", "jump", "quit" and their ' - 'abbreviations)\n' - ' terminates the command list (as if that command was ' - 'immediately\n' - ' followed by end). This is because any time you resume ' - 'execution\n' - ' (even with a simple next or step), you may encounter another\n' - ' breakpoint—which could have its own command list, leading to\n' - ' ambiguities about which list to execute.\n' - '\n' - ' If you use the ‘silent’ command in the command list, the ' - 'usual\n' - ' message about stopping at a breakpoint is not printed. This ' - 'may be\n' - ' desirable for breakpoints that are to print a specific ' - 'message and\n' - ' then continue. If none of the other commands print anything, ' - 'you\n' - ' see no sign that the breakpoint was reached.\n' - '\n' - 's(tep)\n' - '\n' - ' Execute the current line, stop at the first possible ' - 'occasion\n' - ' (either in a function that is called or on the next line in ' - 'the\n' - ' current function).\n' - '\n' - 'n(ext)\n' - '\n' - ' Continue execution until the next line in the current ' - 'function is\n' - ' reached or it returns. 
(The difference between "next" and ' - '"step"\n' - ' is that "step" stops inside a called function, while "next"\n' - ' executes called functions at (nearly) full speed, only ' - 'stopping at\n' - ' the next line in the current function.)\n' - '\n' - 'unt(il) [lineno]\n' - '\n' - ' Without argument, continue execution until the line with a ' - 'number\n' - ' greater than the current one is reached.\n' - '\n' - ' With a line number, continue execution until a line with a ' - 'number\n' - ' greater or equal to that is reached. In both cases, also ' - 'stop when\n' - ' the current frame returns.\n' - '\n' - ' Changed in version 3.2: Allow giving an explicit line ' - 'number.\n' - '\n' - 'r(eturn)\n' - '\n' - ' Continue execution until the current function returns.\n' - '\n' - 'c(ont(inue))\n' - '\n' - ' Continue execution, only stop when a breakpoint is ' - 'encountered.\n' - '\n' - 'j(ump) lineno\n' - '\n' - ' Set the next line that will be executed. Only available in ' - 'the\n' - ' bottom-most frame. This lets you jump back and execute code ' - 'again,\n' - ' or jump forward to skip code that you don’t want to run.\n' - '\n' - ' It should be noted that not all jumps are allowed – for ' - 'instance it\n' - ' is not possible to jump into the middle of a "for" loop or ' - 'out of a\n' - ' "finally" clause.\n' - '\n' - 'l(ist) [first[, last]]\n' - '\n' - ' List source code for the current file. Without arguments, ' - 'list 11\n' - ' lines around the current line or continue the previous ' - 'listing.\n' - ' With "." as argument, list 11 lines around the current line. ' - 'With\n' - ' one argument, list 11 lines around at that line. With two\n' - ' arguments, list the given range; if the second argument is ' - 'less\n' - ' than the first, it is interpreted as a count.\n' - '\n' - ' The current line in the current frame is indicated by "->". 
' - 'If an\n' - ' exception is being debugged, the line where the exception ' - 'was\n' - ' originally raised or propagated is indicated by ">>", if it ' - 'differs\n' - ' from the current line.\n' - '\n' - ' New in version 3.2: The ">>" marker.\n' - '\n' - 'll | longlist\n' - '\n' - ' List all source code for the current function or frame.\n' - ' Interesting lines are marked as for "list".\n' - '\n' - ' New in version 3.2.\n' - '\n' - 'a(rgs)\n' - '\n' - ' Print the argument list of the current function.\n' - '\n' - 'p expression\n' - '\n' - ' Evaluate the *expression* in the current context and print ' - 'its\n' - ' value.\n' - '\n' - ' Note:\n' - '\n' - ' "print()" can also be used, but is not a debugger command — ' - 'this\n' - ' executes the Python "print()" function.\n' - '\n' - 'pp expression\n' - '\n' - ' Like the "p" command, except the value of the expression is ' - 'pretty-\n' - ' printed using the "pprint" module.\n' - '\n' - 'whatis expression\n' - '\n' - ' Print the type of the *expression*.\n' - '\n' - 'source expression\n' - '\n' - ' Try to get source code for the given object and display it.\n' - '\n' - ' New in version 3.2.\n' - '\n' - 'display [expression]\n' - '\n' - ' Display the value of the expression if it changed, each time\n' - ' execution stops in the current frame.\n' - '\n' - ' Without expression, list all display expressions for the ' - 'current\n' - ' frame.\n' - '\n' - ' New in version 3.2.\n' - '\n' - 'undisplay [expression]\n' - '\n' - ' Do not display the expression any more in the current frame.\n' - ' Without expression, clear all display expressions for the ' - 'current\n' - ' frame.\n' - '\n' - ' New in version 3.2.\n' - '\n' - 'interact\n' - '\n' - ' Start an interactive interpreter (using the "code" module) ' - 'whose\n' - ' global namespace contains all the (global and local) names ' - 'found in\n' - ' the current scope.\n' - '\n' - ' New in version 3.2.\n' - '\n' - 'alias [name [command]]\n' - '\n' - ' Create an alias called *name* that executes *command*. The ' - 'command\n' - ' must *not* be enclosed in quotes. Replaceable parameters can ' - 'be\n' - ' indicated by "%1", "%2", and so on, while "%*" is replaced by ' - 'all\n' - ' the parameters. If no command is given, the current alias ' - 'for\n' - ' *name* is shown. If no arguments are given, all aliases are ' - 'listed.\n' - '\n' - ' Aliases may be nested and can contain anything that can be ' - 'legally\n' - ' typed at the pdb prompt. Note that internal pdb commands ' - '*can* be\n' - ' overridden by aliases. Such a command is then hidden until ' - 'the\n' - ' alias is removed. Aliasing is recursively applied to the ' - 'first\n' - ' word of the command line; all other words in the line are ' - 'left\n' - ' alone.\n' - '\n' - ' As an example, here are two useful aliases (especially when ' - 'placed\n' - ' in the ".pdbrc" file):\n' - '\n' - ' # Print instance variables (usage "pi classInst")\n' - ' alias pi for k in %1.__dict__.keys(): ' - 'print("%1.",k,"=",%1.__dict__[k])\n' - ' # Print instance variables in self\n' - ' alias ps pi self\n' - '\n' - 'unalias name\n' - '\n' - ' Delete the specified alias.\n' - '\n' - '! statement\n' - '\n' - ' Execute the (one-line) *statement* in the context of the ' - 'current\n' - ' stack frame. The exclamation point can be omitted unless the ' - 'first\n' - ' word of the statement resembles a debugger command. 
To set ' - 'a\n' - ' global variable, you can prefix the assignment command with ' - 'a\n' - ' "global" statement on the same line, e.g.:\n' - '\n' - " (Pdb) global list_options; list_options = ['-l']\n" - ' (Pdb)\n' - '\n' - 'run [args ...]\n' - 'restart [args ...]\n' - '\n' - ' Restart the debugged Python program. If an argument is ' - 'supplied,\n' - ' it is split with "shlex" and the result is used as the new\n' - ' "sys.argv". History, breakpoints, actions and debugger ' - 'options are\n' - ' preserved. "restart" is an alias for "run".\n' - '\n' - 'q(uit)\n' - '\n' - ' Quit from the debugger. The program being executed is ' - 'aborted.\n' - '\n' - 'debug code\n' - '\n' - ' Enter a recursive debugger that steps through the code ' - 'argument\n' - ' (which is an arbitrary expression or statement to be executed ' - 'in\n' - ' the current environment).\n' - '\n' - 'retval\n' - '\n' - ' Print the return value for the last return of a function.\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] Whether a frame is considered to originate in a certain ' - 'module is\n' - ' determined by the "__name__" in the frame globals.\n', - 'del': 'The "del" statement\n' - '*******************\n' - '\n' - ' del_stmt ::= "del" target_list\n' - '\n' - 'Deletion is recursively defined very similar to the way assignment ' - 'is\n' - 'defined. Rather than spelling it out in full details, here are some\n' - 'hints.\n' - '\n' - 'Deletion of a target list recursively deletes each target, from left\n' - 'to right.\n' - '\n' - 'Deletion of a name removes the binding of that name from the local ' - 'or\n' - 'global namespace, depending on whether the name occurs in a "global"\n' - 'statement in the same code block. If the name is unbound, a\n' - '"NameError" exception will be raised.\n' - '\n' - 'Deletion of attribute references, subscriptions and slicings is ' - 'passed\n' - 'to the primary object involved; deletion of a slicing is in general\n' - 'equivalent to assignment of an empty slice of the right type (but ' - 'even\n' - 'this is determined by the sliced object).\n' - '\n' - 'Changed in version 3.2: Previously it was illegal to delete a name\n' - 'from the local namespace if it occurs as a free variable in a nested\n' - 'block.\n', - 'dict': 'Dictionary displays\n' - '*******************\n' - '\n' - 'A dictionary display is a possibly empty series of key/datum pairs\n' - 'enclosed in curly braces:\n' - '\n' - ' dict_display ::= "{" [key_datum_list | dict_comprehension] ' - '"}"\n' - ' key_datum_list ::= key_datum ("," key_datum)* [","]\n' - ' key_datum ::= expression ":" expression | "**" or_expr\n' - ' dict_comprehension ::= expression ":" expression comp_for\n' - '\n' - 'A dictionary display yields a new dictionary object.\n' - '\n' - 'If a comma-separated sequence of key/datum pairs is given, they are\n' - 'evaluated from left to right to define the entries of the ' - 'dictionary:\n' - 'each key object is used as a key into the dictionary to store the\n' - 'corresponding datum. This means that you can specify the same key\n' - 'multiple times in the key/datum list, and the final dictionary’s ' - 'value\n' - 'for that key will be the last one given.\n' - '\n' - 'A double asterisk "**" denotes *dictionary unpacking*. Its operand\n' - 'must be a *mapping*. Each mapping item is added to the new\n' - 'dictionary. 
Later values replace values already set by earlier\n' - 'key/datum pairs and earlier dictionary unpackings.\n' - '\n' - 'New in version 3.5: Unpacking into dictionary displays, originally\n' - 'proposed by **PEP 448**.\n' - '\n' - 'A dict comprehension, in contrast to list and set comprehensions,\n' - 'needs two expressions separated with a colon followed by the usual\n' - '“for” and “if” clauses. When the comprehension is run, the ' - 'resulting\n' - 'key and value elements are inserted in the new dictionary in the ' - 'order\n' - 'they are produced.\n' - '\n' - 'Restrictions on the types of the key values are listed earlier in\n' - 'section The standard type hierarchy. (To summarize, the key type\n' - 'should be *hashable*, which excludes all mutable objects.) Clashes\n' - 'between duplicate keys are not detected; the last datum (textually\n' - 'rightmost in the display) stored for a given key value prevails.\n' - '\n' - 'Changed in version 3.8: Prior to Python 3.8, in dict ' - 'comprehensions,\n' - 'the evaluation order of key and value was not well-defined. In\n' - 'CPython, the value was evaluated before the key. Starting with ' - '3.8,\n' - 'the key is evaluated before the value, as proposed by **PEP 572**.\n', - 'dynamic-features': 'Interaction with dynamic features\n' - '*********************************\n' - '\n' - 'Name resolution of free variables occurs at runtime, not ' - 'at compile\n' - 'time. This means that the following code will print 42:\n' - '\n' - ' i = 10\n' - ' def f():\n' - ' print(i)\n' - ' i = 42\n' - ' f()\n' - '\n' - 'The "eval()" and "exec()" functions do not have access ' - 'to the full\n' - 'environment for resolving names. Names may be resolved ' - 'in the local\n' - 'and global namespaces of the caller. Free variables are ' - 'not resolved\n' - 'in the nearest enclosing namespace, but in the global ' - 'namespace. [1]\n' - 'The "exec()" and "eval()" functions have optional ' - 'arguments to\n' - 'override the global and local namespace. If only one ' - 'namespace is\n' - 'specified, it is used for both.\n', - 'else': 'The "if" statement\n' - '******************\n' - '\n' - 'The "if" statement is used for conditional execution:\n' - '\n' - ' if_stmt ::= "if" assignment_expression ":" suite\n' - ' ("elif" assignment_expression ":" suite)*\n' - ' ["else" ":" suite]\n' - '\n' - 'It selects exactly one of the suites by evaluating the expressions ' - 'one\n' - 'by one until one is found to be true (see section Boolean ' - 'operations\n' - 'for the definition of true and false); then that suite is executed\n' - '(and no other part of the "if" statement is executed or evaluated).\n' - 'If all expressions are false, the suite of the "else" clause, if\n' - 'present, is executed.\n', - 'exceptions': 'Exceptions\n' - '**********\n' - '\n' - 'Exceptions are a means of breaking out of the normal flow of ' - 'control\n' - 'of a code block in order to handle errors or other ' - 'exceptional\n' - 'conditions. An exception is *raised* at the point where the ' - 'error is\n' - 'detected; it may be *handled* by the surrounding code block or ' - 'by any\n' - 'code block that directly or indirectly invoked the code block ' - 'where\n' - 'the error occurred.\n' - '\n' - 'The Python interpreter raises an exception when it detects a ' - 'run-time\n' - 'error (such as division by zero). A Python program can also\n' - 'explicitly raise an exception with the "raise" statement. ' - 'Exception\n' - 'handlers are specified with the "try" … "except" statement. 
' - 'The\n' - '"finally" clause of such a statement can be used to specify ' - 'cleanup\n' - 'code which does not handle the exception, but is executed ' - 'whether an\n' - 'exception occurred or not in the preceding code.\n' - '\n' - 'Python uses the “termination” model of error handling: an ' - 'exception\n' - 'handler can find out what happened and continue execution at ' - 'an outer\n' - 'level, but it cannot repair the cause of the error and retry ' - 'the\n' - 'failing operation (except by re-entering the offending piece ' - 'of code\n' - 'from the top).\n' - '\n' - 'When an exception is not handled at all, the interpreter ' - 'terminates\n' - 'execution of the program, or returns to its interactive main ' - 'loop. In\n' - 'either case, it prints a stack traceback, except when the ' - 'exception is\n' - '"SystemExit".\n' - '\n' - 'Exceptions are identified by class instances. The "except" ' - 'clause is\n' - 'selected depending on the class of the instance: it must ' - 'reference the\n' - 'class of the instance or a base class thereof. The instance ' - 'can be\n' - 'received by the handler and can carry additional information ' - 'about the\n' - 'exceptional condition.\n' - '\n' - 'Note:\n' - '\n' - ' Exception messages are not part of the Python API. Their ' - 'contents\n' - ' may change from one version of Python to the next without ' - 'warning\n' - ' and should not be relied on by code which will run under ' - 'multiple\n' - ' versions of the interpreter.\n' - '\n' - 'See also the description of the "try" statement in section The ' - 'try\n' - 'statement and "raise" statement in section The raise ' - 'statement.\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] This limitation occurs because the code that is executed ' - 'by these\n' - ' operations is not available at the time the module is ' - 'compiled.\n', - 'execmodel': 'Execution model\n' - '***************\n' - '\n' - '\n' - 'Structure of a program\n' - '======================\n' - '\n' - 'A Python program is constructed from code blocks. A *block* is ' - 'a piece\n' - 'of Python program text that is executed as a unit. The ' - 'following are\n' - 'blocks: a module, a function body, and a class definition. ' - 'Each\n' - 'command typed interactively is a block. A script file (a file ' - 'given\n' - 'as standard input to the interpreter or specified as a command ' - 'line\n' - 'argument to the interpreter) is a code block. A script command ' - '(a\n' - 'command specified on the interpreter command line with the ' - '"-c"\n' - 'option) is a code block. A module run as a top level script (as ' - 'module\n' - '"__main__") from the command line using a "-m" argument is also ' - 'a code\n' - 'block. The string argument passed to the built-in functions ' - '"eval()"\n' - 'and "exec()" is a code block.\n' - '\n' - 'A code block is executed in an *execution frame*. A frame ' - 'contains\n' - 'some administrative information (used for debugging) and ' - 'determines\n' - 'where and how execution continues after the code block’s ' - 'execution has\n' - 'completed.\n' - '\n' - '\n' - 'Naming and binding\n' - '==================\n' - '\n' - '\n' - 'Binding of names\n' - '----------------\n' - '\n' - '*Names* refer to objects. 
Names are introduced by name ' - 'binding\n' - 'operations.\n' - '\n' - 'The following constructs bind names: formal parameters to ' - 'functions,\n' - '"import" statements, class and function definitions (these bind ' - 'the\n' - 'class or function name in the defining block), and targets that ' - 'are\n' - 'identifiers if occurring in an assignment, "for" loop header, ' - 'or after\n' - '"as" in a "with" statement or "except" clause. The "import" ' - 'statement\n' - 'of the form "from ... import *" binds all names defined in the\n' - 'imported module, except those beginning with an underscore. ' - 'This form\n' - 'may only be used at the module level.\n' - '\n' - 'A target occurring in a "del" statement is also considered ' - 'bound for\n' - 'this purpose (though the actual semantics are to unbind the ' - 'name).\n' - '\n' - 'Each assignment or import statement occurs within a block ' - 'defined by a\n' - 'class or function definition or at the module level (the ' - 'top-level\n' - 'code block).\n' - '\n' - 'If a name is bound in a block, it is a local variable of that ' - 'block,\n' - 'unless declared as "nonlocal" or "global". If a name is bound ' - 'at the\n' - 'module level, it is a global variable. (The variables of the ' - 'module\n' - 'code block are local and global.) If a variable is used in a ' - 'code\n' - 'block but not defined there, it is a *free variable*.\n' - '\n' - 'Each occurrence of a name in the program text refers to the ' - '*binding*\n' - 'of that name established by the following name resolution ' - 'rules.\n' - '\n' - '\n' - 'Resolution of names\n' - '-------------------\n' - '\n' - 'A *scope* defines the visibility of a name within a block. If ' - 'a local\n' - 'variable is defined in a block, its scope includes that block. ' - 'If the\n' - 'definition occurs in a function block, the scope extends to any ' - 'blocks\n' - 'contained within the defining one, unless a contained block ' - 'introduces\n' - 'a different binding for the name.\n' - '\n' - 'When a name is used in a code block, it is resolved using the ' - 'nearest\n' - 'enclosing scope. The set of all such scopes visible to a code ' - 'block\n' - 'is called the block’s *environment*.\n' - '\n' - 'When a name is not found at all, a "NameError" exception is ' - 'raised. If\n' - 'the current scope is a function scope, and the name refers to a ' - 'local\n' - 'variable that has not yet been bound to a value at the point ' - 'where the\n' - 'name is used, an "UnboundLocalError" exception is raised.\n' - '"UnboundLocalError" is a subclass of "NameError".\n' - '\n' - 'If a name binding operation occurs anywhere within a code ' - 'block, all\n' - 'uses of the name within the block are treated as references to ' - 'the\n' - 'current block. This can lead to errors when a name is used ' - 'within a\n' - 'block before it is bound. This rule is subtle. Python lacks\n' - 'declarations and allows name binding operations to occur ' - 'anywhere\n' - 'within a code block. The local variables of a code block can ' - 'be\n' - 'determined by scanning the entire text of the block for name ' - 'binding\n' - 'operations.\n' - '\n' - 'If the "global" statement occurs within a block, all uses of ' - 'the name\n' - 'specified in the statement refer to the binding of that name in ' - 'the\n' - 'top-level namespace. Names are resolved in the top-level ' - 'namespace by\n' - 'searching the global namespace, i.e. 
the namespace of the ' - 'module\n' - 'containing the code block, and the builtins namespace, the ' - 'namespace\n' - 'of the module "builtins". The global namespace is searched ' - 'first. If\n' - 'the name is not found there, the builtins namespace is ' - 'searched. The\n' - '"global" statement must precede all uses of the name.\n' - '\n' - 'The "global" statement has the same scope as a name binding ' - 'operation\n' - 'in the same block. If the nearest enclosing scope for a free ' - 'variable\n' - 'contains a global statement, the free variable is treated as a ' - 'global.\n' - '\n' - 'The "nonlocal" statement causes corresponding names to refer ' - 'to\n' - 'previously bound variables in the nearest enclosing function ' - 'scope.\n' - '"SyntaxError" is raised at compile time if the given name does ' - 'not\n' - 'exist in any enclosing function scope.\n' - '\n' - 'The namespace for a module is automatically created the first ' - 'time a\n' - 'module is imported. The main module for a script is always ' - 'called\n' - '"__main__".\n' - '\n' - 'Class definition blocks and arguments to "exec()" and "eval()" ' - 'are\n' - 'special in the context of name resolution. A class definition ' - 'is an\n' - 'executable statement that may use and define names. These ' - 'references\n' - 'follow the normal rules for name resolution with an exception ' - 'that\n' - 'unbound local variables are looked up in the global namespace. ' - 'The\n' - 'namespace of the class definition becomes the attribute ' - 'dictionary of\n' - 'the class. The scope of names defined in a class block is ' - 'limited to\n' - 'the class block; it does not extend to the code blocks of ' - 'methods –\n' - 'this includes comprehensions and generator expressions since ' - 'they are\n' - 'implemented using a function scope. This means that the ' - 'following\n' - 'will fail:\n' - '\n' - ' class A:\n' - ' a = 42\n' - ' b = list(a + i for i in range(10))\n' - '\n' - '\n' - 'Builtins and restricted execution\n' - '---------------------------------\n' - '\n' - '**CPython implementation detail:** Users should not touch\n' - '"__builtins__"; it is strictly an implementation detail. ' - 'Users\n' - 'wanting to override values in the builtins namespace should ' - '"import"\n' - 'the "builtins" module and modify its attributes appropriately.\n' - '\n' - 'The builtins namespace associated with the execution of a code ' - 'block\n' - 'is actually found by looking up the name "__builtins__" in its ' - 'global\n' - 'namespace; this should be a dictionary or a module (in the ' - 'latter case\n' - 'the module’s dictionary is used). By default, when in the ' - '"__main__"\n' - 'module, "__builtins__" is the built-in module "builtins"; when ' - 'in any\n' - 'other module, "__builtins__" is an alias for the dictionary of ' - 'the\n' - '"builtins" module itself.\n' - '\n' - '\n' - 'Interaction with dynamic features\n' - '---------------------------------\n' - '\n' - 'Name resolution of free variables occurs at runtime, not at ' - 'compile\n' - 'time. This means that the following code will print 42:\n' - '\n' - ' i = 10\n' - ' def f():\n' - ' print(i)\n' - ' i = 42\n' - ' f()\n' - '\n' - 'The "eval()" and "exec()" functions do not have access to the ' - 'full\n' - 'environment for resolving names. Names may be resolved in the ' - 'local\n' - 'and global namespaces of the caller. Free variables are not ' - 'resolved\n' - 'in the nearest enclosing namespace, but in the global ' - 'namespace. 
[1]\n' - 'The "exec()" and "eval()" functions have optional arguments to\n' - 'override the global and local namespace. If only one namespace ' - 'is\n' - 'specified, it is used for both.\n' - '\n' - '\n' - 'Exceptions\n' - '==========\n' - '\n' - 'Exceptions are a means of breaking out of the normal flow of ' - 'control\n' - 'of a code block in order to handle errors or other exceptional\n' - 'conditions. An exception is *raised* at the point where the ' - 'error is\n' - 'detected; it may be *handled* by the surrounding code block or ' - 'by any\n' - 'code block that directly or indirectly invoked the code block ' - 'where\n' - 'the error occurred.\n' - '\n' - 'The Python interpreter raises an exception when it detects a ' - 'run-time\n' - 'error (such as division by zero). A Python program can also\n' - 'explicitly raise an exception with the "raise" statement. ' - 'Exception\n' - 'handlers are specified with the "try" … "except" statement. ' - 'The\n' - '"finally" clause of such a statement can be used to specify ' - 'cleanup\n' - 'code which does not handle the exception, but is executed ' - 'whether an\n' - 'exception occurred or not in the preceding code.\n' - '\n' - 'Python uses the “termination” model of error handling: an ' - 'exception\n' - 'handler can find out what happened and continue execution at an ' - 'outer\n' - 'level, but it cannot repair the cause of the error and retry ' - 'the\n' - 'failing operation (except by re-entering the offending piece of ' - 'code\n' - 'from the top).\n' - '\n' - 'When an exception is not handled at all, the interpreter ' - 'terminates\n' - 'execution of the program, or returns to its interactive main ' - 'loop. In\n' - 'either case, it prints a stack traceback, except when the ' - 'exception is\n' - '"SystemExit".\n' - '\n' - 'Exceptions are identified by class instances. The "except" ' - 'clause is\n' - 'selected depending on the class of the instance: it must ' - 'reference the\n' - 'class of the instance or a base class thereof. The instance ' - 'can be\n' - 'received by the handler and can carry additional information ' - 'about the\n' - 'exceptional condition.\n' - '\n' - 'Note:\n' - '\n' - ' Exception messages are not part of the Python API. Their ' - 'contents\n' - ' may change from one version of Python to the next without ' - 'warning\n' - ' and should not be relied on by code which will run under ' - 'multiple\n' - ' versions of the interpreter.\n' - '\n' - 'See also the description of the "try" statement in section The ' - 'try\n' - 'statement and "raise" statement in section The raise ' - 'statement.\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] This limitation occurs because the code that is executed by ' - 'these\n' - ' operations is not available at the time the module is ' - 'compiled.\n', - 'exprlists': 'Expression lists\n' - '****************\n' - '\n' - ' expression_list ::= expression ("," expression)* [","]\n' - ' starred_list ::= starred_item ("," starred_item)* ' - '[","]\n' - ' starred_expression ::= expression | (starred_item ",")* ' - '[starred_item]\n' - ' starred_item ::= assignment_expression | "*" or_expr\n' - '\n' - 'Except when part of a list or set display, an expression list\n' - 'containing at least one comma yields a tuple. The length of ' - 'the tuple\n' - 'is the number of expressions in the list. The expressions are\n' - 'evaluated from left to right.\n' - '\n' - 'An asterisk "*" denotes *iterable unpacking*. Its operand must ' - 'be an\n' - '*iterable*. 
The iterable is expanded into a sequence of items, ' - 'which\n' - 'are included in the new tuple, list, or set, at the site of ' - 'the\n' - 'unpacking.\n' - '\n' - 'New in version 3.5: Iterable unpacking in expression lists, ' - 'originally\n' - 'proposed by **PEP 448**.\n' - '\n' - 'The trailing comma is required only to create a single tuple ' - '(a.k.a. a\n' - '*singleton*); it is optional in all other cases. A single ' - 'expression\n' - 'without a trailing comma doesn’t create a tuple, but rather ' - 'yields the\n' - 'value of that expression. (To create an empty tuple, use an ' - 'empty pair\n' - 'of parentheses: "()".)\n', - 'floating': 'Floating point literals\n' - '***********************\n' - '\n' - 'Floating point literals are described by the following lexical\n' - 'definitions:\n' - '\n' - ' floatnumber ::= pointfloat | exponentfloat\n' - ' pointfloat ::= [digitpart] fraction | digitpart "."\n' - ' exponentfloat ::= (digitpart | pointfloat) exponent\n' - ' digitpart ::= digit (["_"] digit)*\n' - ' fraction ::= "." digitpart\n' - ' exponent ::= ("e" | "E") ["+" | "-"] digitpart\n' - '\n' - 'Note that the integer and exponent parts are always interpreted ' - 'using\n' - 'radix 10. For example, "077e010" is legal, and denotes the same ' - 'number\n' - 'as "77e10". The allowed range of floating point literals is\n' - 'implementation-dependent. As in integer literals, underscores ' - 'are\n' - 'supported for digit grouping.\n' - '\n' - 'Some examples of floating point literals:\n' - '\n' - ' 3.14 10. .001 1e100 3.14e-10 0e0 ' - '3.14_15_93\n' - '\n' - 'Changed in version 3.6: Underscores are now allowed for ' - 'grouping\n' - 'purposes in literals.\n', - 'for': 'The "for" statement\n' - '*******************\n' - '\n' - 'The "for" statement is used to iterate over the elements of a ' - 'sequence\n' - '(such as a string, tuple or list) or other iterable object:\n' - '\n' - ' for_stmt ::= "for" target_list "in" expression_list ":" suite\n' - ' ["else" ":" suite]\n' - '\n' - 'The expression list is evaluated once; it should yield an iterable\n' - 'object. An iterator is created for the result of the\n' - '"expression_list". The suite is then executed once for each item\n' - 'provided by the iterator, in the order returned by the iterator. ' - 'Each\n' - 'item in turn is assigned to the target list using the standard rules\n' - 'for assignments (see Assignment statements), and then the suite is\n' - 'executed. When the items are exhausted (which is immediately when ' - 'the\n' - 'sequence is empty or an iterator raises a "StopIteration" ' - 'exception),\n' - 'the suite in the "else" clause, if present, is executed, and the ' - 'loop\n' - 'terminates.\n' - '\n' - 'A "break" statement executed in the first suite terminates the loop\n' - 'without executing the "else" clause’s suite. 
A "continue" statement\n' - 'executed in the first suite skips the rest of the suite and ' - 'continues\n' - 'with the next item, or with the "else" clause if there is no next\n' - 'item.\n' - '\n' - 'The for-loop makes assignments to the variables in the target list.\n' - 'This overwrites all previous assignments to those variables ' - 'including\n' - 'those made in the suite of the for-loop:\n' - '\n' - ' for i in range(10):\n' - ' print(i)\n' - ' i = 5 # this will not affect the for-loop\n' - ' # because i will be overwritten with the ' - 'next\n' - ' # index in the range\n' - '\n' - 'Names in the target list are not deleted when the loop is finished,\n' - 'but if the sequence is empty, they will not have been assigned to at\n' - 'all by the loop. Hint: the built-in function "range()" returns an\n' - 'iterator of integers suitable to emulate the effect of Pascal’s "for ' - 'i\n' - ':= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, 2]".\n' - '\n' - 'Note:\n' - '\n' - ' There is a subtlety when the sequence is being modified by the ' - 'loop\n' - ' (this can only occur for mutable sequences, e.g. lists). An\n' - ' internal counter is used to keep track of which item is used next,\n' - ' and this is incremented on each iteration. When this counter has\n' - ' reached the length of the sequence the loop terminates. This ' - 'means\n' - ' that if the suite deletes the current (or a previous) item from ' - 'the\n' - ' sequence, the next item will be skipped (since it gets the index ' - 'of\n' - ' the current item which has already been treated). Likewise, if ' - 'the\n' - ' suite inserts an item in the sequence before the current item, the\n' - ' current item will be treated again the next time through the loop.\n' - ' This can lead to nasty bugs that can be avoided by making a\n' - ' temporary copy using a slice of the whole sequence, e.g.,\n' - '\n' - ' for x in a[:]:\n' - ' if x < 0: a.remove(x)\n', - 'formatstrings': 'Format String Syntax\n' - '********************\n' - '\n' - 'The "str.format()" method and the "Formatter" class share ' - 'the same\n' - 'syntax for format strings (although in the case of ' - '"Formatter",\n' - 'subclasses can define their own format string syntax). The ' - 'syntax is\n' - 'related to that of formatted string literals, but it is ' - 'less\n' - 'sophisticated and, in particular, does not support ' - 'arbitrary\n' - 'expressions.\n' - '\n' - 'Format strings contain “replacement fields” surrounded by ' - 'curly braces\n' - '"{}". Anything that is not contained in braces is ' - 'considered literal\n' - 'text, which is copied unchanged to the output. If you need ' - 'to include\n' - 'a brace character in the literal text, it can be escaped by ' - 'doubling:\n' - '"{{" and "}}".\n' - '\n' - 'The grammar for a replacement field is as follows:\n' - '\n' - ' replacement_field ::= "{" [field_name] ["!" ' - 'conversion] [":" format_spec] "}"\n' - ' field_name ::= arg_name ("." attribute_name | ' - '"[" element_index "]")*\n' - ' arg_name ::= [identifier | digit+]\n' - ' attribute_name ::= identifier\n' - ' element_index ::= digit+ | index_string\n' - ' index_string ::= +\n' - ' conversion ::= "r" | "s" | "a"\n' - ' format_spec ::= \n' - '\n' - 'In less formal terms, the replacement field can start with ' - 'a\n' - '*field_name* that specifies the object whose value is to be ' - 'formatted\n' - 'and inserted into the output instead of the replacement ' - 'field. 
The\n' - '*field_name* is optionally followed by a *conversion* ' - 'field, which is\n' - 'preceded by an exclamation point "\'!\'", and a ' - '*format_spec*, which is\n' - 'preceded by a colon "\':\'". These specify a non-default ' - 'format for the\n' - 'replacement value.\n' - '\n' - 'See also the Format Specification Mini-Language section.\n' - '\n' - 'The *field_name* itself begins with an *arg_name* that is ' - 'either a\n' - 'number or a keyword. If it’s a number, it refers to a ' - 'positional\n' - 'argument, and if it’s a keyword, it refers to a named ' - 'keyword\n' - 'argument. If the numerical arg_names in a format string ' - 'are 0, 1, 2,\n' - '… in sequence, they can all be omitted (not just some) and ' - 'the numbers\n' - '0, 1, 2, … will be automatically inserted in that order. ' - 'Because\n' - '*arg_name* is not quote-delimited, it is not possible to ' - 'specify\n' - 'arbitrary dictionary keys (e.g., the strings "\'10\'" or ' - '"\':-]\'") within\n' - 'a format string. The *arg_name* can be followed by any ' - 'number of index\n' - 'or attribute expressions. An expression of the form ' - '"\'.name\'" selects\n' - 'the named attribute using "getattr()", while an expression ' - 'of the form\n' - '"\'[index]\'" does an index lookup using "__getitem__()".\n' - '\n' - 'Changed in version 3.1: The positional argument specifiers ' - 'can be\n' - 'omitted for "str.format()", so "\'{} {}\'.format(a, b)" is ' - 'equivalent to\n' - '"\'{0} {1}\'.format(a, b)".\n' - '\n' - 'Changed in version 3.4: The positional argument specifiers ' - 'can be\n' - 'omitted for "Formatter".\n' - '\n' - 'Some simple format string examples:\n' - '\n' - ' "First, thou shalt count to {0}" # References first ' - 'positional argument\n' - ' "Bring me a {}" # Implicitly ' - 'references the first positional argument\n' - ' "From {} to {}" # Same as "From {0} to ' - '{1}"\n' - ' "My quest is {name}" # References keyword ' - "argument 'name'\n" - ' "Weight in tons {0.weight}" # \'weight\' attribute ' - 'of first positional arg\n' - ' "Units destroyed: {players[0]}" # First element of ' - "keyword argument 'players'.\n" - '\n' - 'The *conversion* field causes a type coercion before ' - 'formatting.\n' - 'Normally, the job of formatting a value is done by the ' - '"__format__()"\n' - 'method of the value itself. However, in some cases it is ' - 'desirable to\n' - 'force a type to be formatted as a string, overriding its ' - 'own\n' - 'definition of formatting. By converting the value to a ' - 'string before\n' - 'calling "__format__()", the normal formatting logic is ' - 'bypassed.\n' - '\n' - 'Three conversion flags are currently supported: "\'!s\'" ' - 'which calls\n' - '"str()" on the value, "\'!r\'" which calls "repr()" and ' - '"\'!a\'" which\n' - 'calls "ascii()".\n' - '\n' - 'Some examples:\n' - '\n' - ' "Harold\'s a clever {0!s}" # Calls str() on the ' - 'argument first\n' - ' "Bring out the holy {name!r}" # Calls repr() on the ' - 'argument first\n' - ' "More {!a}" # Calls ascii() on the ' - 'argument first\n' - '\n' - 'The *format_spec* field contains a specification of how the ' - 'value\n' - 'should be presented, including such details as field width, ' - 'alignment,\n' - 'padding, decimal precision and so on. 
Each value type can ' - 'define its\n' - 'own “formatting mini-language” or interpretation of the ' - '*format_spec*.\n' - '\n' - 'Most built-in types support a common formatting ' - 'mini-language, which\n' - 'is described in the next section.\n' - '\n' - 'A *format_spec* field can also include nested replacement ' - 'fields\n' - 'within it. These nested replacement fields may contain a ' - 'field name,\n' - 'conversion flag and format specification, but deeper ' - 'nesting is not\n' - 'allowed. The replacement fields within the format_spec ' - 'are\n' - 'substituted before the *format_spec* string is interpreted. ' - 'This\n' - 'allows the formatting of a value to be dynamically ' - 'specified.\n' - '\n' - 'See the Format examples section for some examples.\n' - '\n' - '\n' - 'Format Specification Mini-Language\n' - '==================================\n' - '\n' - '“Format specifications” are used within replacement fields ' - 'contained\n' - 'within a format string to define how individual values are ' - 'presented\n' - '(see Format String Syntax and Formatted string literals). ' - 'They can\n' - 'also be passed directly to the built-in "format()" ' - 'function. Each\n' - 'formattable type may define how the format specification is ' - 'to be\n' - 'interpreted.\n' - '\n' - 'Most built-in types implement the following options for ' - 'format\n' - 'specifications, although some of the formatting options are ' - 'only\n' - 'supported by the numeric types.\n' - '\n' - 'A general convention is that an empty format specification ' - 'produces\n' - 'the same result as if you had called "str()" on the value. ' - 'A non-empty\n' - 'format specification typically modifies the result.\n' - '\n' - 'The general form of a *standard format specifier* is:\n' - '\n' - ' format_spec ::= ' - '[[fill]align][sign][#][0][width][grouping_option][.precision][type]\n' - ' fill ::= \n' - ' align ::= "<" | ">" | "=" | "^"\n' - ' sign ::= "+" | "-" | " "\n' - ' width ::= digit+\n' - ' grouping_option ::= "_" | ","\n' - ' precision ::= digit+\n' - ' type ::= "b" | "c" | "d" | "e" | "E" | "f" | ' - '"F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"\n' - '\n' - 'If a valid *align* value is specified, it can be preceded ' - 'by a *fill*\n' - 'character that can be any character and defaults to a space ' - 'if\n' - 'omitted. It is not possible to use a literal curly brace ' - '(”"{"” or\n' - '“"}"”) as the *fill* character in a formatted string ' - 'literal or when\n' - 'using the "str.format()" method. However, it is possible ' - 'to insert a\n' - 'curly brace with a nested replacement field. This ' - 'limitation doesn’t\n' - 'affect the "format()" function.\n' - '\n' - 'The meaning of the various alignment options is as ' - 'follows:\n' - '\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | Option | ' - 'Meaning ' - '|\n' - ' ' - '|===========|============================================================|\n' - ' | "\'<\'" | Forces the field to be left-aligned ' - 'within the available |\n' - ' | | space (this is the default for most ' - 'objects). |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'>\'" | Forces the field to be right-aligned ' - 'within the available |\n' - ' | | space (this is the default for ' - 'numbers). 
|\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'=\'" | Forces the padding to be placed after ' - 'the sign (if any) |\n' - ' | | but before the digits. This is used for ' - 'printing fields |\n' - ' | | in the form ‘+000000120’. This alignment ' - 'option is only |\n' - ' | | valid for numeric types. It becomes the ' - 'default for |\n' - ' | | numbers when ‘0’ immediately precedes the ' - 'field width. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'^\'" | Forces the field to be centered within ' - 'the available |\n' - ' | | ' - 'space. ' - '|\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'Note that unless a minimum field width is defined, the ' - 'field width\n' - 'will always be the same size as the data to fill it, so ' - 'that the\n' - 'alignment option has no meaning in this case.\n' - '\n' - 'The *sign* option is only valid for number types, and can ' - 'be one of\n' - 'the following:\n' - '\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | Option | ' - 'Meaning ' - '|\n' - ' ' - '|===========|============================================================|\n' - ' | "\'+\'" | indicates that a sign should be used for ' - 'both positive as |\n' - ' | | well as negative ' - 'numbers. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'-\'" | indicates that a sign should be used ' - 'only for negative |\n' - ' | | numbers (this is the default ' - 'behavior). |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | space | indicates that a leading space should be ' - 'used on positive |\n' - ' | | numbers, and a minus sign on negative ' - 'numbers. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'The "\'#\'" option causes the “alternate form” to be used ' - 'for the\n' - 'conversion. The alternate form is defined differently for ' - 'different\n' - 'types. This option is only valid for integer, float and ' - 'complex\n' - 'types. For integers, when binary, octal, or hexadecimal ' - 'output is\n' - 'used, this option adds the respective prefix "\'0b\'", ' - '"\'0o\'", "\'0x\'",\n' - 'or "\'0X\'" to the output value. For float and complex the ' - 'alternate\n' - 'form causes the result of the conversion to always contain ' - 'a decimal-\n' - 'point character, even if no digits follow it. Normally, a ' - 'decimal-\n' - 'point character appears in the result of these conversions ' - 'only if a\n' - 'digit follows it. In addition, for "\'g\'" and "\'G\'" ' - 'conversions,\n' - 'trailing zeros are not removed from the result.\n' - '\n' - 'The "\',\'" option signals the use of a comma for a ' - 'thousands separator.\n' - 'For a locale aware separator, use the "\'n\'" integer ' - 'presentation type\n' - 'instead.\n' - '\n' - 'Changed in version 3.1: Added the "\',\'" option (see also ' - '**PEP 378**).\n' - '\n' - 'The "\'_\'" option signals the use of an underscore for a ' - 'thousands\n' - 'separator for floating point presentation types and for ' - 'integer\n' - 'presentation type "\'d\'". For integer presentation types ' - '"\'b\'", "\'o\'",\n' - '"\'x\'", and "\'X\'", underscores will be inserted every 4 ' - 'digits. 
For\n' - 'other presentation types, specifying this option is an ' - 'error.\n' - '\n' - 'Changed in version 3.6: Added the "\'_\'" option (see also ' - '**PEP 515**).\n' - '\n' - '*width* is a decimal integer defining the minimum total ' - 'field width,\n' - 'including any prefixes, separators, and other formatting ' - 'characters.\n' - 'If not specified, then the field width will be determined ' - 'by the\n' - 'content.\n' - '\n' - 'When no explicit alignment is given, preceding the *width* ' - 'field by a\n' - 'zero ("\'0\'") character enables sign-aware zero-padding ' - 'for numeric\n' - 'types. This is equivalent to a *fill* character of "\'0\'" ' - 'with an\n' - '*alignment* type of "\'=\'".\n' - '\n' - 'Changed in version 3.10: Preceding the *width* field by ' - '"\'0\'" no\n' - 'longer affects the default alignment for strings.\n' - '\n' - 'The *precision* is a decimal number indicating how many ' - 'digits should\n' - 'be displayed after the decimal point for a floating point ' - 'value\n' - 'formatted with "\'f\'" and "\'F\'", or before and after the ' - 'decimal point\n' - 'for a floating point value formatted with "\'g\'" or ' - '"\'G\'". For non-\n' - 'number types the field indicates the maximum field size - ' - 'in other\n' - 'words, how many characters will be used from the field ' - 'content. The\n' - '*precision* is not allowed for integer values.\n' - '\n' - 'Finally, the *type* determines how the data should be ' - 'presented.\n' - '\n' - 'The available string presentation types are:\n' - '\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | Type | ' - 'Meaning ' - '|\n' - ' ' - '|===========|============================================================|\n' - ' | "\'s\'" | String format. This is the default type ' - 'for strings and |\n' - ' | | may be ' - 'omitted. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | None | The same as ' - '"\'s\'". |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'The available integer presentation types are:\n' - '\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | Type | ' - 'Meaning ' - '|\n' - ' ' - '|===========|============================================================|\n' - ' | "\'b\'" | Binary format. Outputs the number in ' - 'base 2. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'c\'" | Character. Converts the integer to the ' - 'corresponding |\n' - ' | | unicode character before ' - 'printing. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'d\'" | Decimal Integer. Outputs the number in ' - 'base 10. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'o\'" | Octal format. Outputs the number in base ' - '8. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'x\'" | Hex format. Outputs the number in base ' - '16, using lower- |\n' - ' | | case letters for the digits above ' - '9. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'X\'" | Hex format. Outputs the number in base ' - '16, using upper- |\n' - ' | | case letters for the digits above 9. In ' - 'case "\'#\'" is |\n' - ' | | specified, the prefix "\'0x\'" will be ' - 'upper-cased to "\'0X\'" |\n' - ' | | as ' - 'well. 
|\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'n\'" | Number. This is the same as "\'d\'", ' - 'except that it uses the |\n' - ' | | current locale setting to insert the ' - 'appropriate number |\n' - ' | | separator ' - 'characters. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | None | The same as ' - '"\'d\'". |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'In addition to the above presentation types, integers can ' - 'be formatted\n' - 'with the floating point presentation types listed below ' - '(except "\'n\'"\n' - 'and "None"). When doing so, "float()" is used to convert ' - 'the integer\n' - 'to a floating point number before formatting.\n' - '\n' - 'The available presentation types for "float" and "Decimal" ' - 'values are:\n' - '\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | Type | ' - 'Meaning ' - '|\n' - ' ' - '|===========|============================================================|\n' - ' | "\'e\'" | Scientific notation. For a given ' - 'precision "p", formats |\n' - ' | | the number in scientific notation with the ' - 'letter ‘e’ |\n' - ' | | separating the coefficient from the ' - 'exponent. The |\n' - ' | | coefficient has one digit before and "p" ' - 'digits after the |\n' - ' | | decimal point, for a total of "p + 1" ' - 'significant digits. |\n' - ' | | With no precision given, uses a precision ' - 'of "6" digits |\n' - ' | | after the decimal point for "float", and ' - 'shows all |\n' - ' | | coefficient digits for "Decimal". If no ' - 'digits follow the |\n' - ' | | decimal point, the decimal point is also ' - 'removed unless |\n' - ' | | the "#" option is ' - 'used. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'E\'" | Scientific notation. Same as "\'e\'" ' - 'except it uses an upper |\n' - ' | | case ‘E’ as the separator ' - 'character. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'f\'" | Fixed-point notation. For a given ' - 'precision "p", formats |\n' - ' | | the number as a decimal number with ' - 'exactly "p" digits |\n' - ' | | following the decimal point. With no ' - 'precision given, uses |\n' - ' | | a precision of "6" digits after the ' - 'decimal point for |\n' - ' | | "float", and uses a precision large enough ' - 'to show all |\n' - ' | | coefficient digits for "Decimal". If no ' - 'digits follow the |\n' - ' | | decimal point, the decimal point is also ' - 'removed unless |\n' - ' | | the "#" option is ' - 'used. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'F\'" | Fixed-point notation. Same as "\'f\'", ' - 'but converts "nan" to |\n' - ' | | "NAN" and "inf" to ' - '"INF". |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'g\'" | General format. For a given precision ' - '"p >= 1", this |\n' - ' | | rounds the number to "p" significant ' - 'digits and then |\n' - ' | | formats the result in either fixed-point ' - 'format or in |\n' - ' | | scientific notation, depending on its ' - 'magnitude. A |\n' - ' | | precision of "0" is treated as equivalent ' - 'to a precision |\n' - ' | | of "1". 
The precise rules are as follows: ' - 'suppose that |\n' - ' | | the result formatted with presentation ' - 'type "\'e\'" and |\n' - ' | | precision "p-1" would have exponent ' - '"exp". Then, if "m <= |\n' - ' | | exp < p", where "m" is -4 for floats and ' - '-6 for |\n' - ' | | "Decimals", the number is formatted with ' - 'presentation type |\n' - ' | | "\'f\'" and precision "p-1-exp". ' - 'Otherwise, the number is |\n' - ' | | formatted with presentation type "\'e\'" ' - 'and precision |\n' - ' | | "p-1". In both cases insignificant ' - 'trailing zeros are |\n' - ' | | removed from the significand, and the ' - 'decimal point is |\n' - ' | | also removed if there are no remaining ' - 'digits following |\n' - ' | | it, unless the "\'#\'" option is used. ' - 'With no precision |\n' - ' | | given, uses a precision of "6" significant ' - 'digits for |\n' - ' | | "float". For "Decimal", the coefficient of ' - 'the result is |\n' - ' | | formed from the coefficient digits of the ' - 'value; |\n' - ' | | scientific notation is used for values ' - 'smaller than "1e-6" |\n' - ' | | in absolute value and values where the ' - 'place value of the |\n' - ' | | least significant digit is larger than 1, ' - 'and fixed-point |\n' - ' | | notation is used otherwise. Positive and ' - 'negative |\n' - ' | | infinity, positive and negative zero, and ' - 'nans, are |\n' - ' | | formatted as "inf", "-inf", "0", "-0" and ' - '"nan" |\n' - ' | | respectively, regardless of the ' - 'precision. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'G\'" | General format. Same as "\'g\'" except ' - 'switches to "\'E\'" if |\n' - ' | | the number gets too large. The ' - 'representations of infinity |\n' - ' | | and NaN are uppercased, ' - 'too. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'n\'" | Number. This is the same as "\'g\'", ' - 'except that it uses the |\n' - ' | | current locale setting to insert the ' - 'appropriate number |\n' - ' | | separator ' - 'characters. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'%\'" | Percentage. Multiplies the number by 100 ' - 'and displays in |\n' - ' | | fixed ("\'f\'") format, followed by a ' - 'percent sign. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | None | For "float" this is the same as "\'g\'", ' - 'except that when |\n' - ' | | fixed-point notation is used to format the ' - 'result, it |\n' - ' | | always includes at least one digit past ' - 'the decimal point. |\n' - ' | | The precision used is as large as needed ' - 'to represent the |\n' - ' | | given value faithfully. For "Decimal", ' - 'this is the same |\n' - ' | | as either "\'g\'" or "\'G\'" depending on ' - 'the value of |\n' - ' | | "context.capitals" for the current decimal ' - 'context. The |\n' - ' | | overall effect is to match the output of ' - '"str()" as |\n' - ' | | altered by the other format ' - 'modifiers. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - '\n' - '\n' - 'Format examples\n' - '===============\n' - '\n' - 'This section contains examples of the "str.format()" syntax ' - 'and\n' - 'comparison with the old "%"-formatting.\n' - '\n' - 'In most of the cases the syntax is similar to the old ' - '"%"-formatting,\n' - 'with the addition of the "{}" and with ":" used instead of ' - '"%". 
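A small illustrative sketch of a few of the presentation types tabulated above; f-strings accept the same format specification mini-language as "str.format()", and the values here are arbitrary:

```python
value = 255
print(f"{value:b}")            # '11111111'      binary
print(f"{value:#x}")           # '0xff'          hex with the '#' alternate-form prefix
print(f"{value:08d}")          # '00000255'      sign-aware zero padding via '0' + width
print(f"{1234567.891:,.2f}")   # '1,234,567.89'  ',' separator with precision 2
print(f"{0.25:.1%}")           # '25.0%'         percentage
```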
For\n' - 'example, "\'%03.2f\'" can be translated to "\'{:03.2f}\'".\n' - '\n' - 'The new format syntax also supports new and different ' - 'options, shown\n' - 'in the following examples.\n' - '\n' - 'Accessing arguments by position:\n' - '\n' - " >>> '{0}, {1}, {2}'.format('a', 'b', 'c')\n" - " 'a, b, c'\n" - " >>> '{}, {}, {}'.format('a', 'b', 'c') # 3.1+ only\n" - " 'a, b, c'\n" - " >>> '{2}, {1}, {0}'.format('a', 'b', 'c')\n" - " 'c, b, a'\n" - " >>> '{2}, {1}, {0}'.format(*'abc') # unpacking " - 'argument sequence\n' - " 'c, b, a'\n" - " >>> '{0}{1}{0}'.format('abra', 'cad') # arguments' " - 'indices can be repeated\n' - " 'abracadabra'\n" - '\n' - 'Accessing arguments by name:\n' - '\n' - " >>> 'Coordinates: {latitude}, " - "{longitude}'.format(latitude='37.24N', " - "longitude='-115.81W')\n" - " 'Coordinates: 37.24N, -115.81W'\n" - " >>> coord = {'latitude': '37.24N', 'longitude': " - "'-115.81W'}\n" - " >>> 'Coordinates: {latitude}, " - "{longitude}'.format(**coord)\n" - " 'Coordinates: 37.24N, -115.81W'\n" - '\n' - 'Accessing arguments’ attributes:\n' - '\n' - ' >>> c = 3-5j\n' - " >>> ('The complex number {0} is formed from the real " - "part {0.real} '\n" - " ... 'and the imaginary part {0.imag}.').format(c)\n" - " 'The complex number (3-5j) is formed from the real part " - "3.0 and the imaginary part -5.0.'\n" - ' >>> class Point:\n' - ' ... def __init__(self, x, y):\n' - ' ... self.x, self.y = x, y\n' - ' ... def __str__(self):\n' - " ... return 'Point({self.x}, " - "{self.y})'.format(self=self)\n" - ' ...\n' - ' >>> str(Point(4, 2))\n' - " 'Point(4, 2)'\n" - '\n' - 'Accessing arguments’ items:\n' - '\n' - ' >>> coord = (3, 5)\n' - " >>> 'X: {0[0]}; Y: {0[1]}'.format(coord)\n" - " 'X: 3; Y: 5'\n" - '\n' - 'Replacing "%s" and "%r":\n' - '\n' - ' >>> "repr() shows quotes: {!r}; str() doesn\'t: ' - '{!s}".format(\'test1\', \'test2\')\n' - ' "repr() shows quotes: \'test1\'; str() doesn\'t: test2"\n' - '\n' - 'Aligning the text and specifying a width:\n' - '\n' - " >>> '{:<30}'.format('left aligned')\n" - " 'left aligned '\n" - " >>> '{:>30}'.format('right aligned')\n" - " ' right aligned'\n" - " >>> '{:^30}'.format('centered')\n" - " ' centered '\n" - " >>> '{:*^30}'.format('centered') # use '*' as a fill " - 'char\n' - " '***********centered***********'\n" - '\n' - 'Replacing "%+f", "%-f", and "% f" and specifying a sign:\n' - '\n' - " >>> '{:+f}; {:+f}'.format(3.14, -3.14) # show it " - 'always\n' - " '+3.140000; -3.140000'\n" - " >>> '{: f}; {: f}'.format(3.14, -3.14) # show a space " - 'for positive numbers\n' - " ' 3.140000; -3.140000'\n" - " >>> '{:-f}; {:-f}'.format(3.14, -3.14) # show only the " - "minus -- same as '{:f}; {:f}'\n" - " '3.140000; -3.140000'\n" - '\n' - 'Replacing "%x" and "%o" and converting the value to ' - 'different bases:\n' - '\n' - ' >>> # format also supports binary numbers\n' - ' >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: ' - '{0:b}".format(42)\n' - " 'int: 42; hex: 2a; oct: 52; bin: 101010'\n" - ' >>> # with 0x, 0o, or 0b as prefix:\n' - ' >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: ' - '{0:#b}".format(42)\n' - " 'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010'\n" - '\n' - 'Using the comma as a thousands separator:\n' - '\n' - " >>> '{:,}'.format(1234567890)\n" - " '1,234,567,890'\n" - '\n' - 'Expressing a percentage:\n' - '\n' - ' >>> points = 19\n' - ' >>> total = 22\n' - " >>> 'Correct answers: {:.2%}'.format(points/total)\n" - " 'Correct answers: 86.36%'\n" - '\n' - 'Using type-specific formatting:\n' - '\n' - ' >>> import datetime\n' - ' 
>>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\n' - " >>> '{:%Y-%m-%d %H:%M:%S}'.format(d)\n" - " '2010-07-04 12:15:58'\n" - '\n' - 'Nesting arguments and more complex examples:\n' - '\n' - " >>> for align, text in zip('<^>', ['left', 'center', " - "'right']):\n" - " ... '{0:{fill}{align}16}'.format(text, fill=align, " - 'align=align)\n' - ' ...\n' - " 'left<<<<<<<<<<<<'\n" - " '^^^^^center^^^^^'\n" - " '>>>>>>>>>>>right'\n" - ' >>>\n' - ' >>> octets = [192, 168, 0, 1]\n' - " >>> '{:02X}{:02X}{:02X}{:02X}'.format(*octets)\n" - " 'C0A80001'\n" - ' >>> int(_, 16)\n' - ' 3232235521\n' - ' >>>\n' - ' >>> width = 5\n' - ' >>> for num in range(5,12): \n' - " ... for base in 'dXob':\n" - " ... print('{0:{width}{base}}'.format(num, " - "base=base, width=width), end=' ')\n" - ' ... print()\n' - ' ...\n' - ' 5 5 5 101\n' - ' 6 6 6 110\n' - ' 7 7 7 111\n' - ' 8 8 10 1000\n' - ' 9 9 11 1001\n' - ' 10 A 12 1010\n' - ' 11 B 13 1011\n', - 'function': 'Function definitions\n' - '********************\n' - '\n' - 'A function definition defines a user-defined function object ' - '(see\n' - 'section The standard type hierarchy):\n' - '\n' - ' funcdef ::= [decorators] "def" funcname "(" ' - '[parameter_list] ")"\n' - ' ["->" expression] ":" suite\n' - ' decorators ::= decorator+\n' - ' decorator ::= "@" assignment_expression ' - 'NEWLINE\n' - ' parameter_list ::= defparameter ("," ' - 'defparameter)* "," "/" ["," [parameter_list_no_posonly]]\n' - ' | parameter_list_no_posonly\n' - ' parameter_list_no_posonly ::= defparameter ("," ' - 'defparameter)* ["," [parameter_list_starargs]]\n' - ' | parameter_list_starargs\n' - ' parameter_list_starargs ::= "*" [parameter] ("," ' - 'defparameter)* ["," ["**" parameter [","]]]\n' - ' | "**" parameter [","]\n' - ' parameter ::= identifier [":" expression]\n' - ' defparameter ::= parameter ["=" expression]\n' - ' funcname ::= identifier\n' - '\n' - 'A function definition is an executable statement. Its execution ' - 'binds\n' - 'the function name in the current local namespace to a function ' - 'object\n' - '(a wrapper around the executable code for the function). This\n' - 'function object contains a reference to the current global ' - 'namespace\n' - 'as the global namespace to be used when the function is called.\n' - '\n' - 'The function definition does not execute the function body; this ' - 'gets\n' - 'executed only when the function is called. [4]\n' - '\n' - 'A function definition may be wrapped by one or more *decorator*\n' - 'expressions. Decorator expressions are evaluated when the ' - 'function is\n' - 'defined, in the scope that contains the function definition. ' - 'The\n' - 'result must be a callable, which is invoked with the function ' - 'object\n' - 'as the only argument. The returned value is bound to the ' - 'function name\n' - 'instead of the function object. Multiple decorators are applied ' - 'in\n' - 'nested fashion. For example, the following code\n' - '\n' - ' @f1(arg)\n' - ' @f2\n' - ' def func(): pass\n' - '\n' - 'is roughly equivalent to\n' - '\n' - ' def func(): pass\n' - ' func = f1(arg)(f2(func))\n' - '\n' - 'except that the original function is not temporarily bound to ' - 'the name\n' - '"func".\n' - '\n' - 'Changed in version 3.9: Functions may be decorated with any ' - 'valid\n' - '"assignment_expression". 
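A minimal runnable sketch of the nested decorator application described above; the decorator and function names are illustrative only:

```python
def exclaim(func):
    # innermost decorator: applied first because it is closest to the function
    def wrapper():
        return func() + "!"
    return wrapper

def shout(func):
    # outermost decorator: applied last, i.e. shout(exclaim(greet))
    def wrapper():
        return func().upper()
    return wrapper

@shout
@exclaim
def greet():
    return "hello"

print(greet())   # 'HELLO!'
```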
Previously, the grammar was much more\n' - 'restrictive; see **PEP 614** for details.\n' - '\n' - 'When one or more *parameters* have the form *parameter* "="\n' - '*expression*, the function is said to have “default parameter ' - 'values.”\n' - 'For a parameter with a default value, the corresponding ' - '*argument* may\n' - 'be omitted from a call, in which case the parameter’s default ' - 'value is\n' - 'substituted. If a parameter has a default value, all following\n' - 'parameters up until the “"*"” must also have a default value — ' - 'this is\n' - 'a syntactic restriction that is not expressed by the grammar.\n' - '\n' - '**Default parameter values are evaluated from left to right when ' - 'the\n' - 'function definition is executed.** This means that the ' - 'expression is\n' - 'evaluated once, when the function is defined, and that the same ' - '“pre-\n' - 'computed” value is used for each call. This is especially ' - 'important\n' - 'to understand when a default parameter value is a mutable ' - 'object, such\n' - 'as a list or a dictionary: if the function modifies the object ' - '(e.g.\n' - 'by appending an item to a list), the default parameter value is ' - 'in\n' - 'effect modified. This is generally not what was intended. A ' - 'way\n' - 'around this is to use "None" as the default, and explicitly test ' - 'for\n' - 'it in the body of the function, e.g.:\n' - '\n' - ' def whats_on_the_telly(penguin=None):\n' - ' if penguin is None:\n' - ' penguin = []\n' - ' penguin.append("property of the zoo")\n' - ' return penguin\n' - '\n' - 'Function call semantics are described in more detail in section ' - 'Calls.\n' - 'A function call always assigns values to all parameters ' - 'mentioned in\n' - 'the parameter list, either from positional arguments, from ' - 'keyword\n' - 'arguments, or from default values. If the form “"*identifier"” ' - 'is\n' - 'present, it is initialized to a tuple receiving any excess ' - 'positional\n' - 'parameters, defaulting to the empty tuple. If the form\n' - '“"**identifier"” is present, it is initialized to a new ordered\n' - 'mapping receiving any excess keyword arguments, defaulting to a ' - 'new\n' - 'empty mapping of the same type. Parameters after “"*"” or\n' - '“"*identifier"” are keyword-only parameters and may only be ' - 'passed by\n' - 'keyword arguments. Parameters before “"/"” are positional-only\n' - 'parameters and may only be passed by positional arguments.\n' - '\n' - 'Changed in version 3.8: The "/" function parameter syntax may be ' - 'used\n' - 'to indicate positional-only parameters. See **PEP 570** for ' - 'details.\n' - '\n' - 'Parameters may have an *annotation* of the form “": ' - 'expression"”\n' - 'following the parameter name. Any parameter may have an ' - 'annotation,\n' - 'even those of the form "*identifier" or "**identifier". ' - 'Functions may\n' - 'have “return” annotation of the form “"-> expression"” after ' - 'the\n' - 'parameter list. These annotations can be any valid Python ' - 'expression.\n' - 'The presence of annotations does not change the semantics of a\n' - 'function. The annotation values are available as values of a\n' - 'dictionary keyed by the parameters’ names in the ' - '"__annotations__"\n' - 'attribute of the function object. If the "annotations" import ' - 'from\n' - '"__future__" is used, annotations are preserved as strings at ' - 'runtime\n' - 'which enables postponed evaluation. Otherwise, they are ' - 'evaluated\n' - 'when the function definition is executed. 
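A short sketch of the positional-only ("/") and keyword-only ("*") parameter markers discussed above, with an illustrative function:

```python
def clamp(value, /, lo=0.0, hi=1.0, *, strict=False):
    # 'value' is positional-only (before "/"); 'strict' is keyword-only (after "*")
    if strict and not lo <= value <= hi:
        raise ValueError("value out of range")
    return min(max(value, lo), hi)

print(clamp(1.7))              # 1.0
print(clamp(1.7, hi=2.0))      # 1.7
# clamp(value=1.7)             # TypeError: 'value' may not be passed by keyword
# clamp(0.5, 0.0, 1.0, True)   # TypeError: 'strict' may only be passed by keyword
```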
In this case ' - 'annotations\n' - 'may be evaluated in a different order than they appear in the ' - 'source\n' - 'code.\n' - '\n' - 'It is also possible to create anonymous functions (functions not ' - 'bound\n' - 'to a name), for immediate use in expressions. This uses lambda\n' - 'expressions, described in section Lambdas. Note that the ' - 'lambda\n' - 'expression is merely a shorthand for a simplified function ' - 'definition;\n' - 'a function defined in a “"def"” statement can be passed around ' - 'or\n' - 'assigned to another name just like a function defined by a ' - 'lambda\n' - 'expression. The “"def"” form is actually more powerful since ' - 'it\n' - 'allows the execution of multiple statements and annotations.\n' - '\n' - '**Programmer’s note:** Functions are first-class objects. A ' - '“"def"”\n' - 'statement executed inside a function definition defines a local\n' - 'function that can be returned or passed around. Free variables ' - 'used\n' - 'in the nested function can access the local variables of the ' - 'function\n' - 'containing the def. See section Naming and binding for ' - 'details.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3107** - Function Annotations\n' - ' The original specification for function annotations.\n' - '\n' - ' **PEP 484** - Type Hints\n' - ' Definition of a standard meaning for annotations: type ' - 'hints.\n' - '\n' - ' **PEP 526** - Syntax for Variable Annotations\n' - ' Ability to type hint variable declarations, including ' - 'class\n' - ' variables and instance variables\n' - '\n' - ' **PEP 563** - Postponed Evaluation of Annotations\n' - ' Support for forward references within annotations by ' - 'preserving\n' - ' annotations in a string form at runtime instead of eager\n' - ' evaluation.\n', - 'global': 'The "global" statement\n' - '**********************\n' - '\n' - ' global_stmt ::= "global" identifier ("," identifier)*\n' - '\n' - 'The "global" statement is a declaration which holds for the ' - 'entire\n' - 'current code block. It means that the listed identifiers are to ' - 'be\n' - 'interpreted as globals. It would be impossible to assign to a ' - 'global\n' - 'variable without "global", although free variables may refer to\n' - 'globals without being declared global.\n' - '\n' - 'Names listed in a "global" statement must not be used in the same ' - 'code\n' - 'block textually preceding that "global" statement.\n' - '\n' - 'Names listed in a "global" statement must not be defined as ' - 'formal\n' - 'parameters, or as targets in "with" statements or "except" ' - 'clauses, or\n' - 'in a "for" target list, "class" definition, function definition,\n' - '"import" statement, or variable annotation.\n' - '\n' - '**CPython implementation detail:** The current implementation does ' - 'not\n' - 'enforce some of these restrictions, but programs should not abuse ' - 'this\n' - 'freedom, as future implementations may enforce them or silently ' - 'change\n' - 'the meaning of the program.\n' - '\n' - '**Programmer’s note:** "global" is a directive to the parser. It\n' - 'applies only to code parsed at the same time as the "global"\n' - 'statement. In particular, a "global" statement contained in a ' - 'string\n' - 'or code object supplied to the built-in "exec()" function does ' - 'not\n' - 'affect the code block *containing* the function call, and code\n' - 'contained in such a string is unaffected by "global" statements in ' - 'the\n' - 'code containing the function call. 
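A minimal sketch of the "global" declaration described above, using an illustrative module-level name:

```python
counter = 0

def bump():
    global counter   # without this, the assignment would make 'counter' local
    counter += 1     # and raise UnboundLocalError on first use

bump()
bump()
print(counter)       # 2
```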
The same applies to the ' - '"eval()"\n' - 'and "compile()" functions.\n', - 'id-classes': 'Reserved classes of identifiers\n' - '*******************************\n' - '\n' - 'Certain classes of identifiers (besides keywords) have ' - 'special\n' - 'meanings. These classes are identified by the patterns of ' - 'leading and\n' - 'trailing underscore characters:\n' - '\n' - '"_*"\n' - ' Not imported by "from module import *". The special ' - 'identifier "_"\n' - ' is used in the interactive interpreter to store the result ' - 'of the\n' - ' last evaluation; it is stored in the "builtins" module. ' - 'When not\n' - ' in interactive mode, "_" has no special meaning and is not ' - 'defined.\n' - ' See section The import statement.\n' - '\n' - ' Note:\n' - '\n' - ' The name "_" is often used in conjunction with\n' - ' internationalization; refer to the documentation for the\n' - ' "gettext" module for more information on this ' - 'convention.\n' - '\n' - '"__*__"\n' - ' System-defined names, informally known as “dunder” names. ' - 'These\n' - ' names are defined by the interpreter and its ' - 'implementation\n' - ' (including the standard library). Current system names are\n' - ' discussed in the Special method names section and ' - 'elsewhere. More\n' - ' will likely be defined in future versions of Python. *Any* ' - 'use of\n' - ' "__*__" names, in any context, that does not follow ' - 'explicitly\n' - ' documented use, is subject to breakage without warning.\n' - '\n' - '"__*"\n' - ' Class-private names. Names in this category, when used ' - 'within the\n' - ' context of a class definition, are re-written to use a ' - 'mangled form\n' - ' to help avoid name clashes between “private” attributes of ' - 'base and\n' - ' derived classes. See section Identifiers (Names).\n', - 'identifiers': 'Identifiers and keywords\n' - '************************\n' - '\n' - 'Identifiers (also referred to as *names*) are described by ' - 'the\n' - 'following lexical definitions.\n' - '\n' - 'The syntax of identifiers in Python is based on the Unicode ' - 'standard\n' - 'annex UAX-31, with elaboration and changes as defined below; ' - 'see also\n' - '**PEP 3131** for further details.\n' - '\n' - 'Within the ASCII range (U+0001..U+007F), the valid characters ' - 'for\n' - 'identifiers are the same as in Python 2.x: the uppercase and ' - 'lowercase\n' - 'letters "A" through "Z", the underscore "_" and, except for ' - 'the first\n' - 'character, the digits "0" through "9".\n' - '\n' - 'Python 3.0 introduces additional characters from outside the ' - 'ASCII\n' - 'range (see **PEP 3131**). For these characters, the ' - 'classification\n' - 'uses the version of the Unicode Character Database as ' - 'included in the\n' - '"unicodedata" module.\n' - '\n' - 'Identifiers are unlimited in length. 
Case is significant.\n' - '\n' - ' identifier ::= xid_start xid_continue*\n' - ' id_start ::= \n' - ' id_continue ::= \n' - ' xid_start ::= \n' - ' xid_continue ::= \n' - '\n' - 'The Unicode category codes mentioned above stand for:\n' - '\n' - '* *Lu* - uppercase letters\n' - '\n' - '* *Ll* - lowercase letters\n' - '\n' - '* *Lt* - titlecase letters\n' - '\n' - '* *Lm* - modifier letters\n' - '\n' - '* *Lo* - other letters\n' - '\n' - '* *Nl* - letter numbers\n' - '\n' - '* *Mn* - nonspacing marks\n' - '\n' - '* *Mc* - spacing combining marks\n' - '\n' - '* *Nd* - decimal numbers\n' - '\n' - '* *Pc* - connector punctuations\n' - '\n' - '* *Other_ID_Start* - explicit list of characters in ' - 'PropList.txt to\n' - ' support backwards compatibility\n' - '\n' - '* *Other_ID_Continue* - likewise\n' - '\n' - 'All identifiers are converted into the normal form NFKC while ' - 'parsing;\n' - 'comparison of identifiers is based on NFKC.\n' - '\n' - 'A non-normative HTML file listing all valid identifier ' - 'characters for\n' - 'Unicode 4.1 can be found at\n' - 'https://www.unicode.org/Public/13.0.0/ucd/DerivedCoreProperties.txt\n' - '\n' - '\n' - 'Keywords\n' - '========\n' - '\n' - 'The following identifiers are used as reserved words, or ' - '*keywords* of\n' - 'the language, and cannot be used as ordinary identifiers. ' - 'They must\n' - 'be spelled exactly as written here:\n' - '\n' - ' False await else import pass\n' - ' None break except in raise\n' - ' True class finally is return\n' - ' and continue for lambda try\n' - ' as def from nonlocal while\n' - ' assert del global not with\n' - ' async elif if or yield\n' - '\n' - '\n' - 'Soft Keywords\n' - '=============\n' - '\n' - 'New in version 3.10.\n' - '\n' - 'Some identifiers are only reserved under specific contexts. ' - 'These are\n' - 'known as *soft keywords*. The identifiers "match", "case" ' - 'and "_" can\n' - 'syntactically act as keywords in contexts related to the ' - 'pattern\n' - 'matching statement, but this distinction is done at the ' - 'parser level,\n' - 'not when tokenizing.\n' - '\n' - 'As soft keywords, their use with pattern matching is possible ' - 'while\n' - 'still preserving compatibility with existing code that uses ' - '"match",\n' - '"case" and "_" as identifier names.\n' - '\n' - '\n' - 'Reserved classes of identifiers\n' - '===============================\n' - '\n' - 'Certain classes of identifiers (besides keywords) have ' - 'special\n' - 'meanings. These classes are identified by the patterns of ' - 'leading and\n' - 'trailing underscore characters:\n' - '\n' - '"_*"\n' - ' Not imported by "from module import *". The special ' - 'identifier "_"\n' - ' is used in the interactive interpreter to store the result ' - 'of the\n' - ' last evaluation; it is stored in the "builtins" module. ' - 'When not\n' - ' in interactive mode, "_" has no special meaning and is not ' - 'defined.\n' - ' See section The import statement.\n' - '\n' - ' Note:\n' - '\n' - ' The name "_" is often used in conjunction with\n' - ' internationalization; refer to the documentation for ' - 'the\n' - ' "gettext" module for more information on this ' - 'convention.\n' - '\n' - '"__*__"\n' - ' System-defined names, informally known as “dunder” names. ' - 'These\n' - ' names are defined by the interpreter and its ' - 'implementation\n' - ' (including the standard library). Current system names ' - 'are\n' - ' discussed in the Special method names section and ' - 'elsewhere. More\n' - ' will likely be defined in future versions of Python. 
' - '*Any* use of\n' - ' "__*__" names, in any context, that does not follow ' - 'explicitly\n' - ' documented use, is subject to breakage without warning.\n' - '\n' - '"__*"\n' - ' Class-private names. Names in this category, when used ' - 'within the\n' - ' context of a class definition, are re-written to use a ' - 'mangled form\n' - ' to help avoid name clashes between “private” attributes of ' - 'base and\n' - ' derived classes. See section Identifiers (Names).\n', - 'if': 'The "if" statement\n' - '******************\n' - '\n' - 'The "if" statement is used for conditional execution:\n' - '\n' - ' if_stmt ::= "if" assignment_expression ":" suite\n' - ' ("elif" assignment_expression ":" suite)*\n' - ' ["else" ":" suite]\n' - '\n' - 'It selects exactly one of the suites by evaluating the expressions ' - 'one\n' - 'by one until one is found to be true (see section Boolean operations\n' - 'for the definition of true and false); then that suite is executed\n' - '(and no other part of the "if" statement is executed or evaluated).\n' - 'If all expressions are false, the suite of the "else" clause, if\n' - 'present, is executed.\n', - 'imaginary': 'Imaginary literals\n' - '******************\n' - '\n' - 'Imaginary literals are described by the following lexical ' - 'definitions:\n' - '\n' - ' imagnumber ::= (floatnumber | digitpart) ("j" | "J")\n' - '\n' - 'An imaginary literal yields a complex number with a real part ' - 'of 0.0.\n' - 'Complex numbers are represented as a pair of floating point ' - 'numbers\n' - 'and have the same restrictions on their range. To create a ' - 'complex\n' - 'number with a nonzero real part, add a floating point number to ' - 'it,\n' - 'e.g., "(3+4j)". Some examples of imaginary literals:\n' - '\n' - ' 3.14j 10.j 10j .001j 1e100j 3.14e-10j ' - '3.14_15_93j\n', - 'import': 'The "import" statement\n' - '**********************\n' - '\n' - ' import_stmt ::= "import" module ["as" identifier] ("," ' - 'module ["as" identifier])*\n' - ' | "from" relative_module "import" identifier ' - '["as" identifier]\n' - ' ("," identifier ["as" identifier])*\n' - ' | "from" relative_module "import" "(" ' - 'identifier ["as" identifier]\n' - ' ("," identifier ["as" identifier])* [","] ")"\n' - ' | "from" relative_module "import" "*"\n' - ' module ::= (identifier ".")* identifier\n' - ' relative_module ::= "."* module | "."+\n' - '\n' - 'The basic import statement (no "from" clause) is executed in two\n' - 'steps:\n' - '\n' - '1. find a module, loading and initializing it if necessary\n' - '\n' - '2. define a name or names in the local namespace for the scope ' - 'where\n' - ' the "import" statement occurs.\n' - '\n' - 'When the statement contains multiple clauses (separated by commas) ' - 'the\n' - 'two steps are carried out separately for each clause, just as ' - 'though\n' - 'the clauses had been separated out into individual import ' - 'statements.\n' - '\n' - 'The details of the first step, finding and loading modules are\n' - 'described in greater detail in the section on the import system, ' - 'which\n' - 'also describes the various types of packages and modules that can ' - 'be\n' - 'imported, as well as all the hooks that can be used to customize ' - 'the\n' - 'import system. 
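The class-private name mangling of "__*" names, described under Reserved classes of identifiers above, can be observed directly; the class and attribute names below are illustrative:

```python
class Account:
    def __init__(self, balance):
        self.__balance = balance      # stored as '_Account__balance'

class Savings(Account):
    def __init__(self, balance):
        super().__init__(balance)
        self.__balance = "shadow"     # mangled to '_Savings__balance': no clash

s = Savings(100)
print(s._Account__balance)   # 100
print(s._Savings__balance)   # 'shadow'
```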
Note that failures in this step may indicate ' - 'either\n' - 'that the module could not be located, *or* that an error occurred\n' - 'while initializing the module, which includes execution of the\n' - 'module’s code.\n' - '\n' - 'If the requested module is retrieved successfully, it will be ' - 'made\n' - 'available in the local namespace in one of three ways:\n' - '\n' - '* If the module name is followed by "as", then the name following ' - '"as"\n' - ' is bound directly to the imported module.\n' - '\n' - '* If no other name is specified, and the module being imported is ' - 'a\n' - ' top level module, the module’s name is bound in the local ' - 'namespace\n' - ' as a reference to the imported module\n' - '\n' - '* If the module being imported is *not* a top level module, then ' - 'the\n' - ' name of the top level package that contains the module is bound ' - 'in\n' - ' the local namespace as a reference to the top level package. ' - 'The\n' - ' imported module must be accessed using its full qualified name\n' - ' rather than directly\n' - '\n' - 'The "from" form uses a slightly more complex process:\n' - '\n' - '1. find the module specified in the "from" clause, loading and\n' - ' initializing it if necessary;\n' - '\n' - '2. for each of the identifiers specified in the "import" clauses:\n' - '\n' - ' 1. check if the imported module has an attribute by that name\n' - '\n' - ' 2. if not, attempt to import a submodule with that name and ' - 'then\n' - ' check the imported module again for that attribute\n' - '\n' - ' 3. if the attribute is not found, "ImportError" is raised.\n' - '\n' - ' 4. otherwise, a reference to that value is stored in the local\n' - ' namespace, using the name in the "as" clause if it is ' - 'present,\n' - ' otherwise using the attribute name\n' - '\n' - 'Examples:\n' - '\n' - ' import foo # foo imported and bound locally\n' - ' import foo.bar.baz # foo.bar.baz imported, foo bound ' - 'locally\n' - ' import foo.bar.baz as fbb # foo.bar.baz imported and bound as ' - 'fbb\n' - ' from foo.bar import baz # foo.bar.baz imported and bound as ' - 'baz\n' - ' from foo import attr # foo imported and foo.attr bound as ' - 'attr\n' - '\n' - 'If the list of identifiers is replaced by a star ("\'*\'"), all ' - 'public\n' - 'names defined in the module are bound in the local namespace for ' - 'the\n' - 'scope where the "import" statement occurs.\n' - '\n' - 'The *public names* defined by a module are determined by checking ' - 'the\n' - 'module’s namespace for a variable named "__all__"; if defined, it ' - 'must\n' - 'be a sequence of strings which are names defined or imported by ' - 'that\n' - 'module. The names given in "__all__" are all considered public ' - 'and\n' - 'are required to exist. If "__all__" is not defined, the set of ' - 'public\n' - 'names includes all names found in the module’s namespace which do ' - 'not\n' - 'begin with an underscore character ("\'_\'"). "__all__" should ' - 'contain\n' - 'the entire public API. It is intended to avoid accidentally ' - 'exporting\n' - 'items that are not part of the API (such as library modules which ' - 'were\n' - 'imported and used within the module).\n' - '\n' - 'The wild card form of import — "from module import *" — is only\n' - 'allowed at the module level. Attempting to use it in class or\n' - 'function definitions will raise a "SyntaxError".\n' - '\n' - 'When specifying what module to import you do not have to specify ' - 'the\n' - 'absolute name of the module. 
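A sketch of the "__all__" convention described above; assume the following is saved as a hypothetical module, e.g. shapes.py:

```python
__all__ = ["Circle", "area"]     # names exported by "from shapes import *"

class Circle:
    def __init__(self, radius):
        self.radius = radius

def area(circle):
    return 3.14159 * circle.radius ** 2

def _validate(circle):           # underscore prefix: never exported by "import *"
    return circle.radius >= 0

def debug_dump(circle):          # public name, but omitted from __all__,
    print(vars(circle))          # so "import *" does not bind it either
```

A client module that runs `from shapes import *` then receives only `Circle` and `area`.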
When a module or package is ' - 'contained\n' - 'within another package it is possible to make a relative import ' - 'within\n' - 'the same top package without having to mention the package name. ' - 'By\n' - 'using leading dots in the specified module or package after "from" ' - 'you\n' - 'can specify how high to traverse up the current package hierarchy\n' - 'without specifying exact names. One leading dot means the current\n' - 'package where the module making the import exists. Two dots means ' - 'up\n' - 'one package level. Three dots is up two levels, etc. So if you ' - 'execute\n' - '"from . import mod" from a module in the "pkg" package then you ' - 'will\n' - 'end up importing "pkg.mod". If you execute "from ..subpkg2 import ' - 'mod"\n' - 'from within "pkg.subpkg1" you will import "pkg.subpkg2.mod". The\n' - 'specification for relative imports is contained in the Package\n' - 'Relative Imports section.\n' - '\n' - '"importlib.import_module()" is provided to support applications ' - 'that\n' - 'determine dynamically the modules to be loaded.\n' - '\n' - 'Raises an auditing event "import" with arguments "module", ' - '"filename",\n' - '"sys.path", "sys.meta_path", "sys.path_hooks".\n' - '\n' - '\n' - 'Future statements\n' - '=================\n' - '\n' - 'A *future statement* is a directive to the compiler that a ' - 'particular\n' - 'module should be compiled using syntax or semantics that will be\n' - 'available in a specified future release of Python where the ' - 'feature\n' - 'becomes standard.\n' - '\n' - 'The future statement is intended to ease migration to future ' - 'versions\n' - 'of Python that introduce incompatible changes to the language. ' - 'It\n' - 'allows use of the new features on a per-module basis before the\n' - 'release in which the feature becomes standard.\n' - '\n' - ' future_stmt ::= "from" "__future__" "import" feature ["as" ' - 'identifier]\n' - ' ("," feature ["as" identifier])*\n' - ' | "from" "__future__" "import" "(" feature ' - '["as" identifier]\n' - ' ("," feature ["as" identifier])* [","] ")"\n' - ' feature ::= identifier\n' - '\n' - 'A future statement must appear near the top of the module. The ' - 'only\n' - 'lines that can appear before a future statement are:\n' - '\n' - '* the module docstring (if any),\n' - '\n' - '* comments,\n' - '\n' - '* blank lines, and\n' - '\n' - '* other future statements.\n' - '\n' - 'The only feature that requires using the future statement is\n' - '"annotations" (see **PEP 563**).\n' - '\n' - 'All historical features enabled by the future statement are still\n' - 'recognized by Python 3. The list includes "absolute_import",\n' - '"division", "generators", "generator_stop", "unicode_literals",\n' - '"print_function", "nested_scopes" and "with_statement". They are ' - 'all\n' - 'redundant because they are always enabled, and only kept for ' - 'backwards\n' - 'compatibility.\n' - '\n' - 'A future statement is recognized and treated specially at compile\n' - 'time: Changes to the semantics of core constructs are often\n' - 'implemented by generating different code. It may even be the ' - 'case\n' - 'that a new feature introduces new incompatible syntax (such as a ' - 'new\n' - 'reserved word), in which case the compiler may need to parse the\n' - 'module differently. 
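A small sketch of the one feature that currently requires a future statement, "annotations" (PEP 563); the function and class names are illustrative:

```python
from __future__ import annotations   # must be among the first statements

def link(node: Node) -> Node:        # forward reference is fine: annotations
    return node                      # are kept as strings, not evaluated here

class Node:
    pass

print(link.__annotations__)          # {'node': 'Node', 'return': 'Node'}
```

Without the future statement, the annotation `Node` would be evaluated when the `def` executes and would raise `NameError`, since the class is defined later.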
Such decisions cannot be pushed off until\n' - 'runtime.\n' - '\n' - 'For any given release, the compiler knows which feature names ' - 'have\n' - 'been defined, and raises a compile-time error if a future ' - 'statement\n' - 'contains a feature not known to it.\n' - '\n' - 'The direct runtime semantics are the same as for any import ' - 'statement:\n' - 'there is a standard module "__future__", described later, and it ' - 'will\n' - 'be imported in the usual way at the time the future statement is\n' - 'executed.\n' - '\n' - 'The interesting runtime semantics depend on the specific feature\n' - 'enabled by the future statement.\n' - '\n' - 'Note that there is nothing special about the statement:\n' - '\n' - ' import __future__ [as name]\n' - '\n' - 'That is not a future statement; it’s an ordinary import statement ' - 'with\n' - 'no special semantics or syntax restrictions.\n' - '\n' - 'Code compiled by calls to the built-in functions "exec()" and\n' - '"compile()" that occur in a module "M" containing a future ' - 'statement\n' - 'will, by default, use the new syntax or semantics associated with ' - 'the\n' - 'future statement. This can be controlled by optional arguments ' - 'to\n' - '"compile()" — see the documentation of that function for details.\n' - '\n' - 'A future statement typed at an interactive interpreter prompt ' - 'will\n' - 'take effect for the rest of the interpreter session. If an\n' - 'interpreter is started with the "-i" option, is passed a script ' - 'name\n' - 'to execute, and the script includes a future statement, it will be ' - 'in\n' - 'effect in the interactive session started after the script is\n' - 'executed.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 236** - Back to the __future__\n' - ' The original proposal for the __future__ mechanism.\n', - 'in': 'Membership test operations\n' - '**************************\n' - '\n' - 'The operators "in" and "not in" test for membership. "x in s"\n' - 'evaluates to "True" if *x* is a member of *s*, and "False" otherwise.\n' - '"x not in s" returns the negation of "x in s". All built-in ' - 'sequences\n' - 'and set types support this as well as dictionary, for which "in" ' - 'tests\n' - 'whether the dictionary has a given key. For container types such as\n' - 'list, tuple, set, frozenset, dict, or collections.deque, the\n' - 'expression "x in y" is equivalent to "any(x is e or x == e for e in\n' - 'y)".\n' - '\n' - 'For the string and bytes types, "x in y" is "True" if and only if *x*\n' - 'is a substring of *y*. An equivalent test is "y.find(x) != -1".\n' - 'Empty strings are always considered to be a substring of any other\n' - 'string, so """ in "abc"" will return "True".\n' - '\n' - 'For user-defined classes which define the "__contains__()" method, "x\n' - 'in y" returns "True" if "y.__contains__(x)" returns a true value, and\n' - '"False" otherwise.\n' - '\n' - 'For user-defined classes which do not define "__contains__()" but do\n' - 'define "__iter__()", "x in y" is "True" if some value "z", for which\n' - 'the expression "x is z or x == z" is true, is produced while ' - 'iterating\n' - 'over "y". If an exception is raised during the iteration, it is as if\n' - '"in" raised that exception.\n' - '\n' - 'Lastly, the old-style iteration protocol is tried: if a class defines\n' - '"__getitem__()", "x in y" is "True" if and only if there is a non-\n' - 'negative integer index *i* such that "x is y[i] or x == y[i]", and no\n' - 'lower integer index raises the "IndexError" exception. 
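A minimal sketch of the "__contains__()" hook described above, with an illustrative class:

```python
class EvenNumbers:
    def __contains__(self, item):
        # "item in EvenNumbers()" delegates to this method
        return isinstance(item, int) and item % 2 == 0

evens = EvenNumbers()
print(4 in evens)        # True
print(7 in evens)        # False
print(7 not in evens)    # True  ("not in" is the inverse of "in")
```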
(If any other\n' - 'exception is raised, it is as if "in" raised that exception).\n' - '\n' - 'The operator "not in" is defined to have the inverse truth value of\n' - '"in".\n', - 'integers': 'Integer literals\n' - '****************\n' - '\n' - 'Integer literals are described by the following lexical ' - 'definitions:\n' - '\n' - ' integer ::= decinteger | bininteger | octinteger | ' - 'hexinteger\n' - ' decinteger ::= nonzerodigit (["_"] digit)* | "0"+ (["_"] ' - '"0")*\n' - ' bininteger ::= "0" ("b" | "B") (["_"] bindigit)+\n' - ' octinteger ::= "0" ("o" | "O") (["_"] octdigit)+\n' - ' hexinteger ::= "0" ("x" | "X") (["_"] hexdigit)+\n' - ' nonzerodigit ::= "1"..."9"\n' - ' digit ::= "0"..."9"\n' - ' bindigit ::= "0" | "1"\n' - ' octdigit ::= "0"..."7"\n' - ' hexdigit ::= digit | "a"..."f" | "A"..."F"\n' - '\n' - 'There is no limit for the length of integer literals apart from ' - 'what\n' - 'can be stored in available memory.\n' - '\n' - 'Underscores are ignored for determining the numeric value of ' - 'the\n' - 'literal. They can be used to group digits for enhanced ' - 'readability.\n' - 'One underscore can occur between digits, and after base ' - 'specifiers\n' - 'like "0x".\n' - '\n' - 'Note that leading zeros in a non-zero decimal number are not ' - 'allowed.\n' - 'This is for disambiguation with C-style octal literals, which ' - 'Python\n' - 'used before version 3.0.\n' - '\n' - 'Some examples of integer literals:\n' - '\n' - ' 7 2147483647 0o177 0b100110111\n' - ' 3 79228162514264337593543950336 0o377 0xdeadbeef\n' - ' 100_000_000_000 0b_1110_0101\n' - '\n' - 'Changed in version 3.6: Underscores are now allowed for ' - 'grouping\n' - 'purposes in literals.\n', - 'lambda': 'Lambdas\n' - '*******\n' - '\n' - ' lambda_expr ::= "lambda" [parameter_list] ":" expression\n' - '\n' - 'Lambda expressions (sometimes called lambda forms) are used to ' - 'create\n' - 'anonymous functions. The expression "lambda parameters: ' - 'expression"\n' - 'yields a function object. The unnamed object behaves like a ' - 'function\n' - 'object defined with:\n' - '\n' - ' def (parameters):\n' - ' return expression\n' - '\n' - 'See section Function definitions for the syntax of parameter ' - 'lists.\n' - 'Note that functions created with lambda expressions cannot ' - 'contain\n' - 'statements or annotations.\n', - 'lists': 'List displays\n' - '*************\n' - '\n' - 'A list display is a possibly empty series of expressions enclosed ' - 'in\n' - 'square brackets:\n' - '\n' - ' list_display ::= "[" [starred_list | comprehension] "]"\n' - '\n' - 'A list display yields a new list object, the contents being ' - 'specified\n' - 'by either a list of expressions or a comprehension. When a comma-\n' - 'separated list of expressions is supplied, its elements are ' - 'evaluated\n' - 'from left to right and placed into the list object in that order.\n' - 'When a comprehension is supplied, the list is constructed from the\n' - 'elements resulting from the comprehension.\n', - 'naming': 'Naming and binding\n' - '******************\n' - '\n' - '\n' - 'Binding of names\n' - '================\n' - '\n' - '*Names* refer to objects. 
Names are introduced by name binding\n' - 'operations.\n' - '\n' - 'The following constructs bind names: formal parameters to ' - 'functions,\n' - '"import" statements, class and function definitions (these bind ' - 'the\n' - 'class or function name in the defining block), and targets that ' - 'are\n' - 'identifiers if occurring in an assignment, "for" loop header, or ' - 'after\n' - '"as" in a "with" statement or "except" clause. The "import" ' - 'statement\n' - 'of the form "from ... import *" binds all names defined in the\n' - 'imported module, except those beginning with an underscore. This ' - 'form\n' - 'may only be used at the module level.\n' - '\n' - 'A target occurring in a "del" statement is also considered bound ' - 'for\n' - 'this purpose (though the actual semantics are to unbind the ' - 'name).\n' - '\n' - 'Each assignment or import statement occurs within a block defined ' - 'by a\n' - 'class or function definition or at the module level (the ' - 'top-level\n' - 'code block).\n' - '\n' - 'If a name is bound in a block, it is a local variable of that ' - 'block,\n' - 'unless declared as "nonlocal" or "global". If a name is bound at ' - 'the\n' - 'module level, it is a global variable. (The variables of the ' - 'module\n' - 'code block are local and global.) If a variable is used in a ' - 'code\n' - 'block but not defined there, it is a *free variable*.\n' - '\n' - 'Each occurrence of a name in the program text refers to the ' - '*binding*\n' - 'of that name established by the following name resolution rules.\n' - '\n' - '\n' - 'Resolution of names\n' - '===================\n' - '\n' - 'A *scope* defines the visibility of a name within a block. If a ' - 'local\n' - 'variable is defined in a block, its scope includes that block. If ' - 'the\n' - 'definition occurs in a function block, the scope extends to any ' - 'blocks\n' - 'contained within the defining one, unless a contained block ' - 'introduces\n' - 'a different binding for the name.\n' - '\n' - 'When a name is used in a code block, it is resolved using the ' - 'nearest\n' - 'enclosing scope. The set of all such scopes visible to a code ' - 'block\n' - 'is called the block’s *environment*.\n' - '\n' - 'When a name is not found at all, a "NameError" exception is ' - 'raised. If\n' - 'the current scope is a function scope, and the name refers to a ' - 'local\n' - 'variable that has not yet been bound to a value at the point where ' - 'the\n' - 'name is used, an "UnboundLocalError" exception is raised.\n' - '"UnboundLocalError" is a subclass of "NameError".\n' - '\n' - 'If a name binding operation occurs anywhere within a code block, ' - 'all\n' - 'uses of the name within the block are treated as references to ' - 'the\n' - 'current block. This can lead to errors when a name is used within ' - 'a\n' - 'block before it is bound. This rule is subtle. Python lacks\n' - 'declarations and allows name binding operations to occur anywhere\n' - 'within a code block. The local variables of a code block can be\n' - 'determined by scanning the entire text of the block for name ' - 'binding\n' - 'operations.\n' - '\n' - 'If the "global" statement occurs within a block, all uses of the ' - 'name\n' - 'specified in the statement refer to the binding of that name in ' - 'the\n' - 'top-level namespace. Names are resolved in the top-level ' - 'namespace by\n' - 'searching the global namespace, i.e. the namespace of the module\n' - 'containing the code block, and the builtins namespace, the ' - 'namespace\n' - 'of the module "builtins". 
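A small sketch of the binding rule described above (a name assigned anywhere in a block is local to the entire block); the names are illustrative:

```python
x = "module level"

def ok():
    print(x)          # no binding of 'x' in this block, so the global is read

def broken():
    print(x)          # UnboundLocalError when called: the assignment below
    x = "local"       # makes 'x' local for the whole block

ok()                  # prints 'module level'
# broken()            # would raise UnboundLocalError
```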
The global namespace is searched ' - 'first. If\n' - 'the name is not found there, the builtins namespace is searched. ' - 'The\n' - '"global" statement must precede all uses of the name.\n' - '\n' - 'The "global" statement has the same scope as a name binding ' - 'operation\n' - 'in the same block. If the nearest enclosing scope for a free ' - 'variable\n' - 'contains a global statement, the free variable is treated as a ' - 'global.\n' - '\n' - 'The "nonlocal" statement causes corresponding names to refer to\n' - 'previously bound variables in the nearest enclosing function ' - 'scope.\n' - '"SyntaxError" is raised at compile time if the given name does ' - 'not\n' - 'exist in any enclosing function scope.\n' - '\n' - 'The namespace for a module is automatically created the first time ' - 'a\n' - 'module is imported. The main module for a script is always ' - 'called\n' - '"__main__".\n' - '\n' - 'Class definition blocks and arguments to "exec()" and "eval()" ' - 'are\n' - 'special in the context of name resolution. A class definition is ' - 'an\n' - 'executable statement that may use and define names. These ' - 'references\n' - 'follow the normal rules for name resolution with an exception ' - 'that\n' - 'unbound local variables are looked up in the global namespace. ' - 'The\n' - 'namespace of the class definition becomes the attribute dictionary ' - 'of\n' - 'the class. The scope of names defined in a class block is limited ' - 'to\n' - 'the class block; it does not extend to the code blocks of methods ' - '–\n' - 'this includes comprehensions and generator expressions since they ' - 'are\n' - 'implemented using a function scope. This means that the ' - 'following\n' - 'will fail:\n' - '\n' - ' class A:\n' - ' a = 42\n' - ' b = list(a + i for i in range(10))\n' - '\n' - '\n' - 'Builtins and restricted execution\n' - '=================================\n' - '\n' - '**CPython implementation detail:** Users should not touch\n' - '"__builtins__"; it is strictly an implementation detail. Users\n' - 'wanting to override values in the builtins namespace should ' - '"import"\n' - 'the "builtins" module and modify its attributes appropriately.\n' - '\n' - 'The builtins namespace associated with the execution of a code ' - 'block\n' - 'is actually found by looking up the name "__builtins__" in its ' - 'global\n' - 'namespace; this should be a dictionary or a module (in the latter ' - 'case\n' - 'the module’s dictionary is used). By default, when in the ' - '"__main__"\n' - 'module, "__builtins__" is the built-in module "builtins"; when in ' - 'any\n' - 'other module, "__builtins__" is an alias for the dictionary of ' - 'the\n' - '"builtins" module itself.\n' - '\n' - '\n' - 'Interaction with dynamic features\n' - '=================================\n' - '\n' - 'Name resolution of free variables occurs at runtime, not at ' - 'compile\n' - 'time. This means that the following code will print 42:\n' - '\n' - ' i = 10\n' - ' def f():\n' - ' print(i)\n' - ' i = 42\n' - ' f()\n' - '\n' - 'The "eval()" and "exec()" functions do not have access to the ' - 'full\n' - 'environment for resolving names. Names may be resolved in the ' - 'local\n' - 'and global namespaces of the caller. Free variables are not ' - 'resolved\n' - 'in the nearest enclosing namespace, but in the global namespace. ' - '[1]\n' - 'The "exec()" and "eval()" functions have optional arguments to\n' - 'override the global and local namespace. 
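A short sketch of the optional namespace arguments to "exec()" mentioned above:

```python
namespace = {}                        # a single mapping serves as both
exec("result = 6 * 7", namespace)     # the global and the local namespace
print(namespace["result"])            # 42

glb, loc = {"base": 10}, {}
exec("total = base + 5", glb, loc)    # separate global and local namespaces
print(loc["total"])                   # 15
```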
If only one namespace ' - 'is\n' - 'specified, it is used for both.\n', - 'nonlocal': 'The "nonlocal" statement\n' - '************************\n' - '\n' - ' nonlocal_stmt ::= "nonlocal" identifier ("," identifier)*\n' - '\n' - 'The "nonlocal" statement causes the listed identifiers to refer ' - 'to\n' - 'previously bound variables in the nearest enclosing scope ' - 'excluding\n' - 'globals. This is important because the default behavior for ' - 'binding is\n' - 'to search the local namespace first. The statement allows\n' - 'encapsulated code to rebind variables outside of the local ' - 'scope\n' - 'besides the global (module) scope.\n' - '\n' - 'Names listed in a "nonlocal" statement, unlike those listed in ' - 'a\n' - '"global" statement, must refer to pre-existing bindings in an\n' - 'enclosing scope (the scope in which a new binding should be ' - 'created\n' - 'cannot be determined unambiguously).\n' - '\n' - 'Names listed in a "nonlocal" statement must not collide with ' - 'pre-\n' - 'existing bindings in the local scope.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3104** - Access to Names in Outer Scopes\n' - ' The specification for the "nonlocal" statement.\n', - 'numbers': 'Numeric literals\n' - '****************\n' - '\n' - 'There are three types of numeric literals: integers, floating ' - 'point\n' - 'numbers, and imaginary numbers. There are no complex literals\n' - '(complex numbers can be formed by adding a real number and an\n' - 'imaginary number).\n' - '\n' - 'Note that numeric literals do not include a sign; a phrase like ' - '"-1"\n' - 'is actually an expression composed of the unary operator ‘"-"’ ' - 'and the\n' - 'literal "1".\n', - 'numeric-types': 'Emulating numeric types\n' - '***********************\n' - '\n' - 'The following methods can be defined to emulate numeric ' - 'objects.\n' - 'Methods corresponding to operations that are not supported ' - 'by the\n' - 'particular kind of number implemented (e.g., bitwise ' - 'operations for\n' - 'non-integral numbers) should be left undefined.\n' - '\n' - 'object.__add__(self, other)\n' - 'object.__sub__(self, other)\n' - 'object.__mul__(self, other)\n' - 'object.__matmul__(self, other)\n' - 'object.__truediv__(self, other)\n' - 'object.__floordiv__(self, other)\n' - 'object.__mod__(self, other)\n' - 'object.__divmod__(self, other)\n' - 'object.__pow__(self, other[, modulo])\n' - 'object.__lshift__(self, other)\n' - 'object.__rshift__(self, other)\n' - 'object.__and__(self, other)\n' - 'object.__xor__(self, other)\n' - 'object.__or__(self, other)\n' - '\n' - ' These methods are called to implement the binary ' - 'arithmetic\n' - ' operations ("+", "-", "*", "@", "/", "//", "%", ' - '"divmod()",\n' - ' "pow()", "**", "<<", ">>", "&", "^", "|"). For ' - 'instance, to\n' - ' evaluate the expression "x + y", where *x* is an ' - 'instance of a\n' - ' class that has an "__add__()" method, "x.__add__(y)" is ' - 'called.\n' - ' The "__divmod__()" method should be the equivalent to ' - 'using\n' - ' "__floordiv__()" and "__mod__()"; it should not be ' - 'related to\n' - ' "__truediv__()". 
Note that "__pow__()" should be ' - 'defined to accept\n' - ' an optional third argument if the ternary version of the ' - 'built-in\n' - ' "pow()" function is to be supported.\n' - '\n' - ' If one of those methods does not support the operation ' - 'with the\n' - ' supplied arguments, it should return "NotImplemented".\n' - '\n' - 'object.__radd__(self, other)\n' - 'object.__rsub__(self, other)\n' - 'object.__rmul__(self, other)\n' - 'object.__rmatmul__(self, other)\n' - 'object.__rtruediv__(self, other)\n' - 'object.__rfloordiv__(self, other)\n' - 'object.__rmod__(self, other)\n' - 'object.__rdivmod__(self, other)\n' - 'object.__rpow__(self, other[, modulo])\n' - 'object.__rlshift__(self, other)\n' - 'object.__rrshift__(self, other)\n' - 'object.__rand__(self, other)\n' - 'object.__rxor__(self, other)\n' - 'object.__ror__(self, other)\n' - '\n' - ' These methods are called to implement the binary ' - 'arithmetic\n' - ' operations ("+", "-", "*", "@", "/", "//", "%", ' - '"divmod()",\n' - ' "pow()", "**", "<<", ">>", "&", "^", "|") with reflected ' - '(swapped)\n' - ' operands. These functions are only called if the left ' - 'operand does\n' - ' not support the corresponding operation [3] and the ' - 'operands are of\n' - ' different types. [4] For instance, to evaluate the ' - 'expression "x -\n' - ' y", where *y* is an instance of a class that has an ' - '"__rsub__()"\n' - ' method, "y.__rsub__(x)" is called if "x.__sub__(y)" ' - 'returns\n' - ' *NotImplemented*.\n' - '\n' - ' Note that ternary "pow()" will not try calling ' - '"__rpow__()" (the\n' - ' coercion rules would become too complicated).\n' - '\n' - ' Note:\n' - '\n' - ' If the right operand’s type is a subclass of the left ' - 'operand’s\n' - ' type and that subclass provides a different ' - 'implementation of the\n' - ' reflected method for the operation, this method will ' - 'be called\n' - ' before the left operand’s non-reflected method. This ' - 'behavior\n' - ' allows subclasses to override their ancestors’ ' - 'operations.\n' - '\n' - 'object.__iadd__(self, other)\n' - 'object.__isub__(self, other)\n' - 'object.__imul__(self, other)\n' - 'object.__imatmul__(self, other)\n' - 'object.__itruediv__(self, other)\n' - 'object.__ifloordiv__(self, other)\n' - 'object.__imod__(self, other)\n' - 'object.__ipow__(self, other[, modulo])\n' - 'object.__ilshift__(self, other)\n' - 'object.__irshift__(self, other)\n' - 'object.__iand__(self, other)\n' - 'object.__ixor__(self, other)\n' - 'object.__ior__(self, other)\n' - '\n' - ' These methods are called to implement the augmented ' - 'arithmetic\n' - ' assignments ("+=", "-=", "*=", "@=", "/=", "//=", "%=", ' - '"**=",\n' - ' "<<=", ">>=", "&=", "^=", "|="). These methods should ' - 'attempt to\n' - ' do the operation in-place (modifying *self*) and return ' - 'the result\n' - ' (which could be, but does not have to be, *self*). If a ' - 'specific\n' - ' method is not defined, the augmented assignment falls ' - 'back to the\n' - ' normal methods. For instance, if *x* is an instance of ' - 'a class\n' - ' with an "__iadd__()" method, "x += y" is equivalent to ' - '"x =\n' - ' x.__iadd__(y)" . Otherwise, "x.__add__(y)" and ' - '"y.__radd__(x)" are\n' - ' considered, as with the evaluation of "x + y". 
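A minimal sketch of the "__add__()"/"__radd__()" protocol and the "NotImplemented" convention described above; the class is illustrative only:

```python
class Metres:
    def __init__(self, value):
        self.value = value

    def __add__(self, other):
        if isinstance(other, Metres):
            return Metres(self.value + other.value)
        if isinstance(other, (int, float)):
            return Metres(self.value + other)
        return NotImplemented            # let the other operand try its method

    def __radd__(self, other):
        # reached for "other + Metres(...)" when other.__add__ returned
        # NotImplemented, e.g. for int + Metres
        return self.__add__(other)

    def __repr__(self):
        return f"Metres({self.value})"

print(Metres(2) + 3)     # Metres(5)  via __add__
print(3 + Metres(2))     # Metres(5)  via __radd__
# Metres(1) + "x"        # TypeError: both sides return NotImplemented
```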
In ' - 'certain\n' - ' situations, augmented assignment can result in ' - 'unexpected errors\n' - ' (see Why does a_tuple[i] += [‘item’] raise an exception ' - 'when the\n' - ' addition works?), but this behavior is in fact part of ' - 'the data\n' - ' model.\n' - '\n' - 'object.__neg__(self)\n' - 'object.__pos__(self)\n' - 'object.__abs__(self)\n' - 'object.__invert__(self)\n' - '\n' - ' Called to implement the unary arithmetic operations ' - '("-", "+",\n' - ' "abs()" and "~").\n' - '\n' - 'object.__complex__(self)\n' - 'object.__int__(self)\n' - 'object.__float__(self)\n' - '\n' - ' Called to implement the built-in functions "complex()", ' - '"int()" and\n' - ' "float()". Should return a value of the appropriate ' - 'type.\n' - '\n' - 'object.__index__(self)\n' - '\n' - ' Called to implement "operator.index()", and whenever ' - 'Python needs\n' - ' to losslessly convert the numeric object to an integer ' - 'object (such\n' - ' as in slicing, or in the built-in "bin()", "hex()" and ' - '"oct()"\n' - ' functions). Presence of this method indicates that the ' - 'numeric\n' - ' object is an integer type. Must return an integer.\n' - '\n' - ' If "__int__()", "__float__()" and "__complex__()" are ' - 'not defined\n' - ' then corresponding built-in functions "int()", "float()" ' - 'and\n' - ' "complex()" fall back to "__index__()".\n' - '\n' - 'object.__round__(self[, ndigits])\n' - 'object.__trunc__(self)\n' - 'object.__floor__(self)\n' - 'object.__ceil__(self)\n' - '\n' - ' Called to implement the built-in function "round()" and ' - '"math"\n' - ' functions "trunc()", "floor()" and "ceil()". Unless ' - '*ndigits* is\n' - ' passed to "__round__()" all these methods should return ' - 'the value\n' - ' of the object truncated to an "Integral" (typically an ' - '"int").\n' - '\n' - ' If "__int__()" is not defined then the built-in function ' - '"int()"\n' - ' falls back to "__trunc__()".\n', - 'objects': 'Objects, values and types\n' - '*************************\n' - '\n' - '*Objects* are Python’s abstraction for data. All data in a ' - 'Python\n' - 'program is represented by objects or by relations between ' - 'objects. (In\n' - 'a sense, and in conformance to Von Neumann’s model of a “stored\n' - 'program computer”, code is also represented by objects.)\n' - '\n' - 'Every object has an identity, a type and a value. An object’s\n' - '*identity* never changes once it has been created; you may think ' - 'of it\n' - 'as the object’s address in memory. The ‘"is"’ operator compares ' - 'the\n' - 'identity of two objects; the "id()" function returns an integer\n' - 'representing its identity.\n' - '\n' - '**CPython implementation detail:** For CPython, "id(x)" is the ' - 'memory\n' - 'address where "x" is stored.\n' - '\n' - 'An object’s type determines the operations that the object ' - 'supports\n' - '(e.g., “does it have a length?”) and also defines the possible ' - 'values\n' - 'for objects of that type. The "type()" function returns an ' - 'object’s\n' - 'type (which is an object itself). Like its identity, an ' - 'object’s\n' - '*type* is also unchangeable. [1]\n' - '\n' - 'The *value* of some objects can change. Objects whose value can\n' - 'change are said to be *mutable*; objects whose value is ' - 'unchangeable\n' - 'once they are created are called *immutable*. 
(The value of an\n' - 'immutable container object that contains a reference to a ' - 'mutable\n' - 'object can change when the latter’s value is changed; however ' - 'the\n' - 'container is still considered immutable, because the collection ' - 'of\n' - 'objects it contains cannot be changed. So, immutability is not\n' - 'strictly the same as having an unchangeable value, it is more ' - 'subtle.)\n' - 'An object’s mutability is determined by its type; for instance,\n' - 'numbers, strings and tuples are immutable, while dictionaries ' - 'and\n' - 'lists are mutable.\n' - '\n' - 'Objects are never explicitly destroyed; however, when they ' - 'become\n' - 'unreachable they may be garbage-collected. An implementation is\n' - 'allowed to postpone garbage collection or omit it altogether — it ' - 'is a\n' - 'matter of implementation quality how garbage collection is\n' - 'implemented, as long as no objects are collected that are still\n' - 'reachable.\n' - '\n' - '**CPython implementation detail:** CPython currently uses a ' - 'reference-\n' - 'counting scheme with (optional) delayed detection of cyclically ' - 'linked\n' - 'garbage, which collects most objects as soon as they become\n' - 'unreachable, but is not guaranteed to collect garbage containing\n' - 'circular references. See the documentation of the "gc" module ' - 'for\n' - 'information on controlling the collection of cyclic garbage. ' - 'Other\n' - 'implementations act differently and CPython may change. Do not ' - 'depend\n' - 'on immediate finalization of objects when they become unreachable ' - '(so\n' - 'you should always close files explicitly).\n' - '\n' - 'Note that the use of the implementation’s tracing or debugging\n' - 'facilities may keep objects alive that would normally be ' - 'collectable.\n' - 'Also note that catching an exception with a ‘"try"…"except"’ ' - 'statement\n' - 'may keep objects alive.\n' - '\n' - 'Some objects contain references to “external” resources such as ' - 'open\n' - 'files or windows. It is understood that these resources are ' - 'freed\n' - 'when the object is garbage-collected, but since garbage ' - 'collection is\n' - 'not guaranteed to happen, such objects also provide an explicit ' - 'way to\n' - 'release the external resource, usually a "close()" method. ' - 'Programs\n' - 'are strongly recommended to explicitly close such objects. The\n' - '‘"try"…"finally"’ statement and the ‘"with"’ statement provide\n' - 'convenient ways to do this.\n' - '\n' - 'Some objects contain references to other objects; these are ' - 'called\n' - '*containers*. Examples of containers are tuples, lists and\n' - 'dictionaries. The references are part of a container’s value. ' - 'In\n' - 'most cases, when we talk about the value of a container, we imply ' - 'the\n' - 'values, not the identities of the contained objects; however, ' - 'when we\n' - 'talk about the mutability of a container, only the identities of ' - 'the\n' - 'immediately contained objects are implied. So, if an immutable\n' - 'container (like a tuple) contains a reference to a mutable ' - 'object, its\n' - 'value changes if that mutable object is changed.\n' - '\n' - 'Types affect almost all aspects of object behavior. Even the\n' - 'importance of object identity is affected in some sense: for ' - 'immutable\n' - 'types, operations that compute new values may actually return a\n' - 'reference to any existing object with the same type and value, ' - 'while\n' - 'for mutable objects this is not allowed. 
E.g., after "a = 1; b = ' - '1",\n' - '"a" and "b" may or may not refer to the same object with the ' - 'value\n' - 'one, depending on the implementation, but after "c = []; d = []", ' - '"c"\n' - 'and "d" are guaranteed to refer to two different, unique, newly\n' - 'created empty lists. (Note that "c = d = []" assigns the same ' - 'object\n' - 'to both "c" and "d".)\n', - 'operator-summary': 'Operator precedence\n' - '*******************\n' - '\n' - 'The following table summarizes the operator precedence ' - 'in Python, from\n' - 'highest precedence (most binding) to lowest precedence ' - '(least\n' - 'binding). Operators in the same box have the same ' - 'precedence. Unless\n' - 'the syntax is explicitly given, operators are binary. ' - 'Operators in\n' - 'the same box group left to right (except for ' - 'exponentiation, which\n' - 'groups from right to left).\n' - '\n' - 'Note that comparisons, membership tests, and identity ' - 'tests, all have\n' - 'the same precedence and have a left-to-right chaining ' - 'feature as\n' - 'described in the Comparisons section.\n' - '\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| Operator | ' - 'Description |\n' - '|=================================================|=======================================|\n' - '| "(expressions...)", "[expressions...]", "{key: | ' - 'Binding or parenthesized expression, |\n' - '| value...}", "{expressions...}" | list ' - 'display, dictionary display, set |\n' - '| | ' - 'display |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "x[index]", "x[index:index]", | ' - 'Subscription, slicing, call, |\n' - '| "x(arguments...)", "x.attribute" | ' - 'attribute reference |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "await" "x" | ' - 'Await expression |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "**" | ' - 'Exponentiation [5] |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "+x", "-x", "~x" | ' - 'Positive, negative, bitwise NOT |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "*", "@", "/", "//", "%" | ' - 'Multiplication, matrix |\n' - '| | ' - 'multiplication, division, floor |\n' - '| | ' - 'division, remainder [6] |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "+", "-" | ' - 'Addition and subtraction |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "<<", ">>" | ' - 'Shifts |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "&" | ' - 'Bitwise AND |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "^" | ' - 'Bitwise XOR |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "|" | ' - 'Bitwise OR |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "in", "not in", "is", "is not", "<", "<=", ">", | ' - 'Comparisons, including membership |\n' - '| ">=", "!=", "==" | ' - 'tests and identity tests |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "not" "x" | ' - 'Boolean NOT |\n' - 
'+-------------------------------------------------+---------------------------------------+\n' - '| "and" | ' - 'Boolean AND |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "or" | ' - 'Boolean OR |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "if" – "else" | ' - 'Conditional expression |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "lambda" | ' - 'Lambda expression |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| ":=" | ' - 'Assignment expression |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] While "abs(x%y) < abs(y)" is true mathematically, ' - 'for floats it\n' - ' may not be true numerically due to roundoff. For ' - 'example, and\n' - ' assuming a platform on which a Python float is an ' - 'IEEE 754 double-\n' - ' precision number, in order that "-1e-100 % 1e100" ' - 'have the same\n' - ' sign as "1e100", the computed result is "-1e-100 + ' - '1e100", which\n' - ' is numerically exactly equal to "1e100". The ' - 'function\n' - ' "math.fmod()" returns a result whose sign matches ' - 'the sign of the\n' - ' first argument instead, and so returns "-1e-100" in ' - 'this case.\n' - ' Which approach is more appropriate depends on the ' - 'application.\n' - '\n' - '[2] If x is very close to an exact integer multiple of ' - 'y, it’s\n' - ' possible for "x//y" to be one larger than ' - '"(x-x%y)//y" due to\n' - ' rounding. In such cases, Python returns the latter ' - 'result, in\n' - ' order to preserve that "divmod(x,y)[0] * y + x % y" ' - 'be very close\n' - ' to "x".\n' - '\n' - '[3] The Unicode standard distinguishes between *code ' - 'points* (e.g.\n' - ' U+0041) and *abstract characters* (e.g. “LATIN ' - 'CAPITAL LETTER A”).\n' - ' While most abstract characters in Unicode are only ' - 'represented\n' - ' using one code point, there is a number of abstract ' - 'characters\n' - ' that can in addition be represented using a sequence ' - 'of more than\n' - ' one code point. For example, the abstract character ' - '“LATIN\n' - ' CAPITAL LETTER C WITH CEDILLA” can be represented as ' - 'a single\n' - ' *precomposed character* at code position U+00C7, or ' - 'as a sequence\n' - ' of a *base character* at code position U+0043 (LATIN ' - 'CAPITAL\n' - ' LETTER C), followed by a *combining character* at ' - 'code position\n' - ' U+0327 (COMBINING CEDILLA).\n' - '\n' - ' The comparison operators on strings compare at the ' - 'level of\n' - ' Unicode code points. This may be counter-intuitive ' - 'to humans. For\n' - ' example, ""\\u00C7" == "\\u0043\\u0327"" is "False", ' - 'even though both\n' - ' strings represent the same abstract character “LATIN ' - 'CAPITAL\n' - ' LETTER C WITH CEDILLA”.\n' - '\n' - ' To compare strings at the level of abstract ' - 'characters (that is,\n' - ' in a way intuitive to humans), use ' - '"unicodedata.normalize()".\n' - '\n' - '[4] Due to automatic garbage-collection, free lists, and ' - 'the dynamic\n' - ' nature of descriptors, you may notice seemingly ' - 'unusual behaviour\n' - ' in certain uses of the "is" operator, like those ' - 'involving\n' - ' comparisons between instance methods, or constants. 
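
Two of the footnotes above are easy to check interactively; the snippet below only demonstrates those documented behaviours (sign of `%` versus `math.fmod()`, and code-point versus normalized string comparison):

```python
import math
import unicodedata

# Footnote [1]: "%" takes the sign of the right operand,
# math.fmod() the sign of the left operand.
print(-1e-100 % 1e100)            # 1e+100  (same sign as the divisor)
print(math.fmod(-1e-100, 1e100))  # -1e-100 (same sign as the dividend)

# Footnote [3]: string comparison works on code points, not abstract
# characters; normalize first to compare at the "human-visible" level.
precomposed = "\u00C7"        # LATIN CAPITAL LETTER C WITH CEDILLA
combining = "\u0043\u0327"    # C followed by COMBINING CEDILLA
print(precomposed == combining)                                # False
print(unicodedata.normalize("NFC", combining) == precomposed)  # True
```
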
' - 'Check their\n' - ' documentation for more info.\n' - '\n' - '[5] The power operator "**" binds less tightly than an ' - 'arithmetic or\n' - ' bitwise unary operator on its right, that is, ' - '"2**-1" is "0.5".\n' - '\n' - '[6] The "%" operator is also used for string formatting; ' - 'the same\n' - ' precedence applies.\n', - 'pass': 'The "pass" statement\n' - '********************\n' - '\n' - ' pass_stmt ::= "pass"\n' - '\n' - '"pass" is a null operation — when it is executed, nothing happens. ' - 'It\n' - 'is useful as a placeholder when a statement is required ' - 'syntactically,\n' - 'but no code needs to be executed, for example:\n' - '\n' - ' def f(arg): pass # a function that does nothing (yet)\n' - '\n' - ' class C: pass # a class with no methods (yet)\n', - 'power': 'The power operator\n' - '******************\n' - '\n' - 'The power operator binds more tightly than unary operators on its\n' - 'left; it binds less tightly than unary operators on its right. ' - 'The\n' - 'syntax is:\n' - '\n' - ' power ::= (await_expr | primary) ["**" u_expr]\n' - '\n' - 'Thus, in an unparenthesized sequence of power and unary operators, ' - 'the\n' - 'operators are evaluated from right to left (this does not ' - 'constrain\n' - 'the evaluation order for the operands): "-1**2" results in "-1".\n' - '\n' - 'The power operator has the same semantics as the built-in "pow()"\n' - 'function, when called with two arguments: it yields its left ' - 'argument\n' - 'raised to the power of its right argument. The numeric arguments ' - 'are\n' - 'first converted to a common type, and the result is of that type.\n' - '\n' - 'For int operands, the result has the same type as the operands ' - 'unless\n' - 'the second argument is negative; in that case, all arguments are\n' - 'converted to float and a float result is delivered. For example,\n' - '"10**2" returns "100", but "10**-2" returns "0.01".\n' - '\n' - 'Raising "0.0" to a negative power results in a ' - '"ZeroDivisionError".\n' - 'Raising a negative number to a fractional power results in a ' - '"complex"\n' - 'number. (In earlier versions it raised a "ValueError".)\n' - '\n' - 'This operation can be customized using the special "__pow__()" ' - 'method.\n', - 'raise': 'The "raise" statement\n' - '*********************\n' - '\n' - ' raise_stmt ::= "raise" [expression ["from" expression]]\n' - '\n' - 'If no expressions are present, "raise" re-raises the last ' - 'exception\n' - 'that was active in the current scope. If no exception is active ' - 'in\n' - 'the current scope, a "RuntimeError" exception is raised indicating\n' - 'that this is an error.\n' - '\n' - 'Otherwise, "raise" evaluates the first expression as the exception\n' - 'object. It must be either a subclass or an instance of\n' - '"BaseException". If it is a class, the exception instance will be\n' - 'obtained when needed by instantiating the class with no arguments.\n' - '\n' - 'The *type* of the exception is the exception instance’s class, the\n' - '*value* is the instance itself.\n' - '\n' - 'A traceback object is normally created automatically when an ' - 'exception\n' - 'is raised and attached to it as the "__traceback__" attribute, ' - 'which\n' - 'is writable. 
You can create an exception and set your own traceback ' - 'in\n' - 'one step using the "with_traceback()" exception method (which ' - 'returns\n' - 'the same exception instance, with its traceback set to its ' - 'argument),\n' - 'like so:\n' - '\n' - ' raise Exception("foo occurred").with_traceback(tracebackobj)\n' - '\n' - 'The "from" clause is used for exception chaining: if given, the ' - 'second\n' - '*expression* must be another exception class or instance. If the\n' - 'second expression is an exception instance, it will be attached to ' - 'the\n' - 'raised exception as the "__cause__" attribute (which is writable). ' - 'If\n' - 'the expression is an exception class, the class will be ' - 'instantiated\n' - 'and the resulting exception instance will be attached to the ' - 'raised\n' - 'exception as the "__cause__" attribute. If the raised exception is ' - 'not\n' - 'handled, both exceptions will be printed:\n' - '\n' - ' >>> try:\n' - ' ... print(1 / 0)\n' - ' ... except Exception as exc:\n' - ' ... raise RuntimeError("Something bad happened") from exc\n' - ' ...\n' - ' Traceback (most recent call last):\n' - ' File "", line 2, in \n' - ' ZeroDivisionError: division by zero\n' - '\n' - ' The above exception was the direct cause of the following ' - 'exception:\n' - '\n' - ' Traceback (most recent call last):\n' - ' File "", line 4, in \n' - ' RuntimeError: Something bad happened\n' - '\n' - 'A similar mechanism works implicitly if an exception is raised ' - 'inside\n' - 'an exception handler or a "finally" clause: the previous exception ' - 'is\n' - 'then attached as the new exception’s "__context__" attribute:\n' - '\n' - ' >>> try:\n' - ' ... print(1 / 0)\n' - ' ... except:\n' - ' ... raise RuntimeError("Something bad happened")\n' - ' ...\n' - ' Traceback (most recent call last):\n' - ' File "", line 2, in \n' - ' ZeroDivisionError: division by zero\n' - '\n' - ' During handling of the above exception, another exception ' - 'occurred:\n' - '\n' - ' Traceback (most recent call last):\n' - ' File "", line 4, in \n' - ' RuntimeError: Something bad happened\n' - '\n' - 'Exception chaining can be explicitly suppressed by specifying ' - '"None"\n' - 'in the "from" clause:\n' - '\n' - ' >>> try:\n' - ' ... print(1 / 0)\n' - ' ... except:\n' - ' ... 
raise RuntimeError("Something bad happened") from None\n' - ' ...\n' - ' Traceback (most recent call last):\n' - ' File "", line 4, in \n' - ' RuntimeError: Something bad happened\n' - '\n' - 'Additional information on exceptions can be found in section\n' - 'Exceptions, and information about handling exceptions is in ' - 'section\n' - 'The try statement.\n' - '\n' - 'Changed in version 3.3: "None" is now permitted as "Y" in "raise X\n' - 'from Y".\n' - '\n' - 'New in version 3.3: The "__suppress_context__" attribute to ' - 'suppress\n' - 'automatic display of the exception context.\n', - 'return': 'The "return" statement\n' - '**********************\n' - '\n' - ' return_stmt ::= "return" [expression_list]\n' - '\n' - '"return" may only occur syntactically nested in a function ' - 'definition,\n' - 'not within a nested class definition.\n' - '\n' - 'If an expression list is present, it is evaluated, else "None" is\n' - 'substituted.\n' - '\n' - '"return" leaves the current function call with the expression list ' - '(or\n' - '"None") as return value.\n' - '\n' - 'When "return" passes control out of a "try" statement with a ' - '"finally"\n' - 'clause, that "finally" clause is executed before really leaving ' - 'the\n' - 'function.\n' - '\n' - 'In a generator function, the "return" statement indicates that ' - 'the\n' - 'generator is done and will cause "StopIteration" to be raised. ' - 'The\n' - 'returned value (if any) is used as an argument to construct\n' - '"StopIteration" and becomes the "StopIteration.value" attribute.\n' - '\n' - 'In an asynchronous generator function, an empty "return" ' - 'statement\n' - 'indicates that the asynchronous generator is done and will cause\n' - '"StopAsyncIteration" to be raised. A non-empty "return" statement ' - 'is\n' - 'a syntax error in an asynchronous generator function.\n', - 'sequence-types': 'Emulating container types\n' - '*************************\n' - '\n' - 'The following methods can be defined to implement ' - 'container objects.\n' - 'Containers usually are sequences (such as lists or tuples) ' - 'or mappings\n' - '(like dictionaries), but can represent other containers as ' - 'well. The\n' - 'first set of methods is used either to emulate a sequence ' - 'or to\n' - 'emulate a mapping; the difference is that for a sequence, ' - 'the\n' - 'allowable keys should be the integers *k* for which "0 <= ' - 'k < N" where\n' - '*N* is the length of the sequence, or slice objects, which ' - 'define a\n' - 'range of items. It is also recommended that mappings ' - 'provide the\n' - 'methods "keys()", "values()", "items()", "get()", ' - '"clear()",\n' - '"setdefault()", "pop()", "popitem()", "copy()", and ' - '"update()"\n' - 'behaving similar to those for Python’s standard dictionary ' - 'objects.\n' - 'The "collections.abc" module provides a "MutableMapping" ' - 'abstract base\n' - 'class to help create those methods from a base set of ' - '"__getitem__()",\n' - '"__setitem__()", "__delitem__()", and "keys()". Mutable ' - 'sequences\n' - 'should provide methods "append()", "count()", "index()", ' - '"extend()",\n' - '"insert()", "pop()", "remove()", "reverse()" and "sort()", ' - 'like Python\n' - 'standard list objects. Finally, sequence types should ' - 'implement\n' - 'addition (meaning concatenation) and multiplication ' - '(meaning\n' - 'repetition) by defining the methods "__add__()", ' - '"__radd__()",\n' - '"__iadd__()", "__mul__()", "__rmul__()" and "__imul__()" ' - 'described\n' - 'below; they should not define other numerical operators. 
' - 'It is\n' - 'recommended that both mappings and sequences implement ' - 'the\n' - '"__contains__()" method to allow efficient use of the "in" ' - 'operator;\n' - 'for mappings, "in" should search the mapping’s keys; for ' - 'sequences, it\n' - 'should search through the values. It is further ' - 'recommended that both\n' - 'mappings and sequences implement the "__iter__()" method ' - 'to allow\n' - 'efficient iteration through the container; for mappings, ' - '"__iter__()"\n' - 'should iterate through the object’s keys; for sequences, ' - 'it should\n' - 'iterate through the values.\n' - '\n' - 'object.__len__(self)\n' - '\n' - ' Called to implement the built-in function "len()". ' - 'Should return\n' - ' the length of the object, an integer ">=" 0. Also, an ' - 'object that\n' - ' doesn’t define a "__bool__()" method and whose ' - '"__len__()" method\n' - ' returns zero is considered to be false in a Boolean ' - 'context.\n' - '\n' - ' **CPython implementation detail:** In CPython, the ' - 'length is\n' - ' required to be at most "sys.maxsize". If the length is ' - 'larger than\n' - ' "sys.maxsize" some features (such as "len()") may ' - 'raise\n' - ' "OverflowError". To prevent raising "OverflowError" by ' - 'truth value\n' - ' testing, an object must define a "__bool__()" method.\n' - '\n' - 'object.__length_hint__(self)\n' - '\n' - ' Called to implement "operator.length_hint()". Should ' - 'return an\n' - ' estimated length for the object (which may be greater ' - 'or less than\n' - ' the actual length). The length must be an integer ">=" ' - '0. The\n' - ' return value may also be "NotImplemented", which is ' - 'treated the\n' - ' same as if the "__length_hint__" method didn’t exist at ' - 'all. This\n' - ' method is purely an optimization and is never required ' - 'for\n' - ' correctness.\n' - '\n' - ' New in version 3.4.\n' - '\n' - 'Note:\n' - '\n' - ' Slicing is done exclusively with the following three ' - 'methods. A\n' - ' call like\n' - '\n' - ' a[1:2] = b\n' - '\n' - ' is translated to\n' - '\n' - ' a[slice(1, 2, None)] = b\n' - '\n' - ' and so forth. Missing slice items are always filled in ' - 'with "None".\n' - '\n' - 'object.__getitem__(self, key)\n' - '\n' - ' Called to implement evaluation of "self[key]". For ' - 'sequence types,\n' - ' the accepted keys should be integers and slice ' - 'objects. Note that\n' - ' the special interpretation of negative indexes (if the ' - 'class wishes\n' - ' to emulate a sequence type) is up to the ' - '"__getitem__()" method. If\n' - ' *key* is of an inappropriate type, "TypeError" may be ' - 'raised; if of\n' - ' a value outside the set of indexes for the sequence ' - '(after any\n' - ' special interpretation of negative values), ' - '"IndexError" should be\n' - ' raised. For mapping types, if *key* is missing (not in ' - 'the\n' - ' container), "KeyError" should be raised.\n' - '\n' - ' Note:\n' - '\n' - ' "for" loops expect that an "IndexError" will be ' - 'raised for\n' - ' illegal indexes to allow proper detection of the end ' - 'of the\n' - ' sequence.\n' - '\n' - 'object.__setitem__(self, key, value)\n' - '\n' - ' Called to implement assignment to "self[key]". Same ' - 'note as for\n' - ' "__getitem__()". This should only be implemented for ' - 'mappings if\n' - ' the objects support changes to the values for keys, or ' - 'if new keys\n' - ' can be added, or for sequences if elements can be ' - 'replaced. 
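
`Ring` below is a hypothetical read-only sequence sketching the rules above: integer keys with explicit negative-index handling, slice objects handled separately, and `IndexError` for out-of-range values (which is also what terminates a plain `for` loop over the object):

```python
class Ring:
    """Hypothetical fixed sequence implementing the sequence protocol."""

    def __init__(self, *items):
        self._items = items

    def __len__(self):
        return len(self._items)

    def __getitem__(self, key):
        if isinstance(key, slice):
            # a[1:3] arrives here as slice(1, 3, None)
            return Ring(*self._items[key])
        if isinstance(key, int):
            if key < 0:                       # negative indexes are up to us
                key += len(self._items)
            if not 0 <= key < len(self._items):
                raise IndexError("Ring index out of range")
            return self._items[key]
        raise TypeError("indices must be integers or slices")


r = Ring("a", "b", "c", "d")
print(len(r), r[1], r[-1])     # 4 b d
print(list(r[1:3]))            # ['b', 'c'] -- slicing returns a new Ring
print([x for x in r])          # ['a', 'b', 'c', 'd'] -- iteration stops on IndexError
```
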
The\n' - ' same exceptions should be raised for improper *key* ' - 'values as for\n' - ' the "__getitem__()" method.\n' - '\n' - 'object.__delitem__(self, key)\n' - '\n' - ' Called to implement deletion of "self[key]". Same note ' - 'as for\n' - ' "__getitem__()". This should only be implemented for ' - 'mappings if\n' - ' the objects support removal of keys, or for sequences ' - 'if elements\n' - ' can be removed from the sequence. The same exceptions ' - 'should be\n' - ' raised for improper *key* values as for the ' - '"__getitem__()" method.\n' - '\n' - 'object.__missing__(self, key)\n' - '\n' - ' Called by "dict"."__getitem__()" to implement ' - '"self[key]" for dict\n' - ' subclasses when key is not in the dictionary.\n' - '\n' - 'object.__iter__(self)\n' - '\n' - ' This method is called when an iterator is required for ' - 'a container.\n' - ' This method should return a new iterator object that ' - 'can iterate\n' - ' over all the objects in the container. For mappings, ' - 'it should\n' - ' iterate over the keys of the container.\n' - '\n' - ' Iterator objects also need to implement this method; ' - 'they are\n' - ' required to return themselves. For more information on ' - 'iterator\n' - ' objects, see Iterator Types.\n' - '\n' - 'object.__reversed__(self)\n' - '\n' - ' Called (if present) by the "reversed()" built-in to ' - 'implement\n' - ' reverse iteration. It should return a new iterator ' - 'object that\n' - ' iterates over all the objects in the container in ' - 'reverse order.\n' - '\n' - ' If the "__reversed__()" method is not provided, the ' - '"reversed()"\n' - ' built-in will fall back to using the sequence protocol ' - '("__len__()"\n' - ' and "__getitem__()"). Objects that support the ' - 'sequence protocol\n' - ' should only provide "__reversed__()" if they can ' - 'provide an\n' - ' implementation that is more efficient than the one ' - 'provided by\n' - ' "reversed()".\n' - '\n' - 'The membership test operators ("in" and "not in") are ' - 'normally\n' - 'implemented as an iteration through a container. However, ' - 'container\n' - 'objects can supply the following special method with a ' - 'more efficient\n' - 'implementation, which also does not require the object be ' - 'iterable.\n' - '\n' - 'object.__contains__(self, item)\n' - '\n' - ' Called to implement membership test operators. Should ' - 'return true\n' - ' if *item* is in *self*, false otherwise. For mapping ' - 'objects, this\n' - ' should consider the keys of the mapping rather than the ' - 'values or\n' - ' the key-item pairs.\n' - '\n' - ' For objects that don’t define "__contains__()", the ' - 'membership test\n' - ' first tries iteration via "__iter__()", then the old ' - 'sequence\n' - ' iteration protocol via "__getitem__()", see this ' - 'section in the\n' - ' language reference.\n', - 'shifting': 'Shifting operations\n' - '*******************\n' - '\n' - 'The shifting operations have lower priority than the arithmetic\n' - 'operations:\n' - '\n' - ' shift_expr ::= a_expr | shift_expr ("<<" | ">>") a_expr\n' - '\n' - 'These operators accept integers as arguments. 
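
A short, illustrative `dict` subclass showing the `__missing__()` hook mentioned above; the counting behaviour is only an example:

```python
class CountingDict(dict):
    """dict subclass: dict.__getitem__ calls __missing__ for absent keys."""

    def __missing__(self, key):
        # Give absent keys a default value before the lookup returns.
        self[key] = 0
        return 0


tally = CountingDict()
tally["spam"] += 10        # "spam" is absent, so __missing__ inserts 0 first
print(tally)               # {'spam': 10}
print("eggs" in tally)     # False -- "in" does not trigger __missing__
```
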
They shift the ' - 'first\n' - 'argument to the left or right by the number of bits given by ' - 'the\n' - 'second argument.\n' - '\n' - 'This operation can be customized using the special ' - '"__lshift__()" and\n' - '"__rshift__()" methods.\n' - '\n' - 'A right shift by *n* bits is defined as floor division by ' - '"pow(2,n)".\n' - 'A left shift by *n* bits is defined as multiplication with ' - '"pow(2,n)".\n', - 'slicings': 'Slicings\n' - '********\n' - '\n' - 'A slicing selects a range of items in a sequence object (e.g., ' - 'a\n' - 'string, tuple or list). Slicings may be used as expressions or ' - 'as\n' - 'targets in assignment or "del" statements. The syntax for a ' - 'slicing:\n' - '\n' - ' slicing ::= primary "[" slice_list "]"\n' - ' slice_list ::= slice_item ("," slice_item)* [","]\n' - ' slice_item ::= expression | proper_slice\n' - ' proper_slice ::= [lower_bound] ":" [upper_bound] [ ":" ' - '[stride] ]\n' - ' lower_bound ::= expression\n' - ' upper_bound ::= expression\n' - ' stride ::= expression\n' - '\n' - 'There is ambiguity in the formal syntax here: anything that ' - 'looks like\n' - 'an expression list also looks like a slice list, so any ' - 'subscription\n' - 'can be interpreted as a slicing. Rather than further ' - 'complicating the\n' - 'syntax, this is disambiguated by defining that in this case the\n' - 'interpretation as a subscription takes priority over the\n' - 'interpretation as a slicing (this is the case if the slice list\n' - 'contains no proper slice).\n' - '\n' - 'The semantics for a slicing are as follows. The primary is ' - 'indexed\n' - '(using the same "__getitem__()" method as normal subscription) ' - 'with a\n' - 'key that is constructed from the slice list, as follows. If the ' - 'slice\n' - 'list contains at least one comma, the key is a tuple containing ' - 'the\n' - 'conversion of the slice items; otherwise, the conversion of the ' - 'lone\n' - 'slice item is the key. The conversion of a slice item that is ' - 'an\n' - 'expression is that expression. The conversion of a proper slice ' - 'is a\n' - 'slice object (see section The standard type hierarchy) whose ' - '"start",\n' - '"stop" and "step" attributes are the values of the expressions ' - 'given\n' - 'as lower bound, upper bound and stride, respectively, ' - 'substituting\n' - '"None" for missing expressions.\n', - 'specialattrs': 'Special Attributes\n' - '******************\n' - '\n' - 'The implementation adds a few special read-only attributes ' - 'to several\n' - 'object types, where they are relevant. 
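
To see how a slice list is converted into the key passed to `__getitem__()`, a throwaway class that simply echoes its key is enough (the name `Show` is arbitrary):

```python
class Show:
    def __getitem__(self, key):
        # Echo whatever key object the subscription/slicing machinery built.
        return key


s = Show()
print(s[3])          # 3                            -- plain subscription
print(s[1:10:2])     # slice(1, 10, 2)              -- a proper slice
print(s[1:, "x"])    # (slice(1, None, None), 'x')  -- a comma makes a tuple key
print(s[...])        # Ellipsis
```
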
Some of these are ' - 'not reported\n' - 'by the "dir()" built-in function.\n' - '\n' - 'object.__dict__\n' - '\n' - ' A dictionary or other mapping object used to store an ' - 'object’s\n' - ' (writable) attributes.\n' - '\n' - 'instance.__class__\n' - '\n' - ' The class to which a class instance belongs.\n' - '\n' - 'class.__bases__\n' - '\n' - ' The tuple of base classes of a class object.\n' - '\n' - 'definition.__name__\n' - '\n' - ' The name of the class, function, method, descriptor, or ' - 'generator\n' - ' instance.\n' - '\n' - 'definition.__qualname__\n' - '\n' - ' The *qualified name* of the class, function, method, ' - 'descriptor, or\n' - ' generator instance.\n' - '\n' - ' New in version 3.3.\n' - '\n' - 'class.__mro__\n' - '\n' - ' This attribute is a tuple of classes that are considered ' - 'when\n' - ' looking for base classes during method resolution.\n' - '\n' - 'class.mro()\n' - '\n' - ' This method can be overridden by a metaclass to customize ' - 'the\n' - ' method resolution order for its instances. It is called ' - 'at class\n' - ' instantiation, and its result is stored in "__mro__".\n' - '\n' - 'class.__subclasses__()\n' - '\n' - ' Each class keeps a list of weak references to its ' - 'immediate\n' - ' subclasses. This method returns a list of all those ' - 'references\n' - ' still alive. The list is in definition order. Example:\n' - '\n' - ' >>> int.__subclasses__()\n' - " []\n" - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] Additional information on these special methods may be ' - 'found in\n' - ' the Python Reference Manual (Basic customization).\n' - '\n' - '[2] As a consequence, the list "[1, 2]" is considered equal ' - 'to "[1.0,\n' - ' 2.0]", and similarly for tuples.\n' - '\n' - '[3] They must have since the parser can’t tell the type of ' - 'the\n' - ' operands.\n' - '\n' - '[4] Cased characters are those with general category ' - 'property being\n' - ' one of “Lu” (Letter, uppercase), “Ll” (Letter, ' - 'lowercase), or “Lt”\n' - ' (Letter, titlecase).\n' - '\n' - '[5] To format only a tuple you should therefore provide a ' - 'singleton\n' - ' tuple whose only element is the tuple to be formatted.\n', - 'specialnames': 'Special method names\n' - '********************\n' - '\n' - 'A class can implement certain operations that are invoked by ' - 'special\n' - 'syntax (such as arithmetic operations or subscripting and ' - 'slicing) by\n' - 'defining methods with special names. This is Python’s ' - 'approach to\n' - '*operator overloading*, allowing classes to define their own ' - 'behavior\n' - 'with respect to language operators. For instance, if a ' - 'class defines\n' - 'a method named "__getitem__()", and "x" is an instance of ' - 'this class,\n' - 'then "x[i]" is roughly equivalent to "type(x).__getitem__(x, ' - 'i)".\n' - 'Except where mentioned, attempts to execute an operation ' - 'raise an\n' - 'exception when no appropriate method is defined (typically\n' - '"AttributeError" or "TypeError").\n' - '\n' - 'Setting a special method to "None" indicates that the ' - 'corresponding\n' - 'operation is not available. For example, if a class sets ' - '"__iter__()"\n' - 'to "None", the class is not iterable, so calling "iter()" on ' - 'its\n' - 'instances will raise a "TypeError" (without falling back to\n' - '"__getitem__()"). [2]\n' - '\n' - 'When implementing a class that emulates any built-in type, ' - 'it is\n' - 'important that the emulation only be implemented to the ' - 'degree that it\n' - 'makes sense for the object being modelled. 
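
The special attributes listed a little earlier can be inspected directly; this is just a read-only tour using two throwaway classes:

```python
class Base:
    pass


class Child(Base):
    pass


print(Child.__bases__)        # tuple with Base
print(Child.__mro__)          # Child, then Base, then object
print(Base.__subclasses__())  # list containing Child
print(Child.__qualname__)     # 'Child'

obj = Child()
print(obj.__class__ is Child)   # True
obj.attr = 1
print(obj.__dict__)             # {'attr': 1} -- writable instance attributes
```
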
For example, ' - 'some\n' - 'sequences may work well with retrieval of individual ' - 'elements, but\n' - 'extracting a slice may not make sense. (One example of this ' - 'is the\n' - '"NodeList" interface in the W3C’s Document Object Model.)\n' - '\n' - '\n' - 'Basic customization\n' - '===================\n' - '\n' - 'object.__new__(cls[, ...])\n' - '\n' - ' Called to create a new instance of class *cls*. ' - '"__new__()" is a\n' - ' static method (special-cased so you need not declare it ' - 'as such)\n' - ' that takes the class of which an instance was requested ' - 'as its\n' - ' first argument. The remaining arguments are those passed ' - 'to the\n' - ' object constructor expression (the call to the class). ' - 'The return\n' - ' value of "__new__()" should be the new object instance ' - '(usually an\n' - ' instance of *cls*).\n' - '\n' - ' Typical implementations create a new instance of the ' - 'class by\n' - ' invoking the superclass’s "__new__()" method using\n' - ' "super().__new__(cls[, ...])" with appropriate arguments ' - 'and then\n' - ' modifying the newly-created instance as necessary before ' - 'returning\n' - ' it.\n' - '\n' - ' If "__new__()" is invoked during object construction and ' - 'it returns\n' - ' an instance or subclass of *cls*, then the new ' - 'instance’s\n' - ' "__init__()" method will be invoked like "__init__(self[, ' - '...])",\n' - ' where *self* is the new instance and the remaining ' - 'arguments are\n' - ' the same as were passed to the object constructor.\n' - '\n' - ' If "__new__()" does not return an instance of *cls*, then ' - 'the new\n' - ' instance’s "__init__()" method will not be invoked.\n' - '\n' - ' "__new__()" is intended mainly to allow subclasses of ' - 'immutable\n' - ' types (like int, str, or tuple) to customize instance ' - 'creation. It\n' - ' is also commonly overridden in custom metaclasses in ' - 'order to\n' - ' customize class creation.\n' - '\n' - 'object.__init__(self[, ...])\n' - '\n' - ' Called after the instance has been created (by ' - '"__new__()"), but\n' - ' before it is returned to the caller. The arguments are ' - 'those\n' - ' passed to the class constructor expression. If a base ' - 'class has an\n' - ' "__init__()" method, the derived class’s "__init__()" ' - 'method, if\n' - ' any, must explicitly call it to ensure proper ' - 'initialization of the\n' - ' base class part of the instance; for example:\n' - ' "super().__init__([args...])".\n' - '\n' - ' Because "__new__()" and "__init__()" work together in ' - 'constructing\n' - ' objects ("__new__()" to create it, and "__init__()" to ' - 'customize\n' - ' it), no non-"None" value may be returned by "__init__()"; ' - 'doing so\n' - ' will cause a "TypeError" to be raised at runtime.\n' - '\n' - 'object.__del__(self)\n' - '\n' - ' Called when the instance is about to be destroyed. This ' - 'is also\n' - ' called a finalizer or (improperly) a destructor. If a ' - 'base class\n' - ' has a "__del__()" method, the derived class’s "__del__()" ' - 'method,\n' - ' if any, must explicitly call it to ensure proper deletion ' - 'of the\n' - ' base class part of the instance.\n' - '\n' - ' It is possible (though not recommended!) for the ' - '"__del__()" method\n' - ' to postpone destruction of the instance by creating a new ' - 'reference\n' - ' to it. This is called object *resurrection*. 
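
As a sketch of the `__new__()`/`__init__()` split described earlier in this section, `Celsius` (an invented name) subclasses the immutable `float` and therefore has to customize creation in `__new__()`, since `__init__()` could not change the value afterwards:

```python
class Celsius(float):
    """Illustrative float subclass: the value is fixed in __new__."""

    def __new__(cls, degrees):
        if degrees < -273.15:
            raise ValueError("below absolute zero")
        # Create the underlying immutable float instance.
        return super().__new__(cls, degrees)

    def __repr__(self):
        return f"Celsius({float(self)})"


t = Celsius(21.5)
print(repr(t))               # Celsius(21.5)
print(t + 0.5)               # 22.0 -- arithmetic falls back to plain float
print(isinstance(t, float))  # True
```
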
It is\n' - ' implementation-dependent whether "__del__()" is called a ' - 'second\n' - ' time when a resurrected object is about to be destroyed; ' - 'the\n' - ' current *CPython* implementation only calls it once.\n' - '\n' - ' It is not guaranteed that "__del__()" methods are called ' - 'for\n' - ' objects that still exist when the interpreter exits.\n' - '\n' - ' Note:\n' - '\n' - ' "del x" doesn’t directly call "x.__del__()" — the ' - 'former\n' - ' decrements the reference count for "x" by one, and the ' - 'latter is\n' - ' only called when "x"’s reference count reaches zero.\n' - '\n' - ' **CPython implementation detail:** It is possible for a ' - 'reference\n' - ' cycle to prevent the reference count of an object from ' - 'going to\n' - ' zero. In this case, the cycle will be later detected and ' - 'deleted\n' - ' by the *cyclic garbage collector*. A common cause of ' - 'reference\n' - ' cycles is when an exception has been caught in a local ' - 'variable.\n' - ' The frame’s locals then reference the exception, which ' - 'references\n' - ' its own traceback, which references the locals of all ' - 'frames caught\n' - ' in the traceback.\n' - '\n' - ' See also: Documentation for the "gc" module.\n' - '\n' - ' Warning:\n' - '\n' - ' Due to the precarious circumstances under which ' - '"__del__()"\n' - ' methods are invoked, exceptions that occur during their ' - 'execution\n' - ' are ignored, and a warning is printed to "sys.stderr" ' - 'instead.\n' - ' In particular:\n' - '\n' - ' * "__del__()" can be invoked when arbitrary code is ' - 'being\n' - ' executed, including from any arbitrary thread. If ' - '"__del__()"\n' - ' needs to take a lock or invoke any other blocking ' - 'resource, it\n' - ' may deadlock as the resource may already be taken by ' - 'the code\n' - ' that gets interrupted to execute "__del__()".\n' - '\n' - ' * "__del__()" can be executed during interpreter ' - 'shutdown. As a\n' - ' consequence, the global variables it needs to access ' - '(including\n' - ' other modules) may already have been deleted or set ' - 'to "None".\n' - ' Python guarantees that globals whose name begins with ' - 'a single\n' - ' underscore are deleted from their module before other ' - 'globals\n' - ' are deleted; if no other references to such globals ' - 'exist, this\n' - ' may help in assuring that imported modules are still ' - 'available\n' - ' at the time when the "__del__()" method is called.\n' - '\n' - 'object.__repr__(self)\n' - '\n' - ' Called by the "repr()" built-in function to compute the ' - '“official”\n' - ' string representation of an object. If at all possible, ' - 'this\n' - ' should look like a valid Python expression that could be ' - 'used to\n' - ' recreate an object with the same value (given an ' - 'appropriate\n' - ' environment). If this is not possible, a string of the ' - 'form\n' - ' "<...some useful description...>" should be returned. The ' - 'return\n' - ' value must be a string object. If a class defines ' - '"__repr__()" but\n' - ' not "__str__()", then "__repr__()" is also used when an ' - '“informal”\n' - ' string representation of instances of that class is ' - 'required.\n' - '\n' - ' This is typically used for debugging, so it is important ' - 'that the\n' - ' representation is information-rich and unambiguous.\n' - '\n' - 'object.__str__(self)\n' - '\n' - ' Called by "str(object)" and the built-in functions ' - '"format()" and\n' - ' "print()" to compute the “informal” or nicely printable ' - 'string\n' - ' representation of an object. 
The return value must be a ' - 'string\n' - ' object.\n' - '\n' - ' This method differs from "object.__repr__()" in that ' - 'there is no\n' - ' expectation that "__str__()" return a valid Python ' - 'expression: a\n' - ' more convenient or concise representation can be used.\n' - '\n' - ' The default implementation defined by the built-in type ' - '"object"\n' - ' calls "object.__repr__()".\n' - '\n' - 'object.__bytes__(self)\n' - '\n' - ' Called by bytes to compute a byte-string representation ' - 'of an\n' - ' object. This should return a "bytes" object.\n' - '\n' - 'object.__format__(self, format_spec)\n' - '\n' - ' Called by the "format()" built-in function, and by ' - 'extension,\n' - ' evaluation of formatted string literals and the ' - '"str.format()"\n' - ' method, to produce a “formatted” string representation of ' - 'an\n' - ' object. The *format_spec* argument is a string that ' - 'contains a\n' - ' description of the formatting options desired. The ' - 'interpretation\n' - ' of the *format_spec* argument is up to the type ' - 'implementing\n' - ' "__format__()", however most classes will either ' - 'delegate\n' - ' formatting to one of the built-in types, or use a ' - 'similar\n' - ' formatting option syntax.\n' - '\n' - ' See Format Specification Mini-Language for a description ' - 'of the\n' - ' standard formatting syntax.\n' - '\n' - ' The return value must be a string object.\n' - '\n' - ' Changed in version 3.4: The __format__ method of "object" ' - 'itself\n' - ' raises a "TypeError" if passed any non-empty string.\n' - '\n' - ' Changed in version 3.7: "object.__format__(x, \'\')" is ' - 'now\n' - ' equivalent to "str(x)" rather than "format(str(x), ' - '\'\')".\n' - '\n' - 'object.__lt__(self, other)\n' - 'object.__le__(self, other)\n' - 'object.__eq__(self, other)\n' - 'object.__ne__(self, other)\n' - 'object.__gt__(self, other)\n' - 'object.__ge__(self, other)\n' - '\n' - ' These are the so-called “rich comparison” methods. The\n' - ' correspondence between operator symbols and method names ' - 'is as\n' - ' follows: "xy" calls\n' - ' "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n' - '\n' - ' A rich comparison method may return the singleton ' - '"NotImplemented"\n' - ' if it does not implement the operation for a given pair ' - 'of\n' - ' arguments. By convention, "False" and "True" are returned ' - 'for a\n' - ' successful comparison. However, these methods can return ' - 'any value,\n' - ' so if the comparison operator is used in a Boolean ' - 'context (e.g.,\n' - ' in the condition of an "if" statement), Python will call ' - '"bool()"\n' - ' on the value to determine if the result is true or ' - 'false.\n' - '\n' - ' By default, "object" implements "__eq__()" by using "is", ' - 'returning\n' - ' "NotImplemented" in the case of a false comparison: "True ' - 'if x is y\n' - ' else NotImplemented". For "__ne__()", by default it ' - 'delegates to\n' - ' "__eq__()" and inverts the result unless it is ' - '"NotImplemented".\n' - ' There are no other implied relationships among the ' - 'comparison\n' - ' operators or default implementations; for example, the ' - 'truth of\n' - ' "(x.__hash__".\n' - '\n' - ' If a class that does not override "__eq__()" wishes to ' - 'suppress\n' - ' hash support, it should include "__hash__ = None" in the ' - 'class\n' - ' definition. 
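
A minimal sketch of the `__eq__()`/`__hash__()` contract discussed above: objects that compare equal must hash equal, so a class that overrides `__eq__()` usually hashes the same fields it compares (the `Point` class here is illustrative only):

```python
class Point:
    """Illustrative value object with a consistent __eq__/__hash__ pair."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __eq__(self, other):
        if not isinstance(other, Point):
            return NotImplemented
        return (self.x, self.y) == (other.x, other.y)

    def __hash__(self):
        # Hash exactly the tuple that __eq__ compares.
        return hash((self.x, self.y))


print(Point(1, 2) == Point(1, 2))        # True
print(len({Point(1, 2), Point(1, 2)}))   # 1 -- equal objects collapse in a set
```
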
A class which defines its own "__hash__()" ' - 'that\n' - ' explicitly raises a "TypeError" would be incorrectly ' - 'identified as\n' - ' hashable by an "isinstance(obj, ' - 'collections.abc.Hashable)" call.\n' - '\n' - ' Note:\n' - '\n' - ' By default, the "__hash__()" values of str and bytes ' - 'objects are\n' - ' “salted” with an unpredictable random value. Although ' - 'they\n' - ' remain constant within an individual Python process, ' - 'they are not\n' - ' predictable between repeated invocations of Python.This ' - 'is\n' - ' intended to provide protection against a ' - 'denial-of-service caused\n' - ' by carefully-chosen inputs that exploit the worst case\n' - ' performance of a dict insertion, O(n^2) complexity. ' - 'See\n' - ' http://www.ocert.org/advisories/ocert-2011-003.html ' - 'for\n' - ' details.Changing hash values affects the iteration ' - 'order of sets.\n' - ' Python has never made guarantees about this ordering ' - '(and it\n' - ' typically varies between 32-bit and 64-bit builds).See ' - 'also\n' - ' "PYTHONHASHSEED".\n' - '\n' - ' Changed in version 3.3: Hash randomization is enabled by ' - 'default.\n' - '\n' - 'object.__bool__(self)\n' - '\n' - ' Called to implement truth value testing and the built-in ' - 'operation\n' - ' "bool()"; should return "False" or "True". When this ' - 'method is not\n' - ' defined, "__len__()" is called, if it is defined, and the ' - 'object is\n' - ' considered true if its result is nonzero. If a class ' - 'defines\n' - ' neither "__len__()" nor "__bool__()", all its instances ' - 'are\n' - ' considered true.\n' - '\n' - '\n' - 'Customizing attribute access\n' - '============================\n' - '\n' - 'The following methods can be defined to customize the ' - 'meaning of\n' - 'attribute access (use of, assignment to, or deletion of ' - '"x.name") for\n' - 'class instances.\n' - '\n' - 'object.__getattr__(self, name)\n' - '\n' - ' Called when the default attribute access fails with an\n' - ' "AttributeError" (either "__getattribute__()" raises an\n' - ' "AttributeError" because *name* is not an instance ' - 'attribute or an\n' - ' attribute in the class tree for "self"; or "__get__()" of ' - 'a *name*\n' - ' property raises "AttributeError"). This method should ' - 'either\n' - ' return the (computed) attribute value or raise an ' - '"AttributeError"\n' - ' exception.\n' - '\n' - ' Note that if the attribute is found through the normal ' - 'mechanism,\n' - ' "__getattr__()" is not called. (This is an intentional ' - 'asymmetry\n' - ' between "__getattr__()" and "__setattr__()".) This is ' - 'done both for\n' - ' efficiency reasons and because otherwise "__getattr__()" ' - 'would have\n' - ' no way to access other attributes of the instance. Note ' - 'that at\n' - ' least for instance variables, you can fake total control ' - 'by not\n' - ' inserting any values in the instance attribute dictionary ' - '(but\n' - ' instead inserting them in another object). See the\n' - ' "__getattribute__()" method below for a way to actually ' - 'get total\n' - ' control over attribute access.\n' - '\n' - 'object.__getattribute__(self, name)\n' - '\n' - ' Called unconditionally to implement attribute accesses ' - 'for\n' - ' instances of the class. If the class also defines ' - '"__getattr__()",\n' - ' the latter will not be called unless "__getattribute__()" ' - 'either\n' - ' calls it explicitly or raises an "AttributeError". This ' - 'method\n' - ' should return the (computed) attribute value or raise an\n' - ' "AttributeError" exception. 
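
The asymmetry described above (called only when normal lookup has already failed) is easy to demonstrate; `LazyConfig` is a made-up example, not an existing API:

```python
class LazyConfig:
    """Hypothetical object that computes unknown attributes on demand."""

    verbose = False      # found by normal lookup, __getattr__ never runs

    def __getattr__(self, name):
        # Reached only when ordinary attribute lookup raised AttributeError.
        if name.startswith("opt_"):
            value = name[len("opt_"):].upper()
            setattr(self, name, value)   # cache in the instance dict
            return value
        raise AttributeError(name)


cfg = LazyConfig()
print(cfg.verbose)     # False   -- class attribute, no __getattr__ call
print(cfg.opt_debug)   # 'DEBUG' -- computed by __getattr__
print(cfg.opt_debug)   # 'DEBUG' -- now found in cfg.__dict__ directly
```
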
In order to avoid infinite ' - 'recursion in\n' - ' this method, its implementation should always call the ' - 'base class\n' - ' method with the same name to access any attributes it ' - 'needs, for\n' - ' example, "object.__getattribute__(self, name)".\n' - '\n' - ' Note:\n' - '\n' - ' This method may still be bypassed when looking up ' - 'special methods\n' - ' as the result of implicit invocation via language ' - 'syntax or\n' - ' built-in functions. See Special method lookup.\n' - '\n' - ' For certain sensitive attribute accesses, raises an ' - 'auditing event\n' - ' "object.__getattr__" with arguments "obj" and "name".\n' - '\n' - 'object.__setattr__(self, name, value)\n' - '\n' - ' Called when an attribute assignment is attempted. This ' - 'is called\n' - ' instead of the normal mechanism (i.e. store the value in ' - 'the\n' - ' instance dictionary). *name* is the attribute name, ' - '*value* is the\n' - ' value to be assigned to it.\n' - '\n' - ' If "__setattr__()" wants to assign to an instance ' - 'attribute, it\n' - ' should call the base class method with the same name, for ' - 'example,\n' - ' "object.__setattr__(self, name, value)".\n' - '\n' - ' For certain sensitive attribute assignments, raises an ' - 'auditing\n' - ' event "object.__setattr__" with arguments "obj", "name", ' - '"value".\n' - '\n' - 'object.__delattr__(self, name)\n' - '\n' - ' Like "__setattr__()" but for attribute deletion instead ' - 'of\n' - ' assignment. This should only be implemented if "del ' - 'obj.name" is\n' - ' meaningful for the object.\n' - '\n' - ' For certain sensitive attribute deletions, raises an ' - 'auditing event\n' - ' "object.__delattr__" with arguments "obj" and "name".\n' - '\n' - 'object.__dir__(self)\n' - '\n' - ' Called when "dir()" is called on the object. A sequence ' - 'must be\n' - ' returned. "dir()" converts the returned sequence to a ' - 'list and\n' - ' sorts it.\n' - '\n' - '\n' - 'Customizing module attribute access\n' - '-----------------------------------\n' - '\n' - 'Special names "__getattr__" and "__dir__" can be also used ' - 'to\n' - 'customize access to module attributes. The "__getattr__" ' - 'function at\n' - 'the module level should accept one argument which is the ' - 'name of an\n' - 'attribute and return the computed value or raise an ' - '"AttributeError".\n' - 'If an attribute is not found on a module object through the ' - 'normal\n' - 'lookup, i.e. "object.__getattribute__()", then "__getattr__" ' - 'is\n' - 'searched in the module "__dict__" before raising an ' - '"AttributeError".\n' - 'If found, it is called with the attribute name and the ' - 'result is\n' - 'returned.\n' - '\n' - 'The "__dir__" function should accept no arguments, and ' - 'return a\n' - 'sequence of strings that represents the names accessible on ' - 'module. If\n' - 'present, this function overrides the standard "dir()" search ' - 'on a\n' - 'module.\n' - '\n' - 'For a more fine grained customization of the module behavior ' - '(setting\n' - 'attributes, properties, etc.), one can set the "__class__" ' - 'attribute\n' - 'of a module object to a subclass of "types.ModuleType". 
For ' - 'example:\n' - '\n' - ' import sys\n' - ' from types import ModuleType\n' - '\n' - ' class VerboseModule(ModuleType):\n' - ' def __repr__(self):\n' - " return f'Verbose {self.__name__}'\n" - '\n' - ' def __setattr__(self, attr, value):\n' - " print(f'Setting {attr}...')\n" - ' super().__setattr__(attr, value)\n' - '\n' - ' sys.modules[__name__].__class__ = VerboseModule\n' - '\n' - 'Note:\n' - '\n' - ' Defining module "__getattr__" and setting module ' - '"__class__" only\n' - ' affect lookups made using the attribute access syntax – ' - 'directly\n' - ' accessing the module globals (whether by code within the ' - 'module, or\n' - ' via a reference to the module’s globals dictionary) is ' - 'unaffected.\n' - '\n' - 'Changed in version 3.5: "__class__" module attribute is now ' - 'writable.\n' - '\n' - 'New in version 3.7: "__getattr__" and "__dir__" module ' - 'attributes.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 562** - Module __getattr__ and __dir__\n' - ' Describes the "__getattr__" and "__dir__" functions on ' - 'modules.\n' - '\n' - '\n' - 'Implementing Descriptors\n' - '------------------------\n' - '\n' - 'The following methods only apply when an instance of the ' - 'class\n' - 'containing the method (a so-called *descriptor* class) ' - 'appears in an\n' - '*owner* class (the descriptor must be in either the owner’s ' - 'class\n' - 'dictionary or in the class dictionary for one of its ' - 'parents). In the\n' - 'examples below, “the attribute” refers to the attribute ' - 'whose name is\n' - 'the key of the property in the owner class’ "__dict__".\n' - '\n' - 'object.__get__(self, instance, owner=None)\n' - '\n' - ' Called to get the attribute of the owner class (class ' - 'attribute\n' - ' access) or of an instance of that class (instance ' - 'attribute\n' - ' access). The optional *owner* argument is the owner ' - 'class, while\n' - ' *instance* is the instance that the attribute was ' - 'accessed through,\n' - ' or "None" when the attribute is accessed through the ' - '*owner*.\n' - '\n' - ' This method should return the computed attribute value or ' - 'raise an\n' - ' "AttributeError" exception.\n' - '\n' - ' **PEP 252** specifies that "__get__()" is callable with ' - 'one or two\n' - ' arguments. Python’s own built-in descriptors support ' - 'this\n' - ' specification; however, it is likely that some ' - 'third-party tools\n' - ' have descriptors that require both arguments. Python’s ' - 'own\n' - ' "__getattribute__()" implementation always passes in both ' - 'arguments\n' - ' whether they are required or not.\n' - '\n' - 'object.__set__(self, instance, value)\n' - '\n' - ' Called to set the attribute on an instance *instance* of ' - 'the owner\n' - ' class to a new value, *value*.\n' - '\n' - ' Note, adding "__set__()" or "__delete__()" changes the ' - 'kind of\n' - ' descriptor to a “data descriptor”. See Invoking ' - 'Descriptors for\n' - ' more details.\n' - '\n' - 'object.__delete__(self, instance)\n' - '\n' - ' Called to delete the attribute on an instance *instance* ' - 'of the\n' - ' owner class.\n' - '\n' - 'object.__set_name__(self, owner, name)\n' - '\n' - ' Called at the time the owning class *owner* is created. 
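
A small sketch of a data descriptor using `__get__()`, `__set__()` and `__set_name__()` as described above; `Positive` is an invented validator, not a stdlib class:

```python
class Positive:
    """Illustrative data descriptor: keeps an attribute strictly positive."""

    def __set_name__(self, owner, name):
        # Called while the owner class is being created.
        self.storage = "_" + name

    def __get__(self, instance, owner=None):
        if instance is None:          # accessed on the class itself
            return self
        return getattr(instance, self.storage)

    def __set__(self, instance, value):
        if value <= 0:
            raise ValueError(f"{self.storage[1:]} must be positive")
        setattr(instance, self.storage, value)


class Order:
    quantity = Positive()

    def __init__(self, quantity):
        self.quantity = quantity      # goes through Positive.__set__


o = Order(3)
print(o.quantity)          # 3
try:
    o.quantity = -1
except ValueError as exc:
    print(exc)             # quantity must be positive
```
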
' - 'The\n' - ' descriptor has been assigned to *name*.\n' - '\n' - ' Note:\n' - '\n' - ' "__set_name__()" is only called implicitly as part of ' - 'the "type"\n' - ' constructor, so it will need to be called explicitly ' - 'with the\n' - ' appropriate parameters when a descriptor is added to a ' - 'class\n' - ' after initial creation:\n' - '\n' - ' class A:\n' - ' pass\n' - ' descr = custom_descriptor()\n' - ' A.attr = descr\n' - " descr.__set_name__(A, 'attr')\n" - '\n' - ' See Creating the class object for more details.\n' - '\n' - ' New in version 3.6.\n' - '\n' - 'The attribute "__objclass__" is interpreted by the "inspect" ' - 'module as\n' - 'specifying the class where this object was defined (setting ' - 'this\n' - 'appropriately can assist in runtime introspection of dynamic ' - 'class\n' - 'attributes). For callables, it may indicate that an instance ' - 'of the\n' - 'given type (or a subclass) is expected or required as the ' - 'first\n' - 'positional argument (for example, CPython sets this ' - 'attribute for\n' - 'unbound methods that are implemented in C).\n' - '\n' - '\n' - 'Invoking Descriptors\n' - '--------------------\n' - '\n' - 'In general, a descriptor is an object attribute with ' - '“binding\n' - 'behavior”, one whose attribute access has been overridden by ' - 'methods\n' - 'in the descriptor protocol: "__get__()", "__set__()", and\n' - '"__delete__()". If any of those methods are defined for an ' - 'object, it\n' - 'is said to be a descriptor.\n' - '\n' - 'The default behavior for attribute access is to get, set, or ' - 'delete\n' - 'the attribute from an object’s dictionary. For instance, ' - '"a.x" has a\n' - 'lookup chain starting with "a.__dict__[\'x\']", then\n' - '"type(a).__dict__[\'x\']", and continuing through the base ' - 'classes of\n' - '"type(a)" excluding metaclasses.\n' - '\n' - 'However, if the looked-up value is an object defining one of ' - 'the\n' - 'descriptor methods, then Python may override the default ' - 'behavior and\n' - 'invoke the descriptor method instead. Where this occurs in ' - 'the\n' - 'precedence chain depends on which descriptor methods were ' - 'defined and\n' - 'how they were called.\n' - '\n' - 'The starting point for descriptor invocation is a binding, ' - '"a.x". How\n' - 'the arguments are assembled depends on "a":\n' - '\n' - 'Direct Call\n' - ' The simplest and least common call is when user code ' - 'directly\n' - ' invokes a descriptor method: "x.__get__(a)".\n' - '\n' - 'Instance Binding\n' - ' If binding to an object instance, "a.x" is transformed ' - 'into the\n' - ' call: "type(a).__dict__[\'x\'].__get__(a, type(a))".\n' - '\n' - 'Class Binding\n' - ' If binding to a class, "A.x" is transformed into the ' - 'call:\n' - ' "A.__dict__[\'x\'].__get__(None, A)".\n' - '\n' - 'Super Binding\n' - ' If "a" is an instance of "super", then the binding ' - '"super(B,\n' - ' obj).m()" searches "obj.__class__.__mro__" for the base ' - 'class "A"\n' - ' immediately preceding "B" and then invokes the descriptor ' - 'with the\n' - ' call: "A.__dict__[\'m\'].__get__(obj, obj.__class__)".\n' - '\n' - 'For instance bindings, the precedence of descriptor ' - 'invocation depends\n' - 'on which descriptor methods are defined. A descriptor can ' - 'define any\n' - 'combination of "__get__()", "__set__()" and "__delete__()". ' - 'If it\n' - 'does not define "__get__()", then accessing the attribute ' - 'will return\n' - 'the descriptor object itself unless there is a value in the ' - 'object’s\n' - 'instance dictionary. 
If the descriptor defines "__set__()" ' - 'and/or\n' - '"__delete__()", it is a data descriptor; if it defines ' - 'neither, it is\n' - 'a non-data descriptor. Normally, data descriptors define ' - 'both\n' - '"__get__()" and "__set__()", while non-data descriptors have ' - 'just the\n' - '"__get__()" method. Data descriptors with "__get__()" and ' - '"__set__()"\n' - '(and/or "__delete__()") defined always override a ' - 'redefinition in an\n' - 'instance dictionary. In contrast, non-data descriptors can ' - 'be\n' - 'overridden by instances.\n' - '\n' - 'Python methods (including "staticmethod()" and ' - '"classmethod()") are\n' - 'implemented as non-data descriptors. Accordingly, instances ' - 'can\n' - 'redefine and override methods. This allows individual ' - 'instances to\n' - 'acquire behaviors that differ from other instances of the ' - 'same class.\n' - '\n' - 'The "property()" function is implemented as a data ' - 'descriptor.\n' - 'Accordingly, instances cannot override the behavior of a ' - 'property.\n' - '\n' - '\n' - '__slots__\n' - '---------\n' - '\n' - '*__slots__* allow us to explicitly declare data members ' - '(like\n' - 'properties) and deny the creation of *__dict__* and ' - '*__weakref__*\n' - '(unless explicitly declared in *__slots__* or available in a ' - 'parent.)\n' - '\n' - 'The space saved over using *__dict__* can be significant. ' - 'Attribute\n' - 'lookup speed can be significantly improved as well.\n' - '\n' - 'object.__slots__\n' - '\n' - ' This class variable can be assigned a string, iterable, ' - 'or sequence\n' - ' of strings with variable names used by instances. ' - '*__slots__*\n' - ' reserves space for the declared variables and prevents ' - 'the\n' - ' automatic creation of *__dict__* and *__weakref__* for ' - 'each\n' - ' instance.\n' - '\n' - '\n' - 'Notes on using *__slots__*\n' - '~~~~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - '* When inheriting from a class without *__slots__*, the ' - '*__dict__* and\n' - ' *__weakref__* attribute of the instances will always be ' - 'accessible.\n' - '\n' - '* Without a *__dict__* variable, instances cannot be ' - 'assigned new\n' - ' variables not listed in the *__slots__* definition. ' - 'Attempts to\n' - ' assign to an unlisted variable name raises ' - '"AttributeError". If\n' - ' dynamic assignment of new variables is desired, then add\n' - ' "\'__dict__\'" to the sequence of strings in the ' - '*__slots__*\n' - ' declaration.\n' - '\n' - '* Without a *__weakref__* variable for each instance, ' - 'classes defining\n' - ' *__slots__* do not support weak references to its ' - 'instances. If weak\n' - ' reference support is needed, then add "\'__weakref__\'" to ' - 'the\n' - ' sequence of strings in the *__slots__* declaration.\n' - '\n' - '* *__slots__* are implemented at the class level by ' - 'creating\n' - ' descriptors (Implementing Descriptors) for each variable ' - 'name. As a\n' - ' result, class attributes cannot be used to set default ' - 'values for\n' - ' instance variables defined by *__slots__*; otherwise, the ' - 'class\n' - ' attribute would overwrite the descriptor assignment.\n' - '\n' - '* The action of a *__slots__* declaration is not limited to ' - 'the class\n' - ' where it is defined. *__slots__* declared in parents are ' - 'available\n' - ' in child classes. 
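
A short demonstration of the basic *__slots__* behaviour covered in these notes; the class and attribute names are arbitrary:

```python
class Slotted:
    __slots__ = ("x", "y")        # no per-instance __dict__ is created

    def __init__(self, x, y):
        self.x = x
        self.y = y


p = Slotted(1, 2)
print(p.x, p.y)                   # 1 2
try:
    p.z = 3                       # not listed in __slots__
except AttributeError as exc:
    print(exc)                    # object has no attribute 'z'
print(hasattr(p, "__dict__"))     # False
```
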
However, child subclasses will get a ' - '*__dict__*\n' - ' and *__weakref__* unless they also define *__slots__* ' - '(which should\n' - ' only contain names of any *additional* slots).\n' - '\n' - '* If a class defines a slot also defined in a base class, ' - 'the instance\n' - ' variable defined by the base class slot is inaccessible ' - '(except by\n' - ' retrieving its descriptor directly from the base class). ' - 'This\n' - ' renders the meaning of the program undefined. In the ' - 'future, a\n' - ' check may be added to prevent this.\n' - '\n' - '* Nonempty *__slots__* does not work for classes derived ' - 'from\n' - ' “variable-length” built-in types such as "int", "bytes" ' - 'and "tuple".\n' - '\n' - '* Any non-string iterable may be assigned to *__slots__*. ' - 'Mappings may\n' - ' also be used; however, in the future, special meaning may ' - 'be\n' - ' assigned to the values corresponding to each key.\n' - '\n' - '* *__class__* assignment works only if both classes have the ' - 'same\n' - ' *__slots__*.\n' - '\n' - '* Multiple inheritance with multiple slotted parent classes ' - 'can be\n' - ' used, but only one parent is allowed to have attributes ' - 'created by\n' - ' slots (the other bases must have empty slot layouts) - ' - 'violations\n' - ' raise "TypeError".\n' - '\n' - '* If an iterator is used for *__slots__* then a descriptor ' - 'is created\n' - ' for each of the iterator’s values. However, the ' - '*__slots__*\n' - ' attribute will be an empty iterator.\n' - '\n' - '\n' - 'Customizing class creation\n' - '==========================\n' - '\n' - 'Whenever a class inherits from another class, ' - '*__init_subclass__* is\n' - 'called on that class. This way, it is possible to write ' - 'classes which\n' - 'change the behavior of subclasses. This is closely related ' - 'to class\n' - 'decorators, but where class decorators only affect the ' - 'specific class\n' - 'they’re applied to, "__init_subclass__" solely applies to ' - 'future\n' - 'subclasses of the class defining the method.\n' - '\n' - 'classmethod object.__init_subclass__(cls)\n' - '\n' - ' This method is called whenever the containing class is ' - 'subclassed.\n' - ' *cls* is then the new subclass. If defined as a normal ' - 'instance\n' - ' method, this method is implicitly converted to a class ' - 'method.\n' - '\n' - ' Keyword arguments which are given to a new class are ' - 'passed to the\n' - ' parent’s class "__init_subclass__". For compatibility ' - 'with other\n' - ' classes using "__init_subclass__", one should take out ' - 'the needed\n' - ' keyword arguments and pass the others over to the base ' - 'class, as\n' - ' in:\n' - '\n' - ' class Philosopher:\n' - ' def __init_subclass__(cls, /, default_name, ' - '**kwargs):\n' - ' super().__init_subclass__(**kwargs)\n' - ' cls.default_name = default_name\n' - '\n' - ' class AustralianPhilosopher(Philosopher, ' - 'default_name="Bruce"):\n' - ' pass\n' - '\n' - ' The default implementation "object.__init_subclass__" ' - 'does nothing,\n' - ' but raises an error if it is called with any arguments.\n' - '\n' - ' Note:\n' - '\n' - ' The metaclass hint "metaclass" is consumed by the rest ' - 'of the\n' - ' type machinery, and is never passed to ' - '"__init_subclass__"\n' - ' implementations. The actual metaclass (rather than the ' - 'explicit\n' - ' hint) can be accessed as "type(cls)".\n' - '\n' - ' New in version 3.6.\n' - '\n' - '\n' - 'Metaclasses\n' - '-----------\n' - '\n' - 'By default, classes are constructed using "type()". 
The ' - 'class body is\n' - 'executed in a new namespace and the class name is bound ' - 'locally to the\n' - 'result of "type(name, bases, namespace)".\n' - '\n' - 'The class creation process can be customized by passing the\n' - '"metaclass" keyword argument in the class definition line, ' - 'or by\n' - 'inheriting from an existing class that included such an ' - 'argument. In\n' - 'the following example, both "MyClass" and "MySubclass" are ' - 'instances\n' - 'of "Meta":\n' - '\n' - ' class Meta(type):\n' - ' pass\n' - '\n' - ' class MyClass(metaclass=Meta):\n' - ' pass\n' - '\n' - ' class MySubclass(MyClass):\n' - ' pass\n' - '\n' - 'Any other keyword arguments that are specified in the class ' - 'definition\n' - 'are passed through to all metaclass operations described ' - 'below.\n' - '\n' - 'When a class definition is executed, the following steps ' - 'occur:\n' - '\n' - '* MRO entries are resolved;\n' - '\n' - '* the appropriate metaclass is determined;\n' - '\n' - '* the class namespace is prepared;\n' - '\n' - '* the class body is executed;\n' - '\n' - '* the class object is created.\n' - '\n' - '\n' - 'Resolving MRO entries\n' - '---------------------\n' - '\n' - 'If a base that appears in class definition is not an ' - 'instance of\n' - '"type", then an "__mro_entries__" method is searched on it. ' - 'If found,\n' - 'it is called with the original bases tuple. This method must ' - 'return a\n' - 'tuple of classes that will be used instead of this base. The ' - 'tuple may\n' - 'be empty, in such case the original base is ignored.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 560** - Core support for typing module and generic ' - 'types\n' - '\n' - '\n' - 'Determining the appropriate metaclass\n' - '-------------------------------------\n' - '\n' - 'The appropriate metaclass for a class definition is ' - 'determined as\n' - 'follows:\n' - '\n' - '* if no bases and no explicit metaclass are given, then ' - '"type()" is\n' - ' used;\n' - '\n' - '* if an explicit metaclass is given and it is *not* an ' - 'instance of\n' - ' "type()", then it is used directly as the metaclass;\n' - '\n' - '* if an instance of "type()" is given as the explicit ' - 'metaclass, or\n' - ' bases are defined, then the most derived metaclass is ' - 'used.\n' - '\n' - 'The most derived metaclass is selected from the explicitly ' - 'specified\n' - 'metaclass (if any) and the metaclasses (i.e. "type(cls)") of ' - 'all\n' - 'specified base classes. The most derived metaclass is one ' - 'which is a\n' - 'subtype of *all* of these candidate metaclasses. If none of ' - 'the\n' - 'candidate metaclasses meets that criterion, then the class ' - 'definition\n' - 'will fail with "TypeError".\n' - '\n' - '\n' - 'Preparing the class namespace\n' - '-----------------------------\n' - '\n' - 'Once the appropriate metaclass has been identified, then the ' - 'class\n' - 'namespace is prepared. If the metaclass has a "__prepare__" ' - 'attribute,\n' - 'it is called as "namespace = metaclass.__prepare__(name, ' - 'bases,\n' - '**kwds)" (where the additional keyword arguments, if any, ' - 'come from\n' - 'the class definition). The "__prepare__" method should be ' - 'implemented\n' - 'as a "classmethod()". 
The namespace returned by ' - '"__prepare__" is\n' - 'passed in to "__new__", but when the final class object is ' - 'created the\n' - 'namespace is copied into a new "dict".\n' - '\n' - 'If the metaclass has no "__prepare__" attribute, then the ' - 'class\n' - 'namespace is initialised as an empty ordered mapping.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3115** - Metaclasses in Python 3000\n' - ' Introduced the "__prepare__" namespace hook\n' - '\n' - '\n' - 'Executing the class body\n' - '------------------------\n' - '\n' - 'The class body is executed (approximately) as "exec(body, ' - 'globals(),\n' - 'namespace)". The key difference from a normal call to ' - '"exec()" is that\n' - 'lexical scoping allows the class body (including any ' - 'methods) to\n' - 'reference names from the current and outer scopes when the ' - 'class\n' - 'definition occurs inside a function.\n' - '\n' - 'However, even when the class definition occurs inside the ' - 'function,\n' - 'methods defined inside the class still cannot see names ' - 'defined at the\n' - 'class scope. Class variables must be accessed through the ' - 'first\n' - 'parameter of instance or class methods, or through the ' - 'implicit\n' - 'lexically scoped "__class__" reference described in the next ' - 'section.\n' - '\n' - '\n' - 'Creating the class object\n' - '-------------------------\n' - '\n' - 'Once the class namespace has been populated by executing the ' - 'class\n' - 'body, the class object is created by calling ' - '"metaclass(name, bases,\n' - 'namespace, **kwds)" (the additional keywords passed here are ' - 'the same\n' - 'as those passed to "__prepare__").\n' - '\n' - 'This class object is the one that will be referenced by the ' - 'zero-\n' - 'argument form of "super()". "__class__" is an implicit ' - 'closure\n' - 'reference created by the compiler if any methods in a class ' - 'body refer\n' - 'to either "__class__" or "super". This allows the zero ' - 'argument form\n' - 'of "super()" to correctly identify the class being defined ' - 'based on\n' - 'lexical scoping, while the class or instance that was used ' - 'to make the\n' - 'current call is identified based on the first argument ' - 'passed to the\n' - 'method.\n' - '\n' - '**CPython implementation detail:** In CPython 3.6 and later, ' - 'the\n' - '"__class__" cell is passed to the metaclass as a ' - '"__classcell__" entry\n' - 'in the class namespace. If present, this must be propagated ' - 'up to the\n' - '"type.__new__" call in order for the class to be ' - 'initialised\n' - 'correctly. 
Failing to do so will result in a "RuntimeError" ' - 'in Python\n' - '3.8.\n' - '\n' - 'When using the default metaclass "type", or any metaclass ' - 'that\n' - 'ultimately calls "type.__new__", the following additional\n' - 'customisation steps are invoked after creating the class ' - 'object:\n' - '\n' - '* first, "type.__new__" collects all of the descriptors in ' - 'the class\n' - ' namespace that define a "__set_name__()" method;\n' - '\n' - '* second, all of these "__set_name__" methods are called ' - 'with the\n' - ' class being defined and the assigned name of that ' - 'particular\n' - ' descriptor;\n' - '\n' - '* finally, the "__init_subclass__()" hook is called on the ' - 'immediate\n' - ' parent of the new class in its method resolution order.\n' - '\n' - 'After the class object is created, it is passed to the ' - 'class\n' - 'decorators included in the class definition (if any) and the ' - 'resulting\n' - 'object is bound in the local namespace as the defined ' - 'class.\n' - '\n' - 'When a new class is created by "type.__new__", the object ' - 'provided as\n' - 'the namespace parameter is copied to a new ordered mapping ' - 'and the\n' - 'original object is discarded. The new copy is wrapped in a ' - 'read-only\n' - 'proxy, which becomes the "__dict__" attribute of the class ' - 'object.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3135** - New super\n' - ' Describes the implicit "__class__" closure reference\n' - '\n' - '\n' - 'Uses for metaclasses\n' - '--------------------\n' - '\n' - 'The potential uses for metaclasses are boundless. Some ideas ' - 'that have\n' - 'been explored include enum, logging, interface checking, ' - 'automatic\n' - 'delegation, automatic property creation, proxies, ' - 'frameworks, and\n' - 'automatic resource locking/synchronization.\n' - '\n' - '\n' - 'Customizing instance and subclass checks\n' - '========================================\n' - '\n' - 'The following methods are used to override the default ' - 'behavior of the\n' - '"isinstance()" and "issubclass()" built-in functions.\n' - '\n' - 'In particular, the metaclass "abc.ABCMeta" implements these ' - 'methods in\n' - 'order to allow the addition of Abstract Base Classes (ABCs) ' - 'as\n' - '“virtual base classes” to any class or type (including ' - 'built-in\n' - 'types), including other ABCs.\n' - '\n' - 'class.__instancecheck__(self, instance)\n' - '\n' - ' Return true if *instance* should be considered a (direct ' - 'or\n' - ' indirect) instance of *class*. If defined, called to ' - 'implement\n' - ' "isinstance(instance, class)".\n' - '\n' - 'class.__subclasscheck__(self, subclass)\n' - '\n' - ' Return true if *subclass* should be considered a (direct ' - 'or\n' - ' indirect) subclass of *class*. If defined, called to ' - 'implement\n' - ' "issubclass(subclass, class)".\n' - '\n' - 'Note that these methods are looked up on the type ' - '(metaclass) of a\n' - 'class. 
They cannot be defined as class methods in the ' - 'actual class.\n' - 'This is consistent with the lookup of special methods that ' - 'are called\n' - 'on instances, only in this case the instance is itself a ' - 'class.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3119** - Introducing Abstract Base Classes\n' - ' Includes the specification for customizing ' - '"isinstance()" and\n' - ' "issubclass()" behavior through "__instancecheck__()" ' - 'and\n' - ' "__subclasscheck__()", with motivation for this ' - 'functionality in\n' - ' the context of adding Abstract Base Classes (see the ' - '"abc"\n' - ' module) to the language.\n' - '\n' - '\n' - 'Emulating generic types\n' - '=======================\n' - '\n' - 'One can implement the generic class syntax as specified by ' - '**PEP 484**\n' - '(for example "List[int]") by defining a special method:\n' - '\n' - 'classmethod object.__class_getitem__(cls, key)\n' - '\n' - ' Return an object representing the specialization of a ' - 'generic class\n' - ' by type arguments found in *key*.\n' - '\n' - 'This method is looked up on the class object itself, and ' - 'when defined\n' - 'in the class body, this method is implicitly a class ' - 'method. Note,\n' - 'this mechanism is primarily reserved for use with static ' - 'type hints,\n' - 'other usage is discouraged.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 560** - Core support for typing module and generic ' - 'types\n' - '\n' - '\n' - 'Emulating callable objects\n' - '==========================\n' - '\n' - 'object.__call__(self[, args...])\n' - '\n' - ' Called when the instance is “called” as a function; if ' - 'this method\n' - ' is defined, "x(arg1, arg2, ...)" roughly translates to\n' - ' "type(x).__call__(x, arg1, ...)".\n' - '\n' - '\n' - 'Emulating container types\n' - '=========================\n' - '\n' - 'The following methods can be defined to implement container ' - 'objects.\n' - 'Containers usually are sequences (such as lists or tuples) ' - 'or mappings\n' - '(like dictionaries), but can represent other containers as ' - 'well. The\n' - 'first set of methods is used either to emulate a sequence or ' - 'to\n' - 'emulate a mapping; the difference is that for a sequence, ' - 'the\n' - 'allowable keys should be the integers *k* for which "0 <= k ' - '< N" where\n' - '*N* is the length of the sequence, or slice objects, which ' - 'define a\n' - 'range of items. It is also recommended that mappings ' - 'provide the\n' - 'methods "keys()", "values()", "items()", "get()", ' - '"clear()",\n' - '"setdefault()", "pop()", "popitem()", "copy()", and ' - '"update()"\n' - 'behaving similar to those for Python’s standard dictionary ' - 'objects.\n' - 'The "collections.abc" module provides a "MutableMapping" ' - 'abstract base\n' - 'class to help create those methods from a base set of ' - '"__getitem__()",\n' - '"__setitem__()", "__delitem__()", and "keys()". Mutable ' - 'sequences\n' - 'should provide methods "append()", "count()", "index()", ' - '"extend()",\n' - '"insert()", "pop()", "remove()", "reverse()" and "sort()", ' - 'like Python\n' - 'standard list objects. Finally, sequence types should ' - 'implement\n' - 'addition (meaning concatenation) and multiplication ' - '(meaning\n' - 'repetition) by defining the methods "__add__()", ' - '"__radd__()",\n' - '"__iadd__()", "__mul__()", "__rmul__()" and "__imul__()" ' - 'described\n' - 'below; they should not define other numerical operators. 
It ' - 'is\n' - 'recommended that both mappings and sequences implement the\n' - '"__contains__()" method to allow efficient use of the "in" ' - 'operator;\n' - 'for mappings, "in" should search the mapping’s keys; for ' - 'sequences, it\n' - 'should search through the values. It is further recommended ' - 'that both\n' - 'mappings and sequences implement the "__iter__()" method to ' - 'allow\n' - 'efficient iteration through the container; for mappings, ' - '"__iter__()"\n' - 'should iterate through the object’s keys; for sequences, it ' - 'should\n' - 'iterate through the values.\n' - '\n' - 'object.__len__(self)\n' - '\n' - ' Called to implement the built-in function "len()". ' - 'Should return\n' - ' the length of the object, an integer ">=" 0. Also, an ' - 'object that\n' - ' doesn’t define a "__bool__()" method and whose ' - '"__len__()" method\n' - ' returns zero is considered to be false in a Boolean ' - 'context.\n' - '\n' - ' **CPython implementation detail:** In CPython, the length ' - 'is\n' - ' required to be at most "sys.maxsize". If the length is ' - 'larger than\n' - ' "sys.maxsize" some features (such as "len()") may raise\n' - ' "OverflowError". To prevent raising "OverflowError" by ' - 'truth value\n' - ' testing, an object must define a "__bool__()" method.\n' - '\n' - 'object.__length_hint__(self)\n' - '\n' - ' Called to implement "operator.length_hint()". Should ' - 'return an\n' - ' estimated length for the object (which may be greater or ' - 'less than\n' - ' the actual length). The length must be an integer ">=" 0. ' - 'The\n' - ' return value may also be "NotImplemented", which is ' - 'treated the\n' - ' same as if the "__length_hint__" method didn’t exist at ' - 'all. This\n' - ' method is purely an optimization and is never required ' - 'for\n' - ' correctness.\n' - '\n' - ' New in version 3.4.\n' - '\n' - 'Note:\n' - '\n' - ' Slicing is done exclusively with the following three ' - 'methods. A\n' - ' call like\n' - '\n' - ' a[1:2] = b\n' - '\n' - ' is translated to\n' - '\n' - ' a[slice(1, 2, None)] = b\n' - '\n' - ' and so forth. Missing slice items are always filled in ' - 'with "None".\n' - '\n' - 'object.__getitem__(self, key)\n' - '\n' - ' Called to implement evaluation of "self[key]". For ' - 'sequence types,\n' - ' the accepted keys should be integers and slice objects. ' - 'Note that\n' - ' the special interpretation of negative indexes (if the ' - 'class wishes\n' - ' to emulate a sequence type) is up to the "__getitem__()" ' - 'method. If\n' - ' *key* is of an inappropriate type, "TypeError" may be ' - 'raised; if of\n' - ' a value outside the set of indexes for the sequence ' - '(after any\n' - ' special interpretation of negative values), "IndexError" ' - 'should be\n' - ' raised. For mapping types, if *key* is missing (not in ' - 'the\n' - ' container), "KeyError" should be raised.\n' - '\n' - ' Note:\n' - '\n' - ' "for" loops expect that an "IndexError" will be raised ' - 'for\n' - ' illegal indexes to allow proper detection of the end of ' - 'the\n' - ' sequence.\n' - '\n' - 'object.__setitem__(self, key, value)\n' - '\n' - ' Called to implement assignment to "self[key]". Same note ' - 'as for\n' - ' "__getitem__()". This should only be implemented for ' - 'mappings if\n' - ' the objects support changes to the values for keys, or if ' - 'new keys\n' - ' can be added, or for sequences if elements can be ' - 'replaced. 
The\n' - ' same exceptions should be raised for improper *key* ' - 'values as for\n' - ' the "__getitem__()" method.\n' - '\n' - 'object.__delitem__(self, key)\n' - '\n' - ' Called to implement deletion of "self[key]". Same note ' - 'as for\n' - ' "__getitem__()". This should only be implemented for ' - 'mappings if\n' - ' the objects support removal of keys, or for sequences if ' - 'elements\n' - ' can be removed from the sequence. The same exceptions ' - 'should be\n' - ' raised for improper *key* values as for the ' - '"__getitem__()" method.\n' - '\n' - 'object.__missing__(self, key)\n' - '\n' - ' Called by "dict"."__getitem__()" to implement "self[key]" ' - 'for dict\n' - ' subclasses when key is not in the dictionary.\n' - '\n' - 'object.__iter__(self)\n' - '\n' - ' This method is called when an iterator is required for a ' - 'container.\n' - ' This method should return a new iterator object that can ' - 'iterate\n' - ' over all the objects in the container. For mappings, it ' - 'should\n' - ' iterate over the keys of the container.\n' - '\n' - ' Iterator objects also need to implement this method; they ' - 'are\n' - ' required to return themselves. For more information on ' - 'iterator\n' - ' objects, see Iterator Types.\n' - '\n' - 'object.__reversed__(self)\n' - '\n' - ' Called (if present) by the "reversed()" built-in to ' - 'implement\n' - ' reverse iteration. It should return a new iterator ' - 'object that\n' - ' iterates over all the objects in the container in reverse ' - 'order.\n' - '\n' - ' If the "__reversed__()" method is not provided, the ' - '"reversed()"\n' - ' built-in will fall back to using the sequence protocol ' - '("__len__()"\n' - ' and "__getitem__()"). Objects that support the sequence ' - 'protocol\n' - ' should only provide "__reversed__()" if they can provide ' - 'an\n' - ' implementation that is more efficient than the one ' - 'provided by\n' - ' "reversed()".\n' - '\n' - 'The membership test operators ("in" and "not in") are ' - 'normally\n' - 'implemented as an iteration through a container. However, ' - 'container\n' - 'objects can supply the following special method with a more ' - 'efficient\n' - 'implementation, which also does not require the object be ' - 'iterable.\n' - '\n' - 'object.__contains__(self, item)\n' - '\n' - ' Called to implement membership test operators. Should ' - 'return true\n' - ' if *item* is in *self*, false otherwise. 
For mapping ' - 'objects, this\n' - ' should consider the keys of the mapping rather than the ' - 'values or\n' - ' the key-item pairs.\n' - '\n' - ' For objects that don’t define "__contains__()", the ' - 'membership test\n' - ' first tries iteration via "__iter__()", then the old ' - 'sequence\n' - ' iteration protocol via "__getitem__()", see this section ' - 'in the\n' - ' language reference.\n' - '\n' - '\n' - 'Emulating numeric types\n' - '=======================\n' - '\n' - 'The following methods can be defined to emulate numeric ' - 'objects.\n' - 'Methods corresponding to operations that are not supported ' - 'by the\n' - 'particular kind of number implemented (e.g., bitwise ' - 'operations for\n' - 'non-integral numbers) should be left undefined.\n' - '\n' - 'object.__add__(self, other)\n' - 'object.__sub__(self, other)\n' - 'object.__mul__(self, other)\n' - 'object.__matmul__(self, other)\n' - 'object.__truediv__(self, other)\n' - 'object.__floordiv__(self, other)\n' - 'object.__mod__(self, other)\n' - 'object.__divmod__(self, other)\n' - 'object.__pow__(self, other[, modulo])\n' - 'object.__lshift__(self, other)\n' - 'object.__rshift__(self, other)\n' - 'object.__and__(self, other)\n' - 'object.__xor__(self, other)\n' - 'object.__or__(self, other)\n' - '\n' - ' These methods are called to implement the binary ' - 'arithmetic\n' - ' operations ("+", "-", "*", "@", "/", "//", "%", ' - '"divmod()",\n' - ' "pow()", "**", "<<", ">>", "&", "^", "|"). For instance, ' - 'to\n' - ' evaluate the expression "x + y", where *x* is an instance ' - 'of a\n' - ' class that has an "__add__()" method, "x.__add__(y)" is ' - 'called.\n' - ' The "__divmod__()" method should be the equivalent to ' - 'using\n' - ' "__floordiv__()" and "__mod__()"; it should not be ' - 'related to\n' - ' "__truediv__()". Note that "__pow__()" should be defined ' - 'to accept\n' - ' an optional third argument if the ternary version of the ' - 'built-in\n' - ' "pow()" function is to be supported.\n' - '\n' - ' If one of those methods does not support the operation ' - 'with the\n' - ' supplied arguments, it should return "NotImplemented".\n' - '\n' - 'object.__radd__(self, other)\n' - 'object.__rsub__(self, other)\n' - 'object.__rmul__(self, other)\n' - 'object.__rmatmul__(self, other)\n' - 'object.__rtruediv__(self, other)\n' - 'object.__rfloordiv__(self, other)\n' - 'object.__rmod__(self, other)\n' - 'object.__rdivmod__(self, other)\n' - 'object.__rpow__(self, other[, modulo])\n' - 'object.__rlshift__(self, other)\n' - 'object.__rrshift__(self, other)\n' - 'object.__rand__(self, other)\n' - 'object.__rxor__(self, other)\n' - 'object.__ror__(self, other)\n' - '\n' - ' These methods are called to implement the binary ' - 'arithmetic\n' - ' operations ("+", "-", "*", "@", "/", "//", "%", ' - '"divmod()",\n' - ' "pow()", "**", "<<", ">>", "&", "^", "|") with reflected ' - '(swapped)\n' - ' operands. These functions are only called if the left ' - 'operand does\n' - ' not support the corresponding operation [3] and the ' - 'operands are of\n' - ' different types. 
[4] For instance, to evaluate the ' - 'expression "x -\n' - ' y", where *y* is an instance of a class that has an ' - '"__rsub__()"\n' - ' method, "y.__rsub__(x)" is called if "x.__sub__(y)" ' - 'returns\n' - ' *NotImplemented*.\n' - '\n' - ' Note that ternary "pow()" will not try calling ' - '"__rpow__()" (the\n' - ' coercion rules would become too complicated).\n' - '\n' - ' Note:\n' - '\n' - ' If the right operand’s type is a subclass of the left ' - 'operand’s\n' - ' type and that subclass provides a different ' - 'implementation of the\n' - ' reflected method for the operation, this method will be ' - 'called\n' - ' before the left operand’s non-reflected method. This ' - 'behavior\n' - ' allows subclasses to override their ancestors’ ' - 'operations.\n' - '\n' - 'object.__iadd__(self, other)\n' - 'object.__isub__(self, other)\n' - 'object.__imul__(self, other)\n' - 'object.__imatmul__(self, other)\n' - 'object.__itruediv__(self, other)\n' - 'object.__ifloordiv__(self, other)\n' - 'object.__imod__(self, other)\n' - 'object.__ipow__(self, other[, modulo])\n' - 'object.__ilshift__(self, other)\n' - 'object.__irshift__(self, other)\n' - 'object.__iand__(self, other)\n' - 'object.__ixor__(self, other)\n' - 'object.__ior__(self, other)\n' - '\n' - ' These methods are called to implement the augmented ' - 'arithmetic\n' - ' assignments ("+=", "-=", "*=", "@=", "/=", "//=", "%=", ' - '"**=",\n' - ' "<<=", ">>=", "&=", "^=", "|="). These methods should ' - 'attempt to\n' - ' do the operation in-place (modifying *self*) and return ' - 'the result\n' - ' (which could be, but does not have to be, *self*). If a ' - 'specific\n' - ' method is not defined, the augmented assignment falls ' - 'back to the\n' - ' normal methods. For instance, if *x* is an instance of a ' - 'class\n' - ' with an "__iadd__()" method, "x += y" is equivalent to "x ' - '=\n' - ' x.__iadd__(y)" . Otherwise, "x.__add__(y)" and ' - '"y.__radd__(x)" are\n' - ' considered, as with the evaluation of "x + y". In ' - 'certain\n' - ' situations, augmented assignment can result in unexpected ' - 'errors\n' - ' (see Why does a_tuple[i] += [‘item’] raise an exception ' - 'when the\n' - ' addition works?), but this behavior is in fact part of ' - 'the data\n' - ' model.\n' - '\n' - 'object.__neg__(self)\n' - 'object.__pos__(self)\n' - 'object.__abs__(self)\n' - 'object.__invert__(self)\n' - '\n' - ' Called to implement the unary arithmetic operations ("-", ' - '"+",\n' - ' "abs()" and "~").\n' - '\n' - 'object.__complex__(self)\n' - 'object.__int__(self)\n' - 'object.__float__(self)\n' - '\n' - ' Called to implement the built-in functions "complex()", ' - '"int()" and\n' - ' "float()". Should return a value of the appropriate ' - 'type.\n' - '\n' - 'object.__index__(self)\n' - '\n' - ' Called to implement "operator.index()", and whenever ' - 'Python needs\n' - ' to losslessly convert the numeric object to an integer ' - 'object (such\n' - ' as in slicing, or in the built-in "bin()", "hex()" and ' - '"oct()"\n' - ' functions). Presence of this method indicates that the ' - 'numeric\n' - ' object is an integer type. 
Must return an integer.\n' - '\n' - ' If "__int__()", "__float__()" and "__complex__()" are not ' - 'defined\n' - ' then corresponding built-in functions "int()", "float()" ' - 'and\n' - ' "complex()" fall back to "__index__()".\n' - '\n' - 'object.__round__(self[, ndigits])\n' - 'object.__trunc__(self)\n' - 'object.__floor__(self)\n' - 'object.__ceil__(self)\n' - '\n' - ' Called to implement the built-in function "round()" and ' - '"math"\n' - ' functions "trunc()", "floor()" and "ceil()". Unless ' - '*ndigits* is\n' - ' passed to "__round__()" all these methods should return ' - 'the value\n' - ' of the object truncated to an "Integral" (typically an ' - '"int").\n' - '\n' - ' If "__int__()" is not defined then the built-in function ' - '"int()"\n' - ' falls back to "__trunc__()".\n' - '\n' - '\n' - 'With Statement Context Managers\n' - '===============================\n' - '\n' - 'A *context manager* is an object that defines the runtime ' - 'context to\n' - 'be established when executing a "with" statement. The ' - 'context manager\n' - 'handles the entry into, and the exit from, the desired ' - 'runtime context\n' - 'for the execution of the block of code. Context managers ' - 'are normally\n' - 'invoked using the "with" statement (described in section The ' - 'with\n' - 'statement), but can also be used by directly invoking their ' - 'methods.\n' - '\n' - 'Typical uses of context managers include saving and ' - 'restoring various\n' - 'kinds of global state, locking and unlocking resources, ' - 'closing opened\n' - 'files, etc.\n' - '\n' - 'For more information on context managers, see Context ' - 'Manager Types.\n' - '\n' - 'object.__enter__(self)\n' - '\n' - ' Enter the runtime context related to this object. The ' - '"with"\n' - ' statement will bind this method’s return value to the ' - 'target(s)\n' - ' specified in the "as" clause of the statement, if any.\n' - '\n' - 'object.__exit__(self, exc_type, exc_value, traceback)\n' - '\n' - ' Exit the runtime context related to this object. The ' - 'parameters\n' - ' describe the exception that caused the context to be ' - 'exited. If the\n' - ' context was exited without an exception, all three ' - 'arguments will\n' - ' be "None".\n' - '\n' - ' If an exception is supplied, and the method wishes to ' - 'suppress the\n' - ' exception (i.e., prevent it from being propagated), it ' - 'should\n' - ' return a true value. Otherwise, the exception will be ' - 'processed\n' - ' normally upon exit from this method.\n' - '\n' - ' Note that "__exit__()" methods should not reraise the ' - 'passed-in\n' - ' exception; this is the caller’s responsibility.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 343** - The “with” statement\n' - ' The specification, background, and examples for the ' - 'Python "with"\n' - ' statement.\n' - '\n' - '\n' - 'Customizing positional arguments in class pattern matching\n' - '==========================================================\n' - '\n' - 'When using a class name in a pattern, positional arguments ' - 'in the\n' - 'pattern are not allowed by default, i.e. "case MyClass(x, ' - 'y)" is\n' - 'typically invalid without special support in "MyClass". To ' - 'be able to\n' - 'use that kind of patterns, the class needs to define a\n' - '*__match_args__* attribute.\n' - '\n' - 'object.__match_args__\n' - '\n' - ' This class variable can be assigned a tuple of strings. 
' - 'When this\n' - ' class is used in a class pattern with positional ' - 'arguments, each\n' - ' positional argument will be converted into a keyword ' - 'argument,\n' - ' using the corresponding value in *__match_args__* as the ' - 'keyword.\n' - ' The absence of this attribute is equivalent to setting it ' - 'to "()".\n' - '\n' - 'For example, if "MyClass.__match_args__" is "("left", ' - '"center",\n' - '"right")" that means that "case MyClass(x, y)" is equivalent ' - 'to "case\n' - 'MyClass(left=x, center=y)". Note that the number of ' - 'arguments in the\n' - 'pattern must be smaller than or equal to the number of ' - 'elements in\n' - '*__match_args__*; if it is larger, the pattern match attempt ' - 'will\n' - 'raise a "TypeError".\n' - '\n' - 'New in version 3.10.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 634** - Structural Pattern Matching\n' - ' The specification for the Python "match" statement.\n' - '\n' - '\n' - 'Special method lookup\n' - '=====================\n' - '\n' - 'For custom classes, implicit invocations of special methods ' - 'are only\n' - 'guaranteed to work correctly if defined on an object’s type, ' - 'not in\n' - 'the object’s instance dictionary. That behaviour is the ' - 'reason why\n' - 'the following code raises an exception:\n' - '\n' - ' >>> class C:\n' - ' ... pass\n' - ' ...\n' - ' >>> c = C()\n' - ' >>> c.__len__ = lambda: 5\n' - ' >>> len(c)\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - " TypeError: object of type 'C' has no len()\n" - '\n' - 'The rationale behind this behaviour lies with a number of ' - 'special\n' - 'methods such as "__hash__()" and "__repr__()" that are ' - 'implemented by\n' - 'all objects, including type objects. If the implicit lookup ' - 'of these\n' - 'methods used the conventional lookup process, they would ' - 'fail when\n' - 'invoked on the type object itself:\n' - '\n' - ' >>> 1 .__hash__() == hash(1)\n' - ' True\n' - ' >>> int.__hash__() == hash(int)\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - " TypeError: descriptor '__hash__' of 'int' object needs an " - 'argument\n' - '\n' - 'Incorrectly attempting to invoke an unbound method of a ' - 'class in this\n' - 'way is sometimes referred to as ‘metaclass confusion’, and ' - 'is avoided\n' - 'by bypassing the instance when looking up special methods:\n' - '\n' - ' >>> type(1).__hash__(1) == hash(1)\n' - ' True\n' - ' >>> type(int).__hash__(int) == hash(int)\n' - ' True\n' - '\n' - 'In addition to bypassing any instance attributes in the ' - 'interest of\n' - 'correctness, implicit special method lookup generally also ' - 'bypasses\n' - 'the "__getattribute__()" method even of the object’s ' - 'metaclass:\n' - '\n' - ' >>> class Meta(type):\n' - ' ... def __getattribute__(*args):\n' - ' ... print("Metaclass getattribute invoked")\n' - ' ... return type.__getattribute__(*args)\n' - ' ...\n' - ' >>> class C(object, metaclass=Meta):\n' - ' ... def __len__(self):\n' - ' ... return 10\n' - ' ... def __getattribute__(*args):\n' - ' ... print("Class getattribute invoked")\n' - ' ... 
return object.__getattribute__(*args)\n' - ' ...\n' - ' >>> c = C()\n' - ' >>> c.__len__() # Explicit lookup via ' - 'instance\n' - ' Class getattribute invoked\n' - ' 10\n' - ' >>> type(c).__len__(c) # Explicit lookup via ' - 'type\n' - ' Metaclass getattribute invoked\n' - ' 10\n' - ' >>> len(c) # Implicit lookup\n' - ' 10\n' - '\n' - 'Bypassing the "__getattribute__()" machinery in this fashion ' - 'provides\n' - 'significant scope for speed optimisations within the ' - 'interpreter, at\n' - 'the cost of some flexibility in the handling of special ' - 'methods (the\n' - 'special method *must* be set on the class object itself in ' - 'order to be\n' - 'consistently invoked by the interpreter).\n', - 'string-methods': 'String Methods\n' - '**************\n' - '\n' - 'Strings implement all of the common sequence operations, ' - 'along with\n' - 'the additional methods described below.\n' - '\n' - 'Strings also support two styles of string formatting, one ' - 'providing a\n' - 'large degree of flexibility and customization (see ' - '"str.format()",\n' - 'Format String Syntax and Custom String Formatting) and the ' - 'other based\n' - 'on C "printf" style formatting that handles a narrower ' - 'range of types\n' - 'and is slightly harder to use correctly, but is often ' - 'faster for the\n' - 'cases it can handle (printf-style String Formatting).\n' - '\n' - 'The Text Processing Services section of the standard ' - 'library covers a\n' - 'number of other modules that provide various text related ' - 'utilities\n' - '(including regular expression support in the "re" ' - 'module).\n' - '\n' - 'str.capitalize()\n' - '\n' - ' Return a copy of the string with its first character ' - 'capitalized\n' - ' and the rest lowercased.\n' - '\n' - ' Changed in version 3.8: The first character is now put ' - 'into\n' - ' titlecase rather than uppercase. This means that ' - 'characters like\n' - ' digraphs will only have their first letter capitalized, ' - 'instead of\n' - ' the full character.\n' - '\n' - 'str.casefold()\n' - '\n' - ' Return a casefolded copy of the string. Casefolded ' - 'strings may be\n' - ' used for caseless matching.\n' - '\n' - ' Casefolding is similar to lowercasing but more ' - 'aggressive because\n' - ' it is intended to remove all case distinctions in a ' - 'string. For\n' - ' example, the German lowercase letter "\'ß\'" is ' - 'equivalent to ""ss"".\n' - ' Since it is already lowercase, "lower()" would do ' - 'nothing to "\'ß\'";\n' - ' "casefold()" converts it to ""ss"".\n' - '\n' - ' The casefolding algorithm is described in section 3.13 ' - 'of the\n' - ' Unicode Standard.\n' - '\n' - ' New in version 3.3.\n' - '\n' - 'str.center(width[, fillchar])\n' - '\n' - ' Return centered in a string of length *width*. Padding ' - 'is done\n' - ' using the specified *fillchar* (default is an ASCII ' - 'space). The\n' - ' original string is returned if *width* is less than or ' - 'equal to\n' - ' "len(s)".\n' - '\n' - 'str.count(sub[, start[, end]])\n' - '\n' - ' Return the number of non-overlapping occurrences of ' - 'substring *sub*\n' - ' in the range [*start*, *end*]. Optional arguments ' - '*start* and\n' - ' *end* are interpreted as in slice notation.\n' - '\n' - "str.encode(encoding='utf-8', errors='strict')\n" - '\n' - ' Return an encoded version of the string as a bytes ' - 'object. Default\n' - ' encoding is "\'utf-8\'". *errors* may be given to set a ' - 'different\n' - ' error handling scheme. 
The default for *errors* is ' - '"\'strict\'",\n' - ' meaning that encoding errors raise a "UnicodeError". ' - 'Other possible\n' - ' values are "\'ignore\'", "\'replace\'", ' - '"\'xmlcharrefreplace\'",\n' - ' "\'backslashreplace\'" and any other name registered ' - 'via\n' - ' "codecs.register_error()", see section Error Handlers. ' - 'For a list\n' - ' of possible encodings, see section Standard Encodings.\n' - '\n' - ' By default, the *errors* argument is not checked for ' - 'best\n' - ' performances, but only used at the first encoding ' - 'error. Enable the\n' - ' Python Development Mode, or use a debug build to check ' - '*errors*.\n' - '\n' - ' Changed in version 3.1: Support for keyword arguments ' - 'added.\n' - '\n' - ' Changed in version 3.9: The *errors* is now checked in ' - 'development\n' - ' mode and in debug mode.\n' - '\n' - 'str.endswith(suffix[, start[, end]])\n' - '\n' - ' Return "True" if the string ends with the specified ' - '*suffix*,\n' - ' otherwise return "False". *suffix* can also be a tuple ' - 'of suffixes\n' - ' to look for. With optional *start*, test beginning at ' - 'that\n' - ' position. With optional *end*, stop comparing at that ' - 'position.\n' - '\n' - 'str.expandtabs(tabsize=8)\n' - '\n' - ' Return a copy of the string where all tab characters ' - 'are replaced\n' - ' by one or more spaces, depending on the current column ' - 'and the\n' - ' given tab size. Tab positions occur every *tabsize* ' - 'characters\n' - ' (default is 8, giving tab positions at columns 0, 8, 16 ' - 'and so on).\n' - ' To expand the string, the current column is set to zero ' - 'and the\n' - ' string is examined character by character. If the ' - 'character is a\n' - ' tab ("\\t"), one or more space characters are inserted ' - 'in the result\n' - ' until the current column is equal to the next tab ' - 'position. (The\n' - ' tab character itself is not copied.) If the character ' - 'is a newline\n' - ' ("\\n") or return ("\\r"), it is copied and the current ' - 'column is\n' - ' reset to zero. Any other character is copied unchanged ' - 'and the\n' - ' current column is incremented by one regardless of how ' - 'the\n' - ' character is represented when printed.\n' - '\n' - " >>> '01\\t012\\t0123\\t01234'.expandtabs()\n" - " '01 012 0123 01234'\n" - " >>> '01\\t012\\t0123\\t01234'.expandtabs(4)\n" - " '01 012 0123 01234'\n" - '\n' - 'str.find(sub[, start[, end]])\n' - '\n' - ' Return the lowest index in the string where substring ' - '*sub* is\n' - ' found within the slice "s[start:end]". Optional ' - 'arguments *start*\n' - ' and *end* are interpreted as in slice notation. Return ' - '"-1" if\n' - ' *sub* is not found.\n' - '\n' - ' Note:\n' - '\n' - ' The "find()" method should be used only if you need ' - 'to know the\n' - ' position of *sub*. To check if *sub* is a substring ' - 'or not, use\n' - ' the "in" operator:\n' - '\n' - " >>> 'Py' in 'Python'\n" - ' True\n' - '\n' - 'str.format(*args, **kwargs)\n' - '\n' - ' Perform a string formatting operation. The string on ' - 'which this\n' - ' method is called can contain literal text or ' - 'replacement fields\n' - ' delimited by braces "{}". Each replacement field ' - 'contains either\n' - ' the numeric index of a positional argument, or the name ' - 'of a\n' - ' keyword argument. 
Returns a copy of the string where ' - 'each\n' - ' replacement field is replaced with the string value of ' - 'the\n' - ' corresponding argument.\n' - '\n' - ' >>> "The sum of 1 + 2 is {0}".format(1+2)\n' - " 'The sum of 1 + 2 is 3'\n" - '\n' - ' See Format String Syntax for a description of the ' - 'various\n' - ' formatting options that can be specified in format ' - 'strings.\n' - '\n' - ' Note:\n' - '\n' - ' When formatting a number ("int", "float", "complex",\n' - ' "decimal.Decimal" and subclasses) with the "n" type ' - '(ex:\n' - ' "\'{:n}\'.format(1234)"), the function temporarily ' - 'sets the\n' - ' "LC_CTYPE" locale to the "LC_NUMERIC" locale to ' - 'decode\n' - ' "decimal_point" and "thousands_sep" fields of ' - '"localeconv()" if\n' - ' they are non-ASCII or longer than 1 byte, and the ' - '"LC_NUMERIC"\n' - ' locale is different than the "LC_CTYPE" locale. This ' - 'temporary\n' - ' change affects other threads.\n' - '\n' - ' Changed in version 3.7: When formatting a number with ' - 'the "n" type,\n' - ' the function sets temporarily the "LC_CTYPE" locale to ' - 'the\n' - ' "LC_NUMERIC" locale in some cases.\n' - '\n' - 'str.format_map(mapping)\n' - '\n' - ' Similar to "str.format(**mapping)", except that ' - '"mapping" is used\n' - ' directly and not copied to a "dict". This is useful if ' - 'for example\n' - ' "mapping" is a dict subclass:\n' - '\n' - ' >>> class Default(dict):\n' - ' ... def __missing__(self, key):\n' - ' ... return key\n' - ' ...\n' - " >>> '{name} was born in " - "{country}'.format_map(Default(name='Guido'))\n" - " 'Guido was born in country'\n" - '\n' - ' New in version 3.2.\n' - '\n' - 'str.index(sub[, start[, end]])\n' - '\n' - ' Like "find()", but raise "ValueError" when the ' - 'substring is not\n' - ' found.\n' - '\n' - 'str.isalnum()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'alphanumeric and\n' - ' there is at least one character, "False" otherwise. A ' - 'character\n' - ' "c" is alphanumeric if one of the following returns ' - '"True":\n' - ' "c.isalpha()", "c.isdecimal()", "c.isdigit()", or ' - '"c.isnumeric()".\n' - '\n' - 'str.isalpha()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'alphabetic and\n' - ' there is at least one character, "False" otherwise. ' - 'Alphabetic\n' - ' characters are those characters defined in the Unicode ' - 'character\n' - ' database as “Letter”, i.e., those with general category ' - 'property\n' - ' being one of “Lm”, “Lt”, “Lu”, “Ll”, or “Lo”. Note ' - 'that this is\n' - ' different from the “Alphabetic” property defined in the ' - 'Unicode\n' - ' Standard.\n' - '\n' - 'str.isascii()\n' - '\n' - ' Return "True" if the string is empty or all characters ' - 'in the\n' - ' string are ASCII, "False" otherwise. ASCII characters ' - 'have code\n' - ' points in the range U+0000-U+007F.\n' - '\n' - ' New in version 3.7.\n' - '\n' - 'str.isdecimal()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'decimal\n' - ' characters and there is at least one character, "False" ' - 'otherwise.\n' - ' Decimal characters are those that can be used to form ' - 'numbers in\n' - ' base 10, e.g. U+0660, ARABIC-INDIC DIGIT ZERO. ' - 'Formally a decimal\n' - ' character is a character in the Unicode General ' - 'Category “Nd”.\n' - '\n' - 'str.isdigit()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'digits and there\n' - ' is at least one character, "False" otherwise. 
Digits ' - 'include\n' - ' decimal characters and digits that need special ' - 'handling, such as\n' - ' the compatibility superscript digits. This covers ' - 'digits which\n' - ' cannot be used to form numbers in base 10, like the ' - 'Kharosthi\n' - ' numbers. Formally, a digit is a character that has the ' - 'property\n' - ' value Numeric_Type=Digit or Numeric_Type=Decimal.\n' - '\n' - 'str.isidentifier()\n' - '\n' - ' Return "True" if the string is a valid identifier ' - 'according to the\n' - ' language definition, section Identifiers and keywords.\n' - '\n' - ' Call "keyword.iskeyword()" to test whether string "s" ' - 'is a reserved\n' - ' identifier, such as "def" and "class".\n' - '\n' - ' Example:\n' - '\n' - ' >>> from keyword import iskeyword\n' - '\n' - " >>> 'hello'.isidentifier(), iskeyword('hello')\n" - ' True, False\n' - " >>> 'def'.isidentifier(), iskeyword('def')\n" - ' True, True\n' - '\n' - 'str.islower()\n' - '\n' - ' Return "True" if all cased characters [4] in the string ' - 'are\n' - ' lowercase and there is at least one cased character, ' - '"False"\n' - ' otherwise.\n' - '\n' - 'str.isnumeric()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'numeric\n' - ' characters, and there is at least one character, ' - '"False" otherwise.\n' - ' Numeric characters include digit characters, and all ' - 'characters\n' - ' that have the Unicode numeric value property, e.g. ' - 'U+2155, VULGAR\n' - ' FRACTION ONE FIFTH. Formally, numeric characters are ' - 'those with\n' - ' the property value Numeric_Type=Digit, ' - 'Numeric_Type=Decimal or\n' - ' Numeric_Type=Numeric.\n' - '\n' - 'str.isprintable()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'printable or the\n' - ' string is empty, "False" otherwise. Nonprintable ' - 'characters are\n' - ' those characters defined in the Unicode character ' - 'database as\n' - ' “Other” or “Separator”, excepting the ASCII space ' - '(0x20) which is\n' - ' considered printable. (Note that printable characters ' - 'in this\n' - ' context are those which should not be escaped when ' - '"repr()" is\n' - ' invoked on a string. It has no bearing on the handling ' - 'of strings\n' - ' written to "sys.stdout" or "sys.stderr".)\n' - '\n' - 'str.isspace()\n' - '\n' - ' Return "True" if there are only whitespace characters ' - 'in the string\n' - ' and there is at least one character, "False" ' - 'otherwise.\n' - '\n' - ' A character is *whitespace* if in the Unicode character ' - 'database\n' - ' (see "unicodedata"), either its general category is ' - '"Zs"\n' - ' (“Separator, space”), or its bidirectional class is one ' - 'of "WS",\n' - ' "B", or "S".\n' - '\n' - 'str.istitle()\n' - '\n' - ' Return "True" if the string is a titlecased string and ' - 'there is at\n' - ' least one character, for example uppercase characters ' - 'may only\n' - ' follow uncased characters and lowercase characters only ' - 'cased ones.\n' - ' Return "False" otherwise.\n' - '\n' - 'str.isupper()\n' - '\n' - ' Return "True" if all cased characters [4] in the string ' - 'are\n' - ' uppercase and there is at least one cased character, ' - '"False"\n' - ' otherwise.\n' - '\n' - " >>> 'BANANA'.isupper()\n" - ' True\n' - " >>> 'banana'.isupper()\n" - ' False\n' - " >>> 'baNana'.isupper()\n" - ' False\n' - " >>> ' '.isupper()\n" - ' False\n' - '\n' - 'str.join(iterable)\n' - '\n' - ' Return a string which is the concatenation of the ' - 'strings in\n' - ' *iterable*. 
A "TypeError" will be raised if there are ' - 'any non-\n' - ' string values in *iterable*, including "bytes" ' - 'objects. The\n' - ' separator between elements is the string providing this ' - 'method.\n' - '\n' - 'str.ljust(width[, fillchar])\n' - '\n' - ' Return the string left justified in a string of length ' - '*width*.\n' - ' Padding is done using the specified *fillchar* (default ' - 'is an ASCII\n' - ' space). The original string is returned if *width* is ' - 'less than or\n' - ' equal to "len(s)".\n' - '\n' - 'str.lower()\n' - '\n' - ' Return a copy of the string with all the cased ' - 'characters [4]\n' - ' converted to lowercase.\n' - '\n' - ' The lowercasing algorithm used is described in section ' - '3.13 of the\n' - ' Unicode Standard.\n' - '\n' - 'str.lstrip([chars])\n' - '\n' - ' Return a copy of the string with leading characters ' - 'removed. The\n' - ' *chars* argument is a string specifying the set of ' - 'characters to be\n' - ' removed. If omitted or "None", the *chars* argument ' - 'defaults to\n' - ' removing whitespace. The *chars* argument is not a ' - 'prefix; rather,\n' - ' all combinations of its values are stripped:\n' - '\n' - " >>> ' spacious '.lstrip()\n" - " 'spacious '\n" - " >>> 'www.example.com'.lstrip('cmowz.')\n" - " 'example.com'\n" - '\n' - ' See "str.removeprefix()" for a method that will remove ' - 'a single\n' - ' prefix string rather than all of a set of characters. ' - 'For example:\n' - '\n' - " >>> 'Arthur: three!'.lstrip('Arthur: ')\n" - " 'ee!'\n" - " >>> 'Arthur: three!'.removeprefix('Arthur: ')\n" - " 'three!'\n" - '\n' - 'static str.maketrans(x[, y[, z]])\n' - '\n' - ' This static method returns a translation table usable ' - 'for\n' - ' "str.translate()".\n' - '\n' - ' If there is only one argument, it must be a dictionary ' - 'mapping\n' - ' Unicode ordinals (integers) or characters (strings of ' - 'length 1) to\n' - ' Unicode ordinals, strings (of arbitrary lengths) or ' - '"None".\n' - ' Character keys will then be converted to ordinals.\n' - '\n' - ' If there are two arguments, they must be strings of ' - 'equal length,\n' - ' and in the resulting dictionary, each character in x ' - 'will be mapped\n' - ' to the character at the same position in y. If there ' - 'is a third\n' - ' argument, it must be a string, whose characters will be ' - 'mapped to\n' - ' "None" in the result.\n' - '\n' - 'str.partition(sep)\n' - '\n' - ' Split the string at the first occurrence of *sep*, and ' - 'return a\n' - ' 3-tuple containing the part before the separator, the ' - 'separator\n' - ' itself, and the part after the separator. If the ' - 'separator is not\n' - ' found, return a 3-tuple containing the string itself, ' - 'followed by\n' - ' two empty strings.\n' - '\n' - 'str.removeprefix(prefix, /)\n' - '\n' - ' If the string starts with the *prefix* string, return\n' - ' "string[len(prefix):]". Otherwise, return a copy of the ' - 'original\n' - ' string:\n' - '\n' - " >>> 'TestHook'.removeprefix('Test')\n" - " 'Hook'\n" - " >>> 'BaseTestCase'.removeprefix('Test')\n" - " 'BaseTestCase'\n" - '\n' - ' New in version 3.9.\n' - '\n' - 'str.removesuffix(suffix, /)\n' - '\n' - ' If the string ends with the *suffix* string and that ' - '*suffix* is\n' - ' not empty, return "string[:-len(suffix)]". 
Otherwise, ' - 'return a copy\n' - ' of the original string:\n' - '\n' - " >>> 'MiscTests'.removesuffix('Tests')\n" - " 'Misc'\n" - " >>> 'TmpDirMixin'.removesuffix('Tests')\n" - " 'TmpDirMixin'\n" - '\n' - ' New in version 3.9.\n' - '\n' - 'str.replace(old, new[, count])\n' - '\n' - ' Return a copy of the string with all occurrences of ' - 'substring *old*\n' - ' replaced by *new*. If the optional argument *count* is ' - 'given, only\n' - ' the first *count* occurrences are replaced.\n' - '\n' - 'str.rfind(sub[, start[, end]])\n' - '\n' - ' Return the highest index in the string where substring ' - '*sub* is\n' - ' found, such that *sub* is contained within ' - '"s[start:end]".\n' - ' Optional arguments *start* and *end* are interpreted as ' - 'in slice\n' - ' notation. Return "-1" on failure.\n' - '\n' - 'str.rindex(sub[, start[, end]])\n' - '\n' - ' Like "rfind()" but raises "ValueError" when the ' - 'substring *sub* is\n' - ' not found.\n' - '\n' - 'str.rjust(width[, fillchar])\n' - '\n' - ' Return the string right justified in a string of length ' - '*width*.\n' - ' Padding is done using the specified *fillchar* (default ' - 'is an ASCII\n' - ' space). The original string is returned if *width* is ' - 'less than or\n' - ' equal to "len(s)".\n' - '\n' - 'str.rpartition(sep)\n' - '\n' - ' Split the string at the last occurrence of *sep*, and ' - 'return a\n' - ' 3-tuple containing the part before the separator, the ' - 'separator\n' - ' itself, and the part after the separator. If the ' - 'separator is not\n' - ' found, return a 3-tuple containing two empty strings, ' - 'followed by\n' - ' the string itself.\n' - '\n' - 'str.rsplit(sep=None, maxsplit=- 1)\n' - '\n' - ' Return a list of the words in the string, using *sep* ' - 'as the\n' - ' delimiter string. If *maxsplit* is given, at most ' - '*maxsplit* splits\n' - ' are done, the *rightmost* ones. If *sep* is not ' - 'specified or\n' - ' "None", any whitespace string is a separator. Except ' - 'for splitting\n' - ' from the right, "rsplit()" behaves like "split()" which ' - 'is\n' - ' described in detail below.\n' - '\n' - 'str.rstrip([chars])\n' - '\n' - ' Return a copy of the string with trailing characters ' - 'removed. The\n' - ' *chars* argument is a string specifying the set of ' - 'characters to be\n' - ' removed. If omitted or "None", the *chars* argument ' - 'defaults to\n' - ' removing whitespace. The *chars* argument is not a ' - 'suffix; rather,\n' - ' all combinations of its values are stripped:\n' - '\n' - " >>> ' spacious '.rstrip()\n" - " ' spacious'\n" - " >>> 'mississippi'.rstrip('ipz')\n" - " 'mississ'\n" - '\n' - ' See "str.removesuffix()" for a method that will remove ' - 'a single\n' - ' suffix string rather than all of a set of characters. ' - 'For example:\n' - '\n' - " >>> 'Monty Python'.rstrip(' Python')\n" - " 'M'\n" - " >>> 'Monty Python'.removesuffix(' Python')\n" - " 'Monty'\n" - '\n' - 'str.split(sep=None, maxsplit=- 1)\n' - '\n' - ' Return a list of the words in the string, using *sep* ' - 'as the\n' - ' delimiter string. If *maxsplit* is given, at most ' - '*maxsplit*\n' - ' splits are done (thus, the list will have at most ' - '"maxsplit+1"\n' - ' elements). 
If *maxsplit* is not specified or "-1", ' - 'then there is\n' - ' no limit on the number of splits (all possible splits ' - 'are made).\n' - '\n' - ' If *sep* is given, consecutive delimiters are not ' - 'grouped together\n' - ' and are deemed to delimit empty strings (for example,\n' - ' "\'1,,2\'.split(\',\')" returns "[\'1\', \'\', ' - '\'2\']"). The *sep* argument\n' - ' may consist of multiple characters (for example,\n' - ' "\'1<>2<>3\'.split(\'<>\')" returns "[\'1\', \'2\', ' - '\'3\']"). Splitting an\n' - ' empty string with a specified separator returns ' - '"[\'\']".\n' - '\n' - ' For example:\n' - '\n' - " >>> '1,2,3'.split(',')\n" - " ['1', '2', '3']\n" - " >>> '1,2,3'.split(',', maxsplit=1)\n" - " ['1', '2,3']\n" - " >>> '1,2,,3,'.split(',')\n" - " ['1', '2', '', '3', '']\n" - '\n' - ' If *sep* is not specified or is "None", a different ' - 'splitting\n' - ' algorithm is applied: runs of consecutive whitespace ' - 'are regarded\n' - ' as a single separator, and the result will contain no ' - 'empty strings\n' - ' at the start or end if the string has leading or ' - 'trailing\n' - ' whitespace. Consequently, splitting an empty string or ' - 'a string\n' - ' consisting of just whitespace with a "None" separator ' - 'returns "[]".\n' - '\n' - ' For example:\n' - '\n' - " >>> '1 2 3'.split()\n" - " ['1', '2', '3']\n" - " >>> '1 2 3'.split(maxsplit=1)\n" - " ['1', '2 3']\n" - " >>> ' 1 2 3 '.split()\n" - " ['1', '2', '3']\n" - '\n' - 'str.splitlines([keepends])\n' - '\n' - ' Return a list of the lines in the string, breaking at ' - 'line\n' - ' boundaries. Line breaks are not included in the ' - 'resulting list\n' - ' unless *keepends* is given and true.\n' - '\n' - ' This method splits on the following line boundaries. ' - 'In\n' - ' particular, the boundaries are a superset of *universal ' - 'newlines*.\n' - '\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | Representation | ' - 'Description |\n' - ' ' - '|=========================|===============================|\n' - ' | "\\n" | Line ' - 'Feed |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\r" | Carriage ' - 'Return |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\r\\n" | Carriage Return + Line ' - 'Feed |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\v" or "\\x0b" | Line ' - 'Tabulation |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\f" or "\\x0c" | Form ' - 'Feed |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\x1c" | File ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\x1d" | Group ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\x1e" | Record ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\x85" | Next Line (C1 Control ' - 'Code) |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\u2028" | Line ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\u2029" | Paragraph ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - '\n' - ' Changed in version 3.2: "\\v" and "\\f" added to list ' - 'of line\n' - ' boundaries.\n' - '\n' - ' For example:\n' - '\n' - " >>> 'ab c\\n\\nde fg\\rkl\\r\\n'.splitlines()\n" - " ['ab c', '', 'de fg', 'kl']\n" 
- " >>> 'ab c\\n\\nde " - "fg\\rkl\\r\\n'.splitlines(keepends=True)\n" - " ['ab c\\n', '\\n', 'de fg\\r', 'kl\\r\\n']\n" - '\n' - ' Unlike "split()" when a delimiter string *sep* is ' - 'given, this\n' - ' method returns an empty list for the empty string, and ' - 'a terminal\n' - ' line break does not result in an extra line:\n' - '\n' - ' >>> "".splitlines()\n' - ' []\n' - ' >>> "One line\\n".splitlines()\n' - " ['One line']\n" - '\n' - ' For comparison, "split(\'\\n\')" gives:\n' - '\n' - " >>> ''.split('\\n')\n" - " ['']\n" - " >>> 'Two lines\\n'.split('\\n')\n" - " ['Two lines', '']\n" - '\n' - 'str.startswith(prefix[, start[, end]])\n' - '\n' - ' Return "True" if string starts with the *prefix*, ' - 'otherwise return\n' - ' "False". *prefix* can also be a tuple of prefixes to ' - 'look for.\n' - ' With optional *start*, test string beginning at that ' - 'position.\n' - ' With optional *end*, stop comparing string at that ' - 'position.\n' - '\n' - 'str.strip([chars])\n' - '\n' - ' Return a copy of the string with the leading and ' - 'trailing\n' - ' characters removed. The *chars* argument is a string ' - 'specifying the\n' - ' set of characters to be removed. If omitted or "None", ' - 'the *chars*\n' - ' argument defaults to removing whitespace. The *chars* ' - 'argument is\n' - ' not a prefix or suffix; rather, all combinations of its ' - 'values are\n' - ' stripped:\n' - '\n' - " >>> ' spacious '.strip()\n" - " 'spacious'\n" - " >>> 'www.example.com'.strip('cmowz.')\n" - " 'example'\n" - '\n' - ' The outermost leading and trailing *chars* argument ' - 'values are\n' - ' stripped from the string. Characters are removed from ' - 'the leading\n' - ' end until reaching a string character that is not ' - 'contained in the\n' - ' set of characters in *chars*. A similar action takes ' - 'place on the\n' - ' trailing end. For example:\n' - '\n' - " >>> comment_string = '#....... Section 3.2.1 Issue " - "#32 .......'\n" - " >>> comment_string.strip('.#! ')\n" - " 'Section 3.2.1 Issue #32'\n" - '\n' - 'str.swapcase()\n' - '\n' - ' Return a copy of the string with uppercase characters ' - 'converted to\n' - ' lowercase and vice versa. Note that it is not ' - 'necessarily true that\n' - ' "s.swapcase().swapcase() == s".\n' - '\n' - 'str.title()\n' - '\n' - ' Return a titlecased version of the string where words ' - 'start with an\n' - ' uppercase character and the remaining characters are ' - 'lowercase.\n' - '\n' - ' For example:\n' - '\n' - " >>> 'Hello world'.title()\n" - " 'Hello World'\n" - '\n' - ' The algorithm uses a simple language-independent ' - 'definition of a\n' - ' word as groups of consecutive letters. The definition ' - 'works in\n' - ' many contexts but it means that apostrophes in ' - 'contractions and\n' - ' possessives form word boundaries, which may not be the ' - 'desired\n' - ' result:\n' - '\n' - ' >>> "they\'re bill\'s friends from the UK".title()\n' - ' "They\'Re Bill\'S Friends From The Uk"\n' - '\n' - ' A workaround for apostrophes can be constructed using ' - 'regular\n' - ' expressions:\n' - '\n' - ' >>> import re\n' - ' >>> def titlecase(s):\n' - ' ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n' - ' ... lambda mo: ' - 'mo.group(0).capitalize(),\n' - ' ... s)\n' - ' ...\n' - ' >>> titlecase("they\'re bill\'s friends.")\n' - ' "They\'re Bill\'s Friends."\n' - '\n' - 'str.translate(table)\n' - '\n' - ' Return a copy of the string in which each character has ' - 'been mapped\n' - ' through the given translation table. 
The table must be ' - 'an object\n' - ' that implements indexing via "__getitem__()", typically ' - 'a *mapping*\n' - ' or *sequence*. When indexed by a Unicode ordinal (an ' - 'integer), the\n' - ' table object can do any of the following: return a ' - 'Unicode ordinal\n' - ' or a string, to map the character to one or more other ' - 'characters;\n' - ' return "None", to delete the character from the return ' - 'string; or\n' - ' raise a "LookupError" exception, to map the character ' - 'to itself.\n' - '\n' - ' You can use "str.maketrans()" to create a translation ' - 'map from\n' - ' character-to-character mappings in different formats.\n' - '\n' - ' See also the "codecs" module for a more flexible ' - 'approach to custom\n' - ' character mappings.\n' - '\n' - 'str.upper()\n' - '\n' - ' Return a copy of the string with all the cased ' - 'characters [4]\n' - ' converted to uppercase. Note that ' - '"s.upper().isupper()" might be\n' - ' "False" if "s" contains uncased characters or if the ' - 'Unicode\n' - ' category of the resulting character(s) is not “Lu” ' - '(Letter,\n' - ' uppercase), but e.g. “Lt” (Letter, titlecase).\n' - '\n' - ' The uppercasing algorithm used is described in section ' - '3.13 of the\n' - ' Unicode Standard.\n' - '\n' - 'str.zfill(width)\n' - '\n' - ' Return a copy of the string left filled with ASCII ' - '"\'0\'" digits to\n' - ' make a string of length *width*. A leading sign prefix\n' - ' ("\'+\'"/"\'-\'") is handled by inserting the padding ' - '*after* the sign\n' - ' character rather than before. The original string is ' - 'returned if\n' - ' *width* is less than or equal to "len(s)".\n' - '\n' - ' For example:\n' - '\n' - ' >>> "42".zfill(5)\n' - " '00042'\n" - ' >>> "-42".zfill(5)\n' - " '-0042'\n", - 'strings': 'String and Bytes literals\n' - '*************************\n' - '\n' - 'String literals are described by the following lexical ' - 'definitions:\n' - '\n' - ' stringliteral ::= [stringprefix](shortstring | longstring)\n' - ' stringprefix ::= "r" | "u" | "R" | "U" | "f" | "F"\n' - ' | "fr" | "Fr" | "fR" | "FR" | "rf" | "rF" | ' - '"Rf" | "RF"\n' - ' shortstring ::= "\'" shortstringitem* "\'" | \'"\' ' - 'shortstringitem* \'"\'\n' - ' longstring ::= "\'\'\'" longstringitem* "\'\'\'" | ' - '\'"""\' longstringitem* \'"""\'\n' - ' shortstringitem ::= shortstringchar | stringescapeseq\n' - ' longstringitem ::= longstringchar | stringescapeseq\n' - ' shortstringchar ::= \n' - ' longstringchar ::= \n' - ' stringescapeseq ::= "\\" \n' - '\n' - ' bytesliteral ::= bytesprefix(shortbytes | longbytes)\n' - ' bytesprefix ::= "b" | "B" | "br" | "Br" | "bR" | "BR" | ' - '"rb" | "rB" | "Rb" | "RB"\n' - ' shortbytes ::= "\'" shortbytesitem* "\'" | \'"\' ' - 'shortbytesitem* \'"\'\n' - ' longbytes ::= "\'\'\'" longbytesitem* "\'\'\'" | \'"""\' ' - 'longbytesitem* \'"""\'\n' - ' shortbytesitem ::= shortbyteschar | bytesescapeseq\n' - ' longbytesitem ::= longbyteschar | bytesescapeseq\n' - ' shortbyteschar ::= \n' - ' longbyteschar ::= \n' - ' bytesescapeseq ::= "\\" \n' - '\n' - 'One syntactic restriction not indicated by these productions is ' - 'that\n' - 'whitespace is not allowed between the "stringprefix" or ' - '"bytesprefix"\n' - 'and the rest of the literal. 
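A minimal illustrative sketch of the `str.maketrans()`/`str.translate()` mapping rules and the `str.zfill()` sign handling documented just above; the table contents and values are arbitrary examples, not taken from the reference text:

```python
# A translation-table entry may map a character to another character, to a
# longer string, or to None (which deletes it); str.maketrans() builds the table.
table = str.maketrans({"a": "4", "e": "33", "x": None})
print("exact".translate(table))   # '334ct'
print("-7".zfill(4))              # '-007'  (padding is inserted after the sign)
```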
The source character set is defined ' - 'by\n' - 'the encoding declaration; it is UTF-8 if no encoding declaration ' - 'is\n' - 'given in the source file; see section Encoding declarations.\n' - '\n' - 'In plain English: Both types of literals can be enclosed in ' - 'matching\n' - 'single quotes ("\'") or double quotes ("""). They can also be ' - 'enclosed\n' - 'in matching groups of three single or double quotes (these are\n' - 'generally referred to as *triple-quoted strings*). The ' - 'backslash\n' - '("\\") character is used to escape characters that otherwise have ' - 'a\n' - 'special meaning, such as newline, backslash itself, or the quote\n' - 'character.\n' - '\n' - 'Bytes literals are always prefixed with "\'b\'" or "\'B\'"; they ' - 'produce\n' - 'an instance of the "bytes" type instead of the "str" type. They ' - 'may\n' - 'only contain ASCII characters; bytes with a numeric value of 128 ' - 'or\n' - 'greater must be expressed with escapes.\n' - '\n' - 'Both string and bytes literals may optionally be prefixed with a\n' - 'letter "\'r\'" or "\'R\'"; such strings are called *raw strings* ' - 'and treat\n' - 'backslashes as literal characters. As a result, in string ' - 'literals,\n' - '"\'\\U\'" and "\'\\u\'" escapes in raw strings are not treated ' - 'specially.\n' - 'Given that Python 2.x’s raw unicode literals behave differently ' - 'than\n' - 'Python 3.x’s the "\'ur\'" syntax is not supported.\n' - '\n' - 'New in version 3.3: The "\'rb\'" prefix of raw bytes literals has ' - 'been\n' - 'added as a synonym of "\'br\'".\n' - '\n' - 'New in version 3.3: Support for the unicode legacy literal\n' - '("u\'value\'") was reintroduced to simplify the maintenance of ' - 'dual\n' - 'Python 2.x and 3.x codebases. See **PEP 414** for more ' - 'information.\n' - '\n' - 'A string literal with "\'f\'" or "\'F\'" in its prefix is a ' - '*formatted\n' - 'string literal*; see Formatted string literals. The "\'f\'" may ' - 'be\n' - 'combined with "\'r\'", but not with "\'b\'" or "\'u\'", therefore ' - 'raw\n' - 'formatted strings are possible, but formatted bytes literals are ' - 'not.\n' - '\n' - 'In triple-quoted literals, unescaped newlines and quotes are ' - 'allowed\n' - '(and are retained), except that three unescaped quotes in a row\n' - 'terminate the literal. (A “quote” is the character used to open ' - 'the\n' - 'literal, i.e. either "\'" or """.)\n' - '\n' - 'Unless an "\'r\'" or "\'R\'" prefix is present, escape sequences ' - 'in string\n' - 'and bytes literals are interpreted according to rules similar to ' - 'those\n' - 'used by Standard C. 
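As a quick, hedged illustration of the literal prefixes discussed above (raw, bytes, raw-bytes and formatted strings, plus triple quoting); the variable names are only for the example:

```python
print(len('\n'), len(r'\n'))      # 1 2      raw strings keep the backslash
print(rb'\x41')                   # b'\\x41'  'rb' behaves like 'br'
who = "world"
print(f"hello {who}")             # hello world   formatted string literal
block = """line one
line two"""                       # triple quotes keep unescaped newlines
print(block.splitlines())         # ['line one', 'line two']
```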
The recognized escape sequences are:\n' - '\n' - '+-------------------+-----------------------------------+---------+\n' - '| Escape Sequence | Meaning | Notes ' - '|\n' - '|===================|===================================|=========|\n' - '| "\\newline" | Backslash and newline ignored ' - '| |\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\\\" | Backslash ("\\") ' - '| |\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\\'" | Single quote ("\'") ' - '| |\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\"" | Double quote (""") ' - '| |\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\a" | ASCII Bell (BEL) ' - '| |\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\b" | ASCII Backspace (BS) ' - '| |\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\f" | ASCII Formfeed (FF) ' - '| |\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\n" | ASCII Linefeed (LF) ' - '| |\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\r" | ASCII Carriage Return (CR) ' - '| |\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\t" | ASCII Horizontal Tab (TAB) ' - '| |\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\v" | ASCII Vertical Tab (VT) ' - '| |\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\ooo" | Character with octal value *ooo* | ' - '(1,3) |\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\xhh" | Character with hex value *hh* | ' - '(2,3) |\n' - '+-------------------+-----------------------------------+---------+\n' - '\n' - 'Escape sequences only recognized in string literals are:\n' - '\n' - '+-------------------+-----------------------------------+---------+\n' - '| Escape Sequence | Meaning | Notes ' - '|\n' - '|===================|===================================|=========|\n' - '| "\\N{name}" | Character named *name* in the | ' - '(4) |\n' - '| | Unicode database | ' - '|\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\uxxxx" | Character with 16-bit hex value | ' - '(5) |\n' - '| | *xxxx* | ' - '|\n' - '+-------------------+-----------------------------------+---------+\n' - '| "\\Uxxxxxxxx" | Character with 32-bit hex value | ' - '(6) |\n' - '| | *xxxxxxxx* | ' - '|\n' - '+-------------------+-----------------------------------+---------+\n' - '\n' - 'Notes:\n' - '\n' - '1. As in Standard C, up to three octal digits are accepted.\n' - '\n' - '2. Unlike in Standard C, exactly two hex digits are required.\n' - '\n' - '3. In a bytes literal, hexadecimal and octal escapes denote the ' - 'byte\n' - ' with the given value. In a string literal, these escapes ' - 'denote a\n' - ' Unicode character with the given value.\n' - '\n' - '4. Changed in version 3.3: Support for name aliases [1] has been\n' - ' added.\n' - '\n' - '5. Exactly four hex digits are required.\n' - '\n' - '6. Any Unicode character can be encoded this way. Exactly eight ' - 'hex\n' - ' digits are required.\n' - '\n' - 'Unlike Standard C, all unrecognized escape sequences are left in ' - 'the\n' - 'string unchanged, i.e., *the backslash is left in the result*. 
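A short sketch of the escape sequences tabulated above, including the bytes-versus-str difference and the "backslash is left in the result" rule for unrecognized escapes:

```python
print('\x41', '\101', '\u0041', '\N{LATIN SMALL LETTER E WITH ACUTE}')
# A A A é
print(b'\x41')     # b'A'   in a bytes literal the escape denotes a byte value
print(len('\q'))   # 2      unrecognized escape: the backslash stays in the
                   #        string (and a DeprecationWarning is emitted)
```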
' - '(This\n' - 'behavior is useful when debugging: if an escape sequence is ' - 'mistyped,\n' - 'the resulting output is more easily recognized as broken.) It is ' - 'also\n' - 'important to note that the escape sequences only recognized in ' - 'string\n' - 'literals fall into the category of unrecognized escapes for ' - 'bytes\n' - 'literals.\n' - '\n' - ' Changed in version 3.6: Unrecognized escape sequences produce ' - 'a\n' - ' "DeprecationWarning". In a future Python version they will be ' - 'a\n' - ' "SyntaxWarning" and eventually a "SyntaxError".\n' - '\n' - 'Even in a raw literal, quotes can be escaped with a backslash, ' - 'but the\n' - 'backslash remains in the result; for example, "r"\\""" is a ' - 'valid\n' - 'string literal consisting of two characters: a backslash and a ' - 'double\n' - 'quote; "r"\\"" is not a valid string literal (even a raw string ' - 'cannot\n' - 'end in an odd number of backslashes). Specifically, *a raw ' - 'literal\n' - 'cannot end in a single backslash* (since the backslash would ' - 'escape\n' - 'the following quote character). Note also that a single ' - 'backslash\n' - 'followed by a newline is interpreted as those two characters as ' - 'part\n' - 'of the literal, *not* as a line continuation.\n', - 'subscriptions': 'Subscriptions\n' - '*************\n' - '\n' - 'Subscription of a sequence (string, tuple or list) or ' - 'mapping\n' - '(dictionary) object usually selects an item from the ' - 'collection:\n' - '\n' - ' subscription ::= primary "[" expression_list "]"\n' - '\n' - 'The primary must evaluate to an object that supports ' - 'subscription\n' - '(lists or dictionaries for example). User-defined objects ' - 'can support\n' - 'subscription by defining a "__getitem__()" method.\n' - '\n' - 'For built-in objects, there are two types of objects that ' - 'support\n' - 'subscription:\n' - '\n' - 'If the primary is a mapping, the expression list must ' - 'evaluate to an\n' - 'object whose value is one of the keys of the mapping, and ' - 'the\n' - 'subscription selects the value in the mapping that ' - 'corresponds to that\n' - 'key. (The expression list is a tuple except if it has ' - 'exactly one\n' - 'item.)\n' - '\n' - 'If the primary is a sequence, the expression list must ' - 'evaluate to an\n' - 'integer or a slice (as discussed in the following ' - 'section).\n' - '\n' - 'The formal syntax makes no special provision for negative ' - 'indices in\n' - 'sequences; however, built-in sequences all provide a ' - '"__getitem__()"\n' - 'method that interprets negative indices by adding the ' - 'length of the\n' - 'sequence to the index (so that "x[-1]" selects the last ' - 'item of "x").\n' - 'The resulting value must be a nonnegative integer less than ' - 'the number\n' - 'of items in the sequence, and the subscription selects the ' - 'item whose\n' - 'index is that value (counting from zero). Since the support ' - 'for\n' - 'negative indices and slicing occurs in the object’s ' - '"__getitem__()"\n' - 'method, subclasses overriding this method will need to ' - 'explicitly add\n' - 'that support.\n' - '\n' - 'A string’s items are characters. 
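A small sketch of subscription support via `__getitem__()`, using a hypothetical `Deck` class; delegating to a list keeps the negative-index and slice handling described above without extra code:

```python
class Deck:
    def __init__(self, cards):
        self._cards = list(cards)

    def __getitem__(self, index):        # makes deck[i] and deck[i:j] work
        return self._cards[index]        # the list handles negative indices
                                         # and slices for us

deck = Deck(['A', 'K', 'Q', 'J'])
print(deck[0], deck[-1], deck[1:3])      # A J ['K', 'Q']
```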
A character is not a ' - 'separate data\n' - 'type but a string of exactly one character.\n' - '\n' - 'Subscription of certain *classes* or *types* creates a ' - 'generic alias.\n' - 'In this case, user-defined classes can support subscription ' - 'by\n' - 'providing a "__class_getitem__()" classmethod.\n', - 'truth': 'Truth Value Testing\n' - '*******************\n' - '\n' - 'Any object can be tested for truth value, for use in an "if" or\n' - '"while" condition or as operand of the Boolean operations below.\n' - '\n' - 'By default, an object is considered true unless its class defines\n' - 'either a "__bool__()" method that returns "False" or a "__len__()"\n' - 'method that returns zero, when called with the object. [1] Here ' - 'are\n' - 'most of the built-in objects considered false:\n' - '\n' - '* constants defined to be false: "None" and "False".\n' - '\n' - '* zero of any numeric type: "0", "0.0", "0j", "Decimal(0)",\n' - ' "Fraction(0, 1)"\n' - '\n' - '* empty sequences and collections: "\'\'", "()", "[]", "{}", ' - '"set()",\n' - ' "range(0)"\n' - '\n' - 'Operations and built-in functions that have a Boolean result ' - 'always\n' - 'return "0" or "False" for false and "1" or "True" for true, unless\n' - 'otherwise stated. (Important exception: the Boolean operations ' - '"or"\n' - 'and "and" always return one of their operands.)\n', - 'try': 'The "try" statement\n' - '*******************\n' - '\n' - 'The "try" statement specifies exception handlers and/or cleanup code\n' - 'for a group of statements:\n' - '\n' - ' try_stmt ::= try1_stmt | try2_stmt\n' - ' try1_stmt ::= "try" ":" suite\n' - ' ("except" [expression ["as" identifier]] ":" ' - 'suite)+\n' - ' ["else" ":" suite]\n' - ' ["finally" ":" suite]\n' - ' try2_stmt ::= "try" ":" suite\n' - ' "finally" ":" suite\n' - '\n' - 'The "except" clause(s) specify one or more exception handlers. When ' - 'no\n' - 'exception occurs in the "try" clause, no exception handler is\n' - 'executed. When an exception occurs in the "try" suite, a search for ' - 'an\n' - 'exception handler is started. This search inspects the except ' - 'clauses\n' - 'in turn until one is found that matches the exception. An ' - 'expression-\n' - 'less except clause, if present, must be last; it matches any\n' - 'exception. For an except clause with an expression, that expression\n' - 'is evaluated, and the clause matches the exception if the resulting\n' - 'object is “compatible” with the exception. An object is compatible\n' - 'with an exception if it is the class or a base class of the ' - 'exception\n' - 'object, or a tuple containing an item that is the class or a base\n' - 'class of the exception object.\n' - '\n' - 'If no except clause matches the exception, the search for an ' - 'exception\n' - 'handler continues in the surrounding code and on the invocation ' - 'stack.\n' - '[1]\n' - '\n' - 'If the evaluation of an expression in the header of an except clause\n' - 'raises an exception, the original search for a handler is canceled ' - 'and\n' - 'a search starts for the new exception in the surrounding code and on\n' - 'the call stack (it is treated as if the entire "try" statement ' - 'raised\n' - 'the exception).\n' - '\n' - 'When a matching except clause is found, the exception is assigned to\n' - 'the target specified after the "as" keyword in that except clause, ' - 'if\n' - 'present, and the except clause’s suite is executed. All except\n' - 'clauses must have an executable block. 
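A brief sketch of the except-clause matching rules just described (matching by class, by base class, by a tuple of classes, and the expression-less clause that must come last):

```python
try:
    {}['missing']
except (KeyError, IndexError) as exc:    # a tuple of exception classes
    print('lookup failed:', type(exc).__name__)

try:
    1 / 0
except ArithmeticError:                  # matches via a base class of
    print('caught ZeroDivisionError')    # ZeroDivisionError
except:                                  # expression-less clause: must be
    print('anything else')               # last, matches any exception
```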
When the end of this block ' - 'is\n' - 'reached, execution continues normally after the entire try ' - 'statement.\n' - '(This means that if two nested handlers exist for the same ' - 'exception,\n' - 'and the exception occurs in the try clause of the inner handler, the\n' - 'outer handler will not handle the exception.)\n' - '\n' - 'When an exception has been assigned using "as target", it is cleared\n' - 'at the end of the except clause. This is as if\n' - '\n' - ' except E as N:\n' - ' foo\n' - '\n' - 'was translated to\n' - '\n' - ' except E as N:\n' - ' try:\n' - ' foo\n' - ' finally:\n' - ' del N\n' - '\n' - 'This means the exception must be assigned to a different name to be\n' - 'able to refer to it after the except clause. Exceptions are cleared\n' - 'because with the traceback attached to them, they form a reference\n' - 'cycle with the stack frame, keeping all locals in that frame alive\n' - 'until the next garbage collection occurs.\n' - '\n' - 'Before an except clause’s suite is executed, details about the\n' - 'exception are stored in the "sys" module and can be accessed via\n' - '"sys.exc_info()". "sys.exc_info()" returns a 3-tuple consisting of ' - 'the\n' - 'exception class, the exception instance and a traceback object (see\n' - 'section The standard type hierarchy) identifying the point in the\n' - 'program where the exception occurred. The details about the ' - 'exception\n' - 'accessed via "sys.exc_info()" are restored to their previous values\n' - 'when leaving an exception handler:\n' - '\n' - ' >>> print(sys.exc_info())\n' - ' (None, None, None)\n' - ' >>> try:\n' - ' ... raise TypeError\n' - ' ... except:\n' - ' ... print(sys.exc_info())\n' - ' ... try:\n' - ' ... raise ValueError\n' - ' ... except:\n' - ' ... print(sys.exc_info())\n' - ' ... print(sys.exc_info())\n' - ' ...\n' - " (, TypeError(), )\n' - " (, ValueError(), )\n' - " (, TypeError(), )\n' - ' >>> print(sys.exc_info())\n' - ' (None, None, None)\n' - '\n' - 'The optional "else" clause is executed if the control flow leaves ' - 'the\n' - '"try" suite, no exception was raised, and no "return", "continue", ' - 'or\n' - '"break" statement was executed. Exceptions in the "else" clause are\n' - 'not handled by the preceding "except" clauses.\n' - '\n' - 'If "finally" is present, it specifies a ‘cleanup’ handler. The ' - '"try"\n' - 'clause is executed, including any "except" and "else" clauses. If ' - 'an\n' - 'exception occurs in any of the clauses and is not handled, the\n' - 'exception is temporarily saved. The "finally" clause is executed. ' - 'If\n' - 'there is a saved exception it is re-raised at the end of the ' - '"finally"\n' - 'clause. If the "finally" clause raises another exception, the saved\n' - 'exception is set as the context of the new exception. If the ' - '"finally"\n' - 'clause executes a "return", "break" or "continue" statement, the ' - 'saved\n' - 'exception is discarded:\n' - '\n' - ' >>> def f():\n' - ' ... try:\n' - ' ... 1/0\n' - ' ... finally:\n' - ' ... return 42\n' - ' ...\n' - ' >>> f()\n' - ' 42\n' - '\n' - 'The exception information is not available to the program during\n' - 'execution of the "finally" clause.\n' - '\n' - 'When a "return", "break" or "continue" statement is executed in the\n' - '"try" suite of a "try"…"finally" statement, the "finally" clause is\n' - 'also executed ‘on the way out.’\n' - '\n' - 'The return value of a function is determined by the last "return"\n' - 'statement executed. 
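A minimal sketch of the "as target" clearing behaviour described above; `saved` is just an illustrative name for the extra binding the text recommends:

```python
try:
    raise ValueError('boom')
except ValueError as exc:
    saved = exc                  # bind to a different name to keep using it
try:
    exc                          # the "as" target was deleted at the end of
except NameError:                # the except clause
    print('exc is gone; saved still works:', saved)
```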
Since the "finally" clause always executes, a\n' - '"return" statement executed in the "finally" clause will always be ' - 'the\n' - 'last one executed:\n' - '\n' - ' >>> def foo():\n' - ' ... try:\n' - " ... return 'try'\n" - ' ... finally:\n' - " ... return 'finally'\n" - ' ...\n' - ' >>> foo()\n' - " 'finally'\n" - '\n' - 'Additional information on exceptions can be found in section\n' - 'Exceptions, and information on using the "raise" statement to ' - 'generate\n' - 'exceptions may be found in section The raise statement.\n' - '\n' - 'Changed in version 3.8: Prior to Python 3.8, a "continue" statement\n' - 'was illegal in the "finally" clause due to a problem with the\n' - 'implementation.\n', - 'types': 'The standard type hierarchy\n' - '***************************\n' - '\n' - 'Below is a list of the types that are built into Python. ' - 'Extension\n' - 'modules (written in C, Java, or other languages, depending on the\n' - 'implementation) can define additional types. Future versions of\n' - 'Python may add types to the type hierarchy (e.g., rational ' - 'numbers,\n' - 'efficiently stored arrays of integers, etc.), although such ' - 'additions\n' - 'will often be provided via the standard library instead.\n' - '\n' - 'Some of the type descriptions below contain a paragraph listing\n' - '‘special attributes.’ These are attributes that provide access to ' - 'the\n' - 'implementation and are not intended for general use. Their ' - 'definition\n' - 'may change in the future.\n' - '\n' - 'None\n' - ' This type has a single value. There is a single object with ' - 'this\n' - ' value. This object is accessed through the built-in name "None". ' - 'It\n' - ' is used to signify the absence of a value in many situations, ' - 'e.g.,\n' - ' it is returned from functions that don’t explicitly return\n' - ' anything. Its truth value is false.\n' - '\n' - 'NotImplemented\n' - ' This type has a single value. There is a single object with ' - 'this\n' - ' value. This object is accessed through the built-in name\n' - ' "NotImplemented". Numeric methods and rich comparison methods\n' - ' should return this value if they do not implement the operation ' - 'for\n' - ' the operands provided. (The interpreter will then try the\n' - ' reflected operation, or some other fallback, depending on the\n' - ' operator.) It should not be evaluated in a boolean context.\n' - '\n' - ' See Implementing the arithmetic operations for more details.\n' - '\n' - ' Changed in version 3.9: Evaluating "NotImplemented" in a ' - 'boolean\n' - ' context is deprecated. While it currently evaluates as true, it\n' - ' will emit a "DeprecationWarning". It will raise a "TypeError" in ' - 'a\n' - ' future version of Python.\n' - '\n' - 'Ellipsis\n' - ' This type has a single value. There is a single object with ' - 'this\n' - ' value. This object is accessed through the literal "..." or the\n' - ' built-in name "Ellipsis". Its truth value is true.\n' - '\n' - '"numbers.Number"\n' - ' These are created by numeric literals and returned as results ' - 'by\n' - ' arithmetic operators and arithmetic built-in functions. 
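Illustrating the `NotImplemented` convention mentioned above with a hypothetical `Metres` class: returning it from a rich comparison lets the interpreter try the reflected operation and fall back to the default behaviour:

```python
class Metres:
    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        if isinstance(other, Metres):
            return self.value == other.value
        return NotImplemented       # let Python try the reflected operation

print(Metres(3) == Metres(3))       # True
print(Metres(3) == 3)               # False: both sides returned NotImplemented,
                                    # so the default identity comparison is used
```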
' - 'Numeric\n' - ' objects are immutable; once created their value never changes.\n' - ' Python numbers are of course strongly related to mathematical\n' - ' numbers, but subject to the limitations of numerical ' - 'representation\n' - ' in computers.\n' - '\n' - ' The string representations of the numeric classes, computed by\n' - ' "__repr__()" and "__str__()", have the following properties:\n' - '\n' - ' * They are valid numeric literals which, when passed to their ' - 'class\n' - ' constructor, produce an object having the value of the ' - 'original\n' - ' numeric.\n' - '\n' - ' * The representation is in base 10, when possible.\n' - '\n' - ' * Leading zeros, possibly excepting a single zero before a ' - 'decimal\n' - ' point, are not shown.\n' - '\n' - ' * Trailing zeros, possibly excepting a single zero after a ' - 'decimal\n' - ' point, are not shown.\n' - '\n' - ' * A sign is shown only when the number is negative.\n' - '\n' - ' Python distinguishes between integers, floating point numbers, ' - 'and\n' - ' complex numbers:\n' - '\n' - ' "numbers.Integral"\n' - ' These represent elements from the mathematical set of ' - 'integers\n' - ' (positive and negative).\n' - '\n' - ' There are two types of integers:\n' - '\n' - ' Integers ("int")\n' - ' These represent numbers in an unlimited range, subject to\n' - ' available (virtual) memory only. For the purpose of ' - 'shift\n' - ' and mask operations, a binary representation is assumed, ' - 'and\n' - ' negative numbers are represented in a variant of 2’s\n' - ' complement which gives the illusion of an infinite string ' - 'of\n' - ' sign bits extending to the left.\n' - '\n' - ' Booleans ("bool")\n' - ' These represent the truth values False and True. The two\n' - ' objects representing the values "False" and "True" are ' - 'the\n' - ' only Boolean objects. The Boolean type is a subtype of ' - 'the\n' - ' integer type, and Boolean values behave like the values 0 ' - 'and\n' - ' 1, respectively, in almost all contexts, the exception ' - 'being\n' - ' that when converted to a string, the strings ""False"" or\n' - ' ""True"" are returned, respectively.\n' - '\n' - ' The rules for integer representation are intended to give ' - 'the\n' - ' most meaningful interpretation of shift and mask operations\n' - ' involving negative integers.\n' - '\n' - ' "numbers.Real" ("float")\n' - ' These represent machine-level double precision floating ' - 'point\n' - ' numbers. You are at the mercy of the underlying machine\n' - ' architecture (and C or Java implementation) for the accepted\n' - ' range and handling of overflow. Python does not support ' - 'single-\n' - ' precision floating point numbers; the savings in processor ' - 'and\n' - ' memory usage that are usually the reason for using these are\n' - ' dwarfed by the overhead of using objects in Python, so there ' - 'is\n' - ' no reason to complicate the language with two kinds of ' - 'floating\n' - ' point numbers.\n' - '\n' - ' "numbers.Complex" ("complex")\n' - ' These represent complex numbers as a pair of machine-level\n' - ' double precision floating point numbers. The same caveats ' - 'apply\n' - ' as for floating point numbers. The real and imaginary parts ' - 'of a\n' - ' complex number "z" can be retrieved through the read-only\n' - ' attributes "z.real" and "z.imag".\n' - '\n' - 'Sequences\n' - ' These represent finite ordered sets indexed by non-negative\n' - ' numbers. The built-in function "len()" returns the number of ' - 'items\n' - ' of a sequence. 
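A few one-liners illustrating the numeric types described above (bool as an int subtype, unlimited-range integers, and the read-only parts of a complex number):

```python
print(issubclass(bool, int), True + True)   # True 2   bool is an int subtype
print(str(True))                            # 'True'   the string-conversion exception
print(2 ** 100)                             # ints are limited only by memory
z = 3 + 4j
print(z.real, z.imag)                       # 3.0 4.0  read-only attributes
```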
When the length of a sequence is *n*, the index ' - 'set\n' - ' contains the numbers 0, 1, …, *n*-1. Item *i* of sequence *a* ' - 'is\n' - ' selected by "a[i]".\n' - '\n' - ' Sequences also support slicing: "a[i:j]" selects all items with\n' - ' index *k* such that *i* "<=" *k* "<" *j*. When used as an\n' - ' expression, a slice is a sequence of the same type. This ' - 'implies\n' - ' that the index set is renumbered so that it starts at 0.\n' - '\n' - ' Some sequences also support “extended slicing” with a third ' - '“step”\n' - ' parameter: "a[i:j:k]" selects all items of *a* with index *x* ' - 'where\n' - ' "x = i + n*k", *n* ">=" "0" and *i* "<=" *x* "<" *j*.\n' - '\n' - ' Sequences are distinguished according to their mutability:\n' - '\n' - ' Immutable sequences\n' - ' An object of an immutable sequence type cannot change once it ' - 'is\n' - ' created. (If the object contains references to other ' - 'objects,\n' - ' these other objects may be mutable and may be changed; ' - 'however,\n' - ' the collection of objects directly referenced by an ' - 'immutable\n' - ' object cannot change.)\n' - '\n' - ' The following types are immutable sequences:\n' - '\n' - ' Strings\n' - ' A string is a sequence of values that represent Unicode ' - 'code\n' - ' points. All the code points in the range "U+0000 - ' - 'U+10FFFF"\n' - ' can be represented in a string. Python doesn’t have a ' - '*char*\n' - ' type; instead, every code point in the string is ' - 'represented\n' - ' as a string object with length "1". The built-in ' - 'function\n' - ' "ord()" converts a code point from its string form to an\n' - ' integer in the range "0 - 10FFFF"; "chr()" converts an\n' - ' integer in the range "0 - 10FFFF" to the corresponding ' - 'length\n' - ' "1" string object. "str.encode()" can be used to convert ' - 'a\n' - ' "str" to "bytes" using the given text encoding, and\n' - ' "bytes.decode()" can be used to achieve the opposite.\n' - '\n' - ' Tuples\n' - ' The items of a tuple are arbitrary Python objects. Tuples ' - 'of\n' - ' two or more items are formed by comma-separated lists of\n' - ' expressions. A tuple of one item (a ‘singleton’) can be\n' - ' formed by affixing a comma to an expression (an expression ' - 'by\n' - ' itself does not create a tuple, since parentheses must be\n' - ' usable for grouping of expressions). An empty tuple can ' - 'be\n' - ' formed by an empty pair of parentheses.\n' - '\n' - ' Bytes\n' - ' A bytes object is an immutable array. The items are ' - '8-bit\n' - ' bytes, represented by integers in the range 0 <= x < 256.\n' - ' Bytes literals (like "b\'abc\'") and the built-in ' - '"bytes()"\n' - ' constructor can be used to create bytes objects. Also, ' - 'bytes\n' - ' objects can be decoded to strings via the "decode()" ' - 'method.\n' - '\n' - ' Mutable sequences\n' - ' Mutable sequences can be changed after they are created. ' - 'The\n' - ' subscription and slicing notations can be used as the target ' - 'of\n' - ' assignment and "del" (delete) statements.\n' - '\n' - ' There are currently two intrinsic mutable sequence types:\n' - '\n' - ' Lists\n' - ' The items of a list are arbitrary Python objects. Lists ' - 'are\n' - ' formed by placing a comma-separated list of expressions ' - 'in\n' - ' square brackets. (Note that there are no special cases ' - 'needed\n' - ' to form lists of length 0 or 1.)\n' - '\n' - ' Byte Arrays\n' - ' A bytearray object is a mutable array. They are created ' - 'by\n' - ' the built-in "bytearray()" constructor. 
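A compact sketch of the sequence behaviour covered above: indexing, plain and extended slicing, `ord()`/`chr()`, and the trailing comma that forms a one-item tuple:

```python
a = ['p', 'y', 't', 'h', 'o', 'n']
print(len(a), a[2])           # 6 t      indices run from 0 to n-1
print(a[1:4])                 # ['y', 't', 'h']   items with 1 <= k < 4
print(a[0:6:2])               # ['p', 't', 'o']   extended slicing with a step
print(ord('A'), chr(65))      # 65 A
single = (42,)                # a one-item tuple needs the trailing comma
print(type(single).__name__)  # tuple
```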
Aside from being\n' - ' mutable (and hence unhashable), byte arrays otherwise ' - 'provide\n' - ' the same interface and functionality as immutable "bytes"\n' - ' objects.\n' - '\n' - ' The extension module "array" provides an additional example ' - 'of a\n' - ' mutable sequence type, as does the "collections" module.\n' - '\n' - 'Set types\n' - ' These represent unordered, finite sets of unique, immutable\n' - ' objects. As such, they cannot be indexed by any subscript. ' - 'However,\n' - ' they can be iterated over, and the built-in function "len()"\n' - ' returns the number of items in a set. Common uses for sets are ' - 'fast\n' - ' membership testing, removing duplicates from a sequence, and\n' - ' computing mathematical operations such as intersection, union,\n' - ' difference, and symmetric difference.\n' - '\n' - ' For set elements, the same immutability rules apply as for\n' - ' dictionary keys. Note that numeric types obey the normal rules ' - 'for\n' - ' numeric comparison: if two numbers compare equal (e.g., "1" and\n' - ' "1.0"), only one of them can be contained in a set.\n' - '\n' - ' There are currently two intrinsic set types:\n' - '\n' - ' Sets\n' - ' These represent a mutable set. They are created by the ' - 'built-in\n' - ' "set()" constructor and can be modified afterwards by ' - 'several\n' - ' methods, such as "add()".\n' - '\n' - ' Frozen sets\n' - ' These represent an immutable set. They are created by the\n' - ' built-in "frozenset()" constructor. As a frozenset is ' - 'immutable\n' - ' and *hashable*, it can be used again as an element of ' - 'another\n' - ' set, or as a dictionary key.\n' - '\n' - 'Mappings\n' - ' These represent finite sets of objects indexed by arbitrary ' - 'index\n' - ' sets. The subscript notation "a[k]" selects the item indexed by ' - '"k"\n' - ' from the mapping "a"; this can be used in expressions and as ' - 'the\n' - ' target of assignments or "del" statements. The built-in ' - 'function\n' - ' "len()" returns the number of items in a mapping.\n' - '\n' - ' There is currently a single intrinsic mapping type:\n' - '\n' - ' Dictionaries\n' - ' These represent finite sets of objects indexed by nearly\n' - ' arbitrary values. The only types of values not acceptable ' - 'as\n' - ' keys are values containing lists or dictionaries or other\n' - ' mutable types that are compared by value rather than by ' - 'object\n' - ' identity, the reason being that the efficient implementation ' - 'of\n' - ' dictionaries requires a key’s hash value to remain constant.\n' - ' Numeric types used for keys obey the normal rules for ' - 'numeric\n' - ' comparison: if two numbers compare equal (e.g., "1" and ' - '"1.0")\n' - ' then they can be used interchangeably to index the same\n' - ' dictionary entry.\n' - '\n' - ' Dictionaries preserve insertion order, meaning that keys will ' - 'be\n' - ' produced in the same order they were added sequentially over ' - 'the\n' - ' dictionary. 
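A short sketch of the set and mapping rules described above: numeric equality across types, frozensets as keys, and dictionary insertion order (including key replacement keeping its place):

```python
print({1, 1.0})                      # {1}   equal numbers count as the same element
d = {frozenset({1, 2}): 'pair'}      # frozensets are hashable, so usable as keys
d['b'] = 2
d['a'] = 1
print(list(d))                       # insertion order is preserved
d['b'] = 20                          # replacing an existing key keeps its position
print(list(d))
```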
Replacing an existing key does not change the ' - 'order,\n' - ' however removing a key and re-inserting it will add it to ' - 'the\n' - ' end instead of keeping its old place.\n' - '\n' - ' Dictionaries are mutable; they can be created by the "{...}"\n' - ' notation (see section Dictionary displays).\n' - '\n' - ' The extension modules "dbm.ndbm" and "dbm.gnu" provide\n' - ' additional examples of mapping types, as does the ' - '"collections"\n' - ' module.\n' - '\n' - ' Changed in version 3.7: Dictionaries did not preserve ' - 'insertion\n' - ' order in versions of Python before 3.6. In CPython 3.6,\n' - ' insertion order was preserved, but it was considered an\n' - ' implementation detail at that time rather than a language\n' - ' guarantee.\n' - '\n' - 'Callable types\n' - ' These are the types to which the function call operation (see\n' - ' section Calls) can be applied:\n' - '\n' - ' User-defined functions\n' - ' A user-defined function object is created by a function\n' - ' definition (see section Function definitions). It should be\n' - ' called with an argument list containing the same number of ' - 'items\n' - ' as the function’s formal parameter list.\n' - '\n' - ' Special attributes:\n' - '\n' - ' ' - '+---------------------------+---------------------------------+-------------+\n' - ' | Attribute | Meaning ' - '| |\n' - ' ' - '|===========================|=================================|=============|\n' - ' | "__doc__" | The function’s documentation ' - '| Writable |\n' - ' | | string, or "None" if ' - '| |\n' - ' | | unavailable; not inherited by ' - '| |\n' - ' | | subclasses. ' - '| |\n' - ' ' - '+---------------------------+---------------------------------+-------------+\n' - ' | "__name__" | The function’s name. ' - '| Writable |\n' - ' ' - '+---------------------------+---------------------------------+-------------+\n' - ' | "__qualname__" | The function’s *qualified ' - '| Writable |\n' - ' | | name*. New in version 3.3. ' - '| |\n' - ' ' - '+---------------------------+---------------------------------+-------------+\n' - ' | "__module__" | The name of the module the ' - '| Writable |\n' - ' | | function was defined in, or ' - '| |\n' - ' | | "None" if unavailable. ' - '| |\n' - ' ' - '+---------------------------+---------------------------------+-------------+\n' - ' | "__defaults__" | A tuple containing default ' - '| Writable |\n' - ' | | argument values for those ' - '| |\n' - ' | | arguments that have defaults, ' - '| |\n' - ' | | or "None" if no arguments have ' - '| |\n' - ' | | a default value. ' - '| |\n' - ' ' - '+---------------------------+---------------------------------+-------------+\n' - ' | "__code__" | The code object representing ' - '| Writable |\n' - ' | | the compiled function body. ' - '| |\n' - ' ' - '+---------------------------+---------------------------------+-------------+\n' - ' | "__globals__" | A reference to the dictionary ' - '| Read-only |\n' - ' | | that holds the function’s ' - '| |\n' - ' | | global variables — the global ' - '| |\n' - ' | | namespace of the module in ' - '| |\n' - ' | | which the function was defined. ' - '| |\n' - ' ' - '+---------------------------+---------------------------------+-------------+\n' - ' | "__dict__" | The namespace supporting ' - '| Writable |\n' - ' | | arbitrary function attributes. 
' - '| |\n' - ' ' - '+---------------------------+---------------------------------+-------------+\n' - ' | "__closure__" | "None" or a tuple of cells that ' - '| Read-only |\n' - ' | | contain bindings for the ' - '| |\n' - ' | | function’s free variables. See ' - '| |\n' - ' | | below for information on the ' - '| |\n' - ' | | "cell_contents" attribute. ' - '| |\n' - ' ' - '+---------------------------+---------------------------------+-------------+\n' - ' | "__annotations__" | A dict containing annotations ' - '| Writable |\n' - ' | | of parameters. The keys of the ' - '| |\n' - ' | | dict are the parameter names, ' - '| |\n' - ' | | and "\'return\'" for the ' - 'return | |\n' - ' | | annotation, if provided. For ' - '| |\n' - ' | | more information on working ' - '| |\n' - ' | | with this attribute, see ' - '| |\n' - ' | | Annotations Best Practices. ' - '| |\n' - ' ' - '+---------------------------+---------------------------------+-------------+\n' - ' | "__kwdefaults__" | A dict containing defaults for ' - '| Writable |\n' - ' | | keyword-only parameters. ' - '| |\n' - ' ' - '+---------------------------+---------------------------------+-------------+\n' - '\n' - ' Most of the attributes labelled “Writable” check the type of ' - 'the\n' - ' assigned value.\n' - '\n' - ' Function objects also support getting and setting arbitrary\n' - ' attributes, which can be used, for example, to attach ' - 'metadata\n' - ' to functions. Regular attribute dot-notation is used to get ' - 'and\n' - ' set such attributes. *Note that the current implementation ' - 'only\n' - ' supports function attributes on user-defined functions. ' - 'Function\n' - ' attributes on built-in functions may be supported in the\n' - ' future.*\n' - '\n' - ' A cell object has the attribute "cell_contents". This can be\n' - ' used to get the value of the cell, as well as set the value.\n' - '\n' - ' Additional information about a function’s definition can be\n' - ' retrieved from its code object; see the description of ' - 'internal\n' - ' types below. The "cell" type can be accessed in the "types"\n' - ' module.\n' - '\n' - ' Instance methods\n' - ' An instance method object combines a class, a class instance ' - 'and\n' - ' any callable object (normally a user-defined function).\n' - '\n' - ' Special read-only attributes: "__self__" is the class ' - 'instance\n' - ' object, "__func__" is the function object; "__doc__" is the\n' - ' method’s documentation (same as "__func__.__doc__"); ' - '"__name__"\n' - ' is the method name (same as "__func__.__name__"); ' - '"__module__"\n' - ' is the name of the module the method was defined in, or ' - '"None"\n' - ' if unavailable.\n' - '\n' - ' Methods also support accessing (but not setting) the ' - 'arbitrary\n' - ' function attributes on the underlying function object.\n' - '\n' - ' User-defined method objects may be created when getting an\n' - ' attribute of a class (perhaps via an instance of that class), ' - 'if\n' - ' that attribute is a user-defined function object or a class\n' - ' method object.\n' - '\n' - ' When an instance method object is created by retrieving a ' - 'user-\n' - ' defined function object from a class via one of its ' - 'instances,\n' - ' its "__self__" attribute is the instance, and the method ' - 'object\n' - ' is said to be bound. 
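A small sketch of the function attributes tabulated above and of bound-method objects; `greet`, `C` and `x` are illustrative names only:

```python
def greet(name, greeting='hello', *, punct='!'):
    """Return a greeting."""
    return f'{greeting}, {name}{punct}'

print(greet.__name__, greet.__defaults__, greet.__kwdefaults__)
# greet ('hello',) {'punct': '!'}
greet.category = 'demo'                  # arbitrary attributes land in __dict__
print(greet.__dict__)                    # {'category': 'demo'}

class C:
    def f(self):
        return 'hi'

x = C()
m = x.f                                  # a bound method object
print(m.__self__ is x, m.__func__ is C.__dict__['f'])   # True True
```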
The new method’s "__func__" attribute ' - 'is\n' - ' the original function object.\n' - '\n' - ' When an instance method object is created by retrieving a ' - 'class\n' - ' method object from a class or instance, its "__self__" ' - 'attribute\n' - ' is the class itself, and its "__func__" attribute is the\n' - ' function object underlying the class method.\n' - '\n' - ' When an instance method object is called, the underlying\n' - ' function ("__func__") is called, inserting the class ' - 'instance\n' - ' ("__self__") in front of the argument list. For instance, ' - 'when\n' - ' "C" is a class which contains a definition for a function ' - '"f()",\n' - ' and "x" is an instance of "C", calling "x.f(1)" is equivalent ' - 'to\n' - ' calling "C.f(x, 1)".\n' - '\n' - ' When an instance method object is derived from a class ' - 'method\n' - ' object, the “class instance” stored in "__self__" will ' - 'actually\n' - ' be the class itself, so that calling either "x.f(1)" or ' - '"C.f(1)"\n' - ' is equivalent to calling "f(C,1)" where "f" is the ' - 'underlying\n' - ' function.\n' - '\n' - ' Note that the transformation from function object to ' - 'instance\n' - ' method object happens each time the attribute is retrieved ' - 'from\n' - ' the instance. In some cases, a fruitful optimization is to\n' - ' assign the attribute to a local variable and call that local\n' - ' variable. Also notice that this transformation only happens ' - 'for\n' - ' user-defined functions; other callable objects (and all non-\n' - ' callable objects) are retrieved without transformation. It ' - 'is\n' - ' also important to note that user-defined functions which are\n' - ' attributes of a class instance are not converted to bound\n' - ' methods; this *only* happens when the function is an ' - 'attribute\n' - ' of the class.\n' - '\n' - ' Generator functions\n' - ' A function or method which uses the "yield" statement (see\n' - ' section The yield statement) is called a *generator ' - 'function*.\n' - ' Such a function, when called, always returns an iterator ' - 'object\n' - ' which can be used to execute the body of the function: ' - 'calling\n' - ' the iterator’s "iterator.__next__()" method will cause the\n' - ' function to execute until it provides a value using the ' - '"yield"\n' - ' statement. When the function executes a "return" statement ' - 'or\n' - ' falls off the end, a "StopIteration" exception is raised and ' - 'the\n' - ' iterator will have reached the end of the set of values to ' - 'be\n' - ' returned.\n' - '\n' - ' Coroutine functions\n' - ' A function or method which is defined using "async def" is\n' - ' called a *coroutine function*. Such a function, when ' - 'called,\n' - ' returns a *coroutine* object. It may contain "await"\n' - ' expressions, as well as "async with" and "async for" ' - 'statements.\n' - ' See also the Coroutine Objects section.\n' - '\n' - ' Asynchronous generator functions\n' - ' A function or method which is defined using "async def" and\n' - ' which uses the "yield" statement is called a *asynchronous\n' - ' generator function*. 
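A minimal generator-function sketch matching the description above: calling it returns an iterator, and falling off the end raises `StopIteration`; `countdown` is just an example name:

```python
def countdown(n):
    while n:
        yield n
        n -= 1

it = countdown(2)             # calling a generator function returns an iterator
print(next(it), next(it))     # 2 1
try:
    next(it)
except StopIteration:
    print('exhausted')        # falling off the end raises StopIteration
```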
Such a function, when called, returns ' - 'an\n' - ' asynchronous iterator object which can be used in an "async ' - 'for"\n' - ' statement to execute the body of the function.\n' - '\n' - ' Calling the asynchronous iterator’s "aiterator.__anext__()"\n' - ' method will return an *awaitable* which when awaited will\n' - ' execute until it provides a value using the "yield" ' - 'expression.\n' - ' When the function executes an empty "return" statement or ' - 'falls\n' - ' off the end, a "StopAsyncIteration" exception is raised and ' - 'the\n' - ' asynchronous iterator will have reached the end of the set ' - 'of\n' - ' values to be yielded.\n' - '\n' - ' Built-in functions\n' - ' A built-in function object is a wrapper around a C function.\n' - ' Examples of built-in functions are "len()" and "math.sin()"\n' - ' ("math" is a standard built-in module). The number and type ' - 'of\n' - ' the arguments are determined by the C function. Special ' - 'read-\n' - ' only attributes: "__doc__" is the function’s documentation\n' - ' string, or "None" if unavailable; "__name__" is the ' - 'function’s\n' - ' name; "__self__" is set to "None" (but see the next item);\n' - ' "__module__" is the name of the module the function was ' - 'defined\n' - ' in or "None" if unavailable.\n' - '\n' - ' Built-in methods\n' - ' This is really a different disguise of a built-in function, ' - 'this\n' - ' time containing an object passed to the C function as an\n' - ' implicit extra argument. An example of a built-in method is\n' - ' "alist.append()", assuming *alist* is a list object. In this\n' - ' case, the special read-only attribute "__self__" is set to ' - 'the\n' - ' object denoted by *alist*.\n' - '\n' - ' Classes\n' - ' Classes are callable. These objects normally act as ' - 'factories\n' - ' for new instances of themselves, but variations are possible ' - 'for\n' - ' class types that override "__new__()". The arguments of the\n' - ' call are passed to "__new__()" and, in the typical case, to\n' - ' "__init__()" to initialize the new instance.\n' - '\n' - ' Class Instances\n' - ' Instances of arbitrary classes can be made callable by ' - 'defining\n' - ' a "__call__()" method in their class.\n' - '\n' - 'Modules\n' - ' Modules are a basic organizational unit of Python code, and are\n' - ' created by the import system as invoked either by the "import"\n' - ' statement, or by calling functions such as\n' - ' "importlib.import_module()" and built-in "__import__()". A ' - 'module\n' - ' object has a namespace implemented by a dictionary object (this ' - 'is\n' - ' the dictionary referenced by the "__globals__" attribute of\n' - ' functions defined in the module). Attribute references are\n' - ' translated to lookups in this dictionary, e.g., "m.x" is ' - 'equivalent\n' - ' to "m.__dict__["x"]". A module object does not contain the code\n' - ' object used to initialize the module (since it isn’t needed ' - 'once\n' - ' the initialization is done).\n' - '\n' - ' Attribute assignment updates the module’s namespace dictionary,\n' - ' e.g., "m.x = 1" is equivalent to "m.__dict__["x"] = 1".\n' - '\n' - ' Predefined (writable) attributes:\n' - '\n' - ' "__name__"\n' - ' The module’s name.\n' - '\n' - ' "__doc__"\n' - ' The module’s documentation string, or "None" if ' - 'unavailable.\n' - '\n' - ' "__file__"\n' - ' The pathname of the file from which the module was loaded, ' - 'if\n' - ' it was loaded from a file. 
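A quick sketch of two callable types mentioned above: a built-in method whose `__self__` is the object it was retrieved from, and a class instance made callable by `__call__()`; `Adder` is a hypothetical example class:

```python
alist = []
print(alist.append.__self__ is alist)    # True: a built-in method is bound to alist

class Adder:
    def __init__(self, n):
        self.n = n

    def __call__(self, x):               # a __call__ method makes instances callable
        return x + self.n

add3 = Adder(3)
print(add3(10))                          # 13
```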
The "__file__" attribute may ' - 'be\n' - ' missing for certain types of modules, such as C modules ' - 'that\n' - ' are statically linked into the interpreter. For ' - 'extension\n' - ' modules loaded dynamically from a shared library, it’s ' - 'the\n' - ' pathname of the shared library file.\n' - '\n' - ' "__annotations__"\n' - ' A dictionary containing *variable annotations* collected\n' - ' during module body execution. For best practices on ' - 'working\n' - ' with "__annotations__", please see Annotations Best\n' - ' Practices.\n' - '\n' - ' Special read-only attribute: "__dict__" is the module’s ' - 'namespace\n' - ' as a dictionary object.\n' - '\n' - ' **CPython implementation detail:** Because of the way CPython\n' - ' clears module dictionaries, the module dictionary will be ' - 'cleared\n' - ' when the module falls out of scope even if the dictionary still ' - 'has\n' - ' live references. To avoid this, copy the dictionary or keep ' - 'the\n' - ' module around while using its dictionary directly.\n' - '\n' - 'Custom classes\n' - ' Custom class types are typically created by class definitions ' - '(see\n' - ' section Class definitions). A class has a namespace implemented ' - 'by\n' - ' a dictionary object. Class attribute references are translated ' - 'to\n' - ' lookups in this dictionary, e.g., "C.x" is translated to\n' - ' "C.__dict__["x"]" (although there are a number of hooks which ' - 'allow\n' - ' for other means of locating attributes). When the attribute name ' - 'is\n' - ' not found there, the attribute search continues in the base\n' - ' classes. This search of the base classes uses the C3 method\n' - ' resolution order which behaves correctly even in the presence ' - 'of\n' - ' ‘diamond’ inheritance structures where there are multiple\n' - ' inheritance paths leading back to a common ancestor. Additional\n' - ' details on the C3 MRO used by Python can be found in the\n' - ' documentation accompanying the 2.3 release at\n' - ' https://www.python.org/download/releases/2.3/mro/.\n' - '\n' - ' When a class attribute reference (for class "C", say) would ' - 'yield a\n' - ' class method object, it is transformed into an instance method\n' - ' object whose "__self__" attribute is "C". When it would yield ' - 'a\n' - ' static method object, it is transformed into the object wrapped ' - 'by\n' - ' the static method object. See section Implementing Descriptors ' - 'for\n' - ' another way in which attributes retrieved from a class may ' - 'differ\n' - ' from those actually contained in its "__dict__".\n' - '\n' - ' Class attribute assignments update the class’s dictionary, ' - 'never\n' - ' the dictionary of a base class.\n' - '\n' - ' A class object can be called (see above) to yield a class ' - 'instance\n' - ' (see below).\n' - '\n' - ' Special attributes:\n' - '\n' - ' "__name__"\n' - ' The class name.\n' - '\n' - ' "__module__"\n' - ' The name of the module in which the class was defined.\n' - '\n' - ' "__dict__"\n' - ' The dictionary containing the class’s namespace.\n' - '\n' - ' "__bases__"\n' - ' A tuple containing the base classes, in the order of ' - 'their\n' - ' occurrence in the base class list.\n' - '\n' - ' "__doc__"\n' - ' The class’s documentation string, or "None" if undefined.\n' - '\n' - ' "__annotations__"\n' - ' A dictionary containing *variable annotations* collected\n' - ' during class body execution. 
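An illustrative sketch of the class special attributes and lookup order described above, using hypothetical `Base`/`Child` classes (`__mro__` is shown as a convenient way to see the C3 resolution order):

```python
class Base:
    """Base docstring."""

class Child(Base):
    x = 1

print(Child.__name__, Child.__bases__)              # Child (<class '__main__.Base'>,)
print('x' in Child.__dict__, 'x' in Base.__dict__)  # True False
print([c.__name__ for c in Child.__mro__])          # ['Child', 'Base', 'object']
print(Child.__doc__, Base.__doc__)                  # None Base docstring.
```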
For best practices on ' - 'working\n' - ' with "__annotations__", please see Annotations Best\n' - ' Practices.\n' - '\n' - 'Class instances\n' - ' A class instance is created by calling a class object (see ' - 'above).\n' - ' A class instance has a namespace implemented as a dictionary ' - 'which\n' - ' is the first place in which attribute references are searched.\n' - ' When an attribute is not found there, and the instance’s class ' - 'has\n' - ' an attribute by that name, the search continues with the class\n' - ' attributes. If a class attribute is found that is a ' - 'user-defined\n' - ' function object, it is transformed into an instance method ' - 'object\n' - ' whose "__self__" attribute is the instance. Static method and\n' - ' class method objects are also transformed; see above under\n' - ' “Classes”. See section Implementing Descriptors for another way ' - 'in\n' - ' which attributes of a class retrieved via its instances may ' - 'differ\n' - ' from the objects actually stored in the class’s "__dict__". If ' - 'no\n' - ' class attribute is found, and the object’s class has a\n' - ' "__getattr__()" method, that is called to satisfy the lookup.\n' - '\n' - ' Attribute assignments and deletions update the instance’s\n' - ' dictionary, never a class’s dictionary. If the class has a\n' - ' "__setattr__()" or "__delattr__()" method, this is called ' - 'instead\n' - ' of updating the instance dictionary directly.\n' - '\n' - ' Class instances can pretend to be numbers, sequences, or ' - 'mappings\n' - ' if they have methods with certain special names. See section\n' - ' Special method names.\n' - '\n' - ' Special attributes: "__dict__" is the attribute dictionary;\n' - ' "__class__" is the instance’s class.\n' - '\n' - 'I/O objects (also known as file objects)\n' - ' A *file object* represents an open file. Various shortcuts are\n' - ' available to create file objects: the "open()" built-in ' - 'function,\n' - ' and also "os.popen()", "os.fdopen()", and the "makefile()" ' - 'method\n' - ' of socket objects (and perhaps by other functions or methods\n' - ' provided by extension modules).\n' - '\n' - ' The objects "sys.stdin", "sys.stdout" and "sys.stderr" are\n' - ' initialized to file objects corresponding to the interpreter’s\n' - ' standard input, output and error streams; they are all open in ' - 'text\n' - ' mode and therefore follow the interface defined by the\n' - ' "io.TextIOBase" abstract class.\n' - '\n' - 'Internal types\n' - ' A few types used internally by the interpreter are exposed to ' - 'the\n' - ' user. Their definitions may change with future versions of the\n' - ' interpreter, but they are mentioned here for completeness.\n' - '\n' - ' Code objects\n' - ' Code objects represent *byte-compiled* executable Python ' - 'code,\n' - ' or *bytecode*. The difference between a code object and a\n' - ' function object is that the function object contains an ' - 'explicit\n' - ' reference to the function’s globals (the module in which it ' - 'was\n' - ' defined), while a code object contains no context; also the\n' - ' default argument values are stored in the function object, ' - 'not\n' - ' in the code object (because they represent values calculated ' - 'at\n' - ' run-time). 
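A small sketch of instance attribute lookup as described above: the instance `__dict__` first, then the class, with `__getattr__()` as the final fallback; `Config` and its attributes are illustrative names:

```python
class Config:
    default_timeout = 30                    # class attribute

    def __getattr__(self, name):            # called only if normal lookup fails
        return f'<no such setting: {name}>'

c = Config()
c.retries = 3                               # stored in the instance __dict__
print(c.retries, c.default_timeout)         # 3 30
print(c.proxy)                              # <no such setting: proxy>
print(c.__dict__, c.__class__.__name__)     # {'retries': 3} Config
```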
Unlike function objects, code objects are ' - 'immutable\n' - ' and contain no references (directly or indirectly) to ' - 'mutable\n' - ' objects.\n' - '\n' - ' Special read-only attributes: "co_name" gives the function ' - 'name;\n' - ' "co_argcount" is the total number of positional arguments\n' - ' (including positional-only arguments and arguments with ' - 'default\n' - ' values); "co_posonlyargcount" is the number of ' - 'positional-only\n' - ' arguments (including arguments with default values);\n' - ' "co_kwonlyargcount" is the number of keyword-only arguments\n' - ' (including arguments with default values); "co_nlocals" is ' - 'the\n' - ' number of local variables used by the function (including\n' - ' arguments); "co_varnames" is a tuple containing the names of ' - 'the\n' - ' local variables (starting with the argument names);\n' - ' "co_cellvars" is a tuple containing the names of local ' - 'variables\n' - ' that are referenced by nested functions; "co_freevars" is a\n' - ' tuple containing the names of free variables; "co_code" is a\n' - ' string representing the sequence of bytecode instructions;\n' - ' "co_consts" is a tuple containing the literals used by the\n' - ' bytecode; "co_names" is a tuple containing the names used by ' - 'the\n' - ' bytecode; "co_filename" is the filename from which the code ' - 'was\n' - ' compiled; "co_firstlineno" is the first line number of the\n' - ' function; "co_lnotab" is a string encoding the mapping from\n' - ' bytecode offsets to line numbers (for details see the source\n' - ' code of the interpreter); "co_stacksize" is the required ' - 'stack\n' - ' size; "co_flags" is an integer encoding a number of flags ' - 'for\n' - ' the interpreter.\n' - '\n' - ' The following flag bits are defined for "co_flags": bit ' - '"0x04"\n' - ' is set if the function uses the "*arguments" syntax to accept ' - 'an\n' - ' arbitrary number of positional arguments; bit "0x08" is set ' - 'if\n' - ' the function uses the "**keywords" syntax to accept ' - 'arbitrary\n' - ' keyword arguments; bit "0x20" is set if the function is a\n' - ' generator.\n' - '\n' - ' Future feature declarations ("from __future__ import ' - 'division")\n' - ' also use bits in "co_flags" to indicate whether a code ' - 'object\n' - ' was compiled with a particular feature enabled: bit "0x2000" ' - 'is\n' - ' set if the function was compiled with future division ' - 'enabled;\n' - ' bits "0x10" and "0x1000" were used in earlier versions of\n' - ' Python.\n' - '\n' - ' Other bits in "co_flags" are reserved for internal use.\n' - '\n' - ' If a code object represents a function, the first item in\n' - ' "co_consts" is the documentation string of the function, or\n' - ' "None" if undefined.\n' - '\n' - ' Frame objects\n' - ' Frame objects represent execution frames. 
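A brief sketch poking at the code-object attributes listed above (function name, argument count, local variable names, the docstring in `co_consts[0]`, and the generator flag bit):

```python
def f(a, b=1):
    """Docstring, stored as co_consts[0]."""
    total = a + b
    return total

code = f.__code__
print(code.co_name, code.co_argcount)   # f 2
print(code.co_varnames)                 # ('a', 'b', 'total')
print(code.co_consts[0])                # Docstring, stored as co_consts[0].
print(bool(code.co_flags & 0x20))       # False: not a generator
```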
They may occur in\n' - ' traceback objects (see below), and are also passed to ' - 'registered\n' - ' trace functions.\n' - '\n' - ' Special read-only attributes: "f_back" is to the previous ' - 'stack\n' - ' frame (towards the caller), or "None" if this is the bottom\n' - ' stack frame; "f_code" is the code object being executed in ' - 'this\n' - ' frame; "f_locals" is the dictionary used to look up local\n' - ' variables; "f_globals" is used for global variables;\n' - ' "f_builtins" is used for built-in (intrinsic) names; ' - '"f_lasti"\n' - ' gives the precise instruction (this is an index into the\n' - ' bytecode string of the code object).\n' - '\n' - ' Accessing "f_code" raises an auditing event ' - '"object.__getattr__"\n' - ' with arguments "obj" and ""f_code"".\n' - '\n' - ' Special writable attributes: "f_trace", if not "None", is a\n' - ' function called for various events during code execution ' - '(this\n' - ' is used by the debugger). Normally an event is triggered for\n' - ' each new source line - this can be disabled by setting\n' - ' "f_trace_lines" to "False".\n' - '\n' - ' Implementations *may* allow per-opcode events to be requested ' - 'by\n' - ' setting "f_trace_opcodes" to "True". Note that this may lead ' - 'to\n' - ' undefined interpreter behaviour if exceptions raised by the\n' - ' trace function escape to the function being traced.\n' - '\n' - ' "f_lineno" is the current line number of the frame — writing ' - 'to\n' - ' this from within a trace function jumps to the given line ' - '(only\n' - ' for the bottom-most frame). A debugger can implement a Jump\n' - ' command (aka Set Next Statement) by writing to f_lineno.\n' - '\n' - ' Frame objects support one method:\n' - '\n' - ' frame.clear()\n' - '\n' - ' This method clears all references to local variables held ' - 'by\n' - ' the frame. Also, if the frame belonged to a generator, ' - 'the\n' - ' generator is finalized. This helps break reference ' - 'cycles\n' - ' involving frame objects (for example when catching an\n' - ' exception and storing its traceback for later use).\n' - '\n' - ' "RuntimeError" is raised if the frame is currently ' - 'executing.\n' - '\n' - ' New in version 3.4.\n' - '\n' - ' Traceback objects\n' - ' Traceback objects represent a stack trace of an exception. ' - 'A\n' - ' traceback object is implicitly created when an exception ' - 'occurs,\n' - ' and may also be explicitly created by calling\n' - ' "types.TracebackType".\n' - '\n' - ' For implicitly created tracebacks, when the search for an\n' - ' exception handler unwinds the execution stack, at each ' - 'unwound\n' - ' level a traceback object is inserted in front of the current\n' - ' traceback. When an exception handler is entered, the stack\n' - ' trace is made available to the program. (See section The try\n' - ' statement.) 
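A quick sketch of the frame attributes described above, using `inspect.currentframe()` (a CPython-specific helper that may return `None` on other implementations); the function names here are illustrative:

```python
import inspect

def outer():
    return inner()

def inner():
    frame = inspect.currentframe()   # may be None outside CPython
    caller = frame.f_back            # the frame of outer()
    return frame.f_code.co_name, caller.f_code.co_name

print(outer())   # ('inner', 'outer')
```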
It is accessible as the third item of the tuple\n' - ' returned by "sys.exc_info()", and as the "__traceback__"\n' - ' attribute of the caught exception.\n' - '\n' - ' When the program contains no suitable handler, the stack ' - 'trace\n' - ' is written (nicely formatted) to the standard error stream; ' - 'if\n' - ' the interpreter is interactive, it is also made available to ' - 'the\n' - ' user as "sys.last_traceback".\n' - '\n' - ' For explicitly created tracebacks, it is up to the creator ' - 'of\n' - ' the traceback to determine how the "tb_next" attributes ' - 'should\n' - ' be linked to form a full stack trace.\n' - '\n' - ' Special read-only attributes: "tb_frame" points to the ' - 'execution\n' - ' frame of the current level; "tb_lineno" gives the line ' - 'number\n' - ' where the exception occurred; "tb_lasti" indicates the ' - 'precise\n' - ' instruction. The line number and last instruction in the\n' - ' traceback may differ from the line number of its frame object ' - 'if\n' - ' the exception occurred in a "try" statement with no matching\n' - ' except clause or with a finally clause.\n' - '\n' - ' Accessing "tb_frame" raises an auditing event\n' - ' "object.__getattr__" with arguments "obj" and ""tb_frame"".\n' - '\n' - ' Special writable attribute: "tb_next" is the next level in ' - 'the\n' - ' stack trace (towards the frame where the exception occurred), ' - 'or\n' - ' "None" if there is no next level.\n' - '\n' - ' Changed in version 3.7: Traceback objects can now be ' - 'explicitly\n' - ' instantiated from Python code, and the "tb_next" attribute ' - 'of\n' - ' existing instances can be updated.\n' - '\n' - ' Slice objects\n' - ' Slice objects are used to represent slices for ' - '"__getitem__()"\n' - ' methods. They are also created by the built-in "slice()"\n' - ' function.\n' - '\n' - ' Special read-only attributes: "start" is the lower bound; ' - '"stop"\n' - ' is the upper bound; "step" is the step value; each is "None" ' - 'if\n' - ' omitted. These attributes can have any type.\n' - '\n' - ' Slice objects support one method:\n' - '\n' - ' slice.indices(self, length)\n' - '\n' - ' This method takes a single integer argument *length* and\n' - ' computes information about the slice that the slice ' - 'object\n' - ' would describe if applied to a sequence of *length* ' - 'items.\n' - ' It returns a tuple of three integers; respectively these ' - 'are\n' - ' the *start* and *stop* indices and the *step* or stride\n' - ' length of the slice. Missing or out-of-bounds indices are\n' - ' handled in a manner consistent with regular slices.\n' - '\n' - ' Static method objects\n' - ' Static method objects provide a way of defeating the\n' - ' transformation of function objects to method objects ' - 'described\n' - ' above. A static method object is a wrapper around any other\n' - ' object, usually a user-defined method object. When a static\n' - ' method object is retrieved from a class or a class instance, ' - 'the\n' - ' object actually returned is the wrapped object, which is not\n' - ' subject to any further transformation. Static method objects ' - 'are\n' - ' also callable. Static method objects are created by the ' - 'built-in\n' - ' "staticmethod()" constructor.\n' - '\n' - ' Class method objects\n' - ' A class method object, like a static method object, is a ' - 'wrapper\n' - ' around another object that alters the way in which that ' - 'object\n' - ' is retrieved from classes and class instances. 
The behaviour ' - 'of\n' - ' class method objects upon such retrieval is described above,\n' - ' under “User-defined methods”. Class method objects are ' - 'created\n' - ' by the built-in "classmethod()" constructor.\n', - 'typesfunctions': 'Functions\n' - '*********\n' - '\n' - 'Function objects are created by function definitions. The ' - 'only\n' - 'operation on a function object is to call it: ' - '"func(argument-list)".\n' - '\n' - 'There are really two flavors of function objects: built-in ' - 'functions\n' - 'and user-defined functions. Both support the same ' - 'operation (to call\n' - 'the function), but the implementation is different, hence ' - 'the\n' - 'different object types.\n' - '\n' - 'See Function definitions for more information.\n', - 'typesmapping': 'Mapping Types — "dict"\n' - '**********************\n' - '\n' - 'A *mapping* object maps *hashable* values to arbitrary ' - 'objects.\n' - 'Mappings are mutable objects. There is currently only one ' - 'standard\n' - 'mapping type, the *dictionary*. (For other containers see ' - 'the built-\n' - 'in "list", "set", and "tuple" classes, and the "collections" ' - 'module.)\n' - '\n' - 'A dictionary’s keys are *almost* arbitrary values. Values ' - 'that are\n' - 'not *hashable*, that is, values containing lists, ' - 'dictionaries or\n' - 'other mutable types (that are compared by value rather than ' - 'by object\n' - 'identity) may not be used as keys. Numeric types used for ' - 'keys obey\n' - 'the normal rules for numeric comparison: if two numbers ' - 'compare equal\n' - '(such as "1" and "1.0") then they can be used ' - 'interchangeably to index\n' - 'the same dictionary entry. (Note however, that since ' - 'computers store\n' - 'floating-point numbers as approximations it is usually ' - 'unwise to use\n' - 'them as dictionary keys.)\n' - '\n' - 'Dictionaries can be created by placing a comma-separated ' - 'list of "key:\n' - 'value" pairs within braces, for example: "{\'jack\': 4098, ' - "'sjoerd':\n" - '4127}" or "{4098: \'jack\', 4127: \'sjoerd\'}", or by the ' - '"dict"\n' - 'constructor.\n' - '\n' - 'class dict(**kwarg)\n' - 'class dict(mapping, **kwarg)\n' - 'class dict(iterable, **kwarg)\n' - '\n' - ' Return a new dictionary initialized from an optional ' - 'positional\n' - ' argument and a possibly empty set of keyword arguments.\n' - '\n' - ' Dictionaries can be created by several means:\n' - '\n' - ' * Use a comma-separated list of "key: value" pairs within ' - 'braces:\n' - ' "{\'jack\': 4098, \'sjoerd\': 4127}" or "{4098: ' - "'jack', 4127:\n" - ' \'sjoerd\'}"\n' - '\n' - ' * Use a dict comprehension: "{}", "{x: x ** 2 for x in ' - 'range(10)}"\n' - '\n' - ' * Use the type constructor: "dict()", "dict([(\'foo\', ' - "100), ('bar',\n" - ' 200)])", "dict(foo=100, bar=200)"\n' - '\n' - ' If no positional argument is given, an empty dictionary ' - 'is created.\n' - ' If a positional argument is given and it is a mapping ' - 'object, a\n' - ' dictionary is created with the same key-value pairs as ' - 'the mapping\n' - ' object. Otherwise, the positional argument must be an ' - '*iterable*\n' - ' object. Each item in the iterable must itself be an ' - 'iterable with\n' - ' exactly two objects. The first object of each item ' - 'becomes a key\n' - ' in the new dictionary, and the second object the ' - 'corresponding\n' - ' value. 
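To make the constructor rules above concrete, a small sketch (the variable names are illustrative): a repeated key keeps the last value, and keyword arguments override the positional argument:

```python
pairs = [("a", 1), ("b", 2), ("a", 3)]
print(dict(pairs))          # {'a': 3, 'b': 2} -- last value for 'a' wins
print(dict(pairs, b=20))    # {'a': 3, 'b': 20} -- keyword argument overrides
```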
If a key occurs more than once, the last value ' - 'for that key\n' - ' becomes the corresponding value in the new dictionary.\n' - '\n' - ' If keyword arguments are given, the keyword arguments and ' - 'their\n' - ' values are added to the dictionary created from the ' - 'positional\n' - ' argument. If a key being added is already present, the ' - 'value from\n' - ' the keyword argument replaces the value from the ' - 'positional\n' - ' argument.\n' - '\n' - ' To illustrate, the following examples all return a ' - 'dictionary equal\n' - ' to "{"one": 1, "two": 2, "three": 3}":\n' - '\n' - ' >>> a = dict(one=1, two=2, three=3)\n' - " >>> b = {'one': 1, 'two': 2, 'three': 3}\n" - " >>> c = dict(zip(['one', 'two', 'three'], [1, 2, 3]))\n" - " >>> d = dict([('two', 2), ('one', 1), ('three', 3)])\n" - " >>> e = dict({'three': 3, 'one': 1, 'two': 2})\n" - " >>> f = dict({'one': 1, 'three': 3}, two=2)\n" - ' >>> a == b == c == d == e == f\n' - ' True\n' - '\n' - ' Providing keyword arguments as in the first example only ' - 'works for\n' - ' keys that are valid Python identifiers. Otherwise, any ' - 'valid keys\n' - ' can be used.\n' - '\n' - ' These are the operations that dictionaries support (and ' - 'therefore,\n' - ' custom mapping types should support too):\n' - '\n' - ' list(d)\n' - '\n' - ' Return a list of all the keys used in the dictionary ' - '*d*.\n' - '\n' - ' len(d)\n' - '\n' - ' Return the number of items in the dictionary *d*.\n' - '\n' - ' d[key]\n' - '\n' - ' Return the item of *d* with key *key*. Raises a ' - '"KeyError" if\n' - ' *key* is not in the map.\n' - '\n' - ' If a subclass of dict defines a method "__missing__()" ' - 'and *key*\n' - ' is not present, the "d[key]" operation calls that ' - 'method with\n' - ' the key *key* as argument. The "d[key]" operation ' - 'then returns\n' - ' or raises whatever is returned or raised by the\n' - ' "__missing__(key)" call. No other operations or ' - 'methods invoke\n' - ' "__missing__()". If "__missing__()" is not defined, ' - '"KeyError"\n' - ' is raised. "__missing__()" must be a method; it cannot ' - 'be an\n' - ' instance variable:\n' - '\n' - ' >>> class Counter(dict):\n' - ' ... def __missing__(self, key):\n' - ' ... return 0\n' - ' >>> c = Counter()\n' - " >>> c['red']\n" - ' 0\n' - " >>> c['red'] += 1\n" - " >>> c['red']\n" - ' 1\n' - '\n' - ' The example above shows part of the implementation of\n' - ' "collections.Counter". A different "__missing__" ' - 'method is used\n' - ' by "collections.defaultdict".\n' - '\n' - ' d[key] = value\n' - '\n' - ' Set "d[key]" to *value*.\n' - '\n' - ' del d[key]\n' - '\n' - ' Remove "d[key]" from *d*. Raises a "KeyError" if ' - '*key* is not\n' - ' in the map.\n' - '\n' - ' key in d\n' - '\n' - ' Return "True" if *d* has a key *key*, else "False".\n' - '\n' - ' key not in d\n' - '\n' - ' Equivalent to "not key in d".\n' - '\n' - ' iter(d)\n' - '\n' - ' Return an iterator over the keys of the dictionary. ' - 'This is a\n' - ' shortcut for "iter(d.keys())".\n' - '\n' - ' clear()\n' - '\n' - ' Remove all items from the dictionary.\n' - '\n' - ' copy()\n' - '\n' - ' Return a shallow copy of the dictionary.\n' - '\n' - ' classmethod fromkeys(iterable[, value])\n' - '\n' - ' Create a new dictionary with keys from *iterable* and ' - 'values set\n' - ' to *value*.\n' - '\n' - ' "fromkeys()" is a class method that returns a new ' - 'dictionary.\n' - ' *value* defaults to "None". 
All of the values refer ' - 'to just a\n' - ' single instance, so it generally doesn’t make sense ' - 'for *value*\n' - ' to be a mutable object such as an empty list. To get ' - 'distinct\n' - ' values, use a dict comprehension instead.\n' - '\n' - ' get(key[, default])\n' - '\n' - ' Return the value for *key* if *key* is in the ' - 'dictionary, else\n' - ' *default*. If *default* is not given, it defaults to ' - '"None", so\n' - ' that this method never raises a "KeyError".\n' - '\n' - ' items()\n' - '\n' - ' Return a new view of the dictionary’s items ("(key, ' - 'value)"\n' - ' pairs). See the documentation of view objects.\n' - '\n' - ' keys()\n' - '\n' - ' Return a new view of the dictionary’s keys. See the\n' - ' documentation of view objects.\n' - '\n' - ' pop(key[, default])\n' - '\n' - ' If *key* is in the dictionary, remove it and return ' - 'its value,\n' - ' else return *default*. If *default* is not given and ' - '*key* is\n' - ' not in the dictionary, a "KeyError" is raised.\n' - '\n' - ' popitem()\n' - '\n' - ' Remove and return a "(key, value)" pair from the ' - 'dictionary.\n' - ' Pairs are returned in LIFO (last-in, first-out) ' - 'order.\n' - '\n' - ' "popitem()" is useful to destructively iterate over a\n' - ' dictionary, as often used in set algorithms. If the ' - 'dictionary\n' - ' is empty, calling "popitem()" raises a "KeyError".\n' - '\n' - ' Changed in version 3.7: LIFO order is now guaranteed. ' - 'In prior\n' - ' versions, "popitem()" would return an arbitrary ' - 'key/value pair.\n' - '\n' - ' reversed(d)\n' - '\n' - ' Return a reverse iterator over the keys of the ' - 'dictionary. This\n' - ' is a shortcut for "reversed(d.keys())".\n' - '\n' - ' New in version 3.8.\n' - '\n' - ' setdefault(key[, default])\n' - '\n' - ' If *key* is in the dictionary, return its value. If ' - 'not, insert\n' - ' *key* with a value of *default* and return *default*. ' - '*default*\n' - ' defaults to "None".\n' - '\n' - ' update([other])\n' - '\n' - ' Update the dictionary with the key/value pairs from ' - '*other*,\n' - ' overwriting existing keys. Return "None".\n' - '\n' - ' "update()" accepts either another dictionary object or ' - 'an\n' - ' iterable of key/value pairs (as tuples or other ' - 'iterables of\n' - ' length two). If keyword arguments are specified, the ' - 'dictionary\n' - ' is then updated with those key/value pairs: ' - '"d.update(red=1,\n' - ' blue=2)".\n' - '\n' - ' values()\n' - '\n' - ' Return a new view of the dictionary’s values. See ' - 'the\n' - ' documentation of view objects.\n' - '\n' - ' An equality comparison between one "dict.values()" ' - 'view and\n' - ' another will always return "False". This also applies ' - 'when\n' - ' comparing "dict.values()" to itself:\n' - '\n' - " >>> d = {'a': 1}\n" - ' >>> d.values() == d.values()\n' - ' False\n' - '\n' - ' d | other\n' - '\n' - ' Create a new dictionary with the merged keys and ' - 'values of *d*\n' - ' and *other*, which must both be dictionaries. The ' - 'values of\n' - ' *other* take priority when *d* and *other* share ' - 'keys.\n' - '\n' - ' New in version 3.9.\n' - '\n' - ' d |= other\n' - '\n' - ' Update the dictionary *d* with keys and values from ' - '*other*,\n' - ' which may be either a *mapping* or an *iterable* of ' - 'key/value\n' - ' pairs. 
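A brief sketch of a few of the lookup methods described above ("get()", "setdefault()" and "pop()"); the dictionary contents are illustrative:

```python
counts = {"spam": 1}
print(counts.get("eggs"))            # None -- missing key, default is None
print(counts.get("eggs", 0))         # 0
print(counts.setdefault("eggs", 0))  # 0, and the key is inserted
print(counts.pop("spam"))            # 1, and the key is removed
print(counts)                        # {'eggs': 0}
```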
The values of *other* take priority when *d* ' - 'and *other*\n' - ' share keys.\n' - '\n' - ' New in version 3.9.\n' - '\n' - ' Dictionaries compare equal if and only if they have the ' - 'same "(key,\n' - ' value)" pairs (regardless of ordering). Order comparisons ' - '(‘<’,\n' - ' ‘<=’, ‘>=’, ‘>’) raise "TypeError".\n' - '\n' - ' Dictionaries preserve insertion order. Note that ' - 'updating a key\n' - ' does not affect the order. Keys added after deletion are ' - 'inserted\n' - ' at the end.\n' - '\n' - ' >>> d = {"one": 1, "two": 2, "three": 3, "four": 4}\n' - ' >>> d\n' - " {'one': 1, 'two': 2, 'three': 3, 'four': 4}\n" - ' >>> list(d)\n' - " ['one', 'two', 'three', 'four']\n" - ' >>> list(d.values())\n' - ' [1, 2, 3, 4]\n' - ' >>> d["one"] = 42\n' - ' >>> d\n' - " {'one': 42, 'two': 2, 'three': 3, 'four': 4}\n" - ' >>> del d["two"]\n' - ' >>> d["two"] = None\n' - ' >>> d\n' - " {'one': 42, 'three': 3, 'four': 4, 'two': None}\n" - '\n' - ' Changed in version 3.7: Dictionary order is guaranteed to ' - 'be\n' - ' insertion order. This behavior was an implementation ' - 'detail of\n' - ' CPython from 3.6.\n' - '\n' - ' Dictionaries and dictionary views are reversible.\n' - '\n' - ' >>> d = {"one": 1, "two": 2, "three": 3, "four": 4}\n' - ' >>> d\n' - " {'one': 1, 'two': 2, 'three': 3, 'four': 4}\n" - ' >>> list(reversed(d))\n' - " ['four', 'three', 'two', 'one']\n" - ' >>> list(reversed(d.values()))\n' - ' [4, 3, 2, 1]\n' - ' >>> list(reversed(d.items()))\n' - " [('four', 4), ('three', 3), ('two', 2), ('one', 1)]\n" - '\n' - ' Changed in version 3.8: Dictionaries are now reversible.\n' - '\n' - 'See also:\n' - '\n' - ' "types.MappingProxyType" can be used to create a read-only ' - 'view of a\n' - ' "dict".\n' - '\n' - '\n' - 'Dictionary view objects\n' - '=======================\n' - '\n' - 'The objects returned by "dict.keys()", "dict.values()" and\n' - '"dict.items()" are *view objects*. They provide a dynamic ' - 'view on the\n' - 'dictionary’s entries, which means that when the dictionary ' - 'changes,\n' - 'the view reflects these changes.\n' - '\n' - 'Dictionary views can be iterated over to yield their ' - 'respective data,\n' - 'and support membership tests:\n' - '\n' - 'len(dictview)\n' - '\n' - ' Return the number of entries in the dictionary.\n' - '\n' - 'iter(dictview)\n' - '\n' - ' Return an iterator over the keys, values or items ' - '(represented as\n' - ' tuples of "(key, value)") in the dictionary.\n' - '\n' - ' Keys and values are iterated over in insertion order. ' - 'This allows\n' - ' the creation of "(value, key)" pairs using "zip()": ' - '"pairs =\n' - ' zip(d.values(), d.keys())". Another way to create the ' - 'same list is\n' - ' "pairs = [(v, k) for (k, v) in d.items()]".\n' - '\n' - ' Iterating views while adding or deleting entries in the ' - 'dictionary\n' - ' may raise a "RuntimeError" or fail to iterate over all ' - 'entries.\n' - '\n' - ' Changed in version 3.7: Dictionary order is guaranteed to ' - 'be\n' - ' insertion order.\n' - '\n' - 'x in dictview\n' - '\n' - ' Return "True" if *x* is in the underlying dictionary’s ' - 'keys, values\n' - ' or items (in the latter case, *x* should be a "(key, ' - 'value)"\n' - ' tuple).\n' - '\n' - 'reversed(dictview)\n' - '\n' - ' Return a reverse iterator over the keys, values or items ' - 'of the\n' - ' dictionary. 
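A short sketch of the "|" and "|=" operators just described (available from Python 3.9; the dictionaries here are illustrative):

```python
defaults = {"host": "localhost", "port": 8000}
overrides = {"port": 8080}

merged = defaults | overrides   # new dict; values from the right operand win
print(merged)                   # {'host': 'localhost', 'port': 8080}

defaults |= overrides           # in-place update with the same priority rule
print(defaults)                 # {'host': 'localhost', 'port': 8080}
```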
The view will be iterated in reverse order of ' - 'the\n' - ' insertion.\n' - '\n' - ' Changed in version 3.8: Dictionary views are now ' - 'reversible.\n' - '\n' - 'dictview.mapping\n' - '\n' - ' Return a "types.MappingProxyType" that wraps the ' - 'original\n' - ' dictionary to which the view refers.\n' - '\n' - ' New in version 3.10.\n' - '\n' - 'Keys views are set-like since their entries are unique and ' - 'hashable.\n' - 'If all values are hashable, so that "(key, value)" pairs are ' - 'unique\n' - 'and hashable, then the items view is also set-like. (Values ' - 'views are\n' - 'not treated as set-like since the entries are generally not ' - 'unique.)\n' - 'For set-like views, all of the operations defined for the ' - 'abstract\n' - 'base class "collections.abc.Set" are available (for example, ' - '"==",\n' - '"<", or "^").\n' - '\n' - 'An example of dictionary view usage:\n' - '\n' - " >>> dishes = {'eggs': 2, 'sausage': 1, 'bacon': 1, " - "'spam': 500}\n" - ' >>> keys = dishes.keys()\n' - ' >>> values = dishes.values()\n' - '\n' - ' >>> # iteration\n' - ' >>> n = 0\n' - ' >>> for val in values:\n' - ' ... n += val\n' - ' >>> print(n)\n' - ' 504\n' - '\n' - ' >>> # keys and values are iterated over in the same order ' - '(insertion order)\n' - ' >>> list(keys)\n' - " ['eggs', 'sausage', 'bacon', 'spam']\n" - ' >>> list(values)\n' - ' [2, 1, 1, 500]\n' - '\n' - ' >>> # view objects are dynamic and reflect dict changes\n' - " >>> del dishes['eggs']\n" - " >>> del dishes['sausage']\n" - ' >>> list(keys)\n' - " ['bacon', 'spam']\n" - '\n' - ' >>> # set operations\n' - " >>> keys & {'eggs', 'bacon', 'salad'}\n" - " {'bacon'}\n" - " >>> keys ^ {'sausage', 'juice'}\n" - " {'juice', 'sausage', 'bacon', 'spam'}\n" - '\n' - ' >>> # get back a read-only proxy for the original ' - 'dictionary\n' - ' >>> values.mapping\n' - " mappingproxy({'eggs': 2, 'sausage': 1, 'bacon': 1, " - "'spam': 500})\n" - " >>> values.mapping['spam']\n" - ' 500\n', - 'typesmethods': 'Methods\n' - '*******\n' - '\n' - 'Methods are functions that are called using the attribute ' - 'notation.\n' - 'There are two flavors: built-in methods (such as "append()" ' - 'on lists)\n' - 'and class instance methods. Built-in methods are described ' - 'with the\n' - 'types that support them.\n' - '\n' - 'If you access a method (a function defined in a class ' - 'namespace)\n' - 'through an instance, you get a special object: a *bound ' - 'method* (also\n' - 'called *instance method*) object. When called, it will add ' - 'the "self"\n' - 'argument to the argument list. Bound methods have two ' - 'special read-\n' - 'only attributes: "m.__self__" is the object on which the ' - 'method\n' - 'operates, and "m.__func__" is the function implementing the ' - 'method.\n' - 'Calling "m(arg-1, arg-2, ..., arg-n)" is completely ' - 'equivalent to\n' - 'calling "m.__func__(m.__self__, arg-1, arg-2, ..., arg-n)".\n' - '\n' - 'Like function objects, bound method objects support getting ' - 'arbitrary\n' - 'attributes. However, since method attributes are actually ' - 'stored on\n' - 'the underlying function object ("meth.__func__"), setting ' - 'method\n' - 'attributes on bound methods is disallowed. Attempting to ' - 'set an\n' - 'attribute on a method results in an "AttributeError" being ' - 'raised. In\n' - 'order to set a method attribute, you need to explicitly set ' - 'it on the\n' - 'underlying function object:\n' - '\n' - ' >>> class C:\n' - ' ... def method(self):\n' - ' ... 
pass\n' - ' ...\n' - ' >>> c = C()\n' - " >>> c.method.whoami = 'my name is method' # can't set on " - 'the method\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - " AttributeError: 'method' object has no attribute " - "'whoami'\n" - " >>> c.method.__func__.whoami = 'my name is method'\n" - ' >>> c.method.whoami\n' - " 'my name is method'\n" - '\n' - 'See The standard type hierarchy for more information.\n', - 'typesmodules': 'Modules\n' - '*******\n' - '\n' - 'The only special operation on a module is attribute access: ' - '"m.name",\n' - 'where *m* is a module and *name* accesses a name defined in ' - '*m*’s\n' - 'symbol table. Module attributes can be assigned to. (Note ' - 'that the\n' - '"import" statement is not, strictly speaking, an operation ' - 'on a module\n' - 'object; "import foo" does not require a module object named ' - '*foo* to\n' - 'exist, rather it requires an (external) *definition* for a ' - 'module\n' - 'named *foo* somewhere.)\n' - '\n' - 'A special attribute of every module is "__dict__". This is ' - 'the\n' - 'dictionary containing the module’s symbol table. Modifying ' - 'this\n' - 'dictionary will actually change the module’s symbol table, ' - 'but direct\n' - 'assignment to the "__dict__" attribute is not possible (you ' - 'can write\n' - '"m.__dict__[\'a\'] = 1", which defines "m.a" to be "1", but ' - 'you can’t\n' - 'write "m.__dict__ = {}"). Modifying "__dict__" directly is ' - 'not\n' - 'recommended.\n' - '\n' - 'Modules built into the interpreter are written like this: ' - '"". If loaded from a file, they are ' - 'written as\n' - '"".\n', - 'typesseq': 'Sequence Types — "list", "tuple", "range"\n' - '*****************************************\n' - '\n' - 'There are three basic sequence types: lists, tuples, and range\n' - 'objects. Additional sequence types tailored for processing of ' - 'binary\n' - 'data and text strings are described in dedicated sections.\n' - '\n' - '\n' - 'Common Sequence Operations\n' - '==========================\n' - '\n' - 'The operations in the following table are supported by most ' - 'sequence\n' - 'types, both mutable and immutable. The ' - '"collections.abc.Sequence" ABC\n' - 'is provided to make it easier to correctly implement these ' - 'operations\n' - 'on custom sequence types.\n' - '\n' - 'This table lists the sequence operations sorted in ascending ' - 'priority.\n' - 'In the table, *s* and *t* are sequences of the same type, *n*, ' - '*i*,\n' - '*j* and *k* are integers and *x* is an arbitrary object that ' - 'meets any\n' - 'type and value restrictions imposed by *s*.\n' - '\n' - 'The "in" and "not in" operations have the same priorities as ' - 'the\n' - 'comparison operations. The "+" (concatenation) and "*" ' - '(repetition)\n' - 'operations have the same priority as the corresponding numeric\n' - 'operations. 
[3]\n' - '\n' - '+----------------------------+----------------------------------+------------+\n' - '| Operation | Result ' - '| Notes |\n' - '|============================|==================================|============|\n' - '| "x in s" | "True" if an item of *s* is ' - '| (1) |\n' - '| | equal to *x*, else "False" ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "x not in s" | "False" if an item of *s* is ' - '| (1) |\n' - '| | equal to *x*, else "True" ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s + t" | the concatenation of *s* and *t* ' - '| (6)(7) |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s * n" or "n * s" | equivalent to adding *s* to ' - '| (2)(7) |\n' - '| | itself *n* times ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s[i]" | *i*th item of *s*, origin 0 ' - '| (3) |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s[i:j]" | slice of *s* from *i* to *j* ' - '| (3)(4) |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s[i:j:k]" | slice of *s* from *i* to *j* ' - '| (3)(5) |\n' - '| | with step *k* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "len(s)" | length of *s* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "min(s)" | smallest item of *s* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "max(s)" | largest item of *s* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s.index(x[, i[, j]])" | index of the first occurrence of ' - '| (8) |\n' - '| | *x* in *s* (at or after index ' - '| |\n' - '| | *i* and before index *j*) ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s.count(x)" | total number of occurrences of ' - '| |\n' - '| | *x* in *s* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '\n' - 'Sequences of the same type also support comparisons. In ' - 'particular,\n' - 'tuples and lists are compared lexicographically by comparing\n' - 'corresponding elements. This means that to compare equal, every\n' - 'element must compare equal and the two sequences must be of the ' - 'same\n' - 'type and have the same length. (For full details see ' - 'Comparisons in\n' - 'the language reference.)\n' - '\n' - 'Notes:\n' - '\n' - '1. While the "in" and "not in" operations are used only for ' - 'simple\n' - ' containment testing in the general case, some specialised ' - 'sequences\n' - ' (such as "str", "bytes" and "bytearray") also use them for\n' - ' subsequence testing:\n' - '\n' - ' >>> "gg" in "eggs"\n' - ' True\n' - '\n' - '2. Values of *n* less than "0" are treated as "0" (which yields ' - 'an\n' - ' empty sequence of the same type as *s*). 
Note that items in ' - 'the\n' - ' sequence *s* are not copied; they are referenced multiple ' - 'times.\n' - ' This often haunts new Python programmers; consider:\n' - '\n' - ' >>> lists = [[]] * 3\n' - ' >>> lists\n' - ' [[], [], []]\n' - ' >>> lists[0].append(3)\n' - ' >>> lists\n' - ' [[3], [3], [3]]\n' - '\n' - ' What has happened is that "[[]]" is a one-element list ' - 'containing\n' - ' an empty list, so all three elements of "[[]] * 3" are ' - 'references\n' - ' to this single empty list. Modifying any of the elements of\n' - ' "lists" modifies this single list. You can create a list of\n' - ' different lists this way:\n' - '\n' - ' >>> lists = [[] for i in range(3)]\n' - ' >>> lists[0].append(3)\n' - ' >>> lists[1].append(5)\n' - ' >>> lists[2].append(7)\n' - ' >>> lists\n' - ' [[3], [5], [7]]\n' - '\n' - ' Further explanation is available in the FAQ entry How do I ' - 'create a\n' - ' multidimensional list?.\n' - '\n' - '3. If *i* or *j* is negative, the index is relative to the end ' - 'of\n' - ' sequence *s*: "len(s) + i" or "len(s) + j" is substituted. ' - 'But\n' - ' note that "-0" is still "0".\n' - '\n' - '4. The slice of *s* from *i* to *j* is defined as the sequence ' - 'of\n' - ' items with index *k* such that "i <= k < j". If *i* or *j* ' - 'is\n' - ' greater than "len(s)", use "len(s)". If *i* is omitted or ' - '"None",\n' - ' use "0". If *j* is omitted or "None", use "len(s)". If *i* ' - 'is\n' - ' greater than or equal to *j*, the slice is empty.\n' - '\n' - '5. The slice of *s* from *i* to *j* with step *k* is defined as ' - 'the\n' - ' sequence of items with index "x = i + n*k" such that "0 <= n ' - '<\n' - ' (j-i)/k". In other words, the indices are "i", "i+k", ' - '"i+2*k",\n' - ' "i+3*k" and so on, stopping when *j* is reached (but never\n' - ' including *j*). When *k* is positive, *i* and *j* are ' - 'reduced to\n' - ' "len(s)" if they are greater. When *k* is negative, *i* and ' - '*j* are\n' - ' reduced to "len(s) - 1" if they are greater. If *i* or *j* ' - 'are\n' - ' omitted or "None", they become “end” values (which end ' - 'depends on\n' - ' the sign of *k*). Note, *k* cannot be zero. If *k* is ' - '"None", it\n' - ' is treated like "1".\n' - '\n' - '6. Concatenating immutable sequences always results in a new ' - 'object.\n' - ' This means that building up a sequence by repeated ' - 'concatenation\n' - ' will have a quadratic runtime cost in the total sequence ' - 'length.\n' - ' To get a linear runtime cost, you must switch to one of the\n' - ' alternatives below:\n' - '\n' - ' * if concatenating "str" objects, you can build a list and ' - 'use\n' - ' "str.join()" at the end or else write to an "io.StringIO"\n' - ' instance and retrieve its value when complete\n' - '\n' - ' * if concatenating "bytes" objects, you can similarly use\n' - ' "bytes.join()" or "io.BytesIO", or you can do in-place\n' - ' concatenation with a "bytearray" object. "bytearray" ' - 'objects are\n' - ' mutable and have an efficient overallocation mechanism\n' - '\n' - ' * if concatenating "tuple" objects, extend a "list" instead\n' - '\n' - ' * for other types, investigate the relevant class ' - 'documentation\n' - '\n' - '7. Some sequence types (such as "range") only support item ' - 'sequences\n' - ' that follow specific patterns, and hence don’t support ' - 'sequence\n' - ' concatenation or repetition.\n' - '\n' - '8. "index" raises "ValueError" when *x* is not found in *s*. Not ' - 'all\n' - ' implementations support passing the additional arguments *i* ' - 'and\n' - ' *j*. 
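To illustrate note 8, a small sketch of "index()" with the optional *i* and *j* bounds, alongside "count()" (the sample string is arbitrary):

```python
s = "mississippi"
print(s.count("ss"))         # 2  -- non-overlapping occurrences
print(s.index("ss"))         # 2  -- first occurrence
print(s.index("ss", 3))      # 5  -- search starts at index 3
print(s.index("ss", 3, 7))   # 5  -- and stops before index 7
```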
These arguments allow efficient searching of subsections ' - 'of\n' - ' the sequence. Passing the extra arguments is roughly ' - 'equivalent to\n' - ' using "s[i:j].index(x)", only without copying any data and ' - 'with the\n' - ' returned index being relative to the start of the sequence ' - 'rather\n' - ' than the start of the slice.\n' - '\n' - '\n' - 'Immutable Sequence Types\n' - '========================\n' - '\n' - 'The only operation that immutable sequence types generally ' - 'implement\n' - 'that is not also implemented by mutable sequence types is ' - 'support for\n' - 'the "hash()" built-in.\n' - '\n' - 'This support allows immutable sequences, such as "tuple" ' - 'instances, to\n' - 'be used as "dict" keys and stored in "set" and "frozenset" ' - 'instances.\n' - '\n' - 'Attempting to hash an immutable sequence that contains ' - 'unhashable\n' - 'values will result in "TypeError".\n' - '\n' - '\n' - 'Mutable Sequence Types\n' - '======================\n' - '\n' - 'The operations in the following table are defined on mutable ' - 'sequence\n' - 'types. The "collections.abc.MutableSequence" ABC is provided to ' - 'make\n' - 'it easier to correctly implement these operations on custom ' - 'sequence\n' - 'types.\n' - '\n' - 'In the table *s* is an instance of a mutable sequence type, *t* ' - 'is any\n' - 'iterable object and *x* is an arbitrary object that meets any ' - 'type and\n' - 'value restrictions imposed by *s* (for example, "bytearray" ' - 'only\n' - 'accepts integers that meet the value restriction "0 <= x <= ' - '255").\n' - '\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| Operation | ' - 'Result | Notes |\n' - '|================================|==================================|=======================|\n' - '| "s[i] = x" | item *i* of *s* is replaced ' - 'by | |\n' - '| | ' - '*x* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s[i:j] = t" | slice of *s* from *i* to *j* ' - 'is | |\n' - '| | replaced by the contents of ' - 'the | |\n' - '| | iterable ' - '*t* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "del s[i:j]" | same as "s[i:j] = ' - '[]" | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s[i:j:k] = t" | the elements of "s[i:j:k]" ' - 'are | (1) |\n' - '| | replaced by those of ' - '*t* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "del s[i:j:k]" | removes the elements ' - 'of | |\n' - '| | "s[i:j:k]" from the ' - 'list | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.append(x)" | appends *x* to the end of ' - 'the | |\n' - '| | sequence (same ' - 'as | |\n' - '| | "s[len(s):len(s)] = ' - '[x]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.clear()" | removes all items from *s* ' - '(same | (5) |\n' - '| | as "del ' - 's[:]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.copy()" | creates a shallow copy of ' - '*s* | (5) |\n' - '| | (same as ' - '"s[:]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.extend(t)" or "s += t" | extends *s* with the contents ' - 'of | |\n' - '| | *t* 
(for the most part the ' - 'same | |\n' - '| | as "s[len(s):len(s)] = ' - 't") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s *= n" | updates *s* with its ' - 'contents | (6) |\n' - '| | repeated *n* ' - 'times | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.insert(i, x)" | inserts *x* into *s* at ' - 'the | |\n' - '| | index given by *i* (same ' - 'as | |\n' - '| | "s[i:i] = ' - '[x]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.pop()" or "s.pop(i)" | retrieves the item at *i* ' - 'and | (2) |\n' - '| | also removes it from ' - '*s* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.remove(x)" | remove the first item from ' - '*s* | (3) |\n' - '| | where "s[i]" is equal to ' - '*x* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.reverse()" | reverses the items of *s* ' - 'in | (4) |\n' - '| | ' - 'place | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '\n' - 'Notes:\n' - '\n' - '1. *t* must have the same length as the slice it is replacing.\n' - '\n' - '2. The optional argument *i* defaults to "-1", so that by ' - 'default the\n' - ' last item is removed and returned.\n' - '\n' - '3. "remove()" raises "ValueError" when *x* is not found in *s*.\n' - '\n' - '4. The "reverse()" method modifies the sequence in place for ' - 'economy\n' - ' of space when reversing a large sequence. To remind users ' - 'that it\n' - ' operates by side effect, it does not return the reversed ' - 'sequence.\n' - '\n' - '5. "clear()" and "copy()" are included for consistency with the\n' - ' interfaces of mutable containers that don’t support slicing\n' - ' operations (such as "dict" and "set"). "copy()" is not part ' - 'of the\n' - ' "collections.abc.MutableSequence" ABC, but most concrete ' - 'mutable\n' - ' sequence classes provide it.\n' - '\n' - ' New in version 3.3: "clear()" and "copy()" methods.\n' - '\n' - '6. The value *n* is an integer, or an object implementing\n' - ' "__index__()". Zero and negative values of *n* clear the ' - 'sequence.\n' - ' Items in the sequence are not copied; they are referenced ' - 'multiple\n' - ' times, as explained for "s * n" under Common Sequence ' - 'Operations.\n' - '\n' - '\n' - 'Lists\n' - '=====\n' - '\n' - 'Lists are mutable sequences, typically used to store collections ' - 'of\n' - 'homogeneous items (where the precise degree of similarity will ' - 'vary by\n' - 'application).\n' - '\n' - 'class list([iterable])\n' - '\n' - ' Lists may be constructed in several ways:\n' - '\n' - ' * Using a pair of square brackets to denote the empty list: ' - '"[]"\n' - '\n' - ' * Using square brackets, separating items with commas: "[a]", ' - '"[a,\n' - ' b, c]"\n' - '\n' - ' * Using a list comprehension: "[x for x in iterable]"\n' - '\n' - ' * Using the type constructor: "list()" or "list(iterable)"\n' - '\n' - ' The constructor builds a list whose items are the same and in ' - 'the\n' - ' same order as *iterable*’s items. *iterable* may be either ' - 'a\n' - ' sequence, a container that supports iteration, or an ' - 'iterator\n' - ' object. If *iterable* is already a list, a copy is made and\n' - ' returned, similar to "iterable[:]". 
For example, ' - '"list(\'abc\')"\n' - ' returns "[\'a\', \'b\', \'c\']" and "list( (1, 2, 3) )" ' - 'returns "[1, 2,\n' - ' 3]". If no argument is given, the constructor creates a new ' - 'empty\n' - ' list, "[]".\n' - '\n' - ' Many other operations also produce lists, including the ' - '"sorted()"\n' - ' built-in.\n' - '\n' - ' Lists implement all of the common and mutable sequence ' - 'operations.\n' - ' Lists also provide the following additional method:\n' - '\n' - ' sort(*, key=None, reverse=False)\n' - '\n' - ' This method sorts the list in place, using only "<" ' - 'comparisons\n' - ' between items. Exceptions are not suppressed - if any ' - 'comparison\n' - ' operations fail, the entire sort operation will fail (and ' - 'the\n' - ' list will likely be left in a partially modified state).\n' - '\n' - ' "sort()" accepts two arguments that can only be passed by\n' - ' keyword (keyword-only arguments):\n' - '\n' - ' *key* specifies a function of one argument that is used ' - 'to\n' - ' extract a comparison key from each list element (for ' - 'example,\n' - ' "key=str.lower"). The key corresponding to each item in ' - 'the list\n' - ' is calculated once and then used for the entire sorting ' - 'process.\n' - ' The default value of "None" means that list items are ' - 'sorted\n' - ' directly without calculating a separate key value.\n' - '\n' - ' The "functools.cmp_to_key()" utility is available to ' - 'convert a\n' - ' 2.x style *cmp* function to a *key* function.\n' - '\n' - ' *reverse* is a boolean value. If set to "True", then the ' - 'list\n' - ' elements are sorted as if each comparison were reversed.\n' - '\n' - ' This method modifies the sequence in place for economy of ' - 'space\n' - ' when sorting a large sequence. To remind users that it ' - 'operates\n' - ' by side effect, it does not return the sorted sequence ' - '(use\n' - ' "sorted()" to explicitly request a new sorted list ' - 'instance).\n' - '\n' - ' The "sort()" method is guaranteed to be stable. A sort ' - 'is\n' - ' stable if it guarantees not to change the relative order ' - 'of\n' - ' elements that compare equal — this is helpful for sorting ' - 'in\n' - ' multiple passes (for example, sort by department, then by ' - 'salary\n' - ' grade).\n' - '\n' - ' For sorting examples and a brief sorting tutorial, see ' - 'Sorting\n' - ' HOW TO.\n' - '\n' - ' **CPython implementation detail:** While a list is being ' - 'sorted,\n' - ' the effect of attempting to mutate, or even inspect, the ' - 'list is\n' - ' undefined. The C implementation of Python makes the list ' - 'appear\n' - ' empty for the duration, and raises "ValueError" if it can ' - 'detect\n' - ' that the list has been mutated during a sort.\n' - '\n' - '\n' - 'Tuples\n' - '======\n' - '\n' - 'Tuples are immutable sequences, typically used to store ' - 'collections of\n' - 'heterogeneous data (such as the 2-tuples produced by the ' - '"enumerate()"\n' - 'built-in). 
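Tying back to the "sort()" description above, a short sketch of the keyword-only *key* and *reverse* arguments (the word list is illustrative):

```python
words = ["pear", "Fig", "apple", "Banana"]
words.sort(key=str.lower)           # case-insensitive, in place, stable
print(words)                        # ['apple', 'Banana', 'Fig', 'pear']

words.sort(key=len, reverse=True)   # longest first
print(words)                        # ['Banana', 'apple', 'pear', 'Fig']
```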
Tuples are also used for cases where an immutable ' - 'sequence\n' - 'of homogeneous data is needed (such as allowing storage in a ' - '"set" or\n' - '"dict" instance).\n' - '\n' - 'class tuple([iterable])\n' - '\n' - ' Tuples may be constructed in a number of ways:\n' - '\n' - ' * Using a pair of parentheses to denote the empty tuple: ' - '"()"\n' - '\n' - ' * Using a trailing comma for a singleton tuple: "a," or ' - '"(a,)"\n' - '\n' - ' * Separating items with commas: "a, b, c" or "(a, b, c)"\n' - '\n' - ' * Using the "tuple()" built-in: "tuple()" or ' - '"tuple(iterable)"\n' - '\n' - ' The constructor builds a tuple whose items are the same and ' - 'in the\n' - ' same order as *iterable*’s items. *iterable* may be either ' - 'a\n' - ' sequence, a container that supports iteration, or an ' - 'iterator\n' - ' object. If *iterable* is already a tuple, it is returned\n' - ' unchanged. For example, "tuple(\'abc\')" returns "(\'a\', ' - '\'b\', \'c\')"\n' - ' and "tuple( [1, 2, 3] )" returns "(1, 2, 3)". If no argument ' - 'is\n' - ' given, the constructor creates a new empty tuple, "()".\n' - '\n' - ' Note that it is actually the comma which makes a tuple, not ' - 'the\n' - ' parentheses. The parentheses are optional, except in the ' - 'empty\n' - ' tuple case, or when they are needed to avoid syntactic ' - 'ambiguity.\n' - ' For example, "f(a, b, c)" is a function call with three ' - 'arguments,\n' - ' while "f((a, b, c))" is a function call with a 3-tuple as the ' - 'sole\n' - ' argument.\n' - '\n' - ' Tuples implement all of the common sequence operations.\n' - '\n' - 'For heterogeneous collections of data where access by name is ' - 'clearer\n' - 'than access by index, "collections.namedtuple()" may be a more\n' - 'appropriate choice than a simple tuple object.\n' - '\n' - '\n' - 'Ranges\n' - '======\n' - '\n' - 'The "range" type represents an immutable sequence of numbers and ' - 'is\n' - 'commonly used for looping a specific number of times in "for" ' - 'loops.\n' - '\n' - 'class range(stop)\n' - 'class range(start, stop[, step])\n' - '\n' - ' The arguments to the range constructor must be integers ' - '(either\n' - ' built-in "int" or any object that implements the "__index__"\n' - ' special method). If the *step* argument is omitted, it ' - 'defaults to\n' - ' "1". If the *start* argument is omitted, it defaults to "0". ' - 'If\n' - ' *step* is zero, "ValueError" is raised.\n' - '\n' - ' For a positive *step*, the contents of a range "r" are ' - 'determined\n' - ' by the formula "r[i] = start + step*i" where "i >= 0" and ' - '"r[i] <\n' - ' stop".\n' - '\n' - ' For a negative *step*, the contents of the range are still\n' - ' determined by the formula "r[i] = start + step*i", but the\n' - ' constraints are "i >= 0" and "r[i] > stop".\n' - '\n' - ' A range object will be empty if "r[0]" does not meet the ' - 'value\n' - ' constraint. 
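A tiny sketch of the formula just given for a negative *step* (the concrete ranges are arbitrary): items satisfy "r[i] = start + step*i" with "r[i] > stop", and the range is empty when "r[0]" already fails that constraint:

```python
print(list(range(10, 0, -3)))   # [10, 7, 4, 1]
print(list(range(1, 10, -3)))   # []  -- r[0] == 1 is not > stop, so empty
```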
Ranges do support negative indices, but these ' - 'are\n' - ' interpreted as indexing from the end of the sequence ' - 'determined by\n' - ' the positive indices.\n' - '\n' - ' Ranges containing absolute values larger than "sys.maxsize" ' - 'are\n' - ' permitted but some features (such as "len()") may raise\n' - ' "OverflowError".\n' - '\n' - ' Range examples:\n' - '\n' - ' >>> list(range(10))\n' - ' [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n' - ' >>> list(range(1, 11))\n' - ' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n' - ' >>> list(range(0, 30, 5))\n' - ' [0, 5, 10, 15, 20, 25]\n' - ' >>> list(range(0, 10, 3))\n' - ' [0, 3, 6, 9]\n' - ' >>> list(range(0, -10, -1))\n' - ' [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]\n' - ' >>> list(range(0))\n' - ' []\n' - ' >>> list(range(1, 0))\n' - ' []\n' - '\n' - ' Ranges implement all of the common sequence operations ' - 'except\n' - ' concatenation and repetition (due to the fact that range ' - 'objects\n' - ' can only represent sequences that follow a strict pattern ' - 'and\n' - ' repetition and concatenation will usually violate that ' - 'pattern).\n' - '\n' - ' start\n' - '\n' - ' The value of the *start* parameter (or "0" if the ' - 'parameter was\n' - ' not supplied)\n' - '\n' - ' stop\n' - '\n' - ' The value of the *stop* parameter\n' - '\n' - ' step\n' - '\n' - ' The value of the *step* parameter (or "1" if the parameter ' - 'was\n' - ' not supplied)\n' - '\n' - 'The advantage of the "range" type over a regular "list" or ' - '"tuple" is\n' - 'that a "range" object will always take the same (small) amount ' - 'of\n' - 'memory, no matter the size of the range it represents (as it ' - 'only\n' - 'stores the "start", "stop" and "step" values, calculating ' - 'individual\n' - 'items and subranges as needed).\n' - '\n' - 'Range objects implement the "collections.abc.Sequence" ABC, and\n' - 'provide features such as containment tests, element index ' - 'lookup,\n' - 'slicing and support for negative indices (see Sequence Types — ' - 'list,\n' - 'tuple, range):\n' - '\n' - '>>> r = range(0, 20, 2)\n' - '>>> r\n' - 'range(0, 20, 2)\n' - '>>> 11 in r\n' - 'False\n' - '>>> 10 in r\n' - 'True\n' - '>>> r.index(10)\n' - '5\n' - '>>> r[5]\n' - '10\n' - '>>> r[:5]\n' - 'range(0, 10, 2)\n' - '>>> r[-1]\n' - '18\n' - '\n' - 'Testing range objects for equality with "==" and "!=" compares ' - 'them as\n' - 'sequences. That is, two range objects are considered equal if ' - 'they\n' - 'represent the same sequence of values. (Note that two range ' - 'objects\n' - 'that compare equal might have different "start", "stop" and ' - '"step"\n' - 'attributes, for example "range(0) == range(2, 1, 3)" or ' - '"range(0, 3,\n' - '2) == range(0, 4, 2)".)\n' - '\n' - 'Changed in version 3.2: Implement the Sequence ABC. Support ' - 'slicing\n' - 'and negative indices. Test "int" objects for membership in ' - 'constant\n' - 'time instead of iterating through all items.\n' - '\n' - 'Changed in version 3.3: Define ‘==’ and ‘!=’ to compare range ' - 'objects\n' - 'based on the sequence of values they define (instead of ' - 'comparing\n' - 'based on object identity).\n' - '\n' - 'New in version 3.3: The "start", "stop" and "step" attributes.\n' - '\n' - 'See also:\n' - '\n' - ' * The linspace recipe shows how to implement a lazy version of ' - 'range\n' - ' suitable for floating point applications.\n', - 'typesseq-mutable': 'Mutable Sequence Types\n' - '**********************\n' - '\n' - 'The operations in the following table are defined on ' - 'mutable sequence\n' - 'types. 
The "collections.abc.MutableSequence" ABC is ' - 'provided to make\n' - 'it easier to correctly implement these operations on ' - 'custom sequence\n' - 'types.\n' - '\n' - 'In the table *s* is an instance of a mutable sequence ' - 'type, *t* is any\n' - 'iterable object and *x* is an arbitrary object that ' - 'meets any type and\n' - 'value restrictions imposed by *s* (for example, ' - '"bytearray" only\n' - 'accepts integers that meet the value restriction "0 <= x ' - '<= 255").\n' - '\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| Operation | ' - 'Result | Notes ' - '|\n' - '|================================|==================================|=======================|\n' - '| "s[i] = x" | item *i* of *s* is ' - 'replaced by | |\n' - '| | ' - '*x* | ' - '|\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s[i:j] = t" | slice of *s* from *i* ' - 'to *j* is | |\n' - '| | replaced by the ' - 'contents of the | |\n' - '| | iterable ' - '*t* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "del s[i:j]" | same as "s[i:j] = ' - '[]" | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s[i:j:k] = t" | the elements of ' - '"s[i:j:k]" are | (1) |\n' - '| | replaced by those of ' - '*t* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "del s[i:j:k]" | removes the elements ' - 'of | |\n' - '| | "s[i:j:k]" from the ' - 'list | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.append(x)" | appends *x* to the ' - 'end of the | |\n' - '| | sequence (same ' - 'as | |\n' - '| | "s[len(s):len(s)] = ' - '[x]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.clear()" | removes all items ' - 'from *s* (same | (5) |\n' - '| | as "del ' - 's[:]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.copy()" | creates a shallow ' - 'copy of *s* | (5) |\n' - '| | (same as ' - '"s[:]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.extend(t)" or "s += t" | extends *s* with the ' - 'contents of | |\n' - '| | *t* (for the most ' - 'part the same | |\n' - '| | as "s[len(s):len(s)] ' - '= t") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s *= n" | updates *s* with its ' - 'contents | (6) |\n' - '| | repeated *n* ' - 'times | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.insert(i, x)" | inserts *x* into *s* ' - 'at the | |\n' - '| | index given by *i* ' - '(same as | |\n' - '| | "s[i:i] = ' - '[x]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.pop()" or "s.pop(i)" | retrieves the item at ' - '*i* and | (2) |\n' - '| | also removes it from ' - '*s* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.remove(x)" | remove the first item ' - 'from *s* | (3) |\n' - '| | where "s[i]" is equal ' - 'to *x* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - 
'| "s.reverse()" | reverses the items of ' - '*s* in | (4) |\n' - '| | ' - 'place | ' - '|\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '\n' - 'Notes:\n' - '\n' - '1. *t* must have the same length as the slice it is ' - 'replacing.\n' - '\n' - '2. The optional argument *i* defaults to "-1", so that ' - 'by default the\n' - ' last item is removed and returned.\n' - '\n' - '3. "remove()" raises "ValueError" when *x* is not found ' - 'in *s*.\n' - '\n' - '4. The "reverse()" method modifies the sequence in place ' - 'for economy\n' - ' of space when reversing a large sequence. To remind ' - 'users that it\n' - ' operates by side effect, it does not return the ' - 'reversed sequence.\n' - '\n' - '5. "clear()" and "copy()" are included for consistency ' - 'with the\n' - ' interfaces of mutable containers that don’t support ' - 'slicing\n' - ' operations (such as "dict" and "set"). "copy()" is ' - 'not part of the\n' - ' "collections.abc.MutableSequence" ABC, but most ' - 'concrete mutable\n' - ' sequence classes provide it.\n' - '\n' - ' New in version 3.3: "clear()" and "copy()" methods.\n' - '\n' - '6. The value *n* is an integer, or an object ' - 'implementing\n' - ' "__index__()". Zero and negative values of *n* clear ' - 'the sequence.\n' - ' Items in the sequence are not copied; they are ' - 'referenced multiple\n' - ' times, as explained for "s * n" under Common Sequence ' - 'Operations.\n', - 'unary': 'Unary arithmetic and bitwise operations\n' - '***************************************\n' - '\n' - 'All unary arithmetic and bitwise operations have the same ' - 'priority:\n' - '\n' - ' u_expr ::= power | "-" u_expr | "+" u_expr | "~" u_expr\n' - '\n' - 'The unary "-" (minus) operator yields the negation of its numeric\n' - 'argument; the operation can be overridden with the "__neg__()" ' - 'special\n' - 'method.\n' - '\n' - 'The unary "+" (plus) operator yields its numeric argument ' - 'unchanged;\n' - 'the operation can be overridden with the "__pos__()" special ' - 'method.\n' - '\n' - 'The unary "~" (invert) operator yields the bitwise inversion of ' - 'its\n' - 'integer argument. The bitwise inversion of "x" is defined as\n' - '"-(x+1)". It only applies to integral numbers or to custom ' - 'objects\n' - 'that override the "__invert__()" special method.\n' - '\n' - 'In all three cases, if the argument does not have the proper type, ' - 'a\n' - '"TypeError" exception is raised.\n', - 'while': 'The "while" statement\n' - '*********************\n' - '\n' - 'The "while" statement is used for repeated execution as long as an\n' - 'expression is true:\n' - '\n' - ' while_stmt ::= "while" assignment_expression ":" suite\n' - ' ["else" ":" suite]\n' - '\n' - 'This repeatedly tests the expression and, if it is true, executes ' - 'the\n' - 'first suite; if the expression is false (which may be the first ' - 'time\n' - 'it is tested) the suite of the "else" clause, if present, is ' - 'executed\n' - 'and the loop terminates.\n' - '\n' - 'A "break" statement executed in the first suite terminates the ' - 'loop\n' - 'without executing the "else" clause’s suite. A "continue" ' - 'statement\n' - 'executed in the first suite skips the rest of the suite and goes ' - 'back\n' - 'to testing the expression.\n', - 'with': 'The "with" statement\n' - '********************\n' - '\n' - 'The "with" statement is used to wrap the execution of a block with\n' - 'methods defined by a context manager (see section With Statement\n' - 'Context Managers). 
This allows common "try"…"except"…"finally" ' - 'usage\n' - 'patterns to be encapsulated for convenient reuse.\n' - '\n' - ' with_stmt ::= "with" ( "(" with_stmt_contents ","? ")" | ' - 'with_stmt_contents ) ":" suite\n' - ' with_stmt_contents ::= with_item ("," with_item)*\n' - ' with_item ::= expression ["as" target]\n' - '\n' - 'The execution of the "with" statement with one “item” proceeds as\n' - 'follows:\n' - '\n' - '1. The context expression (the expression given in the "with_item") ' - 'is\n' - ' evaluated to obtain a context manager.\n' - '\n' - '2. The context manager’s "__enter__()" is loaded for later use.\n' - '\n' - '3. The context manager’s "__exit__()" is loaded for later use.\n' - '\n' - '4. The context manager’s "__enter__()" method is invoked.\n' - '\n' - '5. If a target was included in the "with" statement, the return ' - 'value\n' - ' from "__enter__()" is assigned to it.\n' - '\n' - ' Note:\n' - '\n' - ' The "with" statement guarantees that if the "__enter__()" ' - 'method\n' - ' returns without an error, then "__exit__()" will always be\n' - ' called. Thus, if an error occurs during the assignment to the\n' - ' target list, it will be treated the same as an error occurring\n' - ' within the suite would be. See step 6 below.\n' - '\n' - '6. The suite is executed.\n' - '\n' - '7. The context manager’s "__exit__()" method is invoked. If an\n' - ' exception caused the suite to be exited, its type, value, and\n' - ' traceback are passed as arguments to "__exit__()". Otherwise, ' - 'three\n' - ' "None" arguments are supplied.\n' - '\n' - ' If the suite was exited due to an exception, and the return ' - 'value\n' - ' from the "__exit__()" method was false, the exception is ' - 'reraised.\n' - ' If the return value was true, the exception is suppressed, and\n' - ' execution continues with the statement following the "with"\n' - ' statement.\n' - '\n' - ' If the suite was exited for any reason other than an exception, ' - 'the\n' - ' return value from "__exit__()" is ignored, and execution ' - 'proceeds\n' - ' at the normal location for the kind of exit that was taken.\n' - '\n' - 'The following code:\n' - '\n' - ' with EXPRESSION as TARGET:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' manager = (EXPRESSION)\n' - ' enter = type(manager).__enter__\n' - ' exit = type(manager).__exit__\n' - ' value = enter(manager)\n' - ' hit_except = False\n' - '\n' - ' try:\n' - ' TARGET = value\n' - ' SUITE\n' - ' except:\n' - ' hit_except = True\n' - ' if not exit(manager, *sys.exc_info()):\n' - ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' exit(manager, None, None, None)\n' - '\n' - 'With more than one item, the context managers are processed as if\n' - 'multiple "with" statements were nested:\n' - '\n' - ' with A() as a, B() as b:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' with A() as a:\n' - ' with B() as b:\n' - ' SUITE\n' - '\n' - 'You can also write multi-item context managers in multiple lines if\n' - 'the items are surrounded by parentheses. 
For example:\n' - '\n' - ' with (\n' - ' A() as a,\n' - ' B() as b,\n' - ' ):\n' - ' SUITE\n' - '\n' - 'Changed in version 3.1: Support for multiple context expressions.\n' - '\n' - 'Changed in version 3.10: Support for using grouping parentheses to\n' - 'break the statement in multiple lines.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 343** - The “with” statement\n' - ' The specification, background, and examples for the Python ' - '"with"\n' - ' statement.\n', - 'yield': 'The "yield" statement\n' - '*********************\n' - '\n' - ' yield_stmt ::= yield_expression\n' - '\n' - 'A "yield" statement is semantically equivalent to a yield ' - 'expression.\n' - 'The yield statement can be used to omit the parentheses that would\n' - 'otherwise be required in the equivalent yield expression ' - 'statement.\n' - 'For example, the yield statements\n' - '\n' - ' yield \n' - ' yield from \n' - '\n' - 'are equivalent to the yield expression statements\n' - '\n' - ' (yield )\n' - ' (yield from )\n' - '\n' - 'Yield expressions and statements are only used when defining a\n' - '*generator* function, and are only used in the body of the ' - 'generator\n' - 'function. Using yield in a function definition is sufficient to ' - 'cause\n' - 'that definition to create a generator function instead of a normal\n' - 'function.\n' - '\n' - 'For full details of "yield" semantics, refer to the Yield ' - 'expressions\n' - 'section.\n'} diff --git a/www/src/Lib/test/test_syntax.py b/www/src/Lib/test/test_syntax.py index eefd4dbae..4c988382f 100644 --- a/www/src/Lib/test/test_syntax.py +++ b/www/src/Lib/test/test_syntax.py @@ -334,7 +334,12 @@ >>> def f(x, y=1, z): ... pass Traceback (most recent call last): -SyntaxError: non-default argument follows default argument +SyntaxError: parameter without a default follows parameter with a default + +>>> def f(x, /, y=1, z): +... pass +Traceback (most recent call last): +SyntaxError: parameter without a default follows parameter with a default >>> def f(x, None): ... pass @@ -555,6 +560,24 @@ Traceback (most recent call last): SyntaxError: expected default value expression +>>> lambda a,d=3,c: None +Traceback (most recent call last): +SyntaxError: parameter without a default follows parameter with a default + +>>> lambda a,/,d=3,c: None +Traceback (most recent call last): +SyntaxError: parameter without a default follows parameter with a default + +>>> import ast; ast.parse(''' +... def f( +... *, # type: int +... a, # type: int +... ): +... pass +... 
''', type_comments=True) +Traceback (most recent call last): +SyntaxError: bare * has associated type comment + From ast_for_funcdef(): @@ -733,6 +756,27 @@ >>> __debug__: int Traceback (most recent call last): SyntaxError: cannot assign to __debug__ +>>> f(a=) +Traceback (most recent call last): +SyntaxError: expected argument value expression +>>> f(a, b, c=) +Traceback (most recent call last): +SyntaxError: expected argument value expression +>>> f(a, b, c=, d) +Traceback (most recent call last): +SyntaxError: expected argument value expression +>>> f(*args=[0]) +Traceback (most recent call last): +SyntaxError: cannot assign to iterable argument unpacking +>>> f(a, b, *args=[0]) +Traceback (most recent call last): +SyntaxError: cannot assign to iterable argument unpacking +>>> f(**kwargs={'a': 1}) +Traceback (most recent call last): +SyntaxError: cannot assign to keyword argument unpacking +>>> f(a, b, *args, **kwargs={'a': 1}) +Traceback (most recent call last): +SyntaxError: cannot assign to keyword argument unpacking More set_context(): @@ -1561,6 +1605,38 @@ Traceback (most recent call last): SyntaxError: trailing comma not allowed without surrounding parentheses +>>> import a from b +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a.y.z from b.y.z +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a from b as bar +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a.y.z from b.y.z as bar +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a, b,c from b +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a.y.z, b.y.z, c.y.z from b.y.z +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a,b,c from b as bar +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a.y.z, b.y.z, c.y.z from b.y.z as bar +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + # Check that we dont raise the "trailing comma" error if there is more # input to the left of the valid part that we parsed. @@ -1793,6 +1869,92 @@ def f(x: *b) Traceback (most recent call last): ... SyntaxError: invalid syntax + +Invalid bytes literals: + + >>> b"Ā" + Traceback (most recent call last): + ... + b"Ā" + ^^^ + SyntaxError: bytes can only contain ASCII literal characters + + >>> b"абвгде" + Traceback (most recent call last): + ... + b"абвгде" + ^^^^^^^^ + SyntaxError: bytes can only contain ASCII literal characters + + >>> b"abc ъющый" # first 3 letters are ascii + Traceback (most recent call last): + ... + b"abc ъющый" + ^^^^^^^^^^^ + SyntaxError: bytes can only contain ASCII literal characters + +Invalid expressions in type scopes: + + >>> type A[T: (x:=3)] = int + Traceback (most recent call last): + ... + SyntaxError: named expression cannot be used within a TypeVar bound + + >>> type A[T: (yield 3)] = int + Traceback (most recent call last): + ... + SyntaxError: yield expression cannot be used within a TypeVar bound + + >>> type A[T: (await 3)] = int + Traceback (most recent call last): + ... 
+ SyntaxError: await expression cannot be used within a TypeVar bound + + >>> type A[T: (yield from [])] = int + Traceback (most recent call last): + ... + SyntaxError: yield expression cannot be used within a TypeVar bound + + >>> type A = (x := 3) + Traceback (most recent call last): + ... + SyntaxError: named expression cannot be used within a type alias + + >>> type A = (yield 3) + Traceback (most recent call last): + ... + SyntaxError: yield expression cannot be used within a type alias + + >>> type A = (await 3) + Traceback (most recent call last): + ... + SyntaxError: await expression cannot be used within a type alias + + >>> type A = (yield from []) + Traceback (most recent call last): + ... + SyntaxError: yield expression cannot be used within a type alias + + >>> class A[T]((x := 3)): ... + Traceback (most recent call last): + ... + SyntaxError: named expression cannot be used within the definition of a generic + + >>> class A[T]((yield 3)): ... + Traceback (most recent call last): + ... + SyntaxError: yield expression cannot be used within the definition of a generic + + >>> class A[T]((await 3)): ... + Traceback (most recent call last): + ... + SyntaxError: await expression cannot be used within the definition of a generic + + >>> class A[T]((yield from [])): ... + Traceback (most recent call last): + ... + SyntaxError: yield expression cannot be used within the definition of a generic + """ import re @@ -1897,9 +2059,6 @@ def error2(): """ self._check_error(source, "parameter and nonlocal", lineno=3) - def test_break_outside_loop(self): - self._check_error("break", "outside loop") - def test_yield_outside_function(self): self._check_error("if 0: yield", "outside function") self._check_error("if 0: yield\nelse: x=1", "outside function") @@ -1928,20 +2087,27 @@ def test_return_outside_function(self): "outside function") def test_break_outside_loop(self): - self._check_error("if 0: break", "outside loop") - self._check_error("if 0: break\nelse: x=1", "outside loop") - self._check_error("if 1: pass\nelse: break", "outside loop") - self._check_error("class C:\n if 0: break", "outside loop") + msg = "outside loop" + self._check_error("break", msg, lineno=1) + self._check_error("if 0: break", msg, lineno=1) + self._check_error("if 0: break\nelse: x=1", msg, lineno=1) + self._check_error("if 1: pass\nelse: break", msg, lineno=2) + self._check_error("class C:\n if 0: break", msg, lineno=2) self._check_error("class C:\n if 1: pass\n else: break", - "outside loop") + msg, lineno=3) + self._check_error("with object() as obj:\n break", + msg, lineno=2) def test_continue_outside_loop(self): - self._check_error("if 0: continue", "not properly in loop") - self._check_error("if 0: continue\nelse: x=1", "not properly in loop") - self._check_error("if 1: pass\nelse: continue", "not properly in loop") - self._check_error("class C:\n if 0: continue", "not properly in loop") + msg = "not properly in loop" + self._check_error("if 0: continue", msg, lineno=1) + self._check_error("if 0: continue\nelse: x=1", msg, lineno=1) + self._check_error("if 1: pass\nelse: continue", msg, lineno=2) + self._check_error("class C:\n if 0: continue", msg, lineno=2) self._check_error("class C:\n if 1: pass\n else: continue", - "not properly in loop") + msg, lineno=3) + self._check_error("with object() as obj:\n continue", + msg, lineno=2) def test_unexpected_indent(self): self._check_error("foo()\n bar()\n", "unexpected indent", @@ -1975,6 +2141,16 @@ def test_generator_in_function_call(self): "Generator expression 
must be parenthesized", lineno=1, end_lineno=1, offset=11, end_offset=53) + def test_except_then_except_star(self): + self._check_error("try: pass\nexcept ValueError: pass\nexcept* TypeError: pass", + r"cannot have both 'except' and 'except\*' on the same 'try'", + lineno=3, end_lineno=3, offset=1, end_offset=8) + + def test_except_star_then_except(self): + self._check_error("try: pass\nexcept* ValueError: pass\nexcept TypeError: pass", + r"cannot have both 'except' and 'except\*' on the same 'try'", + lineno=3, end_lineno=3, offset=1, end_offset=7) + def test_empty_line_after_linecont(self): # See issue-40847 s = r"""\ @@ -2075,6 +2251,22 @@ def test_error_parenthesis(self): for paren in ")]}": self._check_error(paren + "1 + 2", f"unmatched '\\{paren}'") + # Some more complex examples: + code = """\ +func( + a=["unclosed], # Need a quote in this comment: " + b=2, +) +""" + self._check_error(code, "parenthesis '\\)' does not match opening parenthesis '\\['") + + def test_error_string_literal(self): + + self._check_error("'blech", "unterminated string literal") + self._check_error('"blech', "unterminated string literal") + self._check_error("'''blech", "unterminated triple-quoted string literal") + self._check_error('"""blech', "unterminated triple-quoted string literal") + def test_invisible_characters(self): self._check_error('print\x17("Hello")', "invalid non-printable character") @@ -2143,7 +2335,7 @@ def test_error_on_parser_stack_overflow(self): source = "-" * 100000 + "4" for mode in ["exec", "eval", "single"]: with self.subTest(mode=mode): - with self.assertRaises(MemoryError): + with self.assertRaisesRegex(MemoryError, r"too complex"): compile(source, "", mode) @support.cpython_only diff --git a/www/src/action_helpers_generated_version.js b/www/src/action_helpers_generated_version.js index 87a79e834..2cba09a35 100644 --- a/www/src/action_helpers_generated_version.js +++ b/www/src/action_helpers_generated_version.js @@ -5,8 +5,63 @@ var _b_ = $B.builtins, NULL = undefined, DOT = '.', - ELLIPSIS = '...', - DEL_TARGETS = 'del_targets' + ELLIPSIS = '...' + +// TARGETS_TYPE +const STAR_TARGETS = 1, + DEL_TARGETS = 2, + FOR_TARGETS = 3 + +function make_string_for_ast_value(value){ + value = value.replace(/\n/g,'\\n\\\n') + value = value.replace(/\r/g,'\\r\\\r') + if(value[0] == "'"){ + var unquoted = value.substr(1, value.length - 2) + return unquoted + } + // prepare value so that "'" + value + "'" is the correct string + if(value.indexOf("'") > -1){ + var s = '', + escaped = false + for(var char of value){ + if(char == '\\'){ + if(escaped){ + s += '\\\\' + } + escaped = !escaped + }else{ + if(char == "'" && ! escaped){ + // escape unescaped single quotes + s += '\\' + }else if(escaped){ + s += '\\' + } + s += char + escaped = false + } + } + value = s + } + return value.substr(1, value.length - 2) +} + +function encode_bytestring(s){ + s = s.replace(/\\t/g, '\t') + .replace(/\\n/g, '\n') + .replace(/\\r/g, '\r') + .replace(/\\f/g, '\f') + .replace(/\\v/g, '\v') + .replace(/\\\\/g, '\\') + var t = [] + for(var i = 0, len = s.length; i < len; i++){ + var cp = s.codePointAt(i) + if(cp > 255){ + throw Error() + } + t.push(cp) + } + return t +} function EXTRA_EXPR(head, tail){ return { @@ -172,15 +227,16 @@ $B._PyPegen.constant_from_string = function(p, token){ var prepared = $B.prepare_string(token) var is_bytes = prepared.value.startsWith('b') if(! 
is_bytes){ - var value = $B.make_string_for_ast_value(prepared.value) + var value = make_string_for_ast_value(prepared.value) }else{ value = prepared.value.substr(2, prepared.value.length - 3) try{ - value = _b_.bytes.$factory($B.encode_bytestring(value)) + value = _b_.bytes.$factory(encode_bytestring(value)) }catch(err){ $B._PyPegen.raise_error_known_location(p, _b_.SyntaxError, - token.start[0], token.start[1], token.end[0], token.end[1], + token.lineno, token.col_offset, + token.end_lineno, token.end_col_offset, 'bytes can only contain ASCII literal characters') } } @@ -284,8 +340,10 @@ $B._PyPegen.seq_count_dots = function(seq){ } var number_of_dots = 0; for(var token of seq){ - if(token.type == 'OP'){ + if(token.num_type == $B.py_tokens.DOT){ number_of_dots += token.string.length + }else if(token.num_type == $B.py_tokens.ELLIPSIS){ + number_of_dots += 3 } } @@ -447,7 +505,7 @@ $B._PyPegen.get_expr_name = function(e){ if (value === true) { return "True"; } - if (value.type == 'ellipsis') { + if (value === _b_.Ellipsis) { return "ellipsis"; } return "literal"; @@ -458,9 +516,11 @@ $B._PyPegen.get_expr_name = function(e){ case 'NamedExpr': return "named expression"; default: + /* PyErr_Format(PyExc_SystemError, "unexpected expression in assignment %d (line %d)", e.kind, e.lineno); + */ return NULL; } } @@ -501,10 +561,10 @@ $B._PyPegen.dummy_name = function(p){ return cache; } - var id = "", + var id = "dummy" + Math.random().toString(36).substr(2), ast_obj = new $B.ast.Name(id, new $B.ast.Load()) set_position_from_list(ast_obj, [1, 0, 1, 0]) - return cache; + return ast_obj } $B._PyPegen.add_type_comment_to_arg = function(p, a, tc){ @@ -542,7 +602,8 @@ $B._PyPegen.function_def_decorators = function(p, decorators, function_def){ var ast_obj = new constr( function_def.name, function_def.args, function_def.body, decorators, function_def.returns, - function_def.type_comment) + function_def.type_comment, + function_def.type_params) for(var position of positions){ ast_obj[position] = function_def[position] } @@ -553,7 +614,8 @@ $B._PyPegen.function_def_decorators = function(p, decorators, function_def){ $B._PyPegen.class_def_decorators = function(p, decorators, class_def){ var ast_obj = $B._PyAST.ClassDef( class_def.name, class_def.bases, - class_def.keywords, class_def.body, decorators) + class_def.keywords, class_def.body, decorators, + class_def.type_params) set_position_from_obj(ast_obj, class_def) return ast_obj } @@ -627,7 +689,7 @@ $B._PyPegen.raise_error = function(p, errtype, errmsg){ var t = p.known_err_token != NULL ? 
p.known_err_token : p.tokens[p.fill - 1]; var va = errmsg $B._PyPegen.raise_error_known_location(p, errtype, - t.start[0], t.start[1], t.end[0], t.end[1], errmsg, va); + t.lineno, t.col_offset, t.end_lineno, t.end_col_offset, errmsg, va); } $B._PyPegen.raise_error_known_location = function(p, errtype, @@ -636,10 +698,10 @@ $B._PyPegen.raise_error_known_location = function(p, errtype, exc.filename = p.filename if(p.known_err_token){ var token = p.known_err_token - exc.lineno = token.start[0] - exc.offset = token.start[1] + 1 - exc.end_lineno = token.end[0] - exc.end_offset = token.end[1] + exc.lineno = token.lineno + exc.offset = token.col_offset + 1 + exc.end_lineno = token.end_lineno + exc.end_offset = token.end_col_offset exc.text = token.line }else{ exc.lineno = lineno @@ -964,6 +1026,24 @@ $B._PyPegen.get_last_comprehension_item = function(comprehension) { return $B.last(comprehension.ifs); } +$B._PyPegen.arguments_parsing_error = function(p, e){ + var kwarg_unpacking = 0; + for (let keyword of e.keywords){ + if (! keyword.arg) { + kwarg_unpacking = 1; + } + } + + var msg = NULL; + if (kwarg_unpacking) { + msg = "positional argument follows keyword argument unpacking"; + } else { + msg = "positional argument follows keyword argument"; + } + + return $B.helper_functions.RAISE_SYNTAX_ERROR(p, msg); +} + $B._PyPegen.nonparen_genexp_in_call = function(p, args, comprehensions){ /* The rule that calls this function is 'args for_if_clauses'. For the input f(L, x for x in y), L and x are in args and @@ -979,7 +1059,7 @@ $B._PyPegen.nonparen_genexp_in_call = function(p, args, comprehensions){ var last_comprehension = $B.last(comprehensions); - return $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE( + return $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, args.args[len - 1], $B._PyPegen.get_last_comprehension_item(last_comprehension), "Generator expression must be parenthesized" @@ -987,7 +1067,6 @@ $B._PyPegen.nonparen_genexp_in_call = function(p, args, comprehensions){ } $B._PyPegen.get_invalid_target = function(e, targets_type){ - if (e == NULL) { return NULL; } @@ -1010,21 +1089,20 @@ $B._PyPegen.get_invalid_target = function(e, targets_type){ switch (e.constructor) { case $B.ast.List: case $B.ast.Tuple: - VISIT_CONTAINER(e, e.constructor); - return NULL; + return VISIT_CONTAINER(e, e.constructor); case $B.ast.Starred: if (targets_type == DEL_TARGETS) { return e; } - return _PyPegen_get_invalid_target(e.value, targets_type); + return $B._PyPegen.get_invalid_target(e.value, targets_type); case $B.ast.Compare: // This is needed, because the `a in b` in `for a in b` gets parsed // as a comparison, and so we need to search the left side of the comparison // for invalid targets. 
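For context on the `$B._PyPegen.arguments_parsing_error` helper above: it selects between the two messages CPython uses when a positional argument appears after keyword material in a call. The snippet below is plain CPython, shown only for comparison; the `samples` list and the `<demo>` filename are illustrative.

```python
# Plain-CPython sketch of the two diagnostics arguments_parsing_error
# chooses between, depending on whether the call already contains a
# **kwargs unpacking before the offending positional argument.
samples = [
    "f(x=1, 2)",   # keyword argument, then positional
    "f(**kw, 2)",  # keyword unpacking, then positional
]

for src in samples:
    try:
        compile(src, "<demo>", "eval")
    except SyntaxError as exc:
        print(f"{src!r:12} -> {exc.msg}")

# 'f(x=1, 2)'  -> positional argument follows keyword argument
# 'f(**kw, 2)' -> positional argument follows keyword argument unpacking
```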
if (targets_type == FOR_TARGETS) { var cmpop = e.ops[0] - if (cmpop == $B.ast.In) { - return _PyPegen_get_invalid_target(e.left, targets_type); + if (cmpop instanceof $B.ast.In) { + return $B._PyPegen.get_invalid_target(e.left, targets_type); } return NULL; } diff --git a/www/src/ast_to_js.js b/www/src/ast_to_js.js index fa9186a87..d5441088d 100644 --- a/www/src/ast_to_js.js +++ b/www/src/ast_to_js.js @@ -3,6 +3,89 @@ var _b_ = $B.builtins +function ast_dump(tree, indent){ + var attr, + value + indent = indent || 0 + if(tree === _b_.None){ + // happens in dictionary keys for **kw + return 'None' + }else if(typeof tree == 'string'){ + return `'${tree}'` + }else if(typeof tree == 'number'){ + return tree + '' + }else if(tree.imaginary){ + return tree.value + 'j' + }else if(Array.isArray(tree)){ + if(tree.length == 0){ + return '[]' + } + res = '[\n' + var items = [] + for(var x of tree){ + try{ + items.push(ast_dump(x, indent + 1)) + }catch(err){ + console.log('error', tree) + console.log('for item', x) + throw err + } + } + res += items.join(',\n') + return res + ']' + }else if(tree.$name){ + return tree.$name + '()' + }else if(tree instanceof ast.MatchSingleton){ + return `MatchSingleton(value=${$B.AST.$convert(tree.value)})` + }else if(tree instanceof ast.Constant){ + value = tree.value + // For imaginary numbers, value is an object with + // attribute "imaginary" set + if(value.imaginary){ + return `Constant(value=${_b_.repr(value.value)}j)` + } + return `Constant(value=${$B.AST.$convert(value)})` + } + var proto = Object.getPrototypeOf(tree).constructor + var res = ' ' .repeat(indent) + proto.$name + '(' + if($B.ast_classes[proto.$name] === undefined){ + console.log('no ast class', proto) + } + var attr_names = $B.ast_classes[proto.$name].split(','), + attrs = [] + // remove trailing * in attribute names + attr_names = attr_names.map(x => (x.endsWith('*') || x.endsWith('?')) ? + x.substr(0, x.length - 1) : x) + if([ast.Name].indexOf(proto) > -1){ + for(attr of attr_names){ + if(tree[attr] !== undefined){ + attrs.push(`${attr}=${ast_dump(tree[attr])}`) + } + } + return res + attrs.join(', ') + ')' + } + for(attr of attr_names){ + if(tree[attr] !== undefined){ + value = tree[attr] + attrs.push(attr + '=' + + ast_dump(tree[attr], indent + 1).trimStart()) + } + } + if(attrs.length > 0){ + res += '\n' + res += attrs.map(x => ' '.repeat(indent + 1) + x).join(',\n') + } + res += ')' + return res +} + + +function string_from_ast_value(value){ + // remove escaped "'" in string value + return value.replace(new RegExp("\\\\'", 'g'), "'") +} + + function compiler_error(ast_obj, message, end){ var exc = _b_.SyntaxError.$factory(message) exc.filename = state.filename @@ -15,15 +98,15 @@ function compiler_error(ast_obj, message, end){ exc.text = _b_.None } exc.lineno = ast_obj.lineno - exc.offset = ast_obj.col_offset + exc.offset = ast_obj.col_offset + 1 end = end || ast_obj exc.end_lineno = end.end_lineno - exc.end_offset = end.end_col_offset + exc.end_offset = end.end_col_offset + 1 exc.args[1] = [exc.filename, exc.lineno, exc.offset, exc.text, exc.end_lineno, exc.end_offset] exc.$frame_obj = $B.frame_obj if($B.frame_obj === null){ - console.log('frame obj is null') + // console.log('frame obj is null') } throw exc } @@ -848,6 +931,25 @@ function compiler_check(obj){ } } +function check_assign_or_delete(obj, target, action){ + action = action ?? 
'assign to' + if(target instanceof $B.ast.Attribute){ + if(target.attr == '__debug__'){ + compiler_error(obj, `cannot ${action} __debug__`, target) + } + }else if(target instanceof $B.ast.Name){ + if(target.id == '__debug__'){ + compiler_error(obj, `cannot ${action} __debug__`, target) + } + }else if(target instanceof $B.ast.Tuple){ + for(var elt of target.elts){ + check_assign_or_delete(elt, elt, action) + } + }else if(target instanceof $B.ast.Starred){ + check_assign_or_delete(obj, target.value, action) + } +} + $B.ast.Assert.prototype.to_js = function(scopes){ var test = $B.js_from_ast(this.test, scopes), msg = this.msg ? $B.js_from_ast(this.msg, scopes) : '' @@ -860,6 +962,7 @@ function annotation_to_str(obj, scopes){ } $B.ast.AnnAssign.prototype.to_js = function(scopes){ + compiler_check(this) var postpone_annotation = scopes.symtable.table.future.features & $B.CO_FUTURE_ANNOTATIONS var scope = last_scope(scopes) @@ -905,6 +1008,10 @@ $B.ast.AnnAssign.prototype.to_js = function(scopes){ return `$B.set_lineno(frame, ${this.lineno})\n` + js } +$B.ast.AnnAssign.prototype._check = function(){ + check_assign_or_delete(this, this.target) +} + $B.ast.Assign.prototype.to_js = function(scopes){ compiler_check(this) var js = this.lineno ? `$B.set_lineno(frame, ${this.lineno})\n` : '', @@ -987,6 +1094,13 @@ $B.ast.Assign.prototype.to_js = function(scopes){ return js } + +$B.ast.Assign.prototype._check = function(){ + for(var target of this.targets){ + check_assign_or_delete(this, target) + } +} + $B.ast.AsyncFor.prototype.to_js = function(scopes){ if(! (last_scope(scopes).ast instanceof $B.ast.AsyncFunctionDef)){ compiler_error(this, "'async for' outside async function") @@ -1094,9 +1208,6 @@ $B.ast.AsyncWith.prototype.to_js = function(scopes){ $B.ast.Attribute.prototype.to_js = function(scopes){ var attr = mangle(scopes, last_scope(scopes), this.attr) - if(this.value instanceof $B.ast.Name && this.value.id == 'axw'){ - return `${$B.js_from_ast(this.value, scopes)}.${attr}` - } var position = encode_position(this.value.col_offset, this.value.col_offset, this.end_col_offset) @@ -1105,6 +1216,7 @@ $B.ast.Attribute.prototype.to_js = function(scopes){ } $B.ast.AugAssign.prototype.to_js = function(scopes){ + compiler_check(this) var js, op_class = this.op.$name ? 
this.op : this.op.constructor for(var op in $B.op2ast_class){ @@ -1146,6 +1258,10 @@ $B.ast.AugAssign.prototype.to_js = function(scopes){ return `$B.set_lineno(frame, ${this.lineno})\n` + js } +$B.ast.AugAssign.prototype._check = function(){ + check_assign_or_delete(this, this.target) +} + $B.ast.Await.prototype.to_js = function(scopes){ var ix = scopes.length - 1 while(scopes[ix].parent){ @@ -1243,6 +1359,7 @@ $B.ast.Break.prototype.to_js = function(scopes){ } $B.ast.Call.prototype.to_js = function(scopes){ + compiler_check(this) var func = $B.js_from_ast(this.func, scopes), js = `$B.$call(${func}` @@ -1257,6 +1374,14 @@ $B.ast.Call.prototype.to_js = function(scopes){ `(${args.js})`) } +$B.ast.Call.prototype._check = function(){ + for(var kw of this.keywords){ + if(kw.arg == '__debug__'){ + compiler_error(this, "cannot assign to __debug__", kw) + } + } +} + function make_args(scopes){ var js = '', named_args = [], @@ -1453,7 +1578,7 @@ $B.ast.ClassDef.prototype.to_js = function(scopes){ js += '\n$B.trace_return_and_leave(frame, _b_.None)\n' + `return $B.$class_constructor('${this.name}', locals, metaclass, ` + `resolved_bases, bases, [${keywords.join(', ')}])\n` + - `})('${this.name}', '${glob}', $B.fast_tuple([${bases}]))\n` + `})('${this.name}',${globals_name}.__name__ ?? '${glob}', $B.fast_tuple([${bases}]))\n` var class_ref = reference(scopes, enclosing_scope, this.name) @@ -1596,6 +1721,13 @@ $B.ast.Delete.prototype.to_js = function(scopes){ } return `$B.set_lineno(frame, ${this.lineno})\n` + js } + +$B.ast.Delete.prototype._check = function(){ + for(var target of this.targets){ + check_assign_or_delete(this, target, 'delete') + } +} + $B.ast.Dict.prototype.to_js = function(scopes){ var items = [], keys = this.keys, @@ -1618,7 +1750,7 @@ $B.ast.Dict.prototype.to_js = function(scopes){ if(this.keys[i] instanceof $B.ast.Constant){ var v = this.keys[i].value if(typeof v == 'string'){ - item += ', ' + $B.$hash($B.string_from_ast_value(v)) + item += ', ' + $B.$hash(string_from_ast_value(v)) }else{ try{ var hash = $B.$hash(this.keys[i].value) @@ -1661,6 +1793,7 @@ $B.ast.Expression.prototype.to_js = function(scopes){ $B.ast.For.prototype.to_js = function(scopes){ // Create a new scope with the same name to avoid binding in the enclosing // scope. 
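The `check_assign_or_delete` walker and the `_check` hooks added in this hunk reproduce CPython's compile-time protection of `__debug__` for plain, annotated, augmented and tuple-unpacking targets, for `del`, and for keyword arguments in calls. A plain-CPython illustration (the `cases` list is an arbitrary sample, not Brython's test suite):

```python
# Every way of rebinding or deleting __debug__ is rejected at compile time,
# before the code runs; only the exception messages are printed here.
cases = [
    "__debug__ = 1",           # plain assignment
    "__debug__: int = 1",      # annotated assignment
    "__debug__ += 1",          # augmented assignment
    "(a, __debug__) = 1, 2",   # assignment inside a tuple target
    "del __debug__",           # deletion
    "f(__debug__=1)",          # keyword argument in a call
]

for src in cases:
    try:
        compile(src, "<demo>", "exec")
    except SyntaxError as exc:
        print(f"{src!r:24} -> {exc.msg}")
```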
+ compiler_check(this) var id = $B.UUID(), iter = $B.js_from_ast(this.iter, scopes), js = `frame.$lineno = ${this.lineno}\n` @@ -2281,6 +2414,7 @@ $B.make_args_parser_and_parse = function make_args_parser_and_parse(fct, args) { $B.ast.FunctionDef.prototype.to_js = function(scopes){ + compiler_check(this) var symtable_block = scopes.symtable.table.blocks.get(fast_id(this)) var in_class = last_scope(scopes).ast instanceof $B.ast.ClassDef, is_async = this instanceof $B.ast.AsyncFunctionDef @@ -2656,6 +2790,26 @@ $B.ast.FunctionDef.prototype.to_js = function(scopes){ return js } +$B.ast.FunctionDef.prototype._check = function(){ + for(var arg of this.args.args){ + if(arg instanceof $B.ast.arg){ + if(arg.arg == '__debug__'){ + compiler_error(arg, 'cannot assign to __debug__') + } + } + } + for(var arg of this.args.kwonlyargs){ + if(arg instanceof $B.ast.arg){ + if(arg.arg == '__debug__'){ + compiler_error(arg, 'cannot assign to __debug__') + } + } + } + if(this.args.kwarg && this.args.kwarg.arg == '__debug__'){ + compiler_error(this.args.kwarg, 'cannot assign to __debug__') + } +} + $B.ast.GeneratorExp.prototype.to_js = function(scopes){ var id = $B.UUID(), symtable_block = scopes.symtable.table.blocks.get(fast_id(this)), @@ -2826,6 +2980,53 @@ $B.ast.ImportFrom.prototype.to_js = function(scopes){ return js } +$B.ast.Interactive.prototype.to_js = function(scopes){ + mark_parents(this) + // create top scope + var name = init_scopes.bind(this)('module', scopes) + + var module_id = name, + global_name = make_scope_name(scopes), + mod_name = module_name(scopes) + + var js = `// Javascript code generated from ast\n` + + `var $B = __BRYTHON__,\n_b_ = $B.builtins,\n` + + js += `${global_name} = {}, // $B.imported["${mod_name}"],\n` + + `locals = ${global_name},\n` + + `frame = ["${module_id}", locals, "${module_id}", locals]` + + js += `\nvar __file__ = frame.__file__ = '${scopes.filename || ""}'\n` + + `locals.__name__ = '${name}'\n` + + `locals.__doc__ = ${extract_docstring(this, scopes)}\n` + + if(! scopes.imported){ + js += `locals.__annotations__ = locals.__annotations__ || $B.empty_dict()\n` + } + + + // for exec(), frame is put on top of the stack inside + // py_builtin_functions.js / $$eval() + + js += `frame.$f_trace = $B.enter_frame(frame)\n` + js += `$B.set_lineno(frame, 1)\n` + + '\nvar _frame_obj = $B.frame_obj\n' + js += 'var stack_length = $B.count_frames()\n' + + js += `try{\n` + + add_body(this.body, scopes) + '\n' + + `$B.leave_frame({locals, value: _b_.None})\n` + + `}catch(err){\n` + + `$B.set_exc_and_trace(frame, err)\n` + + `$B.leave_frame({locals, value: _b_.None})\n` + + 'throw err\n' + + `}` + scopes.pop() + + console.log('Interactive', js) + return js +} + $B.ast.JoinedStr.prototype.to_js = function(scopes){ var items = this.values.map(s => $B.js_from_ast(s, scopes)) if(items.length == 0){ @@ -3172,9 +3373,8 @@ $B.ast.Module.prototype.to_js = function(scopes){ // for exec(), frame is put on top of the stack inside // py_builtin_functions.js / $$eval() - - js += `frame.$f_trace = $B.enter_frame(frame)\n` if(! 
namespaces){ + js += `frame.$f_trace = $B.enter_frame(frame)\n` js += `$B.set_lineno(frame, 1)\n` + '\nvar _frame_obj = $B.frame_obj\n' } @@ -3214,6 +3414,7 @@ $B.ast.Name.prototype.to_js = function(scopes){ } $B.ast.NamedExpr.prototype.to_js = function(scopes){ + compiler_check(this) // Named expressions in a comprehension are bound in the enclosing scope var i = scopes.length - 1 while(scopes[i].type == 'comprehension'){ @@ -3226,6 +3427,10 @@ $B.ast.NamedExpr.prototype.to_js = function(scopes){ $B.js_from_ast(this.value, scopes) + ')' } +$B.ast.NamedExpr.prototype._check = function(){ + check_assign_or_delete(this, this.target) +} + $B.ast.Nonlocal.prototype.to_js = function(scopes){ var scope = $B.last(scopes) for(var name of this.names){ @@ -3253,6 +3458,10 @@ $B.ast.Raise.prototype.to_js = function(scopes){ $B.ast.Return.prototype.to_js = function(scopes){ // check that return is inside a function + if(last_scope(scopes).type != 'def'){ + compiler_error(this, "'return' outside function") + } + compiler_check(this) var js = `$B.set_lineno(frame, ${this.lineno})\n` + 'var result = ' + @@ -3299,7 +3508,7 @@ $B.ast.Starred.prototype.to_js = function(scopes){ compiler_error(this, "starred assignment target must be in a list or tuple") }else{ - compiler_error(this, "invalid syntax") + compiler_error(this, "can't use starred expression here") } } diff --git a/www/src/brython.js b/www/src/brython.js index 0efbdeb5e..3ed83f7ad 100644 --- a/www/src/brython.js +++ b/www/src/brython.js @@ -1,6 +1,6 @@ // brython.js brython.info // version [3, 12, 0, 'final', 0] -// implementation [3, 12, 1, 'dev', 0] +// implementation [3, 12, 3, 'dev', 0] // version compiled from commented, indented source files at // github.com/brython-dev/brython var __BRYTHON__=globalThis.__BRYTHON__ ||{} @@ -39,6 +39,8 @@ if(['http','https'].includes(mo[1])){$B.domain=mo[1]+'://'+mo[2]}} var path=_window.location.origin+_window.location.pathname,path_elts=path.split("/") path_elts.pop() $B.script_dir=path_elts.join("/") +mo=parts_re.exec($B.script_dir) +if(mo){if(['http','https'].includes(mo[1])){$B.script_domain=mo[1]+'://'+mo[2]}} $B.strip_host=function(url){var parts_re=new RegExp('(.*?)://(.*?)/(.*)'),mo=parts_re.exec(url) if(mo){return mo[3]} console.log(Error().stack) @@ -72,6 +74,7 @@ $B.tz_name=long.substr(ix).trim() $B.PyCF_ONLY_AST=1024 $B.PyCF_TYPE_COMMENTS=0x1000 $B.CO_FUTURE_ANNOTATIONS=0x1000000 +$B.PyCF_ALLOW_INCOMPLETE_INPUT=0x4000 if($B.isWebWorker){$B.charset="utf-8"}else{ $B.charset=document.characterSet ||document.inputEncoding ||"utf-8"} $B.max_int=Math.pow(2,53)-1 @@ -82,7 +85,16 @@ $B.int_max_str_digits=4300 $B.str_digits_check_threshold=640 $B.max_array_size=2**32-1 $B.recursion_limit=200 -$B.pep657=true +$B.op2method={operations:{"**":"pow","//":"floordiv","<<":"lshift",">>":"rshift","+":"add","-":"sub","*":"mul","/":"truediv","%":"mod","@":"matmul" },augmented_assigns:{"//=":"ifloordiv",">>=":"irshift","<<=":"ilshift","**=":"ipow","+=":"iadd","-=":"isub","*=":"imul","/=":"itruediv","%=":"imod","&=":"iand","|=":"ior","^=":"ixor","@=":"imatmul"},binary:{"&":"and","|":"or","~":"invert","^":"xor"},comparisons:{"<":"lt",">":"gt","<=":"le",">=":"ge","==":"eq","!=":"ne"},boolean:{"or":"or","and":"and","in":"in","not":"not","is":"is"},subset:function(){var res={},keys=[] +if(arguments[0]=="all"){keys=Object.keys($B.op2method) +keys.splice(keys.indexOf("subset"),1)}else{for(var arg of arguments){keys.push(arg)}} +for(var key of keys){var ops=$B.op2method[key] +if(ops===undefined){throw Error(key)} +for(var 
attr in ops){res[attr]=ops[attr]}} +return res}} +$B.method_to_op={} +for(var category in $B.op2method){for(var op in $B.op2method[category]){var method=`__${$B.op2method[category][op]}__` +$B.method_to_op[method]=op}} $B.special_string_repr={8:"\\x08",9:"\\t",10:"\\n",11:"\\x0b",12:"\\x0c",13:"\\r",92:"\\\\",160:"\\xa0"} $B.$py_next_hash=Math.pow(2,53)-1 $B.$py_UUID=0 @@ -141,29 +153,44 @@ return $B.imported[script_id]}})(__BRYTHON__) ; __BRYTHON__.ast_classes={Add:'',And:'',AnnAssign:'target,annotation,value?,simple',Assert:'test,msg?',Assign:'targets*,value,type_comment?',AsyncFor:'target,iter,body*,orelse*,type_comment?',AsyncFunctionDef:'name,args,body*,decorator_list*,returns?,type_comment?,type_params*',AsyncWith:'items*,body*,type_comment?',Attribute:'value,attr,ctx',AugAssign:'target,op,value',Await:'value',BinOp:'left,op,right',BitAnd:'',BitOr:'',BitXor:'',BoolOp:'op,values*',Break:'',Call:'func,args*,keywords*',ClassDef:'name,bases*,keywords*,body*,decorator_list*,type_params*',Compare:'left,ops*,comparators*',Constant:'value,kind?',Continue:'',Del:'',Delete:'targets*',Dict:'keys*,values*',DictComp:'key,value,generators*',Div:'',Eq:'',ExceptHandler:'type?,name?,body*',Expr:'value',Expression:'body',FloorDiv:'',For:'target,iter,body*,orelse*,type_comment?',FormattedValue:'value,conversion,format_spec?',FunctionDef:'name,args,body*,decorator_list*,returns?,type_comment?,type_params*',FunctionType:'argtypes*,returns',GeneratorExp:'elt,generators*',Global:'names*',Gt:'',GtE:'',If:'test,body*,orelse*',IfExp:'test,body,orelse',Import:'names*',ImportFrom:'module?,names*,level?',In:'',Interactive:'body*',Invert:'',Is:'',IsNot:'',JoinedStr:'values*',LShift:'',Lambda:'args,body',List:'elts*,ctx',ListComp:'elt,generators*',Load:'',Lt:'',LtE:'',MatMult:'',Match:'subject,cases*',MatchAs:'pattern?,name?',MatchClass:'cls,patterns*,kwd_attrs*,kwd_patterns*',MatchMapping:'keys*,patterns*,rest?',MatchOr:'patterns*',MatchSequence:'patterns*',MatchSingleton:'value',MatchStar:'name?',MatchValue:'value',Mod:'',Module:'body*,type_ignores*',Mult:'',Name:'id,ctx',NamedExpr:'target,value',Nonlocal:'names*',Not:'',NotEq:'',NotIn:'',Or:'',ParamSpec:'name',Pass:'',Pow:'',RShift:'',Raise:'exc?,cause?',Return:'value?',Set:'elts*',SetComp:'elt,generators*',Slice:'lower?,upper?,step?',Starred:'value,ctx',Store:'',Sub:'',Subscript:'value,slice,ctx',Try:'body*,handlers*,orelse*,finalbody*',TryStar:'body*,handlers*,orelse*,finalbody*',Tuple:'elts*,ctx',TypeAlias:'name,type_params*,value',TypeIgnore:'lineno,tag',TypeVar:'name,bound?',TypeVarTuple:'name',UAdd:'',USub:'',UnaryOp:'op,operand',While:'test,body*,orelse*',With:'items*,body*,type_comment?',Yield:'value?',YieldFrom:'value',alias:'name,asname?',arg:'arg,annotation?,type_comment?',arguments:'posonlyargs*,args*,vararg?,kwonlyargs*,kw_defaults*,kwarg?,defaults*',boolop:['And','Or'],cmpop:['Eq','NotEq','Lt','LtE','Gt','GtE','Is','IsNot','In','NotIn'],comprehension:'target,iter,ifs*,is_async',excepthandler:['ExceptHandler'],expr:['BoolOp','NamedExpr','BinOp','UnaryOp','Lambda','IfExp','Dict','Set','ListComp','SetComp','DictComp','GeneratorExp','Await','Yield','YieldFrom','Compare','Call','FormattedValue','JoinedStr','Constant','Attribute','Subscript','Starred','Name','List','Tuple','Slice'],expr_context:['Load','Store','Del'],keyword:'arg?,value',match_case:'pattern,guard?,body*',mod:['Module','Interactive','Expression','FunctionType'],operator:['Add','Sub','Mult','MatMult','Div','Mod','Pow','LShift','RShift','BitOr','BitXor','BitAnd','FloorDiv'],pattern:['MatchV
alue','MatchSingleton','MatchSequence','MatchMapping','MatchClass','MatchStar','MatchAs','MatchOr'],stmt:['FunctionDef','AsyncFunctionDef','ClassDef','Return','Delete','Assign','TypeAlias','AugAssign','AnnAssign','For','AsyncFor','While','If','With','AsyncWith','Match','Raise','Try','TryStar','Assert','Import','ImportFrom','Global','Nonlocal','Expr','Pass','Break','Continue'],type_ignore:['TypeIgnore'],type_param:['TypeVar','ParamSpec','TypeVarTuple'],unaryop:['Invert','Not','UAdd','USub'],withitem:'context_expr,optional_vars?'} ; - -var $B=__BRYTHON__ -$B.unicode={"No_digits":[178,179,185,[4969,9],6618,8304,[8308,6],[8320,10],[9312,9],[9332,9],[9352,9],9450,[9461,9],9471,[10102,9],[10112,9],[10122,9],[68160,4],[69216,9],[69714,9],[127232,11]],"Lo_numeric":[13317,13443,14378,15181,19968,19971,19975,19977,20061,20108,20116,20118,20159,20160,20191,20200,20237,20336,20740,20806,[20841,3,2],21313,[21315,3],21324,[21441,4],22235,22769,22777,24186,24318,24319,[24332,3],24336,25342,25420,26578,28422,29590,30334,32902,33836,36014,36019,36144,38433,38470,38476,38520,38646,63851,63859,63864,63922,63953,63955,63997,131073,131172,131298,131361,133418,133507,133516,133532,133866,133885,133913,140176,141720,146203,156269,194704]} -$B.digits_starts=[48,1632,1776,1984,2406,2534,2662,2790,2918,3046,3174,3302,3430,3558,3664,3792,3872,4160,4240,6112,6160,6470,6608,6784,6800,6992,7088,7232,7248,42528,43216,43264,43472,43504,43600,44016,65296,66720,68912,69734,69872,69942,70096,70384,70736,70864,71248,71360,71472,71904,72016,72784,73040,73120,73552,92768,92864,93008,120782,120792,120802,120812,120822,123200,123632,124144,125264,130032] -$B.unicode_casefold={223:[115,115],304:[105,775],329:[700,110],496:[106,780],912:[953,776,769],944:[965,776,769],1415:[1381,1410],7830:[104,817],7831:[116,776],7832:[119,778],7833:[121,778],7834:[97,702],7838:[223],8016:[965,787],8018:[965,787,768],8020:[965,787,769],8022:[965,787,834],8064:[7936,953],8065:[7937,953],8066:[7938,953],8067:[7939,953],8068:[7940,953],8069:[7941,953],8070:[7942,953],8071:[7943,953],8072:[8064],8073:[8065],8074:[8066],8075:[8067],8076:[8068],8077:[8069],8078:[8070],8079:[8071],8080:[7968,953],8081:[7969,953],8082:[7970,953],8083:[7971,953],8084:[7972,953],8085:[7973,953],8086:[7974,953],8087:[7975,953],8088:[8080],8089:[8081],8090:[8082],8091:[8083],8092:[8084],8093:[8085],8094:[8086],8095:[8087],8096:[8032,953],8097:[8033,953],8098:[8034,953],8099:[8035,953],8100:[8036,953],8101:[8037,953],8102:[8038,953],8103:[8039,953],8104:[8096],8105:[8097],8106:[8098],8107:[8099],8108:[8100],8109:[8101],8110:[8102],8111:[8103],8114:[8048,953],8115:[945,953],8116:[940,953],8118:[945,834],8119:[945,834,953],8124:[8115],8130:[8052,953],8131:[951,953],8132:[942,953],8134:[951,834],8135:[951,834,953],8140:[8131],8146:[953,776,768],8147:[912],8150:[953,834],8151:[953,776,834],8162:[965,776,768],8163:[944],8164:[961,787],8166:[965,834],8167:[965,776,834],8178:[8060,953],8179:[969,953],8180:[974,953],8182:[969,834],8183:[969,834,953],8188:[8179],64256:[102,102],64257:[102,105],64258:[102,108],64259:[102,102,105],64260:[102,102,108],64261:[64262],64262:[115,116],64275:[1396,1398],64276:[1396,1381],64277:[1396,1387],64278:[1406,1398],64279:[1396,1389]} -$B.unicode_bidi_whitespace=[9,10,11,12,13,28,29,30,31,32,133,5760,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8232,8233,8287,12288] -; ;(function($B){$B.stdlib={} var 
pylist=['VFS_import','__future__','_aio','_codecs','_codecs_jp','_collections','_collections_abc','_compat_pickle','_compression','_contextvars','_csv','_dummy_thread','_frozen_importlib','_functools','_imp','_io','_markupbase','_multibytecodec','_operator','_py_abc','_pydatetime','_pydecimal','_queue','_signal','_socket','_sre','_struct','_sysconfigdata','_sysconfigdata_0_brython_','_testcapi','_thread','_threading_local','_typing','_weakref','_weakrefset','abc','antigravity','argparse','ast','asyncio','atexit','base64','bdb','binascii','bisect','browser.ajax','browser.highlight','browser.idbcache','browser.indexed_db','browser.local_storage','browser.markdown','browser.object_storage','browser.session_storage','browser.svg','browser.template','browser.timer','browser.ui','browser.webcomponent','browser.websocket','browser.worker','calendar','cmath','cmd','code','codecs','codeop','colorsys','configparser','contextlib','contextvars','copy','copyreg','csv','dataclasses','datetime','decimal','difflib','doctest','enum','errno','external_import','faulthandler','fnmatch','formatter','fractions','functools','gc','genericpath','getopt','getpass','gettext','glob','gzip','heapq','hmac','imp','inspect','interpreter','io','ipaddress','itertools','keyword','linecache','locale','mimetypes','nntplib','ntpath','numbers','opcode','operator','optparse','os','pathlib','pdb','pickle','pkgutil','platform','posixpath','pprint','profile','pwd','py_compile','pyclbr','pydoc','queue','quopri','random','re','re1','reprlib','secrets','select','selectors','shlex','shutil','signal','site','site-packages.__future__','site-packages.docs','site-packages.header','site-packages.test_sp','socket','sre_compile','sre_constants','sre_parse','stat','statistics','string','stringprep','struct','subprocess','symtable','sys','sysconfig','tabnanny','tarfile','tb','tempfile','test.namespace_pkgs.module_and_namespace_package.a_test','textwrap','this','threading','time','timeit','token','tokenize','traceback','turtle','types','typing','uu','uuid','warnings','weakref','webbrowser','zipfile','zipimport','zlib'] for(var i=0;i < pylist.length;i++){$B.stdlib[pylist[i]]=['py']} var js=['_ajax','_ast','_base64','_binascii','_io_classes','_json','_jsre','_locale','_multiprocessing','_posixsubprocess','_profile','_random','_sre','_sre_utils','_string','_strptime','_svg','_symtable','_tokenize','_webcomponent','_webworker','_zlib_utils','aes','array','builtins','dis','encoding_cp932','hashlib','hmac-md5','hmac-ripemd160','hmac-sha1','hmac-sha224','hmac-sha256','hmac-sha3','hmac-sha384','hmac-sha512','html_parser','marshal','math','md5','modulefinder','pbkdf2','posix','pyexpat','python_re','rabbit','rabbit-legacy','rc4','ripemd160','sha1','sha224','sha256','sha3','sha384','sha512','tripledes','unicodedata'] for(var i=0;i < js.length;i++){$B.stdlib[js[i]]=['js']} -var 
pkglist=['browser','browser.widgets','collections','concurrent','concurrent.futures','email','email.mime','encodings','html','http','importlib','importlib.metadata','importlib.resources','json','logging','multiprocessing','multiprocessing.dummy','pydoc_data','site-packages.foobar','site-packages.simpleaio','site-packages.ui','test','test.encoded_modules','test.leakers','test.namespace_pkgs.not_a_namespace_pkg.foo','test.support','test.test_email','test.test_importlib','test.test_importlib.builtin','test.test_importlib.extension','test.test_importlib.frozen','test.test_importlib.import_','test.test_importlib.source','test.test_json','test.tracedmodules','unittest','unittest.test','unittest.test.testmock','urllib'] +var pkglist=['browser','browser.widgets','collections','concurrent','concurrent.futures','email','email.mime','encodings','html','http','importlib','importlib.metadata','importlib.resources','json','logging','multiprocessing','multiprocessing.dummy','site-packages.foobar','site-packages.simpleaio','site-packages.ui','test','test.encoded_modules','test.leakers','test.namespace_pkgs.not_a_namespace_pkg.foo','test.support','test.test_email','test.test_importlib','test.test_importlib.builtin','test.test_importlib.extension','test.test_importlib.frozen','test.test_importlib.import_','test.test_importlib.source','test.test_json','test.tracedmodules','unittest','unittest.test','unittest.test.testmock','urllib'] for(var i=0;i < pkglist.length;i++){$B.stdlib[pkglist[i]]=['py',true]} $B.stdlib_module_names=Object.keys($B.stdlib)})(__BRYTHON__) ; -__BRYTHON__.implementation=[3,12,1,'dev',0] + +var $B=__BRYTHON__ +$B.unicode={"No_digits":[178,179,185,[4969,9],6618,8304,[8308,6],[8320,10],[9312,9],[9332,9],[9352,9],9450,[9461,9],9471,[10102,9],[10112,9],[10122,9],[68160,4],[69216,9],[69714,9],[127232,11]],"Lo_numeric":[13317,13443,14378,15181,19968,19971,19975,19977,20061,20108,20116,20118,20159,20160,20191,20200,20237,20336,20740,20806,[20841,3,2],21313,[21315,3],21324,[21441,4],22235,22769,22777,24186,24318,24319,[24332,3],24336,25342,25420,26578,28422,29590,30334,32902,33836,36014,36019,36144,38433,38470,38476,38520,38646,63851,63859,63864,63922,63953,63955,63997,131073,131172,131298,131361,133418,133507,133516,133532,133866,133885,133913,140176,141720,146203,156269,194704]} +$B.digits_starts=[48,1632,1776,1984,2406,2534,2662,2790,2918,3046,3174,3302,3430,3558,3664,3792,3872,4160,4240,6112,6160,6470,6608,6784,6800,6992,7088,7232,7248,42528,43216,43264,43472,43504,43600,44016,65296,66720,68912,69734,69872,69942,70096,70384,70736,70864,71248,71360,71472,71904,72016,72784,73040,73120,73552,92768,92864,93008,120782,120792,120802,120812,120822,123200,123632,124144,125264,130032] 
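The Unicode tables moved in this hunk (`$B.digits_starts` above, `$B.unicode_casefold` just below) encode data that CPython exposes through `int()`, `unicodedata` and `str.casefold()`. A small plain-CPython cross-check with hand-picked code points:

```python
import unicodedata

# 1632 (U+0660) appears in digits_starts: it opens the Arabic-Indic digit
# run, so int() and unicodedata treat these characters as decimal digits.
print(int("٣"), unicodedata.digit("٣"))   # 3 3

# 223 -> [115, 115] and 64257 -> [102, 105] in unicode_casefold correspond
# to the full case foldings applied by str.casefold().
print("ß".casefold(), "ﬁ".casefold())     # ss fi
```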
+$B.unicode_casefold={223:[115,115],304:[105,775],329:[700,110],496:[106,780],912:[953,776,769],944:[965,776,769],1415:[1381,1410],7830:[104,817],7831:[116,776],7832:[119,778],7833:[121,778],7834:[97,702],7838:[223],8016:[965,787],8018:[965,787,768],8020:[965,787,769],8022:[965,787,834],8064:[7936,953],8065:[7937,953],8066:[7938,953],8067:[7939,953],8068:[7940,953],8069:[7941,953],8070:[7942,953],8071:[7943,953],8072:[8064],8073:[8065],8074:[8066],8075:[8067],8076:[8068],8077:[8069],8078:[8070],8079:[8071],8080:[7968,953],8081:[7969,953],8082:[7970,953],8083:[7971,953],8084:[7972,953],8085:[7973,953],8086:[7974,953],8087:[7975,953],8088:[8080],8089:[8081],8090:[8082],8091:[8083],8092:[8084],8093:[8085],8094:[8086],8095:[8087],8096:[8032,953],8097:[8033,953],8098:[8034,953],8099:[8035,953],8100:[8036,953],8101:[8037,953],8102:[8038,953],8103:[8039,953],8104:[8096],8105:[8097],8106:[8098],8107:[8099],8108:[8100],8109:[8101],8110:[8102],8111:[8103],8114:[8048,953],8115:[945,953],8116:[940,953],8118:[945,834],8119:[945,834,953],8124:[8115],8130:[8052,953],8131:[951,953],8132:[942,953],8134:[951,834],8135:[951,834,953],8140:[8131],8146:[953,776,768],8147:[912],8150:[953,834],8151:[953,776,834],8162:[965,776,768],8163:[944],8164:[961,787],8166:[965,834],8167:[965,776,834],8178:[8060,953],8179:[969,953],8180:[974,953],8182:[969,834],8183:[969,834,953],8188:[8179],64256:[102,102],64257:[102,105],64258:[102,108],64259:[102,102,105],64260:[102,102,108],64261:[64262],64262:[115,116],64275:[1396,1398],64276:[1396,1381],64277:[1396,1387],64278:[1406,1398],64279:[1396,1389]} +$B.unicode_bidi_whitespace=[9,10,11,12,13,28,29,30,31,32,133,5760,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8232,8233,8287,12288] +; +__BRYTHON__.implementation=[3,12,3,'dev',0] __BRYTHON__.version_info=[3,12,0,'final',0] -__BRYTHON__.compiled_date="2024-02-04 02:27:37.926428" -__BRYTHON__.timestamp=1707031657926 +__BRYTHON__.compiled_date="2024-03-05 02:20:55.108836" +__BRYTHON__.timestamp=1709623255108 __BRYTHON__.builtin_module_names=["_ajax","_ast","_base64","_binascii","_io_classes","_json","_jsre","_locale","_multiprocessing","_posixsubprocess","_profile","_random","_sre","_sre_utils","_string","_strptime","_svg","_symtable","_tokenize","_webcomponent","_webworker","_zlib_utils","array","builtins","dis","encoding_cp932","hashlib","html_parser","marshal","math","modulefinder","posix","pyexpat","python_re","unicodedata"] ; + +(function($B){const tokens=['ENDMARKER','NAME','NUMBER','STRING','NEWLINE','INDENT','DEDENT','LPAR','RPAR','LSQB','RSQB','COLON','COMMA','SEMI','PLUS','MINUS','STAR','SLASH','VBAR','AMPER','LESS','GREATER','EQUAL','DOT','PERCENT','LBRACE','RBRACE','EQEQUAL','NOTEQUAL','LESSEQUAL','GREATEREQUAL','TILDE','CIRCUMFLEX','LEFTSHIFT','RIGHTSHIFT','DOUBLESTAR','PLUSEQUAL','MINEQUAL','STAREQUAL','SLASHEQUAL','PERCENTEQUAL','AMPEREQUAL','VBAREQUAL','CIRCUMFLEXEQUAL','LEFTSHIFTEQUAL','RIGHTSHIFTEQUAL','DOUBLESTAREQUAL','DOUBLESLASH','DOUBLESLASHEQUAL','AT','ATEQUAL','RARROW','ELLIPSIS','COLONEQUAL','EXCLAMATION','OP','AWAIT','ASYNC','TYPE_IGNORE','TYPE_COMMENT','SOFT_KEYWORD','FSTRING_START','FSTRING_MIDDLE','FSTRING_END','COMMENT','NL', +'ERRORTOKEN','ENCODING','N_TOKENS' +] +$B.py_tokens={} +var pos=0 +for(var tok of tokens){$B.py_tokens[tok]=pos++} +$B.py_tokens['NT_OFFSET']=256 
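The `$B.py_tokens` table built above and the `$B.EXACT_TOKEN_TYPES` mapping that follows mirror CPython's own `token` module, so the stdlib can be used to cross-check individual entries. A minimal plain-CPython sketch:

```python
import token

print(token.NT_OFFSET)                # 256, as stored in $B.py_tokens['NT_OFFSET']
num = token.EXACT_TOKEN_TYPES["..."]  # exact token number for '...'
print(num, token.tok_name[num])       # prints the number and 'ELLIPSIS'
```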
+$B.EXACT_TOKEN_TYPES={'!':'EXCLAMATION','!=':'NOTEQUAL','%':'PERCENT','%=':'PERCENTEQUAL','&':'AMPER','&=':'AMPEREQUAL','(':'LPAR',')':'RPAR','*':'STAR','**':'DOUBLESTAR','**=':'DOUBLESTAREQUAL','*=':'STAREQUAL','+':'PLUS','+=':'PLUSEQUAL',',':'COMMA','-':'MINUS','-=':'MINEQUAL','->':'RARROW','.':'DOT','...':'ELLIPSIS','/':'SLASH','//':'DOUBLESLASH','//=':'DOUBLESLASHEQUAL','/=':'SLASHEQUAL',':':'COLON',':=':'COLONEQUAL',';':'SEMI','<':'LESS','<<':'LEFTSHIFT','<<=':'LEFTSHIFTEQUAL','<=':'LESSEQUAL','=':'EQUAL','==':'EQEQUAL','>':'GREATER','>=':'GREATEREQUAL','>>':'RIGHTSHIFT','>>=':'RIGHTSHIFTEQUAL','@':'AT','@=':'ATEQUAL','[':'LSQB',']':'RSQB','^':'CIRCUMFLEX','^=':'CIRCUMFLEXEQUAL','{':'LBRACE','|':'VBAR','|=':'VBAREQUAL','}':'RBRACE','~':'TILDE'} +function ISTERMINAL(x){return x < NT_OFFSET} +function ISNONTERMINAL(x){return x >=NT_OFFSET} +function ISEOF(x){return x==ENDMARKER}})(__BRYTHON__) +; (function($B){var _b_=$B.builtins +function is_whitespace(char){return ' \n\r\t\f'.includes(char)} +var unprintable_re=/\p{Cc}|\p{Cf}|\p{Co}|\p{Cs}|\p{Zl}|\p{Zp}|\p{Zs}/u const Other_ID_Start=[0x1885,0x1886,0x2118,0x212E,0x309B,0x309C].map( x=> String.fromCodePoint(x)) function is_ID_Start(char){return/\p{Letter}/u.test(char)|| @@ -218,31 +245,25 @@ return code} function $last(array){return array[array.length-1]} var ops='.,:;+-*/%~^|&=<>[](){}@', op2=['**','//','>>','<<'],augm_op='+-*/%^|&=<>@',closing={'}':'{',']':'[',')':'('} -function Token(type,string,start,end,line){start=start.slice(0,2) -var res -if($B.py_tokens){res={string,line} +function Token(type,string,lineno,col_offset,end_lineno,end_col_offset,line){var res={type,string,line,lineno,col_offset,end_lineno,end_col_offset} res.num_type=$B.py_tokens[type] -if(type=='OP'){res.num_type=$B.py_tokens[$B.EXACT_TOKEN_TYPES[string]]}else if(type=='NAME' &&['async','await'].includes(string)){res.num_type=$B.py_tokens[string.toUpperCase()]} -res.lineno=start[0] -res.col_offset=start[1] -res.end_lineno=end[0] -res.end_col_offset=end[1] -if(res.num_type==-1){console.log('res',res) -alert()}}else{res={type,string,start,end,line} -res[0]=type -res[1]=string -res[2]=start -res[3]=end -res[4]=line} +if(type=='OP'){res.num_type=$B.py_tokens[$B.EXACT_TOKEN_TYPES[string]]}else if(type=='NAME' &&['async','await'].includes(string)){res.num_type=$B.py_tokens[string.toUpperCase()]}else if(type=='ENCODING'){res.num_type=$B.py_tokens.ENCODING} +res.bytes=res.string return res} -function get_comment(src,pos,line_num,line_start,token_name,line){var start=pos,ix +function get_comment(parser,src,pos,line_num,line_start,token_name,line){var start=pos,ix var t=[] -while(true){if(pos >=src.length ||(ix='\r\n'.indexOf(src[pos]))>-1){t.push(Token('COMMENT',src.substring(start-1,pos),[line_num,start-line_start],[line_num,pos-line_start+1],line)) +while(true){if(pos >=src.length ||(ix='\r\n'.indexOf(src[pos]))>-1){if(parser && parser.flags & $B.PyCF_TYPE_COMMENTS){var comment=src.substring(start-1,pos),mo=/^#\s*type\s*:(.*)/.exec(comment) +if(mo){var is_type_ignore=false +if(mo[1].startsWith('ignore')){if(mo[1].length==6){is_type_ignore=true}else{var char=mo[1][6] +if(char.charCodeAt(0)<=128 &&/[a-zA-Z0-9]/.exec(char)===null){is_type_ignore=true}}} +if(is_type_ignore){t.push(Token('TYPE_IGNORE',comment,line_num,start-line_start,line_num,pos-line_start+1,line))}else{t.push(Token('TYPE_COMMENT',comment,line_num,start-line_start,line_num,pos-line_start+1,line))} +return{t,pos}}} 
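The `get_comment` changes above only emit `TYPE_COMMENT` / `TYPE_IGNORE` tokens when the parser carries the `$B.PyCF_TYPE_COMMENTS` flag; on the CPython side the same switch is `ast.parse(..., type_comments=True)`. A plain-CPython sketch of the attributes that flag populates (the source string is illustrative):

```python
import ast

src = (
    "def f(a, b):\n"
    "    # type: (int, str) -> None\n"
    "    pass\n"
    "x = []  # type: ignore\n"
)

tree = ast.parse(src, type_comments=True)
print(tree.body[0].type_comment)         # (int, str) -> None
print(ast.dump(tree.type_ignores[0]))    # TypeIgnore(lineno=4, tag='')

# Without the flag, the same comments are ignored by the parser.
plain = ast.parse(src)
print(plain.body[0].type_comment, plain.type_ignores)   # None []
```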
+t.push(Token('COMMENT',src.substring(start-1,pos),line_num,start-line_start,line_num,pos-line_start+1,line)) if(ix !==undefined){var nb=1 if(src[pos]=='\r' && src[pos+1]=='\n'){nb++}else if(src[pos]===undefined){ nb=0} -t.push(Token(token_name,src.substr(pos,nb),[line_num,pos-line_start+1],[line_num,pos-line_start+nb+1],line)) -if(src[pos]===undefined){t.push(Token('NEWLINE','\n',[line_num,pos-line_start+1],[line_num,pos-line_start+2],''))} +t.push(Token(token_name,src.substr(pos,nb),line_num,pos-line_start+1,line_num,pos-line_start+nb+1,line)) +if(src[pos]===undefined){t.push(Token('NEWLINE','\n',line_num,pos-line_start+1,line_num,pos-line_start+2,''))} pos+=nb} return{t,pos}} pos++}} @@ -256,32 +277,21 @@ case 'o': return '01234567'.includes(char) default: throw Error('unknown num type '+num_type)}} -$B.TokenReader=function(src,filename){this.tokens=[] -this.tokenizer=$B.tokenizer(src,filename) -this.position=0} -$B.TokenReader.prototype.read=function(){var res -if(this.position < this.tokens.length){res=this.tokens[this.position]}else{res=this.tokenizer.next() -if(res.done){this.done=true -return} -res=res.value -this.tokens.push(res)} -this.position++ -return res} -$B.TokenReader.prototype.seek=function(position){this.position=position} function nesting_level(token_modes){var ix=token_modes.length-1 while(ix >=0){var mode=token_modes[ix] if(mode.nesting !==undefined){return mode.nesting} ix--}} -$B.tokenizer=function*(src,filename,mode){var string_prefix=/^(r|u|R|U|f|F|fr|Fr|fR|FR|rf|rF|Rf|RF)$/,bytes_prefix=/^(b|B|br|Br|bR|BR|rb|rB|Rb|RB)$/ +$B.tokenizer=function*(src,filename,mode,parser){var string_prefix=/^(r|u|R|U|f|F|fr|Fr|fR|FR|rf|rF|Rf|RF)$/,bytes_prefix=/^(b|B|br|Br|bR|BR|rb|rB|Rb|RB)$/ src=src.replace(/\r\n/g,'\n'). replace(/\r/g,'\n') if(mode !='eval' && ! 
src.endsWith('\n')){src+='\n'} var lines=src.split('\n'),linenum=0,line_at={} -for(var i=0,len=src.length;i < len;i++){line_at[i]=linenum +for(let i=0,len=src.length;i < len;i++){line_at[i]=linenum if(src[i]=='\n'){linenum++}} function get_line_at(pos){return lines[line_at[pos]]+'\n'} -var state="line_start",char,cp,mo,pos=0,quote,triple_quote,escaped=false,string_start,string,prefix,name,number,num_type,comment,indent,indents=[],braces=[],line,line_num=0,line_start=1,token_modes=['regular'],token_mode='regular',save_mode=token_mode,fstring_buffer,fstring_start,fstring_expr_start,fstring_escape,format_specifier -yield Token('ENCODING','utf-8',[0,0],[0,0],'') +var state="line_start",char,cp,mo,pos=0,quote,triple_quote,escaped=false,string_start,string,prefix,name,number,num_type,comment,indent,indent_before_continuation=0,indents=[],braces=[],line,line_num=0,line_start=1,token_modes=['regular'],token_mode='regular',save_mode=token_mode,fstring_buffer,fstring_start,fstring_expr_start,fstring_escape,format_specifier +if(parser){parser.braces=braces} +yield Token('ENCODING','utf-8',0,0,0,0,'') while(pos < src.length){char=src[pos] cp=src.charCodeAt(pos) if(cp >=0xD800 && cp <=0xDBFF){ @@ -300,8 +310,8 @@ continue} char=token_mode.quote.repeat(3) pos+=2} if(fstring_buffer.length > 0){ -yield Token(FSTRING_MIDDLE,fstring_buffer,[line_num,fstring_start],[line_num,fstring_start+fstring_buffer.length],line)} -yield Token(FSTRING_END,char,[line_num,pos],[line_num,pos],line) +yield Token(FSTRING_MIDDLE,fstring_buffer,line_num,fstring_start,line_num,fstring_start+fstring_buffer.length,line)} +yield Token(FSTRING_END,char,line_num,pos,line_num,pos,line) token_modes.pop() token_mode=$B.last(token_modes) state=null @@ -309,7 +319,7 @@ continue}else if(char=='{'){if(src.charAt(pos)=='{'){ fstring_buffer+=char pos++ continue}else{ -if(fstring_buffer.length > 0){yield Token(FSTRING_MIDDLE,fstring_buffer,[line_num,fstring_start],[line_num,fstring_start+fstring_buffer.length],line)} +if(fstring_buffer.length > 0){yield Token(FSTRING_MIDDLE,fstring_buffer,line_num,fstring_start,line_num,fstring_start+fstring_buffer.length,line)} token_mode='regular_within_fstring' fstring_expr_start=pos-line_start state=null @@ -317,7 +327,7 @@ token_modes.push(token_mode)}}else if(char=='}'){if(src.charAt(pos)=='}'){ fstring_buffer+=char pos++ continue}else{ -yield Token('OP',char,[line_num,pos-line_start],[line_num,pos-line_start+1],line) +yield Token('OP',char,line_num,pos-line_start,line_num,pos-line_start+1,line) continue}}else if(char=='\\'){if(token_mode.raw){fstring_buffer+=char+char}else{if(fstring_escape){fstring_buffer+='\\'+char} fstring_escape=! 
fstring_escape} continue}else{if(fstring_escape){fstring_buffer+='\\'} @@ -325,18 +335,18 @@ fstring_buffer+=char fstring_escape=false if(char=='\n'){line_num++} continue}}else if(token_mode=='format_specifier'){if(char==quote){if(format_specifier.length > 0){ -yield Token(FSTRING_MIDDLE,format_specifier,[line_num,fstring_start],[line_num,fstring_start+format_specifier.length],line) +yield Token(FSTRING_MIDDLE,format_specifier,line_num,fstring_start,line_num,fstring_start+format_specifier.length,line) token_modes.pop() token_mode=$B.last(token_modes) continue}}else if(char=='{'){ -yield Token(FSTRING_MIDDLE,format_specifier,[line_num,fstring_start],[line_num,fstring_start+format_specifier.length],line) +yield Token(FSTRING_MIDDLE,format_specifier,line_num,fstring_start,line_num,fstring_start+format_specifier.length,line) token_mode='regular_within_fstring' fstring_expr_start=pos-line_start state=null token_modes.push(token_mode)}else if(char=='}'){ -yield Token(FSTRING_MIDDLE,format_specifier,[line_num,fstring_start],[line_num,fstring_start+format_specifier.length],line) -yield Token('OP',char,[line_num,pos-line_start],[line_num,pos-line_start+1],line) -if(braces.length==0 ||$B.last(braces)!=='{'){throw Error('wrong braces')} +yield Token(FSTRING_MIDDLE,format_specifier,line_num,fstring_start,line_num,fstring_start+format_specifier.length,line) +yield Token('OP',char,line_num,pos-line_start,line_num,pos-line_start+1,line) +if(braces.length==0 ||$B.last(braces).char !=='{'){throw Error('wrong braces')} braces.pop() token_modes.pop() token_mode=$B.last(token_modes) @@ -347,40 +357,50 @@ line=get_line_at(pos-1) line_start=pos line_num++ if(mo=/^\f?(\r\n|\r|\n)/.exec(src.substr(pos-1))){ -yield Token('NL',mo[0],[line_num,0],[line_num,mo[0].length],line) +yield Token('NL',mo[0],line_num,0,line_num,mo[0].length,line) pos+=mo[0].length-1 -continue}else if(char=='#'){comment=get_comment(src,pos,line_num,line_start,'NL',line) +continue}else if(char=='#'){comment=get_comment(parser,src,pos,line_num,line_start,'NL',line) for(var item of comment.t){yield item} pos=comment.pos state='line_start' continue} indent=0 if(char==' '){indent=1}else if(char=='\t'){indent=8} -if(indent){while(pos < src.length){if(src[pos]==' '){indent++}else if(src[pos]=='\t'){indent+=8}else{break} +if(indent){var broken=false +while(pos < src.length){if(broken && indent > 0 && ' \t'.includes(src[pos])){console.log('indentation error 479') +$B.raise_error_known_location( +_b_.IndentationError,filename,line_num,pos-line_start,line_num,pos-line_start+1,line,'unindent does not match any outer indentation level' +)} +if(src[pos]==' '){indent++}else if(src[pos]=='\t'){indent+=8}else if(src[pos]=='\\' && src[pos+1]=='\n'){ +pos++ +line_start=pos+2 +line_num++ +line=get_line_at(pos+2) +broken=true}else{break} pos++} if(pos==src.length){ line_num-- break} if(src[pos]=='#'){ -var comment=get_comment(src,pos+1,line_num,line_start,'NL',line) +comment=get_comment(parser,src,pos+1,line_num,line_start,'NL',line) for(var item of comment.t){yield item} pos=comment.pos -continue}else if(mo=/^\f?(\r\n|\r|\n)/.exec(src.substr(pos))){ -yield Token('NL','',[line_num,pos-line_start+1],[line_num,pos-line_start+1+mo[0].length],line) +continue}else if(src[pos]=='\\'){if(/^\f?(\r\n|\r|\n)/.exec(src[pos+1])){line_num++ +pos++ +continue}else{$B.raise_error_known_location(_b_.SyntaxError,filename,line_num,pos+2-line_start,line_num,pos+3-line_start,line,'unexpected character after line continuation character')}}else 
if(mo=/^\f?(\r\n|\r|\n)/.exec(src.substr(pos))){ +yield Token('NL','',line_num,pos-line_start+1,line_num,pos-line_start+1+mo[0].length,line) pos+=mo[0].length continue} if(indents.length==0 ||indent > $last(indents)){indents.push(indent) -yield Token('INDENT','',[line_num,0],[line_num,indent],line)}else if(indent < $last(indents)){var ix=indents.indexOf(indent) -if(ix==-1){var error=Error('unindent does not match '+ -'any outer indentation level') -error.type='IndentationError' -error.line_num=line_num -throw error } +yield Token('INDENT','',line_num,0,line_num,indent,line)}else if(indent < $last(indents)){var ix=indents.indexOf(indent) +if(ix==-1){var message='unindent does not match '+ +'any outer indentation level' +$B.raise_error_known_location(_b_.IndentationError,filename,line_num,0,line_num,0,line,message)} for(var i=indents.length-1;i > ix;i--){indents.pop() -yield Token('DEDENT','',[line_num,indent],[line_num,indent],line)}} +yield Token('DEDENT','',line_num,indent,line_num,indent,line)}} state=null}else{ while(indents.length > 0){indents.pop() -yield Token('DEDENT','',[line_num,indent],[line_num,indent],line)} +yield Token('DEDENT','',line_num,indent,line_num,indent,line)} state=null pos--} break @@ -398,7 +418,7 @@ prefix="" break case '#': var token_name=braces.length > 0 ? 'NL' :'NEWLINE' -comment=get_comment(src,pos,line_num,line_start,token_name,line) +comment=get_comment(parser,src,pos,line_num,line_start,token_name,line) for(var item of comment.t){yield item} pos=comment.pos if(braces.length==0){state='line_start'}else{state=null @@ -429,16 +449,15 @@ while(src[pos]==char){pos++ op+=char} var dot_pos=pos-line_start-op.length+1 while(op.length >=3){ -yield Token('OP','...',[line_num,dot_pos],[line_num,dot_pos+3],line) +yield Token('OP','...',line_num,dot_pos,line_num,dot_pos+3,line) op=op.substr(3)} -for(var i=0;i < op.length;i++){yield Token('OP','.',[line_num,dot_pos],[line_num,dot_pos+1],line) +for(var i=0;i < op.length;i++){yield Token('OP','.',line_num,dot_pos,line_num,dot_pos+1,line) dot_pos++}} break case '\\': var mo=/^\f?(\r\n|\r|\n)/.exec(src.substr(pos)) -if(mo=/^\f?(\r\n|\r|\n)/.exec(src.substr(pos))){if(pos==src.length-1){yield Token('ERRORTOKEN',char,[line_num,pos-line_start],[line_num,pos-line_start+1],line) -var token_name=braces.length > 0 ? 'NL':'NEWLINE' -yield Token(token_name,mo[0],[line_num,pos-line_start],[line_num,pos-line_start+mo[0].length],line)} +if(mo){if(pos==src.length-1){var msg='unexpected EOF while parsing' +$B.raise_error_known_location(_b_.SyntaxError,filename,line_num,pos-line_start,line_num,pos-line_start+1,line,msg)} line_num++ pos+=mo[0].length line_start=pos+1 @@ -451,7 +470,7 @@ case '\n': case '\r': var token_name=braces.length > 0 ? 
'NL':'NEWLINE' mo=/^\f?(\r\n|\r|\n)/.exec(src.substr(pos-1)) -yield Token(token_name,mo[0],[line_num,pos-line_start],[line_num,pos-line_start+mo[0].length],line) +yield Token(token_name,mo[0],line_num,pos-line_start,line_num,pos-line_start+mo[0].length,line) pos+=mo[0].length-1 if(token_name=='NEWLINE'){state='line_start'}else{line_num++ line_start=pos+1 @@ -464,7 +483,7 @@ name=char}else if($B.in_unicode_category('Nd',ord(char))){state='NUMBER' num_type='' number=char}else if(ops.includes(char)){if(token_mode=='regular_within_fstring' && (char==':' ||char=='}')){if(char==':'){ -if(nesting_level(token_modes)==braces.length-1){let colon=Token('OP',char,[line_num,pos-line_start-op.length+1],[line_num,pos-line_start+1],line) +if(nesting_level(token_modes)==braces.length-1){let colon=Token('OP',char,line_num,pos-line_start-op.length+1,line_num,pos-line_start+1,line) colon.metadata=src.substr( line_start+fstring_expr_start,pos-line_start-fstring_expr_start-1) yield colon @@ -472,13 +491,13 @@ token_modes.pop() token_mode='format_specifier' token_modes.push(token_mode) continue}}else{ -let closing_brace=Token('OP',char,[line_num,pos-line_start-op.length+1],[line_num,pos-line_start+1],line) +let closing_brace=Token('OP',char,line_num,pos-line_start-op.length+1,line_num,pos-line_start+1,line) closing_brace.metadata=src.substring( -line_start+fstring_start+2,pos-1) +line_start+fstring_expr_start,pos-1) yield closing_brace token_modes.pop() token_mode=token_modes[token_modes.length-1] -if(braces.length==0 ||$B.last(braces)!=='{'){throw Error('wrong braces')} +if(braces.length==0 ||$B.last(braces).char !=='{'){throw Error('wrong braces')} braces.pop() continue}} var op=char @@ -489,14 +508,21 @@ augm_op.includes(op))){op+=src[pos] pos++}else if((char=='-' && src[pos]=='>')|| (char==':' && src[pos]=='=')){op+=src[pos] pos++} -if('[({'.includes(char)){braces.push(char)}else if('])}'.includes(char)){if(braces && $last(braces)==closing[char]){braces.pop()}else{braces.push(char)}} -yield Token('OP',op,[line_num,pos-line_start-op.length+1],[line_num,pos-line_start+1],line)}else if(char=='!'){if(src[pos]=='='){yield Token('OP','!=',[line_num,pos-line_start],[line_num,pos-line_start+2],line) +if('[({'.includes(char)){braces.push({char,pos,line_num,line_start,line})}else if('])}'.includes(char)){if(braces.length && $last(braces).char==closing[char]){braces.pop()}else{braces.push({char,pos,line_num,line_start,line})}} +yield Token('OP',op,line_num,pos-line_start-op.length+1,line_num,pos-line_start+1,line)}else if(char=='!'){if(src[pos]=='='){yield Token('OP','!=',line_num,pos-line_start,line_num,pos-line_start+2,line) pos++}else{ -let token=Token('OP',char,[line_num,pos-line_start],[line_num,pos-line_start+1],line) +let token=Token('OP',char,line_num,pos-line_start,line_num,pos-line_start+1,line) token.metadata=src.substring( line_start+fstring_start+2,pos-1) yield token}}else if(char==' ' ||char=='\t'){}else{ -yield Token('ERRORTOKEN',char,[line_num,pos-line_start],[line_num,pos-line_start+1],line)}} +var cp=char.codePointAt(0),err_msg='invalid' +if(unprintable_re.exec(char)){err_msg+=' non-printable'} +var unicode=cp.toString(16).toUpperCase() +while(unicode.length < 4){unicode='0'+unicode} +err_msg+=` character '${char}' (U+${unicode})` +if(char=='$' ||char=='`'){err_msg='invalid syntax'} +var err_token=Token('ERRORTOKEN',char,line_num,pos-line_start,line_num,pos-line_start+1,line) +$B.raise_error_known_token(_b_.SyntaxError,filename,err_token,err_msg)}} break case 'NAME': 
if($B.is_XID_Continue(ord(char))){name+=char}else if(char=='"' ||char=="'"){if(string_prefix.exec(name)||bytes_prefix.exec(name)){ @@ -514,13 +540,13 @@ token_mode.raw=prefix.toLowerCase().includes('r') token_modes.push(token_mode) var s=triple_quote ? quote.repeat(3):quote var end_col=fstring_start+name.length+s.length -yield Token(FSTRING_START,prefix+s,[line_num,fstring_start],[line_num,end_col],line) +yield Token(FSTRING_START,prefix+s,line_num,fstring_start,line_num,end_col,line) continue} escaped=false string_start=[line_num,pos-line_start-name.length,line_start] -string=''}else{yield Token('NAME',name,[line_num,pos-line_start-name.length],[line_num,pos-line_start],line) +string=''}else{yield Token('NAME',name,line_num,pos-line_start-name.length,line_num,pos-line_start,line) state=null -pos--}}else{yield Token('NAME',name,[line_num,pos-line_start-name.length],[line_num,pos-line_start],line) +pos--}}else{yield Token('NAME',name,line_num,pos-line_start-name.length,line_num,pos-line_start,line) state=null pos--} break @@ -533,11 +559,11 @@ string_start[2]-1,pos+2)} var full_string if(! triple_quote){full_string=prefix+quote+string+ quote -yield Token('STRING',full_string,string_start,[line_num,pos-line_start+1],string_line) +yield Token('STRING',full_string,string_start[0],string_start[1],line_num,pos-line_start+1,string_line) state=null}else if(char+src.substr(pos,2)== quote.repeat(3)){full_string=prefix+quote.repeat(3)+ string+quote.repeat(3) -yield Token('STRING',full_string,string_start,[line_num,pos-line_start+3],string_line) +yield Token('STRING',full_string,string_start[0],string_start[1],line_num,pos-line_start+3,string_line) pos+=2 state=null}else{string+=char}}else{string+=char} escaped=false @@ -545,16 +571,9 @@ break case '\r': case '\n': if(! escaped && ! triple_quote){ -var quote_pos=string_start[1]+line_start-1 -pos=quote_pos -while(src[pos-1]==' '){pos--} -while(pos < quote_pos){yield Token('ERRORTOKEN',' ',[line_num,pos-line_start+1],[line_num,pos-line_start+2],line) -pos++} -pos++ -yield Token('ERRORTOKEN',quote,[line_num,pos-line_start],[line_num,pos-line_start+1],line) -state=null -pos++ -break} +var msg=`unterminated string literal `+ +`(detected at line ${line_num})`,line_num=string_start[0],col_offset=string_start[1] +$B.raise_error_known_location(_b_.SyntaxError,filename,line_num,col_offset,line_num,col_offset,line,msg)} string+=char line_num++ line_start=pos+1 @@ -576,39 +595,40 @@ break case 'NUMBER': if(test_num(num_type,char)){number+=char}else if(char=='_' && ! number.endsWith('.')){if(number.endsWith('_')){throw SyntaxError('consecutive _ in number')}else if(src[pos]===undefined || ! test_num(num_type,src[pos])){ -yield Token('NUMBER',number,[line_num,pos-line_start-number.length],[line_num,pos-line_start],line) +yield Token('NUMBER',number,line_num,pos-line_start-number.length,line_num,pos-line_start,line) state=null pos--}else{number+=char}}else if(char=='.' && ! number.includes(char)){number+=char}else if(char.toLowerCase()=='e' && ! 
number.toLowerCase().includes('e')){if('+-'.includes(src[pos])|| -$B.in_unicode_category('Nd',ord(src[pos]))){number+=char}else{yield Token('NUMBER',number,[line_num,pos-line_start-number.length],[line_num,pos-line_start],line) +$B.in_unicode_category('Nd',ord(src[pos]))){number+=char}else{yield Token('NUMBER',number,line_num,pos-line_start-number.length,line_num,pos-line_start,line) state=null pos--}}else if((char=='+' ||char=='-')&& number.toLowerCase().endsWith('e')){number+=char}else if(char.toLowerCase()=='j'){ number+=char -yield Token('NUMBER',number,[line_num,pos-line_start-number.length+1],[line_num,pos-line_start+1],line) -state=null}else if(char.match(/\p{Letter}/u)){$B.raise_error_known_location(_b_.SyntaxError,filename,line_num,pos-line_start-number.length,line_num,pos-line_start,line,'invalid decimal literal')}else{yield Token('NUMBER',number,[line_num,pos-line_start-number.length],[line_num,pos-line_start],line) +yield Token('NUMBER',number,line_num,pos-line_start-number.length+1,line_num,pos-line_start+1,line) +state=null}else if(char.match(/\p{Letter}/u)){$B.raise_error_known_location(_b_.SyntaxError,filename,line_num,pos-line_start-number.length,line_num,pos-line_start,line,'invalid decimal literal')}else{yield Token('NUMBER',number,line_num,pos-line_start-number.length,line_num,pos-line_start,line) state=null pos--} break}} -if(braces.length > 0){throw SyntaxError('EOF in multi-line statement')} switch(state){case 'line_start': line_num++ break case 'NAME': -yield Token('NAME',name,[line_num,pos-line_start-name.length+1],[line_num,pos-line_start+1],line) +yield Token('NAME',name,line_num,pos-line_start-name.length+1,line_num,pos-line_start+1,line) break case 'NUMBER': -yield Token('NUMBER',number,[line_num,pos-line_start-number.length+1],[line_num,pos-line_start+1],line) +yield Token('NUMBER',number,line_num,pos-line_start-number.length+1,line_num,pos-line_start+1,line) break case 'STRING': -throw SyntaxError( -`unterminated ${triple_quote ? 'triple-quoted ' : ''}`+ -`string literal (detected at line ${line_num})`)} -if(! src.endsWith('\n')&& state !=line_start){yield Token('NEWLINE','',[line_num,pos-line_start+1],[line_num,pos-line_start+1],line+'\n') +line_num=string_start[0] +line=lines[line_num-1] +var msg=`unterminated ${triple_quote ? 'triple-quoted ' : ''}`+ +`string literal (detected at line ${line_num})`,col_offset=string_start[1] +$B.raise_error_known_location(_b_.SyntaxError,filename,line_num,col_offset,line_num,col_offset,line,msg)} +if(! 
src.endsWith('\n')&& state !=line_start){yield Token('NEWLINE','',line_num,pos-line_start+1,line_num,pos-line_start+1,line+'\n') line_num++} while(indents.length > 0){indents.pop() -yield Token('DEDENT','',[line_num,0],[line_num,0],'')} -yield Token('ENDMARKER','',[line_num,0],[line_num,0],'')}})(__BRYTHON__) +yield Token('DEDENT','',line_num,0,line_num,0,'')} +yield Token('ENDMARKER','',line_num,0,line_num,0,'')}})(__BRYTHON__) ; (function($B){ var binary_ops={'+':'Add','-':'Sub','*':'Mult','/':'Div','//':'FloorDiv','%':'Mod','**':'Pow','<<':'LShift','>>':'RShift','|':'BitOr','^':'BitXor','&':'BitAnd','@':'MatMult'} @@ -679,67 +699,7 @@ for(var attr in obj){res[attr]=obj[attr]} return res} $B.last=function(table){if(table===undefined){console.log($B.make_frames_stack())} return table[table.length-1]} -$B.list2obj=function(list,value){var res={},i=list.length -if(value===undefined){value=true} -while(i--> 0){res[list[i]]=value} -return res} -$B.op2method={operations:{"**":"pow","//":"floordiv","<<":"lshift",">>":"rshift","+":"add","-":"sub","*":"mul","/":"truediv","%":"mod","@":"matmul" },augmented_assigns:{"//=":"ifloordiv",">>=":"irshift","<<=":"ilshift","**=":"ipow","+=":"iadd","-=":"isub","*=":"imul","/=":"itruediv","%=":"imod","&=":"iand","|=":"ior","^=":"ixor","@=":"imatmul"},binary:{"&":"and","|":"or","~":"invert","^":"xor"},comparisons:{"<":"lt",">":"gt","<=":"le",">=":"ge","==":"eq","!=":"ne"},boolean:{"or":"or","and":"and","in":"in","not":"not","is":"is"},subset:function(){var res={},keys=[] -if(arguments[0]=="all"){keys=Object.keys($B.op2method) -keys.splice(keys.indexOf("subset"),1)}else{for(var arg of arguments){keys.push(arg)}} -for(var key of keys){var ops=$B.op2method[key] -if(ops===undefined){throw Error(key)} -for(var attr in ops){res[attr]=ops[attr]}} -return res}} -var $operators=$B.op2method.subset("all") -$B.method_to_op={} -for(var category in $B.op2method){for(var op in $B.op2method[category]){var method=`__${$B.op2method[category][op]}__` -$B.method_to_op[method]=op}} -var $augmented_assigns=$B.augmented_assigns=$B.op2method.augmented_assigns -var noassign=$B.list2obj(['True','False','None','__debug__']) -var $op_order=[['or'],['and'],['not'],['in','not_in'],['<','<=','>','>=','!=','==','is','is_not'],['|'],['^'],['&'],['>>','<<'],['+','-'],['*','@','/','//','%'],['unary_neg','unary_inv','unary_pos'],['**'] -] -var $op_weight={},$weight=1 -for(var _tmp of $op_order){for(var item of _tmp){$op_weight[item]=$weight} -$weight++} -var ast=$B.ast,op2ast_class=$B.op2ast_class -function ast_body(block_ctx){ -var body=[] -for(var child of block_ctx.node.children){var ctx=child.C.tree[0] -if(['single_kw','except','decorator'].indexOf(ctx.type)>-1 || -(ctx.type=='condition' && ctx.token=='elif')){continue} -var child_ast=ctx.ast() -if(ast.expr.indexOf(child_ast.constructor)>-1){child_ast=new ast.Expr(child_ast) -copy_position(child_ast,child_ast.value)} -body.push(child_ast)} -return body} -var ast_dump=$B.ast_dump=function(tree,indent){var attr,value -indent=indent ||0 -if(tree===_b_.None){ -return 'None'}else if(typeof tree=='string'){return `'${tree}'`}else if(typeof tree=='number'){return tree+''}else if(tree.imaginary){return tree.value+'j'}else if(Array.isArray(tree)){if(tree.length==0){return '[]'} -res='[\n' -var items=[] -for(var x of tree){try{items.push(ast_dump(x,indent+1))}catch(err){console.log('error',tree) -console.log('for item',x) -throw err}} -res+=items.join(',\n') -return res+']'}else if(tree.$name){return tree.$name+'()'}else if(tree instanceof 
ast.MatchSingleton){return `MatchSingleton(value=${$B.AST.$convert(tree.value)})`}else if(tree instanceof ast.Constant){value=tree.value -if(value.imaginary){return `Constant(value=${_b_.repr(value.value)}j)`} -return `Constant(value=${$B.AST.$convert(value)})`} -var proto=Object.getPrototypeOf(tree).constructor -var res=' ' .repeat(indent)+proto.$name+'(' -if($B.ast_classes[proto.$name]===undefined){console.log('no ast class',proto)} -var attr_names=$B.ast_classes[proto.$name].split(','),attrs=[] -attr_names=attr_names.map(x=>(x.endsWith('*')||x.endsWith('?'))? -x.substr(0,x.length-1):x) -if([ast.Name].indexOf(proto)>-1){for(attr of attr_names){if(tree[attr]!==undefined){attrs.push(`${attr}=${ast_dump(tree[attr])}`)}} -return res+attrs.join(', ')+')'} -for(attr of attr_names){if(tree[attr]!==undefined){value=tree[attr] -attrs.push(attr+'='+ -ast_dump(tree[attr],indent+1).trimStart())}} -if(attrs.length > 0){res+='\n' -res+=attrs.map(x=> ' '.repeat(indent+1)+x).join(',\n')} -res+=')' -return res} +var ast=$B.ast function get_line(filename,lineno){var src=$B.file_cache[filename],line=_b_.None if(src !==undefined){var lines=src.split('\n') line=lines[lineno-1]} @@ -756,121 +716,6 @@ for(var alias of child.names){var name=alias.name if(name=="braces"){raise_error_known_location(_b_.SyntaxError,filename,alias.lineno,alias.col_offset,alias.end_lineno,alias.end_col_offset,get_line(filename,child.lineno),"not a chance")}else if(name=="annotations"){features |=$B.CO_FUTURE_ANNOTATIONS}else if(VALID_FUTURES.indexOf(name)==-1){raise_error_known_location(_b_.SyntaxError,filename,alias.lineno,alias.col_offset,alias.end_lineno,alias.end_col_offset,get_line(filename,child.lineno),`future feature ${name} is not defined`)}} i++}else{break}} return{features}} -function set_position(ast_obj,position,end_position){ast_obj.lineno=position.start[0] -ast_obj.col_offset=position.start[1] -position=end_position ||position -ast_obj.end_lineno=position.end[0] -ast_obj.end_col_offset=position.end[1]} -function copy_position(target,origin){target.lineno=origin.lineno -target.col_offset=origin.col_offset -target.end_lineno=origin.end_lineno -target.end_col_offset=origin.end_col_offset} -function first_position(C){var ctx=C -while(ctx.tree && ctx.tree.length > 0){ctx=ctx.tree[0]} -return ctx.position} -function last_position(C){var ctx=C -while(ctx.tree && ctx.tree.length > 0){ctx=$B.last(ctx.tree) -if(ctx.end_position){return ctx.end_position}} -return ctx.end_position ||ctx.position} -function raise_error_known_location(type,filename,lineno,col_offset,end_lineno,end_col_offset,line,message){var exc=type.$factory(message) -exc.filename=filename -exc.lineno=lineno -exc.offset=col_offset+1 -exc.end_lineno=end_lineno -exc.end_offset=end_col_offset+1 -exc.text=line -exc.args[1]=$B.fast_tuple([filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset]) -exc.$frame_obj=$B.frame_obj -throw exc} -$B.raise_error_known_location=raise_error_known_location -function raise_syntax_error_known_range(C,a,b,msg){ -raise_error_known_location(_b_.SyntaxError,get_module(C).filename,a.start[0],a.start[1],b.end[0],b.end[1],a.line,msg)} -function raise_error(errtype,C,msg,token){var filename=get_module(C).filename -token=token ||$token.value -msg=msg ||'invalid syntax' -if(msg.startsWith('(')){msg='invalid syntax '+msg} -msg=msg.trim() -raise_error_known_location(errtype,filename,token.start[0],token.start[1],token.end[0],token.end[1]-1,token.line,msg)} -function 
raise_syntax_error(C,msg,token){raise_error(_b_.SyntaxError,C,msg,token)} -function raise_indentation_error(C,msg,indented_node){ -if(indented_node){ -var type=indented_node.C.tree[0].type,token=indented_node.C.tree[0].token,lineno=indented_node.line_num -if(type=='except' && indented_node.C.tree[0].try_node.C.is_trystar){type='except*'} -switch(type){case 'class': -type='class definition' -break -case 'condition': -type=`'${token}' statement` -break -case 'def': -type='function definition' -break -case 'case': -case 'except': -case 'except*': -case 'for': -case 'match': -case 'try': -case 'while': -case 'with': -type=`'${type}' statement` -break -case 'single_kw': -type=`'${token}' statement` -break} -msg+=` after ${type} on line ${lineno}`} -raise_error(_b_.IndentationError,C,msg)} -function check_assignment(C,kwargs){ -function in_left_side(C,assign_type){var ctx=C -while(ctx){if(ctx.parent && ctx.parent.type==assign_type && -ctx===ctx.parent.tree[0]){return true} -ctx=ctx.parent}} -var once,action='assign to',augmented=false -if(kwargs){once=kwargs.once -action=kwargs.action ||action -augmented=kwargs.augmented===undefined ? false :kwargs.augmented} -var ctx=C,forbidden=['assert','import','raise','return','decorator','comprehension','await'] -if(action !='delete'){ -forbidden.push('del')} -function report(wrong_type,a,b){a=a ||C.position -b=b ||$token.value -if(augmented){raise_syntax_error_known_range( -C,a,b,`'${wrong_type}' is an illegal expression `+ -'for augmented assignment')}else{var msg=wrong_type -if(Array.isArray(msg)){ -msg=msg[0]}else if($token.value.string=='=' && $token.value.type=='OP'){if(parent_match(C,{type:'augm_assign'})){ -raise_syntax_error(C)} -if(parent_match(C,{type:'assign'})){raise_syntax_error_known_range( -C,a,b,`invalid syntax. Maybe you meant '==' or ':=' instead of '='?`)} -if(! parent_match(C,{type:'list_or_tuple'})){msg+=" here. 
Maybe you meant '==' instead of '='?"}} -raise_syntax_error_known_range( -C,a,b,`cannot ${action} ${msg}`)}} -if(in_left_side(C,'augm_assign')){raise_syntax_error(C)} -if(C.type=='target_list'){for(let target of C.tree){check_assignment(target,{action:'assign to'})} -return} -ctx=C -while(ctx){if(forbidden.indexOf(ctx.type)>-1){raise_syntax_error(C,`(assign to ${ctx.type})`)}else if(ctx.type=="expr"){if(parent_match(ctx,{type:'annotation'})){return true} -if(ctx.parent.type=='yield'){raise_syntax_error_known_range(ctx,ctx.parent.position,last_position(ctx),"assignment to yield expression not possible")} -var assigned=ctx.tree[0] -if(assigned.type=="op"){if($B.op2method.comparisons[ctx.tree[0].op]!==undefined){if(parent_match(ctx,{type:'target_list'})){ -raise_syntax_error(C)} -report('comparison',assigned.tree[0].position,last_position(assigned))}else{report('expression',assigned.tree[0].position,last_position(assigned))}}else if(assigned.type=='attribute' && -parent_match(ctx,{type:'condition'})){report('attribute',ctx.position,last_position(C))}else if(assigned.type=='sub' && -parent_match(ctx,{type:'condition'})){report('subscript',ctx.position,last_position(C))}else if(assigned.type=='unary'){report('expression',assigned.position,last_position(assigned))}else if(assigned.type=='call'){report('function call',assigned.position,assigned.end_position)}else if(assigned.type=='id'){var name=assigned.value -if(['None','True','False','__debug__'].indexOf(name)>-1){ -if(name=='__debug__' && augmented){ -$token.value=assigned.position -raise_syntax_error(assigned,'cannot assign to __debug__')} -report([name])}}else if(['str','int','float','complex'].indexOf(assigned.type)>-1){if(ctx.parent.type !='op'){report('literal')}}else if(assigned.type=="ellipsis"){report('ellipsis')}else if(assigned.type=='genexpr'){report(['generator expression'])}else if(assigned.type=='starred'){if(action=='delete'){report('starred',assigned.position,last_position(assigned))} -check_assignment(assigned.tree[0],{action,once:true})}else if(assigned.type=='named_expr'){if(! assigned.parenthesized){report('named expression')}else if(ctx.parent.type=='node'){raise_syntax_error_known_range( -C,assigned.target.position,last_position(assigned),"cannot assign to named expression here. "+ -"Maybe you meant '==' instead of '='?")}else if(action=='delete'){report('named expression',assigned.position,last_position(assigned))}}else if(assigned.type=='list_or_tuple'){for(let item of ctx.tree){check_assignment(item,{action,once:true})}}else if(assigned.type=='dict_or_set'){if(assigned.closed){report(assigned.real=='set' ? 
'set display' :'dict literal',ctx.position,last_position(assigned))}}else if(assigned.type=='lambda'){report('lambda')}else if(assigned.type=='ternary'){report(['conditional expression'])}else if(['fstring','JoinedStr'].indexOf(assigned.type)>-1){report('f-string expression',assigned.position,last_position(assigned))}}else if(ctx.type=='list_or_tuple'){for(let item of ctx.tree){check_assignment(item,{action,once:true})}}else if(ctx.type=='ternary'){report(['conditional expression'],ctx.position,last_position(C))}else if(ctx.type=='op'){let a=ctx.tree[0].position,last=$B.last(ctx.tree).tree[0],b=last.end_position ||last.position -if($B.op2method.comparisons[ctx.op]!==undefined){if(parent_match(C,{type:'target_list'})){ -raise_syntax_error(C)} -report('comparison',a,b)}else{report('expression',a,b)}}else if(ctx.type=='yield'){report('yield expression')}else if(ctx.comprehension){break} -if(once){break} -ctx=ctx.parent}} -function remove_abstract_expr(tree){if(tree.length > 0 && $B.last(tree).type=='abstract_expr'){tree.pop()}} $B.format_indent=function(js,indent){ var indentation=' ',lines=js.split('\n'),level=indent,res='',last_is_closing_brace=false,last_is_backslash=false,last_is_var_and_comma=false for(var i=0,len=lines.length;i < len;i++){var line=lines[i],add_closing_brace=false,add_spaces=true @@ -896,3936 +741,60 @@ last_is_backslash=line.endsWith('\\') last_is_var_and_comma=line.endsWith(',')&& (line.startsWith('var ')||last_is_var_and_comma)} return res} -function show_line(ctx){ -var lnum=get_node(ctx).line_num,src=get_module(ctx).src -console.log('this',ctx,'\nline',lnum,src.split('\n')[lnum-1])} -var $Node=$B.parser.$Node=function(type){this.type=type -this.children=[]} -$Node.prototype.add=function(child){ -this.children[this.children.length]=child -child.parent=this -child.module=this.module} -$Node.prototype.ast=function(){var root_ast=new ast.Module([],[]) -root_ast.lineno=this.line_num -for(var node of this.children){var t=node.C.tree[0] -if(['single_kw','except','decorator'].indexOf(t.type)>-1 || -(t.type=='condition' && t.token=='elif')){continue} -var node_ast=node.C.tree[0].ast() -if(ast.expr.indexOf(node_ast.constructor)>-1){node_ast=new ast.Expr(node_ast) -copy_position(node_ast,node_ast.value)} -root_ast.body.push(node_ast)} -if(this.mode=='eval'){if(root_ast.body.length > 1 || -!(root_ast.body[0]instanceof $B.ast.Expr)){raise_syntax_error(this.children[0].C,'eval() argument must be an expression')} -root_ast=new $B.ast.Expression(root_ast.body[0].value) -copy_position(root_ast,root_ast.body)} -return root_ast} -$Node.prototype.insert=function(pos,child){ -this.children.splice(pos,0,child) -child.parent=this -child.module=this.module} -$Node.prototype.show=function(indent){ -var res='' -if(this.type==='module'){for(let child of this.children){res+=child.show(indent)} -return res} -indent=indent ||0 -res+=' '.repeat(indent) -res+=this.C -if(this.children.length > 0){res+='{'} -res+='\n' -for(let child of this.children){res+=child.show(indent+4)} -if(this.children.length > 0){res+=' '.repeat(indent) -res+='}\n'} -return res} -var AbstractExprCtx=$B.parser.AbstractExprCtx=function(C,with_commas){this.type='abstract_expr' -this.with_commas=with_commas -this.parent=C -this.tree=[] -this.position=$token.value -C.tree.push(this)} -AbstractExprCtx.prototype.transition=function(token,value){var C=this -var packed=C.packed,is_await=C.is_await,commas -switch(token){case 'await': -case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'JoinedStr': -case 
'bytes': -case 'ellipsis': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -case 'yield': -C.parent.tree.pop() -commas=C.with_commas -var star_position -if(C.packed){star_position=C.star_position} -C=C.parent -C.packed=packed -C.is_await=is_await -if(C.position===undefined){C.position=$token.value} -if(star_position){C.star_position=star_position}} -switch(token){case 'await': -return new AbstractExprCtx(new AwaitCtx( -new ExprCtx(C,'await',false)),false) -case 'id': -return new IdCtx(new ExprCtx(C,'id',commas),value) -case 'str': -return new StringCtx(new ExprCtx(C,'str',commas),value) -case 'JoinedStr': -return new FStringCtx(new ExprCtx(C,'str',commas),value) -case 'bytes': -return new StringCtx(new ExprCtx(C,'bytes',commas),value) -case 'int': -return new NumberCtx('int',new ExprCtx(C,'int',commas),value) -case 'float': -return new NumberCtx('float',new ExprCtx(C,'float',commas),value) -case 'imaginary': -return new NumberCtx('imaginary',new ExprCtx(C,'imaginary',commas),value) -case '(': -return new ListOrTupleCtx( -new ExprCtx(C,'tuple',commas),'tuple') -case '[': -return new ListOrTupleCtx( -new ExprCtx(C,'list',commas),'list') -case '{': -return new AbstractExprCtx( -new DictOrSetCtx( -new ExprCtx(C,'dict_or_set',commas)),false) -case 'ellipsis': -return new EllipsisCtx( -new ExprCtx(C,'ellipsis',commas)) -case 'not': -if(C.type=='op' && C.op=='is'){ -C.op='is_not' -return new AbstractExprCtx(C,false)} -return new AbstractExprCtx( -new NotCtx(new ExprCtx(C,'not',commas)),false) -case 'lambda': -return new LambdaCtx(new ExprCtx(C,'lambda',commas)) -case 'op': -var tg=value -if(C.parent.type=='op' && '+-~'.indexOf(tg)==-1){raise_syntax_error(C)} -switch(tg){case '*': -C.parent.tree.pop() -commas=C.with_commas -C=C.parent -C.position=$token.value -return new AbstractExprCtx( -new StarredCtx( -new ExprCtx(C,'expr',commas)),false) -case '**': -C.parent.tree.pop() -commas=C.with_commas -C=C.parent -C.position=$token.value -if(C.type !='dict_or_set'){raise_syntax_error(C)} -return new AbstractExprCtx( -new KwdCtx( -new ExprCtx(C,'expr',commas)),false) -case '-': -case '~': -case '+': -C.parent.tree.pop() -return new AbstractExprCtx( -new UnaryCtx( -new ExprCtx(C.parent,'unary',false),tg),false -) -case 'not': -C.parent.tree.pop() -commas=C.with_commas -C=C.parent -return new NotCtx( -new ExprCtx(C,'not',commas)) -case '...': -return new EllipsisCtx(new ExprCtx(C,'ellipsis',commas))} -raise_syntax_error(C) -break -case 'in': -if(C.parent.type=='op' && C.parent.op=='not'){C.parent.op='not_in' -return C} -raise_syntax_error(C) -break -case '=': -if(C.parent.type=="yield"){raise_syntax_error(C,"assignment to yield expression not possible",C.parent.position)} -raise_syntax_error(C) -break -case 'yield': -return new AbstractExprCtx(new YieldCtx(C),true) -case ':': -if(C.parent.type=="sub" || -(C.parent.type=="list_or_tuple" && -C.parent.parent.type=="sub")){return new AbstractExprCtx(new SliceCtx(C.parent),false)} -return transition(C.parent,token,value) -case ')': -case ',': -switch(C.parent.type){case 'list_or_tuple': -case 'slice': -case 'call_arg': -case 'op': -case 'yield': -break -case 'match': -if(token==','){ -C.parent.tree.pop() -var tuple=new ListOrTupleCtx(C.parent,'tuple') -tuple.implicit=true -tuple.has_comma=true -tuple.tree=[C] -C.parent=tuple -return tuple} -break -case 'func_arg_id': -raise_syntax_error(C,'expected default value expression') -default: -raise_syntax_error(C)} -break -case '.': -case 'assert': -case 'break': -case 'class': -case 
'continue': -case 'def': -case 'except': -case 'for': -case 'while': -case 'return': -case 'try': -raise_syntax_error(C) -break} -return transition(C.parent,token,value)} -var AliasCtx=$B.parser.AliasCtx=function(C){ -this.type='ctx_manager_alias' -this.parent=C -this.tree=[] -C.tree[C.tree.length-1].alias=this} -AliasCtx.prototype.transition=function(token,value){var C=this -switch(token){case ',': -case ')': -case ':': -check_assignment(C.tree[0]) -C.parent.set_alias(C.tree[0].tree[0]) -return transition(C.parent,token,value) -case 'eol': -$token.value=last_position(C) -raise_syntax_error(C,"expected ':'")} -raise_syntax_error(C)} -var AnnotationCtx=$B.parser.AnnotationCtx=function(C){ -this.type='annotation' -this.parent=C -this.tree=[] -C.annotation=this -var scope=get_scope(C) -if(scope.ntype=="def" && C.tree && C.tree.length > 0 && -C.tree[0].type=="id"){var name=C.tree[0].value -scope.annotations=scope.annotations ||new Set() -scope.annotations.add(name)}} -AnnotationCtx.prototype.transition=function(token){var C=this -if(token=="eol" && C.tree.length==1 && -C.tree[0].tree.length==0){raise_syntax_error(C)}else if(token==':' && C.parent.type !="def"){raise_syntax_error(C,"more than one annotation")}else if(token=="augm_assign"){raise_syntax_error(C,"augmented assign as annotation")}else if(token=="op"){raise_syntax_error(C,"operator as annotation")} -if(C.parent.type=='expr'){C.parent.with_commas=false} -return transition(C.parent,token)} -var AssertCtx=$B.parser.AssertCtx=function(C){ -this.type='assert' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree[C.tree.length]=this} -AssertCtx.prototype.ast=function(){ -var msg=this.tree[1],ast_obj=new ast.Assert(this.tree[0].ast(),msg===undefined ? msg :msg.ast()) -set_position(ast_obj,this.position) -return ast_obj} -AssertCtx.prototype.transition=function(token){var C=this -if(token==","){if(this.tree.length > 1){raise_syntax_error(C,'(too many commas after assert)')} -return new AbstractExprCtx(this,false)} -if(token=='eol'){if(this.tree.length==1 && -this.tree[0].type=='expr' && -this.tree[0].tree[0].type=='list_or_tuple'){$B.warn(_b_.SyntaxWarning,"assertion is always true, perhaps remove parentheses?",get_module(C).filename,$token.value)} -return transition(C.parent,token)} -raise_syntax_error(C)} -var AssignCtx=$B.parser.AssignCtx=function(C){ -check_assignment(C) -this.type='assign' -this.position=$token.value -C.parent.tree.pop() -C.parent.tree.push(this) -this.parent=C.parent -this.tree=[C] -if(C.type=='assign'){check_assignment(C.tree[1])}else{var assigned=C.tree[0] -if(assigned.type=="ellipsis"){raise_syntax_error(C,'cannot assign to Ellipsis')}else if(assigned.type=='unary'){raise_syntax_error(C,'cannot assign to operator')}else if(assigned.type=='starred'){if(assigned.tree[0].name=='id'){var id=assigned.tree[0].tree[0].value -if(['None','True','False','__debug__'].indexOf(id)>-1){raise_syntax_error(C,'cannot assign to '+id)}} -if(assigned.parent.in_tuple===undefined){raise_syntax_error(C,"starred assignment target must be in a list or tuple")}}}} -function set_ctx_to_store(obj){if(Array.isArray(obj)){for(let item of obj){set_ctx_to_store(item)}}else if(obj instanceof ast.List || -obj instanceof ast.Tuple){for(let item of obj.elts){set_ctx_to_store(item)}}else if(obj instanceof ast.Starred){obj.value.ctx=new ast.Store()}else if(obj===undefined){}else if(obj.ctx){obj.ctx=new ast.Store()}else{console.log('bizarre',obj,obj.constructor.$name)}} -AssignCtx.prototype.ast=function(){var 
value=this.tree[1].ast(),targets=[],target=this.tree[0] -if(target.type=='expr' && target.tree[0].type=='list_or_tuple'){target=target.tree[0]} -if(target.type=='list_or_tuple'){target=target.ast() -target.ctx=new ast.Store() -targets=[target]}else{while(target.type=='assign'){targets.splice(0,0,target.tree[1].ast()) -target=target.tree[0]} -targets.splice(0,0,target.ast())} -value.ctx=new ast.Load() -var ast_obj -if(target.annotation){ast_obj=new ast.AnnAssign( -target.tree[0].ast(),target.annotation.tree[0].ast(),value,target.$was_parenthesized ? 0 :1) -set_position(ast_obj.annotation,target.annotation.position,last_position(target.annotation)) -ast_obj.target.ctx=new ast.Store()}else{ast_obj=new ast.Assign(targets,value)} -set_position(ast_obj,this.position) -set_ctx_to_store(ast_obj.targets) -return ast_obj} -AssignCtx.prototype.transition=function(token){var C=this -if(token=='eol'){if(C.tree[1].type=='abstract_expr'){raise_syntax_error(C)} -return transition(C.parent,'eol')} -raise_syntax_error(C)} -var AsyncCtx=$B.parser.AsyncCtx=function(C){ -this.type='async' -this.parent=C -C.async=true -this.position=C.position=$token.value} -AsyncCtx.prototype.transition=function(token,value){var C=this -if(token=="def"){return transition(C.parent,token,value)}else if(token=="with"){let ctx=transition(C.parent,token,value) -ctx.async=C -return ctx}else if(token=="for"){let ctx=transition(C.parent,token,value) -ctx.parent.async=C -return ctx} -raise_syntax_error(C)} -var AttrCtx=$B.parser.AttrCtx=function(C){ -this.type='attribute' -this.value=C.tree[0] -this.parent=C -this.position=$token.value -C.tree.pop() -C.tree[C.tree.length]=this -this.tree=[] -this.func='getattr' } -AttrCtx.prototype.ast=function(){ -var value=this.value.ast(),attr=this.unmangled_name,ctx=new ast.Load() -if(this.func=='setattr'){ctx=new ast.Store()}else if(this.func=='delattr'){ctx=new ast.Delete()} -var ast_obj=new ast.Attribute(value,attr,ctx) -set_position(ast_obj,this.position,this.end_position) -return ast_obj} -AttrCtx.prototype.transition=function(token,value){var C=this -if(token==='id'){var name=value -if(name=='__debug__'){raise_syntax_error(C,'cannot assign to __debug__')}else if(noassign[name]===true){raise_syntax_error(C)} -C.unmangled_name=name -C.position=$token.value -C.end_position=$token.value -name=mangle_name(name,C) -C.name=name -return C.parent} -raise_syntax_error(C)} -var AugmentedAssignCtx=$B.parser.AugmentedAssignCtx=function(C,op){ -check_assignment(C,{augmented:true}) -this.type='augm_assign' -this.C=C -this.parent=C.parent -this.position=$token.value -C.parent.tree.pop() -C.parent.tree[C.parent.tree.length]=this -this.op=op -this.tree=[C] -var scope=this.scope=get_scope(this) -this.module=scope.module} -AugmentedAssignCtx.prototype.ast=function(){ -var target=this.tree[0].ast(),value=this.tree[1].ast() -target.ctx=new ast.Store() -value.ctx=new ast.Load() -var op=this.op.substr(0,this.op.length-1),ast_type_class=op2ast_class[op],ast_class=ast_type_class[1] -var ast_obj=new ast.AugAssign(target,new ast_class(),value) -set_position(ast_obj,this.position) -return ast_obj} -AugmentedAssignCtx.prototype.transition=function(token){var C=this -if(token=='eol'){if(C.tree[1].type=='abstract_expr'){raise_syntax_error(C)} -return transition(C.parent,'eol')} -raise_syntax_error(C)} -var AwaitCtx=$B.parser.AwaitCtx=function(C){ -this.type='await' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree.push(this) -var p=C -while(p){if(p.type=="list_or_tuple"){p.is_await=true} -p=p.parent} -var 
node=get_node(this) -node.awaits=node.awaits ||[] -node.awaits.push(this)} -AwaitCtx.prototype.ast=function(){ -var ast_obj=new ast.Await(this.tree[0].ast()) -set_position(ast_obj,this.position) -return ast_obj} -AwaitCtx.prototype.transition=function(token,value){var C=this -C.parent.is_await=true -return transition(C.parent,token,value)} -var BodyCtx=$B.parser.BodyCtx=function(C){ -var ctx_node=C.parent -while(ctx_node.type !=='node'){ctx_node=ctx_node.parent} -var tree_node=ctx_node.node -var body_node=new $Node() -body_node.is_body_node=true -body_node.line_num=tree_node.line_num -tree_node.insert(0,body_node) -return new NodeCtx(body_node)} -var BreakCtx=$B.parser.BreakCtx=function(C){ -this.type='break' -this.position=$token.value -this.parent=C -C.tree[C.tree.length]=this} -BreakCtx.prototype.ast=function(){var ast_obj=new ast.Break() -set_position(ast_obj,this.position) -return ast_obj} -BreakCtx.prototype.transition=function(token){var C=this -if(token=='eol'){return transition(C.parent,'eol')} -raise_syntax_error(C)} -var CallArgCtx=$B.parser.CallArgCtx=function(C){ -this.type='call_arg' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree.push(this) -this.expect='id'} -CallArgCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'await': -case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'JoinedStr': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'ellipsis': -case 'not': -case 'lambda': -if(C.expect=='id'){this.position=$token.value -C.expect=',' -var expr=new AbstractExprCtx(C,false) -return transition(expr,token,value)} -break -case '=': -if(C.expect==','){return new ExprCtx(new KwArgCtx(C),'kw_value',false)} -break -case 'for': -return new TargetListCtx(new ForExpr(new GeneratorExpCtx(C))) -case 'op': -if(C.expect=='id'){var op=value -C.expect=',' -switch(op){case '+': -case '-': -case '~': -return transition(new AbstractExprCtx(C,false),token,op) -case '*': -C.parent.tree.pop() -return new StarArgCtx(C.parent) -case '**': -C.parent.tree.pop() -return new DoubleStarArgCtx(C.parent)}} -raise_syntax_error(C) -break -case ')': -return transition(C.parent,token) -case ':': -if(C.expect==',' && -C.parent.parent.type=='lambda'){return transition(C.parent.parent,token)} -break -case ',': -if(C.expect==','){return transition(C.parent,token,value)}} -raise_syntax_error(C)} -var CallCtx=$B.parser.CallCtx=function(C){ -this.position=$token.value -this.type='call' -this.func=C.tree[0] -if(this.func !==undefined){ -this.func.parent=this -this.parenth_position=this.position -this.position=this.func.position} -this.parent=C -if(C.type !='class'){C.tree.pop() -C.tree[C.tree.length]=this}else{ -C.args=this} -this.expect='id' -this.tree=[]} -CallCtx.prototype.ast=function(){var res=new ast.Call(this.func.ast(),[],[]),keywords=new Set() -for(var call_arg of this.tree){if(call_arg.type=='double_star_arg'){let value=call_arg.tree[0].tree[0].ast(),keyword=new ast.keyword(_b_.None,value) -delete keyword.arg -res.keywords.push(keyword)}else if(call_arg.type=='star_arg'){if(res.keywords.length > 0){if(! 
res.keywords[0].arg){raise_syntax_error(this,'iterable argument unpacking follows keyword argument unpacking')}} -let starred=new ast.Starred(call_arg.tree[0].ast()) -set_position(starred,call_arg.position) -starred.ctx=new ast.Load() -res.args.push(starred)}else if(call_arg.type=='genexpr'){res.args.push(call_arg.ast())}else{let item=call_arg.tree[0] -if(item===undefined){ -continue} -if(item.type=='kwarg'){let key=item.tree[0].value -if(key=='__debug__'){raise_syntax_error_known_range(this,this.position,this.end_position,"cannot assign to __debug__")}else if(['True','False','None'].indexOf(key)>-1){raise_syntax_error_known_range(this,item.position,item.equal_sign_position,'expression cannot contain assignment, perhaps you meant "=="?')} -if(keywords.has(key)){raise_syntax_error_known_range(item,item.position,last_position(item),`keyword argument repeated: ${key}`)} -keywords.add(key) -let keyword=new ast.keyword(item.tree[0].value,item.tree[1].ast()) -set_position(keyword,item.position) -res.keywords.push(keyword)}else{if(res.keywords.length > 0){if(res.keywords[0].arg){raise_syntax_error_known_range(this,item.position,last_position(item),'positional argument follows keyword argument')}else{raise_syntax_error_known_range(this,item.position,last_position(item),'positional argument follows keyword argument unpacking')}} -res.args.push(item.ast())}}} -set_position(res,this.position,this.end_position) -return res} -CallCtx.prototype.transition=function(token,value){var C=this -switch(token){case ',': -if(C.expect=='id'){raise_syntax_error(C)} -C.expect='id' -return C -case 'await': -case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'JoinedStr': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -case 'ellipsis': -C.expect=',' -return transition(new CallArgCtx(C),token,value) -case ')': -C.end_position=$token.value -return C.parent -case 'op': -C.expect=',' -switch(value){case '-': -case '~': -case '+': -C.expect=',' -return transition(new CallArgCtx(C),token,value) -case '*': -C.has_star=true -return new StarArgCtx(C) -case '**': -C.has_dstar=true -return new DoubleStarArgCtx(C)} -raise_syntax_error(C) -break -case 'yield': -raise_syntax_error(C)} -return transition(C.parent,token,value)} -var CaseCtx=$B.parser.CaseCtx=function(node_ctx){ -this.type="case" -this.position=$token.value -node_ctx.tree=[this] -this.parent=node_ctx -this.tree=[] -this.expect='as'} -CaseCtx.prototype.ast=function(){ -var ast_obj=new ast.match_case(this.tree[0].ast(),this.has_guard ? 
this.tree[1].tree[0].ast():undefined,ast_body(this.parent)) -set_position(ast_obj,this.position) -return ast_obj} -CaseCtx.prototype.set_alias=function(name){this.alias=name} -function is_irrefutable(pattern){var cause -if(pattern.type=="capture_pattern"){return pattern.tree[0]}else if(pattern.type=="or_pattern"){for(var subpattern of pattern.tree){if(cause=is_irrefutable(subpattern)){return cause}}}else if(pattern.type=="sequence_pattern" && -pattern.token=='(' && -pattern.tree.length==1 && -(cause=is_irrefutable(pattern.tree[0]))){return cause} -return false} -CaseCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'as': -C.expect=':' -return new AbstractExprCtx(new AliasCtx(C)) -case ':': -var cause -if(cause=is_irrefutable(this.tree[0])){ -get_node(C).parent.irrefutable=cause} -switch(C.expect){case 'id': -case 'as': -case ':': -var last=$B.last(C.tree) -if(last && last.type=='sequence_pattern'){remove_empty_pattern(last)} -return BodyCtx(C)} -break -case 'op': -if(value=='|'){return new PatternCtx(new PatternOrCtx(C))} -raise_syntax_error(C,"expected ':'") -break -case ',': -if(C.expect==':' ||C.expect=='as'){return new PatternCtx(new PatternSequenceCtx(C))} -break -case 'if': -C.has_guard=true -return new AbstractExprCtx(new ConditionCtx(C,token),false) -default: -raise_syntax_error(C,"expected ':'")}} -var ClassCtx=$B.parser.ClassCtx=function(C){ -this.type='class' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree[C.tree.length]=this -this.expect='id' -var scope=this.scope=get_scope(this) -this.parent.node.parent_block=scope -this.parent.node.bound={}} -ClassCtx.prototype.ast=function(){ -var decorators=get_decorators(this.parent.node),bases=[],keywords=[],type_params=[] -if(this.args){for(var arg of this.args.tree){if(arg.tree[0].type=='kwarg'){keywords.push(new ast.keyword(arg.tree[0].tree[0].value,arg.tree[0].tree[1].ast()))}else{bases.push(arg.tree[0].ast())}}} -if(this.type_params){type_params=this.type_params.ast()} -var ast_obj=new ast.ClassDef(this.name,bases,keywords,ast_body(this.parent),decorators,type_params) -set_position(ast_obj,this.position) -return ast_obj} -ClassCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'id': -if(C.expect=='id'){C.set_name(value) -C.expect='(:' -return C} -break -case '(': -if(C.name===undefined){raise_syntax_error(C,'missing class name')} -C.parenthesis_position=$token.value -return new CallCtx(C) -case '[': -if(C.name===undefined){raise_syntax_error(C,'missing class name')} -return new TypeParamsCtx(C) -case ':': -if(this.args){for(var arg of this.args.tree){var param=arg.tree[0] -if(arg.type !='call_arg'){$token.value=C.parenthesis_position -raise_syntax_error(C,"invalid syntax")} -if((param.type=='expr' && param.name=='id')|| -param.type=="kwarg"){continue} -$token.value=arg.position -raise_syntax_error(arg,'invalid class parameter')}} -return BodyCtx(C) -case 'eol': -raise_syntax_error(C,"expected ':'")} -raise_syntax_error(C)} -ClassCtx.prototype.set_name=function(name){var C=this.parent -this.random=$B.UUID() -this.name=name -this.id=C.node.module+'_'+name+'_'+this.random -this.parent.node.id=this.id -var scope=this.scope,parent_block=scope -var block=scope,parent_classes=[] -while(block.ntype=="class"){parent_classes.splice(0,0,block.C.tree[0].name) -block=block.parent} -this.qualname=parent_classes.concat([name]).join(".") -while(parent_block.C && -parent_block.C.tree[0].type=='class'){parent_block=parent_block.parent} -while(parent_block.C && -'def' 
!=parent_block.C.tree[0].type && -'generator' !=parent_block.C.tree[0].type){parent_block=parent_block.parent} -this.parent.node.parent_block=parent_block} -var Comprehension={generators:function(comps){ -var comprehensions=[] -for(var item of comps){if(item.type=='for'){var target=item.tree[0].ast() -set_ctx_to_store(target) -comprehensions.push( -new ast.comprehension( -target,item.tree[1].ast(),[],item.is_async ? 1 :0 -) -)}else{$B.last(comprehensions).ifs.push(item.tree[0].ast())}} -return comprehensions},make_comp:function(comp,C){comp.comprehension=true -comp.parent=C.parent -comp.id=comp.type+$B.UUID() -var scope=get_scope(C) -comp.parent_block=scope -while(scope){if(scope.C && scope.C.tree && -scope.C.tree.length > 0 && -scope.C.tree[0].async){comp.async=true -break} -scope=scope.parent_block} -comp.module=get_module(C).module -comp.module_ref=comp.module.replace(/\./g,'_') -C.parent.tree[C.parent.tree.length-1]=comp -Comprehension.set_parent_block(C.tree[0],comp)},set_parent_block:function(ctx,parent_block){if(ctx.tree){for(var item of ctx.tree){if(item.comprehension){item.parent_block=parent_block} -Comprehension.set_parent_block(item,parent_block)}}}} -var ConditionCtx=$B.parser.ConditionCtx=function(C,token){ -this.type='condition' -this.token=token -this.parent=C -this.tree=[] -this.position=$token.value -this.node=get_node(this) -this.scope=get_scope(this) -if(token=='elif'){ -var rank=this.node.parent.children.indexOf(this.node),previous=this.node.parent.children[rank-1] -previous.C.tree[0].orelse=this} -C.tree.push(this)} -ConditionCtx.prototype.ast=function(){ -var types={'if':'If','while':'While','elif':'If'} -var res=new ast[types[this.token]](this.tree[0].ast()) -if(this.orelse){if(this.orelse.token=='elif'){res.orelse=[this.orelse.ast()]}else{res.orelse=this.orelse.ast()}}else{res.orelse=[]} -res.body=ast_body(this) -set_position(res,this.position) -return res} -ConditionCtx.prototype.transition=function(token,value){var C=this -if(token==':'){if(C.tree[0].type=="abstract_expr" && -C.tree[0].tree.length==0){ -raise_syntax_error(C)} -return BodyCtx(C)}else if(C.in_comp && C.token=='if'){ -if(token==']'){return transition(C.parent,token,value)}else if(token=='if'){var if_exp=new ConditionCtx(C.parent,'if') -if_exp.in_comp=C.in_comp -return new AbstractExprCtx(if_exp,false)}else if(')]}'.indexOf(token)>-1){return transition(this.parent,token,value)}else if(C.in_comp && token=='for'){return new TargetListCtx(new ForExpr(C.parent))} -if(token==',' && parent_match(C,{type:'call'})){raise_syntax_error_known_range(C,C.in_comp.position,last_position(C),'Generator expression must be parenthesized')}} -raise_syntax_error(C,"expected ':'")} -var ContinueCtx=$B.parser.ContinueCtx=function(C){ -this.type='continue' -this.parent=C -this.position=$token.value -get_node(this).is_continue=true -C.tree[C.tree.length]=this} -ContinueCtx.prototype.ast=function(){var ast_obj=new ast.Continue() -set_position(ast_obj,this.position) -return ast_obj} -ContinueCtx.prototype.transition=function(token){var C=this -if(token=='eol'){return C.parent} -raise_syntax_error(C)} -var DecoratorCtx=$B.parser.DecoratorCtx=function(C){ -this.type='decorator' -this.parent=C -C.tree[C.tree.length]=this -this.tree=[] -this.position=$token.value} -DecoratorCtx.prototype.transition=function(token){var C=this -if(token=='eol'){return transition(C.parent,token)} -raise_syntax_error(C)} -function get_decorators(node){var decorators=[] -var parent_node=node.parent -var rank=parent_node.children.indexOf(node) 
-while(true){rank-- -if(rank < 0){break}else if(parent_node.children[rank].C.tree[0].type== -'decorator'){var deco=parent_node.children[rank].C.tree[0].tree[0] -decorators.splice(0,0,deco.ast())}else{break}} -return decorators} -var DefCtx=$B.parser.DefCtx=function(C){this.type='def' -this.name=null -this.parent=C -this.tree=[] -this.async=C.async -if(this.async){this.position=C.position}else{this.position=$token.value} -C.tree[C.tree.length]=this -this.enclosing=[] -var scope=this.scope=get_scope(this) -if(scope.C && scope.C.tree[0].type=="class"){this.class_name=scope.C.tree[0].name} -var parent_block=scope -while(parent_block.C && -parent_block.C.tree[0].type=='class'){parent_block=parent_block.parent} -while(parent_block.C && -'def' !=parent_block.C.tree[0].type){parent_block=parent_block.parent} -this.parent.node.parent_block=parent_block -var pb=parent_block -this.is_comp=pb.is_comp -while(pb && pb.C){if(pb.C.tree[0].type=='def'){this.inside_function=true -break} -pb=pb.parent_block} -this.module=scope.module -this.root=get_module(this) -this.positional_list=[] -this.default_list=[] -this.other_args=null -this.other_kw=null -this.after_star=[]} -DefCtx.prototype.ast=function(){var args={posonlyargs:[],args:[],kwonlyargs:[],kw_defaults:[],defaults:[],type_params:[]},decorators=get_decorators(this.parent.node),func_args=this.tree[1],res -args=func_args.ast() -if(this.async){res=new ast.AsyncFunctionDef(this.name,args,[],decorators)}else{res=new ast.FunctionDef(this.name,args,[],decorators)} -if(this.annotation){res.returns=this.annotation.tree[0].ast()} -if(this.type_params){res.type_params=this.type_params.ast()} -res.body=ast_body(this.parent) -set_position(res,this.position) -return res} -DefCtx.prototype.set_name=function(name){if(["None","True","False"].indexOf(name)>-1){raise_syntax_error(this)} -new IdCtx(this,name) -this.name=name -this.id=this.scope.id+'_'+name -this.id=this.id.replace(/\./g,'_') -this.id+='_'+$B.UUID() -this.parent.node.id=this.id -this.parent.node.module=this.module} -DefCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'id': -if(C.name){raise_syntax_error(C)} -C.set_name(value) -return C -case '(': -if(C.name==null){raise_syntax_error(C,"missing name in function definition")} -C.has_args=true; -return new FuncArgs(C) -case '[': -if(C.name===undefined){raise_syntax_error(C,'missing function name')} -return new TypeParamsCtx(C) -case ')': -return C -case 'annotation': -return new AbstractExprCtx(new AnnotationCtx(C),true) -case ':': -if(C.has_args){return BodyCtx(C)} -raise_syntax_error(C,"expected '('") -break -case 'eol': -if(C.has_args){raise_syntax_error(C,"expected ':'")}} -raise_syntax_error(C)} -var DelCtx=$B.parser.DelCtx=function(C){ -this.type='del' -this.parent=C -C.tree.push(this) -this.tree=[] -this.position=$token.value} -DelCtx.prototype.ast=function(){let targets -if(this.tree[0].type=='list_or_tuple'){ -targets=this.tree[0].tree.map(x=> x.ast())}else if(this.tree[0].type=='expr' && -this.tree[0].tree[0].type=='list_or_tuple'){ -targets=this.tree[0].tree[0].ast() -targets.ctx=new ast.Del() -for(var elt of targets.elts){elt.ctx=new ast.Del()} -let ast_obj=new ast.Delete([targets]) -set_position(ast_obj,this.position) -return ast_obj}else{targets=[this.tree[0].tree[0].ast()]} -for(let target of targets){target.ctx=new ast.Del()} -let ast_obj=new ast.Delete(targets) -set_position(ast_obj,this.position) -return ast_obj} -DelCtx.prototype.transition=function(token){var C=this 
-if(token=='eol'){check_assignment(this.tree[0],{action:'delete'}) -return transition(C.parent,token)} -raise_syntax_error(C)} -var DictCompCtx=function(C){ -if(C.tree[0].type=='expr' && -C.tree[0].tree[0].comprehension){ -var comp=C.tree[0].tree[0] -comp.parent_block=this} -this.type='dictcomp' -this.position=$token.value -this.comprehension=true -this.parent=C.parent -this.key=C.tree[0] -this.value=C.tree[1] -this.key.parent=this -this.value.parent=this -this.tree=[] -this.id='dictcomp'+$B.UUID() -this.parent_block=get_scope(C) -this.module=get_module(C).module -C.parent.tree[C.parent.tree.length-1]=this -this.type='dictcomp' -Comprehension.make_comp(this,C)} -DictCompCtx.prototype.ast=function(){ -if(this.value.ast===undefined){console.log('dict comp ast, no value.ast',this)} -var ast_obj=new ast.DictComp( -this.key.ast(),this.value.ast(),Comprehension.generators(this.tree) -) -set_position(ast_obj,this.position) -return ast_obj} -DictCompCtx.prototype.transition=function(token){var C=this -if(token=='}'){return this.parent} -raise_syntax_error(C)} -var DictOrSetCtx=$B.parser.DictOrSetCtx=function(C){ -this.type='dict_or_set' -this.real='dict_or_set' -this.expect=',' -this.closed=false -this.position=$token.value -this.nb_items=0 -this.parent=C -this.tree=[] -C.tree[C.tree.length]=this} -DictOrSetCtx.prototype.ast=function(){ -var ast_obj -if(this.real=='dict'){let keys=[],values=[] -for(let i=0,len=this.items.length;i < len;i++){if(this.items[i].type=='expr' && -this.items[i].tree[0].type=='kwd'){keys.push(_b_.None) -values.push(this.items[i].tree[0].tree[0].ast())}else{keys.push(this.items[i].ast()) -values.push(this.items[i+1].ast()) -i++}} -ast_obj=new ast.Dict(keys,values)}else if(this.real=='set'){var items=[] -for(let item of this.items){if(item.packed){var starred=new ast.Starred(item.ast(),new ast.Load()) -set_position(starred,item.position) -items.push(starred)}else{items.push(item.ast())}} -ast_obj=new ast.Set(items)} -set_position(ast_obj,this.position) -return ast_obj} -DictOrSetCtx.prototype.transition=function(token,value){var C=this -if(C.closed){switch(token){case '[': -return new AbstractExprCtx(new SubscripCtx(C.parent),false) -case '(': -return new CallArgCtx(new CallCtx(C.parent))} -return transition(C.parent,token,value)}else{if(C.expect==','){function check_last(){var last=$B.last(C.tree),err_msg -if(last && last.wrong_assignment){ -err_msg="invalid syntax. Maybe you meant '==' or ':=' instead of '='?"}else if(C.real=='dict' && last.type=='expr' && -last.tree[0].type=='starred'){ -err_msg='cannot use a starred expression in a dictionary value'}else if(C.real=='set' && last.tree[0].type=='kwd'){$token.value=last.position -raise_syntax_error(C)} -if(err_msg){raise_syntax_error_known_range(C,last.position,last_position(last),err_msg)}} -switch(token){case '}': -var last=$B.last(C.tree) -if(last.type=="expr" && last.tree[0].type=="kwd"){C.nb_items+=2}else if(last.type=="abstract_expr"){C.tree.pop()}else{C.nb_items++} -check_last() -C.end_position=$token.value -if(C.real=='dict_or_set'){ -for(var item of C.tree){if(item.type=="expr" && item.tree[0].type=="kwd"){C.real='dict' -break}}} -if(C.real=='dict_or_set'){ -C.real=C.tree.length==0 ? 
-'dict' :'set'} -switch(C.real){case 'set': -C.items=C.tree -C.tree=[] -C.closed=true -return C -case 'dict': -if(C.tree.length && -$B.last(C.tree).type=='abstract_expr'){raise_syntax_error(C,"expression expected after dictionary key and ':'")}else{if(C.nb_items % 2 !=0){raise_syntax_error(C,"':' expected after dictionary key")}} -C.items=C.tree -C.tree=[] -C.closed=true -return C} -raise_syntax_error(C) -break -case ',': -check_last() -var last=$B.last(C.tree) -if(last.type=="expr" && last.tree[0].type=="kwd"){C.nb_items+=2}else{C.nb_items++} -if(C.real=='dict_or_set'){var last=C.tree[0] -C.real=(last.type=='expr' && -last.tree[0].type=='kwd')? 'dict' :'set'} -if(C.real=='dict' && C.nb_items % 2){raise_syntax_error(C,"':' expected after dictionary key")} -return new AbstractExprCtx(C,false) -case ':': -if(C.real=='dict_or_set'){C.real='dict'} -if(C.real=='dict'){C.expect='value' -this.nb_items++ -C.value_pos=$token.value -return C}else{raise_syntax_error(C)} -break -case 'for': -if(C.real=="set" && C.tree.length > 1){$token.value=C.tree[0].position -raise_syntax_error(C,"did you forget "+ -"parentheses around the comprehension target?")} -var expr=C.tree[0],err_msg -if(expr.type=='expr'){if(expr.tree[0].type=='kwd'){err_msg='dict unpacking cannot be used in dict comprehension'}else if(expr.tree[0].type=='starred'){err_msg='iterable unpacking cannot be used in comprehension'} -if(err_msg){raise_syntax_error_known_range(C,expr.position,last_position(expr),err_msg)}} -if(C.real=='dict_or_set'){return new TargetListCtx(new ForExpr( -new SetCompCtx(this)))}else{return new TargetListCtx(new ForExpr( -new DictCompCtx(this)))}} -raise_syntax_error(C)}else if(C.expect=='value'){if(python_keywords.indexOf(token)>-1){var ae=new AbstractExprCtx(C,false) -try{transition(ae,token,value) -C.tree.pop()}catch(err){raise_syntax_error(C)}} -try{C.expect=',' -return transition(new AbstractExprCtx(C,false),token,value)}catch(err){$token.value=C.value_pos -raise_syntax_error(C,"expression expected after "+ -"dictionary key and ':'")}} -return transition(C.parent,token,value)}} -var DoubleStarArgCtx=$B.parser.DoubleStarArgCtx=function(C){ -this.type='double_star_arg' -this.parent=C -this.tree=[] -C.tree[C.tree.length]=this} -DoubleStarArgCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'JoinedStr': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -return transition(new AbstractExprCtx(C,false),token,value) -case ',': -case ')': -return transition(C.parent,token) -case ':': -if(C.parent.parent.type=='lambda'){return transition(C.parent.parent,token)}} -raise_syntax_error(C)} -var EllipsisCtx=$B.parser.EllipsisCtx=function(C){ -this.type='ellipsis' -this.parent=C -this.position=$token.value -C.tree[C.tree.length]=this} -EllipsisCtx.prototype.ast=function(){var ast_obj=new ast.Constant(_b_.Ellipsis) -set_position(ast_obj,this.position) -return ast_obj} -EllipsisCtx.prototype.transition=function(token,value){var C=this -return transition(C.parent,token,value)} -var EndOfPositionalCtx=$B.parser.$EndOfConditionalCtx=function(C){ -this.type="end_positional" -this.parent=C -C.has_end_positional=true -C.parent.pos_only=C.tree.length -C.tree.push(this)} -EndOfPositionalCtx.prototype.transition=function(token,value){var C=this -if(token=="," ||token==")"){return transition(C.parent,token,value)} -if(token=='op' && value=='*'){raise_syntax_error(C,"expected comma between / and *")} 
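// Editor's note: a simplified restatement of the dict-vs-set decision that DictOrSetCtx
// above makes when '}' is reached while the context is still 'dict_or_set'. The real code
// works on context trees and switches to 'dict' as soon as a ':' or a '**' item is parsed;
// the item shapes used here are illustrative only.
function displayKind(items){
    if(items.length == 0){ return 'dict'; }                // {} is an empty dict
    for(var item of items){
        if(item.kind == 'key_value' || item.kind == 'double_star'){
            return 'dict';                                  // {'a': 1}  or  {**mapping}
        }
    }
    return 'set';                                           // only plain items: {1, 2, 3}
}
// displayKind([])                                     -> 'dict'
// displayKind([{kind: 'expr'}])                       -> 'set'
// displayKind([{kind: 'key_value'}, {kind: 'expr'}])  -> 'dict'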
-raise_syntax_error(C)} -var ExceptCtx=$B.parser.ExceptCtx=function(C){ -this.type='except' -this.position=$token.value -this.parent=C -C.tree[C.tree.length]=this -this.tree=[] -this.scope=get_scope(this) -var node=C.node,rank=node.parent.children.indexOf(node),ix=rank-1 -while(node.parent.children[ix].C.tree[0].type !='try'){ix--} -this.try_node=node.parent.children[ix] -this.is_first_child=rank==ix+1 -if(this.try_node.C.is_trystar){this.expect='*'}else{this.expect='id'}} -ExceptCtx.prototype.ast=function(){ -var ast_obj=new ast.ExceptHandler( -this.tree.length==1 ? this.tree[0].ast():undefined,this.has_alias ? this.tree[0].alias :undefined,ast_body(this.parent) -) -set_position(ast_obj,this.position) -return ast_obj} -ExceptCtx.prototype.transition=function(token,value){var C=this -if(token=='op' && value=='*'){ -if(C.is_first_child){ -C.try_node.C.is_trystar=true -C.expect='id' -return C}else if(C.expect !='*'){ -raise_syntax_error(C,"cannot have both 'except' and 'except*' "+ -"on the same 'try'")}else{C.expect='id' -return C}}else if(C.expect=='*'){ -raise_syntax_error(C,"cannot have both 'except' and 'except*' "+ -"on the same 'try'")} -switch(token){case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'JoinedStr': -case 'bytes': -case '[': -case '(': -case '{': -case 'not': -case 'lambda': -if(C.expect=='id'){C.expect='as' -return transition(new AbstractExprCtx(C,false),token,value)}} -switch(token){case 'as': -if(C.expect=='as' && -C.has_alias===undefined){C.expect='alias' -C.has_alias=true -return C} -break -case 'id': -if(C.expect=='alias'){C.expect=':' -C.set_alias(value) -return C} -break -case ':': -if(C.tree.length==0 && C.try_node.C.is_trystar){raise_syntax_error(C,"expected one or more exception types")} -var _ce=C.expect -if(_ce=='id' ||_ce=='as' ||_ce==':'){return BodyCtx(C)} -break -case '(': -if(C.expect=='id' && C.tree.length==0){C.parenth=true -return C} -break -case ')': -if(C.expect==',' ||C.expect=='as'){C.expect='as' -return C} -break -case ',': -if(C.parenth !==undefined && -C.has_alias===undefined && -(C.expect=='as' ||C.expect==',')){C.expect='id' -return C}else if(C.parenth===undefined){raise_syntax_error(C,"multiple exception types must be parenthesized")} -break -case 'eol': -raise_syntax_error(C,"expected ':'")} -raise_syntax_error(C)} -ExceptCtx.prototype.set_alias=function(alias){this.tree[0].alias=mangle_name(alias,this)} -var ExprCtx=$B.parser.ExprCtx=function(C,name,with_commas){ -this.type='expr' -this.name=name -this.position=$token.value -this.with_commas=with_commas -this.expect=',' -this.parent=C -if(C.packed){this.packed=C.packed} -this.tree=[] -C.tree[C.tree.length]=this} -ExprCtx.prototype.ast=function(){var res=this.tree[0].ast() -if(this.packed){}else if(this.annotation){res=new ast.AnnAssign( -res,this.annotation.tree[0].ast(),undefined,this.$was_parenthesized ? 0 :1) -set_position(res,this.position)} -return res} -ExprCtx.prototype.transition=function(token,value){var C=this -if(python_keywords.indexOf(token)>-1 && -['as','else','if','for','from','in'].indexOf(token)==-1){raise_syntax_error(C)} -if(C.parent.expect=='star_target'){if(['pass','in','not','op','augm_assign','=',':=','if','eol']. -indexOf(token)>-1){return transition(C.parent,token,value)}} -switch(token){case 'bytes': -case 'float': -case 'id': -case 'imaginary': -case 'int': -case 'lambda': -case 'pass': -var msg='invalid syntax. Perhaps you forgot a comma?' 
-raise_syntax_error_known_range(C,this.position,$token.value,msg) -break -case '{': -if(C.tree[0].type !="id" || -["print","exec"].indexOf(C.tree[0].value)==-1){raise_syntax_error(C)} -return new AbstractExprCtx(new DictOrSetCtx(C),false) -case '[': -case '(': -case '.': -case 'not': -if(C.expect=='expr'){C.expect=',' -return transition(new AbstractExprCtx(C,false),token,value)}} -switch(token){case 'not': -if(C.expect==','){return new ExprNot(C)} -break -case 'in': -if(C.parent.type=='target_list'){ -return transition(C.parent,token)} -if(C.expect==','){return transition(C,'op','in')} -break -case ',': -if(C.expect==','){if(C.name=='iterator' && -C.parent.parent.type !='node'){ -var for_expr=C.parent.parent -raise_syntax_error_known_range(C,first_position(for_expr),last_position(for_expr),'Generator expression must be parenthesized')} -if(C.with_commas || -["assign","return"].indexOf(C.parent.type)>-1){if(parent_match(C,{type:"yield","from":true})){raise_syntax_error(C,"no implicit tuple for yield from")} -C.parent.tree.pop() -var tuple=new ListOrTupleCtx(C.parent,'tuple') -tuple.implicit=true -tuple.has_comma=true -tuple.tree=[C] -C.parent=tuple -return tuple}} -return transition(C.parent,token) -case '.': -return new AttrCtx(C) -case '[': -if(C.tree[0].type=='id'){ -delete C.tree[0].bound} -return new AbstractExprCtx(new SubscripCtx(C),true) -case '(': -return new CallCtx(C) -case 'op': -if($op_weight[value]===undefined){ -var frs=parent_match(C,{type:"fstring_replacement_field"}) -if(frs){return transition(frs,token,value)} -raise_syntax_error(C)} -if(C.parent.type=='withitem' && C.parent.tree.length==2){raise_syntax_error(C,"expected ':'")} -if(value=='~'){raise_syntax_error(C)} -var op_parent=C.parent,op=value -if(op_parent.type=='ternary' && op_parent.in_else){var new_op=new OpCtx(C,op) -return new AbstractExprCtx(new_op,false)} -var op1=C.parent,repl=null -while(1){if(op1.type=='unary' && op !=='**'){repl=op1 -op1=op1.parent}else if(op1.type=='expr'){op1=op1.parent}else if(op1.type=='op' && -$op_weight[op1.op]>=$op_weight[op]&& -!(op1.op=='**' && op=='**')){ -repl=op1 -op1=op1.parent}else if(op1.type=="not" && -$op_weight['not']> $op_weight[op]){repl=op1 -op1=op1.parent}else{break}} -if(repl===null){if(op1.type=='op'){ -var right=op1.tree.pop(),expr=new ExprCtx(op1,'operand',C.with_commas) -expr.tree.push(right) -right.parent=expr -var new_op=new OpCtx(expr,op) -return new AbstractExprCtx(new_op,false)} -var position=C.position -while(C.parent !==op1){C=C.parent -op_parent=C.parent} -C.parent.tree.pop() -var expr=new ExprCtx(op_parent,'operand',C.with_commas) -expr.position=position -expr.expect=',' -C.parent=expr -var new_op=new OpCtx(C,op) -return new AbstractExprCtx(new_op,false)}else{ -if(op==='and' ||op==='or'){while(repl.parent.type=='not' || -(repl.parent.type=='expr' && -repl.parent.parent.type=='not')){ -repl=repl.parent -op_parent=repl.parent}}} -if(repl.type=='op'){var _flag=false -switch(repl.op){case '<': -case '<=': -case '==': -case '!=': -case 'is': -case '>=': -case '>': -_flag=true} -if(_flag){switch(op){case '<': -case '<=': -case '==': -case '!=': -case 'is': -case '>=': -case '>': -case 'in': -case 'not_in': -repl.ops=repl.ops ||[repl.op] -repl.ops.push(op) -return new AbstractExprCtx(repl,false)}}} -repl.parent.tree.pop() -var expr=new ExprCtx(repl.parent,'operand',false) -expr.tree=[op1] -expr.position=op1.position -repl.parent=expr -var new_op=new OpCtx(repl,op) -return new AbstractExprCtx(new_op,false) -case 'augm_assign': 
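// Editor's note: what the chained-comparison folding above (the 'repl.ops' branch of
// ExprCtx.transition, consumed later by OpCtx.ast) is expected to build for  1 < x <= 10 .
// A plain object stands in for the real ast.Compare instance; the field names follow the
// CPython AST that Brython mirrors.
var expected_compare = {
    type: 'Compare',
    left: {type: 'Constant', value: 1},
    ops: ['Lt', 'LtE'],                                     // repl.ops accumulates ['<', '<=']
    comparators: [{type: 'Name', id: 'x'},
                  {type: 'Constant', value: 10}]
};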
-check_assignment(C,{augmented:true}) -var parent=C -while(parent){if(parent.type=="assign" ||parent.type=="augm_assign"){raise_syntax_error(C,"augmented assignment inside assignment")}else if(parent.type=="op"){raise_syntax_error(C,"cannot assign to operator")}else if(parent.type=="list_or_tuple"){raise_syntax_error(C,`'${parent.real}' is an illegal`+ -" expression for augmented assignment")}else if(['list','tuple'].indexOf(parent.name)>-1){raise_syntax_error(C,`'${parent.name}' is an illegal`+ -" expression for augmented assignment")}else if(['dict_or_set'].indexOf(parent.name)>-1){raise_syntax_error(C,`'${parent.tree[0].real } display'`+ -" is an illegal expression for augmented assignment")} -parent=parent.parent} -if(C.expect==','){return new AbstractExprCtx( -new AugmentedAssignCtx(C,value),true)} -return transition(C.parent,token,value) -case ":": -if(C.parent.type=="sub" || -(C.parent.type=="list_or_tuple" && -C.parent.parent.type=="sub")){return new AbstractExprCtx(new SliceCtx(C.parent),false)}else if(C.parent.type=="slice"){return transition(C.parent,token,value)}else if(C.parent.type=="node"){ -if(C.tree.length==1){var child=C.tree[0] -check_assignment(child) -if(["id","sub","attribute"].indexOf(child.type)>-1){return new AbstractExprCtx(new AnnotationCtx(C),false)}else if(child.real=="tuple" && child.expect=="," && -child.tree.length==1){return new AbstractExprCtx(new AnnotationCtx(child.tree[0]),false)}} -var type=C.tree[0].real -raise_syntax_error_known_range(C,C.position,last_position(C),`only single target (not ${type}) can be annotated`)} -break -case '=': -var frs=parent_match(C,{type:'fstring_replacement_field'}) -if(frs){return transition(frs,token,value)} -var call_arg=parent_match(C,{type:'call_arg'}) -try{check_assignment(C)}catch(err){if(call_arg){var ctx=C -while(ctx.parent !==call_arg){ctx=ctx.parent} -raise_syntax_error_known_range(ctx,ctx.position,$token.value,'expression cannot contain assignment, perhaps you meant "=="?')}else{throw err}} -var annotation -if(C.expect==','){if(C.parent.type=="call_arg"){ -if(C.tree[0].type !="id"){raise_syntax_error_known_range(C,C.position,$token.value,'expression cannot contain assignment, perhaps you meant "=="?')} -return new AbstractExprCtx(new KwArgCtx(C),true)}else if(annotation=parent_match(C,{type:"annotation"})){return transition(annotation,token,value)}else if(C.parent.type=="op"){ -raise_syntax_error(C,"cannot assign to operator")}else if(C.parent.type=="not"){ -raise_syntax_error(C,"cannot assign to operator")}else if(C.parent.type=="with"){raise_syntax_error(C,"expected :")}else if(C.parent.type=='dict_or_set'){if(C.parent.expect==','){ -C.wrong_assignment=true -return transition(C,':=')}}else if(C.parent.type=="list_or_tuple"){ -for(var i=0;i < C.parent.tree.length;i++){var item=C.parent.tree[i] -try{check_assignment(item,{once:true})}catch(err){console.log(C) -raise_syntax_error(C,"invalid syntax. "+ -"Maybe you meant '==' or ':=' instead of '='?")} -if(item.type=="expr" && item.name=="operand"){raise_syntax_error(C,"cannot assign to operator")}} -if(C.parent.real=='list' || -(C.parent.real=='tuple' && -! C.parent.implicit)){raise_syntax_error(C,"invalid syntax. 
"+ -"Maybe you meant '==' or ':=' instead of '='?")}}else if(C.parent.type=="expr" && -C.parent.name=="iterator"){raise_syntax_error(C,'expected :')}else if(C.parent.type=="lambda"){if(C.parent.parent.parent.type !="node"){raise_syntax_error(C,'expression cannot contain'+ -' assignment, perhaps you meant "=="?')}}else if(C.parent.type=='target_list'){raise_syntax_error(C,"(assign to target in iteration)")} -while(C.parent !==undefined){C=C.parent -if(C.type=="condition"){raise_syntax_error(C,"invalid syntax. Maybe you"+ -" meant '==' or ':=' instead of '='?")}else if(C.type=="augm_assign"){raise_syntax_error(C,"(assignment inside augmented assignment)")}} -C=C.tree[0] -return new AbstractExprCtx(new AssignCtx(C),true)} -break -case ':=': -var ptype=C.parent.type -if(["node","assign","kwarg","annotation"]. -indexOf(ptype)>-1){raise_syntax_error(C,'(:= invalid, parent '+ptype+')')}else if(ptype=="func_arg_id" && -C.parent.tree.length > 0){ -raise_syntax_error(C,'(:= invalid, parent '+ptype+')')}else if(ptype=="call_arg" && -C.parent.parent.type=="call" && -C.parent.parent.parent.type=="lambda"){ -raise_syntax_error(C,'(:= invalid inside function arguments)' )} -if(C.tree.length==1 && C.tree[0].type=="id"){var scope=get_scope(C),name=C.tree[0].value -if(['None','True','False'].indexOf(name)>-1){raise_syntax_error(C,`cannot use assignment expressions with ${name}`)}else if(name=='__debug__'){raise_syntax_error(C,'cannot assign to __debug__')} -while(scope.comprehension){scope=scope.parent_block} -return new AbstractExprCtx(new NamedExprCtx(C),false)} -raise_syntax_error(C) -break -case 'if': -var in_comp=false,ctx=C.parent -while(ctx){if(ctx.comprehension){in_comp=true -break}else if(ctx.type=="list_or_tuple"){ -break}else if(ctx.type=='comp_for'){break}else if(ctx.type=='comp_if'){ -in_comp=true -break}else if(ctx.type=='call_arg' ||ctx.type=='sub'){ -break}else if(ctx.type=='expr'){if(ctx.parent.type=='comp_iterable'){ -in_comp=true -break}} -ctx=ctx.parent} -if(in_comp){break} -ctx=C -while(ctx.parent && -(ctx.parent.type=='op' || -ctx.parent.type=='not' || -ctx.parent.type=='unary' || -(ctx.parent.type=="expr" && ctx.parent.name=="operand"))){ctx=ctx.parent} -return new AbstractExprCtx(new TernaryCtx(ctx),false) -case 'JoinedStr': -if(C.tree.length==1 && C.tree[0]instanceof FStringCtx){return C.tree[0]}else{raise_syntax_error_known_range(C,this.position,$token.value,'invalid syntax. Perhaps you forgot a comma?')} -break -case 'str': -if(C.tree.length==1 && C.tree[0]instanceof FStringCtx){new StringCtx(C.tree[0],value) -return C}else{raise_syntax_error_known_range(C,this.position,$token.value,'invalid syntax. Perhaps you forgot a comma?')} -break -case 'eol': -if(C.tree.length==2 && -C.tree[0].type=="id" && -["print","exec"].indexOf(C.tree[0].value)>-1){var func=C.tree[0].value -raise_syntax_error_known_range(C,C.position,$token.value,"Missing parentheses in call "+ -`to '${func}'. 
Did you mean ${func}(...)?`)} -if(["dict_or_set","list_or_tuple","str"].indexOf(C.parent.type)==-1){var t=C.tree[0] -if(t.type=="starred"){$token.value=t.position -if(parent_match(C,{type:'del'})){raise_syntax_error(C,'cannot delete starred')} -if(['assign','augm_assign','node'].indexOf(C.parent.type)>-1){raise_syntax_error_known_range(C,t.position,last_position(t),"can't use starred expression here")} -raise_syntax_error_known_range(C,t.position,last_position(t),"invalid syntax")}else if(t.type=="call" && t.func.type=="starred"){$token.value=t.func.position -raise_syntax_error(C,"can't use starred expression here")}}} -return transition(C.parent,token)} -var ExprNot=$B.parser.ExprNot=function(C){ -this.type='expr_not' -this.parent=C -this.tree=[] -C.tree[C.tree.length]=this} -ExprNot.prototype.transition=function(token){var C=this -if(token=='in'){ -C.parent.tree.pop() -var op1=C.parent -while(op1.type !=='expr'){op1=op1.parent} -return op1.transition('op','not_in')} -raise_syntax_error(C)} -var ForExpr=$B.parser.ForExpr=function(C){ -if(C.node && C.node.parent.is_comp){ -C.node.parent.first_for=this} -this.type='for' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree.push(this) -this.scope=get_scope(this) -this.module=this.scope.module} -ForExpr.prototype.ast=function(){ -var target=this.tree[0].ast(),iter=this.tree[1].ast(),orelse=this.orelse ? this.orelse.ast():[],type_comment,body=ast_body(this.parent) -set_ctx_to_store(target) -var klass=this.async ? ast.AsyncFor :ast.For -var ast_obj=new klass(target,iter,body,orelse,type_comment) -set_position(ast_obj,this.async ? this.async.position :this.position,last_position(this)) -return ast_obj} -ForExpr.prototype.transition=function(token,value){var C=this -switch(token){case 'in': -if(C.tree[0].tree.length==0){ -raise_syntax_error(C,"(missing target between 'for' and 'in')")} -check_assignment(C.tree[0]) -return new AbstractExprCtx( -new ExprCtx(C,'iterator',true),false) -case ':': -check_assignment(C.tree[0]) -if(C.tree.length < 2 || -C.tree[1].tree[0].type=="abstract_expr"){raise_syntax_error(C)} -return BodyCtx(C)} -if(this.parent.comprehension){switch(token){case ']': -if(this.parent.type=='listcomp'){return transition(this.parent,token,value)} -break -case ')': -if(this.parent.type=='genexpr'){return transition(this.parent,token,value)} +function get_docstring(node){var doc_string=_b_.None +if(node.body.length > 0){var firstchild=node.body[0] +if(firstchild instanceof $B.ast.Constant && +typeof firstchild.value=='string'){doc_string=firstchild.value}} +return doc_string} +var s_escaped='abfnrtvxuU"0123456789'+"'"+'\\',is_escaped={} +for(var i=0;i < s_escaped.length;i++){is_escaped[s_escaped.charAt(i)]=true} +function SurrogatePair(value){ +value=value-0x10000 +return String.fromCharCode(0xD800 |(value >> 10))+ +String.fromCharCode(0xDC00 |(value & 0x3FF))} +function test_escape(text,antislash_pos){ +var seq_end,mo +mo=/^[0-7]{1,3}/.exec(text.substr(antislash_pos+1)) +if(mo){return[String.fromCharCode(parseInt(mo[0],8)),1+mo[0].length]} +switch(text[antislash_pos+1]){case "x": +mo=/^[0-9A-F]{0,2}/i.exec(text.substr(antislash_pos+2)) +if(mo[0].length !=2){seq_end=antislash_pos+mo[0].length+1 +$token.value.start[1]=seq_end +throw Error( +"(unicode error) 'unicodeescape' codec can't decode "+ +`bytes in position ${antislash_pos}-${seq_end}: truncated `+ +"\\xXX escape")}else{return[String.fromCharCode(parseInt(mo[0],16)),2+mo[0].length]} break -case '}': -if(this.parent.type=='dictcomp' || 
-this.parent.type=='setcomp'){return transition(this.parent,token,value)} +case "u": +mo=/^[0-9A-F]{0,4}/i.exec(text.substr(antislash_pos+2)) +if(mo[0].length !=4){seq_end=antislash_pos+mo[0].length+1 +$token.value.start[1]=seq_end +throw Error( +"(unicode error) 'unicodeescape' codec can't decode "+ +`bytes in position ${antislash_pos}-${seq_end}: truncated `+ +"\\uXXXX escape")}else{return[String.fromCharCode(parseInt(mo[0],16)),2+mo[0].length]} break -case 'for': -return new TargetListCtx(new ForExpr(this.parent)) -case 'if': -var if_ctx=new ConditionCtx(this.parent,'if') -if_ctx.in_comp=this.parent -return new AbstractExprCtx(if_ctx,false)}} -if(token=='eol'){$token.value=last_position(C) -if(C.tree.length==2){raise_syntax_error(C,"expected ':'")}} -raise_syntax_error(C)} -var FromCtx=$B.parser.FromCtx=function(C){ -this.type='from' -this.parent=C -this.module='' -this.names=[] -this.names_position=[] -this.position=$token.value -C.tree[C.tree.length]=this -this.expect='module' -this.scope=get_scope(this)} -FromCtx.prototype.ast=function(){ -var module=this.module,level=0,alias -while(module.length > 0 && module.startsWith('.')){level++ -module=module.substr(1)} -var res={module:module ||undefined,names:[],level} -for(var i=0,len=this.names.length;i < len;i++){var name=this.names[i],position=this.names_position[i] -if(Array.isArray(name)){alias=new ast.alias(name[0],name[1])}else{alias=new ast.alias(name)} -set_position(alias,position) -res.names.push(alias)} -var ast_obj=new ast.ImportFrom(res.module,res.names,res.level) -set_position(ast_obj,this.position) -return ast_obj} -FromCtx.prototype.add_name=function(name){this.names.push(name) -this.names_position.push($token.value) -if(name=='*'){this.scope.blurred=true} -this.end_position=$token.value} -FromCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'id': -if(C.expect=='module'){C.module+=value -return C}else if(C.expect=='id'){C.add_name(value) -C.expect=',' -return C}else if(C.expect=='alias'){C.names[C.names.length-1]= -[$B.last(C.names),value] -C.expect=',' -return C} -break -case '.': -if(C.expect=='module'){if(token=='id'){C.module+=value} -else{C.module+='.'} -return C} -break -case 'ellipsis': -if(C.expect=='module'){C.module+='...' 
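// Editor's note: a standalone check of the surrogate-pair arithmetic used by the
// SurrogatePair() helper added above (the body is copied from the patch; the sample
// code points are illustrative). Code points above 0xFFFF are split into a high and a
// low surrogate so they fit into a JavaScript string.
function surrogatePair(value){
    value = value - 0x10000;
    return String.fromCharCode(0xD800 | (value >> 10)) +
           String.fromCharCode(0xDC00 | (value & 0x3FF));
}
console.log(surrogatePair(0x1F600) === '\uD83D\uDE00');     // true: U+1F600 '😀'
console.log(surrogatePair(0x10348) === '\uD800\uDF48');     // true: U+10348, GOTHIC LETTER HWAIR
// test_escape() above decodes backslash escapes with the same regular expressions:
// '\101' -> ['A', 4] via parseInt('101', 8), '\x41' -> ['A', 4] via parseInt('41', 16),
// and a truncated escape such as '\x4' raises the "truncated \xXX escape" error.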
-return C} -break -case 'import': -if(C.names.length > 0){ -raise_syntax_error(C,"only one 'import' allowed after 'from'")} -if(C.expect=='module'){C.expect='id' -return C} -break -case 'op': -if(value=='*' && C.expect=='id' && -C.names.length==0){if(get_scope(C).ntype !=='module'){raise_syntax_error(C,"import * only allowed at module level")} -C.add_name('*') -C.expect='eol' -return C}else{raise_syntax_error(C)} -break -case ',': -if(C.expect==','){C.expect='id' -return C} -break -case 'eol': -switch(C.expect){case ',': -case 'eol': -return transition(C.parent,token) -case 'id': -raise_syntax_error(C,'trailing comma not allowed without '+ -'surrounding parentheses') -break -default: -raise_syntax_error(C)} -break -case 'as': -if(C.expect==',' ||C.expect=='eol'){C.expect='alias' -return C} -break -case '(': -if(C.expect=='id'){C.expect='id' -return C} -break -case ')': -if(C.expect==',' ||C.expect=='id'){C.expect='eol' -return C}} -raise_syntax_error(C)} -function escape_quotes(s,quotes){if(quotes.length==1){return quotes+s+quotes}else{var quote=quotes[0] -return quote+s.replace(new RegExp(quote,'g'),'\\'+quote)+quote}} -var FStringCtx=$B.parser.FStringCtx=function(C,start){ -for(var i=0;i < start.length;i++){if(start[i]=='"' ||start[i]=="'"){this.prefix=start.substr(0,i) -this.quotes=start.substr(i) -break}} -this.type='fstring' -this.parent=C -this.tree=[] -this.position=$token.value -this.scope=get_scope(C) -C.tree.push(this) -this.raw=this.prefix.toLowerCase().indexOf('r')>-1} -FStringCtx.prototype.transition=function(token,value){var C=this -if(token=='middle'){new StringCtx(C,escape_quotes(value,this.quotes)) -return C}else if(token=='{'){return new AbstractExprCtx(new FStringReplacementFieldCtx(C),false)}else if(token=='end'){return C.parent} -raise_syntax_error(C)} -FStringCtx.prototype.ast=function(){var res={type:'JoinedStr',values:[]} -var state -for(var item of this.tree){if(item instanceof StringCtx){if(state=='string'){ -$B.last(res.values).value+=item.value}else{let item_ast=new ast.Constant(item.value) -set_position(item_ast,item.position) -res.values.push(item_ast)} -state='string'}else{let item_ast=item.ast() -set_position(item_ast,item.position) -res.values.push(item_ast) -state='formatted_value'}} -var ast_obj=new ast.JoinedStr(res.values) -set_position(ast_obj,this.position) -return ast_obj} -var FStringReplacementFieldCtx= -$B.parser.FStringReplacementFieldCtx=function(C){this.type='fstring_replacement_field' -this.tree=[] -this.parent=C -this.position=$token.value -C.tree.push(this)} -FStringReplacementFieldCtx.prototype.transition=function(token,value){var C=this -if(token=='='){if(C.equal_sign_pos){raise_syntax_error(C)} -var expr_text=C.position.line.substring( -C.position.start[1]+1,$token.value.start[1]) -var quotes=C.parent.quotes -C.formula=new StringCtx(C.parent,escape_quotes(expr_text+'=',quotes)) -var s=C.parent.tree.pop() -C.parent.tree.splice(C.parent.tree.length-1,0,s) -C.equal_sign_pos=$token.value.start -return C}else if(C.equal_sign_pos){ -if(! 
C.insert_whitespace){var nb_ws=$token.value.start[1]-C.equal_sign_pos[1] -if(nb_ws > 1){C.formula.value+=' '.repeat(nb_ws-1)} -C.insert_whitespace=true}} -if(token=='op' && value=='!'){C.expect='id' -return C}else if(token==':'){return new FStringFormatSpecCtx(C)}else if(token=='}'){if(C.tree.length==1 && -C.tree[0]instanceof AbstractExprCtx){raise_syntax_error(C,"f-string: valid expression required before '}'")} -return C.parent}else if(token=='id' && this.expect=='id'){if('sra'.indexOf(value)>-1){C.conversion=value -delete this.expect -return C} -raise_syntax_error(C,`unknown conversion type ${value}`)} -raise_syntax_error(C)} -FStringReplacementFieldCtx.prototype.ast=function(){var value=this.tree[0].ast(),format=this.tree[1] -var conv_num={a:97,r:114,s:115},conversion=conv_num[this.conversion]||-1 -if(format !==undefined){format=format.ast()} -var res=new ast.FormattedValue( -value,conversion,format) -set_position(res,this.position) -return res} -var FStringFormatSpecCtx= -$B.parser.FStringFormatSpecCtx=function(C){this.type='fstring_format_spec' -this.tree=[] -this.parent=C -this.position=$token.value -C.tree.push(this)} -FStringFormatSpecCtx.prototype.transition=function(token,value){var C=this -if(token=='middle'){var quotes=this.parent.parent.quotes -new StringCtx(C,escape_quotes(value,quotes)) -return C}else if(token=='{'){return new AbstractExprCtx(new FStringReplacementFieldCtx(C),false)}else if(token=='}'){return transition(C.parent,token,value)} -raise_syntax_error(C)} -FStringFormatSpecCtx.prototype.ast=function(){if(this.tree.length==1){return this.tree[0].ast()}else{return FStringCtx.prototype.ast.call(this)}} -var FuncArgs=$B.parser.FuncArgs=function(C){ -this.type='func_args' -this.parent=C -this.tree=[] -this.names=[] -C.tree[C.tree.length]=this -this.expect='id' -this.has_default=false -this.has_star_arg=false -this.has_kw_arg=false} -FuncArgs.prototype.ast=function(){var args={posonlyargs:[],args:[],kwonlyargs:[],kw_defaults:[],defaults:[]},state='arg',default_value -for(var arg of this.tree){if(arg.type=='end_positional'){args.posonlyargs=args.args -args.args=[]}else if(arg.type=='func_star_arg'){state='kwonly' -if(arg.op=='*' && arg.name !='*'){args.vararg=new ast.arg(arg.name) -if(arg.annotation){args.vararg.annotation=arg.annotation.tree[0].ast()} -set_position(args.vararg,arg.position)}else if(arg.op=='**'){args.kwarg=new ast.arg(arg.name) -if(arg.annotation){args.kwarg.annotation=arg.annotation.tree[0].ast()} -set_position(args.kwarg,arg.position)}}else{default_value=false -if(arg.has_default){default_value=arg.tree[0].ast()} -var argument=new ast.arg(arg.name) -set_position(argument,arg.position,last_position(arg)) -if(arg.annotation){argument.annotation=arg.annotation.tree[0].ast()} -if(state=='kwonly'){args.kwonlyargs.push(argument) -if(default_value){args.kw_defaults.push(default_value)}else{args.kw_defaults.push(_b_.None)}}else{args.args.push(argument) -if(default_value){args.defaults.push(default_value)}}}} -var res=new ast.arguments(args.posonlyargs,args.args,args.vararg,args.kwonlyargs,args.kw_defaults,args.kwarg,args.defaults) -return res} -FuncArgs.prototype.transition=function(token,value){var C=this -function check(){if(C.tree.length==0){return} -var last=$B.last(C.tree) -if(C.has_default && ! 
last.has_default){if(last.type=='func_star_arg' || -last.type=='end_positional'){return} -if(C.has_star_arg){ -return} -raise_syntax_error(C,'non-default argument follows default argument')} -if(last.has_default){C.has_default=true}} -function check_last(){var last=$B.last(C.tree) -if(last && last.type=="func_star_arg"){if(last.name=="*"){ -raise_syntax_error(C,'named arguments must follow bare *')}}} -switch(token){case 'id': -if(C.has_kw_arg){raise_syntax_error(C,'arguments cannot follow var-keyword argument')} -if(C.expect=='id'){C.expect=',' -if(C.names.indexOf(value)>-1){raise_syntax_error(C,'duplicate argument '+value+ -' in function definition')}} -return new FuncArgIdCtx(C,value) -case ',': -if(C.expect==','){check() -C.expect='id' -return C} -raise_syntax_error(C) -break -case ')': -check() -check_last() -return transition(C.parent,token,value) -case 'op': -if(C.has_kw_arg){raise_syntax_error(C,"arguments cannot follow var-keyword argument")} -var op=value -C.expect=',' -if(op=='*'){if(C.has_star_arg){raise_syntax_error(C,"* argument may appear only once")} -return new FuncStarArgCtx(C,'*')}else if(op=='**'){return new FuncStarArgCtx(C,'**')}else if(op=='/'){ -if(C.tree.length==0){raise_syntax_error(C,'at least one argument must precede /')}else if(C.has_end_positional){raise_syntax_error(C,'/ may appear only once')}else if(C.has_star_arg){raise_syntax_error(C,'/ must be ahead of *')} -return new EndOfPositionalCtx(C)} -raise_syntax_error(C) -break -case ':': -if(C.parent.type=="lambda"){return transition(C.parent,token)} -case '(': -let type_name=C.parent.type=='def' ? 'Function' :'Lambda expression' -raise_syntax_error(C,`${type_name} parameters cannot be parenthesized`)} -raise_syntax_error(C)} -var FuncArgIdCtx=$B.parser.FuncArgIdCtx=function(C,name){ -this.type='func_arg_id' -if(["None","True","False"].indexOf(name)>-1){raise_syntax_error(C)} -if(name=='__debug__'){raise_syntax_error(C,'cannot assign to __debug__')} -this.name=name -this.parent=C -this.position=$token.value -if(C.has_star_arg){C.parent.after_star.push(name)}else{C.parent.positional_list.push(name)} -this.tree=[] -C.tree[C.tree.length]=this -this.expect='='} -FuncArgIdCtx.prototype.transition=function(token){var C=this -switch(token){case '=': -if(C.expect=='='){C.has_default=true -var def_ctx=C.parent.parent -if(C.parent.has_star_arg){def_ctx.default_list.push(def_ctx.after_star.pop())}else{def_ctx.default_list.push(def_ctx.positional_list.pop())} -return new AbstractExprCtx(C,false)} -break -case ',': -case ')': -if(C.parent.has_default && C.tree.length==0 && -C.parent.has_star_arg===undefined){raise_syntax_error(C,'non-default argument follows default argument')}else{return transition(C.parent,token)} -break -case ':': -if(C.parent.parent.type=="lambda"){ -return transition(C.parent.parent,":")} -if(C.has_default){ -raise_syntax_error(C)} -return new AbstractExprCtx(new AnnotationCtx(C),false)} -raise_syntax_error(C)} -var FuncStarArgCtx=$B.parser.FuncStarArgCtx=function(C,op){ -this.type='func_star_arg' -this.op=op -this.parent=C -this.node=get_node(this) -this.position=$token.value -C.has_star_arg=op=='*' -C.has_kw_arg=op=='**' -C.tree[C.tree.length]=this} -FuncStarArgCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'id': -if(C.name===undefined){if(C.parent.names.indexOf(value)>-1){raise_syntax_error(C,'duplicate argument '+value+ -' in function definition')}} -if(["None","True","False"].indexOf(value)>-1){raise_syntax_error(C)} -C.set_name(value) -C.parent.names.push(value) 
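// Editor's note: the argument buckets that FuncArgs.ast() above is expected to fill for
//     def f(a, b=1, /, c=2, *args, d, e=3, **kw): ...
// Shown as a plain object; the real code builds an ast.arguments instance and uses
// _b_.None where null appears below.
var expected_arguments = {
    posonlyargs: ['a', 'b'],      // the 'end_positional' marker ('/') moves args into posonlyargs
    args:        ['c'],
    vararg:      'args',          // a bare '*' would leave vararg undefined
    kwonlyargs:  ['d', 'e'],      // parameters after '*' are keyword-only
    kw_defaults: [null, 3],       // one slot per keyword-only parameter
    kwarg:       'kw',
    defaults:    [1, 2]           // defaults for positional parameters, in order
};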
-return C -case ',': -case ')': -if(C.name===undefined){ -C.set_name('*') -C.parent.names.push('*')} -return transition(C.parent,token) -case ':': -if(C.parent.parent.type=="lambda"){ -if(C.name===undefined){raise_syntax_error(C,'named arguments must follow bare *')} -return transition(C.parent.parent,":")} -if(C.name===undefined){raise_syntax_error(C,'(annotation on an unnamed parameter)')} -return new AbstractExprCtx( -new AnnotationCtx(C),false) -case '=': -if(C.op=='*'){raise_syntax_error(C,'var-positional argument cannot have default value')} -raise_syntax_error(C,'var-keyword argument cannot have default value')} -raise_syntax_error(C)} -FuncStarArgCtx.prototype.set_name=function(name){if(name=='__debug__'){raise_syntax_error_known_range(this,this.position,$token.value,'cannot assign to __debug__')} -this.name=name -var ctx=this.parent -while(ctx.parent !==undefined){if(ctx.type=='def'){break} -ctx=ctx.parent} -if(this.op=='*'){ctx.other_args='"'+name+'"'}else{ctx.other_kw='"'+name+'"'}} -var GeneratorExpCtx=function(C){ -this.type='genexpr' -this.tree=[C.tree[0]] -this.tree[0].parent=this -this.position=C.position -Comprehension.make_comp(this,C)} -GeneratorExpCtx.prototype.ast=function(){ -var res=new ast.GeneratorExp( -this.tree[0].ast(),Comprehension.generators(this.tree.slice(1)) -) -set_position(res,this.position) -return res} -GeneratorExpCtx.prototype.transition=function(token){var C=this -if(token==')'){if(this.parent.type=='call'){ -if(C.parent.tree.length > 1){raise_syntax_error_known_range(C,first_position(C),last_position(C),'Generator expression must be parenthesized')} -return this.parent.parent} -return this.parent} -raise_syntax_error(C)} -var GlobalCtx=$B.parser.GlobalCtx=function(C){ -this.type='global' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree[C.tree.length]=this -this.expect='id' -this.scope=get_scope(this) -this.module=get_module(this) -if(this.module.module !==''){ -while(this.module.module !=this.module.id){this.module=this.module.parent_block}}} -GlobalCtx.prototype.ast=function(){ -var ast_obj=new ast.Global(this.tree.map(item=> item.value)) -set_position(ast_obj,this.position) -return ast_obj} -GlobalCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'id': -if(C.expect=='id'){new IdCtx(C,value) -C.add(value) -C.expect=',' -return C} -break -case ',': -if(C.expect==','){C.expect='id' -return C} -break -case 'eol': -if(C.expect==','){return transition(C.parent,token)} -break} -raise_syntax_error(C)} -GlobalCtx.prototype.add=function(name){if(this.scope.type=="module"){ -return} -var mod=this.scope.parent_block -if(this.module.module.startsWith("$exec")){while(mod && mod.parent_block !==this.module){ -mod._globals=mod._globals ||new Map() -mod._globals.set(name,this.module.id) -mod=mod.parent_block}}} -var IdCtx=$B.parser.IdCtx=function(C,value){ -this.type='id' -this.value=value -this.parent=C -this.tree=[] -C.tree[C.tree.length]=this -this.position=$token.value -var scope=this.scope=get_scope(this) -this.blurred_scope=this.scope.blurred -if(["def","generator"].indexOf(scope.ntype)>-1){if((!(C instanceof GlobalCtx))&& -!(C instanceof NonlocalCtx)){scope.referenced=scope.referenced ||{} -if(! $B.builtins[this.value]){scope.referenced[this.value]=true}}} -if(C.parent.type=='call_arg'){this.call_arg=true}} -IdCtx.prototype.ast=function(){var ast_obj -if(['True','False','None'].indexOf(this.value)>-1){ast_obj=new ast.Constant(_b_[this.value])}else{ast_obj=new ast.Name(this.value,this.bound ? 
new ast.Store():new ast.Load())} -set_position(ast_obj,this.position) -return ast_obj} -IdCtx.prototype.transition=function(token,value){var C=this,module=get_module(this) -if(C.value=='case' && C.parent.parent.type=="node"){ -let save_position=module.token_reader.position,ends_with_colon=line_ends_with_colon(module.token_reader,module.filename) -module.token_reader.position=save_position -if(ends_with_colon ||token=='id'){var node=get_node(C) -if((! node.parent)||!(node.parent.is_match)){raise_syntax_error(C,"('case' not inside 'match')")}else{if(node.parent.irrefutable){ -let name=node.parent.irrefutable,msg=name=='_' ? 'wildcard' : -`name capture '${name}'` -raise_syntax_error(C,`${msg} makes remaining patterns unreachable`)}} -return transition(new PatternCtx( -new CaseCtx(C.parent.parent)),token,value)}}else if(C.value=='match' && C.parent.parent.type=="node"){ -let save_position=module.token_reader.position,ends_with_colon=line_ends_with_colon(module.token_reader,module.filename) -module.token_reader.position=save_position -if(ends_with_colon ||token=='id'){return transition(new AbstractExprCtx( -new MatchCtx(C.parent.parent),true),token,value)}}else if(C.value=='type' && C.parent.parent.type=="node"){if(token=='id'){ -return new TypeAliasCtx(C,value)}} -switch(token){case '=': -if(C.parent.type=='expr' && -C.parent.parent !==undefined && -C.parent.parent.type=='call_arg'){return new AbstractExprCtx( -new KwArgCtx(C.parent),false)} -return transition(C.parent,token,value) -case '.': -delete this.bound -return transition(C.parent,token,value) -case 'op': -return transition(C.parent,token,value) -case 'id': -case 'str': -case 'JoinedStr': -case 'int': -case 'float': -case 'imaginary': -var msg='invalid syntax' -if(["print","exec"].indexOf(C.value)>-1 ){var f=C.value -msg=`Missing parentheses in call to '${f}'.`+ -` Did you mean ${f}(...)?`}else if(C.parent.parent &&(['list_or_tuple','dict'].indexOf(C.parent.parent.type)>-1)){msg='invalid syntax. Perhaps you forgot a comma?'} -raise_syntax_error_known_range(C,this.position,$token.value,msg)} -if(this.parent.parent.type=="starred"){if(['.','[','('].indexOf(token)==-1){return this.parent.parent.transition(token,value)}} -return transition(C.parent,token,value)} -var ImportCtx=$B.parser.ImportCtx=function(C){ -this.type='import' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree[C.tree.length]=this -this.expect='id'} -ImportCtx.prototype.ast=function(){ -var names=[] -for(var item of this.tree){ -var alias=new ast.alias(item.name) -if(item.alias !=item.name){alias.asname=item.alias} -names.push(alias)} -var ast_obj=new ast.Import(names) -set_position(ast_obj,this.position) -return ast_obj} -ImportCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'id': -if(C.expect=='id'){if(C.order_error){raise_syntax_error(C,"Did you mean to use 'from ... import ...' 
instead?")} -new ImportedModuleCtx(C,value) -C.expect=',' -return C} -if(C.expect=='qual'){C.expect=',' -C.tree[C.tree.length-1].name+= -'.'+value -C.tree[C.tree.length-1].alias+= -'.'+value -return C} -if(C.expect=='alias'){C.expect=',' -C.tree[C.tree.length-1].alias= -value -return C} -break -case '.': -if(C.expect==','){C.expect='qual' -return C} -break -case ',': -if(C.expect==','){C.expect='id' -return C} -break -case 'as': -if(C.expect==','){C.expect='alias' -return C} -break -case 'eol': -if(C.expect==','){return transition(C.parent,token)} -break -case 'from': -if(C.expect==','){C.expect='id' -C.order_error=true -return C} -break} -raise_syntax_error(C)} -var ImportedModuleCtx=$B.parser.ImportedModuleCtx=function(C,name){this.type='imported module' -this.parent=C -this.name=name -this.alias=name -C.tree[C.tree.length]=this} -var JoinedStrCtx=$B.parser.JoinedStrCtx=function(C,values){ -this.type='JoinedStr' -this.parent=C -this.tree=[] -this.position=$token.value -this.scope=get_scope(C) -var line_num=get_node(C).line_num -for(var value of values){if(typeof value=="string"){new StringCtx(this,"'"+ -value.replace(new RegExp("'","g"),"\\"+"'")+"'")}else{if(value.format !==undefined){value.format=new JoinedStrCtx(this,value.format) -this.tree.pop()} -var src=value.expression.trimStart(), -filename=get_module(this).filename,root=create_root_node(src,this.scope.module,this.scope.id,this.scope.parent_block,line_num) -try{dispatch_tokens(root)}catch(err){var fstring_lineno=this.position.start[0],fstring_offset=this.position.start[1] -err.filename=get_module(this).filename -err.lineno+=fstring_lineno-1 -err.offset+=fstring_offset-1 -err.end_lineno+=fstring_lineno-1 -err.end_offset+=fstring_offset-1 -err.text=this.position.string -err.args[1]=$B.fast_tuple([filename,err.lineno,err.offset,err.text,err.end_lineno,err.end_offset]) -throw err} -var expr=root.children[0].C.tree[0] -this.tree.push(expr) -expr.parent=this -expr.elt=value}} -C.tree.push(this) -this.raw=false} -JoinedStrCtx.prototype.ast=function(){var res={type:'JoinedStr',values:[]} -var state -for(var item of this.tree){if(item instanceof StringCtx){if(state=='string'){ -$B.last(res.values).value+=item.value}else{var item_ast=new ast.Constant(item.value) -set_position(item_ast,item.position) -res.values.push(item_ast)} -state='string'}else{var conv_num={a:97,r:114,s:115},format=item.elt.format -format=format===undefined ? 
format :format.ast() -var value=new ast.FormattedValue( -item.ast(),conv_num[item.elt.conversion]||-1,format) -set_position(value,this.position) -format=item.format -if(format !==undefined){value.format=item.format.ast()} -res.values.push(value) -state='formatted_value'}} -var ast_obj=new ast.JoinedStr(res.values) -set_position(ast_obj,this.position) -return ast_obj} -JoinedStrCtx.prototype.transition=function(token,value){var C=this -switch(token){case '[': -return new AbstractExprCtx(new SubscripCtx(C.parent),false) -case '(': -C.parent.tree[0]=C -return new CallCtx(C.parent) -case 'str': -if(C.tree.length > 0 && -$B.last(C.tree).type=="str"){C.tree[C.tree.length-1].add_value(value)}else{new StringCtx(this,value)} -return C -case 'JoinedStr': -var joined_expr=new JoinedStrCtx(C.parent,value) -C.parent.tree.pop() -if(C.tree.length > 0 && -$B.last(C.tree)instanceof StringCtx && -joined_expr.tree[0]instanceof StringCtx){ -$B.last(C.tree).value+=joined_expr.tree[0].value -C.tree=C.tree.concat(joined_expr.tree.slice(1))}else{C.tree=C.tree.concat(joined_expr.tree)} -return C} -return transition(C.parent,token,value)} -var KwdCtx=$B.parser.KwdCtx=function(C){ -this.type='kwd' -this.position=C.position -this.parent=C -this.tree=[] -C.tree.push(this)} -KwdCtx.prototype.ast=function(){var ast_obj=new $B.ast.keyword(this.tree[0].ast(),new ast.Load()) -set_position(ast_obj,this.position) -return ast_obj} -KwdCtx.prototype.transition=function(token,value){var C=this -return transition(C.parent,token,value)} -var KwArgCtx=$B.parser.KwArgCtx=function(C){ -this.type='kwarg' -this.parent=C.parent -this.position=first_position(C) -this.equal_sign_position=$token.value -this.tree=[C.tree[0]] -C.parent.tree.pop() -C.parent.tree.push(this) -if(['None','True','False','__debug__'].indexOf(C.tree[0].value)>-1){raise_syntax_error(C,'cannot assign to '+C.tree[0].value)} -C.parent.parent.has_kw=true} -KwArgCtx.prototype.transition=function(token){var C=this -if(token==','){return new CallArgCtx(C.parent.parent)}else if(token=='for'){ -raise_syntax_error_known_range(C,C.position,C.equal_sign_position,"invalid syntax. 
"+ -"Maybe you meant '==' or ':=' instead of '='?")} -return transition(C.parent,token)} -var LambdaCtx=$B.parser.LambdaCtx=function(C){ -this.type='lambda' -this.parent=C -C.tree[C.tree.length]=this -this.tree=[] -this.position=$token.value -this.node=get_node(this) -this.positional_list=[] -this.default_list=[] -this.other_args=null -this.other_kw=null -this.after_star=[]} -LambdaCtx.prototype.ast=function(){ -var args -if(this.args.length==0){args=new ast.arguments([],[],undefined,[],[],undefined,[])}else{args=this.args[0].ast()} -var ast_obj=new ast.Lambda(args,this.tree[0].ast()) -set_position(ast_obj,this.position) -return ast_obj} -LambdaCtx.prototype.transition=function(token,value){var C=this -if(token==':' && C.args===undefined){C.args=C.tree -C.tree=[] -return new AbstractExprCtx(C,false)} -if(C.args !==undefined){ -return transition(C.parent,token)} -if(C.args===undefined){if(token=='('){raise_syntax_error(C,'Lambda expression parameters cannot be parenthesized')}else if(C.tree.length > 0 && -C.tree[0].type=='func_args'){ -raise_syntax_error(C)}else{return transition(new FuncArgs(C),token,value)}} -raise_syntax_error(C)} -var ListCompCtx=function(C){ -this.type='listcomp' -this.tree=[C.tree[0]] -this.tree[0].parent=this -this.position=$token.value -Comprehension.make_comp(this,C)} -ListCompCtx.prototype.ast=function(){ -var res=new ast.ListComp( -this.tree[0].ast(),Comprehension.generators(this.tree.slice(1))) -set_position(res,this.position) -return res} -ListCompCtx.prototype.transition=function(token){var C=this -if(token==']'){return this.parent} -raise_syntax_error(C)} -var ListOrTupleCtx=$B.parser.ListOrTupleCtx=function(C,real){ -this.type='list_or_tuple' -this.real=real -this.expect='id' -this.closed=false -this.parent=C -this.tree=[] -this.position=$token.value -C.tree[C.tree.length]=this} -ListOrTupleCtx.prototype.ast=function(){var elts=this.tree.map(x=> x.ast()),ast_obj -if(this.real=='list'){ast_obj=new ast.List(elts,new ast.Load())}else if(this.real=='tuple'){ast_obj=new ast.Tuple(elts,new ast.Load())} -set_position(ast_obj,this.position,this.end_position) -return ast_obj} -ListOrTupleCtx.prototype.transition=function(token,value){var C=this -if(C.closed){if(token=='['){return new AbstractExprCtx( -new SubscripCtx(C.parent),false)} -if(token=='('){return new CallCtx(C.parent)} -return transition(C.parent,token,value)}else{if(C.expect==','){switch(C.real){case 'tuple': -if(token==')'){if(C.implicit){return transition(C.parent,token,value)} -var close=true -C.end_position=$token.value -if(C.tree.length==1){if(C.tree[0].type=='expr' && -C.tree[0].tree[0].type=='starred'){raise_syntax_error_known_range(C,C.tree[0].tree[0].position,last_position(C.tree[0]),'cannot use starred expression here')} -var grandparent=C.parent.parent -grandparent.tree.pop() -grandparent.tree.push(C.tree[0]) -C.tree[0].$was_parenthesized=true -C.tree[0].parent=grandparent -return C.tree[0]} -if(C.packed || -(C.type=='list_or_tuple' && -C.tree.length==1 && -C.tree[0].type=='expr' && -C.tree[0].tree[0].type=='starred')){ -raise_syntax_error(C,"cannot use starred expression here")} -if(close){C.close()} -if(C.parent.type=="starred"){return C.parent.parent} -return C.parent} -break -case 'list': -if(token==']'){C.close() -if(C.parent.type=="starred"){if(C.parent.tree.length > 0){return C.parent.tree[0]}else{return C.parent.parent}} -return C.parent} -break} -switch(token){case ',': -if(C.real=='tuple'){C.has_comma=true} -C.expect='id' -return C -case 'for': -if(C.real=='list'){if(this.tree.length 
> 1){ -raise_syntax_error(C,"did you forget "+ -"parentheses around the comprehension target?")} -return new TargetListCtx(new ForExpr( -new ListCompCtx(C)))} -else{return new TargetListCtx(new ForExpr( -new GeneratorExpCtx(C)))}} -return transition(C.parent,token,value)}else if(C.expect=='id'){switch(C.real){case 'tuple': -if(token==')'){C.close() -return C.parent} -if(token=='eol' && -C.implicit===true){C.close() -return transition(C.parent,token)} -break -case 'list': -if(token==']'){C.close() -return C} -break} -switch(token){case '=': -if(C.real=='tuple' && -C.implicit===true){C.close() -C.parent.tree.pop() -var expr1=new ExprCtx(C.parent,'tuple',false) -expr1.tree=[C] -C.parent=expr1 -return transition(C.parent,token)} -raise_syntax_error(C,"(unexpected '=' inside list)") -break -case ')': -break -case ']': -if(C.real=='tuple' && -C.implicit===true){ -return transition(C.parent,token,value)}else{break} -raise_syntax_error(C,'(unexpected "if" inside list)') -break -case ',': -raise_syntax_error(C,'(unexpected comma inside list)') -break -case 'str': -case 'JoinedStr': -case 'int': -case 'float': -case 'imaginary': -case 'ellipsis': -case 'lambda': -case 'yield': -case 'id': -case '(': -case '[': -case '{': -case 'await': -case 'not': -case ':': -C.expect=',' -var expr2=new AbstractExprCtx(C,false) -return transition(expr2,token,value) -case 'op': -if('+-~*'.indexOf(value)>-1 ||value=='**'){C.expect=',' -var expr3=new AbstractExprCtx(C,false) -return transition(expr3,token,value)} -raise_syntax_error(C,`(unexpected operator: ${value})`) -break -default: -raise_syntax_error(C)}}else{return transition(C.parent,token,value)}}} -ListOrTupleCtx.prototype.close=function(){this.closed=true -this.end_position=$token.value -this.src=get_module(this).src -for(var i=0,len=this.tree.length;i < len;i++){ -var elt=this.tree[i] -if(elt.type=="expr" && -elt.tree[0].type=="list_or_tuple" && -elt.tree[0].real=="tuple" && -elt.tree[0].tree.length==1 && -elt.tree[0].expect==","){this.tree[i]=elt.tree[0].tree[0] -this.tree[i].parent=this}}} -var MatchCtx=$B.parser.MatchCtx=function(node_ctx){ -this.type="match" -this.position=$token.value -node_ctx.tree=[this] -node_ctx.node.is_match=true -this.parent=node_ctx -this.tree=[] -this.expect='as' -this.token_position=get_module(this).token_reader.position} -MatchCtx.prototype.ast=function(){ -var res=new ast.Match(this.tree[0].ast(),ast_body(this.parent)) -set_position(res,this.position) -res.$line_num=get_node(this).line_num -return res} -MatchCtx.prototype.transition=function(token){var C=this -switch(token){case ':': -if(this.tree[0].type=='list_or_tuple'){remove_abstract_expr(this.tree[0].tree)} -switch(C.expect){case 'id': -case 'as': -case ':': -return BodyCtx(C)} -break -case 'eol': -raise_syntax_error(C,"expected ':'")} -raise_syntax_error(C)} -var NamedExprCtx=function(C){ -this.type='named_expr' -this.position=C.position -this.target=C.tree[0] -C.tree.pop() -C.tree.push(this) -this.parent=C -this.target.parent=this -this.tree=[] -if(C.parent.type=='list_or_tuple' && -C.parent.real=='tuple'){ -this.parenthesized=true}} -NamedExprCtx.prototype.ast=function(){var res=new ast.NamedExpr(this.target.ast(),this.tree[0].ast()) -res.target.ctx=new ast.Store() -set_position(res,this.position) -return res} -NamedExprCtx.prototype.transition=function(token,value){return transition(this.parent,token,value)} -function get_node_ancestor(node){return node.parent -&& node.parent.C -&& node.parent.C.tree -&& node.parent.C.tree.length > 0 -&& node.parent.C.tree[0]} -var 
NodeCtx=$B.parser.NodeCtx=function(node){ -this.node=node -node.C=this -this.tree=[] -this.type='node' -var scope=null -var tree_node=node -while(tree_node.parent && tree_node.parent.type !='module'){var ntype=tree_node.parent.C.tree[0].type,_break_flag=false -switch(ntype){case 'def': -case 'class': -case 'generator': -scope=tree_node.parent -_break_flag=true} -if(_break_flag){break} -tree_node=tree_node.parent} -if(scope===null){scope=tree_node.parent ||tree_node } -this.scope=scope} -NodeCtx.prototype.transition=function(token,value){var C=this -if(this.node.parent && this.node.parent.C){var pctx=this.node.parent.C -if(pctx.tree && pctx.tree.length==1 && -pctx.tree[0].type=="match"){if(token !='eol' &&(token !=='id' ||value !=='case')){raise_syntax_error(C)}}} -if(this.tree.length==0 && this.node.parent){var rank=this.node.parent.children.indexOf(this.node) -if(rank > 0){let previous=this.node.parent.children[rank-1] -if(previous.C.tree[0].type=='try' && -['except','finally'].indexOf(token)==-1){raise_syntax_error(C,"expected 'except' or 'finally' block")}}} -switch(token){case ',': -if(C.tree && C.tree.length==0){raise_syntax_error(C)} -var first=C.tree[0] -C.tree=[] -var implicit_tuple=new ListOrTupleCtx(C) -implicit_tuple.real="tuple" -implicit_tuple.implicit=0 -implicit_tuple.tree.push(first) -first.parent=implicit_tuple -return implicit_tuple -case '[': -case '(': -case '{': -case '.': -case 'bytes': -case 'float': -case 'id': -case 'imaginary': -case 'int': -case 'str': -case 'JoinedStr': -case 'not': -case 'lambda': -if(value=='case'){let node_ancestor=get_node_ancestor(C.node) -if(node_ancestor && node_ancestor.type=='match'){return new PatternCtx(new CaseCtx(C))}} -var expr=new AbstractExprCtx(C,true) -return transition(expr,token,value) -case 'assert': -return new AbstractExprCtx( -new AssertCtx(C),false,true) -case 'async': -return new AsyncCtx(C) -case 'await': -return new AbstractExprCtx(new AwaitCtx(C),false) -case 'break': -return new BreakCtx(C) -case 'class': -return new ClassCtx(C) -case 'continue': -return new ContinueCtx(C) -case 'def': -return new DefCtx(C) -case 'del': -return new AbstractExprCtx(new DelCtx(C),true) -case 'elif': -try{var previous=get_previous(C)}catch(err){raise_syntax_error(C,"('elif' does not follow 'if')")} -if(['condition'].indexOf(previous.type)==-1 || -previous.token=='while'){raise_syntax_error(C,`(elif after ${previous.type})`)} -return new AbstractExprCtx( -new ConditionCtx(C,token),false) -case 'ellipsis': -var ell_expr=new AbstractExprCtx(C,true) -return transition(ell_expr,token,value) -case 'else': -var previous1=get_previous(C) -if(!['condition','except','for'].includes(previous1.type)){raise_syntax_error(C,`(else after ${previous.type})`)} -return new SingleKwCtx(C,token) -case 'except': -var previous2=get_previous(C) -if(!['try','except'].includes(previous2.type)){raise_syntax_error(C,`(except after ${previous.type})`)} -return new ExceptCtx(C) -case 'finally': -var previous3=get_previous(C) -if(!['try','except'].includes(previous3.type)&& -(previous3.type !='single_kw' || -previous3.token !='else')){raise_syntax_error(C,`finally after ${previous.type})`)} -return new SingleKwCtx(C,token) -case 'for': -return new TargetListCtx(new ForExpr(C)) -case 'from': -return new FromCtx(C) -case 'global': -return new GlobalCtx(C) -case 'if': -case 'while': -return new AbstractExprCtx( -new ConditionCtx(C,token),false) -case 'import': -return new ImportCtx(C) -case 'nonlocal': -return new NonlocalCtx(C) -case 'op': -switch(value){case '*': 
-var expr1=new AbstractExprCtx(C,true) -return transition(expr1,token,value) -case '+': -case '-': -case '~': -C.position=$token.value -var expr2=new ExprCtx(C,'unary',true) -return new AbstractExprCtx( -new UnaryCtx(expr2,value),false) -case '@': -return new AbstractExprCtx(new DecoratorCtx(C),false)} -break -case 'pass': -return new PassCtx(C) -case 'raise': -return new AbstractExprCtx(new RaiseCtx(C),false) -case 'return': -return new AbstractExprCtx(new ReturnCtx(C),true) -case 'try': -return new TryCtx(C) -case 'with': -return new WithCtx(C) -case 'yield': -return new AbstractExprCtx(new YieldCtx(C),true) -case 'eol': -if(C.maybe_type){if(C.tree.length > 0 && C.tree[0].type=='assign'){alert('type soft keyword')}else{raise_syntax_error(C)}} -if(C.tree.length==0){ -C.node.parent.children.pop() -return C.node.parent.C} -return C} -console.log('error, C',C,'token',token,value) -raise_syntax_error(C)} -var NonlocalCtx=$B.parser.NonlocalCtx=function(C){ -this.type='nonlocal' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree[C.tree.length]=this -this.expect='id' -this.scope=get_scope(this) -this.scope.nonlocals=this.scope.nonlocals ||new Set()} -NonlocalCtx.prototype.ast=function(){ -var ast_obj=new ast.Nonlocal(this.tree.map(item=> item.value)) -set_position(ast_obj,this.position) -return ast_obj} -NonlocalCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'id': -if(C.expect=='id'){new IdCtx(C,value) -C.expect=',' -return C} -break -case ',': -if(C.expect==','){C.expect='id' -return C} -break -case 'eol': -if(C.expect==','){return transition(C.parent,token)} -break} -raise_syntax_error(C)} -var NotCtx=$B.parser.NotCtx=function(C){ -this.type='not' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree[C.tree.length]=this} -NotCtx.prototype.ast=function(){var ast_obj=new ast.UnaryOp(new ast.Not(),this.tree[0].ast()) -set_position(ast_obj,this.position) -return ast_obj} -NotCtx.prototype.transition=function(token,value){var C=this,expr -switch(token){case 'in': -C.parent.parent.tree.pop() -return new ExprCtx(new OpCtx(C.parent,'not_in'),'op',false) -case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'JoinedStr': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -expr=new AbstractExprCtx(C,false) -return transition(expr,token,value) -case 'op': -var a=value -if('+'==a ||'-'==a ||'~'==a){expr=new AbstractExprCtx(C,false) -return transition(expr,token,value)}} -if(this.tree.length==0 ||this.tree[0]instanceof AbstractExprCtx){raise_syntax_error(C)} -return transition(C.parent,token)} -var NumberCtx=$B.parser.NumberCtx=function(type,C,value){ -this.type=type -this.value=value -this.parent=C -this.tree=[] -this.position=$token.value -C.tree[C.tree.length]=this} -NumberCtx.prototype.ast=function(){var value=$B.AST.$convert(this), -ast_obj=new $B.ast.Constant(value) -set_position(ast_obj,this.position) -return ast_obj} -NumberCtx.prototype.transition=function(token,value){var C=this -var num_type={2:'binary',8:'octal',10:'decimal',16:'hexadecimal'}[this.value[0]] -if(token=='id'){if(value=='_'){raise_syntax_error(C,'invalid decimal literal')}else if(["and","else","for","if","in","is","or"].indexOf(value)==-1){raise_syntax_error(C,`invalid ${num_type} literal`)}else if(num_type=='hexadecimal' && this.value[1].length % 2==1){$B.warn(_b_.SyntaxWarning,`invalid hexadecimal literal`,get_module(C).filename,$token.value)}}else if(token=='op'){if(["and","in","is","or"].indexOf(value)>-1 && 
-num_type=='hexadecimal' && -this.value[1].length % 2==1){$B.warn(_b_.SyntaxWarning,`invalid hexadecimal literal`,get_module(C).filename,$token.value)}} -return transition(C.parent,token,value)} -var OpCtx=$B.parser.OpCtx=function(C,op){ -this.type='op' -this.op=op -this.parent=C.parent -this.position=$token.value -this.tree=[C] -this.scope=get_scope(this) -if(C.type=="expr"){if(['int','float','str'].indexOf(C.tree[0].type)>-1){this.left_type=C.tree[0].type}} -C.parent.tree.pop() -C.parent.tree.push(this)} -OpCtx.prototype.ast=function(){ -var ast_type_class=op2ast_class[this.op],op_type=ast_type_class[0],ast_class=ast_type_class[1],ast_obj -if(op_type===ast.Compare){var left=this.tree[0].ast(),ops=[new ast_class()] -if(this.ops){for(var op of this.ops.slice(1)){ops.push(new op2ast_class[op][1]())} -ast_obj=new ast.Compare(left,ops,this.tree.slice(1).map(x=> x.ast()))}else{ast_obj=new ast.Compare(left,ops,[this.tree[1].ast()])}}else if(op_type===ast.UnaryOp){ast_obj=new op_type(new ast_class(),this.tree[1].ast())}else if(op_type===ast.BoolOp){ -var values=[this.tree[1]],main_op=this.op,ctx=this -while(ctx.tree[0].type=='op' && ctx.tree[0].op==main_op){values.splice(0,0,ctx.tree[0].tree[1]) -ctx=ctx.tree[0]} -values.splice(0,0,ctx.tree[0]) -ast_obj=new op_type(new ast_class(),values.map(x=> x.ast()))}else{ast_obj=new op_type( -this.tree[0].ast(),new ast_class(),this.tree[1].ast())} -set_position(ast_obj,this.position) -return ast_obj} -function is_literal(expr){return expr.type=='expr' && -['int','str','float','imaginary'].indexOf(expr.tree[0].type)>-1} -OpCtx.prototype.transition=function(token,value){var C=this -if(C.op===undefined){console.log('C has no op',C) -raise_syntax_error(C)} -if((C.op=='is' ||C.op=='is_not')&& -C.tree.length > 1){for(var operand of C.tree){if(is_literal(operand)){var head=C.op=='is' ? 'is' :'is not' -$B.warn(_b_.SyntaxWarning,`"${head}" with a literal. 
Did you mean "=="?"`,get_module(C).filename,$token.value) -break}}} -switch(token){case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'JoinedStr': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -return transition(new AbstractExprCtx(C,false),token,value) -case 'op': -switch(value){case '+': -case '-': -case '~': -return new UnaryCtx(C,value)} -break -default: -if(C.tree[C.tree.length-1].type== -'abstract_expr'){raise_syntax_error(C)}} -return transition(C.parent,token)} -var PassCtx=$B.parser.PassCtx=function(C){ -this.type='pass' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree[C.tree.length]=this} -PassCtx.prototype.ast=function(){var ast_obj=new ast.Pass() -set_position(ast_obj,this.position) -return ast_obj} -PassCtx.prototype.transition=function(token){var C=this -if(token=='eol'){return C.parent} -raise_syntax_error(C)} -var PatternCtx=$B.parser.PatternCtx=function(C){ -this.type="pattern" -this.parent=C -this.tree=[] -C.tree.push(this) -this.expect='id'} -PatternCtx.prototype.transition=function(token,value){var C=this -switch(C.expect){case 'id': -switch(token){case 'str': -case 'int': -case 'float': -case 'imaginary': -C.expect=',' -return new PatternLiteralCtx(C,token,value) -case 'op': -switch(value){case '-': -case '+': -C.expect=',' -return new PatternLiteralCtx(C,{sign:value}) -case '*': -C.expect='starred_id' -return C -default: -raise_syntax_error(C)} -break -case 'id': -C.expect=',' -if(['None','True','False'].indexOf(value)>-1){return new PatternLiteralCtx(C,token,value)}else{return new PatternCaptureCtx(C,value)} -break -case '[': -return new PatternCtx( -new PatternSequenceCtx(C.parent,token)) -case '(': -return new PatternCtx( -new PatternGroupCtx(C.parent,token)) -case '{': -return new PatternMappingCtx(C.parent,token) -case 'JoinedStr': -raise_syntax_error(C,"patterns may only match "+ -"literals and attribute lookups")} -break -case 'starred_id': -if(token=='id'){var capture=new PatternCaptureCtx(C,value) -capture.starred=true -return capture} -raise_syntax_error(C,"(expected id after '*')") -break -case 'number': -switch(token){case 'int': -case 'float': -case 'imaginary': -C.expect=',' -return new PatternLiteralCtx(C,token,value,C.sign) -default: -raise_syntax_error(C)} -break -case ',': -switch(token){case ',': -if(C.parent instanceof PatternSequenceCtx){return new PatternCtx(C.parent)} -return new PatternCtx( -new PatternSequenceCtx(C.parent)) -case ':': -return BodyCtx(C)}} -return C.parent.transition(token,value)} -function as_pattern(C,token,value){ -if(C.expect=='as'){if(token=='as'){C.expect='alias' -return C}else{return transition(C.parent,token,value)}}else if(C.expect=='alias'){if(token=='id'){if(value=='_'){raise_syntax_error(C,"cannot use '_' as a target")} -if(C.bindings().indexOf(value)>-1){raise_syntax_error(C,`multiple assignments to name '${value}' in pattern`)} -C.alias=value -return C.parent}else{raise_syntax_error(C,'invalid pattern target')}}} -var PatternCaptureCtx=function(C,value){ -this.type="capture_pattern" -this.parent=C.parent -C.parent.tree.pop() -C.parent.tree.push(this) -this.tree=[value] -this.position=$token.value -this.positions=[this.position] -this.expect='.'} -PatternCaptureCtx.prototype.ast=function(){var ast_obj,pattern -try{if(this.tree.length > 1){pattern=new ast.Name(this.tree[0],new ast.Load()) -set_position(pattern,this.position) -for(var i=1;i < this.tree.length;i++){pattern=new ast.Attribute(pattern,this.tree[i],new ast.Load()) 
-copy_position(pattern,pattern.value)} -pattern=new ast.MatchValue(pattern) -copy_position(pattern,pattern.value)}else if(this.starred){var v=this.tree[0] -if(v=='_'){ast_obj=new ast.MatchStar()}else{ast_obj=new ast.MatchStar(v)} -set_position(ast_obj,this.position)}else{pattern=this.tree[0] -if(typeof pattern=='string'){}else if(pattern.type=='group_pattern'){pattern=pattern.ast()}else{console.log('bizarre',pattern) -pattern=NumberCtx.prototype.ast.bind(this)()} -if(pattern=='_'){pattern=new ast.MatchAs() -set_position(pattern,this.position)}} -if(this.alias){if(typeof pattern=="string"){pattern=new ast.MatchAs(undefined,pattern) -set_position(pattern,this.position)} -ast_obj=new ast.MatchAs(pattern,this.alias)}else if(this.tree.length > 1 ||pattern instanceof ast.MatchAs){ast_obj=pattern}else if(typeof pattern=='string'){ast_obj=new ast.MatchAs(undefined,pattern)}else if(! this.starred){ast_obj=new ast.MatchAs(undefined,pattern)} -set_position(ast_obj,this.position) -return ast_obj}catch(err){console.log('error capture ast') -show_line(this) -throw err}} -PatternCaptureCtx.prototype.bindings=function(){var bindings=this.tree[0]=='_' ?[]:this.tree.slice() -if(this.alias){bindings.push(this.alias)} -return bindings} -PatternCaptureCtx.prototype.transition=function(token,value){var C=this -switch(C.expect){case '.': -if(token=='.'){C.type="value_pattern" -C.expect='id' -return C}else if(token=='('){ -return new PatternCtx(new PatternClassCtx(C))}else if(C.parent instanceof PatternMappingCtx){return C.parent.transition(token,value)}else{C.expect='as' -return C.transition(token,value)} -break -case 'as': -case 'alias': -var res=as_pattern(C,token,value) -return res -case 'id': -if(token=='id'){C.tree.push(value) -C.positions.push($token.value) -C.expect='.' -return C}} -return transition(C.parent,token,value)} -const PatternClassCtx=function(C){this.type="class_pattern" -this.tree=[] -this.parent=C.parent -this.position=$token.value -this.class_id=C.tree.slice() -this.positions=C.positions -C.tree.pop() -this.attrs=C.tree.slice(2) -C.parent.tree.pop() -C.parent.tree.push(this) -this.expect=',' -this.keywords=[] -this.positionals=[] -this.bound_names=[]} -PatternClassCtx.prototype.ast=function(){ -var cls -if(this.class_id.length==1){cls=new ast.Name(this.class_id[0])}else{ -for(let i=0,len=this.class_id.length;i < len-1;i++){var value=new ast.Name(this.class_id[i],new ast.Load()) -set_position(value,this.positions[i]) -if(i==0){cls=new ast.Attribute(value,this.class_id[i+1])}else{cls=new ast.Attribute(cls,this.class_id[i+1])} -set_position(cls,this.positions[i])}} -set_position(cls,this.position) -cls.ctx=new ast.Load() -var patterns=[],kwd_attrs=[],kwd_patterns=[] -for(var item of this.tree){if(item.is_keyword){kwd_attrs.push(item.tree[0]) -kwd_patterns.push(item.tree[1].ast())}else{try{patterns.push(item.ast())}catch(err){console.log('error in class pattern item') -show_line(this) -throw err}}} -var ast_obj=new ast.MatchClass(cls,patterns,kwd_attrs,kwd_patterns) -set_position(ast_obj,this.position) -if(this.alias){ast_obj=new ast.MatchAs(ast_obj,this.alias) -set_position(ast_obj,this.position)} -return ast_obj} -PatternClassCtx.prototype.bindings=function(){var bindings=this.bound_names -if(this.alias){bindings.push(this.alias)} -return bindings} -PatternClassCtx.prototype.transition=function(token,value){var C=this -function check_last_arg(){var last=$B.last(C.tree),bound -if(last instanceof PatternCaptureCtx){if(! 
last.is_keyword && -C.keywords.length > 0){$token.value=last.position -raise_syntax_error(C,'positional patterns follow keyword patterns')} -if(last.is_keyword){if(C.keywords.indexOf(last.tree[0])>-1){raise_syntax_error(C,`keyword argument repeated: ${last.tree[0]}`)} -C.keywords.push(last.tree[0]) -bound=last.tree[1].bindings()}else{bound=last.bindings()} -for(var b of bound){if(C.bound_names.indexOf(b)>-1){raise_syntax_error(C,'multiple assignments '+ -`to name '${b}' in pattern`)}} -C.bound_names=C.bound_names.concat(bound)}} -switch(this.expect){case ',': -switch(token){case '=': -var current=$B.last(this.tree) -if(current instanceof PatternCaptureCtx){ -if(this.keywords.indexOf(current.tree[0])>-1){raise_syntax_error(C,'attribute name repeated in class pattern: '+ -current.tree[0])} -current.is_keyword=true -return new PatternCtx(current)} -raise_syntax_error(this,"'=' after non-capture") -break -case ',': -check_last_arg() -return new PatternCtx(this) -case ')': -check_last_arg() -if($B.last(this.tree).tree.length==0){this.tree.pop()} -C.expect='as' -return C -default: -raise_syntax_error(C)} -break -case 'as': -case 'alias': -return as_pattern(C,token,value)} -return transition(C.parent,token,value)} -var PatternGroupCtx=function(C){ -this.type="group_pattern" -this.parent=C -this.position=$token.value -this.tree=[] -C.tree.pop() -this.expect=',|' -C.tree.push(this)} -function remove_empty_pattern(C){var last=$B.last(C.tree) -if(last && last instanceof PatternCtx && -last.tree.length==0){C.tree.pop()}} -PatternGroupCtx.prototype.ast=function(){var ast_obj -if(this.tree.length==1 && ! this.has_comma){ast_obj=this.tree[0].ast()}else{ast_obj=PatternSequenceCtx.prototype.ast.bind(this)()} -if(this.alias){ast_obj=new ast.MatchAs(ast_obj,this.alias)} -set_position(ast_obj,this.position) -return ast_obj} -PatternGroupCtx.prototype.bindings=function(){var bindings=[] -for(var item of this.tree){bindings=bindings.concat(item.bindings())} -if(this.alias){bindings.push(this.alias)} -return bindings} -PatternGroupCtx.prototype.transition=function(token,value){var C=this -switch(C.expect){case ',|': -if(token==")"){ -remove_empty_pattern(C) -C.expect='as' -return C}else if(token==','){C.expect='id' -C.has_comma=true -return C}else if(token=='op' && value=='|'){var opctx=new PatternOrCtx(C.parent) -opctx.parenthese=true -return new PatternCtx(opctx)}else if(this.token===undefined){return transition(C.parent,token,value)} -raise_syntax_error(C) -break -case 'as': -case 'alias': -return as_pattern(C,token,value) -case 'id': -if(token==')'){ -remove_empty_pattern(C) -C.expect='as' -return C} -C.expect=',|' -return transition(new PatternCtx(C),token,value)} -raise_syntax_error(C)} -var PatternLiteralCtx=function(C,token,value,sign){ -this.type="literal_pattern" -this.parent=C.parent -this.position=$token.value -C.parent.tree.pop() -C.parent.tree.push(this) -if(token.sign){this.tree=[{sign:token.sign}] -this.expect='number'}else{if(token=='str'){this.tree=[] -new StringCtx(this,value)}else if(token=='JoinedStr'){raise_syntax_error(this,"patterns cannot include f-strings")}else{this.tree=[{type:token,value,sign}]} -this.expect='op'}} -PatternLiteralCtx.prototype.ast=function(){try{var first=this.tree[0],result -if(first.type=='str'){var v=StringCtx.prototype.ast.bind(first)() -result=new ast.MatchValue(v)}else if(first.type=='id'){result=new ast.MatchSingleton(_b_[first.value])}else{first.position=this.position -var num=NumberCtx.prototype.ast.bind(first)(),res=new ast.MatchValue(num) -if(first.sign && 
first.sign !='+'){var op={'+':ast.UAdd,'-':ast.USub,'~':ast.Invert}[first.sign] -var unary_op=new ast.UnaryOp(new op(),res.value) -set_position(unary_op,this.position) -res=new ast.MatchValue(unary_op) -set_position(res,this.position)} -if(this.tree.length==1){result=res}else{this.tree[2].position=this.position -var num2=NumberCtx.prototype.ast.bind(this.tree[2])(),binop=new ast.BinOp(res.value,this.tree[1]=='+' ? new ast.Add():new ast.Sub(),num2) -set_position(binop,this.position) -result=new ast.MatchValue(binop)}} -set_position(result,this.position) -if(this.tree.length==2){ -result=new ast.MatchValue(new ast.BinOp( -this.tree[0].ast(),this.num_sign=='+' ? ast.Add :ast.Sub,this.tree[1].ast()))} -if(this.alias){result=new ast.MatchAs(result,this.alias)} -set_position(result,this.position) -return result}catch(err){show_line(this) -throw err}} -PatternLiteralCtx.prototype.bindings=function(){if(this.alias){return[this.alias]} -return[]} -PatternLiteralCtx.prototype.transition=function(token,value){var C=this -switch(C.expect){case 'op': -if(token=="op"){switch(value){case '+': -case '-': -if(['int','float'].indexOf(C.tree[0].type)>-1){C.expect='imaginary' -this.tree.push(value) -C.num_sign=value -return C} -raise_syntax_error(C,'patterns cannot include operators') -break -default: -return transition(C.parent,token,value)}} -break -case 'number': -switch(token){case 'int': -case 'float': -case 'imaginary': -var last=$B.last(C.tree) -if(this.tree.token===undefined){ -last.type=token -last.value=value -C.expect='op' -return C} -break -default: -raise_syntax_error(C)} -break -case 'imaginary': -switch(token){case 'imaginary': -C.tree.push({type:token,value,sign:C.num_sign}) -return C.parent -default: -raise_syntax_error(C,'(expected imaginary)')} -break -case 'as': -case 'alias': -return as_pattern(C,token,value)} -if(token=='as' && C.tree.length==1){C.expect='as' -return C.transition(token,value)} -return transition(C.parent,token,value)} -var PatternMappingCtx=function(C){ -this.type="mapping_pattern" -this.parent=C -this.position=$token.value -C.tree.pop() -this.tree=[] -C.tree.push(this) -this.expect='key_value_pattern' -this.literal_keys=[] -this.bound_names=[]} -PatternMappingCtx.prototype.ast=function(){ -var keys=[],patterns=[] -for(var item of this.tree){keys.push(item.tree[0].ast().value) -if(item.tree[0]instanceof PatternLiteralCtx){patterns.push(item.tree[1].ast())}else{patterns.push(item.tree[2].ast())}} -var res=new ast.MatchMapping(keys,patterns) -if(this.double_star){res.rest=this.double_star.tree[0]} -set_position(res,this.position) -return res} -PatternMappingCtx.prototype.bindings=function(){var bindings=[] -for(var item of this.tree){bindings=bindings.concat(item.bindings())} -if(this.rest){bindings=bindings.concat(this.rest.bindings())} -if(this.alias){bindings.push(this.alias)} -return bindings} -PatternMappingCtx.prototype.transition=function(token,value){var C=this -function check_duplicate_names(){var last=$B.last(C.tree),bindings -if(last instanceof PatternKeyValueCtx){if(C.double_star){ -raise_syntax_error(C,"can't use starred name here (consider moving to end)")} -if(last.tree[0].type=='value_pattern'){bindings=last.tree[2].bindings()}else{bindings=last.tree[1].bindings()} -for(var binding of bindings){if(C.bound_names.indexOf(binding)>-1){raise_syntax_error(C,`multiple assignments to name '${binding}'`+ -' in pattern')}} -C.bound_names=C.bound_names.concat(bindings)}} -switch(C.expect){case 'key_value_pattern': -if(token=='}' ||token==','){ 
-check_duplicate_names() -if(C.double_star){var ix=C.tree.indexOf(C.double_star) -if(ix !=C.tree.length-1){raise_syntax_error(C,"can't use starred name here (consider moving to end)")} -C.rest=C.tree.pop()} -return token==',' ? C :C.parent} -if(token=='op' && value=='**'){C.expect='capture_pattern' -return C} -var p=new PatternCtx(C) -try{var lit_or_val=p.transition(token,value)}catch(err){raise_syntax_error(C,"mapping pattern keys may only "+ -"match literals and attribute lookups")} -if(C.double_star){ -raise_syntax_error(C)} -if(lit_or_val instanceof PatternLiteralCtx){C.tree.pop() -new PatternKeyValueCtx(C,lit_or_val) -return lit_or_val}else if(lit_or_val instanceof PatternCaptureCtx){C.has_value_pattern_keys=true -C.tree.pop() -new PatternKeyValueCtx(C,lit_or_val) -C.expect='.' -return this}else{raise_syntax_error(C,'(expected key or **)')} -break -case 'capture_pattern': -var capture=transition(new PatternCtx(C),token,value) -if(capture instanceof PatternCaptureCtx){if(C.double_star){raise_syntax_error(C,"only one double star pattern is accepted")} -if(value=='_'){raise_syntax_error(C)} -if(C.bound_names.indexOf(value)>-1){raise_syntax_error(C,'duplicate binding: '+value)} -C.bound_names.push(value) -capture.double_star=true -C.double_star=capture -C.expect=',' -return C}else{raise_syntax_error(C,'(expected identifier)')} -break -case ',': -if(token==','){C.expect='key_value_pattern' -return C}else if(token=='}'){C.expect='key_value_pattern' -return C.transition(token,value)} -raise_syntax_error(C) -break -case '.': -if(C.tree.length > 0){var last=$B.last(C.tree) -if(last instanceof PatternKeyValueCtx){ -new IdCtx(last,last.tree[0].tree[0]) -C.expect='key_value_pattern' -return transition(last.tree[0],token,value)}} -raise_syntax_error(C)} -return transition(C.parent,token,value)} -var PatternKeyValueCtx=function(C,literal_or_value){this.type="pattern_key_value" -this.parent=C -this.tree=[literal_or_value] -literal_or_value.parent=this -this.expect=':' -C.tree.push(this)} -PatternKeyValueCtx.prototype.bindings=PatternMappingCtx.prototype.bindings -PatternKeyValueCtx.prototype.transition=function(token,value){var C=this -switch(C.expect){case ':': -switch(token){case ':': -var key_obj=this.tree[0] -if(key_obj instanceof PatternLiteralCtx){var key=$B.AST.$convert(key_obj.tree[0]) -if(_b_.list.__contains__(this.parent.literal_keys,key)){raise_syntax_error(C,`mapping pattern checks `+ -`duplicate key (${_b_.repr(key)})`)} -this.parent.literal_keys.push(key)} -this.expect=',' -return new PatternCtx(this) -default: -raise_syntax_error(C,'(expected :)')} -break -case ',': -switch(token){case '}': -return transition(C.parent,token,value) -case ',': -C.parent.expect='key_value_pattern' -return transition(C.parent,token,value) -case 'op': -if(value=='|'){ -return new PatternCtx(new PatternOrCtx(C))}} -raise_syntax_error(C,"(expected ',' or '}')")} -return transition(C.parent,token,value)} -var PatternOrCtx=function(C){ -this.type="or_pattern" -this.parent=C -this.position=$token.value -var first_pattern=C.tree.pop() -if(first_pattern instanceof PatternGroupCtx && -first_pattern.expect !='as'){ -first_pattern=first_pattern.tree[0]} -this.tree=[first_pattern] -first_pattern.parent=this -this.expect='|' -C.tree.push(this) -this.check_reachable()} -PatternOrCtx.prototype.ast=function(){ -var ast_obj=new ast.MatchOr(this.tree.map(x=> x.ast())) -set_position(ast_obj,this.position) -if(this.alias){ast_obj=new ast.MatchAs(ast_obj,this.alias)} -set_position(ast_obj,this.position) -return ast_obj} 
-PatternOrCtx.prototype.bindings=function(){var names -for(var subpattern of this.tree){if(subpattern.bindings===undefined){console.log('no binding',subpattern)} -var subbindings=subpattern.bindings() -if(names===undefined){names=subbindings}else{for(let item of names){if(subbindings.indexOf(item)==-1){raise_syntax_error(this,"alternative patterns bind different names")}} -for(let item of subbindings){if(names.indexOf(item)==-1){raise_syntax_error(this,"alternative patterns bind different names")}}}} -if(this.alias){return names.concat(this.alias)} -return names} -PatternOrCtx.prototype.check_reachable=function(){ -var item=$B.last(this.tree) -var capture -if(item.type=='capture_pattern'){capture=item.tree[0]}else if(item.type=='group_pattern' && item.tree.length==1 && -item.tree[0].type=='capture_pattern'){capture=item.tree[0].tree[0]}else if(item instanceof PatternOrCtx){item.check_reachable()} -if(capture){var msg=capture=='_' ? 'wildcard' : -`name capture '${capture}'` -raise_syntax_error(this,`${msg} makes remaining patterns unreachable`)}} -PatternOrCtx.prototype.transition=function(token,value){function set_alias(){ -var last=$B.last(C.tree) -if(last.alias){C.alias=last.alias -delete last.alias}} -var C=this -if(['as','alias'].indexOf(C.expect)>-1){return as_pattern(C,token,value)} -if(token=='op' && value=="|"){ -for(var item of C.tree){if(item.alias){raise_syntax_error(C,'(no as pattern inside or pattern)')}} -C.check_reachable() -return new PatternCtx(C)}else if(token==')' && C.parenthese){set_alias() -C.bindings() -delete C.parenthese -C.expect='as' -return C} -set_alias() -C.bindings() -return transition(C.parent,token,value)} -var PatternSequenceCtx=function(C,token){ -this.type="sequence_pattern" -this.parent=C -this.position=$token.value -this.tree=[] -this.bound_names=[] -var first_pattern=C.tree.pop() -if(token===undefined){ -this.bound_names=first_pattern.bindings() -this.tree=[first_pattern] -if(first_pattern.starred){this.has_star=true} -first_pattern.parent=this}else{ -this.token=token} -this.expect=',' -C.tree.push(this)} -PatternSequenceCtx.prototype.ast=function(){var ast_obj=new ast.MatchSequence(this.tree.map(x=> x.ast())) -set_position(ast_obj,this.position) -if(this.alias){ast_obj=new ast.MatchAs(ast_obj,this.alias) -set_position(ast_obj,this.position)} -return ast_obj} -PatternSequenceCtx.prototype.bindings=PatternMappingCtx.prototype.bindings -PatternSequenceCtx.prototype.transition=function(token,value){function check_duplicate_names(){var last=$B.last(C.tree) -if(!(last instanceof PatternCtx)){ -var last_bindings=last.bindings() -for(var b of last_bindings){if(C.bound_names.indexOf(b)>-1){raise_syntax_error(C,"multiple assignments to"+ -` name '${b}' in pattern`)}} -if(last.starred){if(C.has_star){raise_syntax_error(C,'multiple starred names in sequence pattern')} -C.has_star=true} -C.bound_names=C.bound_names.concat(last_bindings)}} -var C=this -if(C.expect==','){if((C.token=='[' && token==']')|| -(C.token=='(' && token==")")){ -var nb_starred=0 -for(var item of C.tree){if(item instanceof PatternCaptureCtx && item.starred){nb_starred++ -if(nb_starred > 1){raise_syntax_error(C,'multiple starred names in sequence pattern')}}} -C.expect='as' -check_duplicate_names() -remove_empty_pattern(C) -return C}else if(token==','){check_duplicate_names() -C.expect='id' -return C}else if(token=='op' && value=='|'){ -remove_empty_pattern(C) -return new PatternCtx(new PatternOrCtx(C))}else if(this.token===undefined){ -check_duplicate_names() -return 
transition(C.parent,token,value)} -raise_syntax_error(C)}else if(C.expect=='as'){if(token=='as'){this.expect='alias' -return C} -return transition(C.parent,token,value)}else if(C.expect=='alias'){if(token=='id'){C.alias=value -return C.parent} -raise_syntax_error(C,'expected alias')}else if(C.expect=='id'){C.expect=',' -return transition(new PatternCtx(C),token,value)}} -var RaiseCtx=$B.parser.RaiseCtx=function(C){ -this.type='raise' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree[C.tree.length]=this -this.scope_type=get_scope(this).ntype} -RaiseCtx.prototype.ast=function(){ -var ast_obj=new ast.Raise(...this.tree.map(x=> x.ast())) -set_position(ast_obj,this.position) -return ast_obj} -RaiseCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'id': -if(C.tree.length==0){return new IdCtx(new ExprCtx(C,'exc',false),value)} -break -case 'from': -if(C.tree.length > 0){return new AbstractExprCtx(C,false)} -break -case 'eol': -remove_abstract_expr(this.tree) -return transition(C.parent,token)} -raise_syntax_error(C)} -var ReturnCtx=$B.parser.ReturnCtx=function(C){ -this.type='return' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree[C.tree.length]=this -this.scope=get_scope(this) -if(["def","generator"].indexOf(this.scope.ntype)==-1){raise_syntax_error(C,"'return' outside function")} -var node=this.node=get_node(this) -while(node.parent){if(node.parent.C){var elt=node.parent.C.tree[0] -if(elt.type=='for'){elt.has_return=true -break}else if(elt.type=='try'){elt.has_return=true}else if(elt.type=='single_kw' && elt.token=='finally'){elt.has_return=true}} -node=node.parent}} -ReturnCtx.prototype.ast=function(){var res=new ast.Return() -if(this.tree.length > 0){res.value=this.tree[0].ast()} -set_position(res,this.position) -return res} -ReturnCtx.prototype.transition=function(token,value){var C=this -if(token=='eol' && this.tree.length==1 && -this.tree[0].type=='abstract_expr'){ -this.tree.pop()} -return transition(new AbstractExprCtx(C.parent,false),token,value)} -var SetCompCtx=function(C){ -this.type='setcomp' -this.tree=[C.tree[0]] -this.tree[0].parent=this -this.position=$token.value -Comprehension.make_comp(this,C)} -SetCompCtx.prototype.ast=function(){ -var ast_obj=new ast.SetComp( -this.tree[0].ast(),Comprehension.generators(this.tree.slice(1)) -) -set_position(ast_obj,this.position) -return ast_obj} -SetCompCtx.prototype.transition=function(token){var C=this -if(token=='}'){return this.parent} -raise_syntax_error(C)} -var SingleKwCtx=$B.parser.SingleKwCtx=function(C,token){ -this.type='single_kw' -this.token=token -this.parent=C -this.tree=[] -C.tree[C.tree.length]=this -if(token=="else"){var node=C.node,rank=node.parent.children.indexOf(node),pctx=node.parent.children[rank-1].C -pctx.tree[0].orelse=this -if(pctx.tree.length > 0){var elt=pctx.tree[0] -if(elt.type=='for' || -elt.type=='asyncfor' || -(elt.type=='condition' && elt.token=='while')){elt.has_break=true -elt.else_node=get_node(this)}}}} -SingleKwCtx.prototype.ast=function(){return ast_body(this.parent)} -SingleKwCtx.prototype.transition=function(token){var C=this -if(token==':'){return BodyCtx(C)}else if(token=='eol'){raise_syntax_error(C,"expected ':'")} -raise_syntax_error(C)} -var SliceCtx=$B.parser.SliceCtx=function(C){ -this.type='slice' -this.parent=C -this.position=$token.value -this.tree=C.tree.length > 0 ?[C.tree.pop()]:[] -C.tree.push(this)} -SliceCtx.prototype.ast=function(){var slice=new ast.Slice() -var attrs=['lower','upper','step'] -for(var i=0;i < 
this.tree.length;i++){var item=this.tree[i] -if(item.type !=='abstract_expr'){slice[attrs[i]]=item.ast()}} -set_position(slice,this.position) -return slice} -SliceCtx.prototype.transition=function(token,value){var C=this -if(token==":"){return new AbstractExprCtx(C,false)} -return transition(C.parent,token,value)} -var StarArgCtx=$B.parser.StarArgCtx=function(C){ -this.type='star_arg' -this.parent=C -this.tree=[] -this.position=$token.value -C.tree[C.tree.length]=this} -StarArgCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'id': -if(C.parent.type=="target_list"){C.tree.push(value) -C.parent.expect=',' -return C.parent} -return transition(new AbstractExprCtx(C,false),token,value) -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'JoinedStr': -case 'bytes': -case '[': -case '(': -case '{': -case 'not': -case 'lambda': -return transition(new AbstractExprCtx(C,false),token,value) -case ',': -case ')': -if(C.tree.length==0){raise_syntax_error(C,"(unnamed star argument)")} -return transition(C.parent,token) -case ':': -if(C.parent.parent.type=='lambda'){return transition(C.parent.parent,token)}} -raise_syntax_error(C)} -var StarredCtx=$B.parser.StarredCtx=function(C){ -this.type='starred' -this.position=C.position -if(C.parent.type=='list_or_tuple' && -C.parent.parent.type=="node"){ -for(var i=0;i < C.parent.tree.length;i++){var child=C.parent.tree[i] -if(child.type=='expr' && -child.tree.length > 0 && -child.tree[0].type=='starred'){raise_syntax_error(C,"two starred expressions in assignment")}}} -this.parent=C -this.tree=[] -C.tree[C.tree.length]=this} -StarredCtx.prototype.ast=function(){if(this.tree[0].type=="abstract_expr"){raise_syntax_error_known_range(this,this.position,last_position(this),'invalid syntax')} -var ast_obj=new ast.Starred(this.tree[0].ast(),new ast.Load()) -set_position(ast_obj,this.position) -return ast_obj} -StarredCtx.prototype.transition=function(token,value){var C=this -return transition(C.parent,token,value)} -var StringCtx=$B.parser.StringCtx=function(C,value){ -this.type='str' -this.parent=C -this.position=this.end_position=$token.value -C.tree.push(this) -this.is_bytes=value.startsWith('b') -this.value=this.is_bytes ?[]:'' -this.add_value(value) -this.raw=false} -$B.string_from_ast_value=function(value){ -return value.replace(new RegExp("\\\\'",'g'),"'")} -var make_string_for_ast_value=$B.make_string_for_ast_value=function(value){value=value.replace(/\n/g,'\\n\\\n') -value=value.replace(/\r/g,'\\r\\\r') -if(value[0]=="'"){var unquoted=value.substr(1,value.length-2) -return unquoted} -if(value.indexOf("'")>-1){var s='',escaped=false -for(var char of value){if(char=='\\'){if(escaped){s+='\\\\'} -escaped=!escaped}else{if(char=="'" && ! escaped){ -s+='\\'}else if(escaped){s+='\\'} -s+=char -escaped=false}} -value=s} -return value.substr(1,value.length-2)} -StringCtx.prototype.add_value=function(value){this.is_bytes=value.charAt(0)=='b' -if(! 
this.is_bytes){this.value+=make_string_for_ast_value(value)}else{value=value.substr(2,value.length-3) -try{var b=encode_bytestring(value)}catch(err){raise_syntax_error(this,'bytes can only contain ASCII literal characters')} -this.value=this.value.concat(b)}} -var encode_bytestring=$B.encode_bytestring=function(s){s=s.replace(/\\t/g,'\t') -.replace(/\\n/g,'\n') -.replace(/\\r/g,'\r') -.replace(/\\f/g,'\f') -.replace(/\\v/g,'\v') -.replace(/\\\\/g,'\\') -var t=[] -for(var i=0,len=s.length;i < len;i++){var cp=s.codePointAt(i) -if(cp > 255){throw Error()} -t.push(cp)} -return t} -StringCtx.prototype.ast=function(){var value=this.value -if(this.is_bytes){value=_b_.bytes.$factory(this.value)} -var ast_obj=new ast.Constant(value) -set_position(ast_obj,this.position) -return ast_obj} -StringCtx.prototype.transition=function(token,value){var C=this -switch(token){case '[': -return new AbstractExprCtx(new SubscripCtx(C.parent),false) -case '(': -C.parent.tree[0]=C -return new CallCtx(C.parent) -case 'str': -if((this.is_bytes && ! value.startsWith('b'))|| -(! this.is_bytes && value.startsWith('b'))){raise_syntax_error(C,"cannot mix bytes and nonbytes literals")} -C.add_value(value) -return C -case 'JoinedStr': -C.parent.tree.pop() -var fstring=new FStringCtx(C.parent,value) -new StringCtx(fstring,fstring.quotes+this.value+fstring.quotes) -return fstring} -return transition(C.parent,token,value)} -var SubscripCtx=$B.parser.SubscripCtx=function(C){ -this.type='sub' -this.func='getitem' -this.value=C.tree[0] -this.position=$token.value -C.tree.pop() -C.tree[C.tree.length]=this -this.parent=C -this.tree=[]} -SubscripCtx.prototype.ast=function(){var slice -if(this.tree.length > 1){var slice_items=this.tree.map(x=> x.ast()) -slice=new ast.Tuple(slice_items)}else{slice=this.tree[0].ast()} -set_position(slice,this.position,this.end_position) -slice.ctx=new ast.Load() -var value=this.value.ast() -if(value.ctx){value.ctx=new ast.Load()} -var ast_obj=new ast.Subscript(value,slice,new ast.Load()) -ast_obj.lineno=value.lineno -ast_obj.col_offset=value.col_offset -ast_obj.end_lineno=slice.end_lineno -ast_obj.end_col_offset=slice.end_col_offset -return ast_obj} -SubscripCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'JoinedStr': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -var expr=new AbstractExprCtx(C,false) -return transition(expr,token,value) -case ']': -C.end_position=$token.value -if(C.parent.packed){return C.parent} -if(C.tree[0].tree.length > 0){return C.parent} -break -case ':': -return new AbstractExprCtx(new SliceCtx(C),false) -case ',': -return new AbstractExprCtx(C,false)} -raise_syntax_error(C)} -var TargetListCtx=$B.parser.TargetListCtx=function(C){ -this.type='target_list' -this.parent=C -this.tree=[] -this.position=$token.value -this.expect='id' -this.nb_packed=0 -C.tree[C.tree.length]=this} -TargetListCtx.prototype.ast=function(){if(this.tree.length==1 && ! 
this.implicit_tuple){let item=this.tree[0].ast() -item.ctx=new ast.Store() -if(item instanceof ast.Tuple){for(var target of item.elts){target.ctx=new ast.Store()}} -return item}else{let items=[] -for(let item of this.tree){item=item.ast() -if(item.hasOwnProperty('ctx')){item.ctx=new ast.Store()} -items.push(item)} -var ast_obj=new ast.Tuple(items,new ast.Store()) -set_position(ast_obj,this.position) -return ast_obj}} -TargetListCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'id': -if(C.expect=='id'){C.expect=',' -return new IdCtx( -new ExprCtx(C,'target',false),value)} -break -case 'op': -if(C.expect=='id' && value=='*'){ -this.nb_packed++ -C.expect=',' -return new AbstractExprCtx( -new StarredCtx(C),false)} -break -case '(': -case '[': -if(C.expect=='id'){C.expect=',' -return new ListOrTupleCtx(C,token=='(' ? 'tuple' :'list')} -break -case ')': -case ']': -if(C.expect==','){return C.parent} -break -case ',': -if(C.expect==','){C.expect='id' -C.implicit_tuple=true -return C}} -if(C.expect==','){return transition(C.parent,token,value)}else if(token=='in'){ -return transition(C.parent,token,value)} -console.log('unexpected token for target list',token,value) -console.log(C) -raise_syntax_error(C)} -var TernaryCtx=$B.parser.TernaryCtx=function(C){ -this.type='ternary' -this.position=C.position -C.parent.tree.pop() -var expr=new ExprCtx(C.parent,'ternary',false) -expr.tree.push(this) -this.parent=expr -this.tree=[C] -C.parent=this} -TernaryCtx.prototype.ast=function(){ -var ast_obj=new ast.IfExp(this.tree[1].ast(),this.tree[0].ast(),this.tree[2].ast()) -set_position(ast_obj,this.position) -return ast_obj} -TernaryCtx.prototype.transition=function(token,value){var C=this -if(token=='else'){C.in_else=true -return new AbstractExprCtx(C,false)}else if(! C.in_else){if(token==':'){raise_syntax_error(C)} -raise_syntax_error_known_range(C,C.position,last_position(C),"expected 'else' after 'if' expression")}else if(token==","){ -if(["assign","augm_assign","node","return"]. -indexOf(C.parent.type)>-1){C.parent.tree.pop() -var t=new ListOrTupleCtx(C.parent,'tuple') -t.implicit=true -t.tree[0]=C -C.parent=t -t.expect="id" -return t}} -return transition(C.parent,token,value)} -var TryCtx=$B.parser.TryCtx=function(C){ -this.type='try' -this.parent=C -this.position=$token.value -C.tree[C.tree.length]=this} -TryCtx.prototype.ast=function(){ -var node=this.parent.node,res={body:ast_body(this.parent),handlers:[],orelse:[],finalbody:[]} -var rank=node.parent.children.indexOf(node) -for(var child of node.parent.children.slice(rank+1)){var t=child.C.tree[0],type=t.type -if(type=='single_kw'){type=t.token} -if(type=='except'){res.handlers.push(t.ast())}else if(type=='else'){res.orelse=ast_body(child.C)}else if(type=='finally'){res.finalbody=ast_body(child.C)}else{break}} -if(res.handlers.length==0 && -res.finalbody.length==0){raise_syntax_error(this,"expected 'except' or 'finally' block")} -var klass=this.parent.is_trystar ? 
ast.TryStar :ast.Try -res=new klass(res.body,res.handlers,res.orelse,res.finalbody) -set_position(res,this.position) -return res} -TryCtx.prototype.transition=function(token){var C=this -if(token==':'){return BodyCtx(C)} -raise_syntax_error(C,"expected ':'")} -var TypeAliasCtx=$B.parser.TypeAlias=function(C,value){ -C.parent.parent.tree=[this] -this.parent=C.parent.parent -this.name=value -this.expect='=' -this.tree=[] -this.position=$token.value} -TypeAliasCtx.prototype.transition=function(token,value){var C=this -if(C.expect=='='){if(token=='['){if(this.tree.length > 0){raise_syntax_error(C)} -return new TypeParamsCtx(C)}else if(token=='='){C.has_value=true -return new AbstractExprCtx(C,false)}else if(token=='eol'){if(! C.has_value || -this.tree.length !==1 || -this.tree[0]instanceof AbstractExprCtx){raise_syntax_error(C)} -return transition(C.parent,token,value)}} -raise_syntax_error(C)} -TypeAliasCtx.prototype.ast=function(){var name=new ast.Name(this.name),params,value=this.tree[0].ast() -if(this.type_params){params=this.type_params.ast()} -var ast_obj=new ast.TypeAlias(name,params,value) -set_position(ast_obj,this.position) -return ast_obj} -var TypeParamsCtx=$B.parser.TypeParamsCtx=function(C){this.type='type_params' -this.parent=C -C.type_params=this -this.tree=[] -this.expect='param'} -TypeParamsCtx.prototype.check_duplicate=function(name){ -for(var item of this.tree){if(item.name==name){raise_syntax_error(this,`duplicate type parameter '${name}'`)}}} -TypeParamsCtx.prototype.transition=function(token,value){var C=this -if(C.expect=='param'){if(token=='id'){C.check_duplicate(value) -C.expect=',' -return new TypeVarCtx(C,value)}else if(token=='op'){if(value=='*'){C.expect=',' -return new TypeVarTupleCtx(C)}else if(value=='**'){C.expect=',' -return new TypeParamSpecCtx(C)}}else if(token==']'){return C.parent} -raise_syntax_error(C)}else if(C.expect==','){if(token==','){C.expect='param' -return C}else if(token==']'){return C.parent} -raise_syntax_error(C)} -raise_syntax_error(C)} -TypeParamsCtx.prototype.ast=function(){return this.tree.map(x=> x.ast())} -var TypeVarCtx=$B.parser.TypeVarCtx=function(C,name){this.name=name -this.parent=C -C.tree.push(this) -this.tree=[] -this.position=$token.value} -TypeVarCtx.prototype.transition=function(token,value){var C=this -if(token==':'){return new AbstractExprCtx(C,false)} -return transition(this.parent,token,value)} -TypeVarCtx.prototype.ast=function(){var name=this.name,bound -if(this.tree.length > 0){bound=this.tree[0].ast()} -var ast_obj=new ast.TypeVar(name,bound) -set_position(ast_obj,this.position) -return ast_obj} -var TypeParamSpecCtx=$B.parser.TypeParamSpecCtx=function(C){this.parent=C -C.tree.push(this) -this.tree=[] -this.position=$token.value} -TypeParamSpecCtx.prototype.transition=function(token,value){var C=this -if(token=='id'){if(C.name){raise_syntax_error(C)} -C.parent.check_duplicate(value) -C.name=value -return C}else if(token==':'){if(! 
C.name){raise_syntax_error(C)} -this.has_colon=true -return new AbstractExprCtx(C,false)}else if(this.has_colon){var msg -if(this.tree[0].name=='tuple'){msg='cannot use constraints with ParamSpec'}else{msg='cannot use bound with ParamSpec'} -raise_syntax_error_known_range(C,this.position,$token.value,msg)} -return transition(this.parent,token,value)} -TypeParamSpecCtx.prototype.ast=function(){var name=new ast.Name(this.name) -var ast_obj=new ast.ParamSpec(name) -set_position(ast_obj,this.position) -return ast_obj} -var TypeVarTupleCtx=$B.parser.TypeVarTupleCtx=function(C){this.parent=C -C.tree.push(this) -this.tree=[] -this.position=$token.value} -TypeVarTupleCtx.prototype.transition=function(token,value){var C=this -if(token=='id'){if(C.name){raise_syntax_error(C)} -C.parent.check_duplicate(value) -C.name=value -return C}else if(token==':'){if(! C.name){raise_syntax_error(C)} -this.has_colon=true -return new AbstractExprCtx(C,false)}else if(this.has_colon){var msg -if(this.tree[0].name=='tuple'){msg='cannot use constraints with TypeVarTuple'}else{msg='cannot use bound with TypeVarTuple'} -raise_syntax_error_known_range(C,this.position,$token.value,msg)} -return transition(this.parent,token,value)} -TypeVarTupleCtx.prototype.ast=function(){var name=new ast.Name(this.name) -var ast_obj=new ast.TypeVarTuple(name) -set_position(ast_obj,this.position) -return ast_obj} -var UnaryCtx=$B.parser.UnaryCtx=function(C,op){ -this.type='unary' -this.op=op -this.parent=C -this.tree=[] -this.position=$token.value -C.tree.push(this)} -UnaryCtx.prototype.ast=function(){var op={'+':ast.UAdd,'-':ast.USub,'~':ast.Invert}[this.op],ast_obj=new ast.UnaryOp(new op(),this.tree[0].ast()) -set_position(ast_obj,this.position) -return ast_obj} -UnaryCtx.prototype.transition=function(token,value){var C=this -switch(token){case 'op': -if('+'==value ||'-'==value){if(C.op===value){C.op='+'}else{C.op='-'} -return C} -break -case 'int': -case 'float': -case 'imaginary': -if(C.parent.type=="starred"){raise_syntax_error(C,"can't use starred expression here")} -var res=new NumberCtx(token,C,value) -return res -case 'id': -return transition(new AbstractExprCtx(C,false),token,value)} -if(this.tree.length==0 ||this.tree[0].type=='abstract_expr'){raise_syntax_error(C)} -return transition(C.parent,token,value)} -var WithCtx=$B.parser.WithCtx=function(C){ -this.type='with' -this.parent=C -this.position=$token.value -C.tree[C.tree.length]=this -this.tree=[] -this.expect='expr' -this.scope=get_scope(this)} -WithCtx.prototype.ast=function(){ -var withitems=[] -for(var withitem of this.tree){withitems.push(withitem.ast())} -var klass=this.async ? ast.AsyncWith :ast.With -var ast_obj=new klass(withitems,ast_body(this.parent)) -set_position(ast_obj,this.async ? this.async.position :this.position,last_position(this)) -return ast_obj} -WithCtx.prototype.transition=function(token,value){var C=this -function check_last(){var last=$B.last(C.tree) -if(last.tree.length > 1){var alias=last.tree[1] -if(alias.tree.length==0){raise_syntax_error(C,"expected ':'")} -check_assignment(alias)}} -switch(token){case '(': -case '[': -if(this.expect=='expr' && this.tree.length==0){ -C.parenth=token -return C}else{raise_syntax_error(C)} -break -case 'id': -case 'lambda': -if(C.expect=='expr'){ -C.expect=',' -return transition( -new AbstractExprCtx(new withitem(C),false),token,value)} -raise_syntax_error(C) -break -case ':': -if((! 
C.parenth)||C.parenth=='implicit'){check_last()} -return BodyCtx(C) -case ')': -case ']': -if(C.parenth==opening[token]){if(C.expect==',' ||C.expect=='expr'){check_last() -C.expect=':' -return C}} -break -case ',': -if(C.expect==','){if(! C.parenth){C.parenth='implicit'} -check_last() -C.expect='expr' -return C} -break -case 'eol': -raise_syntax_error(C,"expected ':'")} -raise_syntax_error(C)} -WithCtx.prototype.set_alias=function(ctx){var ids=[] -if(ctx.type=="id"){ids=[ctx]}else if(ctx.type=="list_or_tuple"){ -for(var expr of ctx.tree){if(expr.type=="expr" && expr.tree[0].type=="id"){ids.push(expr.tree[0])}}}} -var withitem=function(C){this.type='withitem' -this.parent=C -C.tree.push(this) -this.tree=[] -this.expect='as' -this.position=$token.value} -withitem.prototype.ast=function(){var ast_obj=new ast.withitem(this.tree[0].ast()) -if(this.tree[1]){ast_obj.optional_vars=this.tree[1].tree[0].ast() -if(ast_obj.optional_vars.elts){for(var elt of ast_obj.optional_vars.elts){elt.ctx=new ast.Store()}}else{ast_obj.optional_vars.ctx=new ast.Store()}} -set_position(ast_obj,this.position) -return ast_obj} -withitem.prototype.transition=function(token,value){var C=this -if(token=='as' && C.expect=='as'){C.expect='star_target' -return new AbstractExprCtx(C,false)}else{return transition(C.parent,token,value)}} -var YieldCtx=$B.parser.YieldCtx=function(C,is_await){ -this.type='yield' -this.parent=C -this.tree=[] -this.is_await=is_await -this.position=$token.value -C.tree[C.tree.length]=this -if(C.type=="list_or_tuple" && C.tree.length > 1){raise_syntax_error(C,"(non-parenthesized yield)")} -if(parent_match(C,{type:"annotation"})){raise_syntax_error(C,"'yield' outside function")} -var root=get_module(this) -root.yields_func_check=root.yields_func_check ||[] -root.yields_func_check.push(this) -var scope=this.scope=get_scope(this,true),node=get_node(this) -node.has_yield=this -var in_comp=parent_match(this,{type:"comprehension"}) -if(get_scope(this).id.startsWith("lc"+$B.lambda_magic)){delete node.has_yield} -if(in_comp){var outermost_expr=in_comp.tree[0].tree[1] -parent=C -while(parent){if(parent===outermost_expr){break} -parent=parent.parent} -if(! parent){raise_syntax_error(C,"'yield' inside list comprehension")}} -var in_lambda=false,parent=C -while(parent){if(parent.type=="lambda"){in_lambda=true -this.in_lambda=true -break} -parent=parent.parent} -parent=node.parent -while(parent){if(parent.C && parent.C.tree.length > 0 && -parent.C.tree[0].type=="with"){scope.C.tree[0].$has_yield_in_cm=true -break} -parent=parent.parent} -if(! in_lambda){switch(C.type){case 'node': -case 'assign': -case 'list_or_tuple': -break -default: -raise_syntax_error(C,'(non-parenthesized yield)')}}} -YieldCtx.prototype.ast=function(){ -var ast_obj -if(this.from){ast_obj=new ast.YieldFrom(this.tree[0].ast())}else if(this.tree.length==1){ast_obj=new ast.Yield(this.tree[0].ast())}else{ast_obj=new ast.Yield()} -set_position(ast_obj,this.position) -return ast_obj} -YieldCtx.prototype.transition=function(token){var C=this -if(token=='from'){ -if(C.tree[0].type !='abstract_expr'){ -raise_syntax_error(C,"('from' must follow 'yield')")} -C.from=true -C.from_num=$B.UUID() -return C.tree[0]}else{remove_abstract_expr(C.tree) -if(C.from && C.tree.length==0){raise_syntax_error(C)}} -return transition(C.parent,token)} -YieldCtx.prototype.check_in_function=function(){if(this.in_lambda){return} -var scope=get_scope(this),in_func=scope.is_function,func_scope=scope -if(! 
in_func && scope.comprehension){var parent=scope.parent_block -while(parent.comprehension){parent=parent.parent_block} -in_func=parent.is_function -func_scope=parent} -if(in_func){var def=func_scope.C.tree[0] -if(! this.is_await){def.type='generator'}}} -function parent_match(ctx,obj){ -var flag -while(ctx.parent){flag=true -for(var attr in obj){if(ctx.parent[attr]!=obj[attr]){flag=false -break}} -if(flag){return ctx.parent} -ctx=ctx.parent} -return false} -var get_previous=$B.parser.get_previous=function(C){var previous=C.node.parent.children[C.node.parent.children.length-2] -if(!previous ||!previous.C){raise_syntax_error(C,'(keyword not following correct keyword)')} -return previous.C.tree[0]} -var get_docstring=$B.parser.get_docstring=function(node){var doc_string=_b_.None -if(node.body.length > 0){var firstchild=node.body[0] -if(firstchild instanceof $B.ast.Constant && -typeof firstchild.value=='string'){doc_string=firstchild.value}} -return doc_string} -var get_scope=$B.parser.get_scope=function(C){ -var ctx_node=C.parent -while(true){if(ctx_node.type==='node'){break}else if(ctx_node.comprehension){return ctx_node} -ctx_node=ctx_node.parent} -var tree_node=ctx_node.node,scope=null -while(tree_node.parent && tree_node.parent.type !=='module'){var ntype=tree_node.parent.C.tree[0].type -switch(ntype){case 'def': -case 'class': -case 'generator': -scope=tree_node.parent -scope.ntype=ntype -scope.is_function=ntype !='class' -return scope} -tree_node=tree_node.parent} -scope=tree_node.parent ||tree_node -scope.ntype="module" -return scope} -var get_module=$B.parser.get_module=function(C){ -var ctx_node=C instanceof NodeCtx ? C :C.parent -while(ctx_node.type !=='node'){ctx_node=ctx_node.parent} -var tree_node=ctx_node.node -if(tree_node.ntype=="module"){return tree_node} -var scope=null -while(tree_node.parent.type !='module'){tree_node=tree_node.parent} -scope=tree_node.parent -scope.ntype="module" -return scope} -var get_node=$B.parser.get_node=function(C){var ctx=C -while(ctx.parent){ctx=ctx.parent} -return ctx.node} -var mangle_name=$B.parser.mangle_name=function(name,C){ -if(name.substr(0,2)=="__" && name.substr(name.length-2)!=="__"){var scope=get_scope(C) -while(true){if(scope.ntype=="module"){return name}else if(scope.ntype=="class"){var class_name=scope.C.tree[0].name -while(class_name.charAt(0)=='_'){class_name=class_name.substr(1)} -return '_'+class_name+name}else{if(scope.parent && scope.parent.C){scope=get_scope(scope.C.tree[0])}else{return name}}}}else{return name}} -$B.nb_debug_lines=0 -var transition=$B.parser.transition=function(C,token,value){if($B.nb_debug_lines > 100){alert('too many debug lines') -$B.nb_debug_lines=0} -if($B.track_transitions){console.log("C",C,"token",token,value) -$B.nb_debug_lines++} -return C.transition(token,value)} -var s_escaped='abfnrtvxuU"0123456789'+"'"+'\\',is_escaped={} -for(var i=0;i < s_escaped.length;i++){is_escaped[s_escaped.charAt(i)]=true} -function SurrogatePair(value){ -value=value-0x10000 -return String.fromCharCode(0xD800 |(value >> 10))+ -String.fromCharCode(0xDC00 |(value & 0x3FF))} -function test_num(num_lit){var len=num_lit.length,pos=0,char,elt=null,subtypes={b:'binary',o:'octal',x:'hexadecimal'},digits_re=/[_\d]/ -function error(message){throw SyntaxError(message)} -function check(elt){if(elt.value.length==0){let t=subtypes[elt.subtype]||'decimal' -error("invalid "+t+" literal")}else if(elt.value[elt.value.length-1].match(/[-+_]/)){let t=subtypes[elt.subtype]||'decimal' -error("invalid "+t+" literal")}else{ 
-elt.value=elt.value.replace(/_/g,"") -elt.length=pos -return elt}} -while(pos < len){char=num_lit[pos] -if(char.match(digits_re)){if(elt===null){elt={value:char}}else{if(char=='_' && elt.value.match(/[._+-]$/)){ -error('consecutive _ at '+pos)}else if(char=='_' && elt.subtype=='float' && -elt.value.match(/e$/i)){ -error('syntax error')}else if(elt.subtype=='b' && !(char.match(/[01_]/))){error(`invalid digit '${char}' in binary literal`)}else if(elt.subtype=='o' && !(char.match(/[0-7_]/))){error(`invalid digit '${char}' in octal literal`)}else if(elt.subtype===undefined && elt.value.startsWith("0")&& -!char.match(/[0_]/)){error("leading zeros in decimal integer literals are not"+ -" permitted; use an 0o prefix for octal integers")} -elt.value+=char} -pos++}else if(char.match(/[oxb]/i)){if(elt.value=="0"){elt.subtype=char.toLowerCase() -if(elt.subtype=="x"){digits_re=/[_\da-fA-F]/} -elt.value='' -pos++}else{error("invalid char "+char)}}else if(char=='.'){if(elt===null){error("invalid char in "+num_lit+" pos "+pos+": "+char)}else if(elt.subtype===undefined){elt.subtype="float" -if(elt.value.endsWith('_')){error("invalid decimal literal")} -elt.value=elt.value.replace(/_/g,"")+char -pos++}else{return check(elt)}}else if(char.match(/e/i)){if(num_lit[pos+1]===undefined){error("nothing after e")}else if(elt && subtypes[elt.subtype]!==undefined){ -error("syntax error")}else if(elt && elt.value.endsWith('_')){ -error("syntax error")}else if(num_lit[pos+1].match(/[+\-0-9_]/)){if(elt && elt.value){if(elt.exp){elt.length=pos -return elt} -elt.subtype='float' -elt.value+=char -elt.exp=true -pos++}else{error("unexpected e")}}else{return check(elt)}}else if(char.match(/[+-]/i)){if(elt===null){elt={value:char} -pos++}else if(elt.value.search(/e$/i)>-1){elt.value+=char -pos++}else{return check(elt)}}else if(char.match(/j/i)){if(elt &&(! elt.subtype ||elt.subtype=="float")){elt.imaginary=true -check(elt) -elt.length++ -return elt}else{error("invalid syntax")}}else{break}} -return check(elt)} -var opening={')':'(','}':'{',']':'['} -function line_ends_with_colon(token_reader){var braces=[] -token_reader.position-- -while(true){var token=token_reader.read() -if(! 
token){return false} -if(token.type=='OP' && token.string==':' && braces.length==0){return true}else if(token.type=='OP'){if('([{'.indexOf(token.string)>-1){braces.push(token)}else if(')]}'.indexOf(token.string)>-1){if(braces.length==0){let err=SyntaxError( -`unmatched '${token.string}'`) -err.offset=token.start[1] -throw err}else if($B.last(braces).string !=opening[token.string]){let err=SyntaxError("closing parenthesis "+ -`'${token.string}' does not match opening `+ -`parenthesis '${$B.last(braces).string}'`) -err.offset=token.start[1] -throw err}else{braces.pop()}}}else if(token.type=='NEWLINE'){return false}} -return false} -function prepare_number(n){ -if(n.startsWith('.')){if(n.endsWith("j")){return{type:'imaginary',value:prepare_number(n.substr(0,n.length-1))}}else{return{type:'float',value:n.replace(/_/g,'')}}}else if(n.startsWith('0')&& n !='0'){ -let num=test_num(n),base -if(num.imaginary){return{type:'imaginary',value:prepare_number(num.value)}} -if(num.subtype=='float'){return{type:num.subtype,value:num.value}} -if(num.subtype===undefined){base=10}else{base={'b':2,'o':8,'x':16}[num.subtype]} -if(base !==undefined){return{type:'int',value:[base,num.value]}}}else{let num=test_num(n) -if(num.subtype=="float"){if(num.imaginary){return{ -type:'imaginary',value:prepare_number(num.value)}}else{return{ -type:'float',value:num.value}}}else{if(num.imaginary){return{ -type:'imaginary',value:prepare_number(num.value)}}else{return{ -type:'int',value:[10,num.value]}}}}} -function test_escape(text,antislash_pos){ -var seq_end,mo -mo=/^[0-7]{1,3}/.exec(text.substr(antislash_pos+1)) -if(mo){return[String.fromCharCode(parseInt(mo[0],8)),1+mo[0].length]} -switch(text[antislash_pos+1]){case "x": -mo=/^[0-9A-F]{0,2}/i.exec(text.substr(antislash_pos+2)) -if(mo[0].length !=2){seq_end=antislash_pos+mo[0].length+1 -$token.value.start[1]=seq_end -throw Error( -"(unicode error) 'unicodeescape' codec can't decode "+ -`bytes in position ${antislash_pos}-${seq_end}: truncated `+ -"\\xXX escape")}else{return[String.fromCharCode(parseInt(mo[0],16)),2+mo[0].length]} -break -case "u": -mo=/^[0-9A-F]{0,4}/i.exec(text.substr(antislash_pos+2)) -if(mo[0].length !=4){seq_end=antislash_pos+mo[0].length+1 -$token.value.start[1]=seq_end -throw Error( -"(unicode error) 'unicodeescape' codec can't decode "+ -`bytes in position ${antislash_pos}-${seq_end}: truncated `+ -"\\uXXXX escape")}else{return[String.fromCharCode(parseInt(mo[0],16)),2+mo[0].length]} -break -case "U": -mo=/^[0-9A-F]{0,8}/i.exec(text.substr(antislash_pos+2)) -if(mo[0].length !=8){seq_end=antislash_pos+mo[0].length+1 -$token.value.start[1]=seq_end -throw Error( -"(unicode error) 'unicodeescape' codec can't decode "+ -`bytes in position ${antislash_pos}-${seq_end}: truncated `+ -"\\uXXXX escape")}else{let value=parseInt(mo[0],16) -if(value > 0x10FFFF){throw Error('invalid unicode escape '+mo[0])}else if(value >=0x10000){return[SurrogatePair(value),2+mo[0].length]}else{return[String.fromCharCode(value),2+mo[0].length]}}}} -$B.test_escape=test_escape -function prepare_string(C,s){var len=s.length,pos=0,string_modifier,_type="string" -let quote; -let inner; -while(pos < len){if(s[pos]=='"' ||s[pos]=="'"){quote=s[pos] -string_modifier=s.substr(0,pos) -if(s.substr(pos,3)==quote.repeat(3)){_type="triple_string" -inner=s.substring(pos+3,s.length-3)}else{inner=s.substring(pos+quote.length,len-quote.length)} -break} -pos++} -var result={quote} -var mods={r:'raw',f:'fstring',b:'bytes'} -for(var mod of string_modifier){result[mods[mod]]=true} -var 
raw=C.type=='str' && C.raw,bytes=false,fstring=false,end=null -if(string_modifier){switch(string_modifier){case 'r': -raw=true -break -case 'u': -break -case 'b': -bytes=true -break -case 'rb': -case 'br': -bytes=true -raw=true -break -case 'f': -fstring=true -break -case 'fr': -case 'rf': -fstring=true -raw=true -break} -string_modifier=false} -var escaped=false,zone='',src=inner -end=0 -while(end < src.length){if(escaped){if(src.charAt(end)=="a" && ! raw){zone=zone.substr(0,zone.length-1)+"\u0007"}else{zone+=src.charAt(end) -if(raw && src.charAt(end)=='\\'){zone+='\\'}} -escaped=false -end++}else if(src.charAt(end)=="\\"){if(raw){if(end < src.length-1 && -src.charAt(end+1)==quote){zone+='\\\\'+quote -end+=2}else{zone+='\\\\' -end++} -escaped=true}else{if(src.charAt(end+1)=='\n'){ -end+=2}else if(src.substr(end+1,2)=='N{'){ -let end_lit=end+3,re=new RegExp("[-a-zA-Z0-9 ]+"),search=re.exec(src.substr(end_lit)) -if(search===null){raise_syntax_error(C," (unicode error) "+ -"malformed \\N character escape",pos)} -end_lit=end_lit+search[0].length -if(src.charAt(end_lit)!="}"){raise_syntax_error(C," (unicode error) "+ -"malformed \\N character escape")} -var description=search[0].toUpperCase() -if($B.unicodedb===undefined){var xhr=new XMLHttpRequest() -xhr.open("GET",$B.brython_path+"unicode.txt",false) -xhr.onreadystatechange=function(){if(this.readyState==4){if(this.status==200){$B.unicodedb=this.responseText}else{console.log("Warning - could not "+ -"load unicode.txt")}}} -xhr.send()} -if($B.unicodedb !==undefined){let re=new RegExp("^([0-9A-F]+);"+ -description+";.*$","m") -search=re.exec($B.unicodedb) -if(search===null){raise_syntax_error(C," (unicode error) "+ -"unknown Unicode character name")} -var cp=parseInt(search[1],16) -zone+=String.fromCodePoint(cp) -end=end_lit+1}else{end++}}else{try{var esc=test_escape(src,end)}catch(err){raise_syntax_error(C,err.message)} -if(esc){if(esc[0]=='\\'){zone+='\\\\'}else{zone+=esc[0]} -end+=esc[1]}else{if(end < src.length-1 && -is_escaped[src.charAt(end+1)]===undefined){zone+='\\'} -zone+='\\' -escaped=true -end++}}}}else if(src.charAt(end)=='\n' && _type !='triple_string'){ -raise_syntax_error(C,"EOL while scanning string literal")}else{zone+=src.charAt(end) -end++}} -var $string=zone,string='' -for(var i=0;i < $string.length;i++){var $car=$string.charAt(i) -if($car==quote){if(raw ||(i==0 || -$string.charAt(i-1)!='\\')){string+='\\'}else if(_type=="triple_string"){ -var j=i-1 -while($string.charAt(j)=='\\'){j--} -if((i-j-1)% 2==0){string+='\\'}}} -string+=$car} -if(fstring){try{let re=new RegExp("\\\\"+quote,"g"),string_no_bs=string.replace(re,quote) -var elts=$B.parse_fstring(string_no_bs)}catch(err){raise_syntax_error(C,err.message)}} -if(bytes){result.value='b'+quote+string+quote}else if(fstring){result.value=elts}else{result.value=quote+string+quote} -C.raw=raw; -return result} +case "U": +mo=/^[0-9A-F]{0,8}/i.exec(text.substr(antislash_pos+2)) +if(mo[0].length !=8){seq_end=antislash_pos+mo[0].length+1 +$token.value.start[1]=seq_end +throw Error( +"(unicode error) 'unicodeescape' codec can't decode "+ +`bytes in position ${antislash_pos}-${seq_end}: truncated `+ +"\\uXXXX escape")}else{let value=parseInt(mo[0],16) +if(value > 0x10FFFF){throw Error('invalid unicode escape '+mo[0])}else if(value >=0x10000){return[SurrogatePair(value),2+mo[0].length]}else{return[String.fromCharCode(value),2+mo[0].length]}}}} +$B.test_escape=test_escape function unindent(src){ -var lines=src.split('\n'),line,global_indent,indent,unindented_lines=[] -for(var 
line_num=0,len=lines.length;line_num < len;line_num++){line=lines[line_num] -indent=line.match(/^\s*/)[0] -if(indent !=line){ -if(global_indent===undefined){ -if(indent.length==0){ -return src} -global_indent=indent -var start=global_indent.length -unindented_lines.push(line.substr(start))}else if(line.startsWith(global_indent)){unindented_lines.push(line.substr(start))}else{throw SyntaxError("first line starts at "+ -`column ${start}, line ${line_num} at column `+ -line.match(/\s*/).length+'\n '+line)}}else{unindented_lines.push('')}} +var lines=src.split('\n'),line,global_indent,indent,first,unindented_lines=[] +var min_indent +for(var line of lines){if(/^\s*$/.exec(line)){continue} +indent=line.match(/^\s*/)[0].length +if(indent==0){return src} +if(min_indent===undefined){min_indent=indent} +if(indent < min_indent){min_indent=indent}} +for(var line of lines){if(/^\s*$/.exec(line)){unindented_lines.push(line)}else{unindented_lines.push(line.substr(min_indent))}} return unindented_lines.join('\n')} -var unprintable_re=/\p{Cc}|\p{Cf}|\p{Co}|\p{Cs}|\p{Zl}|\p{Zp}|\p{Zs}/u -function handle_errortoken(C,token,token_reader){if(token.string=="'" ||token.string=='"'){raise_syntax_error(C,'unterminated string literal '+ -`(detected at line ${token.start[0]})`)}else if(token.string=='\\'){var nxt=token_reader.read() -if((! nxt)||nxt.type=='NEWLINE'){raise_syntax_error(C,'unexpected EOF while parsing')}else{raise_syntax_error_known_range(C,nxt,nxt,'unexpected character after line continuation character')}}else if(' `$'.indexOf(token.string)==-1){var u=_b_.ord(token.string).toString(16).toUpperCase() -u='U+'+'0'.repeat(Math.max(0,4-u.length))+u -let error_message; -if(unprintable_re.test(token.string)){error_message=`invalid non-printable character ${u}`}else{ -error_message=`invalid character '${token.string}' (${u})`} -raise_syntax_error(C,error_message);} -raise_syntax_error(C)} -const braces_opener={")":"(","]":"[","}":"{"},braces_open="([{",braces_closer={'(':')','{':'}','[':']'} -function check_brace_is_closed(brace,reader){ -var save_reader_pos=reader.position,closer=braces_closer[brace],nb_braces=1 -while(true){var tk=reader.read() -if(tk.type=='OP' && tk.string==brace){nb_braces+=1}else if(tk.type=='OP' && tk.string==closer){nb_braces-=1 -if(nb_braces==0){ -reader.seek(save_reader_pos) -break}}}} -var python_keywords=["class","return","break","for","lambda","try","finally","raise","def","from","nonlocal","while","del","global","with","as","elif","else","if","yield","assert","import","except","raise","in","pass","with","continue","async","await" -] var $token={} -var dispatch_tokens=$B.parser.dispatch_tokens=function(root){var src=root.src -root.token_reader=new $B.TokenReader(src,root.filename) -var braces_stack=[] -var unsupported=[] -var module=root.module -var lnum=root.line_num===undefined ? 
1 :root.line_num -var node=new $Node() -node.line_num=lnum -root.add(node) -var C=null,expect_indent=false,indent=0 -var line2pos={0:0,1:0},line_num=1 -for(var pos=0,len=src.length;pos < len;pos++){if(src[pos]=='\n'){line_num++ -line2pos[line_num]=pos+1}} -var token -while(true){try{token=root.token_reader.read()}catch(err){C=C ||new NodeCtx(node) -if(err.type=='IndentationError'){raise_indentation_error(C,err.message)}else if(err instanceof SyntaxError){if(braces_stack.length > 0){let last_brace=$B.last(braces_stack) -$token.value=last_brace -raise_syntax_error(C,`'${last_brace.string}'`+ -' was never closed')} -var err_msg=err.message -if(err_msg=='EOF in multi-line statement'){err_msg='unexpected EOF while parsing'} -if(err.lineno){raise_error_known_location(_b_.SyntaxError,root.filename,err.lineno,err.col_offset,err.end_lineno,err.end_col_offset,err.line,err.message)}else{raise_syntax_error(C,err_msg)}} -throw err} -if(! token){throw Error('token done without ENDMARKER.')} -$token.value=token -if(token[2]===undefined){console.log('token incomplet',token,'module',module,root) -console.log('src',src)} -if(token.start===undefined){console.log('no start',token)} -lnum=token.start[0] -if(expect_indent && -['INDENT','COMMENT','NL'].indexOf(token.type)==-1){C=C ||new NodeCtx(node) -raise_indentation_error(C,"expected an indented block",expect_indent)} -switch(token.type){case 'ENDMARKER': -if(root.yields_func_check){for(const _yield of root.yields_func_check){$token.value=_yield.position -_yield.check_in_function()}} -if(indent !=0){raise_indentation_error(node.C,'expected an indented block')} -if(node.C===undefined ||node.C.tree.length==0){node.parent.children.pop()} -return -case 'ENCODING': -case 'TYPE_COMMENT': -continue -case 'NL': -if((! node.C)||node.C.tree.length==0){node.line_num++} -continue -case 'COMMENT': -continue -case 'ERRORTOKEN': -C=C ||new NodeCtx(node) -if(token.string !=' '){handle_errortoken(C,token,root.token_reader)} -continue} -switch(token[0]){case 'NAME': -case 'NUMBER': -case 'OP': -case 'STRING': -case 'FSTRING_START': -C=C ||new NodeCtx(node)} -switch(token[0]){case 'NAME': -var name=token[1] -if(python_keywords.indexOf(name)>-1){if(unsupported.indexOf(name)>-1){raise_syntax_error(C,"(Unsupported Python keyword '"+name+"')")} -C=transition(C,name)}else if(name=='not'){C=transition(C,'not')}else if(typeof $operators[name]=='string'){ -C=transition(C,'op',name)}else{C=transition(C,'id',name)} -continue -case 'OP': -var op=token[1] -if((op.length==1 && '()[]{}.,='.indexOf(op)>-1)|| -[':='].indexOf(op)>-1){if(braces_open.indexOf(op)>-1){braces_stack.push(token) -try{check_brace_is_closed(op,root.token_reader)}catch(err){if(err.message=='EOF in multi-line statement'){raise_syntax_error(C,`'${op}' was never closed`)}else{throw err}}}else if(braces_opener[op]){if(braces_stack.length==0){raise_syntax_error(C,"(unmatched '"+op+"')")}else{let last_brace=$B.last(braces_stack) -if(last_brace.string==braces_opener[op]){braces_stack.pop()}else{raise_syntax_error(C,`closing parenthesis '${op}' does not `+ -`match opening parenthesis '`+ -`${last_brace.string}'`)}}} -C=transition(C,token[1])}else if(op==':'){C=transition(C,':') -if(C.node && C.node.is_body_node){node=C.node}}else if(op=='...'){C=transition(C,'ellipsis')}else if(op=='->'){C=transition(C,'annotation')}else if(op==';'){if(C.type=='node' && C.tree.length==0){raise_syntax_error(C,'(statement cannot start with ;)')} -transition(C,'eol') -let new_node=new $Node() -new_node.line_num=token[2][0]+1 -C=new 
NodeCtx(new_node) -node.parent.add(new_node) -node=new_node}else if($augmented_assigns[op]){C=transition(C,'augm_assign',op)}else{C=transition(C,'op',op)} -continue -case 'STRING': -var prepared=prepare_string(C,token[1],token[2]) -if(prepared.value instanceof Array){C=transition(C,'JoinedStr',prepared.value)}else{C=transition(C,'str',prepared.value)} -continue -case 'FSTRING_START': -C=transition(C,'JoinedStr',token[1]) -break -case 'FSTRING_MIDDLE': -C=transition(C,'middle',token[1]) -break -case 'FSTRING_END': -C=transition(C,'end',token[1]) -break -case 'NUMBER': -try{var prepared_num=prepare_number(token[1])}catch(err){raise_syntax_error(C,err.message)} -C=transition(C,prepared_num.type,prepared_num.value) -continue -case 'NEWLINE': -if(C && C.node && C.node.is_body_node){expect_indent=C.node.parent} -C=C ||new NodeCtx(node) -transition(C,'eol') -var new_node=new $Node() -new_node.line_num=token[2][0]+1 -if(node.parent.children.length > 0 && -node.parent.children[0].is_body_node){node.parent.parent.add(new_node)}else{node.parent.add(new_node)} -C=new NodeCtx(new_node) -node=new_node -continue -case 'DEDENT': -indent-- -if(! indent_continuation){node.parent.children.pop() -node.parent.parent.add(node) -C=new NodeCtx(node)} -continue -case 'INDENT': -indent++ -var indent_continuation=false -if(! expect_indent){if(token.line.trim()=='\\'){ -indent_continuation=true}else{C=C ||new NodeCtx(node) -raise_indentation_error(C,'unexpected indent')}} -expect_indent=false -continue}}} -var create_root_node=$B.parser.create_root_node=function(src,module,locals_id,parent_block,line_num){var root=new $Node('module') -root.module=module -root.id=locals_id -root.parent_block=parent_block -root.line_num=line_num -root.indent=-1 -root.imports={} -if(typeof src=="object"){root.is_comp=src.is_comp -root.filename=src.filename -src=src.src} -src=src.replace(/\r\n/gm,"\n") -root.src=src -return root} $B.parse_time=0 $B.py2js=function(src,module,locals_id,parent_scope){ if(typeof module=="object"){module=module.__name__} @@ -4836,20 +805,8 @@ imported=src.imported src=src.src} var locals_is_module=Array.isArray(locals_id) if(locals_is_module){locals_id=locals_id[0]} -var _ast,t0=globalThis.performance.now() -if($B.parser_to_ast){console.log('use standard parser') -_ast=new $B.Parser(src,filename,'file').parse()}else if($B.py_tokens){ -console.log('use generated PEG parser') -var parser=new $B.Parser(src,filename,'file') -_ast=$B._PyPegen_parse(parser) -console.log('tokens',parser.tokens) -if(_ast===undefined){parser=new $B.Parser(src,filename,'file') -parser.call_invalid_rules=true -$B._PyPegen_parse(parser) -var err_token=$B.last(parser.tokens) -raise_error_known_location(_b_.SyntaxError,filename,err_token.lineno,err_token.col_offset,err_token.end_lineno,err_token.end_col_offset,err_token.line,'invalid syntax')}}else{var root=create_root_node({src,filename},module,locals_id,parent_scope) -dispatch_tokens(root) -_ast=root.ast()} +var t0=globalThis.performance.now() +var parser=new $B.Parser(src,filename,'file'),_ast=$B._PyPegen.run_parser(parser) $B.parse_time+=globalThis.performance.now()-t0 var future=$B.future_features(_ast,filename) var symtable=$B._PySymtable_Build(_ast,filename,future) @@ -4963,25 +920,24 @@ $B.hasOwnProperty("VFS")){$B.tasks.push([$B.idb_open])}} var src for(var worker of webworkers){if(worker.src){ $B.tasks.push([$B.ajax_load_script,{script:worker,name:worker.id,url:worker.src,is_ww:true}])}else{ -var source=(worker.innerText ||worker.textContent) -source=unindent(source) 
-source=source.replace(/^\n/,'') $B.webworkers[worker.id]=worker filename=$B.script_filename=$B.strip_host( $B.script_path+"#"+worker.id) +var source=(worker.innerText ||worker.textContent) +source=unindent(source) +source=source.replace(/^\n/,'') $B.url2name[filename]=worker.id $B.file_cache[filename]=source $B.scripts[filename]=worker $B.dispatch_load_event(worker)}} for(var script of scripts){module_name=script_to_id.get(script) if(script.src){ -$B.tasks.push([$B.ajax_load_script,{script,name:module_name,url:script.src,id:script.id}])}else{ +$B.tasks.push([$B.ajax_load_script,{script,name:module_name,url:script.src,id:script.id}])}else{filename=$B.script_filename=$B.strip_host( +$B.script_path+"#"+module_name) src=(script.innerHTML ||script.textContent) src=unindent(src) src=src.replace(/^\n/,'') if(src.endsWith('\n')){src=src.substr(0,src.length-1)} -filename=$B.script_filename=$B.strip_host( -$B.script_path+"#"+module_name) $B.file_cache[filename]=src $B.url2name[filename]=module_name $B.scripts[filename]=script @@ -5005,8 +961,6 @@ return $B.handle_error($B.exception(err))} var _script={__doc__:get_docstring(root._ast),js:js,__name__:name,__file__:url,script_element:script} $B.tasks.push(["execute",_script]) if(run_loop){$B.loop()}} -$B.$operators=$operators -$B.$Node=$Node $B.brython=brython})(__BRYTHON__) globalThis.brython=__BRYTHON__.brython if(__BRYTHON__.isNode){global.__BRYTHON__=__BRYTHON__ @@ -5473,10 +1427,12 @@ $B.unpacker=function(obj,nb_targets,has_starred){ var position,position_rank=3 if(has_starred){var nb_after_starred=arguments[3] position_rank++} -if($B.pep657){position=$B.decode_position(arguments[position_rank])} +position=$B.decode_position(arguments[position_rank]) var t=_b_.list.$factory(obj),right_length=t.length,left_length=nb_targets+(has_starred ? nb_after_starred-1 :0) -if(right_length < left_length){var exc=_b_.ValueError.$factory(`not enough values to unpack `+ -`(expected ${left_length}, got ${right_length})`) +if((! has_starred &&(right_length < nb_targets))|| +(has_starred &&(right_length < nb_targets-1))){var exc=_b_.ValueError.$factory(`not enough values to unpack `+ +`(expected ${has_starred ? ' at least ' : ''} `+ +`${left_length}, got ${right_length})`) if(position){$B.set_exception_offsets(exc,position)} throw exc} if((! has_starred)&& right_length > left_length){var exc=_b_.ValueError.$factory("too many values to unpack "+ @@ -6720,7 +2676,7 @@ var filename=$.co_filename=$.filename var interactive=$.mode=="single" &&($.flags & 0x200) $B.file_cache[filename]=$.source $B.url2name[filename]=module_name -if($.flags & $B.PyCF_TYPE_COMMENTS){throw _b_.NotImplementedError.$factory('Brython does not currently support parsing of type comments')} +if($.flags & $B.PyCF_TYPE_COMMENTS){} if($B.$isinstance($.source,_b_.bytes)){var encoding='utf-8',lfpos=$.source.source.indexOf(10),first_line,second_line if(lfpos==-1){first_line=$.source}else{first_line=_b_.bytes.$factory($.source.source.slice(0,lfpos))} first_line=_b_.bytes.decode(first_line,'latin-1') @@ -6738,77 +2694,39 @@ if(! $B.$isinstance(filename,[_b_.bytes,_b_.str])){ $B.warn(_b_.DeprecationWarning,`path should be string, bytes, or os.PathLike, `+ `not ${$B.class_name(filename)}`)} if(interactive && ! 
$.source.endsWith("\n")){ -var lines=$.source.split("\n") -if($B.last(lines).startsWith(" ")){throw _b_.SyntaxError.$factory("unexpected EOF while parsing")}} +var lines=$.source.split("\n"),last_line=$B.last(lines) +if(last_line.startsWith(" ")){var msg="unexpected EOF while parsing",exc=_b_.SyntaxError.$factory() +exc.filename=filename +exc.lineno=exc.end_lineno=lines.length-1 +exc.offset=0 +exc.end_offset=last_line.length-1 +exc.text=last_line +exc.args=[msg,$B.fast_tuple([filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset])] +throw exc}} if($.source.__class__ && $.source.__class__.__module__=='ast'){ $B.imported._ast._validate($.source) $._ast=$.source delete $.source return $} var _ast,parser -if($B.parser_to_ast){try{var parser_mode=$.mode=='eval' ? 'eval' :'file' -parser=new $B.Parser($.source,filename,parser_mode) -_ast=parser.parse()}catch(err){if($.mode=='single'){try{parser.tokens.next }catch(err2){ -var tokens=parser.tokens,tester=tokens[tokens.length-2] -if((tester.type=="NEWLINE" &&($.flags & 0x4000))|| -tester.type=="DEDENT" &&($.flags & 0x200)){err.__class__=_b_.SyntaxError -err.args[0]='incomplete input'}}} -throw err} -if($.mode=='single' && _ast.body.length==1 && -_ast.body[0]instanceof $B.ast.Expr){ -parser=new $B.Parser($.source,filename,'eval') -_ast=parser.parse() -$.single_expression=true} -if($.flags==$B.PyCF_ONLY_AST){delete $B.url2name[filename] -let res=$B.ast_js_to_py(_ast) -res.$js_ast=_ast -return res}}else if($B.py_tokens){ try{var parser_mode=$.mode=='eval' ? 'eval' :'file' parser=new $B.Parser($.source,filename,parser_mode) -_ast=$B._PyPegen_parse(parser) -if(_ast===undefined){parser=new $B.Parser(src,filename,'file') -parser.call_invalid_rules=true -$B._PyPegen_parse(parser) -var err_token=$B.last(parser.tokens) -$B.raise_error_known_location(_b_.SyntaxError,filename,err_token.lineno,err_token.col_offset,err_token.end_lineno,err_token.end_col_offset,err_token.line,'invalid syntax')}}catch(err){if($.mode=='single'){try{parser.tokens.next }catch(err2){ -var tokens=parser.tokens,tester=tokens[tokens.length-2] -if((tester.type=="NEWLINE" &&($.flags & 0x4000))|| -tester.type=="DEDENT" &&($.flags & 0x200)){err.__class__=_b_.SyntaxError -err.args[0]='incomplete input'}}} +parser.flags=$.flags +_ast=$B._PyPegen.run_parser(parser)}catch(err){if($.mode=='single'){var tester=parser.tokens[parser.tokens.length-2] +if(tester &&( +(tester.type=="NEWLINE" &&($.flags & 0x4000))|| +(tester.type=="DEDENT" &&($.flags & 0x200)))){err.__class__=_b_.SyntaxError +err.args[0]='incomplete input'}} throw err} if($.mode=='single' && _ast.body.length==1 && _ast.body[0]instanceof $B.ast.Expr){ parser=new $B.Parser($.source,filename,'eval') -_ast=parser.parse() +_ast=$B._PyPegen.run_parser(parser) $.single_expression=true} if($.flags==$B.PyCF_ONLY_AST){delete $B.url2name[filename] let res=$B.ast_js_to_py(_ast) res.$js_ast=_ast -return res}}else{var root=$B.parser.create_root_node( -{src:$.source,filename},module_name,module_name) -root.mode=$.mode -root.parent_block=$B.builtins_scope -try{$B.parser.dispatch_tokens(root,$.source) -_ast=root.ast()}catch(err){if($.mode=='single' && root.token_reader.read()===undefined){ -let tokens=root.token_reader.tokens,tester=tokens[tokens.length-2] -if((tester.type=="NEWLINE" &&($.flags & 0x4000))|| -tester.type=="DEDENT" &&($.flags & 0x200)){err.__class__=_b_.SyntaxError -err.args[0]='incomplete input'}} -throw err} -if($.mode=='single' && _ast.body.length==1 && -_ast.body[0]instanceof $B.ast.Expr){ 
-root=$B.parser.create_root_node( -{src:$.source,filename},module_name,module_name) -root.mode='eval' -$.single_expression=true -root.parent_block=$B.builtins_scope -$B.parser.dispatch_tokens(root,$.source) -_ast=root.ast()} -delete $B.url2name[filename] -if($.flags==$B.PyCF_ONLY_AST){$B.create_python_ast_classes() -let res=$B.ast_js_to_py(_ast) -res.$js_ast=_ast -return res}} +return res} delete $B.url2name[filename] $._ast=$B.ast_js_to_py(_ast) $._ast.$js_ast=_ast @@ -6911,20 +2829,9 @@ var _frame_obj=$B.frame_obj frame.$lineno=1 if(src.__class__===code){_ast=src._ast if(_ast.$js_ast){_ast=_ast.$js_ast}else{_ast=$B.ast_py_to_js(_ast)}} -try{if(! _ast){if($B.parser_to_ast){var _mode=mode=='eval' ? 'eval' :'file' -_ast=new $B.Parser(src,filename,_mode).parse()}else if($B.py_tokens){ -var _mode=mode=='eval' ? 'eval' :'file' +try{if(! _ast){var _mode=mode=='eval' ? 'eval' :'file' var parser=new $B.Parser(src,filename,_mode) -_ast=$B._PyPegen_parse(parser) -if(_ast===undefined){parser=new $B.Parser(src,filename,'file') -parser.call_invalid_rules=true -$B._PyPegen_parse(parser) -var err_token=$B.last(parser.tokens) -$B.raise_error_known_location(_b_.SyntaxError,filename,err_token.lineno,err_token.col_offset,err_token.end_lineno,err_token.end_col_offset,err_token.line,'invalid syntax')}}else{var root=$B.parser.create_root_node(src,'',frame[0],frame[2],1) -root.mode=mode -root.filename=filename -$B.parser.dispatch_tokens(root) -_ast=root.ast()}} +_ast=$B._PyPegen.run_parser(parser)} var future=$B.future_features(_ast,filename),symtable=$B._PySymtable_Build(_ast,filename,future),js_obj=$B.js_from_root({ast:_ast,symtable,filename,src,namespaces:{local_name,exec_locals,global_name,exec_globals}}),js=js_obj.js}catch(err){if(err.args){if(err.args[1]){exec_locals.$lineno=err.args[1][1]}}else{console.log('JS Error',err.message)} $B.frame_obj=save_frame_obj throw err} @@ -7248,7 +3155,10 @@ var len=_b_.len=function(obj){check_nb_args_no_kw('len',1,arguments) var klass=obj.__class__ ||$B.get_class(obj) try{var method=$B.$getattr(klass,'__len__')}catch(err){throw _b_.TypeError.$factory("object of type '"+ $B.class_name(obj)+"' has no len()")} -return $B.$call(method)(obj)} +let res=$B.$call(method)(obj) +if(!$B.$isinstance(res,_b_.int)){throw _b_.TypeError.$factory(`'${$B.class_name(res)}' object cannot be interpreted as an integer`)} +if(!$B.rich_comp('__ge__',res,0)){throw _b_.ValueError.$factory('ValueError: __len__() should return >= 0')} +return res} _b_.locals=function(){ check_nb_args('locals',0,arguments) var locals_obj=$B.frame_obj.frame[1] @@ -7587,7 +3497,7 @@ throw err}}} var $Reader=$B.make_class("Reader") $Reader.__bool__=function(){return true} $Reader.__enter__=function(self){return self} -$Reader.__exit__=function(){return false} +$Reader.__exit__=function(self){$Reader.close(self)} $Reader.__init__=function(_self,initial_value=''){_self.$content=initial_value _self.$counter=0} $Reader.__iter__=function(self){ @@ -8224,6 +4134,7 @@ $B.set_func_names(traceback,"builtins") var frame=$B.frame=$B.make_class("frame",function(frame_list){frame_list.__class__=frame return frame_list} ) +frame.__bool__=function(){return true} frame.__delattr__=function(_self,attr){if(attr=="f_trace"){_self.$f_trace=_b_.None}} frame.__dir__=function(){return _b_.object.__dir__(frame).concat(['clear','f_back','f_builtins','f_code','f_globals','f_lasti','f_lineno','f_locals','f_trace','f_trace_lines','f_trace_opcodes'])} frame.__getattr__=function(_self,attr){ @@ -8234,7 +4145,6 @@ frame_obj=frame_obj.prev} 
if(frame_obj.prev !==null){return frame.$factory(frame_obj.prev.frame)} return _b_.None}else if(attr=="clear"){return function(){}}else if(attr=="f_trace"){if(_self.$f_trace===undefined){return _b_.None} return _self.$f_trace} -console.log('no attr',attr,'for frame',_self) throw $B.attr_error(attr,_self)} frame.__setattr__=function(_self,attr,value){if(attr=="f_trace"){ _self.$f_trace=value}} @@ -8599,7 +4509,7 @@ if($B.get_option('debug',err)> 1){console.log('error args',err.args[1]) console.log('err line',line) console.log('indent',indent)} var start=err.offset-indent-1,end_offset=err.end_offset-1+ -(err.end_offset==err.offset ? 1 :0),marks=' '+' '.repeat(start),nb_marks=1 +(err.end_offset==err.offset ? 1 :0),marks=' '+' '.repeat(Math.max(0,start)),nb_marks=1 if(err.end_lineno){if(err.end_lineno > err.lineno){nb_marks=line.length-start-indent}else{nb_marks=end_offset-start-indent} if(nb_marks==0 && err.end_offset==line.substr(indent).length){nb_marks=1}} @@ -9280,6 +5190,8 @@ case "utf-8": case "utf8": case "U8": case "UTF": +if(globalThis.TextDecoder){var decoder=new TextDecoder('utf-8',{fatal:true}),array=new Uint8Array(b) +try{return decoder.decode(array)}catch(err){}} var pos=0,err_info while(pos < b.length){let byte=b[pos] err_info=null @@ -9373,6 +5285,9 @@ var t=[],pos=0,enc=normalise(encoding) switch(enc){case "utf-8": case "utf_8": case "utf8": +if(globalThis.TextEncoder){var encoder=new TextEncoder('utf-8',{fatal:true}) +try{var array=encoder.encode(s) +return fast_bytes(Array.from(array))}catch(err){}} for(let i=0,len=s.length;i < len;i++){let cp=s.charCodeAt(i) if(cp <=0x7f){t.push(cp)}else if(cp <=0x7ff){t.push(0xc0+(cp >> 6),0x80+(cp & 0x3f))}else if(cp <=0xffff){t.push(0xe0+(cp >> 12),0x80+((cp & 0xfff)>> 6),0x80+(cp & 0x3f))}else{console.log("4 bytes")}} break @@ -9763,7 +5678,7 @@ Module.__setattr__=function(self,attr,value){if(self.__name__=="__builtins__"){ $B.builtins[attr]=value}else{self[attr]=value}} $B.set_func_names(Module,"builtins") $B.make_import_paths=function(filename){ -var filepath=$B.domain ? $B.domain+'/'+filename :filename +var filepath=$B.script_domain ? $B.script_domain+'/'+filename :filename var elts=filepath.split('/') elts.pop() var script_dir=elts.join('/'),path=[$B.brython_path+'Lib',$B.brython_path+'libs',script_dir,$B.brython_path+'Lib/site-packages'] @@ -9816,7 +5731,7 @@ modobj[attr].$in_js_module=true}else if($B.$isinstance(modobj[attr],_b_.type)&& ! modobj[attr].hasOwnProperty('__module__')){modobj[attr].__module__=_module.__name__}} return true} function run_py(module_contents,path,module,compiled){ -var filename=$B.strip_host(path) +var filename=module.__file__ $B.file_cache[filename]=module_contents $B.url2name[filename]=module.__name__ var root,js,mod_name=module.__name__, @@ -10421,7 +6336,7 @@ var str_format=function(val,flags){ flags.pad_char=" " return format_padding(str.$factory(val),flags)} var num_format=function(val,flags){number_check(val,flags) -if($B.$isinstance(val,_b_.float)){val=parseInt(val.value)}else if(! $B.$isinstance(val,_b_.int)){val=parseInt(val)} +if($B.$isinstance(val,_b_.float)){val=parseInt(val.value)}else if(! $B.$isinstance(val,_b_.int)){val=parseInt(val)}else if($B.$isinstance(val,_b_.bool)){val=val ? 
1 :0} var s=format_int_precision(val,flags) if(flags.pad_char==="0"){if(val < 0){s=s.substring(1) return "-"+format_padding(s,flags,true)} @@ -10511,7 +6426,7 @@ format_float_precision(val,upper,flags,_floating_exp_helper),flags)} $B.formatters={floating_point_format,floating_point_decimal_format,floating_point_exponential_format} var signed_hex_format=function(val,upper,flags){var ret if(! $B.$isinstance(val,_b_.int)){throw _b_.TypeError.$factory( -`%X format: an integer is required, not ${$B.class_name(val)}`)} +`%X format: an integer is required, not ${$B.class_name(val)}`)}else if($B.$isinstance(val,_b_.bool)){val=val ? 1 :0} if(val.__class__===$B.long_int){ret=val.value.toString(16)}else{ret=parseInt(val) ret=ret.toString(16)} ret=format_int_precision(ret,flags) @@ -10865,15 +6780,22 @@ str.isascii=function(){ var $=$B.args("isascii",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self) for(var i=0,len=_self.length;i < len;i++){if(_self.charCodeAt(i)> 127){return false}} return true} +var unicode_categories_contain_character=function(categories,cp){for(var cat of categories){console.log(cat,cp); +if($B.in_unicode_category(cat,cp)){return true}} +return false} +var alpha_categories=['Ll','Lu','Lm','Lt','Lo'] +var alnum_categories=['Ll','Lu','Lm','Lt','Lo','Nd'] str.isalnum=function(){ -var $=$B.args("isalnum",1,{self:null},["self"],arguments,{},null,null),cp,_self=to_string($.self) -for(var char of _self){cp=_b_.ord(char) -for(var cat of['Ll','Lu','Lm','Lt','Lo','Nd','digits','numeric']){if(! $B.in_unicode_category(cat,cp)){return false}}} +var $=$B.args("isalnum",1,{self:null},["self"],arguments,{},null,null) +var _self=to_string($.self); +if(_self.length==0){return false} +for(var char of _self){if(!unicode_categories_contain_character(alnum_categories,_b_.ord(char))){return false}} return true} str.isalpha=function(){ -var $=$B.args("isalpha",1,{self:null},["self"],arguments,{},null,null),cp,_self=to_string($.self) -for(var char of _self){cp=_b_.ord(char) -for(var cat of['Ll','Lu','Lm','Lt','Lo']){if(! $B.in_unicode_category(cat,cp)){return false}}} +var $=$B.args("isalpha",1,{self:null},["self"],arguments,{},null,null) +var _self=to_string($.self); +if(_self.length==0){return false} +for(var char of _self){if(!unicode_categories_contain_character(alpha_categories,_b_.ord(char))){return false}} return true} str.isdecimal=function(){ var $=$B.args("isdecimal",1,{self:null},["self"],arguments,{},null,null),cp,_self=to_string($.self) @@ -11474,7 +7396,8 @@ return self.valueOf()} int.__index__=(self)=> int_value(self) int.__init__=()=> _b_.None int.__int__=(self)=> self -int.__invert__=(self)=> ~self +int.__invert__=function(self){if(Math.abs(self)< 2**31){return ~self} +return $B.rich_op('__sub__',$B.rich_op('__mul__',self,-1),1)} int.__mod__=function(self,other){ if($B.$isinstance(other,_b_.tuple)&& other.length==1){other=other[0]} if(other.__class__===$B.long_int){self=BigInt(self) @@ -11497,6 +7420,7 @@ if(self_as_int.__class__===$B.long_int){return $B.long_int.__neg__(self_as_int)} return-self} int.__new__=function(cls,value,base){if(cls===undefined){throw _b_.TypeError.$factory("int.__new__(): not enough arguments")}else if(! 
$B.$isinstance(cls,_b_.type)){throw _b_.TypeError.$factory("int.__new__(X): X is not a type object")} if(cls===int){return int.$factory(value,base)} +if(cls===bool){throw _b_.TypeError.$factory("int.__new__(bool) is not safe, use bool.__new__()")} return{ __class__:cls,__dict__:$B.empty_dict(),$brython_value:int.$factory(value,base),toString:function(){return value}}} int.__pos__=function(self){return self} @@ -11700,7 +7624,7 @@ if(test){console.log('bool(obj)',obj,'bool_class',bool_class,'klass',klass,'appl console.log('$B.$call(bool_method)',bool_method+'')} if(bool_method===missing){var len_method=$B.$getattr(klass,'__len__',missing) if(len_method===missing){return true} -return len_method(obj)> 0}else{var res=bool_class ? +return _b_.len(obj)> 0}else{var res=bool_class ? $B.$call(bool_method)(obj): $B.$call(bool_method)() if(res !==true && res !==false){throw _b_.TypeError.$factory("__bool__ should return "+ @@ -11722,13 +7646,24 @@ bool.__repr__=function(self){$B.builtins_repr_check(bool,arguments) return self ? "True" :"False"} bool.__xor__=function(self,other){if($B.$isinstance(other,bool)){return self ^ other ? true :false}else if($B.$isinstance(other,int)){return int.__xor__(bool.__index__(self),int.__index__(other))} return _b_.NotImplemented} +bool.__invert__=function(self){$B.warn(_b_.DeprecationWarning,`Bitwise inversion '~' on bool is deprecated.This returns the bitwise inversion of the underlying int object and is usually not what you expect from negating a bool.Use the 'not' operator for boolean negation or ~int(x) if you really want the bitwise inversion of the underlying int.`) +return int.__invert__(self)} bool.$factory=function(){ -var $=$B.args("bool",1,{x:null},["x"],arguments,{x:false},null,null) +var $=$B.args("bool",1,{x:null},["x"],arguments,{x:false},null,null,1) return $B.$bool($.x,true)} +bool.__new__=function(cls,value){if(cls===undefined){throw _b_.TypeError.$factory("bool.__new__(): not enough arguments")}else if(!$B.$isinstance(cls,_b_.type)){throw _b_.TypeError.$factory(`bool.__new__(X): X is not a type object (${$B.class_name(cls) })`)}else if(!_b_.issubclass(cls,bool)){let class_name=$B.class_name(cls) +throw _b_.TypeError.$factory(`bool.__new__(${class_name}): ${class_name} is not a subtype of bool`)} +if(arguments.length > 2){throw _b_.TypeError.$factory(`bool expected at most 1 argument, got ${arguments.length - 1}`)} +return bool.$factory(value)} +bool.from_bytes=function(){var $=$B.args("from_bytes",3,{bytes:null,byteorder:null,signed:null },["bytes","byteorder","signed"],arguments,{byteorder:'big',signed:false },null,null) +let int_result=int.from_bytes($.bytes,$.byteorder,$.signed) +return bool.$factory(int_result)} bool.numerator=int.numerator bool.denominator=int.denominator -bool.real=int.real +bool.real=(self)=> self ? 
1 :0 bool.imag=int.imag +for(var attr of['real']){bool[attr].setter=(function(x){return function(self){throw _b_.AttributeError.$factory(`attribute '${x}' of `+ +`'${$B.class_name(self)}' objects is not writable`)}})(attr)} _b_.bool=bool $B.set_func_names(bool,"builtins")})(__BRYTHON__) ; @@ -13549,99 +9484,6 @@ _b_.tuple=tuple _b_.object.__bases__=tuple.$factory() _b_.type.__bases__=$B.fast_tuple([_b_.object])})(__BRYTHON__) ; -(function($B){ -var _b_=$B.builtins -var $GeneratorReturn={} -$B.generator_return=function(value){return{__class__:$GeneratorReturn,value:value}} -$B.generator=$B.make_class("generator",function(func,name){ -var res=function(){var gen=func.apply(null,arguments) -gen.$name=name ||'generator' -gen.$func=func -gen.$has_run=false -return{ -__class__:$B.generator,js_gen:gen}} -res.$infos=func.$infos -res.$is_genfunc=true -res.$name=name -return res} -) -$B.generator.__iter__=function(self){return self} -$B.generator.__next__=function(self){return $B.generator.send(self,_b_.None)} -$B.generator.__str__=function(self){var name=self.js_gen.$name ||'generator' -if(self.js_gen.$func && self.js_gen.$func.$infos){name=self.js_gen.$func.$infos.__qualname__} -return ``} -$B.generator.close=function(self){var save_frame_obj=$B.frame_obj -if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)} -try{$B.generator.throw(self,_b_.GeneratorExit.$factory())}catch(err){if(! $B.is_exc(err,[_b_.GeneratorExit,_b_.StopIteration])){$B.frame_obj=save_frame_obj -throw _b_.RuntimeError.$factory("generator ignored GeneratorExit")}} -$B.frame_obj=save_frame_obj} -$B.generator.send=function(self,value){ -var gen=self.js_gen -gen.$has_run=true -if(gen.$finished){throw _b_.StopIteration.$factory(value)} -if(gen.gi_running===true){throw _b_.ValueError.$factory("generator already executing")} -gen.gi_running=true -var save_frame_obj=$B.frame_obj -if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)} -try{var res=gen.next(value)}catch(err){gen.$finished=true -$B.frame_obj=save_frame_obj -throw err} -if($B.frame_obj !==null && $B.frame_obj.frame===self.$frame){$B.leave_frame()} -$B.frame_obj=save_frame_obj -if(res.value && res.value.__class__===$GeneratorReturn){gen.$finished=true -throw _b_.StopIteration.$factory(res.value.value)} -gen.gi_running=false -if(res.done){throw _b_.StopIteration.$factory(res.value)} -return res.value} -$B.generator.throw=function(){var $=$B.args('throw',4,{self:null,type:null,value:null,traceback:null},['self','type','value','traceback'],arguments,{value:_b_.None,traceback:_b_.None},null,null),self=$.self,type=$.type,value=$.value,traceback=$.traceback -var gen=self.js_gen,exc=type -if(exc.$is_class){if(! 
_b_.issubclass(type,_b_.BaseException)){throw _b_.TypeError.$factory("exception value must be an "+ -"instance of BaseException")}else if(value===undefined ||value===_b_.None){exc=$B.$call(exc)()}else if($B.$isinstance(value,type)){exc=value}}else{if(value===_b_.None){value=exc}else{exc=$B.$call(exc)(value)}} -if(traceback !==_b_.None){exc.$traceback=traceback} -var save_frame_obj=$B.frame_obj -if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)} -var res=gen.throw(exc) -$B.frame_obj=save_frame_obj -if(res.done){throw _b_.StopIteration.$factory(res.value)} -return res.value} -$B.set_func_names($B.generator,"builtins") -$B.async_generator=$B.make_class("async_generator",function(func){var f=function(){var gen=func.apply(null,arguments) -var res=Object.create(null) -res.__class__=$B.async_generator -res.js_gen=gen -return res} -return f} -) -$B.async_generator.__aiter__=function(self){return self} -$B.async_generator.__anext__=function(self){return $B.async_generator.asend(self,_b_.None)} -$B.async_generator.aclose=function(self){self.js_gen.$finished=true -return _b_.None} -$B.async_generator.asend=async function(self,value){var gen=self.js_gen -if(gen.$finished){throw _b_.StopAsyncIteration.$factory(value)} -if(gen.ag_running===true){throw _b_.ValueError.$factory("generator already executing")} -gen.ag_running=true -var save_frame_obj=$B.frame_obj -if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)} -try{var res=await gen.next(value)}catch(err){gen.$finished=true -$B.frame_obj=save_frame_obj -throw err} -if($B.frame_obj !==null && $B.frame_obj.frame===self.$frame){$B.leave_frame()} -$B.frame_obj=save_frame_obj -if(res.done){throw _b_.StopAsyncIteration.$factory(value)} -if(res.value.__class__===$GeneratorReturn){gen.$finished=true -throw _b_.StopAsyncIteration.$factory(res.value.value)} -gen.ag_running=false -return res.value} -$B.async_generator.athrow=async function(self,type,value,traceback){var gen=self.js_gen,exc=type -if(exc.$is_class){if(! 
_b_.issubclass(type,_b_.BaseException)){throw _b_.TypeError.$factory("exception value must be an "+ -"instance of BaseException")}else if(value===undefined){value=$B.$call(exc)()}}else{if(value===undefined){value=exc}else{exc=$B.$call(exc)(value)}} -if(traceback !==undefined){exc.$traceback=traceback} -var save_frame_obj=$B.frame_obj -if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)} -await gen.throw(value) -$B.frame_obj=save_frame_obj} -$B.set_func_names($B.async_generator,"builtins")})(__BRYTHON__) -; (function($B){var _b_=$B.builtins function to_simple(value){switch(typeof value){case 'string': case 'number': @@ -13695,7 +9537,7 @@ return jsobj case 'string': return $B.String(jsobj)} if(Array.isArray(jsobj)){ -Object.defineProperty(jsobj,"$is_js_array",{value:true}); +try{Object.defineProperty(jsobj,"$is_js_array",{value:true});}catch(err){} return jsobj} let pyobj=jsobj[PYOBJ] if(pyobj !==undefined){return pyobj} @@ -13840,7 +9682,7 @@ klass.__mro__=[py_parent].concat(klass.__mro__)} var frame=$B.frame_obj.frame if(frame){$B.set_func_names(klass,frame[2])} return klass} -$B.JSObj.__getattribute__=function(_self,attr){var test=false +$B.JSObj.__getattribute__=function(_self,attr){var test=attr=="line" if(test){console.log("__ga__",_self,attr)} if(attr=="new" && typeof _self=="function"){ var new_func @@ -13865,7 +9707,7 @@ return jsobj2pyobj(class_attr.apply(null,args))}}else{return class_attr}} throw $B.attr_error(attr,_self)} if(js_attr !==null && js_attr.toString && -typeof js_attr.toString=='function' && +typeof js_attr=='function' && js_attr.toString().startsWith('class ')){ return jsclass2pyclass(js_attr)}else if(typeof js_attr==='function'){ return jsobj2pyobj(js_attr,_self.$js_func ||_self)}else{if(test){console.log('jsobj2pyobj on',js_attr)} @@ -13921,7 +9763,9 @@ if(obj===null ||obj===undefined){return $B.jsobj2pyobj(obj)} if(obj.__class__){ return obj} if(Array.isArray(obj)){return obj.map(convert_to_python)} -if($B.$isinstance(obj,$B.JSObj)){var res=$B.empty_dict() +if($B.$isinstance(obj,$B.JSObj)){if(typeof obj=='number'){ +return $B.fast_float(obj)} +var res=$B.empty_dict() for(var key in obj){_b_.dict.$setitem_string(res,key,convert_to_python(obj[key]))} return res} return $B.jsobj2pyobj(obj)} @@ -14034,6 +9878,99 @@ new_js_class.$is_js_class=true return new_js_class} $B.set_func_names($B.JSMeta,"builtins")})(__BRYTHON__) ; +(function($B){ +var _b_=$B.builtins +var $GeneratorReturn={} +$B.generator_return=function(value){return{__class__:$GeneratorReturn,value:value}} +$B.generator=$B.make_class("generator",function(func,name){ +var res=function(){var gen=func.apply(null,arguments) +gen.$name=name ||'generator' +gen.$func=func +gen.$has_run=false +return{ +__class__:$B.generator,js_gen:gen}} +res.$infos=func.$infos +res.$is_genfunc=true +res.$name=name +return res} +) +$B.generator.__iter__=function(self){return self} +$B.generator.__next__=function(self){return $B.generator.send(self,_b_.None)} +$B.generator.__str__=function(self){var name=self.js_gen.$name ||'generator' +if(self.js_gen.$func && self.js_gen.$func.$infos){name=self.js_gen.$func.$infos.__qualname__} +return ``} +$B.generator.close=function(self){var save_frame_obj=$B.frame_obj +if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)} +try{$B.generator.throw(self,_b_.GeneratorExit.$factory())}catch(err){if(! 
$B.is_exc(err,[_b_.GeneratorExit,_b_.StopIteration])){$B.frame_obj=save_frame_obj +throw _b_.RuntimeError.$factory("generator ignored GeneratorExit")}} +$B.frame_obj=save_frame_obj} +$B.generator.send=function(self,value){ +var gen=self.js_gen +gen.$has_run=true +if(gen.$finished){throw _b_.StopIteration.$factory(value)} +if(gen.gi_running===true){throw _b_.ValueError.$factory("generator already executing")} +gen.gi_running=true +var save_frame_obj=$B.frame_obj +if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)} +try{var res=gen.next(value)}catch(err){gen.$finished=true +$B.frame_obj=save_frame_obj +throw err} +if($B.frame_obj !==null && $B.frame_obj.frame===self.$frame){$B.leave_frame()} +$B.frame_obj=save_frame_obj +if(res.value && res.value.__class__===$GeneratorReturn){gen.$finished=true +throw _b_.StopIteration.$factory(res.value.value)} +gen.gi_running=false +if(res.done){throw _b_.StopIteration.$factory(res.value)} +return res.value} +$B.generator.throw=function(){var $=$B.args('throw',4,{self:null,type:null,value:null,traceback:null},['self','type','value','traceback'],arguments,{value:_b_.None,traceback:_b_.None},null,null),self=$.self,type=$.type,value=$.value,traceback=$.traceback +var gen=self.js_gen,exc=type +if(exc.$is_class){if(! _b_.issubclass(type,_b_.BaseException)){throw _b_.TypeError.$factory("exception value must be an "+ +"instance of BaseException")}else if(value===undefined ||value===_b_.None){exc=$B.$call(exc)()}else if($B.$isinstance(value,type)){exc=value}}else{if(value===_b_.None){value=exc}else{exc=$B.$call(exc)(value)}} +if(traceback !==_b_.None){exc.$traceback=traceback} +var save_frame_obj=$B.frame_obj +if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)} +var res=gen.throw(exc) +$B.frame_obj=save_frame_obj +if(res.done){throw _b_.StopIteration.$factory(res.value)} +return res.value} +$B.set_func_names($B.generator,"builtins") +$B.async_generator=$B.make_class("async_generator",function(func){var f=function(){var gen=func.apply(null,arguments) +var res=Object.create(null) +res.__class__=$B.async_generator +res.js_gen=gen +return res} +return f} +) +$B.async_generator.__aiter__=function(self){return self} +$B.async_generator.__anext__=function(self){return $B.async_generator.asend(self,_b_.None)} +$B.async_generator.aclose=function(self){self.js_gen.$finished=true +return _b_.None} +$B.async_generator.asend=async function(self,value){var gen=self.js_gen +if(gen.$finished){throw _b_.StopAsyncIteration.$factory(value)} +if(gen.ag_running===true){throw _b_.ValueError.$factory("generator already executing")} +gen.ag_running=true +var save_frame_obj=$B.frame_obj +if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)} +try{var res=await gen.next(value)}catch(err){gen.$finished=true +$B.frame_obj=save_frame_obj +throw err} +if($B.frame_obj !==null && $B.frame_obj.frame===self.$frame){$B.leave_frame()} +$B.frame_obj=save_frame_obj +if(res.done){throw _b_.StopAsyncIteration.$factory(value)} +if(res.value.__class__===$GeneratorReturn){gen.$finished=true +throw _b_.StopAsyncIteration.$factory(res.value.value)} +gen.ag_running=false +return res.value} +$B.async_generator.athrow=async function(self,type,value,traceback){var gen=self.js_gen,exc=type +if(exc.$is_class){if(! 
_b_.issubclass(type,_b_.BaseException)){throw _b_.TypeError.$factory("exception value must be an "+ +"instance of BaseException")}else if(value===undefined){value=$B.$call(exc)()}}else{if(value===undefined){value=exc}else{exc=$B.$call(exc)(value)}} +if(traceback !==undefined){exc.$traceback=traceback} +var save_frame_obj=$B.frame_obj +if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)} +await gen.throw(value) +$B.frame_obj=save_frame_obj} +$B.set_func_names($B.async_generator,"builtins")})(__BRYTHON__) +; (function($B){var _b_=$B.builtins,object=_b_.object,_window=globalThis function convertDomValue(v){if(v===null ||v===undefined){return _b_.None} return $B.jsobj2pyobj(v)} @@ -14831,7 +10768,7 @@ throw _b_.TypeError.$factory(`object ${$B.class_name(obj)} `+ `can't be used in 'await' expression`)}})(__BRYTHON__) ; -(function($B){$B.builtin_class_flags={builtins:{1074287874:['ReferenceError','SyntaxWarning','ConnectionAbortedError','Exception','OSError','KeyboardInterrupt','PermissionError','UnicodeTranslateError','InterruptedError','RuntimeWarning','Warning','SystemExit','ImportWarning','BaseException','FileNotFoundError','GeneratorExit','NotImplementedError','LookupError','WindowsError','IsADirectoryError','StopAsyncIteration','BlockingIOError','DeprecationWarning','StopIteration','BufferError','MemoryError','BaseExceptionGroup','FileExistsError','ModuleNotFoundError','ProcessLookupError','OverflowError','SyntaxError','EOFError','SystemError','RuntimeError','AssertionError','BytesWarning','EncodingWarning','RecursionError','ArithmeticError','PendingDeprecationWarning','TabError','UnboundLocalError','UnicodeDecodeError','NotADirectoryError','ResourceWarning','ChildProcessError','UnicodeError','BrokenPipeError','EnvironmentError','FloatingPointError','ValueError','UnicodeWarning','IndexError','NameError','IndentationError','ConnectionRefusedError','AttributeError','ConnectionError','FutureWarning','IOError','KeyError','TypeError','ConnectionResetError','TimeoutError','UnicodeEncodeError','ZeroDivisionError','ImportError','UserWarning'],1073763848:['ExceptionGroup'],21500162:['bool'],4723970:['bytearray','float'],138941698:['bytes'],546050:['zip','property','enumerate','classmethod','map','staticmethod','reversed','super','filter'],529666:['object','complex'],541611330:['dict'],4740354:['set','frozenset'],21501186:['int'],38294818:['list'],545058:['memoryview'],528674:['range'],545026:['slice'],273159426:['str'],71849250:['tuple'],2156420354:['type'],},types:{545154:['method-wrapper','async_generator','classmethod_descriptor','member_descriptor','getset_descriptor','coroutine','generator','frame'],547202:['builtin_function_or_method'],545026:['traceback','cell'],528642:['NotImplementedType','ellipsis','code','NoneType'],678146:['function'],545090:['mappingproxy'],678274:['method_descriptor'],547074:['method'],546050:['module'],676226:['wrapper_descriptor'],}}})(__BRYTHON__) 
+(function($B){$B.builtin_class_flags={builtins:{1074287874:['DeprecationWarning','Exception','RuntimeError','BytesWarning','EncodingWarning','FutureWarning','Warning','AttributeError','FileExistsError','ImportWarning','StopAsyncIteration','UnicodeDecodeError','ValueError','WindowsError','NameError','EnvironmentError','IndentationError','RuntimeWarning','LookupError','ChildProcessError','BaseException','OSError','StopIteration','PendingDeprecationWarning','InterruptedError','TimeoutError','UnboundLocalError','NotImplementedError','IndexError','IsADirectoryError','UnicodeEncodeError','UnicodeWarning','BaseExceptionGroup','SyntaxWarning','IOError','EOFError','ZeroDivisionError','GeneratorExit','ConnectionResetError','ImportError','SyntaxError','KeyError','UnicodeTranslateError','TypeError','ProcessLookupError','KeyboardInterrupt','OverflowError','BufferError','SystemExit','FileNotFoundError','NotADirectoryError','ConnectionError','RecursionError','PermissionError','UserWarning','ConnectionRefusedError','SystemError','AssertionError','ModuleNotFoundError','FloatingPointError','TabError','BrokenPipeError','ResourceWarning','ReferenceError','UnicodeError','ConnectionAbortedError','BlockingIOError','ArithmeticError','MemoryError'],1073763848:['ExceptionGroup'],21500162:['bool'],4723970:['bytearray','float'],138941698:['bytes'],546050:['map','staticmethod','property','super','filter','zip','enumerate','reversed','classmethod'],529666:['object','complex'],541611330:['dict'],4740354:['frozenset','set'],21501186:['int'],38294818:['list'],545058:['memoryview'],528674:['range'],545026:['slice'],273159426:['str'],71849250:['tuple'],2156420354:['type'],},types:{545154:['member_descriptor','classmethod_descriptor','async_generator','generator','getset_descriptor','coroutine','frame','method-wrapper'],547202:['builtin_function_or_method'],545026:['cell','traceback'],528642:['NoneType','ellipsis','NotImplementedType','code'],678146:['function'],545090:['mappingproxy'],678274:['method_descriptor'],547074:['method'],546050:['module'],676226:['wrapper_descriptor'],}}})(__BRYTHON__) ; (function($B){var _b_=$B.builtins var update=$B.update_obj=function(mod,data){for(let attr in data){mod[attr]=data[attr]}} @@ -14981,26 +10918,26 @@ js_constr.__class__=_b_.type return function(obj){obj.__bases__.splice(0,0,js_constr) obj.__mro__.splice(0,0,js_constr) return obj}},import_js:function(){ -var $=$B.args('import_js',2,{url:null,name:null},['url','name'],arguments,{name:_b_.None},null,null),url=$.url,name=$.name +var $=$B.args('import_js',2,{url:null,alias:null},['url','alias'],arguments,{alias:_b_.None},null,null),url=$.url,alias=$.alias var xhr=new XMLHttpRequest(),result xhr.open('GET',url,false) xhr.onreadystatechange=function(){if(this.readyState==4){if(this.status==200){var js=this.responseText+'\nreturn $module',f=new Function(js) console.log('f',f,f+'') var $module=f() -if(typeof $module !=='undefined'){result=$B.module.$factory(name) +if(typeof $module !=='undefined'){result=$B.module.$factory(alias) for(var key in $module){result[key]=$B.jsobj2pyobj($module[key])} result.__file__=url}else{console.log(this.responseText) result=_b_.ImportError.$factory('Javascript '+ -`module at ${url} doesn't define $module`)}}else{result=_b_.ModuleNotFoundError.$factory(name)}}} +`module at ${url} doesn't define $module`)}}else{result=_b_.ModuleNotFoundError.$factory(url)}}} xhr.send() -if($B.$isinstance(result,_b_.BaseException)){$B.handle_error(result)}else{if(name===_b_.None){ -name=url.split('.') 
+if($B.$isinstance(result,_b_.BaseException)){$B.handle_error(result)}else{if(alias===_b_.None){ +var name=url.split('.') if(name.length > 1){name.pop()} -name=name.join('.') -result.__name__=name} -$B.imported[name]=result +alias=name.join('.') +result.__name__=alias} +$B.imported[alias]=result var frame=$B.frame_obj.frame -frame[1][name]=result}},import_modules:function(refs,callback,loaded){ +frame[1][alias]=result}},import_modules:function(refs,callback,loaded){ if(loaded===undefined){loaded=[]} if(! Array.isArray(refs)){throw _b_.TypeError.$factory( `first argument must be a list, got ${$B.class_name(refs)}`)} @@ -15291,18 +11228,48 @@ $B.stdin={__class__:$io,__original__:true,closed:false,len:1,pos:0,read:function $B.tracefunc=_b_.None})(__BRYTHON__) ; (function($B){var _b_=$B.builtins +function ast_dump(tree,indent){var attr,value +indent=indent ||0 +if(tree===_b_.None){ +return 'None'}else if(typeof tree=='string'){return `'${tree}'`}else if(typeof tree=='number'){return tree+''}else if(tree.imaginary){return tree.value+'j'}else if(Array.isArray(tree)){if(tree.length==0){return '[]'} +res='[\n' +var items=[] +for(var x of tree){try{items.push(ast_dump(x,indent+1))}catch(err){console.log('error',tree) +console.log('for item',x) +throw err}} +res+=items.join(',\n') +return res+']'}else if(tree.$name){return tree.$name+'()'}else if(tree instanceof ast.MatchSingleton){return `MatchSingleton(value=${$B.AST.$convert(tree.value)})`}else if(tree instanceof ast.Constant){value=tree.value +if(value.imaginary){return `Constant(value=${_b_.repr(value.value)}j)`} +return `Constant(value=${$B.AST.$convert(value)})`} +var proto=Object.getPrototypeOf(tree).constructor +var res=' ' .repeat(indent)+proto.$name+'(' +if($B.ast_classes[proto.$name]===undefined){console.log('no ast class',proto)} +var attr_names=$B.ast_classes[proto.$name].split(','),attrs=[] +attr_names=attr_names.map(x=>(x.endsWith('*')||x.endsWith('?'))? +x.substr(0,x.length-1):x) +if([ast.Name].indexOf(proto)>-1){for(attr of attr_names){if(tree[attr]!==undefined){attrs.push(`${attr}=${ast_dump(tree[attr])}`)}} +return res+attrs.join(', ')+')'} +for(attr of attr_names){if(tree[attr]!==undefined){value=tree[attr] +attrs.push(attr+'='+ +ast_dump(tree[attr],indent+1).trimStart())}} +if(attrs.length > 0){res+='\n' +res+=attrs.map(x=> ' '.repeat(indent+1)+x).join(',\n')} +res+=')' +return res} +function string_from_ast_value(value){ +return value.replace(new RegExp("\\\\'",'g'),"'")} function compiler_error(ast_obj,message,end){var exc=_b_.SyntaxError.$factory(message) exc.filename=state.filename if(exc.filename !=''){var src=$B.file_cache[exc.filename],lines=src.split('\n'),line=lines[ast_obj.lineno-1] exc.text=line}else{exc.text=_b_.None} exc.lineno=ast_obj.lineno -exc.offset=ast_obj.col_offset +exc.offset=ast_obj.col_offset+1 end=end ||ast_obj exc.end_lineno=end.end_lineno -exc.end_offset=end.end_col_offset +exc.end_offset=end.end_col_offset+1 exc.args[1]=[exc.filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset] exc.$frame_obj=$B.frame_obj -if($B.frame_obj===null){console.log('frame obj is null')} +if($B.frame_obj===null){} throw exc} function fast_id(obj){ if(obj.$id !==undefined){return obj.$id} @@ -15598,11 +11565,14 @@ if(namespaces.exec_locals !==namespaces.exec_globals){for(let key in namespaces. return name} function compiler_check(obj){var check_func=Object.getPrototypeOf(obj)._check if(check_func){obj._check()}} +function check_assign_or_delete(obj,target,action){action=action ?? 
'assign to' +if(target instanceof $B.ast.Attribute){if(target.attr=='__debug__'){compiler_error(obj,`cannot ${action} __debug__`,target)}}else if(target instanceof $B.ast.Name){if(target.id=='__debug__'){compiler_error(obj,`cannot ${action} __debug__`,target)}}else if(target instanceof $B.ast.Tuple){for(var elt of target.elts){check_assign_or_delete(elt,elt,action)}}else if(target instanceof $B.ast.Starred){check_assign_or_delete(obj,target.value,action)}} $B.ast.Assert.prototype.to_js=function(scopes){var test=$B.js_from_ast(this.test,scopes),msg=this.msg ? $B.js_from_ast(this.msg,scopes):'' return `if($B.set_lineno(frame, ${this.lineno}) && !$B.$bool(${test})){\n`+ `throw _b_.AssertionError.$factory(${msg})}\n`} function annotation_to_str(obj,scopes){return get_source_from_position(scopes.src,obj)} -$B.ast.AnnAssign.prototype.to_js=function(scopes){var postpone_annotation=scopes.symtable.table.future.features & +$B.ast.AnnAssign.prototype.to_js=function(scopes){compiler_check(this) +var postpone_annotation=scopes.symtable.table.future.features & $B.CO_FUTURE_ANNOTATIONS var scope=last_scope(scopes) var js='' @@ -15624,6 +11594,7 @@ js+=`${target_ref} = ann`}else if(this.target instanceof $B.ast.Attribute){js+=` js+=`$B.$setitem(locals.__annotations__, `+ `'${mangled}', ${ann_value})`}}} return `$B.set_lineno(frame, ${this.lineno})\n`+js} +$B.ast.AnnAssign.prototype._check=function(){check_assign_or_delete(this,this.target)} $B.ast.Assign.prototype.to_js=function(scopes){compiler_check(this) var js=this.lineno ? `$B.set_lineno(frame, ${this.lineno})\n` :'',value=$B.js_from_ast(this.value,scopes) function assign_one(target,value){if(target instanceof $B.ast.Name){return $B.js_from_ast(target,scopes)+' = '+value}else if(target instanceof $B.ast.Starred){return assign_one(target.value,value)}else if(target instanceof $B.ast.Subscript){return `$B.$setitem(${$B.js_from_ast(target.value, scopes)}`+ @@ -15657,6 +11628,7 @@ for(let target of this.targets){if(!(target instanceof $B.ast.Tuple)&& !(target instanceof $B.ast.List)){assigns.push(assign_one(target,value_id))}else{assigns.push(assign_many(target,value_id))}} js+=assigns.join('\n') return js} +$B.ast.Assign.prototype._check=function(){for(var target of this.targets){check_assign_or_delete(this,target)}} $B.ast.AsyncFor.prototype.to_js=function(scopes){if(!(last_scope(scopes).ast instanceof $B.ast.AsyncFunctionDef)){compiler_error(this,"'async for' outside async function")} return $B.ast.For.prototype.to_js.bind(this)(scopes)} $B.ast.AsyncFunctionDef.prototype.to_js=function(scopes){return $B.ast.FunctionDef.prototype.to_js.bind(this)(scopes)} @@ -15702,11 +11674,11 @@ var has_generator=scope.is_generator for(let item of this.items.slice().reverse()){js=add_item(item,js)} return `$B.set_lineno(frame, ${this.lineno})\n`+js} $B.ast.Attribute.prototype.to_js=function(scopes){var attr=mangle(scopes,last_scope(scopes),this.attr) -if(this.value instanceof $B.ast.Name && this.value.id=='axw'){return `${$B.js_from_ast(this.value, scopes)}.${attr}`} var position=encode_position(this.value.col_offset,this.value.col_offset,this.end_col_offset) return `$B.$getattr_pep657(${$B.js_from_ast(this.value, scopes)}, `+ `'${attr}', ${position})`} -$B.ast.AugAssign.prototype.to_js=function(scopes){var js,op_class=this.op.$name ? this.op :this.op.constructor +$B.ast.AugAssign.prototype.to_js=function(scopes){compiler_check(this) +var js,op_class=this.op.$name ? 
this.op :this.op.constructor for(var op in $B.op2ast_class){if($B.op2ast_class[op][1]===op_class){var iop=op+'=' break}} var value=$B.js_from_ast(this.value,scopes) @@ -15724,6 +11696,7 @@ js=`$B.$setattr((locals.$tg = ${this.target.value.to_js(scopes)}), `+ `$B.$getattr(locals.$tg, '${mangled}'), '${iop}', ${value}))`}else{let target=$B.js_from_ast(this.target,scopes),value=$B.js_from_ast(this.value,scopes) js=`${target} = $B.augm_assign(${target}, '${iop}', ${value})`} return `$B.set_lineno(frame, ${this.lineno})\n`+js} +$B.ast.AugAssign.prototype._check=function(){check_assign_or_delete(this,this.target)} $B.ast.Await.prototype.to_js=function(scopes){var ix=scopes.length-1 while(scopes[ix].parent){ix--} scopes[ix].nb_await=scopes[ix].nb_await===undefined ? 1 : @@ -15763,13 +11736,15 @@ scope.ast instanceof $B.ast.While){js+=`no_break_${scope.id} = false\n` break}} js+=`break` return js} -$B.ast.Call.prototype.to_js=function(scopes){var func=$B.js_from_ast(this.func,scopes),js=`$B.$call(${func}` +$B.ast.Call.prototype.to_js=function(scopes){compiler_check(this) +var func=$B.js_from_ast(this.func,scopes),js=`$B.$call(${func}` var position=encode_position(this.col_offset,this.col_offset,this.end_col_offset) js+=`, ${position}` js+=')' var args=make_args.bind(this)(scopes) return js+(args.has_starred ? `.apply(null, ${args.js})` : `(${args.js})`)} +$B.ast.Call.prototype._check=function(){for(var kw of this.keywords){if(kw.arg=='__debug__'){compiler_error(this,"cannot assign to __debug__",kw)}}} function make_args(scopes){var js='',named_args=[],named_kwargs=[],starred_kwargs=[],has_starred=false for(let arg of this.args){if(arg instanceof $B.ast.Starred){arg.$handled=true has_starred=true}else{named_args.push($B.js_from_ast(arg,scopes))}} @@ -15849,7 +11824,7 @@ scopes.pop() js+='\n$B.trace_return_and_leave(frame, _b_.None)\n'+ `return $B.$class_constructor('${this.name}', locals, metaclass, `+ `resolved_bases, bases, [${keywords.join(', ')}])\n`+ -`})('${this.name}', '${glob}', $B.fast_tuple([${bases}]))\n` +`})('${this.name}',${globals_name}.__name__ ?? '${glob}', $B.fast_tuple([${bases}]))\n` var class_ref=reference(scopes,enclosing_scope,this.name) if(decorated){class_ref=`decorated${$B.UUID()}` js+='var '} @@ -15897,6 +11872,7 @@ js+=`$B.$delete("${target.id}")\n`}else if(target instanceof $B.ast.Subscript){j `${$B.js_from_ast(target.slice, scopes)})\n`}else if(target instanceof $B.ast.Attribute){js+=`_b_.delattr(${$B.js_from_ast(target.value, scopes)}, `+ `'${target.attr}')\n`}} return `$B.set_lineno(frame, ${this.lineno})\n`+js} +$B.ast.Delete.prototype._check=function(){for(var target of this.targets){check_assign_or_delete(this,target,'delete')}} $B.ast.Dict.prototype.to_js=function(scopes){var items=[],keys=this.keys,has_packed=false function no_key(i){return keys[i]===_b_.None ||keys[i]===undefined} for(let i=0,len=this.keys.length;i < len;i++){if(no_key(i)){ @@ -15905,7 +11881,7 @@ items.push('_b_.list.$factory(_b_.dict.items('+ $B.js_from_ast(this.values[i],scopes)+'))')}else{var item=`[${$B.js_from_ast(this.keys[i], scopes)}, `+ `${$B.js_from_ast(this.values[i], scopes)}` if(this.keys[i]instanceof $B.ast.Constant){var v=this.keys[i].value -if(typeof v=='string'){item+=', '+$B.$hash($B.string_from_ast_value(v))}else{try{var hash=$B.$hash(this.keys[i].value) +if(typeof v=='string'){item+=', '+$B.$hash(string_from_ast_value(v))}else{try{var hash=$B.$hash(this.keys[i].value) item+=`, ${hash}`}catch(err){}}} items.push(item+']')}} if(! 
has_packed){return `_b_.dict.$literal([${items}])`} @@ -15919,6 +11895,7 @@ $B.js_from_ast(this.value,scopes)} $B.ast.Expression.prototype.to_js=function(scopes){init_scopes.bind(this)('expression',scopes) return $B.js_from_ast(this.body,scopes)} $B.ast.For.prototype.to_js=function(scopes){ +compiler_check(this) var id=$B.UUID(),iter=$B.js_from_ast(this.iter,scopes),js=`frame.$lineno = ${this.lineno}\n` var scope=$B.last(scopes),new_scope=copy_scope(scope,this,id) scopes.push(new_scope) @@ -16274,7 +12251,8 @@ if(tp.bound){if(! tp.bound.elts){js+=`_typing.${param_type}._set_lazy_eval(local `'__constraints__', BOUND_OF_${name})\n`}} return js} $B.make_args_parser_and_parse=function make_args_parser_and_parse(fct,args){return $B.make_args_parser(fct)(fct,args);} -$B.ast.FunctionDef.prototype.to_js=function(scopes){var symtable_block=scopes.symtable.table.blocks.get(fast_id(this)) +$B.ast.FunctionDef.prototype.to_js=function(scopes){compiler_check(this) +var symtable_block=scopes.symtable.table.blocks.get(fast_id(this)) var in_class=last_scope(scopes).ast instanceof $B.ast.ClassDef,is_async=this instanceof $B.ast.AsyncFunctionDef if(in_class){var class_scope=last_scope(scopes)} var func_name_scope=bind(this.name,scopes) @@ -16455,6 +12433,9 @@ for(let dec of decorators.reverse()){decorate=`$B.$call(${dec})(${decorate})`} js+=decorate}else{js+=`var locals_${type_params_ref} = TYPE_PARAMS_OF_${name2}()\n`}} js=decs_declare+js return js} +$B.ast.FunctionDef.prototype._check=function(){for(var arg of this.args.args){if(arg instanceof $B.ast.arg){if(arg.arg=='__debug__'){compiler_error(arg,'cannot assign to __debug__')}}} +for(var arg of this.args.kwonlyargs){if(arg instanceof $B.ast.arg){if(arg.arg=='__debug__'){compiler_error(arg,'cannot assign to __debug__')}}} +if(this.args.kwarg && this.args.kwarg.arg=='__debug__'){compiler_error(this.args.kwarg,'cannot assign to __debug__')}} $B.ast.GeneratorExp.prototype.to_js=function(scopes){var id=$B.UUID(),symtable_block=scopes.symtable.table.blocks.get(fast_id(this)),varnames=symtable_block.varnames.map(x=> `"${x}"`) var first_for=this.generators[0], outmost_expr=$B.js_from_ast(first_for.iter,scopes),nb_paren=1 @@ -16528,6 +12509,33 @@ for(var alias of this.names){if(alias.asname){bind(alias.asname,scopes)}else if( last_scope(scopes).blurred=true js+=`\n$B.import_all(locals, module)`}else{bind(alias.name,scopes)}} return js} +$B.ast.Interactive.prototype.to_js=function(scopes){mark_parents(this) +var name=init_scopes.bind(this)('module',scopes) +var module_id=name,global_name=make_scope_name(scopes),mod_name=module_name(scopes) +var js=`// Javascript code generated from ast\n`+ +`var $B = __BRYTHON__,\n_b_ = $B.builtins,\n` +js+=`${global_name} = {}, // $B.imported["${mod_name}"],\n`+ +`locals = ${global_name},\n`+ +`frame = ["${module_id}", locals, "${module_id}", locals]` +js+=`\nvar __file__ = frame.__file__ = '${scopes.filename || ""}'\n`+ +`locals.__name__ = '${name}'\n`+ +`locals.__doc__ = ${extract_docstring(this, scopes)}\n` +if(! 
scopes.imported){js+=`locals.__annotations__ = locals.__annotations__ || $B.empty_dict()\n`} +js+=`frame.$f_trace = $B.enter_frame(frame)\n` +js+=`$B.set_lineno(frame, 1)\n`+ +'\nvar _frame_obj = $B.frame_obj\n' +js+='var stack_length = $B.count_frames()\n' +js+=`try{\n`+ +add_body(this.body,scopes)+'\n'+ +`$B.leave_frame({locals, value: _b_.None})\n`+ +`}catch(err){\n`+ +`$B.set_exc_and_trace(frame, err)\n`+ +`$B.leave_frame({locals, value: _b_.None})\n`+ +'throw err\n'+ +`}` +scopes.pop() +console.log('Interactive',js) +return js} $B.ast.JoinedStr.prototype.to_js=function(scopes){var items=this.values.map(s=> $B.js_from_ast(s,scopes)) if(items.length==0){return "''"} return items.join(' + ')} @@ -16667,8 +12675,8 @@ js+=`\nvar __file__ = frame.__file__ = '${scopes.filename || ""}'\n`+ `locals.__name__ = '${name}'\n`+ `locals.__doc__ = ${extract_docstring(this, scopes)}\n` if(! scopes.imported){js+=`locals.__annotations__ = locals.__annotations__ || $B.empty_dict()\n`} -js+=`frame.$f_trace = $B.enter_frame(frame)\n` -if(! namespaces){js+=`$B.set_lineno(frame, 1)\n`+ +if(! namespaces){js+=`frame.$f_trace = $B.enter_frame(frame)\n` +js+=`$B.set_lineno(frame, 1)\n`+ '\nvar _frame_obj = $B.frame_obj\n'} js+='var stack_length = $B.count_frames()\n' js+=`try{\n`+ @@ -16689,7 +12697,7 @@ return reference(scopes,scope,this.id)}else if(this.ctx instanceof $B.ast.Load){ if(this.id=='__debugger__' && res.startsWith('$B.resolve_in_scopes')){ return 'debugger'} return res}} -$B.ast.NamedExpr.prototype.to_js=function(scopes){ +$B.ast.NamedExpr.prototype.to_js=function(scopes){compiler_check(this) var i=scopes.length-1 while(scopes[i].type=='comprehension'){i--} var enclosing_scopes=scopes.slice(0,i+1) @@ -16697,6 +12705,7 @@ enclosing_scopes.symtable=scopes.symtable bind(this.target.id,enclosing_scopes) return '('+$B.js_from_ast(this.target,enclosing_scopes)+' = '+ $B.js_from_ast(this.value,scopes)+')'} +$B.ast.NamedExpr.prototype._check=function(){check_assign_or_delete(this,this.target)} $B.ast.Nonlocal.prototype.to_js=function(scopes){var scope=$B.last(scopes) for(var name of this.names){scope.nonlocals.add(name)} return ''} @@ -16708,6 +12717,7 @@ if(this.exc){js+=$B.js_from_ast(this.exc,scopes)} if(this.cause){js+=', '+$B.js_from_ast(this.cause,scopes)} return js+')'} $B.ast.Return.prototype.to_js=function(scopes){ +if(last_scope(scopes).type !='def'){compiler_error(this,"'return' outside function")} compiler_check(this) var js=`$B.set_lineno(frame, ${this.lineno})\n`+ 'var result = '+ @@ -16725,7 +12735,7 @@ $B.ast.SetComp.prototype.to_js=function(scopes){return make_comp.bind(this)(scop $B.ast.Slice.prototype.to_js=function(scopes){var lower=this.lower ? $B.js_from_ast(this.lower,scopes):'_b_.None',upper=this.upper ? $B.js_from_ast(this.upper,scopes):'_b_.None',step=this.step ? 
$B.js_from_ast(this.step,scopes):'_b_.None' return `_b_.slice.$fast_slice(${lower}, ${upper}, ${step})`} $B.ast.Starred.prototype.to_js=function(scopes){if(this.$handled){return `_b_.list.$unpack(${$B.js_from_ast(this.value, scopes)})`} -if(this.ctx instanceof $B.ast.Store){compiler_error(this,"starred assignment target must be in a list or tuple")}else{compiler_error(this,"invalid syntax")}} +if(this.ctx instanceof $B.ast.Store){compiler_error(this,"starred assignment target must be in a list or tuple")}else{compiler_error(this,"can't use starred expression here")}} $B.ast.Subscript.prototype.to_js=function(scopes){var value=$B.js_from_ast(this.value,scopes),slice=$B.js_from_ast(this.slice,scopes) if(this.slice instanceof $B.ast.Slice){return `$B.getitem_slice(${value}, ${slice})`}else{var position=encode_position(this.value.col_offset,this.slice.col_offset,this.slice.end_col_offset) return `$B.$getitem(${value}, ${slice},${position})`}} @@ -17081,933 +13091,19387 @@ console.log("unhandled",ast.constructor.$name,ast,typeof ast) return '// unhandled class ast.'+ast.constructor.$name}})(__BRYTHON__) ; (function($B){var _b_=$B.builtins -var GLOBAL_PARAM="name '%s' is parameter and global",NONLOCAL_PARAM="name '%s' is parameter and nonlocal",GLOBAL_AFTER_ASSIGN="name '%s' is assigned to before global declaration",NONLOCAL_AFTER_ASSIGN="name '%s' is assigned to before nonlocal declaration",GLOBAL_AFTER_USE="name '%s' is used prior to global declaration",NONLOCAL_AFTER_USE="name '%s' is used prior to nonlocal declaration",GLOBAL_ANNOT="annotated name '%s' can't be global",NONLOCAL_ANNOT="annotated name '%s' can't be nonlocal",IMPORT_STAR_WARNING="import * only allowed at module level",NAMED_EXPR_COMP_IN_CLASS= -"assignment expression within a comprehension cannot be used in a class body",NAMED_EXPR_COMP_CONFLICT= -"assignment expression cannot rebind comprehension iteration variable '%s'",NAMED_EXPR_COMP_INNER_LOOP_CONFLICT= -"comprehension inner loop cannot rebind assignment expression target '%s'",NAMED_EXPR_COMP_ITER_EXPR= -"assignment expression cannot be used in a comprehension iterable expression",ANNOTATION_NOT_ALLOWED= -"'%s' can not be used within an annotation",DUPLICATE_ARGUMENT="duplicate argument '%s' in function definition",TYPEVAR_BOUND_NOT_ALLOWED="'%s' can not be used within a TypeVar bound",TYPEALIAS_NOT_ALLOWED="'%s' can not be used within a type alias",TYPEPARAM_NOT_ALLOWED= -"'%s' can not be used within the definition of a generic",DUPLICATE_TYPE_PARAM="duplicate type parameter '%s'" -var DEF_GLOBAL=1, -DEF_LOCAL=2 , -DEF_PARAM=2 << 1, -DEF_NONLOCAL=2 << 2, -USE=2 << 3 , -DEF_FREE=2 << 4 , -DEF_FREE_CLASS=2 << 5, -DEF_IMPORT=2 << 6, -DEF_ANNOT=2 << 7, -DEF_COMP_ITER=2 << 8, -DEF_TYPE_PARAM=2 << 9, -DEF_COMP_CELL=2 << 10 -var DEF_BOUND=DEF_LOCAL |DEF_PARAM |DEF_IMPORT -var SCOPE_OFFSET=12,SCOPE_MASK=(DEF_GLOBAL |DEF_LOCAL |DEF_PARAM |DEF_NONLOCAL) -var LOCAL=1,GLOBAL_EXPLICIT=2,GLOBAL_IMPLICIT=3,FREE=4,CELL=5 -var TYPE_MODULE=2 -var NULL=undefined -var ModuleBlock=2,ClassBlock=1,FunctionBlock=0,AnnotationBlock=4,TypeVarBoundBlock=5,TypeAliasBlock=6,TypeParamBlock=7 -var PyExc_SyntaxError=_b_.SyntaxError -function assert(test){if(! $B.$bool(test)){console.log('test fails',test) -throw Error('test fails')}} -function LOCATION(x){ -return[x.lineno,x.col_offset,x.end_lineno,x.end_col_offset]} -function ST_LOCATION(x){ -return[x.lineno,x.col_offset,x.end_lineno,x.end_col_offset]} -function _Py_Mangle(privateobj,ident){ -var plen,ipriv -if(privateobj==NULL ||! 
ident.startsWith('__')){return ident;} -plen=privateobj.length -if(ident.endsWith('__')||ident.search(/\./)!=-1){return ident;} -ipriv=0; -while(privateobj[ipriv]=='_'){ipriv++} -if(ipriv==plen){return ident } -var prefix=privateobj.substr(ipriv) -return '_'+prefix+ident} -var lambda=NULL -var NoComprehension=0,ListComprehension=1,DictComprehension=2,SetComprehension=3,GeneratorExpression=4 -function GET_IDENTIFIER(VAR){return VAR} -function Symtable(){this.filename=NULL; -this.stack=[] -this.blocks=new Map() -this.cur=NULL; -this.private=NULL;} -function id(obj){if(obj.$id !==undefined){return obj.$id} -return obj.$id=$B.UUID()} -function ste_new(st,name,block,key,lineno,col_offset,end_lineno,end_col_offset){var ste -ste={table:st,id:id(key), -name:name,directives:NULL,type:block,nested:0,free:0,varargs:0,varkeywords:0,opt_lineno:0,opt_col_offset:0,lineno:lineno,col_offset:col_offset,end_lineno:end_lineno,end_col_offset:end_col_offset} -if(st.cur !=NULL && -(st.cur.nested || -st.cur.type==FunctionBlock)){ste.nested=1;} -ste.child_free=0 -ste.generator=0 -ste.coroutine=0 -ste.comprehension=NoComprehension -ste.returns_value=0 -ste.needs_class_closure=0 -ste.comp_inlined=0 -ste.comp_iter_target=0 -ste.comp_iter_expr=0 -ste.symbols=$B.empty_dict() -ste.varnames=[] -ste.children=[] -st.blocks.set(ste.id,ste) -return ste} -$B._PySymtable_Build=function(mod,filename,future){var st=new Symtable(),seq -st.filename=filename; -st.future=future ||{} -st.type=TYPE_MODULE -if(!symtable_enter_block(st,'top',ModuleBlock,mod,0,0,0,0)){return NULL;} -st.top=st.cur -switch(mod.constructor){case $B.ast.Module: -seq=mod.body -for(let item of seq){visitor.stmt(st,item)} +var GLOBAL_PARAM="name '%s' is parameter and global",NONLOCAL_PARAM="name '%s' is parameter and nonlocal",GLOBAL_AFTER_ASSIGN="name '%s' is assigned to before global declaration",NONLOCAL_AFTER_ASSIGN="name '%s' is assigned to before nonlocal declaration",GLOBAL_AFTER_USE="name '%s' is used prior to global declaration",NONLOCAL_AFTER_USE="name '%s' is used prior to nonlocal declaration",GLOBAL_ANNOT="annotated name '%s' can't be global",NONLOCAL_ANNOT="annotated name '%s' can't be nonlocal",IMPORT_STAR_WARNING="import * only allowed at module level",NAMED_EXPR_COMP_IN_CLASS= +"assignment expression within a comprehension cannot be used in a class body",NAMED_EXPR_COMP_CONFLICT= +"assignment expression cannot rebind comprehension iteration variable '%s'",NAMED_EXPR_COMP_INNER_LOOP_CONFLICT= +"comprehension inner loop cannot rebind assignment expression target '%s'",NAMED_EXPR_COMP_ITER_EXPR= +"assignment expression cannot be used in a comprehension iterable expression",ANNOTATION_NOT_ALLOWED= +"'%s' can not be used within an annotation",DUPLICATE_ARGUMENT="duplicate argument '%s' in function definition",TYPEVAR_BOUND_NOT_ALLOWED="%s cannot be used within a TypeVar bound",TYPEALIAS_NOT_ALLOWED="%s cannot be used within a type alias",TYPEPARAM_NOT_ALLOWED= +"%s cannot be used within the definition of a generic",DUPLICATE_TYPE_PARAM="duplicate type parameter '%s'" +var DEF_GLOBAL=1, +DEF_LOCAL=2 , +DEF_PARAM=2 << 1, +DEF_NONLOCAL=2 << 2, +USE=2 << 3 , +DEF_FREE=2 << 4 , +DEF_FREE_CLASS=2 << 5, +DEF_IMPORT=2 << 6, +DEF_ANNOT=2 << 7, +DEF_COMP_ITER=2 << 8, +DEF_TYPE_PARAM=2 << 9, +DEF_COMP_CELL=2 << 10 +var DEF_BOUND=DEF_LOCAL |DEF_PARAM |DEF_IMPORT +var SCOPE_OFFSET=12,SCOPE_MASK=(DEF_GLOBAL |DEF_LOCAL |DEF_PARAM |DEF_NONLOCAL) +var LOCAL=1,GLOBAL_EXPLICIT=2,GLOBAL_IMPLICIT=3,FREE=4,CELL=5 +var TYPE_MODULE=2 +var NULL=undefined +var 
ModuleBlock=2,ClassBlock=1,FunctionBlock=0,AnnotationBlock=4,TypeVarBoundBlock=5,TypeAliasBlock=6,TypeParamBlock=7 +var PyExc_SyntaxError=_b_.SyntaxError +function assert(test){if(! $B.$bool(test)){console.log('test fails',test) +throw Error('test fails')}} +function LOCATION(x){ +return[x.lineno,x.col_offset,x.end_lineno,x.end_col_offset]} +function ST_LOCATION(x){ +return[x.lineno,x.col_offset,x.end_lineno,x.end_col_offset]} +function _Py_Mangle(privateobj,ident){ +var plen,ipriv +if(privateobj==NULL ||! ident.startsWith('__')){return ident;} +plen=privateobj.length +if(ident.endsWith('__')||ident.search(/\./)!=-1){return ident;} +ipriv=0; +while(privateobj[ipriv]=='_'){ipriv++} +if(ipriv==plen){return ident } +var prefix=privateobj.substr(ipriv) +return '_'+prefix+ident} +var lambda=NULL +var NoComprehension=0,ListComprehension=1,DictComprehension=2,SetComprehension=3,GeneratorExpression=4 +function GET_IDENTIFIER(VAR){return VAR} +function Symtable(){this.filename=NULL; +this.stack=[] +this.blocks=new Map() +this.cur=NULL; +this.private=NULL;} +function id(obj){if(obj.$id !==undefined){return obj.$id} +return obj.$id=$B.UUID()} +function ste_new(st,name,block,key,lineno,col_offset,end_lineno,end_col_offset){var ste +ste={table:st,id:id(key), +name:name,directives:NULL,type:block,nested:0,free:0,varargs:0,varkeywords:0,opt_lineno:0,opt_col_offset:0,lineno:lineno,col_offset:col_offset,end_lineno:end_lineno,end_col_offset:end_col_offset} +if(st.cur !=NULL && +(st.cur.nested || +st.cur.type==FunctionBlock)){ste.nested=1;} +ste.child_free=0 +ste.generator=0 +ste.coroutine=0 +ste.comprehension=NoComprehension +ste.returns_value=0 +ste.needs_class_closure=0 +ste.comp_inlined=0 +ste.comp_iter_target=0 +ste.comp_iter_expr=0 +ste.symbols=$B.empty_dict() +ste.varnames=[] +ste.children=[] +st.blocks.set(ste.id,ste) +return ste} +$B._PySymtable_Build=function(mod,filename,future){var st=new Symtable(),seq +st.filename=filename; +st.future=future ||{} +st.type=TYPE_MODULE +if(!symtable_enter_block(st,'top',ModuleBlock,mod,0,0,0,0)){return NULL;} +st.top=st.cur +switch(mod.constructor){case $B.ast.Module: +seq=mod.body +for(let item of seq){visitor.stmt(st,item)} +break +case $B.ast.Expression: +visitor.expr(st,mod.body) +break +case $B.ast.Interactive: +seq=mod.body +for(let item of seq){visitor.stmt(st,item)} +break} +symtable_analyze(st) +return st.top;} +function _PyST_GetSymbol(ste,name){if(! 
_b_.dict.$contains_string(ste.symbols,name)){return 0} +return _b_.dict.$getitem_string(ste.symbols,name)} +function _PyST_GetScope(ste,name){var symbol=_PyST_GetSymbol(ste,name); +return(symbol >> SCOPE_OFFSET)& SCOPE_MASK;} +function _PyST_IsFunctionLike(ste){return ste.type==FunctionBlock +||ste.type==TypeVarBoundBlock +||ste.type==TypeAliasBlock +||ste.type==TypeParamBlock;} +function PyErr_Format(exc_type,message,arg){if(arg){message=_b_.str.__mod__(message,arg)} +return exc_type.$factory(message)} +function PyErr_SetString(exc_type,message){return exc_type.$factory(message)} +function set_exc_info(exc,filename,lineno,offset,end_lineno,end_offset){exc.filename=filename +exc.lineno=lineno +exc.offset=offset+1 +exc.end_lineno=end_lineno +exc.end_offset=end_offset+1 +var src=$B.file_cache[filename] +if(src !==undefined){var lines=src.split('\n') +exc.text=lines[lineno-1]}else{exc.text=''} +exc.args[1]=[filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset]} +function error_at_directive(exc,ste,name){assert(ste.directives) +for(var data of ste.directives){if(data[0]==name){set_exc_info(exc,ste.table.filename,data[1],data[2],data[3],data[4]) +return 0}} +throw _b_.RuntimeError.$factory( +"BUG: internal directive bookkeeping broken")} +function SET_SCOPE(DICT,NAME,I){DICT[NAME]=I} +function is_free_in_any_child(entry,key){for(var child_ste of entry.ste_children){var scope=_PyST_GetScope(child_ste,key) +if(scope==FREE){return 1}} +return 0} +function inline_comprehension(ste,comp,scopes,comp_free,inlined_cells){for(var item of _b_.dict.$iter_items(comp.symbols)){ +var k=item.key,comp_flags=item.value; +if(comp_flags & DEF_PARAM){ +continue;} +var scope=(comp_flags >> SCOPE_OFFSET)& SCOPE_MASK; +var only_flags=comp_flags &((1 << SCOPE_OFFSET)-1) +if(scope==CELL ||only_flags & DEF_COMP_CELL){inlined_cells.add(k)} +var existing=_b_.dict.$contains_string(ste.symbols,k) +if(!existing){ +var v_flags=only_flags +_b_.dict.$setitem(ste.symbols,k,v_flags); +SET_SCOPE(scopes,k,scope);}else{ +if((existing & DEF_BOUND)&& +!is_free_in_any_child(comp,k)&& +ste.type !==ClassBlock){_b_.set.remove(comp_free,k)}}} +return 1;} +function analyze_name(ste,scopes,name,flags,bound,local,free,global,type_params,class_entry){if(flags & DEF_GLOBAL){if(flags & DEF_NONLOCAL){let exc=PyErr_Format(_b_.SyntaxError,"name '%s' is nonlocal and global",name) +error_at_directive(exc,ste,name) +throw exc} +SET_SCOPE(scopes,name,GLOBAL_EXPLICIT) +global.add(name) +if(bound){bound.delete(name)} +return 1} +if(flags & DEF_NONLOCAL){if(!bound){let exc=PyErr_Format(_b_.SyntaxError,"nonlocal declaration not allowed at module level"); +error_at_directive(exc,ste,name) +throw exc} +if(! 
bound.has(name)){let exc=PyErr_Format(_b_.SyntaxError,"no binding for nonlocal '%s' found",name) +error_at_directive(exc,ste,name) +throw exc} +if(type_params.has(name)){let exc=PyErr_Format(_b_.SyntaxError,"nonlocal binding not allowed for type parameter '%s'",name); +error_at_directive(exc,ste,name) +throw exc} +SET_SCOPE(scopes,name,FREE) +ste.free=1 +free.add(name) +return 1} +if(flags & DEF_BOUND){SET_SCOPE(scopes,name,LOCAL) +local.add(name) +global.delete(name) +if(flags & DEF_TYPE_PARAM){type_params.add(name)}else{type_params.delete(name)} +return 1} +if(class_entry !=NULL){var class_flags=_PyST_GetSymbol(class_entry,name); +if(class_flags & DEF_GLOBAL){SET_SCOPE(scopes,name,GLOBAL_EXPLICIT) +return 1;}else if(class_flags & DEF_BOUND && +!(class_flags & DEF_NONLOCAL)){SET_SCOPE(scopes,name,GLOBAL_IMPLICIT) +return 1}} +if(bound && bound.has(name)){SET_SCOPE(scopes,name,FREE) +ste.free=1 +free.add(name) +return 1} +if(global && global.has(name)){SET_SCOPE(scopes,name,GLOBAL_IMPLICIT) +return 1} +if(ste.nested){ste.free=1} +SET_SCOPE(scopes,name,GLOBAL_IMPLICIT) +return 1} +function analyze_cells(scopes,free,inlined_cells){var v,v_cell; +v_cell=CELL; +if(!v_cell){return 0;} +for(let name in scopes){v=scopes[name] +var scope=v; +if(scope !=LOCAL){continue;} +if(free.has(name)&& ! inlined_cells.has(name)){continue;} +scopes[name]=v_cell +free.delete(name)} +return 1} +function drop_class_free(ste,free){var res=free.delete('__class__') +if(res){ste.needs_class_closure=1} +res=free.delete('__classdict__') +if(res){ste.needs_class_classdict=1} +return 1} +function update_symbols(symbols,scopes,bound,free,inlined_cells,classflag){var v,v_scope,v_new,v_free +for(let name of _b_.dict.$keys_string(symbols)){var test=false +let flags=_b_.dict.$getitem_string(symbols,name) +if(test){console.log('in update symbols, name',name,'flags',flags,flags & DEF_COMP_CELL)} +if(inlined_cells.has(name)){flags |=DEF_COMP_CELL} +v_scope=scopes[name] +var scope=v_scope +if(test){console.log('name',name,'scopes[name]',scopes[name],' flags |=',scope << SCOPE_OFFSET)} +flags |=(scope << SCOPE_OFFSET) +v_new=flags +if(!v_new){return 0;} +if(test){console.log('set symbol',name,'v_new',v_new,'def comp cell',DEF_COMP_CELL,v_new & DEF_COMP_CELL)} +_b_.dict.$setitem_string(symbols,name,v_new)} +v_free=FREE << SCOPE_OFFSET +for(let name of free){v=_b_.dict.$get_string(symbols,name) +if(v !==_b_.dict.$missing){ +if(classflag && +v &(DEF_BOUND |DEF_GLOBAL)){let flags=v |DEF_FREE_CLASS; +v_new=flags; +if(! 
v_new){return 0;} +_b_.dict.$setitem_string(symbols,name,v_new)} +continue;} +if(bound && !bound.has(name)){continue;} +_b_.dict.$setitem_string(symbols,name,v_free)} +return 1} +function analyze_block(ste,bound,free,global,typeparams,class_entry){var success=0 +let local=new Set() +let scopes={} +let newglobal=new Set() +let newfree=new Set() +let newbound=new Set() +let inlined_cells=new Set() +if(ste.type===ClassBlock){ +Set_Union(newglobal,global) +if(bound){Set_Union(newbound,bound)}} +for(let name of _b_.dict.$keys_string(ste.symbols)){var flags=_b_.dict.$getitem_string(ste.symbols,name) +if(!analyze_name(ste,scopes,name,flags,bound,local,free,global,typeparams,class_entry)){return 0}} +if(ste.type !=ClassBlock){ +if(_PyST_IsFunctionLike(ste)){Set_Union(newbound,local);} +if(bound){Set_Union(newbound,bound)} +Set_Union(newglobal,global);}else{ +newbound.add('__class__') +newbound.add('__classdict__')} +for(var c of ste.children){var child_free=new Set() +let entry=c +var new_class_entry=NULL; +if(entry.can_see_class_scope){if(ste.type==ClassBlock){new_class_entry=ste}else if(class_entry){new_class_entry=class_entry}} +var inline_comp=entry.comprehension && ! entry.generator; +if(! analyze_child_block(entry,newbound,newfree,newglobal,typeparams,new_class_entry,child_free)){return 0} +if(inline_comp){if(! inline_comprehension(ste,entry,scopes,child_free,inlined_cells)){} +entry.comp_inlined=1;} +Set_Union(newfree,child_free); +if(entry.free ||entry.child_free){ste.child_free=1}} +for(let i=ste.children.length-1;i >=0;i--){let entry=ste.children[i]; +if(entry.comp_inlined){ste.children.splice(i,0,...entry.children)}} +if(_PyST_IsFunctionLike(ste)&& !analyze_cells(scopes,newfree,inlined_cells)){return 0}else if(ste.type===ClassBlock && !drop_class_free(ste,newfree)){return 0} +if(!update_symbols(ste.symbols,scopes,bound,newfree,inlined_cells,ste.type===ClassBlock ||ste.can_see_class_scope)){return 0} +Set_Union(free,newfree) +success=1 +return success} +function PySet_New(arg){if(arg===NULL){return new Set()} +return new Set(arg)} +function Set_Union(setA,setB){for(let elem of setB){setA.add(elem)}} +function analyze_child_block(entry,bound,free,global,typeparams,class_entry,child_free){ +var temp_bound=PySet_New(bound),temp_free=PySet_New(free),temp_global=PySet_New(global),temp_typeparams=PySet_New(typeparams) +if(!analyze_block(entry,temp_bound,temp_free,temp_global,temp_typeparams,class_entry)){return 0} +Set_Union(child_free,temp_free); +return 1;} +function symtable_analyze(st){var free=new Set(),global=new Set(),typeparams=new Set() +return analyze_block(st.top,NULL,free,global,typeparams,NULL);} +function symtable_exit_block(st){var size=st.stack.length +st.cur=NULL; +if(size){st.stack.pop() +if(--size){st.cur=st.stack[size-1]}} +return 1} +function symtable_enter_block(st,name,block,ast,lineno,col_offset,end_lineno,end_col_offset){var prev +if(ast===undefined){console.log('call ste new, key undef',st,name)} +var ste=ste_new(st,name,block,ast,lineno,col_offset,end_lineno,end_col_offset) +st.stack.push(ste) +prev=st.cur +if(prev){ste.comp_iter_expr=prev.comp_iter_expr} +st.cur=ste +if(block===AnnotationBlock){return 1} +if(block===ModuleBlock){st.global=st.cur.symbols} +if(prev){prev.children.push(ste)} +return 1;} +function symtable_lookup(st,name){var mangled=_Py_Mangle(st.private,name) +if(!mangled){return 0;} +var ret=_PyST_GetSymbol(st.cur,mangled) +return ret;} +function symtable_add_def_helper(st,name,flag,ste,_location){var o,dict,val,mangled=_Py_Mangle(st.private,name) 
+if(!mangled){return 0} +dict=ste.symbols +if(_b_.dict.$contains_string(dict,mangled)){o=_b_.dict.$getitem_string(dict,mangled) +val=o +if((flag & DEF_PARAM)&&(val & DEF_PARAM)){ +let exc=PyErr_Format(_b_.SyntaxError,DUPLICATE_ARGUMENT,name); +set_exc_info(exc,st.filename,..._location) +throw exc} +if((flag & DEF_TYPE_PARAM)&&(val & DEF_TYPE_PARAM)){let exc=PyErr_Format(_b_.SyntaxError,DUPLICATE_TYPE_PARAM,name); +set_exc_info(exc,st.filename,...location); +throw exc} +val |=flag}else{val=flag} +if(ste.comp_iter_target){ +if(val &(DEF_GLOBAL |DEF_NONLOCAL)){let exc=PyErr_Format(_b_.SyntaxError,NAMED_EXPR_COMP_INNER_LOOP_CONFLICT,name); +set_exc_info(exc,st.filename,..._location) +throw exc} +val |=DEF_COMP_ITER} +o=val +if(o==NULL){return 0} +_b_.dict.$setitem(dict,mangled,o) +if(flag & DEF_PARAM){ste.varnames.push(mangled)}else if(flag & DEF_GLOBAL){ +val=flag +if(st.global.hasOwnProperty(mangled)){ +val |=st.global[mangled]} +o=val +if(o==NULL){return 0} +st.global[mangled]=o} +return 1} +function symtable_add_def(st,name,flag,_location){return symtable_add_def_helper(st,name,flag,st.cur,_location);} +function symtable_enter_type_param_block(st,name,ast,has_defaults,has_kwdefaults,kind,_location){var prev=st.cur,current_type=st.cur.type; +if(!symtable_enter_block(st,name,TypeParamBlock,ast,..._location)){return 0;} +prev.$type_param=st.cur +if(current_type===ClassBlock){st.cur.can_see_class_scope=1; +if(!symtable_add_def(st,"__classdict__",USE,_location)){return 0;}} +if(kind==$B.ast.ClassDef){ +if(!symtable_add_def(st,"type_params",DEF_LOCAL,_location)){return 0;} +if(!symtable_add_def(st,"type_params",USE,_location)){return 0;} +st.st_private=name; +var generic_base=".generic_base"; +if(!symtable_add_def(st,generic_base,DEF_LOCAL,_location)){return 0;} +if(!symtable_add_def(st,generic_base,USE,_location)){return 0;}} +if(has_defaults){var defaults=".defaults"; +if(!symtable_add_def(st,defaults,DEF_PARAM,_location)){return 0;}} +if(has_kwdefaults){var kwdefaults=".kwdefaults"; +if(!symtable_add_def(st,kwdefaults,DEF_PARAM,_location)){return 0;}} +return 1;} +function VISIT_QUIT(ST,X){return X} +function VISIT(ST,TYPE,V){var f=visitor[TYPE] +if(!f(ST,V)){VISIT_QUIT(ST,0);}} +function VISIT_SEQ(ST,TYPE,SEQ){for(var elt of SEQ){if(! visitor[TYPE](ST,elt)){VISIT_QUIT(ST,0)}}} +function VISIT_SEQ_TAIL(ST,TYPE,SEQ,START){for(var i=START,len=SEQ.length;i < len;i++){var elt=SEQ[i]; +if(! visitor[TYPE](ST,elt)){VISIT_QUIT(ST,0)}}} +function VISIT_SEQ_WITH_NULL(ST,TYPE,SEQ){for(var elt of SEQ){if(! elt){continue } +if(! 
visitor[TYPE](ST,elt)){VISIT_QUIT((ST),0)}}} +function symtable_record_directive(st,name,lineno,col_offset,end_lineno,end_col_offset){var data,mangled +if(!st.cur.directives){st.cur.directives=[]} +mangled=_Py_Mangle(st.private,name); +if(!mangled){return 0;} +data=$B.fast_tuple([mangled,lineno,col_offset,end_lineno,end_col_offset]) +st.cur.directives.push(data); +return true} +function has_kwonlydefaults(kwonlyargs,kw_defaults){for(var i=0,len=kwonlyargs.length;i < len;i++){if(kw_defaults[i]){return 1;}} +return 0;} +var visitor={} +visitor.stmt=function(st,s){switch(s.constructor){case $B.ast.FunctionDef: +if(!symtable_add_def(st,s.name,DEF_LOCAL,LOCATION(s))) +VISIT_QUIT(st,0) +if(s.args.defaults) +VISIT_SEQ(st,expr,s.args.defaults) +if(s.args.kw_defaults) +VISIT_SEQ_WITH_NULL(st,expr,s.args.kw_defaults) +if(s.type_params.length > 0){if(!symtable_enter_type_param_block( +st,s.name,s.type_params,s.args.defaults !=NULL,has_kwonlydefaults(s.args.kwonlyargs,s.args.kw_defaults),s.constructor,LOCATION(s))){VISIT_QUIT(st,0);} +VISIT_SEQ(st,type_param,s.type_params);} +if(!visitor.annotations(st,s,s.args,s.returns)) +VISIT_QUIT(st,0) +if(s.decorator_list){VISIT_SEQ(st,expr,s.decorator_list)} +if(!symtable_enter_block(st,s.name,FunctionBlock,s,...LOCATION(s))){VISIT_QUIT(st,0)} +VISIT(st,'arguments',s.args) +VISIT_SEQ(st,stmt,s.body) +if(!symtable_exit_block(st)){VISIT_QUIT(st,0)} +if(s.type_params.length > 0){if(!symtable_exit_block(st)){VISIT_QUIT(st,0)}} +break; +case $B.ast.ClassDef: +var tmp; +if(!symtable_add_def(st,s.name,DEF_LOCAL,LOCATION(s))) +VISIT_QUIT(st,0) +VISIT_SEQ(st,expr,s.bases) +VISIT_SEQ(st,keyword,s.keywords) +if(s.decorator_list) +VISIT_SEQ(st,expr,s.decorator_list); +if(s.type_params.length > 0){if(!symtable_enter_type_param_block(st,s.name,s.type_params,false,false,s.constructor,LOCATION(s))){VISIT_QUIT(st,0);} +VISIT_SEQ(st,type_param,s.type_params);} +VISIT_SEQ(st,expr,s.bases); +VISIT_SEQ(st,keyword,s.keywords); +if(!symtable_enter_block(st,s.name,ClassBlock,s,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset)) +VISIT_QUIT(st,0) +tmp=st.private +st.private=s.name +if(s.type_params.length > 0){if(!symtable_add_def(st,'__type_params__',DEF_LOCAL,LOCATION(s))){VISIT_QUIT(st,0);} +if(!symtable_add_def(st,'type_params',USE,LOCATION(s))){VISIT_QUIT(st,0);}} +VISIT_SEQ(st,stmt,s.body) +st.private=tmp +if(! 
symtable_exit_block(st)) +VISIT_QUIT(st,0) +if(s.type_params.length > 0){if(!symtable_exit_block(st)) +VISIT_QUIT(st,0);} +break +case $B.ast.TypeAlias: +VISIT(st,expr,s.name); +assert(s.name instanceof $B.ast.Name); +var name=s.name.id,is_in_class=st.cur.type===ClassBlock,is_generic=s.type_params.length > 0 +if(is_generic){if(!symtable_enter_type_param_block( +st,name,s.type_params,false,false,s.kind,LOCATION(s))){VISIT_QUIT(st,0);} +VISIT_SEQ(st,type_param,s.type_params);} +if(!symtable_enter_block(st,name,TypeAliasBlock,s,LOCATION(s))){VISIT_QUIT(st,0);} +st.cur.can_see_class_scope=is_in_class; +if(is_in_class && !symtable_add_def(st,'__classdict__',USE,LOCATION(s.value))){VISIT_QUIT(st,0);} +VISIT(st,expr,s.value); +if(!symtable_exit_block(st)){VISIT_QUIT(st,0);} +if(is_generic){if(!symtable_exit_block(st)) +VISIT_QUIT(st,0);} +break +case $B.ast.Return: +if(s.value){VISIT(st,expr,s.value) +st.cur.returns_value=1} +break +case $B.ast.Delete: +VISIT_SEQ(st,expr,s.targets) +break +case $B.ast.Assign: +VISIT_SEQ(st,expr,s.targets) +VISIT(st,expr,s.value) +break +case $B.ast.AnnAssign: +if(s.target instanceof $B.ast.Name){var e_name=s.target +var cur=symtable_lookup(st,e_name.id) +if(cur < 0){VISIT_QUIT(st,0)} +if((cur &(DEF_GLOBAL |DEF_NONLOCAL)) +&&(st.cur.symbols !=st.global) +&& s.simple){var exc=PyErr_Format(_b_.SyntaxError,cur & DEF_GLOBAL ? GLOBAL_ANNOT :NONLOCAL_ANNOT,e_name.id) +exc.args[1]=[st.filename,s.lineno,s.col_offset+1,s.end_lineno,s.end_col_offset+1] +throw exc} +if(s.simple && +! symtable_add_def(st,e_name.id,DEF_ANNOT |DEF_LOCAL,LOCATION(e_name))){VISIT_QUIT(st,0)}else{if(s.value +&& !symtable_add_def(st,e_name.id,DEF_LOCAL,LOCATION(e_name))){VISIT_QUIT(st,0)}}}else{VISIT(st,expr,s.target)} +if(!visitor.annotation(st,s.annotation)){VISIT_QUIT(st,0)} +if(s.value){VISIT(st,expr,s.value)} +break +case $B.ast.AugAssign: +VISIT(st,expr,s.target) +VISIT(st,expr,s.value) +break +case $B.ast.For: +VISIT(st,expr,s.target) +VISIT(st,expr,s.iter) +VISIT_SEQ(st,stmt,s.body) +if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)} +break +case $B.ast.While: +VISIT(st,expr,s.test) +VISIT_SEQ(st,stmt,s.body) +if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)} +break +case $B.ast.If: +VISIT(st,expr,s.test) +VISIT_SEQ(st,stmt,s.body) +if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)} +break +case $B.ast.Match: +VISIT(st,expr,s.subject) +VISIT_SEQ(st,match_case,s.cases) +break +case $B.ast.Raise: +if(s.exc){VISIT(st,expr,s.exc) +if(s.cause){VISIT(st,expr,s.cause)}} +break +case $B.ast.Try: +VISIT_SEQ(st,stmt,s.body) +VISIT_SEQ(st,stmt,s.orelse) +VISIT_SEQ(st,excepthandler,s.handlers) +VISIT_SEQ(st,stmt,s.finalbody) +break +case $B.ast.TryStar: +VISIT_SEQ(st,stmt,s.body) +VISIT_SEQ(st,stmt,s.orelse) +VISIT_SEQ(st,excepthandler,s.handlers) +VISIT_SEQ(st,stmt,s.finalbody) +break +case $B.ast.Assert: +VISIT(st,expr,s.test) +if(s.msg){VISIT(st,expr,s.msg);} +break +case $B.ast.Import: +VISIT_SEQ(st,alias,s.names) +break +case $B.ast.ImportFrom: +VISIT_SEQ(st,alias,s.names) +break +case $B.ast.Global: +var seq=s.names +for(var name of seq){var cur=symtable_lookup(st,name) +if(cur < 0){VISIT_QUIT(st,0)} +if(cur &(DEF_PARAM |DEF_LOCAL |USE |DEF_ANNOT)){var msg +if(cur & DEF_PARAM){msg=GLOBAL_PARAM}else if(cur & USE){msg=GLOBAL_AFTER_USE}else if(cur & DEF_ANNOT){msg=GLOBAL_ANNOT}else{ +msg=GLOBAL_AFTER_ASSIGN} +var exc=PyErr_Format(_b_.SyntaxError,msg,name) +set_exc_info(exc,st.filename,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset) +throw exc} +if(! 
symtable_add_def(st,name,DEF_GLOBAL,LOCATION(s))) +VISIT_QUIT(st,0) +if(! symtable_record_directive(st,name,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset)) +VISIT_QUIT(st,0)} +break +case $B.ast.Nonlocal: +var seq=s.names; +for(var name of seq){var cur=symtable_lookup(st,name) +if(cur < 0){VISIT_QUIT(st,0)} +if(cur &(DEF_PARAM |DEF_LOCAL |USE |DEF_ANNOT)){var msg +if(cur & DEF_PARAM){msg=NONLOCAL_PARAM}else if(cur & USE){msg=NONLOCAL_AFTER_USE}else if(cur & DEF_ANNOT){msg=NONLOCAL_ANNOT}else{ +msg=NONLOCAL_AFTER_ASSIGN} +var exc=PyErr_Format(_b_.SyntaxError,msg,name) +set_exc_info(exc,st.filename,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset) +throw exc} +if(!symtable_add_def(st,name,DEF_NONLOCAL,LOCATION(s))) +VISIT_QUIT(st,0) +if(!symtable_record_directive(st,name,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset)) +VISIT_QUIT(st,0)} +break +case $B.ast.Expr: +VISIT(st,expr,s.value) +break +case $B.ast.Pass: +case $B.ast.Break: +case $B.ast.Continue: +break +case $B.ast.With: +VISIT_SEQ(st,'withitem',s.items) +VISIT_SEQ(st,stmt,s.body) +break +case $B.ast.AsyncFunctionDef: +if(!symtable_add_def(st,s.name,DEF_LOCAL,LOCATION(s))) +VISIT_QUIT(st,0) +if(s.args.defaults) +VISIT_SEQ(st,expr,s.args.defaults) +if(s.args.kw_defaults) +VISIT_SEQ_WITH_NULL(st,expr,s.args.kw_defaults) +if(!visitor.annotations(st,s,s.args,s.returns)) +VISIT_QUIT(st,0) +if(s.decorator_list) +VISIT_SEQ(st,expr,s.decorator_list) +if(s.type_params.length > 0){if(!symtable_enter_type_param_block( +st,s.name,s.type_params,s.args.defaults !=NULL,has_kwonlydefaults(s.args.kwonlyargs,s.args.kw_defaults),s.constructor,LOCATION(s))){VISIT_QUIT(st,0);} +VISIT_SEQ(st,type_param,s.type_params);} +if(!visitor.annotations(st,s,s.args,s.returns)) +VISIT_QUIT(st,0); +if(!symtable_enter_block(st,s.name,FunctionBlock,s,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset)) +VISIT_QUIT(st,0) +st.cur.coroutine=1 +VISIT(st,'arguments',s.args) +VISIT_SEQ(st,stmt,s.body) +if(! 
symtable_exit_block(st)) +VISIT_QUIT(st,0) +if(s.type_params.length > 0){if(!symtable_exit_block(st)) +VISIT_QUIT(st,0);} +break +case $B.ast.AsyncWith: +VISIT_SEQ(st,withitem,s.items) +VISIT_SEQ(st,stmt,s.body) +break +case $B.ast.AsyncFor: +VISIT(st,expr,s.target) +VISIT(st,expr,s.iter) +VISIT_SEQ(st,stmt,s.body) +if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)} +break +default: +console.log('unhandled',s) +break} +VISIT_QUIT(st,1)} +function symtable_extend_namedexpr_scope(st,e){assert(st.stack) +assert(e instanceof $B.ast.Name) +var target_name=e.id +var i,size,ste +size=st.stack.length +assert(size) +for(i=size-1;i >=0;i--){ste=st.stack[i] +if(ste.comprehension){let target_in_scope=_PyST_GetSymbol(ste,target_name); +if(target_in_scope & DEF_COMP_ITER){let exc=PyErr_Format(_b_.SyntaxError,NAMED_EXPR_COMP_CONFLICT,target_name); +set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.ed_lineno,e.end_col_offset) +throw exc} +continue;} +if(_PyST_IsFunctionLike(ste)){let target_in_scope=_PyST_GetSymbol(ste,target_name); +if(target_in_scope & DEF_GLOBAL){if(!symtable_add_def(st,target_name,DEF_GLOBAL,LOCATION(e))) +VISIT_QUIT(st,0);}else{ +if(!symtable_add_def(st,target_name,DEF_NONLOCAL,LOCATION(e))) +VISIT_QUIT(st,0);} +if(!symtable_record_directive(st,target_name,LOCATION(e))) +VISIT_QUIT(st,0); +return symtable_add_def_helper(st,target_name,DEF_LOCAL,ste,LOCATION(e));} +if(ste.type==ModuleBlock){if(!symtable_add_def(st,target_name,DEF_GLOBAL,LOCATION(e))) +VISIT_QUIT(st,0); +if(!symtable_record_directive(st,target_name,LOCATION(e))) +VISIT_QUIT(st,0); +return symtable_add_def_helper(st,target_name,DEF_GLOBAL,ste,LOCATION(e));} +if(ste.type==ClassBlock){let exc=PyErr_Format(_b_.SyntaxError,NAMED_EXPR_COMP_IN_CLASS); +set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset); +throw exc}} +assert(0); +return 0;} +function symtable_handle_namedexpr(st,e){if(st.cur.comp_iter_expr > 0){ +var exc=PyErr_Format(PyExc_SyntaxError,NAMED_EXPR_COMP_ITER_EXPR); +set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset); +throw exc} +if(st.cur.comprehension){ +if(!symtable_extend_namedexpr_scope(st,e.target)) +return 0;} +VISIT(st,expr,e.value); +VISIT(st,expr,e.target); +return 1;} +const alias='alias',comprehension='comprehension',excepthandler='excepthandler',expr='expr',keyword='keyword',match_case='match_case',pattern='pattern',stmt='stmt',type_param='type_param',withitem='withitem' +visitor.expr=function(st,e){switch(e.constructor){case $B.ast.NamedExpr: +if(!symtable_raise_if_annotation_block(st,"named expression",e)){VISIT_QUIT(st,0);} +if(!symtable_handle_namedexpr(st,e)) +VISIT_QUIT(st,0); +break; +case $B.ast.BoolOp: +VISIT_SEQ(st,'expr',e.values); +break; +case $B.ast.BinOp: +VISIT(st,'expr',e.left); +VISIT(st,'expr',e.right); +break; +case $B.ast.UnaryOp: +VISIT(st,'expr',e.operand); +break; +case $B.ast.Lambda:{if(!GET_IDENTIFIER('lambda')) +VISIT_QUIT(st,0); +if(e.args.defaults) +VISIT_SEQ(st,'expr',e.args.defaults); +if(e.args.kw_defaults) +VISIT_SEQ_WITH_NULL(st,'expr',e.args.kw_defaults); +if(!symtable_enter_block(st,lambda,FunctionBlock,e,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset)) +VISIT_QUIT(st,0); +VISIT(st,'arguments',e.args); +VISIT(st,'expr',e.body); +if(!symtable_exit_block(st)) +VISIT_QUIT(st,0); +break;} +case $B.ast.IfExp: +VISIT(st,'expr',e.test); +VISIT(st,'expr',e.body); +VISIT(st,'expr',e.orelse); +break; +case $B.ast.Dict: +VISIT_SEQ_WITH_NULL(st,'expr',e.keys); +VISIT_SEQ(st,'expr',e.values); +break; +case $B.ast.Set: 
+VISIT_SEQ(st,'expr',e.elts); +break; +case $B.ast.GeneratorExp: +if(!visitor.genexp(st,e)) +VISIT_QUIT(st,0); +break; +case $B.ast.ListComp: +if(!visitor.listcomp(st,e)) +VISIT_QUIT(st,0); +break; +case $B.ast.SetComp: +if(!visitor.setcomp(st,e)) +VISIT_QUIT(st,0); +break; +case $B.ast.DictComp: +if(!visitor.dictcomp(st,e)) +VISIT_QUIT(st,0); +break; +case $B.ast.Yield: +if(!symtable_raise_if_annotation_block(st,"yield expression",e)){VISIT_QUIT(st,0);} +if(e.value) +VISIT(st,'expr',e.value); +st.cur.generator=1; +if(st.cur.comprehension){return symtable_raise_if_comprehension_block(st,e);} +break; +case $B.ast.YieldFrom: +if(!symtable_raise_if_annotation_block(st,"yield expression",e)){VISIT_QUIT(st,0);} +VISIT(st,'expr',e.value); +st.cur.generator=1; +if(st.cur.comprehension){return symtable_raise_if_comprehension_block(st,e);} +break; +case $B.ast.Await: +if(!symtable_raise_if_annotation_block(st,"await expression",e)){VISIT_QUIT(st,0);} +VISIT(st,'expr',e.value); +st.cur.coroutine=1; +break; +case $B.ast.Compare: +VISIT(st,'expr',e.left); +VISIT_SEQ(st,'expr',e.comparators); +break; +case $B.ast.Call: +VISIT(st,'expr',e.func); +VISIT_SEQ(st,'expr',e.args); +VISIT_SEQ_WITH_NULL(st,'keyword',e.keywords); +break; +case $B.ast.FormattedValue: +VISIT(st,'expr',e.value); +if(e.format_spec) +VISIT(st,'expr',e.format_spec); +break; +case $B.ast.JoinedStr: +VISIT_SEQ(st,'expr',e.values); +break; +case $B.ast.Constant: +break; +case $B.ast.Attribute: +VISIT(st,'expr',e.value); +break; +case $B.ast.Subscript: +VISIT(st,'expr',e.value); +VISIT(st,'expr',e.slice); +break; +case $B.ast.Starred: +VISIT(st,'expr',e.value); +break; +case $B.ast.Slice: +if(e.lower) +VISIT(st,expr,e.lower) +if(e.upper) +VISIT(st,expr,e.upper) +if(e.step) +VISIT(st,expr,e.step) +break; +case $B.ast.Name: +var flag=e.ctx instanceof $B.ast.Load ? USE :DEF_LOCAL +if(! 
symtable_add_def(st,e.id,flag,LOCATION(e))) +VISIT_QUIT(st,0); +if(e.ctx instanceof $B.ast.Load && +_PyST_IsFunctionLike(st.cur)&& +e.id=="super"){if(!GET_IDENTIFIER('__class__')|| +!symtable_add_def(st,'__class__',USE,LOCATION(e))) +VISIT_QUIT(st,0);} +break; +case $B.ast.List: +VISIT_SEQ(st,expr,e.elts); +break; +case $B.ast.Tuple: +VISIT_SEQ(st,expr,e.elts); +break;} +VISIT_QUIT(st,1);} +visitor.type_param=function(st,tp){switch(tp.constructor){case $B.ast.TypeVar: +if(!symtable_add_def(st,tp.name,DEF_TYPE_PARAM |DEF_LOCAL,LOCATION(tp))) +VISIT_QUIT(st,0); +if(tp.bound){var is_in_class=st.cur.can_see_class_scope; +if(!symtable_enter_block(st,tp.name,TypeVarBoundBlock,tp,LOCATION(tp))) +VISIT_QUIT(st,0); +st.cur.can_see_class_scope=is_in_class; +if(is_in_class && !symtable_add_def(st,"__classdict__",USE,LOCATION(tp.bound))){VISIT_QUIT(st,0);} +VISIT(st,expr,tp.bound); +if(!symtable_exit_block(st)) +VISIT_QUIT(st,0);} +break; +case $B.ast.TypeVarTuple: +if(!symtable_add_def(st,tp.name,DEF_TYPE_PARAM |DEF_LOCAL,LOCATION(tp))) +VISIT_QUIT(st,0); +break; +case $B.ast.ParamSpec: +if(!symtable_add_def(st,tp.name,DEF_TYPE_PARAM |DEF_LOCAL,LOCATION(tp))) +VISIT_QUIT(st,0); +break;} +VISIT_QUIT(st,1);} +visitor.pattern=function(st,p){switch(p.constructor){case $B.ast.MatchValue: +VISIT(st,expr,p.value); +break; +case $B.ast.MatchSingleton: +break; +case $B.ast.MatchSequence: +VISIT_SEQ(st,pattern,p.patterns); +break; +case $B.ast.MatchStar: +if(p.name){symtable_add_def(st,p.name,DEF_LOCAL,LOCATION(p));} +break; +case $B.ast.MatchMapping: +VISIT_SEQ(st,expr,p.keys); +VISIT_SEQ(st,pattern,p.patterns); +if(p.rest){symtable_add_def(st,p.rest,DEF_LOCAL,LOCATION(p));} +break; +case $B.ast.MatchClass: +VISIT(st,expr,p.cls); +VISIT_SEQ(st,pattern,p.patterns); +VISIT_SEQ(st,pattern,p.kwd_patterns); +break; +case $B.ast.MatchAs: +if(p.pattern){VISIT(st,pattern,p.pattern);} +if(p.name){symtable_add_def(st,p.name,DEF_LOCAL,LOCATION(p));} +break; +case $B.ast.MatchOr: +VISIT_SEQ(st,pattern,p.patterns); +break;} +VISIT_QUIT(st,1);} +function symtable_implicit_arg(st,pos){var id='.'+pos +if(!symtable_add_def(st,id,DEF_PARAM,ST_LOCATION(st.cur))){return 0;} +return 1;} +visitor.params=function(st,args){if(! args){return-1} +for(var arg of args){if(! 
symtable_add_def(st,arg.arg,DEF_PARAM,LOCATION(arg))) +return 0} +return 1} +visitor.annotation=function(st,annotation){var future_annotations=st.future.features & $B.CO_FUTURE_ANNOTATIONS +if(future_annotations && +!symtable_enter_block(st,'_annotation',AnnotationBlock,annotation,annotation.lineno,annotation.col_offset,annotation.end_lineno,annotation.end_col_offset)){VISIT_QUIT(st,0)} +VISIT(st,expr,annotation) +if(future_annotations && !symtable_exit_block(st)){VISIT_QUIT(st,0)} +return 1} +visitor.argannotations=function(st,args){if(!args){return-1} +for(var arg of args){if(arg.annotation){VISIT(st,expr,arg.annotation)}} +return 1} +visitor.annotations=function(st,o,a,returns){var future_annotations=st.future.ff_features & $B.CO_FUTURE_ANNOTATIONS; +if(future_annotations && +!symtable_enter_block(st,'_annotation',AnnotationBlock,o,o.lineno,o.col_offset,o.end_lineno,o.end_col_offset)){VISIT_QUIT(st,0);} +if(a.posonlyargs && !visitor.argannotations(st,a.posonlyargs)) +return 0; +if(a.args && !visitor.argannotations(st,a.args)) +return 0; +if(a.vararg && a.vararg.annotation) +VISIT(st,expr,a.vararg.annotation); +if(a.kwarg && a.kwarg.annotation) +VISIT(st,expr,a.kwarg.annotation); +if(a.kwonlyargs && !visitor.argannotations(st,a.kwonlyargs)) +return 0; +if(future_annotations && !symtable_exit_block(st)){VISIT_QUIT(st,0);} +if(returns && !visitor.annotation(st,returns)){VISIT_QUIT(st,0);} +return 1;} +visitor.arguments=function(st,a){ +if(a.posonlyargs && !visitor.params(st,a.posonlyargs)) +return 0; +if(a.args && !visitor.params(st,a.args)) +return 0; +if(a.kwonlyargs && !visitor.params(st,a.kwonlyargs)) +return 0; +if(a.vararg){if(!symtable_add_def(st,a.vararg.arg,DEF_PARAM,LOCATION(a.vararg))) +return 0; +st.cur.varargs=1;} +if(a.kwarg){if(!symtable_add_def(st,a.kwarg.arg,DEF_PARAM,LOCATION(a.kwarg))) +return 0; +st.cur.varkeywords=1;} +return 1;} +visitor.excepthandler=function(st,eh){if(eh.type) +VISIT(st,expr,eh.type); +if(eh.name) +if(!symtable_add_def(st,eh.name,DEF_LOCAL,LOCATION(eh))) +return 0; +VISIT_SEQ(st,stmt,eh.body); +return 1;} +visitor.withitem=function(st,item){VISIT(st,'expr',item.context_expr); +if(item.optional_vars){VISIT(st,'expr',item.optional_vars);} +return 1;} +visitor.match_case=function(st,m){VISIT(st,pattern,m.pattern); +if(m.guard){VISIT(st,expr,m.guard);} +VISIT_SEQ(st,stmt,m.body); +return 1;} +visitor.alias=function(st,a){ +var store_name,name=(a.asname==NULL)? 
a.name :a.asname; +var dot=name.search('\\.'); +if(dot !=-1){store_name=name.substring(0,dot); +if(!store_name) +return 0;}else{store_name=name;} +if(name !="*"){var r=symtable_add_def(st,store_name,DEF_IMPORT,LOCATION(a)); +return r;}else{if(st.cur.type !=ModuleBlock){var lineno=a.lineno,col_offset=a.col_offset,end_lineno=a.end_lineno,end_col_offset=a.end_col_offset; +var exc=PyErr_SetString(PyExc_SyntaxError,IMPORT_STAR_WARNING); +set_exc_info(exc,st.filename,lineno,col_offset,end_lineno,end_col_offset); +throw exc} +st.cur.$has_import_star=true +return 1;}} +visitor.comprehension=function(st,lc){st.cur.comp_iter_target=1; +VISIT(st,expr,lc.target); +st.cur.comp_iter_target=0; +st.cur.comp_iter_expr++; +VISIT(st,expr,lc.iter); +st.cur.comp_iter_expr--; +VISIT_SEQ(st,expr,lc.ifs); +if(lc.is_async){st.cur.coroutine=1;} +return 1;} +visitor.keyword=function(st,k){VISIT(st,expr,k.value); +return 1;} +function symtable_handle_comprehension(st,e,scope_name,generators,elt,value){var is_generator=(e.constructor===$B.ast.GeneratorExp); +var outermost=generators[0] +st.cur.comp_iter_expr++; +VISIT(st,expr,outermost.iter); +st.cur.comp_iter_expr--; +if(!scope_name || +!symtable_enter_block(st,scope_name,FunctionBlock,e,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset)){return 0;} +switch(e.constructor){case $B.ast.ListComp: +st.cur.comprehension=ListComprehension; +break; +case $B.ast.SetComp: +st.cur.comprehension=SetComprehension; +break; +case $B.ast.DictComp: +st.cur.comprehension=DictComprehension; +break; +default: +st.cur.comprehension=GeneratorExpression; +break;} +if(outermost.is_async){st.cur.coroutine=1;} +if(!symtable_implicit_arg(st,0)){symtable_exit_block(st); +return 0;} +st.cur.comp_iter_target=1; +VISIT(st,expr,outermost.target); +st.cur.comp_iter_target=0; +VISIT_SEQ(st,expr,outermost.ifs); +VISIT_SEQ_TAIL(st,comprehension,generators,1); +if(value) +VISIT(st,expr,value); +VISIT(st,expr,elt); +st.cur.generator=is_generator; +var is_async=st.cur.coroutine && !is_generator; +if(!symtable_exit_block(st)){return 0;} +if(is_async){st.cur.coroutine=1;} +return 1;} +visitor.genexp=function(st,e){return symtable_handle_comprehension(st,e,'genexpr',e.generators,e.elt,NULL);} +visitor.listcomp=function(st,e){return symtable_handle_comprehension(st,e,'listcomp',e.generators,e.elt,NULL);} +visitor.setcomp=function(st,e){return symtable_handle_comprehension(st,e,'setcomp',e.generators,e.elt,NULL);} +visitor.dictcomp=function(st,e){return symtable_handle_comprehension(st,e,'dictcomp',e.generators,e.key,e.value);} +function symtable_raise_if_annotation_block(st,name,e){var type=st.cur.type,exc +if(type==AnnotationBlock) +exc=PyErr_Format(PyExc_SyntaxError,ANNOTATION_NOT_ALLOWED,name); +else if(type==TypeVarBoundBlock) +exc=PyErr_Format(PyExc_SyntaxError,TYPEVAR_BOUND_NOT_ALLOWED,name); +else if(type==TypeAliasBlock) +exc=PyErr_Format(PyExc_SyntaxError,TYPEALIAS_NOT_ALLOWED,name); +else if(type==TypeParamBlock) +exc=PyErr_Format(PyExc_SyntaxError,TYPEPARAM_NOT_ALLOWED,name); +else +return 1; +set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset); +throw exc} +function symtable_raise_if_comprehension_block(st,e){var type=st.cur.comprehension; +var exc=PyErr_SetString(PyExc_SyntaxError,(type==ListComprehension)? "'yield' inside list comprehension" : +(type==SetComprehension)? "'yield' inside set comprehension" : +(type==DictComprehension)? 
"'yield' inside dict comprehension" : +"'yield' inside generator expression"); +exc.$frame_obj=$B.frame_obj +set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset); +throw exc}})(__BRYTHON__) +; + +(function($B){var _b_=$B.builtins,NULL=undefined,DOT='.',ELLIPSIS='...' +const STAR_TARGETS=1,DEL_TARGETS=2,FOR_TARGETS=3 +function make_string_for_ast_value(value){value=value.replace(/\n/g,'\\n\\\n') +value=value.replace(/\r/g,'\\r\\\r') +if(value[0]=="'"){var unquoted=value.substr(1,value.length-2) +return unquoted} +if(value.indexOf("'")>-1){var s='',escaped=false +for(var char of value){if(char=='\\'){if(escaped){s+='\\\\'} +escaped=!escaped}else{if(char=="'" && ! escaped){ +s+='\\'}else if(escaped){s+='\\'} +s+=char +escaped=false}} +value=s} +return value.substr(1,value.length-2)} +function encode_bytestring(s){s=s.replace(/\\t/g,'\t') +.replace(/\\n/g,'\n') +.replace(/\\r/g,'\r') +.replace(/\\f/g,'\f') +.replace(/\\v/g,'\v') +.replace(/\\\\/g,'\\') +var t=[] +for(var i=0,len=s.length;i < len;i++){var cp=s.codePointAt(i) +if(cp > 255){throw Error()} +t.push(cp)} +return t} +function EXTRA_EXPR(head,tail){return{ +lineno:head.lineno,col_offset:head.col_offset,end_lineno:tail.end_lineno,end_col_offset:tail.end_col_offset}} +function set_list(list,other){for(var item of other){list.push(item)}} +var positions=['lineno','col_offset','end_lineno','end_col_offset'] +function set_position_from_list(ast_obj,EXTRA){for(var i=0;i < 4;i++){ast_obj[positions[i]]=EXTRA[i]}} +function set_position_from_token(ast_obj,token){ast_obj.lineno=token.lineno +ast_obj.col_offset=token.col_offset +ast_obj.end_lineno=token.end_lineno +ast_obj.end_col_offset=token.end_col_offset} +function set_position_from_obj(ast_obj,obj){for(var position of positions){ast_obj[position]=obj[position]}} +function _get_names(p,names_with_defaults){var seq=[] +for(var pair of names_with_defaults){seq.push(pair.arg)} +return seq} +function _get_defaults(p,names_with_defaults){var seq=[] +for(var pair of names_with_defaults){seq.push(pair.value)} +return seq} +function _make_posonlyargs(p,slash_without_default,slash_with_default,posonlyargs){if(slash_without_default !=NULL){set_list(posonlyargs,slash_without_default)}else if(slash_with_default !=NULL){var slash_with_default_names= +_get_names(p,slash_with_default.names_with_defaults); +if(!slash_with_default_names){return-1;} +set_list(posonlyargs,$B._PyPegen.join_sequences( +p,slash_with_default.plain_names,slash_with_default_names))} +return posonlyargs==NULL ?-1 :0;} +function _make_posargs(p,plain_names,names_with_default,posargs){if(plain_names !=NULL && names_with_default !=NULL){var names_with_default_names=_get_names(p,names_with_default); +if(!names_with_default_names){return-1;} +var seqs=$B._PyPegen.join_sequences( +p,plain_names,names_with_default_names) +set_list(posargs,seqs);}else if(plain_names==NULL && names_with_default !=NULL){set_list(posargs,_get_names(p,names_with_default))} +else if(plain_names !=NULL && names_with_default==NULL){set_list(posargs,plain_names)} +return posargs==NULL ?-1 :0;} +function _make_posdefaults(p,slash_with_default,names_with_default,posdefaults){if(slash_with_default !=NULL && names_with_default !=NULL){var slash_with_default_values= +_get_defaults(p,slash_with_default.names_with_defaults); +if(!slash_with_default_values){return-1;} +var names_with_default_values=_get_defaults(p,names_with_default); +if(!names_with_default_values){return-1;} +set_list(posdefaults,$B._PyPegen.join_sequences( 
+p,slash_with_default_values,names_with_default_values))}else if(slash_with_default==NULL && names_with_default !=NULL){set_list(posdefaults,_get_defaults(p,names_with_default))} +else if(slash_with_default !=NULL && names_with_default==NULL){set_list(posdefaults,_get_defaults(p,slash_with_default.names_with_defaults))} +return posdefaults==NULL ?-1 :0;} +function _make_kwargs(p,star_etc,kwonlyargs,kwdefaults){if(star_etc !=NULL && star_etc.kwonlyargs !=NULL){set_list(kwonlyargs,_get_names(p,star_etc.kwonlyargs))}else{ +set_list(kwonlyargs,[])} +if(kwonlyargs==NULL){return-1;} +if(star_etc !=NULL && star_etc.kwonlyargs !=NULL){set_list(kwdefaults,_get_defaults(p,star_etc.kwonlyargs))} +else{ +set_list(kwdefaults,[])} +if(kwdefaults==NULL){return-1;} +return 0;} +function _seq_number_of_starred_exprs(seq){var n=0 +for(var k of seq){if(! k.is_keyword){n++;}} +return n} +$B._PyPegen={} +$B._PyPegen.constant_from_string=function(p,token){var prepared=$B.prepare_string(token) +var is_bytes=prepared.value.startsWith('b') +if(! is_bytes){var value=make_string_for_ast_value(prepared.value)}else{value=prepared.value.substr(2,prepared.value.length-3) +try{value=_b_.bytes.$factory(encode_bytestring(value))}catch(err){$B._PyPegen.raise_error_known_location(p,_b_.SyntaxError,token.lineno,token.col_offset,token.end_lineno,token.end_col_offset,'bytes can only contain ASCII literal characters')}} +var ast_obj=new $B.ast.Constant(value) +set_position_from_token(ast_obj,token) +return ast_obj} +$B._PyPegen.constant_from_token=function(p,t){var ast_obj=new $B.ast.Constant(t.string) +set_position_from_token(ast_obj,t) +return ast_obj} +$B._PyPegen.decoded_constant_from_token=function(p,t){var ast_obj=new $B.ast.Constant(t.string) +set_position_from_token(ast_obj,t) +return ast_obj} +$B._PyPegen.formatted_value=function(p,expression,debug,conversion,format,closing_brace,arena){var conversion_val=-1 +if(conversion){var conversion_expr=conversion.result,first=conversion_expr.id +if(first.length > 1 ||! 'sra'.includes(first)){$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(conversion_expr,`f-string: invalid conversion character {first}: `+ +"expected 's', 'r', or 'a'")} +var conversion_val=first.charCodeAt(0)} +var formatted_value=new $B.ast.FormattedValue(expression,conversion_val,format===undefined ? 
format :format.result) +set_position_from_obj(formatted_value,arena) +if(debug){var debug_end_line,debug_end_offset,debug_metadata +if(conversion){debug_end_line=conversion.result.lineno +debug_end_offset=conversion.result.col_offset +debug_metadata=conversion.metadata}else if(format){debug_end_line=format.result.lineno +debug_end_offset=format.result.col_offset+1 +debug_metadata=format.metadata}else{debug_end_line=p.end_lineno +debug_end_offset=p.end_col_offset +debug_metadata=closing_brace.metadata} +var debug=new $B.ast.Constant(debug_metadata) +debug.lineno=p.lineno +debug.col_offset=p.col_offset+1 +debug.end_lineno=debug_end_line +debug.end_col_offset=debug_end_offset +var joined_str=new $B.ast.JoinedStr([debug,formatted_value]) +set_position_from_obj(joined_str,arena) +return joined_str} +return formatted_value} +$B._PyPegen.joined_str=function(p,a,items,c){var ast_obj=new $B.ast.JoinedStr(items) +ast_obj.lineno=a.lineno +ast_obj.col_offset=a.col_offset +ast_obj.end_lineno=c.end_lineno +ast_obj.end_col_offset=c.end_col_offset +return ast_obj} +$B._PyPegen.setup_full_format_spec=function(p,colon,spec,arena){var ast_obj=new $B.ast.JoinedStr(spec) +set_position_from_obj(ast_obj,arena) +return result_token_with_metadata(p,ast_obj,colon.metadata)} +function result_token_with_metadata(p,result,metadata){return{result,metadata}} +$B._PyPegen.check_fstring_conversion=function(p,conv_token,conv){if(conv_token.lineno !=conv.lineno || +conv_token.end_col_offset !=conv.col_offset){$B._PyPegen.raise_error_known_location(p,_b_.SyntaxError,conv.lineno,conv.col_offset,conv.end_lineno,conv.end_col_offset,"f-string: conversion type must come right after the exclamanation mark" +)} +return result_token_with_metadata(p,conv,conv_token.metadata)} +$B._PyPegen.seq_count_dots=function(seq){if(seq===undefined){return 0} +var number_of_dots=0; +for(var token of seq){if(token.num_type==$B.py_tokens.DOT){number_of_dots+=token.string.length}else if(token.num_type==$B.py_tokens.ELLIPSIS){number_of_dots+=3}} +return number_of_dots;} +$B._PyPegen.map_names_to_ids=function(p,seq){return seq.map(e=> e.id)} +$B._PyPegen.alias_for_star=function(p,lineno,col_offset,end_lineno,end_col_offset,arena){var str="*" +return $B._PyAST.alias(str,NULL,lineno,col_offset,end_lineno,end_col_offset,arena);} +$B._PyPegen.cmpop_expr_pair=function(p,cmpop,expr){return{cmpop,expr}} +$B._PyPegen.get_cmpops=function(p,seq){var new_seq=[] +for(var pair of seq){new_seq.push(pair.cmpop)} +return new_seq} +$B._PyPegen.get_exprs=function(p,seq){var new_seq=[] +for(var pair of seq){new_seq.push(pair.expr)} +return new_seq} +function _set_seq_context(p,seq,ctx){var new_seq=[] +for(var e of seq){new_seq.push($B._PyPegen.set_expr_context(p,e,ctx))} +return new_seq} +function _set_name_context(p,e,ctx){return $B._PyAST.Name(e.id,ctx,EXTRA_EXPR(e,e))} +function _set_tuple_context(p,e,ctx){return $B._PyAST.Tuple( +_set_seq_context(p,e.elts,ctx),ctx,EXTRA_EXPR(e,e));} +function _set_list_context(p,e,ctx){return $B._PyAST.List( +_set_seq_context(p,e.elts,ctx),ctx,EXTRA_EXPR(e,e));} +function _set_subscript_context(p,e,ctx){console.log('set subscritp cntext',p,e) +return $B._PyAST.Subscript(e.value,e.slice,ctx,EXTRA_EXPR(e,e));} +function _set_attribute_context(p,e,ctx){return $B._PyAST.Attribute(e.value,e.attr,ctx,EXTRA_EXPR(e,e));} +function _set_starred_context(p,e,ctx){return $B._PyAST.Starred($B._PyPegen.set_expr_context(p,e.value,ctx),ctx,EXTRA_EXPR(e,e));} +$B._PyPegen.set_expr_context=function(p,expr,ctx){var _new=NULL; 
+switch(expr.constructor){case $B.ast.Name: +_new=_set_name_context(p,expr,ctx); +break; +case $B.ast.Tuple: +_new=_set_tuple_context(p,expr,ctx); +break; +case $B.ast.List: +_new=_set_list_context(p,expr,ctx); +break; +case $B.ast.Subscript: +_new=_set_subscript_context(p,expr,ctx); +break; +case $B.ast.Attribute: +_new=_set_attribute_context(p,expr,ctx); +break; +case $B.ast.Starred: +_new=_set_starred_context(p,expr,ctx); +break; +default: +_new=expr;} +return _new;} +$B._PyPegen.key_value_pair=function(p,key,value){return{key,value}} +$B._PyPegen.get_expr_name=function(e){switch(e.constructor.$name){case 'Attribute': +case 'Subscript': +case 'Starred': +case 'Name': +case 'List': +case 'Tuple': +case 'Lambda': +return e.constructor.$name.toLowerCase() +case 'Call': +return "function call" +case 'BoolOp': +case 'BinOp': +case 'UnaryOp': +return "expression" +case 'GeneratorExp': +return "generator expression"; +case 'Yield': +case 'YieldFrom': +return "yield expression"; +case 'Await': +return "await expression"; +case 'ListComp': +return "list comprehension"; +case 'SetComp': +return "set comprehension"; +case 'DictComp': +return "dict comprehension"; +case 'Dict': +return "dict literal"; +case 'Set': +return "set display"; +case 'JoinedStr': +case 'FormattedValue': +return "f-string expression"; +case 'Constant': +var value=e.value +if(value===_b_.None){return "None";} +if(value===false){return "False";} +if(value===true){return "True";} +if(value===_b_.Ellipsis){return "ellipsis";} +return "literal"; +case 'Compare': +return "comparison"; +case 'IfExp': +return "conditional expression"; +case 'NamedExpr': +return "named expression"; +default: +return NULL;}} +$B._PyPegen.get_keys=function(p,seq){return seq===undefined ?[]:seq.map(pair=> pair.key)} +$B._PyPegen.get_values=function(p,seq){return seq===undefined ?[]:seq.map(pair=> pair.value)} +$B._PyPegen.key_pattern_pair=function(p,key,pattern){return{key,pattern}} +$B._PyPegen.get_pattern_keys=function(p,seq){return seq===undefined ?[]:seq.map(x=> x.key)} +$B._PyPegen.get_patterns=function(p,seq){return seq===undefined ?[]:seq.map(x=> x.pattern)} +$B._PyPegen.check_legacy_stmt=function(p,name){return["print","exec"].includes(name)} +$B._PyPegen.dummy_name=function(p){var cache=NULL; +if(cache !=NULL){return cache;} +var id="dummy"+Math.random().toString(36).substr(2),ast_obj=new $B.ast.Name(id,new $B.ast.Load()) +set_position_from_list(ast_obj,[1,0,1,0]) +return ast_obj} +$B._PyPegen.add_type_comment_to_arg=function(p,a,tc){if(tc==NULL){return a} +var bytes=_b_.bytes.$factory(tc),tco=$B._PyPegen.new_type_comment(p,bytes); +var ast_obj=$B._PyAST.arg(a.arg,a.annotation,tco,a.lineno,a.col_offset,a.end_lineno,a.end_col_offset,p.arena); +console.log('arg with type comment',ast_obj) +return ast_obj} +$B._PyPegen.check_barry_as_flufl=function(p,t){return false} +$B._PyPegen.empty_arguments=function(p){return $B._PyAST.arguments([],[],NULL,[],[],NULL,[],p.arena)} +$B._PyPegen.augoperator=function(p,kind){return{kind}} +$B._PyPegen.function_def_decorators=function(p,decorators,function_def){var constr=function_def instanceof $B.ast.AsyncFunctionDef ? 
+$B.ast.AsyncFunctionDef :$B.ast.FunctionDef +var ast_obj=new constr( +function_def.name,function_def.args,function_def.body,decorators,function_def.returns,function_def.type_comment,function_def.type_params) +for(var position of positions){ast_obj[position]=function_def[position]} +return ast_obj} +$B._PyPegen.class_def_decorators=function(p,decorators,class_def){var ast_obj=$B._PyAST.ClassDef( +class_def.name,class_def.bases,class_def.keywords,class_def.body,decorators,class_def.type_params) +set_position_from_obj(ast_obj,class_def) +return ast_obj} +$B._PyPegen.keyword_or_starred=function(p,element,is_keyword){return{ +element,is_keyword}} +$B._PyPegen.make_arguments=function(p,slash_without_default,slash_with_default,plain_names,names_with_default,star_etc){ +var posonlyargs=[] +if(_make_posonlyargs(p,slash_without_default,slash_with_default,posonlyargs)==-1){return NULL;} +var posargs=[] +if(_make_posargs(p,plain_names,names_with_default,posargs)==-1){return NULL;} +var posdefaults=[] +if(_make_posdefaults(p,slash_with_default,names_with_default,posdefaults)==-1){return NULL;} +var vararg=NULL; +if(star_etc !=NULL && star_etc.vararg !=NULL){vararg=star_etc.vararg;} +var kwonlyargs=[],kwdefaults=[]; +if(_make_kwargs(p,star_etc,kwonlyargs,kwdefaults)==-1){return NULL;} +var kwarg=NULL; +if(star_etc !=NULL && star_etc.kwarg !=NULL){kwarg=star_etc.kwarg;} +var ast_obj=$B._PyAST.arguments(posonlyargs,posargs,vararg,kwonlyargs,kwdefaults,kwarg,posdefaults,p.arena) +if(ast_obj.posonlyargs===undefined){console.log('pas de posonlyargs',ast_bj) +alert()} +return ast_obj} +$B._PyPegen.name_default_pair=function(p,arg,value,tc){return{ +arg:$B._PyPegen.add_type_comment_to_arg(p,arg,tc),value:value}} +$B._PyPegen.raise_error=function(p,errtype,errmsg){if(p.fill==0){var va=[errmsg] +$B._PyPegen.raise_error_known_location(p,errtype,0,0,0,-1,errmsg,va); +return NULL} +var t=p.known_err_token !=NULL ? p.known_err_token :p.tokens[p.fill-1]; +var va=errmsg +$B._PyPegen.raise_error_known_location(p,errtype,t.lineno,t.col_offset,t.end_lineno,t.end_col_offset,errmsg,va);} +$B._PyPegen.raise_error_known_location=function(p,errtype,lineno,col_offset,end_lineno,end_col_offset,errmsg,va){var exc=errtype.$factory(errmsg) +exc.filename=p.filename +if(p.known_err_token){var token=p.known_err_token +exc.lineno=token.lineno +exc.offset=token.col_offset+1 +exc.end_lineno=token.end_lineno +exc.end_offset=token.end_col_offset +exc.text=token.line}else{exc.lineno=lineno +exc.offset=col_offset+1 +exc.end_lineno=end_lineno +exc.end_offset=end_col_offset+1 +var src=$B.file_cache[p.filename] +if(src !==undefined){var lines=src.split('\n'),line=lines[exc.lineno-1] +exc.text=line+'\n'}else{exc.text=_b_.None}} +exc.args[1]=$B.fast_tuple([p.filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset]) +throw exc} +$B._PyPegen.seq_delete_starred_exprs=function(p,kwargs){var len=kwargs.length,new_len=len-_seq_number_of_starred_exprs(kwargs) +if(new_len==0){return NULL;} +var new_seq=[] +for(var k of kwargs){if(k.is_keyword){new_seq.push(k.element)}} +return new_seq} +$B._PyPegen.seq_extract_starred_exprs=function(p,kwargs){var new_len=_seq_number_of_starred_exprs(kwargs); +if(new_len==0){return NULL;} +var new_seq=[] +var idx=0; +for(var k of kwargs){if(! 
k.is_keyword){new_seq[idx++]=k.element}} +return new_seq} +$B._PyPegen.slash_with_default=function(p,plain_names,names_with_defaults){return{plain_names,names_with_defaults}} +$B._PyPegen.star_etc=function(p,vararg,kwonlyargs,kwarg){return{vararg,kwonlyargs,kwarg}} +$B._PyPegen.collect_call_seqs=function(p,a,b,lineno,col_offset,end_lineno,end_col_offset,arena){var args_len=a.length,total_len=args_len; +if(b==NULL){return $B._PyAST.Call($B._PyPegen.dummy_name(p),a,[],lineno,col_offset,end_lineno,end_col_offset,arena);} +var starreds=$B._PyPegen.seq_extract_starred_exprs(p,b),keywords=$B._PyPegen.seq_delete_starred_exprs(p,b); +if(starreds){total_len+=starreds.length} +var args=[] +for(var i=0;i < args_len;i++){args[i]=a[i]} +for(;i < total_len;i++){args[i]=starreds[i-args_len]} +return $B._PyAST.Call($B._PyPegen.dummy_name(p),args,keywords,lineno,col_offset,end_lineno,end_col_offset,arena);} +$B._PyPegen.join_sequences=function(p,a,b){return a.concat(b)} +function make_conversion_code(conv){switch(conv){case null: +return-1 +case 'a': +return 97 +case 'r': +return 114 +case 's': +return 115}} +function make_formatted_value(p,fmt_values){ +if(! fmt_values){return} +var seq=[] +for(var item of fmt_values){if(typeof item=='string'){var fmt_ast=new $B.ast.Constant(item) +set_position_from_obj(fmt_ast,p.arena)}else{var src=item.expression.trimStart() +var _ast=new $B.Parser(src,p.filename,'eval').parse() +var raw_value=_ast.body +var fmt_ast=new $B.ast.FormattedValue(raw_value,make_conversion_code(item.conversion),make_formatted_value(p,item.fmt)) +set_position_from_obj(fmt_ast,_ast)} +seq.push(fmt_ast)} +var ast_obj=new $B.ast.JoinedStr(seq) +set_position_from_obj(ast_obj,p.arena) +return ast_obj} +$B._PyPegen.concatenate_strings=function(p,strings){ +var res='',first=strings[0],last=$B.last(strings),type +var state=NULL,value,values=[] +function error(message){var a={lineno:first.start[0],col_offset:first.start[1],end_lineno :last.end[0],end_col_offset:last.end[1]} +$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a,message)} +function set_position_from_list(ast_obj,items){var first=items[0],last=items[items.length-1] +ast_obj.lineno=first.lineno +ast_obj.col_offset=first.col_offset +ast_obj.end_lineno=last.end_lineno +ast_obj.end_col_offset=last.end_col_offset} +var items=[],has_fstring=false,state +for(var token of strings){if(token instanceof $B.ast.JoinedStr){ +has_fstring=true +if(state=='bytestring'){error('cannot mix bytes and nonbytes literals')} +for(var fs_item of token.values){if(fs_item instanceof $B.ast.Constant){ +var parts=fs_item.value.split('\\\'') +parts=parts.map(x=> x.replace(new RegExp("'","g"),"\\'")) +fs_item.value=parts.join('\\\'') +fs_item.value=fs_item.value.replace(/\n/g,'\\n') +.replace(/\r/g,'\\r')} +items.push(fs_item)} +state='string'}else{items.push(token) +var is_bytes=token.value.__class__===_b_.bytes +if((is_bytes && state=='string')|| +(state=='bytestring' && ! is_bytes)){error('cannot mix bytes and nonbytes literals')} +state=is_bytes ? 
'bytestring' :'string'}} +if(state=='bytestring'){ +var bytes=[] +for(var item of items){bytes=bytes.concat(item.value.source)} +value=_b_.bytes.$factory(bytes) +var ast_obj=new $B.ast.Constant(value) +set_position_from_list(ast_obj,items) +return ast_obj} +function group_consec_strings(items){if(items.length==1){return items[0]} +var values=items.map(x=> x.value) +let ast_obj=new $B.ast.Constant(values.join('')) +set_position_from_list(ast_obj,items) +return ast_obj} +var items1=[],consec_strs=[],item_type=null +for(var i=0,len=items.length;i < len;i++){item=items[i] +if(item_type===null){item_type=Object.getPrototypeOf(item)} +if(item instanceof $B.ast.Constant){consec_strs.push(item)}else{if(consec_strs.length > 0){items1.push(group_consec_strings(consec_strs))} +consec_strs=[] +items1.push(item)}} +if(consec_strs.length > 0){items1.push(group_consec_strings(consec_strs))} +if(! has_fstring){return items1[0]} +var jstr_values=items1 +var ast_obj=new $B.ast.JoinedStr(jstr_values) +set_position_from_list(ast_obj,strings) +return ast_obj} +$B._PyPegen.ensure_imaginary=function(p,exp){if(!(exp instanceof $B.ast.Constant)|| +exp.value.__class__ !=_b_.complex){$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(exp,"imaginary number required in complex literal"); +return NULL} +return exp} +$B._PyPegen.ensure_real=function(p,exp){if(!(exp instanceof $B.ast.Constant)||exp.value.type=='imaginary'){$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION( +exp,"real number required in complex literal"); +return NULL} +return exp} +$B._PyPegen.set_expr_context=function(p,a,ctx){a.ctx=ctx +return a} +$B._PyPegen.singleton_seq=function(p,a){return[a]} +$B._PyPegen.seq_insert_in_front=function(p,a,seq){return seq ?[a].concat(seq):[a]} +$B._PyPegen.seq_flatten=function(p,seqs){var res=[] +for(var seq of seqs){for(var item of seq){res.push(item)}} +return res} +$B._PyPegen.join_names_with_dot=function(p,first_name,second_name){var str=first_name.id+'.'+second_name.id +return $B._PyAST.Name(str,new $B.ast.Load(),EXTRA_EXPR(first_name,second_name))} +$B._PyPegen.make_module=function(p,a){return new $B.ast.Module(a)} +$B._PyPegen.new_type_comment=function(p,s){if(s.length===0){return NULL} +return s} +$B._PyPegen.get_last_comprehension_item=function(comprehension){if(comprehension.ifs==NULL ||comprehension.ifs.length==0){return comprehension.iter;} +return $B.last(comprehension.ifs);} +$B._PyPegen.arguments_parsing_error=function(p,e){var kwarg_unpacking=0; +for(let keyword of e.keywords){if(! 
keyword.arg){kwarg_unpacking=1;}} +var msg=NULL; +if(kwarg_unpacking){msg="positional argument follows keyword argument unpacking";}else{ +msg="positional argument follows keyword argument";} +return $B.helper_functions.RAISE_SYNTAX_ERROR(p,msg);} +$B._PyPegen.nonparen_genexp_in_call=function(p,args,comprehensions){ +var len=args.args.length +if(len <=1){return NULL;} +var last_comprehension=$B.last(comprehensions); +return $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,args.args[len-1],$B._PyPegen.get_last_comprehension_item(last_comprehension),"Generator expression must be parenthesized" +);} +$B._PyPegen.get_invalid_target=function(e,targets_type){if(e==NULL){return NULL;} +function VISIT_CONTAINER(CONTAINER,TYPE){for(var elt of CONTAINER.elts){var child=$B._PyPegen.get_invalid_target(elt,targets_type); +if(child !=NULL){return child;}}} +switch(e.constructor){case $B.ast.List: +case $B.ast.Tuple: +return VISIT_CONTAINER(e,e.constructor); +case $B.ast.Starred: +if(targets_type==DEL_TARGETS){return e;} +return $B._PyPegen.get_invalid_target(e.value,targets_type); +case $B.ast.Compare: +if(targets_type==FOR_TARGETS){var cmpop=e.ops[0] +if(cmpop instanceof $B.ast.In){return $B._PyPegen.get_invalid_target(e.left,targets_type);} +return NULL;} +return e; +case $B.ast.Name: +case $B.ast.Subscript: +case $B.ast.Attribute: +return NULL; +default: +return e;}}})(__BRYTHON__) +; +(function($B){var _b_=$B.builtins +var s_escaped='abfnrtvxuU"0123456789'+"'"+'\\',is_escaped={} +for(var i=0;i < s_escaped.length;i++){is_escaped[s_escaped.charAt(i)]=true} +function escaped_to_byte(char){var table={a:7,b:8,f:12,n:10,r:13,t:9,v:11} +if(table[char]!==undefined){return table[char]} +return char.charCodeAt(0)} +function to_bytes(s){var pos=0,bytes=[] +while(pos < s.length){if(s[pos]=='\\'){bytes[bytes.length]=escaped_to_byte(s[pos+1]) +pos+=2}else{bytes[bytes.length]=s.charCodeAt(pos) +pos++}} +return bytes} +function string_error(token,msg){var a={lineno:token.start[0],col_offset:token.start[1],end_lineno:token.end[0],end_col_offset:token.end[1]} +$B.Parser.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a,msg)} +function test_escape(token,C,text,string_start,antislash_pos){ +var seq_end,mo +mo=/^[0-7]{1,3}/.exec(text.substr(antislash_pos+1)) +if(mo){return[String.fromCharCode(parseInt(mo[0],8)),1+mo[0].length]} +switch(text[antislash_pos+1]){case "x": +var mo=/^[0-9A-F]{0,2}/i.exec(text.substr(antislash_pos+2)) +if(mo[0].length !=2){seq_end=antislash_pos+mo[0].length+1 +$token.value.start[1]=seq_end +string_error(token,["(unicode error) 'unicodeescape' codec can't decode "+ +`bytes in position ${antislash_pos}-${seq_end}: truncated `+ +"\\xXX escape"])}else{return[String.fromCharCode(parseInt(mo[0],16)),2+mo[0].length]} +case "u": +var mo=/^[0-9A-F]{0,4}/i.exec(text.substr(antislash_pos+2)) +if(mo[0].length !=4){seq_end=antislash_pos+mo[0].length+1 +$token.value.start[1]=seq_end +string_error(token,["(unicode error) 'unicodeescape' codec can't decode "+ +`bytes in position ${antislash_pos}-${seq_end}: truncated `+ +"\\uXXXX escape"])}else{return[String.fromCharCode(parseInt(mo[0],16)),2+mo[0].length]} +case "U": +var mo=/^[0-9A-F]{0,8}/i.exec(text.substr(antislash_pos+2)) +if(mo[0].length !=8){seq_end=antislash_pos+mo[0].length+1 +$token.value.start[1]=seq_end +string_error(token,["(unicode error) 'unicodeescape' codec can't decode "+ +`bytes in position ${antislash_pos}-${seq_end}: truncated `+ +"\\uXXXX escape"])}else{var value=parseInt(mo[0],16) +if(value > 0x10FFFF){string_error(token,'invalid unicode escape 
'+mo[0])}else if(value >=0x10000){return[SurrogatePair(value),2+mo[0].length]}else{return[String.fromCharCode(value),2+mo[0].length]}}}} +$B.prepare_string=function(token){var s=token.string,len=s.length,pos=0,string_modifier,_type="string",quote,C={type:'str'} +while(pos < len){if(s[pos]=='"' ||s[pos]=="'"){quote=s[pos] +string_modifier=s.substr(0,pos) +if(s.substr(pos,3)==quote.repeat(3)){_type="triple_string" +inner=s.substring(pos+3,s.length-3)}else{inner=s.substring(pos+quote.length,len-quote.length)} +break} +pos++} +var result={quote} +var mods={r:'raw',f:'fstring',b:'bytes'} +for(var mod of string_modifier){result[mods[mod]]=true} +var raw=C.type=='str' && C.raw,string_start=pos+1,bytes=false,fstring=false,sm_length, +end=null; +if(string_modifier){switch(string_modifier){case 'r': +raw=true break -case $B.ast.Expression: -visitor.expr(st,mod.body) +case 'u': break -case $B.ast.Interactive: -seq=mod.body -for(let item of seq){visitor.stmt(st,item)} +case 'b': +bytes=true +break +case 'rb': +case 'br': +bytes=true +raw=true +break +case 'f': +fstring=true +sm_length=1 +break +case 'fr': +case 'rf': +fstring=true +sm_length=2 +raw=true break} -symtable_analyze(st) -return st.top;} -function _PyST_GetSymbol(ste,name){if(! _b_.dict.$contains_string(ste.symbols,name)){return 0} -return _b_.dict.$getitem_string(ste.symbols,name)} -function _PyST_GetScope(ste,name){var symbol=_PyST_GetSymbol(ste,name); -return(symbol >> SCOPE_OFFSET)& SCOPE_MASK;} -function _PyST_IsFunctionLike(ste){return ste.type==FunctionBlock -||ste.type==TypeVarBoundBlock -||ste.type==TypeAliasBlock -||ste.type==TypeParamBlock;} -function PyErr_Format(exc_type,message,arg){if(arg){message=_b_.str.__mod__(message,arg)} -return exc_type.$factory(message)} -function PyErr_SetString(exc_type,message){return exc_type.$factory(message)} -function set_exc_info(exc,filename,lineno,offset,end_lineno,end_offset){exc.filename=filename +string_modifier=false} +var escaped=false,zone='',end=0,src=inner +if(bytes){var source=[]} +while(end < src.length){if(escaped){if(src.charAt(end)=="a" && ! 
raw){zone=zone.substr(0,zone.length-1)+"\u0007"}else{zone+=src.charAt(end) +if(raw && src.charAt(end)=='\\'){zone+='\\'}} +escaped=false +end++}else if(src.charAt(end)=="\\"){if(raw){if(end < src.length-1 && +src.charAt(end+1)==quote){zone+='\\\\'+quote +end+=2}else{zone+='\\\\' +end++} +escaped=true}else{if(src.charAt(end+1)=='\n'){ +end+=2}else if(src.substr(end+1,2)=='N{'){ +var end_lit=end+3,re=new RegExp("[-a-zA-Z0-9 ]+"),search=re.exec(src.substr(end_lit)) +if(search===null){string_error(token,"(unicode error) "+ +"malformed \\N character escape",pos)} +var end_lit=end_lit+search[0].length +if(src.charAt(end_lit)!="}"){string_error(token,"(unicode error) "+ +"malformed \\N character escape")} +var description=search[0].toUpperCase() +if($B.unicodedb===undefined){var xhr=new XMLHttpRequest +xhr.open("GET",$B.brython_path+"unicode.txt",false) +xhr.onreadystatechange=function(){if(this.readyState==4){if(this.status==200){$B.unicodedb=this.responseText}else{console.log("Warning - could not "+ +"load unicode.txt")}}} +xhr.send()} +if($B.unicodedb !==undefined){var re=new RegExp("^([0-9A-F]+);"+ +description+";.*$","m") +search=re.exec($B.unicodedb) +if(search===null){string_error(token,"(unicode error) "+ +"unknown Unicode character name")} +var cp=parseInt(search[1],16) +zone+=String.fromCodePoint(cp) +end=end_lit+1}else{end++}}else{var esc=test_escape(token,C,src,string_start,end) +if(esc){if(esc[0]=='\\'){zone+='\\\\'}else{zone+=esc[0]} +end+=esc[1]}else{if(end < src.length-1 && +is_escaped[src.charAt(end+1)]===undefined){zone+='\\'} +zone+='\\' +escaped=true +end++}}}}else if(src.charAt(end)=='\n' && _type !='triple_string'){ +console.log(pos,end,src.substring(pos,end)) +string_error(token,["EOL while scanning string literal"])}else{zone+=src.charAt(end) +end++}} +var $string=zone,string='' +for(var i=0;i < $string.length;i++){var $car=$string.charAt(i) +if($car==quote){if(raw ||(i==0 || +$string.charAt(i-1)!='\\')){string+='\\'}else if(_type=="triple_string"){ +var j=i-1 +while($string.charAt(j)=='\\'){j--} +if((i-j-1)% 2==0){string+='\\'}}} +string+=$car} +if(fstring){try{var re=new RegExp("\\\\"+quote,"g"),string_no_bs=string.replace(re,quote) +var elts=$B.parse_fstring(string_no_bs)}catch(err){string_error(token,err.message)}} +if(bytes){result.value='b'+quote+string+quote +result.bytes=to_bytes(string)}else if(fstring){result.value=elts}else{result.value=quote+string+quote} +C.raw=raw; +return result}})(__BRYTHON__) +; +(function($B){function test_num(num_lit){var len=num_lit.length,pos=0,char,elt=null,subtypes={b:'binary',o:'octal',x:'hexadecimal'},digits_re=/[_\d]/ +function error(message){throw SyntaxError(message)} +function check(elt){if(elt.value.length==0){var t=subtypes[elt.subtype]||'decimal' +error("invalid "+t+" literal")}else if(elt.value[elt.value.length-1].match(/[\-+_]/)){var t=subtypes[elt.subtype]||'decimal' +error("invalid "+t+" literal")}else{ +elt.value=elt.value.replace(/_/g,"") +elt.length=pos +return elt}} +while(pos < len){var char=num_lit[pos] +if(char.match(digits_re)){if(elt===null){elt={value:char}}else{if(char=='_' && elt.value.match(/[._+\-]$/)){ +error('consecutive _ at '+pos)}else if(char=='_' && elt.subtype=='float' && +elt.value.match(/e$/i)){ +error('syntax error')}else if(elt.subtype=='b' && !(char.match(/[01_]/))){error(`invalid digit '${char}' in binary literal`)}else if(elt.subtype=='o' && !(char.match(/[0-7_]/))){error(`invalid digit '${char}' in octal literal`)}else if(elt.subtype===undefined && elt.value.startsWith("0")&& 
+!char.match(/[0_]/)){error("leading zeros in decimal integer literals are not"+ +" permitted; use an 0o prefix for octal integers")} +elt.value+=char} +pos++}else if(char.match(/[oxb]/i)){if(elt.value=="0"){elt.subtype=char.toLowerCase() +if(elt.subtype=="x"){digits_re=/[_\da-fA-F]/} +elt.value='' +pos++}else{error("invalid char "+char)}}else if(char=='.'){if(elt===null){error("invalid char in "+num_lit+" pos "+pos+": "+char)}else if(elt.subtype===undefined){elt.subtype="float" +if(elt.value.endsWith('_')){error("invalid decimal literal")} +elt.value=elt.value.replace(/_/g,"")+char +pos++}else{return check(elt)}}else if(char.match(/e/i)){if(num_lit[pos+1]===undefined){error("nothing after e")}else if(elt && subtypes[elt.subtype]!==undefined){ +error("syntax error")}else if(elt && elt.value.endsWith('_')){ +error("syntax error")}else if(num_lit[pos+1].match(/[+\-0-9_]/)){if(elt && elt.value){if(elt.exp){elt.length=pos +return elt} +elt.subtype='float' +elt.value+=char +elt.exp=true +pos++}else{error("unexpected e")}}else{return check(elt)}}else if(char.match(/[\+\-]/i)){if(elt===null){elt={value:char} +pos++}else if(elt.value.search(/e$/i)>-1){elt.value+=char +pos++}else{return check(elt)}}else if(char.match(/j/i)){if(elt &&(! elt.subtype ||elt.subtype=="float")){elt.imaginary=true +check(elt) +elt.length++ +return elt}else{error("invalid syntax")}}else{break}} +return check(elt)} +$B.prepare_number=function(n){ +n=n.replace(/_/g,"") +if(n.startsWith('.')){if(n.endsWith("j")){return{type:'imaginary',value:$B.prepare_number(n.substr(0,n.length-1))}}else{return{type:'float',value:n+''}} +pos=j}else if(n.startsWith('0')&& n !='0'){ +var num=test_num(n),base +if(num.imaginary){return{type:'imaginary',value:$B.prepare_number(num.value)}} +if(num.subtype=='float'){return{type:num.subtype,value:num.value+''}} +if(num.subtype===undefined){base=10}else{base={'b':2,'o':8,'x':16}[num.subtype]} +if(base !==undefined){return{type:'int',value:[base,num.value]}}}else{var num=test_num(n) +if(num.subtype=="float"){if(num.imaginary){return{ +type:'imaginary',value:$B.prepare_number(num.value)}}else{return{ +type:'float',value:num.value+''}}}else{if(num.imaginary){return{ +type:'imaginary',value:$B.prepare_number(num.value)}}else{return{ +type:'int',value:[10,num.value]}}}}}})(__BRYTHON__) +; + +(function($B){var _b_=$B.builtins,debug=0 +var p={feature_version:$B.version_info[1]} +$B.parser_constants={Store:new $B.ast.Store(),Load:new $B.ast.Load(),Del:new $B.ast.Del(),NULL:undefined,alias_ty:$B.ast.alias,keyword_ty:$B.ast.keyword,arguments_ty:$B.ast.arguments,expr_ty:$B.ast.expr,asdl_stmt_seq:Array,asdl_int_seq:Array,asdl_expr_seq:Array,asdl_keyword_seq:Array,asdl_identifier_seq:Array,asdl_pattern_seq:Array,asdl_type_param_seq:Array,AugOperator:$B.ast.AugAssign,IsNot:$B.ast.IsNot,Py_Ellipsis:_b_.Ellipsis,Py_False:false,Py_True:true,Py_None:_b_.None,PyExc_SyntaxError:_b_.SyntaxError,STAR_TARGETS:1,DEL_TARGETS:2,FOR_TARGETS:3,PyBytes_AS_STRING:(b)=> b} +for(var op_type of $B.op_types){for(var key in op_type){var klass_name=op_type[key] +$B.parser_constants[klass_name]=new $B.ast[klass_name]()}} +var NULL=$B.parser_constants.NULL +$B._PyAST={} +for(var ast_class in $B.ast_classes){ +var args=$B.ast_classes[ast_class] +if(Array.isArray(args)){continue} +args=args.replace(/\*/g,'').replace(/\?/g,'') +var arg_names=args.split(',') +$B._PyAST[ast_class]=(function(ast_name,ast_args){return function(){var _args=Array.from(arguments).slice(0,ast_args.length+1) +var EXTRA=_args.pop() +var ast_obj=new 
$B.ast[ast_name](..._args) +set_position_from_EXTRA(ast_obj,EXTRA) +return ast_obj}})(ast_class,arg_names)} +function get_last_token(p){var last_token=$B.last(p.tokens) +if(last_token.type=="ENDMARKER"){var src=$B.file_cache[p.filename] +if(src){for(var token of $B.tokenizer(src)){if(token.type=="ENDMARKER"){break} +if(token.type !="DEDENT"){last_token=token}}}else{last_token=undefined}} +p.known_err_token=last_token} +var helper_functions={CHECK:function(type,obj){if(Array.isArray(type)){var check +for(var t of type){check=helper_functions.CHECK(t,obj) +if(check){return check}} +return undefined} +if(obj instanceof type){return obj} +return undefined},CHECK_VERSION:function(type,version,msg,node){return helper_functions.INVALID_VERSION_CHECK(p,version,msg,node)},CHECK_NULL_ALLOWED:function(type,obj){if(obj !==NULL){if(type instanceof Array){for(var t of type){if(obj instanceof t){return obj}} +return}else{return obj instanceof type ? obj :undefined}} +return obj},INVALID_VERSION_CHECK:function(p,version,msg,node){if(node==NULL){p.error_indicator=1; +return NULL;} +if(p.feature_version < version){p.error_indicator=1; +return helper_functions.RAISE_SYNTAX_ERROR("%s only supported in Python 3.%i and greater",msg,version);} +return node;},NEW_TYPE_COMMENT:function(p,x){return x},RAISE_ERROR_KNOWN_LOCATION:function(p,errtype,lineno,col_offset,end_lineno,end_col_offset,errmsg){var va=[errmsg] +$B._PyPegen.raise_error_known_location(p,errtype,lineno,col_offset,end_lineno,end_col_offset,errmsg,va); +return NULL;},RAISE_SYNTAX_ERROR:function(p,msg){var extra_args=[] +for(var i=1,len=arguments.length;i < len;i++){extra_args.push(arguments[i])} +get_last_token(p) +$B._PyPegen.raise_error(p,_b_.SyntaxError,msg,...extra_args)},RAISE_INDENTATION_ERROR:function(p,msg,arg){if(arg !==undefined){msg=_b_.str.__mod__(msg,arg)} +var last_token=$B.last(p.tokens) +if(last_token.type=="ENDMARKER"){var src=$B.file_cache[p.filename] +if(src){for(var token of $B.tokenizer(src)){if(token.type=="ENDMARKER"){break} +last_token=token}}} +get_last_token(p) +$B._PyPegen.raise_error(p,_b_.IndentationError,msg)},RAISE_SYNTAX_ERROR_KNOWN_LOCATION:function(p,a,err_msg,arg){if(arg !==undefined){err_msg=_b_.str.__mod__(err_msg,arg)} +helper_functions.RAISE_ERROR_KNOWN_LOCATION(p,_b_.SyntaxError,a.lineno,a.col_offset,a.end_lineno,a.end_col_offset,err_msg)},RAISE_SYNTAX_ERROR_KNOWN_RANGE:function(p,a,b,msg){var extra_args=arguments[4] +if(extra_args){msg=_b_.str.__mod__(msg,extra_args)} +helper_functions.RAISE_ERROR_KNOWN_LOCATION(p,_b_.SyntaxError,a.lineno,a.col_offset,b.end_lineno,b.end_col_offset,msg,extra_args)},RAISE_SYNTAX_ERROR_INVALID_TARGET:function(p,type,e){return helper_functions._RAISE_SYNTAX_ERROR_INVALID_TARGET(p,type,e)},_RAISE_SYNTAX_ERROR_INVALID_TARGET(p,type,e){var invalid_target=$B.helper_functions.CHECK_NULL_ALLOWED($B.ast.expr,$B._PyPegen.get_invalid_target(e,type)); +if(invalid_target !=NULL){var msg; +if(type==$B.parser_constants.STAR_TARGETS || +type==$B.parser_constants.FOR_TARGETS){msg="cannot assign to %s";}else{msg="cannot delete %s";} +return helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION( +p,invalid_target,msg,$B._PyPegen.get_expr_name(invalid_target) +)} +return NULL;},RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN:function(p,msg){return helper_functions.RAISE_SYNTAX_ERROR(p,msg)},RAISE_SYNTAX_ERROR_STARTING_FROM:function(p,a,msg,...args){var last=p.tokens[p.tokens.length-1] +return 
helper_functions.RAISE_ERROR_KNOWN_LOCATION(p,_b_.SyntaxError,a.lineno,a.col_offset,last.end_lineno,last.end_col_offset-1,msg,...args)},asdl_seq_LEN:(t)=> t.length,asdl_seq_GET:(t,i)=> t[i]} +$B.helper_functions=helper_functions +function raise_error_known_location(type,filename,lineno,col_offset,end_lineno,end_col_offset,line,message){var exc=type.$factory(message) +exc.filename=filename exc.lineno=lineno -exc.offset=offset+1 +exc.offset=col_offset+1 exc.end_lineno=end_lineno -exc.end_offset=end_offset+1 -var src=$B.file_cache[filename] -if(src !==undefined){var lines=src.split('\n') -exc.text=lines[lineno-1]}else{exc.text=''} -exc.args[1]=[filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset]} -function error_at_directive(exc,ste,name){assert(ste.directives) -for(var data of ste.directives){if(data[0]==name){set_exc_info(exc,ste.table.filename,data[1],data[2],data[3],data[4]) -return 0}} -throw _b_.RuntimeError.$factory( -"BUG: internal directive bookkeeping broken")} -function SET_SCOPE(DICT,NAME,I){DICT[NAME]=I} -function is_free_in_any_child(entry,key){for(var child_ste of entry.ste_children){var scope=_PyST_GetScope(child_ste,key) -if(scope==FREE){return 1}} -return 0} -function inline_comprehension(ste,comp,scopes,comp_free,inlined_cells){for(var item of _b_.dict.$iter_items(comp.symbols)){ -var k=item.key,comp_flags=item.value; -if(comp_flags & DEF_PARAM){ -continue;} -var scope=(comp_flags >> SCOPE_OFFSET)& SCOPE_MASK; -var only_flags=comp_flags &((1 << SCOPE_OFFSET)-1) -if(scope==CELL ||only_flags & DEF_COMP_CELL){inlined_cells.add(k)} -var existing=_b_.dict.$contains_string(ste.symbols,k) -if(!existing){ -var v_flags=only_flags -_b_.dict.$setitem(ste.symbols,k,v_flags); -SET_SCOPE(scopes,k,scope);}else{ -if((existing & DEF_BOUND)&& -!is_free_in_any_child(comp,k)&& -ste.type !==ClassBlock){_b_.set.remove(comp_free,k)}}} -return 1;} -function analyze_name(ste,scopes,name,flags,bound,local,free,global,type_params,class_entry){if(flags & DEF_GLOBAL){if(flags & DEF_NONLOCAL){let exc=PyErr_Format(_b_.SyntaxError,"name '%s' is nonlocal and global",name) -error_at_directive(exc,ste,name) -throw exc} -SET_SCOPE(scopes,name,GLOBAL_EXPLICIT) -global.add(name) -if(bound){bound.delete(name)} -return 1} -if(flags & DEF_NONLOCAL){if(!bound){let exc=PyErr_Format(_b_.SyntaxError,"nonlocal declaration not allowed at module level"); -error_at_directive(exc,ste,name) -throw exc} -if(! 
bound.has(name)){let exc=PyErr_Format(_b_.SyntaxError,"no binding for nonlocal '%s' found",name) -error_at_directive(exc,ste,name) -throw exc} -if(type_params.has(name)){let exc=PyErr_Format(_b_.SyntaxError,"nonlocal binding not allowed for type parameter '%s'",name); -error_at_directive(exc,ste,name) -throw exc} -SET_SCOPE(scopes,name,FREE) -ste.free=1 -free.add(name) -return 1} -if(flags & DEF_BOUND){SET_SCOPE(scopes,name,LOCAL) -local.add(name) -global.delete(name) -if(flags & DEF_TYPE_PARAM){type_params.add(name)}else{type_params.delete(name)} -return 1} -if(class_entry !=NULL){var class_flags=_PyST_GetSymbol(class_entry,name); -if(class_flags & DEF_GLOBAL){SET_SCOPE(scopes,name,GLOBAL_EXPLICIT) -return 1;}else if(class_flags & DEF_BOUND && -!(class_flags & DEF_NONLOCAL)){SET_SCOPE(scopes,name,GLOBAL_IMPLICIT) -return 1}} -if(bound && bound.has(name)){SET_SCOPE(scopes,name,FREE) -ste.free=1 -free.add(name) -return 1} -if(global && global.has(name)){SET_SCOPE(scopes,name,GLOBAL_IMPLICIT) -return 1} -if(ste.nested){ste.free=1} -SET_SCOPE(scopes,name,GLOBAL_IMPLICIT) -return 1} -function analyze_cells(scopes,free,inlined_cells){var v,v_cell; -v_cell=CELL; -if(!v_cell){return 0;} -for(let name in scopes){v=scopes[name] -var scope=v; -if(scope !=LOCAL){continue;} -if(free.has(name)&& ! inlined_cells.has(name)){continue;} -scopes[name]=v_cell -free.delete(name)} -return 1} -function drop_class_free(ste,free){var res=free.delete('__class__') -if(res){ste.needs_class_closure=1} -res=free.delete('__classdict__') -if(res){ste.needs_class_classdict=1} -return 1} -function update_symbols(symbols,scopes,bound,free,inlined_cells,classflag){var v,v_scope,v_new,v_free -for(let name of _b_.dict.$keys_string(symbols)){var test=false -let flags=_b_.dict.$getitem_string(symbols,name) -if(test){console.log('in update symbols, name',name,'flags',flags,flags & DEF_COMP_CELL)} -if(inlined_cells.has(name)){flags |=DEF_COMP_CELL} -v_scope=scopes[name] -var scope=v_scope -if(test){console.log('name',name,'scopes[name]',scopes[name],' flags |=',scope << SCOPE_OFFSET)} -flags |=(scope << SCOPE_OFFSET) -v_new=flags -if(!v_new){return 0;} -if(test){console.log('set symbol',name,'v_new',v_new,'def comp cell',DEF_COMP_CELL,v_new & DEF_COMP_CELL)} -_b_.dict.$setitem_string(symbols,name,v_new)} -v_free=FREE << SCOPE_OFFSET -for(let name of free){v=_b_.dict.$get_string(symbols,name) -if(v !==_b_.dict.$missing){ -if(classflag && -v &(DEF_BOUND |DEF_GLOBAL)){let flags=v |DEF_FREE_CLASS; -v_new=flags; -if(! 
v_new){return 0;} -_b_.dict.$setitem_string(symbols,name,v_new)} -continue;} -if(bound && !bound.has(name)){continue;} -_b_.dict.$setitem_string(symbols,name,v_free)} -return 1} -function analyze_block(ste,bound,free,global,typeparams,class_entry){var success=0 -let local=new Set() -let scopes={} -let newglobal=new Set() -let newfree=new Set() -let newbound=new Set() -let inlined_cells=new Set() -if(ste.type===ClassBlock){ -Set_Union(newglobal,global) -if(bound){Set_Union(newbound,bound)}} -for(let name of _b_.dict.$keys_string(ste.symbols)){var flags=_b_.dict.$getitem_string(ste.symbols,name) -if(!analyze_name(ste,scopes,name,flags,bound,local,free,global,typeparams,class_entry)){return 0}} -if(ste.type !=ClassBlock){ -if(_PyST_IsFunctionLike(ste)){Set_Union(newbound,local);} -if(bound){Set_Union(newbound,bound)} -Set_Union(newglobal,global);}else{ -newbound.add('__class__') -newbound.add('__classdict__')} -for(var c of ste.children){var child_free=new Set() -let entry=c -var new_class_entry=NULL; -if(entry.can_see_class_scope){if(ste.type==ClassBlock){new_class_entry=ste}else if(class_entry){new_class_entry=class_entry}} -var inline_comp=entry.comprehension && ! entry.generator; -if(! analyze_child_block(entry,newbound,newfree,newglobal,typeparams,new_class_entry,child_free)){return 0} -if(inline_comp){if(! inline_comprehension(ste,entry,scopes,child_free,inlined_cells)){} -entry.comp_inlined=1;} -Set_Union(newfree,child_free); -if(entry.free ||entry.child_free){ste.child_free=1}} -for(let i=ste.children.length-1;i >=0;i--){let entry=ste.children[i]; -if(entry.comp_inlined){ste.children.splice(i,0,...entry.children)}} -if(_PyST_IsFunctionLike(ste)&& !analyze_cells(scopes,newfree,inlined_cells)){return 0}else if(ste.type===ClassBlock && !drop_class_free(ste,newfree)){return 0} -if(!update_symbols(ste.symbols,scopes,bound,newfree,inlined_cells,ste.type===ClassBlock ||ste.can_see_class_scope)){return 0} -Set_Union(free,newfree) -success=1 -return success} -function PySet_New(arg){if(arg===NULL){return new Set()} -return new Set(arg)} -function Set_Union(setA,setB){for(let elem of setB){setA.add(elem)}} -function analyze_child_block(entry,bound,free,global,typeparams,class_entry,child_free){ -var temp_bound=PySet_New(bound),temp_free=PySet_New(free),temp_global=PySet_New(global),temp_typeparams=PySet_New(typeparams) -if(!analyze_block(entry,temp_bound,temp_free,temp_global,temp_typeparams,class_entry)){return 0} -Set_Union(child_free,temp_free); -return 1;} -function symtable_analyze(st){var free=new Set(),global=new Set(),typeparams=new Set() -return analyze_block(st.top,NULL,free,global,typeparams,NULL);} -function symtable_exit_block(st){var size=st.stack.length -st.cur=NULL; -if(size){st.stack.pop() -if(--size){st.cur=st.stack[size-1]}} -return 1} -function symtable_enter_block(st,name,block,ast,lineno,col_offset,end_lineno,end_col_offset){var prev -if(ast===undefined){console.log('call ste new, key undef',st,name)} -var ste=ste_new(st,name,block,ast,lineno,col_offset,end_lineno,end_col_offset) -st.stack.push(ste) -prev=st.cur -if(prev){ste.comp_iter_expr=prev.comp_iter_expr} -st.cur=ste -if(block===AnnotationBlock){return 1} -if(block===ModuleBlock){st.global=st.cur.symbols} -if(prev){prev.children.push(ste)} -return 1;} -function symtable_lookup(st,name){var mangled=_Py_Mangle(st.private,name) -if(!mangled){return 0;} -var ret=_PyST_GetSymbol(st.cur,mangled) -return ret;} -function symtable_add_def_helper(st,name,flag,ste,_location){var o,dict,val,mangled=_Py_Mangle(st.private,name) 
-if(!mangled){return 0} -dict=ste.symbols -if(_b_.dict.$contains_string(dict,mangled)){o=_b_.dict.$getitem_string(dict,mangled) -val=o -if((flag & DEF_PARAM)&&(val & DEF_PARAM)){ -let exc=PyErr_Format(_b_.SyntaxError,DUPLICATE_ARGUMENT,name); -set_exc_info(exc,st.filename,..._location) -throw exc} -if((flag & DEF_TYPE_PARAM)&&(val & DEF_TYPE_PARAM)){let exc=PyErr_Format(_b_.SyntaxError,DUPLICATE_TYPE_PARAM,name); -set_exc_info(exc,st.filename,...location); -throw exc} -val |=flag}else{val=flag} -if(ste.comp_iter_target){ -if(val &(DEF_GLOBAL |DEF_NONLOCAL)){let exc=PyErr_Format(_b_.SyntaxError,NAMED_EXPR_COMP_INNER_LOOP_CONFLICT,name); -set_exc_info(exc,st.filename,..._location) +exc.end_offset=end_col_offset+1 +exc.text=line +exc.args[1]=$B.fast_tuple([filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset]) +exc.$frame_obj=$B.frame_obj throw exc} -val |=DEF_COMP_ITER} -o=val -if(o==NULL){return 0} -_b_.dict.$setitem(dict,mangled,o) -if(flag & DEF_PARAM){ste.varnames.push(mangled)}else if(flag & DEF_GLOBAL){ -val=flag -if(st.global.hasOwnProperty(mangled)){ -val |=st.global[mangled]} -o=val -if(o==NULL){return 0} -st.global[mangled]=o} -return 1} -function symtable_add_def(st,name,flag,_location){return symtable_add_def_helper(st,name,flag,st.cur,_location);} -function symtable_enter_type_param_block(st,name,ast,has_defaults,has_kwdefaults,kind,_location){var prev=st.cur,current_type=st.cur.type; -if(!symtable_enter_block(st,name,TypeParamBlock,ast,..._location)){return 0;} -prev.$type_param=st.cur -if(current_type===ClassBlock){st.cur.can_see_class_scope=1; -if(!symtable_add_def(st,"__classdict__",USE,_location)){return 0;}} -if(kind==$B.ast.ClassDef){ -if(!symtable_add_def(st,"type_params",DEF_LOCAL,_location)){return 0;} -if(!symtable_add_def(st,"type_params",USE,_location)){return 0;} -st.st_private=name; -var generic_base=".generic_base"; -if(!symtable_add_def(st,generic_base,DEF_LOCAL,_location)){return 0;} -if(!symtable_add_def(st,generic_base,USE,_location)){return 0;}} -if(has_defaults){var defaults=".defaults"; -if(!symtable_add_def(st,defaults,DEF_PARAM,_location)){return 0;}} -if(has_kwdefaults){var kwdefaults=".kwdefaults"; -if(!symtable_add_def(st,kwdefaults,DEF_PARAM,_location)){return 0;}} -return 1;} -function VISIT_QUIT(ST,X){return X} -function VISIT(ST,TYPE,V){var f=visitor[TYPE] -if(!f(ST,V)){VISIT_QUIT(ST,0);}} -function VISIT_SEQ(ST,TYPE,SEQ){for(var elt of SEQ){if(! visitor[TYPE](ST,elt)){VISIT_QUIT(ST,0)}}} -function VISIT_SEQ_TAIL(ST,TYPE,SEQ,START){for(var i=START,len=SEQ.length;i < len;i++){var elt=SEQ[i]; -if(! visitor[TYPE](ST,elt)){VISIT_QUIT(ST,0)}}} -function VISIT_SEQ_WITH_NULL(ST,TYPE,SEQ){for(var elt of SEQ){if(! elt){continue } -if(! 
visitor[TYPE](ST,elt)){VISIT_QUIT((ST),0)}}} -function symtable_record_directive(st,name,lineno,col_offset,end_lineno,end_col_offset){var data,mangled -if(!st.cur.directives){st.cur.directives=[]} -mangled=_Py_Mangle(st.private,name); -if(!mangled){return 0;} -data=$B.fast_tuple([mangled,lineno,col_offset,end_lineno,end_col_offset]) -st.cur.directives.push(data); -return true} -function has_kwonlydefaults(kwonlyargs,kw_defaults){for(var i=0,len=kwonlyargs.length;i < len;i++){if(kw_defaults[i]){return 1;}} +$B.raise_error_known_location=raise_error_known_location +function raise_error_known_token(type,filename,token,message){var exc=type.$factory(message) +exc.filename=filename +exc.lineno=token.lineno +exc.offset=token.col_offset+1 +exc.end_lineno=token.end_lineno +exc.end_offset=token.end_col_offset+1 +exc.text=token.line +exc.args[1]=$B.fast_tuple([filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset]) +exc.$frame_obj=$B.frame_obj +throw exc} +$B.raise_error_known_token=raise_error_known_token +function set_position_from_EXTRA(ast_obj,EXTRA){for(var key in EXTRA){ast_obj[key]=EXTRA[key]}} +var Parser=$B.Parser=function(src,filename,mode){ +src=src.replace(/\r\n/gm,"\n") +var tokenizer=$B.tokenizer(src,filename,mode,this) +this.tokenizer=tokenizer +this.tok=tokenizer +this.mark=0 +this.fill=0 +this.level=0 +this.size=1 +this.starting_lineno=0; +this.starting_col_offset=0; +this.tokens=[] +this.src=src +this.filename=filename +this.mode=mode +this.memo={} +this.arena={a_objects:[]} +if(filename){p.filename=filename}} +var ignored=[$B.py_tokens.ENCODING,$B.py_tokens.NL,$B.py_tokens.COMMENT] +Parser.prototype.read_token=function(){while(true){var next=this.tokenizer.next() +if(! next.done){var value=next.value +if(! ignored.includes(value.num_type)){this.tokens.push(value) +return value}}else{throw Error('tokenizer exhausted')}}}})(__BRYTHON__) +; +(function($B){ +var _b_=__BRYTHON__.builtins +const Load=new $B.ast.Load() +const NULL=undefined; +const ENDMARKER=0,NAME=1,NUMBER=2,STRING=3 +function strchr(s,char){return s.includes(char)} +function strlen(s){return s.length} +function strncmp(a,b){return a < b ?-1 :a > b ? 
1 :0} +function PyOS_strtol(s,end,base){return parseFloat(s)} +function PyOS_strtoul(s,end,base){return parseFloat(s)} +function PyOS_string_to_double(s,x,y){return parseFloat(s)} +function PyFloat_FromDouble(x){return x} +const NSTATISTICS=2000,memo_statistics={},TYPE_IGNORE='TYPE_IGNORE',ERRORTOKEN='ERRORTOKEN',NEWLINE=$B.py_tokens.NEWLINE,DEDENT=$B.py_tokens.DEDENT,Py_single_input='py_single_input',PyPARSE_ALLOW_INCOMPLETE_INPUT=0x0100 +function PyUnicode_IS_ASCII(char){return char.codePointAt(0)< 128} +function set_position_from_token(ast_obj,token){for(var attr of['lineno','col_offset','end_lineno','end_col_offset']){ast_obj[attr]=token[attr]}} +$B._PyPegen.interactive_exit=function(p){if(p.errcode){(p.errcode)=E_EOF;} +return NULL;} +$B._PyPegen.byte_offset_to_character_offset_raw=function(str,col_offset){var len=str.length +if(col_offset > len+1){col_offset=len+1;} +var text=PyUnicode_DecodeUTF8(str,col_offset,"replace"); +if(!text){return-1;} +return text.length} +$B._PyPegen.calculate_display_width=function(line,character_offset){var segment=line.substring(0,character_offset); +if(!segment){return-1;} +if(PyUnicode_IS_ASCII(segment)){return character_offset;} +var width_fn=_PyImport_GetModuleAttrString("unicodedata","east_asian_width"); +if(!width_fn){return-1;} +var width=0; +var len=segment.length +for(let i=0;i < len;i++){var chr=segment.substring(i,i+1); +if(!chr){Py_DECREF(segment); +Py_DECREF(width_fn); +return-1;} +var width_specifier=PyObject_CallOneArg(width_fn,chr); +if(!width_specifier){Py_DECREF(segment); +Py_DECREF(width_fn); +return-1;} +if(width_specifier=="W" || +width_specifier=="F"){width+=2;}else{ +width+=1;}} +return width;} +$B._PyPegen.byte_offset_to_character_offset=function(line,col_offset){var str=line +return _PyPegen_byte_offset_to_character_offset_raw(str,col_offset);} +$B._PyPegen.insert_memo=function(p,mark,type,node){ +var m={type,node,mark:p.mark,next:p.tokens[mark].memo} +p.tokens[mark].memo=m; return 0;} -var visitor={} -visitor.stmt=function(st,s){switch(s.constructor){case $B.ast.FunctionDef: -if(!symtable_add_def(st,s.name,DEF_LOCAL,LOCATION(s))) -VISIT_QUIT(st,0) -if(s.args.defaults) -VISIT_SEQ(st,expr,s.args.defaults) -if(s.args.kw_defaults) -VISIT_SEQ_WITH_NULL(st,expr,s.args.kw_defaults) -if(s.type_params.length > 0){if(!symtable_enter_type_param_block( -st,s.name,s.type_params,s.args.defaults !=NULL,has_kwonlydefaults(s.args.kwonlyargs,s.args.kw_defaults),s.constructor,LOCATION(s))){VISIT_QUIT(st,0);} -VISIT_SEQ(st,type_param,s.type_params);} -if(!visitor.annotations(st,s,s.args,s.returns)) -VISIT_QUIT(st,0) -if(s.decorator_list){VISIT_SEQ(st,expr,s.decorator_list)} -if(!symtable_enter_block(st,s.name,FunctionBlock,s,...LOCATION(s))){VISIT_QUIT(st,0)} -VISIT(st,'arguments',s.args) -VISIT_SEQ(st,stmt,s.body) -if(!symtable_exit_block(st)){VISIT_QUIT(st,0)} -if(s.type_params.length > 0){if(!symtable_exit_block(st)){VISIT_QUIT(st,0)}} +$B._PyPegen.update_memo=function(p,mark,type,node){for(let m=p.tokens[mark].memo;m !=NULL;m=m.next){if(m.type==type){ +m.node=node; +m.mark=p.mark; +return 0;}} +return $B._PyPegen.insert_memo(p,mark,type,node);} +function init_normalization(p){if(p.normalize){return 1;} +p.normalize=_PyImport_GetModuleAttrString("unicodedata","normalize"); +if(!p.normalize) +{return 0;} +return 1;} +function growable_comment_array_init(arr,initial_size){ +arr.items=new Array(initial_size*arr.items.length); +arr.size=initial_size; +arr.num_items=0; +return arr.items !=NULL;} +function 
growable_comment_array_add(arr,lineno,comment){return 1} +function growable_comment_array_deallocate(arr){} +function _get_keyword_or_name_type(p,new_token){return p.keywords[new_token.string]?? NAME} +function initialize_token(p,parser_token,new_token,token_type){ +parser_token.num_type=(token_type==NAME)? _get_keyword_or_name_type(p,new_token):token_type; +if(parser_token.num_type==-1){console.log('bizarre',new_token) +console.log('keywords',p.keywords) +alert()} +parser_token.metadata=NULL; +if(new_token.metadata !=NULL){parser_token.metadata=new_token.metadata; +new_token.metadata=NULL;} +parser_token.level=new_token.level; +parser_token.lineno=new_token.lineno; +parser_token.col_offset=p.tok.lineno==p.starting_lineno ? p.starting_col_offset+new_token.col_offset +:new_token.col_offset; +parser_token.end_lineno=new_token.end_lineno; +parser_token.end_col_offset=p.tok.lineno==p.starting_lineno ? p.starting_col_offset+new_token.end_col_offset +:new_token.end_col_offset; +p.fill+=1; +if(token_type==ERRORTOKEN && p.tok.done==E_DECODE){return _Pypegen_raise_decode_error(p);} +return(token_type==ERRORTOKEN ? _Pypegen_tokenizer_error(p):0);} +function _PyToken_Init(token){token.metadata=NULL;} +function _PyTokenizer_Get(tok,new_token){var token=tok.next().value +for(var key in token){new_token[key]=token[key]} +return token.num_type} +function get_next_token(p,new_token){var token=p.tokens[p.fill]?? p.read_token() +for(var key in token){new_token[key]=token[key]} +if(token.num_type==$B.py_tokens.ENDMARKER){ +if(p.mode=='single'){var end_token=p.tokens[p.tokens.length-2] +if(end_token.num_type !=$B.py_tokens.NEWLINE){var newline=$B.clone(end_token) +newline.num_type=$B.py_tokens.NEWLINE +p.tokens.splice(p.tokens.length-1,0,newline) +token=newline}}} +return token.num_type} +$B._PyPegen.fill_token=function(p){var new_token={metadata:NULL} +var type=get_next_token(p,new_token); +while(type==TYPE_IGNORE){type=get_next_token(p,new_token);} +if(p.start_rule==Py_single_input && type==ENDMARKER && p.parsing_started){type=NEWLINE; +p.parsing_started=0; +if(p.tok.indent && !(p.flags & PyPARSE_DONT_IMPLY_DEDENT)){p.tok.pendin=-p.tok.indent; +p.tok.indent=0;}} +else{ +p.parsing_started=1;} +var t=p.tokens[p.fill]; +return initialize_token(p,t,new_token,type);} +$B._PyPegen.clear_memo_statistics=function(){for(let i=0;i < NSTATISTICS;i++){memo_statistics[i]=0;}} +$B._PyPegen.get_memo_statistics=function(){var ret=new Array(NSTATISTICS); +if(ret==NULL){return NULL;} +for(let i=0;i < NSTATISTICS;i++){var value=PyLong_FromLong(memo_statistics[i]); +if(value==NULL){return NULL;} +if(PyList_SetItem(ret,i,value)< 0){Py_DECREF(ret); +return NULL;}} +return ret;} +$B._PyPegen.is_memoized=function(p,type,pres){if(p.mark==p.fill){if($B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return-1;}} +var t=p.tokens[p.mark]; +for(var m=t.memo;m !=NULL;m=m.next){if(m.type==type){ +p.mark=m.mark; +pres.value=m.node; +return 1;}} +return 0;} +$B._PyPegen.lookahead_with_name=function(positive,func,p){var mark=p.mark; +var res=func(p); +p.mark=mark; +return(res !=NULL)==positive;} +$B._PyPegen.lookahead_with_string=function(positive,func,p,arg){var mark=p.mark; +var res=func(p,arg); +p.mark=mark; +return(res !=NULL)==positive;} +$B._PyPegen.lookahead_with_int=function(positive,func,p,arg){var mark=p.mark; +var res=func(p,arg); +p.mark=mark; +return(res !=NULL)==positive;} +$B._PyPegen.lookahead=function(positive,func,p){var mark=p.mark; +var res=func(p); +p.mark=mark; +return(res !=NULL)==positive;} 
+$B._PyPegen.expect_token=function(p,type){if(p.mark==p.fill){if($B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;}} +var t=p.tokens[p.mark]; +if(t.num_type !=type){return NULL;} +p.mark+=1; +return t;} +$B._PyPegen.expect_forced_result=function(p,result,expected){if(p.error_indicator==1){return NULL;} +if(result==NULL){RAISE_SYNTAX_ERROR("expected (%s)",expected); +return NULL;} +return result;} +$B._PyPegen.expect_forced_token=function(p,type,expected){if(p.error_indicator==1){return NULL;} +if(p.mark==p.fill){if($B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;}} +var t=p.tokens[p.mark]; +if(t.num_type !=type){$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,t,`expected '${expected}'`); +return NULL;} +p.mark+=1; +return t;} +$B._PyPegen.expect_soft_keyword=function(p,keyword){if(p.mark==p.fill){if($B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;}} +var t=p.tokens[p.mark]; +if(t.num_type !=NAME){return NULL;} +const s=t.string +if(!s){p.error_indicator=1; +return NULL;} +if(strcmp(s,keyword)!=0){return NULL;} +return $B._PyPegen.name_token(p);} +$B._PyPegen.get_last_nonnwhitespace_token=function(p){ +var token=NULL; +for(let m=p.mark-1;m >=0;m--){token=p.tokens[m]; +if(token.num_type !=ENDMARKER &&(token.num_type < NEWLINE ||token.num_type > DEDENT)){break;}} +return token;} +$B._PyPegen.new_identifier=function(p,n){var id=n +if(! PyUnicode_IS_ASCII(id)){var id2; +if(!init_normalization(p)) +{return error();} +var form=PyUnicode_InternFromString("NFKC"); +if(form==NULL) +{return error();} +var args={form,id}; +id2=_PyObject_FastCall(p.normalize,args,2); +if(!id2){return error()} +if(!PyUnicode_Check(id2)){PyErr_Format(PyExc_TypeError,"unicodedata.normalize() must return a string, not "+ +"%.200s",_PyType_Name(Py_TYPE(id2))); +return error()} +id=id2;} +PyUnicode_InternInPlace(id); +return id; +function error(){p.error_indicator=1; +return NULL;}} +$B._PyPegen.name_from_token=function(p,t){if(t==NULL){return NULL;} +var s=t.string +if(!s){p.error_indicator=1; +return NULL;} +var res=new $B.ast.Name(s,Load) +set_position_from_token(res,t) +return res} +$B._PyPegen.name_token=function(p){var t=$B._PyPegen.expect_token(p,NAME); +return $B._PyPegen.name_from_token(p,t);} +$B._PyPegen.string_token=function(p){return $B._PyPegen.expect_token(p,STRING);} +$B._PyPegen.soft_keyword_token=function(p){var t=$B._PyPegen.expect_token(p,NAME); +if(t==NULL){return NULL;} +var the_token; +var size; +the_token=t.string; +for(let keyword=p.soft_keywords;keyword !=NULL;keyword++){if(strncmp(keyword,the_token,size)==0){return $B._PyPegen.name_from_token(p,t);}} +return NULL;} +function prepared_number_value(prepared){switch(prepared.type){case 'float': +return $B.fast_float(prepared.value) +case 'imaginary': +return $B.make_complex(0,prepared_number_value(prepared.value)) +case 'int': +var res=parseInt(prepared.value[1],prepared.value[0]) +if(! 
Number.isSafeInteger(res)){var base=prepared.value[0],num_str=prepared.value[1] +switch(base){case 8: +return $B.fast_long_int(BigInt('0x'+num_str)) +case 10: +return $B.fast_long_int(BigInt(num_str)) +case 16: +return $B.fast_long_int(BigInt('0x'+num_str))}} +return res}} +function parsenumber_raw(s){var prepared=$B.prepare_number(s) +return prepared_number_value(prepared)} +function parsenumber(s){var dup; +var end; +var res=NULL; +if(strchr(s,'_')==NULL){return parsenumber_raw(s);} +dup=s.replace(/_/g,'') +res=parsenumber_raw(dup); +return res;} +$B._PyPegen.number_token=function(p){var t=$B._PyPegen.expect_token(p,NUMBER); +if(t==NULL){return NULL;} +var num_raw=t.string +if(num_raw==NULL){p.error_indicator=1; +return NULL;} +if(p.feature_version < 6 && strchr(num_raw,'_')!=NULL){p.error_indicator=1; +return RAISE_SYNTAX_ERROR("Underscores in numeric literals are only supported "+ +"in Python 3.6 and greater");} +var c=parsenumber(num_raw); +if(c==NULL){p.error_indicator=1; +var tstate=_PyThreadState_GET(); +if(tstate.current_exception !=NULL && +Py_TYPE(tstate.current_exception)==PyExc_ValueError +){var exc=PyErr_GetRaisedException(); +RAISE_ERROR_KNOWN_LOCATION( +p,PyExc_SyntaxError,t.lineno,-1 ,t.end_lineno,-1 ,"%S - Consider hexadecimal for huge integer literals "+ +"to avoid decimal conversion limits.",exc);} +return NULL;} +var res=new $B.ast.Constant(c,NULL); +set_position_from_token(res,t) +return res} +function bad_single_statement(p){var cur=p.tok.cur; +var c=cur; +var pos=0 +for(;;){while(c==' ' ||c=='\t' ||c=='\n' ||c=='\f'){c=cur[pos++]} +if(!c){return 0;} +if(c !='#'){return 1;} +while(c && c !='\n'){c=cur[pos++]}}} +function compute_parser_flags(flags){var parser_flags=0; +if(!flags){return 0;} +if(flags.cf_flags & PyCF_DONT_IMPLY_DEDENT){parser_flags |=PyPARSE_DONT_IMPLY_DEDENT;} +if(flags.cf_flags & PyCF_IGNORE_COOKIE){parser_flags |=PyPARSE_IGNORE_COOKIE;} +if(flags.cf_flags & CO_FUTURE_BARRY_AS_BDFL){parser_flags |=PyPARSE_BARRY_AS_BDFL;} +if(flags.cf_flags & PyCF_TYPE_COMMENTS){parser_flags |=PyPARSE_TYPE_COMMENTS;} +if((flags.cf_flags & PyCF_ONLY_AST)&& flags.cf_feature_version < 7){parser_flags |=PyPARSE_ASYNC_HACKS;} +if(flags.cf_flags & PyCF_ALLOW_INCOMPLETE_INPUT){parser_flags |=PyPARSE_ALLOW_INCOMPLETE_INPUT;} +return parser_flags;} +$B._PyPegen.Parser_New=function(tok,start_rule,flags,feature_version,errcode,arena){var p={} +if(p==NULL){return PyErr_NoMemory();} +tok.type_comments=(flags & PyPARSE_TYPE_COMMENTS)> 0; +tok.async_hacks=(flags & PyPARSE_ASYNC_HACKS)> 0; +p.tok=tok; +p.keywords=NULL; +p.n_keyword_lists=-1; +p.soft_keywords=NULL; +p.tokens=[] +if(!p.tokens){PyMem_Free(p); +return PyErr_NoMemory();} +p.tokens[0]=PyMem_Calloc(1,sizeof(Token)); +p.mark=0; +p.fill=0; +p.size=1; +p.errcode=errcode; +p.arena=arena; +p.start_rule=start_rule; +p.parsing_started=0; +p.normalize=NULL; +p.error_indicator=0; +p.starting_lineno=0; +p.starting_col_offset=0; +p.flags=flags; +p.feature_version=feature_version; +p.known_err_token=NULL; +p.level=0; +p.call_invalid_rules=0; +p.debug=_Py_GetConfig().parser_debug; +return p;} +$B._PyPegen.Parser_Free=function(p){} +function reset_parser_state_for_error_pass(p){for(let i=0;i < p.fill;i++){p.tokens[i].memo=NULL;} +p.mark=0; +p.call_invalid_rules=1;} +function _is_end_of_source(p){var err=p.tok.done; +return err==E_EOF ||err==E_EOFS ||err==E_EOLS;} +$B._PyPegen.tokenize_full_source_to_check_for_errors=function(p){var last_token=p.tokens[p.fill-1] +var tokenizer=$B.tokenizer(p.src,p.filename,p.mode,p) +for(var token of 
tokenizer){} +if(p.braces.length > 0){var brace=$B.last(p.braces),err_lineno,msg +if('([{'.includes(brace.char)){err_lineno=brace.line_num}else{if(p.braces.length > 1){err_lineno=p.braces[p.braces.length-2].line_num}else{err_lineno=brace.line_num}} +if(p.tokens.length==0 ||$B.last(p.tokens).lineno >=err_lineno){if('([{'.includes(brace.char)){msg=`'${brace.char}' was never closed`}else if(p.braces.length > 1){var closing=brace.char,opening=p.braces[p.braces.length-2].char +msg=`closing parenthesis '${closing}' does not match `+ +`opening parenthesis '${opening}'`}else{msg=`unmatched '${brace.char}'`} +$B.raise_error_known_location(_b_.SyntaxError,p.filename,brace.line_num,brace.pos-brace.line_start,brace.line_num,brace.pos-brace.line_start+1,brace.line,msg)}}} +$B._PyPegen.set_syntax_error=function(p,last_token){ +if(p.fill==0){$B.helper_functions.RAISE_SYNTAX_ERROR(p,"error at start before reading any input");} +$B._PyPegen.tokenize_full_source_to_check_for_errors(p); +if(last_token.num_type==ERRORTOKEN && p.tok.done==E_EOF){if(p.tok.level){raise_unclosed_parentheses_error(p);}else{ +$B.helper_functions.RAISE_SYNTAX_ERROR(p,"unexpected EOF while parsing");} +return;} +if(last_token.num_type==INDENT ||last_token.num_type==DEDENT){$B.helper_functions.RAISE_INDENTATION_ERROR(p,last_token.num_type==INDENT ? "unexpected indent" :"unexpected unindent"); +return;} +$B._PyPegen.tokenize_full_source_to_check_for_errors(p); +$B.raise_error_known_token(_b_.SyntaxError,p.filename,last_token,"invalid syntax");} +$B._PyPegen.run_parser=function(p){var res=$B._PyPegen.parse(p); +if(res==NULL){if((p.flags & PyPARSE_ALLOW_INCOMPLETE_INPUT)&& _is_end_of_source(p)){PyErr_Clear(); +return RAISE_SYNTAX_ERROR("incomplete input");} +var last_token=p.tokens[p.fill-1]; +reset_parser_state_for_error_pass(p); +try{$B._PyPegen.parse(p);}catch(err){last_token=p.tokens[p.fill-1] +$B._PyPegen.tokenize_full_source_to_check_for_errors(p) +throw err} +$B._PyPegen.set_syntax_error(p,last_token);} +if(p.start_rule==Py_single_input && bad_single_statement(p)){p.tok.done=E_BADSINGLE; +return RAISE_SYNTAX_ERROR("multiple statements found while compiling a single statement");} +return res;} +$B._PyPegen.run_parser_from_file_pointer=function(fp,start_rule,filename_ob,enc,ps1,ps2,flags,errcode,arena){var tok=_PyTokenizer_FromFile(fp,enc,ps1,ps2); +if(tok==NULL){if(PyErr_Occurred()){_PyPegen_raise_tokenizer_init_error(filename_ob); +return NULL;} +return NULL;} +if(!tok.fp ||ps1 !=NULL ||ps2 !=NULL || +PyUnicode_CompareWithASCIIString(filename_ob,"")==0){tok.fp_interactive=1;} +tok.filename=Py_NewRef(filename_ob); +var result=NULL; +var parser_flags=compute_parser_flags(flags); +var p=$B._PyPegen.Parser_New(tok,start_rule,parser_flags,PY_MINOR_VERSION,errcode,arena); +if(p==NULL){return error()} +result=_PyPegen_run_parser(p); +_PyPegen_Parser_Free(p); +function error(){_PyTokenizer_Free(tok); +return result;}} +$B._PyPegen.run_parser_from_string=function(str,start_rule,filename_ob,flags,arena){var exec_input=start_rule==Py_file_input; +var tok; +if(flags !=NULL && flags.cf_flags & PyCF_IGNORE_COOKIE){tok=_PyTokenizer_FromUTF8(str,exec_input,0);}else{ +tok=_PyTokenizer_FromString(str,exec_input,0);} +if(tok==NULL){if(PyErr_Occurred()){_PyPegen_raise_tokenizer_init_error(filename_ob);} +return NULL;} +tok.filename=Py_NewRef(filename_ob); +var result=NULL; +var parser_flags=compute_parser_flags(flags); +var feature_version=flags &&(flags.cf_flags & PyCF_ONLY_AST)? 
+flags.cf_feature_version :PY_MINOR_VERSION; +var p=$B._PyPegen.Parser_New(tok,start_rule,parser_flags,feature_version,NULL,arena); +if(p==NULL){return error()} +result=_PyPegen_run_parser(p); +_PyPegen_Parser_Free(p); +function error(){ +return result;}} +$B.PyPegen={first_item:function(a,type){return a[0]},last_item:function(a,ptype){return a[a.length-1]}}})(__BRYTHON__) +; + +function fprintf(dest,format){var args=Array.from(arguments).slice(2) +for(var arg of args){format=format.replace(/%\*?[a-z]/,arg)} +return format} +const stderr=null +function D(x){console.log(x)} +function UNUSED(){} +function strcmp(x,y){return x==y ? 0 :x < y ?-1 :1} +const MAXSTACK=6000,NULL=undefined +const ENDMARKER=0,NAME=1,NUMBER=2,STRING=3,NEWLINE=4,INDENT=5,DEDENT=6,LPAR=7,RPAR=8,LSQB=9,RSQB=10,COLON=11,COMMA=12,SEMI=13,PLUS=14,MINUS=15,STAR=16,SLASH=17,VBAR=18,AMPER=19,LESS=20,GREATER=21,EQUAL=22,DOT=23,PERCENT=24,LBRACE=25,RBRACE=26,EQEQUAL=27,NOTEQUAL=28,LESSEQUAL=29,GREATEREQUAL=30,TILDE=31,CIRCUMFLEX=32,LEFTSHIFT=33,RIGHTSHIFT=34,DOUBLESTAR=35,PLUSEQUAL=36,MINEQUAL=37,STAREQUAL=38,SLASHEQUAL=39,PERCENTEQUAL=40,AMPEREQUAL=41,VBAREQUAL=42,CIRCUMFLEXEQUAL=43,LEFTSHIFTEQUAL=44,RIGHTSHIFTEQUAL=45,DOUBLESTAREQUAL=46,DOUBLESLASH=47,DOUBLESLASHEQUAL=48,AT=49,ATEQUAL=50,RARROW=51,ELLIPSIS=52,COLONEQUAL=53,EXCLAMATION=54,OP=55,AWAIT=56,ASYNC=57,TYPE_IGNORE=58,TYPE_COMMENT=59,SOFT_KEYWORD=60,FSTRING_START=61,FSTRING_MIDDLE=62,FSTRING_END=63,COMMENT=64,NL=65,ERRORTOKEN=66,N_TOKENS=68 +function NEW_TYPE_COMMENT(){} +const Store=new $B.ast.Store(),Load=new $B.ast.Load +const EXTRA={} +const n_keyword_lists=9; +const _reserved_keywords={if:642,as:640,in:651,or:574,is:582,del:604,def:652,for:650,try:624,and:575,not:581,from:608,pass:504,with:615,elif:644,else:645,None:602,True:601,raise:522,yield:573,break:508,class:654,while:647,False:603,return:519,import:607,assert:526,global:523,except:637,lambda:600,finally:633,continue:509,nonlocal:524,}; +const reserved_keywords=Object.create(null) +for(var item of Object.entries(_reserved_keywords)){reserved_keywords[item[0]]=item[1]} +const soft_keywords=["_","case","match","type",NULL,]; +const 
file_type=1000,interactive_type=1001,eval_type=1002,func_type_type=1003,statements_type=1004,statement_type=1005,statement_newline_type=1006,simple_stmts_type=1007,simple_stmt_type=1008,compound_stmt_type=1009,assignment_type=1010,annotated_rhs_type=1011,augassign_type=1012,return_stmt_type=1013,raise_stmt_type=1014,global_stmt_type=1015,nonlocal_stmt_type=1016,del_stmt_type=1017,yield_stmt_type=1018,assert_stmt_type=1019,import_stmt_type=1020,import_name_type=1021,import_from_type=1022,import_from_targets_type=1023,import_from_as_names_type=1024,import_from_as_name_type=1025,dotted_as_names_type=1026,dotted_as_name_type=1027,dotted_name_type=1028,block_type=1029,decorators_type=1030,class_def_type=1031,class_def_raw_type=1032,function_def_type=1033,function_def_raw_type=1034,params_type=1035,parameters_type=1036,slash_no_default_type=1037,slash_with_default_type=1038,star_etc_type=1039,kwds_type=1040,param_no_default_type=1041,param_no_default_star_annotation_type=1042,param_with_default_type=1043,param_maybe_default_type=1044,param_type=1045,param_star_annotation_type=1046,annotation_type=1047,star_annotation_type=1048,default_type=1049,if_stmt_type=1050,elif_stmt_type=1051,else_block_type=1052,while_stmt_type=1053,for_stmt_type=1054,with_stmt_type=1055,with_item_type=1056,try_stmt_type=1057,except_block_type=1058,except_star_block_type=1059,finally_block_type=1060,match_stmt_type=1061,subject_expr_type=1062,case_block_type=1063,guard_type=1064,patterns_type=1065,pattern_type=1066,as_pattern_type=1067,or_pattern_type=1068,closed_pattern_type=1069,literal_pattern_type=1070,literal_expr_type=1071,complex_number_type=1072,signed_number_type=1073,signed_real_number_type=1074,real_number_type=1075,imaginary_number_type=1076,capture_pattern_type=1077,pattern_capture_target_type=1078,wildcard_pattern_type=1079,value_pattern_type=1080,attr_type=1081,name_or_attr_type=1082,group_pattern_type=1083,sequence_pattern_type=1084,open_sequence_pattern_type=1085,maybe_sequence_pattern_type=1086,maybe_star_pattern_type=1087,star_pattern_type=1088,mapping_pattern_type=1089,items_pattern_type=1090,key_value_pattern_type=1091,double_star_pattern_type=1092,class_pattern_type=1093,positional_patterns_type=1094,keyword_patterns_type=1095,keyword_pattern_type=1096,type_alias_type=1097,type_params_type=1098,type_param_seq_type=1099,type_param_type=1100,type_param_bound_type=1101,expressions_type=1102,expression_type=1103,yield_expr_type=1104,star_expressions_type=1105,star_expression_type=1106,star_named_expressions_type=1107,star_named_expression_type=1108,assignment_expression_type=1109,named_expression_type=1110,disjunction_type=1111,conjunction_type=1112,inversion_type=1113,comparison_type=1114,compare_op_bitwise_or_pair_type=1115,eq_bitwise_or_type=1116,noteq_bitwise_or_type=1117,lte_bitwise_or_type=1118,lt_bitwise_or_type=1119,gte_bitwise_or_type=1120,gt_bitwise_or_type=1121,notin_bitwise_or_type=1122,in_bitwise_or_type=1123,isnot_bitwise_or_type=1124,is_bitwise_or_type=1125,bitwise_or_type=1126,bitwise_xor_type=1127,bitwise_and_type=1128,shift_expr_type=1129,sum_type=1130,term_type=1131,factor_type=1132,power_type=1133,await_primary_type=1134,primary_type=1135,slices_type=1136,slice_type=1137,atom_type=1138,group_type=1139,lambdef_type=1140,lambda_params_type=1141,lambda_parameters_type=1142,lambda_slash_no_default_type=1143,lambda_slash_with_default_type=1144,lambda_star_etc_type=1145,lambda_kwds_type=1146,lambda_param_no_default_type=1147,lambda_param_with_default_type=1148,lambda_param_maybe_default_type
=1149,lambda_param_type=1150,fstring_middle_type=1151,fstring_replacement_field_type=1152,fstring_conversion_type=1153,fstring_full_format_spec_type=1154,fstring_format_spec_type=1155,fstring_type=1156,string_type=1157,strings_type=1158,list_type=1159,tuple_type=1160,set_type=1161,dict_type=1162,double_starred_kvpairs_type=1163,double_starred_kvpair_type=1164,kvpair_type=1165,for_if_clauses_type=1166,for_if_clause_type=1167,listcomp_type=1168,setcomp_type=1169,genexp_type=1170,dictcomp_type=1171,arguments_type=1172,args_type=1173,kwargs_type=1174,starred_expression_type=1175,kwarg_or_starred_type=1176,kwarg_or_double_starred_type=1177,star_targets_type=1178,star_targets_list_seq_type=1179,star_targets_tuple_seq_type=1180,star_target_type=1181,target_with_star_atom_type=1182,star_atom_type=1183,single_target_type=1184,single_subscript_attribute_target_type=1185,t_primary_type=1186,t_lookahead_type=1187,del_targets_type=1188,del_target_type=1189,del_t_atom_type=1190,type_expressions_type=1191,func_type_comment_type=1192,invalid_arguments_type=1193,invalid_kwarg_type=1194,expression_without_invalid_type=1195,invalid_legacy_expression_type=1196,invalid_expression_type=1197,invalid_named_expression_type=1198,invalid_assignment_type=1199,invalid_ann_assign_target_type=1200,invalid_del_stmt_type=1201,invalid_block_type=1202,invalid_comprehension_type=1203,invalid_dict_comprehension_type=1204,invalid_parameters_type=1205,invalid_default_type=1206,invalid_star_etc_type=1207,invalid_kwds_type=1208,invalid_parameters_helper_type=1209,invalid_lambda_parameters_type=1210,invalid_lambda_parameters_helper_type=1211,invalid_lambda_star_etc_type=1212,invalid_lambda_kwds_type=1213,invalid_double_type_comments_type=1214,invalid_with_item_type=1215,invalid_for_target_type=1216,invalid_group_type=1217,invalid_import_type=1218,invalid_import_from_targets_type=1219,invalid_with_stmt_type=1220,invalid_with_stmt_indent_type=1221,invalid_try_stmt_type=1222,invalid_except_stmt_type=1223,invalid_finally_stmt_type=1224,invalid_except_stmt_indent_type=1225,invalid_except_star_stmt_indent_type=1226,invalid_match_stmt_type=1227,invalid_case_block_type=1228,invalid_as_pattern_type=1229,invalid_class_pattern_type=1230,invalid_class_argument_pattern_type=1231,invalid_if_stmt_type=1232,invalid_elif_stmt_type=1233,invalid_else_stmt_type=1234,invalid_while_stmt_type=1235,invalid_for_stmt_type=1236,invalid_def_raw_type=1237,invalid_class_def_raw_type=1238,invalid_double_starred_kvpairs_type=1239,invalid_kvpair_type=1240,invalid_starred_expression_type=1241,invalid_replacement_field_type=1242,invalid_conversion_character_type=1243,_loop0_1_type=1244,_loop0_2_type=1245,_loop1_3_type=1246,_loop0_5_type=1247,_gather_4_type=1248,_tmp_6_type=1249,_tmp_7_type=1250,_tmp_8_type=1251,_tmp_9_type=1252,_tmp_10_type=1253,_tmp_11_type=1254,_tmp_12_type=1255,_tmp_13_type=1256,_loop1_14_type=1257,_tmp_15_type=1258,_tmp_16_type=1259,_tmp_17_type=1260,_loop0_19_type=1261,_gather_18_type=1262,_loop0_21_type=1263,_gather_20_type=1264,_tmp_22_type=1265,_tmp_23_type=1266,_loop0_24_type=1267,_loop1_25_type=1268,_loop0_27_type=1269,_gather_26_type=1270,_tmp_28_type=1271,_loop0_30_type=1272,_gather_29_type=1273,_tmp_31_type=1274,_loop1_32_type=1275,_tmp_33_type=1276,_tmp_34_type=1277,_tmp_35_type=1278,_loop0_36_type=1279,_loop0_37_type=1280,_loop0_38_type=1281,_loop1_39_type=1282,_loop0_40_type=1283,_loop1_41_type=1284,_loop1_42_type=1285,_loop1_43_type=1286,_loop0_44_type=1287,_loop1_45_type=1288,_loop0_46_type=1289,_loop1_47_type=1290,_loop0_48_type=1
291,_loop0_49_type=1292,_loop1_50_type=1293,_loop0_52_type=1294,_gather_51_type=1295,_loop0_54_type=1296,_gather_53_type=1297,_loop0_56_type=1298,_gather_55_type=1299,_loop0_58_type=1300,_gather_57_type=1301,_tmp_59_type=1302,_loop1_60_type=1303,_loop1_61_type=1304,_tmp_62_type=1305,_tmp_63_type=1306,_loop1_64_type=1307,_loop0_66_type=1308,_gather_65_type=1309,_tmp_67_type=1310,_tmp_68_type=1311,_tmp_69_type=1312,_tmp_70_type=1313,_loop0_72_type=1314,_gather_71_type=1315,_loop0_74_type=1316,_gather_73_type=1317,_tmp_75_type=1318,_loop0_77_type=1319,_gather_76_type=1320,_loop0_79_type=1321,_gather_78_type=1322,_loop0_81_type=1323,_gather_80_type=1324,_loop1_82_type=1325,_loop1_83_type=1326,_loop0_85_type=1327,_gather_84_type=1328,_loop1_86_type=1329,_loop1_87_type=1330,_loop1_88_type=1331,_tmp_89_type=1332,_loop0_91_type=1333,_gather_90_type=1334,_tmp_92_type=1335,_tmp_93_type=1336,_tmp_94_type=1337,_tmp_95_type=1338,_tmp_96_type=1339,_tmp_97_type=1340,_loop0_98_type=1341,_loop0_99_type=1342,_loop0_100_type=1343,_loop1_101_type=1344,_loop0_102_type=1345,_loop1_103_type=1346,_loop1_104_type=1347,_loop1_105_type=1348,_loop0_106_type=1349,_loop1_107_type=1350,_loop0_108_type=1351,_loop1_109_type=1352,_loop0_110_type=1353,_loop1_111_type=1354,_tmp_112_type=1355,_loop0_113_type=1356,_loop0_114_type=1357,_loop1_115_type=1358,_tmp_116_type=1359,_loop0_118_type=1360,_gather_117_type=1361,_loop1_119_type=1362,_loop0_120_type=1363,_loop0_121_type=1364,_tmp_122_type=1365,_loop0_124_type=1366,_gather_123_type=1367,_tmp_125_type=1368,_loop0_127_type=1369,_gather_126_type=1370,_loop0_129_type=1371,_gather_128_type=1372,_loop0_131_type=1373,_gather_130_type=1374,_loop0_133_type=1375,_gather_132_type=1376,_loop0_134_type=1377,_loop0_136_type=1378,_gather_135_type=1379,_loop1_137_type=1380,_tmp_138_type=1381,_loop0_140_type=1382,_gather_139_type=1383,_loop0_142_type=1384,_gather_141_type=1385,_loop0_144_type=1386,_gather_143_type=1387,_loop0_146_type=1388,_gather_145_type=1389,_loop0_148_type=1390,_gather_147_type=1391,_tmp_149_type=1392,_tmp_150_type=1393,_tmp_151_type=1394,_tmp_152_type=1395,_tmp_153_type=1396,_tmp_154_type=1397,_tmp_155_type=1398,_tmp_156_type=1399,_tmp_157_type=1400,_tmp_158_type=1401,_tmp_159_type=1402,_tmp_160_type=1403,_loop0_161_type=1404,_loop0_162_type=1405,_loop0_163_type=1406,_tmp_164_type=1407,_tmp_165_type=1408,_tmp_166_type=1409,_tmp_167_type=1410,_tmp_168_type=1411,_loop0_169_type=1412,_loop0_170_type=1413,_loop0_171_type=1414,_loop1_172_type=1415,_tmp_173_type=1416,_loop0_174_type=1417,_tmp_175_type=1418,_loop0_176_type=1419,_loop1_177_type=1420,_tmp_178_type=1421,_tmp_179_type=1422,_tmp_180_type=1423,_loop0_181_type=1424,_tmp_182_type=1425,_tmp_183_type=1426,_loop1_184_type=1427,_tmp_185_type=1428,_loop0_186_type=1429,_loop0_187_type=1430,_loop0_188_type=1431,_loop0_190_type=1432,_gather_189_type=1433,_tmp_191_type=1434,_loop0_192_type=1435,_tmp_193_type=1436,_loop0_194_type=1437,_loop1_195_type=1438,_loop1_196_type=1439,_tmp_197_type=1440,_tmp_198_type=1441,_loop0_199_type=1442,_tmp_200_type=1443,_tmp_201_type=1444,_tmp_202_type=1445,_loop0_204_type=1446,_gather_203_type=1447,_loop0_206_type=1448,_gather_205_type=1449,_loop0_208_type=1450,_gather_207_type=1451,_loop0_210_type=1452,_gather_209_type=1453,_loop0_212_type=1454,_gather_211_type=1455,_tmp_213_type=1456,_loop0_214_type=1457,_loop1_215_type=1458,_tmp_216_type=1459,_loop0_217_type=1460,_loop1_218_type=1461,_tmp_219_type=1462,_tmp_220_type=1463,_tmp_221_type=1464,_tmp_222_type=1465,_tmp_223_type=1466,_tmp_224_type=1
467,_tmp_225_type=1468,_tmp_226_type=1469,_tmp_227_type=1470,_tmp_228_type=1471,_loop0_230_type=1472,_gather_229_type=1473,_tmp_231_type=1474,_tmp_232_type=1475,_tmp_233_type=1476,_tmp_234_type=1477,_tmp_235_type=1478,_tmp_236_type=1479,_tmp_237_type=1480,_tmp_238_type=1481,_tmp_239_type=1482,_tmp_240_type=1483,_tmp_241_type=1484,_tmp_242_type=1485,_tmp_243_type=1486,_loop0_244_type=1487,_tmp_245_type=1488,_tmp_246_type=1489,_tmp_247_type=1490,_tmp_248_type=1491,_tmp_249_type=1492,_tmp_250_type=1493,_tmp_251_type=1494,_tmp_252_type=1495,_tmp_253_type=1496,_tmp_254_type=1497,_tmp_255_type=1498,_tmp_256_type=1499,_tmp_257_type=1500,_tmp_258_type=1501,_tmp_259_type=1502,_tmp_260_type=1503,_tmp_261_type=1504,_tmp_262_type=1505,_tmp_263_type=1506,_tmp_264_type=1507,_tmp_265_type=1508,_tmp_266_type=1509,_tmp_267_type=1510,_tmp_268_type=1511,_tmp_269_type=1512,_tmp_270_type=1513,_tmp_271_type=1514,_tmp_272_type=1515,_tmp_273_type=1516,_loop0_275_type=1517,_gather_274_type=1518,_tmp_276_type=1519,_tmp_277_type=1520,_tmp_278_type=1521,_tmp_279_type=1522,_tmp_280_type=1523,_tmp_281_type=1524 +function file_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +var endmarker_var; +if( +(a=statements_rule(p),!p.error_indicator) +&& +(endmarker_var=$B._PyPegen.expect_token(p,ENDMARKER)) +) +{_res=$B._PyPegen.make_module(p,a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function interactive_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=statement_newline_rule(p)) +) +{_res=new $B._PyAST.Interactive(a,p.arena); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function eval_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _loop0_1_var; +var a; +var endmarker_var; +if( +(a=expressions_rule(p)) +&& +(_loop0_1_var=_loop0_1_rule(p)) +&& +(endmarker_var=$B._PyPegen.expect_token(p,ENDMARKER)) +) +{_res=new $B._PyAST.Expression(a,p.arena); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function func_type_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _literal_2; +var _loop0_2_var; +var a; +var b; +var endmarker_var; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=type_expressions_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +&& +(_literal_2=$B._PyPegen.expect_token(p,51)) +&& +(b=expression_rule(p)) +&& +(_loop0_2_var=_loop0_2_rule(p)) +&& +(endmarker_var=$B._PyPegen.expect_token(p,ENDMARKER)) +) +{_res=new $B._PyAST.FunctionType(a,b,p.arena); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function statements_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=_loop1_3_rule(p)) +) +{_res=$B._PyPegen.seq_flatten(p,a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function statement_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=compound_stmt_rule(p)) +) +{_res=$B._PyPegen.singleton_seq(p,a); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=simple_stmts_rule(p)) +) +{_res=a; +break;} 
+p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function statement_newline_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +var newline_var; +if( +(a=compound_stmt_rule(p)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B._PyPegen.singleton_seq(p,a); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var simple_stmts_var; +if( +(simple_stmts_var=simple_stmts_rule(p)) +) +{_res=simple_stmts_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var newline_var; +if( +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B._PyPegen.singleton_seq(p,$B.helper_functions.CHECK($B.ast.stmt,new $B._PyAST.Pass(EXTRA ))); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var endmarker_var; +if( +(endmarker_var=$B._PyPegen.expect_token(p,ENDMARKER)) +) +{_res=$B._PyPegen.interactive_exit(p); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function simple_stmts_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +var newline_var; +if( +(a=simple_stmt_rule(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,13) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B._PyPegen.singleton_seq(p,a); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +var newline_var; +if( +(a=_gather_4_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,13),!p.error_indicator) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function simple_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,simple_stmt_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var assignment_var; +if( +(assignment_var=assignment_rule(p)) +) +{_res=assignment_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var type_alias_var; +if( +$B._PyPegen.lookahead_with_string(1,$B._PyPegen.expect_soft_keyword,p,"type") +&& +(type_alias_var=type_alias_rule(p)) +) +{_res=type_alias_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var e; +if( +(e=star_expressions_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Expr(e,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var return_stmt_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,519) +&& +(return_stmt_var=return_stmt_rule(p)) +) +{_res=return_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var import_stmt_var; +if( +$B._PyPegen.lookahead(1,_tmp_6_rule,p) +&& 
+(import_stmt_var=import_stmt_rule(p)) +) +{_res=import_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var raise_stmt_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,522) +&& +(raise_stmt_var=raise_stmt_rule(p)) +) +{_res=raise_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,504)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Pass(EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var del_stmt_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,604) +&& +(del_stmt_var=del_stmt_rule(p)) +) +{_res=del_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var yield_stmt_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,573) +&& +(yield_stmt_var=yield_stmt_rule(p)) +) +{_res=yield_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var assert_stmt_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,526) +&& +(assert_stmt_var=assert_stmt_rule(p)) +) +{_res=assert_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,508)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Break(EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,509)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Continue(EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var global_stmt_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,523) +&& +(global_stmt_var=global_stmt_rule(p)) +) +{_res=global_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var nonlocal_stmt_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,524) +&& +(nonlocal_stmt_var=nonlocal_stmt_rule(p)) +) +{_res=nonlocal_stmt_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,simple_stmt_type,_res); +return _res;} +function compound_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var function_def_var; +if( +$B._PyPegen.lookahead(1,_tmp_7_rule,p) +&& +(function_def_var=function_def_rule(p)) +) +{_res=function_def_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var if_stmt_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,642) +&& +(if_stmt_var=if_stmt_rule(p)) +) +{_res=if_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var class_def_var; +if( +$B._PyPegen.lookahead(1,_tmp_8_rule,p) +&& +(class_def_var=class_def_rule(p)) +) +{_res=class_def_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var with_stmt_var; +if( +$B._PyPegen.lookahead(1,_tmp_9_rule,p) +&& +(with_stmt_var=with_stmt_rule(p)) +) +{_res=with_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var for_stmt_var; +if( 
+$B._PyPegen.lookahead(1,_tmp_10_rule,p) +&& +(for_stmt_var=for_stmt_rule(p)) +) +{_res=for_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var try_stmt_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,624) +&& +(try_stmt_var=try_stmt_rule(p)) +) +{_res=try_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var while_stmt_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,647) +&& +(while_stmt_var=while_stmt_rule(p)) +) +{_res=while_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var match_stmt_var; +if( +(match_stmt_var=match_stmt_rule(p)) +) +{_res=match_stmt_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function assignment_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +var c; +if( +(a=$B._PyPegen.name_token(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(b=expression_rule(p)) +&& +(c=_tmp_11_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B.helper_functions.CHECK_VERSION($B.ast.stmt,6,"Variable annotation syntax is",new $B._PyAST.AnnAssign($B.helper_functions.CHECK($B.ast.expr,$B._PyPegen.set_expr_context(p,a,$B.parser_constants.Store )),b,c,1,EXTRA )); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +var c; +if( +(a=_tmp_12_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(b=expression_rule(p)) +&& +(c=_tmp_13_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B.helper_functions.CHECK_VERSION($B.ast.stmt,6,"Variable annotations syntax is",new $B._PyAST.AnnAssign(a,b,c,0,EXTRA )); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +var tc; +if( +(a=_loop1_14_rule(p)) +&& +(b=_tmp_15_rule(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,22) +&& +(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Assign(a,b,$B.helper_functions.NEW_TYPE_COMMENT(p,tc ),EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _cut_var=0; +var a; +var b; +var c; +if( +(a=single_target_rule(p)) +&& +(b=augassign_rule(p)) +&& +(_cut_var=1) +&& +(c=_tmp_16_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.AugAssign(a,b.kind,c,EXTRA); +break;} +p.mark=_mark; +if(_cut_var){return NULL;}} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_assignment_var; +if( +(invalid_assignment_var=invalid_assignment_rule(p)) +) +{_res=invalid_assignment_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function annotated_rhs_rule(p) 
+{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) +{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_expressions_var; +if( +(star_expressions_var=star_expressions_rule(p)) +) +{_res=star_expressions_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function augassign_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,36)) +) +{_res=$B._PyPegen.augoperator(p,new $B.ast.Add()); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,37)) +) +{_res=$B._PyPegen.augoperator(p,new $B.ast.Sub()); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,38)) +) +{_res=$B._PyPegen.augoperator(p,new $B.ast.Mult()); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,50)) +) +{_res=$B.helper_functions.CHECK_VERSION($B.parser_constants.AugOperator,5,"The '@' operator is",$B._PyPegen.augoperator(p,new $B.ast.MatMult())); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,39)) +) +{_res=$B._PyPegen.augoperator(p,new $B.ast.Div()); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,40)) +) +{_res=$B._PyPegen.augoperator(p,new $B.ast.Mod()); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,41)) +) +{_res=$B._PyPegen.augoperator(p,new $B.ast.BitAnd()); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,42)) +) +{_res=$B._PyPegen.augoperator(p,new $B.ast.BitOr()); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,43)) +) +{_res=$B._PyPegen.augoperator(p,new $B.ast.BitXor()); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,44)) +) +{_res=$B._PyPegen.augoperator(p,new $B.ast.LShift()); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,45)) +) +{_res=$B._PyPegen.augoperator(p,new $B.ast.RShift()); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,46)) +) +{_res=$B._PyPegen.augoperator(p,new $B.ast.Pow()); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,48)) +) +{_res=$B._PyPegen.augoperator(p,new $B.ast.FloorDiv()); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function return_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,519)) +&& +(a=star_expressions_rule(p),!p.error_indicator) 
+) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Return(a,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function raise_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +var b; +if( +(_keyword=$B._PyPegen.expect_token(p,522)) +&& +(a=expression_rule(p)) +&& +(b=_tmp_17_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Raise(a,b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,522)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Raise($B.parser_constants.NULL,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function global_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,523)) +&& +(a=_gather_18_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Global($B.helper_functions.CHECK($B.parser_constants.asdl_identifier_seq,$B._PyPegen.map_names_to_ids(p,a )),EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function nonlocal_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,524)) +&& +(a=_gather_20_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Nonlocal($B.helper_functions.CHECK($B.parser_constants.asdl_identifier_seq,$B._PyPegen.map_names_to_ids(p,a )),EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function del_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,604)) +&& +(a=del_targets_rule(p)) +&& +$B._PyPegen.lookahead(1,_tmp_22_rule,p) +) +{var 
_token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Delete(a,EXTRA); +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_del_stmt_var; +if( +(invalid_del_stmt_var=invalid_del_stmt_rule(p)) +) +{_res=invalid_del_stmt_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function yield_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var y; +if( +(y=yield_expr_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Expr(y,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function assert_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +var b; +if( +(_keyword=$B._PyPegen.expect_token(p,526)) +&& +(a=expression_rule(p)) +&& +(b=_tmp_23_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Assert(a,b,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function import_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_import_var; +if( +(invalid_import_var=invalid_import_rule(p)) +) +{_res=invalid_import_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var import_name_var; +if( +(import_name_var=import_name_rule(p)) +) +{_res=import_name_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var import_from_var; +if( +(import_from_var=import_from_rule(p)) +) +{_res=import_from_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function import_name_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,607)) +&& +(a=dotted_as_names_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Import(a,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function import_from_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ 
+if(p.error_indicator){return NULL;} +var _keyword; +var _keyword_1; +var a; +var b; +var c; +if( +(_keyword=$B._PyPegen.expect_token(p,608)) +&& +(a=_loop0_24_rule(p)) +&& +(b=dotted_name_rule(p)) +&& +(_keyword_1=$B._PyPegen.expect_token(p,607)) +&& +(c=import_from_targets_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.ImportFrom(b. id,c,$B._PyPegen.seq_count_dots(a ),EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _keyword_1; +var a; +var b; +if( +(_keyword=$B._PyPegen.expect_token(p,608)) +&& +(a=_loop1_25_rule(p)) +&& +(_keyword_1=$B._PyPegen.expect_token(p,607)) +&& +(b=import_from_targets_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.ImportFrom($B.parser_constants.NULL,b,$B._PyPegen.seq_count_dots(a ),EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function import_from_targets_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _opt_var; +UNUSED(_opt_var); +var a; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=import_from_as_names_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{_res=a; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var import_from_as_names_var; +if( +(import_from_as_names_var=import_from_as_names_rule(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,12) +) +{_res=import_from_as_names_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B._PyPegen.singleton_seq(p,$B.helper_functions.CHECK($B.ast.alias,$B._PyPegen.alias_for_star(p,EXTRA ))); +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_import_from_targets_var; +if( +(invalid_import_from_targets_var=invalid_import_from_targets_rule(p)) +) +{_res=invalid_import_from_targets_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function import_from_as_names_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=_gather_26_rule(p)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function import_from_as_name_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=$B._PyPegen.name_token(p)) +&& +(b=_tmp_28_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); 
+if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.alias(a. id,(b )?(b ). id :$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function dotted_as_names_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=_gather_29_rule(p)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function dotted_as_name_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=dotted_name_rule(p)) +&& +(b=_tmp_31_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.alias(a. id,(b )?(b ). id :$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function dotted_name_raw(){}; +function dotted_name_rule(p) +{var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,dotted_name_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +var _resmark=p.mark; +while(1){var tmpvar_0=$B._PyPegen.update_memo(p,_mark,dotted_name_type,_res); +if(tmpvar_0){return _res;} +p.mark=_mark; +var _raw=dotted_name_raw(p); +if(p.error_indicator){return NULL;} +if(_raw==NULL ||p.mark <=_resmark) +break; +_resmark=p.mark; +_res=_raw;} +p.mark=_resmark; +return _res;} +function dotted_name_raw(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=dotted_name_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,23)) +&& +(b=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.join_names_with_dot(p,a,b); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var name_var; +if( +(name_var=$B._PyPegen.name_token(p)) +) +{_res=name_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function block_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,block_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +var dedent_var; +var indent_var; +var newline_var; +if( +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +(indent_var=$B._PyPegen.expect_token(p,INDENT)) +&& +(a=statements_rule(p)) +&& +(dedent_var=$B._PyPegen.expect_token(p,DEDENT)) +) +{_res=a; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var simple_stmts_var; +if( +(simple_stmts_var=simple_stmts_rule(p)) +) +{_res=simple_stmts_var; +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_block_var; +if( +(invalid_block_var=invalid_block_rule(p)) +) +{_res=invalid_block_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,block_type,_res); +return _res;} +function decorators_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=_loop1_32_rule(p)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} 
+function class_def_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=decorators_rule(p)) +&& +(b=class_def_raw_rule(p)) +) +{_res=$B._PyPegen.class_def_decorators(p,a,b); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var class_def_raw_var; +if( +(class_def_raw_var=class_def_raw_rule(p)) +) +{_res=class_def_raw_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function class_def_raw_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_class_def_raw_var; +if( +(invalid_class_def_raw_var=invalid_class_def_raw_rule(p)) +) +{_res=invalid_class_def_raw_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var a; +var b; +var c; +var t; +if( +(_keyword=$B._PyPegen.expect_token(p,654)) +&& +(a=$B._PyPegen.name_token(p)) +&& +(t=type_params_rule(p),!p.error_indicator) +&& +(b=_tmp_33_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(c=block_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.ClassDef(a. id,(b )?(b ). args :$B.parser_constants.NULL,(b )?(b ). keywords :$B.parser_constants.NULL,c,$B.parser_constants.NULL,t,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function function_def_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var d; +var f; +if( +(d=decorators_rule(p)) +&& +(f=function_def_raw_rule(p)) +) +{_res=$B._PyPegen.function_def_decorators(p,d,f); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var function_def_raw_var; +if( +(function_def_raw_var=function_def_raw_rule(p)) +) +{_res=function_def_raw_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function function_def_raw_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_def_raw_var; +if( +(invalid_def_raw_var=invalid_def_raw_rule(p)) +) +{_res=invalid_def_raw_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var _literal_1; +var _literal_2; +var a; +var b; +var n; +var params; +var t; +var tc; +if( +(_keyword=$B._PyPegen.expect_token(p,652)) +&& +(n=$B._PyPegen.name_token(p)) +&& +(t=type_params_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_forced_token(p,7,"(")) +&& +(params=params_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +&& +(a=_tmp_34_rule(p),!p.error_indicator) +&& +(_literal_2=$B._PyPegen.expect_forced_token(p,11,":")) +&& +(tc=func_type_comment_rule(p),!p.error_indicator) +&& +(b=block_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; 
+EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.FunctionDef(n. id,(params )? params :$B.helper_functions.CHECK($B.ast.arguments,$B._PyPegen.empty_arguments(p )),b,$B.parser_constants.NULL,a,$B.helper_functions.NEW_TYPE_COMMENT(p,tc ),t,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var _literal_1; +var _literal_2; +var a; +var async_var; +var b; +var n; +var params; +var t; +var tc; +if( +(async_var=$B._PyPegen.expect_token(p,ASYNC)) +&& +(_keyword=$B._PyPegen.expect_token(p,652)) +&& +(n=$B._PyPegen.name_token(p)) +&& +(t=type_params_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_forced_token(p,7,"(")) +&& +(params=params_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +&& +(a=_tmp_35_rule(p),!p.error_indicator) +&& +(_literal_2=$B._PyPegen.expect_forced_token(p,11,":")) +&& +(tc=func_type_comment_rule(p),!p.error_indicator) +&& +(b=block_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B.helper_functions.CHECK_VERSION($B.ast.stmt,5,"Async functions are",new $B._PyAST.AsyncFunctionDef(n. id,(params )? params :$B.helper_functions.CHECK($B.ast.arguments,$B._PyPegen.empty_arguments(p )),b,$B.parser_constants.NULL,a,$B.helper_functions.NEW_TYPE_COMMENT(p,tc ),t,EXTRA )); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function params_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_parameters_var; +if( +(invalid_parameters_var=invalid_parameters_rule(p)) +) +{_res=invalid_parameters_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var parameters_var; +if( +(parameters_var=parameters_rule(p)) +) +{_res=parameters_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function parameters_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +var c; +var d; +if( +(a=slash_no_default_rule(p)) +&& +(b=_loop0_36_rule(p)) +&& +(c=_loop0_37_rule(p)) +&& +(d=star_etc_rule(p),!p.error_indicator) +) +{_res=$B.helper_functions.CHECK_VERSION($B.ast.arguments,8,"Positional-only parameters are",$B._PyPegen.make_arguments(p,a,$B.parser_constants.NULL,b,c,d )); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +var c; +if( +(a=slash_with_default_rule(p)) +&& +(b=_loop0_38_rule(p)) +&& +(c=star_etc_rule(p),!p.error_indicator) +) +{_res=$B.helper_functions.CHECK_VERSION($B.ast.arguments,8,"Positional-only parameters are",$B._PyPegen.make_arguments(p,$B.parser_constants.NULL,a,$B.parser_constants.NULL,b,c )); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +var c; +if( +(a=_loop1_39_rule(p)) +&& +(b=_loop0_40_rule(p)) +&& +(c=star_etc_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.make_arguments(p,$B.parser_constants.NULL,$B.parser_constants.NULL,a,b,c); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=_loop1_41_rule(p)) +&& +(b=star_etc_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.make_arguments(p,$B.parser_constants.NULL,$B.parser_constants.NULL,$B.parser_constants.NULL,a,b); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=star_etc_rule(p)) +) 
+{_res=$B._PyPegen.make_arguments(p,$B.parser_constants.NULL,$B.parser_constants.NULL,$B.parser_constants.NULL,$B.parser_constants.NULL,a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function slash_no_default_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(a=_loop1_42_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,17)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +) +{_res=a; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(a=_loop1_43_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,17)) +&& +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,8) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function slash_with_default_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(a=_loop0_44_rule(p)) +&& +(b=_loop1_45_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,17)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +) +{_res=$B._PyPegen.slash_with_default(p,a,b); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=_loop0_46_rule(p)) +&& +(b=_loop1_47_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,17)) +&& +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,8) +) +{_res=$B._PyPegen.slash_with_default(p,a,b); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function star_etc_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_star_etc_var; +if( +(invalid_star_etc_var=invalid_star_etc_rule(p)) +) +{_res=invalid_star_etc_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +var c; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(a=param_no_default_rule(p)) +&& +(b=_loop0_48_rule(p)) +&& +(c=kwds_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.star_etc(p,a,b,c); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +var c; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(a=param_no_default_star_annotation_rule(p)) +&& +(b=_loop0_49_rule(p)) +&& +(c=kwds_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.star_etc(p,a,b,c); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var b; +var c; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +&& +(b=_loop1_50_rule(p)) +&& +(c=kwds_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.star_etc(p,$B.parser_constants.NULL,b,c); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=kwds_rule(p)) +) +{_res=$B._PyPegen.star_etc(p,$B.parser_constants.NULL,$B.parser_constants.NULL,a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function kwds_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_kwds_var; +if( +(invalid_kwds_var=invalid_kwds_rule(p)) +) +{_res=invalid_kwds_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(a=param_no_default_rule(p)) +) +{_res=a; 
+break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function param_no_default_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var tc; +if( +(a=param_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator) +) +{_res=$B._PyPegen.add_type_comment_to_arg(p,a,tc); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var tc; +if( +(a=param_rule(p)) +&& +(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator) +&& +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,8) +) +{_res=$B._PyPegen.add_type_comment_to_arg(p,a,tc); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function param_no_default_star_annotation_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var tc; +if( +(a=param_star_annotation_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator) +) +{_res=$B._PyPegen.add_type_comment_to_arg(p,a,tc); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var tc; +if( +(a=param_star_annotation_rule(p)) +&& +(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator) +&& +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,8) +) +{_res=$B._PyPegen.add_type_comment_to_arg(p,a,tc); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function param_with_default_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var c; +var tc; +if( +(a=param_rule(p)) +&& +(c=default_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator) +) +{_res=$B._PyPegen.name_default_pair(p,a,c,tc); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var c; +var tc; +if( +(a=param_rule(p)) +&& +(c=default_rule(p)) +&& +(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator) +&& +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,8) +) +{_res=$B._PyPegen.name_default_pair(p,a,c,tc); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function param_maybe_default_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var c; +var tc; +if( +(a=param_rule(p)) +&& +(c=default_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator) +) +{_res=$B._PyPegen.name_default_pair(p,a,c,tc); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var c; +var tc; +if( +(a=param_rule(p)) +&& +(c=default_rule(p),!p.error_indicator) +&& +(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator) +&& +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,8) +) +{_res=$B._PyPegen.name_default_pair(p,a,c,tc); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function param_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ 
+if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=$B._PyPegen.name_token(p)) +&& +(b=annotation_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.arg(a. id,b,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function param_star_annotation_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=$B._PyPegen.name_token(p)) +&& +(b=star_annotation_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.arg(a. id,b,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function annotation_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(a=expression_rule(p)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function star_annotation_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(a=star_expression_rule(p)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function default_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,22)) +&& +(a=expression_rule(p)) +) +{_res=a; +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_default_var; +if( +(invalid_default_var=invalid_default_rule(p)) +) +{_res=invalid_default_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function if_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_if_stmt_var; +if( +(invalid_if_stmt_var=invalid_if_stmt_rule(p)) +) +{_res=invalid_if_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var a; +var b; +var c; +if( +(_keyword=$B._PyPegen.expect_token(p,642)) +&& +(a=named_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(b=block_rule(p)) +&& +(c=elif_stmt_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.If(a,b,$B.helper_functions.CHECK($B.parser_constants.asdl_stmt_seq,$B._PyPegen.singleton_seq(p,c )),EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var a; +var b; +var c; +if( 
+(_keyword=$B._PyPegen.expect_token(p,642)) +&& +(a=named_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(b=block_rule(p)) +&& +(c=else_block_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.If(a,b,c,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function elif_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_elif_stmt_var; +if( +(invalid_elif_stmt_var=invalid_elif_stmt_rule(p)) +) +{_res=invalid_elif_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var a; +var b; +var c; +if( +(_keyword=$B._PyPegen.expect_token(p,644)) +&& +(a=named_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(b=block_rule(p)) +&& +(c=elif_stmt_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.If(a,b,$B.helper_functions.CHECK($B.parser_constants.asdl_stmt_seq,$B._PyPegen.singleton_seq(p,c )),EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var a; +var b; +var c; +if( +(_keyword=$B._PyPegen.expect_token(p,644)) +&& +(a=named_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(b=block_rule(p)) +&& +(c=else_block_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.If(a,b,c,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function else_block_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_else_stmt_var; +if( +(invalid_else_stmt_var=invalid_else_stmt_rule(p)) +) +{_res=invalid_else_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var b; +if( +(_keyword=$B._PyPegen.expect_token(p,645)) +&& +(_literal=$B._PyPegen.expect_forced_token(p,11,":")) +&& +(b=block_rule(p)) +) +{_res=b; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function while_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_while_stmt_var; +if( +(invalid_while_stmt_var=invalid_while_stmt_rule(p)) +) +{_res=invalid_while_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var a; +var b; +var c; +if( +(_keyword=$B._PyPegen.expect_token(p,647)) +&& +(a=named_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(b=block_rule(p)) +&& +(c=else_block_rule(p),!p.error_indicator) +) +{var 
_token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.While(a,b,c,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function for_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_for_stmt_var; +if( +(invalid_for_stmt_var=invalid_for_stmt_rule(p)) +) +{_res=invalid_for_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _cut_var=0; +var _keyword; +var _keyword_1; +var _literal; +var b; +var el; +var ex; +var t; +var tc; +if( +(_keyword=$B._PyPegen.expect_token(p,650)) +&& +(t=star_targets_rule(p)) +&& +(_keyword_1=$B._PyPegen.expect_token(p,651)) +&& +(_cut_var=1) +&& +(ex=star_expressions_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator) +&& +(b=block_rule(p)) +&& +(el=else_block_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.For(t,ex,b,el,$B.helper_functions.NEW_TYPE_COMMENT(p,tc ),EXTRA); +break;} +p.mark=_mark; +if(_cut_var){return NULL;}} +{ +if(p.error_indicator){return NULL;} +var _cut_var=0; +var _keyword; +var _keyword_1; +var _literal; +var async_var; +var b; +var el; +var ex; +var t; +var tc; +if( +(async_var=$B._PyPegen.expect_token(p,ASYNC)) +&& +(_keyword=$B._PyPegen.expect_token(p,650)) +&& +(t=star_targets_rule(p)) +&& +(_keyword_1=$B._PyPegen.expect_token(p,651)) +&& +(_cut_var=1) +&& +(ex=star_expressions_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator) +&& +(b=block_rule(p)) +&& +(el=else_block_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B.helper_functions.CHECK_VERSION($B.ast.stmt,5,"Async for loops are",new $B._PyAST.AsyncFor(t,ex,b,el,$B.helper_functions.NEW_TYPE_COMMENT(p,tc ),EXTRA )); +break;} +p.mark=_mark; +if(_cut_var){return NULL;}} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_for_target_var; +if( +(invalid_for_target_var=invalid_for_target_rule(p)) +) +{_res=invalid_for_target_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function with_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_with_stmt_indent_var; +if( +(invalid_with_stmt_indent_var=invalid_with_stmt_indent_rule(p)) +) +{_res=invalid_with_stmt_indent_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var _literal_1; +var _literal_2; +var _opt_var; +UNUSED(_opt_var); +var a; +var b; +if( +(_keyword=$B._PyPegen.expect_token(p,615)) 
+&&
+(_literal=$B._PyPegen.expect_token(p,7))
+&&
+(a=_gather_51_rule(p))
+&&
+(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator)
+&&
+(_literal_1=$B._PyPegen.expect_token(p,8))
+&&
+(_literal_2=$B._PyPegen.expect_token(p,11))
+&&
+(b=block_rule(p))
+)
+{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p);
+if(_token==NULL){return NULL;}
+EXTRA.end_lineno=_token.end_lineno;
+EXTRA.end_col_offset=_token.end_col_offset;
+_res=$B.helper_functions.CHECK_VERSION($B.ast.stmt,9,"Parenthesized context managers are",new $B._PyAST.With(a,b,$B.parser_constants.NULL,EXTRA ));
+break;}
+p.mark=_mark;}
+{
+if(p.error_indicator){return NULL;}
+var _keyword;
+var _literal;
+var a;
+var b;
+var tc;
+if(
+(_keyword=$B._PyPegen.expect_token(p,615))
+&&
+(a=_gather_53_rule(p))
+&&
+(_literal=$B._PyPegen.expect_token(p,11))
+&&
+(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator)
+&&
+(b=block_rule(p))
+)
+{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p);
+if(_token==NULL){return NULL;}
+EXTRA.end_lineno=_token.end_lineno;
+EXTRA.end_col_offset=_token.end_col_offset;
+_res=new $B._PyAST.With(a,b,$B.helper_functions.NEW_TYPE_COMMENT(p,tc ),EXTRA);
+break;}
+p.mark=_mark;}
+{
+if(p.error_indicator){return NULL;}
+var _keyword;
+var _literal;
+var _literal_1;
+var _literal_2;
+var _opt_var;
+UNUSED(_opt_var);
+var a;
+var async_var;
+var b;
+if(
+(async_var=$B._PyPegen.expect_token(p,ASYNC))
+&&
+(_keyword=$B._PyPegen.expect_token(p,615))
+&&
+(_literal=$B._PyPegen.expect_token(p,7))
+&&
+(a=_gather_55_rule(p))
+&&
+(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator)
+&&
+(_literal_1=$B._PyPegen.expect_token(p,8))
+&&
+(_literal_2=$B._PyPegen.expect_token(p,11))
+&&
+(b=block_rule(p))
+)
+{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p);
+if(_token==NULL){return NULL;}
+EXTRA.end_lineno=_token.end_lineno;
+EXTRA.end_col_offset=_token.end_col_offset;
+_res=$B.helper_functions.CHECK_VERSION($B.ast.stmt,5,"Async with statements are",new $B._PyAST.AsyncWith(a,b,$B.parser_constants.NULL,EXTRA ));
+break;}
+p.mark=_mark;}
+{
+if(p.error_indicator){return NULL;}
+var _keyword;
+var _literal;
+var a;
+var async_var;
+var b;
+var tc;
+if(
+(async_var=$B._PyPegen.expect_token(p,ASYNC))
+&&
+(_keyword=$B._PyPegen.expect_token(p,615))
+&&
+(a=_gather_57_rule(p))
+&&
+(_literal=$B._PyPegen.expect_token(p,11))
+&&
+(tc=$B._PyPegen.expect_token(p,TYPE_COMMENT),!p.error_indicator)
+&&
+(b=block_rule(p))
+)
+{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p);
+if(_token==NULL){return NULL;}
+EXTRA.end_lineno=_token.end_lineno;
+EXTRA.end_col_offset=_token.end_col_offset;
+_res=$B.helper_functions.CHECK_VERSION($B.ast.stmt,5,"Async with statements are",new $B._PyAST.AsyncWith(a,b,$B.helper_functions.NEW_TYPE_COMMENT(p,tc ),EXTRA ));
+break;}
+p.mark=_mark;}
+if(p.call_invalid_rules){
+if(p.error_indicator){return NULL;}
+var invalid_with_stmt_var;
+if(
+(invalid_with_stmt_var=invalid_with_stmt_rule(p))
+)
+{_res=invalid_with_stmt_var;
+break;}
+p.mark=_mark;}
+_res=NULL;
+break;}
+return _res;}
+function with_item_rule(p)
+{if(p.error_indicator){return NULL;}
+while(1){var _res=NULL;
+var _mark=p.mark;
+{
+if(p.error_indicator){return NULL;}
+var _keyword;
+var e;
+var t;
+if(
+(e=expression_rule(p))
+&&
+(_keyword=$B._PyPegen.expect_token(p,640))
+&&
+(t=star_target_rule(p))
+&&
+$B._PyPegen.lookahead(1,_tmp_59_rule,p)
+)
+{_res=new $B._PyAST.withitem(e,t,p.arena);
+break;}
+p.mark=_mark;}
+if(p.call_invalid_rules){
+if(p.error_indicator){return NULL;}
+var 
invalid_with_item_var; +if( +(invalid_with_item_var=invalid_with_item_rule(p)) +) +{_res=invalid_with_item_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var e; +if( +(e=expression_rule(p)) +) +{_res=new $B._PyAST.withitem(e,$B.parser_constants.NULL,p.arena); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function try_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_try_stmt_var; +if( +(invalid_try_stmt_var=invalid_try_stmt_rule(p)) +) +{_res=invalid_try_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var b; +var f; +if( +(_keyword=$B._PyPegen.expect_token(p,624)) +&& +(_literal=$B._PyPegen.expect_forced_token(p,11,":")) +&& +(b=block_rule(p)) +&& +(f=finally_block_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Try(b,$B.parser_constants.NULL,$B.parser_constants.NULL,f,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var b; +var el; +var ex; +var f; +if( +(_keyword=$B._PyPegen.expect_token(p,624)) +&& +(_literal=$B._PyPegen.expect_forced_token(p,11,":")) +&& +(b=block_rule(p)) +&& +(ex=_loop1_60_rule(p)) +&& +(el=else_block_rule(p),!p.error_indicator) +&& +(f=finally_block_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Try(b,ex,el,f,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var b; +var el; +var ex; +var f; +if( +(_keyword=$B._PyPegen.expect_token(p,624)) +&& +(_literal=$B._PyPegen.expect_forced_token(p,11,":")) +&& +(b=block_rule(p)) +&& +(ex=_loop1_61_rule(p)) +&& +(el=else_block_rule(p),!p.error_indicator) +&& +(f=finally_block_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B.helper_functions.CHECK_VERSION($B.ast.stmt,11,"Exception groups are",new $B._PyAST.TryStar(b,ex,el,f,EXTRA )); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function except_block_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_except_stmt_indent_var; +if( +(invalid_except_stmt_indent_var=invalid_except_stmt_indent_rule(p)) +) +{_res=invalid_except_stmt_indent_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var b; +var e; +var t; +if( +(_keyword=$B._PyPegen.expect_token(p,637)) +&& +(e=expression_rule(p)) +&& +(t=_tmp_62_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(b=block_rule(p)) +) +{var 
_token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.ExceptHandler(e,(t )?(t ). id :$B.parser_constants.NULL,b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var b; +if( +(_keyword=$B._PyPegen.expect_token(p,637)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(b=block_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.ExceptHandler($B.parser_constants.NULL,$B.parser_constants.NULL,b,EXTRA); +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_except_stmt_var; +if( +(invalid_except_stmt_var=invalid_except_stmt_rule(p)) +) +{_res=invalid_except_stmt_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function except_star_block_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_except_star_stmt_indent_var; +if( +(invalid_except_star_stmt_indent_var=invalid_except_star_stmt_indent_rule(p)) +) +{_res=invalid_except_star_stmt_indent_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var _literal_1; +var b; +var e; +var t; +if( +(_keyword=$B._PyPegen.expect_token(p,637)) +&& +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(e=expression_rule(p)) +&& +(t=_tmp_63_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,11)) +&& +(b=block_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.ExceptHandler(e,(t )?(t ). 
id :$B.parser_constants.NULL,b,EXTRA); +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_except_stmt_var; +if( +(invalid_except_stmt_var=invalid_except_stmt_rule(p)) +) +{_res=invalid_except_stmt_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function finally_block_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_finally_stmt_var; +if( +(invalid_finally_stmt_var=invalid_finally_stmt_rule(p)) +) +{_res=invalid_finally_stmt_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,633)) +&& +(_literal=$B._PyPegen.expect_forced_token(p,11,":")) +&& +(a=block_rule(p)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function match_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var cases; +var dedent_var; +var indent_var; +var newline_var; +var subject; +if( +(_keyword=$B._PyPegen.expect_soft_keyword(p,"match")) +&& +(subject=subject_expr_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +(indent_var=$B._PyPegen.expect_token(p,INDENT)) +&& +(cases=_loop1_64_rule(p)) +&& +(dedent_var=$B._PyPegen.expect_token(p,DEDENT)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B.helper_functions.CHECK_VERSION($B.ast.stmt,10,"Pattern matching is",new $B._PyAST.Match(subject,cases,EXTRA )); +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_match_stmt_var; +if( +(invalid_match_stmt_var=invalid_match_stmt_rule(p)) +) +{_res=invalid_match_stmt_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function subject_expr_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var value; +var values; +if( +(value=star_named_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(values=star_named_expressions_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Tuple($B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.seq_insert_in_front(p,value,values )),$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var named_expression_var; +if( +(named_expression_var=named_expression_rule(p)) +) +{_res=named_expression_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function case_block_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.call_invalid_rules){ 
+if(p.error_indicator){return NULL;} +var invalid_case_block_var; +if( +(invalid_case_block_var=invalid_case_block_rule(p)) +) +{_res=invalid_case_block_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var body; +var guard; +var pattern; +if( +(_keyword=$B._PyPegen.expect_soft_keyword(p,"case")) +&& +(pattern=patterns_rule(p)) +&& +(guard=guard_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(body=block_rule(p)) +) +{_res=new $B._PyAST.match_case(pattern,guard,body,p.arena); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function guard_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var guard; +if( +(_keyword=$B._PyPegen.expect_token(p,642)) +&& +(guard=named_expression_rule(p)) +) +{_res=guard; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function patterns_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var patterns; +if( +(patterns=open_sequence_pattern_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchSequence(patterns,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var pattern_var; +if( +(pattern_var=pattern_rule(p)) +) +{_res=pattern_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var as_pattern_var; +if( +(as_pattern_var=as_pattern_rule(p)) +) +{_res=as_pattern_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var or_pattern_var; +if( +(or_pattern_var=or_pattern_rule(p)) +) +{_res=or_pattern_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function as_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var pattern; +var target; +if( +(pattern=or_pattern_rule(p)) +&& +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(target=pattern_capture_target_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchAs(pattern,target. 
id,EXTRA); +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_as_pattern_var; +if( +(invalid_as_pattern_var=invalid_as_pattern_rule(p)) +) +{_res=invalid_as_pattern_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function or_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var patterns; +if( +(patterns=_gather_65_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B.helper_functions.asdl_seq_LEN(patterns )==1 ? $B.helper_functions.asdl_seq_GET(patterns,0 ):new $B._PyAST.MatchOr(patterns,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function closed_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,closed_pattern_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var literal_pattern_var; +if( +(literal_pattern_var=literal_pattern_rule(p)) +) +{_res=literal_pattern_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var capture_pattern_var; +if( +(capture_pattern_var=capture_pattern_rule(p)) +) +{_res=capture_pattern_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var wildcard_pattern_var; +if( +(wildcard_pattern_var=wildcard_pattern_rule(p)) +) +{_res=wildcard_pattern_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var value_pattern_var; +if( +(value_pattern_var=value_pattern_rule(p)) +) +{_res=value_pattern_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var group_pattern_var; +if( +(group_pattern_var=group_pattern_rule(p)) +) +{_res=group_pattern_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var sequence_pattern_var; +if( +(sequence_pattern_var=sequence_pattern_rule(p)) +) +{_res=sequence_pattern_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var mapping_pattern_var; +if( +(mapping_pattern_var=mapping_pattern_rule(p)) +) +{_res=mapping_pattern_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var class_pattern_var; +if( +(class_pattern_var=class_pattern_rule(p)) +) +{_res=class_pattern_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,closed_pattern_type,_res); +return _res;} +function literal_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var value; +if( +(value=signed_number_rule(p)) +&& +$B._PyPegen.lookahead(0,_tmp_67_rule,p) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchValue(value,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var value; +if( +(value=complex_number_rule(p)) +) +{var 
_token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchValue(value,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var value; +if( +(value=strings_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchValue(value,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,602)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchSingleton($B.parser_constants.Py_None,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,601)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchSingleton($B.parser_constants.Py_True,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,603)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchSingleton($B.parser_constants.Py_False,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function literal_expr_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var signed_number_var; +if( +(signed_number_var=signed_number_rule(p)) +&& +$B._PyPegen.lookahead(0,_tmp_68_rule,p) +) +{_res=signed_number_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var complex_number_var; +if( +(complex_number_var=complex_number_rule(p)) +) +{_res=complex_number_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var strings_var; +if( +(strings_var=strings_rule(p)) +) +{_res=strings_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,602)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Constant($B.parser_constants.Py_None,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,601)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Constant($B.parser_constants.Py_True,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,603)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} 
+EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Constant($B.parser_constants.Py_False,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function complex_number_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var imag; +var real; +if( +(real=signed_real_number_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,14)) +&& +(imag=imaginary_number_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(real,new $B.ast.Add(),imag,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var imag; +var real; +if( +(real=signed_real_number_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,15)) +&& +(imag=imaginary_number_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(real,new $B.ast.Sub(),imag,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function signed_number_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var number_var; +if( +(number_var=$B._PyPegen.number_token(p)) +) +{_res=number_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var number; +if( +(_literal=$B._PyPegen.expect_token(p,15)) +&& +(number=$B._PyPegen.number_token(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.UnaryOp(new $B.ast.USub(),number,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function signed_real_number_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var real_number_var; +if( +(real_number_var=real_number_rule(p)) +) +{_res=real_number_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var real; +if( +(_literal=$B._PyPegen.expect_token(p,15)) +&& +(real=real_number_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.UnaryOp(new $B.ast.USub(),real,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function real_number_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var real; +if( +(real=$B._PyPegen.number_token(p)) +) 
+{_res=$B._PyPegen.ensure_real(p,real); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function imaginary_number_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var imag; +if( +(imag=$B._PyPegen.number_token(p)) +) +{_res=$B._PyPegen.ensure_imaginary(p,imag); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function capture_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var target; +if( +(target=pattern_capture_target_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchAs($B.parser_constants.NULL,target. id,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function pattern_capture_target_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var name; +if( +$B._PyPegen.lookahead_with_string(0,$B._PyPegen.expect_soft_keyword,p,"_") +&& +(name=$B._PyPegen.name_token(p)) +&& +$B._PyPegen.lookahead(0,_tmp_69_rule,p) +) +{_res=$B._PyPegen.set_expr_context(p,name,$B.parser_constants.Store); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function wildcard_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_soft_keyword(p,"_")) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchAs($B.parser_constants.NULL,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function value_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var attr; +if( +(attr=attr_rule(p)) +&& +$B._PyPegen.lookahead(0,_tmp_70_rule,p) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchValue(attr,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function attr_raw(){}; +function attr_rule(p) +{var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,attr_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +var _resmark=p.mark; +while(1){var tmpvar_1=$B._PyPegen.update_memo(p,_mark,attr_type,_res); +if(tmpvar_1){return _res;} +p.mark=_mark; +var _raw=attr_raw(p); +if(p.error_indicator){return NULL;} +if(_raw==NULL ||p.mark <=_resmark) +break; +_resmark=p.mark; +_res=_raw;} +p.mark=_resmark; +return _res;} +function attr_raw(p) 
+{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var attr; +var value; +if( +(value=name_or_attr_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,23)) +&& +(attr=$B._PyPegen.name_token(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Attribute(value,attr. id,$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function name_or_attr_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var attr_var; +if( +(attr_var=attr_rule(p)) +) +{_res=attr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var name_var; +if( +(name_var=$B._PyPegen.name_token(p)) +) +{_res=name_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function group_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var pattern; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(pattern=pattern_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{_res=pattern; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function sequence_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var patterns; +if( +(_literal=$B._PyPegen.expect_token(p,9)) +&& +(patterns=maybe_sequence_pattern_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,10)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchSequence(patterns,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var patterns; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(patterns=open_sequence_pattern_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchSequence(patterns,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function open_sequence_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var pattern; +var patterns; +if( +(pattern=maybe_star_pattern_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(patterns=maybe_sequence_pattern_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.seq_insert_in_front(p,pattern,patterns); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function maybe_sequence_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var 
_mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var patterns; +if( +(patterns=_gather_71_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +) +{_res=patterns; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function maybe_star_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var star_pattern_var; +if( +(star_pattern_var=star_pattern_rule(p)) +) +{_res=star_pattern_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var pattern_var; +if( +(pattern_var=pattern_rule(p)) +) +{_res=pattern_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function star_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,star_pattern_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var target; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(target=pattern_capture_target_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchStar(target. id,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var wildcard_pattern_var; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(wildcard_pattern_var=wildcard_pattern_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchStar($B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,star_pattern_type,_res); +return _res;} +function mapping_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(_literal_1=$B._PyPegen.expect_token(p,26)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchMapping($B.parser_constants.NULL,$B.parser_constants.NULL,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _opt_var; +UNUSED(_opt_var); +var rest; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(rest=double_star_pattern_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,26)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchMapping($B.parser_constants.NULL,$B.parser_constants.NULL,rest. 
id,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _literal_2; +var _opt_var; +UNUSED(_opt_var); +var items; +var rest; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(items=items_pattern_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +&& +(rest=double_star_pattern_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +&& +(_literal_2=$B._PyPegen.expect_token(p,26)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchMapping($B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.get_pattern_keys(p,items )),$B.helper_functions.CHECK($B.parser_constants.asdl_pattern_seq,$B._PyPegen.get_patterns(p,items )),rest. id,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _opt_var; +UNUSED(_opt_var); +var items; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(items=items_pattern_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,26)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchMapping($B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.get_pattern_keys(p,items )),$B.helper_functions.CHECK($B.parser_constants.asdl_pattern_seq,$B._PyPegen.get_patterns(p,items )),$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function items_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _gather_73_var; +if( +(_gather_73_var=_gather_73_rule(p)) +) +{_res=_gather_73_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function key_value_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var key; +var pattern; +if( +(key=_tmp_75_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(pattern=pattern_rule(p)) +) +{_res=$B._PyPegen.key_pattern_pair(p,key,pattern); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function double_star_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var target; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(target=pattern_capture_target_rule(p)) +) +{_res=target; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function class_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var cls; +if( +(cls=name_or_attr_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new 
$B._PyAST.MatchClass(cls,$B.parser_constants.NULL,$B.parser_constants.NULL,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _opt_var; +UNUSED(_opt_var); +var cls; +var patterns; +if( +(cls=name_or_attr_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(patterns=positional_patterns_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchClass(cls,patterns,$B.parser_constants.NULL,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _opt_var; +UNUSED(_opt_var); +var cls; +var keywords; +if( +(cls=name_or_attr_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(keywords=keyword_patterns_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchClass(cls,$B.parser_constants.NULL,$B.helper_functions.CHECK($B.parser_constants.asdl_identifier_seq,$B._PyPegen.map_names_to_ids(p,$B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.get_pattern_keys(p,keywords )))),$B.helper_functions.CHECK($B.parser_constants.asdl_pattern_seq,$B._PyPegen.get_patterns(p,keywords )),EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _literal_2; +var _opt_var; +UNUSED(_opt_var); +var cls; +var keywords; +var patterns; +if( +(cls=name_or_attr_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(patterns=positional_patterns_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +&& +(keywords=keyword_patterns_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +&& +(_literal_2=$B._PyPegen.expect_token(p,8)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.MatchClass(cls,patterns,$B.helper_functions.CHECK($B.parser_constants.asdl_identifier_seq,$B._PyPegen.map_names_to_ids(p,$B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.get_pattern_keys(p,keywords )))),$B.helper_functions.CHECK($B.parser_constants.asdl_pattern_seq,$B._PyPegen.get_patterns(p,keywords )),EXTRA); +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_class_pattern_var; +if( +(invalid_class_pattern_var=invalid_class_pattern_rule(p)) +) +{_res=invalid_class_pattern_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function positional_patterns_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var args; +if( +(args=_gather_76_rule(p)) +) +{_res=args; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function keyword_patterns_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _gather_78_var; +if( +(_gather_78_var=_gather_78_rule(p)) +) 
+{_res=_gather_78_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function keyword_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var arg; +var value; +if( +(arg=$B._PyPegen.name_token(p)) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +&& +(value=pattern_rule(p)) +) +{_res=$B._PyPegen.key_pattern_pair(p,arg,value); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function type_alias_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var b; +var n; +var t; +if( +(_keyword=$B._PyPegen.expect_soft_keyword(p,"type")) +&& +(n=$B._PyPegen.name_token(p)) +&& +(t=type_params_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +&& +(b=expression_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B.helper_functions.CHECK_VERSION($B.ast.stmt,12,"Type statement is",new $B._PyAST.TypeAlias($B.helper_functions.CHECK($B.ast.expr,$B._PyPegen.set_expr_context(p,n,$B.parser_constants.Store )),t,b,EXTRA )); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function type_params_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var t; +if( +(_literal=$B._PyPegen.expect_token(p,9)) +&& +(t=type_param_seq_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,10)) +) +{_res=$B.helper_functions.CHECK_VERSION($B.parser_constants.asdl_type_param_seq,12,"Type parameter lists are",t); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function type_param_seq_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +if( +(a=_gather_80_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function type_param_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,type_param_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=$B._PyPegen.name_token(p)) +&& +(b=type_param_bound_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.TypeVar(a. id,b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var colon; +var e; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(a=$B._PyPegen.name_token(p)) +&& +(colon=$B._PyPegen.expect_token(p,11)) +&& +(e=expression_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_STARTING_FROM(p,colon,e.kind==Tuple_kind ? 
"cannot use constraints with TypeVarTuple" :"cannot use bound with TypeVarTuple"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(a=$B._PyPegen.name_token(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.TypeVarTuple(a. id,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var colon; +var e; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(a=$B._PyPegen.name_token(p)) +&& +(colon=$B._PyPegen.expect_token(p,11)) +&& +(e=expression_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_STARTING_FROM(p,colon,e.kind==Tuple_kind ? "cannot use constraints with ParamSpec" :"cannot use bound with ParamSpec"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(a=$B._PyPegen.name_token(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.ParamSpec(a. id,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,type_param_type,_res); +return _res;} +function type_param_bound_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var e; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(e=expression_rule(p)) +) +{_res=e; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function expressions_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +var b; +if( +(a=expression_rule(p)) +&& +(b=_loop1_82_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Tuple($B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.seq_insert_in_front(p,a,b )),$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(a=expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Tuple($B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.singleton_seq(p,a )),$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var expression_var; +if( +(expression_var=expression_rule(p)) +) +{_res=expression_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function expression_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,expression_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 
0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_expression_var; +if( +(invalid_expression_var=invalid_expression_rule(p)) +) +{_res=invalid_expression_var; +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_legacy_expression_var; +if( +(invalid_legacy_expression_var=invalid_legacy_expression_rule(p)) +) +{_res=invalid_legacy_expression_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _keyword_1; +var a; +var b; +var c; +if( +(a=disjunction_rule(p)) +&& +(_keyword=$B._PyPegen.expect_token(p,642)) +&& +(b=disjunction_rule(p)) +&& +(_keyword_1=$B._PyPegen.expect_token(p,645)) +&& +(c=expression_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.IfExp(b,a,c,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var disjunction_var; +if( +(disjunction_var=disjunction_rule(p)) +) +{_res=disjunction_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var lambdef_var; +if( +(lambdef_var=lambdef_rule(p)) +) +{_res=lambdef_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,expression_type,_res); +return _res;} +function yield_expr_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _keyword_1; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,573)) +&& +(_keyword_1=$B._PyPegen.expect_token(p,608)) +&& +(a=expression_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.YieldFrom(a,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,573)) +&& +(a=star_expressions_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Yield(a,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function star_expressions_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +var b; +if( +(a=star_expression_rule(p)) +&& +(b=_loop1_83_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Tuple($B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.seq_insert_in_front(p,a,b )),$B.parser_constants.Load,EXTRA); +break;} 
+p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(a=star_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Tuple($B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.singleton_seq(p,a )),$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_expression_var; +if( +(star_expression_var=star_expression_rule(p)) +) +{_res=star_expression_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function star_expression_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,star_expression_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(a=bitwise_or_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Starred(a,$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var expression_var; +if( +(expression_var=expression_rule(p)) +) +{_res=expression_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,star_expression_type,_res); +return _res;} +function star_named_expressions_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +if( +(a=_gather_84_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function star_named_expression_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(a=bitwise_or_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Starred(a,$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var named_expression_var; +if( +(named_expression_var=named_expression_rule(p)) +) +{_res=named_expression_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function assignment_expression_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _cut_var=0; +var _literal; +var a; +var b; +if( +(a=$B._PyPegen.name_token(p)) +&& +(_literal=$B._PyPegen.expect_token(p,53)) 
+&& +(_cut_var=1) +&& +(b=expression_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B.helper_functions.CHECK_VERSION($B.ast.expr,8,"Assignment expressions are",new $B._PyAST.NamedExpr($B.helper_functions.CHECK($B.ast.expr,$B._PyPegen.set_expr_context(p,a,$B.parser_constants.Store )),b,EXTRA )); +break;} +p.mark=_mark; +if(_cut_var){return NULL;}} +_res=NULL; +break;} +return _res;} +function named_expression_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var assignment_expression_var; +if( +(assignment_expression_var=assignment_expression_rule(p)) +) +{_res=assignment_expression_var; +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_named_expression_var; +if( +(invalid_named_expression_var=invalid_named_expression_rule(p)) +) +{_res=invalid_named_expression_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var expression_var; +if( +(expression_var=expression_rule(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,53) +) +{_res=expression_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function disjunction_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,disjunction_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=conjunction_rule(p)) +&& +(b=_loop1_86_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BoolOp(new $B.ast.Or(),$B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.seq_insert_in_front(p,a,b )),EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var conjunction_var; +if( +(conjunction_var=conjunction_rule(p)) +) +{_res=conjunction_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,disjunction_type,_res); +return _res;} +function conjunction_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,conjunction_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=inversion_rule(p)) +&& +(b=_loop1_87_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BoolOp(new $B.ast.And(),$B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.seq_insert_in_front(p,a,b )),EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var inversion_var; +if( +(inversion_var=inversion_rule(p)) +) +{_res=inversion_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,conjunction_type,_res); +return _res;} 
+function inversion_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,inversion_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,581)) +&& +(a=inversion_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.UnaryOp(new $B.ast.Not(),a,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var comparison_var; +if( +(comparison_var=comparison_rule(p)) +) +{_res=comparison_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,inversion_type,_res); +return _res;} +function comparison_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=bitwise_or_rule(p)) +&& +(b=_loop1_88_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Compare(a,$B.helper_functions.CHECK($B.parser_constants.asdl_int_seq,$B._PyPegen.get_cmpops(p,b )),$B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.get_exprs(p,b )),EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var bitwise_or_var; +if( +(bitwise_or_var=bitwise_or_rule(p)) +) +{_res=bitwise_or_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function compare_op_bitwise_or_pair_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var eq_bitwise_or_var; +if( +(eq_bitwise_or_var=eq_bitwise_or_rule(p)) +) +{_res=eq_bitwise_or_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var noteq_bitwise_or_var; +if( +(noteq_bitwise_or_var=noteq_bitwise_or_rule(p)) +) +{_res=noteq_bitwise_or_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var lte_bitwise_or_var; +if( +(lte_bitwise_or_var=lte_bitwise_or_rule(p)) +) +{_res=lte_bitwise_or_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var lt_bitwise_or_var; +if( +(lt_bitwise_or_var=lt_bitwise_or_rule(p)) +) +{_res=lt_bitwise_or_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var gte_bitwise_or_var; +if( +(gte_bitwise_or_var=gte_bitwise_or_rule(p)) +) +{_res=gte_bitwise_or_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var gt_bitwise_or_var; +if( +(gt_bitwise_or_var=gt_bitwise_or_rule(p)) +) +{_res=gt_bitwise_or_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var notin_bitwise_or_var; +if( +(notin_bitwise_or_var=notin_bitwise_or_rule(p)) +) +{_res=notin_bitwise_or_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var in_bitwise_or_var; +if( +(in_bitwise_or_var=in_bitwise_or_rule(p)) +) +{_res=in_bitwise_or_var; +break;} +p.mark=_mark;} +{ 
+if(p.error_indicator){return NULL;} +var isnot_bitwise_or_var; +if( +(isnot_bitwise_or_var=isnot_bitwise_or_rule(p)) +) +{_res=isnot_bitwise_or_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var is_bitwise_or_var; +if( +(is_bitwise_or_var=is_bitwise_or_rule(p)) +) +{_res=is_bitwise_or_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function eq_bitwise_or_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,27)) +&& +(a=bitwise_or_rule(p)) +) +{_res=$B._PyPegen.cmpop_expr_pair(p,new $B.ast.Eq(),a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function noteq_bitwise_or_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _tmp_89_var; +var a; +if( +(_tmp_89_var=_tmp_89_rule(p)) +&& +(a=bitwise_or_rule(p)) +) +{_res=$B._PyPegen.cmpop_expr_pair(p,new $B.ast.NotEq(),a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lte_bitwise_or_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,29)) +&& +(a=bitwise_or_rule(p)) +) +{_res=$B._PyPegen.cmpop_expr_pair(p,new $B.ast.LtE(),a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lt_bitwise_or_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,20)) +&& +(a=bitwise_or_rule(p)) +) +{_res=$B._PyPegen.cmpop_expr_pair(p,new $B.ast.Lt(),a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function gte_bitwise_or_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,30)) +&& +(a=bitwise_or_rule(p)) +) +{_res=$B._PyPegen.cmpop_expr_pair(p,new $B.ast.GtE(),a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function gt_bitwise_or_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,21)) +&& +(a=bitwise_or_rule(p)) +) +{_res=$B._PyPegen.cmpop_expr_pair(p,new $B.ast.Gt(),a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function notin_bitwise_or_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _keyword_1; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,581)) +&& +(_keyword_1=$B._PyPegen.expect_token(p,651)) +&& +(a=bitwise_or_rule(p)) +) +{_res=$B._PyPegen.cmpop_expr_pair(p,new $B.ast.NotIn(),a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function in_bitwise_or_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,651)) +&& +(a=bitwise_or_rule(p)) +) +{_res=$B._PyPegen.cmpop_expr_pair(p,new $B.ast.In(),a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function isnot_bitwise_or_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var 
_mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _keyword_1; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,582)) +&& +(_keyword_1=$B._PyPegen.expect_token(p,581)) +&& +(a=bitwise_or_rule(p)) +) +{_res=$B._PyPegen.cmpop_expr_pair(p,new $B.ast.IsNot(),a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function is_bitwise_or_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,582)) +&& +(a=bitwise_or_rule(p)) +) +{_res=$B._PyPegen.cmpop_expr_pair(p,new $B.ast.Is(),a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function bitwise_or_raw(){}; +function bitwise_or_rule(p) +{var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,bitwise_or_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +var _resmark=p.mark; +while(1){var tmpvar_2=$B._PyPegen.update_memo(p,_mark,bitwise_or_type,_res); +if(tmpvar_2){return _res;} +p.mark=_mark; +var _raw=bitwise_or_raw(p); +if(p.error_indicator){return NULL;} +if(_raw==NULL ||p.mark <=_resmark) +break; +_resmark=p.mark; +_res=_raw;} +p.mark=_resmark; +return _res;} +function bitwise_or_raw(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=bitwise_or_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,18)) +&& +(b=bitwise_xor_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(a,new $B.ast.BitOr(),b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var bitwise_xor_var; +if( +(bitwise_xor_var=bitwise_xor_rule(p)) +) +{_res=bitwise_xor_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function bitwise_xor_raw(){}; +function bitwise_xor_rule(p) +{var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,bitwise_xor_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +var _resmark=p.mark; +while(1){var tmpvar_3=$B._PyPegen.update_memo(p,_mark,bitwise_xor_type,_res); +if(tmpvar_3){return _res;} +p.mark=_mark; +var _raw=bitwise_xor_raw(p); +if(p.error_indicator){return NULL;} +if(_raw==NULL ||p.mark <=_resmark) +break; +_resmark=p.mark; +_res=_raw;} +p.mark=_resmark; +return _res;} +function bitwise_xor_raw(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=bitwise_xor_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,32)) +&& +(b=bitwise_and_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(a,new $B.ast.BitXor(),b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var bitwise_and_var; +if( +(bitwise_and_var=bitwise_and_rule(p)) +) +{_res=bitwise_and_var; +break;} +p.mark=_mark;} 
+_res=NULL; +break;} +return _res;} +function bitwise_and_raw(){}; +function bitwise_and_rule(p) +{var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,bitwise_and_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +var _resmark=p.mark; +while(1){var tmpvar_4=$B._PyPegen.update_memo(p,_mark,bitwise_and_type,_res); +if(tmpvar_4){return _res;} +p.mark=_mark; +var _raw=bitwise_and_raw(p); +if(p.error_indicator){return NULL;} +if(_raw==NULL ||p.mark <=_resmark) break; -case $B.ast.ClassDef: -var tmp; -if(!symtable_add_def(st,s.name,DEF_LOCAL,LOCATION(s))) -VISIT_QUIT(st,0) -VISIT_SEQ(st,expr,s.bases) -VISIT_SEQ(st,keyword,s.keywords) -if(s.decorator_list) -VISIT_SEQ(st,expr,s.decorator_list); -if(s.type_params.length > 0){if(!symtable_enter_type_param_block(st,s.name,s.type_params,false,false,s.constructor,LOCATION(s))){VISIT_QUIT(st,0);} -VISIT_SEQ(st,type_param,s.type_params);} -VISIT_SEQ(st,expr,s.bases); -VISIT_SEQ(st,keyword,s.keywords); -if(!symtable_enter_block(st,s.name,ClassBlock,s,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset)) -VISIT_QUIT(st,0) -tmp=st.private -st.private=s.name -if(s.type_params.length > 0){if(!symtable_add_def(st,'__type_params__',DEF_LOCAL,LOCATION(s))){VISIT_QUIT(st,0);} -if(!symtable_add_def(st,'type_params',USE,LOCATION(s))){VISIT_QUIT(st,0);}} -VISIT_SEQ(st,stmt,s.body) -st.private=tmp -if(! symtable_exit_block(st)) -VISIT_QUIT(st,0) -if(s.type_params.length > 0){if(!symtable_exit_block(st)) -VISIT_QUIT(st,0);} -break -case $B.ast.TypeAlias: -VISIT(st,expr,s.name); -assert(s.name instanceof $B.ast.Name); -var name=s.name.id,is_in_class=st.cur.type===ClassBlock,is_generic=s.type_params.length > 0 -if(is_generic){if(!symtable_enter_type_param_block( -st,name,s.type_params,false,false,s.kind,LOCATION(s))){VISIT_QUIT(st,0);} -VISIT_SEQ(st,type_param,s.type_params);} -if(!symtable_enter_block(st,name,TypeAliasBlock,s,LOCATION(s))){VISIT_QUIT(st,0);} -st.cur.can_see_class_scope=is_in_class; -if(is_in_class && !symtable_add_def(st,'__classdict__',USE,LOCATION(s.value))){VISIT_QUIT(st,0);} -VISIT(st,expr,s.value); -if(!symtable_exit_block(st)){VISIT_QUIT(st,0);} -if(is_generic){if(!symtable_exit_block(st)) -VISIT_QUIT(st,0);} -break -case $B.ast.Return: -if(s.value){VISIT(st,expr,s.value) -st.cur.returns_value=1} -break -case $B.ast.Delete: -VISIT_SEQ(st,expr,s.targets) -break -case $B.ast.Assign: -VISIT_SEQ(st,expr,s.targets) -VISIT(st,expr,s.value) -break -case $B.ast.AnnAssign: -if(s.target instanceof $B.ast.Name){var e_name=s.target -var cur=symtable_lookup(st,e_name.id) -if(cur < 0){VISIT_QUIT(st,0)} -if((cur &(DEF_GLOBAL |DEF_NONLOCAL)) -&&(st.cur.symbols !=st.global) -&& s.simple){var exc=PyErr_Format(_b_.SyntaxError,cur & DEF_GLOBAL ? GLOBAL_ANNOT :NONLOCAL_ANNOT,e_name.id) -exc.args[1]=[st.filename,s.lineno,s.col_offset+1,s.end_lineno,s.end_col_offset+1] -throw exc} -if(s.simple && -! 
symtable_add_def(st,e_name.id,DEF_ANNOT |DEF_LOCAL,LOCATION(e_name))){VISIT_QUIT(st,0)}else{if(s.value -&& !symtable_add_def(st,e_name.id,DEF_LOCAL,LOCATION(e_name))){VISIT_QUIT(st,0)}}}else{VISIT(st,expr,s.target)} -if(!visitor.annotation(st,s.annotation)){VISIT_QUIT(st,0)} -if(s.value){VISIT(st,expr,s.value)} -break -case $B.ast.AugAssign: -VISIT(st,expr,s.target) -VISIT(st,expr,s.value) -break -case $B.ast.For: -VISIT(st,expr,s.target) -VISIT(st,expr,s.iter) -VISIT_SEQ(st,stmt,s.body) -if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)} -break -case $B.ast.While: -VISIT(st,expr,s.test) -VISIT_SEQ(st,stmt,s.body) -if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)} -break -case $B.ast.If: -VISIT(st,expr,s.test) -VISIT_SEQ(st,stmt,s.body) -if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)} -break -case $B.ast.Match: -VISIT(st,expr,s.subject) -VISIT_SEQ(st,match_case,s.cases) -break -case $B.ast.Raise: -if(s.exc){VISIT(st,expr,s.exc) -if(s.cause){VISIT(st,expr,s.cause)}} -break -case $B.ast.Try: -VISIT_SEQ(st,stmt,s.body) -VISIT_SEQ(st,stmt,s.orelse) -VISIT_SEQ(st,excepthandler,s.handlers) -VISIT_SEQ(st,stmt,s.finalbody) -break -case $B.ast.TryStar: -VISIT_SEQ(st,stmt,s.body) -VISIT_SEQ(st,stmt,s.orelse) -VISIT_SEQ(st,excepthandler,s.handlers) -VISIT_SEQ(st,stmt,s.finalbody) -break -case $B.ast.Assert: -VISIT(st,expr,s.test) -if(s.msg){VISIT(st,expr,s.msg);} -break -case $B.ast.Import: -VISIT_SEQ(st,alias,s.names) -break -case $B.ast.ImportFrom: -VISIT_SEQ(st,alias,s.names) -break -case $B.ast.Global: -var seq=s.names -for(var name of seq){var cur=symtable_lookup(st,name) -if(cur < 0){VISIT_QUIT(st,0)} -if(cur &(DEF_PARAM |DEF_LOCAL |USE |DEF_ANNOT)){var msg -if(cur & DEF_PARAM){msg=GLOBAL_PARAM}else if(cur & USE){msg=GLOBAL_AFTER_USE}else if(cur & DEF_ANNOT){msg=GLOBAL_ANNOT}else{ -msg=GLOBAL_AFTER_ASSIGN} -var exc=PyErr_Format(_b_.SyntaxError,msg,name) -set_exc_info(exc,st.filename,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset) -throw exc} -if(! symtable_add_def(st,name,DEF_GLOBAL,LOCATION(s))) -VISIT_QUIT(st,0) -if(! 
symtable_record_directive(st,name,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset)) -VISIT_QUIT(st,0)} -break -case $B.ast.Nonlocal: -var seq=s.names; -for(var name of seq){var cur=symtable_lookup(st,name) -if(cur < 0){VISIT_QUIT(st,0)} -if(cur &(DEF_PARAM |DEF_LOCAL |USE |DEF_ANNOT)){var msg -if(cur & DEF_PARAM){msg=NONLOCAL_PARAM}else if(cur & USE){msg=NONLOCAL_AFTER_USE}else if(cur & DEF_ANNOT){msg=NONLOCAL_ANNOT}else{ -msg=NONLOCAL_AFTER_ASSIGN} -var exc=PyErr_Format(_b_.SyntaxError,msg,name) -set_exc_info(exc,st.filename,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset) -throw exc} -if(!symtable_add_def(st,name,DEF_NONLOCAL,LOCATION(s))) -VISIT_QUIT(st,0) -if(!symtable_record_directive(st,name,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset)) -VISIT_QUIT(st,0)} -break -case $B.ast.Expr: -VISIT(st,expr,s.value) -break -case $B.ast.Pass: -case $B.ast.Break: -case $B.ast.Continue: -break -case $B.ast.With: -VISIT_SEQ(st,'withitem',s.items) -VISIT_SEQ(st,stmt,s.body) -break -case $B.ast.AsyncFunctionDef: -if(!symtable_add_def(st,s.name,DEF_LOCAL,LOCATION(s))) -VISIT_QUIT(st,0) -if(s.args.defaults) -VISIT_SEQ(st,expr,s.args.defaults) -if(s.args.kw_defaults) -VISIT_SEQ_WITH_NULL(st,expr,s.args.kw_defaults) -if(!visitor.annotations(st,s,s.args,s.returns)) -VISIT_QUIT(st,0) -if(s.decorator_list) -VISIT_SEQ(st,expr,s.decorator_list) -if(s.type_params.length > 0){if(!symtable_enter_type_param_block( -st,s.name,s.type_params,s.args.defaults !=NULL,has_kwonlydefaults(s.args.kwonlyargs,s.args.kw_defaults),s.constructor,LOCATION(s))){VISIT_QUIT(st,0);} -VISIT_SEQ(st,type_param,s.type_params);} -if(!visitor.annotations(st,s,s.args,s.returns)) -VISIT_QUIT(st,0); -if(!symtable_enter_block(st,s.name,FunctionBlock,s,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset)) -VISIT_QUIT(st,0) -st.cur.coroutine=1 -VISIT(st,'arguments',s.args) -VISIT_SEQ(st,stmt,s.body) -if(! 
symtable_exit_block(st)) -VISIT_QUIT(st,0) -if(s.type_params.length > 0){if(!symtable_exit_block(st)) -VISIT_QUIT(st,0);} -break -case $B.ast.AsyncWith: -VISIT_SEQ(st,withitem,s.items) -VISIT_SEQ(st,stmt,s.body) -break -case $B.ast.AsyncFor: -VISIT(st,expr,s.target) -VISIT(st,expr,s.iter) -VISIT_SEQ(st,stmt,s.body) -if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)} -break -default: -console.log('unhandled',s) -break} -VISIT_QUIT(st,1)} -function symtable_extend_namedexpr_scope(st,e){assert(st.stack) -assert(e instanceof $B.ast.Name) -var target_name=e.id -var i,size,ste -size=st.stack.length -assert(size) -for(i=size-1;i >=0;i--){ste=st.stack[i] -if(ste.comprehension){let target_in_scope=_PyST_GetSymbol(ste,target_name); -if(target_in_scope & DEF_COMP_ITER){let exc=PyErr_Format(_b_.SyntaxError,NAMED_EXPR_COMP_CONFLICT,target_name); -set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.ed_lineno,e.end_col_offset) -throw exc} -continue;} -if(_PyST_IsFunctionLike(ste)){let target_in_scope=_PyST_GetSymbol(ste,target_name); -if(target_in_scope & DEF_GLOBAL){if(!symtable_add_def(st,target_name,DEF_GLOBAL,LOCATION(e))) -VISIT_QUIT(st,0);}else{ -if(!symtable_add_def(st,target_name,DEF_NONLOCAL,LOCATION(e))) -VISIT_QUIT(st,0);} -if(!symtable_record_directive(st,target_name,LOCATION(e))) -VISIT_QUIT(st,0); -return symtable_add_def_helper(st,target_name,DEF_LOCAL,ste,LOCATION(e));} -if(ste.type==ModuleBlock){if(!symtable_add_def(st,target_name,DEF_GLOBAL,LOCATION(e))) -VISIT_QUIT(st,0); -if(!symtable_record_directive(st,target_name,LOCATION(e))) -VISIT_QUIT(st,0); -return symtable_add_def_helper(st,target_name,DEF_GLOBAL,ste,LOCATION(e));} -if(ste.type==ClassBlock){let exc=PyErr_Format(_b_.SyntaxError,NAMED_EXPR_COMP_IN_CLASS); -set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset); -throw exc}} -assert(0); -return 0;} -function symtable_handle_namedexpr(st,e){if(st.cur.comp_iter_expr > 0){ -var exc=PyErr_Format(PyExc_SyntaxError,NAMED_EXPR_COMP_ITER_EXPR); -set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset); -throw exc} -if(st.cur.comprehension){ -if(!symtable_extend_namedexpr_scope(st,e.target)) -return 0;} -VISIT(st,expr,e.value); -VISIT(st,expr,e.target); -return 1;} -const alias='alias',comprehension='comprehension',excepthandler='excepthandler',expr='expr',keyword='keyword',match_case='match_case',pattern='pattern',stmt='stmt',type_param='type_param',withitem='withitem' -visitor.expr=function(st,e){switch(e.constructor){case $B.ast.NamedExpr: -if(!symtable_raise_if_annotation_block(st,"named expression",e)){VISIT_QUIT(st,0);} -if(!symtable_handle_namedexpr(st,e)) -VISIT_QUIT(st,0); +_resmark=p.mark; +_res=_raw;} +p.mark=_resmark; +return _res;} +function bitwise_and_raw(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=bitwise_and_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,19)) +&& +(b=shift_expr_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(a,new $B.ast.BitAnd(),b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var shift_expr_var; +if( 
+(shift_expr_var=shift_expr_rule(p)) +) +{_res=shift_expr_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function shift_expr_raw(){}; +function shift_expr_rule(p) +{var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,shift_expr_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +var _resmark=p.mark; +while(1){var tmpvar_5=$B._PyPegen.update_memo(p,_mark,shift_expr_type,_res); +if(tmpvar_5){return _res;} +p.mark=_mark; +var _raw=shift_expr_raw(p); +if(p.error_indicator){return NULL;} +if(_raw==NULL ||p.mark <=_resmark) break; -case $B.ast.BoolOp: -VISIT_SEQ(st,'expr',e.values); +_resmark=p.mark; +_res=_raw;} +p.mark=_resmark; +return _res;} +function shift_expr_raw(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=shift_expr_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,33)) +&& +(b=sum_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(a,new $B.ast.LShift(),b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=shift_expr_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,34)) +&& +(b=sum_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(a,new $B.ast.RShift(),b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var sum_var; +if( +(sum_var=sum_rule(p)) +) +{_res=sum_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function sum_raw(){}; +function sum_rule(p) +{var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,sum_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +var _resmark=p.mark; +while(1){var tmpvar_6=$B._PyPegen.update_memo(p,_mark,sum_type,_res); +if(tmpvar_6){return _res;} +p.mark=_mark; +var _raw=sum_raw(p); +if(p.error_indicator){return NULL;} +if(_raw==NULL ||p.mark <=_resmark) break; -case $B.ast.BinOp: -VISIT(st,'expr',e.left); -VISIT(st,'expr',e.right); +_resmark=p.mark; +_res=_raw;} +p.mark=_resmark; +return _res;} +function sum_raw(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=sum_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,14)) +&& +(b=term_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(a,new $B.ast.Add(),b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=sum_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,15)) +&& +(b=term_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; 
+EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(a,new $B.ast.Sub(),b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var term_var; +if( +(term_var=term_rule(p)) +) +{_res=term_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function term_raw(){}; +function term_rule(p) +{var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,term_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +var _resmark=p.mark; +while(1){var tmpvar_7=$B._PyPegen.update_memo(p,_mark,term_type,_res); +if(tmpvar_7){return _res;} +p.mark=_mark; +var _raw=term_raw(p); +if(p.error_indicator){return NULL;} +if(_raw==NULL ||p.mark <=_resmark) break; -case $B.ast.UnaryOp: -VISIT(st,'expr',e.operand); +_resmark=p.mark; +_res=_raw;} +p.mark=_resmark; +return _res;} +function term_raw(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=term_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(b=factor_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(a,new $B.ast.Mult(),b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=term_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,17)) +&& +(b=factor_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(a,new $B.ast.Div(),b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=term_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,47)) +&& +(b=factor_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(a,new $B.ast.FloorDiv(),b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=term_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,24)) +&& +(b=factor_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(a,new $B.ast.Mod(),b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=term_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,49)) +&& +(b=factor_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B.helper_functions.CHECK_VERSION($B.ast.expr,5,"The '@' operator is",new $B._PyAST.BinOp(a,new $B.ast.MatMult(),b,EXTRA )); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var factor_var; +if( +(factor_var=factor_rule(p)) +) +{_res=factor_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function factor_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var 
_res={value:NULL}; +if($B._PyPegen.is_memoized(p,factor_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,14)) +&& +(a=factor_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.UnaryOp(new $B.ast.UAdd(),a,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,15)) +&& +(a=factor_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.UnaryOp(new $B.ast.USub(),a,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,31)) +&& +(a=factor_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.UnaryOp(new $B.ast.Invert(),a,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var power_var; +if( +(power_var=power_rule(p)) +) +{_res=power_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,factor_type,_res); +return _res;} +function power_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=await_primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(b=factor_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.BinOp(a,new $B.ast.Pow(),b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var await_primary_var; +if( +(await_primary_var=await_primary_rule(p)) +) +{_res=await_primary_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function await_primary_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,await_primary_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +var await_var; +if( +(await_var=$B._PyPegen.expect_token(p,AWAIT)) +&& +(a=primary_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B.helper_functions.CHECK_VERSION($B.ast.expr,5,"Await expressions are",new $B._PyAST.Await(a,EXTRA )); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var primary_var; +if( 
+(primary_var=primary_rule(p)) +) +{_res=primary_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,await_primary_type,_res); +return _res;} +function primary_raw(){}; +function primary_rule(p) +{var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,primary_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +var _resmark=p.mark; +while(1){var tmpvar_8=$B._PyPegen.update_memo(p,_mark,primary_type,_res); +if(tmpvar_8){return _res;} +p.mark=_mark; +var _raw=primary_raw(p); +if(p.error_indicator){return NULL;} +if(_raw==NULL ||p.mark <=_resmark) +break; +_resmark=p.mark; +_res=_raw;} +p.mark=_resmark; +return _res;} +function primary_raw(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,23)) +&& +(b=$B._PyPegen.name_token(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Attribute(a,b. id,$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=primary_rule(p)) +&& +(b=genexp_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Call(a,$B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.singleton_seq(p,b )),$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(a=primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(b=arguments_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Call(a,(b )?(b ). args :$B.parser_constants.NULL,(b )?(b ). 
keywords :$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(a=primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,9)) +&& +(b=slices_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,10)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Subscript(a,b,$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var atom_var; +if( +(atom_var=atom_rule(p)) +) +{_res=atom_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function slices_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=slice_rule(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,12) +) +{_res=a; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +if( +(a=_gather_90_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Tuple(a,$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function slice_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +var c; +if( +(a=expression_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(b=expression_rule(p),!p.error_indicator) +&& +(c=_tmp_92_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Slice(a,b,c,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=named_expression_rule(p)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function atom_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var name_var; +if( +(name_var=$B._PyPegen.name_token(p)) +) +{_res=name_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,601)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Constant($B.parser_constants.Py_True,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( 
+(_keyword=$B._PyPegen.expect_token(p,603)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Constant($B.parser_constants.Py_False,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,602)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Constant($B.parser_constants.Py_None,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var strings_var; +if( +$B._PyPegen.lookahead(1,_tmp_93_rule,p) +&& +(strings_var=strings_rule(p)) +) +{_res=strings_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var number_var; +if( +(number_var=$B._PyPegen.number_token(p)) +) +{_res=number_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _tmp_94_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,7) +&& +(_tmp_94_var=_tmp_94_rule(p)) +) +{_res=_tmp_94_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _tmp_95_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,9) +&& +(_tmp_95_var=_tmp_95_rule(p)) +) +{_res=_tmp_95_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _tmp_96_var; +if( +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,25) +&& +(_tmp_96_var=_tmp_96_rule(p)) +) +{_res=_tmp_96_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,52)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Constant($B.parser_constants.Py_Ellipsis,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function group_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=_tmp_97_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{_res=a; +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_group_var; +if( +(invalid_group_var=invalid_group_rule(p)) +) +{_res=invalid_group_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lambdef_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var a; +var b; +if( +(_keyword=$B._PyPegen.expect_token(p,600)) +&& +(a=lambda_params_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(b=expression_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Lambda((a )? 
a :$B.helper_functions.CHECK($B.ast.arguments,$B._PyPegen.empty_arguments(p )),b,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lambda_params_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_lambda_parameters_var; +if( +(invalid_lambda_parameters_var=invalid_lambda_parameters_rule(p)) +) +{_res=invalid_lambda_parameters_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var lambda_parameters_var; +if( +(lambda_parameters_var=lambda_parameters_rule(p)) +) +{_res=lambda_parameters_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lambda_parameters_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +var c; +var d; +if( +(a=lambda_slash_no_default_rule(p)) +&& +(b=_loop0_98_rule(p)) +&& +(c=_loop0_99_rule(p)) +&& +(d=lambda_star_etc_rule(p),!p.error_indicator) +) +{_res=$B.helper_functions.CHECK_VERSION($B.ast.arguments,8,"Positional-only parameters are",$B._PyPegen.make_arguments(p,a,$B.parser_constants.NULL,b,c,d )); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +var c; +if( +(a=lambda_slash_with_default_rule(p)) +&& +(b=_loop0_100_rule(p)) +&& +(c=lambda_star_etc_rule(p),!p.error_indicator) +) +{_res=$B.helper_functions.CHECK_VERSION($B.ast.arguments,8,"Positional-only parameters are",$B._PyPegen.make_arguments(p,$B.parser_constants.NULL,a,$B.parser_constants.NULL,b,c )); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +var c; +if( +(a=_loop1_101_rule(p)) +&& +(b=_loop0_102_rule(p)) +&& +(c=lambda_star_etc_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.make_arguments(p,$B.parser_constants.NULL,$B.parser_constants.NULL,a,b,c); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=_loop1_103_rule(p)) +&& +(b=lambda_star_etc_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.make_arguments(p,$B.parser_constants.NULL,$B.parser_constants.NULL,$B.parser_constants.NULL,a,b); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=lambda_star_etc_rule(p)) +) +{_res=$B._PyPegen.make_arguments(p,$B.parser_constants.NULL,$B.parser_constants.NULL,$B.parser_constants.NULL,$B.parser_constants.NULL,a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lambda_slash_no_default_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(a=_loop1_104_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,17)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +) +{_res=a; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(a=_loop1_105_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,17)) +&& +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,11) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lambda_slash_with_default_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(a=_loop0_106_rule(p)) +&& +(b=_loop1_107_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,17)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +) 
+{_res=$B._PyPegen.slash_with_default(p,a,b); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=_loop0_108_rule(p)) +&& +(b=_loop1_109_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,17)) +&& +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,11) +) +{_res=$B._PyPegen.slash_with_default(p,a,b); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lambda_star_etc_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_lambda_star_etc_var; +if( +(invalid_lambda_star_etc_var=invalid_lambda_star_etc_rule(p)) +) +{_res=invalid_lambda_star_etc_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +var c; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(a=lambda_param_no_default_rule(p)) +&& +(b=_loop0_110_rule(p)) +&& +(c=lambda_kwds_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.star_etc(p,a,b,c); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var b; +var c; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +&& +(b=_loop1_111_rule(p)) +&& +(c=lambda_kwds_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.star_etc(p,$B.parser_constants.NULL,b,c); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=lambda_kwds_rule(p)) +) +{_res=$B._PyPegen.star_etc(p,$B.parser_constants.NULL,$B.parser_constants.NULL,a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lambda_kwds_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_lambda_kwds_var; +if( +(invalid_lambda_kwds_var=invalid_lambda_kwds_rule(p)) +) +{_res=invalid_lambda_kwds_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(a=lambda_param_no_default_rule(p)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lambda_param_no_default_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(a=lambda_param_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=a; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=lambda_param_rule(p)) +&& +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,11) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lambda_param_with_default_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var c; +if( +(a=lambda_param_rule(p)) +&& +(c=default_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=$B._PyPegen.name_default_pair(p,a,c,$B.parser_constants.NULL); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var c; +if( +(a=lambda_param_rule(p)) +&& +(c=default_rule(p)) +&& +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,11) +) +{_res=$B._PyPegen.name_default_pair(p,a,c,$B.parser_constants.NULL); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lambda_param_maybe_default_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; 
+var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var c; +if( +(a=lambda_param_rule(p)) +&& +(c=default_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=$B._PyPegen.name_default_pair(p,a,c,$B.parser_constants.NULL); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var c; +if( +(a=lambda_param_rule(p)) +&& +(c=default_rule(p),!p.error_indicator) +&& +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,11) +) +{_res=$B._PyPegen.name_default_pair(p,a,c,$B.parser_constants.NULL); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function lambda_param_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=$B._PyPegen.name_token(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.arg(a. id,$B.parser_constants.NULL,$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function fstring_middle_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var fstring_replacement_field_var; +if( +(fstring_replacement_field_var=fstring_replacement_field_rule(p)) +) +{_res=fstring_replacement_field_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var t; +if( +(t=$B._PyPegen.expect_token(p,FSTRING_MIDDLE)) +) +{_res=$B._PyPegen.constant_from_token(p,t); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function fstring_replacement_field_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var conversion; +var debug_expr; +var format; +var rbrace; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(a=_tmp_112_rule(p)) +&& +(debug_expr=$B._PyPegen.expect_token(p,22),!p.error_indicator) +&& +(conversion=fstring_conversion_rule(p),!p.error_indicator) +&& +(format=fstring_full_format_spec_rule(p),!p.error_indicator) +&& +(rbrace=$B._PyPegen.expect_token(p,26)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B._PyPegen.formatted_value(p,a,debug_expr,conversion,format,rbrace,EXTRA); +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_replacement_field_var; +if( +(invalid_replacement_field_var=invalid_replacement_field_rule(p)) +) +{_res=invalid_replacement_field_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function fstring_conversion_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var conv; +var conv_token; +if( +(conv_token=$B._PyPegen.expect_token(p,54)) +&& +(conv=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.check_fstring_conversion(p,conv_token,conv); +break;} 
+p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function fstring_full_format_spec_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var colon; +var spec; +if( +(colon=$B._PyPegen.expect_token(p,11)) +&& +(spec=_loop0_113_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B._PyPegen.setup_full_format_spec(p,colon,spec,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function fstring_format_spec_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var t; +if( +(t=$B._PyPegen.expect_token(p,FSTRING_MIDDLE)) +) +{_res=$B._PyPegen.decoded_constant_from_token(p,t); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var fstring_replacement_field_var; +if( +(fstring_replacement_field_var=fstring_replacement_field_rule(p)) +) +{_res=fstring_replacement_field_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function fstring_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +var c; +if( +(a=$B._PyPegen.expect_token(p,FSTRING_START)) +&& +(b=_loop0_114_rule(p)) +&& +(c=$B._PyPegen.expect_token(p,FSTRING_END)) +) +{_res=$B._PyPegen.joined_str(p,a,b,c); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function string_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var s; +if( +(s=$B._PyPegen.string_token(p)) +) +{_res=$B._PyPegen.constant_from_string(p,s); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function strings_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,strings_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=_loop1_115_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B._PyPegen.concatenate_strings(p,a,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,strings_type,_res); +return _res;} +function list_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,9)) +&& +(a=star_named_expressions_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,10)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; 
+EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.List(a,$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function tuple_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=_tmp_116_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Tuple(a,$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function set_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(a=star_named_expressions_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,26)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Set(a,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function dict_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(a=double_starred_kvpairs_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,26)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Dict($B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.get_keys(p,a )),$B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.get_values(p,a )),EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var invalid_double_starred_kvpairs_var; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(invalid_double_starred_kvpairs_var=invalid_double_starred_kvpairs_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,26)) +) +{_res=$B._PyPegen.dummy_name(p,_literal,invalid_double_starred_kvpairs_var,_literal_1); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function double_starred_kvpairs_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +if( +(a=_gather_117_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function double_starred_kvpair_rule(p) 
+{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(a=bitwise_or_rule(p)) +) +{_res=$B._PyPegen.key_value_pair(p,$B.parser_constants.NULL,a); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var kvpair_var; +if( +(kvpair_var=kvpair_rule(p)) +) +{_res=kvpair_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function kvpair_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(b=expression_rule(p)) +) +{_res=$B._PyPegen.key_value_pair(p,a,b); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function for_if_clauses_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=_loop1_119_rule(p)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function for_if_clause_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _cut_var=0; +var _keyword; +var _keyword_1; +var a; +var async_var; +var b; +var c; +if( +(async_var=$B._PyPegen.expect_token(p,ASYNC)) +&& +(_keyword=$B._PyPegen.expect_token(p,650)) +&& +(a=star_targets_rule(p)) +&& +(_keyword_1=$B._PyPegen.expect_token(p,651)) +&& +(_cut_var=1) +&& +(b=disjunction_rule(p)) +&& +(c=_loop0_120_rule(p)) +) +{_res=$B.helper_functions.CHECK_VERSION($B.ast.comprehension,6,"Async comprehensions are",new $B._PyAST.comprehension(a,b,c,1,p.arena )); +break;} +p.mark=_mark; +if(_cut_var){return NULL;}} +{ +if(p.error_indicator){return NULL;} +var _cut_var=0; +var _keyword; +var _keyword_1; +var a; +var b; +var c; +if( +(_keyword=$B._PyPegen.expect_token(p,650)) +&& +(a=star_targets_rule(p)) +&& +(_keyword_1=$B._PyPegen.expect_token(p,651)) +&& +(_cut_var=1) +&& +(b=disjunction_rule(p)) +&& +(c=_loop0_121_rule(p)) +) +{_res=new $B._PyAST.comprehension(a,b,c,0,p.arena); +break;} +p.mark=_mark; +if(_cut_var){return NULL;}} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_for_target_var; +if( +(invalid_for_target_var=invalid_for_target_rule(p)) +) +{_res=invalid_for_target_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function listcomp_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(_literal=$B._PyPegen.expect_token(p,9)) +&& +(a=named_expression_rule(p)) +&& +(b=for_if_clauses_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,10)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.ListComp(a,b,EXTRA); +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_comprehension_var; +if( +(invalid_comprehension_var=invalid_comprehension_rule(p)) +) +{_res=invalid_comprehension_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} 
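// The comprehension rules that follow (setcomp_rule, genexp_rule, dictcomp_rule) appear to
// follow the same shape as listcomp_rule above, and that shape is shared by every generated
// rule function in this file: record the token position in _mark, try each grammar
// alternative in order, restore p.mark when an alternative fails, and on success build the
// AST node with the EXTRA start/end positions taken from the surrounding tokens.
// A simplified illustrative sketch of that pattern (the name some_rule is hypothetical,
// not part of the generated output) would look roughly like:
//
//   function some_rule(p){
//       if(p.error_indicator){return NULL;}
//       var _mark = p.mark;            // remember where this attempt started
//       // alternative 1: if every sub-rule matches, build and return the AST node
//       p.mark = _mark;                // otherwise backtrack to the saved position
//       // alternative 2 ... and so on; return NULL when no alternative matches
//   }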
+function setcomp_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(a=named_expression_rule(p)) +&& +(b=for_if_clauses_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,26)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.SetComp(a,b,EXTRA); +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_comprehension_var; +if( +(invalid_comprehension_var=invalid_comprehension_rule(p)) +) +{_res=invalid_comprehension_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function genexp_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=_tmp_122_rule(p)) +&& +(b=for_if_clauses_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.GeneratorExp(a,b,EXTRA); +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_comprehension_var; +if( +(invalid_comprehension_var=invalid_comprehension_rule(p)) +) +{_res=invalid_comprehension_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function dictcomp_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(a=kvpair_rule(p)) +&& +(b=for_if_clauses_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,26)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.DictComp(a.key,a.value,b,EXTRA); +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_dict_comprehension_var; +if( +(invalid_dict_comprehension_var=invalid_dict_comprehension_rule(p)) +) +{_res=invalid_dict_comprehension_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function arguments_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,arguments_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +if( +(a=args_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +&& 
+$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,8) +) +{_res=a; +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_arguments_var; +if( +(invalid_arguments_var=invalid_arguments_rule(p)) +) +{_res=invalid_arguments_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,arguments_type,_res); +return _res;} +function args_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=_gather_123_rule(p)) +&& +(b=_tmp_125_rule(p),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B._PyPegen.collect_call_seqs(p,a,b,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=kwargs_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Call($B._PyPegen.dummy_name(p ),$B.helper_functions.CHECK_NULL_ALLOWED($B.parser_constants.asdl_expr_seq,$B._PyPegen.seq_extract_starred_exprs(p,a )),$B.helper_functions.CHECK_NULL_ALLOWED($B.parser_constants.asdl_keyword_seq,$B._PyPegen.seq_delete_starred_exprs(p,a )),EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function kwargs_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=_gather_126_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(b=_gather_128_rule(p)) +) +{_res=$B._PyPegen.join_sequences(p,a,b); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _gather_130_var; +if( +(_gather_130_var=_gather_130_rule(p)) +) +{_res=_gather_130_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _gather_132_var; +if( +(_gather_132_var=_gather_132_rule(p)) +) +{_res=_gather_132_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function starred_expression_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_starred_expression_var; +if( +(invalid_starred_expression_var=invalid_starred_expression_rule(p)) +) +{_res=invalid_starred_expression_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(a=expression_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Starred(a,$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function kwarg_or_starred_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 
0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_kwarg_var; +if( +(invalid_kwarg_var=invalid_kwarg_rule(p)) +) +{_res=invalid_kwarg_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=$B._PyPegen.name_token(p)) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +&& +(b=expression_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B._PyPegen.keyword_or_starred(p,$B.helper_functions.CHECK($B.ast.keyword,new $B._PyAST.keyword(a. id,b,EXTRA )),1); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=starred_expression_rule(p)) +) +{_res=$B._PyPegen.keyword_or_starred(p,a,0); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function kwarg_or_double_starred_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_kwarg_var; +if( +(invalid_kwarg_var=invalid_kwarg_rule(p)) +) +{_res=invalid_kwarg_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=$B._PyPegen.name_token(p)) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +&& +(b=expression_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B._PyPegen.keyword_or_starred(p,$B.helper_functions.CHECK($B.ast.keyword,new $B._PyAST.keyword(a. 
id,b,EXTRA )),1); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(a=expression_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=$B._PyPegen.keyword_or_starred(p,$B.helper_functions.CHECK($B.ast.keyword,new $B._PyAST.keyword($B.parser_constants.NULL,a,EXTRA )),1); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function star_targets_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=star_target_rule(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,12) +) +{_res=a; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +var b; +if( +(a=star_target_rule(p)) +&& +(b=_loop0_134_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Tuple($B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.seq_insert_in_front(p,a,b )),$B.parser_constants.Store,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function star_targets_list_seq_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +if( +(a=_gather_135_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function star_targets_tuple_seq_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +var b; +if( +(a=star_target_rule(p)) +&& +(b=_loop1_137_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +) +{_res=$B._PyPegen.seq_insert_in_front(p,a,b); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(a=star_target_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=$B._PyPegen.singleton_seq(p,a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function star_target_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,star_target_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(a=_tmp_138_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Starred($B.helper_functions.CHECK($B.ast.expr,$B._PyPegen.set_expr_context(p,a,$B.parser_constants.Store )),$B.parser_constants.Store,EXTRA); 
+break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var target_with_star_atom_var; +if( +(target_with_star_atom_var=target_with_star_atom_rule(p)) +) +{_res=target_with_star_atom_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,star_target_type,_res); +return _res;} +function target_with_star_atom_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,target_with_star_atom_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=t_primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,23)) +&& +(b=$B._PyPegen.name_token(p)) +&& +$B._PyPegen.lookahead(0,t_lookahead_rule,p) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Attribute(a,b. id,$B.parser_constants.Store,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(a=t_primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,9)) +&& +(b=slices_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,10)) +&& +$B._PyPegen.lookahead(0,t_lookahead_rule,p) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Subscript(a,b,$B.parser_constants.Store,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_atom_var; +if( +(star_atom_var=star_atom_rule(p)) +) +{_res=star_atom_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,target_with_star_atom_type,_res); +return _res;} +function star_atom_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.set_expr_context(p,a,$B.parser_constants.Store); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=target_with_star_atom_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{_res=$B._PyPegen.set_expr_context(p,a,$B.parser_constants.Store); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=star_targets_tuple_seq_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Tuple(a,$B.parser_constants.Store,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,9)) +&& +(a=star_targets_list_seq_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,10)) 
+) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.List(a,$B.parser_constants.Store,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function single_target_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var single_subscript_attribute_target_var; +if( +(single_subscript_attribute_target_var=single_subscript_attribute_target_rule(p)) +) +{_res=single_subscript_attribute_target_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.set_expr_context(p,a,$B.parser_constants.Store); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=single_target_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function single_subscript_attribute_target_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=t_primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,23)) +&& +(b=$B._PyPegen.name_token(p)) +&& +$B._PyPegen.lookahead(0,t_lookahead_rule,p) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Attribute(a,b. 
id,$B.parser_constants.Store,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(a=t_primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,9)) +&& +(b=slices_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,10)) +&& +$B._PyPegen.lookahead(0,t_lookahead_rule,p) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Subscript(a,b,$B.parser_constants.Store,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function t_primary_raw(){}; +function t_primary_rule(p) +{var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,t_primary_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +var _resmark=p.mark; +while(1){var tmpvar_9=$B._PyPegen.update_memo(p,_mark,t_primary_type,_res); +if(tmpvar_9){return _res;} +p.mark=_mark; +var _raw=t_primary_raw(p); +if(p.error_indicator){return NULL;} +if(_raw==NULL ||p.mark <=_resmark) break; -case $B.ast.Lambda:{if(!GET_IDENTIFIER('lambda')) -VISIT_QUIT(st,0); -if(e.args.defaults) -VISIT_SEQ(st,'expr',e.args.defaults); -if(e.args.kw_defaults) -VISIT_SEQ_WITH_NULL(st,'expr',e.args.kw_defaults); -if(!symtable_enter_block(st,lambda,FunctionBlock,e,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset)) -VISIT_QUIT(st,0); -VISIT(st,'arguments',e.args); -VISIT(st,'expr',e.body); -if(!symtable_exit_block(st)) -VISIT_QUIT(st,0); +_resmark=p.mark; +_res=_raw;} +p.mark=_resmark; +return _res;} +function t_primary_raw(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=t_primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,23)) +&& +(b=$B._PyPegen.name_token(p)) +&& +$B._PyPegen.lookahead(1,t_lookahead_rule,p) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Attribute(a,b. 
id,$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(a=t_primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,9)) +&& +(b=slices_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,10)) +&& +$B._PyPegen.lookahead(1,t_lookahead_rule,p) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Subscript(a,b,$B.parser_constants.Load,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=t_primary_rule(p)) +&& +(b=genexp_rule(p)) +&& +$B._PyPegen.lookahead(1,t_lookahead_rule,p) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Call(a,$B.helper_functions.CHECK($B.parser_constants.asdl_expr_seq,$B._PyPegen.singleton_seq(p,b )),$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(a=t_primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(b=arguments_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +&& +$B._PyPegen.lookahead(1,t_lookahead_rule,p) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Call(a,(b )?(b ). args :$B.parser_constants.NULL,(b )?(b ). keywords :$B.parser_constants.NULL,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=atom_rule(p)) +&& +$B._PyPegen.lookahead(1,t_lookahead_rule,p) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function t_lookahead_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,9)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,23)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function del_targets_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +if( +(a=_gather_139_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function del_target_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,del_target_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=t_primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,23)) +&& +(b=$B._PyPegen.name_token(p)) +&& +$B._PyPegen.lookahead(0,t_lookahead_rule,p) +) +{var 
_token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Attribute(a,b. id,$B.parser_constants.Del,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(a=t_primary_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,9)) +&& +(b=slices_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,10)) +&& +$B._PyPegen.lookahead(0,t_lookahead_rule,p) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Subscript(a,b,$B.parser_constants.Del,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var del_t_atom_var; +if( +(del_t_atom_var=del_t_atom_rule(p)) +) +{_res=del_t_atom_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,del_target_type,_res); +return _res;} +function del_t_atom_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.set_expr_context(p,a,$B.parser_constants.Del); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=del_target_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{_res=$B._PyPegen.set_expr_context(p,a,$B.parser_constants.Del); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=del_targets_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.Tuple(a,$B.parser_constants.Del,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,9)) +&& +(a=del_targets_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,10)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.List(a,$B.parser_constants.Del,EXTRA); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function type_expressions_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _literal_2; +var _literal_3; +var a; +var b; +var c; +if( +(a=_gather_141_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(_literal_1=$B._PyPegen.expect_token(p,16)) +&& +(b=expression_rule(p)) +&& +(_literal_2=$B._PyPegen.expect_token(p,12)) +&& +(_literal_3=$B._PyPegen.expect_token(p,35)) +&& +(c=expression_rule(p)) +) +{_res=$B._PyPegen.seq_append_to_end(p,$B.helper_functions.CHECK(asdl_seq,$B._PyPegen.seq_append_to_end(p,a,b )),c); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var 
_literal; +var _literal_1; +var a; +var b; +if( +(a=_gather_143_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(_literal_1=$B._PyPegen.expect_token(p,16)) +&& +(b=expression_rule(p)) +) +{_res=$B._PyPegen.seq_append_to_end(p,a,b); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var b; +if( +(a=_gather_145_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(_literal_1=$B._PyPegen.expect_token(p,35)) +&& +(b=expression_rule(p)) +) +{_res=$B._PyPegen.seq_append_to_end(p,a,b); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _literal_2; +var a; +var b; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(a=expression_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +&& +(_literal_2=$B._PyPegen.expect_token(p,35)) +&& +(b=expression_rule(p)) +) +{_res=$B._PyPegen.seq_append_to_end(p,$B.helper_functions.CHECK(asdl_seq,$B._PyPegen.singleton_seq(p,a )),b); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(a=expression_rule(p)) +) +{_res=$B._PyPegen.singleton_seq(p,a); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(a=expression_rule(p)) +) +{_res=$B._PyPegen.singleton_seq(p,a); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=_gather_147_rule(p)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function func_type_comment_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var newline_var; +var t; +if( +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +(t=$B._PyPegen.expect_token(p,TYPE_COMMENT)) +&& +$B._PyPegen.lookahead(1,_tmp_149_rule,p) +) +{_res=t; +break;} +p.mark=_mark;} +if(p.call_invalid_rules){ +if(p.error_indicator){return NULL;} +var invalid_double_type_comments_var; +if( +(invalid_double_type_comments_var=invalid_double_type_comments_rule(p)) +) +{_res=invalid_double_type_comments_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var type_comment_var; +if( +(type_comment_var=$B._PyPegen.expect_token(p,TYPE_COMMENT)) +) +{_res=type_comment_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_arguments_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _tmp_150_var; +var b; +if( +(_tmp_150_var=_tmp_150_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(b=$B._PyPegen.expect_token(p,16)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,b,"iterable argument unpacking follows keyword argument unpacking"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _opt_var; +UNUSED(_opt_var); +var a; +var b; +if( +(a=expression_rule(p)) +&& +(b=for_if_clauses_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(_opt_var=_tmp_151_rule(p),!p.error_indicator) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,$B._PyPegen.get_last_comprehension_item($B.PyPegen.last_item(b,$B.ast.comprehension )),"Generator expression must be parenthesized"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +var expression_var; +var for_if_clauses_var; +if( +(a=$B._PyPegen.name_token(p)) +&& 
+(b=$B._PyPegen.expect_token(p,22)) +&& +(expression_var=expression_rule(p)) +&& +(for_if_clauses_var=for_if_clauses_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"invalid syntax. Maybe you meant '==' or ':=' instead of '='?"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +var b; +if( +(_opt_var=_tmp_152_rule(p),!p.error_indicator) +&& +(a=$B._PyPegen.name_token(p)) +&& +(b=$B._PyPegen.expect_token(p,22)) +&& +$B._PyPegen.lookahead(1,_tmp_153_rule,p) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"expected argument value expression"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=args_rule(p)) +&& +(b=for_if_clauses_rule(p)) +) +{_res=$B._PyPegen.nonparen_genexp_in_call(p,a,b); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var args_var; +var b; +if( +(args_var=args_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(a=expression_rule(p)) +&& +(b=for_if_clauses_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,$B._PyPegen.get_last_comprehension_item($B.PyPegen.last_item(b,$B.ast.comprehension )),"Generator expression must be parenthesized"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var args_var; +if( +(a=args_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(args_var=args_rule(p)) +) +{_res=$B._PyPegen.arguments_parsing_error(p,a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_kwarg_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=_tmp_154_rule(p)) +&& +(b=$B._PyPegen.expect_token(p,22)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"cannot assign to %s",$B.parser_constants.PyBytes_AS_STRING(a.bytes )); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +var expression_var; +var for_if_clauses_var; +if( +(a=$B._PyPegen.name_token(p)) +&& +(b=$B._PyPegen.expect_token(p,22)) +&& +(expression_var=expression_rule(p)) +&& +(for_if_clauses_var=for_if_clauses_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"invalid syntax. 
Maybe you meant '==' or ':=' instead of '='?"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +$B._PyPegen.lookahead(0,_tmp_155_rule,p) +&& +(a=expression_rule(p)) +&& +(b=$B._PyPegen.expect_token(p,22)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"expression cannot contain assignment, perhaps you meant \"==\"?"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +var expression_var; +if( +(a=$B._PyPegen.expect_token(p,35)) +&& +(expression_var=expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +&& +(b=expression_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"cannot assign to keyword argument unpacking"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function expression_without_invalid_rule(p) +{var _prev_call_invalid=p.call_invalid_rules; +p.call_invalid_rules=0; +if(p.error_indicator){p.call_invalid_rules=_prev_call_invalid; +return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +if(p.mark==p.fill && $B._PyPegen.fill_token(p)< 0){p.error_indicator=1; +p.call_invalid_rules=_prev_call_invalid; +return NULL;} +var EXTRA={} +EXTRA.lineno=p.tokens[_mark].lineno; +EXTRA.col_offset=p.tokens[_mark].col_offset; +{ +if(p.error_indicator){p.call_invalid_rules=_prev_call_invalid; +return NULL;} +var _keyword; +var _keyword_1; +var a; +var b; +var c; +if( +(a=disjunction_rule(p)) +&& +(_keyword=$B._PyPegen.expect_token(p,642)) +&& +(b=disjunction_rule(p)) +&& +(_keyword_1=$B._PyPegen.expect_token(p,645)) +&& +(c=expression_rule(p)) +) +{var _token=$B._PyPegen.get_last_nonnwhitespace_token(p); +if(_token==NULL){p.call_invalid_rules=_prev_call_invalid; +return NULL;} +EXTRA.end_lineno=_token.end_lineno; +EXTRA.end_col_offset=_token.end_col_offset; +_res=new $B._PyAST.IfExp(b,a,c,EXTRA); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){p.call_invalid_rules=_prev_call_invalid; +return NULL;} +var disjunction_var; +if( +(disjunction_var=disjunction_rule(p)) +) +{_res=disjunction_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){p.call_invalid_rules=_prev_call_invalid; +return NULL;} +var lambdef_var; +if( +(lambdef_var=lambdef_rule(p)) +) +{_res=lambdef_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +p.call_invalid_rules=_prev_call_invalid; +return _res;} +function invalid_legacy_expression_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +(a=$B._PyPegen.name_token(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,7) +&& +(b=star_expressions_rule(p)) +) +{_res=$B._PyPegen.check_legacy_stmt(p,a )? $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"Missing parentheses in call to '%U'. Did you mean %U(...)?",a. id,a. id):$B.parser_constants.NULL; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_expression_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +var b; +if( +$B._PyPegen.lookahead(0,_tmp_156_rule,p) +&& +(a=disjunction_rule(p)) +&& +(b=expression_without_invalid_rule(p)) +) +{_res=$B._PyPegen.check_legacy_stmt(p,a )? $B.parser_constants.NULL :p.tokens[p.mark-1].level==0 ? $B.parser_constants.NULL :$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"invalid syntax. 
Perhaps you forgot a comma?"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +var b; +if( +(a=disjunction_rule(p)) +&& +(_keyword=$B._PyPegen.expect_token(p,642)) +&& +(b=disjunction_rule(p)) +&& +$B._PyPegen.lookahead(0,_tmp_157_rule,p) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"expected 'else' after 'if' expression"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var a; +var b; +if( +(a=$B._PyPegen.expect_token(p,600)) +&& +(_opt_var=lambda_params_rule(p),!p.error_indicator) +&& +(b=$B._PyPegen.expect_token(p,11)) +&& +$B._PyPegen.lookahead_with_int(1,$B._PyPegen.expect_token,p,FSTRING_MIDDLE) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"f-string: lambda expressions are not allowed without parentheses"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_named_expression_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res={value:NULL}; +if($B._PyPegen.is_memoized(p,invalid_named_expression_type,_res)){return _res.value;} +_res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var expression_var; +if( +(a=expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,53)) +&& +(expression_var=expression_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"cannot use assignment expressions with %s",$B._PyPegen.get_expr_name(a )); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +if( +(a=$B._PyPegen.name_token(p)) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +&& +(b=bitwise_or_rule(p)) +&& +$B._PyPegen.lookahead(0,_tmp_158_rule,p) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"invalid syntax. Maybe you meant '==' or ':=' instead of '='?"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var b; +var bitwise_or_var; +if( +$B._PyPegen.lookahead(0,_tmp_159_rule,p) +&& +(a=bitwise_or_rule(p)) +&& +(b=$B._PyPegen.expect_token(p,22)) +&& +(bitwise_or_var=bitwise_or_rule(p)) +&& +$B._PyPegen.lookahead(0,_tmp_160_rule,p) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"cannot assign to %s here. 
Maybe you meant '==' instead of '='?",$B._PyPegen.get_expr_name(a )); +break;} +p.mark=_mark;} +_res=NULL; +break;} +$B._PyPegen.insert_memo(p,_mark,invalid_named_expression_type,_res); +return _res;} +function invalid_assignment_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var expression_var; +if( +(a=invalid_ann_assign_target_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(expression_var=expression_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"only single target (not %s) can be annotated",$B._PyPegen.get_expr_name(a )); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _loop0_161_var; +var a; +var expression_var; +if( +(a=star_named_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(_loop0_161_var=_loop0_161_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,11)) +&& +(expression_var=expression_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"only single target (not tuple) can be annotated"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var expression_var; +if( +(a=expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(expression_var=expression_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"illegal target for annotation"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _loop0_162_var; +var a; +if( +(_loop0_162_var=_loop0_162_rule(p)) +&& +(a=star_expressions_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_INVALID_TARGET(p,$B.parser_constants.STAR_TARGETS,a); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _loop0_163_var; +var a; +if( +(_loop0_163_var=_loop0_163_rule(p)) +&& +(a=yield_expr_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"assignment to yield expression not possible"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _tmp_164_var; +var a; +var augassign_var; +if( +(a=star_expressions_rule(p)) +&& +(augassign_var=augassign_rule(p)) +&& +(_tmp_164_var=_tmp_164_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"'%s' is an illegal expression for augmented assignment",$B._PyPegen.get_expr_name(a )); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_ann_assign_target_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var list_var; +if( +(list_var=list_rule(p)) +) +{_res=list_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var tuple_var; +if( +(tuple_var=tuple_rule(p)) +) +{_res=tuple_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=invalid_ann_assign_target_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_del_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,604)) +&& 
+(a=star_expressions_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_INVALID_TARGET(p,$B.parser_constants.DEL_TARGETS,a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_block_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var newline_var; +if( +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_comprehension_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _tmp_165_var; +var a; +var for_if_clauses_var; +if( +(_tmp_165_var=_tmp_165_rule(p)) +&& +(a=starred_expression_rule(p)) +&& +(for_if_clauses_var=for_if_clauses_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"iterable unpacking cannot be used in comprehension"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _tmp_166_var; +var a; +var b; +var for_if_clauses_var; +if( +(_tmp_166_var=_tmp_166_rule(p)) +&& +(a=star_named_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(b=star_named_expressions_rule(p)) +&& +(for_if_clauses_var=for_if_clauses_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,$B.PyPegen.last_item(b,$B.ast.expr ),"did you forget parentheses around the comprehension target?"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _tmp_167_var; +var a; +var b; +var for_if_clauses_var; +if( +(_tmp_167_var=_tmp_167_rule(p)) +&& +(a=star_named_expression_rule(p)) +&& +(b=$B._PyPegen.expect_token(p,12)) +&& +(for_if_clauses_var=for_if_clauses_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"did you forget parentheses around the comprehension target?"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_dict_comprehension_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var bitwise_or_var; +var for_if_clauses_var; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(a=$B._PyPegen.expect_token(p,35)) +&& +(bitwise_or_var=bitwise_or_rule(p)) +&& +(for_if_clauses_var=for_if_clauses_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,26)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"dict unpacking cannot be used in dict comprehension"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_parameters_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(a=$B._PyPegen.expect_token(p,17)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"at least one argument must precede /"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _loop0_169_var; +var _tmp_168_var; +var a; +if( +(_tmp_168_var=_tmp_168_rule(p)) +&& +(_loop0_169_var=_loop0_169_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,17)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"/ may appear only once"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var 
_loop0_170_var; +var _opt_var; +UNUSED(_opt_var); +var a; +var invalid_parameters_helper_var; +if( +(_opt_var=slash_no_default_rule(p),!p.error_indicator) +&& +(_loop0_170_var=_loop0_170_rule(p)) +&& +(invalid_parameters_helper_var=invalid_parameters_helper_rule(p)) +&& +(a=param_no_default_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"parameter without a default follows parameter with a default"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _loop0_171_var; +var _loop1_172_var; +var _opt_var; +UNUSED(_opt_var); +var a; +var b; +if( +(_loop0_171_var=_loop0_171_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,7)) +&& +(_loop1_172_var=_loop1_172_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +&& +(b=$B._PyPegen.expect_token(p,8)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"Function parameters cannot be parenthesized"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _loop0_174_var; +var _loop0_176_var; +var _opt_var; +UNUSED(_opt_var); +var _tmp_175_var; +var a; +if( +(_opt_var=_tmp_173_rule(p),!p.error_indicator) +&& +(_loop0_174_var=_loop0_174_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(_tmp_175_var=_tmp_175_rule(p)) +&& +(_loop0_176_var=_loop0_176_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,17)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"/ must be ahead of *"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _loop1_177_var; +var a; +if( +(_loop1_177_var=_loop1_177_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,17)) +&& +(a=$B._PyPegen.expect_token(p,16)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"expected comma between / and *"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_default_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=$B._PyPegen.expect_token(p,22)) +&& +$B._PyPegen.lookahead(1,_tmp_178_rule,p) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"expected default value expression"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_star_etc_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _tmp_179_var; +var a; +if( +(a=$B._PyPegen.expect_token(p,16)) +&& +(_tmp_179_var=_tmp_179_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"named arguments must follow bare *"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var type_comment_var; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +&& +(type_comment_var=$B._PyPegen.expect_token(p,TYPE_COMMENT)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"bare * has associated type comment"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var param_var; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(param_var=param_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,22)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"var-positional argument cannot have default value"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _loop0_181_var; +var _tmp_180_var; +var _tmp_182_var; +var a; +if( 
+(_literal=$B._PyPegen.expect_token(p,16)) +&& +(_tmp_180_var=_tmp_180_rule(p)) +&& +(_loop0_181_var=_loop0_181_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,16)) +&& +(_tmp_182_var=_tmp_182_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"* argument may appear only once"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_kwds_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var param_var; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(param_var=param_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,22)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"var-keyword argument cannot have default value"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var param_var; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(param_var=param_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +&& +(a=param_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"arguments cannot follow var-keyword argument"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var param_var; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(param_var=param_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +&& +(a=_tmp_183_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"arguments cannot follow var-keyword argument"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_parameters_helper_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=slash_with_default_rule(p)) +) +{_res=$B._PyPegen.singleton_seq(p,a); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _loop1_184_var; +if( +(_loop1_184_var=_loop1_184_rule(p)) +) +{_res=_loop1_184_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_lambda_parameters_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(a=$B._PyPegen.expect_token(p,17)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"at least one argument must precede /"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _loop0_186_var; +var _tmp_185_var; +var a; +if( +(_tmp_185_var=_tmp_185_rule(p)) +&& +(_loop0_186_var=_loop0_186_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,17)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"/ may appear only once"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _loop0_187_var; +var _opt_var; +UNUSED(_opt_var); +var a; +var invalid_lambda_parameters_helper_var; +if( +(_opt_var=lambda_slash_no_default_rule(p),!p.error_indicator) +&& +(_loop0_187_var=_loop0_187_rule(p)) +&& +(invalid_lambda_parameters_helper_var=invalid_lambda_parameters_helper_rule(p)) +&& +(a=lambda_param_no_default_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"parameter without a default follows parameter with a default"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _gather_189_var; +var _loop0_188_var; +var _opt_var; +UNUSED(_opt_var); +var a; +var b; +if( +(_loop0_188_var=_loop0_188_rule(p)) 
+&& +(a=$B._PyPegen.expect_token(p,7)) +&& +(_gather_189_var=_gather_189_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,12),!p.error_indicator) +&& +(b=$B._PyPegen.expect_token(p,8)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"Lambda expression parameters cannot be parenthesized"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _loop0_192_var; +var _loop0_194_var; +var _opt_var; +UNUSED(_opt_var); +var _tmp_193_var; +var a; +if( +(_opt_var=_tmp_191_rule(p),!p.error_indicator) +&& +(_loop0_192_var=_loop0_192_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(_tmp_193_var=_tmp_193_rule(p)) +&& +(_loop0_194_var=_loop0_194_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,17)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"/ must be ahead of *"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _loop1_195_var; +var a; +if( +(_loop1_195_var=_loop1_195_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,17)) +&& +(a=$B._PyPegen.expect_token(p,16)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"expected comma between / and *"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_lambda_parameters_helper_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=lambda_slash_with_default_rule(p)) +) +{_res=$B._PyPegen.singleton_seq(p,a); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _loop1_196_var; +if( +(_loop1_196_var=_loop1_196_rule(p)) +) +{_res=_loop1_196_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_lambda_star_etc_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _tmp_197_var; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(_tmp_197_var=_tmp_197_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"named arguments must follow bare *"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var lambda_param_var; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(lambda_param_var=lambda_param_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,22)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"var-positional argument cannot have default value"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _loop0_199_var; +var _tmp_198_var; +var _tmp_200_var; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(_tmp_198_var=_tmp_198_rule(p)) +&& +(_loop0_199_var=_loop0_199_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,16)) +&& +(_tmp_200_var=_tmp_200_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"* argument may appear only once"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_lambda_kwds_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var lambda_param_var; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(lambda_param_var=lambda_param_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,22)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"var-keyword argument cannot have default value"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var 
lambda_param_var; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(lambda_param_var=lambda_param_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +&& +(a=lambda_param_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"arguments cannot follow var-keyword argument"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var lambda_param_var; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +&& +(lambda_param_var=lambda_param_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,12)) +&& +(a=_tmp_201_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"arguments cannot follow var-keyword argument"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_double_type_comments_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var indent_var; +var newline_var; +var newline_var_1; +var type_comment_var; +var type_comment_var_1; +if( +(type_comment_var=$B._PyPegen.expect_token(p,TYPE_COMMENT)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +(type_comment_var_1=$B._PyPegen.expect_token(p,TYPE_COMMENT)) +&& +(newline_var_1=$B._PyPegen.expect_token(p,NEWLINE)) +&& +(indent_var=$B._PyPegen.expect_token(p,INDENT)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"Cannot have two type comments on def"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_with_item_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +var expression_var; +if( +(expression_var=expression_rule(p)) +&& +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(a=expression_rule(p)) +&& +$B._PyPegen.lookahead(1,_tmp_202_rule,p) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_INVALID_TARGET(p,$B.parser_constants.STAR_TARGETS,a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_for_target_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _opt_var; +UNUSED(_opt_var); +var a; +if( +(_opt_var=$B._PyPegen.expect_token(p,ASYNC),!p.error_indicator) +&& +(_keyword=$B._PyPegen.expect_token(p,650)) +&& +(a=star_expressions_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_INVALID_TARGET(p,$B.parser_constants.FOR_TARGETS,a); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_group_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=starred_expression_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"cannot use starred expression here"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var a; +var expression_var; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=$B._PyPegen.expect_token(p,35)) +&& +(expression_var=expression_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"cannot use double starred expression here"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_import_rule(p) +{if(p.error_indicator){return NULL;} 
+while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _gather_203_var; +var _keyword; +var a; +var dotted_name_var; +if( +(a=$B._PyPegen.expect_token(p,607)) +&& +(_gather_203_var=_gather_203_rule(p)) +&& +(_keyword=$B._PyPegen.expect_token(p,608)) +&& +(dotted_name_var=dotted_name_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_STARTING_FROM(p,a,"Did you mean to use 'from ... import ...' instead?"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_import_from_targets_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var import_from_as_names_var; +var newline_var; +if( +(import_from_as_names_var=import_from_as_names_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"trailing comma not allowed without surrounding parentheses"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_with_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _gather_205_var; +var _keyword; +var _opt_var; +UNUSED(_opt_var); +var newline_var; +if( +(_opt_var=$B._PyPegen.expect_token(p,ASYNC),!p.error_indicator) +&& +(_keyword=$B._PyPegen.expect_token(p,615)) +&& +(_gather_205_var=_gather_205_rule(p)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected ':'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _gather_207_var; +var _keyword; +var _literal; +var _literal_1; +var _opt_var; +UNUSED(_opt_var); +var _opt_var_1; +UNUSED(_opt_var_1); +var newline_var; +if( +(_opt_var=$B._PyPegen.expect_token(p,ASYNC),!p.error_indicator) +&& +(_keyword=$B._PyPegen.expect_token(p,615)) +&& +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(_gather_207_var=_gather_207_rule(p)) +&& +(_opt_var_1=$B._PyPegen.expect_token(p,12),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected ':'"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_with_stmt_indent_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _gather_209_var; +var _literal; +var _opt_var; +UNUSED(_opt_var); +var a; +var newline_var; +if( +(_opt_var=$B._PyPegen.expect_token(p,ASYNC),!p.error_indicator) +&& +(a=$B._PyPegen.expect_token(p,615)) +&& +(_gather_209_var=_gather_209_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'with' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _gather_211_var; +var _literal; +var _literal_1; +var _literal_2; +var _opt_var; +UNUSED(_opt_var); +var _opt_var_1; +UNUSED(_opt_var_1); +var a; +var newline_var; +if( +(_opt_var=$B._PyPegen.expect_token(p,ASYNC),!p.error_indicator) +&& +(a=$B._PyPegen.expect_token(p,615)) +&& +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(_gather_211_var=_gather_211_rule(p)) +&& 
+(_opt_var_1=$B._PyPegen.expect_token(p,12),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +&& +(_literal_2=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'with' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_try_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var newline_var; +if( +(a=$B._PyPegen.expect_token(p,624)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'try' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var block_var; +if( +(_keyword=$B._PyPegen.expect_token(p,624)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(block_var=block_rule(p)) +&& +$B._PyPegen.lookahead(0,_tmp_213_rule,p) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected 'except' or 'finally' block"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var _literal_1; +var _loop0_214_var; +var _loop1_215_var; +var _opt_var; +UNUSED(_opt_var); +var a; +var b; +var expression_var; +if( +(_keyword=$B._PyPegen.expect_token(p,624)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(_loop0_214_var=_loop0_214_rule(p)) +&& +(_loop1_215_var=_loop1_215_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,637)) +&& +(b=$B._PyPegen.expect_token(p,16)) +&& +(expression_var=expression_rule(p)) +&& +(_opt_var=_tmp_216_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,11)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"cannot have both 'except' and 'except*' on the same 'try'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var _literal_1; +var _loop0_217_var; +var _loop1_218_var; +var _opt_var; +UNUSED(_opt_var); +var a; +if( +(_keyword=$B._PyPegen.expect_token(p,624)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(_loop0_217_var=_loop0_217_rule(p)) +&& +(_loop1_218_var=_loop1_218_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,637)) +&& +(_opt_var=_tmp_219_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,11)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"cannot have both 'except' and 'except*' on the same 'try'"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_except_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var _literal_1; +var _opt_var; +UNUSED(_opt_var); +var _opt_var_1; +UNUSED(_opt_var_1); +var a; +var expressions_var; +if( +(_keyword=$B._PyPegen.expect_token(p,637)) +&& +(_opt_var=$B._PyPegen.expect_token(p,16),!p.error_indicator) +&& +(a=expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(expressions_var=expressions_rule(p)) +&& +(_opt_var_1=_tmp_220_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,11)) +) 
+{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_STARTING_FROM(p,a,"multiple exception types must be parenthesized"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var _opt_var_1; +UNUSED(_opt_var_1); +var a; +var expression_var; +var newline_var; +if( +(a=$B._PyPegen.expect_token(p,637)) +&& +(_opt_var=$B._PyPegen.expect_token(p,16),!p.error_indicator) +&& +(expression_var=expression_rule(p)) +&& +(_opt_var_1=_tmp_221_rule(p),!p.error_indicator) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected ':'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var newline_var; +if( +(a=$B._PyPegen.expect_token(p,637)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected ':'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _tmp_222_var; +var a; +if( +(a=$B._PyPegen.expect_token(p,637)) +&& +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(_tmp_222_var=_tmp_222_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected one or more exception types"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_finally_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var newline_var; +if( +(a=$B._PyPegen.expect_token(p,633)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'finally' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_except_stmt_indent_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _opt_var; +UNUSED(_opt_var); +var a; +var expression_var; +var newline_var; +if( +(a=$B._PyPegen.expect_token(p,637)) +&& +(expression_var=expression_rule(p)) +&& +(_opt_var=_tmp_223_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'except' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var newline_var; +if( +(a=$B._PyPegen.expect_token(p,637)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'except' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_except_star_stmt_indent_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _opt_var; +UNUSED(_opt_var); +var a; +var expression_var; +var newline_var; +if( +(a=$B._PyPegen.expect_token(p,637)) +&& +(_literal=$B._PyPegen.expect_token(p,16)) +&& +(expression_var=expression_rule(p)) +&& 
+(_opt_var=_tmp_224_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'except*' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_match_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var newline_var; +var subject_expr_var; +if( +(_keyword=$B._PyPegen.expect_soft_keyword(p,"match")) +&& +(subject_expr_var=subject_expr_rule(p)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B.helper_functions.CHECK_VERSION(NULL,10,"Pattern matching is",$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected ':'" )); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var newline_var; +var subject; +if( +(a=$B._PyPegen.expect_soft_keyword(p,"match")) +&& +(subject=subject_expr_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'match' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_case_block_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _opt_var; +UNUSED(_opt_var); +var newline_var; +var patterns_var; +if( +(_keyword=$B._PyPegen.expect_soft_keyword(p,"case")) +&& +(patterns_var=patterns_rule(p)) +&& +(_opt_var=guard_rule(p),!p.error_indicator) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected ':'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _opt_var; +UNUSED(_opt_var); +var a; +var newline_var; +var patterns_var; +if( +(a=$B._PyPegen.expect_soft_keyword(p,"case")) +&& +(patterns_var=patterns_rule(p)) +&& +(_opt_var=guard_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'case' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_as_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +var or_pattern_var; +if( +(or_pattern_var=or_pattern_rule(p)) +&& +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(a=$B._PyPegen.expect_soft_keyword(p,"_")) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"cannot use '_' as a target"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var a; +var or_pattern_var; +if( +(or_pattern_var=or_pattern_rule(p)) +&& +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +$B._PyPegen.lookahead_with_name(0,$B._PyPegen.name_token,p) +&& +(a=expression_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"invalid pattern target"); +break;} +p.mark=_mark;} +_res=NULL; +break;} 
+return _res;} +function invalid_class_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var name_or_attr_var; +if( +(name_or_attr_var=name_or_attr_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(a=invalid_class_argument_pattern_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,$B.PyPegen.first_item(a,$B.ast.pattern ),$B.PyPegen.last_item(a,$B.ast.pattern ),"positional patterns follow keyword patterns"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_class_argument_pattern_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _opt_var; +UNUSED(_opt_var); +var a; +var keyword_patterns_var; +if( +(_opt_var=_tmp_225_rule(p),!p.error_indicator) +&& +(keyword_patterns_var=keyword_patterns_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(a=positional_patterns_rule(p)) +) +{_res=a; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_if_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var named_expression_var; +var newline_var; +if( +(_keyword=$B._PyPegen.expect_token(p,642)) +&& +(named_expression_var=named_expression_rule(p)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected ':'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var a_1; +var newline_var; +if( +(a=$B._PyPegen.expect_token(p,642)) +&& +(a_1=named_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'if' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_elif_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var named_expression_var; +var newline_var; +if( +(_keyword=$B._PyPegen.expect_token(p,644)) +&& +(named_expression_var=named_expression_rule(p)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected ':'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var named_expression_var; +var newline_var; +if( +(a=$B._PyPegen.expect_token(p,644)) +&& +(named_expression_var=named_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'elif' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_else_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var newline_var; +if( +(a=$B._PyPegen.expect_token(p,645)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& 
+$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'else' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_while_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var named_expression_var; +var newline_var; +if( +(_keyword=$B._PyPegen.expect_token(p,647)) +&& +(named_expression_var=named_expression_rule(p)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected ':'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var named_expression_var; +var newline_var; +if( +(a=$B._PyPegen.expect_token(p,647)) +&& +(named_expression_var=named_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'while' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_for_stmt_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _keyword_1; +var _opt_var; +UNUSED(_opt_var); +var newline_var; +var star_expressions_var; +var star_targets_var; +if( +(_opt_var=$B._PyPegen.expect_token(p,ASYNC),!p.error_indicator) +&& +(_keyword=$B._PyPegen.expect_token(p,650)) +&& +(star_targets_var=star_targets_rule(p)) +&& +(_keyword_1=$B._PyPegen.expect_token(p,651)) +&& +(star_expressions_var=star_expressions_rule(p)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected ':'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _literal; +var _opt_var; +UNUSED(_opt_var); +var a; +var newline_var; +var star_expressions_var; +var star_targets_var; +if( +(_opt_var=$B._PyPegen.expect_token(p,ASYNC),!p.error_indicator) +&& +(a=$B._PyPegen.expect_token(p,650)) +&& +(star_targets_var=star_targets_rule(p)) +&& +(_keyword=$B._PyPegen.expect_token(p,651)) +&& +(star_expressions_var=star_expressions_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after 'for' statement on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_def_raw_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _literal_2; +var _opt_var; +UNUSED(_opt_var); +var _opt_var_1; +UNUSED(_opt_var_1); +var _opt_var_2; +UNUSED(_opt_var_2); +var _opt_var_3; +UNUSED(_opt_var_3); +var a; +var name_var; +var newline_var; +if( +(_opt_var=$B._PyPegen.expect_token(p,ASYNC),!p.error_indicator) +&& +(a=$B._PyPegen.expect_token(p,652)) +&& +(name_var=$B._PyPegen.name_token(p)) +&& +(_opt_var_1=type_params_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(_opt_var_2=params_rule(p),!p.error_indicator) +&& 
+(_literal_1=$B._PyPegen.expect_token(p,8)) +&& +(_opt_var_3=_tmp_226_rule(p),!p.error_indicator) +&& +(_literal_2=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after function definition on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_class_def_raw_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var _opt_var; +UNUSED(_opt_var); +var _opt_var_1; +UNUSED(_opt_var_1); +var name_var; +var newline_var; +if( +(_keyword=$B._PyPegen.expect_token(p,654)) +&& +(name_var=$B._PyPegen.name_token(p)) +&& +(_opt_var=type_params_rule(p),!p.error_indicator) +&& +(_opt_var_1=_tmp_227_rule(p),!p.error_indicator) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR(p,"expected ':'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _opt_var; +UNUSED(_opt_var); +var _opt_var_1; +UNUSED(_opt_var_1); +var a; +var name_var; +var newline_var; +if( +(a=$B._PyPegen.expect_token(p,654)) +&& +(name_var=$B._PyPegen.name_token(p)) +&& +(_opt_var=type_params_rule(p),!p.error_indicator) +&& +(_opt_var_1=_tmp_228_rule(p),!p.error_indicator) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,INDENT) +) +{_res=$B.helper_functions.RAISE_INDENTATION_ERROR(p,"expected an indented block after class definition on line %d",a.lineno); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_double_starred_kvpairs_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _gather_229_var; +var _literal; +var invalid_kvpair_var; +if( +(_gather_229_var=_gather_229_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(invalid_kvpair_var=invalid_kvpair_rule(p)) +) +{_res=$B._PyPegen.dummy_name(p,_gather_229_var,_literal,invalid_kvpair_var); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var bitwise_or_var; +var expression_var; +if( +(expression_var=expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(a=$B._PyPegen.expect_token(p,16)) +&& +(bitwise_or_var=bitwise_or_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_STARTING_FROM(p,a,"cannot use a starred expression in a dictionary value"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var expression_var; +if( +(expression_var=expression_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,11)) +&& +$B._PyPegen.lookahead(1,_tmp_231_rule,p) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"expression expected after dictionary key and ':'"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_kvpair_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var a; +if( +(a=expression_rule(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,11) +) +{_res=$B.helper_functions.RAISE_ERROR_KNOWN_LOCATION(p,$B.parser_constants.PyExc_SyntaxError,a.lineno,a.end_col_offset-1,a.end_lineno,-1,"':' expected after dictionary key"); 
+break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var bitwise_or_var; +var expression_var; +if( +(expression_var=expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(a=$B._PyPegen.expect_token(p,16)) +&& +(bitwise_or_var=bitwise_or_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_STARTING_FROM(p,a,"cannot use a starred expression in a dictionary value"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var a; +var expression_var; +if( +(expression_var=expression_rule(p)) +&& +(a=$B._PyPegen.expect_token(p,11)) +&& +$B._PyPegen.lookahead(1,_tmp_232_rule,p) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"expression expected after dictionary key and ':'"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_starred_expression_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +var b; +var expression_var; +if( +(a=$B._PyPegen.expect_token(p,16)) +&& +(expression_var=expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +&& +(b=expression_rule(p)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,a,b,"cannot assign to iterable argument unpacking"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_replacement_field_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(a=$B._PyPegen.expect_token(p,22)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"f-string: valid expression required before '='"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(a=$B._PyPegen.expect_token(p,54)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"f-string: valid expression required before '!'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(a=$B._PyPegen.expect_token(p,11)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"f-string: valid expression required before ':'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var a; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(a=$B._PyPegen.expect_token(p,26)) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p,a,"f-string: valid expression required before '}'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +$B._PyPegen.lookahead(0,_tmp_233_rule,p) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p,"f-string: expecting a valid expression after '{'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _tmp_234_var; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(_tmp_234_var=_tmp_234_rule(p)) +&& +$B._PyPegen.lookahead(0,_tmp_235_rule,p) +) +{_res=PyErr_Occurred()? 
$B.parser_constants.NULL :$B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p,"f-string: expecting '=', or '!', or ':', or '}'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _tmp_236_var; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(_tmp_236_var=_tmp_236_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,22)) +&& +$B._PyPegen.lookahead(0,_tmp_237_rule,p) +) +{_res=PyErr_Occurred()? $B.parser_constants.NULL :$B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p,"f-string: expecting '!', or ':', or '}'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _opt_var; +UNUSED(_opt_var); +var _tmp_238_var; +var invalid_conversion_character_var; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(_tmp_238_var=_tmp_238_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,22),!p.error_indicator) +&& +(invalid_conversion_character_var=invalid_conversion_character_rule(p)) +) +{_res=$B._PyPegen.dummy_name(p,_literal,_tmp_238_var,_opt_var,invalid_conversion_character_var); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _opt_var; +UNUSED(_opt_var); +var _opt_var_1; +UNUSED(_opt_var_1); +var _tmp_239_var; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(_tmp_239_var=_tmp_239_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,22),!p.error_indicator) +&& +(_opt_var_1=_tmp_240_rule(p),!p.error_indicator) +&& +$B._PyPegen.lookahead(0,_tmp_241_rule,p) +) +{_res=PyErr_Occurred()? $B.parser_constants.NULL :$B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p,"f-string: expecting ':' or '}'"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _loop0_244_var; +var _opt_var; +UNUSED(_opt_var); +var _opt_var_1; +UNUSED(_opt_var_1); +var _tmp_242_var; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(_tmp_242_var=_tmp_242_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,22),!p.error_indicator) +&& +(_opt_var_1=_tmp_243_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,11)) +&& +(_loop0_244_var=_loop0_244_rule(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,26) +) +{_res=PyErr_Occurred()? $B.parser_constants.NULL :$B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p,"f-string: expecting '}', or format specs"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _opt_var; +UNUSED(_opt_var); +var _opt_var_1; +UNUSED(_opt_var_1); +var _tmp_245_var; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +&& +(_tmp_245_var=_tmp_245_rule(p)) +&& +(_opt_var=$B._PyPegen.expect_token(p,22),!p.error_indicator) +&& +(_opt_var_1=_tmp_246_rule(p),!p.error_indicator) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,26) +) +{_res=PyErr_Occurred()? 
$B.parser_constants.NULL :$B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p,"f-string: expecting '}'"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function invalid_conversion_character_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,54)) +&& +$B._PyPegen.lookahead(1,_tmp_247_rule,p) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p,"f-string: missing conversion character"); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,54)) +&& +$B._PyPegen.lookahead_with_name(0,$B._PyPegen.name_token,p) +) +{_res=$B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p,"f-string: invalid conversion character"); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_1_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var newline_var; +while( +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=newline_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_2_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var newline_var; +while( +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=newline_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_3_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var statement_var; +while( +(statement_var=statement_rule(p)) +) +{_res=statement_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_5_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,13)) +&& +(elem=simple_stmt_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_4_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=simple_stmt_rule(p)) +&& +(seq=_loop0_5_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_6_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,607)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,608)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_7_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,652)) +) 
+{_res=_keyword; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,49)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var async_var; +if( +(async_var=$B._PyPegen.expect_token(p,ASYNC)) +) +{_res=async_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_8_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,654)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,49)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_9_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,615)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var async_var; +if( +(async_var=$B._PyPegen.expect_token(p,ASYNC)) +) +{_res=async_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_10_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,650)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var async_var; +if( +(async_var=$B._PyPegen.expect_token(p,ASYNC)) +) +{_res=async_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_11_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var d; +if( +(_literal=$B._PyPegen.expect_token(p,22)) +&& +(d=annotated_rhs_rule(p)) +) +{_res=d; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_12_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var b; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(b=single_target_rule(p)) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{_res=b; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var single_subscript_attribute_target_var; +if( +(single_subscript_attribute_target_var=single_subscript_attribute_target_rule(p)) +) +{_res=single_subscript_attribute_target_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_13_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var d; +if( +(_literal=$B._PyPegen.expect_token(p,22)) +&& +(d=annotated_rhs_rule(p)) +) +{_res=d; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop1_14_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_248_var; +while( +(_tmp_248_var=_tmp_248_rule(p)) +) +{_res=_tmp_248_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_15_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ 
+if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) +{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_expressions_var; +if( +(star_expressions_var=star_expressions_rule(p)) +) +{_res=star_expressions_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_16_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) +{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_expressions_var; +if( +(star_expressions_var=star_expressions_rule(p)) +) +{_res=star_expressions_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_17_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var z; +if( +(_keyword=$B._PyPegen.expect_token(p,608)) +&& +(z=expression_rule(p)) +) +{_res=z; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_19_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=$B._PyPegen.name_token(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_18_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=$B._PyPegen.name_token(p)) +&& +(seq=_loop0_19_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_21_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=$B._PyPegen.name_token(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_20_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=$B._PyPegen.name_token(p)) +&& +(seq=_loop0_21_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_22_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,13)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var newline_var; +if( +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=newline_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_23_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var z; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(z=expression_rule(p)) +) +{_res=z; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_24_rule(p) 
+{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_249_var; +while( +(_tmp_249_var=_tmp_249_rule(p)) +) +{_res=_tmp_249_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_25_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_250_var; +while( +(_tmp_250_var=_tmp_250_rule(p)) +) +{_res=_tmp_250_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_27_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=import_from_as_name_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_26_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=import_from_as_name_rule(p)) +&& +(seq=_loop0_27_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_28_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var z; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(z=$B._PyPegen.name_token(p)) +) +{_res=z; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_30_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=dotted_as_name_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_29_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=dotted_as_name_rule(p)) +&& +(seq=_loop0_30_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_31_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var z; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(z=$B._PyPegen.name_token(p)) +) +{_res=z; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop1_32_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_251_var; +while( +(_tmp_251_var=_tmp_251_rule(p)) +) +{_res=_tmp_251_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_33_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return 
NULL;} +var _literal; +var _literal_1; +var z; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(z=arguments_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{_res=z; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_34_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var z; +if( +(_literal=$B._PyPegen.expect_token(p,51)) +&& +(z=expression_rule(p)) +) +{_res=z; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_35_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var z; +if( +(_literal=$B._PyPegen.expect_token(p,51)) +&& +(z=expression_rule(p)) +) +{_res=z; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_36_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_no_default_var; +while( +(param_no_default_var=param_no_default_rule(p)) +) +{_res=param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_37_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_with_default_var; +while( +(param_with_default_var=param_with_default_rule(p)) +) +{_res=param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_38_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_with_default_var; +while( +(param_with_default_var=param_with_default_rule(p)) +) +{_res=param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_39_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_no_default_var; +while( +(param_no_default_var=param_no_default_rule(p)) +) +{_res=param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_40_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_with_default_var; +while( +(param_with_default_var=param_with_default_rule(p)) +) +{_res=param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_41_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_with_default_var; +while( +(param_with_default_var=param_with_default_rule(p)) +) +{_res=param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop1_42_rule(p) +{if(p.error_indicator){return NULL;} +var 
_res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_no_default_var; +while( +(param_no_default_var=param_no_default_rule(p)) +) +{_res=param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop1_43_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_no_default_var; +while( +(param_no_default_var=param_no_default_rule(p)) +) +{_res=param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_44_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_no_default_var; +while( +(param_no_default_var=param_no_default_rule(p)) +) +{_res=param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_45_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_with_default_var; +while( +(param_with_default_var=param_with_default_rule(p)) +) +{_res=param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_46_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_no_default_var; +while( +(param_no_default_var=param_no_default_rule(p)) +) +{_res=param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_47_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_with_default_var; +while( +(param_with_default_var=param_with_default_rule(p)) +) +{_res=param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_48_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_maybe_default_var; +while( +(param_maybe_default_var=param_maybe_default_rule(p)) +) +{_res=param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_49_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_maybe_default_var; +while( +(param_maybe_default_var=param_maybe_default_rule(p)) +) +{_res=param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_50_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var 
_children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_maybe_default_var; +while( +(param_maybe_default_var=param_maybe_default_rule(p)) +) +{_res=param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_52_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=with_item_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_51_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=with_item_rule(p)) +&& +(seq=_loop0_52_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_54_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=with_item_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_53_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=with_item_rule(p)) +&& +(seq=_loop0_54_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_56_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=with_item_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_55_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=with_item_rule(p)) +&& +(seq=_loop0_56_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_58_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=with_item_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_57_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=with_item_rule(p)) +&& +(seq=_loop0_58_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_59_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var 
_literal; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,8)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop1_60_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var except_block_var; +while( +(except_block_var=except_block_rule(p)) +) +{_res=except_block_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop1_61_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var except_star_block_var; +while( +(except_star_block_var=except_star_block_rule(p)) +) +{_res=except_star_block_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_62_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var z; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(z=$B._PyPegen.name_token(p)) +) +{_res=z; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_63_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var z; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(z=$B._PyPegen.name_token(p)) +) +{_res=z; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop1_64_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var case_block_var; +while( +(case_block_var=case_block_rule(p)) +) +{_res=case_block_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_66_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,18)) +&& +(elem=closed_pattern_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_65_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=closed_pattern_rule(p)) +&& +(seq=_loop0_66_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_67_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,14)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,15)) +) 
+{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_68_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,14)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,15)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_69_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,23)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,22)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_70_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,23)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,22)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_72_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=maybe_star_pattern_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_71_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=maybe_star_pattern_rule(p)) +&& +(seq=_loop0_72_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_74_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=key_value_pattern_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_73_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=key_value_pattern_rule(p)) +&& +(seq=_loop0_74_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_75_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var literal_expr_var; +if( +(literal_expr_var=literal_expr_rule(p)) +) +{_res=literal_expr_var; +break;} +p.mark=_mark;} +{ 
+if(p.error_indicator){return NULL;} +var attr_var; +if( +(attr_var=attr_rule(p)) +) +{_res=attr_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_77_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=pattern_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_76_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=pattern_rule(p)) +&& +(seq=_loop0_77_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_79_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=keyword_pattern_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_78_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=keyword_pattern_rule(p)) +&& +(seq=_loop0_79_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_81_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=type_param_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_80_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=type_param_rule(p)) +&& +(seq=_loop0_81_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop1_82_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_252_var; +while( +(_tmp_252_var=_tmp_252_rule(p)) +) +{_res=_tmp_252_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop1_83_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_253_var; +while( +(_tmp_253_var=_tmp_253_rule(p)) +) +{_res=_tmp_253_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_85_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; 
+while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=star_named_expression_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_84_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=star_named_expression_rule(p)) +&& +(seq=_loop0_85_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop1_86_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_254_var; +while( +(_tmp_254_var=_tmp_254_rule(p)) +) +{_res=_tmp_254_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop1_87_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_255_var; +while( +(_tmp_255_var=_tmp_255_rule(p)) +) +{_res=_tmp_255_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop1_88_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var compare_op_bitwise_or_pair_var; +while( +(compare_op_bitwise_or_pair_var=compare_op_bitwise_or_pair_rule(p)) +) +{_res=compare_op_bitwise_or_pair_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_89_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var tok; +if( +(tok=$B._PyPegen.expect_token(p,28)) +) +{_res=$B._PyPegen.check_barry_as_flufl(p,tok)? 
$B.parser_constants.NULL :tok; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_91_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=_tmp_256_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_90_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=_tmp_256_rule(p)) +&& +(seq=_loop0_91_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_92_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var d; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +&& +(d=expression_rule(p),!p.error_indicator) +) +{_res=d; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_93_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var string_var; +if( +(string_var=$B._PyPegen.string_token(p)) +) +{_res=string_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var fstring_start_var; +if( +(fstring_start_var=$B._PyPegen.expect_token(p,FSTRING_START)) +) +{_res=fstring_start_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_94_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var tuple_var; +if( +(tuple_var=tuple_rule(p)) +) +{_res=tuple_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var group_var; +if( +(group_var=group_rule(p)) +) +{_res=group_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var genexp_var; +if( +(genexp_var=genexp_rule(p)) +) +{_res=genexp_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_95_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var list_var; +if( +(list_var=list_rule(p)) +) +{_res=list_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var listcomp_var; +if( +(listcomp_var=listcomp_rule(p)) +) +{_res=listcomp_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_96_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var dict_var; +if( +(dict_var=dict_rule(p)) +) +{_res=dict_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var set_var; +if( +(set_var=set_rule(p)) +) +{_res=set_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var dictcomp_var; +if( +(dictcomp_var=dictcomp_rule(p)) +) +{_res=dictcomp_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var setcomp_var; +if( +(setcomp_var=setcomp_rule(p)) +) +{_res=setcomp_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_97_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) 
+{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var named_expression_var; +if( +(named_expression_var=named_expression_rule(p)) +) +{_res=named_expression_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_98_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_no_default_var; +while( +(lambda_param_no_default_var=lambda_param_no_default_rule(p)) +) +{_res=lambda_param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_99_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_with_default_var; +while( +(lambda_param_with_default_var=lambda_param_with_default_rule(p)) +) +{_res=lambda_param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_100_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_with_default_var; +while( +(lambda_param_with_default_var=lambda_param_with_default_rule(p)) +) +{_res=lambda_param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_101_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_no_default_var; +while( +(lambda_param_no_default_var=lambda_param_no_default_rule(p)) +) +{_res=lambda_param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_102_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_with_default_var; +while( +(lambda_param_with_default_var=lambda_param_with_default_rule(p)) +) +{_res=lambda_param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_103_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_with_default_var; +while( +(lambda_param_with_default_var=lambda_param_with_default_rule(p)) +) +{_res=lambda_param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop1_104_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_no_default_var; +while( +(lambda_param_no_default_var=lambda_param_no_default_rule(p)) +) +{_res=lambda_param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop1_105_rule(p) +{if(p.error_indicator){return 
NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_no_default_var; +while( +(lambda_param_no_default_var=lambda_param_no_default_rule(p)) +) +{_res=lambda_param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_106_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_no_default_var; +while( +(lambda_param_no_default_var=lambda_param_no_default_rule(p)) +) +{_res=lambda_param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_107_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_with_default_var; +while( +(lambda_param_with_default_var=lambda_param_with_default_rule(p)) +) +{_res=lambda_param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_108_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_no_default_var; +while( +(lambda_param_no_default_var=lambda_param_no_default_rule(p)) +) +{_res=lambda_param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_109_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_with_default_var; +while( +(lambda_param_with_default_var=lambda_param_with_default_rule(p)) +) +{_res=lambda_param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_110_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_maybe_default_var; +while( +(lambda_param_maybe_default_var=lambda_param_maybe_default_rule(p)) +) +{_res=lambda_param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_111_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_maybe_default_var; +while( +(lambda_param_maybe_default_var=lambda_param_maybe_default_rule(p)) +) +{_res=lambda_param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_112_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) +{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var 
star_expressions_var; +if( +(star_expressions_var=star_expressions_rule(p)) +) +{_res=star_expressions_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_113_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var fstring_format_spec_var; +while( +(fstring_format_spec_var=fstring_format_spec_rule(p)) +) +{_res=fstring_format_spec_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_114_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var fstring_middle_var; +while( +(fstring_middle_var=fstring_middle_rule(p)) +) +{_res=fstring_middle_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_115_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_257_var; +while( +(_tmp_257_var=_tmp_257_rule(p)) +) +{_res=_tmp_257_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_116_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var y; +var z; +if( +(y=star_named_expression_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(z=star_named_expressions_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.seq_insert_in_front(p,y,z); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_118_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=double_starred_kvpair_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_117_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=double_starred_kvpair_rule(p)) +&& +(seq=_loop0_118_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop1_119_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var for_if_clause_var; +while( +(for_if_clause_var=for_if_clause_rule(p)) +) +{_res=for_if_clause_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop0_120_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_258_var; +while( +(_tmp_258_var=_tmp_258_rule(p)) +) +{_res=_tmp_258_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_121_rule(p) +{if(p.error_indicator){return NULL;} +var 
_res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_259_var; +while( +(_tmp_259_var=_tmp_259_rule(p)) +) +{_res=_tmp_259_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _tmp_122_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var assignment_expression_var; +if( +(assignment_expression_var=assignment_expression_rule(p)) +) +{_res=assignment_expression_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var expression_var; +if( +(expression_var=expression_rule(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,53) +) +{_res=expression_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_124_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=_tmp_260_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_123_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=_tmp_260_rule(p)) +&& +(seq=_loop0_124_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_125_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var k; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(k=kwargs_rule(p)) +) +{_res=k; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_127_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=kwarg_or_starred_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_126_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=kwarg_or_starred_rule(p)) +&& +(seq=_loop0_127_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_129_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=kwarg_or_double_starred_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_128_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=kwarg_or_double_starred_rule(p)) +&& +(seq=_loop0_129_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function 
_loop0_131_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=kwarg_or_starred_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_130_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=kwarg_or_starred_rule(p)) +&& +(seq=_loop0_131_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_133_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=kwarg_or_double_starred_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_132_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=kwarg_or_double_starred_rule(p)) +&& +(seq=_loop0_133_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_134_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_261_var; +while( +(_tmp_261_var=_tmp_261_rule(p)) +) +{_res=_tmp_261_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_136_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=star_target_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_135_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=star_target_rule(p)) +&& +(seq=_loop0_136_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop1_137_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_262_var; +while( +(_tmp_262_var=_tmp_262_rule(p)) +) +{_res=_tmp_262_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_138_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var star_target_var; +if( +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,16) +&& +(star_target_var=star_target_rule(p)) +) +{_res=star_target_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_140_rule(p) 
+{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=del_target_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_139_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=del_target_rule(p)) +&& +(seq=_loop0_140_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_142_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=expression_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_141_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=expression_rule(p)) +&& +(seq=_loop0_142_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_144_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=expression_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_143_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=expression_rule(p)) +&& +(seq=_loop0_144_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_146_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=expression_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_145_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=expression_rule(p)) +&& +(seq=_loop0_146_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_148_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=expression_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_147_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var 
_res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=expression_rule(p)) +&& +(seq=_loop0_148_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_149_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var indent_var; +var newline_var; +if( +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +&& +(indent_var=$B._PyPegen.expect_token(p,INDENT)) +) +{_res=$B._PyPegen.dummy_name(p,newline_var,indent_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_150_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _tmp_263_var; +if( +(_tmp_263_var=_tmp_263_rule(p)) +) +{_res=_tmp_263_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var kwargs_var; +if( +(kwargs_var=kwargs_rule(p)) +) +{_res=kwargs_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_151_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var args_var; +if( +(args_var=args_rule(p)) +) +{_res=args_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var expression_var; +var for_if_clauses_var; +if( +(expression_var=expression_rule(p)) +&& +(for_if_clauses_var=for_if_clauses_rule(p)) +) +{_res=$B._PyPegen.dummy_name(p,expression_var,for_if_clauses_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_152_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var args_var; +if( +(args_var=args_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=$B._PyPegen.dummy_name(p,args_var,_literal); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_153_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,8)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_154_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,601)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,603)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,602)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_155_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var name_var; +if( +(name_var=$B._PyPegen.name_token(p)) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +) +{_res=$B._PyPegen.dummy_name(p,name_var,_literal); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_156_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var 
_mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var name_var; +var string_var; +if( +(name_var=$B._PyPegen.name_token(p)) +&& +(string_var=$B._PyPegen.string_token(p)) +) +{_res=$B._PyPegen.dummy_name(p,name_var,string_var); +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var soft_keyword_var; +if( +(soft_keyword_var=$B._PyPegen.soft_keyword_token(p)) +) +{_res=soft_keyword_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_157_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,645)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_158_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,22)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,53)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_159_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var list_var; +if( +(list_var=list_rule(p)) +) +{_res=list_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var tuple_var; +if( +(tuple_var=tuple_rule(p)) +) +{_res=tuple_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var genexp_var; +if( +(genexp_var=genexp_rule(p)) +) +{_res=genexp_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,601)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,602)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,603)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_160_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,22)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,53)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_161_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var star_named_expressions_var; +while( +(star_named_expressions_var=star_named_expressions_rule(p)) +) +{_res=star_named_expressions_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_162_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_264_var; +while( +(_tmp_264_var=_tmp_264_rule(p)) +) +{_res=_tmp_264_var; +_children[_n++]=_res; 
+_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_163_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _tmp_265_var; +while( +(_tmp_265_var=_tmp_265_rule(p)) +) +{_res=_tmp_265_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _tmp_164_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) +{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_expressions_var; +if( +(star_expressions_var=star_expressions_rule(p)) +) +{_res=star_expressions_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_165_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,9)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,7)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_166_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,9)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_167_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,9)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,25)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_168_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var slash_no_default_var; +if( +(slash_no_default_var=slash_no_default_rule(p)) +) +{_res=slash_no_default_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var slash_with_default_var; +if( +(slash_with_default_var=slash_with_default_rule(p)) +) +{_res=slash_with_default_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_169_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_maybe_default_var; +while( +(param_maybe_default_var=param_maybe_default_rule(p)) +) +{_res=param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_170_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_no_default_var; 
+while( +(param_no_default_var=param_no_default_rule(p)) +) +{_res=param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_171_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_no_default_var; +while( +(param_no_default_var=param_no_default_rule(p)) +) +{_res=param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_172_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_no_default_var; +while( +(param_no_default_var=param_no_default_rule(p)) +) +{_res=param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_173_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var slash_no_default_var; +if( +(slash_no_default_var=slash_no_default_rule(p)) +) +{_res=slash_no_default_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var slash_with_default_var; +if( +(slash_with_default_var=slash_with_default_rule(p)) +) +{_res=slash_with_default_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_174_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_maybe_default_var; +while( +(param_maybe_default_var=param_maybe_default_rule(p)) +) +{_res=param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _tmp_175_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var param_no_default_var; +if( +(param_no_default_var=param_no_default_rule(p)) +) +{_res=param_no_default_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_176_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_maybe_default_var; +while( +(param_maybe_default_var=param_maybe_default_rule(p)) +) +{_res=param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_177_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_maybe_default_var; +while( +(param_maybe_default_var=param_maybe_default_rule(p)) +) +{_res=param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_178_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,8)) +) +{_res=_literal; 
+break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_179_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,8)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _tmp_266_var; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(_tmp_266_var=_tmp_266_rule(p)) +) +{_res=$B._PyPegen.dummy_name(p,_literal,_tmp_266_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_180_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var param_no_default_var; +if( +(param_no_default_var=param_no_default_rule(p)) +) +{_res=param_no_default_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_181_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_maybe_default_var; +while( +(param_maybe_default_var=param_maybe_default_rule(p)) +) +{_res=param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _tmp_182_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var param_no_default_var; +if( +(param_no_default_var=param_no_default_rule(p)) +) +{_res=param_no_default_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_183_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,17)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop1_184_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var param_with_default_var; +while( +(param_with_default_var=param_with_default_rule(p)) +) +{_res=param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_185_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var lambda_slash_no_default_var; +if( +(lambda_slash_no_default_var=lambda_slash_no_default_rule(p)) +) +{_res=lambda_slash_no_default_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var lambda_slash_with_default_var; 
+if( +(lambda_slash_with_default_var=lambda_slash_with_default_rule(p)) +) +{_res=lambda_slash_with_default_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_186_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_maybe_default_var; +while( +(lambda_param_maybe_default_var=lambda_param_maybe_default_rule(p)) +) +{_res=lambda_param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_187_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_no_default_var; +while( +(lambda_param_no_default_var=lambda_param_no_default_rule(p)) +) +{_res=lambda_param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_188_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_no_default_var; +while( +(lambda_param_no_default_var=lambda_param_no_default_rule(p)) +) +{_res=lambda_param_no_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop0_190_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=lambda_param_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_189_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=lambda_param_rule(p)) +&& +(seq=_loop0_190_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_191_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var lambda_slash_no_default_var; +if( +(lambda_slash_no_default_var=lambda_slash_no_default_rule(p)) +) +{_res=lambda_slash_no_default_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var lambda_slash_with_default_var; +if( +(lambda_slash_with_default_var=lambda_slash_with_default_rule(p)) +) +{_res=lambda_slash_with_default_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_192_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_maybe_default_var; +while( +(lambda_param_maybe_default_var=lambda_param_maybe_default_rule(p)) +) +{_res=lambda_param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _tmp_193_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ 
+if(p.error_indicator){return NULL;} +var lambda_param_no_default_var; +if( +(lambda_param_no_default_var=lambda_param_no_default_rule(p)) +) +{_res=lambda_param_no_default_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_194_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_maybe_default_var; +while( +(lambda_param_maybe_default_var=lambda_param_maybe_default_rule(p)) +) +{_res=lambda_param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_195_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_maybe_default_var; +while( +(lambda_param_maybe_default_var=lambda_param_maybe_default_rule(p)) +) +{_res=lambda_param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _loop1_196_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_with_default_var; +while( +(lambda_param_with_default_var=lambda_param_with_default_rule(p)) +) +{_res=lambda_param_with_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_197_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +var _tmp_267_var; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(_tmp_267_var=_tmp_267_rule(p)) +) +{_res=$B._PyPegen.dummy_name(p,_literal,_tmp_267_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_198_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var lambda_param_no_default_var; +if( +(lambda_param_no_default_var=lambda_param_no_default_rule(p)) +) +{_res=lambda_param_no_default_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_199_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var lambda_param_maybe_default_var; +while( +(lambda_param_maybe_default_var=lambda_param_maybe_default_rule(p)) +) +{_res=lambda_param_maybe_default_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _tmp_200_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var lambda_param_no_default_var; +if( +(lambda_param_no_default_var=lambda_param_no_default_rule(p)) +) +{_res=lambda_param_no_default_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( 
+(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_201_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,16)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,17)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_202_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,8)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_204_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=dotted_name_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_203_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=dotted_name_rule(p)) +&& +(seq=_loop0_204_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_206_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=_tmp_268_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_205_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=_tmp_268_rule(p)) +&& +(seq=_loop0_206_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_208_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=_tmp_269_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_207_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=_tmp_269_rule(p)) +&& +(seq=_loop0_208_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} 
+p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_210_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=_tmp_270_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_209_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=_tmp_270_rule(p)) +&& +(seq=_loop0_210_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_212_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=_tmp_271_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_211_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=_tmp_271_rule(p)) +&& +(seq=_loop0_212_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_213_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,637)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _keyword; +if( +(_keyword=$B._PyPegen.expect_token(p,633)) +) +{_res=_keyword; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_214_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var block_var; +while( +(block_var=block_rule(p)) +) +{_res=block_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _loop1_215_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var except_block_var; +while( +(except_block_var=except_block_rule(p)) +) +{_res=except_block_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_216_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var name_var; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(name_var=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.dummy_name(p,_keyword,name_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_217_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var block_var; +while( +(block_var=block_rule(p)) +) +{_res=block_var; +_children[_n++]=_res; +_mark=p.mark;} 
+p.mark=_mark;} +return _children;} +function _loop1_218_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var except_star_block_var; +while( +(except_star_block_var=except_star_block_rule(p)) +) +{_res=except_star_block_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +if(_n==0 ||p.error_indicator){return NULL;} +return _children;} +function _tmp_219_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var expression_var; +if( +(expression_var=expression_rule(p)) +&& +(_opt_var=_tmp_272_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.dummy_name(p,expression_var,_opt_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_220_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var name_var; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(name_var=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.dummy_name(p,_keyword,name_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_221_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var name_var; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(name_var=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.dummy_name(p,_keyword,name_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_222_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var newline_var; +if( +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=newline_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +) +{_res=_literal; break;} -case $B.ast.IfExp: -VISIT(st,'expr',e.test); -VISIT(st,'expr',e.body); -VISIT(st,'expr',e.orelse); -break; -case $B.ast.Dict: -VISIT_SEQ_WITH_NULL(st,'expr',e.keys); -VISIT_SEQ(st,'expr',e.values); -break; -case $B.ast.Set: -VISIT_SEQ(st,'expr',e.elts); -break; -case $B.ast.GeneratorExp: -if(!visitor.genexp(st,e)) -VISIT_QUIT(st,0); -break; -case $B.ast.ListComp: -if(!visitor.listcomp(st,e)) -VISIT_QUIT(st,0); -break; -case $B.ast.SetComp: -if(!visitor.setcomp(st,e)) -VISIT_QUIT(st,0); -break; -case $B.ast.DictComp: -if(!visitor.dictcomp(st,e)) -VISIT_QUIT(st,0); -break; -case $B.ast.Yield: -if(!symtable_raise_if_annotation_block(st,"yield expression",e)){VISIT_QUIT(st,0);} -if(e.value) -VISIT(st,'expr',e.value); -st.cur.generator=1; -if(st.cur.comprehension){return symtable_raise_if_comprehension_block(st,e);} -break; -case $B.ast.YieldFrom: -if(!symtable_raise_if_annotation_block(st,"yield expression",e)){VISIT_QUIT(st,0);} -VISIT(st,'expr',e.value); -st.cur.generator=1; -if(st.cur.comprehension){return symtable_raise_if_comprehension_block(st,e);} -break; -case $B.ast.Await: -if(!symtable_raise_if_annotation_block(st,"await expression",e)){VISIT_QUIT(st,0);} -VISIT(st,'expr',e.value); -st.cur.coroutine=1; -break; -case $B.ast.Compare: -VISIT(st,'expr',e.left); -VISIT_SEQ(st,'expr',e.comparators); -break; -case $B.ast.Call: -VISIT(st,'expr',e.func); -VISIT_SEQ(st,'expr',e.args); -VISIT_SEQ_WITH_NULL(st,'keyword',e.keywords); -break; -case 
$B.ast.FormattedValue: -VISIT(st,'expr',e.value); -if(e.format_spec) -VISIT(st,'expr',e.format_spec); -break; -case $B.ast.JoinedStr: -VISIT_SEQ(st,'expr',e.values); -break; -case $B.ast.Constant: -break; -case $B.ast.Attribute: -VISIT(st,'expr',e.value); -break; -case $B.ast.Subscript: -VISIT(st,'expr',e.value); -VISIT(st,'expr',e.slice); -break; -case $B.ast.Starred: -VISIT(st,'expr',e.value); -break; -case $B.ast.Slice: -if(e.lower) -VISIT(st,expr,e.lower) -if(e.upper) -VISIT(st,expr,e.upper) -if(e.step) -VISIT(st,expr,e.step) -break; -case $B.ast.Name: -var flag=e.ctx instanceof $B.ast.Load ? USE :DEF_LOCAL -if(! symtable_add_def(st,e.id,flag,LOCATION(e))) -VISIT_QUIT(st,0); -if(e.ctx instanceof $B.ast.Load && -_PyST_IsFunctionLike(st.cur)&& -e.id=="super"){if(!GET_IDENTIFIER('__class__')|| -!symtable_add_def(st,'__class__',USE,LOCATION(e))) -VISIT_QUIT(st,0);} -break; -case $B.ast.List: -VISIT_SEQ(st,expr,e.elts); -break; -case $B.ast.Tuple: -VISIT_SEQ(st,expr,e.elts); +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_223_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var name_var; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(name_var=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.dummy_name(p,_keyword,name_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_224_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var name_var; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(name_var=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.dummy_name(p,_keyword,name_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_225_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var positional_patterns_var; +if( +(positional_patterns_var=positional_patterns_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=$B._PyPegen.dummy_name(p,positional_patterns_var,_literal); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_226_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var expression_var; +if( +(_literal=$B._PyPegen.expect_token(p,51)) +&& +(expression_var=expression_rule(p)) +) +{_res=$B._PyPegen.dummy_name(p,_literal,expression_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_227_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _opt_var; +UNUSED(_opt_var); +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(_opt_var=arguments_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) +{_res=$B._PyPegen.dummy_name(p,_literal,_opt_var,_literal_1); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_228_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var _literal_1; +var _opt_var; +UNUSED(_opt_var); +if( +(_literal=$B._PyPegen.expect_token(p,7)) +&& +(_opt_var=arguments_rule(p),!p.error_indicator) +&& +(_literal_1=$B._PyPegen.expect_token(p,8)) +) 
+{_res=$B._PyPegen.dummy_name(p,_literal,_opt_var,_literal_1); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_230_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=double_starred_kvpair_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_229_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=double_starred_kvpair_rule(p)) +&& +(seq=_loop0_230_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_231_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,26)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_232_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,26)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_233_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) +{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_expressions_var; +if( +(star_expressions_var=star_expressions_rule(p)) +) +{_res=star_expressions_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_234_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) +{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_expressions_var; +if( +(star_expressions_var=star_expressions_rule(p)) +) +{_res=star_expressions_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_235_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,22)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,54)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,26)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_236_rule(p) +{if(p.error_indicator){return NULL;} 
+while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) +{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_expressions_var; +if( +(star_expressions_var=star_expressions_rule(p)) +) +{_res=star_expressions_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_237_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,54)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,26)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_238_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) +{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_expressions_var; +if( +(star_expressions_var=star_expressions_rule(p)) +) +{_res=star_expressions_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_239_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) +{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_expressions_var; +if( +(star_expressions_var=star_expressions_rule(p)) +) +{_res=star_expressions_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_240_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var name_var; +if( +(_literal=$B._PyPegen.expect_token(p,54)) +&& +(name_var=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.dummy_name(p,_literal,name_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_241_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,26)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_242_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) +{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_expressions_var; +if( +(star_expressions_var=star_expressions_rule(p)) +) +{_res=star_expressions_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_243_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var name_var; +if( +(_literal=$B._PyPegen.expect_token(p,54)) +&& +(name_var=$B._PyPegen.name_token(p)) +) 
+{_res=$B._PyPegen.dummy_name(p,_literal,name_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_244_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var fstring_format_spec_var; +while( +(fstring_format_spec_var=fstring_format_spec_rule(p)) +) +{_res=fstring_format_spec_var; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _tmp_245_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var yield_expr_var; +if( +(yield_expr_var=yield_expr_rule(p)) +) +{_res=yield_expr_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var star_expressions_var; +if( +(star_expressions_var=star_expressions_rule(p)) +) +{_res=star_expressions_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_246_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var name_var; +if( +(_literal=$B._PyPegen.expect_token(p,54)) +&& +(name_var=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.dummy_name(p,_literal,name_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_247_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,26)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_248_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var z; +if( +(z=star_targets_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +) +{_res=z; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_249_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,23)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,52)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_250_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,23)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,52)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_251_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var f; +var newline_var; +if( +(_literal=$B._PyPegen.expect_token(p,49)) +&& +(f=named_expression_rule(p)) +&& +(newline_var=$B._PyPegen.expect_token(p,NEWLINE)) +) +{_res=f; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_252_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; 
+{ +if(p.error_indicator){return NULL;} +var _literal; +var c; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(c=expression_rule(p)) +) +{_res=c; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_253_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var c; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(c=star_expression_rule(p)) +) +{_res=c; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_254_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var c; +if( +(_keyword=$B._PyPegen.expect_token(p,574)) +&& +(c=conjunction_rule(p)) +) +{_res=c; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_255_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var c; +if( +(_keyword=$B._PyPegen.expect_token(p,575)) +&& +(c=inversion_rule(p)) +) +{_res=c; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_256_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var slice_var; +if( +(slice_var=slice_rule(p)) +) +{_res=slice_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var starred_expression_var; +if( +(starred_expression_var=starred_expression_rule(p)) +) +{_res=starred_expression_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_257_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var fstring_var; +if( +(fstring_var=fstring_rule(p)) +) +{_res=fstring_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var string_var; +if( +(string_var=string_rule(p)) +) +{_res=string_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_258_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var z; +if( +(_keyword=$B._PyPegen.expect_token(p,642)) +&& +(z=disjunction_rule(p)) +) +{_res=z; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_259_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var z; +if( +(_keyword=$B._PyPegen.expect_token(p,642)) +&& +(z=disjunction_rule(p)) +) +{_res=z; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_260_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var starred_expression_var; +if( +(starred_expression_var=starred_expression_rule(p)) +) +{_res=starred_expression_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _tmp_273_var; +if( +(_tmp_273_var=_tmp_273_rule(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,22) +) +{_res=_tmp_273_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_261_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var c; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(c=star_target_rule(p)) +) +{_res=c; +break;} 
+p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_262_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var c; +if( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(c=star_target_rule(p)) +) +{_res=c; +break;} +p.mark=_mark;} +_res=NULL; break;} -VISIT_QUIT(st,1);} -visitor.type_param=function(st,tp){switch(tp.constructor){case $B.ast.TypeVar: -if(!symtable_add_def(st,tp.name,DEF_TYPE_PARAM |DEF_LOCAL,LOCATION(tp))) -VISIT_QUIT(st,0); -if(tp.bound){var is_in_class=st.cur.can_see_class_scope; -if(!symtable_enter_block(st,tp.name,TypeVarBoundBlock,tp,LOCATION(tp))) -VISIT_QUIT(st,0); -st.cur.can_see_class_scope=is_in_class; -if(is_in_class && !symtable_add_def(st,"__classdict__",USE,LOCATION(tp.bound))){VISIT_QUIT(st,0);} -VISIT(st,expr,tp.bound); -if(!symtable_exit_block(st)) -VISIT_QUIT(st,0);} -break; -case $B.ast.TypeVarTuple: -if(!symtable_add_def(st,tp.name,DEF_TYPE_PARAM |DEF_LOCAL,LOCATION(tp))) -VISIT_QUIT(st,0); -break; -case $B.ast.ParamSpec: -if(!symtable_add_def(st,tp.name,DEF_TYPE_PARAM |DEF_LOCAL,LOCATION(tp))) -VISIT_QUIT(st,0); +return _res;} +function _tmp_263_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _gather_274_var; +var _literal; +var kwargs_var; +if( +(_gather_274_var=_gather_274_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(kwargs_var=kwargs_rule(p)) +) +{_res=$B._PyPegen.dummy_name(p,_gather_274_var,_literal,kwargs_var); break;} -VISIT_QUIT(st,1);} -visitor.pattern=function(st,p){switch(p.constructor){case $B.ast.MatchValue: -VISIT(st,expr,p.value); -break; -case $B.ast.MatchSingleton: -break; -case $B.ast.MatchSequence: -VISIT_SEQ(st,pattern,p.patterns); -break; -case $B.ast.MatchStar: -if(p.name){symtable_add_def(st,p.name,DEF_LOCAL,LOCATION(p));} -break; -case $B.ast.MatchMapping: -VISIT_SEQ(st,expr,p.keys); -VISIT_SEQ(st,pattern,p.patterns); -if(p.rest){symtable_add_def(st,p.rest,DEF_LOCAL,LOCATION(p));} -break; -case $B.ast.MatchClass: -VISIT(st,expr,p.cls); -VISIT_SEQ(st,pattern,p.patterns); -VISIT_SEQ(st,pattern,p.kwd_patterns); -break; -case $B.ast.MatchAs: -if(p.pattern){VISIT(st,pattern,p.pattern);} -if(p.name){symtable_add_def(st,p.name,DEF_LOCAL,LOCATION(p));} -break; -case $B.ast.MatchOr: -VISIT_SEQ(st,pattern,p.patterns); +p.mark=_mark;} +_res=NULL; break;} -VISIT_QUIT(st,1);} -function symtable_implicit_arg(st,pos){var id='.'+pos -if(!symtable_add_def(st,id,DEF_PARAM,ST_LOCATION(st.cur))){return 0;} -return 1;} -visitor.params=function(st,args){if(! args){return-1} -for(var arg of args){if(! 
symtable_add_def(st,arg.arg,DEF_PARAM,LOCATION(arg))) -return 0} -return 1} -visitor.annotation=function(st,annotation){var future_annotations=st.future.features & $B.CO_FUTURE_ANNOTATIONS -if(future_annotations && -!symtable_enter_block(st,'_annotation',AnnotationBlock,annotation,annotation.lineno,annotation.col_offset,annotation.end_lineno,annotation.end_col_offset)){VISIT_QUIT(st,0)} -VISIT(st,expr,annotation) -if(future_annotations && !symtable_exit_block(st)){VISIT_QUIT(st,0)} -return 1} -visitor.argannotations=function(st,args){if(!args){return-1} -for(var arg of args){if(arg.annotation){VISIT(st,expr,arg.annotation)}} -return 1} -visitor.annotations=function(st,o,a,returns){var future_annotations=st.future.ff_features & $B.CO_FUTURE_ANNOTATIONS; -if(future_annotations && -!symtable_enter_block(st,'_annotation',AnnotationBlock,o,o.lineno,o.col_offset,o.end_lineno,o.end_col_offset)){VISIT_QUIT(st,0);} -if(a.posonlyargs && !visitor.argannotations(st,a.posonlyargs)) -return 0; -if(a.args && !visitor.argannotations(st,a.args)) -return 0; -if(a.vararg && a.vararg.annotation) -VISIT(st,expr,a.vararg.annotation); -if(a.kwarg && a.kwarg.annotation) -VISIT(st,expr,a.kwarg.annotation); -if(a.kwonlyargs && !visitor.argannotations(st,a.kwonlyargs)) -return 0; -if(future_annotations && !symtable_exit_block(st)){VISIT_QUIT(st,0);} -if(returns && !visitor.annotation(st,returns)){VISIT_QUIT(st,0);} -return 1;} -visitor.arguments=function(st,a){ -if(a.posonlyargs && !visitor.params(st,a.posonlyargs)) -return 0; -if(a.args && !visitor.params(st,a.args)) -return 0; -if(a.kwonlyargs && !visitor.params(st,a.kwonlyargs)) -return 0; -if(a.vararg){if(!symtable_add_def(st,a.vararg.arg,DEF_PARAM,LOCATION(a.vararg))) -return 0; -st.cur.varargs=1;} -if(a.kwarg){if(!symtable_add_def(st,a.kwarg.arg,DEF_PARAM,LOCATION(a.kwarg))) -return 0; -st.cur.varkeywords=1;} -return 1;} -visitor.excepthandler=function(st,eh){if(eh.type) -VISIT(st,expr,eh.type); -if(eh.name) -if(!symtable_add_def(st,eh.name,DEF_LOCAL,LOCATION(eh))) -return 0; -VISIT_SEQ(st,stmt,eh.body); -return 1;} -visitor.withitem=function(st,item){VISIT(st,'expr',item.context_expr); -if(item.optional_vars){VISIT(st,'expr',item.optional_vars);} -return 1;} -visitor.match_case=function(st,m){VISIT(st,pattern,m.pattern); -if(m.guard){VISIT(st,expr,m.guard);} -VISIT_SEQ(st,stmt,m.body); -return 1;} -visitor.alias=function(st,a){ -var store_name,name=(a.asname==NULL)? 
a.name :a.asname; -var dot=name.search('\\.'); -if(dot !=-1){store_name=name.substring(0,dot); -if(!store_name) -return 0;}else{store_name=name;} -if(name !="*"){var r=symtable_add_def(st,store_name,DEF_IMPORT,LOCATION(a)); -return r;}else{if(st.cur.type !=ModuleBlock){var lineno=a.lineno,col_offset=a.col_offset,end_lineno=a.end_lineno,end_col_offset=a.end_col_offset; -var exc=PyErr_SetString(PyExc_SyntaxError,IMPORT_STAR_WARNING); -set_exc_info(exc,st.filename,lineno,col_offset,end_lineno,end_col_offset); -throw exc} -st.cur.$has_import_star=true -return 1;}} -visitor.comprehension=function(st,lc){st.cur.comp_iter_target=1; -VISIT(st,expr,lc.target); -st.cur.comp_iter_target=0; -st.cur.comp_iter_expr++; -VISIT(st,expr,lc.iter); -st.cur.comp_iter_expr--; -VISIT_SEQ(st,expr,lc.ifs); -if(lc.is_async){st.cur.coroutine=1;} -return 1;} -visitor.keyword=function(st,k){VISIT(st,expr,k.value); -return 1;} -function symtable_handle_comprehension(st,e,scope_name,generators,elt,value){var is_generator=(e.constructor===$B.ast.GeneratorExp); -var outermost=generators[0] -st.cur.comp_iter_expr++; -VISIT(st,expr,outermost.iter); -st.cur.comp_iter_expr--; -if(!scope_name || -!symtable_enter_block(st,scope_name,FunctionBlock,e,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset)){return 0;} -switch(e.constructor){case $B.ast.ListComp: -st.cur.comprehension=ListComprehension; -break; -case $B.ast.SetComp: -st.cur.comprehension=SetComprehension; -break; -case $B.ast.DictComp: -st.cur.comprehension=DictComprehension; -break; -default: -st.cur.comprehension=GeneratorExpression; +return _res;} +function _tmp_264_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var star_targets_var; +if( +(star_targets_var=star_targets_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +) +{_res=$B._PyPegen.dummy_name(p,star_targets_var,_literal); break;} -if(outermost.is_async){st.cur.coroutine=1;} -if(!symtable_implicit_arg(st,0)){symtable_exit_block(st); -return 0;} -st.cur.comp_iter_target=1; -VISIT(st,expr,outermost.target); -st.cur.comp_iter_target=0; -VISIT_SEQ(st,expr,outermost.ifs); -VISIT_SEQ_TAIL(st,comprehension,generators,1); -if(value) -VISIT(st,expr,value); -VISIT(st,expr,elt); -st.cur.generator=is_generator; -var is_async=st.cur.coroutine && !is_generator; -if(!symtable_exit_block(st)){return 0;} -if(is_async){st.cur.coroutine=1;} -return 1;} -visitor.genexp=function(st,e){return symtable_handle_comprehension(st,e,'genexpr',e.generators,e.elt,NULL);} -visitor.listcomp=function(st,e){return symtable_handle_comprehension(st,e,'listcomp',e.generators,e.elt,NULL);} -visitor.setcomp=function(st,e){return symtable_handle_comprehension(st,e,'setcomp',e.generators,e.elt,NULL);} -visitor.dictcomp=function(st,e){return symtable_handle_comprehension(st,e,'dictcomp',e.generators,e.key,e.value);} -function symtable_raise_if_annotation_block(st,name,e){var type=st.cur.type,exc -if(type==AnnotationBlock) -exc=PyErr_Format(PyExc_SyntaxError,ANNOTATION_NOT_ALLOWED,name); -else if(type==TypeVarBoundBlock) -exc=PyErr_Format(PyExc_SyntaxError,TYPEVAR_BOUND_NOT_ALLOWED,name); -else if(type==TypeAliasBlock) -exc=PyErr_Format(PyExc_SyntaxError,TYPEALIAS_NOT_ALLOWED,name); -else if(type==TypeParamBlock) -exc=PyErr_Format(PyExc_SyntaxError,TYPEPARAM_NOT_ALLOWED,name); -else -return 1; -set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset); -throw exc} -function symtable_raise_if_comprehension_block(st,e){var 
type=st.cur.comprehension; -var exc=PyErr_SetString(PyExc_SyntaxError,(type==ListComprehension)? "'yield' inside list comprehension" : -(type==SetComprehension)? "'yield' inside set comprehension" : -(type==DictComprehension)? "'yield' inside dict comprehension" : -"'yield' inside generator expression"); -exc.$frame_obj=$B.frame_obj -set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset); -throw exc}})(__BRYTHON__) -; -var docs={ArithmeticError:"Base class for arithmetic errors.",AssertionError:"Assertion failed.",AttributeError:"Attribute not found.",BaseException:"Common base class for all exceptions",BaseExceptionGroup:"A combination of multiple unrelated exceptions.",BlockingIOError:"I/O operation would block.",BrokenPipeError:"Broken pipe.",BufferError:"Buffer error.",BytesWarning:"Base class for warnings about bytes and buffer related problems, mostly\nrelated to conversion from str or comparing to str.",ChildProcessError:"Child process error.",ConnectionAbortedError:"Connection aborted.",ConnectionError:"Connection error.",ConnectionRefusedError:"Connection refused.",ConnectionResetError:"Connection reset.",DeprecationWarning:"Base class for warnings about deprecated features.",EOFError:"Read beyond end of file.",Ellipsis:"",EncodingWarning:"Base class for warnings about encodings.",EnvironmentError:"Base class for I/O related errors.",Exception:"Common base class for all non-exit exceptions.",ExceptionGroup:"",False:"bool(x) -> bool\n\nReturns True when the argument x is true, False otherwise.\nThe builtins True and False are the only two instances of the class bool.\nThe class bool is a subclass of the class int, and cannot be subclassed.",FileExistsError:"File already exists.",FileNotFoundError:"File not found.",FloatingPointError:"Floating point operation failed.",FutureWarning:"Base class for warnings about constructs that will change semantically\nin the future.",GeneratorExit:"Request that a generator exit.",IOError:"Base class for I/O related errors.",ImportError:"Import can't find module, or can't find name in module.",ImportWarning:"Base class for warnings about probable mistakes in module imports",IndentationError:"Improper indentation.",IndexError:"Sequence index out of range.",InterruptedError:"Interrupted by signal.",IsADirectoryError:"Operation doesn't work on directories.",KeyError:"Mapping key not found.",KeyboardInterrupt:"Program interrupted by user.",LookupError:"Base class for lookup errors.",MemoryError:"Out of memory.",ModuleNotFoundError:"Module not found.",NameError:"Name not found globally.",None:"",NotADirectoryError:"Operation only works on directories.",NotImplemented:"",NotImplementedError:"Method or function hasn't been implemented yet.",OSError:"Base class for I/O related errors.",OverflowError:"Result too large to be represented.",PendingDeprecationWarning:"Base class for warnings about features which will be deprecated\nin the future.",PermissionError:"Not enough permissions.",ProcessLookupError:"Process not found.",RecursionError:"Recursion limit exceeded.",ReferenceError:"Weak ref proxy used after referent went away.",ResourceWarning:"Base class for warnings about resource usage.",RuntimeError:"Unspecified run-time error.",RuntimeWarning:"Base class for warnings about dubious runtime behavior.",StopAsyncIteration:"Signal the end from iterator.__anext__().",StopIteration:"Signal the end from iterator.__next__().",SyntaxError:"Invalid syntax.",SyntaxWarning:"Base class for warnings about dubious 
syntax.",SystemError:"Internal error in the Python interpreter.\n\nPlease report this to the Python maintainer, along with the traceback,\nthe Python version, and the hardware/OS platform and version.",SystemExit:"Request to exit from the interpreter.",TabError:"Improper mixture of spaces and tabs.",TimeoutError:"Timeout expired.",True:"bool(x) -> bool\n\nReturns True when the argument x is true, False otherwise.\nThe builtins True and False are the only two instances of the class bool.\nThe class bool is a subclass of the class int, and cannot be subclassed.",TypeError:"Inappropriate argument type.",UnboundLocalError:"Local name referenced but not bound to a value.",UnicodeDecodeError:"Unicode decoding error.",UnicodeEncodeError:"Unicode encoding error.",UnicodeError:"Unicode related error.",UnicodeTranslateError:"Unicode translation error.",UnicodeWarning:"Base class for warnings about Unicode related problems, mostly\nrelated to conversion problems.",UserWarning:"Base class for warnings generated by user code.",ValueError:"Inappropriate argument value (of correct type).",Warning:"Base class for warning categories.",WindowsError:"Base class for I/O related errors.",ZeroDivisionError:"Second argument to a division or modulo operation was zero.",__debug__:"bool(x) -> bool\n\nReturns True when the argument x is true, False otherwise.\nThe builtins True and False are the only two instances of the class bool.\nThe class bool is a subclass of the class int, and cannot be subclassed.",abs:"Return the absolute value of the argument.",aiter:"Return an AsyncIterator for an AsyncIterable object.",all:"Return True if bool(x) is True for all values x in the iterable.\n\nIf the iterable is empty, return True.",anext:"async anext(aiterator[, default])\n\nReturn the next item from the async iterator. If default is given and the async\niterator is exhausted, it is returned instead of raising StopAsyncIteration.",any:"Return True if bool(x) is True for any x in the iterable.\n\nIf the iterable is empty, return False.",ascii:"Return an ASCII-only representation of an object.\n\nAs repr(), return a string containing a printable representation of an\nobject, but escape the non-ASCII characters in the string returned by\nrepr() using \\\\x, \\\\u or \\\\U escapes. This generates a string similar\nto that returned by repr() in Python 2.",bin:"Return the binary representation of an integer.\n\n >>> bin(2796202)\n '0b1010101010101010101010'",bool:"bool(x) -> bool\n\nReturns True when the argument x is true, False otherwise.\nThe builtins True and False are the only two instances of the class bool.\nThe class bool is a subclass of the class int, and cannot be subclassed.",breakpoint:"breakpoint(*args, **kws)\n\nCall sys.breakpointhook(*args, **kws). 
sys.breakpointhook() must accept\nwhatever arguments are passed.\n\nBy default, this drops you into the pdb debugger.",bytearray:"bytearray(iterable_of_ints) -> bytearray\nbytearray(string, encoding[, errors]) -> bytearray\nbytearray(bytes_or_buffer) -> mutable copy of bytes_or_buffer\nbytearray(int) -> bytes array of size given by the parameter initialized with null bytes\nbytearray() -> empty bytes array\n\nConstruct a mutable bytearray object from:\n - an iterable yielding integers in range(256)\n - a text string encoded using the specified encoding\n - a bytes or a buffer object\n - any object implementing the buffer API.\n - an integer",bytes:"bytes(iterable_of_ints) -> bytes\nbytes(string, encoding[, errors]) -> bytes\nbytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer\nbytes(int) -> bytes object of size given by the parameter initialized with null bytes\nbytes() -> empty bytes object\n\nConstruct an immutable array of bytes from:\n - an iterable yielding integers in range(256)\n - a text string encoded using the specified encoding\n - any object implementing the buffer API.\n - an integer",callable:"Return whether the object is callable (i.e., some kind of function).\n\nNote that classes are callable, as are instances of classes with a\n__call__() method.",chr:"Return a Unicode string of one character with ordinal i; 0 <= i <= 0x10ffff.",classmethod:"classmethod(function) -> method\n\nConvert a function to be a class method.\n\nA class method receives the class as implicit first argument,\njust like an instance method receives the instance.\nTo declare a class method, use this idiom:\n\n class C:\n @classmethod\n def f(cls, arg1, arg2, argN):\n ...\n\nIt can be called either on the class (e.g. C.f()) or on an instance\n(e.g. C().f()). 
The instance is ignored except for its class.\nIf a class method is called for a derived class, the derived class\nobject is passed as the implied first argument.\n\nClass methods are different than C++ or Java static methods.\nIf you want those, see the staticmethod builtin.",compile:"Compile source into a code object that can be executed by exec() or eval().\n\nThe source code may represent a Python module, statement or expression.\nThe filename will be used for run-time error messages.\nThe mode must be 'exec' to compile a module, 'single' to compile a\nsingle (interactive) statement, or 'eval' to compile an expression.\nThe flags argument, if present, controls which future statements influence\nthe compilation of the code.\nThe dont_inherit argument, if true, stops the compilation inheriting\nthe effects of any future statements in effect in the code calling\ncompile; if absent or false these statements do influence the compilation,\nin addition to any features explicitly specified.",complex:"Create a complex number from a real part and an optional imaginary part.\n\nThis is equivalent to (real + imag*1j) where imag defaults to 0.",copyright:"interactive prompt objects for printing the license text, a list of\n contributors and the copyright notice.",credits:"interactive prompt objects for printing the license text, a list of\n contributors and the copyright notice.",delattr:"Deletes the named attribute from the given object.\n\ndelattr(x, 'y') is equivalent to ``del x.y``",dict:"dict() -> new empty dictionary\ndict(mapping) -> new dictionary initialized from a mapping object's\n (key, value) pairs\ndict(iterable) -> new dictionary initialized as if via:\n d = {}\n for k, v in iterable:\n d[k] = v\ndict(**kwargs) -> new dictionary initialized with the name=value pairs\n in the keyword argument list. For example: dict(one=1, two=2)",dir:"Show attributes of an object.\n\nIf called without an argument, return the names in the current scope.\nElse, return an alphabetized list of names comprising (some of) the attributes\nof the given object, and of attributes reachable from it.\nIf the object supplies a method named __dir__, it will be used; otherwise\nthe default dir() logic is used and returns:\n for a module object: the module's attributes.\n for a class object: its attributes, and recursively the attributes\n of its bases.\n for any other object: its attributes, its class's attributes, and\n recursively the attributes of its class's base classes.",divmod:"Return the tuple (x//y, x%y). 
Invariant: div*y + mod == x.",enumerate:"Return an enumerate object.\n\n iterable\n an object supporting iteration\n\nThe enumerate object yields pairs containing a count (from start, which\ndefaults to zero) and a value yielded by the iterable argument.\n\nenumerate is useful for obtaining an indexed list:\n (0, seq[0]), (1, seq[1]), (2, seq[2]), ...",eval:"Evaluate the given source in the C of globals and locals.\n\nThe source may be a string representing a Python expression\nor a code object as returned by compile().\nThe globals must be a dictionary and locals can be any mapping,\ndefaulting to the current globals and locals.\nIf only globals is given, locals defaults to it.",exec:"Execute the given source in the C of globals and locals.\n\nThe source may be a string representing one or more Python statements\nor a code object as returned by compile().\nThe globals must be a dictionary and locals can be any mapping,\ndefaulting to the current globals and locals.\nIf only globals is given, locals defaults to it.\nThe closure must be a tuple of cellvars, and can only be used\nwhen source is a code object requiring exactly that many cellvars.",exit:"",filter:"filter(function or None, iterable) --> filter object\n\nReturn an iterator yielding those items of iterable for which function(item)\nis true. If function is None, return the items that are true.",float:"Convert a string or number to a floating point number, if possible.",format:"Return type(value).__format__(value, format_spec)\n\nMany built-in types implement format_spec according to the\nFormat Specification Mini-language. See help('FORMATTING').\n\nIf type(value) does not supply a method named __format__\nand format_spec is empty, then str(value) is returned.\nSee also help('SPECIALMETHODS').",frozenset:"frozenset() -> empty frozenset object\nfrozenset(iterable) -> frozenset object\n\nBuild an immutable unordered collection of unique elements.",getattr:"Get a named attribute from an object.\n\ngetattr(x, 'y') is equivalent to x.y\nWhen a default argument is given, it is returned when the attribute doesn't\nexist; without it, an exception is raised in that case.",globals:"Return the dictionary containing the current scope's global variables.\n\nNOTE: Updates to this dictionary *will* affect name lookups in the current\nglobal scope and vice-versa.",hasattr:"Return whether the object has an attribute with the given name.\n\nThis is done by calling getattr(obj, name) and catching AttributeError.",hash:"Return the hash value for the given object.\n\nTwo objects that compare equal must also have the same hash value, but the\nreverse is not necessarily true.",help:"Define the builtin 'help'.\n\n This is a wrapper around pydoc.help that provides a helpful message\n when 'help' is typed at the Python interactive prompt.\n\n Calling help() at the Python prompt starts an interactive help session.\n Calling help(thing) prints help for the python object 'thing'.\n ",hex:"Return the hexadecimal representation of an integer.\n\n >>> hex(12648430)\n '0xc0ffee'",id:"Return the identity of an object.\n\nThis is guaranteed to be unique among simultaneously existing objects.\n(CPython uses the object's memory address.)",input:"Read a string from standard input. 
The trailing newline is stripped.\n\nThe prompt string, if given, is printed to standard output without a\ntrailing newline before reading input.\n\nIf the user hits EOF (*nix: Ctrl-D, Windows: Ctrl-Z+Return), raise EOFError.\nOn *nix systems, readline is used if available.",int:"int([x]) -> integer\nint(x, base=10) -> integer\n\nConvert a number or string to an integer, or return 0 if no arguments\nare given. If x is a number, return x.__int__(). For floating point\nnumbers, this truncates towards zero.\n\nIf x is not a number or if base is given, then x must be a string,\nbytes, or bytearray instance representing an integer literal in the\ngiven base. The literal can be preceded by '+' or '-' and be surrounded\nby whitespace. The base defaults to 10. Valid bases are 0 and 2-36.\nBase 0 means to interpret the base from the string as an integer literal.\n>>> int('0b100', base=0)\n4",isinstance:"Return whether an object is an instance of a class or of a subclass thereof.\n\nA tuple, as in ``isinstance(x, (A, B, ...))``, may be given as the target to\ncheck against. This is equivalent to ``isinstance(x, A) or isinstance(x, B)\nor ...`` etc.",issubclass:"Return whether 'cls' is derived from another class or is the same class.\n\nA tuple, as in ``issubclass(x, (A, B, ...))``, may be given as the target to\ncheck against. This is equivalent to ``issubclass(x, A) or issubclass(x, B)\nor ...``.",iter:"Get an iterator from an object.\n\nIn the first form, the argument must supply its own iterator, or be a sequence.\nIn the second form, the callable is called until it returns the sentinel.",len:"Return the number of items in a container.",license:"interactive prompt objects for printing the license text, a list of\n contributors and the copyright notice.",list:"Built-in mutable sequence.\n\nIf no argument is given, the constructor creates a new empty list.\nThe argument must be an iterable if specified.",locals:"Return a dictionary containing the current scope's local variables.\n\nNOTE: Whether or not updates to this dictionary will affect name lookups in\nthe local scope and vice-versa is *implementation dependent* and not\ncovered by any backwards compatibility guarantees.",map:"map(func, *iterables) --> map object\n\nMake an iterator that computes the function using arguments from\neach of the iterables. Stops when the shortest iterable is exhausted.",max:"max(iterable, *[, default=obj, key=func]) -> value\nmax(arg1, arg2, *args, *[, key=func]) -> value\n\nWith a single iterable argument, return its biggest item. The\ndefault keyword-only argument specifies an object to return if\nthe provided iterable is empty.\nWith two or more arguments, return the largest argument.",memoryview:"Create a new memoryview object which references the given object.",min:"min(iterable, *[, default=obj, key=func]) -> value\nmin(arg1, arg2, *args, *[, key=func]) -> value\n\nWith a single iterable argument, return its smallest item. 
The\ndefault keyword-only argument specifies an object to return if\nthe provided iterable is empty.\nWith two or more arguments, return the smallest argument.",next:"Return the next item from the iterator.\n\nIf default is given and the iterator is exhausted,\nit is returned instead of raising StopIteration.",object:"The base class of the class hierarchy.\n\nWhen called, it accepts no arguments and returns a new featureless\ninstance that has no instance attributes and cannot be given any.\n",oct:"Return the octal representation of an integer.\n\n >>> oct(342391)\n '0o1234567'",open:"Open file and return a stream. Raise OSError upon failure.\n\nfile is either a text or byte string giving the name (and the path\nif the file isn't in the current working directory) of the file to\nbe opened or an integer file descriptor of the file to be\nwrapped. (If a file descriptor is given, it is closed when the\nreturned I/O object is closed, unless closefd is set to False.)\n\nmode is an optional string that specifies the mode in which the file\nis opened. It defaults to 'r' which means open for reading in text\nmode. Other common values are 'w' for writing (truncating the file if\nit already exists), 'x' for creating and writing to a new file, and\n'a' for appending (which on some Unix systems, means that all writes\nappend to the end of the file regardless of the current seek position).\nIn text mode, if encoding is not specified the encoding used is platform\ndependent: locale.getencoding() is called to get the current locale encoding.\n(For reading and writing raw bytes use binary mode and leave encoding\nunspecified.) The available modes are:\n\n========= ===============================================================\nCharacter Meaning\n--------- ---------------------------------------------------------------\n'r' open for reading (default)\n'w' open for writing, truncating the file first\n'x' create a new file and open it for writing\n'a' open for writing, appending to the end of the file if it exists\n'b' binary mode\n't' text mode (default)\n'+' open a disk file for updating (reading and writing)\n========= ===============================================================\n\nThe default mode is 'rt' (open for reading text). For binary random\naccess, the mode 'w+b' opens and truncates the file to 0 bytes, while\n'r+b' opens the file without truncation. The 'x' mode implies 'w' and\nraises an `FileExistsError` if the file already exists.\n\nPython distinguishes between files opened in binary and text modes,\neven when the underlying operating system doesn't. Files opened in\nbinary mode (appending 'b' to the mode argument) return contents as\nbytes objects without any decoding. In text mode (the default, or when\n't' is appended to the mode argument), the contents of the file are\nreturned as strings, the bytes having been first decoded using a\nplatform-dependent encoding or using the specified encoding if given.\n\nbuffering is an optional integer used to set the buffering policy.\nPass 0 to switch buffering off (only allowed in binary mode), 1 to select\nline buffering (only usable in text mode), and an integer > 1 to indicate\nthe size of a fixed-size chunk buffer. 
When no buffering argument is\ngiven, the default buffering policy works as follows:\n\n* Binary files are buffered in fixed-size chunks; the size of the buffer\n is chosen using a heuristic trying to determine the underlying device's\n \"block size\" and falling back on `io.DEFAULT_BUFFER_SIZE`.\n On many systems, the buffer will typically be 4096 or 8192 bytes long.\n\n* \"Interactive\" text files (files for which isatty() returns True)\n use line buffering. Other text files use the policy described above\n for binary files.\n\nencoding is the name of the encoding used to decode or encode the\nfile. This should only be used in text mode. The default encoding is\nplatform dependent, but any encoding supported by Python can be\npassed. See the codecs module for the list of supported encodings.\n\nerrors is an optional string that specifies how encoding errors are to\nbe handled---this argument should not be used in binary mode. Pass\n'strict' to raise a ValueError exception if there is an encoding error\n(the default of None has the same effect), or pass 'ignore' to ignore\nerrors. (Note that ignoring encoding errors can lead to data loss.)\nSee the documentation for codecs.register or run 'help(codecs.Codec)'\nfor a list of the permitted encoding error strings.\n\nnewline controls how universal newlines works (it only applies to text\nmode). It can be None, '', '\\n', '\\r', and '\\r\\n'. It works as\nfollows:\n\n* On input, if newline is None, universal newlines mode is\n enabled. Lines in the input can end in '\\n', '\\r', or '\\r\\n', and\n these are translated into '\\n' before being returned to the\n caller. If it is '', universal newline mode is enabled, but line\n endings are returned to the caller untranslated. If it has any of\n the other legal values, input lines are only terminated by the given\n string, and the line ending is returned to the caller untranslated.\n\n* On output, if newline is None, any '\\n' characters written are\n translated to the system default line separator, os.linesep. If\n newline is '' or '\\n', no translation takes place. If newline is any\n of the other legal values, any '\\n' characters written are translated\n to the given string.\n\nIf closefd is False, the underlying file descriptor will be kept open\nwhen the file is closed. This does not work when a file name is given\nand must be True in that case.\n\nA custom opener can be used by passing a callable as *opener*. The\nunderlying file descriptor for the file object is then obtained by\ncalling *opener* with (*file*, *flags*). *opener* must return an open\nfile descriptor (passing os.open as *opener* results in functionality\nsimilar to passing None).\n\nopen() returns a file object whose type depends on the mode, and\nthrough which the standard file operations such as reading and writing\nare performed. When open() is used to open a file in a text mode ('w',\n'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open\na file in a binary mode, the returned class varies: in read binary\nmode, it returns a BufferedReader; in write binary and append binary\nmodes, it returns a BufferedWriter, and in read/write mode, it returns\na BufferedRandom.\n\nIt is also possible to use a string or bytearray as a file for both\nreading and writing. 
For strings StringIO can be used like a file\nopened in a text mode, and for bytes a BytesIO can be used like a file\nopened in a binary mode.",ord:"Return the Unicode code point for a one-character string.",pow:"Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\nSome types, such as ints, are able to use a more efficient algorithm when\ninvoked using the three argument form.",print:"Prints the values to a stream, or to sys.stdout by default.\n\n sep\n string inserted between values, default a space.\n end\n string appended after the last value, default a newline.\n file\n a file-like object (stream); defaults to the current sys.stdout.\n flush\n whether to forcibly flush the stream.",property:"Property attribute.\n\n fget\n function to be used for getting an attribute value\n fset\n function to be used for setting an attribute value\n fdel\n function to be used for del'ing an attribute\n doc\n docstring\n\nTypical use is to define a managed attribute x:\n\nclass C(object):\n def getx(self): return self._x\n def setx(self, value): self._x = value\n def delx(self): del self._x\n x = property(getx, setx, delx, \"I'm the 'x' property.\")\n\nDecorators make defining new properties or modifying existing ones easy:\n\nclass C(object):\n @property\n def x(self):\n \"I am the 'x' property.\"\n return self._x\n @x.setter\n def x(self, value):\n self._x = value\n @x.deleter\n def x(self):\n del self._x",quit:"",range:"range(stop) -> range object\nrange(start, stop[, step]) -> range object\n\nReturn an object that produces a sequence of integers from start (inclusive)\nto stop (exclusive) by step. range(i, j) produces i, i+1, i+2, ..., j-1.\nstart defaults to 0, and stop is omitted! range(4) produces 0, 1, 2, 3.\nThese are exactly the valid indices for a list of 4 elements.\nWhen step is given, it specifies the increment (or decrement).",repr:"Return the canonical string representation of the object.\n\nFor many object types, including most builtins, eval(repr(obj)) == obj.",reversed:"Return a reverse iterator over the values of the given sequence.",round:"Round a number to a given precision in decimal digits.\n\nThe return value is an integer if ndigits is omitted or None. Otherwise\nthe return value has the same type as the number. ndigits may be negative.",set:"set() -> new empty set object\nset(iterable) -> new set object\n\nBuild an unordered collection of unique elements.",setattr:"Sets the named attribute on the given object to the specified value.\n\nsetattr(x, 'y', v) is equivalent to ``x.y = v``",slice:"slice(stop)\nslice(start, stop[, step])\n\nCreate a slice object. This is used for extended slicing (e.g. a[0:10:2]).",sorted:"Return a new list containing all items from the iterable in ascending order.\n\nA custom key function can be supplied to customize the sort order, and the\nreverse flag can be set to request the result in descending order.",staticmethod:"staticmethod(function) -> method\n\nConvert a function to be a static method.\n\nA static method does not receive an implicit first argument.\nTo declare a static method, use this idiom:\n\n class C:\n @staticmethod\n def f(arg1, arg2, argN):\n ...\n\nIt can be called either on the class (e.g. C.f()) or on an instance\n(e.g. C().f()). 
Both the class and the instance are ignored, and\nneither is passed implicitly as the first argument to the method.\n\nStatic methods in Python are similar to those found in Java or C++.\nFor a more advanced concept, see the classmethod builtin.",str:"str(object='') -> str\nstr(bytes_or_buffer[, encoding[, errors]]) -> str\n\nCreate a new string object from the given object. If encoding or\nerrors is specified, then the object must expose a data buffer\nthat will be decoded using the given encoding and error handler.\nOtherwise, returns the result of object.__str__() (if defined)\nor repr(object).\nencoding defaults to sys.getdefaultencoding().\nerrors defaults to 'strict'.",sum:"Return the sum of a 'start' value (default: 0) plus an iterable of numbers\n\nWhen the iterable is empty, return the start value.\nThis function is intended specifically for use with numeric values and may\nreject non-numeric types.",super:"super() -> same as super(__class__, )\nsuper(type) -> unbound super object\nsuper(type, obj) -> bound super object; requires isinstance(obj, type)\nsuper(type, type2) -> bound super object; requires issubclass(type2, type)\nTypical use to call a cooperative superclass method:\nclass C(B):\n def meth(self, arg):\n super().meth(arg)\nThis works for class methods too:\nclass C(B):\n @classmethod\n def cmeth(cls, arg):\n super().cmeth(arg)\n",tuple:"Built-in immutable sequence.\n\nIf no argument is given, the constructor returns an empty tuple.\nIf iterable is specified the tuple is initialized from iterable's items.\n\nIf the argument is a tuple, the return value is the same object.",type:"type(object) -> the object's type\ntype(name, bases, dict, **kwds) -> a new type",vars:"Show vars.\n\nWithout arguments, equivalent to locals().\nWith an argument, equivalent to object.__dict__.",zip:"zip(*iterables, strict=False) --> Yield tuples until an input is exhausted.\n\n >>> list(zip('abcdefg', range(3), range(4)))\n [('a', 0, 0), ('b', 1, 1), ('c', 2, 2)]\n\nThe zip object yields n-length tuples, where n is the number of iterables\npassed as positional arguments to zip(). The i-th element in every tuple\ncomes from the i-th iterable argument to zip(). 
This continues until the\nshortest argument is exhausted.\n\nIf strict is true and one of the arguments is exhausted before the others,\nraise a ValueError.",} -for(var key in docs){if(__BRYTHON__.builtins[key]){if(['object','function'].includes(typeof __BRYTHON__.builtins[key])){__BRYTHON__.builtins[key].__doc__=docs[key]}}} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_265_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +var star_targets_var; +if( +(star_targets_var=star_targets_rule(p)) +&& +(_literal=$B._PyPegen.expect_token(p,22)) +) +{_res=$B._PyPegen.dummy_name(p,star_targets_var,_literal); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_266_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,8)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_267_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,11)) +) +{_res=_literal; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _literal; +if( +(_literal=$B._PyPegen.expect_token(p,35)) +) +{_res=_literal; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_268_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var expression_var; +if( +(expression_var=expression_rule(p)) +&& +(_opt_var=_tmp_276_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.dummy_name(p,expression_var,_opt_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_269_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var expressions_var; +if( +(expressions_var=expressions_rule(p)) +&& +(_opt_var=_tmp_277_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.dummy_name(p,expressions_var,_opt_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_270_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var expression_var; +if( +(expression_var=expression_rule(p)) +&& +(_opt_var=_tmp_278_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.dummy_name(p,expression_var,_opt_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_271_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _opt_var; +UNUSED(_opt_var); +var expressions_var; +if( +(expressions_var=expressions_rule(p)) +&& +(_opt_var=_tmp_279_rule(p),!p.error_indicator) +) +{_res=$B._PyPegen.dummy_name(p,expressions_var,_opt_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_272_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var name_var; +if( 
+(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(name_var=$B._PyPegen.name_token(p)) +) +{_res=$B._PyPegen.dummy_name(p,_keyword,name_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_273_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var assignment_expression_var; +if( +(assignment_expression_var=assignment_expression_rule(p)) +) +{_res=assignment_expression_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var expression_var; +if( +(expression_var=expression_rule(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,53) +) +{_res=expression_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _loop0_275_rule(p) +{if(p.error_indicator){return NULL;} +var _res={value:NULL}; +_res=NULL; +var _mark=p.mark; +var _children=[]; +var _children_capacity=1; +var _n=0; +{ +if(p.error_indicator){return NULL;} +var _literal; +var elem; +while( +(_literal=$B._PyPegen.expect_token(p,12)) +&& +(elem=_tmp_280_rule(p)) +) +{_res=elem; +_children[_n++]=_res; +_mark=p.mark;} +p.mark=_mark;} +return _children;} +function _gather_274_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var elem; +var seq; +if( +(elem=_tmp_280_rule(p)) +&& +(seq=_loop0_275_rule(p)) +) +{_res=$B._PyPegen.seq_insert_in_front(p,elem,seq); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_276_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var star_target_var; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(star_target_var=star_target_rule(p)) +) +{_res=$B._PyPegen.dummy_name(p,_keyword,star_target_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_277_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var star_target_var; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(star_target_var=star_target_rule(p)) +) +{_res=$B._PyPegen.dummy_name(p,_keyword,star_target_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_278_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var star_target_var; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(star_target_var=star_target_rule(p)) +) +{_res=$B._PyPegen.dummy_name(p,_keyword,star_target_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_279_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var _keyword; +var star_target_var; +if( +(_keyword=$B._PyPegen.expect_token(p,640)) +&& +(star_target_var=star_target_rule(p)) +) +{_res=$B._PyPegen.dummy_name(p,_keyword,star_target_var); +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_280_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var starred_expression_var; +if( +(starred_expression_var=starred_expression_rule(p)) +) +{_res=starred_expression_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var _tmp_281_var; +if( +(_tmp_281_var=_tmp_281_rule(p)) +&& 
+$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,22) +) +{_res=_tmp_281_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +function _tmp_281_rule(p) +{if(p.error_indicator){return NULL;} +while(1){var _res=NULL; +var _mark=p.mark; +{ +if(p.error_indicator){return NULL;} +var assignment_expression_var; +if( +(assignment_expression_var=assignment_expression_rule(p)) +) +{_res=assignment_expression_var; +break;} +p.mark=_mark;} +{ +if(p.error_indicator){return NULL;} +var expression_var; +if( +(expression_var=expression_rule(p)) +&& +$B._PyPegen.lookahead_with_int(0,$B._PyPegen.expect_token,p,53) +) +{_res=expression_var; +break;} +p.mark=_mark;} +_res=NULL; +break;} +return _res;} +$B._PyPegen.parse=function(p){p.keywords=reserved_keywords; +p.n_keyword_lists=n_keyword_lists; +p.soft_keywords=soft_keywords; +switch(p.mode){case 'file': +return file_rule(p) +case 'eval': +return eval_rule(p) +case 'single': +return interactive_rule(p) +default: +console.log('unknown mode',p.mode) +alert()}} ; (function($B){$B.whenReady=new Promise(function(resolve,reject){resolve()})})(__BRYTHON__) ; diff --git a/www/src/brython.min.js b/www/src/brython.min.js index 68a251b18..f9dd33243 100644 --- a/www/src/brython.min.js +++ b/www/src/brython.min.js @@ -1 +1 @@ -var __BRYTHON__=globalThis.__BRYTHON__||{};try{eval("async function* f(){}")}catch(err){console.warn("Your browser is not fully supported. If you are using "+"Microsoft Edge, please upgrade to the latest version")}(function($B){$B.isWebWorker="undefined"!==typeof WorkerGlobalScope&&"function"===typeof importScripts&&navigator instanceof WorkerNavigator;$B.isNode=typeof process!=="undefined"&&process.release.name==="node"&&process.__nwjs!==1;var _window=globalThis;_window.location||={href:"",origin:"",pathname:""};_window.navigator||={userLanguage:""};_window.document||={getElementsByTagName:()=>[{src:"http://localhost/"}],currentScript:{src:"http://localhost/"},querySelectorAll:()=>[]};_window.HTMLElement||=class HTMLElement{};_window.MutationObserver||=function(){this.observe=()=>{}};_window.customElements||={define:()=>{}};var href=_window.location.href;$B.protocol=href.split(":")[0];$B.BigInt=_window.BigInt;$B.indexedDB=_window.indexedDB;var $path;if($B.brython_path===undefined){var this_url;if($B.isWebWorker){this_url=_window.location.href;if(this_url.startsWith("blob:")){this_url=this_url.substr(5)}}else{this_url=document.currentScript.src}var elts=this_url.split("/");elts.pop();$path=$B.brython_path=elts.join("/")+"/"}else{if(!$B.brython_path.endsWith("/")){$B.brython_path+="/"}$path=$B.brython_path}var parts_re=new RegExp("(.*?)://(.*?)/(.*)"),mo=parts_re.exec($B.brython_path);if(mo){$B.full_url={protocol:mo[1],host:mo[2],address:mo[3]}}var path=_window.location.origin+_window.location.pathname,path_elts=path.split("/");path_elts.pop();var $script_dir=$B.script_dir=path_elts.join("/");$B.__ARGV=[];$B.webworkers={};$B.file_cache={};$B.url2name={};$B.scripts={};$B.import_info={};$B.imported={};$B.precompiled={};$B.frame_obj=null;$B.builtins=Object.create(null);$B.builtins_scope={id:"__builtins__",module:"__builtins__",binding:{}};$B.builtin_funcs={};$B.builtin_classes=[];$B.__getattr__=function(attr){return this[attr]};$B.__setattr__=function(attr,value){if(["debug","stdout","stderr"].indexOf(attr)>-1){$B[attr]=value}else{throw $B.builtins.AttributeError.$factory("__BRYTHON__ object has no attribute "+attr)}};$B.language=_window.navigator.userLanguage||_window.navigator.language;$B.locale="C";var date=new Date;var 
formatter=new Intl.DateTimeFormat($B.language,{timeZoneName:"short"}),short=formatter.format(date);formatter=new Intl.DateTimeFormat($B.language,{timeZoneName:"long"});var long=formatter.format(date);var ix=0,minlen=Math.min(short.length,long.length);while(ix",mode||"file")){console.log(token.type,$B.builtins.repr(token.string),token.start,token.end,token.line)}};var py2js_magic=Math.random().toString(36).substr(2,8);function from_py(src,script_id){if(!$B.options_parsed){$B.parse_options()}script_id=script_id||"python_script_"+$B.UUID();var filename=$B.script_path+"#"+script_id;$B.url2name[filename]=script_id;$B.imported[script_id]={};var root=__BRYTHON__.py2js({src:src,filename:filename},script_id,script_id,__BRYTHON__.builtins_scope);return root.to_js()}$B.getPythonModule=function(name){return $B.imported[name]};$B.python_to_js=function(src,script_id){return"(function() {\n"+from_py(src,script_id)+"\nreturn locals}())"};$B.pythonToJS=$B.python_to_js;$B.runPythonSource=function(src,script_id){var js=from_py(src,script_id)+"\nreturn locals";var func=new Function("$B","_b_",js);$B.imported[script_id]=func($B,$B.builtins);return $B.imported[script_id]}})(__BRYTHON__);__BRYTHON__.ast_classes={Add:"",And:"",AnnAssign:"target,annotation,value?,simple",Assert:"test,msg?",Assign:"targets*,value,type_comment?",AsyncFor:"target,iter,body*,orelse*,type_comment?",AsyncFunctionDef:"name,args,body*,decorator_list*,returns?,type_comment?,type_params*",AsyncWith:"items*,body*,type_comment?",Attribute:"value,attr,ctx",AugAssign:"target,op,value",Await:"value",BinOp:"left,op,right",BitAnd:"",BitOr:"",BitXor:"",BoolOp:"op,values*",Break:"",Call:"func,args*,keywords*",ClassDef:"name,bases*,keywords*,body*,decorator_list*,type_params*",Compare:"left,ops*,comparators*",Constant:"value,kind?",Continue:"",Del:"",Delete:"targets*",Dict:"keys*,values*",DictComp:"key,value,generators*",Div:"",Eq:"",ExceptHandler:"type?,name?,body*",Expr:"value",Expression:"body",FloorDiv:"",For:"target,iter,body*,orelse*,type_comment?",FormattedValue:"value,conversion,format_spec?",FunctionDef:"name,args,body*,decorator_list*,returns?,type_comment?,type_params*",FunctionType:"argtypes*,returns",GeneratorExp:"elt,generators*",Global:"names*",Gt:"",GtE:"",If:"test,body*,orelse*",IfExp:"test,body,orelse",Import:"names*",ImportFrom:"module?,names*,level?",In:"",Interactive:"body*",Invert:"",Is:"",IsNot:"",JoinedStr:"values*",LShift:"",Lambda:"args,body",List:"elts*,ctx",ListComp:"elt,generators*",Load:"",Lt:"",LtE:"",MatMult:"",Match:"subject,cases*",MatchAs:"pattern?,name?",MatchClass:"cls,patterns*,kwd_attrs*,kwd_patterns*",MatchMapping:"keys*,patterns*,rest?",MatchOr:"patterns*",MatchSequence:"patterns*",MatchSingleton:"value",MatchStar:"name?",MatchValue:"value",Mod:"",Module:"body*,type_ignores*",Mult:"",Name:"id,ctx",NamedExpr:"target,value",Nonlocal:"names*",Not:"",NotEq:"",NotIn:"",Or:"",ParamSpec:"name",Pass:"",Pow:"",RShift:"",Raise:"exc?,cause?",Return:"value?",Set:"elts*",SetComp:"elt,generators*",Slice:"lower?,upper?,step?",Starred:"value,ctx",Store:"",Sub:"",Subscript:"value,slice,ctx",Try:"body*,handlers*,orelse*,finalbody*",TryStar:"body*,handlers*,orelse*,finalbody*",Tuple:"elts*,ctx",TypeAlias:"name,type_params*,value",TypeIgnore:"lineno,tag",TypeVar:"name,bound?",TypeVarTuple:"name",UAdd:"",USub:"",UnaryOp:"op,operand",While:"test,body*,orelse*",With:"items*,body*,type_comment?",Yield:"value?",YieldFrom:"value",alias:"name,asname?",arg:"arg,annotation?,type_comment?",arguments:"posonlyargs*,args*,vararg?,kwonlyargs*,k
w_defaults*,kwarg?,defaults*",boolop:["And","Or"],cmpop:["Eq","NotEq","Lt","LtE","Gt","GtE","Is","IsNot","In","NotIn"],comprehension:"target,iter,ifs*,is_async",excepthandler:["ExceptHandler"],expr:["BoolOp","NamedExpr","BinOp","UnaryOp","Lambda","IfExp","Dict","Set","ListComp","SetComp","DictComp","GeneratorExp","Await","Yield","YieldFrom","Compare","Call","FormattedValue","JoinedStr","Constant","Attribute","Subscript","Starred","Name","List","Tuple","Slice"],expr_context:["Load","Store","Del"],keyword:"arg?,value",match_case:"pattern,guard?,body*",mod:["Module","Interactive","Expression","FunctionType"],operator:["Add","Sub","Mult","MatMult","Div","Mod","Pow","LShift","RShift","BitOr","BitXor","BitAnd","FloorDiv"],pattern:["MatchValue","MatchSingleton","MatchSequence","MatchMapping","MatchClass","MatchStar","MatchAs","MatchOr"],stmt:["FunctionDef","AsyncFunctionDef","ClassDef","Return","Delete","Assign","TypeAlias","AugAssign","AnnAssign","For","AsyncFor","While","If","With","AsyncWith","Match","Raise","Try","TryStar","Assert","Import","ImportFrom","Global","Nonlocal","Expr","Pass","Break","Continue"],type_ignore:["TypeIgnore"],type_param:["TypeVar","ParamSpec","TypeVarTuple"],unaryop:["Invert","Not","UAdd","USub"],withitem:"context_expr,optional_vars?"};var $B=__BRYTHON__;$B.unicode={No_digits:[178,179,185,[4969,9],6618,8304,[8308,6],[8320,10],[9312,9],[9332,9],[9352,9],9450,[9461,9],9471,[10102,9],[10112,9],[10122,9],[68160,4],[69216,9],[69714,9],[127232,11]],Lo_numeric:[13317,13443,14378,15181,19968,19971,19975,19977,20061,20108,20116,20118,20159,20160,20191,20200,20237,20336,20740,20806,[20841,3,2],21313,[21315,3],21324,[21441,4],22235,22769,22777,24186,24318,24319,[24332,3],24336,25342,25420,26578,28422,29590,30334,32902,33836,36014,36019,36144,38433,38470,38476,38520,38646,63851,63859,63864,63922,63953,63955,63997,131073,131172,131298,131361,133418,133507,133516,133532,133866,133885,133913,140176,141720,146203,156269,194704]};$B.digits_starts=[48,1632,1776,1984,2406,2534,2662,2790,2918,3046,3174,3302,3430,3558,3664,3792,3872,4160,4240,6112,6160,6470,6608,6784,6800,6992,7088,7232,7248,42528,43216,43264,43472,43504,43600,44016,65296,66720,68912,69734,69872,69942,70096,70384,70736,70864,71248,71360,71472,71904,72016,72784,73040,73120,73552,92768,92864,93008,120782,120792,120802,120812,120822,123200,123632,124144,125264,130032];$B.unicode_casefold={223:[115,115],304:[105,775],329:[700,110],496:[106,780],912:[953,776,769],944:[965,776,769],1415:[1381,1410],7830:[104,817],7831:[116,776],7832:[119,778],7833:[121,778],7834:[97,702],7838:[223],8016:[965,787],8018:[965,787,768],8020:[965,787,769],8022:[965,787,834],8064:[7936,953],8065:[7937,953],8066:[7938,953],8067:[7939,953],8068:[7940,953],8069:[7941,953],8070:[7942,953],8071:[7943,953],8072:[8064],8073:[8065],8074:[8066],8075:[8067],8076:[8068],8077:[8069],8078:[8070],8079:[8071],8080:[7968,953],8081:[7969,953],8082:[7970,953],8083:[7971,953],8084:[7972,953],8085:[7973,953],8086:[7974,953],8087:[7975,953],8088:[8080],8089:[8081],8090:[8082],8091:[8083],8092:[8084],8093:[8085],8094:[8086],8095:[8087],8096:[8032,953],8097:[8033,953],8098:[8034,953],8099:[8035,953],8100:[8036,953],8101:[8037,953],8102:[8038,953],8103:[8039,953],8104:[8096],8105:[8097],8106:[8098],8107:[8099],8108:[8100],8109:[8101],8110:[8102],8111:[8103],8114:[8048,953],8115:[945,953],8116:[940,953],8118:[945,834],8119:[945,834,953],8124:[8115],8130:[8052,953],8131:[951,953],8132:[942,953],8134:[951,834],8135:[951,834,953],8140:[8131],8146:[953,776,768],8147:[912],8150:[95
3,834],8151:[953,776,834],8162:[965,776,768],8163:[944],8164:[961,787],8166:[965,834],8167:[965,776,834],8178:[8060,953],8179:[969,953],8180:[974,953],8182:[969,834],8183:[969,834,953],8188:[8179],64256:[102,102],64257:[102,105],64258:[102,108],64259:[102,102,105],64260:[102,102,108],64261:[64262],64262:[115,116],64275:[1396,1398],64276:[1396,1381],64277:[1396,1387],64278:[1406,1398],64279:[1396,1389]};$B.unicode_bidi_whitespace=[9,10,11,12,13,28,29,30,31,32,133,5760,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8232,8233,8287,12288];(function($B){$B.stdlib={};var pylist=["VFS_import","__future__","_aio","_codecs","_codecs_jp","_collections","_collections_abc","_compat_pickle","_compression","_contextvars","_csv","_dummy_thread","_frozen_importlib","_functools","_imp","_io","_markupbase","_multibytecodec","_operator","_py_abc","_pydatetime","_pydecimal","_queue","_signal","_socket","_sre","_struct","_sysconfigdata","_sysconfigdata_0_brython_","_testcapi","_thread","_threading_local","_typing","_weakref","_weakrefset","abc","antigravity","argparse","ast","asyncio","atexit","base64","bdb","binascii","bisect","browser.ajax","browser.highlight","browser.idbcache","browser.indexed_db","browser.local_storage","browser.markdown","browser.object_storage","browser.session_storage","browser.svg","browser.template","browser.timer","browser.ui","browser.webcomponent","browser.websocket","browser.worker","calendar","cmath","cmd","code","codecs","codeop","colorsys","configparser","contextlib","contextvars","copy","copyreg","csv","dataclasses","datetime","decimal","difflib","doctest","enum","errno","external_import","faulthandler","fnmatch","formatter","fractions","functools","gc","genericpath","getopt","getpass","gettext","glob","gzip","heapq","hmac","imp","inspect","interpreter","io","ipaddress","itertools","keyword","linecache","locale","mimetypes","nntplib","ntpath","numbers","opcode","operator","optparse","os","pathlib","pdb","pickle","pkgutil","platform","posixpath","pprint","profile","pwd","py_compile","pydoc","queue","quopri","random","re","re1","reprlib","secrets","select","selectors","shlex","shutil","signal","site","site-packages.__future__","site-packages.docs","site-packages.header","site-packages.test_sp","socket","sre_compile","sre_constants","sre_parse","stat","statistics","string","stringprep","struct","subprocess","symtable","sys","sysconfig","tabnanny","tarfile","tb","tempfile","test.namespace_pkgs.module_and_namespace_package.a_test","textwrap","this","threading","time","timeit","token","tokenize","traceback","turtle","types","typing","uu","uuid","warnings","weakref","webbrowser","zipfile","zipimport","zlib"];for(var i=0;ilast){return false}}else if(last[0]+last[1]100){console.log("infinite loop for",cp);alert()}var item=table[ix];if(typeof item!="number"){item=item[0]}if(item==cp){return true}else if(item>cp){end=ix}else{start=ix}len=Math.floor((end-start)/2);if(end-start==1){break}ix=start+len}return table[start][0]+table[start][1]>cp};const XID_Start_re=/\p{XID_Start}/u;const Other_ID_Start=[6277,6278,8472,8494,12443,12444].map((x=>String.fromCodePoint(x)));function is_ID_Start(char){return/\p{Letter}/u.test(char)||/\p{Nl}/u.test(char)||char=="_"||Other_ID_Start.includes(char)}const Other_ID_Continue=[183,903,4969,4976,4977,6618,8204,8205,12539,65381].map((x=>String.fromCodePoint(x)));function is_ID_Continue(char){return is_ID_Start(char)||/\p{Mn}|\p{Mc}|\p{Nd}|\p{Pc}/u.test(char)||Other_ID_Continue.includes(char)}$B.is_XID_Start=function(cp){var 
char=String.fromCodePoint(cp);if(!is_ID_Start(char)){return false}var norm=char.normalize("NFKC");if(!is_ID_Start(norm[0])){return false}for(var char of norm.substr(1)){if(!is_ID_Continue(char)){return false}}return true};$B.is_XID_Continue=function(cp){var char=String.fromCodePoint(cp);if(!is_ID_Continue(char)){return false}var norm=char.normalize("NFKC");for(var char of norm.substr(1)){if(!is_ID_Continue(char)){return false}}return true};$B.in_unicode_category=function(category,cp){if(isNaN(cp)){return false}try{var re=new RegExp("\\p{"+category+"}","u");return re.test(String.fromCodePoint(cp))}catch(err){return in_unicode_category(category,cp)}};function in_unicode_category(category,cp){var table=$B.unicode[category],start=0,end=table.length-1,len=table.length,ix=Math.floor(len/2),nb=0;var first=table[start],item=typeof first=="number"?first:first[0];if(cplast){return false}}else if(last[0]+last[1]100){console.log("infinite loop for",cp);alert()}var item=table[ix];if(typeof item!="number"){item=item[0]}if(item==cp){return true}else if(item>cp){end=ix}else{start=ix}len=Math.floor((end-start)/2);if(end-start==1){break}ix=start+len}var step=table[start][2];if(step===undefined){return table[start][0]+table[start][1]>cp}return table[start][0]+step*table[start][1]>cp&&(cp-table[start][0])%step==0}const FSTRING_START="FSTRING_START",FSTRING_MIDDLE="FSTRING_MIDDLE",FSTRING_END="FSTRING_END";function ord(char){if(char.length==1){return char.charCodeAt(0)}var code=65536;code+=(char.charCodeAt(0)&1023)<<10;code+=char.charCodeAt(1)&1023;return code}function $last(array){return array[array.length-1]}var ops=".,:;+-*/%~^|&=<>[](){}@",op2=["**","//",">>","<<"],augm_op="+-*/%^|&=<>@",closing={"}":"{","]":"[",")":"("};function Token(type,string,start,end,line){start=start.slice(0,2);var res={type:type,string:string,start:start,end:end,line:line};res[0]=type;res[1]=string;res[2]=start;res[3]=end;res[4]=line;return res}var errors={};function TokenError(message,position){if(errors.TokenError===undefined){var $error_2={$name:"TokenError",$qualname:"TokenError",$is_class:true,__module__:"tokenize"};var error=errors.TokenError=$B.$class_constructor("TokenError",$error_2,_b_.tuple.$factory([_b_.Exception]),["_b_.Exception"],[]);error.__doc__=_b_.None;error.$factory=function(message,position){return{__class__:error,msg:message,lineno:position[0],colno:position[1]}};error.__str__=function(self){var s=self.msg;if(self.lineno>1){s+=` (${self.lineno}, ${self.colno})`}return s};$B.set_func_names(error,"tokenize")}var exc=errors.TokenError.$factory(message,position);console.log("error",exc.__class__,exc.args);return exc}function MAKE_TOKEN(token_type){return new Token("SYNTAXERROR")}function _get_line_at(src,pos){var end=src.substr(pos).search(/[\r\n]/),line=end==-1?src.substr(pos):src.substr(pos,end+1);return line}function get_comment(src,pos,line_num,line_start,token_name,line){var start=pos,ix;var t=[];while(true){if(pos>=src.length||(ix="\r\n".indexOf(src[pos]))>-1){t.push(Token("COMMENT",src.substring(start-1,pos),[line_num,start-line_start],[line_num,pos-line_start+1],line));if(ix!==undefined){var nb=1;if(src[pos]=="\r"&&src[pos+1]=="\n"){nb++}else if(src[pos]===undefined){nb=0}t.push(Token(token_name,src.substr(pos,nb),[line_num,pos-line_start+1],[line_num,pos-line_start+nb+1],line));if(src[pos]===undefined){t.push(Token("NEWLINE","\n",[line_num,pos-line_start+1],[line_num,pos-line_start+2],""))}pos+=nb}return{t:t,pos:pos}}pos++}}function test_num(num_type,char){switch(num_type){case"":return 
[Brython engine script (minified build): this hunk carries the rebuilt tokenizer and parser internals.
 - $B.TokenReader and the $B.tokenizer generator: line and indentation tracking (INDENT / DEDENT), NEWLINE / NL and comment tokens, string and bytes literals, f-string modes (FSTRING_START / FSTRING_MIDDLE / FSTRING_END and format specifiers), number literals, operators and brace matching, and final DEDENT / ENDMARKER emission.
 - AST machinery: $B.ast classes built from $B.ast_classes, the $B.ast_js_to_py / $B.ast_py_to_js converters, $B.create_python_ast_classes, ast_dump, the $B.op2method / $B.method_to_op operator tables, the $op_order / $op_weight precedence table, and $B.format_indent.
 - Error helpers (raise_error_known_location, raise_syntax_error_known_range, raise_syntax_error, raise_indentation_error) and check_assignment validation of assignment targets.
 - Parser context classes with their transition() and ast() methods: $Node, AbstractExprCtx, AliasCtx, AnnotationCtx, AssertCtx, AssignCtx, AsyncCtx, AttrCtx, AugmentedAssignCtx, AwaitCtx, BodyCtx, BreakCtx, CallArgCtx, CallCtx, CaseCtx, ClassCtx, ConditionCtx, ContinueCtx, DecoratorCtx, DefCtx, DelCtx, DictCompCtx, DictOrSetCtx, DoubleStarArgCtx, EllipsisCtx, ExceptCtx, ExprCtx, ExprNot, ForExpr, FromCtx, FStringCtx, FStringReplacementFieldCtx, …]
AbstractExprCtx){raise_syntax_error(C,"f-string: valid expression required before '}'")}return C.parent}else if(token=="id"&&this.expect=="id"){if("sra".indexOf(value)>-1){C.conversion=value;delete this.expect;return C}raise_syntax_error(C,`unknown conversion type ${value}`)}raise_syntax_error(C)};FStringReplacementFieldCtx.prototype.ast=function(){var value=this.tree[0].ast(),format=this.tree[1];var conv_num={a:97,r:114,s:115},conversion=conv_num[this.conversion]||-1;if(format!==undefined){format=format.ast()}var res=new ast.FormattedValue(value,conversion,format);set_position(res,this.position);return res};var FStringFormatSpecCtx=$B.parser.FStringFormatSpecCtx=function(C){this.type="fstring_format_spec";this.tree=[];this.parent=C;this.position=$token.value;C.tree.push(this)};FStringFormatSpecCtx.prototype.transition=function(token,value){var C=this;if(token=="middle"){var quotes=this.parent.parent.quotes;new StringCtx(C,escape_quotes(value,quotes));return C}else if(token=="{"){return new AbstractExprCtx(new FStringReplacementFieldCtx(C),false)}else if(token=="}"){return transition(C.parent,token,value)}raise_syntax_error(C)};FStringFormatSpecCtx.prototype.ast=function(){if(this.tree.length==1){return this.tree[0].ast()}else{return FStringCtx.prototype.ast.call(this)}};var FuncArgs=$B.parser.FuncArgs=function(C){this.type="func_args";this.parent=C;this.tree=[];this.names=[];C.tree[C.tree.length]=this;this.expect="id";this.has_default=false;this.has_star_arg=false;this.has_kw_arg=false};FuncArgs.prototype.ast=function(){var args={posonlyargs:[],args:[],kwonlyargs:[],kw_defaults:[],defaults:[]},state="arg",default_value;for(var arg of this.tree){if(arg.type=="end_positional"){args.posonlyargs=args.args;args.args=[]}else if(arg.type=="func_star_arg"){state="kwonly";if(arg.op=="*"&&arg.name!="*"){args.vararg=new ast.arg(arg.name);if(arg.annotation){args.vararg.annotation=arg.annotation.tree[0].ast()}set_position(args.vararg,arg.position)}else if(arg.op=="**"){args.kwarg=new ast.arg(arg.name);if(arg.annotation){args.kwarg.annotation=arg.annotation.tree[0].ast()}set_position(args.kwarg,arg.position)}}else{default_value=false;if(arg.has_default){default_value=arg.tree[0].ast()}var argument=new ast.arg(arg.name);set_position(argument,arg.position,last_position(arg));if(arg.annotation){argument.annotation=arg.annotation.tree[0].ast()}if(state=="kwonly"){args.kwonlyargs.push(argument);if(default_value){args.kw_defaults.push(default_value)}else{args.kw_defaults.push(_b_.None)}}else{args.args.push(argument);if(default_value){args.defaults.push(default_value)}}}}var res=new ast.arguments(args.posonlyargs,args.args,args.vararg,args.kwonlyargs,args.kw_defaults,args.kwarg,args.defaults);return res};FuncArgs.prototype.transition=function(token,value){var C=this;function check(){if(C.tree.length==0){return}var last=$B.last(C.tree);if(C.has_default&&!last.has_default){if(last.type=="func_star_arg"||last.type=="end_positional"){return}if(C.has_star_arg){return}raise_syntax_error(C,"non-default argument follows default argument")}if(last.has_default){C.has_default=true}}function check_last(){var last=$B.last(C.tree);if(last&&last.type=="func_star_arg"){if(last.name=="*"){raise_syntax_error(C,"named arguments must follow bare *")}}}switch(token){case"id":if(C.has_kw_arg){raise_syntax_error(C,"duplicate keyword argument")}if(C.expect=="id"){C.expect=",";if(C.names.indexOf(value)>-1){raise_syntax_error(C,"duplicate argument "+value+" in function definition")}}return new 
FuncArgIdCtx(C,value);case",":if(C.expect==","){check();C.expect="id";return C}raise_syntax_error(C);break;case")":check();check_last();return transition(C.parent,token,value);case"op":if(C.has_kw_arg){raise_syntax_error(C,"(unpacking after '**' argument)")}var op=value;C.expect=",";if(op=="*"){if(C.has_star_arg){raise_syntax_error(C,"(only one '*' argument allowed)")}return new FuncStarArgCtx(C,"*")}else if(op=="**"){return new FuncStarArgCtx(C,"**")}else if(op=="/"){if(C.has_end_positional){raise_syntax_error(C,"/ may appear only once")}else if(C.has_star_arg){raise_syntax_error(C,"/ must be ahead of *")}return new EndOfPositionalCtx(C)}raise_syntax_error(C);break;case":":if(C.parent.type=="lambda"){return transition(C.parent,token)}}raise_syntax_error(C)};var FuncArgIdCtx=$B.parser.FuncArgIdCtx=function(C,name){this.type="func_arg_id";if(["None","True","False"].indexOf(name)>-1){raise_syntax_error(C)}if(name=="__debug__"){raise_syntax_error(C,"cannot assign to __debug__")}this.name=name;this.parent=C;this.position=$token.value;if(C.has_star_arg){C.parent.after_star.push(name)}else{C.parent.positional_list.push(name)}this.tree=[];C.tree[C.tree.length]=this;this.expect="="};FuncArgIdCtx.prototype.transition=function(token,value){var C=this;switch(token){case"=":if(C.expect=="="){C.has_default=true;var def_ctx=C.parent.parent;if(C.parent.has_star_arg){def_ctx.default_list.push(def_ctx.after_star.pop())}else{def_ctx.default_list.push(def_ctx.positional_list.pop())}return new AbstractExprCtx(C,false)}break;case",":case")":if(C.parent.has_default&&C.tree.length==0&&C.parent.has_star_arg===undefined){raise_syntax_error(C,"non-default argument follows default argument")}else{return transition(C.parent,token)}break;case":":if(C.parent.parent.type=="lambda"){return transition(C.parent.parent,":")}if(C.has_default){raise_syntax_error(C)}return new AbstractExprCtx(new AnnotationCtx(C),false)}raise_syntax_error(C)};var FuncStarArgCtx=$B.parser.FuncStarArgCtx=function(C,op){this.type="func_star_arg";this.op=op;this.parent=C;this.node=get_node(this);this.position=$token.value;C.has_star_arg=op=="*";C.has_kw_arg=op=="**";C.tree[C.tree.length]=this};FuncStarArgCtx.prototype.transition=function(token,value){var C=this;switch(token){case"id":if(C.name===undefined){if(C.parent.names.indexOf(value)>-1){raise_syntax_error(C,"duplicate argument "+value+" in function definition")}}if(["None","True","False"].indexOf(value)>-1){raise_syntax_error(C)}C.set_name(value);C.parent.names.push(value);return C;case",":case")":if(C.name===undefined){C.set_name("*");C.parent.names.push("*")}return transition(C.parent,token);case":":if(C.parent.parent.type=="lambda"){if(C.name===undefined){raise_syntax_error(C,"named arguments must follow bare *")}return transition(C.parent.parent,":")}if(C.name===undefined){raise_syntax_error(C,"(annotation on an unnamed parameter)")}return new AbstractExprCtx(new AnnotationCtx(C),false)}raise_syntax_error(C)};FuncStarArgCtx.prototype.set_name=function(name){if(name=="__debug__"){raise_syntax_error_known_range(this,this.position,$token.value,"cannot assign to __debug__")}this.name=name;var ctx=this.parent;while(ctx.parent!==undefined){if(ctx.type=="def"){break}ctx=ctx.parent}if(this.op=="*"){ctx.other_args='"'+name+'"'}else{ctx.other_kw='"'+name+'"'}};var GeneratorExpCtx=function(C){this.type="genexpr";this.tree=[C.tree[0]];this.tree[0].parent=this;this.position=C.position;Comprehension.make_comp(this,C)};GeneratorExpCtx.prototype.ast=function(){var res=new 
ast.GeneratorExp(this.tree[0].ast(),Comprehension.generators(this.tree.slice(1)));set_position(res,this.position);return res};GeneratorExpCtx.prototype.transition=function(token,value){var C=this;if(token==")"){if(this.parent.type=="call"){if(C.parent.tree.length>1){raise_syntax_error_known_range(C,first_position(C),last_position(C),"Generator expression must be parenthesized")}return this.parent.parent}return this.parent}raise_syntax_error(C)};var GlobalCtx=$B.parser.GlobalCtx=function(C){this.type="global";this.parent=C;this.tree=[];this.position=$token.value;C.tree[C.tree.length]=this;this.expect="id";this.scope=get_scope(this);this.module=get_module(this);if(this.module.module!==""){while(this.module.module!=this.module.id){this.module=this.module.parent_block}}};GlobalCtx.prototype.ast=function(){var ast_obj=new ast.Global(this.tree.map((item=>item.value)));set_position(ast_obj,this.position);return ast_obj};GlobalCtx.prototype.transition=function(token,value){var C=this;switch(token){case"id":if(C.expect=="id"){new IdCtx(C,value);C.add(value);C.expect=",";return C}break;case",":if(C.expect==","){C.expect="id";return C}break;case"eol":if(C.expect==","){return transition(C.parent,token)}break}raise_syntax_error(C)};GlobalCtx.prototype.add=function(name){if(this.scope.type=="module"){return}var mod=this.scope.parent_block;if(this.module.module.startsWith("$exec")){while(mod&&mod.parent_block!==this.module){mod._globals=mod._globals||new Map;mod._globals.set(name,this.module.id);mod=mod.parent_block}}};var IdCtx=$B.parser.IdCtx=function(C,value){this.type="id";this.value=value;this.parent=C;this.tree=[];C.tree[C.tree.length]=this;this.position=$token.value;var scope=this.scope=get_scope(this);this.blurred_scope=this.scope.blurred;if(["def","generator"].indexOf(scope.ntype)>-1){if(!(C instanceof GlobalCtx)&&!(C instanceof NonlocalCtx)){scope.referenced=scope.referenced||{};if(!$B.builtins[this.value]){scope.referenced[this.value]=true}}}if(C.parent.type=="call_arg"){this.call_arg=true}};IdCtx.prototype.ast=function(){var ast_obj;if(["True","False","None"].indexOf(this.value)>-1){ast_obj=new ast.Constant(_b_[this.value])}else{ast_obj=new ast.Name(this.value,this.bound?new ast.Store:new ast.Load)}set_position(ast_obj,this.position);return ast_obj};IdCtx.prototype.transition=function(token,value){var C=this,module=get_module(this);if(C.value=="case"&&C.parent.parent.type=="node"){var save_position=module.token_reader.position,ends_with_comma=check_line(module.token_reader,module.filename);module.token_reader.position=save_position;if(ends_with_comma){var node=get_node(C),parent=node.parent;if(!node.parent||!node.parent.is_match){raise_syntax_error(C,"('case' not inside 'match')")}else{if(node.parent.irrefutable){var name=node.parent.irrefutable,msg=name=="_"?"wildcard":`name capture '${name}'`;raise_syntax_error(C,`${msg} makes remaining patterns unreachable`)}}return transition(new PatternCtx(new CaseCtx(C.parent.parent)),token,value)}}else if(C.value=="match"&&C.parent.parent.type=="node"){var save_position=module.token_reader.position,ends_with_comma=check_line(module.token_reader,module.filename);module.token_reader.position=save_position;if(ends_with_comma){return transition(new AbstractExprCtx(new MatchCtx(C.parent.parent),true),token,value)}}else if(C.value=="type"&&C.parent.parent.type=="node"){if(token=="id"){return new TypeAliasCtx(C,value)}}switch(token){case"=":if(C.parent.type=="expr"&&C.parent.parent!==undefined&&C.parent.parent.type=="call_arg"){return new AbstractExprCtx(new 
KwArgCtx(C.parent),false)}return transition(C.parent,token,value);case".":delete this.bound;return transition(C.parent,token,value);case"op":return transition(C.parent,token,value);case"id":case"str":case"JoinedStr":case"int":case"float":case"imaginary":if(["print","exec"].indexOf(C.value)>-1){var f=C.value,msg=`Missing parentheses in call to '${f}'.`+` Did you mean ${f}(...)?`}else{var msg="invalid syntax. Perhaps you forgot a comma?"}var call_arg=parent_match(C,{type:"call_arg"});raise_syntax_error_known_range(C,this.position,$token.value,msg)}if(this.parent.parent.type=="starred"){if([".","[","("].indexOf(token)==-1){return this.parent.parent.transition(token,value)}}return transition(C.parent,token,value)};var ImportCtx=$B.parser.ImportCtx=function(C){this.type="import";this.parent=C;this.tree=[];this.position=$token.value;C.tree[C.tree.length]=this;this.expect="id"};ImportCtx.prototype.ast=function(){var names=[];for(var item of this.tree){var alias=new ast.alias(item.name);if(item.alias!=item.name){alias.asname=item.alias}names.push(alias)}var ast_obj=new ast.Import(names);set_position(ast_obj,this.position);return ast_obj};ImportCtx.prototype.transition=function(token,value){var C=this;switch(token){case"id":if(C.expect=="id"){if(C.order_error){raise_syntax_error(C,"Did you mean to use 'from ... import ...' instead?")}new ImportedModuleCtx(C,value);C.expect=",";return C}if(C.expect=="qual"){C.expect=",";C.tree[C.tree.length-1].name+="."+value;C.tree[C.tree.length-1].alias+="."+value;return C}if(C.expect=="alias"){C.expect=",";C.tree[C.tree.length-1].alias=value;return C}break;case".":if(C.expect==","){C.expect="qual";return C}break;case",":if(C.expect==","){C.expect="id";return C}break;case"as":if(C.expect==","){C.expect="alias";return C}break;case"eol":if(C.expect==","){return transition(C.parent,token)}break;case"from":if(C.expect==","){C.expect="id";C.order_error=true;return C}break}raise_syntax_error(C)};var ImportedModuleCtx=$B.parser.ImportedModuleCtx=function(C,name){this.type="imported module";this.parent=C;this.name=name;this.alias=name;C.tree[C.tree.length]=this};ImportedModuleCtx.prototype.transition=function(token,value){var C=this};var JoinedStrCtx=$B.parser.JoinedStrCtx=function(C,values){this.type="JoinedStr";this.parent=C;this.tree=[];this.position=$token.value;this.scope=get_scope(C);var line_num=get_node(C).line_num;for(var value of values){if(typeof value=="string"){new StringCtx(this,"'"+value.replace(new RegExp("'","g"),"\\"+"'")+"'")}else{if(value.format!==undefined){value.format=new JoinedStrCtx(this,value.format);this.tree.pop()}var src=value.expression.trimStart(),filename=get_module(this).filename,root=create_root_node(src,this.scope.module,this.scope.id,this.scope.parent_block,line_num);try{dispatch_tokens(root)}catch(err){var fstring_lineno=this.position.start[0],fstring_offset=this.position.start[1];err.filename=get_module(this).filename;err.lineno+=fstring_lineno-1;err.offset+=fstring_offset-1;err.end_lineno+=fstring_lineno-1;err.end_offset+=fstring_offset-1;err.text=this.position.string;err.args[1]=$B.fast_tuple([filename,err.lineno,err.offset,err.text,err.end_lineno,err.end_offset]);throw err}var expr=root.children[0].C.tree[0];this.tree.push(expr);expr.parent=this;expr.elt=value}}C.tree.push(this);this.raw=false};JoinedStrCtx.prototype.ast=function(){var res={type:"JoinedStr",values:[]};var state;for(var item of this.tree){if(item instanceof StringCtx){if(state=="string"){$B.last(res.values).value+=item.value}else{var item_ast=new 
ast.Constant(item.value);set_position(item_ast,item.position);res.values.push(item_ast)}state="string"}else{var conv_num={a:97,r:114,s:115},format=item.elt.format;format=format===undefined?format:format.ast();var value=new ast.FormattedValue(item.ast(),conv_num[item.elt.conversion]||-1,format);set_position(value,this.position);var format=item.format;if(format!==undefined){value.format=item.format.ast()}res.values.push(value);state="formatted_value"}}var ast_obj=new ast.JoinedStr(res.values);set_position(ast_obj,this.position);return ast_obj};JoinedStrCtx.prototype.transition=function(token,value){var C=this;switch(token){case"[":return new AbstractExprCtx(new SubscripCtx(C.parent),false);case"(":C.parent.tree[0]=C;return new CallCtx(C.parent);case"str":if(C.tree.length>0&&$B.last(C.tree).type=="str"){C.tree[C.tree.length-1].add_value(value)}else{new StringCtx(this,value)}return C;case"JoinedStr":var joined_expr=new JoinedStrCtx(C.parent,value);C.parent.tree.pop();if(C.tree.length>0&&$B.last(C.tree)instanceof StringCtx&&joined_expr.tree[0]instanceof StringCtx){$B.last(C.tree).value+=joined_expr.tree[0].value;C.tree=C.tree.concat(joined_expr.tree.slice(1))}else{C.tree=C.tree.concat(joined_expr.tree)}return C}return transition(C.parent,token,value)};var KwdCtx=$B.parser.KwdCtx=function(C){this.type="kwd";this.position=C.position;this.parent=C;this.tree=[];C.tree.push(this)};KwdCtx.prototype.ast=function(){var ast_obj=new $B.ast.keyword(this.tree[0].ast(),new ast.Load);set_position(ast_obj,this.position);return ast_obj};KwdCtx.prototype.transition=function(token,value){var C=this;return transition(C.parent,token,value)};var KwArgCtx=$B.parser.KwArgCtx=function(C){this.type="kwarg";this.parent=C.parent;this.position=first_position(C);this.equal_sign_position=$token.value;this.tree=[C.tree[0]];C.parent.tree.pop();C.parent.tree.push(this);C.parent.parent.has_kw=true};KwArgCtx.prototype.transition=function(token,value){var C=this;if(token==","){return new CallArgCtx(C.parent.parent)}else if(token=="for"){raise_syntax_error_known_range(C,C.position,C.equal_sign_position,"invalid syntax. 
"+"Maybe you meant '==' or ':=' instead of '='?")}return transition(C.parent,token)};var LambdaCtx=$B.parser.LambdaCtx=function(C){this.type="lambda";this.parent=C;C.tree[C.tree.length]=this;this.tree=[];this.position=$token.value;this.node=get_node(this);this.positional_list=[];this.default_list=[];this.other_args=null;this.other_kw=null;this.after_star=[]};LambdaCtx.prototype.ast=function(){var args;if(this.args.length==0){args=new ast.arguments([],[],undefined,[],[],undefined,[])}else{args=this.args[0].ast()}var ast_obj=new ast.Lambda(args,this.tree[0].ast());set_position(ast_obj,this.position);return ast_obj};LambdaCtx.prototype.transition=function(token,value){var C=this;if(token==":"&&C.args===undefined){C.args=C.tree;C.tree=[];return new AbstractExprCtx(C,false)}if(C.args!==undefined){return transition(C.parent,token)}if(C.args===undefined){if(token=="("){raise_syntax_error(C,"Lambda expression parameters cannot be parenthesized")}else if(C.tree.length>0&&C.tree[0].type=="func_args"){raise_syntax_error(C)}else{return transition(new FuncArgs(C),token,value)}}raise_syntax_error(C)};var ListCompCtx=function(C){this.type="listcomp";this.tree=[C.tree[0]];this.tree[0].parent=this;this.position=$token.value;Comprehension.make_comp(this,C)};ListCompCtx.prototype.ast=function(){var res=new ast.ListComp(this.tree[0].ast(),Comprehension.generators(this.tree.slice(1)));set_position(res,this.position);return res};ListCompCtx.prototype.transition=function(token,value){var C=this;if(token=="]"){return this.parent}raise_syntax_error(C)};var ListOrTupleCtx=$B.parser.ListOrTupleCtx=function(C,real){this.type="list_or_tuple";this.real=real;this.expect="id";this.closed=false;this.parent=C;this.tree=[];this.position=$token.value;C.tree[C.tree.length]=this};ListOrTupleCtx.prototype.ast=function(){var elts=this.tree.map((x=>x.ast())),ast_obj;if(this.real=="list"){ast_obj=new ast.List(elts,new ast.Load)}else if(this.real=="tuple"){ast_obj=new ast.Tuple(elts,new ast.Load)}set_position(ast_obj,this.position,this.end_position);return ast_obj};ListOrTupleCtx.prototype.transition=function(token,value){var C=this;if(C.closed){if(token=="["){return new AbstractExprCtx(new SubscripCtx(C.parent),false)}if(token=="("){return new CallCtx(C.parent)}return transition(C.parent,token,value)}else{if(C.expect==","){switch(C.real){case"tuple":if(token==")"){if(C.implicit){return transition(C.parent,token,value)}var close=true;C.end_position=$token.value;if(C.tree.length==1){if(parent_match(C,{type:"del"})&&C.tree[0].type=="expr"&&C.tree[0].tree[0].type=="starred"){raise_syntax_error_known_range(C,C.tree[0].tree[0].position,last_position(C.tree[0]),"cannot use starred expression here")}var grandparent=C.parent.parent;grandparent.tree.pop();grandparent.tree.push(C.tree[0]);C.tree[0].$was_parenthesized=true;C.tree[0].parent=grandparent;return C.tree[0]}if(C.packed||C.type=="list_or_tuple"&&C.tree.length==1&&C.tree[0].type=="expr"&&C.tree[0].tree[0].type=="starred"){raise_syntax_error(C,"cannot use starred expression here")}if(close){C.close()}if(C.parent.type=="starred"){return C.parent.parent}return C.parent}break;case"list":if(token=="]"){C.close();if(C.parent.type=="starred"){if(C.parent.tree.length>0){return C.parent.tree[0]}else{return C.parent.parent}}return C.parent}break}switch(token){case",":if(C.real=="tuple"){C.has_comma=true}C.expect="id";return C;case"for":if(C.real=="list"){if(this.tree.length>1){raise_syntax_error(C,"did you forget "+"parentheses around the comprehension target?")}return new TargetListCtx(new 
ForExpr(new ListCompCtx(C)))}else{return new TargetListCtx(new ForExpr(new GeneratorExpCtx(C)))}}return transition(C.parent,token,value)}else if(C.expect=="id"){switch(C.real){case"tuple":if(token==")"){C.close();return C.parent}if(token=="eol"&&C.implicit===true){C.close();return transition(C.parent,token)}break;case"list":if(token=="]"){C.close();return C}break}switch(token){case"=":if(C.real=="tuple"&&C.implicit===true){C.close();C.parent.tree.pop();var expr=new ExprCtx(C.parent,"tuple",false);expr.tree=[C];C.parent=expr;return transition(C.parent,token)}raise_syntax_error(C,"(unexpected '=' inside list)");break;case")":break;case"]":if(C.real=="tuple"&&C.implicit===true){return transition(C.parent,token,value)}else{break}raise_syntax_error(C,'(unexpected "if" inside list)');break;case",":raise_syntax_error(C,"(unexpected comma inside list)");break;case"str":case"JoinedStr":case"int":case"float":case"imaginary":case"ellipsis":case"lambda":case"yield":case"id":case"(":case"[":case"{":case"await":case"not":case":":C.expect=",";var expr=new AbstractExprCtx(C,false);return transition(expr,token,value);case"op":if("+-~*".indexOf(value)>-1||value=="**"){C.expect=",";var expr=new AbstractExprCtx(C,false);return transition(expr,token,value)}raise_syntax_error(C,`(unexpected operator: ${value})`);break;default:raise_syntax_error(C)}}else{return transition(C.parent,token,value)}}};ListOrTupleCtx.prototype.close=function(){this.closed=true;this.end_position=$token.value;this.src=get_module(this).src;for(var i=0,len=this.tree.length;i0){var previous=this.node.parent.children[rank-1];if(previous.C.tree[0].type=="try"&&["except","finally"].indexOf(token)==-1){raise_syntax_error(C,"expected 'except' or 'finally' block")}}}switch(token){case",":if(C.tree&&C.tree.length==0){raise_syntax_error(C)}var first=C.tree[0];C.tree=[];var implicit_tuple=new ListOrTupleCtx(C);implicit_tuple.real="tuple";implicit_tuple.implicit=0;implicit_tuple.tree.push(first);first.parent=implicit_tuple;return implicit_tuple;case"[":case"(":case"{":case".":case"bytes":case"float":case"id":case"imaginary":case"int":case"str":case"JoinedStr":case"not":case"lambda":var expr=new AbstractExprCtx(C,true);return transition(expr,token,value);case"assert":return new AbstractExprCtx(new AssertCtx(C),false,true);case"async":return new AsyncCtx(C);case"await":return new AbstractExprCtx(new AwaitCtx(C),false);case"break":return new BreakCtx(C);case"class":return new ClassCtx(C);case"continue":return new ContinueCtx(C);case"def":return new DefCtx(C);case"del":return new AbstractExprCtx(new DelCtx(C),true);case"elif":try{var previous=get_previous(C)}catch(err){raise_syntax_error(C,"('elif' does not follow 'if')")}if(["condition"].indexOf(previous.type)==-1||previous.token=="while"){raise_syntax_error(C,`(elif after ${previous.type})`)}return new AbstractExprCtx(new ConditionCtx(C,token),false);case"ellipsis":var expr=new AbstractExprCtx(C,true);return transition(expr,token,value);case"else":var previous=get_previous(C);if(["condition","except","for"].indexOf(previous.type)==-1){raise_syntax_error(C,`(else after ${previous.type})`)}return new SingleKwCtx(C,token);case"except":var previous=get_previous(C);if(["try","except"].indexOf(previous.type)==-1){raise_syntax_error(C,`(except after ${previous.type})`)}return new ExceptCtx(C);case"finally":var previous=get_previous(C);if(["try","except"].indexOf(previous.type)==-1&&(previous.type!="single_kw"||previous.token!="else")){raise_syntax_error(C,`finally after ${previous.type})`)}return new 
SingleKwCtx(C,token);case"for":return new TargetListCtx(new ForExpr(C));case"from":return new FromCtx(C);case"global":return new GlobalCtx(C);case"if":case"while":return new AbstractExprCtx(new ConditionCtx(C,token),false);case"import":return new ImportCtx(C);case"lambda":return new LambdaCtx(C);case"nonlocal":return new NonlocalCtx(C);case"op":switch(value){case"*":var expr=new AbstractExprCtx(C,true);return transition(expr,token,value);case"+":case"-":case"~":C.position=$token.value;var expr=new ExprCtx(C,"unary",true);return new AbstractExprCtx(new UnaryCtx(expr,value),false);case"@":return new AbstractExprCtx(new DecoratorCtx(C),false)}break;case"pass":return new PassCtx(C);case"raise":return new AbstractExprCtx(new RaiseCtx(C),false);case"return":return new AbstractExprCtx(new ReturnCtx(C),true);case"try":return new TryCtx(C);case"with":return new WithCtx(C);case"yield":return new AbstractExprCtx(new YieldCtx(C),true);case"eol":if(C.maybe_type){if(C.tree.length>0&&C.tree[0].type=="assign"){alert("type soft keyword")}else{raise_syntax_error(C)}}if(C.tree.length==0){C.node.parent.children.pop();return C.node.parent.C}return C}console.log("error, C",C,"token",token,value);raise_syntax_error(C)};var NonlocalCtx=$B.parser.NonlocalCtx=function(C){this.type="nonlocal";this.parent=C;this.tree=[];this.position=$token.value;C.tree[C.tree.length]=this;this.expect="id";this.scope=get_scope(this);this.scope.nonlocals=this.scope.nonlocals||new Set};NonlocalCtx.prototype.ast=function(){var ast_obj=new ast.Nonlocal(this.tree.map((item=>item.value)));set_position(ast_obj,this.position);return ast_obj};NonlocalCtx.prototype.transition=function(token,value){var C=this;switch(token){case"id":if(C.expect=="id"){new IdCtx(C,value);C.expect=",";return C}break;case",":if(C.expect==","){C.expect="id";return C}break;case"eol":if(C.expect==","){return transition(C.parent,token)}break}raise_syntax_error(C)};var NotCtx=$B.parser.NotCtx=function(C){this.type="not";this.parent=C;this.tree=[];this.position=$token.value;C.tree[C.tree.length]=this};NotCtx.prototype.ast=function(){var ast_obj=new ast.UnaryOp(new ast.Not,this.tree[0].ast());set_position(ast_obj,this.position);return ast_obj};NotCtx.prototype.transition=function(token,value){var C=this;switch(token){case"in":C.parent.parent.tree.pop();return new ExprCtx(new OpCtx(C.parent,"not_in"),"op",false);case"id":case"imaginary":case"int":case"float":case"str":case"JoinedStr":case"bytes":case"[":case"(":case"{":case".":case"not":case"lambda":var expr=new AbstractExprCtx(C,false);return transition(expr,token,value);case"op":var a=value;if("+"==a||"-"==a||"~"==a){var expr=new AbstractExprCtx(C,false);return transition(expr,token,value)}}if(this.tree.length==0||this.tree[0]instanceof AbstractExprCtx){raise_syntax_error(C)}return transition(C.parent,token)};var NumberCtx=$B.parser.NumberCtx=function(type,C,value){this.type=type;this.value=value;this.parent=C;this.tree=[];this.position=$token.value;C.tree[C.tree.length]=this};NumberCtx.prototype.ast=function(){var value=$B.AST.$convert(this),ast_obj=new $B.ast.Constant(value);set_position(ast_obj,this.position);return ast_obj};NumberCtx.prototype.transition=function(token,value){var C=this;var num_type={2:"binary",8:"octal",10:"decimal",16:"hexadecimal"}[this.value[0]];if(token=="id"){if(value=="_"){raise_syntax_error(C,"invalid decimal literal")}else if(["and","else","for","if","in","is","or"].indexOf(value)==-1){raise_syntax_error(C,`invalid ${num_type} literal`)}else 
if(num_type=="hexadecimal"&&this.value[1].length%2==1){$B.warn(_b_.SyntaxWarning,`invalid hexadecimal literal`,get_module(C).filename,$token.value)}}else if(token=="op"){if(["and","in","is","or"].indexOf(value)>-1&&num_type=="hexadecimal"&&this.value[1].length%2==1){$B.warn(_b_.SyntaxWarning,`invalid hexadecimal literal`,get_module(C).filename,$token.value)}}return transition(C.parent,token,value)};var OpCtx=$B.parser.OpCtx=function(C,op){this.type="op";this.op=op;this.parent=C.parent;this.position=$token.value;this.tree=[C];this.scope=get_scope(this);if(C.type=="expr"){if(["int","float","str"].indexOf(C.tree[0].type)>-1){this.left_type=C.tree[0].type}}C.parent.tree.pop();C.parent.tree.push(this)};OpCtx.prototype.ast=function(){var ast_type_class=op2ast_class[this.op],op_type=ast_type_class[0],ast_class=ast_type_class[1],ast_obj;if(op_type===ast.Compare){var left=this.tree[0].ast(),ops=[new ast_class];if(this.ops){for(var op of this.ops.slice(1)){ops.push(new op2ast_class[op][1])}ast_obj=new ast.Compare(left,ops,this.tree.slice(1).map((x=>x.ast())))}else{ast_obj=new ast.Compare(left,ops,[this.tree[1].ast()])}}else if(op_type===ast.UnaryOp){ast_obj=new op_type(new ast_class,this.tree[1].ast())}else if(op_type===ast.BoolOp){var values=[this.tree[1]],main_op=this.op,ctx=this;while(ctx.tree[0].type=="op"&&ctx.tree[0].op==main_op){values.splice(0,0,ctx.tree[0].tree[1]);ctx=ctx.tree[0]}values.splice(0,0,ctx.tree[0]);ast_obj=new op_type(new ast_class,values.map((x=>x.ast())))}else{ast_obj=new op_type(this.tree[0].ast(),new ast_class,this.tree[1].ast())}set_position(ast_obj,this.position);return ast_obj};function is_literal(expr){return expr.type=="expr"&&["int","str","float","imaginary"].indexOf(expr.tree[0].type)>-1}OpCtx.prototype.transition=function(token,value){var C=this;if(C.op===undefined){console.log("C has no op",C);raise_syntax_error(C)}if((C.op=="is"||C.op=="is_not")&&C.tree.length>1){for(var operand of C.tree){if(is_literal(operand)){var head=C.op=="is"?"is":"is not";$B.warn(_b_.SyntaxWarning,`"${head}" with a literal. 
Did you mean "=="?"`,get_module(C).filename,$token.value);break}}}switch(token){case"id":case"imaginary":case"int":case"float":case"str":case"JoinedStr":case"bytes":case"[":case"(":case"{":case".":case"not":case"lambda":return transition(new AbstractExprCtx(C,false),token,value);case"op":switch(value){case"+":case"-":case"~":return new UnaryCtx(C,value)}break;default:if(C.tree[C.tree.length-1].type=="abstract_expr"){raise_syntax_error(C)}}return transition(C.parent,token)};var PassCtx=$B.parser.PassCtx=function(C){this.type="pass";this.parent=C;this.tree=[];this.position=$token.value;C.tree[C.tree.length]=this};PassCtx.prototype.ast=function(){var ast_obj=new ast.Pass;set_position(ast_obj,this.position);return ast_obj};PassCtx.prototype.transition=function(token,value){var C=this;if(token=="eol"){return C.parent}raise_syntax_error(C)};var PatternCtx=$B.parser.PatternCtx=function(C){this.type="pattern";this.parent=C;this.tree=[];C.tree.push(this);this.expect="id"};PatternCtx.prototype.transition=function(token,value){var C=this;switch(C.expect){case"id":switch(token){case"str":case"int":case"float":case"imaginary":C.expect=",";return new PatternLiteralCtx(C,token,value);case"op":switch(value){case"-":case"+":C.expect=",";return new PatternLiteralCtx(C,{sign:value});case"*":C.expect="starred_id";return C;default:raise_syntax_error(C)}break;case"id":C.expect=",";if(["None","True","False"].indexOf(value)>-1){return new PatternLiteralCtx(C,token,value)}else{return new PatternCaptureCtx(C,value)}break;case"[":return new PatternCtx(new PatternSequenceCtx(C.parent,token));case"(":return new PatternCtx(new PatternGroupCtx(C.parent,token));case"{":return new PatternMappingCtx(C.parent,token);case"JoinedStr":raise_syntax_error(C,"patterns may only match "+"literals and attribute lookups")}break;case"starred_id":if(token=="id"){var capture=new PatternCaptureCtx(C,value);capture.starred=true;return capture}raise_syntax_error(C,"(expected id after '*')");break;case"number":switch(token){case"int":case"float":case"imaginary":C.expect=",";return new PatternLiteralCtx(C,token,value,C.sign);default:raise_syntax_error(C)}break;case",":switch(token){case",":if(C.parent instanceof PatternSequenceCtx){return new PatternCtx(C.parent)}return new PatternCtx(new PatternSequenceCtx(C.parent));case":":return BodyCtx(C)}}return C.parent.transition(token,value)};function as_pattern(C,token,value){if(C.expect=="as"){if(token=="as"){C.expect="alias";return C}else{return transition(C.parent,token,value)}}else if(C.expect=="alias"){if(token=="id"){if(value=="_"){raise_syntax_error(C,"cannot use '_' as a target")}if(C.bindings().indexOf(value)>-1){raise_syntax_error(C,`multiple assignments to name '${value}' in pattern`)}C.alias=value;return C.parent}else{raise_syntax_error(C,"invalid pattern target")}}}var PatternCaptureCtx=function(C,value){this.type="capture_pattern";this.parent=C.parent;C.parent.tree.pop();C.parent.tree.push(this);this.tree=[value];this.position=$token.value;this.positions=[this.position];this.expect="."};PatternCaptureCtx.prototype.ast=function(){var ast_obj;try{if(this.tree.length>1){var pattern=new ast.Name(this.tree[0],new ast.Load);set_position(pattern,this.position);for(var i=1;i1||pattern instanceof ast.MatchAs){ast_obj=pattern}else if(typeof pattern=="string"){ast_obj=new ast.MatchAs(undefined,pattern)}else if(!this.starred){ast_obj=new ast.MatchAs(undefined,pattern)}set_position(ast_obj,this.position);return ast_obj}catch(err){console.log("error capture ast");show_line(this);throw 
err}};PatternCaptureCtx.prototype.bindings=function(){var bindings=this.tree[0]=="_"?[]:this.tree.slice();if(this.alias){bindings.push(this.alias)}return bindings};PatternCaptureCtx.prototype.transition=function(token,value){var C=this;switch(C.expect){case".":if(token=="."){C.type="value_pattern";C.expect="id";return C}else if(token=="("){return new PatternCtx(new PatternClassCtx(C))}else if(C.parent instanceof PatternMappingCtx){return C.parent.transition(token,value)}else{C.expect="as";return C.transition(token,value)}break;case"as":case"alias":var res=as_pattern(C,token,value);return res;case"id":if(token=="id"){C.tree.push(value);C.positions.push($token.value);C.expect=".";return C}}return transition(C.parent,token,value)};const PatternClassCtx=function(C){this.type="class_pattern";this.tree=[];this.parent=C.parent;this.position=$token.value;this.class_id=C.tree.slice();this.positions=C.positions;C.tree.pop();this.attrs=C.tree.slice(2);C.parent.tree.pop();C.parent.tree.push(this);this.expect=",";this.keywords=[];this.positionals=[];this.bound_names=[]};PatternClassCtx.prototype.ast=function(){if(this.class_id.length==1){var cls=new ast.Name(this.class_id[0])}else{var cls;for(var i=0,len=this.class_id.length;i0){$token.value=last.position;raise_syntax_error(C,"positional patterns follow keyword patterns")}if(last.is_keyword){if(C.keywords.indexOf(last.tree[0])>-1){raise_syntax_error(C,`keyword argument repeated: ${last.tree[0]}`)}C.keywords.push(last.tree[0]);bound=last.tree[1].bindings()}else{bound=last.bindings()}for(var b of bound){if(C.bound_names.indexOf(b)>-1){raise_syntax_error(C,"multiple assignments "+`to name '${b}' in pattern`)}}C.bound_names=C.bound_names.concat(bound)}}switch(this.expect){case",":switch(token){case"=":var current=$B.last(this.tree);if(current instanceof PatternCaptureCtx){if(this.keywords.indexOf(current.tree[0])>-1){raise_syntax_error(C,"attribute name repeated in class pattern: "+current.tree[0])}current.is_keyword=true;return new PatternCtx(current)}raise_syntax_error(this,"'=' after non-capture");break;case",":check_last_arg();return new PatternCtx(this);case")":check_last_arg();if($B.last(this.tree).tree.length==0){this.tree.pop()}C.expect="as";return C;default:raise_syntax_error(C)}break;case"as":case"alias":return as_pattern(C,token,value)}return transition(C.parent,token,value)};var PatternGroupCtx=function(C){this.type="group_pattern";this.parent=C;this.position=$token.value;this.tree=[];C.tree.pop();this.expect=",|";C.tree.push(this)};function remove_empty_pattern(C){var last=$B.last(C.tree);if(last&&last instanceof PatternCtx&&last.tree.length==0){C.tree.pop()}}PatternGroupCtx.prototype.ast=function(){var ast_obj;if(this.tree.length==1&&!this.has_comma){ast_obj=this.tree[0].ast()}else{ast_obj=PatternSequenceCtx.prototype.ast.bind(this)()}if(this.alias){ast_obj=new ast.MatchAs(ast_obj,this.alias)}set_position(ast_obj,this.position);return ast_obj};PatternGroupCtx.prototype.bindings=function(){var bindings=[];for(var item of this.tree){bindings=bindings.concat(item.bindings())}if(this.alias){bindings.push(this.alias)}return bindings};PatternGroupCtx.prototype.transition=function(token,value){var C=this;switch(C.expect){case",|":if(token==")"){remove_empty_pattern(C);C.expect="as";return C}else if(token==","){C.expect="id";C.has_comma=true;return C}else if(token=="op"&&value=="|"){var opctx=new PatternOrCtx(C.parent);opctx.parenthese=true;return new PatternCtx(opctx)}else if(this.token===undefined){return 
transition(C.parent,token,value)}raise_syntax_error(C);break;case"as":case"alias":return as_pattern(C,token,value);case"id":if(token==")"){remove_empty_pattern(C);C.expect="as";return C}C.expect=",|";return transition(new PatternCtx(C),token,value)}raise_syntax_error(C)};var PatternLiteralCtx=function(C,token,value,sign){this.type="literal_pattern";this.parent=C.parent;this.position=$token.value;C.parent.tree.pop();C.parent.tree.push(this);if(token.sign){this.tree=[{sign:token.sign}];this.expect="number"}else{if(token=="str"){this.tree=[];new StringCtx(this,value)}else if(token=="JoinedStr"){raise_syntax_error(this,"patterns cannot include f-strings")}else{this.tree=[{type:token,value:value,sign:sign}]}this.expect="op"}};PatternLiteralCtx.prototype.ast=function(){try{var first=this.tree[0],result;if(first.type=="str"){var v=StringCtx.prototype.ast.bind(first)();result=new ast.MatchValue(v)}else if(first.type=="id"){result=new ast.MatchSingleton(_b_[first.value])}else{first.position=this.position;var num=NumberCtx.prototype.ast.bind(first)(),res=new ast.MatchValue(num);if(first.sign&&first.sign!="+"){var op={"+":ast.UAdd,"-":ast.USub,"~":ast.Invert}[first.sign];var unary_op=new ast.UnaryOp(new op,res.value);set_position(unary_op,this.position);res=new ast.MatchValue(unary_op);set_position(res,this.position)}if(this.tree.length==1){result=res}else{this.tree[2].position=this.position;var num2=NumberCtx.prototype.ast.bind(this.tree[2])(),binop=new ast.BinOp(res.value,this.tree[1]=="+"?new ast.Add:new ast.Sub,num2);set_position(binop,this.position);result=new ast.MatchValue(binop)}}set_position(result,this.position);if(this.tree.length==2){result=new ast.MatchValue(new ast.BinOp(this.tree[0].ast(),this.num_sign=="+"?ast.Add:ast.Sub,this.tree[1].ast()))}if(this.alias){result=new ast.MatchAs(result,this.alias)}set_position(result,this.position);return result}catch(err){show_line(this);throw err}};PatternLiteralCtx.prototype.bindings=function(){if(this.alias){return[this.alias]}return[]};PatternLiteralCtx.prototype.transition=function(token,value){var C=this;switch(C.expect){case"op":if(token=="op"){switch(value){case"+":case"-":if(["int","float"].indexOf(C.tree[0].type)>-1){C.expect="imaginary";this.tree.push(value);C.num_sign=value;return C}raise_syntax_error(C,"patterns cannot include operators");break;default:return transition(C.parent,token,value)}}break;case"number":switch(token){case"int":case"float":case"imaginary":var last=$B.last(C.tree);if(this.tree.token===undefined){last.type=token;last.value=value;C.expect="op";return C}break;default:raise_syntax_error(C)}break;case"imaginary":switch(token){case"imaginary":C.tree.push({type:token,value:value,sign:C.num_sign});return C.parent;default:raise_syntax_error(C,"(expected imaginary)")}break;case"as":case"alias":return as_pattern(C,token,value)}if(token=="as"&&C.tree.length==1){C.expect="as";return C.transition(token,value)}return transition(C.parent,token,value)};var PatternMappingCtx=function(C){this.type="mapping_pattern";this.parent=C;this.position=$token.value;C.tree.pop();this.tree=[];C.tree.push(this);this.expect="key_value_pattern";this.literal_keys=[];this.bound_names=[]};PatternMappingCtx.prototype.ast=function(){var keys=[],patterns=[];for(var item of this.tree){keys.push(item.tree[0].ast().value);if(item.tree[0]instanceof PatternLiteralCtx){patterns.push(item.tree[1].ast())}else{patterns.push(item.tree[2].ast())}}var res=new 
ast.MatchMapping(keys,patterns);if(this.double_star){res.rest=this.double_star.tree[0]}set_position(res,this.position);return res};PatternMappingCtx.prototype.bindings=function(){var bindings=[];for(var item of this.tree){bindings=bindings.concat(item.bindings())}if(this.rest){bindings=bindings.concat(this.rest.bindings())}if(this.alias){bindings.push(this.alias)}return bindings};PatternMappingCtx.prototype.transition=function(token,value){var C=this;function check_duplicate_names(){var last=$B.last(C.tree),bindings;if(last instanceof PatternKeyValueCtx){if(C.double_star){raise_syntax_error(C,"can't use starred name here (consider moving to end)")}if(last.tree[0].type=="value_pattern"){bindings=last.tree[2].bindings()}else{bindings=last.tree[1].bindings()}for(var binding of bindings){if(C.bound_names.indexOf(binding)>-1){raise_syntax_error(C,`multiple assignments to name '${binding}'`+" in pattern")}}C.bound_names=C.bound_names.concat(bindings)}}switch(C.expect){case"key_value_pattern":if(token=="}"||token==","){check_duplicate_names();if(C.double_star){var ix=C.tree.indexOf(C.double_star);if(ix!=C.tree.length-1){raise_syntax_error(C,"can't use starred name here (consider moving to end)")}C.rest=C.tree.pop()}return token==","?C:C.parent}if(token=="op"&&value=="**"){C.expect="capture_pattern";return C}var p=new PatternCtx(C);try{var lit_or_val=p.transition(token,value)}catch(err){raise_syntax_error(C,"mapping pattern keys may only "+"match literals and attribute lookups")}if(C.double_star){raise_syntax_error(C)}if(lit_or_val instanceof PatternLiteralCtx){C.tree.pop();new PatternKeyValueCtx(C,lit_or_val);return lit_or_val}else if(lit_or_val instanceof PatternCaptureCtx){C.has_value_pattern_keys=true;C.tree.pop();new PatternKeyValueCtx(C,lit_or_val);C.expect=".";return this}else{raise_syntax_error(C,"(expected key or **)")}break;case"capture_pattern":var p=new PatternCtx(C);var capture=transition(p,token,value);if(capture instanceof PatternCaptureCtx){if(C.double_star){raise_syntax_error(C,"only one double star pattern is accepted")}if(value=="_"){raise_syntax_error(C)}if(C.bound_names.indexOf(value)>-1){raise_syntax_error(C,"duplicate binding: "+value)}C.bound_names.push(value);capture.double_star=true;C.double_star=capture;C.expect=",";return C}else{raise_syntax_error(C,"(expected identifier)")}break;case",":if(token==","){C.expect="key_value_pattern";return C}else if(token=="}"){C.expect="key_value_pattern";return C.transition(token,value)}raise_syntax_error(C);break;case".":if(C.tree.length>0){var last=$B.last(C.tree);if(last instanceof PatternKeyValueCtx){new IdCtx(last,last.tree[0].tree[0]);C.expect="key_value_pattern";return transition(last.tree[0],token,value)}}raise_syntax_error(C)}return transition(C.parent,token,value)};var PatternKeyValueCtx=function(C,literal_or_value){this.type="pattern_key_value";this.parent=C;this.tree=[literal_or_value];literal_or_value.parent=this;this.expect=":";C.tree.push(this)};PatternKeyValueCtx.prototype.bindings=PatternMappingCtx.prototype.bindings;PatternKeyValueCtx.prototype.transition=function(token,value){var C=this;switch(C.expect){case":":switch(token){case":":var key_obj=this.tree[0];if(key_obj instanceof PatternLiteralCtx){var key=$B.AST.$convert(key_obj.tree[0]);if(_b_.list.__contains__(this.parent.literal_keys,key)){raise_syntax_error(C,`mapping pattern checks `+`duplicate key (${_b_.repr(key)})`)}this.parent.literal_keys.push(key)}this.expect=",";return new PatternCtx(this);default:raise_syntax_error(C,"(expected 
:)")}break;case",":switch(token){case"}":return transition(C.parent,token,value);case",":C.parent.expect="key_value_pattern";return transition(C.parent,token,value);case"op":if(value=="|"){return new PatternCtx(new PatternOrCtx(C))}}raise_syntax_error(C,"(expected ',' or '}')")}return transition(C.parent,token,value)};var PatternOrCtx=function(C){this.type="or_pattern";this.parent=C;this.position=$token.value;var first_pattern=C.tree.pop();if(first_pattern instanceof PatternGroupCtx&&first_pattern.expect!="as"){first_pattern=first_pattern.tree[0]}this.tree=[first_pattern];first_pattern.parent=this;this.expect="|";C.tree.push(this);this.check_reachable()};PatternOrCtx.prototype.ast=function(){var ast_obj=new ast.MatchOr(this.tree.map((x=>x.ast())));set_position(ast_obj,this.position);if(this.alias){ast_obj=new ast.MatchAs(ast_obj,this.alias)}set_position(ast_obj,this.position);return ast_obj};PatternOrCtx.prototype.bindings=function(){var names;for(var subpattern of this.tree){if(subpattern.bindings===undefined){console.log("no binding",subpattern)}var subbindings=subpattern.bindings();if(names===undefined){names=subbindings}else{for(let item of names){if(subbindings.indexOf(item)==-1){raise_syntax_error(this,"alternative patterns bind different names")}}for(let item of subbindings){if(names.indexOf(item)==-1){raise_syntax_error(this,"alternative patterns bind different names")}}}}if(this.alias){return names.concat(this.alias)}return names};PatternOrCtx.prototype.check_reachable=function(){var item=$B.last(this.tree);var capture;if(item.type=="capture_pattern"){capture=item.tree[0]}else if(item.type=="group_pattern"&&item.tree.length==1&&item.tree[0].type=="capture_pattern"){capture=item.tree[0].tree[0]}else if(item instanceof PatternOrCtx){item.check_reachable()}if(capture){var msg=capture=="_"?"wildcard":`name capture '${capture}'`;raise_syntax_error(this,`${msg} makes remaining patterns unreachable`)}};PatternOrCtx.prototype.transition=function(token,value){function set_alias(){var last=$B.last(C.tree);if(last.alias){C.alias=last.alias;delete last.alias}}var C=this;if(["as","alias"].indexOf(C.expect)>-1){return as_pattern(C,token,value)}if(token=="op"&&value=="|"){for(var item of C.tree){if(item.alias){raise_syntax_error(C,"(no as pattern inside or pattern)")}}C.check_reachable();return new PatternCtx(C)}else if(token==")"&&C.parenthese){set_alias();C.bindings();delete C.parenthese;C.expect="as";return C}set_alias();C.bindings();return transition(C.parent,token,value)};var PatternSequenceCtx=function(C,token){this.type="sequence_pattern";this.parent=C;this.position=$token.value;this.tree=[];this.bound_names=[];var first_pattern=C.tree.pop();if(token===undefined){this.bound_names=first_pattern.bindings();this.tree=[first_pattern];if(first_pattern.starred){this.has_star=true}first_pattern.parent=this}else{this.token=token}this.expect=",";C.tree.push(this)};PatternSequenceCtx.prototype.ast=function(){var ast_obj=new ast.MatchSequence(this.tree.map((x=>x.ast())));set_position(ast_obj,this.position);if(this.alias){ast_obj=new ast.MatchAs(ast_obj,this.alias);set_position(ast_obj,this.position)}return ast_obj};PatternSequenceCtx.prototype.bindings=PatternMappingCtx.prototype.bindings;PatternSequenceCtx.prototype.transition=function(token,value){function check_duplicate_names(){var last=$B.last(C.tree);if(!(last instanceof PatternCtx)){var last_bindings=last.bindings();for(var b of last_bindings){if(C.bound_names.indexOf(b)>-1){raise_syntax_error(C,"multiple assignments to"+` name '${b}' in 
pattern`)}}if(last.starred){if(C.has_star){raise_syntax_error(C,"multiple starred names in sequence pattern")}C.has_star=true}C.bound_names=C.bound_names.concat(last_bindings)}}var C=this;if(C.expect==","){if(C.token=="["&&token=="]"||C.token=="("&&token==")"){var nb_starred=0;for(var item of C.tree){if(item instanceof PatternCaptureCtx&&item.starred){nb_starred++;if(nb_starred>1){raise_syntax_error(C,"multiple starred names in sequence pattern")}}}C.expect="as";check_duplicate_names();remove_empty_pattern(C);return C}else if(token==","){check_duplicate_names();C.expect="id";return C}else if(token=="op"&&value=="|"){remove_empty_pattern(C);return new PatternCtx(new PatternOrCtx(C))}else if(this.token===undefined){check_duplicate_names();return transition(C.parent,token,value)}raise_syntax_error(C)}else if(C.expect=="as"){if(token=="as"){this.expect="alias";return C}return transition(C.parent,token,value)}else if(C.expect=="alias"){if(token=="id"){C.alias=value;return C.parent}raise_syntax_error(C,"expected alias")}else if(C.expect=="id"){C.expect=",";return transition(new PatternCtx(C),token,value)}};var RaiseCtx=$B.parser.RaiseCtx=function(C){this.type="raise";this.parent=C;this.tree=[];this.position=$token.value;C.tree[C.tree.length]=this;this.scope_type=get_scope(this).ntype};RaiseCtx.prototype.ast=function(){var ast_obj=new ast.Raise(...this.tree.map((x=>x.ast())));set_position(ast_obj,this.position);return ast_obj};RaiseCtx.prototype.transition=function(token,value){var C=this;switch(token){case"id":if(C.tree.length==0){return new IdCtx(new ExprCtx(C,"exc",false),value)}break;case"from":if(C.tree.length>0){return new AbstractExprCtx(C,false)}break;case"eol":remove_abstract_expr(this.tree);return transition(C.parent,token)}raise_syntax_error(C)};var ReturnCtx=$B.parser.ReturnCtx=function(C){this.type="return";this.parent=C;this.tree=[];this.position=$token.value;C.tree[C.tree.length]=this;this.scope=get_scope(this);if(["def","generator"].indexOf(this.scope.ntype)==-1){raise_syntax_error(C,"'return' outside function")}var node=this.node=get_node(this);while(node.parent){if(node.parent.C){var elt=node.parent.C.tree[0];if(elt.type=="for"){elt.has_return=true;break}else if(elt.type=="try"){elt.has_return=true}else if(elt.type=="single_kw"&&elt.token=="finally"){elt.has_return=true}}node=node.parent}};ReturnCtx.prototype.ast=function(){var res=new ast.Return;if(this.tree.length>0){res.value=this.tree[0].ast()}set_position(res,this.position);return res};ReturnCtx.prototype.transition=function(token,value){var C=this;if(token=="eol"&&this.tree.length==1&&this.tree[0].type=="abstract_expr"){this.tree.pop()}return transition(new AbstractExprCtx(C.parent,false),token,value)};var SetCompCtx=function(C){this.type="setcomp";this.tree=[C.tree[0]];this.tree[0].parent=this;Comprehension.make_comp(this,C)};SetCompCtx.prototype.ast=function(){var ast_obj=new ast.SetComp(this.tree[0].ast(),Comprehension.generators(this.tree.slice(1)));set_position(ast_obj,this.position);return ast_obj};SetCompCtx.prototype.transition=function(token){var C=this;if(token=="}"){return this.parent}raise_syntax_error(C)};var SingleKwCtx=$B.parser.SingleKwCtx=function(C,token){this.type="single_kw";this.token=token;this.parent=C;this.tree=[];C.tree[C.tree.length]=this;if(token=="else"){var node=C.node,rank=node.parent.children.indexOf(node),pctx=node.parent.children[rank-1].C;pctx.tree[0].orelse=this;if(pctx.tree.length>0){var 
elt=pctx.tree[0];if(elt.type=="for"||elt.type=="asyncfor"||elt.type=="condition"&&elt.token=="while"){elt.has_break=true;elt.else_node=get_node(this)}}}};SingleKwCtx.prototype.ast=function(){return ast_body(this.parent)};SingleKwCtx.prototype.transition=function(token){var C=this;if(token==":"){return BodyCtx(C)}else if(token=="eol"){raise_syntax_error(C,"expected ':'")}raise_syntax_error(C)};var SliceCtx=$B.parser.SliceCtx=function(C){this.type="slice";this.parent=C;this.position=$token.value;this.tree=C.tree.length>0?[C.tree.pop()]:[];C.tree.push(this)};SliceCtx.prototype.ast=function(){var slice=new ast.Slice;var attrs=["lower","upper","step"];for(var i=0;i0&&child.tree[0].type=="starred"){raise_syntax_error(C,"two starred expressions in assignment")}}}this.parent=C;this.tree=[];C.tree[C.tree.length]=this};StarredCtx.prototype.ast=function(){var ast_obj=new ast.Starred(this.tree[0].ast(),new ast.Load);set_position(ast_obj,this.position);return ast_obj};StarredCtx.prototype.transition=function(token,value){var C=this;return transition(C.parent,token,value)};var StringCtx=$B.parser.StringCtx=function(C,value){this.type="str";this.parent=C;this.position=this.end_position=$token.value;C.tree.push(this);this.is_bytes=value.startsWith("b");this.value=this.is_bytes?[]:"";this.add_value(value);this.raw=false};$B.string_from_ast_value=function(value){return value.replace(new RegExp("\\\\'","g"),"'")};var make_string_for_ast_value=$B.make_string_for_ast_value=function(value){value=value.replace(/\n/g,"\\n\\\n");value=value.replace(/\r/g,"\\r\\\r");if(value[0]=="'"){var unquoted=value.substr(1,value.length-2);return unquoted}if(value.indexOf("'")>-1){var s="",escaped=false;for(var char of value){if(char=="\\"){if(escaped){s+="\\\\"}escaped=!escaped}else{if(char=="'"&&!escaped){s+="\\"}else if(escaped){s+="\\"}s+=char;escaped=false}}value=s}return value.substr(1,value.length-2)};StringCtx.prototype.add_value=function(value){this.is_bytes=value.charAt(0)=="b";if(!this.is_bytes){this.value+=make_string_for_ast_value(value)}else{value=value.substr(2,value.length-3);try{var b=encode_bytestring(value)}catch(err){raise_syntax_error(this,"bytes can only contain ASCII literal characters")}this.value=this.value.concat(b)}};var encode_bytestring=$B.encode_bytestring=function(s){s=s.replace(/\\t/g,"\t").replace(/\\n/g,"\n").replace(/\\r/g,"\r").replace(/\\f/g,"\f").replace(/\\v/g,"\v").replace(/\\\\/g,"\\");var t=[];for(var i=0,len=s.length;i255){throw Error()}t.push(cp)}return t};StringCtx.prototype.ast=function(){var value=this.value;if(this.is_bytes){value=_b_.bytes.$factory(this.value)}var ast_obj=new ast.Constant(value);set_position(ast_obj,this.position);return ast_obj};StringCtx.prototype.transition=function(token,value){var C=this;switch(token){case"[":return new AbstractExprCtx(new SubscripCtx(C.parent),false);case"(":C.parent.tree[0]=C;return new CallCtx(C.parent);case"str":if(this.is_bytes&&!value.startsWith("b")||!this.is_bytes&&value.startsWith("b")){raise_syntax_error(C,"cannot mix bytes and nonbytes literals")}C.add_value(value);return C;case"JoinedStr":C.parent.tree.pop();var fstring=new FStringCtx(C.parent,value);new StringCtx(fstring,fstring.quotes+this.value+fstring.quotes);return fstring}return transition(C.parent,token,value)};var SubscripCtx=$B.parser.SubscripCtx=function(C){this.type="sub";this.func="getitem";this.value=C.tree[0];this.position=$token.value;C.tree.pop();C.tree[C.tree.length]=this;this.parent=C;this.tree=[]};SubscripCtx.prototype.ast=function(){var 
slice;if(this.tree.length>1){var slice_items=this.tree.map((x=>x.ast()));slice=new ast.Tuple(slice_items);set_position(slice,this.position,this.end_position)}else{slice=this.tree[0].ast()}slice.ctx=new ast.Load;var value=this.value.ast();if(value.ctx){value.ctx=new ast.Load}var ast_obj=new ast.Subscript(value,slice,new ast.Load);ast_obj.lineno=value.lineno;ast_obj.col_offset=value.col_offset;ast_obj.end_lineno=slice.end_lineno;ast_obj.end_col_offset=slice.end_col_offset;return ast_obj};SubscripCtx.prototype.transition=function(token,value){var C=this;switch(token){case"id":case"imaginary":case"int":case"float":case"str":case"JoinedStr":case"bytes":case"[":case"(":case"{":case".":case"not":case"lambda":var expr=new AbstractExprCtx(C,false);return transition(expr,token,value);case"]":C.end_position=$token.value;if(C.parent.packed){return C.parent}if(C.tree[0].tree.length>0){return C.parent}break;case":":return new AbstractExprCtx(new SliceCtx(C),false);case",":return new AbstractExprCtx(C,false)}raise_syntax_error(C)};var TargetListCtx=$B.parser.TargetListCtx=function(C){this.type="target_list";this.parent=C;this.tree=[];this.position=$token.value;this.expect="id";this.nb_packed=0;C.tree[C.tree.length]=this};TargetListCtx.prototype.ast=function(){if(this.tree.length==1&&!this.implicit_tuple){let item=this.tree[0].ast();item.ctx=new ast.Store;if(item instanceof ast.Tuple){for(var target of item.elts){target.ctx=new ast.Store}}return item}else{let items=[];for(let item of this.tree){item=item.ast();if(item.hasOwnProperty("ctx")){item.ctx=new ast.Store}items.push(item)}var ast_obj=new ast.Tuple(items,new ast.Store);set_position(ast_obj,this.position);return ast_obj}};TargetListCtx.prototype.transition=function(token,value){var C=this;switch(token){case"id":if(C.expect=="id"){C.expect=",";return new IdCtx(new ExprCtx(C,"target",false),value)}break;case"op":if(C.expect=="id"&&value=="*"){this.nb_packed++;C.expect=",";return new AbstractExprCtx(new StarredCtx(C),false)}break;case"(":case"[":if(C.expect=="id"){C.expect=",";return new ListOrTupleCtx(C,token=="("?"tuple":"list")}break;case")":case"]":if(C.expect==","){return C.parent}break;case",":if(C.expect==","){C.expect="id";C.implicit_tuple=true;return C}}if(C.expect==","){return transition(C.parent,token,value)}else if(token=="in"){return transition(C.parent,token,value)}console.log("unexpected token for target list",token,value);console.log(C);raise_syntax_error(C)};var TernaryCtx=$B.parser.TernaryCtx=function(C){this.type="ternary";this.position=C.position;C.parent.tree.pop();var expr=new ExprCtx(C.parent,"ternary",false);expr.tree.push(this);this.parent=expr;this.tree=[C];C.parent=this};TernaryCtx.prototype.ast=function(){var ast_obj=new ast.IfExp(this.tree[1].ast(),this.tree[0].ast(),this.tree[2].ast());set_position(ast_obj,this.position);return ast_obj};TernaryCtx.prototype.transition=function(token,value){var C=this;if(token=="else"){C.in_else=true;return new AbstractExprCtx(C,false)}else if(!C.in_else){if(token==":"){raise_syntax_error(C)}raise_syntax_error_known_range(C,C.position,last_position(C),"expected 'else' after 'if' expression")}else if(token==","){if(["assign","augm_assign","node","return"].indexOf(C.parent.type)>-1){C.parent.tree.pop();var t=new ListOrTupleCtx(C.parent,"tuple");t.implicit=true;t.tree[0]=C;C.parent=t;t.expect="id";return t}}return transition(C.parent,token,value)};var 
TryCtx=$B.parser.TryCtx=function(C){this.type="try";this.parent=C;this.position=$token.value;C.tree[C.tree.length]=this};TryCtx.prototype.ast=function(){var node=this.parent.node,res={body:ast_body(this.parent),handlers:[],orelse:[],finalbody:[]};var rank=node.parent.children.indexOf(node);for(var child of node.parent.children.slice(rank+1)){var t=child.C.tree[0],type=t.type;if(type=="single_kw"){type=t.token}if(type=="except"){res.handlers.push(t.ast())}else if(type=="else"){res.orelse=ast_body(child.C)}else if(type=="finally"){res.finalbody=ast_body(child.C)}else{break}}if(res.handlers.length==0&&res.finalbody.length==0){raise_syntax_error(this,"expected 'except' or 'finally' block")}var klass=this.parent.is_trystar?ast.TryStar:ast.Try;res=new klass(res.body,res.handlers,res.orelse,res.finalbody);set_position(res,this.position);return res};TryCtx.prototype.transition=function(token){var C=this;if(token==":"){return BodyCtx(C)}raise_syntax_error(C,"expected ':'")};var TypeAliasCtx=$B.parser.TypeAlias=function(C,value){C.parent.parent.tree=[this];this.parent=C.parent.parent;this.name=value;this.expect="=";this.tree=[];this.position=$token.value};TypeAliasCtx.prototype.transition=function(token,value){var C=this;if(C.expect=="="){if(token=="["){if(this.tree.length>0){raise_syntax_error(C)}return new TypeParamsCtx(C)}else if(token=="="){C.has_value=true;return new AbstractExprCtx(C,false)}else if(token=="eol"){if(!C.has_value||this.tree.length!==1||this.tree[0]instanceof AbstractExprCtx){raise_syntax_error(C)}return transition(C.parent,token,value)}}raise_syntax_error(C)};TypeAliasCtx.prototype.ast=function(){var name=new ast.Name(this.name),params,value=this.tree[0].ast();if(this.type_params){params=this.type_params.ast()}var ast_obj=new ast.TypeAlias(name,params,value);set_position(ast_obj,this.position);return ast_obj};var TypeParamsCtx=$B.parser.TypeParamsCtx=function(C){this.type="type_params";this.parent=C;C.type_params=this;this.tree=[];this.expect="param"};TypeParamsCtx.prototype.check_duplicate=function(name){for(var item of this.tree){if(item.name==name){raise_syntax_error(this,`duplicate type parameter '${name}'`)}}};TypeParamsCtx.prototype.transition=function(token,value){var C=this;if(C.expect=="param"){if(token=="id"){C.check_duplicate(value);C.expect=",";return new TypeVarCtx(C,value)}else if(token=="op"){if(value=="*"){C.expect=",";return new TypeVarTupleCtx(C)}else if(value=="**"){C.expect=",";return new TypeParamSpecCtx(C)}}else if(token=="]"){return C.parent}raise_syntax_error(C)}else if(C.expect==","){if(token==","){C.expect="param";return C}else if(token=="]"){return C.parent}raise_syntax_error(C)}raise_syntax_error(C)};TypeParamsCtx.prototype.ast=function(){return this.tree.map((x=>x.ast()))};var TypeVarCtx=$B.parser.TypeVarCtx=function(C,name){this.name=name;this.parent=C;C.tree.push(this);this.tree=[];this.position=$token.value};TypeVarCtx.prototype.transition=function(token,value){var C=this;if(token==":"){return new AbstractExprCtx(C,false)}return transition(this.parent,token,value)};TypeVarCtx.prototype.ast=function(){var name=this.name,bound;if(this.tree.length>0){bound=this.tree[0].ast()}var ast_obj=new ast.TypeVar(name,bound);set_position(ast_obj,this.position);return ast_obj};var TypeParamSpecCtx=$B.parser.TypeParamSpecCtx=function(C){this.parent=C;C.tree.push(this);this.tree=[];this.position=$token.value};TypeParamSpecCtx.prototype.transition=function(token,value){var 
C=this;if(token=="id"){if(C.name){raise_syntax_error(C)}C.parent.check_duplicate(value);C.name=value;return C}else if(token==":"){if(!C.name){raise_syntax_error(C)}this.has_colon=true;return new AbstractExprCtx(C,false)}else if(this.has_colon){var msg;if(this.tree[0].name=="tuple"){msg="cannot use constraints with ParamSpec"}else{msg="cannot use bound with ParamSpec"}raise_syntax_error_known_range(C,this.position,$token.value,msg)}return transition(this.parent,token,value)};TypeParamSpecCtx.prototype.ast=function(){var name=new ast.Name(this.name);var ast_obj=new ast.ParamSpec(name);set_position(ast_obj,this.position);return ast_obj};var TypeVarTupleCtx=$B.parser.TypeVarTupleCtx=function(C){this.parent=C;C.tree.push(this);this.tree=[];this.position=$token.value};TypeVarTupleCtx.prototype.transition=function(token,value){var C=this;if(token=="id"){if(C.name){raise_syntax_error(C)}C.parent.check_duplicate(value);C.name=value;return C}else if(token==":"){if(!C.name){raise_syntax_error(C)}this.has_colon=true;return new AbstractExprCtx(C,false)}else if(this.has_colon){var msg;if(this.tree[0].name=="tuple"){msg="cannot use constraints with TypeVarTuple"}else{msg="cannot use bound with TypeVarTuple"}raise_syntax_error_known_range(C,this.position,$token.value,msg)}return transition(this.parent,token,value)};TypeVarTupleCtx.prototype.ast=function(){var name=new ast.Name(this.name);var ast_obj=new ast.TypeVarTuple(name);set_position(ast_obj,this.position);return ast_obj};var UnaryCtx=$B.parser.UnaryCtx=function(C,op){this.type="unary";this.op=op;this.parent=C;this.tree=[];this.position=$token.value;C.tree.push(this)};UnaryCtx.prototype.ast=function(){var op={"+":ast.UAdd,"-":ast.USub,"~":ast.Invert}[this.op],ast_obj=new ast.UnaryOp(new op,this.tree[0].ast());set_position(ast_obj,this.position);return ast_obj};UnaryCtx.prototype.transition=function(token,value){var C=this;switch(token){case"op":if("+"==value||"-"==value){if(C.op===value){C.op="+"}else{C.op="-"}return C}break;case"int":case"float":case"imaginary":if(C.parent.type=="starred"){raise_syntax_error(C,"can't use starred expression here")}var res=new NumberCtx(token,C,value);return res;case"id":return transition(new AbstractExprCtx(C,false),token,value)}if(this.tree.length==0||this.tree[0].type=="abstract_expr"){raise_syntax_error(C)}return transition(C.parent,token,value)};var WithCtx=$B.parser.WithCtx=function(C){this.type="with";this.parent=C;this.position=$token.value;C.tree[C.tree.length]=this;this.tree=[];this.expect="expr";this.scope=get_scope(this)};WithCtx.prototype.ast=function(){var withitems=[];for(var withitem of this.tree){withitems.push(withitem.ast())}var klass=this.async?ast.AsyncWith:ast.With;var ast_obj=new klass(withitems,ast_body(this.parent));set_position(ast_obj,this.async?this.async.position:this.position,last_position(this));return ast_obj};WithCtx.prototype.transition=function(token,value){var C=this;function check_last(){var last=$B.last(C.tree);if(last.tree.length>1){var alias=last.tree[1];if(alias.tree.length==0){raise_syntax_error(C,"expected ':'")}check_assignment(alias)}}switch(token){case"(":case"[":if(this.expect=="expr"&&this.tree.length==0){C.parenth=token;return C}else{raise_syntax_error(C)}break;case"id":if(C.expect=="expr"){C.expect=",";return transition(new AbstractExprCtx(new withitem(C),false),token,value)}raise_syntax_error(C);break;case":":if(!C.parenth||C.parenth=="implicit"){check_last()}return 
BodyCtx(C);case")":case"]":if(C.parenth==opening[token]){if(C.expect==","||C.expect=="expr"){check_last();C.expect=":";return C}}break;case",":if(C.expect==","){if(!C.parenth){C.parenth="implicit"}check_last();C.expect="expr";return C}break;case"eol":raise_syntax_error(C,"expected ':'")}raise_syntax_error(C)};WithCtx.prototype.set_alias=function(ctx){var ids=[];if(ctx.type=="id"){ids=[ctx]}else if(ctx.type=="list_or_tuple"){for(var expr of ctx.tree){if(expr.type=="expr"&&expr.tree[0].type=="id"){ids.push(expr.tree[0])}}}};var withitem=function(C){this.type="withitem";this.parent=C;C.tree.push(this);this.tree=[];this.expect="as";this.position=$token.value};withitem.prototype.ast=function(){var ast_obj=new ast.withitem(this.tree[0].ast());if(this.tree[1]){ast_obj.optional_vars=this.tree[1].tree[0].ast();if(ast_obj.optional_vars.elts){for(var elt of ast_obj.optional_vars.elts){elt.ctx=new ast.Store}}else{ast_obj.optional_vars.ctx=new ast.Store}}set_position(ast_obj,this.position);return ast_obj};withitem.prototype.transition=function(token,value){var C=this;if(token=="as"&&C.expect=="as"){C.expect="star_target";return new AbstractExprCtx(C,false)}else{return transition(C.parent,token,value)}};var YieldCtx=$B.parser.YieldCtx=function(C,is_await){this.type="yield";this.parent=C;this.tree=[];this.is_await=is_await;this.position=$token.value;C.tree[C.tree.length]=this;if(C.type=="list_or_tuple"&&C.tree.length>1){raise_syntax_error(C,"(non-parenthesized yield)")}if(parent_match(C,{type:"annotation"})){raise_syntax_error(C,"'yield' outside function")}var root=get_module(this);root.yields_func_check=root.yields_func_check||[];root.yields_func_check.push(this);var scope=this.scope=get_scope(this,true),node=get_node(this);node.has_yield=this;var in_comp=parent_match(this,{type:"comprehension"});if(get_scope(this).id.startsWith("lc"+$B.lambda_magic)){delete node.has_yield}if(in_comp){var outermost_expr=in_comp.tree[0].tree[1];parent=C;while(parent){if(parent===outermost_expr){break}parent=parent.parent}if(!parent){raise_syntax_error(C,"'yield' inside list comprehension")}}var in_lambda=false,parent=C;while(parent){if(parent.type=="lambda"){in_lambda=true;this.in_lambda=true;break}parent=parent.parent}parent=node.parent;while(parent){if(parent.C&&parent.C.tree.length>0&&parent.C.tree[0].type=="with"){scope.C.tree[0].$has_yield_in_cm=true;break}parent=parent.parent}if(!in_lambda){switch(C.type){case"node":case"assign":case"list_or_tuple":break;default:raise_syntax_error(C,"(non-parenthesized yield)")}}};YieldCtx.prototype.ast=function(){var ast_obj;if(this.from){ast_obj=new ast.YieldFrom(this.tree[0].ast())}else if(this.tree.length==1){ast_obj=new ast.Yield(this.tree[0].ast())}else{ast_obj=new ast.Yield}set_position(ast_obj,this.position);return ast_obj};YieldCtx.prototype.transition=function(token){var C=this;if(token=="from"){if(C.tree[0].type!="abstract_expr"){raise_syntax_error(C,"('from' must follow 'yield')")}C.from=true;C.from_num=$B.UUID();return C.tree[0]}else{remove_abstract_expr(C.tree);if(C.from&&C.tree.length==0){raise_syntax_error(C)}}return transition(C.parent,token)};YieldCtx.prototype.check_in_function=function(){if(this.in_lambda){return}var scope=get_scope(this),in_func=scope.is_function,func_scope=scope;if(!in_func&&scope.comprehension){var parent=scope.parent_block;while(parent.comprehension){parent=parent.parent_block}in_func=parent.is_function;func_scope=parent}if(in_func){var def=func_scope.C.tree[0];if(!this.is_await){def.type="generator"}}};function parent_match(ctx,obj){var 
flag;while(ctx.parent){flag=true;for(var attr in obj){if(ctx.parent[attr]!=obj[attr]){flag=false;break}}if(flag){return ctx.parent}ctx=ctx.parent}return false}var get_previous=$B.parser.get_previous=function(C){var previous=C.node.parent.children[C.node.parent.children.length-2];if(!previous||!previous.C){raise_syntax_error(C,"(keyword not following correct keyword)")}return previous.C.tree[0]};var get_docstring=$B.parser.get_docstring=function(node){var doc_string=_b_.None;if(node.body.length>0){var firstchild=node.body[0];if(firstchild instanceof $B.ast.Constant&&typeof firstchild.value=="string"){doc_string=firstchild.value}}return doc_string};var get_scope=$B.parser.get_scope=function(C){var ctx_node=C.parent;while(true){if(ctx_node.type==="node"){break}else if(ctx_node.comprehension){return ctx_node}ctx_node=ctx_node.parent}var tree_node=ctx_node.node,scope=null;while(tree_node.parent&&tree_node.parent.type!=="module"){var ntype=tree_node.parent.C.tree[0].type;switch(ntype){case"def":case"class":case"generator":scope=tree_node.parent;scope.ntype=ntype;scope.is_function=ntype!="class";return scope}tree_node=tree_node.parent}scope=tree_node.parent||tree_node;scope.ntype="module";return scope};var get_module=$B.parser.get_module=function(C){var ctx_node=C instanceof NodeCtx?C:C.parent;while(ctx_node.type!=="node"){ctx_node=ctx_node.parent}var tree_node=ctx_node.node;if(tree_node.ntype=="module"){return tree_node}var scope=null;while(tree_node.parent.type!="module"){tree_node=tree_node.parent}scope=tree_node.parent;scope.ntype="module";return scope};var get_node=$B.parser.get_node=function(C){var ctx=C;while(ctx.parent){ctx=ctx.parent}return ctx.node};var mangle_name=$B.parser.mangle_name=function(name,C){if(name.substr(0,2)=="__"&&name.substr(name.length-2)!=="__"){var scope=get_scope(C);while(true){if(scope.ntype=="module"){return name}else if(scope.ntype=="class"){var class_name=scope.C.tree[0].name;while(class_name.charAt(0)=="_"){class_name=class_name.substr(1)}return"_"+class_name+name}else{if(scope.parent&&scope.parent.C){scope=get_scope(scope.C.tree[0])}else{return name}}}}else{return name}};$B.nb_debug_lines=0;var transition=$B.parser.transition=function(C,token,value){if($B.nb_debug_lines>100){alert("too many debug lines");$B.nb_debug_lines=0}if($B.track_transitions){console.log("C",C,"token",token,value);$B.nb_debug_lines++}return C.transition(token,value)};var s_escaped='abfnrtvxuU"0123456789'+"'"+"\\",is_escaped={};for(var i=0;i>10)+String.fromCharCode(56320|value&1023)}function test_num(num_lit){var len=num_lit.length,pos=0,char,elt=null,subtypes={b:"binary",o:"octal",x:"hexadecimal"},digits_re=/[_\d]/;function error(message){throw SyntaxError(message)}function check(elt){if(elt.value.length==0){let t=subtypes[elt.subtype]||"decimal";error("invalid "+t+" literal")}else if(elt.value[elt.value.length-1].match(/[-+_]/)){let t=subtypes[elt.subtype]||"decimal";error("invalid "+t+" literal")}else{elt.value=elt.value.replace(/_/g,"");elt.length=pos;return elt}}while(pos-1){elt.value+=char;pos++}else{return check(elt)}}else if(char.match(/j/i)){if(elt&&(!elt.subtype||elt.subtype=="float")){elt.imaginary=true;check(elt);elt.length++;return elt}else{error("invalid syntax")}}else{break}}return check(elt)}var opening={")":"(","}":"{","]":"["};function check_line(token_reader){var braces=[];token_reader.position--;while(true){var token=token_reader.read();if(!token){return false}if(token.type=="OP"&&token.string==":"&&braces.length==0){return true}else 
if(token.type=="OP"){if("([{".indexOf(token.string)>-1){braces.push(token)}else if(")]}".indexOf(token.string)>-1){if(braces.length==0){let err=SyntaxError(`unmatched '${token.string}'`);err.offset=token.start[1];throw err}else if($B.last(braces).string!=opening[token.string]){let err=SyntaxError("closing parenthesis "+`'${token.string}' does not match opening `+`parenthesis '${$B.last(braces).string}'`);err.offset=token.start[1];throw err}else{braces.pop()}}}else if(token.type=="NEWLINE"){return false}}return false}function prepare_number(n){if(n.startsWith(".")){if(n.endsWith("j")){return{type:"imaginary",value:prepare_number(n.substr(0,n.length-1))}}else{return{type:"float",value:n.replace(/_/g,"")}}}else if(n.startsWith("0")&&n!="0"){let num=test_num(n),base;if(num.imaginary){return{type:"imaginary",value:prepare_number(num.value)}}if(num.subtype=="float"){return{type:num.subtype,value:num.value}}if(num.subtype===undefined){base=10}else{base={b:2,o:8,x:16}[num.subtype]}if(base!==undefined){return{type:"int",value:[base,num.value]}}}else{let num=test_num(n);if(num.subtype=="float"){if(num.imaginary){return{type:"imaginary",value:prepare_number(num.value)}}else{return{type:"float",value:num.value}}}else{if(num.imaginary){return{type:"imaginary",value:prepare_number(num.value)}}else{return{type:"int",value:[10,num.value]}}}}}function test_escape(text,antislash_pos){var seq_end,mo;mo=/^[0-7]{1,3}/.exec(text.substr(antislash_pos+1));if(mo){return[String.fromCharCode(parseInt(mo[0],8)),1+mo[0].length]}switch(text[antislash_pos+1]){case"x":mo=/^[0-9A-F]{0,2}/i.exec(text.substr(antislash_pos+2));if(mo[0].length!=2){seq_end=antislash_pos+mo[0].length+1;$token.value.start[1]=seq_end;throw Error("(unicode error) 'unicodeescape' codec can't decode "+`bytes in position ${antislash_pos}-${seq_end}: truncated `+"\\xXX escape")}else{return[String.fromCharCode(parseInt(mo[0],16)),2+mo[0].length]}break;case"u":mo=/^[0-9A-F]{0,4}/i.exec(text.substr(antislash_pos+2));if(mo[0].length!=4){seq_end=antislash_pos+mo[0].length+1;$token.value.start[1]=seq_end;throw Error("(unicode error) 'unicodeescape' codec can't decode "+`bytes in position ${antislash_pos}-${seq_end}: truncated `+"\\uXXXX escape")}else{return[String.fromCharCode(parseInt(mo[0],16)),2+mo[0].length]}break;case"U":mo=/^[0-9A-F]{0,8}/i.exec(text.substr(antislash_pos+2));if(mo[0].length!=8){seq_end=antislash_pos+mo[0].length+1;$token.value.start[1]=seq_end;throw Error("(unicode error) 'unicodeescape' codec can't decode "+`bytes in position ${antislash_pos}-${seq_end}: truncated `+"\\uXXXX escape")}else{let value=parseInt(mo[0],16);if(value>1114111){throw Error("invalid unicode escape "+mo[0])}else if(value>=65536){return[SurrogatePair(value),2+mo[0].length]}else{return[String.fromCharCode(value),2+mo[0].length]}}}}$B.test_escape=test_escape;function prepare_string(C,s){var len=s.length,pos=0,string_modifier,_type="string";let quote;let inner;while(pos0){let last_brace=$B.last(braces_stack);$token.value=last_brace;raise_syntax_error(C,`'${last_brace.string}'`+" was never closed")}var err_msg=err.message;if(err_msg=="EOF in multi-line statement"){err_msg="unexpected EOF while parsing"}if(err.lineno){raise_error_known_location(_b_.SyntaxError,root.filename,err.lineno,err.col_offset,err.end_lineno,err.end_col_offset,err.line,err.message)}else{raise_syntax_error(C,err_msg)}}throw err}if(!token){throw Error("token done without ENDMARKER.")}$token.value=token;if(token[2]===undefined){console.log("token 
incomplet",token,"module",module,root);console.log("src",src)}if(token.start===undefined){console.log("no start",token)}lnum=token.start[0];if(expect_indent&&["INDENT","COMMENT","NL"].indexOf(token.type)==-1){C=C||new NodeCtx(node);raise_indentation_error(C,"expected an indented block",expect_indent)}switch(token.type){case"ENDMARKER":if(root.yields_func_check){for(const _yield of root.yields_func_check){$token.value=_yield.position;_yield.check_in_function()}}if(indent!=0){raise_indentation_error(node.C,"expected an indented block")}if(node.C===undefined||node.C.tree.length==0){node.parent.children.pop()}return;case"ENCODING":case"TYPE_COMMENT":continue;case"NL":if(!node.C||node.C.tree.length==0){node.line_num++}continue;case"COMMENT":continue;case"ERRORTOKEN":C=C||new NodeCtx(node);if(token.string!=" "){handle_errortoken(C,token,root.token_reader)}continue}switch(token[0]){case"NAME":case"NUMBER":case"OP":case"STRING":case"FSTRING_START":C=C||new NodeCtx(node)}switch(token[0]){case"NAME":var name=token[1];if(python_keywords.indexOf(name)>-1){if(unsupported.indexOf(name)>-1){raise_syntax_error(C,"(Unsupported Python keyword '"+name+"')")}C=transition(C,name)}else if(name=="not"){C=transition(C,"not")}else if(typeof $operators[name]=="string"){C=transition(C,"op",name)}else{C=transition(C,"id",name)}continue;case"OP":var op=token[1];if(op.length==1&&"()[]{}.,=".indexOf(op)>-1||[":="].indexOf(op)>-1){if(braces_open.indexOf(op)>-1){braces_stack.push(token);try{check_brace_is_closed(op,root.token_reader)}catch(err){if(err.message=="EOF in multi-line statement"){raise_syntax_error(C,`'${op}' was never closed`)}else{throw err}}}else if(braces_opener[op]){if(braces_stack.length==0){raise_syntax_error(C,"(unmatched '"+op+"')")}else{let last_brace=$B.last(braces_stack);if(last_brace.string==braces_opener[op]){braces_stack.pop()}else{raise_syntax_error(C,`closing parenthesis '${op}' does not `+`match opening parenthesis '`+`${last_brace.string}'`)}}}C=transition(C,token[1])}else if(op==":"){C=transition(C,":");if(C.node&&C.node.is_body_node){node=C.node}}else if(op=="..."){C=transition(C,"ellipsis")}else if(op=="->"){C=transition(C,"annotation")}else if(op==";"){if(C.type=="node"&&C.tree.length==0){raise_syntax_error(C,"(statement cannot start with ;)")}transition(C,"eol");let new_node=new $Node;new_node.line_num=token[2][0]+1;C=new NodeCtx(new_node);node.parent.add(new_node);node=new_node}else if($augmented_assigns[op]){C=transition(C,"augm_assign",op)}else{C=transition(C,"op",op)}continue;case"STRING":var prepared=prepare_string(C,token[1],token[2]);if(prepared.value instanceof Array){C=transition(C,"JoinedStr",prepared.value)}else{C=transition(C,"str",prepared.value)}continue;case"FSTRING_START":C=transition(C,"JoinedStr",token[1]);break;case"FSTRING_MIDDLE":C=transition(C,"middle",token[1]);break;case"FSTRING_END":C=transition(C,"end",token[1]);break;case"NUMBER":try{var prepared_num=prepare_number(token[1])}catch(err){raise_syntax_error(C,err.message)}C=transition(C,prepared_num.type,prepared_num.value);continue;case"NEWLINE":if(C&&C.node&&C.node.is_body_node){expect_indent=C.node.parent}C=C||new NodeCtx(node);transition(C,"eol");var new_node=new $Node;new_node.line_num=token[2][0]+1;if(node.parent.children.length>0&&node.parent.children[0].is_body_node){node.parent.parent.add(new_node)}else{node.parent.add(new_node)}C=new NodeCtx(new_node);node=new_node;continue;case"DEDENT":indent--;if(!indent_continuation){node.parent.children.pop();node.parent.parent.add(node);C=new 
NodeCtx(node)}continue;case"INDENT":indent++;var indent_continuation=false;if(!expect_indent){if(token.line.trim()=="\\"){indent_continuation=true}else{C=C||new NodeCtx(node);raise_indentation_error(C,"unexpected indent")}}expect_indent=false;continue}}};var create_root_node=$B.parser.create_root_node=function(src,module,locals_id,parent_block,line_num){var root=new $Node("module");root.module=module;root.id=locals_id;root.parent_block=parent_block;root.line_num=line_num;root.indent=-1;root.imports={};if(typeof src=="object"){root.is_comp=src.is_comp;root.filename=src.filename;src=src.src}src=src.replace(/\r\n/gm,"\n");root.src=src;return root};$B.py2js=function(src,module,locals_id,parent_scope){if(typeof module=="object"){module=module.__name__}parent_scope=parent_scope||$B.builtins_scope;var filename,imported;if(typeof src=="object"){filename=src.filename;imported=src.imported;src=src.src}var locals_is_module=Array.isArray(locals_id);if(locals_is_module){locals_id=locals_id[0]}var _ast;if($B.parser_to_ast){console.log("use standard parser");_ast=new $B.Parser(src,filename,"file").parse()}else{var root=create_root_node({src:src,filename:filename},module,locals_id,parent_scope);dispatch_tokens(root);_ast=root.ast()}var future=$B.future_features(_ast,filename);var symtable=$B._PySymtable_Build(_ast,filename,future);var js_obj=$B.js_from_root({ast:_ast,symtable:symtable,filename:filename,imported:imported});var js_from_ast=js_obj.js;return{_ast:_ast,imports:js_obj.imports,to_js:function(){return js_from_ast}}};$B.parse_options=function(options){if(options===undefined){options={}}else if(typeof options=="number"){options={debug:options}}else if(typeof options!=="object"){console.warn("ignoring invalid argument passed to brython():",options);options={}}$B.debug=options.debug===undefined?1:options.debug;_b_.__debug__=$B.debug>0;options.python_extension=options.python_extension||".py";if($B.$options.args){$B.__ARGV=$B.$options.args}else{$B.__ARGV=_b_.list.$factory([])}$B.options_parsed=true;return options};if(!($B.isWebWorker||$B.isNode)){var startup_observer=new MutationObserver((function(mutations){for(var mutation of mutations){for(var addedNode of mutation.addedNodes){addPythonScript(addedNode)}}}));startup_observer.observe(document.documentElement,{childList:true,subtree:true})}var brython_options={};var python_scripts=[];if(!$B.isWebWorker){python_scripts=python_scripts.concat(Array.from(document.querySelectorAll('script[type="text/python"]'))).concat(Array.from(document.querySelectorAll('script[type="text/python3"]')));var onload;addEventListener("DOMContentLoaded",(function(ev){if(ev.target.body){onload=ev.target.body.onload}if(!onload){ev.target.body.onload=function(){return brython()}}else{ev.target.body.onload=function(){onload();if(!status.brython_called){brython()}}}}));class BrythonOptions extends HTMLElement{constructor(){super()}connectedCallback(){for(var attr of this.getAttributeNames()){brython_options[attr]=convert_option(attr,this.getAttribute(attr))}}}customElements.define("brython-options",BrythonOptions)}var defined_ids={},script_to_id=new Map,id_to_script={};function addPythonScript(addedNode){if(addedNode.tagName=="SCRIPT"&&(addedNode.type=="text/python"||addedNode.type=="text/python3")){python_scripts.push(addedNode)}}var status={brython_called:false,first_unnamed_script:true};$B.dispatch_load_event=function(script){script.dispatchEvent(new Event("load"))};function 
injectPythonScript(addedNode){if(addedNode.tagName=="SCRIPT"&&addedNode.type=="text/python"){set_script_id(addedNode);run_scripts([addedNode])}}function set_script_id(script){if(script_to_id.has(script)){}else if(script.id){if(defined_ids[script.id]){throw Error("Brython error : Found 2 scripts with the "+"same id '"+script.id+"'")}else{defined_ids[script.id]=true}script_to_id.set(script,script.id)}else{if(script.className==="webworker"){throw _b_.AttributeError.$factory("webworker script has no attribute 'id'")}if(status.first_unnamed_script){script_to_id.set(script,"__main__");status.first_unnamed_script=false}else{script_to_id.set(script,"__main__"+$B.UUID())}}var id=script_to_id.get(script);id_to_script[id]=script;return id}var brython=$B.parser.brython=function(options){$B.$options=$B.parse_options(options);if(!($B.isWebWorker||$B.isNode)){if(!status.brython_called){status.brython_called=true;startup_observer.disconnect();var inject_observer=new MutationObserver((function(mutations){for(var mutation of mutations){for(var addedNode of mutation.addedNodes){injectPythonScript(addedNode)}}}));inject_observer.observe(document.documentElement,{childList:true,subtree:true})}}else if($B.isNode){return}for(var python_script of python_scripts){set_script_id(python_script)}var scripts=[];var $href=$B.script_path=_window.location.href.split("#")[0],$href_elts=$href.split("/");$href_elts.pop();if($B.isWebWorker||$B.isNode){$href_elts.pop()}$B.curdir=$href_elts.join("/");var kk=Object.keys(_window);var ids=$B.get_page_option("ids");if(ids!==undefined){if(!Array.isArray(ids)){throw _b_.ValueError.$factory("ids is not a list")}if(ids.length==0){}for(var id of ids){var script=document.querySelector(`script[id="${id}"]`);if(script){set_script_id(script);scripts.push(script)}else{console.log(`no script with id '${id}'`);throw _b_.KeyError.$factory(`no script with id '${id}'`)}}}else if($B.isWebWorker){}else{scripts=python_scripts.slice()}run_scripts(scripts)};function convert_option(option,value){if(option=="debug"){if(typeof value=="string"&&value.match(/^\d+$/)){return parseInt(value)}else{if(value!==null&&value!==undefined){console.debug(`Invalid value for debug: ${value}`)}}}else if(option=="cache"||option=="indexeddb"||option=="static_stdlib_import"){if(value=="1"||value.toLowerCase()=="true"){return true}else if(value=="0"||value.toLowerCase()=="false"){return false}else{console.debug(`Invalid value for ${option}: ${value}`)}}else if(option=="ids"||option=="pythonpath"||option=="args"){if(typeof value=="string"){if(value.trim().length==0){return[]}return value.trim().split(/\s+/)}}return value}const default_option={args:[],cache:false,debug:1,indexeddb:true,python_extension:".py",static_stdlib_import:true};$B.get_filename=function(){if($B.count_frames()>0){return $B.get_frame_at(0).__file__}};$B.get_filename_for_import=function(){var filename=$B.get_filename();if($B.import_info[filename]===undefined){$B.make_import_paths(filename)}return filename};$B.get_page_option=function(option){if($B.$options.hasOwnProperty(option)){return $B.$options[option]}else if(brython_options.hasOwnProperty(option.toLowerCase())){return brython_options[option.toLowerCase()]}else{return default_option[option]}};$B.get_option=function(option,err){var filename=$B.script_filename;if(err&&err.$frame_obj){filename=$B.get_frame_at(0,err.$frame_obj).__file__}else{filename=$B.get_filename()}return 
$B.get_option_from_filename(option,filename)};$B.get_option_from_filename=function(option,filename){if(!filename||!$B.scripts[filename]){return $B.get_page_option(option)}var value=$B.scripts[filename].getAttribute(option);if(value!==null){return convert_option(option,value)}else{return $B.get_page_option(option)}};function run_scripts(_scripts){var webworkers=_scripts.filter((script=>script.className==="webworker")),scripts=_scripts.filter((script=>script.className!=="webworker"));var module_name,filename;if(scripts.length>0||$B.isWebWorker){if($B.get_page_option("indexedDB")&&$B.has_indexedDB&&$B.hasOwnProperty("VFS")){$B.tasks.push([$B.idb_open])}}var src;for(var worker of webworkers){if(worker.src){$B.tasks.push([$B.ajax_load_script,{script:worker,name:worker.id,url:worker.src,is_ww:true}])}else{var source=worker.innerText||worker.textContent;source=unindent(source);source=source.replace(/^\n/,"");$B.webworkers[worker.id]=worker;filename=$B.script_filename=$B.script_path+"#"+worker.id;$B.url2name[filename]=worker.id;$B.file_cache[filename]=source;$B.scripts[filename]=worker;$B.dispatch_load_event(worker)}}for(var script of scripts){module_name=script_to_id.get(script);if(script.src){$B.tasks.push([$B.ajax_load_script,{script:script,name:module_name,url:script.src,id:script.id}])}else{src=script.innerHTML||script.textContent;src=unindent(src);src=src.replace(/^\n/,"");if(src.endsWith("\n")){src=src.substr(0,src.length-1)}filename=$B.script_filename=$B.script_path+"#"+module_name;$B.file_cache[filename]=src;$B.url2name[filename]=module_name;$B.scripts[filename]=script;$B.tasks.push([$B.run_script,script,src,module_name,filename,true])}}$B.loop()}$B.run_script=function(script,src,name,url,run_loop){var script_elts=url.split("/");script_elts.pop();$B.script_dir=script_elts.join("/");$B.file_cache[url]=src;$B.url2name[url]=name;$B.scripts[url]=script;$B.make_import_paths(url);_b_.__debug__=$B.get_option("debug")>0;var root,js;try{root=$B.py2js({src:src,filename:url},name,name);js=root.to_js();if($B.get_option_from_filename("debug",url)>1){console.log($B.format_indent(js,0))}}catch(err){return $B.handle_error($B.exception(err))}var _script={__doc__:get_docstring(root._ast),js:js,__name__:name,__file__:url,script_element:script};$B.tasks.push(["execute",_script]);if(run_loop){$B.loop()}};$B.$operators=$operators;$B.$Node=$Node;$B.brython=brython})(__BRYTHON__);globalThis.brython=__BRYTHON__.brython;if(__BRYTHON__.isNode){global.__BRYTHON__=__BRYTHON__;module.exports={__BRYTHON__:__BRYTHON__}}(function($B){var _b_=$B.builtins;if($B.VFS_timestamp&&$B.VFS_timestamp>$B.timestamp){$B.timestamp=$B.VFS_timestamp}function idb_load(evt,module){var res=evt.target.result;var debug=$B.get_page_option("debug");if(res===undefined||res.timestamp!=$B.timestamp||$B.VFS[module]&&res.source_ts!==$B.VFS[module].timestamp){if($B.VFS[module]!==undefined){var elts=$B.VFS[module],ext=elts[0],source=elts[1];if(ext==".py"){var is_package=elts.length==4,__package__;if(is_package){__package__=module}else{var parts=module.split(".");parts.pop();__package__=parts.join(".")}$B.imported[module]=$B.module.$factory(module,"",__package__);$B.url2name[module]=module;try{$B.py2js({src:source,filename:module},module,module)}catch(err){$B.handle_error(err)}delete 
$B.imported[module];if(debug>1){console.log("precompile",module)}}else{console.log("bizarre",module,ext)}}else{}}else{if(res.is_package){$B.precompiled[module]=[res.content]}else{$B.precompiled[module]=res.content}if(res.imports.length>0){if(debug>1){console.log(module,"imports",res.imports)}var subimports=res.imports.split(",");for(var i=0;i1){console.info("using indexedDB for stdlib modules cache")}var tx=db.transaction("modules","readwrite"),store=tx.objectStore("modules"),record,outdated=[];var openCursor=store.openCursor();openCursor.onerror=function(evt){console.log("open cursor error",evt)};openCursor.onsuccess=function(evt){var cursor=evt.target.result;if(cursor){record=cursor.value;if(record.timestamp==$B.timestamp){if(!$B.VFS||!$B.VFS[record.name]||$B.VFS[record.name].timestamp==record.source_ts){if(record.is_package){$B.precompiled[record.name]=[record.content]}else{$B.precompiled[record.name]=record.content}if($B.get_page_option("debug")>1){console.info("load from cache",record.name)}}else{outdated.push(record.name)}}else{outdated.push(record.name)}cursor.continue()}else{if($B.get_page_option("debug")>1){console.log("done")}$B.outdated=outdated;loop()}}}};idb_cx.onupgradeneeded=function(){console.info("upgrade needed");var db=idb_cx.result,store=db.createObjectStore("modules",{keyPath:"name"});store.onsuccess=loop};idb_cx.onerror=function(){console.info("could not open indexedDB database");$B.idb_cx=null;$B.idb_name=null;$B.$options.indexedDB=false;loop()}};$B.ajax_load_script=function(s){var script=s.script,url=s.url,name=s.name,rel_path=url.substr($B.script_dir.length+1);if($B.files&&$B.files.hasOwnProperty(rel_path)){var src=atob($B.files[rel_path].content);$B.tasks.splice(0,0,[$B.run_script,script,src,name,url,true]);loop()}else if($B.protocol!="file"){$B.script_filename=url;$B.scripts[url]=script;var req=new XMLHttpRequest,cache=$B.get_option("cache"),qs=cache?"":(url.search(/\?/)>-1?"&":"?")+Date.now();req.open("GET",url+qs,true);req.onreadystatechange=function(){if(this.readyState==4){if(this.status==200){var src=this.responseText;if(s.is_ww){$B.webworkers[name]=script;$B.file_cache[url]=src;$B.dispatch_load_event(script)}else{$B.tasks.splice(0,0,[$B.run_script,script,src,name,url,true])}loop()}else if(this.status==404){throw Error(url+" not found")}}};req.send()}else{throw _b_.IOError.$factory("can't load external script at "+script.url+" (Ajax calls not supported with protocol file:///)")}};function add_jsmodule(module,source){source+="\nvar $locals_"+module.replace(/\./g,"_")+" = $module";$B.precompiled[module]=source}$B.inImported=function(module){if($B.imported.hasOwnProperty(module)){}else if(__BRYTHON__.VFS&&__BRYTHON__.VFS.hasOwnProperty(module)){var elts=__BRYTHON__.VFS[module];if(elts===undefined){console.log("bizarre",module)}var ext=elts[0],source=elts[1];if(ext==".py"){if($B.idb_cx&&!$B.idb_cx.$closed){$B.tasks.splice(0,0,[idb_get,module])}}else{add_jsmodule(module,source)}}else{console.log("bizarre",module)}loop()};function report_precompile(mod){if(!$B.isWebWorker){document.dispatchEvent(new CustomEvent("precompile",{detail:"remove outdated "+mod+" from cache"}))}}function report_close(){if(!$B.isWebWorker){document.dispatchEvent(new CustomEvent("precompile",{detail:"close"}))}}function report_done(){if(!$B.isWebWorker){document.dispatchEvent(new CustomEvent("brython_done",{detail:$B.obj_dict($B.$options)}))}}var loop=$B.loop=function(){if($B.tasks.length==0){if($B.idb_cx&&!$B.idb_cx.$closed){var 
db=$B.idb_cx.result,tx=db.transaction("modules","readwrite"),store=tx.objectStore("modules");while($B.outdated.length>0){var module=$B.outdated.pop(),req=store.delete(module);req.onsuccess=function(mod){return function(event){if($B.get_page_option("debug")>1){console.info("delete outdated",mod)}report_precompile(mod)}}(module)}report_close();$B.idb_cx.result.close();$B.idb_cx.$closed=true}report_done();return}var task=$B.tasks.shift(),func=task[0],args=task.slice(1);if(func=="execute"){try{var script=task[1],script_id=script.__name__.replace(/\./g,"_"),module=$B.module.$factory(script.__name__);module.__file__=script.__file__;module.__doc__=script.__doc__;$B.imported[script_id]=module;var module=new Function(script.js+`\nreturn locals`)();for(var key in module){if(!key.startsWith("$")){$B.imported[script_id][key]=module[key]}}$B.dispatch_load_event(script.script_element)}catch(err){if(err.__class__===undefined){if(err.$py_exc){err=err.$py_exc}else{$B.freeze(err);var stack=err.$stack,frame_obj=err.$frame_obj,linenums=err.$linenums;var lineNumber=err.lineNumber;if(lineNumber!==undefined){console.log("around line",lineNumber);console.log(script.js.split("\n").slice(lineNumber-4,lineNumber).join("\n"))}$B.print_stack();err=_b_.RuntimeError.$factory(err+"");err.$stack=stack;err.$frame_obj=frame_obj;err.$linenums=linenums}}$B.handle_error(err)}loop()}else{try{func.apply(null,args)}catch(err){$B.handle_error(err)}}};$B.tasks=[];$B.has_indexedDB=self.indexedDB!==undefined})(__BRYTHON__);(function($B){var _b_=$B.builtins,_window=globalThis,isWebWorker="undefined"!==typeof WorkerGlobalScope&&"function"===typeof importScripts&&navigator instanceof WorkerNavigator;function missing_required_kwonly(fname,args){var plural=args.length==1?"":"s",arg_list;args=args.map((x=>`'${x}'`));if(args.length==1){arg_list=args[0]}else if(args.length==2){arg_list=args[0]+" and "+args[1]}else{arg_list=args.slice(0,args.length-1).join(", ")+", and "+args[args.length-1]}throw _b_.TypeError.$factory(fname+"() "+`missing ${args.length} required keyword-only argument${plural}: `+arg_list)}function missing_required_pos(fname,args){var plural=args.length==1?"":"s",arg_list;args=args.map((x=>`'${x}'`));if(args.length==1){arg_list=args[0]}else if(args.length==2){arg_list=args[0]+" and "+args[1]}else{arg_list=args.slice(0,args.length-1).join(", ")+", and "+args[args.length-1]}throw _b_.TypeError.$factory(fname+"() "+`missing ${args.length} required positional argument${plural}: `+arg_list)}function multiple_values(fname,arg){throw _b_.TypeError.$factory(fname+"() "+`got multiple values for argument '${arg}'`)}function pos_only_passed_as_keyword(fname,arg){return _b_.TypeError.$factory(fname+`() got some positional-only arguments passed as keyword arguments:`+` '${arg}'`)}function too_many_pos_args(fname,kwarg,arg_names,nb_kwonly,defaults,args,slots){var nb_pos=args.length,last=$B.last(args);if(last.$kw){if(!kwarg){var kw=$B.parse_kwargs(last.$kw,fname);for(var k in kw){if(!slots.hasOwnProperty(k)){throw unexpected_keyword(fname,k)}}}nb_pos--}var nb_def=defaults.length;var expected=arg_names.length-nb_kwonly,plural=expected==1?"":"s";if(nb_def){expected=`from ${expected-nb_def} to ${expected}`;plural="s"}var verb=nb_pos==1?"was":"were";return _b_.TypeError.$factory(fname+"() takes "+`${expected} positional argument${plural} but ${nb_pos} ${verb} given`)}function unexpected_keyword(fname,k){return _b_.TypeError.$factory(fname+`() got an unexpected keyword argument '${k}'`)}var empty={};function args0(f,args){var 
arg_names=f.$infos.arg_names,code=f.$infos.__code__,slots={};for(var arg_name of arg_names){slots[arg_name]=empty}return $B.parse_args(args,f.$infos.__name__,code.co_argcount,slots,arg_names,f.$infos.__defaults__,f.$infos.__kwdefaults__,f.$infos.vararg,f.$infos.kwarg,code.co_posonlyargcount,code.co_kwonlyargcount)}function args0_NEW(fct,args){const LAST_ARGS=args[args.length-1];const HAS_KW=LAST_ARGS!==undefined&&LAST_ARGS!==null&&LAST_ARGS.$kw!==undefined;let ARGS_POS_COUNT=args.length,ARGS_NAMED=null;if(HAS_KW){--ARGS_POS_COUNT;ARGS_NAMED=LAST_ARGS.$kw}const result={};const $INFOS=fct.$infos,$CODE=$INFOS.__code__,PARAMS_NAMES=$INFOS.arg_names,PARAMS_POS_COUNT=$CODE.co_argcount,PARAMS_NAMED_COUNT=$CODE.co_kwonlyargcount,PARAMS_VARARGS_NAME=$INFOS.vararg,PARAMS_KWARGS_NAME=$INFOS.kwarg,PARAMS_POS_DEFAULTS=$INFOS.__defaults__,PARAMS_POS_DEFAULTS_COUNT=PARAMS_POS_DEFAULTS.length,PARAMS_POS_DEFAULTS_OFFSET=PARAMS_POS_COUNT-PARAMS_POS_DEFAULTS_COUNT;const min=Math.min(ARGS_POS_COUNT,PARAMS_POS_COUNT);let offset=0;for(;offsetPARAMS_POS_COUNT){args0(fct,args);throw new Error("Too much positional arguments given (args0 should have raised an error) !")}if(ARGS_NAMED===null){if(offset=nb_pos_or_kw){if(vararg){varargs.push(arg)}else{throw too_many_pos_args(fname,kwarg,arg_names,nb_kwonly,defaults,args,slots)}}else{if(i0){throw missing_required_kwonly(fname,missing_kwonly)}if(!kwarg){for(var k in kw){if(!slots.hasOwnProperty(k)){throw unexpected_keyword(fname,k)}}}for(var k in kw){if(kw[k]===empty){continue}if(!slots.hasOwnProperty(k)){if(kwarg){extra_kw[k]=kw[k]}}else if(slots[k]!==empty){if(posonly_set[k]&&kwarg){extra_kw[k]=kw[k]}else{throw multiple_values(fname,k)}}else{slots[k]=kw[k]}}if(kwarg){slots[kwarg]=$B.obj_dict(extra_kw)}if(vararg){slots[vararg]=$B.fast_tuple(varargs)}return slots};$B.parse_kwargs=function(kw_args,fname){var kwa=kw_args[0];for(var i=1,len=kw_args.length;i0||y!==undefined&&y.$kw){throw _b_.TypeError.$factory(name+"() takes no keyword arguments")}};$B.check_nb_args_no_kw=function(name,expected,args){var len=args.length,last=args[len-1];if(last&&last.$kw){if(last.$kw.length==2&&Object.keys(last.$kw[0]).length==0){len--}else{throw _b_.TypeError.$factory(name+"() takes no keyword arguments")}}if(len!=expected){if(expected==0){throw _b_.TypeError.$factory(name+"() takes no argument"+" ("+len+" given)")}else{throw _b_.TypeError.$factory(name+"() takes exactly "+expected+" argument"+(expected<2?"":"s")+" ("+len+" given)")}}};$B.get_class=function(obj){if(obj===null){return $B.imported.javascript.NullType}if(obj===undefined){return $B.imported.javascript.UndefinedType}var klass=obj.__class__||obj.$tp_class;if(klass===undefined){switch(typeof obj){case"number":if(Number.isInteger(obj)){return _b_.int}break;case"string":return _b_.str;case"boolean":return _b_.bool;case"function":if(obj.$is_js_func){return $B.JSObj}return $B.function;case"object":if(Array.isArray(obj)){if(obj.$is_js_array){return $B.js_array}else if(Object.getPrototypeOf(obj)===Array.prototype){obj.__class__=_b_.list;return _b_.list}}else if(obj instanceof $B.str_dict){return _b_.dict}else if(typeof Node!=="undefined"&&obj instanceof Node){if(obj.tagName){return $B.imported["browser.html"][obj.tagName]||$B.DOMNode}return $B.DOMNode}break}}if(klass===undefined){return $B.get_jsobj_class(obj)}return klass};$B.class_name=function(obj){var klass=$B.get_class(obj);if(klass===$B.JSObj){return"Javascript "+obj.constructor.name}else{return klass.__name__}};$B.make_js_iterator=function(iterator,frame,lineno){var 
set_lineno=$B.set_lineno;if(frame===undefined){if($B.frame_obj===null){function set_lineno(){}}else{frame=$B.frame_obj.frame;lineno=frame.$lineno}}if(iterator.__class__===_b_.range){var obj={ix:iterator.start};if(iterator.step>0){return{[Symbol.iterator](){return this},next(){set_lineno(frame,lineno);if(obj.ix>=iterator.stop){return{done:true,value:null}}var value=obj.ix;obj.ix+=iterator.step;return{done:false,value:value}}}}else{return{[Symbol.iterator](){return this},next(){set_lineno(frame,lineno);if(obj.ix<=iterator.stop){return{done:true,value:null}}var value=obj.ix;obj.ix+=iterator.step;return{done:false,value:value}}}}}if(iterator[Symbol.iterator]&&!iterator.$is_js_array){var it=iterator[Symbol.iterator]();return{[Symbol.iterator](){return this},next(){set_lineno(frame,lineno);return it.next()}}}var next_func=$B.$call($B.$getattr(_b_.iter(iterator),"__next__"));return{[Symbol.iterator](){return this},next(){set_lineno(frame,lineno);try{var value=next_func();return{done:false,value:value}}catch(err){if($B.is_exc(err,[_b_.StopIteration])){return{done:true,value:null}}throw err}}}};$B.unpacker=function(obj,nb_targets,has_starred){var position,position_rank=3;if(has_starred){var nb_after_starred=arguments[3];position_rank++}if($B.pep657){position=$B.decode_position(arguments[position_rank])}var t=_b_.list.$factory(obj),right_length=t.length,left_length=nb_targets+(has_starred?nb_after_starred-1:0);if(right_lengthleft_length){var exc=_b_.ValueError.$factory("too many values to unpack "+`(expected ${left_length})`);if(position){$B.set_exception_offsets(exc,position)}throw exc}t.index=-1;t.read_one=function(){t.index++;return t[t.index]};t.read_rest=function(){t.index++;var res=t.slice(t.index,t.length-nb_after_starred);t.index=t.length-nb_after_starred-1;return res};return t};$B.set_lineno=function(frame,lineno){frame.$lineno=lineno;if(frame.$f_trace!==_b_.None){$B.trace_line()}return true};$B.get_method_class=function(method,ns,qualname,refs){var klass=ns;if(method.$infos&&method.$infos.$class){return method.$infos.$class}for(var ref of refs){if(klass[ref]===undefined){var fake_class=$B.make_class(qualname);return fake_class}klass=klass[ref]}return klass};$B.$JS2Py=function(src){if(typeof src==="number"){if(src%1===0){return src}return _b_.float.$factory(src)}if(src===null||src===undefined){return _b_.None}if(Array.isArray(src)&&Object.getPrototypeOf(src)===Array.prototype){src.$brython_class="js"}return src};$B.warn=function(klass,message,filename,token){var warning=klass.$factory(message);warning.filename=filename;if(klass===_b_.SyntaxWarning){warning.lineno=token.start[0];warning.offset=token.start[1];warning.end_lineno=token.end[0];warning.end_offset=token.end[1];warning.text=token.line;warning.args[1]=$B.fast_tuple([filename,warning.lineno,warning.offset,warning.text,warning.end_lineno,warning.end_offset])}$B.imported._warnings.warn(warning)};function index_error(obj){var type=typeof obj=="string"?"string":"list";throw _b_.IndexError.$factory(type+" index out of range")}$B.$getitem=function(obj,item,position){var is_list=Array.isArray(obj)&&obj.__class__===_b_.list,is_dict=obj.__class__===_b_.dict&&!obj.$jsobj;if(typeof item=="number"){if(is_list||typeof obj=="string"){item=item>=0?item:obj.length+item;if(obj[item]!==undefined){return obj[item]}else{index_error(obj)}}}else if(item.valueOf&&typeof item.valueOf()=="string"&&is_dict){return _b_.dict.$getitem(obj,item)}if(obj.$is_class){var class_gi=$B.$getattr(obj,"__class_getitem__",_b_.None);if(class_gi!==_b_.None){return 
$B.$call(class_gi)(item)}else if(obj.__class__){class_gi=$B.$getattr(obj.__class__,"__getitem__",_b_.None);if(class_gi!==_b_.None){return class_gi(obj,item)}else{throw _b_.TypeError.$factory("'"+$B.class_name(obj.__class__)+"' object is not subscriptable")}}}if(is_list){return _b_.list.$getitem(obj,item)}if(is_dict){return _b_.dict.$getitem(obj,item)}var gi=$B.$getattr(obj.__class__||$B.get_class(obj),"__getitem__",_b_.None);if(gi!==_b_.None){return gi(obj,item)}var exc=_b_.TypeError.$factory("'"+$B.class_name(obj)+"' object is not subscriptable");if(position){$B.set_exception_offsets(exc,$B.decode_position(position))}throw exc};$B.getitem_slice=function(obj,slice){var res;if(Array.isArray(obj)&&obj.__class__===_b_.list){if(slice.start===_b_.None&&slice.stop===_b_.None){if(slice.step===_b_.None||slice.step==1){res=obj.slice()}else if(slice.step==-1){res=obj.slice().reverse()}}else if(slice.step===_b_.None){if(slice.start===_b_.None){slice.start=0}if(slice.stop===_b_.None){slice.stop=obj.length}if(typeof slice.start=="number"&&typeof slice.stop=="number"){if(slice.start<0){slice.start+=obj.length}if(slice.stop<0){slice.stop+=obj.length}res=obj.slice(slice.start,slice.stop)}}if(res){res.__class__=obj.__class__;res.__brython__=true;return res}else{return _b_.list.$getitem(obj,slice)}}else if(typeof obj=="string"){return _b_.str.__getitem__(obj,slice)}return $B.$getattr($B.get_class(obj),"__getitem__")(obj,slice)};$B.$getattr_pep657=function(obj,attr,position){try{return $B.$getattr(obj,attr)}catch(err){$B.set_exception_offsets(err,$B.decode_position(position));throw err}};$B.set_list_slice=function(obj,start,stop,value){if(start===null){start=0}else{start=$B.$GetInt(start);if(start<0){start=Math.max(0,start+obj.length)}}if(stop===null){stop=obj.length}stop=$B.$GetInt(stop);if(stop<0){stop=Math.max(0,stop+obj.length)}var res=_b_.list.$factory(value);obj.splice.apply(obj,[start,stop-start].concat(res))};$B.set_list_slice_step=function(obj,start,stop,step,value){if(step===null||step==1){return $B.set_list_slice(obj,start,stop,value)}if(step==0){throw _b_.ValueError.$factory("slice step cannot be zero")}step=$B.$GetInt(step);if(start===null){start=step>0?0:obj.length-1}else{start=$B.$GetInt(start)}if(stop===null){stop=step>0?obj.length:-1}else{stop=$B.$GetInt(stop)}var repl=_b_.list.$factory(value),j=0,test,nb=0;if(step>0){test=function(i){return istop}}for(var i=start;test(i);i+=step){nb++}if(nb!=repl.length){throw _b_.ValueError.$factory("attempt to assign sequence of size "+repl.length+" to extended slice of size "+nb)}for(var i=start;test(i);i+=step){obj[i]=repl[j];j++}};$B.$setitem=function(obj,item,value){if(Array.isArray(obj)&&obj.__class__===undefined&&!obj.$is_js_array&&typeof item=="number"&&!$B.$isinstance(obj,_b_.tuple)){if(item<0){item+=obj.length}if(obj[item]===undefined){throw _b_.IndexError.$factory("list assignment index out of range")}obj[item]=value;return}else if(obj.__class__===_b_.dict){_b_.dict.$setitem(obj,item,value);return}else if(obj.__class__===_b_.list){return _b_.list.$setitem(obj,item,value)}var si=$B.$getattr(obj.__class__||$B.get_class(obj),"__setitem__",null);if(si===null||typeof si!="function"){throw _b_.TypeError.$factory("'"+$B.class_name(obj)+"' object does not support item assignment")}return si(obj,item,value)};$B.$delitem=function(obj,item){if(Array.isArray(obj)&&obj.__class__===_b_.list&&typeof item=="number"&&!$B.$isinstance(obj,_b_.tuple)){if(item<0){item+=obj.length}if(obj[item]===undefined){throw _b_.IndexError.$factory("list deletion index out of 
range")}obj.splice(item,1);return}else if(obj.__class__===_b_.dict){_b_.dict.__delitem__(obj,item);return}else if(obj.__class__===_b_.list){return _b_.list.__delitem__(obj,item)}var di=$B.$getattr(obj.__class__||$B.get_class(obj),"__delitem__",null);if(di===null){throw _b_.TypeError.$factory("'"+$B.class_name(obj)+"' object doesn't support item deletion")}return di(obj,item)};function num_result_type(x,y){var is_int,is_float,x_num,y_num;if(typeof x=="number"){x_num=x;if(typeof y=="number"){is_int=true;y_num=y}else if(y.__class__===_b_.float){is_float=true;y_num=y.value}}else if(x.__class__===_b_.float){x_num=x.value;if(typeof y=="number"){y_num=y;is_float=true}else if(y.__class__===_b_.float){is_float=true;y_num=y.value}}return{is_int:is_int,is_float:is_float,x:x_num,y:y_num}}$B.augm_assign=function(left,op,right){var res_type=num_result_type(left,right);if(res_type.is_int||res_type.is_float){var z;switch(op){case"+=":z=res_type.x+res_type.y;break;case"-=":z=res_type.x-res_type.y;break;case"*=":z=res_type.x*res_type.y;break;case"/=":z=res_type.x/res_type.y;break}if(z){if(res_type.is_int&&Number.isSafeInteger(z)){return z}else if(res_type.res_is_float){return $B.fast_float(z)}}}else if(op=="*="){if(typeof left=="number"&&typeof right=="string"){return left<=0?"":right.repeat(left)}else if(typeof left=="string"&&typeof right=="number"){return right<=0?"":left.repeat(right)}}else if(op=="+="){if(typeof left=="string"&&typeof right=="string"){return left+right}}var op1=op.substr(0,op.length-1),method=$B.op2method.augmented_assigns[op],augm_func=$B.$getattr(left,"__"+method+"__",null);if(augm_func!==null){var res=$B.$call(augm_func)(right);if(res===_b_.NotImplemented){throw _b_.TypeError.$factory(`unsupported operand type(s)`+` for ${op}: '${$B.class_name(left)}' `+`and '${$B.class_name(right)}'`)}return res}else{var method1=$B.op2method.operations[op1];if(method1===undefined){method1=$B.op2method.binary[op1]}return $B.rich_op(`__${method1}__`,left,right)}};$B.$is=function(a,b){if((a===undefined||a===$B.Undefined)&&(b===undefined||b===$B.Undefined)){return true}if(a===null){return b===null}if(b===null){return a===null}if(a.__class__===_b_.float&&b.__class__===_b_.float){if(isNaN(a.value)&&isNaN(b.value)){return true}return a.value==b.value}if(a===_b_.int&&b==$B.long_int||a===$B.long_int&&b===_b_.int){return true}return a===b};$B.is_or_equals=function(x,y){return $B.$is(x,y)||$B.rich_comp("__eq__",x,y)};$B.member_func=function(obj){var klass=$B.get_class(obj),contains=$B.$getattr(klass,"__contains__",null);if(contains!==null){contains=$B.$call(contains);return contains.bind(null,obj)}try{var iterator=$B.make_js_iterator(obj);return function(key){try{for(var item of iterator){if($B.is_or_equals(key,item)){return true}}return false}catch(err){return false}}}catch(err){var getitem=$B.$getattr(klass,"__getitem__",null);if(getitem!==null){return function(key){var i=-1;while(true){i++;try{var item=getitem(obj,i);if($B.is_or_equals(key,item)){return true}}catch(err){if($B.$is_exc(err,[_b_.StopIteration])){return false}throw err}}}}else{throw _b_.TypeError.$factory("argument of type "+`'${$B.class_name(obj)}' is not iterable`)}}};$B.$is_member=function(item,_set){return $B.member_func(_set)(item)};$B.$call=function(callable,position){callable=$B.$call1(callable);if(position){return function(){try{return callable.apply(null,arguments)}catch(exc){position=$B.decode_position(position);$B.set_exception_offsets(exc,position);throw exc}}}return 
callable};$B.$call1=function(callable){if(callable.__class__===$B.method){return callable}else if(callable.$factory){return callable.$factory}else if(callable.$is_class){return callable.$factory=$B.$instance_creator(callable)}else if(callable.$is_js_class){return callable.$factory=function(){return new callable(...arguments)}}else if(callable.$in_js_module){return function(){var res=callable(...arguments);return res===undefined?_b_.None:res}}else if(callable.$is_func||typeof callable=="function"){if(callable.$infos&&callable.$infos.__code__&&callable.$infos.__code__.co_flags&32){$B.frame_obj.frame.$has_generators=true}return callable}try{return $B.$getattr(callable,"__call__")}catch(err){throw _b_.TypeError.$factory("'"+$B.class_name(callable)+"' object is not callable")}};var r_opnames=["add","sub","mul","truediv","floordiv","mod","pow","lshift","rshift","and","xor","or"];var ropsigns=["+","-","*","/","//","%","**","<<",">>","&","^","|"];$B.make_rmethods=function(klass){for(var r_opname of r_opnames){if(klass["__r"+r_opname+"__"]===undefined&&klass["__"+r_opname+"__"]){klass["__r"+r_opname+"__"]=function(name){return function(self,other){return klass["__"+name+"__"](other,self)}}(r_opname)}}};$B.UUID=function(){return $B.$py_UUID++};$B.$GetInt=function(value){if(typeof value=="number"||value.constructor===Number){return value}else if(typeof value==="boolean"){return value?1:0}else if($B.$isinstance(value,_b_.int)){return value}else if($B.$isinstance(value,_b_.float)){return value.valueOf()}if(!value.$is_class){try{var v=$B.$getattr(value,"__int__")();return v}catch(e){}try{var v=$B.$getattr(value,"__index__")();return v}catch(e){}}throw _b_.TypeError.$factory("'"+$B.class_name(value)+"' object cannot be interpreted as an integer")};$B.to_num=function(obj,methods){var expected_class={__complex__:_b_.complex,__float__:_b_.float,__index__:_b_.int,__int__:_b_.int};var klass=obj.__class__||$B.get_class(obj);for(var i=0;i1e3){var exc=_b_.RecursionError.$factory("maximum recursion depth exceeded");$B.set_exc(exc,frame);throw exc}frame.__class__=$B.frame;$B.frame_obj=$B.push_frame(frame);if($B.tracefunc&&$B.tracefunc!==_b_.None){if(frame[4]===$B.tracefunc||$B.tracefunc.$infos&&frame[4]&&frame[4]===$B.tracefunc.$infos.__func__){$B.tracefunc.$frame_id=frame[0];return _b_.None}else{var frame_obj=$B.frame_obj;while(frame_obj!==null){if(frame_obj.frame[0]==$B.tracefunc.$frame_id){return _b_.None}frame_obj=frame_obj.prev}try{var res=$B.tracefunc(frame,"call",_b_.None);var frame_obj=$B.frame_obj;while(frame_obj!==null){if(frame_obj.frame[4]==res){return _b_.None}frame_obj=frame_obj.prev}return res}catch(err){$B.set_exc(err,frame);$B.frame_obj=$B.frame_obj.prev;err.$in_trace_func=true;throw err}}}else{$B.tracefunc=_b_.None}return _b_.None};$B.trace_exception=function(){var frame=$B.frame_obj.frame;if(frame[0]==$B.tracefunc.$current_frame_id){return _b_.None}var trace_func=frame.$f_trace,exc=frame[1].$current_exception;return trace_func(frame,"exception",$B.fast_tuple([exc.__class__,exc,$B.traceback.$factory(exc)]))};$B.trace_line=function(){var frame=$B.frame_obj.frame;if(frame[0]==$B.tracefunc.$current_frame_id){return _b_.None}var trace_func=frame.$f_trace;if(trace_func===undefined){console.log("trace line, frame",frame)}return trace_func(frame,"line",_b_.None)};$B.trace_return=function(value){var frame=$B.frame_obj.frame,trace_func=frame.$f_trace;if(frame[0]==$B.tracefunc.$current_frame_id){return 
_b_.None}trace_func(frame,"return",value)};$B.leave_frame=function(arg){if($B.frame_obj===null){return}if(arg&&arg.value!==undefined&&$B.tracefunc){if($B.frame_obj.frame.$f_trace===undefined){$B.frame_obj.frame.$f_trace=$B.tracefunc}if($B.frame_obj.frame.$f_trace!==_b_.None){$B.trace_return(arg.value)}}var frame=$B.frame_obj.frame;$B.frame_obj=$B.frame_obj.prev;if(frame.$has_generators){for(var key in frame[1]){if(frame[1][key]&&frame[1][key].__class__===$B.generator){var gen=frame[1][key];if(gen.$frame===undefined){continue}var ctx_managers=gen.$frame[1].$context_managers;if(ctx_managers){for(var cm of ctx_managers){$B.$call($B.$getattr(cm,"__exit__"))(_b_.None,_b_.None,_b_.None)}}}}}delete frame[1].$current_exception;return _b_.None};$B.push_frame=function(frame){var count=$B.frame_obj===null?0:$B.frame_obj.count;return{prev:$B.frame_obj,frame:frame,count:count+1}};$B.count_frames=function(frame_obj){frame_obj=frame_obj||$B.frame_obj;return frame_obj==null?0:frame_obj.count};$B.get_frame_at=function(pos,frame_obj){frame_obj=frame_obj||$B.frame_obj;var nb=$B.count_frames()-pos-1;for(var i=0;i",__ge__:">="};$B.rich_comp=function(op,x,y){if(x===undefined){throw _b_.RuntimeError.$factory("error in rich comp")}var x1=x!==null&&x.valueOf?x.valueOf():x,y1=y!==null&&y.valueOf?y.valueOf():y;if(typeof x1=="number"&&typeof y1=="number"&&x.__class__===undefined&&y.__class__===undefined){switch(op){case"__eq__":return x1==y1;case"__ne__":return x1!=y1;case"__le__":return x1<=y1;case"__lt__":return x1=y1;case"__gt__":return x1>y1}}var res;if(x!==null&&(x.$is_class||x.$factory)){if(op=="__eq__"){return x===y}else if(op=="__ne__"){return!(x===y)}else{throw _b_.TypeError.$factory("'"+method2comp[op]+"' not supported between instances of '"+$B.class_name(x)+"' and '"+$B.class_name(y)+"'")}}var x_class_op=$B.$call($B.$getattr($B.get_class(x),op)),rev_op=reversed_op[op]||op,y_rev_func;if(x!==null&&x.__class__&&y!==null&&y.__class__){if(y.__class__.__mro__.indexOf(x.__class__)>-1){y_rev_func=$B.$getattr(y,rev_op);res=$B.$call(y_rev_func)(x);if(res!==_b_.NotImplemented){return res}}}res=x_class_op(x,y);if(res!==_b_.NotImplemented){return res}if(y_rev_func===undefined){y_rev_func=$B.$call($B.$getattr($B.get_class(y),rev_op));res=y_rev_func(y,x);if(res!==_b_.NotImplemented){return res}}if(op=="__eq__"){return _b_.False}else if(op=="__ne__"){return _b_.True}throw _b_.TypeError.$factory("'"+method2comp[op]+"' not supported between instances of '"+$B.class_name(x)+"' and '"+$B.class_name(y)+"'")};var opname2opsign={__sub__:"-",__xor__:"^",__mul__:"*"};$B.rich_op=function(op,x,y,position){try{return $B.rich_op1(op,x,y)}catch(exc){if(position){$B.set_exception_offsets(exc,$B.decode_position(position))}throw exc}};$B.rich_op1=function(op,x,y){var res_is_int,res_is_float,x_num,y_num;if(typeof x=="number"){x_num=x;if(typeof y=="number"){res_is_int=true;y_num=y}else if(y.__class__===_b_.float){res_is_float=true;y_num=y.value}}else if(x.__class__===_b_.float){x_num=x.value;if(typeof y=="number"){y_num=y;res_is_float=true}else if(y.__class__===_b_.float){res_is_float=true;y_num=y.value}}if(res_is_int||res_is_float){var z;switch(op){case"__add__":z=x_num+y_num;break;case"__sub__":z=x_num-y_num;break;case"__mul__":z=x_num*y_num;break;case"__pow__":if(res_is_int&&y_num>=0){return _b_.int.$int_or_long(BigInt(x_num)**BigInt(y_num))}break;case"__truediv__":if(y_num==0){throw _b_.ZeroDivisionError.$factory("division by zero")}z=x_num/y_num;return{__class__:_b_.float,value:z}}if(z){if(res_is_int&&Number.isSafeInteger(z)){return 
z}else if(res_is_float){return{__class__:_b_.float,value:z}}}}else if(typeof x=="string"&&typeof y=="string"&&op=="__add__"){return x+y}var x_class=x.__class__||$B.get_class(x),y_class=y.__class__||$B.get_class(y),rop="__r"+op.substr(2),method;if(x_class===y_class){if(x_class===_b_.int){return _b_.int[op](x,y)}else if(x_class===_b_.bool){return(_b_.bool[op]||_b_.int[op])(x,y)}try{method=$B.$call($B.$getattr(x_class,op))}catch(err){if(err.__class__===_b_.AttributeError){var kl_name=$B.class_name(x);throw _b_.TypeError.$factory("unsupported operand type(s) "+"for "+opname2opsign[op]+": '"+kl_name+"' and '"+kl_name+"'")}throw err}return method(x,y)}if(_b_.issubclass(y_class,x_class)){var reflected_left=$B.$getattr(x_class,rop,false),reflected_right=$B.$getattr(y_class,rop,false);if(reflected_right&&reflected_left&&reflected_right!==reflected_left){return reflected_right(y,x)}}var res;try{var attr=$B.$getattr(x,op);method=$B.$getattr(x_class,op)}catch(err){if(err.__class__!==_b_.AttributeError){throw err}res=$B.$call($B.$getattr(y,rop))(x);if(res!==_b_.NotImplemented){return res}throw _b_.TypeError.$factory(`unsupported operand type(s) for ${$B.method_to_op[op]}:`+` '${$B.class_name(x)}' and '${$B.class_name(y)}'`)}if((op=="__add__"||op=="__mul__")&&(Array.isArray(x)||typeof x=="string"||$B.$isinstance(x,[_b_.str,_b_.bytes,_b_.bytearray,_b_.memoryview]))){try{res=method(x,y)}catch(err){res=_b_.NotImplemented}}else{res=method(x,y)}if(res===_b_.NotImplemented){try{var reflected=$B.$getattr(y,rop),method=$B.$getattr(y_class,rop)}catch(err){if(err.__class__!==_b_.AttributeError){throw err}throw _b_.TypeError.$factory(`unsupported operand type(s) for ${$B.method_to_op[op]}:`+` '${$B.class_name(x)}' and '${$B.class_name(y)}'`)}res=method(y,x);if(res===_b_.NotImplemented){throw _b_.TypeError.$factory(`unsupported operand type(s) for ${$B.method_to_op[op]}:`+` '${$B.class_name(x)}' and '${$B.class_name(y)}'`)}return res}else{return res}};$B.is_none=function(o){return o===undefined||o===null||o==_b_.None};var repr_stack=new Set;$B.repr={enter:function(obj){var obj_id=_b_.id(obj);if(repr_stack.has(obj_id)){return true}else{repr_stack.add(obj_id);if(repr_stack.size>$B.recursion_limit){repr_stack.clear();throw _b_.RecursionError.$factory("maximum recursion depth "+"exceeded while getting the repr of an object")}}},leave:function(obj){repr_stack.delete(_b_.id(obj))}}})(__BRYTHON__);__BRYTHON__.builtins.object=function($B){var _b_=$B.builtins;var object={__name__:"object",__qualname__:"object",$is_class:true,$native:true};var opnames=["add","sub","mul","truediv","floordiv","mod","pow","lshift","rshift","and","xor","or"];var opsigns=["+","-","*","/","//","%","**","<<",">>","&","^","|"];object.__delattr__=function(self,attr){if(self.__dict__&&$B.$isinstance(self.__dict__,_b_.dict)&&_b_.dict.$contains_string(self.__dict__,attr)){_b_.dict.$delete_string(self.__dict__,attr);return _b_.None}else if(self.__dict__===undefined&&self[attr]!==undefined){delete self[attr];return _b_.None}else{var klass=self.__class__;if(klass){var prop=$B.$getattr(klass,attr);if(prop.__class__===_b_.property){if(prop.__delete__!==undefined){prop.__delete__(self);return _b_.None}}}}throw $B.attr_error(attr,self)};object.__dir__=function(self){var objects;if(self.$is_class){objects=[self].concat(self.__mro__)}else{var klass=self.__class__||$B.get_class(self);objects=[self,klass].concat(klass.__mro__)}var res=[];for(var i=0,len=objects.length;i2){console.log("error in get.apply",err);console.log("get 
attr",attr,"of",obj);console.log("res",res);console.log("__get__",__get__);console.log(__get__+"")}throw err}}if(__get__===null&&typeof res=="function"){__get__=function(x){return x}}if(__get__!==null){res.__name__=attr;if(attr=="__new__"||res.__class__===$B.builtin_function_or_method){res.$type="staticmethod"}var res1=__get__.apply(null,[res,obj,klass]);if($test){console.log("res",res,"res1",res1)}if(typeof res1=="function"){if(res1.__class__===$B.method){return res}if(res.$type=="staticmethod"){return res}else{var self=res.__class__===$B.method?klass:obj,method=function(){var args=[self];for(var i=0,len=arguments.length;i0){throw _b_.TypeError.$factory("object() takes no parameters")}var res=Object.create(null);res.__class__=cls;res.__dict__=$B.obj_dict({});return res}};object.__new__=function(cls,...args){if(cls===undefined){throw _b_.TypeError.$factory("object.__new__(): not enough arguments")}var init_func=$B.$getattr(cls,"__init__");if(init_func===object.__init__){if(args.length>0){throw _b_.TypeError.$factory("object() takes no parameters")}}var res=Object.create(null);$B.update_obj(res,{__class__:cls,__dict__:$B.obj_dict({})});return res};object.__ne__=function(self,other){if(self===other){return false}var eq=$B.$getattr(self.__class__||$B.get_class(self),"__eq__",null);if(eq!==null){var res=$B.$call(eq)(self,other);if(res===_b_.NotImplemented){return res}return!$B.$bool(res)}return _b_.NotImplemented};object.__reduce__=function(self){if(!self.__dict__){throw _b_.TypeError.$factory(`cannot pickle '${$B.class_name(self)}' object`)}if($B.imported.copyreg===undefined){$B.$import("copyreg")}var res=[$B.imported.copyreg._reconstructor];var D=$B.get_class(self),B=object;for(var klass of D.__mro__){if(klass.__module__=="builtins"){B=klass;break}}var args=[D,B];if(B===object){args.push(_b_.None)}else{args.push($B.$call(B)(self))}res.push($B.fast_tuple(args));var d=$B.empty_dict();for(var attr of _b_.dict.$keys_string(self.__dict__)){_b_.dict.$setitem(d,attr,_b_.dict.$getitem_string(self.__dict__,attr))}res.push(d);return _b_.tuple.$factory(res)};function getNewArguments(self,klass){var newargs_ex=$B.$getattr(self,"__getnewargs_ex__",null);if(newargs_ex!==null){var newargs=newargs_ex();if(!newargs||newargs.__class__!==_b_.tuple){throw _b_.TypeError.$factory("__getnewargs_ex__ should "+`return a tuple, not '${$B.class_name(newargs)}'`)}if(newargs.length!=2){throw _b_.ValueError.$factory("__getnewargs_ex__ should "+`return a tuple of length 2, not ${newargs.length}`)}var args=newargs[0],kwargs=newargs[1];if(!args||args.__class__!==_b_.tuple){throw _b_.TypeError.$factory("first item of the tuple returned "+`by __getnewargs_ex__ must be a tuple, not '${$B.class_name(args)}'`)}if(!kwargs||kwargs.__class__!==_b_.dict){throw _b_.TypeError.$factory("second item of the tuple returned "+`by __getnewargs_ex__ must be a dict, not '${$B.class_name(kwargs)}'`)}return{args:args,kwargs:kwargs}}var newargs=klass.$getnewargs,args;if(!newargs){newargs=$B.$getattr(klass,"__getnewargs__",null)}if(newargs){args=newargs(self);if(!args||args.__class__!==_b_.tuple){throw _b_.TypeError.$factory("__getnewargs__ should "+`return a tuple, not '${$B.class_name(args)}'`)}return{args:args}}}object.__reduce_ex__=function(self,protocol){var klass=$B.get_class(self);if($B.imported.copyreg===undefined){$B.$import("copyreg")}if(protocol<2){return $B.$call($B.imported.copyreg._reduce_ex)(self,protocol)}var reduce=$B.$getattr(klass,"__reduce__");if(reduce!==object.__reduce__){return $B.$call(reduce)(self)}var 
res=[$B.imported.copyreg.__newobj__];var arg2=[klass];var newargs=getNewArguments(self,klass);if(newargs){arg2=arg2.concat(newargs.args)}res.push($B.fast_tuple(arg2));var d=$B.empty_dict(),nb=0;if(self.__dict__){for(var item of _b_.dict.$iter_items_with_hash(self.__dict__)){if(item.key=="__class__"||item.key.startsWith("$")){continue}_b_.dict.$setitem(d,item.key,item.value);nb++}}if(nb==0){d=_b_.None}res.push(d);var list_like_iterator=_b_.None;if($B.$getattr(klass,"append",null)!==null&&$B.$getattr(klass,"extend",null)!==null){list_like_iterator=_b_.iter(self)}res.push(list_like_iterator);var key_value_iterator=_b_.None;if($B.$isinstance(self,_b_.dict)){key_value_iterator=_b_.dict.items(self)}res.push(key_value_iterator);return _b_.tuple.$factory(res)};object.__repr__=function(self){if(self===object){return""}if(self.__class__===_b_.type){return""}var module=self.__class__.__module__;if(module!==undefined&&!module.startsWith("$")&&module!=="builtins"){return"<"+self.__class__.__module__+"."+$B.class_name(self)+" object>"}else{return"<"+$B.class_name(self)+" object>"}};object.__setattr__=function(self,attr,val){if(val===undefined){throw _b_.TypeError.$factory("can't set attributes of built-in/extension type 'object'")}else if(self.__class__===object){if(object[attr]===undefined){throw $B.attr_error(attr,self)}else{throw _b_.AttributeError.$factory("'object' object attribute '"+attr+"' is read-only")}}if(self.__dict__){_b_.dict.$setitem(self.__dict__,attr,val)}else{self[attr]=val}return _b_.None};object.__setattr__.__get__=function(obj){return function(attr,val){object.__setattr__(obj,attr,val)}};object.__setattr__.__str__=function(){return"method object.setattr"};object.__str__=function(self){if(self===undefined||self.$kw){throw _b_.TypeError.$factory("descriptor '__str__' of 'object' "+"object needs an argument")}var klass=self.__class__||$B.get_class(self);var repr_func=$B.$getattr(klass,"__repr__");return $B.$call(repr_func).apply(null,arguments)};object.__subclasshook__=function(){return _b_.NotImplemented};object.$factory=function(){if(arguments.length>0||arguments.length==1&&arguments[0].$kw&&Object.keys(arguments[0].$kw).length>0){throw _b_.TypeError.$factory("object() takes no arguments")}var res={__class__:object},args=[res];object.__init__.apply(null,args);return res};$B.set_func_names(object,"builtins");return object}(__BRYTHON__);(function($B){var _b_=$B.builtins;var TPFLAGS={STATIC_BUILTIN:1<<1,MANAGED_WEAKREF:1<<3,MANAGED_DICT:1<<4,SEQUENCE:1<<5,MAPPING:1<<6,DISALLOW_INSTANTIATION:1<<7,IMMUTABLETYPE:1<<8,HEAPTYPE:1<<9,BASETYPE:1<<10,HAVE_VECTORCALL:1<<11,READY:1<<12,READYING:1<<13,HAVE_GC:1<<14,METHOD_DESCRIPTOR:1<<17,VALID_VERSION_TAG:1<<19,IS_ABSTRACT:1<<20,MATCH_SELF:1<<22,LONG_SUBCLASS:1<<24,LIST_SUBCLASS:1<<25,TUPLE_SUBCLASS:1<<26,BYTES_SUBCLASS:1<<27,UNICODE_SUBCLASS:1<<28,DICT_SUBCLASS:1<<29,BASE_EXC_SUBCLASS:1<<30,TYPE_SUBCLASS:1<<31,HAVE_FINALIZE:1<<0,HAVE_VERSION_TAG:1<<18};$B.$class_constructor=function(class_name,class_obj_proxy,metaclass,resolved_bases,bases,kwargs){var dict;if(class_obj_proxy instanceof $B.str_dict){dict=$B.empty_dict();dict.$strings=class_obj_proxy}else{dict=class_obj_proxy.$target}var module=class_obj_proxy.__module__;for(var base of bases){if(base.__flags__!==undefined&&!(base.__flags__&TPFLAGS.BASETYPE)){throw _b_.TypeError.$factory("type 'bool' is not an acceptable base type")}}var extra_kwargs={};if(kwargs){for(var i=0;i0){if(bases[0].__class__===undefined){if(typeof bases[0]=="function"){if(bases.length!=1){throw _b_.TypeError.$factory("A 
Brython class "+"can inherit at most 1 Javascript constructor")}metaclass=bases[0].__class__=$B.JSMeta;$B.set_func_names(bases[0],module)}else{throw _b_.TypeError.$factory("Argument of "+class_name+" is not a class (type '"+$B.class_name(bases[0])+"')")}}for(var base of bases){var mc=base.__class__;if(metaclass===undefined){metaclass=mc}else if(mc===metaclass||_b_.issubclass(metaclass,mc)){}else if(_b_.issubclass(mc,metaclass)){metaclass=mc}else if(metaclass.__bases__&&metaclass.__bases__.indexOf(mc)==-1){throw _b_.TypeError.$factory("metaclass conflict: the "+"metaclass of a derived class must be a (non-"+"strict) subclass of the metaclasses of all its bases")}}}else{metaclass=metaclass||_b_.type}return metaclass};function set_attr_if_absent(dict,attr,value){try{$B.$getitem(dict,attr)}catch(err){$B.$setitem(dict,attr,value)}}$B.make_class_namespace=function(metaclass,class_name,module,qualname,bases){var class_dict=_b_.dict.$literal([["__module__",module],["__qualname__",qualname]]);if(metaclass!==_b_.type){var prepare=$B.$getattr(metaclass,"__prepare__",_b_.None);if(prepare!==_b_.None){class_dict=$B.$call(prepare)(class_name,bases);set_attr_if_absent(class_dict,"__module__",module);set_attr_if_absent(class_dict,"__qualname__",qualname)}}if(class_dict.__class__===_b_.dict){if(class_dict.$all_str){return class_dict.$strings}return new Proxy(class_dict,{get:function(target,prop){if(prop=="__class__"){return _b_.dict}else if(prop=="$target"){return target}if(_b_.dict.$contains_string(target,prop)){return _b_.dict.$getitem_string(target,prop)}return undefined},set:function(target,prop,value){_b_.dict.$setitem(target,prop,value)}})}else{var setitem=$B.$getattr(class_dict,"__setitem__"),getitem=$B.$getattr(class_dict,"__getitem__");return new Proxy(class_dict,{get:function(target,prop){if(prop=="__class__"){return $B.get_class(target)}else if(prop=="$target"){return target}try{return getitem(prop)}catch(err){return undefined}},set:function(target,prop,value){setitem(prop,value);return _b_.None}})}};$B.resolve_mro_entries=function(bases){var new_bases=[],has_mro_entries=false;for(var base of bases){if(!$B.$isinstance(base,_b_.type)){var mro_entries=$B.$getattr(base,"__mro_entries__",_b_.None);if(mro_entries!==_b_.None){has_mro_entries=true;var entries=_b_.list.$factory(mro_entries(bases));new_bases=new_bases.concat(entries)}else{new_bases.push(base)}}else{new_bases.push(base)}}return has_mro_entries?new_bases:bases};var type_getsets={__name__:"getset",__qualname__:"getset",__bases__:"getset",__module__:"getset",__abstractmethods__:"getset",__dict__:"get",__doc__:"getset",__text_signature__:"get",__annotations__:"getset"};$B.make_class=function(qualname,factory){var A={__class__:type,__bases__:[_b_.object],__mro__:[_b_.object],__name__:qualname,__qualname__:qualname,$is_class:true};A.$factory=factory;return A};var type=$B.make_class("type",(function(kls,bases,cl_dict){var missing={},$=$B.args("type",3,{kls:null,bases:null,cl_dict:null},["kls","bases","cl_dict"],arguments,{bases:missing,cl_dict:missing},null,"kw"),kls=$.kls,bases=$.bases,cl_dict=$.cl_dict,kw=$.kw;var kwarg={};for(var key in kw.$jsobj){kwarg[key]=kw.$jsobj[key]}var kwargs={$kw:[kwarg]};if(cl_dict===missing){if(bases!==missing){throw _b_.TypeError.$factory("type() takes 1 or 3 arguments")}return $B.get_class(kls)}else{var module=$B.frame_obj.frame[2],resolved_bases=$B.resolve_mro_entries(bases),metaclass=$B.get_metaclass(kls,module,resolved_bases);return 
type.__call__(metaclass,kls,resolved_bases,cl_dict,kwargs)}}));type.__class__=type;var classmethod=_b_.classmethod=$B.make_class("classmethod",(function(func){$B.check_nb_args_no_kw("classmethod",1,arguments);return{__class__:classmethod,__func__:func}}));classmethod.__get__=function(){var $=$B.args("classmethod",3,{self:null,obj:null,cls:null},["self","obj","cls"],arguments,{cls:_b_.None},null,null),self=$.self,obj=$.obj,cls=$.cls;if(cls===_b_.None||cls===undefined){cls=$B.get_class(obj)}var func_class=$B.get_class(self.__func__),candidates=[func_class].concat(func_class.__mro__);for(var candidate of candidates){if(candidate===$B.function){break}if(candidate.__get__){return candidate.__get__(self.__func__,cls,cls)}}return $B.method.$factory(self.__func__,cls)};$B.set_func_names(classmethod,"builtins");var staticmethod=_b_.staticmethod=$B.make_class("staticmethod",(function(func){return{__class__:staticmethod,__func__:func}}));staticmethod.__call__=function(self){return $B.$call(self.__func__)};staticmethod.__get__=function(self){return self.__func__};$B.set_func_names(staticmethod,"builtins");$B.getset_descriptor=$B.make_class("getset_descriptor",(function(klass,attr,getter,setter){var res={__class__:$B.getset_descriptor,__doc__:_b_.None,cls:klass,attr:attr,getter:getter,setter:setter};return res}));$B.getset_descriptor.__get__=function(self,obj,klass){console.log("__get__",self,obj,klass);if(obj===_b_.None){return self}return self.getter(self,obj,klass)};$B.getset_descriptor.__set__=function(self,klass,value){return self.setter(self,klass,value)};$B.getset_descriptor.__repr__=function(self){return``};$B.set_func_names($B.getset_descriptor,"builtins");var data_descriptors=["__abstractmethods__","__annotations__","__base__","__bases__","__basicsize__","__dictoffset__","__doc__","__flags__","__itemsize__","__module__","__mro__","__name__","__qualname__","__text_signature__","__weakrefoffset__"];type.$call=function(klass,new_func,init_func){return function(){var instance=new_func.bind(null,klass).apply(null,arguments);if($B.$isinstance(instance,klass)){init_func.bind(null,instance).apply(null,arguments)}return instance}};type.$call_no_init=function(klass,new_func){return new_func.bind(null,klass)};type.__call__=function(){var extra_args=[],klass=arguments[0];for(var i=1,len=arguments.length;i1){console.log("warning: no attribute $infos for",res,"klass",klass,"attr",attr)}if($test){console.log("res is function",res)}if(attr=="__new__"||res.__class__===$B.builtin_function_or_method){res.$type="staticmethod"}if((attr=="__class_getitem__"||attr=="__init_subclass__")&&res.__class__!==_b_.classmethod){res=_b_.classmethod.$factory(res);return _b_.classmethod.__get__(res,_b_.None,klass)}if(res.__class__===$B.method){return res.__get__(null,klass)}else{if($test){console.log("return res",res)}return res}}else{return res}}};type.__hash__=function(cls){return _b_.hash(cls)};type.__init__=function(){if(arguments.length==0){throw _b_.TypeError.$factory("descriptor '__init__' of 'type' "+"object needs an argument")}};type.__init_subclass__=function(){var $=$B.args("__init_subclass__",1,{cls:null},["cls"],arguments,{},"args","kwargs");if($.args.length>0){throw _b_.TypeError.$factory(`${$.cls.__qualname__}.__init_subclass__ takes no arguments `+`(${$.args.length} given)`)}for(var key in $.kwargs.$jsobj){throw _b_.TypeError.$factory(`${$.cls.__qualname__}.__init_subclass__() `+`takes no keyword arguments`)}return 
_b_.None};_b_.object.__init_subclass__=type.__init_subclass__;type.__instancecheck__=function(cls,instance){var kl=instance.__class__||$B.get_class(instance);if(kl===cls){return true}else{for(var i=0;i-1){continue}if(key.startsWith("$")){continue}if(v===undefined){continue}class_dict[key]=v;if(v.__class__){var set_name=$B.$getattr(v.__class__,"__set_name__",_b_.None);if(set_name!==_b_.None){set_name(v,class_dict,key)}}if(typeof v=="function"){if(v.$infos===undefined){console.log($B.make_frames_stack())}else{v.$infos.$class=class_dict;v.$infos.__qualname__=name+"."+v.$infos.__name__;if(v.$infos.$defaults){var $defaults=v.$infos.$defaults;$B.function.__setattr__(v,"__defaults__",$defaults)}}}}var sup=_b_.super.$factory(class_dict,class_dict);var init_subclass=_b_.super.__getattribute__(sup,"__init_subclass__");init_subclass(extra_kwargs);return class_dict};type.__or__=function(){var $=$B.args("__or__",2,{cls:null,other:null},["cls","other"],arguments,{},null,null),cls=$.cls,other=$.other;if(other!==_b_.None&&!$B.$isinstance(other,[type,$B.GenericAlias])){return _b_.NotImplemented}return $B.UnionType.$factory([cls,other])};type.__prepare__=function(){return $B.empty_dict()};type.__qualname__="type";type.__repr__=function(kls){$B.builtins_repr_check(type,arguments);var qualname=kls.__qualname__;if(kls.__module__&&kls.__module__!="builtins"&&!kls.__module__.startsWith("$")){qualname=kls.__module__+"."+qualname}return""};type.__ror__=function(){var len=arguments.length;if(len!=1){throw _b_.TypeError.$factory(`expected 1 argument, got ${len}`)}return _b_.NotImplemented};type.__setattr__=function(kls,attr,value){var $test=false;if($test){console.log("kls is class",type)}if(type[attr]&&type[attr].__get__&&type[attr].__set__){type[attr].__set__(kls,value);return _b_.None}if(kls.__module__=="builtins"){throw _b_.TypeError.$factory(`cannot set '${attr}' attribute of immutable type '`+kls.__qualname__+"'")}kls[attr]=value;var mp=kls.__dict__||$B.$getattr(kls,"__dict__");_b_.dict.$setitem(mp,attr,value);if(attr=="__init__"||attr=="__new__"){kls.$factory=$B.$instance_creator(kls)}else if(attr=="__bases__"){kls.__mro__=_b_.type.mro(kls)}if($test){console.log("after setattr",kls)}return _b_.None};type.mro=function(cls){if(cls===undefined){throw _b_.TypeError.$factory("unbound method type.mro() needs an argument")}var bases=cls.__bases__,seqs=[],pos1=0;for(var base of bases){var bmro=[],pos=0;if(base===undefined||base.__mro__===undefined){if(base.__class__===undefined){return[_b_.object]}else{console.log("error for base",base);console.log("cls",cls)}}bmro[pos++]=base;var _tmp=base.__mro__;if(_tmp){if(_tmp[0]===base){_tmp.splice(0,1)}for(var k=0;k<_tmp.length;k++){bmro[pos++]=_tmp[k]}}seqs[pos1++]=bmro}seqs[pos1++]=bases.slice();var mro=[cls],mpos=1;while(1){var non_empty=[],pos=0;for(var i=0;i0){non_empty[pos++]=seqs[i]}}if(non_empty.length==0){break}for(var i=0;i-1){not_head[pos++]=s}}if(not_head.length>0){candidate=null}else{break}}if(candidate===null){throw _b_.TypeError.$factory("inconsistent hierarchy, no C3 MRO is possible")}mro[mpos++]=candidate;for(var i=0;i-1};$B.set_func_names(type,"builtins");type.__init_subclass__=_b_.classmethod.$factory(type.__init_subclass__);_b_.type=type;var property=_b_.property=$B.make_class("property",(function(fget,fset,fdel,doc){var res={__class__:property};property.__init__(res,fget,fset,fdel,doc);return res}));property.__init__=function(self,fget,fset,fdel,doc){var 
$=$B.args("__init__",5,{self:null,fget:null,fset:null,fdel:null,doc:null},["self","fget","fset","fdel","doc"],arguments,{fget:_b_.None,fset:_b_.None,fdel:_b_.None,doc:_b_.None},null,null),self=$.self,fget=$.fget,fset=$.fset,fdel=$.fdel,doc=$.doc;self.__doc__=doc||"";self.$type=fget.$type;self.fget=fget;self.fset=fset;self.fdel=fdel;self.$is_property=true;if(fget&&fget.$attrs){for(var key in fget.$attrs){self[key]=fget.$attrs[key]}}self.__delete__=fdel;self.getter=function(fget){return property.$factory(fget,self.fset,self.fdel,self.__doc__)};self.setter=function(fset){return property.$factory(self.fget,fset,self.fdel,self.__doc__)};self.deleter=function(fdel){return property.$factory(self.fget,self.fset,fdel,self.__doc__)}};property.__get__=function(self,kls){if(self.fget===undefined){throw _b_.AttributeError.$factory("unreadable attribute")}return $B.$call(self.fget)(kls)};property.__new__=function(cls){return{__class__:cls}};property.__set__=function(self,obj,value){if(self.fset===undefined){var name=self.fget.$infos.__name__;var msg=`property '${name}' of '${$B.class_name(obj)}' object `+"has no setter";throw _b_.AttributeError.$factory(msg)}$B.$getattr(self.fset,"__call__")(obj,value)};$B.set_func_names(property,"builtins");var wrapper_descriptor=$B.wrapper_descriptor=$B.make_class("wrapper_descriptor");$B.set_func_names(wrapper_descriptor,"builtins");type.__call__.__class__=wrapper_descriptor;var $instance_creator=$B.$instance_creator=function(klass){var test=false;if(test){console.log("instance creator of",klass)}if(klass.prototype&&klass.prototype.constructor==klass){return function(){return new klass(...arguments)}}if(klass.__abstractmethods__&&$B.$bool(klass.__abstractmethods__)){return function(){var ams=Array.from($B.make_js_iterator(klass.__abstractmethods__));ams.sort();var msg=(ams.length>1?"s ":" ")+ams.join(", ");throw _b_.TypeError.$factory("Can't instantiate abstract class interface "+"with abstract method"+msg)}}var metaclass=klass.__class__||$B.get_class(klass),call_func,factory;if(metaclass===_b_.type){var new_func=type.__getattribute__(klass,"__new__"),init_func=type.__getattribute__(klass,"__init__");if(init_func===_b_.object.__init__){if(new_func===_b_.object.__new__){factory=_b_.object.$new(klass)}else{factory=new_func.bind(null,klass)}}else{factory=type.$call(klass,new_func,init_func)}}else{call_func=_b_.type.__getattribute__(metaclass,"__call__");if(call_func.$is_class){factory=$B.$call(call_func)}else{factory=call_func.bind(null,klass)}}factory.__class__=$B.function;factory.$infos={__name__:klass.__name__,__module__:klass.__module__};return factory};var method_wrapper=$B.method_wrapper=$B.make_class("method_wrapper",(function(attr,klass,method){var f=function(){return method.apply(null,arguments)};f.$infos={__name__:attr,__module__:klass.__module__};return f}));method_wrapper.__str__=method_wrapper.__repr__=function(self){return""};var member_descriptor=$B.member_descriptor=$B.make_class("member_descriptor",(function(attr,cls){return{__class__:member_descriptor,cls:cls,attr:attr}}));member_descriptor.__delete__=function(self,kls){if(kls.$slot_values===undefined||!kls.$slot_values.hasOwnProperty(self.attr)){throw _b_.AttributeError.$factory(self.attr)}kls.$slot_values.delete(self.attr)};member_descriptor.__get__=function(self,kls,obj_type){if(kls===_b_.None){return self}if(kls.$slot_values===undefined||!kls.$slot_values.has(self.attr)){throw $B.attr_error(self.attr,kls)}return 
kls.$slot_values.get(self.attr)};member_descriptor.__set__=function(self,kls,value){if(kls.$slot_values===undefined){kls.$slot_values=new Map}kls.$slot_values.set(self.attr,value)};member_descriptor.__str__=member_descriptor.__repr__=function(self){return""};$B.set_func_names(member_descriptor,"builtins");var method=$B.method=$B.make_class("method",(function(func,cls){var f=function(){return $B.$call(func).bind(null,cls).apply(null,arguments)};f.__class__=method;if(typeof func!=="function"){console.log("method from func w-o $infos",func,"all",$B.$call(func))}f.$infos=func.$infos||{};f.$infos.__func__=func;f.$infos.__self__=cls;f.$infos.__dict__=$B.empty_dict();return f}));method.__eq__=function(self,other){return self.$infos!==undefined&&other.$infos!==undefined&&self.$infos.__func__===other.$infos.__func__&&self.$infos.__self__===other.$infos.__self__};method.__ne__=function(self,other){return!$B.method.__eq__(self,other)};method.__get__=function(self){var f=function(){return self(arguments)};f.__class__=$B.method_wrapper;f.$infos=method.$infos;return f};method.__getattribute__=function(self,attr){var infos=self.$infos;if(infos&&infos[attr]){if(attr=="__code__"){var res={__class__:$B.Code};for(var attr in infos.__code__){res[attr]=infos.__code__[attr]}return res}else{return infos[attr]}}else if(method.hasOwnProperty(attr)){return _b_.object.__getattribute__(self,attr)}else{return $B.function.__getattribute__(self.$infos.__func__,attr)}};method.__repr__=method.__str__=function(self){return""};method.__setattr__=function(self,key,value){if(key=="__class__"){throw _b_.TypeError.$factory("__class__ assignment only supported "+"for heap types or ModuleType subclasses")}throw $B.attr_error(key,self)};$B.set_func_names(method,"builtins");$B.method_descriptor=$B.make_class("method_descriptor");$B.classmethod_descriptor=$B.make_class("classmethod_descriptor");_b_.object.__class__=type;$B.make_iterator_class=function(name){var klass={__class__:_b_.type,__mro__:[_b_.object],__name__:name,__qualname__:name,$factory:function(items){return{__class__:klass,__dict__:$B.empty_dict(),counter:-1,items:items,len:items.length,$builtin_iterator:true}},$is_class:true,$iterator_class:true,__iter__:function(self){self.counter=self.counter===undefined?-1:self.counter;self.len=self.items.length;return self},__len__:function(self){return self.items.length},__next__:function(self){if(typeof self.test_change=="function"){var message=self.test_change();if(message){throw _b_.RuntimeError.$factory(message)}}self.counter++;if(self.counter$B.fast_tuple(self.items)));$B.GenericAlias.__call__=function(self,...args){return self.origin_class.$factory.apply(null,args)};$B.GenericAlias.__eq__=function(self,other){if(!$B.$isinstance(other,$B.GenericAlias)){return false}return $B.rich_comp("__eq__",self.origin_class,other.origin_class)&&$B.rich_comp("__eq__",self.items,other.items)};$B.GenericAlias.__getitem__=function(self,item){throw _b_.TypeError.$factory("descriptor '__getitem__' for '"+self.origin_class.__name__+"' objects doesn't apply to a '"+$B.class_name(item)+"' object")};$B.GenericAlias.__mro_entries__=function(self,bases){return $B.fast_tuple([self.origin_class])};$B.GenericAlias.__new__=function(origin_class,items,kwds){var res={__class__:$B.GenericAlias,__mro__:[origin_class],origin_class:origin_class,items:items,$is_class:true};return res};$B.GenericAlias.__or__=function(self,other){var $=$B.args("__or__",2,{self:null,other:null},["self","other"],arguments,{},null,null);return 
$B.UnionType.$factory([self,other])};$B.GenericAlias.__origin__=_b_.property.$factory((self=>self.origin_class));$B.GenericAlias.__parameters__=_b_.property.$factory((self=>$B.fast_tuple([])));$B.GenericAlias.__repr__=function(self){var items=Array.isArray(self.items)?self.items:[self.items];var reprs=[];for(var item of items){if(item===_b_.Ellipsis){reprs.push("...")}else{if(item.$is_class){reprs.push(item.__name__)}else{reprs.push(_b_.repr(item))}}}return self.origin_class.__qualname__+"["+reprs.join(", ")+"]"};$B.set_func_names($B.GenericAlias,"types");$B.UnionType=$B.make_class("UnionType",(function(items){return{__class__:$B.UnionType,items:items}}));$B.UnionType.__args__=_b_.property.$factory((self=>$B.fast_tuple(self.items)));$B.UnionType.__eq__=function(self,other){if(!$B.$isinstance(other,$B.UnionType)){return _b_.NotImplemented}return _b_.list.__eq__(self.items,other.items)};$B.UnionType.__parameters__=_b_.property.$factory((()=>$B.fast_tuple([])));$B.UnionType.__repr__=function(self){var t=[];for(var item of self.items){if(item.$is_class){var s=item.__name__;if(item.__module__!=="builtins"){s=item.__module__+"."+s}t.push(s)}else{t.push(_b_.repr(item))}}return t.join(" | ")};$B.set_func_names($B.UnionType,"types")})(__BRYTHON__);(function($B){var _b_=$B.builtins;_b_.__debug__=false;$B.$comps={">":"gt",">=":"ge","<":"lt","<=":"le"};$B.$inv_comps={">":"lt",">=":"le","<":"gt","<=":"ge"};var check_nb_args=$B.check_nb_args,check_no_kw=$B.check_no_kw,check_nb_args_no_kw=$B.check_nb_args_no_kw;var NoneType=$B.NoneType={$factory:function(){return None},__bool__:function(){return False},__class__:_b_.type,__hash__:function(){return 0},__module__:"builtins",__mro__:[_b_.object],__name__:"NoneType",__qualname__:"NoneType",__repr__:function(){return"None"},__str__:function(){return"None"},$is_class:true};NoneType.__setattr__=function(self,attr){return no_set_attr(NoneType,attr)};var None=_b_.None={__class__:NoneType};None.__doc__=None;NoneType.__doc__=None;for(var $op in $B.$comps){var key=$B.$comps[$op];switch(key){case"ge":case"gt":case"le":case"lt":NoneType["__"+key+"__"]=function(){return function(){return _b_.NotImplemented}}($op)}}for(var $func in None){if(typeof None[$func]=="function"){None[$func].__str__=function(f){return function(){return""}}($func)}}$B.set_func_names(NoneType,"builtins");_b_.__build_class__=function(){throw _b_.NotImplementedError.$factory("__build_class__")};_b_.abs=function(obj){check_nb_args_no_kw("abs",1,arguments);var klass=obj.__class__||$B.get_class(obj);try{var method=$B.$getattr(klass,"__abs__")}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("Bad operand type for abs(): '"+$B.class_name(obj)+"'")}throw err}return $B.$call(method)(obj)};_b_.aiter=function(async_iterable){return $B.$call($B.$getattr(async_iterable,"__aiter__"))()};_b_.all=function(obj){check_nb_args_no_kw("all",1,arguments);var iterable=iter(obj);while(1){try{var elt=next(iterable);if(!$B.$bool(elt)){return false}}catch(err){return true}}};_b_.anext=function(){var missing={},$=$B.args("anext",2,{async_iterator:null,_default:null},["async_iterator","_default"],arguments,{_default:missing},null,null);var awaitable=$B.$call($B.$getattr($.async_iterator,"__anext__"))();return awaitable.catch((function(err){if($B.is_exc(err,[_b_.StopAsyncIteration])){if($._default!==missing){return $._default}}throw err}))};_b_.any=function(obj){check_nb_args_no_kw("any",1,arguments);for(var elt of $B.make_js_iterator(obj)){if($B.$bool(elt)){return true}}return 
false};_b_.ascii=function(obj){check_nb_args_no_kw("ascii",1,arguments);var res=repr(obj),res1="",cp;for(var i=0;i=0){return prefix+value.toString(base)}return"-"+prefix+(-value).toString(base)}function bin_hex_oct(base,obj){if($B.$isinstance(obj,_b_.int)){return $builtin_base_convert_helper(obj,base)}else{try{var klass=obj.__class__||$B.get_class(obj),method=$B.$getattr(klass,"__index__")}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("'"+$B.class_name(obj)+"' object cannot be interpreted as an integer")}throw err}var res=$B.$call(method)(obj);return $builtin_base_convert_helper(res,base)}}_b_.bin=function(obj){check_nb_args_no_kw("bin",1,arguments);return bin_hex_oct(2,obj)};_b_.breakpoint=function(){$B.$import("sys",[]);var missing={},hook=$B.$getattr($B.imported.sys,"breakpointhook",missing);if(hook===missing){throw _b_.RuntimeError.$factory("lost sys.breakpointhook")}return $B.$call(hook).apply(null,arguments)};_b_.callable=function(obj){check_nb_args_no_kw("callable",1,arguments);return _b_.hasattr(obj,"__call__")};_b_.chr=function(i){check_nb_args_no_kw("chr",1,arguments);i=$B.PyNumber_Index(i);if(i<0||i>1114111){throw _b_.ValueError.$factory("Outside valid range")}else if(i>=65536&&i<=1114111){var code=i-65536,s=String.fromCodePoint(55296|code>>10)+String.fromCodePoint(56320|code&1023);return $B.make_String(s,[0])}else{return String.fromCodePoint(i)}};var code=_b_.code=$B.make_class("code");code.__repr__=code.__str__=function(_self){return``};code.__getattribute__=function(self,attr){return self[attr]};$B.set_func_names(code,"builtins");_b_.compile=function(){var $=$B.args("compile",7,{source:null,filename:null,mode:null,flags:null,dont_inherit:null,optimize:null,_feature_version:null},["source","filename","mode","flags","dont_inherit","optimize","_feature_version"],arguments,{flags:0,dont_inherit:false,optimize:-1,_feature_version:0},null,null);var module_name="$exec_"+$B.UUID();$.__class__=code;$.co_flags=$.flags;$.co_name="";var filename=$.co_filename=$.filename;var interactive=$.mode=="single"&&$.flags&512;$B.file_cache[filename]=$.source;$B.url2name[filename]=module_name;if($B.$isinstance($.source,_b_.bytes)){var encoding="utf-8",lfpos=$.source.source.indexOf(10),first_line,second_line;if(lfpos==-1){first_line=$.source}else{first_line=_b_.bytes.$factory($.source.source.slice(0,lfpos))}first_line=_b_.bytes.decode(first_line,"latin-1");var encoding_re=/^[\t\f]*#.*?coding[:=][\t]*([-_.a-zA-Z0-9]+)/;var mo=first_line.match(encoding_re);if(mo){encoding=mo[1]}else if(lfpos>-1){var rest=$.source.source.slice(lfpos+1);lfpos=rest.indexOf(10);if(lfpos>-1){second_line=_b_.bytes.$factory(rest.slice(0,lfpos))}else{second_line=_b_.bytes.$factory(rest)}second_line=_b_.bytes.decode(second_line,"latin-1");mo=second_line.match(encoding_re);if(mo){encoding=mo[1]}}$.source=_b_.bytes.decode($.source,encoding)}if(!$B.$isinstance(filename,[_b_.bytes,_b_.str])){$B.warn(_b_.DeprecationWarning,`path should be string, bytes, or os.PathLike, `+`not ${$B.class_name(filename)}`)}if(interactive&&!$.source.endsWith("\n")){var lines=$.source.split("\n");if($B.last(lines).startsWith(" ")){throw _b_.SyntaxError.$factory("unexpected EOF while parsing")}}if($.source.__class__&&$.source.__class__.__module__=="ast"){$B.imported._ast._validate($.source);$._ast=$.source;delete $.source;return $}var _ast,parser;if($B.parser_to_ast){try{var parser_mode=$.mode=="eval"?"eval":"file";parser=new 
$B.Parser($.source,filename,parser_mode);_ast=parser.parse()}catch(err){if($.mode=="single"){try{parser.tokens.next}catch(err2){var tokens=parser.tokens,tester=tokens[tokens.length-2];if(tester.type=="NEWLINE"&&$.flags&16384||tester.type=="DEDENT"&&$.flags&512){err.__class__=_b_.SyntaxError;err.args[0]="incomplete input"}}}throw err}if($.mode=="single"&&_ast.body.length==1&&_ast.body[0]instanceof $B.ast.Expr){parser=new $B.Parser($.source,filename,"eval");_ast=parser.parse();$.single_expression=true}if($.flags==$B.PyCF_ONLY_AST){delete $B.url2name[filename];let res=$B.ast_js_to_py(_ast);res.$js_ast=_ast;return res}}else{var root=$B.parser.create_root_node({src:$.source,filename:filename},module_name,module_name);root.mode=$.mode;root.parent_block=$B.builtins_scope;try{$B.parser.dispatch_tokens(root,$.source);_ast=root.ast()}catch(err){if($.mode=="single"&&root.token_reader.read()===undefined){let tokens=root.token_reader.tokens,tester=tokens[tokens.length-2];if(tester.type=="NEWLINE"&&$.flags&16384||tester.type=="DEDENT"&&$.flags&512){err.__class__=_b_.SyntaxError;err.args[0]="incomplete input"}}throw err}if($.mode=="single"&&_ast.body.length==1&&_ast.body[0]instanceof $B.ast.Expr){root=$B.parser.create_root_node({src:$.source,filename:filename},module_name,module_name);root.mode="eval";$.single_expression=true;root.parent_block=$B.builtins_scope;$B.parser.dispatch_tokens(root,$.source);_ast=root.ast()}delete $B.url2name[filename];if($.flags==$B.PyCF_ONLY_AST){$B.create_python_ast_classes();let res=$B.ast_js_to_py(_ast);res.$js_ast=_ast;return res}}delete $B.url2name[filename];$._ast=$B.ast_js_to_py(_ast);$._ast.$js_ast=_ast;return $};_b_.debug=$B.debug>0;_b_.delattr=function(obj,attr){check_nb_args_no_kw("delattr",2,arguments);if(typeof attr!="string"){throw _b_.TypeError.$factory("attribute name must be string, not '"+$B.class_name(attr)+"'")}return $B.$getattr(obj,"__delattr__")(attr)};$B.$delete=function(name,is_global){function del(obj){if(obj.__class__===$B.generator){obj.js_gen.return()}}var found=false,frame=$B.frame_obj.frame;if(!is_global){if(frame[1][name]!==undefined){found=true;del(frame[1][name]);delete frame[1][name]}}else{if(frame[2]!=frame[0]&&frame[3][name]!==undefined){found=true;del(frame[3][name]);delete frame[3][name]}}if(!found){throw $B.name_error(name)}};_b_.dir=function(obj){if(obj===undefined){var locals=_b_.locals();return _b_.sorted(locals)}check_nb_args_no_kw("dir",1,arguments);var klass=obj.__class__||$B.get_class(obj);if(obj.$is_class){var dir_func=$B.$getattr(obj.__class__,"__dir__");return $B.$call(dir_func)(obj)}try{let res=$B.$call($B.$getattr(klass,"__dir__"))(obj);res=_b_.list.$factory(res);return res}catch(err){if($B.get_option("debug")>2){console.log("error in dir, obj",obj,"klass",klass,$B.$getattr(klass,"__dir__"),err.message)}throw err}};_b_.divmod=function(x,y){check_nb_args_no_kw("divmod",2,arguments);try{return $B.rich_op("__divmod__",x,y)}catch(err){if($B.is_exc(err,[_b_.TypeError])){return _b_.tuple.$factory([$B.rich_op("__floordiv__",x,y),$B.rich_op("__mod__",x,y)])}throw err}};var enumerate=_b_.enumerate=$B.make_class("enumerate",(function(){var $ns=$B.args("enumerate",2,{iterable:null,start:null},["iterable","start"],arguments,{start:0},null,null),_iter=iter($ns["iterable"]),start=$ns["start"];return{__class__:enumerate,__name__:"enumerate iterator",counter:start-1,iter:_iter,start:start}}));enumerate.__iter__=function(self){self.counter=self.start-1;return self};enumerate.__next__=function(self){self.counter++;return 
$B.fast_tuple([self.counter,next(self.iter)])};$B.set_func_names(enumerate,"builtins");var $$eval=_b_.eval=function(){var $=$B.args("eval",4,{src:null,globals:null,locals:null,mode:null},["src","globals","locals","mode"],arguments,{globals:_b_.None,locals:_b_.None,mode:"eval"},null,null,4),src=$.src,_globals=$.globals,_locals=$.locals,mode=$.mode;if($.src.mode&&$.src.mode=="single"&&["",""].indexOf($.src.filename)>-1){_b_.print(">",$.src.source.trim())}var filename="";if(src.__class__===code){filename=src.filename}else if(!src.valueOf||typeof src.valueOf()!=="string"){throw _b_.TypeError.$factory(`${mode}() arg 1 must be a string,`+" bytes or code object")}else{src=src.valueOf()}var __name__="exec";if(_globals!==_b_.None&&_globals.__class__==_b_.dict&&_b_.dict.$contains_string(_globals,"__name__")){__name__=_b_.dict.$getitem_string(_globals,"__name__")}$B.url2name[filename]=__name__;var frame=$B.frame_obj.frame;$B.exec_scope=$B.exec_scope||{};if(typeof src=="string"&&src.endsWith("\\\n")){var exc=_b_.SyntaxError.$factory("unexpected EOF while parsing");var lines=src.split("\n"),line=lines[lines.length-2];exc.args=["unexpected EOF while parsing",[filename,lines.length-1,1,line]];exc.filename=filename;exc.text=line;throw exc}var local_name="locals_"+__name__,global_name="globals_"+__name__,exec_locals={},exec_globals={};if(_globals===_b_.None){if(frame[1]===frame[3]){global_name+="_globals";exec_locals=exec_globals=frame[3]}else{if(mode=="exec"){exec_locals=$B.clone(frame[1]);for(var attr in frame[3]){exec_locals[attr]=frame[3][attr]}exec_globals=exec_locals}else{exec_locals=frame[1];exec_globals=frame[3]}}}else{if(_globals.__class__!==_b_.dict){throw _b_.TypeError.$factory(`${mode}() globals must be `+"a dict, not "+$B.class_name(_globals))}exec_globals={};if(_globals.$jsobj){exec_globals=_globals.$jsobj}else{exec_globals=_globals.$jsobj={};for(var key of _b_.dict.$keys_string(_globals)){_globals.$jsobj[key]=_b_.dict.$getitem_string(_globals,key);if(key=="__name__"){__name__=_globals.$jsobj[key]}}_globals.$all_str=false}if(exec_globals.__builtins__===undefined){exec_globals.__builtins__=_b_.__builtins__}if(_locals===_b_.None){exec_locals=exec_globals}else{if(_locals===_globals){global_name+="_globals";exec_locals=exec_globals}else if(_locals.$jsobj){for(let key in _locals.$jsobj){exec_globals[key]=_locals.$jsobj[key]}}else{if(_locals.$jsobj){exec_locals=_locals.$jsobj}else{var klass=$B.get_class(_locals),getitem=$B.$call($B.$getattr(klass,"__getitem__")),setitem=$B.$call($B.$getattr(klass,"__setitem__"));exec_locals=new Proxy(_locals,{get(target,prop){if(prop=="$target"){return target}try{return getitem(target,prop)}catch(err){return undefined}},set(target,prop,value){return setitem(target,prop,value)}})}}}}var save_frame_obj=$B.frame_obj;var _ast;frame=[__name__,exec_locals,__name__,exec_globals];frame.is_exec_top=true;frame.__file__=filename;frame.$f_trace=$B.enter_frame(frame);var _frame_obj=$B.frame_obj;frame.$lineno=1;if(src.__class__===code){_ast=src._ast;if(_ast.$js_ast){_ast=_ast.$js_ast}else{_ast=$B.ast_py_to_js(_ast)}}try{if($B.parser_to_ast){if(!_ast){var _mode=mode=="eval"?"eval":"file";_ast=new $B.Parser(src,filename,_mode).parse()}}else{if(!_ast){var root=$B.parser.create_root_node(src,"",frame[0],frame[2],1);root.mode=mode;root.filename=filename;$B.parser.dispatch_tokens(root);_ast=root.ast()}}var 
future=$B.future_features(_ast,filename),symtable=$B._PySymtable_Build(_ast,filename,future),js_obj=$B.js_from_root({ast:_ast,symtable:symtable,filename:filename,namespaces:{local_name:local_name,exec_locals:exec_locals,global_name:global_name,exec_globals:exec_globals}}),js=js_obj.js}catch(err){if(err.args){if(err.args[1]){exec_locals.$lineno=err.args[1][1]}}else{console.log("JS Error",err.message)}$B.frame_obj=save_frame_obj;throw err}if(mode=="eval"){js=`var __file__ = '${filename}'\n`+`var locals = ${local_name}\nreturn ${js}`}else if(src.single_expression){js=`var result = ${js}\n`+`if(result !== _b_.None){\n`+`_b_.print(result)\n`+`}`}try{var exec_func=new Function("$B","_b_",local_name,global_name,"frame","_frame_obj",js)}catch(err){if($B.get_option("debug")>1){console.log("eval() error\n",$B.format_indent(js,0));console.log("-- python source\n",src)}throw err}try{var res=exec_func($B,_b_,exec_locals,exec_globals,frame,_frame_obj)}catch(err){if($B.get_option("debug")>2){console.log("Python code\n",src,"\nexec func",$B.format_indent(exec_func+"",0),"\n filename",filename,"\n name from filename",$B.url2name[filename],"\n local_name",local_name,"\n exec_locals",exec_locals,"\n global_name",global_name,"\n exec_globals",exec_globals,"\n frame",frame,"\n _ast",_ast,"\n js",js)}$B.frame_obj=save_frame_obj;throw err}if(_globals!==_b_.None&&!_globals.$jsobj){for(var _key in exec_globals){if(!_key.startsWith("$")){_b_.dict.$setitem(_globals,_key,exec_globals[_key])}}}$B.frame_obj=save_frame_obj;return res};$$eval.$is_func=true;var exec=_b_.exec=function(){var $=$B.args("exec",3,{src:null,globals:null,locals:null},["src","globals","locals"],arguments,{globals:_b_.None,locals:_b_.None},null,null,3),src=$.src,globals=$.globals,locals=$.locals;$$eval(src,globals,locals,"exec");return _b_.None};exec.$is_func=true;var exit=_b_.exit=function(){throw _b_.SystemExit};exit.__repr__=exit.__str__=function(){return"Use exit() or Ctrl-Z plus Return to exit"};var filter=_b_.filter=$B.make_class("filter",(function(func,iterable){check_nb_args_no_kw("filter",2,arguments);iterable=iter(iterable);if(func===_b_.None){func=$B.$bool}return{__class__:filter,func:func,iterable:iterable}}));filter.__iter__=function(self){return self};filter.__next__=function(self){while(true){var _item=next(self.iterable);if(self.func(_item)){return _item}}};$B.set_func_names(filter,"builtins");_b_.format=function(){var $=$B.args("format",2,{value:null,format_spec:null},["value","format_spec"],arguments,{format_spec:""},null,null),value=$.value;var klass=value.__class__||$B.get_class(value);try{var method=$B.$getattr(klass,"__format__")}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.NotImplementedError("__format__ is not implemented "+"for object '"+_b_.str.$factory(value)+"'")}throw err}return $B.$call(method)(value,$.format_spec)};function attr_error(attr,obj){var cname=$B.get_class(obj);var msg="bad operand type for unary #: '"+cname+"'";switch(attr){case"__neg__":throw _b_.TypeError.$factory(msg.replace("#","-"));case"__pos__":throw _b_.TypeError.$factory(msg.replace("#","+"));case"__invert__":throw _b_.TypeError.$factory(msg.replace("#","~"));case"__call__":throw _b_.TypeError.$factory("'"+cname+"'"+" object is not callable");default:throw $B.attr_error(attr,obj)}}_b_.getattr=function(){var missing={};var $=$B.args("getattr",3,{obj:null,attr:null,_default:null},["obj","attr","_default"],arguments,{_default:missing},null,null);if(!$B.$isinstance($.attr,_b_.str)){throw _b_.TypeError.$factory("attribute name must be 
string, "+`not '${$B.class_name($.attr)}'`)}return $B.$getattr($.obj,_b_.str.$to_string($.attr),$._default===missing?undefined:$._default)};function in_mro(klass,attr){if(klass===undefined){return false}if(klass.hasOwnProperty(attr)){return klass[attr]}var mro=klass.__mro__;for(var i=0,len=mro.length;i-1}))}break;case"__mro__":if(obj.__mro__){return _b_.tuple.$factory([obj].concat(obj.__mro__))}else if(obj.__dict__&&_b_.dict.$contains_string(obj.__dict__,"__mro__")){return _b_.dict.$getitem_string(obj.__dict__,"__mro__")}throw $B.attr_error(attr,obj);case"__subclasses__":if(klass.$factory||klass.$is_class){var subclasses=obj.$subclasses||[];return function(){return subclasses}}break}if(typeof obj=="function"){var value=obj[attr];if(value!==undefined){if(attr=="__module__"){return value}}}if(!is_class&&klass.$native){if(obj.$method_cache&&obj.$method_cache[attr]){return obj.$method_cache[attr]}if($test){console.log("native class",klass,klass[attr])}if(attr=="__doc__"&&klass[attr]===undefined){_get_builtins_doc();klass[attr]=$B.builtins_doc[klass.__name__]}if(klass[attr]===undefined){var object_attr=_b_.object[attr];if($test){console.log("object attr",object_attr)}if(object_attr!==undefined){klass[attr]=object_attr}else{if($test){console.log("obj[attr]",obj[attr])}var attrs=obj.__dict__;if(attrs&&_b_.dict.$contains_string(attrs,attr)){return _b_.dict.$getitem_string(attrs,attr)}if(_default===undefined){throw $B.attr_error(attr,obj)}return _default}}if(klass.$descriptors&&klass.$descriptors[attr]!==undefined){return klass[attr](obj)}if(typeof klass[attr]=="function"){var func=klass[attr];if(attr=="__new__"){func.$type="staticmethod"}if(func.$type=="staticmethod"){return func}var self=klass[attr].__class__==$B.method?klass:obj,method=klass[attr].bind(null,self);method.__class__=$B.method;method.$infos={__func__:func,__name__:attr,__self__:self,__qualname__:klass.__qualname__+"."+attr};if(typeof obj=="object"){obj.__class__=klass;obj.$method_cache=obj.$method_cache||{};if(obj.$method_cache){obj.$method_cache[attr]=method}}return method}else if(klass[attr].__class__===_b_.classmethod){return _b_.classmethod.__get__(klass[attr],obj,klass)}else if(klass[attr]!==undefined){return klass[attr]}attr_error(rawname,klass)}var attr_func;if(is_class){if($test){console.log("obj is class",obj);console.log("is a type ?",_b_.isinstance(klass,_b_.type));console.log("is type",klass===_b_.type)}if(klass===_b_.type){attr_func=_b_.type.__getattribute__}else{attr_func=$B.$call($B.$getattr(klass,"__getattribute__"))}if($test){console.log("attr func",attr_func)}}else{attr_func=klass.__getattribute__;if(attr_func===undefined){for(var cls of klass.__mro__){attr_func=cls["__getattribute__"];if(attr_func!==undefined){break}}}}if(typeof attr_func!=="function"){console.log(attr+" is not a function "+attr_func,klass)}var odga=_b_.object.__getattribute__;if($test){console.log("attr_func is odga ?",attr_func,attr_func===odga,"\n","\nobj[attr]",obj[attr])}if(attr_func===odga){res=obj[attr];if(Array.isArray(obj)&&Array.prototype[attr]!==undefined){res=undefined}else if(res===null){return null}else if(res!==undefined){if($test){console.log(obj,attr,obj[attr],res.__set__||res.$is_class)}if(res.$is_property){return _b_.property.__get__(res)}if(res.__set__===undefined||res.$is_class){if($test){console.log("return",res,res+"",res.__set__,res.$is_class)}return res}}}var getattr;try{res=attr_func(obj,attr);if($test){console.log("result of attr_func",res)}}catch(err){if($test){console.log("attr_func raised 
error",err.args,err.name)}if(klass===$B.module){getattr=obj.__getattr__;if($test){console.log("use module getattr",getattr);console.log(getattr+"")}if(getattr){try{return getattr(attr)}catch(err){if($test){console.log("encore erreur",err)}if(_default!==undefined){return _default}throw err}}}getattr=in_mro(klass,"__getattr__");if($test){console.log("try getattr",getattr)}if(getattr){if($test){console.log("try with getattr",getattr)}try{return getattr(obj,attr)}catch(err){if(_default!==undefined){return _default}throw err}}if(_default!==undefined){return _default}throw err}if(res!==undefined){return res}if(_default!==undefined){return _default}attr_error(rawname,is_class?obj:klass)};_b_.globals=function(){check_nb_args_no_kw("globals",0,arguments);var res=$B.obj_dict($B.frame_obj.frame[3]);res.$jsobj.__BRYTHON__=$B.JSObj.$factory($B);res.$is_namespace=true;return res};_b_.hasattr=function(obj,attr){check_nb_args_no_kw("hasattr",2,arguments);try{$B.$getattr(obj,attr);return true}catch(err){return false}};_b_.hash=function(obj){check_nb_args_no_kw("hash",1,arguments);return $B.$hash(obj)};$B.$hash=function(obj){if(obj.__hashvalue__!==undefined){return obj.__hashvalue__}if(typeof obj==="boolean"){return obj?1:0}if(obj.$is_class||obj.__class__===_b_.type||obj.__class__===$B.function){return obj.__hashvalue__=$B.$py_next_hash--}if(typeof obj=="string"){return _b_.str.__hash__(obj)}else if(typeof obj=="number"){return obj}else if(typeof obj=="boolean"){return obj?1:0}else if(obj.__class__===_b_.float){return _b_.float.$hash_func(obj)}var klass=obj.__class__||$B.get_class(obj);if(klass===undefined){throw _b_.TypeError.$factory("unhashable type: '"+_b_.str.$factory($B.JSObj.$factory(obj))+"'")}var hash_method=_b_.type.__getattribute__(klass,"__hash__",_b_.None);if(hash_method===_b_.None){throw _b_.TypeError.$factory("unhashable type: '"+$B.class_name(obj)+"'")}if(hash_method.$infos.__func__===_b_.object.__hash__){if(_b_.type.__getattribute__(klass,"__eq__")!==_b_.object.__eq__){throw _b_.TypeError.$factory("unhashable type: '"+$B.class_name(obj)+"'","hash")}else{return obj.__hashvalue__=_b_.object.__hash__(obj)}}else{return $B.$call(hash_method)(obj)}};function _get_builtins_doc(){if($B.builtins_doc===undefined){var url=$B.brython_path;if(url.charAt(url.length-1)=="/"){url=url.substr(0,url.length-1)}url+="/builtins_docstrings.js";var f=_b_.open(url);eval(f.$content);for(var key in docs){if(_b_[key]){_b_[key].__doc__=docs[key]}}$B.builtins_doc=docs}}var help=_b_.help=function(obj){if(obj===undefined){obj="help"}if(typeof obj=="string"){var lib_url="https://docs.python.org/3/library";var parts=obj.split("."),head=[],url;while(parts.length>0){head.push(parts.shift());if($B.stdlib[head.join(".")]){url=head.join(".")}else{break}}if(url){var doc_url;if(["browser","javascript","interpreter"].indexOf(obj.split(".")[0])>-1){doc_url="/static_doc/"+($B.language=="fr"?"fr":"en")}else{doc_url=lib_url}window.open(`${doc_url}/${url}.html#`+obj);return}if(_b_[obj]){if(obj==obj.toLowerCase()){url=lib_url+`/functions.html#${obj}`}else if(["False","True","None","NotImplemented","Ellipsis","__debug__"].indexOf(obj)>-1){url=lib_url+`/constants.html#${obj}`}else if(_b_[obj].$is_class&&_b_[obj].__bases__.indexOf(_b_.Exception)>-1){url=lib_url+`/exceptions.html#${obj}`}if(url){window.open(url);return}}$B.$import("pydoc");return $B.$call($B.$getattr($B.imported.pydoc,"help"))(obj)}if(obj.__class__===$B.module){return 
help(obj.__name__)}try{_b_.print($B.$getattr(obj,"__doc__"))}catch(err){return""}};help.__repr__=help.__str__=function(){return"Type help() for interactive help, or help(object) "+"for help about object."};_b_.hex=function(obj){check_nb_args_no_kw("hex",1,arguments);return bin_hex_oct(16,obj)};_b_.id=function(obj){check_nb_args_no_kw("id",1,arguments);if(obj.$id!==undefined){return obj.$id}else if($B.$isinstance(obj,[_b_.str,_b_.int,_b_.float])&&!$B.$isinstance(obj,$B.long_int)){return $B.$getattr(_b_.str.$factory(obj),"__hash__")()}else{return obj.$id=$B.UUID()}};_b_.__import__=function(){var $=$B.args("__import__",5,{name:null,globals:null,locals:null,fromlist:null,level:null},["name","globals","locals","fromlist","level"],arguments,{globals:None,locals:None,fromlist:_b_.tuple.$factory(),level:0},null,null);return $B.$__import__($.name,$.globals,$.locals,$.fromlist)};_b_.input=function(msg){var res=prompt(msg||"")||"";if($B.imported["sys"]&&$B.imported["sys"].ps1){var ps1=$B.imported["sys"].ps1,ps2=$B.imported["sys"].ps2;if(msg==ps1||msg==ps2){console.log(msg,res)}}return res};_b_.isinstance=function(obj,cls){check_nb_args_no_kw("isinstance",2,arguments);return $B.$isinstance(obj,cls)};$B.$isinstance=function(obj,cls){if(obj===null){return cls===$B.imported.javascript.NullType}if(obj===undefined){return false}var kls;if(Array.isArray(cls)){for(kls of cls){if($B.$isinstance(obj,kls)){return true}}return false}if(cls.__class__===$B.UnionType){for(kls of cls.items){if($B.$isinstance(obj,kls)){return true}}return false}if(cls.__class__===$B.GenericAlias){throw _b_.TypeError.$factory("isinstance() arg 2 cannot be a parameterized generic")}if(!cls.__class__&&!cls.$is_class){if(!$B.$getattr(cls,"__instancecheck__",false)){throw _b_.TypeError.$factory("isinstance() arg 2 must be a type "+"or tuple of types")}}if(cls===_b_.int&&(obj===True||obj===False)){return True}if(cls===_b_.bool){switch(typeof obj){case"string":return false;case"number":return false;case"boolean":return true}}var klass=obj.__class__;if(klass==undefined){if(typeof obj=="string"){if(cls==_b_.str){return true}else if($B.builtin_classes.indexOf(cls)>-1){return false}}else if(typeof obj=="number"&&Number.isFinite(obj)){if(Number.isFinite(obj)&&cls==_b_.int){return true}}klass=$B.get_class(obj)}if(klass===undefined){return false}if(klass===cls){return true}var mro=klass.__mro__;for(var i=0;i-1){return true}var sch=$B.$getattr(classinfo.__class__||$B.get_class(classinfo),"__subclasscheck__",_b_.None);if(sch==_b_.None){return false}return sch(classinfo,klass)};var iterator_class=$B.make_class("iterator",(function(getitem,len){return{__class__:iterator_class,getitem:getitem,len:len,counter:-1}}));iterator_class.__next__=function(self){self.counter++;if(self.len!==null&&self.counter==self.len){throw _b_.StopIteration.$factory("")}try{return self.getitem(self.counter)}catch(err){throw _b_.StopIteration.$factory("")}};$B.set_func_names(iterator_class,"builtins");const callable_iterator=$B.make_class("callable_iterator",(function(func,sentinel){return{__class__:callable_iterator,func:func,sentinel:sentinel}}));callable_iterator.__iter__=function(self){return self};callable_iterator.__next__=function(self){var res=self.func();if($B.rich_comp("__eq__",res,self.sentinel)){throw _b_.StopIteration.$factory()}return res};$B.set_func_names(callable_iterator,"builtins");$B.$iter=function(obj,sentinel){if(sentinel===undefined){var klass=obj.__class__||$B.get_class(obj);try{var 
_iter=$B.$call($B.$getattr(klass,"__iter__"))}catch(err){if(err.__class__===_b_.AttributeError){try{var gi_method=$B.$call($B.$getattr(klass,"__getitem__")),gi=function(i){return gi_method(obj,i)},len;try{len=len(obj)}catch(err){throw _b_.TypeError.$factory("'"+$B.class_name(obj)+"' object is not iterable")}return iterator_class.$factory(gi,len)}catch(err){throw _b_.TypeError.$factory("'"+$B.class_name(obj)+"' object is not iterable")}}throw err}var res=$B.$call(_iter)(obj);try{$B.$getattr(res,"__next__")}catch(err){if($B.$isinstance(err,_b_.AttributeError)){throw _b_.TypeError.$factory("iter() returned non-iterator of type '"+$B.class_name(res)+"'")}}return res}else{return callable_iterator.$factory(obj,sentinel)}};var iter=_b_.iter=function(){var $=$B.args("iter",1,{obj:null},["obj"],arguments,{},"args","kw"),sentinel;if($.args.length>0){sentinel=$.args[0]}return $B.$iter($.obj,sentinel)};var len=_b_.len=function(obj){check_nb_args_no_kw("len",1,arguments);var klass=obj.__class__||$B.get_class(obj);try{var method=$B.$getattr(klass,"__len__")}catch(err){throw _b_.TypeError.$factory("object of type '"+$B.class_name(obj)+"' has no len()")}return $B.$call(method)(obj)};_b_.locals=function(){check_nb_args("locals",0,arguments);var locals_obj=$B.frame_obj.frame[1];var class_locals=locals_obj.$target;if(class_locals){return class_locals}var res=$B.obj_dict($B.clone(locals_obj),(function(key){return key.startsWith("$")}));res.$is_namespace=true;return res};var map=_b_.map=$B.make_class("map",(function(){var $=$B.args("map",2,{func:null,it1:null},["func","it1"],arguments,{},"args",null),func=$B.$call($.func);var iter_args=[$B.make_js_iterator($.it1)];for(var arg of $.args){iter_args.push($B.make_js_iterator(arg))}var obj={__class__:map,args:iter_args,func:func};obj[Symbol.iterator]=function(){this.iters=[];for(var arg of this.args){this.iters.push(arg[Symbol.iterator]())}return this};obj.next=function(){var args=[];for(var iter of this.iters){var arg=iter.next();if(arg.done){return{done:true,value:null}}args.push(arg.value)}return{done:false,value:this.func.apply(null,args)}};return obj}));map.__iter__=function(self){self[Symbol.iterator]();return self};map.__next__=function(self){var args=[];for(var iter of self.iters){var arg=iter.next();if(arg.done){throw _b_.StopIteration.$factory("")}args.push(arg.value)}return self.func.apply(null,args)};$B.set_func_names(map,"builtins");function $extreme(args,op){var $op_name="min";if(op==="__gt__"){$op_name="max"}var $=$B.args($op_name,0,{},[],args,{},"args","kw");var has_default=false,func=false;for(var attr in $.kw.$jsobj){switch(attr){case"key":func=$.kw.$jsobj[attr];func=func===_b_.None?func:$B.$call(func);break;case"default":var default_value=$.kw.$jsobj[attr];has_default=true;break;default:throw _b_.TypeError.$factory("'"+attr+"' is an invalid keyword argument for this function")}}if(!func||func===_b_.None){func=x=>x}if($.args.length==0){throw _b_.TypeError.$factory($op_name+" expected 1 arguments, got 0")}else if($.args.length==1){var $iter=$B.make_js_iterator($.args[0]),res=null,x_value,extr_value;for(var x of $iter){if(res===null){extr_value=func(x);res=x}else{x_value=func(x);if($B.rich_comp(op,x_value,extr_value)){res=x;extr_value=x_value}}}if(res===null){if(has_default){return default_value}else{throw _b_.ValueError.$factory($op_name+"() arg is an empty sequence")}}else{return res}}else{if(has_default){throw _b_.TypeError.$factory("Cannot specify a default for "+$op_name+"() with multiple positional arguments")}var 
_args;if($B.last(args).$kw){_args=[$.args].concat($B.last(args))}else{_args=[$.args]}return $extreme.call(null,_args,op)}}_b_.max=function(){return $extreme(arguments,"__gt__")};var memoryview=_b_.memoryview=$B.make_class("memoryview",(function(obj){check_nb_args_no_kw("memoryview",1,arguments);if(obj.__class__===memoryview){return obj}if($B.get_class(obj).$buffer_protocol){return{__class__:memoryview,obj:obj,format:"B",itemsize:1,ndim:1,shape:_b_.tuple.$factory([_b_.len(obj)]),strides:_b_.tuple.$factory([1]),suboffsets:_b_.tuple.$factory([]),c_contiguous:true,f_contiguous:true,contiguous:true}}else{throw _b_.TypeError.$factory("memoryview: a bytes-like object "+"is required, not '"+$B.class_name(obj)+"'")}}));memoryview.$match_sequence_pattern=true,memoryview.$buffer_protocol=true;memoryview.$not_basetype=true;memoryview.__eq__=function(self,other){if(other.__class__!==memoryview){return false}return $B.$getattr(self.obj,"__eq__")(other.obj)};memoryview.__getitem__=function(self,key){var res;if($B.$isinstance(key,_b_.int)){var start=key*self.itemsize;if(self.format=="I"){res=self.obj.source[start];var coef=256;for(var i=1;i<4;i++){res+=self.obj.source[start+i]*coef;coef*=256}return res}else if("B".indexOf(self.format)>-1){if(key>self.obj.source.length-1){throw _b_.KeyError.$factory(key)}return self.obj.source[key]}else{return self.obj.source[key]}}res=self.obj.__class__.__getitem__(self.obj,key);if(key.__class__===_b_.slice){return memoryview.$factory(res)}};memoryview.__len__=function(self){return len(self.obj)/self.itemsize};memoryview.__setitem__=function(self,key,value){try{$B.$setitem(self.obj,key,value)}catch(err){throw _b_.TypeError.$factory("cannot modify read-only memory")}};memoryview.cast=function(self,format){switch(format){case"B":return memoryview.$factory(self.obj);case"I":var res=memoryview.$factory(self.obj),objlen=len(self.obj);res.itemsize=4;res.format="I";if(objlen%4!=0){throw _b_.TypeError.$factory("memoryview: length is not "+"a multiple of itemsize")}return res}};memoryview.hex=function(self){var res="",bytes=_b_.bytes.$factory(self);bytes.source.forEach((function(item){res+=item.toString(16)}));return res};memoryview.tobytes=function(self){return{__class__:_b_.bytes,source:self.obj.source}};memoryview.tolist=function(self){if(self.itemsize==1){return _b_.list.$factory(_b_.bytes.$factory(self.obj))}else if(self.itemsize==4){if(self.format=="I"){var res=[];for(var i=0;i=65536&&code<=131071||code>=131072&&code<=196607||code>=196608&&code<=262143||code>=851968&&code<=917503||code>=917504&&code<=1048575){return code}}throw _b_.TypeError.$factory("ord() expected a character, but "+"string of length "+c.length+" found")}switch($B.get_class(c)){case _b_.str:if(c.length==1){return c.charCodeAt(0)}throw _b_.TypeError.$factory("ord() expected a character, but "+"string of length "+c.length+" found");case _b_.bytes:case _b_.bytearray:if(c.source.length==1){return c.source[0]}throw _b_.TypeError.$factory("ord() expected a character, but "+"string of length "+c.source.length+" found");default:throw _b_.TypeError.$factory("ord() expected a character, but "+$B.class_name(c)+" was found")}};var complex_modulo=()=>_b_.ValueError.$factory("complex modulo");var all_ints=()=>_b_.TypeError.$factory("pow() 3rd argument not "+"allowed unless all arguments are integers");_b_.pow=function(){var $=$B.args("pow",3,{x:null,y:null,mod:null},["x","y","mod"],arguments,{mod:None},null,null),x=$.x,y=$.y,z=$.mod;if(z===_b_.None){return 
$B.rich_op("__pow__",x,y)}else{if($B.$isinstance(x,_b_.int)){if($B.$isinstance(y,_b_.float)){throw all_ints()}else if($B.$isinstance(y,_b_.complex)){throw complex_modulo()}else if($B.$isinstance(y,_b_.int)){if($B.$isinstance(z,_b_.complex)){throw complex_modulo()}else if(!$B.$isinstance(z,_b_.int)){throw all_ints()}}return _b_.int.__pow__(x,y,z)}else if($B.$isinstance(x,_b_.float)){throw all_ints()}else if($B.$isinstance(x,_b_.complex)){throw complex_modulo()}}};var $print=_b_.print=function(){var $ns=$B.args("print",0,{},[],arguments,{},"args","kw");var kw=$ns["kw"],end=$B.is_none(kw.$jsobj.end)?"\n":kw.$jsobj.end,sep=$B.is_none(kw.$jsobj.sep)?" ":kw.$jsobj.sep,file=$B.is_none(kw.$jsobj.file)?$B.get_stdout():kw.$jsobj.file;var args=$ns["args"],writer=$B.$getattr(file,"write");for(var i=0,len=args.length;i-1){has_slot=true;break}}}if(!has_slot){throw $B.attr_error(attr,klass)}}}if($test){console.log("attr",attr,"use _setattr",_setattr)}if(!_setattr){if(obj.__dict__===undefined){obj[attr]=value}else{_b_.dict.$setitem(obj.__dict__,attr,value)}if($test){console.log("no setattr, obj",obj)}}else{if($test){console.log("apply _setattr",obj,attr)}_setattr(obj,attr,value)}return None};_b_.sorted=function(){var $=$B.args("sorted",1,{iterable:null},["iterable"],arguments,{},null,"kw");var _list=_b_.list.$factory($.iterable),args=[_list].concat(Array.from(arguments).slice(1));_b_.list.sort.apply(null,args);return _list};_b_.sum=function(){var $=$B.args("sum",2,{iterable:null,start:null},["iterable","start"],arguments,{start:0},null,null),iterable=$.iterable,start=$.start;if($B.$isinstance(start,[_b_.str,_b_.bytes])){throw _b_.TypeError.$factory("sum() can't sum bytes"+" [use b''.join(seq) instead]")}var res=start;iterable=iter(iterable);while(true){try{var _item=next(iterable);res=$B.rich_op("__add__",res,_item)}catch(err){if(err.__class__===_b_.StopIteration){break}else{throw err}}}return res};$B.missing_super2=function(obj){obj.$missing=true;return obj};var $$super=_b_.super=$B.make_class("super",(function(_type,object_or_type){var no_object_or_type=object_or_type===undefined;if(_type===undefined&&object_or_type===undefined){var frame=$B.frame_obj.frame,pyframe=$B.imported["_sys"]._getframe(),code=$B.frame.f_code.__get__(pyframe),co_varnames=code.co_varnames;if(co_varnames.length>0){_type=frame[1].__class__;if(_type===undefined){throw _b_.RuntimeError.$factory("super(): no arguments")}object_or_type=frame[1][code.co_varnames[0]]}else{throw _b_.RuntimeError.$factory("super(): no arguments")}}if(!no_object_or_type&&Array.isArray(object_or_type)){object_or_type=object_or_type[0]}var $arg2;if(object_or_type!==undefined){if(object_or_type===_type||object_or_type.$is_class&&_b_.issubclass(object_or_type,_type)){$arg2="type"}else if($B.$isinstance(object_or_type,_type)){$arg2="object"}else{throw _b_.TypeError.$factory("super(type, obj): obj must be an instance "+"or subtype of type")}}return{__class__:$$super,__thisclass__:_type,__self_class__:object_or_type,$arg2:$arg2}}));$$super.__get__=function(self,instance){return $$super.$factory(self.__thisclass__,instance)};$$super.__getattribute__=function(self,attr){if(self.__thisclass__.$is_js_class){if(attr=="__init__"){return function(){mro[0].$js_func.call(self.__self_class__,...arguments)}}}var object_or_type=self.__self_class__,mro=self.$arg2=="type"?object_or_type.__mro__:$B.get_class(object_or_type).__mro__;var search_start=mro.indexOf(self.__thisclass__)+1,search_classes=mro.slice(search_start);var $test=attr=="new";if($test){console.log("super.__ga__, 
self",self,"search classes",search_classes)}var f;for(var klass of search_classes){if(klass===undefined){console.log("klass undef in super",self);console.log("mro",mro)}if(klass[attr]!==undefined){f=klass[attr];break}}if(f===undefined){if($$super[attr]!==undefined){return function(x){return function(){var args=[x];for(var i=0,len=arguments.length;i";if(self.__self_class__!==undefined){res+=", <"+self.__self_class__.__class__.__name__+" object>"}else{res+=", NULL"}return res+">"};$B.set_func_names($$super,"builtins");_b_.vars=function(){var def={},$=$B.args("vars",1,{obj:null},["obj"],arguments,{obj:def},null,null);if($.obj===def){return _b_.locals()}else{try{return $B.$getattr($.obj,"__dict__")}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("vars() argument must have __dict__ attribute")}throw err}}};var $Reader=$B.make_class("Reader");$Reader.__bool__=function(){return true};$Reader.__enter__=function(self){return self};$Reader.__exit__=function(){return false};$Reader.__init__=function(_self,initial_value=""){_self.$content=initial_value;_self.$counter=0};$Reader.__iter__=function(self){return iter($Reader.readlines(self))};$Reader.__len__=function(self){return self.lines.length};$Reader.__new__=function(cls){return{__class__:cls}};$Reader.close=function(self){self.closed=true};$Reader.flush=function(){return None};$Reader.read=function(){var $=$B.args("read",2,{self:null,size:null},["self","size"],arguments,{size:-1},null,null),self=$.self,size=$B.$GetInt($.size);if(self.closed===true){throw _b_.ValueError.$factory("I/O operation on closed file")}if(size<0){size=self.$length-self.$counter}var res;if(self.$binary){res=_b_.bytes.$factory(self.$content.source.slice(self.$counter,self.$counter+size))}else{res=self.$content.substr(self.$counter,size)}self.$counter+=size;return res};$Reader.readable=function(){return true};function make_lines(self){if(self.$lines===undefined){if(!self.$binary){self.$lines=self.$content.split("\n");if($B.last(self.$lines)==""){self.$lines.pop()}self.$lines=self.$lines.map((x=>x+"\n"))}else{var lines=[],pos=0,source=self.$content.source;while(pos-1){rest=rest.slice(0,size)}self.$counter=self.$content.source.length;return _b_.bytes.$factory(rest)}else{var line_source=self.$content.source.slice(self.$counter,ix+1);if(size>-1){line_source=line_source.slice(0,size)}result={__class__:_b_.bytes,source:line_source};self.$counter=ix+1;return result}}else{if(self.$counter==self.$content.length){return""}ix=self.$content.indexOf("\n",self.$counter);if(ix==-1){rest=self.$content.substr(self.$counter);if(size>-1){rest=rest.substr(0,size)}self.$counter=self.$content.length;return rest}else{result=self.$content.substring(self.$counter,ix+1);if(size>-1){result=result.substr(0,size)}self.$counter=ix+1;self.$lc+=1;return result}}};$Reader.readlines=function(){var $=$B.args("readlines",2,{self:null,hint:null},["self","hint"],arguments,{hint:-1},null,null),self=$.self,hint=$B.$GetInt($.hint);var nb_read=0;if(self.closed===true){throw _b_.ValueError.$factory("I/O operation on closed file")}self.$lc=self.$lc===undefined?-1:self.$lc;make_lines(self);var lines;if(hint<0){lines=self.$lines.slice(self.$lc+1)}else{lines=[];while(self.$lc-1;if(mode.search("w")>-1){result={$binary:is_binary,$content:is_binary?_b_.bytes.$factory():"",$encoding:encoding,closed:False,mode:mode,name:file};result.__class__=is_binary?$BufferedReader:$TextIOWrapper;$B.file_cache[file]=result.$content;return result}else if(["r","rb"].indexOf(mode)==-1){throw 
_b_.ValueError.$factory("Invalid mode '"+mode+"'")}if($B.$isinstance(file,_b_.str)){if($B.file_cache.hasOwnProperty($.file)){var f=$B.file_cache[$.file];result.content=f;if(is_binary&&typeof f=="string"){result.content=_b_.str.encode(f,"utf-8")}else if(f.__class__===_b_.bytes&&!is_binary){result.content=_b_.bytes.decode(f,encoding)}}else if($B.files&&$B.files.hasOwnProperty($.file)){var $res=atob($B.files[$.file].content);var source=[];for(const char of $res){source.push(char.charCodeAt(0))}result.content=_b_.bytes.$factory(source);if(!is_binary){try{result.content=_b_.bytes.decode(result.content,encoding)}catch(error){result.error=error}}}else if($B.protocol!="file"){var req=new XMLHttpRequest;req.overrideMimeType("text/plain;charset=x-user-defined");req.onreadystatechange=function(){if(this.readyState!=4){return}var status=this.status;if(status==404){result.error=_b_.FileNotFoundError.$factory(file)}else if(status!=200){result.error=_b_.IOError.$factory("Could not open file "+file+" : status "+status)}else{var bytes=[];for(var i=0,len=this.response.length;i63232){cp-=63232}bytes.push(cp)}result.content=_b_.bytes.$factory(bytes);if(!is_binary){try{result.content=_b_.bytes.decode(result.content,encoding)}catch(error){result.error=error}}}};var cache=$B.get_option("cache"),fake_qs=cache?"":"?foo="+(new Date).getTime();req.open("GET",encodeURI(file+fake_qs),false);req.send()}else{throw _b_.FileNotFoundError.$factory("cannot use 'open()' with protocol 'file'")}if(result.error!==undefined){throw result.error}var res={$binary:is_binary,$content:result.content,$counter:0,$encoding:encoding,$length:is_binary?result.content.source.length:result.content.length,closed:False,mode:mode,name:file};res.__class__=is_binary?$BufferedReader:$TextIOWrapper;return res}else{throw _b_.TypeError.$factory("invalid argument for open(): "+_b_.str.$factory(file))}};var zip=_b_.zip=$B.make_class("zip",(function(){var res={__class__:zip,items:[]};if(arguments.length==0){return res}var $ns=$B.args("zip",0,{},[],arguments,{},"args","kw");var _args=$ns["args"],strict=$B.$bool($ns.kw.$jsobj.strict||false);var iters=[];for(var arg of _args){iters.push($B.make_js_iterator(arg))}return{__class__:zip,iters:iters,strict:strict}}));zip.__iter__=function(self){return self};zip.__next__=function(self){var res=[],len=self.iters.length;for(var i=0;i0){throw _b_.ValueError.$factory(`zip() argument ${i+1} is longer than argument ${i}`)}else{for(var j=1;j!x.startsWith("$")))};$B.function.__get__=function(self,obj){if(obj===_b_.None){return self}return $B.method.$factory(self,obj)};$B.function.__getattribute__=function(self,attr){if(self.$infos&&self.$infos[attr]!==undefined){if(attr=="__code__"){var res={__class__:code};for(var _attr in self.$infos.__code__){res[_attr]=self.$infos.__code__[_attr]}res.name=self.$infos.__name__;res.filename=self.$infos.__code__.co_filename;res.co_code=self+"";return res}else if(attr=="__annotations__"){return $B.obj_dict(self.$infos[attr])}else if(self.$infos.hasOwnProperty(attr)){return self.$infos[attr]}}else if(self.$infos&&self.$infos.__dict__&&_b_.dict.$contains_string(self.$infos.__dict__,attr)){return _b_.dict.$getitem_string(self.$infos.__dict__,attr)}else if(attr=="__closure__"){var free_vars=self.$infos.__code__.co_freevars;if(free_vars.length==0){return None}var cells=[];for(var 
i=0;i"}else{return""}};$B.function.__mro__=[_b_.object];$B.make_function_infos=function(f,__module__,__defaults__,__globals__,__kwdefaults__,__doc__,arg_names,vararg,kwarg,co_argcount,co_filename,co_firstlineno,co_flags,co_freevars,co_kwonlyargcount,co_name,co_nlocals,co_posonlyargcount,co_qualname,co_varnames){f.$is_func=true;f.$infos={__module__:__module__,__defaults__:__defaults__,__globals__:__globals__,__kwdefaults__:__kwdefaults__,__doc__:__doc__,arg_names:arg_names,vararg:vararg,kwarg:kwarg};f.$infos.__name__=co_name;f.$infos.__qualname__=co_qualname;f.$infos.__code__={co_argcount:co_argcount,co_filename:co_filename,co_firstlineno:co_firstlineno,co_flags:co_flags,co_freevars:co_freevars,co_kwonlyargcount:co_kwonlyargcount,co_name:co_name,co_nlocals:co_nlocals,co_posonlyargcount:co_posonlyargcount,co_qualname:co_qualname,co_varnames:co_varnames}};$B.make_function_defaults=function(f){if(f.$infos===undefined||f.$infos.__code__===undefined){throw _b_.AttributeError.$factory(`cannot set defauts to ${_b_.str.$factory(f)}`)}const varnames=f.$infos.__code__.co_varnames,value=f.$infos.__defaults__,offset=f.$infos.__code__.co_argcount-value.length,$kwdefaults=new Map;var nb_kw_defaults=f.$infos.__kwdefaults__===_b_.None?0:_b_.dict.__len__(f.$infos.__kwdefaults__);if(f.$infos.__kwdefaults__!==_b_.None){const kwdef=f.$infos.__kwdefaults__;for(let kw of $B.make_js_iterator(kwdef)){$kwdefaults.set(kw,$B.$getitem(kwdef,kw))}}f.$kwdefaults=$kwdefaults;f.$kwdefaults_values=[...$kwdefaults.values()];f.$hasParams=new Set;for(let i=f.$infos.__code__.co_posonlyargcount;i0){named_defaults=PARAMS_NAMED_DEFAULTS_COUNT>=PARAMS_NAMED_COUNT?DEFAULTS.ALL:DEFAULTS.SOME}const PARAMS_POSONLY_COUNT=$CODE.co_posonlyargcount;const PARAMS_POS_COUNT=$CODE.co_argcount-PARAMS_POSONLY_COUNT;let pos_defaults=DEFAULTS.NONE;if(PARAMS_POS_COUNT!==0&&value.length>0){pos_defaults=value.length>=PARAMS_POS_COUNT?DEFAULTS.ALL:DEFAULTS.SOME}let posonly_defaults=DEFAULTS.NONE;if(value.length>PARAMS_POS_COUNT){posonly_defaults=value.length>=$CODE.co_argcount?DEFAULTS.ALL:DEFAULTS.SOME}f.$args_parser=f.$infos.args_parser=$B.getArgs0(PARAMS_POSONLY_COUNT!==0,posonly_defaults,PARAMS_POS_COUNT!==0,pos_defaults,$INFOS.vararg!==null,PARAMS_NAMED_COUNT!==0,named_defaults,$INFOS.kwarg!==null);return _b_.None};$B.function.__setattr__=function(self,attr,value){if(attr=="__closure__"){throw _b_.AttributeError.$factory("readonly attribute")}else if(attr=="__defaults__"){if(value===_b_.None){value=[]}else if(!$B.$isinstance(value,_b_.tuple)){throw _b_.TypeError.$factory("__defaults__ must be set to a tuple object")}if(self.$infos){self.$infos.__defaults__=value;$B.make_function_defaults(self)}else{throw _b_.AttributeError.$factory("cannot set attribute "+attr+" of "+_b_.str.$factory(self))}}else if(attr=="__kwdefaults__"){if(value===_b_.None){value=$B.empty_dict}else if(!$B.$isinstance(value,_b_.dict)){throw _b_.TypeError.$factory("__kwdefaults__ must be set to a dict object")}if(self.$infos){self.$infos.__kwdefaults__=value;$B.make_function_defaults(self)}else{throw _b_.AttributeError.$factory("cannot set attribute "+attr+" of 
"+_b_.str.$factory(self))}}if(self.$infos[attr]!==undefined){self.$infos[attr]=value}else{self.$attrs=self.$attrs||{};self.$attrs[attr]=value}};$B.function.$factory=function(){};$B.set_func_names($B.function,"builtins");_b_.__BRYTHON__=__BRYTHON__;$B.builtin_funcs=["__build_class__","abs","aiter","all","anext","any","ascii","bin","breakpoint","callable","chr","compile","delattr","dir","divmod","eval","exec","exit","format","getattr","globals","hasattr","hash","help","hex","id","input","isinstance","issubclass","iter","len","locals","max","min","next","oct","open","ord","pow","print","quit","repr","round","setattr","sorted","sum","vars"];var builtin_function=$B.builtin_function_or_method=$B.make_class("builtin_function_or_method",(function(f){f.__class__=builtin_function;return f}));builtin_function.__getattribute__=$B.function.__getattribute__;builtin_function.__reduce_ex__=builtin_function.__reduce__=function(self){return self.$infos.__name__};builtin_function.__repr__=builtin_function.__str__=function(self){return""};$B.set_func_names(builtin_function,"builtins");var method_wrapper=$B.make_class("method_wrapper");method_wrapper.__repr__=method_wrapper.__str__=function(self){return""};$B.set_func_names(method_wrapper,"builtins");$B.builtin_classes=["bool","bytearray","bytes","classmethod","complex","dict","enumerate","filter","float","frozenset","int","list","map","memoryview","object","property","range","reversed","set","slice","staticmethod","str","super","tuple","type","zip"];var other_builtins=["Ellipsis","False","None","True","__debug__","__import__","copyright","credits","license","NotImplemented"];var builtin_names=$B.builtin_funcs.concat($B.builtin_classes).concat(other_builtins);for(var name of builtin_names){try{if($B.builtin_funcs.indexOf(name)>-1){_b_[name].__class__=builtin_function;_b_[name].$infos={__module__:"builtins",__name__:name,__qualname__:name}}}catch(err){}}_b_.object.__init__.__class__=$B.wrapper_descriptor;_b_.object.__new__.__class__=builtin_function})(__BRYTHON__);(function($B){var _b_=$B.builtins;var DEFAULT_MIN_MERGE=32;var DEFAULT_MIN_GALLOPING=7;var DEFAULT_TMP_STORAGE_LENGTH=256;var POWERS_OF_TEN=[1,10,100,1e3,1e4,1e5,1e6,1e7,1e8,1e9];function log10(x){if(x<1e5){if(x<100){return x<10?0:1}if(x<1e4){return x<1e3?2:3}return 4}if(x<1e7){return x<1e6?5:6}if(x<1e9){return x<1e8?7:8}return 9}function alphabeticalCompare(a,b){if(a===b){return 0}if(~~a===a&&~~b===b){if(a===0||b===0){return a=0){return-1}if(a>=0){return 1}a=-a;b=-b}var al=log10(a),bl=log10(b);var t=0;if(albl){b*=POWERS_OF_TEN[al-bl-1];a/=10;t=1}if(a===b){return t}return a=DEFAULT_MIN_MERGE){r|=n&1;n>>=1}return n+r}function makeAscendingRun(array,lo,hi,compare){var runHi=lo+1;if(runHi===hi){return 1}if(compare(array[runHi++],array[lo])<0){while(runHi=0){runHi++}}return runHi-lo}function reverseRun(array,lo,hi){hi--;while(lo>>1;if(compare(pivot,array[mid])<0){right=mid}else{left=mid+1}}var n=start-left;switch(n){case 3:array[left+3]=array[left+2];case 2:array[left+2]=array[left+1];case 1:array[left+1]=array[left];break;default:while(n>0){array[left+n]=array[left+n-1];n--}}array[left]=pivot}}function gallopLeft(value,array,start,length,hint,compare){var lastOffset=0,maxOffset=0,offset=1;if(compare(value,array[start+hint])>0){maxOffset=length-hint;while(offset0){lastOffset=offset;offset=(offset<<1)+1;if(offset<=0){offset=maxOffset}}if(offset>maxOffset){offset=maxOffset}lastOffset+=hint;offset+=hint}else{maxOffset=hint+1;while(offsetmaxOffset){offset=maxOffset}var 
tmp=lastOffset;lastOffset=hint-offset;offset=hint-tmp}lastOffset++;while(lastOffset>>1);if(compare(value,array[start+m])>0){lastOffset=m+1}else{offset=m}}return offset}function gallopRight(value,array,start,length,hint,compare){var lastOffset=0,maxOffset=0,offset=1;if(compare(value,array[start+hint])<0){maxOffset=hint+1;while(offsetmaxOffset){offset=maxOffset}var tmp=lastOffset;lastOffset=hint-offset;offset=hint-tmp}else{maxOffset=length-hint;while(offset=0){lastOffset=offset;offset=(offset<<1)+1;if(offset<=0){offset=maxOffset}}if(offset>maxOffset){offset=maxOffset}lastOffset+=hint;offset+=hint}lastOffset++;while(lastOffset>>1);if(compare(value,array[start+m])<0){offset=m}else{lastOffset=m+1}}return offset}var TIM_SORT_ASSERTION="TimSortAssertion";var TimSortAssertion=function(message){this.name=TIM_SORT_ASSERTION;this.message=message};var TimSort=function(array,compare){var self={array:array,compare:compare,minGallop:DEFAULT_MIN_GALLOPING,length:array.length,tmpStorageLength:DEFAULT_TMP_STORAGE_LENGTH,stackLength:0,runStart:null,runLength:null,stackSize:0,pushRun:function(runStart,runLength){this.runStart[this.stackSize]=runStart;this.runLength[this.stackSize]=runLength;this.stackSize+=1},mergeRuns:function(){while(this.stackSize>1){var n=this.stackSize-2;if(n>=1&&this.runLength[n-1]<=this.runLength[n]+this.runLength[n+1]||n>=2&&this.runLength[n-2]<=this.runLength[n]+this.runLength[n-1]){if(this.runLength[n-1]this.runLength[n+1]){break}this.mergeAt(n)}},forceMergeRuns:function(){while(this.stackSize>1){var n=this.stackSize-2;if(n>0&&this.runLength[n-1]=DEFAULT_MIN_GALLOPING||count2>=DEFAULT_MIN_GALLOPING);if(exit){break}if(minGallop<0){minGallop=0}minGallop+=2}this.minGallop=minGallop;if(minGallop<1){this.minGallop=1}if(length1===1){for(var i=0;i=0;i--){array[customDest+i]=array[customCursor+i]}array[dest]=tmp[cursor2];return}var minGallop=this.minGallop;while(true){var count1=0,count2=0,exit=false;do{if(compare(tmp[cursor2],array[cursor1])<0){array[dest--]=array[cursor1--];count1++;count2=0;if(--length1===0){exit=true;break}}else{array[dest--]=tmp[cursor2--];count2++;count1=0;if(--length2===1){exit=true;break}}}while((count1|count2)=0;i--){array[customDest+i]=array[customCursor+i]}if(length1===0){exit=true;break}}array[dest--]=tmp[cursor2--];if(--length2===1){exit=true;break}count2=length2-gallopLeft(array[cursor1],tmp,0,length2,length2-1,compare);if(count2!==0){dest-=count2;cursor2-=count2;length2-=count2;customDest=dest+1;customCursor=cursor2+1;for(var i=0;i=DEFAULT_MIN_GALLOPING||count2>=DEFAULT_MIN_GALLOPING);if(exit){break}if(minGallop<0){minGallop=0}minGallop+=2}this.minGallop=minGallop;if(minGallop<1){this.minGallop=1}if(length2===1){dest-=length1;cursor1-=length1;customDest=dest+1;customCursor=cursor1+1;for(var i=length1-1;i>=0;i--){array[customDest+i]=array[customCursor+i]}array[dest]=tmp[cursor2]}else if(length2==0){throw new TimSortAssertion("mergeHigh preconditions were not respected")}else{customCursor=dest-(length2-1);for(var i=0;i>>1}self.tmp=new Array(self.tmpStorageLength);self.stackLength=self.length<120?5:self.length<1542?10:self.length<119151?19:40;self.runStart=new Array(self.stackLength);self.runLength=new Array(self.stackLength);return self};function tim_sort(array,compare,lo,hi){if(!Array.isArray(array)){throw _b_.TypeError.$factory("Can only sort arrays")}if(!compare){compare=alphabeticalCompare}else if(typeof compare!=="function"){hi=lo;lo=compare;compare=alphabeticalCompare}if(!lo){lo=0}if(!hi){hi=array.length}var remaining=hi-lo;if(remaining<2){return}var 
runLength=0;if(remainingminRun){force=minRun}binaryInsertionSort(array,lo,lo+force,lo+runLength,compare);runLength=force}ts.pushRun(lo,runLength);ts.mergeRuns();remaining-=runLength;lo+=runLength}while(remaining!==0);ts.forceMergeRuns()}function tim_sort_safe(array,compare){try{tim_sort(array,compare,0,array.length)}catch(e){if(e.name==TIM_SORT_ASSERTION){array.sort(compare)}else{throw e}}}$B.$TimSort=tim_sort_safe;$B.$AlphabeticalCompare=alphabeticalCompare})(__BRYTHON__);(function($B){var _b_=$B.builtins;$B.del_exc=function(frame){delete frame[1].$current_exception};$B.set_exc=function(exc,frame){if(frame===undefined){var msg="Internal error: no frame for exception "+_b_.repr(exc);console.error(["Traceback (most recent call last):",$B.print_stack(exc.$frame_obj),msg].join("\n"));if($B.get_option("debug",exc)>1){console.log(exc.args);console.log(exc.stack)}throw Error(msg)}else{frame[1].$current_exception=$B.exception(exc)}};$B.get_exc=function(){var frame=$B.frame_obj.frame;return frame[1].$current_exception};$B.set_exception_offsets=function(exc,position){exc.$positions=exc.$positions||{};exc.$positions[$B.frame_obj.count-1]=position;return exc};$B.$raise=function(arg,cause){var active_exc=$B.get_exc();if(arg===undefined){if(active_exc!==undefined){throw active_exc}throw _b_.RuntimeError.$factory("No active exception to reraise")}else{if($B.$isinstance(arg,_b_.BaseException)){if(arg.__class__===_b_.StopIteration&&$B.frame_obj.frame.$is_generator){arg=_b_.RuntimeError.$factory("generator raised StopIteration")}arg.__context__=active_exc===undefined?_b_.None:active_exc;arg.__cause__=cause||_b_.None;arg.__suppress_context__=cause!==undefined;throw arg}else if(arg.$is_class&&_b_.issubclass(arg,_b_.BaseException)){if(arg===_b_.StopIteration){if($B.frame_obj.frame[1].$is_generator){throw _b_.RuntimeError.$factory("generator raised StopIteration")}}var exc=$B.$call(arg)();exc.__context__=active_exc===undefined?_b_.None:active_exc;exc.__cause__=cause||_b_.None;exc.__suppress_context__=cause!==undefined;throw exc}else{throw _b_.TypeError.$factory("exceptions must derive from BaseException")}}};$B.print_stack=function(frame_obj){var stack=make_frames_stack(frame_obj||$B.frame_obj);var trace=[];for(var frame of stack){var lineno=frame.$lineno,filename=frame.__file__;if(lineno!==undefined){var local=frame[0]==frame[2]?"":frame[0];trace.push(` File "${filename}" line ${lineno}, in ${local}`);var src=$B.file_cache[filename];if(src){var lines=src.split("\n"),line=lines[lineno-1];trace.push(" "+line.trim())}}}return trace.join("\n")};$B.last_frame=function(){var frame=$B.frame_obj.frame;return`file ${frame.__file__} line ${frame.$lineno}`};var traceback=$B.traceback=$B.make_class("traceback",(function(exc){var frame_obj=exc.$frame_obj;if(frame_obj===null){return _b_.None}if($B.$isinstance(exc,_b_.SyntaxError)){frame_obj=frame_obj.prev}var $linenums=$B.make_linenums(frame_obj);return{__class__:traceback,$stack:make_frames_stack(frame_obj),$linenums:$linenums,pos:0}}));traceback.__getattribute__=function(_self,attr){switch(attr){case"tb_frame":return _self.$stack[_self.pos];case"tb_lineno":return _self.$linenums[_self.pos];case"tb_lasti":return-1;case"tb_next":if(_self.pos<_self.$stack.length-1){_self.pos++;return _self}else{return _b_.None}case"stack":return _self.$stack;default:return _b_.object.__getattribute__(_self,attr)}};$B.set_func_names(traceback,"builtins");var frame=$B.frame=$B.make_class("frame",(function(frame_list){frame_list.__class__=frame;return 
frame_list}));frame.__delattr__=function(_self,attr){if(attr=="f_trace"){_self.$f_trace=_b_.None}};frame.__dir__=function(_self){return _b_.object.__dir__(frame).concat(["clear","f_back","f_builtins","f_code","f_globals","f_lasti","f_lineno","f_locals","f_trace","f_trace_lines","f_trace_opcodes"])};frame.__getattr__=function(_self,attr){if(attr=="f_back"){var frame_obj=$B.frame_obj;while(frame_obj!==null){if(frame_obj.frame===_self){break}frame_obj=frame_obj.prev}if(frame_obj.prev!==null){return frame.$factory(frame_obj.prev.frame)}return _b_.None}else if(attr=="clear"){return function(){}}else if(attr=="f_trace"){var locals=_self[1];if(_self.$f_trace===undefined){return _b_.None}return _self.$f_trace}console.log("no attr",attr,"for frame",_self);throw $B.attr_error(attr,_self)};frame.__setattr__=function(_self,attr,value){if(attr=="f_trace"){_self.$f_trace=value}};frame.__str__=frame.__repr__=function(_self){return""};frame.f_builtins={__get__:function(_self){return $B.$getattr(_self[3].__builtins__,"__dict__")}};frame.f_code={__get__:function(_self){var res;if(_self[4]){res=_self[4].$infos.__code__}else if(_self.f_code){res=_self.f_code}else{res={co_name:_self[0]==_self[2]?"":_self[0],co_filename:_self.__file__,co_varnames:$B.fast_tuple([])};res.co_qualname=res.co_name}res.__class__=_b_.code;return res}};frame.f_globals={__get__:function(_self){if(_self.f_globals){return _self.f_globals}else if(_self.f_locals&&_self[1]==_self[3]){return _self.f_globals=_self.f_locals}else{return _self.f_globals=$B.obj_dict(_self[3])}}};frame.f_lineno={__get__:function(_self){return _self.$lineno}};frame.f_locals={__get__:function(_self){if(_self.f_locals){return _self.f_locals}else if(_self.f_globals&&_self[1]==_self[3]){return _self.f_locals=_self.f_globals}else{return _self.f_locals=$B.obj_dict(_self[1])}}};frame.f_trace={__get__:function(_self){return _self.$f_trace}};$B.set_func_names(frame,"builtins");$B._frame=frame;$B.deep_copy=function(stack){var res=[];for(const s of stack){var item=[s[0],{},s[2],{}];if(s[4]!==undefined){item.push(s[4])}for(const i of[1,3]){for(var key in s[i]){item[i][key]=s[i][key]}}res.push(item)}return res};$B.restore_frame_obj=function(frame_obj,locals){$B.frame_obj=frame_obj;$B.frame_obj.frame[1]=locals};$B.make_linenums=function(frame_obj){var res=[],frame_obj=frame_obj||$B.frame_obj;while(frame_obj!==null){res.push(frame_obj.frame.$lineno);frame_obj=frame_obj.prev}return res.reverse()};var make_frames_stack=$B.make_frames_stack=function(frame_obj){var stack=[];while(frame_obj!==null){stack[stack.length]=frame_obj.frame;frame_obj=frame_obj.prev}stack.reverse();return stack};$B.freeze=function(err){if(err.$frame_obj===undefined){err.$frame_obj=$B.frame_obj;err.$linenums=$B.make_linenums()}err.__traceback__=traceback.$factory(err)};$B.exception=function(js_exc,in_ctx_manager){if(!js_exc.__class__){if(js_exc.$py_exc){return js_exc.$py_exc}var exc=_b_.JavascriptError.$factory(js_exc.__name__||js_exc.name);exc.$js_exc=js_exc;if($B.is_recursion_error(js_exc)){return _b_.RecursionError.$factory("too much recursion")}exc.__cause__=_b_.None;exc.__context__=_b_.None;exc.__suppress_context__=false;var $message=js_exc.message||"<"+js_exc+">";exc.args=_b_.tuple.$factory([$message]);exc.$py_error=true;js_exc.$py_exc=exc;$B.freeze(exc)}else{var exc=js_exc;$B.freeze(exc)}return exc};$B.is_exc=function(exc,exc_list){if(exc.__class__===undefined){exc=$B.exception(exc)}var this_exc_class=exc.$is_class?exc:exc.__class__;for(var i=0;i1){res+=", 
"+_b_.repr($B.fast_tuple(self.args.slice(1)))}return res+")"};_b_.BaseException.__str__=function(self){if(self.args.length>0&&self.args[0]!==_b_.None){return _b_.str.$factory(self.args[0])}return""};_b_.BaseException.__new__=function(cls){var err=_b_.BaseException.$factory();err.__class__=cls;err.__dict__=$B.empty_dict();return err};_b_.BaseException.__getattr__=function(self,attr){if(attr=="__context__"){var frame=$B.frame_obj.frame,ctx=frame[1].$current_exception;return ctx||_b_.None}else{throw $B.attr_error(attr,self)}};_b_.BaseException.add_note=function(self,note){if(!$B.$isinstance(note,_b_.str)){throw _b_.TypeError.$factory("note must be a str, not "+`'${$B.class_name(note)}'`)}if(self.__notes__!==undefined){self.__notes__.push(note)}else{self.__notes__=[note]}};_b_.BaseException.with_traceback=function(_self,tb){_self.__traceback__=tb;return _self};$B.set_func_names(_b_.BaseException,"builtins");make_builtin_exception(["SystemExit","KeyboardInterrupt","GeneratorExit","Exception"],_b_.BaseException);make_builtin_exception("JavascriptError",_b_.Exception);make_builtin_exception(["ArithmeticError","AssertionError","BufferError","EOFError","LookupError","MemoryError","OSError","ReferenceError","RuntimeError","SystemError","TypeError","ValueError","Warning"],_b_.Exception);make_builtin_exception("StopIteration",_b_.Exception,"value");make_builtin_exception("StopAsyncIteration",_b_.Exception,"value");make_builtin_exception("ImportError",_b_.Exception,"name");make_builtin_exception("SyntaxError",_b_.Exception,"msg");make_builtin_exception(["FloatingPointError","OverflowError","ZeroDivisionError"],_b_.ArithmeticError);make_builtin_exception("ModuleNotFoundError",_b_.ImportError,"name");make_builtin_exception(["IndexError","KeyError"],_b_.LookupError);make_builtin_exception(["BlockingIOError","ChildProcessError","ConnectionError","FileExistsError","FileNotFoundError","InterruptedError","IsADirectoryError","NotADirectoryError","PermissionError","ProcessLookupError","TimeoutError"],_b_.OSError);make_builtin_exception(["BrokenPipeError","ConnectionAbortedError","ConnectionRefusedError","ConnectionResetError"],_b_.ConnectionError);make_builtin_exception(["NotImplementedError","RecursionError"],_b_.RuntimeError);make_builtin_exception("IndentationError",_b_.SyntaxError,"msg");make_builtin_exception("TabError",_b_.IndentationError);make_builtin_exception("UnicodeError",_b_.ValueError);make_builtin_exception(["UnicodeDecodeError","UnicodeEncodeError","UnicodeTranslateError"],_b_.UnicodeError);make_builtin_exception(["DeprecationWarning","PendingDeprecationWarning","RuntimeWarning","SyntaxWarning","UserWarning","FutureWarning","ImportWarning","UnicodeWarning","BytesWarning","ResourceWarning","EncodingWarning"],_b_.Warning);make_builtin_exception(["EnvironmentError","IOError","VMSError","WindowsError"],_b_.OSError);_b_.AttributeError=$B.make_class("AttributeError",(function(){var $=$B.args("AttributeError",3,{msg:null,name:null,obj:null},["msg","name","obj"],arguments,{msg:_b_.None,name:_b_.None,obj:_b_.None},"*",null);var err=Error();err.__class__=_b_.AttributeError;err.__traceback__=_b_.None;err.$py_error=true;err.$frame_obj=$B.frame_obj;err.$linenums=$B.make_linenums();err.args=$B.fast_tuple($.msg===_b_.None?[]:[$.msg]);err.name=$.name;err.obj=$.obj;if(err.obj===undefined){console.log("pas de obj",$)}err.__cause__=_b_.None;err.__context__=_b_.None;err.__suppress_context__=false;return 
err}));_b_.AttributeError.__bases__=[_b_.Exception];_b_.AttributeError.__mro__=_b_.type.mro(_b_.AttributeError);_b_.AttributeError.__str__=function(self){return self.args[0]};$B.set_func_names(_b_.AttributeError,"builtins");$B.attr_error=function(name,obj){if(obj.$is_class){var msg=`type object '${obj.__name__}'`}else{var msg=`'${$B.class_name(obj)}' object`}msg+=` has no attribute '${name}'`;return _b_.AttributeError.$factory({$kw:[{name:name,obj:obj,msg:msg}]})};_b_.NameError=$B.make_class("NameError",(function(){var $=$B.args("NameError",2,{message:null,name:null},["message","name"],arguments,{message:_b_.None,name:_b_.None},"*",null,1);var err=Error();err.__class__=_b_.NameError;err.__traceback__=_b_.None;err.$py_error=true;err.$frame_obj=$B.frame_obj;err.$linenums=$B.make_linenums();err.args=$B.fast_tuple($.message===_b_.None?[]:[$.message]);err.name=$.name;err.__cause__=_b_.None;err.__context__=_b_.None;err.__suppress_context__=false;return err}));_b_.NameError.__bases__=[_b_.Exception];_b_.NameError.__mro__=_b_.type.mro(_b_.NameError).slice(1);_b_.NameError.__str__=function(self){return self.args[0]};$B.set_func_names(_b_.NameError,"builtins");make_builtin_exception("UnboundLocalError",_b_.NameError);_b_.UnboundLocalError.__str__=function(self){return self.args[0]};$B.set_func_names(_b_.UnboundLocalError,"builtins");$B.name_error=function(name){var exc=_b_.NameError.$factory(`name '${name}' is not defined`);exc.name=name;exc.$frame_obj=$B.frame_obj;return exc};$B.recursion_error=function(frame){var exc=_b_.RecursionError.$factory("maximum recursion depth exceeded");$B.set_exc(exc,frame);return exc};var MAX_CANDIDATE_ITEMS=750,MAX_STRING_SIZE=40,MOVE_COST=2,CASE_COST=1,SIZE_MAX=65535;function LEAST_FIVE_BITS(n){return n&31}function levenshtein_distance(a,b,max_cost){if(a==b){return 0}if(a.lengthmax_cost){return max_cost+1}var buffer=[];for(var i=0;imax_cost){return max_cost+1}}return result}function substitution_cost(a,b){if(LEAST_FIVE_BITS(a)!=LEAST_FIVE_BITS(b)){return MOVE_COST}if(a==b){return 0}if(a.toLowerCase()==b.toLowerCase()){return CASE_COST}return MOVE_COST}function calculate_suggestions(dir,name){if(dir.length>=MAX_CANDIDATE_ITEMS){return null}var suggestion_distance=2**52,suggestion=null;for(var item of dir){var max_distance=(name.length+item.length+3)*MOVE_COST/6;max_distance=Math.min(max_distance,suggestion_distance-1);var current_distance=levenshtein_distance(name,item,max_distance);if(current_distance>max_distance){continue}if(!suggestion||current_distance!x.startsWith("$")));var suggestion=calculate_suggestions(locals,name);if(suggestion){return suggestion}if(frame[2]!=frame[0]){var globals=Object.keys(frame[3]).filter((x=>!x.startsWith("$")));var suggestion=calculate_suggestions(globals,name);if(suggestion){return suggestion}}if(frame[4]&&frame[4].$is_method){var instance_name=frame[4].$infos.__code__.co_varnames[0],instance=frame[1][instance_name];if(_b_.hasattr(instance,name)){return`self.${name}`}}return _b_.None};_b_.BaseExceptionGroup=$B.make_class("BaseExceptionGFroup",(function(){var missing={},$=$B.args("BaseExceptionGroup",2,{message:null,exceptions:null},["message","exceptions"],arguments,{exceptions:missing},null,null);var err=Error();err.args=$B.fast_tuple(Array.from(arguments));err.__class__=_b_.BaseExceptionGroup;err.__traceback__=_b_.None;err.$py_error=true;err.$frame_obj=$B.frame_obj;err.$linenums=$B.make_linenums();err.message=$.message;err.exceptions=$.exceptions===missing?[]:$.exceptions;if(err.exceptions!==_b_.None){var 
exc_list=_b_.list.$factory(err.exceptions);var all_exceptions=true;for(var exc of exc_list){if(!$B.$isinstance(exc,_b_.Exception)){all_exceptions=false;break}}if(all_exceptions){err.__class__=_b_.ExceptionGroup}}err.__cause__=_b_.None;err.__context__=_b_.None;err.__suppress_context__=false;return err}));_b_.BaseExceptionGroup.__bases__=[_b_.BaseException];_b_.BaseExceptionGroup.__mro__=_b_.type.mro(_b_.BaseExceptionGroup);_b_.BaseExceptionGroup.__str__=function(self){return`${self.message} (${self.exceptions.length} sub-exception`+`${self.exceptions.length>1?"s":""})`};_b_.BaseExceptionGroup.split=function(self,condition){var matching_excs=[],non_matching_excs=[];for(var exc of self.exceptions){if($B.$isinstance(exc,_b_.BaseExceptionGroup)){var subsplit=_b_.BaseExceptionGroup.split(exc,condition),matching=subsplit[0],non_matching=subsplit[1];if(matching===_b_.None){non_matching_excs.push(exc)}else if(matching.exceptions.length==exc.exceptions.length){matching_excs.push(exc)}else{if(matching.exceptions.length>0){matching_excs=matching_excs.concat(matching)}if(non_matching.exceptions.length>0){non_matching_excs=non_matching_excs.concat(non_matching)}}}else if(condition(exc)){matching_excs.push(exc)}else{non_matching_excs.push(exc)}}if(matching_excs.length==0){matching_excs=_b_.None}if(non_matching_excs.length==0){non_matching_excs=_b_.None}var res=[];for(var item of[matching_excs,non_matching_excs]){var eg=_b_.BaseExceptionGroup.$factory(self.message,item);eg.__cause__=self.__cause__;eg.__context__=self.__context__;eg.__traceback__=self.__traceback__;res.push(eg)}return $B.fast_tuple(res)};_b_.BaseExceptionGroup.subgroup=function(self,condition){return _b_.BaseExceptionGroup.split(self,condition)[0]};$B.set_func_names(_b_.BaseExceptionGroup,"builtins");_b_.ExceptionGroup=$B.make_class("ExceptionGFroup",(function(){var missing={},$=$B.args("ExceptionGroup",2,{message:null,exceptions:null},["message","exceptions"],arguments,{exceptions:missing},null,null);var err=Error();err.args=$B.fast_tuple(Array.from(arguments));err.__class__=_b_.ExceptionGroup;err.__traceback__=_b_.None;err.$py_error=true;err.$frame_obj=$B.frame_obj;err.$linenums=$B.make_linenums();err.message=$.message;err.exceptions=$.exceptions===missing?[]:$.exceptions;if(err.exceptions!==_b_.None){var exc_list=_b_.list.$factory(err.exceptions);for(var exc of exc_list){if(!$B.$isinstance(exc,_b_.Exception)){throw _b_.TypeError.$factory("Cannot nest BaseExceptions in an ExceptionGroup")}}}err.__cause__=_b_.None;err.__context__=_b_.None;err.__suppress_context__=false;return err}));_b_.ExceptionGroup.__bases__=[_b_.BaseExceptionGroup,_b_.Exception];_b_.ExceptionGroup.__mro__=_b_.type.mro(_b_.ExceptionGroup);$B.set_func_names(_b_.ExceptionGroup,"builtins");function trace_from_stack(err){function handle_repeats(src,count_repeats){if(count_repeats>0){var len=trace.length;for(var i=0;i<2;i++){if(src){trace.push(trace[len-2]);trace.push(trace[len-1])}else{trace.push(trace[len-1])}count_repeats--;if(count_repeats==0){break}}if(count_repeats>0){trace.push(`[Previous line repeated ${count_repeats} more`+` time${count_repeats>1?"s":""}]`)}}}var trace=[],save_filename,save_lineno,save_scope,count_repeats=0,stack=err.$frame_obj===undefined?[]:make_frames_stack(err.$frame_obj),linenos=err.$linenums;for(var 
frame_num=0,len=stack.length;frame_num":frame[0];if(filename==save_filename&&scope==save_scope&&lineno==save_lineno){count_repeats++;continue}handle_repeats(src,count_repeats);save_filename=filename;save_lineno=lineno;save_scope=scope;count_repeats=0;var src=$B.file_cache[filename];trace.push(` File "${filename}", line ${lineno}, in `+(frame[0]==frame[2]?"":frame[0]));if(src){var lines=src.split("\n"),line=lines[lineno-1];if(line){trace.push(" "+line.trim())}else{console.log("no line",line)}if(err.$positions!==undefined){var position=err.$positions[frame_num],trace_line="";if(position&&(position[1]!=position[0]||position[2]-position[1]!=line.trim().length||position[3])){var indent=line.length-line.trimLeft().length;var paddings=[position[0]-indent,position[1]-position[0],position[2]-position[1]];for(var padding in paddings){if(padding<0){console.log("wrong values, position",position,"indent",indent);paddings[paddings.indexOf(padding)]=0}}trace_line+=" "+" ".repeat(paddings[0])+"~".repeat(paddings[1])+"^".repeat(paddings[2]);if(position[3]!==undefined){trace_line+="~".repeat(position[3]-position[2])}trace.push(trace_line)}}}else{console.log("no src for filename",filename);console.log("in file_cache",Object.keys($B.file_cache).join("\n"))}}if(count_repeats>0){var len=trace.length;for(var i=0;i<2;i++){if(src){trace.push(trace[len-2]);trace.push(trace[len-1])}else{trace.push(trace[len-1])}}trace.push(`[Previous line repeated ${count_repeats-2} more times]`)}return trace.join("\n")+"\n"}$B.error_trace=function(err){var trace="",stack=err.$frame_obj===undefined?[]:make_frames_stack(err.$frame_obj);if($B.get_option("debug",err)>1){console.log("handle error",err.__class__,err.args);console.log("stack",stack);console.log(err.stack)}if(stack.length>0){trace="Traceback (most recent call last):\n"}if(err.__class__===_b_.SyntaxError||err.__class__===_b_.IndentationError){err.$frame_obj=err.$frame_obj===null?null:err.$frame_obj.prev;trace+=trace_from_stack(err);var filename=err.filename,line=err.text,indent=line.length-line.trimLeft().length;trace+=` File "${filename}", line ${err.args[1][1]}\n`+` ${line.trim()}\n`;if(err.__class__!==_b_.IndentationError&&err.text){if($B.get_option("debug",err)>1){console.log("error args",err.args[1]);console.log("err line",line);console.log("indent",indent)}var start=err.offset-indent-1,end_offset=err.end_offset-1+(err.end_offset==err.offset?1:0),marks=" "+" ".repeat(start),nb_marks=1;if(err.end_lineno){if(err.end_lineno>err.lineno){nb_marks=line.length-start-indent}else{nb_marks=end_offset-start-indent}if(nb_marks==0&&err.end_offset==line.substr(indent).length){nb_marks=1}}marks+="^".repeat(nb_marks)+"\n";trace+=marks}trace+=`${err.__class__.__name__}: ${err.args[0]}`}else if(err.__class__!==undefined){var name=$B.class_name(err);trace+=trace_from_stack(err);var args_str=_b_.str.$factory(err);trace+=name+(args_str?": "+args_str:"");var save_frame_obj=$B.frame_obj;$B.frame_obj=err.$frame_obj;if(err.__class__===_b_.NameError){var suggestion=$B.offer_suggestions_for_name_error(err);if(suggestion!==_b_.None){trace+=`. Did you mean '${suggestion}'?`}if($B.stdlib_module_names.indexOf(err.name)>-1){trace+=`. Did you forget to import '${err.name}'?`}}else if(err.__class__===_b_.AttributeError){var suggestion=$B.offer_suggestions_for_attribute_error(err);if(suggestion!==_b_.None){trace+=`. Did you mean: '${suggestion}'?`}}else if(err.__class__===_b_.ImportError){if(err.$suggestion!==_b_.None){trace+=`. 
Did you mean: '${err.$suggestion}'?`}}$B.frame_obj=save_frame_obj}else{trace=err+""}if(err.$js_exc){trace+="\n\nJavascript error\n"+err.$js_exc+"\n"+err.$js_exc.stack}return trace};$B.get_stderr=function(){if($B.imported.sys){return $B.imported.sys.stderr}return $B.imported._sys.stderr};$B.get_stdout=function(){if($B.imported.sys){return $B.imported.sys.stdout}return $B.imported._sys.stdout};$B.show_error=function(err){var trace=$B.error_trace($B.exception(err));try{var stderr=$B.get_stderr();$B.$getattr(stderr,"write")(trace);var flush=$B.$getattr(stderr,"flush",_b_.None);if(flush!==_b_.None){flush()}}catch(print_exc_err){console.debug(trace)}};$B.handle_error=function(err){if(err.$handled){return}err.$handled=true;$B.show_error(err);throw err}})(__BRYTHON__);(function($B){var _b_=$B.builtins,None=_b_.None,range={__class__:_b_.type,__mro__:[_b_.object],__qualname__:"range",$is_class:true,$native:true,$match_sequence_pattern:true,$not_basetype:true,$descriptors:{start:true,step:true,stop:true}};range.__contains__=function(self,other){if(range.__len__(self)==0){return false}try{other=$B.int_or_bool(other)}catch(err){try{range.index(self,other);return true}catch(err){return false}}var start=_b_.int.$to_bigint(self.start),stop=_b_.int.$to_bigint(self.stop),step=_b_.int.$to_bigint(self.step),other=_b_.int.$to_bigint(other);var sub=other-start,fl=sub/step,res=step*fl;if(res==sub){if(stop>start){return other>=start&&stop>other}else{return start>=other&&other>stop}}else{return false}};range.__delattr__=function(self,attr,value){throw _b_.AttributeError.$factory("readonly attribute")};range.__eq__=function(self,other){if($B.$isinstance(other,range)){var len=range.__len__(self);if(!$B.rich_comp("__eq__",len,range.__len__(other))){return false}if(len==0){return true}if(!$B.rich_comp("__eq__",self.start,other.start)){return false}if(len==1){return true}return $B.rich_comp("__eq__",self.step,other.step)}return false};function compute_item(r,i){var len=range.__len__(r);if(len==0){return r.start}else if(i>len){return r.stop}return $B.rich_op("__add__",r.start,$B.rich_op("__mul__",r.step,i))}range.__getitem__=function(self,rank){if($B.$isinstance(rank,_b_.slice)){var norm=_b_.slice.$conv_for_seq(rank,range.__len__(self)),substep=$B.rich_op("__mul__",self.step,norm.step),substart=compute_item(self,norm.start),substop=compute_item(self,norm.stop);return range.$factory(substart,substop,substep)}if(typeof rank!="number"){rank=$B.$GetInt(rank)}if($B.rich_comp("__gt__",0,rank)){rank=$B.rich_op("__add__",rank,range.__len__(self))}var res=$B.rich_op("__add__",self.start,$B.rich_op("__mul__",rank,self.step));if($B.rich_comp("__gt__",self.step,0)&&($B.rich_comp("__ge__",res,self.stop)||$B.rich_comp("__gt__",self.start,res))||$B.rich_comp("__gt__",0,self.step)&&($B.rich_comp("__ge__",self.stop,res)||$B.rich_comp("__gt__",res,self.start))){throw _b_.IndexError.$factory("range object index out of range")}return res};range.__hash__=function(self){var len=range.__len__(self);if(len==0){return _b_.hash(_b_.tuple.$factory([0,None,None]))}if(len==1){return _b_.hash(_b_.tuple.$factory([1,self.start,None]))}return _b_.hash(_b_.tuple.$factory([len,self.start,self.step]))};var RangeIterator=$B.make_class("range_iterator",(function(obj){return{__class__:RangeIterator,obj:obj}}));RangeIterator.__iter__=function(self){return self};RangeIterator.__next__=function(self){return _b_.next(self.obj)};$B.set_func_names(RangeIterator,"builtins");range.__iter__=function(self){var 
res={__class__:range,start:self.start,stop:self.stop,step:self.step};if(self.$safe){res.$counter=self.start-self.step}else{res.$counter=$B.rich_op("__sub__",self.start,self.step)}return RangeIterator.$factory(res)};range.__len__=function(self){var len,start=_b_.int.$to_bigint(self.start),stop=_b_.int.$to_bigint(self.stop),step=_b_.int.$to_bigint(self.step);if(self.step>0){if(self.start>=self.stop){return 0}len=1n+(stop-start-1n)/step}else{if(self.stop>=self.start){return 0}len=1n+(start-stop-1n)/-step}return _b_.int.$int_or_long(len)};range.__next__=function(self){if(self.$safe){self.$counter+=self.step;if(self.step>0&&self.$counter>=self.stop||self.step<0&&self.$counter<=self.stop){throw _b_.StopIteration.$factory("")}}else{self.$counter=$B.rich_op("__add__",self.$counter,self.step);if($B.rich_comp("__gt__",self.step,0)&&$B.rich_comp("__ge__",self.$counter,self.stop)||$B.rich_comp("__gt__",0,self.step)&&$B.rich_comp("__ge__",self.stop,self.$counter)){throw _b_.StopIteration.$factory("")}}return self.$counter};range.__reversed__=function(self){var n=$B.rich_op("__sub__",range.__len__(self),1);return range.$factory($B.rich_op("__add__",self.start,$B.rich_op("__mul__",n,self.step)),$B.rich_op("__sub__",self.start,self.step),$B.rich_op("__mul__",-1,self.step))};range.__repr__=function(self){$B.builtins_repr_check(range,arguments);var res="range("+_b_.str.$factory(self.start)+", "+_b_.str.$factory(self.stop);if(self.step!=1){res+=", "+_b_.str.$factory(self.step)}return res+")"};range.__setattr__=function(self,attr,value){throw _b_.AttributeError.$factory("readonly attribute")};range.start=function(self){return self.start};range.step=function(self){return self.step},range.stop=function(self){return self.stop};range.count=function(self,ob){if($B.$isinstance(ob,[_b_.int,_b_.float,_b_.bool])){return _b_.int.$factory(range.__contains__(self,ob))}else{var comp=function(other){return $B.rich_comp("__eq__",ob,other)},it=range.__iter__(self),_next=RangeIterator.__next__,nb=0;while(true){try{if(comp(_next(it))){nb++}}catch(err){if($B.$isinstance(err,_b_.StopIteration)){return nb}throw err}}}};range.index=function(self,other){var $=$B.args("index",2,{self:null,other:null},["self","other"],arguments,{},null,null),self=$.self,other=$.other;try{other=$B.int_or_bool(other)}catch(err){var comp=function(x){return $B.rich_comp("__eq__",other,x)},it=range.__iter__(self),_next=RangeIterator.__next__,nb=0;while(true){try{if(comp(_next(it))){return nb}nb++}catch(err){if($B.$isinstance(err,_b_.StopIteration)){throw _b_.ValueError.$factory(_b_.str.$factory(other)+" not in range")}throw err}}}var sub=$B.rich_op("__sub__",other,self.start),fl=$B.rich_op("__floordiv__",sub,self.step),res=$B.rich_op("__mul__",self.step,fl);if($B.rich_comp("__eq__",res,sub)){if($B.rich_comp("__gt__",self.stop,self.start)&&$B.rich_comp("__ge__",other,self.start)&&$B.rich_comp("__gt__",self.stop,other)||$B.rich_comp("__ge__",self.start,self.stop)&&$B.rich_comp("__ge__",self.start,other)&&$B.rich_comp("__gt__",other,self.stop)){return fl}else{throw _b_.ValueError.$factory(_b_.str.$factory(other)+" not in range")}}else{throw _b_.ValueError.$factory(_b_.str.$factory(other)+" not in range")}};range.$factory=function(){var $=$B.args("range",3,{start:null,stop:null,step:null},["start","stop","step"],arguments,{start:null,stop:null,step:null},null,null),start=$.start,stop=$.stop,step=$.step,safe;if(stop===null&&step===null){if(start==null){throw _b_.TypeError.$factory("range expected 1 arguments, got 0")}stop=$B.PyNumber_Index(start);safe=typeof 
stop==="number";return{__class__:range,start:0,stop:stop,step:1,$is_range:true,$safe:safe}}if(step===null){step=1}start=$B.PyNumber_Index(start);stop=$B.PyNumber_Index(stop);step=$B.PyNumber_Index(step);if(step==0){throw _b_.ValueError.$factory("range arg 3 must not be zero")}safe=typeof start=="number"&&typeof stop=="number"&&typeof step=="number";return{__class__:range,start:start,stop:stop,step:step,$is_range:true,$safe:safe}};$B.set_func_names(range,"builtins");var slice={__class__:_b_.type,__mro__:[_b_.object],__qualname__:"slice",$is_class:true,$native:true,$not_basetype:true,$descriptors:{start:true,step:true,stop:true}};slice.__eq__=function(self,other){var conv1=conv_slice(self),conv2=conv_slice(other);return conv1[0]==conv2[0]&&conv1[1]==conv2[1]&&conv1[2]==conv2[2]};slice.__repr__=function(self){$B.builtins_repr_check(slice,arguments);return"slice("+_b_.str.$factory(self.start)+", "+_b_.str.$factory(self.stop)+", "+_b_.str.$factory(self.step)+")"};slice.__setattr__=function(self,attr,value){throw _b_.AttributeError.$factory("readonly attribute")};function conv_slice(self){var attrs=["start","stop","step"],res=[];for(var i=0;i=0;i--){if(cars.indexOf(self.source[i])==-1){break}}return bytes.$factory(self.source.slice(0,i+1))}function invalid(other){return!$B.$isinstance(other,[bytes,bytearray])}var bytearray={__class__:_b_.type,__mro__:[_b_.object],__qualname__:"bytearray",$buffer_protocol:true,$is_class:true};var mutable_methods=["__delitem__","clear","copy","count","index","pop","remove","reverse"];for(var method of mutable_methods){bytearray[method]=function(m){return function(self){var args=[self.source],pos=1;for(var i=1,len=arguments.length;i255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")}var pos=arg;if(arg<0){pos=self.source.length+pos}if(pos>=0&&pos=0;i--){if(!$B.$isinstance($temp[i],_b_.int)){throw _b_.TypeError.$factory("an integer is required")}else if($temp[i]>255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")}self.source.splice(start,0,$temp[i])}}catch(err){throw _b_.TypeError.$factory("can only assign an iterable")}}else{throw _b_.TypeError.$factory("list indices must be integer, not "+$B.class_name(arg))}};bytearray.append=function(self,b){if(arguments.length!=2){throw _b_.TypeError.$factory("append takes exactly one argument ("+(arguments.length-1)+" given)")}if(!$B.$isinstance(b,_b_.int)){throw _b_.TypeError.$factory("an integer is required")}if(b>255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")}self.source[self.source.length]=b};bytearray.extend=function(self,b){if(self.in_iteration){throw _b_.BufferError.$factory("Existing exports of data: object "+"cannot be re-sized")}if(b.__class__===bytearray||b.__class__===bytes){self.source=self.source.concat(b.source);return _b_.None}for(var item of $B.make_js_iterator(b)){bytearray.append(self,$B.PyNumber_Index(item))}return _b_.None};bytearray.insert=function(self,pos,b){if(arguments.length!=3){throw _b_.TypeError.$factory("insert takes exactly 2 arguments ("+(arguments.length-1)+" given)")}if(!$B.$isinstance(b,_b_.int)){throw _b_.TypeError.$factory("an integer is required")}if(b>255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")}_b_.list.insert(self.source,pos,b)};bytearray.$factory=function(){var args=[bytearray];for(var i=0,len=arguments.length;i-1}if(self.source.length=0&&pos0){stop=Math.min(stop,self.source.length);if(stop<=start){return bytes.$factory([])}for(var i=start;i=start){return bytes.$factory([])}stop=Math.max(0,stop);for(var 
i=start;i>=stop;i+=step){res[pos++]=self.source[i]}}return bytes.$factory(res)}else if($B.$isinstance(arg,_b_.bool)){return self.source.__getitem__(_b_.int.$factory(arg))}};bytes.$getnewargs=function(self){return $B.fast_tuple([bytes_value(self)])};bytes.__getnewargs__=function(){return bytes.$getnewargs($B.single_arg("__getnewargs__","self",arguments))};bytes.__gt__=function(self,other){if(invalid(other)){return _b_.NotImplemented}return _b_.list.__gt__(self.source,other.source)};bytes.__hash__=function(self){if(self===undefined){return bytes.__hashvalue__||$B.$py_next_hash--}var hash=1;for(var i=0,len=self.source.length;i=0&&item<256){source.push(item)}else{throw _b_.ValueError.$factory("bytes must be in range (0, 256)")}}}return{__class__:$.cls,source:source}};bytes.$new=function(cls,source,encoding,errors){var self={__class__:cls},int_list=[],pos=0;if(source===undefined){}else if(typeof source=="number"||$B.$isinstance(source,_b_.int)){var i=source;while(i--){int_list[pos++]=0}}else{if(typeof source=="string"||$B.$isinstance(source,_b_.str)){if(encoding===undefined){throw _b_.TypeError.$factory("string argument without an encoding")}int_list=encode(source,encoding||"utf-8",errors||"strict")}else{if(encoding!==undefined){console.log("encoding",encoding);throw _b_.TypeError.$factory("encoding without a string argument")}if(Array.isArray(source)){int_list=source}else{try{int_list=_b_.list.$factory(source)}catch(err){var bytes_method=$B.$getattr(source,"__bytes__",_b_.None);if(bytes_method===_b_.None){throw _b_.TypeError.$factory("cannot convert "+`'${$B.class_name(source)}' object to bytes`)}var res=$B.$call(bytes_method)();if(!$B.$isinstance(res,_b_.bytes)){throw _b_.TypeError.$factory(`__bytes__ returned `+`non-bytes (type ${$B.class_name(res)})`)}return res}for(var i=0;i255){throw _b_.ValueError.$factory("bytes must be in range"+"(0, 256)")}}}}}self.source=int_list;self.encoding=encoding;self.errors=errors;return self};bytes.__repr__=bytes.__str__=function(self){var t=$B.special_string_repr,res="";for(var i=0,len=self.source.length;i=128){var hx=s.toString(16);hx=(hx.length==1?"0":"")+hx;res+="\\x"+hx}else if(s=="\\".charCodeAt(0)){res+="\\\\"}else{res+=String.fromCharCode(s)}}if(res.indexOf("'")>-1&&res.indexOf('"')==-1){return'b"'+res+'"'}else{return"b'"+res.replace(new RegExp("'","g"),"\\'")+"'"}};bytes.capitalize=function(self){var src=self.source,len=src.length,buffer=src.slice();if(buffer[0]>96&&buffer[0]<123){buffer[0]-=32}for(var i=1;i64&&buffer[i]<91){buffer[i]+=32}}return bytes.$factory(buffer)};bytes.center=function(){var $=$B.args("center",3,{self:null,width:null,fillbyte:null},["self","width","fillbyte"],arguments,{fillbyte:bytes.$factory([32])},null,null);var diff=$.width-$.self.source.length;if(diff<=0){return bytes.$factory($.self.source)}var ljust=bytes.ljust($.self,$.self.source.length+Math.floor(diff/2),$.fillbyte);return bytes.rjust(ljust,$.width,$.fillbyte)};bytes.count=function(){var $=$B.args("count",4,{self:null,sub:null,start:null,end:null},["self","sub","start","end"],arguments,{start:0,end:-1},null,null);var n=0,index=-1,len=0;if(typeof $.sub=="number"){if($.sub<0||$.sub>255)throw _b_.ValueError.$factory("byte must be in range(0, 256)");len=1}else if(!$.sub.__class__){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name($.sub)+"'")}else if(!$.sub.__class__.$buffer_protocol){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not 
'"+$B.class_name($.sub)+"'")}else{len=$.sub.source.length}do{index=bytes.find($.self,$.sub,Math.max(index+len,$.start),$.end);if(index!=-1){n++}}while(index!=-1);return n};bytes.decode=function(self,encoding,errors){var $=$B.args("decode",3,{self:null,encoding:null,errors:null},["self","encoding","errors"],arguments,{encoding:"utf-8",errors:"strict"},null,null);switch($.errors){case"strict":case"ignore":case"replace":case"surrogateescape":case"surrogatepass":case"xmlcharrefreplace":case"backslashreplace":return decode($.self,$.encoding,$.errors);default:}};bytes.endswith=function(){var $=$B.args("endswith",4,{self:null,suffix:null,start:null,end:null},["self","suffix","start","end"],arguments,{start:-1,end:-1},null,null);if($B.$isinstance($.suffix,bytes)){var start=$.start==-1?$.self.source.length-$.suffix.source.length:Math.min($.self.source.length-$.suffix.source.length,$.start);var end=$.end==-1?$.self.source.length:$.end;var res=true;for(var i=$.suffix.source.length-1,len=$.suffix.source.length;i>=0&&res;--i){res=$.self.source[end-len+i]==$.suffix.source[i]}return res}else if($B.$isinstance($.suffix,_b_.tuple)){for(var i=0;i<$.suffix.length;++i){if($B.$isinstance($.suffix[i],bytes)){if(bytes.endswith($.self,$.suffix[i],$.start,$.end)){return true}}else{throw _b_.TypeError.$factory("endswith first arg must be "+"bytes or a tuple of bytes, not "+$B.class_name($.suffix))}}return false}else{throw _b_.TypeError.$factory("endswith first arg must be bytes "+"or a tuple of bytes, not "+$B.class_name($.suffix))}};bytes.expandtabs=function(){var $=$B.args("expandtabs",2,{self:null,tabsize:null},["self","tabsize"],arguments,{tabsize:8},null,null);var tab_spaces=[];for(let i=0;i<$.tabsize;++i){tab_spaces.push(32)}var buffer=$.self.source.slice();for(let i=0;i255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")}return self.source.slice(0,end==-1?undefined:end).indexOf(sub,start)}else if(!sub.__class__){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name(sub)+"'")}else if(!sub.__class__.$buffer_protocol){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name(sub)+"'")}end=end==-1?self.source.length:Math.min(self.source.length,end);var len=sub.source.length;for(var i=start;i<=end-len;i++){var chunk=self.source.slice(i,i+len),found=true;for(var j=0;jstring.length){throw _b_.ValueError.$factory("non-hexadecimal number found "+"in fromhex() arg")}source.push(_b_.int.$factory(string.substr(i,2),16))}return $.cls.$factory(source)};bytes.hex=function(){var $=$B.args("hex",3,{self:null,sep:null,bytes_per_sep:null},["self","sep","bytes_per_sep"],arguments,{sep:"",bytes_per_sep:1},null,null),self=$.self,sep=$.sep,bytes_per_sep=$.bytes_per_sep,res="",digits="0123456789abcdef",bps=bytes_per_sep,jstart=bps,len=self.source.length;if(bytes_per_sep<0){bps=-bytes_per_sep;jstart=bps}else if(bytes_per_sep==0){sep=""}else{jstart=len%bps;if(jstart==0){jstart=bps}}for(var i=0,j=jstart;i>4];res+=digits[c&15]}return res};bytes.index=function(){var $=$B.args("index",4,{self:null,sub:null,start:null,end:null},["self","sub","start","end"],arguments,{start:0,end:-1},null,null);var index=bytes.find($.self,$.sub,$.start,$.end);console.log("index",index);if(index==-1){throw _b_.ValueError.$factory("subsection not found")}return index};bytes.isalnum=function(){var $=$B.args("isalnum",1,{self:null},["self"],arguments,{},null,null),self=$.self;var src=self.source,len=src.length,res=len>0;for(var 
i=0;i96&&src[i]<123||src[i]>64&&src[i]<91||src[i]>47&&src[i]<58}return res};bytes.isalpha=function(){var $=$B.args("isalpha",1,{self:null},["self"],arguments,{},null,null),self=$.self;var src=self.source,len=src.length,res=len>0;for(var i=0;i96&&src[i]<123||src[i]>64&&src[i]<91}return res};bytes.isdigit=function(){var $=$B.args("isdigit",1,{self:null},["self"],arguments,{},null,null),self=$.self;var src=self.source,len=src.length,res=len>0;for(let i=0;i47&&src[i]<58}return res};bytes.islower=function(){var $=$B.args("islower",1,{self:null},["self"],arguments,{},null,null),self=$.self;var src=self.source,len=src.length,res=false;for(let i=0;i96&&src[i]<123;if(src[i]>64&&src[i]<91){return false}}return res};bytes.isspace=function(){var $=$B.args("isspace",1,{self:null},["self"],arguments,{},null,null),self=$.self;var src=self.source,len=src.length;for(let i=0;i64&&src[i]<91;if(src[i]>96&&src[i]<123){return false}}return res};bytes.istitle=function(){var $=$B.args("istitle",1,{self:null},["self"],arguments,{},null,null),self=$.self;var src=self.source,len=src.length,current_char_is_letter=false,prev_char_was_letter=false,is_uppercase=false,is_lowercase=false;for(var i=0;i96&&src[i]<123;is_uppercase=src[i]>64&&src[i]<91;current_char_is_letter=is_lowercase||is_uppercase;if(current_char_is_letter&&(prev_char_was_letter&&is_uppercase)||!prev_char_was_letter&&is_lowercase){return false}prev_char_was_letter=current_char_is_letter}return true};bytes.join=function(){var $ns=$B.args("join",2,{self:null,iterable:null},["self","iterable"],arguments,{}),self=$ns["self"],iterable=$ns["iterable"];var next_func=$B.$getattr(_b_.iter(iterable),"__next__"),res=self.__class__.$factory(),empty=true;while(true){try{var item=next_func();if(empty){empty=false}else{res=bytes.__add__(res,self)}res=bytes.__add__(res,item)}catch(err){if($B.$isinstance(err,_b_.StopIteration)){break}throw err}}return res};var _lower=function(char_code){if(char_code>=65&&char_code<=90){return char_code+32}else{return char_code}};bytes.lower=function(self){var _res=[],pos=0;for(var i=0,len=self.source.length;i=0?$.count:src.length;if(!$.old.__class__){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name($.old)+"'")}else if(!$.old.__class__.$buffer_protocol){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name($.sep)+"'")}if(!$.new.__class__){throw _b_.TypeError.$factory("second argument must be a bytes-like "+"object, not '"+$B.class_name($.old)+"'")}else if(!$.new.__class__.$buffer_protocol){throw _b_.TypeError.$factory("second argument must be a bytes-like "+"object, not '"+$B.class_name($.sep)+"'")}for(var i=0;i255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")}return $.self.source.slice(start,$.end==-1?undefined:$.end).lastIndexOf(sub)+start}else if(!sub.__class__){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name($.sub)+"'")}else if(!sub.__class__.$buffer_protocol){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name(sub)+"'")}end=end==-1?self.source.length:Math.min(self.source.length,end);var len=sub.source.length;for(var i=end-len;i>=start;--i){var chunk=self.source.slice(i,i+len),found=true;for(var j=0;jstart){res.push(bytes.$factory(src.slice(start,stop)))}return res};bytes.splitlines=function(self){var 
$=$B.args("splitlines",2,{self:null,keepends:null},["self","keepends"],arguments,{keepends:false},null,null);if(!$B.$isinstance($.keepends,[_b_.bool,_b_.int])){throw _b_.TypeError("integer argument expected, got "+$B.get_class($.keepends).__name)}var keepends=_b_.int.$factory($.keepends),res=[],source=$.self.source,start=0,pos=0;if(!source.length){return res}while(pos96&&buffer[i]<123){buffer[i]-=32}else if(buffer[i]>64&&buffer[i]<91){buffer[i]+=32}}return bytes.$factory(buffer)};bytes.title=function(self){var src=self.source,len=src.length,buffer=src.slice(),current_char_is_letter=false,prev_char_was_letter=false,is_uppercase=false,is_lowercase=false;for(var i=0;i96&&buffer[i]<123;is_uppercase=buffer[i]>64&&buffer[i]<91;current_char_is_letter=is_lowercase||is_uppercase;if(current_char_is_letter){if(prev_char_was_letter&&is_uppercase){buffer[i]+=32}else if(!prev_char_was_letter&&is_lowercase){buffer[i]-=32}}prev_char_was_letter=current_char_is_letter}return bytes.$factory(buffer)};bytes.translate=function(self,table,_delete){if(_delete===undefined){_delete=[]}else if($B.$isinstance(_delete,bytes)){_delete=_delete.source}else{throw _b_.TypeError.$factory("Type "+$B.get_class(_delete).__name+" doesn't support the buffer API")}var res=[],pos=0;if($B.$isinstance(table,bytes)&&table.source.length==256){for(var i=0,len=self.source.length;i-1){continue}res[pos++]=table.source[self.source[i]]}}return bytes.$factory(res)};var _upper=function(char_code){if(char_code>=97&&char_code<=122){return char_code-32}else{return char_code}};bytes.upper=function(self){var _res=[],pos=0;for(var i=0,len=self.source.length;i>5==6){if(b[pos+1]===undefined){err_info=[byte,pos,"end"]}else if((b[pos+1]&192)!=128){err_info=[byte,pos,"continuation"]}if(err_info!==null){if(errors=="ignore"){pos++}else{throw _b_.UnicodeDecodeError.$factory("'utf-8' codec can't decode byte 0x"+err_info[0].toString(16)+" in position "+err_info[1]+(err_info[2]=="end"?": unexpected end of data":": invalid continuation byte"))}}else{var cp=byte&31;cp<<=6;cp+=b[pos+1]&63;s+=String.fromCodePoint(cp);pos+=2}}else if(byte>>4==14){if(b[pos+1]===undefined){err_info=[byte,pos,"end",pos+1]}else if((b[pos+1]&192)!=128){err_info=[byte,pos,"continuation",pos+2]}else if(b[pos+2]===undefined){err_info=[byte,pos+"-"+(pos+1),"end",pos+2]}else if((b[pos+2]&192)!=128){err_info=[byte,pos,"continuation",pos+3]}if(err_info!==null){if(errors=="ignore"){pos=err_info[3]}else if(errors=="surrogateescape"){for(var i=pos;i>3==30){if(b[pos+1]===undefined){err_info=[byte,pos,"end",pos+1]}else if((b[pos+1]&192)!=128){err_info=[byte,pos,"continuation",pos+2]}else if(b[pos+2]===undefined){err_info=[byte,pos+"-"+(pos+1),"end",pos+2]}else if((b[pos+2]&192)!=128){err_info=[byte,pos,"continuation",pos+3]}else if(b[pos+3]===undefined){err_info=[byte,pos+"-"+(pos+1)+"-"+(pos+2),"end",pos+3]}else if((b[pos+2]&192)!=128){err_info=[byte,pos,"continuation",pos+3]}if(err_info!==null){if(errors=="ignore"){pos=err_info[3]}else if(errors=="surrogateescape"){for(var i=pos;i")}}return decoded}return s};var encode=$B.encode=function(){var $=$B.args("encode",3,{s:null,encoding:null,errors:null},["s","encoding","errors"],arguments,{encoding:"utf-8",errors:"strict"},null,null),s=$.s,encoding=$.encoding,errors=$.errors;var t=[],pos=0,enc=normalise(encoding);switch(enc){case"utf-8":case"utf_8":case"utf8":for(var i=0,len=s.length;i>6),128+(cp&63))}else if(cp<=65535){t.push(224+(cp>>12),128+((cp&4095)>>6),128+(cp&63))}else{console.log("4 
bytes")}}break;case"latin":case"latin1":case"latin-1":case"latin_1":case"L1":case"iso8859_1":case"iso_8859_1":case"8859":case"cp819":case"windows1252":for(var i=0,len=s.length;i>2>other_size){return set_copy_and_difference(so,other)}var result=make_new_set();if(other_is_dict){for(var entry of set_iter_with_hash(so)){if(!_b_.dict.$lookup_by_key(other,entry.item,entry.hash).found){set_add(result,entry.item,entry.hash)}}return result}for(var entry of set_iter_with_hash(so)){if(!set_contains(other,entry.item,entry.hash)){set_add(result,entry.item,entry.hash)}}result.__class__=so.__class__;return result}function set_difference_update(so,other){if(so===other){return set.clear(so)}if($B.$isinstance(other,[set,frozenset])){for(var entry of set_iter_with_hash(other)){set_discard_entry(so,entry.item,entry.hash)}}else if($B.$isinstance(other,_b_.dict)){for(var entry of _b_.dict.$iter_items_with_hash(other)){set_discard_entry(so,entry.key,entry.hash)}}else{var iterator=$B.make_js_iterator(other);for(var key of iterator){set_discard_key(so,key)}}}const DISCARD_NOTFOUND=0,DISCARD_FOUND=1;function set_discard_entry(so,key,hash){var entry=set_lookkey(so,key,hash);if(!entry){return DISCARD_NOTFOUND}if(so.$store[entry.hash]!==undefined){set_remove(so,entry.hash,entry.index)}}function set_discard_key(so,key){return set_discard_entry(so,key)}function*set_iter(so){var ordered_keys=Object.keys(so.$store).sort();for(var hash of ordered_keys){if(so.$store[hash]!==undefined){for(var item of so.$store[hash]){yield item}}}}function*set_iter_with_hash(so){for(var hash in so.$store){if(so.$store[hash]!==undefined){for(var item of so.$store[hash]){yield{item:item,hash:hash}}}}}function set_remove(so,hash,index){so.$store[hash].splice(index,1);if(so.$store[hash].length==0){delete so.$store[hash]}so.$used--}function set_intersection(so,other){if(so===other){return set_copy(so)}var result=make_new_set_base_type(so),iterator;if($B.$isinstance(other,[set,frozenset])){if(other.$used>so.$used){var tmp=so;so=other;other=tmp}for(var entry of set_iter_with_hash(other)){if(set_contains(so,entry.item,entry.hash)){set_add(result,entry.item,entry.hash)}}}else if($B.$isinstance(other,_b_.dict)){for(var entry of _b_.dict.$iter_items_with_hash(other)){if(set_contains(so,entry.key,entry.hash)){set_add(result,entry.key,entry.hash)}}}else{var iterator=$B.make_js_iterator(other);for(var other_item of iterator){var test=set_contains(so,other_item);if(test){set_add(result,other_item)}}}return result}function set_intersection_multi(so,args){var result=set_copy(so);if(args.length==0){return result}for(var other of args){result=set_intersection(result,other)}return result}function set_lookkey(so,key,hash){if(hash===undefined){try{hash=$B.$hash(key)}catch(err){if($B.$isinstance(key,set)){hash=$B.$hash(frozenset.$factory(key))}else{throw err}}}var items=so.$store[hash];if(items===undefined){return false}for(var index=0,len=so.$store[hash].length;index0){set.clear(self)}set.update(self,iterable);return _b_.None};var set_iterator=$B.make_class("set_iterator",(function(so){return{__class__:set_iterator,so:so,it:set_iter(so),version:so.$version}}));set_iterator.__iter__=function(self){return self};set_iterator.__length_hint__=function(self){return self.so.$used};set_iterator.__next__=function(self){var res=self.it.next();if(res.done){throw _b_.StopIteration.$factory()}if(self.so.$version!=self.version){throw _b_.RuntimeError.$factory("Set changed size during iteration")}return res.value};set_iterator.__reduce_ex__=function(self,protocol){return 
$B.fast_tuple([_b_.iter,$B.fast_tuple([set_make_items(self.so)])])};$B.set_func_names(set_iterator,"builtins");set.__iter__=function(self){return set_iterator.$factory(self)};function check_version(s,version){if(s.$version!=version){throw _b_.RuntimeError.$factory("Set changed size during iteration")}}function set_make_items(so){var items=[];for(var hash in so.$store){items=items.concat(so.$store[hash])}return items}function make_hash_iter(obj,hash){let version=obj.$version,hashes=obj.$hashes[hash],len=hashes.length,i=0;const iterator={*[Symbol.iterator](){while(iset.__len__(other)){return false}for(var entry of set_iter_with_hash(self)){if(!set_lookkey(other,entry.item,entry.hash)){return false}}return true}else if($B.$isinstance(other,_b_.dict)){for(var entry of _b_.dict.$iter_items_with_hash(self)){if(!set_lookkey(other,entry.key,entry.hash)){return false}}return true}else{var member_func=$B.member_func(other);for(var entry of set_iter_with_hash(self)){if(!member_func(entry.item)){return false}}return true}};set.issuperset=function(){var $=$B.args("issuperset",2,{self:null,other:null},["self","other"],arguments,{},"args",null),self=$.self,other=$.other;if($B.$isinstance(other,[set,frozenset])){return set.issubset(other,self)}else{return set.issubset(set.$factory(other),self)}};set.__iand__=function(self,other){if(!$B.$isinstance(other,[set,frozenset])){return _b_.NotImplemented}set.intersection_update(self,other);return self};set.__isub__=function(self,other){if(!$B.$isinstance(other,[set,frozenset])){return _b_.NotImplemented}set_difference_update(self,other);return self};set.__ixor__=function(self,other){if(!$B.$isinstance(other,[set,frozenset])){return _b_.NotImplemented}set.symmetric_difference_update(self,other);return self};set.__ior__=function(self,other){if(!$B.$isinstance(other,[set,frozenset])){return _b_.NotImplemented}set.update(self,other);return self};set.$literal=function(items){var res=make_new_set(set);for(var item of items){if(item.constant){set_add(res,item.constant[0],item.constant[1])}else if(item.starred){for(var item of $B.make_js_iterator(item.starred)){set_add(res,item)}}else{set_add(res,item.item)}}return res};set.$factory=function(){var args=[set].concat(Array.from(arguments)),self=set.__new__.apply(null,args);set.__init__(self,...arguments);return self};$B.set_func_names(set,"builtins");set.__class_getitem__=_b_.classmethod.$factory(set.__class_getitem__);var frozenset=$B.make_class("frozenset");frozenset.$native=true;for(var attr in set){switch(attr){case"add":case"clear":case"discard":case"pop":case"remove":case"update":break;default:if(frozenset[attr]==undefined){if(typeof set[attr]=="function"){frozenset[attr]=function(x){return function(){return set[x].apply(null,arguments)}}(attr)}else{frozenset[attr]=set[attr]}}}}frozenset.__hash__=function(self){if(self===undefined){return frozenset.__hashvalue__||$B.$py_next_hash--}if(self.__hashvalue__!==undefined){return self.__hashvalue__}var _hash=1927868237;_hash*=self.$used;for(var entry of set_iter_with_hash(self)){var _h=entry.hash;_hash^=(_h^89869747^_h<<16)*3644798167}_hash=_hash*69069+907133923;if(_hash==-1){_hash=590923713}return self.__hashvalue__=_hash};frozenset.__init__=function(){return _b_.None};frozenset.__new__=function(cls,iterable){if(cls===undefined){throw _b_.TypeError.$factory("frozenset.__new__(): not enough arguments")}var self=make_new_set(cls);if(iterable===undefined){return self}$B.check_nb_args_no_kw("__new__",2,arguments);if(cls===frozenset&&iterable.__class__===frozenset){return 
iterable}set.update(self,iterable);return self};frozenset.__repr__=function(self){$B.builtins_repr_check(frozenset,arguments);return set_repr(self)};frozenset.copy=function(self){if(self.__class__===frozenset){return self}return set_copy(self)};var singleton_id=Math.floor(Math.random()*Math.pow(2,40));function empty_frozenset(){var res=frozenset.__new__(frozenset);res.$id=singleton_id;return res}frozenset.$factory=function(){var args=[frozenset].concat(Array.from(arguments)),self=frozenset.__new__.apply(null,args);frozenset.__init__(self,...arguments);return self};$B.set_func_names(frozenset,"builtins");_b_.set=set;_b_.frozenset=frozenset})(__BRYTHON__);(function($B){var _b_=$B.builtins,_window=globalThis;var Module=$B.module=$B.make_class("module",(function(name,doc,$package){return{$tp_class:Module,__builtins__:_b_.__builtins__,__name__:name,__doc__:doc||_b_.None,__package__:$package||_b_.None}}));Module.__dir__=function(self){if(self.__dir__){return $B.$call(self.__dir__)()}var res=[];for(var key in self){if(key.startsWith("$")||key=="__class__"){continue}res[res.length]=key}return res.sort()};Module.__new__=function(cls,name,doc,$package){return{__class__:cls,__builtins__:_b_.__builtins__,__name__:name,__doc__:doc||_b_.None,__package__:$package||_b_.None}};Module.__repr__=Module.__str__=function(self){var res=""};Module.__setattr__=function(self,attr,value){if(self.__name__=="__builtins__"){$B.builtins[attr]=value}else{self[attr]=value}};$B.set_func_names(Module,"builtins");$B.make_import_paths=function(filename){var elts=filename.split("/");elts.pop();var script_dir=elts.join("/"),path=[$B.brython_path+"Lib",$B.brython_path+"libs",script_dir,$B.brython_path+"Lib/site-packages"];var meta_path=[],path_hooks=[];if($B.use_VFS){meta_path.push($B.finders.VFS)}var static_stdlib_import=$B.get_option_from_filename("static_stdlib_import",filename);if(static_stdlib_import!==false&&$B.protocol!="file"){meta_path.push($B.finders.stdlib_static);if(path.length>3){path.shift();path.shift()}}var pythonpath=$B.get_option_from_filename("pythonpath",filename);if(pythonpath){var ix=path.indexOf($B.script_dir);if(ix===-1){console.log("bizarre",path,$B.script_dir)}else{path.splice(ix,1,...pythonpath)}}if($B.protocol!=="file"){meta_path.push($B.finders.path);path_hooks.push($B.url_hook)}$B.import_info[filename]={meta_path:meta_path,path_hooks:path_hooks,path:path}};function $download_module(mod,url,$package){var xhr=new XMLHttpRequest,fake_qs="?v="+(new Date).getTime(),res=null,mod_name=mod.__name__;var timer=_window.setTimeout((function(){xhr.abort()}),5e3);if($B.get_option("cache")){xhr.open("GET",url,false)}else{xhr.open("GET",url+fake_qs,false)}xhr.send();if($B.$CORS){if(xhr.status==200||xhr.status==0){res=xhr.responseText}else{res=_b_.ModuleNotFoundError.$factory("No module named '"+mod_name+"'")}}else{if(xhr.readyState==4){if(xhr.status==200){res=xhr.responseText;mod.$last_modified=xhr.getResponseHeader("Last-Modified")}else{console.info("Error "+xhr.status+" means that Python module "+mod_name+" was not found at url "+url);res=_b_.ModuleNotFoundError.$factory("No module named '"+mod_name+"'")}}}_window.clearTimeout(timer);if(res==null){throw _b_.ModuleNotFoundError.$factory("No module named '"+mod_name+"' (res is null)")}if(res.constructor===Error){throw res}return res}$B.$download_module=$download_module;function import_js(mod,path){try{var module_contents=$download_module(mod,path,undefined)}catch(err){return null}run_js(module_contents,path,mod);return 
true}$B.addToImported=function(name,modobj){$B.imported[name]=modobj;if(modobj===undefined){throw _b_.ImportError.$factory("imported not set by module")}modobj.__class__=Module;modobj.__name__=name;for(var attr in modobj){if(typeof modobj[attr]=="function"){modobj[attr].$infos={__module__:name,__name__:attr,__qualname__:attr};modobj[attr].$in_js_module=true}else if($B.$isinstance(modobj[attr],_b_.type)&&!modobj[attr].hasOwnProperty("__module__")){modobj[attr].__module__=name}}};function run_js(module_contents,path,_module){var module_id="$locals_"+_module.__name__.replace(/\./g,"_");try{new Function(module_contents)()}catch(err){throw $B.exception(err)}var modobj=$B.imported[_module.__name__];if(modobj===undefined){throw _b_.ImportError.$factory("imported not set by module")}modobj.__class__=Module;modobj.__name__=_module.__name__;for(var attr in modobj){if(typeof modobj[attr]=="function"){modobj[attr].$infos={__module__:_module.__name__,__name__:attr,__qualname__:attr};modobj[attr].$in_js_module=true}else if($B.$isinstance(modobj[attr],_b_.type)&&!modobj[attr].hasOwnProperty("__module__")){modobj[attr].__module__=_module.__name__}}return true}function show_ns(){var kk=Object.keys(_window);for(var i=0,len=kk.length;i1){console.log("error in imported module",module);console.log("stack",$B.make_frames_stack(err.$frame_obj))}throw err}}try{js=compiled?module_contents:root.to_js();if($B.get_option("debug")==10){console.log("code for module "+module.__name__);console.log($B.format_indent(js,0))}var src=js;js="var $module = (function(){\n"+js;var prefix="locals_";js+="return "+prefix;js+=module.__name__.replace(/\./g,"_")+"})(__BRYTHON__)\n"+"return $module";var module_id=prefix+module.__name__.replace(/\./g,"_");var mod=new Function(module_id,js)(module)}catch(err){err.$frame_obj=err.$frame_obj||$B.frame_obj;if($B.get_option("debug",err)>2){console.log(err+" for module "+module.__name__);console.log("module",module);console.log(root);if($B.get_option("debug",err)>1){console.log($B.format_indent(js,0))}for(var attr in err){console.log(attr,err[attr])}console.log("message: "+err.$message);console.log("filename: "+err.fileName);console.log("linenum: "+err.lineNumber);console.log(js.split("\n").slice(err.lineNumber-3,err.lineNumber+3).join("\n"));console.log(err.stack)}throw err}try{for(var attr in mod){module[attr]=mod[attr]}module.__initializing__=false;$B.imported[module.__name__]=module;return{content:src,name:mod_name,imports:Object.keys(root.imports).join(",")}}catch(err){console.log(""+err+" "+" for module "+module.__name__);for(var attr in err){console.log(attr+" "+err[attr])}if($B.get_option("debug")>0){console.log("line info "+__BRYTHON__.line_info)}throw err}}$B.run_py=run_py;$B.run_js=run_js;var ModuleSpec=$B.make_class("ModuleSpec",(function(fields){fields.__class__=ModuleSpec;return fields}));ModuleSpec.__str__=ModuleSpec.__repr__=function(self){var res=`ModuleSpec(name='${self.name}', `+`loader=${_b_.str.$factory(self.loader)}, `+`origin='${self.origin}'`;if(self.submodule_search_locations!==_b_.None){res+=`, submodule_search_locations=`+`${_b_.str.$factory(self.submodule_search_locations)}`}return res+")"};$B.set_func_names(ModuleSpec,"builtins");function parent_package(mod_name){var parts=mod_name.split(".");parts.pop();return parts.join(".")}var VFSFinder=$B.make_class("VFSFinder",(function(){return{__class__:VFSFinder}}));VFSFinder.find_spec=function(cls,fullname,path){var stored,is_package,timestamp;if(!$B.use_VFS){return 
_b_.None}stored=$B.VFS[fullname];if(stored===undefined){return _b_.None}is_package=stored[3]||false;timestamp=stored.timestamp;if(stored){var is_builtin=$B.builtin_module_names.indexOf(fullname)>-1;return ModuleSpec.$factory({name:fullname,loader:VFSLoader.$factory(),origin:is_builtin?"built-in":"brython_stdlib",submodule_search_locations:is_package?[]:_b_.None,loader_state:{stored:stored,timestamp:timestamp},cached:_b_.None,parent:is_package?fullname:parent_package(fullname),has_location:_b_.False})}};$B.set_func_names(VFSFinder,"");for(var method in VFSFinder){if(typeof VFSFinder[method]=="function"){VFSFinder[method]=_b_.classmethod.$factory(VFSFinder[method])}}const VFSLoader=$B.make_class("VFSLoader",(function(){return{__class__:VFSLoader}}));VFSLoader.create_module=function(self,spec){return _b_.None};VFSLoader.exec_module=function(self,modobj){var stored=modobj.__spec__.loader_state.stored,timestamp=modobj.__spec__.loader_state.timestamp;var ext=stored[0],module_contents=stored[1],imports=stored[2];modobj.$is_package=stored[3]||false;var path="VFS."+modobj.__name__;path+=modobj.$is_package?"/__init__.py":ext;modobj.__file__=path;$B.file_cache[modobj.__file__]=$B.VFS[modobj.__name__][1];$B.url2name[modobj.__file__]=modobj.__name__;if(ext==".js"){run_js(module_contents,modobj.__path__,modobj)}else if($B.precompiled.hasOwnProperty(modobj.__name__)){if($B.get_option("debug")>1){console.info("load",modobj.__name__,"from precompiled")}var parts=modobj.__name__.split(".");for(var i=0;i";mod.__package__=parent;mod.$is_package=true}else{var elts=parent.split(".");elts.pop();mod.__package__=elts.join(".")}mod.__file__=path;try{var parent_id=parent.replace(/\./g,"_"),prefix="locals_";mod_js+="return "+prefix+parent_id;var $module=new Function(prefix+parent_id,mod_js)(mod)}catch(err){if($B.get_option("debug")>1){console.log("error in module",mod);console.log(err);for(var k in err){console.log(k,err[k])}console.log(Object.keys($B.imported));console.log(modobj,"mod_js",mod_js)}throw err}for(var attr in $module){mod[attr]=$module[attr]}$module.__file__=path;if(i>0){$B.builtins.setattr($B.imported[parts.slice(0,i).join(".")],parts[i],$module)}}return $module}else{var mod_name=modobj.__name__;if($B.get_option("debug")>1){console.log("run Python code from VFS",mod_name)}var record=run_py(module_contents,modobj.__file__,modobj);record.imports=imports.join(",");record.is_package=modobj.$is_package;record.timestamp=$B.timestamp;record.source_ts=timestamp;$B.precompiled[mod_name]=record.is_package?[record.content]:record.content;var elts=mod_name.split(".");if(elts.length>1){elts.pop()}if($B.$options.indexedDB&&$B.indexedDB&&$B.idb_name){var idb_cx=indexedDB.open($B.idb_name);idb_cx.onsuccess=function(evt){var db=evt.target.result,tx=db.transaction("modules","readwrite"),store=tx.objectStore("modules"),cursor=store.openCursor(),request=store.put(record);request.onsuccess=function(){if($B.get_option("debug")>1){console.info(modobj.__name__,"stored in db")}};request.onerror=function(){console.info("could not store "+modobj.__name__)}}}}};$B.set_func_names(VFSLoader,"builtins");var finder_cpython={__class__:_b_.type,__mro__:[_b_.object],__qualname__:"CPythonFinder",$infos:{__module__:"builtins",__name__:"CPythonFinder"},create_module:function(cls,spec){return _b_.None},exec_module:function(cls,modobj){console.log("exec PYthon module",modobj);var loader_state=modobj.__spec__.loader_state;var content=loader_state.content;delete 
modobj.__spec__["loader_state"];modobj.$is_package=loader_state.is_package;modobj.__file__=loader_state.__file__;$B.file_cache[modobj.__file__]=content;$B.url2file[modobj.__file__]=modobj.__name__;var mod_name=modobj.__name__;if($B.get_option("debug")>1){console.log("run Python code from CPython",mod_name)}run_py(content,modobj.__path__,modobj)},find_module:function(cls,name,path){return{__class__:Loader,load_module:function(name,path){var spec=cls.find_spec(cls,name,path);var mod=Module.$factory(name);$B.imported[name]=mod;mod.__spec__=spec;cls.exec_module(cls,mod)}}},find_spec:function(cls,fullname,path){console.log("finder cpython",fullname);var xhr=new XMLHttpRequest,url="/cpython_import?module="+fullname,result;xhr.open("GET",url,false);xhr.onreadystatechange=function(){if(this.readyState==4&&this.status==200){var data=JSON.parse(this.responseText);result=ModuleSpec.$factory({name:fullname,loader:cls,origin:"CPython",submodule_search_locations:data.is_package?[]:_b_.None,loader_state:{content:data.content},cached:_b_.None,parent:data.is_package?fullname:parent_package(fullname),has_location:_b_.False})}};xhr.send();return result}};$B.set_func_names(finder_cpython,"");for(var method in finder_cpython){if(typeof finder_cpython[method]=="function"){finder_cpython[method]=_b_.classmethod.$factory(finder_cpython[method])}}finder_cpython.$factory=function(){return{__class__:finder_cpython}};var StdlibStaticFinder=$B.make_class("StdlibStaticFinder",(function(){return{__class__:StdlibStaticFinder}}));StdlibStaticFinder.find_spec=function(self,fullname,path){if($B.stdlib&&$B.get_option("static_stdlib_import")){var address=$B.stdlib[fullname];if(address===undefined){var elts=fullname.split(".");if(elts.length>1){elts.pop();var $package=$B.stdlib[elts.join(".")];if($package&&$package[1]){address=["py"]}}}if(address!==undefined){var ext=address[0],is_pkg=address[1]!==undefined,path=$B.brython_path+(ext=="py"?"Lib/":"libs/")+fullname.replace(/\./g,"/"),metadata={ext:ext,is_package:is_pkg,path:path+(is_pkg?"/__init__.py":ext=="py"?".py":".js"),address:address},_module=Module.$factory(fullname);metadata.code=$download_module(_module,metadata.path);var res=ModuleSpec.$factory({name:fullname,loader:PathLoader.$factory(),origin:metadata.path,submodule_search_locations:is_pkg?[path]:_b_.None,loader_state:metadata,cached:_b_.None,parent:is_pkg?fullname:parent_package(fullname),has_location:_b_.True});return res}}return _b_.None};$B.set_func_names(StdlibStaticFinder,"");for(var method in StdlibStaticFinder){if(typeof StdlibStaticFinder[method]=="function"){StdlibStaticFinder[method]=_b_.classmethod.$factory(StdlibStaticFinder[method])}}StdlibStaticFinder.$factory=function(){return{__class__:StdlibStaticFinder}};var PathFinder=$B.make_class("PathFinder",(function(){return{__class__:PathFinder}}));PathFinder.find_spec=function(cls,fullname,path){if($B.VFS&&$B.VFS[fullname]){return _b_.None}if($B.is_none(path)){path=get_info("path")}for(var i=0,li=path.length;i");for(var method in PathFinder){if(typeof PathFinder[method]=="function"){PathFinder[method]=_b_.classmethod.$factory(PathFinder[method])}}var PathEntryFinder=$B.make_class("PathEntryFinder",(function(path_entry,hint){return{__class__:PathEntryFinder,path_entry:path_entry,hint:hint}}));PathEntryFinder.find_spec=function(self,fullname){var loader_data={},notfound=true,hint=self.hint,base_path=self.path_entry+fullname.match(/[^.]+$/g)[0],modpaths=[],py_ext=$B.get_option("python_extension");var 
tryall=hint===undefined;if(tryall||hint=="py"){modpaths=modpaths.concat([[base_path+py_ext,"py",false],[base_path+"/__init__"+py_ext,"py",true]])}for(var j=0;notfound&&j-1){meta_path.splice(path_ix,1)}}for(var i=0,len=meta_path.length;i0;if(modobj==_b_.None){import_error(mod_name)}if(modobj===undefined){if($B.is_none(fromlist)){fromlist=[]}for(var i=0,modsep="",_mod_name="",len=parsed_name.length-1,__path__=_b_.None;i<=len;++i){var _parent_name=_mod_name;_mod_name+=modsep+parsed_name[i];modsep=".";var modobj=$B.imported[_mod_name];if($test){console.log("iter",i,_mod_name,"\nmodobj",modobj,"\n__path__",__path__,Array.isArray(__path__));alert()}if(modobj==_b_.None){import_error(_mod_name)}else if(modobj===undefined){try{import_engine(_mod_name,__path__,from_stdlib)}catch(err){delete $B.imported[_mod_name];throw err}if($B.is_none($B.imported[_mod_name])){import_error(_mod_name)}else{if(_parent_name){_b_.setattr($B.imported[_parent_name],parsed_name[i],$B.imported[_mod_name])}}}else if($B.imported[_parent_name]&&$B.imported[_parent_name][parsed_name[i]]===undefined){_b_.setattr($B.imported[_parent_name],parsed_name[i],$B.imported[_mod_name])}if(i0){return $B.imported[mod_name]}else{let package_name=mod_name;while(parsed_name.length>1){var module=parsed_name.pop();package_name=parsed_name.join(".");if($B.imported[package_name]===undefined){$B.$import(package_name,globals,locals,[]);$B.imported[package_name][module]=$B.imported[mod_name];mod_name=module}}return $B.imported[package_name]}};$B.$import=function(mod_name,fromlist,aliases,locals){var test=false;if(test){console.log("import",mod_name,fromlist,aliases);alert()}if(mod_name=="_frozen_importlib_external"){var alias=aliases[mod_name]||mod_name;var imp=$B.$import_from("importlib",["_bootstrap_external"],{_bootstrap_external:alias},0,locals);var _bootstrap=$B.imported.importlib._bootstrap,_bootstrap_external=$B.imported.importlib["_bootstrap_external"];_bootstrap_external._set_bootstrap_module(_bootstrap);_bootstrap._bootstap_external=_bootstrap_external;var _frozen_importlib=$B.imported._frozen_importlib;if(_frozen_importlib){_frozen_importlib._bootstrap_external=_bootstrap_external}return}var level=0,frame=$B.frame_obj.frame,current_module=frame[2],parts=current_module.split(".");while(mod_name.length>0&&mod_name.startsWith(".")){level++;mod_name=mod_name.substr(1);if(parts.length==0){throw _b_.ImportError.$factory("Parent module '' not loaded, "+"cannot perform relative import")}current_module=parts.join(".");parts.pop()}if(level>0){mod_name=current_module+(mod_name.length>0?"."+mod_name:"")}var parts=mod_name.split(".");if(mod_name[mod_name.length-1]=="."){parts.pop()}var norm_parts=[],prefix=true;for(var i=0,len=parts.length;i1){console.log($err3);console.log($B.frame_obj.frame)}throw _b_.ImportError.$factory("cannot import name '"+name+"'")}}}}return locals}};$B.$import_from=function(module,names,aliases,level,locals){var current_module_name=$B.frame_obj.frame[2],parts=current_module_name.split("."),relative=level>0;if(relative){var current_module=$B.imported[parts.join(".")];if(current_module===undefined){throw _b_.ImportError.$factory("attempted relative import with no known parent package")}if(!current_module.$is_package){if(parts.length==1){throw _b_.ImportError.$factory("attempted relative import with no known parent package")}else{parts.pop();current_module=$B.imported[parts.join(".")]}}while(level>0){var current_module=$B.imported[parts.join(".")];if(!current_module.$is_package){throw _b_.ImportError.$factory("attempted relative 
import with no known parent package")}level--;parts.pop()}if(module){var submodule=current_module.__name__+"."+module;$B.$import(submodule,[],{},{});current_module=$B.imported[submodule]}if(names.length>0&&names[0]=="*"){for(var key in current_module){if(key.startsWith("$")||key.startsWith("_")){continue}locals[key]=current_module[key]}}else{for(var name of names){var alias=aliases[name]||name;if(current_module[name]!==undefined){locals[alias]=current_module[name]}else{var sub_module=current_module.__name__+"."+name;$B.$import(sub_module,[],{},{});locals[alias]=$B.imported[sub_module]}}}}else{$B.$import(module,names,aliases,locals)}};$B.import_all=function(locals,module){for(var attr in module){if("_$".indexOf(attr.charAt(0))==-1){locals[attr]=module[attr]}}};$B.$meta_path=[VFSFinder,StdlibStaticFinder,PathFinder];$B.finders={VFS:VFSFinder,stdlib_static:StdlibStaticFinder,path:PathFinder,CPython:finder_cpython};function optimize_import_for_path(path,filetype){if(path.slice(-1)!="/"){path=path+"/"}var value=filetype=="none"?_b_.None:url_hook(path,filetype);$B.path_importer_cache[path]=value}var Loader={__class__:$B.$type,__mro__:[_b_.object],__name__:"Loader"};var _importlib_module={__class__:Module,__name__:"_importlib",Loader:Loader,VFSFinder:VFSFinder,StdlibStatic:StdlibStaticFinder,ImporterPath:PathFinder,UrlPathFinder:url_hook,optimize_import_for_path:optimize_import_for_path};_importlib_module.__repr__=_importlib_module.__str__=function(){return""};$B.imported["_importlib"]=_importlib_module})(__BRYTHON__);(function($B){var _b_=$B.builtins;var unicode_tables=$B.unicode_tables;$B.has_surrogate=function(s){for(var i=0;i=55296&&code<=56319){return true}}return false};var escape2cp={b:"\b",f:"\f",n:"\n",r:"\r",t:"\t",v:"\v"};$B.surrogates=function(s){var s1="",escaped=false;for(var char of s){if(escaped){var echar=escape2cp[char];if(echar!==undefined){s1+=echar}else{s1+="\\"+char}escaped=false}else if(char=="\\"){escaped=true}else{s1+=char}}var codepoints=[],surrogates=[],j=0;for(var i=0,len=s1.length;i=65536){surrogates.push(j);i++}j++}return surrogates};$B.String=function(s){var srg=$B.surrogates(s);return srg.length==0?s:$B.make_String(s,srg)};$B.make_String=function(s,surrogates){if(!Array.isArray(surrogates)){throw Error("not list")}var res=new String(s);res.__class__=str;res.surrogates=surrogates;return res};function pypos2jspos(s,pypos){if(s.surrogates===undefined){return pypos}var nb=0;while(s.surrogates[nb]=55296&&code<=56319){var v=65536;v+=(code&1023)<<10;v+=s.charCodeAt(i+1)&1023;cps.push(v);i++}else{cps.push(code)}}return s.codepoints=cps}str.__add__=function(_self,other){if(!$B.$isinstance(other,str)){try{return $B.$getattr(other,"__radd__")(_self)}catch(err){throw _b_.TypeError.$factory("Can't convert "+$B.class_name(other)+" to str implicitly")}}[_self,other]=to_string([_self,other]);var res=$B.String(_self+other);return res};str.__contains__=function(_self,item){if(!$B.$isinstance(item,str)){throw _b_.TypeError.$factory("'in ' requires "+"string as left operand, not "+$B.class_name(item))}[_self,item]=to_string([_self,item]);if(item.__class__===str||$B.$isinstance(item,str)){var nbcar=item.length}else{var nbcar=_b_.len(item)}if(nbcar==0){return true}var len=_self.length;if(len==0){return nbcar==0}for(var i=0,len=_self.length;i=0&&pos=65536){return $B.String(_self.substr(jspos,2))}else{return _self[jspos]}}throw _b_.IndexError.$factory("string index out of range")}if($B.$isinstance(arg,_b_.slice)){return 
_b_.str.$getitem_slice(_self,arg)}if($B.$isinstance(arg,_b_.bool)){return _self.__getitem__(_b_.int.$factory(arg))}throw _b_.TypeError.$factory("string indices must be integers")};str.$getitem_slice=function(_self,slice){var len=str.__len__(_self),s=_b_.slice.$conv_for_seq(slice,len),start=pypos2jspos(_self,s.start),stop=pypos2jspos(_self,s.stop),step=s.step;var res="",i=null;if(step>0){if(stop<=start){return""}for(var i=start;i=start){return""}for(var i=start;i>stop;i+=step){res+=_self[i]}}return $B.String(res)};var prefix=2,suffix=3,mask=2**32-1;str.$nb_str_hash_cache=0;function fnv(p){if(p.length==0){return 0}var x=prefix;x=(x^p[0]<<7)&mask;for(var i=0,len=p.length;imax_precision){throw _b_.OverflowError.$factory("precision too large")}var s;if(val.__class__===$B.long_int){s=$B.long_int.to_base(val,10)}else{s=val.toString()}if(precision-s.length>max_repeat){throw _b_.OverflowError.$factory("precision too large")}if(s[0]==="-"){return"-"+"0".repeat(Math.max(0,precision-s.length+1))+s.slice(1)}return"0".repeat(Math.max(0,precision-s.length))+s};var format_float_precision=function(val,upper,flags,modifier){var precision=flags.precision;if(isFinite(val)){return modifier(val,precision,flags,upper)}if(val===Infinity){val="inf"}else if(val===-Infinity){val="-inf"}else{val="nan"}if(upper){return val.toUpperCase()}return val};var format_sign=function(val,flags){if(flags.sign){if(val>=0||isNaN(val)||val===Number.POSITIVE_INFINITY){return"+"}}else if(flags.space){if(val>=0||isNaN(val)){return" "}}return""};var str_format=function(val,flags){flags.pad_char=" ";return format_padding(str.$factory(val),flags)};var num_format=function(val,flags){number_check(val,flags);if($B.$isinstance(val,_b_.float)){val=parseInt(val.value)}else if(!$B.$isinstance(val,_b_.int)){val=parseInt(val)}var s=format_int_precision(val,flags);if(flags.pad_char==="0"){if(val<0){s=s.substring(1);return"-"+format_padding(s,flags,true)}var sign=format_sign(val,flags);if(sign!==""){return sign+format_padding(s,flags,true)}}return format_padding(format_sign(val,flags)+s,flags)};var repr_format=function(val,flags){flags.pad_char=" ";return format_padding(_b_.repr(val),flags)};var ascii_format=function(val,flags,type){flags.pad_char=" ";var ascii;if(type=="bytes"){var repr=_b_.repr(val);ascii=_b_.str.encode(repr,"ascii","backslashreplace");ascii=_b_.bytes.decode(ascii,"ascii")}else{ascii=_b_.ascii(val)}return format_padding(ascii,flags)};var _float_helper=function(val,flags){number_check(val,flags);if(flags.precision===undefined){if(!flags.decimal_point){flags.precision=6}else{flags.precision=0}}else{flags.precision=parseInt(flags.precision,10);validate_precision(flags.precision)}return $B.$isinstance(val,_b_.int)?val:val.value};var trailing_zeros=/(.*?)(0+)([eE].*)/,leading_zeros=/\.(0*)/,trailing_dot=/\.$/;var validate_precision=function(precision){if(precision>20){precision=20}};function handle_special_values(value,upper){var special;if(isNaN(value)){special=upper?"NAN":"nan"}else if(value==Number.POSITIVE_INFINITY){special=upper?"INF":"inf"}else if(value==Number.NEGATIVE_INFINITY){special=upper?"-INF":"-inf"}return special}var floating_point_format=function(val,upper,flags){val=_float_helper(val,flags);var special=handle_special_values(val,upper);if(special){return format_padding(format_sign(val,flags)+special,flags)}var p=flags.precision;if(p==0){p=1}var exp_format=val.toExponential(p-1),e_index=exp_format.indexOf("e"),exp=parseInt(exp_format.substr(e_index+1)),res;function remove_zeros(v){if(flags.alternate){return 
v}if(v.indexOf(".")>-1){while(v.endsWith("0")){v=v.substr(0,v.length-1)}if(v.endsWith(".")){v=v.substr(0,v.length-1)}}return v}if(-4<=exp&&exp-1){return BigInt(v).toString()}const mul=Math.pow(10,d);var is_neg=v<0;if(is_neg){v=-v}var res_floor=(Math.floor(v*mul)/mul).toFixed(d),res_ceil=(Math.ceil(v*mul)/mul).toFixed(d),res;if(v-res_floor==res_ceil-v){var last=res_floor[res_floor.length-1];res=last.match(/[02468]/)?res_floor:res_ceil}else{res=v-res_floor1){mant+="."+parts[0].substr(1)+(parts[1]||"")}else if(parts[1]){mant+="."+parts[1]}}mant=parseFloat(mant);mant=roundDownToFixed(parseFloat(mant),precision);if(parseFloat(mant)==10){parts=mant.split(".");parts[0]="1";mant=parts.join(".");exp=parseInt(exp)+1}if(flags.alternate&&mant.indexOf(".")==-1){mant+="."}if(exp.toString().length==1){exp="0"+exp}return`${is_neg?"-":""}${mant}${upper?"E":"e"}${exp_sign}${exp}`};var floating_point_exponential_format=function(val,upper,flags){val=_float_helper(val,flags);return format_padding(format_sign(val,flags)+format_float_precision(val,upper,flags,_floating_exp_helper),flags)};$B.formatters={floating_point_format:floating_point_format,floating_point_decimal_format:floating_point_decimal_format,floating_point_exponential_format:floating_point_exponential_format};var signed_hex_format=function(val,upper,flags){var ret;if(!$B.$isinstance(val,_b_.int)){throw _b_.TypeError.$factory(`%X format: an integer is required, not ${$B.class_name(val)}`)}if(val.__class__===$B.long_int){ret=val.value.toString(16)}else{ret=parseInt(val);ret=ret.toString(16)}ret=format_int_precision(ret,flags);if(upper){ret=ret.toUpperCase()}if(flags.pad_char==="0"){if(val<0){ret=ret.substring(1);ret="-"+format_padding(ret,flags,true)}var sign=format_sign(val,flags);if(sign!==""){ret=sign+format_padding(ret,flags,true)}}if(flags.alternate){if(ret.charAt(0)==="-"){if(upper){ret="-0X"+ret.slice(1)}else{ret="-0x"+ret.slice(1)}}else{if(upper){ret="0X"+ret}else{ret="0x"+ret}}}return format_padding(format_sign(val,flags)+ret,flags)};var octal_format=function(val,flags){number_check(val,flags);var ret;if(val.__class__===$B.long_int){ret=$B.long_int.to_base(8)}else{ret=parseInt(val);ret=ret.toString(8)}ret=format_int_precision(ret,flags);if(flags.pad_char==="0"){if(val<0){ret=ret.substring(1);ret="-"+format_padding(ret,flags,true)}var sign=format_sign(val,flags);if(sign!==""){ret=sign+format_padding(ret,flags,true)}}if(flags.alternate){if(ret.charAt(0)==="-"){ret="-0o"+ret.slice(1)}else{ret="0o"+ret}}return format_padding(ret,flags)};function series_of_bytes(val,flags){if(val.__class__&&val.__class__.$buffer_protocol){var it=_b_.iter(val),ints=[];while(true){try{ints.push(_b_.next(it))}catch(err){if(err.__class__===_b_.StopIteration){var b=_b_.bytes.$factory(ints);return format_padding(_b_.bytes.decode(b,"ascii"),flags)}throw err}}}else{try{var bytes_obj=$B.$getattr(val,"__bytes__")();return format_padding(_b_.bytes.decode(bytes_obj),flags)}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("%b does not accept '"+$B.class_name(val)+"'")}throw err}}}var single_char_format=function(val,flags,type){if(type=="bytes"){if($B.$isinstance(val,_b_.int)){if(val.__class__===$B.long_int||val<0||val>255){throw _b_.OverflowError.$factory("%c arg not in range(256)")}}else if($B.$isinstance(val,[_b_.bytes,_b_.bytearray])){if(val.source.length>1){throw _b_.TypeError.$factory("%c requires an integer in range(256) or a single byte")}val=val.source[0]}}else{if($B.$isinstance(val,_b_.str)){if(_b_.str.__len__(val)==1){return val}throw 
_b_.TypeError.$factory("%c requires int or char")}else if(!$B.$isinstance(val,_b_.int)){throw _b_.TypeError.$factory("%c requires int or char")}if(val.__class__===$B.long_int&&(val.value<0||val.value>=1114112)||(val<0||val>=1114112)){throw _b_.OverflowError.$factory("%c arg not in range(0x110000)")}}return format_padding(_b_.chr(val),flags)};var num_flag=function(c,flags){if(c==="0"&&!flags.padding&&!flags.decimal_point&&!flags.left){flags.pad_char="0";return}if(!flags.decimal_point){flags.padding=(flags.padding||"")+c}else{flags.precision=(flags.precision||"")+c}};var decimal_point_flag=function(val,flags){if(flags.decimal_point){throw new UnsupportedChar}flags.decimal_point=true};var neg_flag=function(val,flags){flags.pad_char=" ";flags.left=true};var space_flag=function(val,flags){flags.space=true};var sign_flag=function(val,flags){flags.sign=true};var alternate_flag=function(val,flags){flags.alternate=true};var char_mapping={b:series_of_bytes,s:str_format,d:num_format,i:num_format,u:num_format,o:octal_format,r:repr_format,a:ascii_format,g:function(val,flags){return floating_point_format(val,false,flags)},G:function(val,flags){return floating_point_format(val,true,flags)},f:function(val,flags){return floating_point_decimal_format(val,false,flags)},F:function(val,flags){return floating_point_decimal_format(val,true,flags)},e:function(val,flags){return floating_point_exponential_format(val,false,flags)},E:function(val,flags){return floating_point_exponential_format(val,true,flags)},x:function(val,flags){return signed_hex_format(val,false,flags)},X:function(val,flags){return signed_hex_format(val,true,flags)},c:single_char_format,0:function(val,flags){return num_flag("0",flags)},1:function(val,flags){return num_flag("1",flags)},2:function(val,flags){return num_flag("2",flags)},3:function(val,flags){return num_flag("3",flags)},4:function(val,flags){return num_flag("4",flags)},5:function(val,flags){return num_flag("5",flags)},6:function(val,flags){return num_flag("6",flags)},7:function(val,flags){return num_flag("7",flags)},8:function(val,flags){return num_flag("8",flags)},9:function(val,flags){return num_flag("9",flags)},"-":neg_flag," ":space_flag,"+":sign_flag,".":decimal_point_flag,"#":alternate_flag};var UnsupportedChar=function(){this.name="UnsupportedChar"};const conversion_flags="#0- +",length_modifiers="hlL",conversion_types="diouxXeEfFgGcrsa";function parse_mod_format(s,type,pos){var flags={pad_char:" "},len=s.length,start_pos=pos,mo;pos++;while(pos-1){flags.conversion_flag=char;if(char=="#"){flags.alternate=true}else if(char=="-"){flags.left=true}else if(char=="+"){flags.sign="+"}else if(char=="0"){flags.pad_char="0"}else if(char==" "){flags.space=true}pos++}else if(char=="*"){flags.padding="*";pos++}else if(mo=/^\d+/.exec(s.substr(pos))){flags.padding=mo[0];pos+=mo[0].length}else if(char=="."){pos++;if(s[pos]=="*"){flags.precision="*";pos++}else if(mo=/^\d+/.exec(s.substr(pos))){flags.precision=mo[0];pos+=mo[0].length}else{flags.precision="0"}}else if(length_modifiers.indexOf(char)>-1){flags.length_modifier=char;pos++}else if(conversion_types.indexOf(char)>-1||char=="b"&&type=="bytes"){if(type=="bytes"){if(char=="s"){char="b"}else if(char=="r"){char="a"}}flags.conversion_type=char;flags.end=pos;flags.string=s.substring(start_pos,pos+1);if(flags.left&&flags.pad_char=="0"){flags.pad_char=" "}return flags}else{throw _b_.ValueError.$factory(`invalid character in format: ${char}`)}}throw _b_.ValueError.$factory("invalid format")}function is_mapping(obj){return 
_b_.hasattr(obj,"keys")&&_b_.hasattr(obj,"__getitem__")}$B.printf_format=function(s,type,args){var length=s.length,pos=0,argpos=null,getitem;if($B.$isinstance(args,_b_.tuple)){argpos=0}else{getitem=$B.$getattr(args,"__getitem__",_b_.None)}var ret="",nbph=0,pos=0,len=s.length;while(pos1){if(!$B.$isinstance(args,_b_.tuple)&&!is_mapping(args)){throw _b_.TypeError.$factory("not enough arguments for format string")}}var fmt=parse_mod_format(s,type,pos);pos=fmt.end+1;if(fmt.padding=="*"){if(args[argpos]===undefined){throw _b_.ValueError.$factory("no value for field width *")}fmt.padding=args[argpos];argpos++}if(fmt.precision=="*"){if(args[argpos]===undefined){throw _b_.ValueError.$factory("no value for precision *")}fmt.precision=args[argpos];argpos++}var func=char_mapping[fmt.conversion_type],value;if(fmt.mapping_key!==undefined){value=getitem(fmt.mapping_key)}else{if(argpos===null){value=args}else{value=args[argpos];if(value===undefined){throw _b_.TypeError.$factory("not enough arguments for format string")}argpos++}}ret+=func(value,fmt,type)}}if(argpos!==null){if(args.length>argpos){throw _b_.TypeError.$factory("not enough arguments for format string")}else if(args.length=127&&cp<160){cp=cp.toString(16);if(cp.length<2){cp="0"+cp}repl+="\\x"+cp}else if(cp>=768&&cp<=879){repl+="​"+chars[i]+" "}else if(cp.toString(16)=="feff"){repl+="\\ufeff"}else{repl+=chars[i]}}var res=repl;if(res.search('"')==-1&&res.search("'")==-1){return"'"+res+"'"}else if(_self.search('"')==-1){return'"'+res+'"'}var qesc=new RegExp("'","g");res="'"+res.replace(qesc,"\\'")+"'";return res};str.__rmod__=function(){var $=$B.args("__rmod__",2,{self:null,other:null},["self","other"],arguments,{},null,null);if(!$B.$isinstance($.other,str)){return _b_.NotImplemented}return str.__mod__($.other,$.self)};str.__rmul__=function(_self,other){_self=to_string(_self);if($B.$isinstance(other,_b_.int)){other=_b_.int.numerator(other);var res="";while(other>0){res+=_self;other--}return res}return _b_.NotImplemented};str.__setattr__=function(_self,attr,value){if(typeof _self==="string"){if(str.hasOwnProperty(attr)){throw _b_.AttributeError.$factory("'str' object attribute '"+attr+"' is read-only")}else{throw _b_.AttributeError.$factory("'str' object has no attribute '"+attr+"'")}}_b_.dict.$setitem(_self.__dict__,attr,value);return _b_.None};str.__setitem__=function(self,attr,value){throw _b_.TypeError.$factory("'str' object does not support item assignment")};var combining=[];for(var cp=768;cp<=879;cp++){combining.push(String.fromCharCode(cp))}var combining_re=new RegExp("("+combining.join("|")+")","g");str.__str__=function(_self){_self=to_string(_self);var repl="",chars=to_chars(_self);if(chars.length==_self.length){return _self.replace(combining_re,"​$1")}for(var i=0;i=768&&cp<=879){repl+="​"+chars[i]}else{repl+=chars[i]}}return repl};var body=`var _b_ = __BRYTHON__.builtins\nif(typeof other !== typeof _self){\n return _b_.NotImplemented}else if(typeof _self == "string"){\n return _self > other}else{\n return _self.$brython_value > other.$brython_value}`;var comps={">":"gt",">=":"ge","<":"lt","<=":"le"};for(var op in comps){str[`__${comps[op]}__`]=Function("_self","other",body.replace(/>/gm,op))}str.capitalize=function(){var $=$B.args("capitalize",1,{self:self},["self"],arguments,{},null,null),_self=to_string($.self);if(_self.length==0){return""}return _self.charAt(0).toUpperCase()+_self.substr(1).toLowerCase()};str.casefold=function(){var 
$=$B.args("casefold",1,{self:self},["self"],arguments,{},null,null),res="",char,cf,_self=to_string($.self),chars=to_chars(_self);for(var i=0,len=chars.length;i=0){n++;pos+=sub.length}else{break}}return n};str.encode=function(){var $=$B.args("encode",3,{self:null,encoding:null,errors:null},["self","encoding","errors"],arguments,{encoding:"utf-8",errors:"strict"},null,null),_self=to_string($.self);if($.encoding=="rot13"||$.encoding=="rot_13"){var res="";for(var i=0,len=_self.length;i0){res+=" ";col++}break;case"\r":case"\n":res+=car;col=0;break;default:res+=car;col++;break}pos++}return res};str.find=function(){var $=$B.args("str.find",4,{self:null,sub:null,start:null,end:null},["self","sub","start","end"],arguments,{start:0,end:null},null,null),_self,sub;check_str($.sub);normalize_start_end($);[_self,sub]=to_string([$.self,$.sub]);var len=str.__len__(_self),sub_len=str.__len__(sub);if(sub_len==0&&$.start==len){return len}if(len+sub_len==0){return-1}var js_start=pypos2jspos(_self,$.start),js_end=pypos2jspos(_self,$.end),ix=_self.slice(js_start,js_end).indexOf(sub);if(ix==-1){return-1}return jspos2pypos(_self,js_start+ix)};$B.parse_format=function(fmt_string){var elts=fmt_string.split(":"),name,conv,spec,name_ext=[];if(elts.length==1){name=fmt_string}else{name=elts[0];spec=elts.splice(1).join(":")}var elts=name.split("!");if(elts.length>1){name=elts[0];conv=elts[1]}if(name!==undefined){function name_repl(match){name_ext.push(match);return""}var name_ext_re=/\.[_a-zA-Z][_a-zA-Z0-9]*|\[[_a-zA-Z][_a-zA-Z0-9]*\]|\[[0-9]+\]/g;name=name.replace(name_ext_re,name_repl)}return{name:name,name_ext:name_ext,conv:conv,spec:spec||"",string:fmt_string}};$B.split_format=function(s){var pos=0,_len=s.length,car,text="",parts=[],rank=0;while(pos<_len){car=s.charAt(pos);if(car=="{"&&s.charAt(pos+1)=="{"){text+="{";pos+=2}else if(car=="}"&&s.charAt(pos+1)=="}"){text+="}";pos+=2}else if(car=="{"){parts.push(text);var end=pos+1,nb=1;while(end<_len){if(s.charAt(end)=="{"){nb++;end++}else if(s.charAt(end)=="}"){nb--;end++;if(nb==0){var fmt_string=s.substring(pos+1,end-1);var fmt_obj=$B.parse_format(fmt_string);fmt_obj.raw_name=fmt_obj.name;fmt_obj.raw_spec=fmt_obj.spec;if(!fmt_obj.name){fmt_obj.name=rank+"";rank++}if(fmt_obj.spec!==undefined){function replace_nested(name,key){if(key==""){return"{"+rank+++"}"}return"{"+key+"}"}fmt_obj.spec=fmt_obj.spec.replace(/\{(.*?)\}/g,replace_nested)}parts.push(fmt_obj);text="";break}}else{end++}}if(nb>0){throw _b_.ValueError.$factory("wrong format "+s)}pos=end}else{text+=car;pos++}}if(text){parts.push(text)}return parts};str.format=function(_self){var last_arg=$B.last(arguments);if(last_arg.$nat=="mapping"){var mapping=last_arg.mapping,getitem=$B.$getattr(mapping,"__getitem__");var args=[];for(var i=0,len=arguments.length-1;i-1){var pos=parseInt(fmt.name),value=_b_.tuple.__getitem__($.$args,pos)}else{var value=getitem(fmt.name)}for(var j=0;j-1){key=parseInt(key)}value=$B.$getattr(value,"__getitem__")(key)}}if(fmt.conv=="a"){value=_b_.ascii(value)}else if(fmt.conv=="r"){value=_b_.repr(value)}else if(fmt.conv=="s"){value=_b_.str.$factory(value)}if(value.$is_class||value.$factory){res+=value.__class__.__format__(value,fmt.spec)}else{res+=$B.$getattr(value,"__format__")(fmt.spec)}}return res};str.format_map=function(){var $=$B.args("format_map",2,{self:null,mapping:null},["self","mapping"],arguments,{},null,null),_self=to_string($.self);return str.format(_self,{$nat:"mapping",mapping:$.mapping})};str.index=function(self){var res=str.find.apply(null,arguments);if(res===-1){throw 
_b_.ValueError.$factory("substring not found")}return res};str.isascii=function(){var $=$B.args("isascii",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);for(var i=0,len=_self.length;i127){return false}}return true};str.isalnum=function(){var $=$B.args("isalnum",1,{self:null},["self"],arguments,{},null,null),cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);for(var cat of["Ll","Lu","Lm","Lt","Lo","Nd","digits","numeric"]){if(!$B.in_unicode_category(cat,cp)){return false}}}return true};str.isalpha=function(){var $=$B.args("isalpha",1,{self:null},["self"],arguments,{},null,null),cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);for(var cat of["Ll","Lu","Lm","Lt","Lo"]){if(!$B.in_unicode_category(cat,cp)){return false}}}return true};str.isdecimal=function(){var $=$B.args("isdecimal",1,{self:null},["self"],arguments,{},null,null),cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);if(!$B.in_unicode_category("Nd",cp)){return false}}return _self.length>0};str.isdigit=function(){var $=$B.args("isdigit",1,{self:null},["self"],arguments,{},null,null),cp,_self=to_string($.self);for(var char of _self){if(/\p{Nd}/u.test(char)){continue}cp=_b_.ord(char);if(!$B.in_unicode_category("No_digits",cp)){return false}}return _self.length>0};str.isidentifier=function(){var $=$B.args("isidentifier",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);if(_self.length==0){return false}var chars=to_chars(_self);if(!$B.is_XID_Start(_b_.ord(chars[0]))){return false}else{for(var char of chars){var cp=_b_.ord(char);if(!$B.is_XID_Continue(cp)){return false}}}return true};str.islower=function(){var $=$B.args("islower",1,{self:null},["self"],arguments,{},null,null),has_cased=false,cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);if($B.in_unicode_category("Ll",cp)){has_cased=true;continue}else if($B.in_unicode_category("Lu",cp)||$B.in_unicode_category("Lt",cp)){return false}}return has_cased};const numeric_re=/\p{Nd}|\p{Nl}|\p{No}/u;str.isnumeric=function(){var $=$B.args("isnumeric",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);for(var char of _self){if(!numeric_re.test(char)&&!$B.in_unicode_category("Lo_numeric",_b_.ord(char))){return false}}return _self.length>0};var unprintable_re=/\p{Cc}|\p{Cf}|\p{Co}|\p{Cs}|\p{Zl}|\p{Zp}|\p{Zs}/u;str.isprintable=function(){var $=$B.args("isprintable",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);for(var char of _self){if(char==" "){continue}if(unprintable_re.test(char)){return false}}return true};str.isspace=function(self){var $=$B.args("isspace",1,{self:null},["self"],arguments,{},null,null),cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);if(!$B.in_unicode_category("Zs",cp)&&$B.unicode_bidi_whitespace.indexOf(cp)==-1){return false}}return _self.length>0};str.istitle=function(self){var $=$B.args("istitle",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);return _self.length>0&&str.title(_self)==_self};str.isupper=function(self){var $=$B.args("islower",1,{self:null},["self"],arguments,{},null,null),is_upper=false,cp,_self=to_string(self);for(var char of _self){cp=_b_.ord(char);if($B.in_unicode_category("Lu",cp)){is_upper=true;continue}else if($B.in_unicode_category("Ll",cp)||$B.in_unicode_category("Lt",cp)){return false}}return is_upper};str.join=function(){var $=$B.args("join",2,{self:null,iterable:null},["self","iterable"],arguments,{},null,null),_self=to_string($.self);var 
iterable=_b_.iter($.iterable),res=[],count=0;while(1){try{var obj2=_b_.next(iterable);if(!$B.$isinstance(obj2,str)){throw _b_.TypeError.$factory("sequence item "+count+": expected str instance, "+$B.class_name(obj2)+" found")}res.push(obj2)}catch(err){if($B.$isinstance(err,_b_.StopIteration)){break}else{throw err}}}return res.join(_self)};str.ljust=function(self){var $=$B.args("ljust",3,{self:null,width:null,fillchar:null},["self","width","fillchar"],arguments,{fillchar:" "},null,null),_self=to_string($.self),len=str.__len__(_self);if($.width<=len){return _self}return _self+$.fillchar.repeat($.width-len)};str.lower=function(self){var $=$B.args("lower",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);return _self.toLowerCase()};str.lstrip=function(self,x){var $=$B.args("lstrip",2,{self:null,chars:null},["self","chars"],arguments,{chars:_b_.None},null,null),_self=$.self,chars=$.chars;if(chars===_b_.None){return self.trimStart()}[_self,chars]=to_string([_self,chars]);while(_self.length>0){var flag=false;for(var char of chars){if(_self.startsWith(char)){_self=_self.substr(char.length);flag=true;break}}if(!flag){return $.self.surrogates?$B.String(_self):_self}}return""};str.maketrans=function(){var $=$B.args("maketrans",3,{x:null,y:null,z:null},["x","y","z"],arguments,{y:null,z:null},null,null);var _t=$B.empty_dict();if($.y===null&&$.z===null){if(!$B.$isinstance($.x,_b_.dict)){throw _b_.TypeError.$factory("maketrans only argument must be a dict")}var items=_b_.list.$factory(_b_.dict.items($.x));for(var i=0,len=items.length;i0&&str.endswith(_self,suffix)){return _self.substr(0,_self.length-suffix.length)}return _self.substr(0)};function $re_escape(str){var specials="[.*+?|()$^";for(var i=0,len=specials.length;i-1&&elts.length>=count){var rest=elts.slice(count).join("");return _new+elts.slice(0,count).join(_new)+rest}else{return _new+elts.join(_new)+_new}}else{var elts=str.split(_self,old,count)}var res=_self,pos=-1;if(old.length==0){var res=_new;for(var i=0;i0){pos=res.indexOf(old,pos);if(pos<0){break}res=res.substr(0,pos)+_new+res.substr(pos+old.length);pos=pos+_new.length;count--}return res};str.rfind=function(self,substr){var $=$B.args("rfind",4,{self:null,sub:null,start:null,end:null},["self","sub","start","end"],arguments,{start:0,end:null},null,null),_self,sub;normalize_start_end($);check_str($.sub);[_self,sub]=to_string([$.self,$.sub]);var len=str.__len__(_self),sub_len=str.__len__(sub);if(sub_len==0){if($.js_start>len){return-1}else{return str.__len__(_self)}}var js_start=pypos2jspos(_self,$.start),js_end=pypos2jspos(_self,$.end),ix=_self.substring(js_start,js_end).lastIndexOf(sub);if(ix==-1){return-1}return jspos2pypos(_self,js_start+ix)-$.start};str.rindex=function(){var res=str.rfind.apply(null,arguments);if(res==-1){throw _b_.ValueError.$factory("substring not found")}return res};str.rjust=function(self){var $=$B.args("rjust",3,{self:null,width:null,fillchar:null},["self","width","fillchar"],arguments,{fillchar:" "},null,null),_self=to_string($.self);var len=str.__len__(_self);if($.width<=len){return _self}return $B.String($.fillchar.repeat($.width-len)+_self)};str.rpartition=function(self,sep){var $=$B.args("rpartition",2,{self:null,sep:null},["self","sep"],arguments,{},null,null),_self;check_str($.sep);[_self,sep]=[$.self,$.sep];_self=reverse(_self),sep=reverse(sep);var items=str.partition(_self,sep).reverse();for(var i=0;i0){var flag=false;for(var char of 
chars){if(_self.endsWith(char)){_self=_self.substr(0,_self.length-char.length);flag=true;break}}if(!flag){return _self.surrogates?$B.String(_self):_self}}return""};str.split=function(){var $=$B.args("split",3,{self:null,sep:null,maxsplit:null},["self","sep","maxsplit"],arguments,{sep:_b_.None,maxsplit:-1},null,null),maxsplit=$.maxsplit,sep=$.sep,pos=0,_self=to_string($.self);if(maxsplit.__class__===$B.long_int){maxsplit=parseInt(maxsplit.value)}if(sep==""){throw _b_.ValueError.$factory("empty separator")}if(sep===_b_.None){var res=[];while(pos<_self.length&&_self.charAt(pos).search(/\s/)>-1){pos++}if(pos===_self.length-1){return[_self]}var name="";while(1){if(_self.charAt(pos).search(/\s/)==-1){if(name==""){name=_self.charAt(pos)}else{name+=_self.charAt(pos)}}else{if(name!==""){res.push(name);if(maxsplit!==-1&&res.length==maxsplit+1){res.pop();res.push(name+_self.substr(pos));return res}name=""}}pos++;if(pos>_self.length-1){if(name){res.push(name)}break}}return res.map($B.String)}else{sep=to_string(sep);var res=[],s="",seplen=sep.length;if(maxsplit==0){return[$.self]}while(pos<_self.length){if(_self.substr(pos,seplen)==sep){res.push(s);pos+=seplen;if(maxsplit>-1&&res.length>=maxsplit){res.push(_self.substr(pos));return res.map($B.String)}s=""}else{s+=_self.charAt(pos);pos++}}res.push(s);return res.map($B.String)}};str.splitlines=function(self){var $=$B.args("splitlines",2,{self:null,keepends:null},["self","keepends"],arguments,{keepends:false},null,null);if(!$B.$isinstance($.keepends,[_b_.bool,_b_.int])){throw _b_.TypeError("integer argument expected, got "+$B.get_class($.keepends).__name)}var keepends=_b_.int.$factory($.keepends),res=[],start=0,pos=0,_self=to_string($.self);if(!_self.length){return res}while(pos<_self.length){if(_self.substr(pos,2)=="\r\n"){res.push(_self.slice(start,keepends?pos+2:pos));start=pos=pos+2}else if(_self[pos]=="\r"||_self[pos]=="\n"){res.push(_self.slice(start,keepends?pos+1:pos));start=pos=pos+1}else{pos++}}if(start<_self.length){res.push(_self.slice(start))}return res.map($B.String)};str.startswith=function(){var $=$B.args("startswith",4,{self:null,prefix:null,start:null,end:null},["self","prefix","start","end"],arguments,{start:0,end:null},null,null),_self;normalize_start_end($);var prefixes=$.prefix;if(!$B.$isinstance(prefixes,_b_.tuple)){prefixes=[prefixes]}_self=to_string($.self);prefixes=to_string(prefixes);var s=_self.substring($.start,$.end);for(var prefix of prefixes){if(!$B.$isinstance(prefix,str)){throw _b_.TypeError.$factory("endswith first arg must be str "+"or a tuple of str, not int")}if(s.substr(0,prefix.length)==prefix){return true}}return false};str.strip=function(){var $=$B.args("strip",2,{self:null,chars:null},["self","chars"],arguments,{chars:_b_.None},null,null);if($.chars===_b_.None){return $.self.trim()}return str.rstrip(str.lstrip($.self,$.chars),$.chars)};str.swapcase=function(self){var $=$B.args("swapcase",1,{self:self},["self"],arguments,{},null,null),res="",cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);if($B.in_unicode_category("Ll",cp)){res+=char.toUpperCase()}else if($B.in_unicode_category("Lu",cp)){res+=char.toLowerCase()}else{res+=char}}return res};str.title=function(self){var $=$B.args("title",1,{self:self},["self"],arguments,{},null,null),state,cp,res="",_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);if($B.in_unicode_category("Ll",cp)){if(!state){res+=char.toUpperCase();state="word"}else{res+=char}}else 
if($B.in_unicode_category("Lu",cp)||$B.in_unicode_category("Lt",cp)){res+=state?char.toLowerCase():char;state="word"}else{state=null;res+=char}}return res};str.translate=function(){var $=$B.args("translate",2,{self:null,table:null},["self","table"],arguments,{},null,null),table=$.table,res=[],getitem=$B.$getattr(table,"__getitem__"),cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);try{var repl=getitem(cp);if(repl!==_b_.None){if(typeof repl=="string"){res.push(repl)}else if(typeof repl=="number"){res.push(String.fromCharCode(repl))}}}catch(err){res.push(char)}}return res.join("")};str.upper=function(self){var $=$B.args("upper",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);return _self.toUpperCase()};str.zfill=function(self,width){var $=$B.args("zfill",2,{self:null,width:null},["self","width"],arguments,{},null,null),_self=to_string($.self);var len=str.__len__(_self);if($.width<=len){return _self}switch(_self.charAt(0)){case"+":case"-":return _self.charAt(0)+"0".repeat($.width-len)+_self.substr(1);default:return"0".repeat($.width-len)+_self}};str.$factory=function(arg,encoding,errors){if(arguments.length==0){return""}if(arg===undefined){return $B.UndefinedType.__str__()}else if(arg===null){return""}if(encoding!==undefined){var $=$B.args("str",3,{arg:null,encoding:null,errors:null},["arg","encoding","errors"],arguments,{encoding:"utf-8",errors:"strict"},null,null),encoding=$.encoding,errors=$.errors}if(typeof arg=="string"||arg instanceof String){return arg.toString()}else if(typeof arg=="number"&&Number.isInteger(arg)){return arg.toString()}try{if(arg.__class__&&arg.__class__===_b_.bytes&&encoding!==undefined){return _b_.bytes.decode(arg,$.encoding,$.errors)}var klass=arg.__class__||$B.get_class(arg);if(klass===undefined){return $B.JSObj.__str__($B.JSObj.$factory(arg))}var method=$B.$getattr(klass,"__str__",null);if(method===null){method=$B.$getattr(klass,"__repr__")}}catch(err){console.log("no __str__ for",arg);console.log("err ",err);if($B.get_option("debug")>1){console.log(err)}console.log("Warning - no method __str__ or __repr__, "+"default to toString",arg);throw err}var res=$B.$call(method)(arg);if(typeof res=="string"||$B.$isinstance(res,str)){return res}throw _b_.TypeError.$factory("__str__ returned non-string "+`(type ${$B.class_name(res)})`)};$B.set_func_names(str,"builtins");_b_.str=str;$B.parse_format_spec=function(spec,obj){if(spec==""){this.empty=true}else{var pos=0,aligns="<>=^",digits="0123456789",types="bcdeEfFgGnosxX%",align_pos=aligns.indexOf(spec.charAt(0));if(align_pos!=-1){if(spec.charAt(1)&&aligns.indexOf(spec.charAt(1))!=-1){this.fill=spec.charAt(0);this.align=spec.charAt(1);pos=2}else{this.align=aligns[align_pos];this.fill=" ";pos++}}else{align_pos=aligns.indexOf(spec.charAt(1));if(spec.charAt(1)&&align_pos!=-1){this.align=aligns[align_pos];this.fill=spec.charAt(0);pos=2}}var car=spec.charAt(pos);if(car=="+"||car=="-"||car==" "){this.sign=car;pos++;car=spec.charAt(pos)}if(car=="z"){this.z=true;pos++;car=spec.charAt(pos)}if(car=="#"){this.alternate=true;pos++;car=spec.charAt(pos)}if(car=="0"){this.fill="0";if(align_pos==-1){this.align="="}pos++;car=spec.charAt(pos)}while(car&&digits.indexOf(car)>-1){if(this.width===undefined){this.width=car}else{this.width+=car}pos++;car=spec.charAt(pos)}if(this.width!==undefined){this.width=parseInt(this.width)}if(this.width===undefined&&car=="{"){var 
end_param_pos=spec.substr(pos).search("}");this.width=spec.substring(pos,end_param_pos);pos+=end_param_pos+1}if(car==","||car=="_"){this.comma=true;this.grouping_option=car;pos++;car=spec.charAt(pos);if(car==","||car=="_"){if(car==this.grouping_option){throw _b_.ValueError.$factory(`Cannot specify '${car}' with '${car}'.`)}else{throw _b_.ValueError.$factory("Cannot specify both ',' and '_'.")}}}if(car=="."){if(digits.indexOf(spec.charAt(pos+1))==-1){throw _b_.ValueError.$factory("Missing precision in format spec")}this.precision=spec.charAt(pos+1);pos+=2;car=spec.charAt(pos);while(car&&digits.indexOf(car)>-1){this.precision+=car;pos++;car=spec.charAt(pos)}this.precision=parseInt(this.precision)}if(car&&types.indexOf(car)>-1){this.type=car;pos++;car=spec.charAt(pos)}if(pos!==spec.length){var err_msg=`Invalid format specifier '${spec}'`;if(obj){err_msg+=` for object of type '${$B.class_name(obj)}'`}throw _b_.ValueError.$factory(err_msg)}}this.toString=function(){return(this.fill===undefined?"":_b_.str.$factory(this.fill))+(this.align||"")+(this.sign||"")+(this.alternate?"#":"")+(this.sign_aware?"0":"")+(this.width||"")+(this.comma?",":"")+(this.precision?"."+this.precision:"")+(this.type||"")}};$B.format_width=function(s,fmt){if(fmt.width&&s.length":return fill.repeat(missing)+s;case"=":if("+-".indexOf(s.charAt(0))>-1){return s.charAt(0)+fill.repeat(missing)+s.substr(1)}else{return fill.repeat(missing)+s}case"^":var left=parseInt(missing/2);return fill.repeat(left)+s+fill.repeat(missing-left)}}return s};function fstring_expression(start){this.type="expression";this.start=start;this.expression="";this.conversion=null;this.fmt=null}function fstring_error(msg,pos){var error=Error(msg);error.position=pos;throw error}$B.parse_fstring=function(string){var elts=[],pos=0,current="",ctype=null,nb_braces=0,expr_start,car;while(pos-1){if(current.expression.length==0){throw Error("f-string: empty expression not allowed")}if("ars".indexOf(string.charAt(i+1))==-1){throw Error("f-string: invalid conversion character:"+" expected 's', 'r', or 'a'")}else{current.conversion=string.charAt(i+1);i+=2}}else if(car=="("||car=="["){nb_paren++;current.expression+=car;i++}else if(car==")"||car=="]"){nb_paren--;current.expression+=car;i++}else if(car=='"'){if(string.substr(i,3)=='"""'){var end=string.indexOf('"""',i+3);if(end==-1){fstring_error("f-string: unterminated string",pos)}else{var trs=string.substring(i,end+3);trs=trs.replace("\n","\\n\\");current.expression+=trs;i=end+3}}else{var end=string.indexOf('"',i+1);if(end==-1){fstring_error("f-string: unterminated string",pos)}else{current.expression+=string.substring(i,end+1);i=end+1}}}else if(nb_paren==0&&car==":"){current.fmt=true;var cb=0,fmt_complete=false;for(var j=i+1;j-1?"\\":"")+last_char;if(ce.length==0||nb_paren>0||string.charAt(i+1)=="="||"=!<>:".search(last_char_re)>-1){current.expression+=car;i+=1}else{var tail=car;while(string.charAt(i+1).match(/\s/)){tail+=string.charAt(i+1);i++}elts.push(current.expression+tail);while(ce.match(/\s$/)){ce=ce.substr(0,ce.length-1)}current.expression=ce;ctype="debug";i++}}else{current.expression+=car;i++}}if(nb_braces>0){fstring_error("f-string: expected '}'",pos)}}}if(current.length>0){elts.push(current)}for(var elt of elts){if(typeof elt=="object"){if(elt.fmt_pos!==undefined&&elt.expression.charAt(elt.fmt_pos)!=":"){throw Error()}}}return elts};var _chr=$B.codepoint2jsstring=function(i){if(i>=65536&&i<=1114111){var code=i-65536;return 
String.fromCodePoint(55296|code>>10)+String.fromCodePoint(56320|code&1023)}else{return String.fromCodePoint(i)}};var _ord=$B.jsstring2codepoint=function(c){if(c.length==1){return c.charCodeAt(0)}var code=65536;code+=(c.charCodeAt(0)&1023)<<10;code+=c.charCodeAt(1)&1023;return code}})(__BRYTHON__);(function($B){var _b_=$B.builtins;function $err(op,other){var msg="unsupported operand type(s) for "+op+" : 'int' and '"+$B.class_name(other)+"'";throw _b_.TypeError.$factory(msg)}function int_value(obj){if(typeof obj=="boolean"){return obj?1:0}return obj.$brython_value!==undefined?obj.$brython_value:obj}function bigint_value(obj){if(typeof obj=="boolean"){return obj?1n:0n}else if(typeof obj=="number"){return BigInt(obj)}else if(obj.__class__===$B.long_int){return obj.value}else if($B.$isinstance(obj,_b_.int)){return bigint_value(obj.$brython_value)}}var int={__class__:_b_.type,__dir__:_b_.object.__dir__,__mro__:[_b_.object],__qualname__:"int",$is_class:true,$native:true,$descriptors:{numerator:true,denominator:true,imag:true,real:true},$is_int_subclass:true};var int_or_long=int.$int_or_long=function(bigint){var res=Number(bigint);return Number.isSafeInteger(res)?res:$B.fast_long_int(bigint)};int.$to_js_number=function(obj){if(typeof obj=="number"){return obj}else if(obj.__class__===$B.long_int){return Number(obj.value)}else if($B.$isinstance(obj,_b_.int)){return int.$to_js_value(obj.$brython_value)}return null};int.$to_bigint=bigint_value;int.$int_value=int_value;int.as_integer_ratio=function(){var $=$B.args("as_integer_ratio",1,{self:null},["self"],arguments,{},null,null);return $B.fast_tuple([$.self,1])};int.from_bytes=function(){var $=$B.args("from_bytes",3,{bytes:null,byteorder:null,signed:null},["bytes","byteorder","signed"],arguments,{byteorder:"big",signed:false},null,null);var x=$.bytes,byteorder=$.byteorder,signed=$.signed,_bytes,_len;if($B.$isinstance(x,[_b_.bytes,_b_.bytearray])){_bytes=x.source;_len=x.source.length}else{_bytes=_b_.list.$factory(x);_len=_bytes.length;for(var i=0;i<_len;i++){_b_.bytes.$factory([_bytes[i]])}}if(byteorder=="big"){_bytes.reverse()}else if(byteorder!="little"){throw _b_.ValueError.$factory("byteorder must be either 'little' or 'big'")}var num=_bytes[0];if(signed&&num>=128){num=num-256}num=BigInt(num);var _mult=256n;for(var i=1;i<_len;i++){num+=_mult*BigInt(_bytes[i]);_mult*=256n}if(!signed){return int_or_long(num)}if(_bytes[_len-1]<128){return int_or_long(num)}return int_or_long(num-_mult)};int.to_bytes=function(){var $=$B.args("to_bytes",3,{self:null,len:null,byteorder:null,signed:null},["self","len","byteorder","signed"],arguments,{len:1,byteorder:"big",signed:false},null,null),self=$.self,len=$.len,byteorder=$.byteorder,signed=$.signed;if(!$B.$isinstance(len,_b_.int)){throw _b_.TypeError.$factory("integer argument expected, got "+$B.class_name(len))}if(["little","big"].indexOf(byteorder)==-1){throw _b_.ValueError.$factory("byteorder must be either 'little' or 'big'")}if($B.$isinstance(self,$B.long_int)){return $B.long_int.to_bytes(self,len,byteorder,signed)}if(self<0){if(!signed){throw _b_.OverflowError.$factory("can't convert negative int to unsigned")}self=Math.pow(256,len)+self}var res=[],value=self;while(value>0){var quotient=Math.floor(value/256),rest=value-256*quotient;res.push(rest);if(res.length>len){throw _b_.OverflowError.$factory("int too big to convert")}value=quotient}while(res.length=0){res=fmt.sign+res}}return res}int.__format__=function(self,format_spec){var fmt=new 
$B.parse_format_spec(format_spec,self);if(fmt.type&&"eEfFgG%".indexOf(fmt.type)!=-1){return _b_.float.__format__($B.fast_float(self),format_spec)}fmt.align=fmt.align||">";var res=preformat(self,fmt);if(fmt.comma){var sign=res[0]=="-"?"-":"",rest=res.substr(sign.length),len=rest.length,nb=Math.ceil(rest.length/3),chunks=[];for(var i=0;i0){if(exponent%2n==1n){result=result*base%z}exponent=exponent>>1n;base=base*base%z}return int_or_long(result)}else{if(typeof other=="number"){if(other>=0){return int_or_long(BigInt(self)**BigInt(other))}else{return $B.fast_float(Math.pow(self,other))}}else if(other.__class__===$B.long_int){if(other.value>=0){return int_or_long(BigInt(self)**other.value)}else{return $B.fast_float(Math.pow(self,other))}}else if($B.$isinstance(other,_b_.int)){return int_or_long(int.__pow__(self,other.$brython_value))}return _b_.NotImplemented}}if($B.$isinstance(other,_b_.float)){other=_b_.float.numerator(other);if(self>=0){return $B.fast_float(Math.pow(self,other))}else{return _b_.complex.__pow__($B.make_complex(self,0),other)}}else if($B.$isinstance(other,_b_.complex)){var preal=Math.pow(self,other.$real),ln=Math.log(self);return $B.make_complex(preal*Math.cos(ln),preal*Math.sin(ln))}var rpow=$B.$getattr(other,"__rpow__",_b_.None);if(rpow!==_b_.None){return rpow(self)}$err("**",other)};function __newobj__(){var $=$B.args("__newobj__",0,{},[],arguments,{},"args",null),args=$.args;var res=args.slice(1);res.__class__=args[0];return res}int.__repr__=function(self){$B.builtins_repr_check(int,arguments);var value=int_value(self),x=value.__class__===$B.long_int?value.value:value;if($B.int_max_str_digits!=0&&x>=10n**BigInt($B.int_max_str_digits)){throw _b_.ValueError.$factory(`Exceeds the limit `+`(${$B.int_max_str_digits}) for integer string conversion`)}return x.toString()};int.__setattr__=function(self,attr,value){if(typeof self=="number"||typeof self=="boolean"){var cl_name=$B.class_name(self);if(_b_.dir(self).indexOf(attr)>-1){throw _b_.AttributeError.$factory("attribute '"+attr+`' of '${cl_name}' objects is not writable`)}else{throw _b_.AttributeError.$factory(`'${cl_name}' object`+` has no attribute '${attr}'`)}}_b_.dict.$setitem(self.__dict__,attr,value);return _b_.None};int.__sub__=Function("self","other",op_model.replace(/\+/g,"-").replace(/__add__/g,"__sub__"));int.__truediv__=function(self,other){if($B.$isinstance(other,int)){other=int_value(other);if(other==0){throw _b_.ZeroDivisionError.$factory("division by zero")}if(other.__class__===$B.long_int){return $B.fast_float(self/parseInt(other.value))}return $B.fast_float(self/other)}return _b_.NotImplemented};int.bit_count=function(self){var s=_b_.bin(_b_.abs(self)),nb=0;for(var x of s){if(x=="1"){nb++}}return nb};int.bit_length=function(self){var s=_b_.bin(self);s=$B.$getattr(s,"lstrip")("-0b");return s.length};int.numerator=function(self){return int_value(self)};int.denominator=function(self){return int.$factory(1)};int.imag=function(self){return int.$factory(0)};int.real=function(self){return self};for(var attr of["numerator","denominator","imag","real"]){int[attr].setter=function(x){return function(self,value){throw _b_.AttributeError.$factory(`attribute '${x}' of `+`'${$B.class_name(self)}' objects is not writable`)}}(attr)}var model=`var _b_ = __BRYTHON__.builtins\nif(typeof other == "number"){\n // transform into BigInt: JS converts numbers to 32 bits\n return _b_.int.$int_or_long(BigInt(self) & BigInt(other))}else if(typeof other == "boolean"){\n return self & (other ? 
1 : 0)}else if(other.__class__ === $B.long_int){\n return _b_.int.$int_or_long(BigInt(self) & other.value)}else if($B.$isinstance(other, _b_.int)){\n // int subclass\n return _b_.int.__and__(self, other.$brython_value)}\nreturn _b_.NotImplemented`;int.__and__=Function("self","other",model);int.__lshift__=Function("self","other",model.replace(/&/g,"<<").replace(/__and__/g,"__lshift__"));int.__rshift__=Function("self","other",model.replace(/&/g,">>").replace(/__and__/g,"__rshift__"));int.__or__=Function("self","other",model.replace(/&/g,"|").replace(/__and__/g,"__or__"));int.__xor__=Function("self","other",model.replace(/&/g,"^").replace(/__and__/g,"__xor__"));int.__ge__=function(self,other){self=int_value(self);if(typeof other=="number"){return self>=other}else if(other!==null&&other.__class__===$B.long_int){return self>=other.value}else if(typeof other=="boolean"){return self>=other?1:0}else if($B.$isinstance(other,_b_.int)){return self>=other.$brython_value}return _b_.NotImplemented};int.__gt__=function(self,other){var res=int.__le__(self,other);return res===_b_.NotImplemented?res:!res};int.__le__=function(self,other){self=int_value(self);if(typeof other=="number"){return self<=other}else if(other!==null&&other.__class__===$B.long_int){return self<=other.value}else if(typeof other=="boolean"){return self<=other?1:0}else if($B.$isinstance(other,_b_.int)){return self<=other.$brython_value}return _b_.NotImplemented};int.__lt__=function(self,other){var res=int.__ge__(self,other);return res===_b_.NotImplemented?res:!res};var r_opnames=["add","sub","mul","truediv","floordiv","mod","pow","lshift","rshift","and","xor","or","divmod"];for(var r_opname of r_opnames){if(int["__r"+r_opname+"__"]===undefined&&int["__"+r_opname+"__"]){int["__r"+r_opname+"__"]=function(name){return function(self,other){if($B.$isinstance(other,int)){other=int_value(other);return int["__"+name+"__"](other,self)}return _b_.NotImplemented}}(r_opname)}}var $valid_digits=function(base){var digits="";if(base===0){return"0"}if(base<10){for(var i=0;i=2&&base<=36)){if(base!=0){throw _b_.ValueError.$factory("invalid base")}}function invalid(base){throw _b_.ValueError.$factory("invalid literal for int() with base "+base+": "+_b_.repr(initial_value))}if(typeof value!="string"){value=_b_.str.$to_string(value)}var _value=value.trim(),sign="";if(_value.startsWith("+")||_value.startsWith("-")){var sign=_value[0];_value=_value.substr(1)}if(_value.length==2&&base==0&&(_value=="0b"||_value=="0o"||_value=="0x")){throw _b_.ValueError.$factory("invalid value")}if(_value.endsWith("_")){invalid(base)}if(value.indexOf("__")>-1){invalid(base)}if(_value.length>2){var _pre=_value.substr(0,2).toUpperCase();if(base==0){if(_pre=="0B"){base=2}else if(_pre=="0O"){base=8}else if(_pre=="0X"){base=16}else if(_value.startsWith("0")){_value=_value.replace(/_/g,"");if(_value.match(/^0+$/)){return 0}invalid(base)}}else if(_pre=="0X"&&base!=16){invalid(base)}else if(_pre=="0O"&&base!=8){invalid(base)}if(_pre=="0B"&&base==2||_pre=="0O"||_pre=="0X"){_value=_value.substr(2);if(_value.startsWith("_")){_value=_value.substr(1)}}}if(base==0){base=10}var _digits=$valid_digits(base),_re=new RegExp("^[+-]?["+_digits+"]"+"["+_digits+"_]*$","i"),match=_re.exec(_value);if(match===null){res=0;var coef=1,digit;for(var char of _value){if(/\p{Nd}/u.test(char)){var cp=char.codePointAt(0);for(var start of 
$B.digits_starts){if(cp-start<10){digit=cp-start;break}}}else{if(base>10&&_digits.indexOf(char.toUpperCase())>-1){digit=char.toUpperCase().charCodeAt(0)-55}else{invalid(base)}}if(digit$B.int_max_str_digits){throw _b_.ValueError.$factory("Exceeds the limit "+`(${$B.int_max_str_digits}) for integer string conversion: `+`value has ${value.length} digits; use `+"sys.set_int_max_str_digits() to increase the limit.")}if(base==10){res=BigInt(_value)}else{base=BigInt(base);var res=0n,coef=1n,char;for(var i=_value.length-1;i>=0;i--){char=_value[i].toUpperCase();res+=coef*BigInt(_digits.indexOf(char));coef*=base}}}if(sign=="-"){res=-res}return int_or_long(res)};$B.set_func_names(int,"builtins");_b_.int=int;$B.$bool=function(obj,bool_class){if(obj===null||obj===undefined){return false}switch(typeof obj){case"boolean":return obj;case"number":case"string":if(obj){return true}return false;default:if(obj.$is_class){return true}var klass=$B.get_class(obj),missing={},bool_method=bool_class?$B.$getattr(klass,"__bool__",missing):$B.$getattr(obj,"__bool__",missing);var test=false;if(test){console.log("bool(obj)",obj,"bool_class",bool_class,"klass",klass,"apply bool method",bool_method);console.log("$B.$call(bool_method)",bool_method+"")}if(bool_method===missing){var len_method=$B.$getattr(klass,"__len__",missing);if(len_method===missing){return true}return len_method(obj)>0}else{try{var res=bool_class?$B.$call(bool_method)(obj):$B.$call(bool_method)()}catch(err){throw err}if(res!==true&&res!==false){throw _b_.TypeError.$factory("__bool__ should return "+"bool, returned "+$B.class_name(res))}if(test){console.log("bool method returns",res)}return res}}};var bool={__bases__:[int],__class__:_b_.type,__mro__:[int,_b_.object],__qualname__:"bool",$is_class:true,$not_basetype:true,$native:true,$descriptors:{numerator:true,denominator:true,imag:true,real:true}};bool.__and__=function(self,other){if($B.$isinstance(other,bool)){return self&&other}else if($B.$isinstance(other,int)){return int.__and__(bool.__index__(self),int.__index__(other))}return _b_.NotImplemented};bool.__float__=function(self){return self?$B.fast_float(1):$B.fast_float(0)};bool.__hash__=bool.__index__=bool.__int__=function(self){if(self.valueOf())return 1;return 0};bool.__neg__=function(self){return-$B.int_or_bool(self)};bool.__or__=function(self,other){if($B.$isinstance(other,bool)){return self||other}else if($B.$isinstance(other,int)){return int.__or__(bool.__index__(self),int.__index__(other))}return _b_.NotImplemented};bool.__pos__=$B.int_or_bool;bool.__repr__=function(self){$B.builtins_repr_check(bool,arguments);return self?"True":"False"};bool.__xor__=function(self,other){if($B.$isinstance(other,bool)){return self^other?true:false}else if($B.$isinstance(other,int)){return int.__xor__(bool.__index__(self),int.__index__(other))}return _b_.NotImplemented};bool.$factory=function(){var $=$B.args("bool",1,{x:null},["x"],arguments,{x:false},null,null);return $B.$bool($.x,true)};bool.numerator=int.numerator;bool.denominator=int.denominator;bool.real=int.real;bool.imag=int.imag;_b_.bool=bool;$B.set_func_names(bool,"builtins")})(__BRYTHON__);(function($B){var _b_=$B.builtins;if($B.isWebWorker){var window=self}var long_int={__class__:_b_.type,__mro__:[_b_.int,_b_.object],__qualname__:"int",$infos:{__module__:"builtins",__name__:"int"},$is_class:true,$native:true,$descriptors:{numerator:true,denominator:true,imag:true,real:true}};var max_safe_divider=$B.max_int/9;var int_or_long=_b_.int.$int_or_long;var len=(Math.pow(2,53)-1+"").length-1;function 
preformat(self,fmt){if(fmt.empty){return _b_.str.$factory(self)}if(fmt.type&&"bcdoxXn".indexOf(fmt.type)==-1){throw _b_.ValueError.$factory("Unknown format code '"+fmt.type+"' for object of type 'int'")}var res;switch(fmt.type){case undefined:case"d":res=self.toString();break;case"b":res=(fmt.alternate?"0b":"")+BigInt(self.value).toString(2);break;case"c":res=_b_.chr(self);break;case"o":res=(fmt.alternate?"0o":"")+BigInt(self.value).toString(8);break;case"x":res=(fmt.alternate?"0x":"")+BigInt(self.value).toString(16);break;case"X":res=(fmt.alternate?"0X":"")+BigInt(self.value).toString(16).toUpperCase();break;case"n":return self}if(fmt.sign!==undefined){if((fmt.sign==" "||fmt.sign=="+")&&self>=0){res=fmt.sign+res}}return res}long_int.$to_js_number=function(self){return Number(self.value)};long_int.__format__=function(self,format_spec){var fmt=new $B.parse_format_spec(format_spec,self);if(fmt.type&&"eEfFgG%".indexOf(fmt.type)!=-1){return _b_.float.__format__(self,format_spec)}fmt.align=fmt.align||">";var res=preformat(self,fmt);if(fmt.comma){var sign=res[0]=="-"?"-":"",rest=res.substr(sign.length),len=rest.length,nb=Math.ceil(rest.length/3),chunks=[];for(var i=0;i0?self.value:-self.value)};long_int.__add__=function(self,other){if(typeof other=="number"){return int_or_long(self.value+BigInt(other))}else if(other.__class__===$B.long_int){return int_or_long(self.value+other.value)}else if(typeof other=="boolean"){return int_or_long(self.value+(other?1n:0n))}else if($B.$isinstance(other,_b_.int)){return long_int.__add__(self,other.$brython_value)}return _b_.NotImplemented};long_int.__divmod__=function(self,other){var a=self.value,b=_b_.int.$to_bigint(other),quotient;if(a>=0&&b>0||a<=0&&b<0){quotient=a/b}else{quotient=a/b-1n}var rest=a-quotient*b;return $B.fast_tuple([int_or_long(quotient),int_or_long(rest)])};long_int.__eq__=function(self,other){if(other.__class__===$B.long_int){return self.value==other.value}else if(typeof other=="number"||typeof other=="boolean"){return false}else if($B.$isinstance(other,_b_.int)){return long_int.__eq__(self,other.$brython_value)}return _b_.NotImplemented};long_int.__float__=function(self){if(!isFinite(Number(self.value))){throw _b_.OverflowError.$factory("int too large to convert to float")}return $B.fast_float(Number(self.value))};long_int.__floordiv__=function(self,other){if(typeof other=="number"){return int_or_long(self.value/BigInt(other))}else if(other.__class__===$B.long_int){return int_or_long(self.value/other.value)}else if(typeof other=="boolean"){return int_or_long(self.value/(other?1n:0n))}else if($B.$isinstance(other,_b_.int)){return int_or_long(self.value/other.$brython_value)}return _b_.NotImplemented};long_int.__ge__=function(self,other){if(typeof other=="number"){return self.value>=other}else if(other.__class__===$B.long_int){return self.value>=other.value}else if(typeof other=="boolean"){return self.value>=(other?1:0)}else if($B.$isinstance(other,_b_.int)){return self.value>=other.$brython_value}return _b_.NotImplemented};long_int.__gt__=function(self,other){var res=long_int.__le__(self,other);return res===_b_.NotImplemented?res:!res};long_int.__hash__=function(self){var modulus=2305843009213693951n,sign=self.value>=0?1n:-1n,self_pos=self.value*sign;var _hash=sign*(self_pos%modulus);return self.__hashvalue__=int_or_long(_hash)};long_int.__index__=function(self){return self};long_int.__invert__=function(self){return int_or_long(-1n-self.value)};long_int.__le__=function(self,other){if(typeof other=="number"){return self.value<=other}else 
if(other.__class__===$B.long_int){return self.value<=other.value}else if(typeof other=="boolean"){return self.value<=(other?1:0)}else if($B.$isinstance(other,_b_.int)){return self.value<=other.$brython_value}return _b_.NotImplemented};long_int.__lt__=function(self,other){var res=long_int.__ge__(self,other);return res===_b_.NotImplemented?res:!res};long_int.__lshift__=function(self,other){if(typeof other=="number"){return int_or_long(self.value<>BigInt(other))}else if(other.__class__===$B.long_int){return int_or_long(self.value>>other.value)}else if(typeof other=="boolean"){return int_or_long(self.value>>(other?1n:0n))}else if($B.$isinstance(other,_b_.int)){return long_int.__rshift__(self,other.$brython_value)}return _b_.NotImplemented};long_int.__repr__=function(self){$B.builtins_repr_check($B.long_int,arguments);if($B.int_max_str_digits!=0&&self.value>=10n**BigInt($B.int_max_str_digits)){throw _b_.ValueError.$factory(`Exceeds the limit `+`(${$B.int_max_str_digits}) for integer string conversion`)}return self.value.toString()};long_int.__sub__=function(self,other){if(typeof other=="number"){return int_or_long(self.value-BigInt(other))}else if(typeof other=="boolean"){return int_or_long(self.value-(other?1n:0n))}else if(other.__class__===$B.long_int){return int_or_long(self.value-other.value)}else if($B.$isinstance(other,_b_.int)){return long_int.__sub__(self,other.$brython_value)}return _b_.NotImplemented};long_int.__truediv__=function(self,other){if(typeof other=="number"){return $B.fast_float(Number(self.value)/other)}else if(typeof other=="boolean"){return $B.fast_float(Number(self.value)*(other?1:0))}else if(other.__class__===$B.long_int){return $B.fast_float(Number(self.value)/Number(other.value))}else if($B.$isinstance(other,_b_.int)){return long_int.__truediv__(self,other.$brython_value)}return _b_.NotImplemented};long_int.bit_count=function(self){var s=self.value.toString(2),nb=0;for(var x of s){if(x=="1"){nb++}}return nb};long_int.bit_length=function(self){return self.value.toString(2).length};function _infos(self){var nbits=$B.long_int.bit_length(self),pow2=2n**BigInt(nbits-1),rest=BigInt(self.value)-pow2,relative_rest=new Number(rest/pow2);return{nbits:nbits,pow2:pow2,rest:rest,relative_rest:relative_rest}}long_int.$log2=function(x){if(x.value<0){throw _b_.ValueError.$factory("math domain error")}var infos=_infos(x);return _b_.float.$factory(infos.nbits-1+Math.log(1+infos.relative_rest/Math.LN2))};long_int.$log10=function(x){if(x.value<0){throw _b_.ValueError.$factory("math domain error")}var x_string=x.value.toString(),exp=x_string.length-1,mant=parseFloat(x_string[0]+"."+x_string.substr(1));return _b_.float.$factory(exp+Math.log10(mant))};long_int.numerator=function(self){return self};long_int.denominator=function(self){return _b_.int.$factory(1)};long_int.imag=function(self){return _b_.int.$factory(0)};long_int.real=function(self){return self};var body=`var $B = __BRYTHON__,\n _b_ = $B.builtins\nif(typeof other == "number"){\n return _b_.int.$int_or_long(self.value & BigInt(other))}else if(typeof other == "boolean"){\n return _b_.int.$int_or_long(self.value & (other ? 
1n : 0n))}else if(other.__class__ === $B.long_int){\n return _b_.int.$int_or_long(self.value & other.value)}else if($B.$isinstance(other, _b_.int)){\n // int subclass\n return $B.long_int.__and__(self, other.$brython_value)}\nreturn _b_.NotImplemented`;long_int.__and__=Function("self","other",body);long_int.__or__=Function("self","other",body.replace(/&/g,"|").replace(/__and__/g,"__or__"));long_int.__xor__=Function("self","other",body.replace(/&/g,"^").replace(/__and__/g,"__xor__"));long_int.to_bytes=function(self,len,byteorder,signed){var res=[],v=self.value;if(!$B.$bool(signed)&&v<0){throw _b_.OverflowError.$factory("can't convert negative int to unsigned")}while(v>0){var quot=v/256n,rest=v-quot*256n;v=quot;res.push(Number(rest));if(res.length>len){throw _b_.OverflowError.$factory("int too big to convert")}}while(res.length10){for(var i=0;iMIN_SAFE_INTEGER&&v0}};long_int.$factory=function(value,base){var is_digits=digits(base);for(var i=0;i=0;i--){char=value[i].toUpperCase();res+=coef*BigInt(is_digits[char]);coef*=base}}return{__class__:$B.long_int,value:res}};function extended_euclidean_algorithm(a,b){var s=0,old_s=1,t=1,old_t=0,r=b,old_r=a,quotient,tmp;while($B.rich_comp("__ne__",r,0)){quotient=$B.rich_op("__floordiv__",old_r,r);tmp=$B.rich_op("__sub__",old_r,$B.rich_op("__mul__",quotient,r));old_r=r;r=tmp;tmp=$B.rich_op("__sub__",old_s,$B.rich_op("__mul__",quotient,s));old_s=s;s=tmp;tmp=$B.rich_op("__sub__",old_t,$B.rich_op("__mul__",quotient,t));old_t=t;t=tmp}return[old_r,old_s,old_t]}function inverse_of(n,p){var gcd,x,y;[gcd,x,y]=extended_euclidean_algorithm(n,p);if($B.rich_comp("__ne__",gcd,1)){throw Error(`${n} has no multiplicative inverse '\n 'modulo ${p}`)}else{return $B.rich_op("__mod__",x,p)}}$B.inverse_of=inverse_of;$B.set_func_names(long_int,"builtins");$B.long_int=long_int;$B.fast_long_int=function(value){if(typeof value!=="bigint"){console.log("expected bigint, got",value);throw Error("not a big int")}return{__class__:$B.long_int,value:value}}})(__BRYTHON__);(function($B){var _b_=$B.builtins;var object=_b_.object;function $err(op,other){var msg="unsupported operand type(s) for "+op+": 'float' and '"+$B.class_name(other)+"'";throw _b_.TypeError.$factory(msg)}function float_value(obj){return obj.__class__===float?obj:fast_float(obj.value)}var float={__class__:_b_.type,__dir__:object.__dir__,__qualname__:"float",$is_class:true,$native:true,$descriptors:{numerator:true,denominator:true,imag:true,real:true}};float.$float_value=float_value;float.$to_js_number=function(self){if(self.__class__===float){return self.value}else{return float.$to_js_number(self.value)}};float.numerator=function(self){return self};float.denominator=function(self){return 1};float.imag=function(self){return 0};float.real=function(self){return self};float.__float__=function(self){return self};$B.shift1_cache={};float.as_integer_ratio=function(self){if(isinf(self)){throw _b_.OverflowError.$factory("Cannot pass infinity to "+"float.as_integer_ratio.")}if(isnan(self)){throw _b_.ValueError.$factory("Cannot pass NaN to "+"float.as_integer_ratio.")}var tmp=frexp(self),fp=tmp[0],exponent=tmp[1];for(var i=0;i<300;i++){if(fp==Math.floor(fp)){break}else{fp*=2;exponent--}}var 
numerator=_b_.int.$factory(fp),py_exponent=_b_.abs(exponent),denominator=1,x;if($B.shift1_cache[py_exponent]!==undefined){x=$B.shift1_cache[py_exponent]}else{x=$B.$getattr(1,"__lshift__")(py_exponent);$B.shift1_cache[py_exponent]=x}py_exponent=x;if(exponent>0){numerator=$B.rich_op("__mul__",numerator,py_exponent)}else{denominator=py_exponent}return $B.fast_tuple([_b_.int.$factory(numerator),_b_.int.$factory(denominator)])};function check_self_is_float(x,method){if(x.__class__===_b_.float||$B.$isinstance(x,_b_.float)){return true}throw _b_.TypeError.$factory(`descriptor '${method}' requires a `+`'float' object but received a '${$B.class_name(x)}'`)}float.__abs__=function(self){check_self_is_float(self,"__abs__");return fast_float(Math.abs(self.value))};float.__bool__=function(self){check_self_is_float(self,"__bool__");return _b_.bool.$factory(self.value)};float.__ceil__=function(self){check_self_is_float(self,"__ceil__");if(isnan(self)){throw _b_.ValueError.$factory("cannot convert float NaN to integer")}else if(isinf(self)){throw _b_.OverflowError.$factory("cannot convert float infinity to integer")}return Math.ceil(self.value)};float.__divmod__=function(self,other){check_self_is_float(self,"__divmod__");if(!$B.$isinstance(other,[_b_.int,float])){return _b_.NotImplemented}return $B.fast_tuple([float.__floordiv__(self,other),float.__mod__(self,other)])};float.__eq__=function(self,other){check_self_is_float(self,"__eq__");if(isNaN(self.value)&&($B.$isinstance(other,float)&&isNaN(other.value))){return false}if($B.$isinstance(other,_b_.int)){return self.value==other}if($B.$isinstance(other,float)){return self.value==other.value}if($B.$isinstance(other,_b_.complex)){if(!$B.rich_comp("__eq__",0,other.$imag)){return false}return float.__eq__(self,other.$real)}return _b_.NotImplemented};float.__floor__=function(self){check_self_is_float(self,"__floor__");if(isnan(self)){throw _b_.ValueError.$factory("cannot convert float NaN to integer")}else if(isinf(self)){throw _b_.OverflowError.$factory("cannot convert float infinity to integer")}return Math.floor(self.value)};float.__floordiv__=function(self,other){check_self_is_float(self,"__floordiv__");if($B.$isinstance(other,float)){if(other.value==0){throw _b_.ZeroDivisionError.$factory("division by zero")}return fast_float(Math.floor(self.value/other.value))}if($B.$isinstance(other,_b_.int)){if(other.valueOf()==0){throw _b_.ZeroDivisionError.$factory("division by zero")}return fast_float(Math.floor(self.value/other))}return _b_.NotImplemented};const DBL_MANT_DIG=53,LONG_MAX=__BRYTHON__.MAX_VALUE,DBL_MAX_EXP=2**10,LONG_MIN=__BRYTHON__.MIN_VALUE,DBL_MIN_EXP=-1021;float.fromhex=function(klass,s){function hex_from_char(char){return parseInt(char,16)}function finished(){while(s[pos]&&s[pos].match(/\s/)){pos++}if(pos!=s.length){throw parse_error()}if(negate){x=float.__neg__(x)}return klass===_b_.float?x:$B.$call(klass)(x)}function overflow_error(){throw _b_.OverflowError.$factory("hexadecimal value too large to represent as a float")}function parse_error(){throw _b_.ValueError.$factory("invalid hexadecimal floating-point string")}function insane_length_error(){throw _b_.ValueError.$factory("hexadecimal string too long to convert")}s=s.trim();var re_parts=[/^(?[+-])?(0x)?/,/(?[0-9a-fA-F]+)?/,/(?\.(?[0-9a-fA-F]+))?/,/(?p(?[+-])?(?\d+))?$/];var re=new RegExp(re_parts.map((r=>r.source)).join(""));var mo=re.exec(s);if(s.match(/^\+?inf(inity)?$/i)){return INF}else if(s.match(/^-inf(inity)?$/i)){return NINF}else if(s.match(/^[+-]?nan$/i)){return NAN}var 
pos=0,negate,ldexp=_b_.float.$funcs.ldexp;if(s[pos]=="-"){pos++;negate=1}else if(s[pos]=="+"){pos++}if(s.substr(pos,2).toLowerCase()=="0x"){pos+=2}var coeff_start=pos,coeff_end;while(hex_from_char(s[pos])>=0){pos++}var save_pos=pos;if(s[pos]=="."){pos++;while(hex_from_char(s[pos])>=0){pos++}coeff_end=pos-1}else{coeff_end=pos}var ndigits=coeff_end-coeff_start,fdigits=coeff_end-save_pos;if(ndigits==0){throw parse_error()}if(ndigits>Math.min(DBL_MIN_EXP-DBL_MANT_DIG-LONG_MIN/2,LONG_MAX/2+1-DBL_MAX_EXP)/4){throw insane_length_error()}var exp;if(s[pos]=="p"||s[pos]=="P"){pos++;var exp_start=pos;if(s[pos]=="-"||s[pos]=="+"){pos++}if(!("0"<=s[pos]&&s[pos]<="9")){throw parse_error()}pos++;while("0"<=s[pos]&&s[pos]<="9"){pos++}exp=parseInt(s.substr(exp_start))}else{exp=0}function HEX_DIGIT(j){if(!Number.isInteger(j)){throw Error("j pas entier")}var pos=j0&&HEX_DIGIT(ndigits-1)==0){ndigits--}if(ndigits==0||expLONG_MAX/2){console.log("overflow, exp",exp);throw overflow_error()}exp=exp-4*fdigits;var top_exp=exp+4*(ndigits-1);for(var digit=BigInt(HEX_DIGIT(ndigits-1));digit!=0;digit/=2n){top_exp++}if(top_expDBL_MAX_EXP){throw overflow_error()}var lsb=Math.max(top_exp,DBL_MIN_EXP)-DBL_MANT_DIG;var x=0;if(exp>=lsb){for(var i=ndigits-1;i>=0;i--){x=16*x+HEX_DIGIT(i)}x=ldexp($B.fast_float(x),exp);return finished()}var half_eps=1<<(lsb-exp-1)%4,key_digit=parseInt((lsb-exp-1)/4);for(var i=ndigits-1;i>key_digit;i--){x=16*x+HEX_DIGIT(i)}var digit=HEX_DIGIT(key_digit);x=16*x+(digit&16-2*half_eps);if((digit&half_eps)!=0){var round_up=0;if((digit&3*half_eps-1)!=0||half_eps==8&&key_digit+1=0;i--){if(HEX_DIGIT(i)!=0){round_up=1;break}}}if(round_up){x+=2*half_eps;if(top_exp==DBL_MAX_EXP&&x==ldexp(2*half_eps,DBL_MANT_DIG).value){throw overflow_error()}}}x=ldexp(x,exp+4*key_digit);return finished()};float.__getformat__=function(arg){if(arg=="double"||arg=="float"){return"IEEE, little-endian"}if(typeof arg!=="string"){throw _b_.TypeError.$factory(" __getformat__() argument must be str, not "+$B.class_name(arg))}throw _b_.ValueError.$factory("__getformat__() argument 1 must be "+"'double' or 'float'")};var format_sign=function(val,flags){switch(flags.sign){case"+":return val>=0||isNaN(val)?"+":"";case"-":return"";case" ":return val>=0||isNaN(val)?" 
":""}if(flags.space){if(val>=0){return" "}}return""};function preformat(self,fmt){var value=self.value;if(fmt.empty){return _b_.str.$factory(self)}if(fmt.type&&"eEfFgGn%".indexOf(fmt.type)==-1){throw _b_.ValueError.$factory("Unknown format code '"+fmt.type+"' for object of type 'float'")}var special;if(isNaN(value)){special="efg".indexOf(fmt.type)>-1?"nan":"NAN"}else if(value==Number.POSITIVE_INFINITY){special="efg".indexOf(fmt.type)>-1?"inf":"INF"}else if(value==Number.NEGATIVE_INFINITY){special="efg".indexOf(fmt.type)>-1?"-inf":"-INF"}if(special){return format_sign(value,fmt)+special}if(fmt.precision===undefined&&fmt.type!==undefined){fmt.precision=6}if(fmt.type=="%"){value*=100}if(fmt.type=="e"){var res=value.toExponential(fmt.precision),exp=parseInt(res.substr(res.search("e")+1));if(Math.abs(exp)<10){res=res.substr(0,res.length-1)+"0"+res.charAt(res.length-1)}return res}if(fmt.precision!==undefined){var prec=fmt.precision;if(prec==0){return Math.round(value)+""}var res=$B.roundDownToFixed(value,prec),pt_pos=res.indexOf(".");if(fmt.type!==undefined&&(fmt.type=="%"||fmt.type.toLowerCase()=="f")){if(pt_pos==-1){res+="."+"0".repeat(fmt.precision)}else{var missing=fmt.precision-res.length+pt_pos+1;if(missing>0){res+="0".repeat(missing)}}}else if(fmt.type&&fmt.type.toLowerCase()=="g"){var exp_fmt=preformat(self,{type:"e"}).split("e"),exp=parseInt(exp_fmt[1]);if(-4<=exp&&exp0){while(signif.endsWith("0")){signif=signif.substr(0,signif.length-1)}}if(signif.endsWith(".")){signif=signif.substr(0,signif.length-1)}parts[0]=signif}res=parts.join("e");if(fmt.type=="G"){res=res.toUpperCase()}return res}else if(fmt.type===undefined){fmt.type="g";res=preformat(self,fmt);if(res.indexOf(".")==-1){var exp=res.length-1,exp=exp<10?"0"+exp:exp,is_neg=res.startsWith("-"),point_pos=is_neg?2:1,mant=res.substr(0,point_pos)+"."+res.substr(point_pos);return`${mant}e+${exp}`}fmt.type=undefined}else{var res1=value.toExponential(fmt.precision-1),exp=parseInt(res1.substr(res1.search("e")+1));if(exp<-4||exp>=fmt.precision-1){var elts=res1.split("e");while(elts[0].endsWith("0")){elts[0]=elts[0].substr(0,elts[0].length-1)}res=elts.join("e")}}}else{var res=_b_.str.$factory(self)}if(fmt.type===undefined||"gGn".indexOf(fmt.type)!=-1){if(res.search("e")==-1){while(res.charAt(res.length-1)=="0"){res=res.substr(0,res.length-1)}}if(res.charAt(res.length-1)=="."){if(fmt.type===undefined){res+="0"}else{res=res.substr(0,res.length-1)}}}if(fmt.sign!==undefined){if((fmt.sign==" "||fmt.sign=="+")&&value>0){res=fmt.sign+res}}if(fmt.type=="%"){res+="%"}return res}float.__format__=function(self,format_spec){check_self_is_float(self,"__format__");var fmt=new $B.parse_format_spec(format_spec,self);return float.$format(self,fmt)};float.$format=function(self,fmt){fmt.align=fmt.align||">";var pf=preformat(self,fmt);if(fmt.z&&Object.is(parseFloat(pf),-0)){pf=pf.substr(1)}var raw=pf.split("."),_int=raw[0];if(fmt.comma){var len=_int.length,nb=Math.ceil(_int.length/3),chunks=[];for(var i=0;i0?float.$factory(x):float.$factory(-x)}function frexp(x){var x1=x;if($B.$isinstance(x,float)){if(isnan(x)||isinf(x)){return[x,0]}x1=float_value(x).value}else if($B.$isinstance(x,$B.long_int)){var exp=x.value.toString(2).length,power=2n**BigInt(exp);return[$B.fast_float(Number(x.value)/Number(power)),exp]}if(x1==0){return[0,0]}var sign=1,ex=0,man=x1;if(man<0){sign=-sign;man=-man}while(man<.5){man*=2;ex--}while(man>=1){man*=.5;ex++}man*=sign;return[man,ex]}function ldexp(mantissa,exponent){if(isninf(mantissa)){return NINF}else if(isinf(mantissa)){return 
INF}if($B.$isinstance(mantissa,_b_.float)){mantissa=mantissa.value}if(mantissa==0){return ZERO}else if(isNaN(mantissa)){return NAN}if($B.$isinstance(exponent,$B.long_int)){if(exponent.value<0){return ZERO}else{throw _b_.OverflowError.$factory("overflow")}}else if(!isFinite(mantissa*Math.pow(2,exponent))){throw _b_.OverflowError.$factory("overflow")}var steps=Math.min(3,Math.ceil(Math.abs(exponent)/1023));var result=mantissa;for(var i=0;i=0){if(Number.isInteger(other)&&other%2==1){return self}return fast_float(0)}else if(self.value==Number.NEGATIVE_INFINITY&&!isNaN(other)){if(other%2==-1){return fast_float(-0)}else if(other<0){return fast_float(0)}else if(other%2==1){return fast_float(Number.NEGATIVE_INFINITY)}else{return fast_float(Number.POSITIVE_INFINITY)}}else if(self.value==Number.POSITIVE_INFINITY&&!isNaN(other)){return other>0?self:fast_float(0)}if(other==Number.NEGATIVE_INFINITY&&!isNaN(self.value)){return Math.abs(self.value)<1?fast_float(Number.POSITIVE_INFINITY):fast_float(0)}else if(other==Number.POSITIVE_INFINITY&&!isNaN(self.value)){return Math.abs(self.value)<1?fast_float(0):fast_float(Number.POSITIVE_INFINITY)}if(self.value<0&&!Number.isInteger(other)){return _b_.complex.__pow__($B.make_complex(self.value,0),fast_float(other))}return fast_float(Math.pow(self.value,other))}return _b_.NotImplemented};float.__repr__=function(self){$B.builtins_repr_check(float,arguments);self=self.value;if(self==Infinity){return"inf"}else if(self==-Infinity){return"-inf"}else if(isNaN(self)){return"nan"}else if(self===0){if(1/self===-Infinity){return"-0.0"}return"0.0"}var res=self+"";if(res.search(/[.eE]/)==-1){res+=".0"}var split_e=res.split(/e/i);if(split_e.length==2){var mant=split_e[0],exp=split_e[1];if(exp.startsWith("-")){var exp_str=parseInt(exp.substr(1))+"";if(exp_str.length<2){exp_str="0"+exp_str}return mant+"e-"+exp_str}}var x,y;[x,y]=res.split(".");var sign="";if(x[0]=="-"){x=x.substr(1);sign="-"}if(x.length>16){var exp=x.length-1,int_part=x[0],dec_part=x.substr(1)+y;while(dec_part.endsWith("0")){dec_part=dec_part.substr(0,dec_part.length-1)}var mant=int_part;if(dec_part.length>0){mant+="."+dec_part}return sign+mant+"e+"+exp}else if(x=="0"){var exp=0;while(exp3){var rest=y.substr(exp),exp=(exp+1).toString();while(rest.endsWith("0")){rest=rest.substr(0,res.length-1)}var mant=rest[0];if(rest.length>1){mant+="."+rest.substr(1)}if(exp.length==1){exp="0"+exp}return sign+mant+"e-"+exp}}return _b_.str.$factory(res)};float.__round__=function(){var $=$B.args("__round__",2,{self:null,ndigits:null},["self","ndigits"],arguments,{ndigits:_b_.None},null,null);return float.$round($.self,$.ndigits)};float.$round=function(x,ndigits){function overflow(){throw _b_.OverflowError.$factory("cannot convert float infinity to integer")}var no_digits=ndigits===_b_.None;if(isnan(x)){if(ndigits===_b_.None){throw _b_.ValueError.$factory("cannot convert float NaN to integer")}return NAN}else if(isninf(x)){return ndigits===_b_.None?overflow():NINF}else if(isinf(x)){return ndigits===_b_.None?overflow():INF}x=float_value(x);ndigits=ndigits===_b_.None?0:ndigits;if(ndigits==0){var res=Math.round(x.value);if(Math.abs(x.value-res)==.5){if(res%2){return res-1}}if(no_digits){return res}return $B.fast_float(res)}if(ndigits.__class__===$B.long_int){ndigits=Number(ndigits.value)}var pow1,pow2,y,z;if(ndigits>=0){if(ndigits>22){pow1=10**(ndigits-22);pow2=1e22}else{pow1=10**ndigits;pow2=1}y=x.value*pow1*pow2;if(!isFinite(y)){return x}}else{pow1=10**-ndigits;pow2=1;if(isFinite(pow1)){y=x.value/pow1}else{return 
ZERO}}z=Math.round(y);if(fabs(y-z).value==.5){z=2*Math.round(y/2)}if(ndigits>=0){z=z/pow2/pow1}else{z*=pow1}if(!isFinite(z)){throw _b_.OverflowError.$factory("overflow occurred during round")}return fast_float(z)};float.__setattr__=function(self,attr,value){if(self.__class__===float){if(float[attr]===undefined){throw _b_.AttributeError.$factory("'float' object has no attribute '"+attr+"'")}else{throw _b_.AttributeError.$factory("'float' object attribute '"+attr+"' is read-only")}}self[attr]=value;return _b_.None};float.__truediv__=function(self,other){if($B.$isinstance(other,_b_.int)){if(other.valueOf()==0){throw _b_.ZeroDivisionError.$factory("division by zero")}else if($B.$isinstance(other,$B.long_int)){return float.$factory(self.value/Number(other.value))}return float.$factory(self.value/other)}else if($B.$isinstance(other,float)){if(other.value==0){throw _b_.ZeroDivisionError.$factory("division by zero")}return float.$factory(self.value/other.value)}return _b_.NotImplemented};var op_func_body=`var $B = __BRYTHON__,\n _b_ = __BRYTHON__.builtins\n if($B.$isinstance(other, _b_.int)){\n if(typeof other == "boolean"){\n return other ? $B.fast_float(self.value - 1) : self\n }else if(other.__class__ === $B.long_int){\n return _b_.float.$factory(self.value - parseInt(other.value))\n }else{\n return $B.fast_float(self.value - other)\n }\n }\n if($B.$isinstance(other, _b_.float)){\n return $B.fast_float(self.value - other.value)\n }\n return _b_.NotImplemented`;var ops={"+":"add","-":"sub"};for(var op in ops){var body=op_func_body.replace(/-/gm,op);float[`__${ops[op]}__`]=Function("self","other",body)}var comp_func_body=`\nvar $B = __BRYTHON__,\n _b_ = $B.builtins\nif($B.$isinstance(other, _b_.int)){\n if(other.__class__ === $B.long_int){\n return self.value > parseInt(other.value)\n }\n return self.value > other.valueOf()}\nif($B.$isinstance(other, _b_.float)){\n return self.value > other.value}\nif($B.$isinstance(other, _b_.bool)) {\n return self.value > _b_.bool.__hash__(other)}\nif(_b_.hasattr(other, "__int__") || _b_.hasattr(other, "__index__")) {\n return _b_.int.__gt__(self.value, $B.$GetInt(other))}\n// See if other has the opposite operator, eg <= for >\nvar inv_op = $B.$getattr(other, "__le__", _b_.None)\nif(inv_op !== _b_.None){\n return inv_op(self)}\nthrow _b_.TypeError.$factory(\n "unorderable types: float() > " + $B.class_name(other) + "()")\n`;for(var op in $B.$comps){var body=comp_func_body.replace(/>/gm,op).replace(/__gt__/gm,`__${$B.$comps[op]}__`).replace(/__le__/,`__${$B.$inv_comps[op]}__`);float[`__${$B.$comps[op]}__`]=Function("self","other",body)}var r_opnames=["add","sub","mul","truediv","floordiv","mod","pow","lshift","rshift","and","xor","or","divmod"];for(var r_opname of r_opnames){if(float["__r"+r_opname+"__"]===undefined&&float["__"+r_opname+"__"]){float["__r"+r_opname+"__"]=function(name){return function(self,other){var other_as_num=_b_.int.$to_js_number(other);if(other_as_num!==null){var other_as_float=$B.fast_float(other_as_num);return float["__"+name+"__"](other_as_float,self)}return _b_.NotImplemented}}(r_opname)}}function $FloatClass(value){return new Number(value)}function to_digits(s){var arabic_digits="٠١٢٣٤٥٦٧٨٩",res="";for(var i=0;i-1){res+=x}else{res+=s[i]}}return res}const fast_float=$B.fast_float=function(value){return{__class__:_b_.float,value:value}};var fast_float_with_hash=function(value,hash_value){return{__class__:_b_.float,__hashvalue__:hash_value,value:value}};float.$factory=function(value){if(value===undefined){return 
fast_float(0)}$B.check_nb_args_no_kw("float",1,arguments);switch(value){case true:return fast_float(1);case false:return fast_float(0)}var original_value=value;if(typeof value=="number"){return fast_float(value)}if(value.__class__===float){return value}if($B.$isinstance(value,_b_.memoryview)){value=_b_.memoryview.tobytes(value)}if($B.$isinstance(value,_b_.bytes)){try{value=$B.$getattr(value,"decode")("utf-8")}catch(err){throw _b_.ValueError.$factory("could not convert string to float: "+_b_.repr(original_value))}}if(typeof value=="string"){if(value.trim().length==0){throw _b_.ValueError.$factory(`could not convert string to float: ${_b_.repr(value)}`)}value=value.trim();switch(value.toLowerCase()){case"+inf":case"inf":case"+infinity":case"infinity":return fast_float(Number.POSITIVE_INFINITY);case"-inf":case"-infinity":return fast_float(Number.NEGATIVE_INFINITY);case"+nan":case"nan":return fast_float(Number.NaN);case"-nan":return fast_float(-Number.NaN);default:var parts=value.split("e");if(parts[1]){if(parts[1].startsWith("+")||parts[1].startsWith("-")){parts[1]=parts[1].substr(1)}}parts=parts[0].split(".").concat(parts.splice(1));for(var part of parts){if(part.startsWith("_")||part.endsWith("_")){throw _b_.ValueError.$factory("invalid float literal "+value)}}if(value.indexOf("__")>-1){throw _b_.ValueError.$factory("invalid float literal "+value)}value=value.charAt(0)+value.substr(1).replace(/_/g,"");value=to_digits(value);if(isFinite(value)){return fast_float(parseFloat(value))}else{throw _b_.TypeError.$factory("could not convert string to float: "+_b_.repr(original_value))}}}var klass=value.__class__,float_method=$B.$getattr(klass,"__float__",null);if(float_method===null){var index_method=$B.$getattr(klass,"__index__",null);if(index_method===null){throw _b_.TypeError.$factory("float() argument must be a string or a "+"number, not '"+$B.class_name(value)+"'")}var res=$B.$call(index_method)(value),klass=$B.get_class(res);if(klass===_b_.int){return fast_float(res)}else if(klass===$B.long_int){return $B.long_int.__float__(res)}else if(klass.__mro__.indexOf(_b_.int)>-1){var msg=`${$B.class_name(value)}.__index__ returned `+`non-int (type ${$B.class_name(res)}). The `+"ability to return an instance of a strict subclass"+" of int is deprecated, and may be removed in a "+"future version of Python.";$B.warn(_b_.DeprecationWarning,msg);return fast_float(res)}throw _b_.TypeError.$factory("__index__ returned non-int"+` (type ${$B.class_name(res)})`)}var res=$B.$call(float_method)(value),klass=$B.get_class(res);if(klass!==_b_.float){if(klass.__mro__.indexOf(_b_.float)>-1){var msg=`${$B.class_name(value)}.__float__ returned `+`non-float (type ${$B.class_name(res)}). 
The `+"ability to return an instance of a strict subclass"+" of float is deprecated, and may be removed in a "+"future version of Python.";$B.warn(_b_.DeprecationWarning,msg);return float.$factory(res.value)}throw _b_.TypeError.$factory("__float__ returned non-float"+` (type ${$B.class_name(res)})`)}return res};$B.$FloatClass=$FloatClass;$B.set_func_names(float,"builtins");float.fromhex=_b_.classmethod.$factory(float.fromhex);_b_.float=float;$B.MAX_VALUE=fast_float(Number.MAX_VALUE);$B.MIN_VALUE=fast_float(22250738585072014e-324);const NINF=fast_float(Number.NEGATIVE_INFINITY),INF=fast_float(Number.POSITIVE_INFINITY),NAN=fast_float(Number.NaN),ZERO=fast_float(0),NZERO=fast_float(-0)})(__BRYTHON__);(function($B){var _b_=$B.builtins;function $UnsupportedOpType(op,class1,class2){throw _b_.TypeError.$factory("unsupported operand type(s) for "+op+": '"+class1+"' and '"+class2+"'")}var complex={__class__:_b_.type,__dir__:_b_.object.__dir__,__qualname__:"complex",$is_class:true,$native:true,$descriptors:{real:true,imag:true}};complex.__abs__=function(self){var _rf=isFinite(self.$real.value),_if=isFinite(self.$imag.value);if(_rf&&isNaN(self.$imag.value)||_if&&isNaN(self.$real.value)||isNaN(self.$imag.value)&&isNaN(self.$real.value)){return $B.fast_float(NaN)}if(!_rf||!_if){return $B.fast_float(Infinity)}var mag=Math.sqrt(Math.pow(self.$real.value,2)+Math.pow(self.$imag.value,2));if(!isFinite(mag)&&_rf&&_if){throw _b_.OverflowError.$factory("absolute value too large")}return $B.fast_float(mag)};complex.__add__=function(self,other){if($B.$isinstance(other,complex)){return make_complex(self.$real.value+other.$real.value,self.$imag.value+other.$imag.value)}if($B.$isinstance(other,_b_.int)){other=_b_.int.numerator(other);return make_complex($B.rich_op("__add__",self.$real.value,other.valueOf()),self.$imag.value)}if($B.$isinstance(other,_b_.float)){return make_complex(self.$real.value+other.value,self.$imag.value)}return _b_.NotImplemented};complex.__bool__=function(self){return!$B.rich_comp("__eq__",self.$real,0)||!$B.rich_comp("__eq__",self.$imag,0)};complex.__complex__=function(self){if(self.__class__===complex){return self}return $B.make_complex(self.$real,self.$imag)};complex.__eq__=function(self,other){if($B.$isinstance(other,complex)){return self.$real.value==other.$real.value&&self.$imag.value==other.$imag.value}if($B.$isinstance(other,_b_.int)){if(self.$imag.value!=0){return false}return self.$real.value==other.valueOf()}if($B.$isinstance(other,_b_.float)){if(!$B.rich_comp("__eq__",0,self.$imag)){return false}return self.$real.value==other.value}return _b_.NotImplemented};const max_precision=2**31-4,max_repeat=2**30-1;complex.__format__=function(self,format_spec){if(format_spec.length==0){return _b_.str.$factory(self)}var fmt=new $B.parse_format_spec(format_spec,self),type=fmt.conversion_type;var default_precision=6,skip_re,add_parens;if(type===undefined||"eEfFgGn".indexOf(type)>-1){if(fmt.precision>max_precision){throw _b_.ValueError.$factory("precision too big")}if(fmt.fill_char=="0"){throw _b_.ValueError.$factory("Zero padding is not allowed in complex format specifier")}if(fmt.align=="="){throw _b_.ValueError.$factory("'=' alignment flag is not allowed in complex format "+"specifier")}var re=self.$real.value,im=self.$imag.value,precision=parseInt(fmt.precision,10);if(type===undefined){type="r";default_precision=0;if(re==0&&Object.is(re,0)){skip_re=1}else{add_parens=1}}else if(type=="n"){type="g"}if(precision<0){precision=6}else if(type=="r"){type="g"}var 
format=$B.clone(fmt);format.conversion_type=type;format.precision=precision;var res="";if(!skip_re){res+=_b_.float.$format(self.$real,format);if(self.$imag.value>=0){res+="+"}}var formatted_im=_b_.float.$format(self.$imag,format);var pos=-1,last_num;for(var char of formatted_im){pos++;if(char.match(/\d/)){last_num=pos}}formatted_im=formatted_im.substr(0,last_num+1)+"j"+formatted_im.substr(last_num+1);res+=formatted_im;if(add_parens){res="("+res+")"}return res}throw _b_.ValueError.$factory(`invalid type for complex: ${type}`)};complex.$getnewargs=function(self){return $B.fast_tuple([self.$real,self.$imag])};complex.__getnewargs__=function(){return complex.$getnewargs($B.single_arg("__getnewargs__","self",arguments))};complex.__hash__=function(self){return $B.$hash(self.$real)+$B.$hash(self.$imag)*1000003};complex.__init__=function(){return _b_.None};complex.__invert__=function(self){return~self};complex.__mro__=[_b_.object];complex.__mul__=function(self,other){if($B.$isinstance(other,complex)){return make_complex(self.$real.value*other.$real.value-self.$imag.value*other.$imag.value,self.$imag.value*other.$real.value+self.$real.value*other.$imag.value)}else if($B.$isinstance(other,_b_.int)){return make_complex(self.$real.value*other.valueOf(),self.$imag.value*other.valueOf())}else if($B.$isinstance(other,_b_.float)){return make_complex(self.$real.value*other.value,self.$imag.value*other.value)}else if($B.$isinstance(other,_b_.bool)){if(other.valueOf()){return self}return make_complex(0,0)}$UnsupportedOpType("*",complex,other)};complex.__ne__=function(self,other){var res=complex.__eq__(self,other);return res===_b_.NotImplemented?res:!res};complex.__neg__=function(self){return make_complex(-self.$real.value,-self.$imag.value)};complex.__new__=function(cls){if(cls===undefined){throw _b_.TypeError.$factory("complex.__new__(): not enough arguments")}var res,missing={},$=$B.args("complex",3,{cls:null,real:null,imag:null},["cls","real","imag"],arguments,{real:0,imag:missing},null,null),cls=$.cls,first=$.real,second=$.imag;if(typeof first=="string"){if(second!==missing){throw _b_.TypeError.$factory("complex() can't take second arg "+"if first is a string")}else{var arg=first;first=first.trim();if(first.startsWith("(")&&first.endsWith(")")){first=first.substr(1);first=first.substr(0,first.length-1)}var complex_re=/^\s*([\+\-]*[0-9_]*\.?[0-9_]*(e[\+\-]*[0-9_]*)?)([\+\-]?)([0-9_]*\.?[0-9_]*(e[\+\-]*[0-9_]*)?)(j?)\s*$/i;var parts=complex_re.exec(first);function to_num(s){var res=parseFloat(s.charAt(0)+s.substr(1).replace(/_/g,""));if(isNaN(res)){throw _b_.ValueError.$factory("could not convert string "+"to complex: '"+arg+"'")}return res}if(parts===null){throw _b_.ValueError.$factory("complex() arg is a malformed string")}if(parts[_real]&&parts[_imag].startsWith(".")&&parts[_sign]==""){throw _b_.ValueError.$factory("complex() arg is a malformed string")}else if(parts[_real]=="."||parts[_imag]=="."||parts[_real]==".e"||parts[_imag]==".e"||parts[_real]=="e"||parts[_imag]=="e"){throw _b_.ValueError.$factory("complex() arg is a malformed string")}else if(parts[_j]!=""){if(parts[_sign]==""){first=0;if(parts[_real]=="+"||parts[_real]==""){second=1}else if(parts[_real]=="-"){second=-1}else{second=to_num(parts[_real])}}else{first=to_num(parts[_real]);second=parts[_imag]==""?1:to_num(parts[_imag]);second=parts[_sign]=="-"?-second:second}}else{if(parts[_sign]&&parts[_imag]==""){throw _b_.ValueError.$factory("complex() arg is a malformed 
string")}first=to_num(parts[_real]);second=0}res=make_complex(first,second);res.__class__=cls;res.__dict__=$B.empty_dict();return res}}if(first.__class__===complex&&cls===complex&&second===missing){return first}var arg1=_convert(first),r,i;if(arg1===null){throw _b_.TypeError.$factory("complex() first argument must be a "+`string or a number, not '${$B.class_name(first)}'`)}if(typeof second=="string"){throw _b_.TypeError.$factory("complex() second arg can't be a string")}var arg2=_convert(second===missing?0:second);if(arg2===null){throw _b_.TypeError.$factory("complex() second argument must be a "+`number, not '${$B.class_name(second)}'`)}if(arg1.method=="__complex__"){if(arg2.method=="__complex__"){r=$B.rich_op("__sub__",arg1.result.$real,arg2.result.$imag);i=$B.rich_op("__add__",arg1.result.$imag,arg2.result.$real)}else{r=arg1.result.$real;i=$B.rich_op("__add__",arg1.result.$imag,arg2.result)}}else{if(arg2.method=="__complex__"){r=$B.rich_op("__sub__",arg1.result,arg2.result.$imag);i=arg2.result.$real}else{r=arg1.result;i=arg2.result}}var res=make_complex(r,i);res.__class__=cls;res.__dict__=$B.empty_dict();return res};complex.__pos__=function(self){return self};function complex2expo(cx){var norm=Math.sqrt(cx.$real.value*cx.$real.value+cx.$imag.value*cx.$imag.value),sin=cx.$imag.value/norm,cos=cx.$real.value/norm,angle;if(cos==0){angle=sin==1?Math.PI/2:3*Math.PI/2}else if(sin==0){angle=cos==1?0:Math.PI}else{angle=Math.atan(sin/cos)}return{norm:norm,angle:angle}}function hypot(){var $=$B.args("hypot",0,{},[],arguments,{},"args",null);return _b_.float.$factory(Math.hypot(...$.args))}function c_powi(x,n){if(n>0){return c_powu(x,n)}else{return c_quot(c_1,c_powu(x,-n))}}function c_powu(x,n){var r,p,mask=1,r=c_1,p=x;while(mask>0&&n>=mask){if(n&mask){r=c_prod(r,p)}mask<<=1;p=c_prod(p,p)}return r}function c_prod(a,b){return make_complex(a.$real.value*b.$real.value-a.$imag.value*b.$imag.value,a.$real.value*b.$imag.value+a.$imag.value*b.$real.value)}function c_quot(a,b){var r,abs_breal=Math.abs(b.$real.value),abs_bimag=Math.abs(b.$imag.value);if($B.rich_comp("__ge__",abs_breal,abs_bimag)){if(abs_breal==0){throw _b_.ZeroDivisionError.$factory()}else{var ratio=b.$imag.value/b.$real.value,denom=b.$real.value+b.$imag.value*ratio;return make_complex((a.$real.value+a.$imag.value*ratio)/denom,(a.$imag.value-a.$real.value*ratio)/denom)}}else if(abs_bimag>=abs_breal){var ratio=b.$real.value/b.$imag.value,denom=b.$real.value*ratio+b.$imag.value;if(b.$imag.value==0){throw _b_.ZeroDivisionError.$factory()}return make_complex((a.$real.value*ratio+a.$imag.value)/denom,(a.$imag.value*ratio-a.$real.value)/denom)}else{return $B.make_complex("nan","nan")}}complex.__pow__=function(self,other,mod){if(mod!==undefined&&mod!==_b_.None){throw _b_.ValueError.$factory("complex modulo")}if($B.rich_comp("__eq__",other,1)){var funcs=_b_.float.$funcs;if(funcs.isinf(self.$real)||funcs.isninf(self.$real)||funcs.isinf(self.$imag)||funcs.isninf(self.$imag)){throw _b_.OverflowError.$factory("complex exponentiation")}return self}var small_int=null;if($B.$isinstance(other,_b_.int)&&_b_.abs(other)<100){small_int=other}else if($B.$isinstance(other,_b_.float)&&Number.isInteger(other.value)&&Math.abs(other.value<100)){small_int=other.value}else if($B.$isinstance(other,complex)&&other.$imag.value==0&&Number.isInteger(other.$real.value)&&Math.abs(other.$real.value)<100){small_int=other.$real.value}if(small_int!==null){return 
c_powi(self,small_int)}if($B.$isinstance(other,_b_.float)){other=_b_.float.$to_js_number(other)}if(self.$real.value==0&&self.$imag.value==0){if($B.$isinstance(other,complex)&&(other.$imag.value!=0||other.$real.value<0)){throw _b_.ZeroDivisionError.$factory("0.0 to a negative or complex power")}return $B.make_complex(0,0)}var exp=complex2expo(self),angle=exp.angle,res=Math.pow(exp.norm,other);if($B.$isinstance(other,_b_.int)){return make_complex(res*Math.cos(angle*other),res*Math.sin(angle*other))}else if($B.$isinstance(other,_b_.float)){return make_complex(res*Math.cos(angle*other.value),res*Math.sin(angle*other.value))}else if($B.$isinstance(other,complex)){var x=other.$real.value,y=other.$imag.value;var pw=Math.pow(exp.norm,x)*Math.pow(Math.E,-y*angle),theta=y*Math.log(exp.norm)-x*angle;if(pw==Number.POSITIVE_INFINITY||pw===Number.NEGATIVE_INFINITY){throw _b_.OverflowError.$factory("complex exponentiation")}return make_complex(pw*Math.cos(theta),pw*Math.sin(theta))}else{throw _b_.TypeError.$factory("unsupported operand type(s) "+"for ** or pow(): 'complex' and '"+$B.class_name(other)+"'")}};complex.__radd__=function(self,other){if($B.$isinstance(other,_b_.bool)){other=other?1:0}if($B.$isinstance(other,_b_.int)){return make_complex(other+self.$real.value,self.$imag.value)}else if($B.$isinstance(other,_b_.float)){return make_complex(other.value+self.$real.value,self.$imag.value)}return _b_.NotImplemented};complex.__repr__=function(self){$B.builtins_repr_check(complex,arguments);var real=Number.isInteger(self.$real.value)?self.$real.value+"":_b_.str.$factory(self.$real),imag=Number.isInteger(self.$imag.value)?self.$imag.value+"":_b_.str.$factory(self.$imag);if(imag.endsWith(".0")){imag=imag.substr(0,imag.length-2)}if(Object.is(self.$imag.value,-0)){imag="-0"}var sign=imag.startsWith("-")?"":"+";if(self.$real.value==0){if(Object.is(self.$real.value,-0)){return"(-0"+sign+imag+"j)"}else{return imag+"j"}}if(self.$imag.value>0||isNaN(self.$imag.value)){return"("+real+"+"+imag+"j)"}if(self.$imag.value==0){if(1/self.$imag.value<0){return"("+real+"-0j)"}return"("+real+"+0j)"}return"("+real+sign+imag+"j)"};complex.__rmul__=function(self,other){if($B.$isinstance(other,_b_.bool)){other=other?1:0}if($B.$isinstance(other,_b_.int)){return make_complex(other*self.$real.value,other*self.$imag.value)}else if($B.$isinstance(other,_b_.float)){return make_complex(other.value*self.$real.value,other.value*self.$imag.value)}return _b_.NotImplemented};complex.__sub__=function(self,other){if($B.$isinstance(other,complex)){return make_complex(self.$real.value-other.$real.value,self.$imag.value-other.$imag.value)}if($B.$isinstance(other,_b_.int)){other=_b_.int.numerator(other);return make_complex(self.$real.value-other.valueOf(),self.$imag.value)}if($B.$isinstance(other,_b_.float)){return make_complex(self.$real.value-other.value,self.$imag.value)}return _b_.NotImplemented};complex.__truediv__=function(self,other){if($B.$isinstance(other,complex)){if(other.$real.value==0&&other.$imag.value==0){throw _b_.ZeroDivisionError.$factory("division by zero")}var _num=self.$real.value*other.$real.value+self.$imag.value*other.$imag.value,_div=other.$real.value*other.$real.value+other.$imag.value*other.$imag.value;var _num2=self.$imag.value*other.$real.value-self.$real.value*other.$imag.value;return make_complex(_num/_div,_num2/_div)}if($B.$isinstance(other,_b_.int)){if(!other.valueOf()){throw _b_.ZeroDivisionError.$factory("division by zero")}return 
complex.__truediv__(self,complex.$factory(other.valueOf()))}if($B.$isinstance(other,_b_.float)){if(!other.value){throw _b_.ZeroDivisionError.$factory("division by zero")}return complex.__truediv__(self,complex.$factory(other.value))}$UnsupportedOpType("//","complex",other.__class__)};complex.conjugate=function(self){return make_complex(self.$real.value,-self.$imag.value)};complex.__ior__=complex.__or__;var r_opnames=["add","sub","mul","truediv","floordiv","mod","pow","lshift","rshift","and","xor","or"];for(var r_opname of r_opnames){if(complex["__r"+r_opname+"__"]===undefined&&complex["__"+r_opname+"__"]){complex["__r"+r_opname+"__"]=function(name){return function(self,other){if($B.$isinstance(other,_b_.int)){other=make_complex(other,0);return complex["__"+name+"__"](other,self)}else if($B.$isinstance(other,_b_.float)){other=make_complex(other.value,0);return complex["__"+name+"__"](other,self)}else if($B.$isinstance(other,complex)){return complex["__"+name+"__"](other,self)}return _b_.NotImplemented}}(r_opname)}}var comp_func_body=`\n var _b_ = __BRYTHON__.builtins\n if(other === undefined || other == _b_.None){\n return _b_.NotImplemented\n }\n throw _b_.TypeError.$factory("no ordering relation " +\n "is defined for complex numbers")`;for(var $op in $B.$comps){complex["__"+$B.$comps[$op]+"__"]=Function("self","other",comp_func_body.replace(/>/gm,$op))}complex.real=function(self){return self.$real};complex.real.setter=function(){throw _b_.AttributeError.$factory("readonly attribute")};complex.imag=function(self){return self.$imag};complex.imag.setter=function(){throw _b_.AttributeError.$factory("readonly attribute")};var _real=1,_real_mantissa=2,_sign=3,_imag=4,_imag_mantissa=5,_j=6;var expected_class={__complex__:complex,__float__:_b_.float,__index__:_b_.int};function _convert(obj){var klass=obj.__class__||$B.get_class(obj);for(var method_name in expected_class){var missing={},method=$B.$getattr(klass,method_name,missing);if(method!==missing){var res=method(obj);if(!$B.$isinstance(res,expected_class[method_name])){throw _b_.TypeError.$factory(method_name+"returned non-"+expected_class[method_name].__name__+"(type "+$B.get_class(res)+")")}if(method_name=="__index__"&&$B.rich_comp("__gt__",res,__BRYTHON__.MAX_VALUE)){throw _b_.OverflowError.$factory("int too large to convert to float")}if(method_name=="__complex__"&&res.__class__!==complex){$B.warn(_b_.DeprecationWarning,"__complex__ returned "+`non-complex (type ${$B.class_name(res)}). 
`+"The ability to return an instance of a strict subclass "+"of complex is deprecated, and may be removed in a future "+"version of Python.")}return{result:res,method:method_name}}}return null}var make_complex=$B.make_complex=function(real,imag){return{__class__:complex,$real:_b_.float.$factory(real),$imag:_b_.float.$factory(imag)}};var c_1=make_complex(1,0);complex.$factory=function(){return complex.__new__(complex,...arguments)};$B.set_func_names(complex,"builtins");_b_.complex=complex})(__BRYTHON__);(function($B){var _b_=$B.builtins;var str_hash=_b_.str.__hash__,$N=_b_.None;var set_ops=["eq","le","lt","ge","gt","sub","rsub","and","rand","or","ror","xor","rxor"];function is_sublist(t1,t2){for(var i=0,ilen=t1.length;i-1){continue}else if(!_b_.hasattr(v.__class__,"__hash__")){return false}}return true};dict.$iter_items_with_hash=function*(d){if(d.$all_str){for(var key in d.$strings){if(key!="$dict_strings"){yield{key:key,value:d.$strings[key]}}}}if(d.$jsobj){for(var key in d.$jsobj){if(!d.$exclude||!d.$exclude(key)){yield{key:key,value:d.$jsobj[key]}}}}else if(d.__class__===$B.jsobj_as_pydict){for(var key in d.obj){yield{key:key,value:d.obj[key]}}}else{var version=d.$version;for(var i=0,len=d._keys.length;i0};dict.__class_getitem__=function(cls,item){if(!Array.isArray(item)){item=[item]}return $B.GenericAlias.$factory(cls,item)};dict.$lookup_by_key=function(d,key,hash){hash=hash===undefined?_b_.hash(key):hash;var indices=d.table[hash],index;if(indices!==undefined){for(var i=0,len=indices.length;ix!==undefined)))}return res};dict.$setitem_string=function(self,key,value){if(self.$all_str){self.$strings[key]=value;return _b_.None}else{var h=_b_.hash(key),indices=self.table[h];if(indices!==undefined){self._values[indices[0]]=value;return _b_.None}}var index=self._keys.length;self.$strings[key]=index;self._keys.push(key);self._values.push(value);self.$version++;return _b_.None};dict.$getitem=function(self,key,ignore_missing){if(self.$all_str){if(typeof key=="string"){if(self.$strings.hasOwnProperty(key)){return self.$strings[key]}}else{var hash_method=$B.$getattr($B.get_class(key),"__hash__");if(hash_method!==_b_.object.__hash__){convert_all_str(self);var lookup=dict.$lookup_by_key(self,key);if(lookup.found){return lookup.value}}}}else if(self.$jsobj){if(self.$exclude&&self.$exclude(key)){throw _b_.KeyError.$factory(key)}if(self.$jsobj.hasOwnProperty(key)){return self.$jsobj[key]}if(!self.table){throw _b_.KeyError.$factory(key)}}else{var lookup=dict.$lookup_by_key(self,key);if(lookup.found){return lookup.value}}if(!ignore_missing){if(self.__class__!==dict&&!ignore_missing){try{var missing_method=$B.$getattr(self.__class__,"__missing__",_b_.None)}catch(err){console.log(err)}if(missing_method!==_b_.None){return missing_method(self,key)}}}throw _b_.KeyError.$factory(key)};dict.__hash__=_b_.None;function init_from_list(self,args){var i=0;for(var item of args){if(item.length!=2){throw _b_.ValueError.$factory("dictionary "+`update sequence element #${i} has length ${item.length}; 2 is required`)}dict.$setitem(self,item[0],item[1]);i++}}dict.__init__=function(self,first,second){if(first===undefined){return _b_.None}if(second===undefined){if(!first.$kw&&$B.$isinstance(first,$B.JSObj)){for(var key in first){dict.$setitem(self,key,first[key])}return _b_.None}else if(first.$jsobj){self.$jsobj={};for(var attr in first.$jsobj){self.$jsobj[attr]=first.$jsobj[attr]}self.$all_str=false;return $N}else if(first[Symbol.iterator]){init_from_list(self,first);return $N}else 
if(first.__class__===$B.generator){init_from_list(self,first.js_gen);return $N}}var $=$B.args("dict",1,{self:null},["self"],arguments,{},"first","second");var args=$.first;if(args.length>1){throw _b_.TypeError.$factory("dict expected at most 1 argument"+", got 2")}else if(args.length==1){args=args[0];if(args.__class__===dict){for(var entry of dict.$iter_items_with_hash(args)){dict.$setitem(self,entry.key,entry.value,entry.hash)}}else{var keys=$B.$getattr(args,"keys",null);if(keys!==null){var gi=$B.$getattr(args,"__getitem__",null);if(gi!==null){gi=$B.$call(gi);var kiter=_b_.iter($B.$call(keys)());while(true){try{var key=_b_.next(kiter),value=gi(key);dict.__setitem__(self,key,value)}catch(err){if(err.__class__===_b_.StopIteration){break}throw err}}return $N}}if(!Array.isArray(args)){args=_b_.list.$factory(args)}init_from_list(self,args)}}for(var key in $.second.$jsobj){dict.$setitem(self,key,$.second.$jsobj[key])}return _b_.None};dict.__iter__=function(self){return _b_.iter(dict.keys(self))};dict.__ior__=function(self,other){dict.update(self,other);return self};dict.__len__=function(self){var _count=0;if(self.$all_str){return Object.keys(self.$strings).length}if(self.$jsobj){for(var attr in self.$jsobj){if(attr.charAt(0)!="$"&&(!self.$exclude||!self.$exclude(attr))){_count++}}return _count}for(var d of self._keys){if(d!==undefined){_count++}}return _count};dict.__ne__=function(self,other){var res=dict.__eq__(self,other);return res===_b_.NotImplemented?res:!res};dict.__new__=function(cls){if(cls===undefined){throw _b_.TypeError.$factory("int.__new__(): not enough arguments")}var instance=$B.empty_dict();instance.__class__=cls;if(cls!==dict){instance.__dict__=$B.empty_dict()}return instance};dict.__or__=function(self,other){if(!$B.$isinstance(other,dict)){return _b_.NotImplemented}var res=dict.copy(self);dict.update(res,other);return res};dict.__repr__=function(self){$B.builtins_repr_check(dict,arguments);if(self.$jsobj){return dict.__repr__(jsobj2dict(self.$jsobj,self.$exclude))}if($B.repr.enter(self)){return"{...}"}var res=[],key,value;for(var entry of dict.$iter_items_with_hash(self)){res.push(_b_.repr(entry.key)+": "+_b_.repr(entry.value))}$B.repr.leave(self);return"{"+res.join(", ")+"}"};dict.$iter_items_reversed=function*(d){var version=d.$version;if(d.$all_str){for(var item of Object.entries(d.$strings).reverse()){yield $B.fast_tuple(item);if(d.$version!==version){throw _b_.RuntimeError.$factory("changed in iteration")}}}else{for(var i=d._keys.length-1;i>=0;i--){var key=d._keys[i];if(key!==undefined){yield $B.fast_tuple([key,d._values[i]]);if(d.$version!==version){throw _b_.RuntimeError.$factory("changed in iteration")}}}}if(d.$version!==version){throw _b_.RuntimeError.$factory("changed in iteration")}};dict.$iter_keys_reversed=function*(d){for(var entry of dict.$iter_items_reversed(d)){yield entry[0]}};dict.$iter_values_reversed=function*(d){for(var entry of dict.$iter_items_reversed(d)){yield entry[1]}};function make_reverse_iterator(name,iter_func){var klass=$B.make_class(name,(function(d){return{__class__:klass,d:d,iter:iter_func(d),make_iter:function(){return iter_func(d)}}}));klass.__iter__=function(self){self[Symbol.iterator]=self.make_iter;return self};klass.__next__=function(self){var res=self.iter.next();if(res.done){throw _b_.StopIteration.$factory("")}return res.value};klass.__reduce_ex__=function(self,protocol){return $B.fast_tuple([_b_.iter,$B.fast_tuple([Array.from(self.make_iter())])])};$B.set_func_names(klass,"builtins");return klass}const 
dict_reversekeyiterator=make_reverse_iterator("dict_reversekeyiterator",dict.$iter_keys_reversed);dict.__reversed__=function(self){return dict_reversekeyiterator.$factory(self)};dict.__ror__=function(self,other){if(!$B.$isinstance(other,dict)){return _b_.NotImplemented}var res=dict.copy(other);dict.update(res,self);return res};dict.__setitem__=function(self,key,value){var $=$B.args("__setitem__",3,{self:null,key:null,value:null},["self","key","value"],arguments,{},null,null);return dict.$setitem($.self,$.key,$.value)};function convert_all_str(d){d.$all_str=false;for(var key in d.$strings){dict.$setitem(d,key,d.$strings[key])}}dict.$setitem=function(self,key,value,$hash,from_setdefault){if(self.$all_str){if(typeof key=="string"){var int=parseInt(key);if(isNaN(int)||int>=0){self.$strings[key]=value;return _b_.None}else{convert_all_str(self)}}else{convert_all_str(self)}}if(self.$jsobj){if(self.$from_js){value=$B.pyobj2jsobj(value)}if(self.$jsobj.__class__===_b_.type){self.$jsobj[key]=value;if(key=="__init__"||key=="__new__"){self.$jsobj.$factory=$B.$instance_creator(self.$jsobj)}}else{self.$jsobj[key]=value}return $N}else if(self.__class__===$B.jsobj_as_pydict){return $B.jsobj_as_pydict.__setitem__(self,key,value)}if(key instanceof String){key=key.valueOf()}var hash=$hash!==undefined?$hash:$B.$hash(key);var index;if(self.table[hash]===undefined){index=self._keys.length;self.table[hash]=[index]}else{if(!from_setdefault){var lookup=dict.$lookup_by_key(self,key,hash);if(lookup.found){self._values[lookup.index]=value;return _b_.None}}index=self._keys.length;if(self.table[hash]===undefined){self.table[hash]=[index]}else{self.table[hash].push(index)}}self._keys.push(key);self._values.push(value);self._hashes.push(hash);self.$version++;return _b_.None};$B.make_rmethods(dict);dict.clear=function(){var $=$B.args("clear",1,{self:null},["self"],arguments,{},null,null),self=$.self;self.table=Object.create(null);self._keys=[];self._values=[];self.$all_str=true;self.$strings=new $B.str_dict;if(self.$jsobj){for(var attr in self.$jsobj){if(attr.charAt(0)!=="$"&&attr!=="__class__"){delete self.$jsobj[attr]}}}self.$version++;return $N};dict.copy=function(self){var $=$B.args("copy",1,{self:null},["self"],arguments,{},null,null),self=$.self,res=$B.empty_dict();if(self.__class__===_b_.dict){$copy_dict(res,self);return res}var it=$B.make_js_iterator(self);for(var k of it){console.log("iteration yields key",k)}return res};dict.fromkeys=function(){var $=$B.args("fromkeys",3,{cls:null,keys:null,value:null},["cls","keys","value"],arguments,{value:_b_.None},null,null),keys=$.keys,value=$.value;var cls=$.cls,res=$B.$call(cls)(),klass=$B.get_class(res),keys_iter=$B.$iter(keys),setitem=klass===dict?dict.$setitem:$B.$getattr(klass,"__setitem__");while(1){try{var key=_b_.next(keys_iter);setitem(res,key,value)}catch(err){if($B.is_exc(err,[_b_.StopIteration])){return res}throw err}}};dict.get=function(){var $=$B.args("get",3,{self:null,key:null,_default:null},["self","key","_default"],arguments,{_default:$N},null,null);try{return dict.$getitem($.self,$.key,true)}catch(err){if($B.$isinstance(err,_b_.KeyError)){return $._default}else{throw err}}};var dict_items=$B.make_class("dict_items",(function(d){return{__class__:dict_items,dict:d,make_iter:function*(){for(var entry of dict.$iter_items_with_hash(d)){yield $B.fast_tuple([entry.key,entry.value])}}}}));dict_items.__iter__=function(self){return dict_itemiterator.$factory(self.make_iter)};dict_items.__len__=function(self){return 
dict.__len__(self.dict)};dict_items.__reduce__=function(self){var items=Array.from(self.make_iter());return $B.fast_tuple([_b_.iter,$B.fast_tuple([items])])};dict_items.__repr__=function(self){var items=Array.from(self.make_iter());items=items.map($B.fast_tuple);return"dict_items("+_b_.repr(items)+")"};const dict_reverseitemiterator=make_reverse_iterator("dict_reverseitemiterator",dict.$iter_items_reversed);dict_items.__reversed__=function(self){return dict_reverseitemiterator.$factory(self.dict)};make_view_comparison_methods(dict_items);$B.set_func_names(dict_items,"builtins");var dict_itemiterator=$B.make_class("dict_itemiterator",(function(make_iter){return{__class__:dict_itemiterator,iter:make_iter(),make_iter:make_iter}}));dict_itemiterator.__iter__=function(self){self[Symbol.iterator]=function(){return self.iter};return self};dict_itemiterator.__next__=function(self){var res=self.iter.next();if(res.done){throw _b_.StopIteration.$factory("")}return $B.fast_tuple(res.value)};dict_itemiterator.__reduce_ex__=function(self,protocol){return $B.fast_tuple([_b_.iter,$B.fast_tuple([Array.from(self.make_iter())])])};$B.set_func_names(dict_itemiterator,"builtins");dict.items=function(self){var $=$B.args("items",1,{self:null},["self"],arguments,{},null,null);return dict_items.$factory(self)};var dict_keys=$B.make_class("dict_keys",(function(d){return{__class__:dict_keys,dict:d,make_iter:function(){return dict.$iter_keys_check(d)}}}));dict_keys.__iter__=function(self){return dict_keyiterator.$factory(self.make_iter)};dict_keys.__len__=function(self){return dict.__len__(self.dict)};dict_keys.__reduce__=function(self){var items=Array.from(self.make_iter());return $B.fast_tuple([_b_.iter,$B.fast_tuple([items])])};dict_keys.__repr__=function(self){var items=Array.from(self.make_iter());return"dict_keys("+_b_.repr(items)+")"};dict_keys.__reversed__=function(self){return dict_reversekeyiterator.$factory(self.dict)};make_view_comparison_methods(dict_keys);$B.set_func_names(dict_keys,"builtins");var dict_keyiterator=$B.make_class("dict_keyiterator",(function(make_iter){return{__class__:dict_keyiterator,iter:make_iter(),make_iter:make_iter}}));dict_keyiterator.__iter__=function(self){self[Symbol.iterator]=function(){return self.iter};return self};dict_keyiterator.__next__=function(self){var res=self.iter.next();if(res.done){throw _b_.StopIteration.$factory("")}return res.value};dict_keyiterator.__reduce_ex__=function(self,protocol){return $B.fast_tuple([_b_.iter,$B.fast_tuple([Array.from(self.make_iter())])])};$B.set_func_names(dict_keyiterator,"builtins");dict.keys=function(self){var $=$B.args("keys",1,{self:null},["self"],arguments,{},null,null);return dict_keys.$factory(self)};dict.pop=function(){var missing={},$=$B.args("pop",3,{self:null,key:null,_default:null},["self","key","_default"],arguments,{_default:missing},null,null),self=$.self,key=$.key,_default=$._default;try{var res=dict.__getitem__(self,key);dict.__delitem__(self,key);return res}catch(err){if(err.__class__===_b_.KeyError){if(_default!==missing){return _default}throw err}throw err}};dict.popitem=function(self){$B.check_nb_args_no_kw("popitem",1,arguments);if(dict.__len__(self)==0){throw _b_.KeyError.$factory("'popitem(): dictionary is empty'")}if(self.$all_str){for(var key in self.$strings){}var res=$B.fast_tuple([key,self.$strings[key]]);delete self.$strings[key];self.$version++;return res}var index=self._keys.length-1;while(index>=0){if(self._keys[index]!==undefined){var res=$B.fast_tuple([self._keys[index],self._values[index]]);delete 
self._keys[index];delete self._values[index];self.$version++;return res}index--}};dict.setdefault=function(){var $=$B.args("setdefault",3,{self:null,key:null,_default:null},["self","key","_default"],arguments,{_default:$N},null,null),self=$.self,key=$.key,_default=$._default;_default=_default===undefined?_b_.None:_default;if(self.$all_str){if(!self.$strings.hasOwnProperty(key)){self.$strings[key]=_default}return self.$strings[key]}if(self.$jsobj){if(!self.$jsobj.hasOwnProperty(key)){self.$jsobj[key]=_default}return self.$jsobj[key]}var lookup=dict.$lookup_by_key(self,key);if(lookup.found){return lookup.value}var hash=lookup.hash;dict.$setitem(self,key,_default,hash,true);return _default};dict.update=function(self){var $=$B.args("update",1,{self:null},["self"],arguments,{},"args","kw"),self=$.self,args=$.args,kw=$.kw;if(args.length>0){var o=args[0];if($B.$isinstance(o,dict)){if(o.$jsobj){o=jsobj2dict(o.$jsobj)}$copy_dict(self,o)}else if(_b_.hasattr(o,"keys")){var _keys=_b_.list.$factory($B.$call($B.$getattr(o,"keys"))());for(var i=0,len=_keys.length;i-1){continue}if(typeof dict[attr]=="function"){mappingproxy[attr]=function(key){return function(){return dict[key].apply(null,arguments)}}(attr)}else{mappingproxy[attr]=dict[attr]}}$B.set_func_names(mappingproxy,"builtins");function jsobj2dict(x,exclude){exclude=exclude||function(){return false};var d=$B.empty_dict();for(var attr in x){if(attr.charAt(0)!="$"&&!exclude(attr)){if(x[attr]===null){dict.$setitem(d,attr,_b_.None)}else if(x[attr]===undefined){continue}else if(x[attr].$jsobj===x){dict.$setitem(d,attr,d)}else{dict.$setitem(d,attr,$B.$JS2Py(x[attr]))}}}return d}$B.obj_dict=function(obj,exclude){var klass=obj.__class__||$B.get_class(obj);if(klass!==undefined&&klass.$native){throw $B.attr_error("__dict__",obj)}var res={__class__:dict,$jsobj:obj,$exclude:exclude||function(){return false}};return res};var jsobj_as_pydict=$B.jsobj_as_pydict=$B.make_class("jsobj_as_pydict",(function(jsobj){return{__class__:jsobj_as_pydict,obj:jsobj||{},new_keys:[],$version:0}}));jsobj_as_pydict.__contains__=function(self,key){if(self.new_keys.indexOf(key)>-1){return true}return self.obj[key]!==undefined};jsobj_as_pydict.__delitem__=function(self,key){jsobj_as_pydict.__getitem__(self,key);delete self.obj[key];var ix=self.new_keys.indexOf(key);if(ix>-1){self.new_keys.splice(ix,1)}};jsobj_as_pydict.__eq__=function(self,other){if(other.__class__!==jsobj_as_pydict&&!$B.$isinstance(other,_b_.dict)){return _b_.NotImplemented}var self1=$B.empty_dict(),other1=$B.empty_dict();dict.__init__(self1,jsobj_as_pydict.items(self));dict.__init__(other1,$B.get_class(other).items(other));return dict.__eq__(self1,other1)};jsobj_as_pydict.__ne__=function(self,other){var eq=jsobj_as_pydict.__eq__(self,other);return eq===_b_.NotImplemented?eq:!eq};jsobj_as_pydict.__getitem__=function(self,key){if(self.obj.hasOwnProperty(key)){return self.obj[key]}throw _b_.KeyError.$factory(key)};jsobj_as_pydict.__iter__=function(self){return _b_.iter(jsobj_as_pydict.keys(self))};jsobj_as_pydict.__len__=function(self){var len=0;for(var key in self.obj){len++}return len+self.new_keys.length};jsobj_as_pydict.__or__=function(self,other){if(!$B.$isinstance(other,[dict,jsobj_as_pydict])){return _b_.NotImplemented}var res=jsobj_as_pydict.copy(self);jsobj_as_pydict.update(res,other);return res};jsobj_as_pydict.__repr__=function(self){if($B.repr.enter(self)){return"{...}"}var res=[],items=_b_.list.$factory(jsobj_as_pydict.items(self));for(var item of items){res.push(_b_.repr(item[0])+": 
"+_b_.repr(item[1]))}$B.repr.leave(self);return"{"+res.join(", ")+"}"};jsobj_as_pydict.__setitem__=function(self,key,value){self.obj[key]=value};jsobj_as_pydict.clear=function(self){self.obj={};return _b_.None};jsobj_as_pydict.copy=function(self){var copy=jsobj_as_pydict.$factory();for(var key in self.obj){copy.obj[key]=self.obj[key]}return copy};jsobj_as_pydict.get=function(self,key,_default){_default=_default===undefined?_b_.None:_default;if(!self.obj.hasOwnProperty(key)){return _default}return self.obj[key]};jsobj_as_pydict.$iter_items=function*(self){for(var key in self.obj){yield $B.fast_tuple([key,self.obj[key]])}};jsobj_as_pydict.items=function(self){var items=Array.from(jsobj_as_pydict.$iter_items(self));return _b_.iter(items)};jsobj_as_pydict.keys=function(self){var items=Array.from(jsobj_as_pydict.$iter_items(self)),keys=items.map((x=>x[0]));return _b_.iter(keys)};jsobj_as_pydict.pop=function(){var missing={},$=$B.args("pop",3,{self:null,key:null,_default:null},["self","key","_default"],arguments,{_default:missing},null,null),self=$.self,key=$.key,_default=$._default;if(self.obj.hasOwnProperty(key)){var res=self.obj[key];delete self.obj[key];return res}else{if(_default!==missing){return _default}throw _b_.KeyError.$factory(key)}};jsobj_as_pydict.popitem=function(self){$B.check_nb_args_no_kw("popitem",1,arguments);for(var key in self.obj){var res=$B.fast_tuple([key,self.obj[key]]);delete self.obj[key];return res}throw _b_.KeyError.$factory("'popitem(): dictionary is empty'")};jsobj_as_pydict.update=function(self,other){var klass=$B.get_class(other),keys=$B.$call($B.$getattr(klass,"keys")),getitem;for(var key of $B.make_js_iterator(keys(other))){if(!getitem){getitem=$B.$call($B.$getattr(klass,"__getitem__"))}self.obj[key]=getitem(other,key)}return _b_.None};jsobj_as_pydict.values=function(self){var items=Array.from(jsobj_as_pydict.$iter_items(self)),values=items.map((x=>x[1]));return _b_.iter(values)};$B.set_func_names(jsobj_as_pydict,"builtins")})(__BRYTHON__);(function($B){var _b_=$B.builtins,object=_b_.object,getattr=$B.$getattr,isinstance=$B.$isinstance;function check_not_tuple(self,attr){if(self.__class__===tuple){throw $B.attr_error(attr,self)}}function $list(){return list.$factory.apply(null,arguments)}var list={__class__:_b_.type,__qualname__:"list",__mro__:[object],$is_class:true,$native:true,$match_sequence_pattern:true,__dir__:object.__dir__};list.__add__=function(self,other){if($B.get_class(self)!==$B.get_class(other)){var this_name=$B.class_name(self);var radd=$B.$getattr(other,"__radd__",null);if(radd===null){throw _b_.TypeError.$factory("can only concatenate "+this_name+' (not "'+$B.class_name(other)+'") to '+this_name)}return _b_.NotImplemented}var res=self.slice(),is_js=other.$brython_class=="js";for(const item of other){res.push(is_js?$B.$JS2Py(item):item)}res.__brython__=true;if(isinstance(self,tuple)){res=tuple.$factory(res)}return res};list.__bool__=function(self){return list.__len__(self)>0};list.__class_getitem__=function(cls,item){if(!Array.isArray(item)){item=[item]}return $B.GenericAlias.$factory(cls,item)};list.__contains__=function(self,item){var $=$B.args("__contains__",2,{self:null,item:null},["self","item"],arguments,{},null,null),self=$.self,item=$.item;for(var _item of self){if($B.is_or_equals(_item,item)){return true}}return false};list.__delitem__=function(self,arg){if(isinstance(arg,_b_.int)){var pos=arg;if(arg<0){pos=self.length+pos}if(pos>=0&&pos0?0:self.length}var 
stop=arg.stop;if(stop===_b_.None){stop=step>0?self.length:0}if(start<0){start=self.length+start}if(stop<0){stop=self.length+stop}var res=[],i=null,pos=0;if(step>0){if(stop>start){for(var i=start;istop;i+=step){if(self[i]!==undefined){res[pos++]=i}}res.reverse()}}var i=res.length;while(i--){self.splice(res[i],1)}return _b_.None}if(_b_.hasattr(arg,"__int__")||_b_.hasattr(arg,"__index__")){list.__delitem__(self,_b_.int.$factory(arg));return _b_.None}throw _b_.TypeError.$factory($B.class_name(self)+" indices must be integer, not "+$B.class_name(arg))};list.__eq__=function(self,other){var klass=isinstance(self,list)?list:tuple;if(isinstance(other,klass)){if(other.length==self.length){var i=self.length;while(i--){if(!$B.is_or_equals(self[i],other[i])){return false}}return true}return false}return _b_.NotImplemented};list.__getitem__=function(self,key){$B.check_nb_args_no_kw("__getitem__",2,arguments);return list.$getitem(self,key)};list.$getitem=function(self,key){var klass=self.__class__||$B.get_class(self);var factory=function(list_res){list_res.__class__=klass;return list_res};var int_key;try{int_key=$B.PyNumber_Index(key)}catch(err){}if(int_key!==undefined){var items=self.valueOf(),pos=int_key;if(int_key<0){pos=items.length+pos}if(pos>=0&&pos0){if(stop<=start){return factory(res)}for(var i=start;istart){return factory(res)}for(var i=start;i>stop;i+=step){res[pos++]=items[i]}return factory(res)}}throw _b_.TypeError.$factory($B.class_name(self)+" indices must be integer, not "+$B.class_name(key))};list.__ge__=function(self,other){if(!isinstance(other,list)){return _b_.NotImplemented}var res=list.__le__(other,self);if(res===_b_.NotImplemented){return res}return res};list.__gt__=function(self,other){if(!isinstance(other,list)){return _b_.NotImplemented}var res=list.__lt__(other,self);if(res===_b_.NotImplemented){return res}return res};list.__hash__=_b_.None;list.__iadd__=function(){var $=$B.args("__iadd__",2,{self:null,x:null},["self","x"],arguments,{},null,null);var x=list.$factory($B.$iter($.x));for(var i=0;i1){throw _b_.TypeError.$factory("expected at most 1 argument, got "+args.length)}if(_b_.dict.__len__(kw)>0){throw _b_.TypeError.$factory("list() takes no keyword arguments")}while(self.length>0){self.pop()}var arg=args[0];if(arg===undefined){return _b_.None}var pos=0;for(var item of $B.make_js_iterator(arg)){self[pos++]=item}return _b_.None};var list_iterator=$B.make_iterator_class("list_iterator");list_iterator.__reduce__=list_iterator.__reduce_ex__=function(self){return $B.fast_tuple([_b_.iter,$B.fast_tuple([list.$factory(self)]),0])};list.__iter__=function(self){return list_iterator.$factory(self)};list.__le__=function(self,other){if(!isinstance(other,[list,_b_.tuple])){return _b_.NotImplemented}var i=0;while(i$B.max_array_size/other){throw _b_.OverflowError.$factory(`cannot fit `+`'${$B.class_name(other)}' into an index-sized integer`)}var res=[],$temp=self.slice(),len=$temp.length;for(var i=0;i=0&&posbegin){var pivot=begin+Math.floor(Math.random()*(end-begin));pivot=$partition(arg,array,begin,end,pivot);$qsort(arg,array,begin,pivot);$qsort(arg,array,pivot+1,end)}}function $elts_class(self){if(self.length==0){return null}var cl=$B.get_class(self[0]),i=self.length;while(i--){if($B.get_class(self[i])!==cl){return false}}return cl}list.sort=function(self){var $=$B.args("sort",1,{self:null},["self"],arguments,{},null,"kw");check_not_tuple(self,"sort");var func=_b_.None,reverse=false,kw_args=$.kw;for(var key in kw_args.$jsobj){if(key=="key"){func=kw_args.$jsobj[key]}else 
if(key=="reverse"){reverse=kw_args.$jsobj[key]}else{throw _b_.TypeError.$factory("'"+key+"' is an invalid keyword argument for this function")}}if(self.length==0){return _b_.None}if(func!==_b_.None){func=$B.$call(func)}self.$cl=$elts_class(self);var cmp=null;function basic_cmp(a,b){return $B.rich_comp("__lt__",a,b)?-1:$B.rich_comp("__eq__",a,b)?0:1}function reverse_cmp(a,b){return basic_cmp(b,a)}if(func===_b_.None&&self.$cl===_b_.str){if(reverse){cmp=function(b,a){return $B.$AlphabeticalCompare(a,b)}}else{cmp=function(a,b){return $B.$AlphabeticalCompare(a,b)}}}else if(func===_b_.None&&self.$cl===_b_.int){if(reverse){cmp=function(b,a){return a-b}}else{cmp=function(a,b){return a-b}}}else{cmp=reverse?function(t1,t2){return basic_cmp(t2[0],t1[0])}:function(t1,t2){return basic_cmp(t1[0],t2[0])};if(func===_b_.None){cmp=reverse?reverse_cmp:basic_cmp;self.sort(cmp)}else{var temp=[],saved=self.slice();for(var i=0,len=self.length;i"};function make_args(args){var res=[args[0].js];for(var i=1,len=args.length;i`};$B.generator.close=function(self){var save_frame_obj=$B.frame_obj;if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)}try{$B.generator.throw(self,_b_.GeneratorExit.$factory())}catch(err){if(!$B.is_exc(err,[_b_.GeneratorExit,_b_.StopIteration])){$B.frame_obj=save_frame_obj;throw _b_.RuntimeError.$factory("generator ignored GeneratorExit")}}$B.frame_obj=save_frame_obj};$B.generator.send=function(self,value){var gen=self.js_gen;gen.$has_run=true;if(gen.$finished){throw _b_.StopIteration.$factory(value)}if(gen.gi_running===true){throw _b_.ValueError.$factory("generator already executing")}gen.gi_running=true;var save_frame_obj=$B.frame_obj;if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)}try{var res=gen.next(value)}catch(err){gen.$finished=true;$B.frame_obj=save_frame_obj;throw err}if($B.frame_obj!==null&&$B.frame_obj.frame===self.$frame){$B.leave_frame()}$B.frame_obj=save_frame_obj;if(res.value&&res.value.__class__===$GeneratorReturn){gen.$finished=true;throw _b_.StopIteration.$factory(res.value.value)}gen.gi_running=false;if(res.done){throw _b_.StopIteration.$factory(res.value)}return res.value};$B.generator.throw=function(self,type,value,traceback){var $=$B.args("throw",4,{self:null,type:null,value:null,traceback:null},["self","type","value","traceback"],arguments,{value:_b_.None,traceback:_b_.None},null,null),self=$.self,type=$.type,value=$.value,traceback=$.traceback;var gen=self.js_gen,exc=type;if(exc.$is_class){if(!_b_.issubclass(type,_b_.BaseException)){throw _b_.TypeError.$factory("exception value must be an "+"instance of BaseException")}else if(value===undefined||value===_b_.None){exc=$B.$call(exc)()}else if($B.$isinstance(value,type)){exc=value}}else{if(value===_b_.None){value=exc}else{exc=$B.$call(exc)(value)}}if(traceback!==_b_.None){exc.$traceback=traceback}var save_frame_obj=$B.frame_obj;if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)}var res=gen.throw(exc);$B.frame_obj=save_frame_obj;if(res.done){throw _b_.StopIteration.$factory(res.value)}return res.value};$B.set_func_names($B.generator,"builtins");$B.async_generator=$B.make_class("async_generator",(function(func){var f=function(){var gen=func.apply(null,arguments);var res=Object.create(null);res.__class__=$B.async_generator;res.js_gen=gen;return res};return f}));var ag_closed={};$B.async_generator.__aiter__=function(self){return self};$B.async_generator.__anext__=function(self){return $B.async_generator.asend(self,_b_.None)};$B.async_generator.aclose=function(self){self.js_gen.$finished=true;return 
_b_.None};$B.async_generator.asend=async function(self,value){var gen=self.js_gen;if(gen.$finished){throw _b_.StopAsyncIteration.$factory(value)}if(gen.ag_running===true){throw _b_.ValueError.$factory("generator already executing")}gen.ag_running=true;var save_frame_obj=$B.frame_obj;if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)}try{var res=await gen.next(value)}catch(err){gen.$finished=true;$B.frame_obj=save_frame_obj;throw err}if($B.frame_obj!==null&&$B.frame_obj.frame===self.$frame){$B.leave_frame()}$B.frame_obj=save_frame_obj;if(res.done){throw _b_.StopAsyncIteration.$factory(value)}if(res.value.__class__===$GeneratorReturn){gen.$finished=true;throw _b_.StopAsyncIteration.$factory(res.value.value)}gen.ag_running=false;return res.value};$B.async_generator.athrow=async function(self,type,value,traceback){var gen=self.js_gen,exc=type;if(exc.$is_class){if(!_b_.issubclass(type,_b_.BaseException)){throw _b_.TypeError.$factory("exception value must be an "+"instance of BaseException")}else if(value===undefined){value=$B.$call(exc)()}}else{if(value===undefined){value=exc}else{exc=$B.$call(exc)(value)}}if(traceback!==undefined){exc.$traceback=traceback}var save_frame_obj=$B.frame_obj;if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)}await gen.throw(value);$B.frame_obj=save_frame_obj};$B.set_func_names($B.async_generator,"builtins")})(__BRYTHON__);(function($B){var _b_=$B.builtins;var object=_b_.object;var _window=globalThis;function to_simple(value){switch(typeof value){case"string":case"number":return value;case"boolean":return value?"true":"false";case"object":if(value===_b_.None){return"null"}else if(value instanceof Number){return value.valueOf()}else if(value instanceof String){return value.valueOf()}default:throw _b_.TypeError.$factory("keys must be str, int, "+"float, bool or None, not "+$B.class_name(value))}}$B.pyobj2structuredclone=function(obj,strict){strict=strict===undefined?true:strict;if(typeof obj=="boolean"||typeof obj=="number"||typeof obj=="string"||obj instanceof String){return obj}else if(obj.__class__===_b_.float){return obj.value}else if(obj===_b_.None){return null}else if(Array.isArray(obj)||obj.__class__===_b_.list||obj.__class__===_b_.tuple){var res=new Array(obj.length);for(var i=0,len=obj.length;ijsobj2pyobj(x))).catch($B.handle_error)}if(typeof jsobj==="function"){_this=_this===undefined?null:_this;if(_this===null){const pyobj=jsobj[PYOBJFCT];if(pyobj!==undefined){return pyobj}}else{const pyobjfcts=_this[PYOBJFCTS];if(pyobjfcts!==undefined){const pyobj=pyobjfcts.get(jsobj);if(pyobj!==undefined){return pyobj}}else{try{_this[PYOBJFCTS]=new Map}catch(err){}}}var res=function(){var args=new Array(arguments.length);for(var i=0,len=arguments.length;i-1){return pyobj}if([_b_.list,_b_.tuple].indexOf(klass)>-1){return pyobj.map(pyobj2jsobj)}if(klass===_b_.dict||_b_.issubclass(klass,_b_.dict)){var jsobj={};for(var entry of _b_.dict.$iter_items_with_hash(pyobj)){var key=entry.key;if(typeof key!=="string"){key=_b_.str.$factory(key)}if(typeof entry.value==="function"){entry.value.bind(jsobj)}jsobj[key]=pyobj2jsobj(entry.value)}return jsobj}if(klass===_b_.str){return pyobj.valueOf()}if(klass===$B.long_int){return pyobj.value}if(klass===_b_.float){return pyobj.value}if(klass===$B.function||klass===$B.method){if(pyobj.prototype&&pyobj.prototype.constructor===pyobj&&!pyobj.$is_func){return pyobj}if(pyobj.$is_async){const jsobj=function(){var res=pyobj.apply(null,arguments);return $B.coroutine.send(res)};pyobj[JSOBJ]=jsobj;jsobj[PYOBJ]=pyobj;return jsobj}var 
jsobj=function(){try{var args=new Array(arguments.length);for(var i=0;i`};$B.JSObj.bind=function(_self,evt,func){var js_func=function(ev){try{return func(jsobj2pyobj(ev))}catch(err){if(err.__class__!==undefined){$B.handle_error(err)}else{try{$B.$getattr($B.get_stderr(),"write")(err)}catch(err1){console.log(err)}}}};_self.$brython_events=_self.$brython_events||{};if(_self.$brython_events){_self.$brython_events[evt]=_self.$brython_events[evt]||[];_self.$brython_events[evt].push([func,js_func])}_self.addEventListener(evt,js_func);return _b_.None};$B.JSObj.bindings=function(_self){var res=$B.empty_dict();if(_self.$brython_events){for(var key in _self.$brython_events){_b_.dict.$setitem(res,key,$B.fast_tuple(_self.$brython_events[key].map((x=>x[0]))))}}return res};$B.JSObj.unbind=function(_self,evt,func){if(!_self.$brython_events){return _b_.None}if(!_self.$brython_events[evt]){return _b_.None}var events=_self.$brython_events[evt];if(func===undefined){for(var item of events){_self.removeEventListener(evt,item[1])}delete _self.$brython_events[evt]}else{for(var i=0,len=events.length;i-1){return function(){var args=new Array(arguments.length);args[0]=arguments[0];for(var i=1,len=arguments.length;i-1){return function(){var pylist=$B.$list(arguments[0].map(jsobj2pyobj));return jsobj2pyobj(_b_.list[attr].call(null,pylist,...Array.from(arguments).slice(1)))}}return function(){var js_array=arguments[0],t=jsobj2pyobj(js_array),args=[t];return _b_.list[attr].apply(null,args)}};$B.set_func_names(js_list_meta,"builtins");$B.SizedJSObj=$B.make_class("SizedJavascriptObject");$B.SizedJSObj.__bases__=[$B.JSObj];$B.SizedJSObj.__mro__=[$B.JSObj,_b_.object];$B.SizedJSObj.__len__=function(_self){return _self.length};$B.set_func_names($B.SizedJSObj,"builtins");$B.IterableJSObj=$B.make_class("IterableJavascriptObject");$B.IterableJSObj.__bases__=[$B.JSObj];$B.IterableJSObj.__mro__=[$B.JSObj,_b_.object];$B.IterableJSObj.__iter__=function(_self){return{__class__:$B.IterableJSObj,it:_self[Symbol.iterator]()}};$B.IterableJSObj.__len__=function(_self){return _self.length};$B.IterableJSObj.__next__=function(_self){var value=_self.it.next();if(!value.done){return jsobj2pyobj(value.value)}throw _b_.StopIteration.$factory("")};$B.set_func_names($B.IterableJSObj,"builtins");var js_array=$B.js_array=$B.make_class("Array");js_array.__class__=js_list_meta;js_array.__mro__=[$B.JSObj,_b_.object];js_array.__getattribute__=function(_self,attr){if(_b_.list[attr]===undefined){var proto=Object.getPrototypeOf(_self),res=proto[attr];if(res!==undefined){return jsobj2pyobj(res,_self)}if(_self.hasOwnProperty(attr)){return $B.JSObj.$factory(_self[attr])}throw $B.attr_error(attr,_self)}return function(){var args=pyobj2jsobj(Array.from(arguments));return _b_.list[attr].call(null,_self,...args)}};js_array.__getitem__=function(_self,i){i=$B.PyNumber_Index(i);return $B.jsobj2pyobj(_self[i])};var js_array_iterator=$B.make_class("JSArray_iterator",(function(obj){return{__class__:js_array_iterator,it:obj[Symbol.iterator]()}}));js_array_iterator.__next__=function(_self){var v=_self.it.next();if(v.done){throw _b_.StopIteration.$factory("")}return $B.jsobj2pyobj(v.value)};$B.set_func_names(js_array_iterator,"builtins");js_array.__iter__=function(_self){return js_array_iterator.$factory(_self)};js_array.__repr__=function(_self){if($B.repr.enter(_self)){return"[...]"}var _r=new Array(_self.length),res;for(var i=0;i<_self.length;++i){_r[i]=_b_.str.$factory(_self[i])}res="["+_r.join(", ")+"]";$B.repr.leave(_self);return 
res};$B.set_func_names(js_array,"javascript");$B.get_jsobj_class=function(obj){var proto=Object.getPrototypeOf(obj);if(proto===null){return $B.JSObj}if(proto[Symbol.iterator]!==undefined){return $B.IterableJSObj}else if(Object.getOwnPropertyNames(proto).indexOf("length")>-1){return $B.SizedJSObj}return $B.JSObj};$B.JSMeta=$B.make_class("JSMeta");$B.JSMeta.__call__=function(cls){var extra_args=new Array(arguments.length-1),klass=arguments[0];for(var i=1,len=arguments.length;i0)}catch(err){return false}};var $DOMEventAttrs_W3C=["NONE","CAPTURING_PHASE","AT_TARGET","BUBBLING_PHASE","type","target","currentTarget","eventPhase","bubbles","cancelable","timeStamp","stopPropagation","preventDefault","initEvent"];var $DOMEventAttrs_IE=["altKey","altLeft","button","cancelBubble","clientX","clientY","contentOverflow","ctrlKey","ctrlLeft","data","dataFld","dataTransfer","fromElement","keyCode","nextPage","offsetX","offsetY","origin","propertyName","reason","recordset","repeat","screenX","screenY","shiftKey","shiftLeft","source","srcElement","srcFilter","srcUrn","toElement","type","url","wheelDelta","x","y"];$B.$isEvent=function(obj){var flag=true;for(var i=0;i<$DOMEventAttrs_W3C.length;i++){if(obj[$DOMEventAttrs_W3C[i]]===undefined){flag=false;break}}if(flag){return true}for(var i=0;i<$DOMEventAttrs_IE.length;i++){if(obj[$DOMEventAttrs_IE[i]]===undefined){return false}}return true};var $NodeTypes={1:"ELEMENT",2:"ATTRIBUTE",3:"TEXT",4:"CDATA_SECTION",5:"ENTITY_REFERENCE",6:"ENTITY",7:"PROCESSING_INSTRUCTION",8:"COMMENT",9:"DOCUMENT",10:"DOCUMENT_TYPE",11:"DOCUMENT_FRAGMENT",12:"NOTATION"};var Attributes=$B.make_class("Attributes",(function(elt){return{__class__:Attributes,elt:elt}}));Attributes.__contains__=function(){var $=$B.args("__getitem__",2,{self:null,key:null},["self","key"],arguments,{},null,null);if($.self.elt instanceof SVGElement){return $.self.elt.hasAttributeNS(null,$.key)}else if(typeof $.self.elt.hasAttribute=="function"){return $.self.elt.hasAttribute($.key)}return false};Attributes.__delitem__=function(){var $=$B.args("__getitem__",2,{self:null,key:null},["self","key"],arguments,{},null,null);if(!Attributes.__contains__($.self,$.key)){throw _b_.KeyError.$factory($.key)}if($.self.elt instanceof SVGElement){$.self.elt.removeAttributeNS(null,$.key);return _b_.None}else if(typeof $.self.elt.hasAttribute=="function"){$.self.elt.removeAttribute($.key);return _b_.None}};Attributes.__getitem__=function(){var $=$B.args("__getitem__",2,{self:null,key:null},["self","key"],arguments,{},null,null);if($.self.elt instanceof SVGElement&&$.self.elt.hasAttributeNS(null,$.key)){return $.self.elt.getAttributeNS(null,$.key)}else if(typeof $.self.elt.hasAttribute=="function"&&$.self.elt.hasAttribute($.key)){return $.self.elt.getAttribute($.key)}throw _b_.KeyError.$factory($.key)};Attributes.__iter__=function(self){self.$counter=0;var attrs=self.elt.attributes,items=[];for(var i=0;i");var DOMEvent=$B.DOMEvent=$B.make_class("DOMEvent",(function(evt_name){return DOMEvent.__new__(DOMEvent,evt_name)}));DOMEvent.__new__=function(cls,evt_name){var ev=new Event(evt_name);ev.__class__=DOMEvent;if(ev.preventDefault===undefined){ev.preventDefault=function(){ev.returnValue=false}}if(ev.stopPropagation===undefined){ev.stopPropagation=function(){ev.cancelBubble=true}}return ev};function dom2svg(svg_elt,coords){var pt=svg_elt.createSVGPoint();pt.x=coords.x;pt.y=coords.y;return 
pt.matrixTransform(svg_elt.getScreenCTM().inverse())}DOMEvent.__getattribute__=function(self,attr){switch(attr){case"__repr__":case"__str__":return function(){return""};case"x":return $mouseCoords(self).x;case"y":return $mouseCoords(self).y;case"data":if(self.dataTransfer!==null&&self.dataTransfer!==undefined){return Clipboard.$factory(self.dataTransfer)}return $B.$JS2Py(self["data"]);case"target":if(self.target!==undefined){return DOMNode.$factory(self.target)}case"char":return String.fromCharCode(self.which);case"svgX":if(self.target instanceof SVGSVGElement){return Math.floor(dom2svg(self.target,$mouseCoords(self)).x)}throw _b_.AttributeError.$factory("event target is not an SVG "+"element");case"svgY":if(self.target instanceof SVGSVGElement){return Math.floor(dom2svg(self.target,$mouseCoords(self)).y)}throw _b_.AttributeError.$factory("event target is not an SVG "+"element")}var res=self[attr];if(res!==undefined){if(typeof res=="function"){var func=function(){var args=[];for(var i=0;i");function $EventsList(elt,evt,arg){this.elt=elt;this.evt=evt;if($B.$isinstance(arg,_b_.list)){this.callbacks=arg}else{this.callbacks=[arg]}this.remove=function(callback){var found=false;for(var i=0;i"};dom.FileReader.__class__=_b_.type;dom.FileReader.__str__=function(){return""};var DOMNode=$B.make_class("DOMNode",(function(elt){return elt}));DOMNode.__add__=function(self,other){var res=TagSum.$factory();res.children=[self];var pos=1;if($B.$isinstance(other,TagSum)){res.children=res.children.concat(other.children)}else if($B.$isinstance(other,[_b_.str,_b_.int,_b_.float,_b_.list,_b_.dict,_b_.set,_b_.tuple])){res.children[pos++]=DOMNode.$factory(document.createTextNode(_b_.str.$factory(other)))}else if($B.$isinstance(other,DOMNode)){res.children[pos++]=other}else{try{res.children=res.children.concat(_b_.list.$factory(other))}catch(err){throw _b_.TypeError.$factory("can't add '"+$B.class_name(other)+"' object to DOMNode instance")}}return res};DOMNode.__bool__=function(self){return true};DOMNode.__contains__=function(self,key){if(self.nodeType==9&&typeof key=="string"){return document.getElementById(key)!==null}if(self.length!==undefined&&typeof self.item=="function"){for(var i=0,len=self.length;i-1){return function(selector){if(selector===undefined){self.select();return _b_.None}return DOMNode.select(self,selector)}}if(attr=="query"&&self.nodeType==9){var res={__class__:Query,_keys:[],_values:{}};var qs=location.search.substr(1).split("&");if(location.search!=""){for(var i=0;i-1){res._values[key].push(value)}else{res._keys.push(key);res._values[key]=[value]}}}return res}var klass=$B.get_class(self);var property=self[attr];if(property!==undefined&&self.__class__&&klass.__module__!="browser.html"&&klass.__module__!="browser.svg"&&!klass.$webcomponent){var from_class=$B.$getattr(klass,attr,null);if(from_class!==null){property=from_class;if(typeof from_class==="function"){return property.bind(self,self)}}else{var bases=self.__class__.__bases__;var show_message=true;for(var base of bases){if(base.__module__=="browser.html"){show_message=false;break}}if(show_message){var from_class=$B.$getattr(self.__class__,attr,_b_.None);if(from_class!==_b_.None){var frame=$B.frame_obj.frame,line=frame.$lineno;console.info("Warning: line "+line+", "+self.tagName+" element has instance attribute '"+attr+"' set."+" Attribute of class "+$B.class_name(self)+" is ignored.")}}}}if(property===undefined){if(self.tagName){var ce=customElements.get(self.tagName.toLowerCase());if(ce!==undefined&&ce.$cls!==undefined){var 
save_class=self.__class__;self.__class__=ce.$cls;try{var res=_b_.object.__getattribute__(self,attr);self.__class__=save_class;return res}catch(err){self.__class__=save_class;if(!$B.is_exc(err,[_b_.AttributeError])){throw err}}}}else{return object.__getattribute__(self,attr)}}var res=property;if(res!==undefined){if(res===null){return res}if(typeof res==="function"){if(self.__class__&&self.__class__.$webcomponent){var method=$B.$getattr(self.__class__,attr,null);if(method!==null){return res.bind(self)}}if(res.$is_func){return res}var func=function(f,elt){return function(){var args=[],pos=0;for(var i=0;i0){var res=TagSum.$factory();var pos=res.children.length;for(var i=0;i"}var res=""};DOMNode.__setattr__=function(self,attr,value){switch(attr){case"left":case"top":case"width":case"height":if($B.$isinstance(value,[_b_.int,_b_.float])&&self.nodeType==1){self.style[attr]=value+"px";return _b_.None}else{throw _b_.ValueError.$factory(attr+" value should be"+" an integer or float, not "+$B.class_name(value))}break}if(DOMNode["set_"+attr]!==undefined){return DOMNode["set_"+attr](self,value)}function warn(msg){console.log(msg);var frame=$B.frame_obj.frame;if(!frame){return}if($B.get_option("debug")>0){var file=frame.__file__,lineno=frame.$lineno;console.log("module",frame[2],"line",lineno);if($B.file_cache.hasOwnProperty(file)){var src=$B.file_cache[file];console.log(src.split("\n")[lineno-1])}}else{console.log("module",frame[2])}}var proto=Object.getPrototypeOf(self),nb=0;while(!!proto&&proto!==Object.prototype&&nb++<10){var descriptors=Object.getOwnPropertyDescriptors(proto);if(!!descriptors&&typeof descriptors.hasOwnProperty=="function"){if(descriptors.hasOwnProperty(attr)){if(!descriptors[attr].writable&&descriptors[attr].set===undefined){warn("Warning: property '"+attr+"' is not writable. 
Use element.attrs['"+attr+"'] instead.")}break}}else{break}proto=Object.getPrototypeOf(proto)}if(self.style&&self.style[attr]!==undefined&&attr!="src"){warn("Warning: '"+attr+"' is a property of element.style")}self[attr]=py_immutable_to_js(value);return _b_.None};DOMNode.__setitem__=function(self,key,value){if(typeof key=="number"){self.childNodes[key]=value}else if(typeof key=="string"){if(self.attributes){if(self instanceof SVGElement){self.setAttributeNS(null,key,value)}else if(typeof self.setAttribute=="function"){self.setAttribute(key,value)}}}};DOMNode.abs_left={__get__:function(self){return $getPosition(self).left},__set__:function(){throw _b_.AttributeError.$factory("'DOMNode' objectattribute "+"'abs_left' is read-only")}};DOMNode.abs_top={__get__:function(self){return $getPosition(self).top},__set__:function(){throw _b_.AttributeError.$factory("'DOMNode' objectattribute "+"'abs_top' is read-only")}};DOMNode.attach=DOMNode.__le__;DOMNode.bind=function(self,event){var $=$B.args("bind",4,{self:null,event:null,func:null,options:null},["self","event","func","options"],arguments,{func:_b_.None,options:_b_.None},null,null),self=$.self,event=$.event,func=$.func,options=$.options;if(func===_b_.None){return function(f){return DOMNode.bind(self,event,f)}}var callback=function(f){return function(ev){try{return $B.$call(f)($DOMEvent(ev))}catch(err){if(err.__class__!==undefined){$B.handle_error(err)}else{try{$B.$getattr($B.get_stderr(),"write")(err)}catch(err1){console.log(err)}}}}}(func);callback.$infos=func.$infos;callback.$attrs=func.$attrs||{};callback.$func=func;if(typeof options=="boolean"){self.addEventListener(event,callback,options)}else if(options.__class__===_b_.dict){self.addEventListener(event,callback,_b_.dict.$to_obj(options))}else if(options===_b_.None){self.addEventListener(event,callback,false)}self.$events=self.$events||{};self.$events[event]=self.$events[event]||[];self.$events[event].push([func,callback]);return self};DOMNode.children=function(self){var res=[];if(self.nodeType==9){self=self.body}for(var child of self.children){res.push(DOMNode.$factory(child))}return res};DOMNode.child_nodes=function(self){var res=[];if(self.nodeType==9){self=self.body}for(var child of self.childNodes){res.push(DOMNode.$factory(child))}return res};DOMNode.clear=function(self){var $=$B.args("clear",1,{self:null},["self"],arguments,{},null,null);if(self.nodeType==9){self=self.body}while(self.firstChild){self.removeChild(self.firstChild)}};DOMNode.Class=function(self){if(self.className!==undefined){return self.className}return _b_.None};DOMNode.class_name=function(self){return DOMNode.Class(self)};DOMNode.clone=function(self){var res=DOMNode.$factory(self.cloneNode(true));var events=self.$events||{};for(var event in events){var evt_list=events[event];evt_list.forEach((function(evt){var func=evt[0];DOMNode.bind(res,event,func)}))}return res};DOMNode.closest=function(self,selector){var $=$B.args("closest",2,{self:null,selector:null},["self","selector"],arguments,{},null,null);var res=self.closest(selector);if(res===null){throw _b_.KeyError.$factory("no parent with selector "+selector)}return DOMNode.$factory(res)};DOMNode.bindings=function(self){var res=$B.empty_dict();for(var key in self.$events){_b_.dict.$setitem(res,key,self.$events[key].map((x=>x[1])))}return res};DOMNode.events=function(self,event){self.$events=self.$events||{};var evt_list=self.$events[event]=self.$events[event]||[],callbacks=[];evt_list.forEach((function(evt){callbacks.push(evt[1])}));return callbacks};function 
make_list(node_list){var res=[];for(var i=0;i-1};Query.__getitem__=function(self,key){var result=self._values[key];if(result===undefined){throw _b_.KeyError.$factory(key)}else if(result.length==1){return result[0]}return result};var Query_iterator=$B.make_iterator_class("query string iterator");Query.__iter__=function(self){return Query_iterator.$factory(self._keys)};Query.__setitem__=function(self,key,value){self._values[key]=[value];return _b_.None};Query.__str__=Query.__repr__=function(self){var elts=[];for(var key in self._values){for(const val of self._values[key]){elts.push(encodeURIComponent(key)+"="+encodeURIComponent(val))}}if(elts.length==0){return""}else{return"?"+elts.join("&")}};Query.getfirst=function(self,key,_default){var result=self._values[key];if(result===undefined){if(_default===undefined){return _b_.None}return _default}return result[0]};Query.getlist=function(self,key){var result=self._values[key];if(result===undefined){return[]}return result};Query.getvalue=function(self,key,_default){try{return Query.__getitem__(self,key)}catch(err){if(_default===undefined){return _b_.None}return _default}};Query.keys=function(self){return self._keys};$B.set_func_names(Query,"");var TagSum=$B.make_class("TagSum",(function(){return{__class__:TagSum,children:[],toString:function(){return"(TagSum)"}}}));TagSum.appendChild=function(self,child){self.children.push(child)};TagSum.__add__=function(self,other){if($B.get_class(other)===TagSum){self.children=self.children.concat(other.children)}else if($B.$isinstance(other,[_b_.str,_b_.int,_b_.float,_b_.dict,_b_.set,_b_.list])){self.children=self.children.concat(DOMNode.$factory(document.createTextNode(other)))}else{self.children.push(other)}return self};TagSum.__radd__=function(self,other){var res=TagSum.$factory();res.children=self.children.slice();res.children.splice(0,0,DOMNode.$factory(document.createTextNode(other)));return res};TagSum.__repr__=function(self){var res=" ";for(var i=0;i");$B.TagSum=TagSum;var win=$B.JSObj.$factory(_window);win.get_postMessage=function(msg,targetOrigin){if($B.$isinstance(msg,_b_.dict)){var temp={__class__:"dict"},items=_b_.list.$factory(_b_.dict.items(msg));items.forEach((function(item){temp[item[0]]=item[1]}));msg=temp}return _window.postMessage(msg,targetOrigin)};$B.DOMNode=DOMNode;$B.win=win})(__BRYTHON__);(function($B){$B.pattern_match=function(subject,pattern){var _b_=$B.builtins,frame=$B.frame_obj.frame,locals=frame[1];function bind(pattern,subject){if(pattern.alias){locals[pattern.alias]=subject}}if(pattern.sequence){if($B.$isinstance(subject,[_b_.str,_b_.bytes,_b_.bytearray])){return false}var Sequence;if($B.imported["collections.abc"]){Sequence=$B.imported["collections.abc"].Sequence}var deque;if($B.imported["collections"]){deque=$B.imported["collections"].deque}var supported=false;var klass=subject.__class__||$B.get_class(subject);for(var base of[klass].concat(klass.__bases__||[])){if(base.$match_sequence_pattern){supported=true;break}else if(base===Sequence||base==deque){supported=true;break}}if(!supported&&Sequence){supported=_b_.issubclass(klass,Sequence)}if(!supported){return false}if(pattern.sequence.length==1&&pattern.sequence[0].capture_starred=="_"){return true}var subject_length=_b_.len(subject);var nb_fixed_length=0;for(var item of pattern.sequence){if(!item.capture_starred){nb_fixed_length++}}if(subject_length0){if([_b_.bool,_b_.bytearray,_b_.bytes,_b_.dict,_b_.float,_b_.frozenset,_b_.int,_b_.list,_b_.set,_b_.str,_b_.tuple].indexOf(klass)>-1){if(pattern.args.length>1){throw 
_b_.TypeError.$factory("for builtin type "+$B.class_name(subject)+", a single positional "+"subpattern is accepted")}return $B.pattern_match(subject,pattern.args[0])}else{var match_args=$B.$getattr(klass,"__match_args__",$B.fast_tuple([]));if(!$B.$isinstance(match_args,_b_.tuple)){throw _b_.TypeError.$factory("__match_args__() did not return a tuple")}if(pattern.args.length>match_args.length){throw _b_.TypeError.$factory("__match_args__() returns "+match_args.length+" names but "+pattern.args.length+" positional "+"arguments were passed")}for(var i=0,len=pattern.args.length;i"}else{return""}};$B.set_func_names(coroutine,"builtins");$B.make_async=func=>{if(func.$is_genfunc){return func}var f=function(){var args=arguments;return{__class__:coroutine,$args:args,$func:func}};f.$infos=func.$infos;f.$is_func=true;f.$is_async=true;return f};$B.promise=function(obj){if(obj.__class__===coroutine){obj.$frame_obj=$B.frame_obj;return coroutine.send(obj)}if(typeof obj=="function"){return obj()}if(obj instanceof Promise||typeof obj.then=="function"){return obj}var awaitable=$B.$getattr(obj,"__await__",null);if(awaitable!==null){awaitable=$B.$call(awaitable)();if($B.$getattr(awaitable,"__next__",null)===null){throw _b_.TypeError.$factory("__await__() returned non-iterator"+` of type '${$B.class_name(awaitable)}'`)}return awaitable}throw _b_.TypeError.$factory(`object ${$B.class_name(obj)} `+`can't be used in 'await' expression`)}})(__BRYTHON__);(function($B){$B.builtin_class_flags={builtins:{1074287874:["SystemError","NotImplementedError","TypeError","BufferError","BlockingIOError","IndexError","BytesWarning","ConnectionError","OverflowError","AssertionError","ArithmeticError","BaseException","EOFError","Exception","NameError","PermissionError","TimeoutError","ZeroDivisionError","ConnectionRefusedError","ChildProcessError","SyntaxWarning","ProcessLookupError","EncodingWarning","StopIteration","UnicodeError","WindowsError","AttributeError","BrokenPipeError","KeyError","ConnectionAbortedError","ReferenceError","TabError","ImportError","EnvironmentError","FileExistsError","ImportWarning","OSError","UnboundLocalError","FloatingPointError","UserWarning","IndentationError","RecursionError","NotADirectoryError","Warning","LookupError","UnicodeWarning","ModuleNotFoundError","IOError","StopAsyncIteration","ResourceWarning","BaseExceptionGroup","SystemExit","ConnectionResetError","RuntimeError","FutureWarning","FileNotFoundError","RuntimeWarning","UnicodeTranslateError","UnicodeEncodeError","PendingDeprecationWarning","UnicodeDecodeError","DeprecationWarning","IsADirectoryError","SyntaxError","KeyboardInterrupt","MemoryError","GeneratorExit","ValueError","InterruptedError"],1073763848:["ExceptionGroup"],21500162:["bool"],4723970:["bytearray","float"],138941698:["bytes"],546050:["staticmethod","reversed","filter","property","super","enumerate","classmethod","map","zip"],529666:["object","complex"],541611330:["dict"],4740354:["set","frozenset"],21501186:["int"],38294818:["list"],545058:["memoryview"],528674:["range"],545026:["slice"],273159426:["str"],71849250:["tuple"],2156420354:["type"]},types:{545154:["async_generator","member_descriptor","classmethod_descriptor","getset_descriptor","coroutine","method-wrapper","generator","frame"],547202:["builtin_function_or_method"],545026:["traceback","cell"],528642:["code","NoneType","ellipsis","NotImplementedType"],678146:["function"],545090:["mappingproxy"],678274:["method_descriptor"],547074:["method"],546050:["module"],676226:["wrapper_descriptor"]}}})(__BRYTHON__);(funct
ion($B){var _b_=$B.builtins;var update=$B.update_obj=function(mod,data){for(attr in data){mod[attr]=data[attr]}};var _window=globalThis;var modules={};var browser={$package:true,$is_package:true,__initialized__:true,__package__:"browser",__file__:$B.brython_path.replace(new RegExp("/*$","g"),"")+"/Lib/browser/__init__.py",bind:function(){var $=$B.args("bind",3,{elt:null,evt:null,options:null},["elt","evt","options"],arguments,{options:_b_.None},null,null);var options=$.options;if(typeof options=="boolean"){}else if(options.__class__===_b_.dict){var _options={};for(var key of _b_.dict.$keys_string(options)){_options[key]=_b_.dict.$getitem_string(options,key)}options=_options}else{options==false}return function(callback){if($B.get_class($.elt)===$B.JSObj){function f(ev){try{return callback($B.JSObj.$factory(ev))}catch(err){$B.handle_error(err)}}$.elt.addEventListener($.evt,f,options);return callback}else if($B.$isinstance($.elt,$B.DOMNode)){$B.DOMNode.bind($.elt,$.evt,callback,options);return callback}else if($B.$isinstance($.elt,_b_.str)){var items=document.querySelectorAll($.elt);for(var i=0;i1){console.log(err,err.__class__,err.args);console.log("first",first);console.log(arguments)}throw err}}}}for(var arg in $ns.kw.$jsobj){var value=$ns.kw.$jsobj[arg];if(arg.toLowerCase().substr(0,2)=="on"){$B.DOMNode.__setattr__(self,arg,value)}else if(arg.toLowerCase()=="style"){$B.DOMNode.set_style(self,value)}else{if(value!==false){try{arg=$B.imported["browser.html"].attribute_mapper(arg);self.setAttribute(arg,$B.pyobj2jsobj(value))}catch(err){throw _b_.ValueError.$factory("can't set attribute "+arg)}}}}};dict.__mro__=[$B.DOMNode,$B.builtins.object];dict.__new__=function(cls){var res=document.createElement(tagName);if(cls!==html[tagName]){res.__class__=cls}return res};dict.__rmul__=function(self,num){return $B.DOMNode.__mul__(self,num)};$B.set_func_names(dict,"browser.html");return dict}function makeFactory(klass,ComponentClass){return function(k){return function(){if(k.__name__=="SVG"){var res=$B.DOMNode.$factory(document.createElementNS("http://www.w3.org/2000/svg","svg"),true)}else{try{var res=document.createElement(k.__name__)}catch(err){console.log("error "+err);console.log("creating element",k.__name__);throw err}}var init=$B.$getattr(k,"__init__",null);if(init!==null){init(res,...arguments)}return res}}(klass)}var tags=["A","ABBR","ACRONYM","ADDRESS","APPLET","AREA","B","BASE","BASEFONT","BDO","BIG","BLOCKQUOTE","BODY","BR","BUTTON","CAPTION","CENTER","CITE","CODE","COL","COLGROUP","DD","DEL","DFN","DIR","DIV","DL","DT","EM","FIELDSET","FONT","FORM","FRAME","FRAMESET","H1","H2","H3","H4","H5","H6","HEAD","HR","HTML","I","IFRAME","IMG","INPUT","INS","ISINDEX","KBD","LABEL","LEGEND","LI","LINK","MAP","MENU","META","NOFRAMES","NOSCRIPT","OBJECT","OL","OPTGROUP","OPTION","P","PARAM","PRE","Q","S","SAMP","SCRIPT","SELECT","SMALL","SPAN","STRIKE","STRONG","STYLE","SUB","SUP","SVG","TABLE","TBODY","TD","TEXTAREA","TFOOT","TH","THEAD","TITLE","TR","TT","U","UL","VAR","ARTICLE","ASIDE","AUDIO","BDI","CANVAS","COMMAND","DATA","DATALIST","EMBED","FIGCAPTION","FIGURE","FOOTER","HEADER","KEYGEN","MAIN","MARK","MATH","METER","NAV","OUTPUT","PROGRESS","RB","RP","RT","RTC","RUBY","SECTION","SOURCE","TEMPLATE","TIME","TRACK","VIDEO","WBR","DETAILS","DIALOG","MENUITEM","PICTURE","SUMMARY"];var html={};html.tags=$B.empty_dict();function maketag(tagName,ComponentClass){if(!(typeof tagName=="string")){throw _b_.TypeError.$factory("html.maketag expects a string as argument")}if(html[tagName]!==undefined){throw 
_b_.ValueError.$factory("cannot reset class for "+tagName)}var klass=makeTagDict(tagName);klass.$factory=makeFactory(klass,ComponentClass);html[tagName]=klass;_b_.dict.$setitem(html.tags,tagName,html[tagName]);return klass}for(var tagName of tags){maketag(tagName)}html.maketag=maketag;html.attribute_mapper=function(attr){return attr.replace(/_/g,"-")};return html}(__BRYTHON__)}modules["browser"]=browser;$B.UndefinedType=$B.make_class("UndefinedType",(function(){return $B.Undefined}));$B.UndefinedType.__mro__=[_b_.object];$B.UndefinedType.__bool__=function(self){return false};$B.UndefinedType.__repr__=function(self){return""};$B.UndefinedType.__str__=$B.UndefinedType.__repr__;$B.Undefined={__class__:$B.UndefinedType};$B.set_func_names($B.UndefinedType,"javascript");var super_class=$B.make_class("JavascriptSuper",(function(){var res=_b_.super.$factory();var js_constr=res.__thisclass__.__bases__[0];return function(){var obj=new js_constr.$js_func(...arguments);console.log("obj from js constr",obj);for(var attr in obj){console.log("attr",attr);res.__self_class__.__dict__[attr]=$B.jsobj2pyobj(obj[attr])}return obj}}));super_class.__getattribute__=function(self,attr){if(attr=="__init__"||attr=="__call__"){return self.__init__}return $B.$getattr(self.__self_class__,attr)};$B.set_func_names(super_class,"javascript");modules["javascript"]={this:function(){if($B.js_this===undefined){return $B.builtins.None}return $B.JSObj.$factory($B.js_this)},Array:$B.js_array,Date:self.Date&&$B.JSObj.$factory(self.Date),extends:function(js_constr){if(!js_constr.$js_func||!js_constr.$js_func.toString().startsWith("class ")){console.log(js_constr);throw _b_.TypeError.$factory("argument of extend must be a Javascript class")}js_constr.__class__=_b_.type;return function(obj){obj.__bases__.splice(0,0,js_constr);obj.__mro__.splice(0,0,js_constr);return obj}},import_js:function(url,name){var $=$B.args("import_js",2,{url:null,alias:null},["url","alias"],arguments,{alias:_b_.None},null,null),url=$.url,alias=$.alias;var xhr=new XMLHttpRequest,result;xhr.open("GET",url,false);xhr.onreadystatechange=function(){if(this.readyState==4){if(this.status==200){var js=this.responseText+"\nreturn $module",f=new Function(js);console.log("f",f,f+"");var $module=f();if(typeof $module!=="undefined"){result=$B.module.$factory(name);for(var key in $module){result[key]=$B.jsobj2pyobj($module[key])}result.__file__=url}else{console.log(this.responseText);result=_b_.ImportError.$factory("Javascript "+`module at ${url} doesn't define $module`)}}else{result=_b_.ModuleNotFoundError.$factory(name)}}};xhr.send();if($B.$isinstance(result,_b_.BaseException)){$B.handle_error(result)}else{if(alias===_b_.None){alias=url.split(".");if(alias.length>1){alias.pop()}alias=alias.join(".");result.__name__=alias}$B.imported[alias]=result;var frame=$B.frame_obj.frame;frame[1][alias]=result}},import_modules:function(refs,callback,loaded){if(loaded===undefined){loaded=[]}if(!Array.isArray(refs)){throw _b_.TypeError.$factory(`first argument must be a list, got ${$B.class_name(refs)}`)}if(refs.length>1){var ref=refs.shift();import(ref).then((function(module){loaded.push(module);$B.imported.javascript.import_modules(refs,callback,loaded)})).catch($B.show_error)}else{import(refs[0]).then((function(module){loaded.push(module);return $B.$call(callback).apply(null,loaded)})).catch($B.show_error)}},import_scripts:function(refs,callback,loaded){console.log("import scripts",refs);if(loaded===undefined){loaded=[]}if(!Array.isArray(refs)){throw _b_.TypeError.$factory(`first 
argument must be a list, got ${$B.class_name(refs)}`)}if(refs.length>0){var ref=refs.shift();var script=document.createElement("script");script.src=ref;script.addEventListener("load",(function(ev){console.log("script loaded");loaded.push(script);$B.imported.javascript.import_scripts(refs,callback,loaded)}));document.body.appendChild(script)}else{console.log("appel callback",loaded);return $B.$call(callback).apply(null,loaded)}},JSObject:$B.JSObj,JSON:{__class__:$B.make_class("JSON"),parse:function(){return $B.structuredclone2pyobj(JSON.parse.apply(this,arguments))},stringify:function(obj,replacer,space){return JSON.stringify($B.pyobj2structuredclone(obj,false),$B.JSObj.$factory(replacer),space)}},jsobj2pyobj:function(obj){return $B.jsobj2pyobj(obj)},load:function(script_url){console.log('"javascript.load" is deprecrated. '+"Use browser.load instead.");var file_obj=$B.builtins.open(script_url);var content=$B.$getattr(file_obj,"read")();eval(content)},Math:self.Math&&$B.JSObj.$factory(self.Math),NULL:null,NullType:$B.make_class("NullType"),Number:self.Number&&$B.JSObj.$factory(self.Number),py2js:function(src,module_name){if(module_name===undefined){module_name="__main__"+$B.UUID()}var js=$B.py2js({src:src,filename:""},module_name,module_name,$B.builtins_scope).to_js();return $B.format_indent(js,0)},pyobj2jsobj:function(obj){return $B.pyobj2jsobj(obj)},RegExp:self.RegExp&&$B.JSObj.$factory(self.RegExp),String:self.String&&$B.JSObj.$factory(self.String),super:super_class,UNDEFINED:$B.Undefined,UndefinedType:$B.UndefinedType};modules.javascript.NullType.__module__="javascript";modules.javascript.NullType.__eq__=function(_self,other){return other===null||other===$B.Undefined};$B.set_func_names(modules.javascript.NullType,"javascript");modules.javascript.UndefinedType.__module__="javascript";var $io=$B.$io=$B.make_class("io",(function(out){return{__class__:$io,out:out,encoding:"utf-8"}}));$io.flush=function(self){if(self.buf){console[self.out](self.buf.join(""));self.buf=[]}};$io.write=function(self,msg){if(self.buf===undefined){self.buf=[]}if(typeof msg!="string"){throw _b_.TypeError.$factory("write() argument must be str, not "+$B.class_name(msg))}self.buf.push(msg);return _b_.None};var _b_=$B.builtins;modules["_sys"]={_getframe:function(){var $=$B.args("_getframe",1,{depth:null},["depth"],arguments,{depth:0},null,null),depth=$.depth,frame_obj=$B.frame_obj;for(var i=0;i0){var lines=headers.trim().split(/[\r\n]+/);lines.forEach((function(line){var parts=line.split(": ");var header=parts.shift();var value=parts.join(": ");_b_.dict.$setitem(res,header,value)}))}return res}));var Future=$B.make_class("Future",(function(){var methods={};var promise=new Promise((function(resolve,reject){methods.resolve=resolve;methods.reject=reject}));promise._methods=methods;promise._done=false;promise.__class__=Future;return promise}));Future.done=function(){var $=$B.args("done",1,{self:null},["self"],arguments,{},null,null);return!!self._done};Future.set_result=function(self,value){var $=$B.args("set_result",2,{self:null,value:null},["self","value"],arguments,{},null,null);self._done=true;self._methods.resolve(value);return _b_.None};Future.set_exception=function(self,exception){var $=$B.args("set_exception",2,{self:null,exception:null},["self","exception"],arguments,{},null,null);self._done=true;self._methods.reject(exception);return _b_.None};$B.set_func_names(Future,"browser.aio");modules["browser.aio"]={ajax:function(){var 
$=$B.args("ajax",2,{method:null,url:null},["method","url"],arguments,{},null,"kw"),method=$.method.toUpperCase(),url=$.url,kw=$.kw;var args=handle_kwargs(kw,"get");if(method=="GET"&&!args.cache){url=url+"?ts"+(new Date).getTime()+"=0"}if(args.body&&method=="GET"){url=url+(args.cache?"?":"&")+args.body}var func=function(){return new Promise((function(resolve,reject){var xhr=new XMLHttpRequest;xhr.open(method,url,true);for(var key in args.headers){xhr.setRequestHeader(key,args.headers[key])}xhr.format=args.format;xhr.responseType=responseType[args.format];xhr.onreadystatechange=function(){if(this.readyState==4){this.__class__=HTTPRequest;resolve(this)}};if(args.body&&["POST","PUT","DELETE","PATCH"].indexOf(method)>-1){xhr.send(args.body)}else{xhr.send()}}))};func.$infos={__name__:"ajax_"+method};return{__class__:$B.coroutine,$args:[url,args],$func:func}},event:function(){var $=$B.args("event",1,{element:null},["element"],arguments,{},"names",null),element=$.element,names=$.names;return new Promise((function(resolve){var callbacks=[];names.forEach((function(name){var callback=function(evt){callbacks.forEach((function(items){$B.DOMNode.unbind(element,items[0],items[1])}));resolve($B.$DOMEvent(evt))};callbacks.push([name,callback]);$B.DOMNode.bind(element,name,callback)}))}))},get:function(){return $B.imported["browser.aio"].ajax.bind(null,"GET").apply(null,arguments)},iscoroutine:function(f){return f.__class__===$B.coroutine},iscoroutinefunction:function(f){return(f.$infos.__code__.co_flags&128)!=0},post:function(){return $B.imported["browser.aio"].ajax.bind(null,"POST").apply(null,arguments)},run:function(coro){var handle_success=function(){$B.leave_frame()},handle_error=$B.show_error,error_func=handle_error;var $=$B.args("run",3,{coro:null,onsuccess:null,onerror:null},["coro","onsuccess","onerror"],arguments,{onsuccess:handle_success,onerror:handle_error},null,null),coro=$.coro,onsuccess=$.onsuccess,onerror=$.onerror,error_func=onerror;if(onerror!==handle_error){function error_func(exc){try{onerror(exc)}catch(err){handle_error(err)}}}var save_frame_obj=$B.frame_obj;$B.coroutine.send(coro).then(onsuccess).catch(error_func);$B.frame_obj=save_frame_obj;return _b_.None},sleep:function(seconds){if(seconds.__class__===_b_.float){seconds=seconds.value}else if(typeof seconds!="number"){throw _b_.TypeError.$factory("'sleep' argument must be "+"int or float, not "+$B.class_name(seconds))}var func=function(){return new Promise((resolve=>setTimeout((function(){resolve(_b_.None)}),1e3*seconds)))};func.$infos={__name__:"sleep"};return{__class__:$B.coroutine,$args:[seconds],$func:func}},Future:Future,__getattr__:function(attr){$B.$import("_aio");return $B.$getattr($B.imported._aio,attr)}};function load(name,module_obj){module_obj.__class__=$B.module;module_obj.__name__=name;$B.imported[name]=module_obj;for(var attr in module_obj){if(typeof module_obj[attr]=="function"){module_obj[attr].$infos={__module__:name,__name__:attr,__qualname__:name+"."+attr}}}}for(var attr in modules){load(attr,modules[attr])}if(!($B.isWebWorker||$B.isNode)){modules["browser"].html=modules["browser.html"];modules["browser"].aio=modules["browser.aio"]}var _b_=$B.builtins;_b_.__builtins__=$B.module.$factory("__builtins__","Python builtins");for(var attr in 
_b_){_b_.__builtins__[attr]=_b_[attr];$B.builtins_scope.binding[attr]=true;if(_b_[attr].$is_class){if(_b_[attr].__bases__){_b_[attr].__bases__.__class__=_b_.tuple}else{_b_[attr].__bases__=$B.fast_tuple([_b_.object])}}}_b_.__builtins__.__setattr__=function(attr,value){_b_[attr]=value};$B.method_descriptor.__getattribute__=$B.function.__getattribute__;$B.wrapper_descriptor.__getattribute__=$B.function.__getattribute__;for(var name in _b_){var builtin=_b_[name];if(_b_[name].__class__===_b_.type){_b_[name].__qualname__=name;_b_[name].__module__="builtins";_b_[name].__name__=name;_b_[name].$is_builtin_class=true;$B.builtin_classes.push(_b_[name]);for(var key in _b_[name]){var value=_b_[name][key];if(value===undefined||value.__class__||typeof value!="function"){continue}else if(key=="__new__"){value.__class__=$B.builtin_function_or_method}else if(key.startsWith("__")){value.__class__=$B.wrapper_descriptor}else{value.__class__=$B.method_descriptor}value.__objclass__=_b_[name]}}else if(typeof builtin=="function"){builtin.$infos={__name__:name,__qualname__:name}}}for(var attr in $B){if(Array.isArray($B[attr])){$B[attr].__class__=_b_.list}}$B.cell=$B.make_class("cell",(function(value){return{__class__:$B.cell,$cell_contents:value}}));$B.cell.cell_contents=$B.$call(_b_.property)((function(self){if(self.$cell_contents===null){throw _b_.ValueError.$factory("empty cell")}return self.$cell_contents}),(function(self,value){self.$cell_contents=value}));var $comps=Object.values($B.$comps).concat(["eq","ne"]);$comps.forEach((function(comp){var op="__"+comp+"__";$B.cell[op]=function(op){return function(self,other){if(!$B.$isinstance(other,$B.cell)){return _b_.NotImplemented}if(self.$cell_contents===null){if(other.$cell_contents===null){return op=="__eq__"}else{return["__ne__","__lt__","__le__"].indexOf(op)>-1}}else if(other.$cell_contents===null){return["__ne__","__gt__","__ge__"].indexOf(op)>-1}return $B.rich_comp(op,self.$cell_contents,other.$cell_contents)}}(op)}));$B.set_func_names($B.cell,"builtins");for(var flag in $B.builtin_class_flags.builtins){for(var key of $B.builtin_class_flags.builtins[flag]){if(_b_[key]){_b_[key].__flags__=parseInt(flag)}else{console.log("not in _b_",key)}}}for(var flag in $B.builtin_class_flags.types){for(var key of $B.builtin_class_flags.types[flag]){if($B[key]){$B[key].__flags__=parseInt(flag)}}}$B.AST={__class__:_b_.type,__mro__:[_b_.object],__name__:"AST",__qualname__:"AST",$is_class:true,$convert:function(js_node){if(js_node===undefined){return _b_.None}var constr=js_node.constructor;if(constr&&constr.$name){$B.create_python_ast_classes();return $B.python_ast_classes[constr.$name].$factory(js_node)}else if(Array.isArray(js_node)){return js_node.map($B.AST.$convert)}else if(js_node.type){switch(js_node.type){case"int":var value=js_node.value[1],base=js_node.value[0];var res=parseInt(value,base);if(!Number.isSafeInteger(res)){res=$B.long_int.$factory(value,base)}return res;case"float":return $B.fast_float(parseFloat(js_node.value));case"imaginary":return $B.make_complex(0,$B.AST.$convert(js_node.value));case"ellipsis":return _b_.Ellipsis;case"str":if(js_node.is_bytes){return _b_.bytes.$factory(js_node.value,"latin-1")}return js_node.value;case"id":if(["False","None","True"].indexOf(js_node.value)>-1){return _b_[js_node.value]}break}}else if(["string","number"].indexOf(typeof js_node)>-1){return js_node}else if(js_node.$name){return js_node.$name+"()"}else if([_b_.None,_b_.True,_b_.False].indexOf(js_node)>-1){return js_node}else if(js_node.__class__){return 
js_node}else{console.log("cannot handle",js_node);return js_node}}};$B.stdin={__class__:$io,__original__:true,closed:false,len:1,pos:0,read:function(){return""},readline:function(){return""}}})(__BRYTHON__);(function($B){var _b_=$B.builtins;var trace=1;function compiler_error(ast_obj,message,end){var exc=_b_.SyntaxError.$factory(message);exc.filename=state.filename;if(exc.filename!=""){var src=$B.file_cache[exc.filename],lines=src.split("\n"),line=lines[ast_obj.lineno-1];exc.text=line}else{exc.text=_b_.None}exc.lineno=ast_obj.lineno;exc.offset=ast_obj.col_offset;end=end||ast_obj;exc.end_lineno=end.end_lineno;exc.end_offset=end.end_col_offset;exc.args[1]=[exc.filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset];exc.$frame_obj=$B.frame_obj;if($B.frame_obj===null){alert("tiens !")}throw exc}function fast_id(obj){if(obj.$id!==undefined){return obj.$id}return obj.$id=$B.UUID()}function copy_position(target,origin){target.lineno=origin.lineno;target.col_offset=origin.col_offset;target.end_lineno=origin.end_lineno;target.end_col_offset=origin.end_col_offset}function encode_position(a,b,c,d){if(d===undefined){return`[${[a,b,c]}]`}else{return`[${[a,b,c,d]}]`}}$B.decode_position=function(pos){return pos};function last_scope(scopes){var ix=scopes.length-1;while(scopes[ix].parent){ix--}return scopes[ix]}function Scope(name,type,ast){this.name=name;this.locals=new Set;this.globals=new Set;this.nonlocals=new Set;this.freevars=new Set;this.type=type;this.ast=ast}function copy_scope(scope,ast,id){var new_scope=new Scope(scope.name,scope.type,ast);if(id!==undefined){new_scope.id=id}new_scope.parent=scope;return new_scope}function make_local(module_id){return`locals_${module_id.replace(/\./g,"_")}`}function qualified_scope_name(scopes,scope){if(scope!==undefined&&!(scope instanceof Scope)){console.log("bizarre",scope);throw Error("scope étrange")}var _scopes;if(!scope){_scopes=scopes.slice()}else{var ix=scopes.indexOf(scope);if(ix>-1){_scopes=scopes.slice(0,ix+1)}else{_scopes=scopes.concat(scope)}}var names=[];for(var _scope of _scopes){if(!_scope.parent){names.push(_scope.name)}}return names.join("_").replace(/\./g,"_")}function module_name(scopes){var _scopes=scopes.slice();var names=[];for(var _scope of _scopes){if(!_scope.parent){names.push(_scope.name)}}return names.join(".")}function make_scope_name(scopes,scope){if(scope===builtins_scope){return`_b_`}return"locals_"+qualified_scope_name(scopes,scope)}function make_search_namespaces(scopes){var namespaces=[];for(var scope of scopes.slice().reverse()){if(scope.parent||scope.type=="class"){continue}else if(scope.is_exec_scope){namespaces.push("$B.exec_scope")}namespaces.push(make_scope_name(scopes,scope))}namespaces.push("_b_");return namespaces}function mangle(scopes,scope,name){if(name.startsWith("__")&&!name.endsWith("__")){var ix=scopes.indexOf(scope);while(ix>=0){if(scopes[ix].ast instanceof $B.ast.ClassDef){var scope_name=scopes[ix].name;while(scope_name.length>0&&scope_name.startsWith("_")){scope_name=scope_name.substr(1)}if(scope_name.length==0){return name}return"_"+scope_name+name}ix--}}return name}function reference(scopes,scope,name){return make_scope_name(scopes,scope)+"."+mangle(scopes,scope,name)}function bind(name,scopes){var scope=$B.last(scopes),up_scope=last_scope(scopes);name=mangle(scopes,up_scope,name);if(up_scope.globals&&up_scope.globals.has(name)){scope=scopes[0]}else if(up_scope.nonlocals.has(name)){for(var i=scopes.indexOf(up_scope)-1;i>=0;i--){if(scopes[i].locals.has(name)){return 
scopes[i]}}}scope.locals.add(name);return scope}var CELL=5,FREE=4,LOCAL=1,GLOBAL_EXPLICIT=2,GLOBAL_IMPLICIT=3,SCOPE_MASK=15,SCOPE_OFF=12;var TYPE_CLASS=1,TYPE_FUNCTION=0,TYPE_MODULE=2;var DEF_GLOBAL=1,DEF_LOCAL=2,DEF_PARAM=2<<1,DEF_NONLOCAL=2<<2,USE=2<<3,DEF_FREE=2<<4,DEF_FREE_CLASS=2<<5,DEF_IMPORT=2<<6,DEF_ANNOT=2<<7,DEF_COMP_ITER=2<<8;function name_reference(name,scopes,position){var scope=name_scope(name,scopes);return make_ref(name,scopes,scope,position)}function make_ref(name,scopes,scope,position){if(scope.found){return reference(scopes,scope.found,name)}else if(scope.resolve=="all"){var scope_names=make_search_namespaces(scopes);return`$B.resolve_in_scopes('${name}', [${scope_names}], [${position}])`}else if(scope.resolve=="local"){return`$B.resolve_local('${name}', [${position}])`}else if(scope.resolve=="global"){return`$B.resolve_global('${name}', _frame_obj)`}else if(Array.isArray(scope.resolve)){return`$B.resolve_in_scopes('${name}', [${scope.resolve}], [${position}])`}else if(scope.resolve=="own_class_name"){return`$B.own_class_name('${name}')`}}function local_scope(name,scope){var s=scope;while(true){if(s.locals.has(name)){return{found:true,scope:s}}if(!s.parent){return{found:false}}s=s.parent}}function name_scope(name,scopes){var test=false;if(test){console.log("name scope",name,scopes.slice());alert()}var flags,block;if(scopes.length==0){return{found:false,resolve:"all"}}var scope=$B.last(scopes),up_scope=last_scope(scopes),name=mangle(scopes,scope,name);if(up_scope.ast===undefined){console.log("no ast",scope)}block=scopes.symtable.table.blocks.get(fast_id(up_scope.ast));if(block===undefined){console.log("no block",scope,scope.ast,"id",fast_id(up_scope.ast));console.log("scopes",scopes.slice());console.log("symtable",scopes.symtable)}try{flags=_b_.dict.$getitem_string(block.symbols,name)}catch(err){console.log("name",name,"not in symbols of block",block);console.log("symtables",scopes.symtable);console.log("scopes",scopes.slice());return{found:false,resolve:"all"}}var __scope=flags>>SCOPE_OFF&SCOPE_MASK,is_local=[LOCAL,CELL].indexOf(__scope)>-1;if(test){console.log("block",block,"is local",is_local,"__scope",__scope)}if(up_scope.ast instanceof $B.ast.ClassDef&&name==up_scope.name){return{found:false,resolve:"own_class_name"}}if(name=="__annotations__"){if(block.type==TYPE_CLASS&&up_scope.has_annotation){is_local=true}else if(block.type==TYPE_MODULE){is_local=true}}if(is_local){var l_scope=local_scope(name,scope);if(!l_scope.found){if(block.type==TYPE_CLASS){scope.needs_frames=true;return{found:false,resolve:"global"}}else if(block.type==TYPE_MODULE){scope.needs_frames=true;return{found:false,resolve:"global"}}return{found:false,resolve:"local"}}else{return{found:l_scope.scope}}}else if(scope.globals.has(name)){var global_scope=scopes[0];if(global_scope.locals.has(name)){return{found:global_scope}}scope.needs_frames=true;return{found:false,resolve:"global"}}else if(scope.nonlocals.has(name)){for(var i=scopes.length-2;i>=0;i--){block=scopes.symtable.table.blocks.get(fast_id(scopes[i].ast));if(block&&_b_.dict.$contains_string(block.symbols,name)){var fl=_b_.dict.$getitem_string(block.symbols,name),local_to_block=[LOCAL,CELL].indexOf(fl>>SCOPE_OFF&SCOPE_MASK)>-1;if(!local_to_block){continue}return{found:scopes[i]}}}}if(scope.has_import_star){if(!is_local){scope.needs_frames=true}return{found:false,resolve:is_local?"all":"global"}}for(var 
i=scopes.length-2;i>=0;i--){block=undefined;if(scopes[i].ast){block=scopes.symtable.table.blocks.get(fast_id(scopes[i].ast))}if(scopes[i].globals.has(name)){scope.needs_frames=true;return{found:false,resolve:"global"}}if(scopes[i].locals.has(name)&&scopes[i].type!="class"){return{found:scopes[i]}}else if(block&&_b_.dict.$contains_string(block.symbols,name)){flags=_b_.dict.$getitem_string(block.symbols,name);var __scope=flags>>SCOPE_OFF&SCOPE_MASK;if([LOCAL,CELL].indexOf(__scope)>-1){return{found:false,resolve:"all"}}}if(scopes[i].has_import_star){return{found:false,resolve:"all"}}}if(builtins_scope.locals.has(name)){return{found:builtins_scope}}var scope_names=make_search_namespaces(scopes);return{found:false,resolve:scope_names}}function resolve_in_namespace(name,ns){if(ns.$proxy){return ns[name]===undefined?{found:false}:{found:true,value:ns[name]}}if(!ns.hasOwnProperty){if(ns[name]!==undefined){return{found:true,value:ns[name]}}}else if(ns.hasOwnProperty(name)){return{found:true,value:ns[name]}}else if(ns.$dict){try{return{found:true,value:ns.$getitem(ns.$dict,name)}}catch(err){if(ns.$missing){try{return{found:true,value:$B.$call(ns.$missing)(ns.$dict,name)}}catch(err){if(!$B.is_exc(err,[_b_.KeyError])){throw err}}}}}return{found:false}}$B.resolve=function(name){var checked=new Set,current_globals,frame_obj=$B.frame_obj,frame;while(frame_obj!==null){frame=frame_obj.frame;if(current_globals===undefined){current_globals=frame[3]}else if(frame[3]!==current_globals){var v=resolve_in_namespace(name,current_globals);if(v.found){return v.value}checked.add(current_globals);current_globals=frame[3]}var v=resolve_in_namespace(name,frame[1]);if(v.found){return v.value}frame_obj=frame_obj.prev}if(!checked.has(frame[3])){var v=resolve_in_namespace(name,frame[3]);if(v.found){return v.value}}if(builtins_scope.locals.has(name)){return _b_[name]}throw $B.name_error(name)};$B.resolve_local=function(name,position){if($B.frame_obj!==null){var frame=$B.frame_obj.frame;if(frame[1].hasOwnProperty){if(frame[1].hasOwnProperty(name)){return frame[1][name]}}else{var value=frame[1][name];if(value!==undefined){return value}}}var exc=_b_.UnboundLocalError.$factory(`cannot access local variable `+`'${name}' where it is not associated with a value`);if(position&&$B.frame_obj){$B.set_exception_offsets(exc,position)}throw exc};$B.resolve_in_scopes=function(name,namespaces,position){for(var ns of namespaces){if(ns===$B.exec_scope){var exec_top,frame_obj=$B.frame_obj,frame;while(frame_obj!==null){frame=frame_obj.frame;if(frame.is_exec_top){exec_top=frame;break}frame_obj=frame_obj.prev}if(exec_top){for(var ns of[exec_top[1],exec_top[3]]){var v=resolve_in_namespace(name,ns);if(v.found){return v.value}}}}else{var v=resolve_in_namespace(name,ns);if(v.found){return v.value}}}var exc=$B.name_error(name);if(position){$B.set_exception_offsets(exc,position)}throw exc};$B.resolve_global=function(name,frame_obj){while(frame_obj!==null){var frame=frame_obj.frame,v=resolve_in_namespace(name,frame[3]);if(v.found){return v.value}if(frame.is_exec_top){break}frame_obj=frame_obj.prev}if(builtins_scope.locals.has(name)){return _b_[name]}throw $B.name_error(name)};$B.own_class_name=function(name){throw $B.name_error(name)};var $operators=$B.op2method.subset("all");var opname2opsign={};for(var key in $operators){opname2opsign[$operators[key]]=key}var opclass2dunder={};for(var op_type of $B.op_types){for(var operator in 
op_type){opclass2dunder[op_type[operator]]="__"+$operators[operator]+"__"}}opclass2dunder["UAdd"]="__pos__";opclass2dunder["USub"]="__neg__";opclass2dunder["Invert"]="__invert__";var builtins_scope=new Scope("__builtins__");for(var name in $B.builtins){builtins_scope.locals.add(name)}function mark_parents(node){if(node.body&&node.body instanceof Array){for(var child of node.body){child.$parent=node;mark_parents(child)}}else if(node.handlers){var p={$parent:node,type:"except_handler"};for(var child of node.handlers){child.$parent=p;mark_parents(child)}}}function add_body(body,scopes){var res="";let js;for(var item of body){js=$B.js_from_ast(item,scopes);if(js.length>0){res+=js+"\n"}}return res.trimRight()}function extract_docstring(ast_obj,scopes){var js="_b_.None";if(ast_obj.body.length&&ast_obj.body[0]instanceof $B.ast.Expr&&ast_obj.body[0].value instanceof $B.ast.Constant){var value=ast_obj.body[0].value.value;if(typeof value=="string"){js=ast_obj.body[0].value.to_js(scopes);ast_obj.body.shift()}}return js}function init_comprehension(comp,scopes){if(comp.type=="genexpr"){return init_genexpr(comp,scopes)}return`var next_func_${comp.id} = $B.make_js_iterator(expr, frame, ${comp.ast.lineno})\n`}function init_genexpr(comp,scopes){var comp_id=comp.type+"_"+comp.id,varnames=Object.keys(comp.varnames||{}).map((x=>`'${x}'`)).join(", ");return`var ${comp.locals_name} = {},\n`+`locals = ${comp.locals_name}\n`+`locals['.0'] = expr\n`+`var frame = ["<${comp.type.toLowerCase()}>", ${comp.locals_name}, `+`"${comp.module_name}", ${comp.globals_name}]\n`+`frame.$has_generators = true\n`+`frame.__file__ = '${scopes.filename}'\n`+`frame.$lineno = ${comp.ast.lineno}\n`+`frame.f_code = {\n`+`co_argcount: 1,\n`+`co_firstlineno:${comp.ast.lineno},\n`+`co_name: "<${comp.type.toLowerCase()}>",\n`+`co_filename: "${scopes.filename}",\n`+`co_flags: ${comp.type=="genexpr"?115:83},\n`+`co_freevars: $B.fast_tuple([]),\n`+`co_kwonlyargcount: 0,\n`+`co_posonlyargount: 0,\n`+`co_qualname: "<${comp.type.toLowerCase()}>",\n`+`co_varnames: $B.fast_tuple(['.0', ${varnames}])\n`+`}\n`+`var next_func_${comp.id} = $B.make_js_iterator(expr, frame, ${comp.ast.lineno})\n`+`frame.$f_trace = _b_.None\n`+`var _frame_obj = $B.frame_obj\n`}function make_comp(scopes){var id=$B.UUID(),type=this.constructor.$name,symtable_block=scopes.symtable.table.blocks.get(fast_id(this)),varnames=symtable_block.varnames.map((x=>`"${x}"`)),comp_iter,comp_scope=$B.last(scopes),upper_comp_scope=comp_scope;while(upper_comp_scope.parent){upper_comp_scope=upper_comp_scope.parent}var initial_nb_await_in_scope=upper_comp_scope.nb_await===undefined?0:upper_comp_scope.nb_await;for(var symbol of _b_.dict.$iter_items_with_hash(symtable_block.symbols)){if(symbol.value&DEF_COMP_ITER){comp_iter=symbol.key}}var comp_iter_scope=name_scope(comp_iter,scopes);var first_for=this.generators[0],outmost_expr=$B.js_from_ast(first_for.iter,scopes),nb_paren=1;var comp={ast:this,id:id,type:type,varnames:varnames,module_name:scopes[0].name,locals_name:make_scope_name(scopes),globals_name:make_scope_name(scopes,scopes[0])};var js=init_comprehension(comp,scopes);if(comp_iter_scope.found){js+=`var save_comp_iter = ${name_reference(comp_iter,scopes)}\n`}if(this instanceof $B.ast.ListComp){js+=`var result_${id} = []\n`}else if(this instanceof $B.ast.SetComp){js+=`var result_${id} = _b_.set.$factory()\n`}else if(this instanceof $B.ast.DictComp){js+=`var result_${id} = $B.empty_dict()\n`}var first=this.generators[0];js+=`try{\n`+`for(var next_${id} of next_func_${id}){\n`;var name=new 
$B.ast.Name(`next_${id}`,new $B.ast.Load);copy_position(name,first_for.iter);name.to_js=function(){return`next_${id}`};var assign=new $B.ast.Assign([first.target],name);assign.lineno=this.lineno;js+=assign.to_js(scopes)+"\n";for(var _if of first.ifs){nb_paren++;js+=`if($B.$bool(${$B.js_from_ast(_if,scopes)})){\n`}for(var comprehension of this.generators.slice(1)){js+=comprehension.to_js(scopes);nb_paren++;for(var _if of comprehension.ifs){nb_paren++}}if(this instanceof $B.ast.DictComp){var key=$B.js_from_ast(this.key,scopes),value=$B.js_from_ast(this.value,scopes)}else{var elt=$B.js_from_ast(this.elt,scopes)}var final_nb_await_in_scope=upper_comp_scope.nb_await===undefined?0:upper_comp_scope.nb_await;var has_await=final_nb_await_in_scope>initial_nb_await_in_scope;js=`(${has_await?"async ":""}function(expr){\n`+js;js+=has_await?"var save_frame_obj = $B.frame_obj;\n":"";if(this instanceof $B.ast.ListComp){js+=`result_${id}.push(${elt})\n`}else if(this instanceof $B.ast.SetComp){js+=`_b_.set.add(result_${id}, ${elt})\n`}else if(this instanceof $B.ast.DictComp){js+=`_b_.dict.$setitem(result_${id}, ${key}, ${value})\n`}for(var i=0;i")){name="exec"}else{name=filename.replace(/\./g,"_")}var top_scope=new Scope(name,`${type}`,this),block=scopes.symtable.table.blocks.get(fast_id(this));if(block&&block.$has_import_star){top_scope.has_import_star=true}scopes.push(top_scope);var namespaces=scopes.namespaces;if(namespaces){top_scope.is_exec_scope=true;for(var key in namespaces.exec_globals){if(!key.startsWith("$")){top_scope.globals.add(key)}}if(namespaces.exec_locals!==namespaces.exec_globals){for(var key in namespaces.exec_locals){if(!key.startsWith("$")){top_scope.locals.add(key)}}}}return name}function compiler_check(obj){var check_func=Object.getPrototypeOf(obj)._check;if(check_func){obj._check()}}$B.ast.Assert.prototype.to_js=function(scopes){var test=$B.js_from_ast(this.test,scopes),msg=this.msg?$B.js_from_ast(this.msg,scopes):"";return`if($B.set_lineno(frame, ${this.lineno}) && !$B.$bool(${test})){\n`+`throw _b_.AssertionError.$factory(${msg})}\n`};var CO_FUTURE_ANNOTATIONS=16777216;function annotation_to_str(obj){var s;if(obj instanceof $B.ast.Name){s=obj.id}else if(obj instanceof $B.ast.BinOp){s=annotation_to_str(obj.left)+"|"+annotation_to_str(obj.right)}else if(obj instanceof $B.ast.Subscript){s=annotation_to_str(obj.value)+"["+annotation_to_str(obj.slice)+"]"}else if(obj instanceof $B.ast.Constant){if(obj.value===_b_.None){s="None"}else{console.log("other constant",obj)}}else{console.log("other annotation",obj)}return s}$B.ast.AnnAssign.prototype.to_js=function(scopes){var postpone_annotation=scopes.symtable.table.future.features&CO_FUTURE_ANNOTATIONS;var scope=last_scope(scopes);var js="";if(!scope.has_annotation){js+="locals.__annotations__ = locals.__annotations__ || $B.empty_dict()\n";scope.has_annotation=true;scope.locals.add("__annotations__")}if(this.target instanceof $B.ast.Name){var ann_value=postpone_annotation?`'${annotation_to_str(this.annotation)}'`:$B.js_from_ast(this.annotation,scopes)}if(this.value){js+=`var ann = ${$B.js_from_ast(this.value,scopes)}\n`;if(this.target instanceof $B.ast.Name&&this.simple){var scope=bind(this.target.id,scopes),mangled=mangle(scopes,scope,this.target.id);if(scope.type!="def"){js+=`$B.$setitem(locals.__annotations__, `+`'${mangled}', ${ann_value})\n`}var target_ref=name_reference(this.target.id,scopes);js+=`${target_ref} = ann`}else if(this.target instanceof $B.ast.Attribute){js+=`$B.$setattr(${$B.js_from_ast(this.target.value,scopes)}`+`, 
"${this.target.attr}", ann)`}else if(this.target instanceof $B.ast.Subscript){js+=`$B.$setitem(${$B.js_from_ast(this.target.value,scopes)}`+`, ${$B.js_from_ast(this.target.slice,scopes)}, ann)`}}else{if(this.target instanceof $B.ast.Name){if(this.simple&&scope.type!="def"){var mangled=mangle(scopes,scope,this.target.id);var ann=`'${this.annotation.id}'`;js+=`$B.$setitem(locals.__annotations__, `+`'${mangled}', ${ann_value})`}}else{var ann=$B.js_from_ast(this.annotation,scopes)}}return`$B.set_lineno(frame, ${this.lineno})\n`+js};$B.ast.Assign.prototype.to_js=function(scopes){compiler_check(this);var js=this.lineno?`$B.set_lineno(frame, ${this.lineno})\n`:"",value=$B.js_from_ast(this.value,scopes);function assign_one(target,value){if(target instanceof $B.ast.Name){return $B.js_from_ast(target,scopes)+" = "+value}else if(target instanceof $B.ast.Starred){return assign_one(target.value,value)}else if(target instanceof $B.ast.Subscript){return`$B.$setitem(${$B.js_from_ast(target.value,scopes)}`+`, ${$B.js_from_ast(target.slice,scopes)}, ${value})`}else if(target instanceof $B.ast.Attribute){var attr=mangle(scopes,last_scope(scopes),target.attr);return`$B.$setattr(${$B.js_from_ast(target.value,scopes)}`+`, "${attr}", ${value})`}}function assign_many(target,value){var js="";var nb_targets=target.elts.length,has_starred=false,nb_after_starred;for(var i=0,len=nb_targets;i0){var arg_list=not_starred.map((x=>$B.js_from_ast(x,scopes)));if(start){args+=`[${arg_list.join(", ")}]`}else{args+=`.concat([${arg_list.join(", ")}])`}not_starred=[]}else if(args==""){args="[]"}var starred_arg=$B.js_from_ast(arg.value,scopes);args+=`.concat(_b_.list.$factory(${starred_arg}))`;start=false}else{not_starred.push(arg)}}if(not_starred.length>0){var arg_list=not_starred.map((x=>$B.js_from_ast(x,scopes)));if(start){args+=`[${arg_list.join(", ")}]`;start=false}else{args+=`.concat([${arg_list.join(", ")}])`}}if(args[0]=="."){console.log("bizarre",args)}}if(named_kwargs.length+starred_kwargs.length==0){return{has_starred:has_starred,js:js+`${args}`}}else{var kw=`{${named_kwargs.join(", ")}}`;for(var starred_kwarg of starred_kwargs){kw+=`, ${starred_kwarg}`}kw=`{$kw:[${kw}]}`;if(args.length>0){if(has_starred){kw=`.concat([${kw}])`}else{kw=", "+kw}}return{has_starred:has_starred,js:js+`${args}${kw}`}}}$B.ast.ClassDef.prototype.to_js=function(scopes){var enclosing_scope=bind(this.name,scopes);var class_scope=new Scope(this.name,"class",this);var js="",locals_name=make_scope_name(scopes,class_scope),ref=this.name+$B.UUID(),glob=scopes[0].name,globals_name=make_scope_name(scopes,scopes[0]),decorators=[],decorated=false;for(var dec of this.decorator_list){decorated=true;var dec_id="decorator"+$B.UUID();decorators.push(dec_id);js+=`$B.set_lineno(frame, ${dec.lineno})\n`+`var ${dec_id} = ${$B.js_from_ast(dec,scopes)}\n`}js+=`$B.set_lineno(frame, ${this.lineno})\n`;var qualname=this.name;var ix=scopes.length-1;while(ix>=0){if(scopes[ix].parent){ix--}else if(scopes[ix].ast instanceof $B.ast.ClassDef){qualname=scopes[ix].name+"."+qualname;ix--}else{break}}var keywords=[],metaclass;for(var keyword of this.keywords){if(keyword.arg=="metaclass"){metaclass=keyword.value}keywords.push(`["${keyword.arg}", `+$B.js_from_ast(keyword.value,scopes)+"]")}var bases=this.bases.map((x=>$B.js_from_ast(x,scopes)));var has_type_params=this.type_params.length>0;if(has_type_params){js+=`$B.$import('_typing')\n`+`var _typing = $B.imported._typing\n`;var params=[];for(var item of this.type_params){if(item instanceof 
$B.ast.TypeVar){params.push(`$B.$call(_typing.TypeVar)('${item.name}')`)}else if(item instanceof $B.ast.TypeVarTuple){params.push(`$B.$call($B.$getattr(_typing.Unpack, '__getitem__'))($B.$call(_typing.TypeVarTuple)('${item.name.id}'))`)}else if(item instanceof $B.ast.ParamSpec){params.push(`$B.$call(_typing.ParamSpec)('${item.name.id}')`)}}bases.push(`_typing.Generic.__class_getitem__(_typing.Generic,`+` $B.fast_tuple([${params}]))`);for(var item of this.type_params){var name,param_type=item.constructor.$name;if(param_type=="TypeVar"){name=item.name}else{name=item.name.id}js+=`locals.${name} = $B.$call(_typing.${param_type})('${name}')\n`}}var docstring=extract_docstring(this,scopes);js+=`var ${ref} = (function(name, module, bases){\n`+`var _frame_obj = $B.frame_obj,\n`+`resolved_bases = $B.resolve_mro_entries(bases),\n`+`metaclass = $B.get_metaclass(name, module, `+`resolved_bases`;if(metaclass){js+=`, ${metaclass.to_js(scopes)}`}js+=")\n";js+=`var ${locals_name} = $B.make_class_namespace(metaclass, `+`name, module ,"${qualname}", resolved_bases),\n`;js+=`locals = ${locals_name}\n`+`if(resolved_bases !== bases){\nlocals.__orig_bases__ = bases}\n`+`locals.__doc__ = ${docstring}\n`+`var frame = [name, locals, module, ${globals_name}]\n`+`frame.__file__ = __file__\n`+`frame.$lineno = ${this.lineno}\n`+`frame.$f_trace = $B.enter_frame(frame)\n`+`var _frame_obj = $B.frame_obj\n`;if(trace){js+=`if(frame.$f_trace !== _b_.None){\n$B.trace_line()}\n`}scopes.push(class_scope);js+=add_body(this.body,scopes);scopes.pop();var keywords=[];for(var keyword of this.keywords){keywords.push(`["${keyword.arg}", `+$B.js_from_ast(keyword.value,scopes)+"]")}if(trace){js+="\nif(frame.$f_trace !== _b_.None){\n"+"$B.trace_return(_b_.None)\n"+"}"}js+="\n$B.leave_frame()\n"+`return $B.$class_constructor('${this.name}', locals, metaclass, `+`resolved_bases, bases, [${keywords.join(", ")}])\n`+`})('${this.name}', '${glob}', $B.fast_tuple([${bases}]))\n`;var class_ref=reference(scopes,enclosing_scope,this.name);if(decorated){class_ref=`decorated${$B.UUID()}`;js+="var "}var bases=this.bases.map((x=>$B.js_from_ast(x,scopes)));js+=`${class_ref} = ${ref}\n`;if(decorated){js+=reference(scopes,enclosing_scope,this.name)+" = ";var decorate=class_ref;for(var dec of decorators.reverse()){decorate=`$B.$call(${dec})(${decorate})`}js+=decorate+"\n"}return js};$B.ast.Compare.prototype.to_js=function(scopes){var left=$B.js_from_ast(this.left,scopes),comps=[];var len=this.ops.length,prefix=len>1?"locals.$op = ":"";for(var i=0;i1){left="locals.$op"}}return comps.join(" && ")};$B.ast.comprehension.prototype.to_js=function(scopes){var id=$B.UUID(),iter=$B.js_from_ast(this.iter,scopes);var js=`var next_func_${id} = $B.make_js_iterator(${iter}, frame, ${this.lineno})\n`+`for(var next_${id} of next_func_${id}){\n`;var name=new $B.ast.Name(`next_${id}`,new $B.ast.Load);copy_position(name,this.target);name.to_js=function(){return`next_${id}`};var assign=new $B.ast.Assign([this.target],name);copy_position(assign,this.target);js+=assign.to_js(scopes)+" // assign to target\n";for(var _if of this.ifs){js+=`if($B.$bool(${$B.js_from_ast(_if,scopes)})){\n`}return js};$B.ast.Constant.prototype.to_js=function(scopes){if(this.value===true||this.value===false){return this.value+""}else if(this.value===_b_.None){return"_b_.None"}else if(typeof this.value=="string"){var s=this.value,srg=$B.surrogates(s);if(srg.length==0){return`'${s}'`}return`$B.make_String('${s}', [${srg}])`}else 
if(this.value.__class__===_b_.bytes){return`_b_.bytes.$factory([${this.value.source}])`}else if(typeof this.value=="number"){return this.value}else if(this.value.__class__===$B.long_int){return`$B.fast_long_int(${this.value.value}n)`}else if(this.value.__class__===_b_.float){return`({__class__: _b_.float, value: ${this.value.value}})`}else if(this.value.__class__===_b_.complex){return`$B.make_complex(${this.value.$real.value}, ${this.value.$imag.value})`}else if(this.value===_b_.Ellipsis){return`_b_.Ellipsis`}else{console.log("invalid value",this.value);throw SyntaxError("bad value",this.value)}};$B.ast.Continue.prototype.to_js=function(scopes){if(!in_loop(scopes)){compiler_error(this,"'continue' not properly in loop")}return"continue"};$B.ast.Delete.prototype.to_js=function(scopes){compiler_check(this);var js="";for(var target of this.targets){if(target instanceof $B.ast.Name){var scope=name_scope(target.id,scopes);if(scope.found){scope.found.locals.delete(target.id)}js+=`$B.$delete("${target.id}")\n`}else if(target instanceof $B.ast.Subscript){js+=`$B.$delitem(${$B.js_from_ast(target.value,scopes)}, `+`${$B.js_from_ast(target.slice,scopes)})\n`}else if(target instanceof $B.ast.Attribute){js+=`_b_.delattr(${$B.js_from_ast(target.value,scopes)}, `+`'${target.attr}')\n`}}return`$B.set_lineno(frame, ${this.lineno})\n`+js};$B.ast.Dict.prototype.to_js=function(scopes){var items=[],keys=this.keys,has_packed=false;function no_key(i){return keys[i]===_b_.None||keys[i]===undefined}for(var i=0,len=this.keys.length;i0){js+=`\nif(no_break_${id}){\n`+add_body(this.orelse,scopes)+"}\n"}return js};$B.ast.FormattedValue.prototype.to_js=function(scopes){var value=$B.js_from_ast(this.value,scopes);if(this.conversion==114){value=`_b_.repr(${value})`}else if(this.conversion==115){value=`_b_.str.$factory(${value})`}else if(this.conversion==97){value=`_b_.ascii(${value})`}if(this.format_spec){value=`_b_.str.format('{0:' + `+$B.js_from_ast(this.format_spec,scopes)+` + '}', ${value})`}else if(this.conversion==-1){value=`_b_.str.$factory(${value})`}return value};function transform_args(scopes){var has_posonlyargs=this.args.posonlyargs.length>0,_defaults=[],nb_defaults=this.args.defaults.length,positional=this.args.posonlyargs.concat(this.args.args),ix=positional.length-nb_defaults,default_names=[],kw_defaults=[],annotations;for(var arg of positional.concat(this.args.kwonlyargs).concat([this.args.vararg,this.args.kwarg])){if(arg&&arg.annotation){annotations=annotations||{};annotations[arg.arg]=arg.annotation}}for(var i=ix;i ${PARAMS_POS_COUNT} ) {\n $B.args0_old(fct, args);\n throw new Error('Too much positional arguments given (args0 should have raised an error) !');\n }\n`;if(hasPosOnly||hasPos){fct+=`\n for( ; offset < ARGS_POS_COUNT ; ++offset)\n result[ PARAMS_NAMES[offset] ] = args[offset];\n`}}if(!hasPos&&!hasNamedOnly&&!hasKWargs){fct+=`\n if( HAS_KW === true ) {\n for(let argname in ARGS_NAMED[0] ) {\n $B.args0_old(fct, args);\n throw new Error('No named arguments expected !!!');\n }\n for(let id = 1; id < ARGS_NAMED.length; ++id ) {\n const kargs = ARGS_NAMED[id];\n for(let argname of $B.make_js_iterator( kargs.__class__.keys(kargs) ) ) { //TODO: not optimal\n $B.args0_old(fct, args);\n throw new Error('No named arguments expected !!!');\n }\n }\n }\n`}else{fct+=`\n if( HAS_KW === false ) {\n `}if(hasPos||hasPosOnly){if(posOnlyDefaults!==DEFAULTS.ALL&&posDefaults!==DEFAULTS.ALL){fct+=`\n if( offset < ${PARAMS_POS_DEFAULTS_OFFSET} ) {\n $B.args0_old(fct, args);\n throw new Error('Not enough positional 
arguments given (args0 should have raised an error) !');\n }\n`}if(posOnlyDefaults!==DEFAULTS.NONE||posDefaults!==DEFAULTS.NONE){fct+=`\n for(let i = offset - PARAMS_POS_DEFAULTS_OFFSET;\n i < PARAMS_POS_DEFAULTS_COUNT;\n ++i)\n result[ PARAMS_NAMES[offset++] ] = PARAMS_POS_DEFAULTS[i];`}}if(hasKWargs){fct+=`\n result[$INFOS.kwarg] = __BRYTHON__.obj_dict({});`}if(hasNamedOnly&&namedOnlyDefaults!==DEFAULTS.ALL){fct+=`\n $B.args0_old(fct, args);\n throw new Error('Named argument expected (args0 should have raised an error) !');\n`}else if(namedOnlyDefaults!==DEFAULTS.NONE){fct+=`\n const kwargs_defaults_values = fct.$kwdefaults_values;\n for(let i = 0; i < kwargs_defaults_values.length; ++i )\n result[ PARAMS_NAMES[offset++] ] = kwargs_defaults_values[i];\n`}fct+=`\n return result;\n`;if(!hasPos&&!hasNamedOnly&&!hasKWargs){return fct}else{fct+=`\n }\n`}if(namedOnlyDefaults!==DEFAULTS.NONE){fct+=`\n const kwargs_defaults = fct.$kwdefaults;\n`}let PARAMS_POSONLY_COUNT="0";if(hasPosOnly){PARAMS_POSONLY_COUNT="PARAMS_POSONLY_COUNT";fct+=`\n const PARAMS_POSONLY_COUNT = $CODE.co_posonlyargcount;\n if( offset < PARAMS_POSONLY_COUNT ) {\n `;if(posOnlyDefaults!==DEFAULTS.SOME){fct+=`\n if( offset < ${PARAMS_POS_DEFAULTS_OFFSET} ) {\n $B.args0_old(fct, args);\n throw new Error('Not enough positional parameters given (args0 should have raised an error) !');\n }\n`}if(posOnlyDefaults===DEFAULTS.NONE){fct+=`\n $B.args0_old(fct, args);\n throw new Error('Not enough positional parameters given (args0 should have raised an error) !');\n`}fct+=`\n const max = ${PARAMS_POS_DEFAULTS_COUNT} - (${PARAMS_POS_COUNT} - PARAMS_POSONLY_COUNT);\n // default parameters\n for(let i = offset - ${PARAMS_POS_DEFAULTS_OFFSET};\n i < max;\n ++i)\n result[ PARAMS_NAMES[offset++] ] = PARAMS_POS_DEFAULTS[i];\n }\n`}if(hasKWargs){fct+=`\n const extra = {};\n let nb_extra_args = 0;\n`;if(hasPos||hasNamedOnly){fct+=`\n const HAS_PARAMS = fct.$hasParams;\n`}}fct+=`\n let nb_named_args = 0;\n const kargs = ARGS_NAMED[0];\n for(let argname in kargs) {\n `;if(!hasKWargs){fct+=`\n result[ argname ] = kargs[argname];\n ++nb_named_args;\n`}if(hasKWargs){if(!hasNamedOnly&&!hasPos){fct+=`\n extra[ argname ] = kargs[argname];\n ++nb_extra_args;\n`}else{fct+=`\n if( HAS_PARAMS.has(argname) ) {\n result[ argname ] = kargs[argname];\n ++nb_named_args;\n } else {\n extra[ argname ] = kargs[argname];\n ++nb_extra_args;\n }\n`}}fct+=`\n }\n for(let id = 1; id < ARGS_NAMED.length; ++id ) {\n const kargs = ARGS_NAMED[id];\n for(let argname of $B.make_js_iterator(kargs.__class__.keys(kargs)) ) {\n if( typeof argname !== "string") {\n $B.args0_old(fct, args);\n throw new Error('Non string key passed in **kargs');\n }\n `;if(!hasKWargs){fct+=`\n result[ argname ] = $B.$getitem(kargs, argname);\n ++nb_named_args;\n`}if(hasKWargs){if(!hasNamedOnly&&!hasPos){fct+=`\n extra[ argname ] = $B.$getitem(kargs, argname);\n ++nb_extra_args;\n`}else{fct+=`\n if( HAS_PARAMS.has(argname) ) {\n result[ argname ] = $B.$getitem(kargs, argname);\n ++nb_named_args;\n } else {\n extra[ argname ] = $B.$getitem(kargs, argname);\n ++nb_extra_args;\n }\n`}}fct+=`\n }\n }\n`;fct+=`\n let found = 0;\n let ioffset = offset;\n`;if((hasPosOnly||hasPos)&&(!hasPosOnly||posOnlyDefaults!==DEFAULTS.ALL)&&(!hasPos||posDefaults!==DEFAULTS.ALL)){fct+=`\n for( ; ioffset < ${PARAMS_POS_DEFAULTS_OFFSET}; ++ioffset) {\n const key = PARAMS_NAMES[ioffset];\n if( key in result ) // maybe could be speed up using "!(key in result)"\n continue;\n $B.args0_old(fct, args);\n throw new 
Error('Missing a named arguments (args0 should have raised an error) !');\n }\n`}if(hasPosOnly&&posOnlyDefaults!==DEFAULTS.NONE||hasPos&&posDefaults!==DEFAULTS.NONE){fct+=`\n for( ; ioffset < PARAMS_POS_COUNT; ++ioffset) {\n const key = PARAMS_NAMES[ioffset];\n if( key in result )\n continue;\n result[key] = PARAMS_POS_DEFAULTS[ioffset - ${PARAMS_POS_DEFAULTS_OFFSET}];\n ++found;\n }\n`}if(hasNamedOnly){fct+=`\n for( ; ioffset < PARAMS_NAMES.length; ++ioffset) {\n const key = PARAMS_NAMES[ioffset];\n if( key in result )\n continue;\n`;if(namedOnlyDefaults===DEFAULTS.SOME){fct+=`\n if( ! kwargs_defaults.has(key) ) {\n $B.args0_old(fct, args);\n throw new Error('Missing a named arguments (args0 should have raised an error) !');\n }\n`}if(namedOnlyDefaults===DEFAULTS.NONE){fct+=`\n $B.args0_old(fct, args);\n throw new Error('Missing a named arguments (args0 should have raised an error) !');\n`}if(namedOnlyDefaults!==DEFAULTS.NONE){fct+=`\n result[key] = kwargs_defaults.get(key);\n ++found;\n`}fct+=`\n }\n`}if(hasNamedOnly||hasPos)fct+=`\n if( found + nb_named_args !== PARAMS_NAMES.length - offset) {\n $B.args0_old(fct, args);\n throw new Error('Inexistant or duplicate named arguments (args0 should have raised an error) !');\n }\n`;if(hasKWargs){fct+=`\n if( Object.keys(extra).length !== nb_extra_args ) {\n $B.args0_old(fct, args);\n throw new Error('Duplicate name given to **kargs parameter (args0 should have raised an error) !');\n }\n result[$INFOS.kwarg] = __BRYTHON__.obj_dict(extra);\n`}fct+=`\n return result\n `;return fct}const USE_PERSO_ARGS0_EVERYWHERE=true;function type_param_in_def(tp,ref,scopes){var gname=scopes[0].name,globals_name=make_scope_name(scopes,scopes[0]);var js="";var name,param_type=tp.constructor.$name;if(param_type=="TypeVar"){name=tp.name}else{name=tp.name.id}bind(name,scopes);if(tp.bound){var typevarscope=new Scope(name,"typevarbound",tp);scopes.push(typevarscope);js+=`function BOUND_OF_${name}(){\n`+`var current_frame = $B.frame_obj.frame,\n`+`frame = ['BOUND_OF_${name}', {}, '${gname}', ${globals_name}]\n`+`frame.$f_trace = $B.enter_frame(frame)\n`+`frame.__file__ = '${scopes.filename}'\n`+`frame.$lineno = ${tp.bound.lineno}\n`+`try{\n`+`var res = ${tp.bound.to_js(scopes)}\n`+`$B.leave_frame()\nreturn res\n`+`}catch(err){\n`+`$B.leave_frame()\n`+`throw err\n}\n}\n`;scopes.pop()}js+=`locals_${ref}.${name} = `+`$B.$call(_typing.${param_type})('${name}')\n`+`type_params.push(locals_${ref}.${name})\n`;if(tp.bound){if(!tp.bound.elts){js+=`_typing.${param_type}._set_lazy_eval(locals_${ref}.${name}, `+`'__bound__', BOUND_OF_${name})\n`}else{js+=`_typing.${param_type}._set_lazy_eval(locals_${ref}.${name}, `+`'__constraints__', BOUND_OF_${name})\n`}}return js}$B.ast.FunctionDef.prototype.to_js=function(scopes){var symtable_block=scopes.symtable.table.blocks.get(fast_id(this));var in_class=last_scope(scopes).ast instanceof $B.ast.ClassDef,is_async=this instanceof $B.ast.AsyncFunctionDef;if(in_class){var class_scope=last_scope(scopes)}var func_name_scope=bind(this.name,scopes);var gname=scopes[0].name,globals_name=make_scope_name(scopes,scopes[0]);var decorators=[],decorated=false,decs_declare=this.decorator_list.length>0?"// declare decorators\n":"";for(var dec of this.decorator_list){decorated=true;var dec_id="decorator"+$B.UUID();decorators.push(dec_id);decs_declare+=`$B.set_lineno(frame, ${dec.lineno})\n`;decs_declare+=`var ${dec_id} = ${$B.js_from_ast(dec,scopes)}\n`}var docstring=extract_docstring(this,scopes);var 
parsed_args=transform_args.bind(this)(scopes),default_names=parsed_args.default_names,_defaults=parsed_args._defaults,positional=parsed_args.positional,has_posonlyargs=parsed_args.has_posonlyargs,kw_defaults=parsed_args.kw_defaults,kw_default_names=parsed_args.kw_default_names;var defaults=`$B.fast_tuple([${this.args.defaults.map((x=>x.to_js(scopes)))}])`,kw_defaults=kw_default_names.length==0?"_b_.None":`$B.obj_dict({${kw_defaults.join(", ")}})`;var id=$B.UUID(),name1=this.name+"$"+id,name2=this.name+id;var has_type_params=this.type_params.length>0,type_params="";if(has_type_params){var tp_name=`type_params_${name2}`;var type_params_scope=new Scope(tp_name,"type_params",this.type_params);scopes.push(type_params_scope);var type_params_ref=qualified_scope_name(scopes,type_params_scope);var type_params_func=`function TYPE_PARAMS_OF_${name2}(){\n`;var type_params=`$B.$import('_typing')\n`+`var _typing = $B.imported._typing\n`+`var locals_${type_params_ref} = {\n},\n`+`locals = locals_${type_params_ref},\n`+`frame = ['${type_params_ref}', locals, '${gname}', ${globals_name}],\n`+`type_params = []\n`+`frame.$f_trace = $B.enter_frame(frame)\n`+`frame.__file__ = '${scopes.filename}'\n`;var name=this.type_params[0].name;for(var item of this.type_params){type_params+=type_param_in_def(item,type_params_ref,scopes)}type_params_func+=type_params}var func_scope=new Scope(this.name,"def",this);scopes.push(func_scope);var args=positional.concat(this.args.kwonlyargs),slots=[],arg_names=[];for(var arg of args){slots.push(arg.arg+": null");bind(arg.arg,scopes)}for(var arg of this.args.posonlyargs){arg_names.push(`'${arg.arg}'`)}for(var arg of this.args.args.concat(this.args.kwonlyargs)){arg_names.push(`'${arg.arg}'`)}if(this.args.vararg){bind(this.args.vararg.arg,scopes)}if(this.args.kwarg){bind(this.args.kwarg.arg,scopes)}if(this.$is_lambda){var _return=new $B.ast.Return(this.body);copy_position(_return,this.body);var body=[_return],function_body=add_body(body,scopes)}else{var function_body=add_body(this.body,scopes)}var is_generator=symtable_block.generator;var parse_args=[name2];var js=`$B.set_lineno(frame, ${this.lineno})\n`;if(is_async&&!is_generator){js+="async "}js+=`function ${name2}(){\n`;var locals_name=make_scope_name(scopes,func_scope);js+=`var ${locals_name},\n locals\n`;parse_args.push("arguments");var args_vararg=this.args.vararg===undefined?"null":"'"+this.args.vararg.arg+"'",args_kwarg=this.args.kwarg===undefined?"null":"'"+this.args.kwarg.arg+"'";if(positional.length==0&&slots.length==0&&this.args.vararg===undefined&&this.args.kwarg===undefined){js+=`${locals_name} = locals = {};\n`;js+=`if(arguments.length !== 0) ${parse_args[0]}.$args_parser(${parse_args.join(", ")})\n;`}else if(USE_PERSO_ARGS0_EVERYWHERE){js+=`${locals_name} = locals = ${parse_args[0]}.$args_parser(${parse_args.join(", ")})\n`}else{js+=`${locals_name} = locals = $B.args0(${parse_args.join(", ")})\n`}js+=`var frame = ["${this.$is_lambda?"":this.name}", `+`locals, "${gname}", ${globals_name}, ${name2}]\n if(locals.$has_generators){\n frame.$has_generators = true\n }\n frame.__file__ = __file__\n frame.$lineno = ${this.lineno}\n frame.$f_trace = $B.enter_frame(frame)\n`;if(func_scope.needs_stack_length){js+=`var stack_length = $B.count_frames()\n`}if(func_scope.needs_frames||is_async){js+=`var _frame_obj = $B.frame_obj\n`+`_linenums = $B.make_linenums()\n`}if(is_async){js+="frame.$async = true\n"}if(is_generator){js+=`locals.$is_generator = true\n`;if(is_async){js+=`var gen_${id} = $B.async_generator.$factory(async 
function*(){\n`}else{js+=`var gen_${id} = $B.generator.$factory(function*(){\n`}}js+=`try{\n$B.js_this = this\n`;if(in_class){var ix=scopes.indexOf(class_scope),parent=scopes[ix-1];var scope_ref=make_scope_name(scopes,parent),class_ref=class_scope.name,refs=class_ref.split(".").map((x=>`'${x}'`));bind("__class__",scopes);js+=`locals.__class__ = `+`$B.get_method_class(${name2}, ${scope_ref}, "${class_ref}", [${refs}])\n`}js+=function_body+"\n";if(!this.$is_lambda&&!($B.last(this.body)instanceof $B.ast.Return)){js+="var result = _b_.None\n";if(trace){js+="if(frame.$f_trace !== _b_.None){\n"+"$B.trace_return(_b_.None)\n}\n"}js+="$B.leave_frame()\n"+"return result\n"}js+=`}catch(err){\n $B.set_exc(err, frame)\n`;if(func_scope.needs_frames){js+=`err.$frame_obj = _frame_obj\n`+`_linenums[_linenums.length - 1] = frame.$lineno\n`+`err.$linenums = _linenums\n`}if(trace){js+=`if((! err.$in_trace_func) && frame.$f_trace !== _b_.None){\n frame.$f_trace = $B.trace_exception()\n }\n`}js+=`$B.leave_frame();throw err\n }\n }\n`;if(is_generator){js+=`, '${this.name}')\n`+`var _gen_${id} = gen_${id}()\n`+`_gen_${id}.$frame = frame\n`+`$B.leave_frame()\n`+`return _gen_${id}}\n`}scopes.pop();var in_class=func_name_scope.ast instanceof $B.ast.ClassDef;var qualname=in_class?`${func_name_scope.name}.${this.name}`:this.name;var flags=3;if(this.args.vararg){flags|=4}if(this.args.kwarg){flags|=8}if(is_generator){flags|=32}if(is_async){flags|=128}var parameters=[],locals=[],identifiers=_b_.dict.$keys_string(symtable_block.symbols);var free_vars=[];for(var ident of identifiers){var flag=_b_.dict.$getitem_string(symtable_block.symbols,ident),_scope=flag>>SCOPE_OFF&SCOPE_MASK;if(_scope==FREE){free_vars.push(`'${ident}'`)}if(flag&DEF_PARAM){parameters.push(`'${ident}'`)}else if(flag&DEF_LOCAL){locals.push(`'${ident}'`)}}var varnames=parameters.concat(locals);if(in_class){js+=`${name2}.$is_method = true\n`}if(is_async){js+=`${name2}.$is_async = true\n`}js+=`$B.make_function_infos(${name2}, `+`'${gname}', `+`${defaults}, `+`_b_.globals(), `+`${kw_defaults}, `+`${docstring}, `+`[${arg_names}], `+`${args_vararg}, `+`${args_kwarg},\n`+`${positional.length}, `+`__file__, `+`${this.lineno}, `+`${flags}, `+`$B.fast_tuple([${free_vars}]), `+`${this.args.kwonlyargs.length}, `+`'${this.$is_lambda?"":this.name}', `+`${varnames.length}, `+`${this.args.posonlyargs.length}, `+`'${this.$is_lambda?"":qualname}', `+`$B.fast_tuple([${varnames}]))\n`;if(is_async&&!is_generator){js+=`${name2} = $B.make_async(${name2})\n`}js+=`$B.make_function_defaults(${name2})\n`;var mangled=mangle(scopes,func_name_scope,this.name),func_ref=`${make_scope_name(scopes,func_name_scope)}.${mangled}`;if(decorated){func_ref=`decorated${$B.UUID()}`;js+="var "}js+=`${func_ref} = ${name2}\n`;if(this.returns||parsed_args.annotations){var ann_items=[];if(this.returns){ann_items.push(`['return', ${this.returns.to_js(scopes)}]`)}if(parsed_args.annotations){for(var arg_ann in parsed_args.annotations){var value=parsed_args.annotations[arg_ann].to_js(scopes);if(in_class){arg_ann=mangle(scopes,class_scope,arg_ann)}ann_items.push(`['${arg_ann}', ${value}]`)}}js+=`${func_ref}.__annotations__ = _b_.dict.$factory([${ann_items.join(", ")}])\n`}else{js+=`${func_ref}.__annotations__ = $B.empty_dict()\n`}if(has_type_params){scopes.pop()}if(decorated&&!has_type_params){js+=`${make_scope_name(scopes,func_name_scope)}.${mangled} = `;var decorate=func_ref;for(var dec of 
decorators.reverse()){decorate=`$B.$call(${dec})(${decorate})`}js+=decorate}if(has_type_params){type_params_func+="\n"+js+"\n"+`${name2}.__type_params__ = $B.fast_tuple(type_params)\n`+`$B.leave_frame()\n`+`return ${name2}\n}\n`;js=type_params_func;if(decorated){js+=`var ${func_ref} = TYPE_PARAMS_OF_${name2}()\n`+`${make_scope_name(scopes,func_name_scope)}.${mangled} = `;var decorate=func_ref;for(var dec of decorators.reverse()){decorate=`$B.$call(${dec})(${decorate})`}js+=decorate}else{js+=`var locals_${type_params_ref} = TYPE_PARAMS_OF_${name2}()\n`}}js=decs_declare+js;return js};$B.ast.GeneratorExp.prototype.to_js=function(scopes){var id=$B.UUID(),symtable_block=scopes.symtable.table.blocks.get(fast_id(this)),varnames=symtable_block.varnames.map((x=>`"${x}"`));var expr=this.elt,first_for=this.generators[0],outmost_expr=$B.js_from_ast(first_for.iter,scopes),nb_paren=1;var comp_scope=new Scope(`genexpr_${id}`,"comprehension",this);scopes.push(comp_scope);var comp={ast:this,id:id,type:"genexpr",varnames:varnames,module_name:scopes[0].name,locals_name:make_scope_name(scopes),globals_name:make_scope_name(scopes,scopes[0])};var head=init_comprehension(comp,scopes);var first=this.generators[0];var js=`$B.enter_frame(frame)\n`+`var next_func_${id} = $B.make_js_iterator(expr, frame, ${this.lineno})\n`+`for(var next_${id} of next_func_${id}){\n`+`frame.$f_trace = $B.enter_frame(frame)\n`;var name=new $B.ast.Name(`next_${id}`,new $B.ast.Load);copy_position(name,first_for.iter);name.to_js=function(){return`next_${id}`};var assign=new $B.ast.Assign([first.target],name);assign.lineno=this.lineno;js+=assign.to_js(scopes)+"\n";for(var _if of first.ifs){nb_paren++;js+=`if($B.$bool(${$B.js_from_ast(_if,scopes)})){\n`}for(var comprehension of this.generators.slice(1)){js+=comprehension.to_js(scopes);nb_paren++;for(var _if of comprehension.ifs){nb_paren++}}var elt=$B.js_from_ast(this.elt,scopes),has_await=comp_scope.has_await;js=`var gen${id} = $B.generator.$factory(${has_await?"async ":""}function*(expr){\n`+js;js+=has_await?"var save_frame_obj = $B.frame_obj;\n":"";js+=`try{\n`+` yield ${elt}\n`+`}catch(err){\n`+(has_await?"$B.restore_frame_obj(save_frame_obj, locals)\n":"")+`$B.leave_frame()\nthrow err\n}\n`+(has_await?"\n$B.restore_frame_obj(save_frame_obj, locals);":"");for(var i=0;i")(expr)\n';scopes.pop();var func=`${head}\n${js}\nreturn gen${id}`;return`(function(expr){\n${func}\n})(${outmost_expr})\n`};$B.ast.Global.prototype.to_js=function(scopes){var scope=last_scope(scopes);for(var name of this.names){scope.globals.add(name)}return""};$B.ast.If.prototype.to_js=function(scopes){var scope=$B.last(scopes),new_scope=copy_scope(scope,this);var js=`if($B.set_lineno(frame, ${this.lineno}) && `+`$B.$bool(${$B.js_from_ast(this.test,scopes)})){\n`;scopes.push(new_scope);js+=add_body(this.body,scopes)+"\n}";scopes.pop();if(this.orelse.length>0){if(this.orelse[0]instanceof $B.ast.If&&this.orelse.length==1){js+="else "+$B.js_from_ast(this.orelse[0],scopes)+add_body(this.orelse.slice(1),scopes)}else{js+="\nelse{\n"+add_body(this.orelse,scopes)+"\n}"}}return js};$B.ast.IfExp.prototype.to_js=function(scopes){return"($B.$bool("+$B.js_from_ast(this.test,scopes)+") ? 
"+$B.js_from_ast(this.body,scopes)+": "+$B.js_from_ast(this.orelse,scopes)+")"};$B.ast.Import.prototype.to_js=function(scopes){var js=`$B.set_lineno(frame, ${this.lineno})\n`;for(var alias of this.names){js+=`$B.$import("${alias.name}", [], `;if(alias.asname){js+=`{'${alias.name}' : '${alias.asname}'}, `;bind(alias.asname,scopes)}else{js+="{}, ";bind(alias.name,scopes)}var parts=alias.name.split(".");for(var i=0;i`"${x.name}"`)).join(", "),aliases=[];for(var name of this.names){if(name.asname){aliases.push(`${name.name}: '${name.asname}'`)}}js+=`[${names}], {${aliases.join(", ")}}, ${this.level}, locals);`;for(var alias of this.names){if(alias.asname){bind(alias.asname,scopes)}else if(alias.name=="*"){last_scope(scopes).blurred=true;js+=`\n$B.import_all(locals, module)`}else{bind(alias.name,scopes)}}return js};$B.ast.JoinedStr.prototype.to_js=function(scopes){var items=this.values.map((s=>$B.js_from_ast(s,scopes)));if(items.length==0){return"''"}return items.join(" + ")};$B.ast.Lambda.prototype.to_js=function(scopes){var id=$B.UUID(),name="lambda_"+$B.lambda_magic+"_"+id;var f=new $B.ast.FunctionDef(name,this.args,this.body,[]);f.lineno=this.lineno;f.$id=fast_id(this);f.$is_lambda=true;var js=f.to_js(scopes),lambda_ref=reference(scopes,last_scope(scopes),name);return`(function(){ ${js}\n`+`return ${lambda_ref}\n})()`};function list_or_tuple_to_js(func,scopes){if(this.elts.filter((x=>x instanceof $B.ast.Starred)).length>0){var parts=[],simple=[];for(var elt of this.elts){if(elt instanceof $B.ast.Starred){elt.$handled=true;parts.push(`[${simple.join(", ")}]`);simple=[];parts.push(`_b_.list.$factory(${$B.js_from_ast(elt,scopes)})`)}else{simple.push($B.js_from_ast(elt,scopes))}}if(simple.length>0){parts.push(`[${simple.join(", ")}]`)}var js=parts[0];for(var part of parts.slice(1)){js+=`.concat(${part})`}return`${func}(${js})`}var elts=this.elts.map((x=>$B.js_from_ast(x,scopes)));return`${func}([${elts.join(", ")}])`}$B.ast.List.prototype.to_js=function(scopes){return list_or_tuple_to_js.bind(this)("$B.$list",scopes)};$B.ast.ListComp.prototype.to_js=function(scopes){compiler_check(this);return make_comp.bind(this)(scopes)};$B.ast.match_case.prototype.to_js=function(scopes){var js=`($B.set_lineno(frame, ${this.lineno}) && `+`$B.pattern_match(subject, {`+`${$B.js_from_ast(this.pattern,scopes)}})`;if(this.guard){js+=` && $B.$bool(${$B.js_from_ast(this.guard,scopes)})`}js+=`){\n`;js+=add_body(this.body,scopes)+"\n}";return js};function is_irrefutable(pattern){switch(pattern.constructor){case $B.ast.MatchAs:if(pattern.pattern===undefined){return pattern}else{return is_irrefutable(pattern.pattern)}case $B.ast.MatchOr:for(var i=0;i-1){compiler_error(this,`multiple assignment to name '${name}' in pattern`)}scope.bindings.push(name)}return params};$B.ast.MatchClass.prototype.to_js=function(scopes){var names=[];for(var pattern of this.patterns.concat(this.kwd_patterns)){var name=pattern.name;if(name){if(names.indexOf(name)>-1){compiler_error(pattern,`multiple assignment to name '${name}' in pattern`)}names.push(name)}}names=[];for(var i=0;i-1){compiler_error(this.kwd_patterns[i],`attribute name repeated in class pattern: ${kwd_attr}`)}names.push(kwd_attr)}var cls=$B.js_from_ast(this.cls,scopes),patterns=this.patterns.map((x=>`{${$B.js_from_ast(x,scopes)}}`));var kw=[];for(var i=0,len=this.kwd_patterns.length;i-1){compiler_error(pattern,`multiple assignments to name '${pattern.name}' in pattern`)}names.push(pattern.name)}}var items=[];for(var i=0,len=this.keys.length;i-1){compiler_error(pattern,`multiple 
assignments to name '${pattern.name}' in pattern`)}names.push(pattern.name)}items.push("{"+$B.js_from_ast(pattern,scopes)+"}")}return`sequence: [${items.join(", ")}]`};$B.ast.MatchSingleton.prototype.to_js=function(scopes){var value=this.value===true?"_b_.True":this.value===false?"_b_.False":"_b_.None";return`literal: ${value}`};$B.ast.MatchStar.prototype.to_js=function(scopes){var name=this.name===undefined?"_":this.name;return`capture_starred: '${name}'`};$B.ast.MatchValue.prototype.to_js=function(scopes){if(this.value instanceof $B.ast.Constant){return`literal: ${$B.js_from_ast(this.value,scopes)}`}else if(this.value instanceof $B.ast.Constant||this.value instanceof $B.ast.UnaryOp||this.value instanceof $B.ast.BinOp||this.value instanceof $B.ast.Attribute){return`value: ${$B.js_from_ast(this.value,scopes)}`}else{compiler_error(this,"patterns may only match literals and attribute lookups")}};$B.ast.Module.prototype.to_js=function(scopes){mark_parents(this);var name=init_scopes.bind(this)("module",scopes),namespaces=scopes.namespaces;var module_id=name,global_name=make_scope_name(scopes),mod_name=module_name(scopes);var js=`// Javascript code generated from ast\n`+`var $B = __BRYTHON__,\n_b_ = $B.builtins,\n`;if(!namespaces){js+=`${global_name} = $B.imported["${mod_name}"],\n`+`locals = ${global_name},\n`+`frame = ["${module_id}", locals, "${module_id}", locals]`}else{js+=`locals = ${namespaces.local_name},\n`+`globals = ${namespaces.global_name}`;if(name){js+=`,\nlocals_${name} = locals`}}js+=`\nvar __file__ = frame.__file__ = '${scopes.filename||""}'\n`+`locals.__name__ = '${name}'\n`+`locals.__doc__ = ${extract_docstring(this,scopes)}\n`;if(!scopes.imported){js+=`locals.__annotations__ = locals.__annotations__ || $B.empty_dict()\n`}if(!namespaces){js+=`frame.$f_trace = $B.enter_frame(frame)\n`}js+=`$B.set_lineno(frame, 1)\n`+"\nvar _frame_obj = $B.frame_obj,\n"+"stack_length = $B.count_frames()\n"+`try{\n`+add_body(this.body,scopes)+"\n"+(namespaces?"":`$B.leave_frame({locals, value: _b_.None})\n`)+`}catch(err){\n`+`$B.set_exc(err, frame)\n`;if(trace){js+=`if((! 
err.$in_trace_func) && frame.$f_trace !== _b_.None){\n`+`frame.$f_trace = $B.trace_exception()\n`+`}\n`}js+=(namespaces?"":`$B.leave_frame({locals, value: _b_.None})\n`)+"throw err\n"+`}`;scopes.pop();return js};$B.ast.Name.prototype.to_js=function(scopes){if(this.ctx instanceof $B.ast.Store){var scope=bind(this.id,scopes);if(scope===$B.last(scopes)&&scope.freevars.has(this.id)){scope.freevars.delete(this.id)}return reference(scopes,scope,this.id)}else if(this.ctx instanceof $B.ast.Load){var res=name_reference(this.id,scopes,[this.col_offset,this.col_offset,this.end_col_offset]);if(this.id=="__debugger__"&&res.startsWith("$B.resolve_in_scopes")){return"debugger"}return res}};$B.ast.NamedExpr.prototype.to_js=function(scopes){var i=scopes.length-1;while(scopes[i].type=="comprehension"){i--}var enclosing_scopes=scopes.slice(0,i+1);enclosing_scopes.symtable=scopes.symtable;bind(this.target.id,enclosing_scopes);return"("+$B.js_from_ast(this.target,enclosing_scopes)+" = "+$B.js_from_ast(this.value,scopes)+")"};$B.ast.Nonlocal.prototype.to_js=function(scopes){var scope=$B.last(scopes);for(var name of this.names){scope.nonlocals.add(name)}return""};$B.ast.Pass.prototype.to_js=function(scopes){return`$B.set_lineno(frame, ${this.lineno})\n`+"void(0)"};$B.ast.Raise.prototype.to_js=function(scopes){var js=`$B.set_lineno(frame, ${this.lineno})\n`+"$B.$raise(";if(this.exc){js+=$B.js_from_ast(this.exc,scopes)}if(this.cause){js+=", "+$B.js_from_ast(this.cause,scopes)}return js+")"};$B.ast.Return.prototype.to_js=function(scopes){compiler_check(this);var js=`$B.set_lineno(frame, ${this.lineno})\n`+"var result = "+(this.value?$B.js_from_ast(this.value,scopes):" _b_.None")+"\n";if(trace){js+=`if(frame.$f_trace !== _b_.None){\n`+`$B.trace_return(result)\n}\n`}js+=`$B.leave_frame()\nreturn result\n`;return js};$B.ast.Set.prototype.to_js=function(scopes){var elts=[];for(var elt of this.elts){var js;if(elt instanceof $B.ast.Constant){js=`{constant: [${$B.js_from_ast(elt,scopes)}, `+`${$B.$hash(elt.value)}]}`}else if(elt instanceof $B.ast.Starred){js=`{starred: ${$B.js_from_ast(elt.value,scopes)}}`}else{js=`{item: ${$B.js_from_ast(elt,scopes)}}`}elts.push(js)}return`_b_.set.$literal([${elts.join(", ")}])`};$B.ast.SetComp.prototype.to_js=function(scopes){return make_comp.bind(this)(scopes)};$B.ast.Slice.prototype.to_js=function(scopes){var lower=this.lower?$B.js_from_ast(this.lower,scopes):"_b_.None",upper=this.upper?$B.js_from_ast(this.upper,scopes):"_b_.None",step=this.step?$B.js_from_ast(this.step,scopes):"_b_.None";return`_b_.slice.$fast_slice(${lower}, ${upper}, ${step})`};$B.ast.Starred.prototype.to_js=function(scopes){if(this.$handled){return`_b_.list.$unpack(${$B.js_from_ast(this.value,scopes)})`}if(this.ctx instanceof $B.ast.Store){compiler_error(this,"starred assignment target must be in a list or tuple")}else{compiler_error(this,"can't use starred expression here")}};$B.ast.Subscript.prototype.to_js=function(scopes){var value=$B.js_from_ast(this.value,scopes),slice=$B.js_from_ast(this.slice,scopes);if(this.slice instanceof $B.ast.Slice){return`$B.getitem_slice(${value}, ${slice})`}else{var position=encode_position(this.value.col_offset,this.slice.col_offset,this.slice.end_col_offset);return`$B.$getitem(${value}, ${slice},${position})`}};$B.ast.Try.prototype.to_js=function(scopes){compiler_check(this);var id=$B.UUID(),has_except_handlers=this.handlers.length>0,has_else=this.orelse.length>0,has_finally=this.finalbody.length>0;var js=`$B.set_lineno(frame, ${this.lineno})\ntry{\n`;js+=`var stack_length_${id} 
= $B.count_frames()\n`;if(has_finally){js+=`var save_frame_obj_${id} = $B.frames_obj\n`}if(has_else){js+=`var failed${id} = false\n`}var try_scope=copy_scope($B.last(scopes));scopes.push(try_scope);js+=add_body(this.body,scopes)+"\n";if(has_except_handlers){var err="err"+id;js+="}\n";js+=`catch(${err}){\n`+`$B.set_exc(${err}, frame)\n`;if(trace){js+=`if(frame.$f_trace !== _b_.None){\n`+`frame.$f_trace = $B.trace_exception()}\n`}if(has_else){js+=`failed${id} = true\n`}var first=true,has_untyped_except=false;for(var handler of this.handlers){if(first){js+="if";first=false}else{js+="}else if"}js+=`($B.set_lineno(frame, ${handler.lineno})`;if(handler.type){js+=` && $B.is_exc(${err}, `;if(handler.type instanceof $B.ast.Tuple){js+=`${$B.js_from_ast(handler.type,scopes)}`}else{js+=`[${$B.js_from_ast(handler.type,scopes)}]`}js+=`)){\n`}else{has_untyped_except=true;js+="){\n"}if(handler.name){bind(handler.name,scopes);var mangled=mangle(scopes,try_scope,handler.name);js+=`locals.${mangled} = ${err}\n`}js+=add_body(handler.body,scopes)+"\n";if(!($B.last(handler.body)instanceof $B.ast.Return)){js+="$B.del_exc(frame)\n"}}if(!has_untyped_except){js+=`}else{\nthrow ${err}\n`}js+="}\n"}if(has_else||has_finally){js+="}\n";js+="finally{\n";var finalbody=`var exit = false\n`+`if($B.count_frames() < stack_length_${id}){\n`+`exit = true\n`+`$B.frame_obj = $B.push_frame(frame)\n`+`}\n`+add_body(this.finalbody,scopes);if(this.finalbody.length>0&&!($B.last(this.finalbody)instanceof $B.ast.Return)){finalbody+=`\nif(exit){\n`+`$B.leave_frame()\n`+`}`}var elsebody=`if($B.count_frames() == stack_length_${id} `+`&& ! failed${id}){\n`+add_body(this.orelse,scopes)+"\n}";if(has_else&&has_finally){js+=`try{\n`+elsebody+"\n}\n"+`finally{\n`+finalbody+"}\n"}else if(has_else&&!has_finally){js+=elsebody}else{js+=finalbody}js+="\n}\n"}else{js+="}\n"}scopes.pop();return js};$B.ast.TryStar.prototype.to_js=function(scopes){var id=$B.UUID(),has_except_handlers=this.handlers.length>0,has_else=this.orelse.length>0,has_finally=this.finalbody.length>0;var js=`$B.set_lineno(frame, ${this.lineno})\ntry{\n`;js+=`var stack_length_${id} = $B.count_frames()\n`;if(has_finally){js+=`var save_frame_obj_${id} = $B.frame_obj\n`}if(has_else){js+=`var failed${id} = false\n`}var try_scope=copy_scope($B.last(scopes));scopes.push(try_scope);js+=add_body(this.body,scopes)+"\n";if(has_except_handlers){var err="err"+id;js+="}\n";js+=`catch(${err}){\n`+`$B.set_exc(${err}, frame)\n`;if(trace){js+=`if(frame.$f_trace !== _b_.None){\n`+`frame.$f_trace = $B.trace_exception()\n`+`}\n`}js+=`if(! $B.$isinstance(${err}, _b_.BaseExceptionGroup)){\n`+`${err} = _b_.BaseExceptionGroup.$factory(_b_.None, [${err}])\n`+"}\n"+`function fake_split(exc, condition){\n`+`return condition(exc) ? 
`+`$B.fast_tuple([exc, _b_.None]) : $B.fast_tuple([_b_.None, exc])\n`+"}\n";if(has_else){js+=`failed${id} = true\n`}var first=true,has_untyped_except=false;for(var handler of this.handlers){js+=`$B.set_lineno(frame, ${handler.lineno})\n`;if(handler.type){js+="var condition = function(exc){\n"+" return $B.$isinstance(exc, "+`${$B.js_from_ast(handler.type,scopes)})\n`+"}\n"+`var klass = $B.get_class(${err}),\n`+`split_method = $B.$getattr(klass, 'split'),\n`+`split = $B.$call(split_method)(${err}, condition),\n`+" matching = split[0],\n"+" rest = split[1]\n"+"if(matching.exceptions !== _b_.None){\n"+" for(var err of matching.exceptions){\n";if(handler.name){bind(handler.name,scopes);var mangled=mangle(scopes,try_scope,handler.name);js+=`locals.${mangled} = ${err}\n`}js+=add_body(handler.body,scopes)+"\n";if(!($B.last(handler.body)instanceof $B.ast.Return)){js+="$B.del_exc(frame)\n"}js+="}\n";js+="}\n";js+=`${err} = rest\n`}}js+=`if(${err}.exceptions !== _b_.None){\n`+`throw ${err}\n`+"}\n"}if(has_else||has_finally){js+="}\n";js+="finally{\n";var finalbody=`var exit = false\n`+`if($B.count_frames() < stack_length_${id}){\n`+`exit = true\n`+`$B.frame_obj = $B.push_frame(frame)\n`+`}\n`+add_body(this.finalbody,scopes);if(this.finalbody.length>0&&!($B.last(this.finalbody)instanceof $B.ast.Return)){finalbody+=`\nif(exit){\n`+`$B.leave_frame(locals)\n`+`}`}var elsebody=`if($B.count_frames() == stack_length_${id} `+`&& ! failed${id}){\n`+add_body(this.orelse,scopes)+"\n}";if(has_else&&has_finally){js+=`try{\n`+elsebody+"\n}\n"+`finally{\n`+finalbody+"}\n"}else if(has_else&&!has_finally){js+=elsebody}else{js+=finalbody}js+="\n}\n"}else{js+="}\n"}scopes.pop();return js};$B.ast.Tuple.prototype.to_js=function(scopes){return list_or_tuple_to_js.bind(this)("$B.fast_tuple",scopes)};$B.ast.TypeAlias.prototype.to_js=function(scopes){var type_param_scope=new Scope("type_params","type_params",this.type_params);scopes.push(type_param_scope);var type_alias_scope=new Scope("type_alias","type_alias",this);scopes.push(type_alias_scope);var type_params_names=[];for(var type_param of this.type_params){if(type_param instanceof $B.ast.TypeVar){type_params_names.push(type_param.name)}else if(type_param instanceof $B.ast.TypeVarTuple||type_param instanceof $B.ast.ParamSpec){type_params_names.push(type_param.name.id)}}var type_params_list=type_params_names.map((x=>`'${x}'`));for(var name of type_params_names){bind(name,scopes)}var qualified_name=qualified_scope_name(scopes,type_alias_scope);var value=this.value.to_js(scopes),type_params=[];scopes.pop();scopes.pop();var js=`$B.$import('_typing')\n`;js+=`var locals_${qualified_scope_name(scopes,type_param_scope)} = {}\n`;js+=`function TYPE_PARAMS_OF_${this.name.id}(){\n`+`var locals_${qualified_name} = {},\n`+` locals = locals_${qualified_name}, \n`+` type_params = $B.fast_tuple([])\n`;for(var i=0,len=this.type_params.length;i0){js+=`\nif(no_break_${id}){\n`+add_body(this.orelse,scopes)+"}\n"}return js};var with_counter=[0];$B.ast.With.prototype.to_js=function(scopes){function add_item(item,js){var id=$B.UUID();var s=`var mgr_${id} = `+$B.js_from_ast(item.context_expr,scopes)+",\n"+`klass = $B.get_class(mgr_${id})\n`+`try{\n`+`var exit_${id} = $B.$getattr(klass, '__exit__'),\n`+`enter_${id} = $B.$getattr(klass, '__enter__')\n`+`}catch(err){\n`+`var klass_name = $B.class_name(mgr_${id})\n`+`throw _b_.TypeError.$factory("'" + klass_name + `+`"' object does not support the con`+`text manager protocol")\n`+`}\n`+`var value_${id} = 
$B.$call(enter_${id})(mgr_${id}),\n`+`exc_${id} = true\n`;if(in_generator){s+=`locals.$context_managers = locals.$context_managers || []\n`+`locals.$context_managers.push(mgr_${id})\n`}s+="try{\ntry{\n";if(item.optional_vars){var value={to_js:function(){return`value_${id}`}};copy_position(value,_with);var assign=new $B.ast.Assign([item.optional_vars],value);copy_position(assign,_with);s+=assign.to_js(scopes)+"\n"}s+=js;s+=`}catch(err_${id}){\n`+`frame.$lineno = ${lineno}\n`+`exc_${id} = false\n`+`err_${id} = $B.exception(err_${id}, frame)\n`+`var $b = exit_${id}(mgr_${id}, err_${id}.__class__, `+`err_${id}, $B.$getattr(err_${id}, '__traceback__'))\n`+`if(! $B.$bool($b)){\n`+`throw err_${id}\n`+`}\n`+`}\n`;s+=`}\nfinally{\n`+`frame.$lineno = ${lineno}\n`+(in_generator?`locals.$context_managers.pop()\n`:"")+`if(exc_${id}){\n`+`try{\n`+`exit_${id}(mgr_${id}, _b_.None, _b_.None, _b_.None)\n`+`}catch(err){\n`+`if($B.count_frames() < stack_length){\n`+`$B.frame_obj = $B.push_frame(frame)\n`+`}\n`+`throw err\n`+`}\n`+`}\n`+`}\n`;return s}var _with=this,scope=last_scope(scopes),lineno=this.lineno;scope.needs_stack_length=true;var js=add_body(this.body,scopes)+"\n";var in_generator=scopes.symtable.table.blocks.get(fast_id(scope.ast)).generator;for(var item of this.items.slice().reverse()){js=add_item(item,js)}return`$B.set_lineno(frame, ${this.lineno})\n`+js};$B.ast.Yield.prototype.to_js=function(scopes){var scope=last_scope(scopes);if(scope.type!="def"){compiler_error(this,"'yield' outside function")}last_scope(scopes).is_generator=true;var value=this.value?$B.js_from_ast(this.value,scopes):"_b_.None";return`yield ${value}`};$B.ast.YieldFrom.prototype.to_js=function(scopes){var scope=last_scope(scopes);if(scope.type!="def"){compiler_error(this,"'yield' outside function")}scope.is_generator=true;var value=$B.js_from_ast(this.value,scopes);var n=$B.UUID();return`yield* (function* f(){\n var _i${n} = _b_.iter(${value}),\n _r${n}\n var failed${n} = false\n try{\n var _y${n} = _b_.next(_i${n})\n }catch(_e){\n $B.set_exc(_e, frame)\n failed${n} = true\n $B.pmframe = $B.frame_obj.frame\n _e = $B.exception(_e)\n if(_e.__class__ === _b_.StopIteration){\n var _r${n} = $B.$getattr(_e, "value")\n }else{\n throw _e\n }\n }\n if(! failed${n}){\n while(true){\n var failed1${n} = false\n try{\n $B.leave_frame()\n var _s${n} = yield _y${n}\n $B.frame_obj = $B.push_frame(frame)\n }catch(_e){\n $B.set_exc(_e, frame)\n if(_e.__class__ === _b_.GeneratorExit){\n var failed2${n} = false\n try{\n var _m${n} = $B.$getattr(_i${n}, "close")\n }catch(_e1){\n failed2${n} = true\n if(_e1.__class__ !== _b_.AttributeError){\n throw _e1\n }\n }\n if(! failed2${n}){\n $B.$call(_m${n})()\n }\n throw _e\n }else if($B.is_exc(_e, [_b_.BaseException])){\n var sys_module = $B.imported._sys,\n _x${n} = sys_module.exc_info()\n var failed3${n} = false\n try{\n var _m${n} = $B.$getattr(_i${n}, "throw")\n }catch(err){\n failed3${n} = true\n if($B.is_exc(err, [_b_.AttributeError])){\n throw err\n }\n }\n if(! failed3${n}){\n try{\n _y${n} = $B.$call(_m${n}).apply(null,\n _b_.list.$factory(_x${n}))\n }catch(err){\n if($B.is_exc(err, [_b_.StopIteration])){\n _r${n} = $B.$getattr(err, "value")\n break\n }\n throw err\n }\n }\n }\n }\n if(! 
failed1${n}){\n try{\n if(_s${n} === _b_.None){\n _y${n} = _b_.next(_i${n})\n }else{\n _y${n} = $B.$call($B.$getattr(_i${n}, "send"))(_s${n})\n }\n }catch(err){\n if($B.is_exc(err, [_b_.StopIteration])){\n _r${n} = $B.$getattr(err, "value")\n break\n }\n throw err\n }\n }\n }\n }\n return _r${n}\n })()`};var state={};$B.js_from_root=function(arg){var ast_root=arg.ast,symtable=arg.symtable,filename=arg.filename,namespaces=arg.namespaces,imported=arg.imported;if($B.show_ast_dump){console.log($B.ast_dump(ast_root))}if($B.compiler_check){$B.compiler_check(ast_root,symtable)}var scopes=[];state.filename=filename;scopes.symtable=symtable;scopes.filename=filename;scopes.namespaces=namespaces;scopes.imported=imported;scopes.imports={};var js=ast_root.to_js(scopes);return{js:js,imports:scopes.imports}};$B.js_from_ast=function(ast,scopes){if(!scopes.symtable){throw Error("perdu symtable")}var js="";scopes=scopes||[];if(ast.to_js!==undefined){if(ast.col_offset===undefined){var klass=ast.constructor.$name;if(["match_case"].indexOf(klass)==-1){console.log("pas de col offset pour",klass);console.log(ast);throw Error("no col offset");alert()}}return ast.to_js(scopes)}console.log("unhandled",ast.constructor.$name,ast,typeof ast);return"// unhandled class ast."+ast.constructor.$name}})(__BRYTHON__);(function($B){var _b_=$B.builtins;var GLOBAL_PARAM="name '%s' is parameter and global",NONLOCAL_PARAM="name '%s' is parameter and nonlocal",GLOBAL_AFTER_ASSIGN="name '%s' is assigned to before global declaration",NONLOCAL_AFTER_ASSIGN="name '%s' is assigned to before nonlocal declaration",GLOBAL_AFTER_USE="name '%s' is used prior to global declaration",NONLOCAL_AFTER_USE="name '%s' is used prior to nonlocal declaration",GLOBAL_ANNOT="annotated name '%s' can't be global",NONLOCAL_ANNOT="annotated name '%s' can't be nonlocal",IMPORT_STAR_WARNING="import * only allowed at module level",NAMED_EXPR_COMP_IN_CLASS="assignment expression within a comprehension cannot be used in a class body",NAMED_EXPR_COMP_CONFLICT="assignment expression cannot rebind comprehension iteration variable '%s'",NAMED_EXPR_COMP_INNER_LOOP_CONFLICT="comprehension inner loop cannot rebind assignment expression target '%s'",NAMED_EXPR_COMP_ITER_EXPR="assignment expression cannot be used in a comprehension iterable expression",ANNOTATION_NOT_ALLOWED="'%s' can not be used within an annotation",DUPLICATE_ARGUMENT="duplicate argument '%s' in function definition",TYPEVAR_BOUND_NOT_ALLOWED="'%s' can not be used within a TypeVar bound",TYPEALIAS_NOT_ALLOWED="'%s' can not be used within a type alias",TYPEPARAM_NOT_ALLOWED="'%s' can not be used within the definition of a generic",DUPLICATE_TYPE_PARAM="duplicate type parameter '%s'";var DEF_GLOBAL=1,DEF_LOCAL=2,DEF_PARAM=2<<1,DEF_NONLOCAL=2<<2,USE=2<<3,DEF_FREE=2<<4,DEF_FREE_CLASS=2<<5,DEF_IMPORT=2<<6,DEF_ANNOT=2<<7,DEF_COMP_ITER=2<<8,DEF_TYPE_PARAM=2<<9,DEF_COMP_CELL=2<<10;var DEF_BOUND=DEF_LOCAL|DEF_PARAM|DEF_IMPORT;var SCOPE_OFFSET=12,SCOPE_MASK=DEF_GLOBAL|DEF_LOCAL|DEF_PARAM|DEF_NONLOCAL;var LOCAL=1,GLOBAL_EXPLICIT=2,GLOBAL_IMPLICIT=3,FREE=4,CELL=5;var GENERATOR=1,GENERATOR_EXPRESSION=2;var CO_FUTURE_ANNOTATIONS=16777216;var TYPE_CLASS=1,TYPE_FUNCTION=0,TYPE_MODULE=2;var NULL=undefined;var ModuleBlock=2,ClassBlock=1,FunctionBlock=0,AnnotationBlock=4,TypeVarBoundBlock=5,TypeAliasBlock=6,TypeParamBlock=7;var PyExc_SyntaxError=_b_.SyntaxError;function assert(test){if(!$B.$bool(test)){console.log("test fails",test);throw Error("test fails")}}function 
LOCATION(x){return[x.lineno,x.col_offset,x.end_lineno,x.end_col_offset]}function ST_LOCATION(x){return[x.lineno,x.col_offset,x.end_lineno,x.end_col_offset]}function _Py_Mangle(privateobj,ident){var result,nlen,plen,ipriv,maxchar;if(privateobj==NULL||!ident.startsWith("__")){return ident}nlen=ident.length;plen=privateobj.length;if(ident.endsWith("__")||ident.search(/\./)!=-1){return ident}ipriv=0;while(privateobj[ipriv]=="_"){ipriv++}if(ipriv==plen){return ident}var prefix=privateobj.substr(ipriv);return"_"+prefix+ident}var top=NULL,lambda=NULL,genexpr=NULL,listcomp=NULL,setcomp=NULL,dictcomp=NULL,__class__=NULL,_annotation=NULL;var NoComprehension=0,ListComprehension=1,DictComprehension=2,SetComprehension=3,GeneratorExpression=4;var internals={};function GET_IDENTIFIER(VAR){return VAR}function Symtable(){this.filename=NULL;this.stack=[];this.blocks=new Map;this.cur=NULL;this.private=NULL}function id(obj){if(obj.$id!==undefined){return obj.$id}return obj.$id=$B.UUID()}function ste_new(st,name,block,key,lineno,col_offset,end_lineno,end_col_offset){var ste;ste={table:st,id:id(key),name:name,directives:NULL,type:block,nested:0,free:0,varargs:0,varkeywords:0,opt_lineno:0,opt_col_offset:0,lineno:lineno,col_offset:col_offset,end_lineno:end_lineno,end_col_offset:end_col_offset};if(st.cur!=NULL&&(st.cur.nested||st.cur.type==FunctionBlock)){ste.nested=1}ste.child_free=0;ste.generator=0;ste.coroutine=0;ste.comprehension=NoComprehension;ste.returns_value=0;ste.needs_class_closure=0;ste.comp_inlined=0;ste.comp_iter_target=0;ste.comp_iter_expr=0;ste.symbols=$B.empty_dict();ste.varnames=[];ste.children=[];st.blocks.set(ste.id,ste);return ste}$B._PySymtable_Build=function(mod,filename,future){var st=new Symtable,seq;st.filename=filename;st.future=future||{};st.type=TYPE_MODULE;if(!symtable_enter_block(st,"top",ModuleBlock,mod,0,0,0,0)){return NULL}st.top=st.cur;switch(mod.constructor){case $B.ast.Module:seq=mod.body;for(var item of seq){visitor.stmt(st,item)}break;case $B.ast.Expression:visitor.expr(st,mod.body);break;case $B.ast.Interactive:seq=mod.body;for(var item of seq){visitor.stmt(st,item)}break}symtable_analyze(st);return st.top};function PySymtable_Lookup(st,key){return st.blocks.get(key)}function _PyST_GetSymbol(ste,name){if(!_b_.dict.$contains_string(ste.symbols,name)){return 0}return _b_.dict.$getitem_string(ste.symbols,name)}function _PyST_GetScope(ste,name){var symbol=_PyST_GetSymbol(ste,name);return symbol>>SCOPE_OFFSET&SCOPE_MASK}function _PyST_IsFunctionLike(ste){return ste.type==FunctionBlock||ste.type==TypeVarBoundBlock||ste.type==TypeAliasBlock||ste.type==TypeParamBlock}function PyErr_Format(exc_type,message,arg){if(arg){message=_b_.str.__mod__(message,arg)}return exc_type.$factory(message)}function PyErr_SetString(exc_type,message){return exc_type.$factory(message)}function set_exc_info(exc,filename,lineno,offset,end_lineno,end_offset){exc.filename=filename;exc.lineno=lineno;exc.offset=offset+1;exc.end_lineno=end_lineno;exc.end_offset=end_offset+1;var src=$B.file_cache[filename];if(src!==undefined){var lines=src.split("\n");exc.text=lines[lineno-1]}else{exc.text=""}exc.args[1]=[filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset]}function error_at_directive(exc,ste,name){var data;assert(ste.directives);for(var data of ste.directives){if(data[0]==name){set_exc_info(exc,ste.table.filename,data[1],data[2],data[3],data[4]);return 0}}throw _b_.RuntimeError.$factory("BUG: internal directive bookkeeping broken")}function SET_SCOPE(DICT,NAME,I){DICT[NAME]=I}function 
is_free_in_any_child(entry,key){for(var child_ste of entry.ste_children){var scope=_PyST_GetScope(child_ste,key);if(scope==FREE){return 1}}return 0}function inline_comprehension(ste,comp,scopes,comp_free,inlined_cells){var pos=0;for(var item of _b_.dict.$iter_items_with_hash(comp.symbols)){var k=item.key,comp_flags=item.value;if(comp_flags&DEF_PARAM){continue}var scope=comp_flags>>SCOPE_OFFSET&SCOPE_MASK;var only_flags=comp_flags&(1<=0;i--){var entry=ste.children[i];if(entry.comp_inlined){ste.children.splice(i,0,...entry.children)}}if(_PyST_IsFunctionLike(ste)&&!analyze_cells(scopes,newfree,inlined_cells)){return 0}else if(ste.type===ClassBlock&&!drop_class_free(ste,newfree)){return 0}if(!update_symbols(ste.symbols,scopes,bound,newfree,inlined_cells,ste.type===ClassBlock||ste.can_see_class_scope)){return 0}Set_Union(free,newfree);success=1;return success}function PySet_New(arg){if(arg===NULL){return new Set}return new Set(arg)}function Set_Union(setA,setB){for(let elem of setB){setA.add(elem)}}function analyze_child_block(entry,bound,free,global,typeparams,class_entry,child_free){var temp_bound=PySet_New(bound),temp_free=PySet_New(free),temp_global=PySet_New(global),temp_typeparams=PySet_New(typeparams);if(!analyze_block(entry,temp_bound,temp_free,temp_global,temp_typeparams,class_entry)){return 0}Set_Union(child_free,temp_free);return 1}function symtable_analyze(st){var free=new Set,global=new Set,typeparams=new Set;return analyze_block(st.top,NULL,free,global,typeparams,NULL)}function symtable_exit_block(st){var size=st.stack.length;st.cur=NULL;if(size){st.stack.pop();if(--size){st.cur=st.stack[size-1]}}return 1}function symtable_enter_block(st,name,block,ast,lineno,col_offset,end_lineno,end_col_offset){var prev;if(ast===undefined){console.log("call ste new, key undef",st,name)}var ste=ste_new(st,name,block,ast,lineno,col_offset,end_lineno,end_col_offset);st.stack.push(ste);prev=st.cur;if(prev){ste.comp_iter_expr=prev.comp_iter_expr}st.cur=ste;if(block===AnnotationBlock){return 1}if(block===ModuleBlock){st.global=st.cur.symbols}if(prev){prev.children.push(ste)}return 1}function symtable_lookup(st,name){var mangled=_Py_Mangle(st.private,name);if(!mangled){return 0}var ret=_PyST_GetSymbol(st.cur,mangled);return ret}function symtable_add_def_helper(st,name,flag,ste,_location){var o,dict,val,mangled=_Py_Mangle(st.private,name);if(!mangled){return 0}dict=ste.symbols;if(_b_.dict.$contains_string(dict,mangled)){o=_b_.dict.$getitem_string(dict,mangled);val=o;if(flag&DEF_PARAM&&val&DEF_PARAM){var exc=PyErr_Format(_b_.SyntaxError,DUPLICATE_ARGUMENT,name);set_exc_info(exc,st.filename,..._location);throw exc}if(flag&DEF_TYPE_PARAM&&val&DEF_TYPE_PARAM){var exc=PyErr_Format(_b_.SyntaxError,DUPLICATE_TYPE_PARAM,name);set_exc_info(exc,st.filename,...location);throw exc}val|=flag}else{val=flag}if(ste.comp_iter_target){if(val&(DEF_GLOBAL|DEF_NONLOCAL)){var exc=PyErr_Format(_b_.SyntaxError,NAMED_EXPR_COMP_INNER_LOOP_CONFLICT,name);set_exc_info(exc,st.filename,..._location);throw exc}val|=DEF_COMP_ITER}o=val;if(o==NULL){return 0}_b_.dict.$setitem(dict,mangled,o);if(flag&DEF_PARAM){ste.varnames.push(mangled)}else if(flag&DEF_GLOBAL){val=flag;if(st.global.hasOwnProperty(mangled)){val|=st.global[mangled]}o=val;if(o==NULL){return 0}st.global[mangled]=o}return 1}function symtable_add_def(st,name,flag,_location){return symtable_add_def_helper(st,name,flag,st.cur,_location)}function symtable_enter_type_param_block(st,name,ast,has_defaults,has_kwdefaults,kind,_location){var 
prev=st.cur,current_type=st.cur.type;if(!symtable_enter_block(st,name,TypeParamBlock,ast,..._location)){return 0}prev.$type_param=st.cur;if(current_type===ClassBlock){st.cur.can_see_class_scope=1;if(!symtable_add_def(st,"__classdict__",USE,_location)){return 0}}if(kind==$B.ast.ClassDef){if(!symtable_add_def(st,"type_params",DEF_LOCAL,_location)){return 0}if(!symtable_add_def(st,"type_params",USE,_location)){return 0}st.st_private=name;var generic_base=".generic_base";if(!symtable_add_def(st,generic_base,DEF_LOCAL,_location)){return 0}if(!symtable_add_def(st,generic_base,USE,_location)){return 0}}if(has_defaults){var defaults=".defaults";if(!symtable_add_def(st,defaults,DEF_PARAM,_location)){return 0}}if(has_kwdefaults){var kwdefaults=".kwdefaults";if(!symtable_add_def(st,kwdefaults,DEF_PARAM,_location)){return 0}}return 1}function VISIT_QUIT(ST,X){return X}function VISIT(ST,TYPE,V){var f=visitor[TYPE];if(!f(ST,V)){VISIT_QUIT(ST,0)}}function VISIT_SEQ(ST,TYPE,SEQ){for(var elt of SEQ){if(!visitor[TYPE](ST,elt)){VISIT_QUIT(ST,0)}}}function VISIT_SEQ_TAIL(ST,TYPE,SEQ,START){for(var i=START,len=SEQ.length;i0){if(!symtable_enter_type_param_block(st,s.name,s.type_params,s.args.defaults!=NULL,has_kwonlydefaults(s.args.kwonlyargs,s.args.kw_defaults),s.constructor,LOCATION(s))){VISIT_QUIT(st,0)}VISIT_SEQ(st,type_param,s.type_params)}if(!visitor.annotations(st,s,s.args,s.returns))VISIT_QUIT(st,0);if(s.decorator_list){VISIT_SEQ(st,expr,s.decorator_list)}if(!symtable_enter_block(st,s.name,FunctionBlock,s,...LOCATION(s))){VISIT_QUIT(st,0)}VISIT(st,"arguments",s.args);VISIT_SEQ(st,stmt,s.body);if(!symtable_exit_block(st)){VISIT_QUIT(st,0)}if(s.type_params.length>0){if(!symtable_exit_block(st)){VISIT_QUIT(st,0)}}break;case $B.ast.ClassDef:var tmp;if(!symtable_add_def(st,s.name,DEF_LOCAL,LOCATION(s)))VISIT_QUIT(st,0);VISIT_SEQ(st,expr,s.bases);VISIT_SEQ(st,keyword,s.keywords);if(s.decorator_list)VISIT_SEQ(st,expr,s.decorator_list);if(s.type_params.length>0){if(!symtable_enter_type_param_block(st,s.name,s.type_params,false,false,s.constructor,LOCATION(s))){VISIT_QUIT(st,0)}VISIT_SEQ(st,type_param,s.type_params)}VISIT_SEQ(st,expr,s.bases);VISIT_SEQ(st,keyword,s.keywords);if(!symtable_enter_block(st,s.name,ClassBlock,s,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset))VISIT_QUIT(st,0);tmp=st.private;st.private=s.name;if(s.type_params.length>0){if(!symtable_add_def(st,"__type_params__",DEF_LOCAL,LOCATION(s))){VISIT_QUIT(st,0)}var type_params=".type_params";if(!symtable_add_def(st,"type_params",USE,LOCATION(s))){VISIT_QUIT(st,0)}}VISIT_SEQ(st,stmt,s.body);st.private=tmp;if(!symtable_exit_block(st))VISIT_QUIT(st,0);if(s.type_params.length>0){if(!symtable_exit_block(st))VISIT_QUIT(st,0)}break;case $B.ast.TypeAlias:VISIT(st,expr,s.name);assert(s.name instanceof $B.ast.Name);var name=s.name.id,is_in_class=st.cur.type===ClassBlock,is_generic=s.type_params.length>0;if(is_generic){if(!symtable_enter_type_param_block(st,name,s.type_params,false,false,s.kind,LOCATION(s))){VISIT_QUIT(st,0)}VISIT_SEQ(st,type_param,s.type_params)}if(!symtable_enter_block(st,name,TypeAliasBlock,s,LOCATION(s))){VISIT_QUIT(st,0)}st.cur.can_see_class_scope=is_in_class;if(is_in_class&&!symtable_add_def(st,"__classdict__",USE,LOCATION(s.value))){VISIT_QUIT(st,0)}VISIT(st,expr,s.value);if(!symtable_exit_block(st)){VISIT_QUIT(st,0)}if(is_generic){if(!symtable_exit_block(st))VISIT_QUIT(st,0)}break;case $B.ast.Return:if(s.value){VISIT(st,expr,s.value);st.cur.returns_value=1}break;case $B.ast.Delete:VISIT_SEQ(st,expr,s.targets);break;case 
$B.ast.Assign:VISIT_SEQ(st,expr,s.targets);VISIT(st,expr,s.value);break;case $B.ast.AnnAssign:if(s.target instanceof $B.ast.Name){var e_name=s.target;var cur=symtable_lookup(st,e_name.id);if(cur<0){VISIT_QUIT(st,0)}if(cur&(DEF_GLOBAL|DEF_NONLOCAL)&&st.cur.symbols!=st.global&&s.simple){var exc=PyErr_Format(_b_.SyntaxError,cur&DEF_GLOBAL?GLOBAL_ANNOT:NONLOCAL_ANNOT,e_name.id);exc.args[1]=[st.filename,s.lineno,s.col_offset+1,s.end_lineno,s.end_col_offset+1];throw exc}if(s.simple&&!symtable_add_def(st,e_name.id,DEF_ANNOT|DEF_LOCAL,LOCATION(e_name))){VISIT_QUIT(st,0)}else{if(s.value&&!symtable_add_def(st,e_name.id,DEF_LOCAL,LOCATION(e_name))){VISIT_QUIT(st,0)}}}else{VISIT(st,expr,s.target)}if(!visitor.annotation(st,s.annotation)){VISIT_QUIT(st,0)}if(s.value){VISIT(st,expr,s.value)}break;case $B.ast.AugAssign:VISIT(st,expr,s.target);VISIT(st,expr,s.value);break;case $B.ast.For:VISIT(st,expr,s.target);VISIT(st,expr,s.iter);VISIT_SEQ(st,stmt,s.body);if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)}break;case $B.ast.While:VISIT(st,expr,s.test);VISIT_SEQ(st,stmt,s.body);if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)}break;case $B.ast.If:VISIT(st,expr,s.test);VISIT_SEQ(st,stmt,s.body);if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)}break;case $B.ast.Match:VISIT(st,expr,s.subject);VISIT_SEQ(st,match_case,s.cases);break;case $B.ast.Raise:if(s.exc){VISIT(st,expr,s.exc);if(s.cause){VISIT(st,expr,s.cause)}}break;case $B.ast.Try:VISIT_SEQ(st,stmt,s.body);VISIT_SEQ(st,stmt,s.orelse);VISIT_SEQ(st,excepthandler,s.handlers);VISIT_SEQ(st,stmt,s.finalbody);break;case $B.ast.TryStar:VISIT_SEQ(st,stmt,s.body);VISIT_SEQ(st,stmt,s.orelse);VISIT_SEQ(st,excepthandler,s.handlers);VISIT_SEQ(st,stmt,s.finalbody);break;case $B.ast.Assert:VISIT(st,expr,s.test);if(s.msg){VISIT(st,expr,s.msg)}break;case $B.ast.Import:VISIT_SEQ(st,alias,s.names);break;case $B.ast.ImportFrom:VISIT_SEQ(st,alias,s.names);break;case $B.ast.Global:var seq=s.names;for(var name of seq){var cur=symtable_lookup(st,name);if(cur<0){VISIT_QUIT(st,0)}if(cur&(DEF_PARAM|DEF_LOCAL|USE|DEF_ANNOT)){var msg;if(cur&DEF_PARAM){msg=GLOBAL_PARAM}else if(cur&USE){msg=GLOBAL_AFTER_USE}else if(cur&DEF_ANNOT){msg=GLOBAL_ANNOT}else{msg=GLOBAL_AFTER_ASSIGN}var exc=PyErr_Format(_b_.SyntaxError,msg,name);set_exc_info(exc,st.filename,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset);throw exc}if(!symtable_add_def(st,name,DEF_GLOBAL,LOCATION(s)))VISIT_QUIT(st,0);if(!symtable_record_directive(st,name,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset))VISIT_QUIT(st,0)}break;case $B.ast.Nonlocal:var seq=s.names;for(var name of seq){var cur=symtable_lookup(st,name);if(cur<0){VISIT_QUIT(st,0)}if(cur&(DEF_PARAM|DEF_LOCAL|USE|DEF_ANNOT)){var msg;if(cur&DEF_PARAM){msg=NONLOCAL_PARAM}else if(cur&USE){msg=NONLOCAL_AFTER_USE}else if(cur&DEF_ANNOT){msg=NONLOCAL_ANNOT}else{msg=NONLOCAL_AFTER_ASSIGN}var exc=PyErr_Format(_b_.SyntaxError,msg,name);set_exc_info(exc,st.filename,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset);throw exc}if(!symtable_add_def(st,name,DEF_NONLOCAL,LOCATION(s)))VISIT_QUIT(st,0);if(!symtable_record_directive(st,name,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset))VISIT_QUIT(st,0)}break;case $B.ast.Expr:VISIT(st,expr,s.value);break;case $B.ast.Pass:case $B.ast.Break:case $B.ast.Continue:break;case $B.ast.With:VISIT_SEQ(st,"withitem",s.items);VISIT_SEQ(st,stmt,s.body);break;case 
$B.ast.AsyncFunctionDef:if(!symtable_add_def(st,s.name,DEF_LOCAL,LOCATION(s)))VISIT_QUIT(st,0);if(s.args.defaults)VISIT_SEQ(st,expr,s.args.defaults);if(s.args.kw_defaults)VISIT_SEQ_WITH_NULL(st,expr,s.args.kw_defaults);if(!visitor.annotations(st,s,s.args,s.returns))VISIT_QUIT(st,0);if(s.decorator_list)VISIT_SEQ(st,expr,s.decorator_list);if(s.type_params.length>0){if(!symtable_enter_type_param_block(st,s.name,s.type_params,s.args.defaults!=NULL,has_kwonlydefaults(s.args.kwonlyargs,s.args.kw_defaults),s.constructor,LOCATION(s))){VISIT_QUIT(st,0)}VISIT_SEQ(st,type_param,s.type_params)}if(!visitor.annotations(st,s,s.args,s.returns))VISIT_QUIT(st,0);if(!symtable_enter_block(st,s.name,FunctionBlock,s,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset))VISIT_QUIT(st,0);st.cur.coroutine=1;VISIT(st,"arguments",s.args);VISIT_SEQ(st,stmt,s.body);if(!symtable_exit_block(st))VISIT_QUIT(st,0);if(s.type_params.length>0){if(!symtable_exit_block(st))VISIT_QUIT(st,0)}break;case $B.ast.AsyncWith:VISIT_SEQ(st,withitem,s.items);VISIT_SEQ(st,stmt,s.body);break;case $B.ast.AsyncFor:VISIT(st,expr,s.target);VISIT(st,expr,s.iter);VISIT_SEQ(st,stmt,s.body);if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)}break;default:console.log("unhandled",s);break}VISIT_QUIT(st,1)};function symtable_extend_namedexpr_scope(st,e){assert(st.stack);assert(e instanceof $B.ast.Name);var target_name=e.id;var i,size,ste;size=st.stack.length;assert(size);for(i=size-1;i>=0;i--){ste=st.stack[i];if(ste.comprehension){var target_in_scope=_PyST_GetSymbol(ste,target_name);if(target_in_scope&DEF_COMP_ITER){var exc=PyErr_Format(_b_.SyntaxError,NAMED_EXPR_COMP_CONFLICT,target_name);set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.ed_lineno,e.end_col_offset);throw exc}continue}if(_PyST_IsFunctionLike(ste)){var target_in_scope=_PyST_GetSymbol(ste,target_name);if(target_in_scope&DEF_GLOBAL){if(!symtable_add_def(st,target_name,DEF_GLOBAL,LOCATION(e)))VISIT_QUIT(st,0)}else{if(!symtable_add_def(st,target_name,DEF_NONLOCAL,LOCATION(e)))VISIT_QUIT(st,0)}if(!symtable_record_directive(st,target_name,LOCATION(e)))VISIT_QUIT(st,0);return symtable_add_def_helper(st,target_name,DEF_LOCAL,ste,LOCATION(e))}if(ste.type==ModuleBlock){if(!symtable_add_def(st,target_name,DEF_GLOBAL,LOCATION(e)))VISIT_QUIT(st,0);if(!symtable_record_directive(st,target_name,LOCATION(e)))VISIT_QUIT(st,0);return symtable_add_def_helper(st,target_name,DEF_GLOBAL,ste,LOCATION(e))}if(ste.type==ClassBlock){var exc=PyErr_Format(_b_.SyntaxError,NAMED_EXPR_COMP_IN_CLASS);set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset);throw exc}}assert(0);return 0}function symtable_handle_namedexpr(st,e){if(st.cur.comp_iter_expr>0){var exc=PyErr_Format(PyExc_SyntaxError,NAMED_EXPR_COMP_ITER_EXPR);set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset);throw exc}if(st.cur.comprehension){if(!symtable_extend_namedexpr_scope(st,e.target))return 0}VISIT(st,expr,e.value);VISIT(st,expr,e.target);return 1}const alias="alias",comprehension="comprehension",excepthandler="excepthandler",expr="expr",keyword="keyword",match_case="match_case",pattern="pattern",stmt="stmt",type_param="type_param",withitem="withitem";visitor.expr=function(st,e){switch(e.constructor){case $B.ast.NamedExpr:if(!symtable_raise_if_annotation_block(st,"named expression",e)){VISIT_QUIT(st,0)}if(!symtable_handle_namedexpr(st,e))VISIT_QUIT(st,0);break;case $B.ast.BoolOp:VISIT_SEQ(st,"expr",e.values);break;case $B.ast.BinOp:VISIT(st,"expr",e.left);VISIT(st,"expr",e.right);break;case 
$B.ast.UnaryOp:VISIT(st,"expr",e.operand);break;case $B.ast.Lambda:{if(!GET_IDENTIFIER("lambda"))VISIT_QUIT(st,0);if(e.args.defaults)VISIT_SEQ(st,"expr",e.args.defaults);if(e.args.kw_defaults)VISIT_SEQ_WITH_NULL(st,"expr",e.args.kw_defaults);if(!symtable_enter_block(st,lambda,FunctionBlock,e,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset))VISIT_QUIT(st,0);VISIT(st,"arguments",e.args);VISIT(st,"expr",e.body);if(!symtable_exit_block(st))VISIT_QUIT(st,0);break}case $B.ast.IfExp:VISIT(st,"expr",e.test);VISIT(st,"expr",e.body);VISIT(st,"expr",e.orelse);break;case $B.ast.Dict:VISIT_SEQ_WITH_NULL(st,"expr",e.keys);VISIT_SEQ(st,"expr",e.values);break;case $B.ast.Set:VISIT_SEQ(st,"expr",e.elts);break;case $B.ast.GeneratorExp:if(!visitor.genexp(st,e))VISIT_QUIT(st,0);break;case $B.ast.ListComp:if(!visitor.listcomp(st,e))VISIT_QUIT(st,0);break;case $B.ast.SetComp:if(!visitor.setcomp(st,e))VISIT_QUIT(st,0);break;case $B.ast.DictComp:if(!visitor.dictcomp(st,e))VISIT_QUIT(st,0);break;case $B.ast.Yield:if(!symtable_raise_if_annotation_block(st,"yield expression",e)){VISIT_QUIT(st,0)}if(e.value)VISIT(st,"expr",e.value);st.cur.generator=1;if(st.cur.comprehension){return symtable_raise_if_comprehension_block(st,e)}break;case $B.ast.YieldFrom:if(!symtable_raise_if_annotation_block(st,"yield expression",e)){VISIT_QUIT(st,0)}VISIT(st,"expr",e.value);st.cur.generator=1;if(st.cur.comprehension){return symtable_raise_if_comprehension_block(st,e)}break;case $B.ast.Await:if(!symtable_raise_if_annotation_block(st,"await expression",e)){VISIT_QUIT(st,0)}VISIT(st,"expr",e.value);st.cur.coroutine=1;break;case $B.ast.Compare:VISIT(st,"expr",e.left);VISIT_SEQ(st,"expr",e.comparators);break;case $B.ast.Call:VISIT(st,"expr",e.func);VISIT_SEQ(st,"expr",e.args);VISIT_SEQ_WITH_NULL(st,"keyword",e.keywords);break;case $B.ast.FormattedValue:VISIT(st,"expr",e.value);if(e.format_spec)VISIT(st,"expr",e.format_spec);break;case $B.ast.JoinedStr:VISIT_SEQ(st,"expr",e.values);break;case $B.ast.Constant:break;case $B.ast.Attribute:VISIT(st,"expr",e.value);break;case $B.ast.Subscript:VISIT(st,"expr",e.value);VISIT(st,"expr",e.slice);break;case $B.ast.Starred:VISIT(st,"expr",e.value);break;case $B.ast.Slice:if(e.lower)VISIT(st,expr,e.lower);if(e.upper)VISIT(st,expr,e.upper);if(e.step)VISIT(st,expr,e.step);break;case $B.ast.Name:var flag=e.ctx instanceof $B.ast.Load?USE:DEF_LOCAL;if(!symtable_add_def(st,e.id,flag,LOCATION(e)))VISIT_QUIT(st,0);if(e.ctx instanceof $B.ast.Load&&_PyST_IsFunctionLike(st.cur)&&e.id=="super"){if(!GET_IDENTIFIER("__class__")||!symtable_add_def(st,"__class__",USE,LOCATION(e)))VISIT_QUIT(st,0)}break;case $B.ast.List:VISIT_SEQ(st,expr,e.elts);break;case $B.ast.Tuple:VISIT_SEQ(st,expr,e.elts);break}VISIT_QUIT(st,1)};visitor.type_param=function(st,tp){switch(tp.constructor){case $B.ast.TypeVar:if(!symtable_add_def(st,tp.name,DEF_TYPE_PARAM|DEF_LOCAL,LOCATION(tp)))VISIT_QUIT(st,0);if(tp.bound){var is_in_class=st.cur.can_see_class_scope;if(!symtable_enter_block(st,tp.name,TypeVarBoundBlock,tp,LOCATION(tp)))VISIT_QUIT(st,0);st.cur.can_see_class_scope=is_in_class;if(is_in_class&&!symtable_add_def(st,"__classdict__",USE,LOCATION(tp.bound))){VISIT_QUIT(st,0)}VISIT(st,expr,tp.bound);if(!symtable_exit_block(st))VISIT_QUIT(st,0)}break;case $B.ast.TypeVarTuple:if(!symtable_add_def(st,tp.name,DEF_TYPE_PARAM|DEF_LOCAL,LOCATION(tp)))VISIT_QUIT(st,0);break;case 
$B.ast.ParamSpec:if(!symtable_add_def(st,tp.name,DEF_TYPE_PARAM|DEF_LOCAL,LOCATION(tp)))VISIT_QUIT(st,0);break}VISIT_QUIT(st,1)};visitor.pattern=function(st,p){switch(p.constructor){case $B.ast.MatchValue:VISIT(st,expr,p.value);break;case $B.ast.MatchSingleton:break;case $B.ast.MatchSequence:VISIT_SEQ(st,pattern,p.patterns);break;case $B.ast.MatchStar:if(p.name){symtable_add_def(st,p.name,DEF_LOCAL,LOCATION(p))}break;case $B.ast.MatchMapping:VISIT_SEQ(st,expr,p.keys);VISIT_SEQ(st,pattern,p.patterns);if(p.rest){symtable_add_def(st,p.rest,DEF_LOCAL,LOCATION(p))}break;case $B.ast.MatchClass:VISIT(st,expr,p.cls);VISIT_SEQ(st,pattern,p.patterns);VISIT_SEQ(st,pattern,p.kwd_patterns);break;case $B.ast.MatchAs:if(p.pattern){VISIT(st,pattern,p.pattern)}if(p.name){symtable_add_def(st,p.name,DEF_LOCAL,LOCATION(p))}break;case $B.ast.MatchOr:VISIT_SEQ(st,pattern,p.patterns);break}VISIT_QUIT(st,1)};function symtable_implicit_arg(st,pos){var id="."+pos;if(!symtable_add_def(st,id,DEF_PARAM,ST_LOCATION(st.cur))){return 0}return 1}visitor.params=function(st,args){var i;if(!args)return-1;for(var arg of args){if(!symtable_add_def(st,arg.arg,DEF_PARAM,LOCATION(arg)))return 0}return 1};visitor.annotation=function(st,annotation){var future_annotations=st.future.features&CO_FUTURE_ANNOTATIONS;if(future_annotations&&!symtable_enter_block(st,"_annotation",AnnotationBlock,annotation,annotation.lineno,annotation.col_offset,annotation.end_lineno,annotation.end_col_offset)){VISIT_QUIT(st,0)}VISIT(st,expr,annotation);if(future_annotations&&!symtable_exit_block(st)){VISIT_QUIT(st,0)}return 1};visitor.argannotations=function(st,args){var i;if(!args)return-1;for(var arg of args){if(arg.annotation)VISIT(st,expr,arg.annotation)}return 1};visitor.annotations=function(st,o,a,returns){var future_annotations=st.future.ff_features&CO_FUTURE_ANNOTATIONS;if(future_annotations&&!symtable_enter_block(st,"_annotation",AnnotationBlock,o,o.lineno,o.col_offset,o.end_lineno,o.end_col_offset)){VISIT_QUIT(st,0)}if(a.posonlyargs&&!visitor.argannotations(st,a.posonlyargs))return 0;if(a.args&&!visitor.argannotations(st,a.args))return 0;if(a.vararg&&a.vararg.annotation)VISIT(st,expr,a.vararg.annotation);if(a.kwarg&&a.kwarg.annotation)VISIT(st,expr,a.kwarg.annotation);if(a.kwonlyargs&&!visitor.argannotations(st,a.kwonlyargs))return 0;if(future_annotations&&!symtable_exit_block(st)){VISIT_QUIT(st,0)}if(returns&&!visitor.annotation(st,returns)){VISIT_QUIT(st,0)}return 1};visitor.arguments=function(st,a){if(a.posonlyargs&&!visitor.params(st,a.posonlyargs))return 0;if(a.args&&!visitor.params(st,a.args))return 0;if(a.kwonlyargs&&!visitor.params(st,a.kwonlyargs))return 0;if(a.vararg){if(!symtable_add_def(st,a.vararg.arg,DEF_PARAM,LOCATION(a.vararg)))return 0;st.cur.varargs=1}if(a.kwarg){if(!symtable_add_def(st,a.kwarg.arg,DEF_PARAM,LOCATION(a.kwarg)))return 0;st.cur.varkeywords=1}return 1};visitor.excepthandler=function(st,eh){if(eh.type)VISIT(st,expr,eh.type);if(eh.name)if(!symtable_add_def(st,eh.name,DEF_LOCAL,LOCATION(eh)))return 0;VISIT_SEQ(st,stmt,eh.body);return 1};visitor.withitem=function(st,item){VISIT(st,"expr",item.context_expr);if(item.optional_vars){VISIT(st,"expr",item.optional_vars)}return 1};visitor.match_case=function(st,m){VISIT(st,pattern,m.pattern);if(m.guard){VISIT(st,expr,m.guard)}VISIT_SEQ(st,stmt,m.body);return 1};visitor.alias=function(st,a){var store_name,name=a.asname==NULL?a.name:a.asname;var dot=name.search("\\.");if(dot!=-1){store_name=name.substring(0,dot);if(!store_name)return 0}else{store_name=name}if(name!="*"){var 
r=symtable_add_def(st,store_name,DEF_IMPORT,LOCATION(a));return r}else{if(st.cur.type!=ModuleBlock){var lineno=a.lineno,col_offset=a.col_offset,end_lineno=a.end_lineno,end_col_offset=a.end_col_offset;var exc=PyErr_SetString(PyExc_SyntaxError,IMPORT_STAR_WARNING);set_exc_info(exc,st.filename,lineno,col_offset,end_lineno,end_col_offset);throw exc}st.cur.$has_import_star=true;return 1}};visitor.comprehension=function(st,lc){st.cur.comp_iter_target=1;VISIT(st,expr,lc.target);st.cur.comp_iter_target=0;st.cur.comp_iter_expr++;VISIT(st,expr,lc.iter);st.cur.comp_iter_expr--;VISIT_SEQ(st,expr,lc.ifs);if(lc.is_async){st.cur.coroutine=1}return 1};visitor.keyword=function(st,k){VISIT(st,expr,k.value);return 1};function symtable_handle_comprehension(st,e,scope_name,generators,elt,value){var is_generator=e.constructor===$B.ast.GeneratorExp;var outermost=generators[0];st.cur.comp_iter_expr++;VISIT(st,expr,outermost.iter);st.cur.comp_iter_expr--;if(!scope_name||!symtable_enter_block(st,scope_name,FunctionBlock,e,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset)){return 0}switch(e.constructor){case $B.ast.ListComp:st.cur.comprehension=ListComprehension;break;case $B.ast.SetComp:st.cur.comprehension=SetComprehension;break;case $B.ast.DictComp:st.cur.comprehension=DictComprehension;break;default:st.cur.comprehension=GeneratorExpression;break}if(outermost.is_async){st.cur.coroutine=1}if(!symtable_implicit_arg(st,0)){symtable_exit_block(st);return 0}st.cur.comp_iter_target=1;VISIT(st,expr,outermost.target);st.cur.comp_iter_target=0;VISIT_SEQ(st,expr,outermost.ifs);VISIT_SEQ_TAIL(st,comprehension,generators,1);if(value)VISIT(st,expr,value);VISIT(st,expr,elt);st.cur.generator=is_generator;var is_async=st.cur.coroutine&&!is_generator;if(!symtable_exit_block(st)){return 0}if(is_async){st.cur.coroutine=1}return 1}visitor.genexp=function(st,e){return symtable_handle_comprehension(st,e,"genexpr",e.generators,e.elt,NULL)};visitor.listcomp=function(st,e){return symtable_handle_comprehension(st,e,"listcomp",e.generators,e.elt,NULL)};visitor.setcomp=function(st,e){return symtable_handle_comprehension(st,e,"setcomp",e.generators,e.elt,NULL)};visitor.dictcomp=function(st,e){return symtable_handle_comprehension(st,e,"dictcomp",e.generators,e.key,e.value)};function symtable_raise_if_annotation_block(st,name,e){var type=st.cur.type,exc;if(type==AnnotationBlock)exc=PyErr_Format(PyExc_SyntaxError,ANNOTATION_NOT_ALLOWED,name);else if(type==TypeVarBoundBlock)exc=PyErr_Format(PyExc_SyntaxError,TYPEVAR_BOUND_NOT_ALLOWED,name);else if(type==TypeAliasBlock)exc=PyErr_Format(PyExc_SyntaxError,TYPEALIAS_NOT_ALLOWED,name);else if(type==TypeParamBlock)exc=PyErr_Format(PyExc_SyntaxError,TYPEPARAM_NOT_ALLOWED,name);else return 1;set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset);throw exc}function symtable_raise_if_comprehension_block(st,e){var type=st.cur.comprehension;var exc=PyErr_SetString(PyExc_SyntaxError,type==ListComprehension?"'yield' inside list comprehension":type==SetComprehension?"'yield' inside set comprehension":type==DictComprehension?"'yield' inside dict comprehension":"'yield' inside generator expression");exc.$frame_obj=$B.frame_obj;set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset);throw exc}})(__BRYTHON__);var docs={ArithmeticError:"Base class for arithmetic errors.",AssertionError:"Assertion failed.",AttributeError:"Attribute not found.",BaseException:"Common base class for all exceptions",BaseExceptionGroup:"A combination of multiple unrelated 
exceptions.",BlockingIOError:"I/O operation would block.",BrokenPipeError:"Broken pipe.",BufferError:"Buffer error.",BytesWarning:"Base class for warnings about bytes and buffer related problems, mostly\nrelated to conversion from str or comparing to str.",ChildProcessError:"Child process error.",ConnectionAbortedError:"Connection aborted.",ConnectionError:"Connection error.",ConnectionRefusedError:"Connection refused.",ConnectionResetError:"Connection reset.",DeprecationWarning:"Base class for warnings about deprecated features.",EOFError:"Read beyond end of file.",Ellipsis:"",EncodingWarning:"Base class for warnings about encodings.",EnvironmentError:"Base class for I/O related errors.",Exception:"Common base class for all non-exit exceptions.",ExceptionGroup:"",False:"bool(x) -> bool\n\nReturns True when the argument x is true, False otherwise.\nThe builtins True and False are the only two instances of the class bool.\nThe class bool is a subclass of the class int, and cannot be subclassed.",FileExistsError:"File already exists.",FileNotFoundError:"File not found.",FloatingPointError:"Floating point operation failed.",FutureWarning:"Base class for warnings about constructs that will change semantically\nin the future.",GeneratorExit:"Request that a generator exit.",IOError:"Base class for I/O related errors.",ImportError:"Import can't find module, or can't find name in module.",ImportWarning:"Base class for warnings about probable mistakes in module imports",IndentationError:"Improper indentation.",IndexError:"Sequence index out of range.",InterruptedError:"Interrupted by signal.",IsADirectoryError:"Operation doesn't work on directories.",KeyError:"Mapping key not found.",KeyboardInterrupt:"Program interrupted by user.",LookupError:"Base class for lookup errors.",MemoryError:"Out of memory.",ModuleNotFoundError:"Module not found.",NameError:"Name not found globally.",None:"",NotADirectoryError:"Operation only works on directories.",NotImplemented:"",NotImplementedError:"Method or function hasn't been implemented yet.",OSError:"Base class for I/O related errors.",OverflowError:"Result too large to be represented.",PendingDeprecationWarning:"Base class for warnings about features which will be deprecated\nin the future.",PermissionError:"Not enough permissions.",ProcessLookupError:"Process not found.",RecursionError:"Recursion limit exceeded.",ReferenceError:"Weak ref proxy used after referent went away.",ResourceWarning:"Base class for warnings about resource usage.",RuntimeError:"Unspecified run-time error.",RuntimeWarning:"Base class for warnings about dubious runtime behavior.",StopAsyncIteration:"Signal the end from iterator.__anext__().",StopIteration:"Signal the end from iterator.__next__().",SyntaxError:"Invalid syntax.",SyntaxWarning:"Base class for warnings about dubious syntax.",SystemError:"Internal error in the Python interpreter.\n\nPlease report this to the Python maintainer, along with the traceback,\nthe Python version, and the hardware/OS platform and version.",SystemExit:"Request to exit from the interpreter.",TabError:"Improper mixture of spaces and tabs.",TimeoutError:"Timeout expired.",True:"bool(x) -> bool\n\nReturns True when the argument x is true, False otherwise.\nThe builtins True and False are the only two instances of the class bool.\nThe class bool is a subclass of the class int, and cannot be subclassed.",TypeError:"Inappropriate argument type.",UnboundLocalError:"Local name referenced but not bound to a value.",UnicodeDecodeError:"Unicode decoding 
error.",UnicodeEncodeError:"Unicode encoding error.",UnicodeError:"Unicode related error.",UnicodeTranslateError:"Unicode translation error.",UnicodeWarning:"Base class for warnings about Unicode related problems, mostly\nrelated to conversion problems.",UserWarning:"Base class for warnings generated by user code.",ValueError:"Inappropriate argument value (of correct type).",Warning:"Base class for warning categories.",WindowsError:"Base class for I/O related errors.",ZeroDivisionError:"Second argument to a division or modulo operation was zero.",__debug__:"bool(x) -> bool\n\nReturns True when the argument x is true, False otherwise.\nThe builtins True and False are the only two instances of the class bool.\nThe class bool is a subclass of the class int, and cannot be subclassed.",abs:"Return the absolute value of the argument.",aiter:"Return an AsyncIterator for an AsyncIterable object.",all:"Return True if bool(x) is True for all values x in the iterable.\n\nIf the iterable is empty, return True.",anext:"async anext(aiterator[, default])\n\nReturn the next item from the async iterator. If default is given and the async\niterator is exhausted, it is returned instead of raising StopAsyncIteration.",any:"Return True if bool(x) is True for any x in the iterable.\n\nIf the iterable is empty, return False.",ascii:"Return an ASCII-only representation of an object.\n\nAs repr(), return a string containing a printable representation of an\nobject, but escape the non-ASCII characters in the string returned by\nrepr() using \\\\x, \\\\u or \\\\U escapes. This generates a string similar\nto that returned by repr() in Python 2.",bin:"Return the binary representation of an integer.\n\n >>> bin(2796202)\n '0b1010101010101010101010'",bool:"bool(x) -> bool\n\nReturns True when the argument x is true, False otherwise.\nThe builtins True and False are the only two instances of the class bool.\nThe class bool is a subclass of the class int, and cannot be subclassed.",breakpoint:"breakpoint(*args, **kws)\n\nCall sys.breakpointhook(*args, **kws). 
sys.breakpointhook() must accept\nwhatever arguments are passed.\n\nBy default, this drops you into the pdb debugger.",bytearray:"bytearray(iterable_of_ints) -> bytearray\nbytearray(string, encoding[, errors]) -> bytearray\nbytearray(bytes_or_buffer) -> mutable copy of bytes_or_buffer\nbytearray(int) -> bytes array of size given by the parameter initialized with null bytes\nbytearray() -> empty bytes array\n\nConstruct a mutable bytearray object from:\n - an iterable yielding integers in range(256)\n - a text string encoded using the specified encoding\n - a bytes or a buffer object\n - any object implementing the buffer API.\n - an integer",bytes:"bytes(iterable_of_ints) -> bytes\nbytes(string, encoding[, errors]) -> bytes\nbytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer\nbytes(int) -> bytes object of size given by the parameter initialized with null bytes\nbytes() -> empty bytes object\n\nConstruct an immutable array of bytes from:\n - an iterable yielding integers in range(256)\n - a text string encoded using the specified encoding\n - any object implementing the buffer API.\n - an integer",callable:"Return whether the object is callable (i.e., some kind of function).\n\nNote that classes are callable, as are instances of classes with a\n__call__() method.",chr:"Return a Unicode string of one character with ordinal i; 0 <= i <= 0x10ffff.",classmethod:"classmethod(function) -> method\n\nConvert a function to be a class method.\n\nA class method receives the class as implicit first argument,\njust like an instance method receives the instance.\nTo declare a class method, use this idiom:\n\n class C:\n @classmethod\n def f(cls, arg1, arg2, argN):\n ...\n\nIt can be called either on the class (e.g. C.f()) or on an instance\n(e.g. C().f()). 
The instance is ignored except for its class.\nIf a class method is called for a derived class, the derived class\nobject is passed as the implied first argument.\n\nClass methods are different than C++ or Java static methods.\nIf you want those, see the staticmethod builtin.",compile:"Compile source into a code object that can be executed by exec() or eval().\n\nThe source code may represent a Python module, statement or expression.\nThe filename will be used for run-time error messages.\nThe mode must be 'exec' to compile a module, 'single' to compile a\nsingle (interactive) statement, or 'eval' to compile an expression.\nThe flags argument, if present, controls which future statements influence\nthe compilation of the code.\nThe dont_inherit argument, if true, stops the compilation inheriting\nthe effects of any future statements in effect in the code calling\ncompile; if absent or false these statements do influence the compilation,\nin addition to any features explicitly specified.",complex:"Create a complex number from a real part and an optional imaginary part.\n\nThis is equivalent to (real + imag*1j) where imag defaults to 0.",copyright:"interactive prompt objects for printing the license text, a list of\n contributors and the copyright notice.",credits:"interactive prompt objects for printing the license text, a list of\n contributors and the copyright notice.",delattr:"Deletes the named attribute from the given object.\n\ndelattr(x, 'y') is equivalent to ``del x.y``",dict:"dict() -> new empty dictionary\ndict(mapping) -> new dictionary initialized from a mapping object's\n (key, value) pairs\ndict(iterable) -> new dictionary initialized as if via:\n d = {}\n for k, v in iterable:\n d[k] = v\ndict(**kwargs) -> new dictionary initialized with the name=value pairs\n in the keyword argument list. For example: dict(one=1, two=2)",dir:"Show attributes of an object.\n\nIf called without an argument, return the names in the current scope.\nElse, return an alphabetized list of names comprising (some of) the attributes\nof the given object, and of attributes reachable from it.\nIf the object supplies a method named __dir__, it will be used; otherwise\nthe default dir() logic is used and returns:\n for a module object: the module's attributes.\n for a class object: its attributes, and recursively the attributes\n of its bases.\n for any other object: its attributes, its class's attributes, and\n recursively the attributes of its class's base classes.",divmod:"Return the tuple (x//y, x%y). 
Invariant: div*y + mod == x.",enumerate:"Return an enumerate object.\n\n iterable\n an object supporting iteration\n\nThe enumerate object yields pairs containing a count (from start, which\ndefaults to zero) and a value yielded by the iterable argument.\n\nenumerate is useful for obtaining an indexed list:\n (0, seq[0]), (1, seq[1]), (2, seq[2]), ...",eval:"Evaluate the given source in the C of globals and locals.\n\nThe source may be a string representing a Python expression\nor a code object as returned by compile().\nThe globals must be a dictionary and locals can be any mapping,\ndefaulting to the current globals and locals.\nIf only globals is given, locals defaults to it.",exec:"Execute the given source in the C of globals and locals.\n\nThe source may be a string representing one or more Python statements\nor a code object as returned by compile().\nThe globals must be a dictionary and locals can be any mapping,\ndefaulting to the current globals and locals.\nIf only globals is given, locals defaults to it.\nThe closure must be a tuple of cellvars, and can only be used\nwhen source is a code object requiring exactly that many cellvars.",exit:"",filter:"filter(function or None, iterable) --\x3e filter object\n\nReturn an iterator yielding those items of iterable for which function(item)\nis true. If function is None, return the items that are true.",float:"Convert a string or number to a floating point number, if possible.",format:"Return type(value).__format__(value, format_spec)\n\nMany built-in types implement format_spec according to the\nFormat Specification Mini-language. See help('FORMATTING').\n\nIf type(value) does not supply a method named __format__\nand format_spec is empty, then str(value) is returned.\nSee also help('SPECIALMETHODS').",frozenset:"frozenset() -> empty frozenset object\nfrozenset(iterable) -> frozenset object\n\nBuild an immutable unordered collection of unique elements.",getattr:"Get a named attribute from an object.\n\ngetattr(x, 'y') is equivalent to x.y\nWhen a default argument is given, it is returned when the attribute doesn't\nexist; without it, an exception is raised in that case.",globals:"Return the dictionary containing the current scope's global variables.\n\nNOTE: Updates to this dictionary *will* affect name lookups in the current\nglobal scope and vice-versa.",hasattr:"Return whether the object has an attribute with the given name.\n\nThis is done by calling getattr(obj, name) and catching AttributeError.",hash:"Return the hash value for the given object.\n\nTwo objects that compare equal must also have the same hash value, but the\nreverse is not necessarily true.",help:"Define the builtin 'help'.\n\n This is a wrapper around pydoc.help that provides a helpful message\n when 'help' is typed at the Python interactive prompt.\n\n Calling help() at the Python prompt starts an interactive help session.\n Calling help(thing) prints help for the python object 'thing'.\n ",hex:"Return the hexadecimal representation of an integer.\n\n >>> hex(12648430)\n '0xc0ffee'",id:"Return the identity of an object.\n\nThis is guaranteed to be unique among simultaneously existing objects.\n(CPython uses the object's memory address.)",input:"Read a string from standard input. 
The trailing newline is stripped.\n\nThe prompt string, if given, is printed to standard output without a\ntrailing newline before reading input.\n\nIf the user hits EOF (*nix: Ctrl-D, Windows: Ctrl-Z+Return), raise EOFError.\nOn *nix systems, readline is used if available.",int:"int([x]) -> integer\nint(x, base=10) -> integer\n\nConvert a number or string to an integer, or return 0 if no arguments\nare given. If x is a number, return x.__int__(). For floating point\nnumbers, this truncates towards zero.\n\nIf x is not a number or if base is given, then x must be a string,\nbytes, or bytearray instance representing an integer literal in the\ngiven base. The literal can be preceded by '+' or '-' and be surrounded\nby whitespace. The base defaults to 10. Valid bases are 0 and 2-36.\nBase 0 means to interpret the base from the string as an integer literal.\n>>> int('0b100', base=0)\n4",isinstance:"Return whether an object is an instance of a class or of a subclass thereof.\n\nA tuple, as in ``isinstance(x, (A, B, ...))``, may be given as the target to\ncheck against. This is equivalent to ``isinstance(x, A) or isinstance(x, B)\nor ...`` etc.",issubclass:"Return whether 'cls' is derived from another class or is the same class.\n\nA tuple, as in ``issubclass(x, (A, B, ...))``, may be given as the target to\ncheck against. This is equivalent to ``issubclass(x, A) or issubclass(x, B)\nor ...``.",iter:"Get an iterator from an object.\n\nIn the first form, the argument must supply its own iterator, or be a sequence.\nIn the second form, the callable is called until it returns the sentinel.",len:"Return the number of items in a container.",license:"interactive prompt objects for printing the license text, a list of\n contributors and the copyright notice.",list:"Built-in mutable sequence.\n\nIf no argument is given, the constructor creates a new empty list.\nThe argument must be an iterable if specified.",locals:"Return a dictionary containing the current scope's local variables.\n\nNOTE: Whether or not updates to this dictionary will affect name lookups in\nthe local scope and vice-versa is *implementation dependent* and not\ncovered by any backwards compatibility guarantees.",map:"map(func, *iterables) --\x3e map object\n\nMake an iterator that computes the function using arguments from\neach of the iterables. Stops when the shortest iterable is exhausted.",max:"max(iterable, *[, default=obj, key=func]) -> value\nmax(arg1, arg2, *args, *[, key=func]) -> value\n\nWith a single iterable argument, return its biggest item. The\ndefault keyword-only argument specifies an object to return if\nthe provided iterable is empty.\nWith two or more arguments, return the largest argument.",memoryview:"Create a new memoryview object which references the given object.",min:"min(iterable, *[, default=obj, key=func]) -> value\nmin(arg1, arg2, *args, *[, key=func]) -> value\n\nWith a single iterable argument, return its smallest item. 
The\ndefault keyword-only argument specifies an object to return if\nthe provided iterable is empty.\nWith two or more arguments, return the smallest argument.",next:"Return the next item from the iterator.\n\nIf default is given and the iterator is exhausted,\nit is returned instead of raising StopIteration.",object:"The base class of the class hierarchy.\n\nWhen called, it accepts no arguments and returns a new featureless\ninstance that has no instance attributes and cannot be given any.\n",oct:"Return the octal representation of an integer.\n\n >>> oct(342391)\n '0o1234567'",open:"Open file and return a stream. Raise OSError upon failure.\n\nfile is either a text or byte string giving the name (and the path\nif the file isn't in the current working directory) of the file to\nbe opened or an integer file descriptor of the file to be\nwrapped. (If a file descriptor is given, it is closed when the\nreturned I/O object is closed, unless closefd is set to False.)\n\nmode is an optional string that specifies the mode in which the file\nis opened. It defaults to 'r' which means open for reading in text\nmode. Other common values are 'w' for writing (truncating the file if\nit already exists), 'x' for creating and writing to a new file, and\n'a' for appending (which on some Unix systems, means that all writes\nappend to the end of the file regardless of the current seek position).\nIn text mode, if encoding is not specified the encoding used is platform\ndependent: locale.getencoding() is called to get the current locale encoding.\n(For reading and writing raw bytes use binary mode and leave encoding\nunspecified.) The available modes are:\n\n========= ===============================================================\nCharacter Meaning\n--------- ---------------------------------------------------------------\n'r' open for reading (default)\n'w' open for writing, truncating the file first\n'x' create a new file and open it for writing\n'a' open for writing, appending to the end of the file if it exists\n'b' binary mode\n't' text mode (default)\n'+' open a disk file for updating (reading and writing)\n========= ===============================================================\n\nThe default mode is 'rt' (open for reading text). For binary random\naccess, the mode 'w+b' opens and truncates the file to 0 bytes, while\n'r+b' opens the file without truncation. The 'x' mode implies 'w' and\nraises an `FileExistsError` if the file already exists.\n\nPython distinguishes between files opened in binary and text modes,\neven when the underlying operating system doesn't. Files opened in\nbinary mode (appending 'b' to the mode argument) return contents as\nbytes objects without any decoding. In text mode (the default, or when\n't' is appended to the mode argument), the contents of the file are\nreturned as strings, the bytes having been first decoded using a\nplatform-dependent encoding or using the specified encoding if given.\n\nbuffering is an optional integer used to set the buffering policy.\nPass 0 to switch buffering off (only allowed in binary mode), 1 to select\nline buffering (only usable in text mode), and an integer > 1 to indicate\nthe size of a fixed-size chunk buffer. 
When no buffering argument is\ngiven, the default buffering policy works as follows:\n\n* Binary files are buffered in fixed-size chunks; the size of the buffer\n is chosen using a heuristic trying to determine the underlying device's\n \"block size\" and falling back on `io.DEFAULT_BUFFER_SIZE`.\n On many systems, the buffer will typically be 4096 or 8192 bytes long.\n\n* \"Interactive\" text files (files for which isatty() returns True)\n use line buffering. Other text files use the policy described above\n for binary files.\n\nencoding is the name of the encoding used to decode or encode the\nfile. This should only be used in text mode. The default encoding is\nplatform dependent, but any encoding supported by Python can be\npassed. See the codecs module for the list of supported encodings.\n\nerrors is an optional string that specifies how encoding errors are to\nbe handled---this argument should not be used in binary mode. Pass\n'strict' to raise a ValueError exception if there is an encoding error\n(the default of None has the same effect), or pass 'ignore' to ignore\nerrors. (Note that ignoring encoding errors can lead to data loss.)\nSee the documentation for codecs.register or run 'help(codecs.Codec)'\nfor a list of the permitted encoding error strings.\n\nnewline controls how universal newlines works (it only applies to text\nmode). It can be None, '', '\\n', '\\r', and '\\r\\n'. It works as\nfollows:\n\n* On input, if newline is None, universal newlines mode is\n enabled. Lines in the input can end in '\\n', '\\r', or '\\r\\n', and\n these are translated into '\\n' before being returned to the\n caller. If it is '', universal newline mode is enabled, but line\n endings are returned to the caller untranslated. If it has any of\n the other legal values, input lines are only terminated by the given\n string, and the line ending is returned to the caller untranslated.\n\n* On output, if newline is None, any '\\n' characters written are\n translated to the system default line separator, os.linesep. If\n newline is '' or '\\n', no translation takes place. If newline is any\n of the other legal values, any '\\n' characters written are translated\n to the given string.\n\nIf closefd is False, the underlying file descriptor will be kept open\nwhen the file is closed. This does not work when a file name is given\nand must be True in that case.\n\nA custom opener can be used by passing a callable as *opener*. The\nunderlying file descriptor for the file object is then obtained by\ncalling *opener* with (*file*, *flags*). *opener* must return an open\nfile descriptor (passing os.open as *opener* results in functionality\nsimilar to passing None).\n\nopen() returns a file object whose type depends on the mode, and\nthrough which the standard file operations such as reading and writing\nare performed. When open() is used to open a file in a text mode ('w',\n'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open\na file in a binary mode, the returned class varies: in read binary\nmode, it returns a BufferedReader; in write binary and append binary\nmodes, it returns a BufferedWriter, and in read/write mode, it returns\na BufferedRandom.\n\nIt is also possible to use a string or bytearray as a file for both\nreading and writing. 
For strings StringIO can be used like a file\nopened in a text mode, and for bytes a BytesIO can be used like a file\nopened in a binary mode.",ord:"Return the Unicode code point for a one-character string.",pow:"Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\nSome types, such as ints, are able to use a more efficient algorithm when\ninvoked using the three argument form.",print:"Prints the values to a stream, or to sys.stdout by default.\n\n sep\n string inserted between values, default a space.\n end\n string appended after the last value, default a newline.\n file\n a file-like object (stream); defaults to the current sys.stdout.\n flush\n whether to forcibly flush the stream.",property:"Property attribute.\n\n fget\n function to be used for getting an attribute value\n fset\n function to be used for setting an attribute value\n fdel\n function to be used for del'ing an attribute\n doc\n docstring\n\nTypical use is to define a managed attribute x:\n\nclass C(object):\n def getx(self): return self._x\n def setx(self, value): self._x = value\n def delx(self): del self._x\n x = property(getx, setx, delx, \"I'm the 'x' property.\")\n\nDecorators make defining new properties or modifying existing ones easy:\n\nclass C(object):\n @property\n def x(self):\n \"I am the 'x' property.\"\n return self._x\n @x.setter\n def x(self, value):\n self._x = value\n @x.deleter\n def x(self):\n del self._x",quit:"",range:"range(stop) -> range object\nrange(start, stop[, step]) -> range object\n\nReturn an object that produces a sequence of integers from start (inclusive)\nto stop (exclusive) by step. range(i, j) produces i, i+1, i+2, ..., j-1.\nstart defaults to 0, and stop is omitted! range(4) produces 0, 1, 2, 3.\nThese are exactly the valid indices for a list of 4 elements.\nWhen step is given, it specifies the increment (or decrement).",repr:"Return the canonical string representation of the object.\n\nFor many object types, including most builtins, eval(repr(obj)) == obj.",reversed:"Return a reverse iterator over the values of the given sequence.",round:"Round a number to a given precision in decimal digits.\n\nThe return value is an integer if ndigits is omitted or None. Otherwise\nthe return value has the same type as the number. ndigits may be negative.",set:"set() -> new empty set object\nset(iterable) -> new set object\n\nBuild an unordered collection of unique elements.",setattr:"Sets the named attribute on the given object to the specified value.\n\nsetattr(x, 'y', v) is equivalent to ``x.y = v``",slice:"slice(stop)\nslice(start, stop[, step])\n\nCreate a slice object. This is used for extended slicing (e.g. a[0:10:2]).",sorted:"Return a new list containing all items from the iterable in ascending order.\n\nA custom key function can be supplied to customize the sort order, and the\nreverse flag can be set to request the result in descending order.",staticmethod:"staticmethod(function) -> method\n\nConvert a function to be a static method.\n\nA static method does not receive an implicit first argument.\nTo declare a static method, use this idiom:\n\n class C:\n @staticmethod\n def f(arg1, arg2, argN):\n ...\n\nIt can be called either on the class (e.g. C.f()) or on an instance\n(e.g. C().f()). 
Both the class and the instance are ignored, and\nneither is passed implicitly as the first argument to the method.\n\nStatic methods in Python are similar to those found in Java or C++.\nFor a more advanced concept, see the classmethod builtin.",str:"str(object='') -> str\nstr(bytes_or_buffer[, encoding[, errors]]) -> str\n\nCreate a new string object from the given object. If encoding or\nerrors is specified, then the object must expose a data buffer\nthat will be decoded using the given encoding and error handler.\nOtherwise, returns the result of object.__str__() (if defined)\nor repr(object).\nencoding defaults to sys.getdefaultencoding().\nerrors defaults to 'strict'.",sum:"Return the sum of a 'start' value (default: 0) plus an iterable of numbers\n\nWhen the iterable is empty, return the start value.\nThis function is intended specifically for use with numeric values and may\nreject non-numeric types.",super:"super() -> same as super(__class__, )\nsuper(type) -> unbound super object\nsuper(type, obj) -> bound super object; requires isinstance(obj, type)\nsuper(type, type2) -> bound super object; requires issubclass(type2, type)\nTypical use to call a cooperative superclass method:\nclass C(B):\n def meth(self, arg):\n super().meth(arg)\nThis works for class methods too:\nclass C(B):\n @classmethod\n def cmeth(cls, arg):\n super().cmeth(arg)\n",tuple:"Built-in immutable sequence.\n\nIf no argument is given, the constructor returns an empty tuple.\nIf iterable is specified the tuple is initialized from iterable's items.\n\nIf the argument is a tuple, the return value is the same object.",type:"type(object) -> the object's type\ntype(name, bases, dict, **kwds) -> a new type",vars:"Show vars.\n\nWithout arguments, equivalent to locals().\nWith an argument, equivalent to object.__dict__.",zip:"zip(*iterables, strict=False) --\x3e Yield tuples until an input is exhausted.\n\n >>> list(zip('abcdefg', range(3), range(4)))\n [('a', 0, 0), ('b', 1, 1), ('c', 2, 2)]\n\nThe zip object yields n-length tuples, where n is the number of iterables\npassed as positional arguments to zip(). The i-th element in every tuple\ncomes from the i-th iterable argument to zip(). This continues until the\nshortest argument is exhausted.\n\nIf strict is true and one of the arguments is exhausted before the others,\nraise a ValueError."};for(var key in docs){if(__BRYTHON__.builtins[key]){if(["object","function"].includes(typeof __BRYTHON__.builtins[key])){__BRYTHON__.builtins[key].__doc__=docs[key]}}}(function($B){$B.whenReady=new Promise((function(resolve,reject){resolve()}))})(__BRYTHON__); \ No newline at end of file +var __BRYTHON__=globalThis.__BRYTHON__||{};try{eval("async function* f(){}")}catch(err){console.warn("Your browser is not fully supported. 
If you are using "+"Microsoft Edge, please upgrade to the latest version")}(function($B){$B.isWebWorker="undefined"!==typeof WorkerGlobalScope&&"function"===typeof importScripts&&navigator instanceof WorkerNavigator;$B.isNode=typeof process!=="undefined"&&process.release.name==="node"&&process.__nwjs!==1;var _window=globalThis;_window.location||={href:"",origin:"",pathname:""};_window.navigator||={userLanguage:""};_window.document||={getElementsByTagName:()=>[{src:"http://localhost/"}],currentScript:{src:"http://localhost/"},querySelectorAll:()=>[]};_window.HTMLElement||=class HTMLElement{};_window.MutationObserver||=function(){this.observe=()=>{}};_window.customElements||={define:()=>{}};var href=_window.location.href;$B.protocol=href.split(":")[0];$B.BigInt=_window.BigInt;$B.indexedDB=_window.indexedDB;if($B.brython_path===undefined){var this_url;if($B.isWebWorker){this_url=_window.location.href;if(this_url.startsWith("blob:")){this_url=this_url.substr(5)}}else{this_url=document.currentScript.src}var elts=this_url.split("/");elts.pop();$B.brython_path=elts.join("/")+"/"}else{if(!$B.brython_path.endsWith("/")){$B.brython_path+="/"}}var parts_re=new RegExp("(.*?)://(.*?)/(.*)"),mo=parts_re.exec($B.brython_path);if(mo){$B.full_url={protocol:mo[1],host:mo[2],address:mo[3]};if(["http","https"].includes(mo[1])){$B.domain=mo[1]+"://"+mo[2]}}var path=_window.location.origin+_window.location.pathname,path_elts=path.split("/");path_elts.pop();$B.script_dir=path_elts.join("/");mo=parts_re.exec($B.script_dir);if(mo){if(["http","https"].includes(mo[1])){$B.script_domain=mo[1]+"://"+mo[2]}}$B.strip_host=function(url){var parts_re=new RegExp("(.*?)://(.*?)/(.*)"),mo=parts_re.exec(url);if(mo){return mo[3]}console.log(Error().stack);throw Error("not a url: "+url)};$B.__ARGV=[];$B.webworkers={};$B.file_cache={};$B.url2name={};$B.scripts={};$B.import_info={};$B.imported={};$B.precompiled={};$B.frame_obj=null;$B.builtins=Object.create(null);$B.builtins_scope={id:"__builtins__",module:"__builtins__",binding:{}};$B.builtin_funcs={};$B.builtin_classes=[];$B.__getattr__=function(attr){return this[attr]};$B.__setattr__=function(attr,value){if(["debug","stdout","stderr"].indexOf(attr)>-1){$B[attr]=value}else{throw $B.builtins.AttributeError.$factory("__BRYTHON__ object has no attribute "+attr)}};$B.language=_window.navigator.userLanguage||_window.navigator.language;$B.locale="C";var date=new Date;var formatter=new Intl.DateTimeFormat($B.language,{timeZoneName:"short"}),short=formatter.format(date);formatter=new Intl.DateTimeFormat($B.language,{timeZoneName:"long"});var long=formatter.format(date);var ix=0,minlen=Math.min(short.length,long.length);while(ix>":"rshift","+":"add","-":"sub","*":"mul","/":"truediv","%":"mod","@":"matmul"},augmented_assigns:{"//=":"ifloordiv",">>=":"irshift","<<=":"ilshift","**=":"ipow","+=":"iadd","-=":"isub","*=":"imul","/=":"itruediv","%=":"imod","&=":"iand","|=":"ior","^=":"ixor","@=":"imatmul"},binary:{"&":"and","|":"or","~":"invert","^":"xor"},comparisons:{"<":"lt",">":"gt","<=":"le",">=":"ge","==":"eq","!=":"ne"},boolean:{or:"or",and:"and",in:"in",not:"not",is:"is"},subset:function(){var res={},keys=[];if(arguments[0]=="all"){keys=Object.keys($B.op2method);keys.splice(keys.indexOf("subset"),1)}else{for(var arg of arguments){keys.push(arg)}}for(var key of keys){var ops=$B.op2method[key];if(ops===undefined){throw Error(key)}for(var attr in ops){res[attr]=ops[attr]}}return res}};$B.method_to_op={};for(var category in $B.op2method){for(var op in $B.op2method[category]){var 
method=`__${$B.op2method[category][op]}__`;$B.method_to_op[method]=op}}$B.special_string_repr={8:"\\x08",9:"\\t",10:"\\n",11:"\\x0b",12:"\\x0c",13:"\\r",92:"\\\\",160:"\\xa0"};$B.$py_next_hash=Math.pow(2,53)-1;$B.$py_UUID=0;$B.lambda_magic=Math.random().toString(36).substr(2,8);$B.set_func_names=function(klass,module){for(var attr in klass){if(typeof klass[attr]=="function"){klass[attr].$infos={__doc__:klass[attr].__doc__||"",__module__:module,__qualname__:klass.__qualname__+"."+attr,__name__:attr};if(klass[attr].$type=="classmethod"){klass[attr].__class__=$B.method}}}klass.__module__=module};var has_storage=typeof Storage!=="undefined";if(has_storage){$B.has_local_storage=false;try{if(localStorage){$B.local_storage=localStorage;$B.has_local_storage=true}}catch(err){}$B.has_session_storage=false;try{if(sessionStorage){$B.session_storage=sessionStorage;$B.has_session_storage=true}}catch(err){}}else{$B.has_local_storage=false;$B.has_session_storage=false}$B.globals=function(){return $B.frame_obj.frame[3]};$B.scripts={};$B.$options={};$B.builtins_repr_check=function(builtin,args){var $=$B.args("__repr__",1,{self:null},["self"],args,{},null,null),self=$.self;if(!$B.$isinstance(self,builtin)){var _b_=$B.builtins;throw _b_.TypeError.$factory("descriptor '__repr__' requires a "+`'${builtin.__name__}' object but received a `+`'${$B.class_name(self)}'`)}};$B.update_VFS=function(scripts){$B.VFS=$B.VFS||{};var vfs_timestamp=scripts.$timestamp;if(vfs_timestamp!==undefined){delete scripts.$timestamp}for(var script in scripts){if($B.VFS.hasOwnProperty(script)){console.warn("Virtual File System: duplicate entry "+script)}$B.VFS[script]=scripts[script];$B.VFS[script].timestamp=vfs_timestamp}$B.stdlib_module_names=Object.keys($B.VFS)};$B.add_files=function(files){$B.files=$B.files||{};for(var file in files){$B.files[file]=files[file]}};$B.has_file=function(file){return $B.files&&$B.files.hasOwnProperty(file)};$B.show_tokens=function(src,mode){for(var token of $B.tokenizer(src,"",mode||"file")){console.log(token.type,$B.builtins.repr(token.string),token.start,token.end,token.line)}};function from_py(src,script_id){if(!$B.options_parsed){$B.parse_options()}script_id=script_id||"python_script_"+$B.UUID();var filename=$B.script_path+"#"+script_id;$B.url2name[filename]=script_id;$B.imported[script_id]={};var root=__BRYTHON__.py2js({src:src,filename:filename},script_id,script_id,__BRYTHON__.builtins_scope);return root.to_js()}$B.getPythonModule=function(name){return $B.imported[name]};$B.python_to_js=function(src,script_id){return"(function() {\n"+from_py(src,script_id)+"\nreturn locals}())"};$B.pythonToJS=$B.python_to_js;$B.runPythonSource=function(src,script_id){var js=from_py(src,script_id)+"\nreturn locals";var func=new Function("$B","_b_",js);$B.imported[script_id]=func($B,$B.builtins);return 
$B.imported[script_id]}})(__BRYTHON__);__BRYTHON__.ast_classes={Add:"",And:"",AnnAssign:"target,annotation,value?,simple",Assert:"test,msg?",Assign:"targets*,value,type_comment?",AsyncFor:"target,iter,body*,orelse*,type_comment?",AsyncFunctionDef:"name,args,body*,decorator_list*,returns?,type_comment?,type_params*",AsyncWith:"items*,body*,type_comment?",Attribute:"value,attr,ctx",AugAssign:"target,op,value",Await:"value",BinOp:"left,op,right",BitAnd:"",BitOr:"",BitXor:"",BoolOp:"op,values*",Break:"",Call:"func,args*,keywords*",ClassDef:"name,bases*,keywords*,body*,decorator_list*,type_params*",Compare:"left,ops*,comparators*",Constant:"value,kind?",Continue:"",Del:"",Delete:"targets*",Dict:"keys*,values*",DictComp:"key,value,generators*",Div:"",Eq:"",ExceptHandler:"type?,name?,body*",Expr:"value",Expression:"body",FloorDiv:"",For:"target,iter,body*,orelse*,type_comment?",FormattedValue:"value,conversion,format_spec?",FunctionDef:"name,args,body*,decorator_list*,returns?,type_comment?,type_params*",FunctionType:"argtypes*,returns",GeneratorExp:"elt,generators*",Global:"names*",Gt:"",GtE:"",If:"test,body*,orelse*",IfExp:"test,body,orelse",Import:"names*",ImportFrom:"module?,names*,level?",In:"",Interactive:"body*",Invert:"",Is:"",IsNot:"",JoinedStr:"values*",LShift:"",Lambda:"args,body",List:"elts*,ctx",ListComp:"elt,generators*",Load:"",Lt:"",LtE:"",MatMult:"",Match:"subject,cases*",MatchAs:"pattern?,name?",MatchClass:"cls,patterns*,kwd_attrs*,kwd_patterns*",MatchMapping:"keys*,patterns*,rest?",MatchOr:"patterns*",MatchSequence:"patterns*",MatchSingleton:"value",MatchStar:"name?",MatchValue:"value",Mod:"",Module:"body*,type_ignores*",Mult:"",Name:"id,ctx",NamedExpr:"target,value",Nonlocal:"names*",Not:"",NotEq:"",NotIn:"",Or:"",ParamSpec:"name",Pass:"",Pow:"",RShift:"",Raise:"exc?,cause?",Return:"value?",Set:"elts*",SetComp:"elt,generators*",Slice:"lower?,upper?,step?",Starred:"value,ctx",Store:"",Sub:"",Subscript:"value,slice,ctx",Try:"body*,handlers*,orelse*,finalbody*",TryStar:"body*,handlers*,orelse*,finalbody*",Tuple:"elts*,ctx",TypeAlias:"name,type_params*,value",TypeIgnore:"lineno,tag",TypeVar:"name,bound?",TypeVarTuple:"name",UAdd:"",USub:"",UnaryOp:"op,operand",While:"test,body*,orelse*",With:"items*,body*,type_comment?",Yield:"value?",YieldFrom:"value",alias:"name,asname?",arg:"arg,annotation?,type_comment?",arguments:"posonlyargs*,args*,vararg?,kwonlyargs*,kw_defaults*,kwarg?,defaults*",boolop:["And","Or"],cmpop:["Eq","NotEq","Lt","LtE","Gt","GtE","Is","IsNot","In","NotIn"],comprehension:"target,iter,ifs*,is_async",excepthandler:["ExceptHandler"],expr:["BoolOp","NamedExpr","BinOp","UnaryOp","Lambda","IfExp","Dict","Set","ListComp","SetComp","DictComp","GeneratorExp","Await","Yield","YieldFrom","Compare","Call","FormattedValue","JoinedStr","Constant","Attribute","Subscript","Starred","Name","List","Tuple","Slice"],expr_context:["Load","Store","Del"],keyword:"arg?,value",match_case:"pattern,guard?,body*",mod:["Module","Interactive","Expression","FunctionType"],operator:["Add","Sub","Mult","MatMult","Div","Mod","Pow","LShift","RShift","BitOr","BitXor","BitAnd","FloorDiv"],pattern:["MatchValue","MatchSingleton","MatchSequence","MatchMapping","MatchClass","MatchStar","MatchAs","MatchOr"],stmt:["FunctionDef","AsyncFunctionDef","ClassDef","Return","Delete","Assign","TypeAlias","AugAssign","AnnAssign","For","AsyncFor","While","If","With","AsyncWith","Match","Raise","Try","TryStar","Assert","Import","ImportFrom","Global","Nonlocal","Expr","Pass","Break","Continue"],type_ignore:["TypeIgnore
"],type_param:["TypeVar","ParamSpec","TypeVarTuple"],unaryop:["Invert","Not","UAdd","USub"],withitem:"context_expr,optional_vars?"};(function($B){$B.stdlib={};var pylist=["VFS_import","__future__","_aio","_codecs","_codecs_jp","_collections","_collections_abc","_compat_pickle","_compression","_contextvars","_csv","_dummy_thread","_frozen_importlib","_functools","_imp","_io","_markupbase","_multibytecodec","_operator","_py_abc","_pydatetime","_pydecimal","_queue","_signal","_socket","_sre","_struct","_sysconfigdata","_sysconfigdata_0_brython_","_testcapi","_thread","_threading_local","_typing","_weakref","_weakrefset","abc","antigravity","argparse","ast","asyncio","atexit","base64","bdb","binascii","bisect","browser.ajax","browser.highlight","browser.idbcache","browser.indexed_db","browser.local_storage","browser.markdown","browser.object_storage","browser.session_storage","browser.svg","browser.template","browser.timer","browser.ui","browser.webcomponent","browser.websocket","browser.worker","calendar","cmath","cmd","code","codecs","codeop","colorsys","configparser","contextlib","contextvars","copy","copyreg","csv","dataclasses","datetime","decimal","difflib","doctest","enum","errno","external_import","faulthandler","fnmatch","formatter","fractions","functools","gc","genericpath","getopt","getpass","gettext","glob","gzip","heapq","hmac","imp","inspect","interpreter","io","ipaddress","itertools","keyword","linecache","locale","mimetypes","nntplib","ntpath","numbers","opcode","operator","optparse","os","pathlib","pdb","pickle","pkgutil","platform","posixpath","pprint","profile","pwd","py_compile","pyclbr","pydoc","queue","quopri","random","re","re1","reprlib","secrets","select","selectors","shlex","shutil","signal","site","site-packages.__future__","site-packages.docs","site-packages.header","site-packages.test_sp","socket","sre_compile","sre_constants","sre_parse","stat","statistics","string","stringprep","struct","subprocess","symtable","sys","sysconfig","tabnanny","tarfile","tb","tempfile","test.namespace_pkgs.module_and_namespace_package.a_test","textwrap","this","threading","time","timeit","token","tokenize","traceback","turtle","types","typing","uu","uuid","warnings","weakref","webbrowser","zipfile","zipimport","zlib"];for(var i=0;i":"RARROW",".":"DOT","...":"ELLIPSIS","/":"SLASH","//":"DOUBLESLASH","//=":"DOUBLESLASHEQUAL","/=":"SLASHEQUAL",":":"COLON",":=":"COLONEQUAL",";":"SEMI","<":"LESS","<<":"LEFTSHIFT","<<=":"LEFTSHIFTEQUAL","<=":"LESSEQUAL","=":"EQUAL","==":"EQEQUAL",">":"GREATER",">=":"GREATEREQUAL",">>":"RIGHTSHIFT",">>=":"RIGHTSHIFTEQUAL","@":"AT","@=":"ATEQUAL","[":"LSQB","]":"RSQB","^":"CIRCUMFLEX","^=":"CIRCUMFLEXEQUAL","{":"LBRACE","|":"VBAR","|=":"VBAREQUAL","}":"RBRACE","~":"TILDE"};function ISTERMINAL(x){return x=NT_OFFSET}function ISEOF(x){return x==ENDMARKER}})(__BRYTHON__);(function($B){var _b_=$B.builtins;function is_whitespace(char){return" \n\r\t\f".includes(char)}var unprintable_re=/\p{Cc}|\p{Cf}|\p{Co}|\p{Cs}|\p{Zl}|\p{Zp}|\p{Zs}/u;const Other_ID_Start=[6277,6278,8472,8494,12443,12444].map((x=>String.fromCodePoint(x)));function is_ID_Start(char){return/\p{Letter}/u.test(char)||/\p{Nl}/u.test(char)||char=="_"||Other_ID_Start.includes(char)}const Other_ID_Continue=[183,903,4969,4976,4977,6618,8204,8205,12539,65381].map((x=>String.fromCodePoint(x)));function is_ID_Continue(char){return is_ID_Start(char)||/\p{Mn}|\p{Mc}|\p{Nd}|\p{Pc}/u.test(char)||Other_ID_Continue.includes(char)}$B.is_XID_Start=function(cp){let 
char=String.fromCodePoint(cp);if(!is_ID_Start(char)){return false}var norm=char.normalize("NFKC");if(!is_ID_Start(norm[0])){return false}for(let char of norm.substr(1)){if(!is_ID_Continue(char)){return false}}return true};$B.is_XID_Continue=function(cp){let char=String.fromCodePoint(cp);if(!is_ID_Continue(char)){return false}var norm=char.normalize("NFKC");for(let char of norm.substr(1)){if(!is_ID_Continue(char)){return false}}return true};$B.in_unicode_category=function(category,cp){if(isNaN(cp)){return false}try{var re=new RegExp("\\p{"+category+"}","u");return re.test(String.fromCodePoint(cp))}catch(err){return in_unicode_category(category,cp)}};function in_unicode_category(category,cp){var table=$B.unicode[category],start=0,end=table.length-1,len=table.length,ix=Math.floor(len/2),nb=0;var first=table[start],item=typeof first=="number"?first:first[0];if(cplast){return false}}else if(last[0]+last[1]100){console.log("infinite loop for",cp);alert()}item=table[ix];if(typeof item!="number"){item=item[0]}if(item==cp){return true}else if(item>cp){end=ix}else{start=ix}len=Math.floor((end-start)/2);if(end-start==1){break}ix=start+len}var step=table[start][2];if(step===undefined){return table[start][0]+table[start][1]>cp}return table[start][0]+step*table[start][1]>cp&&(cp-table[start][0])%step==0}const FSTRING_START="FSTRING_START",FSTRING_MIDDLE="FSTRING_MIDDLE",FSTRING_END="FSTRING_END";function ord(char){if(char.length==1){return char.charCodeAt(0)}var code=65536;code+=(char.charCodeAt(0)&1023)<<10;code+=char.charCodeAt(1)&1023;return code}function $last(array){return array[array.length-1]}var ops=".,:;+-*/%~^|&=<>[](){}@",op2=["**","//",">>","<<"],augm_op="+-*/%^|&=<>@",closing={"}":"{","]":"[",")":"("};function Token(type,string,lineno,col_offset,end_lineno,end_col_offset,line){var res={type:type,string:string,line:line,lineno:lineno,col_offset:col_offset,end_lineno:end_lineno,end_col_offset:end_col_offset};res.num_type=$B.py_tokens[type];if(type=="OP"){res.num_type=$B.py_tokens[$B.EXACT_TOKEN_TYPES[string]]}else if(type=="NAME"&&["async","await"].includes(string)){res.num_type=$B.py_tokens[string.toUpperCase()]}else if(type=="ENCODING"){res.num_type=$B.py_tokens.ENCODING}res.bytes=res.string;return res}function get_comment(parser,src,pos,line_num,line_start,token_name,line){var start=pos,ix;var t=[];while(true){if(pos>=src.length||(ix="\r\n".indexOf(src[pos]))>-1){if(parser&&parser.flags&$B.PyCF_TYPE_COMMENTS){var comment=src.substring(start-1,pos),mo=/^#\s*type\s*:(.*)/.exec(comment);if(mo){var is_type_ignore=false;if(mo[1].startsWith("ignore")){if(mo[1].length==6){is_type_ignore=true}else{var char=mo[1][6];if(char.charCodeAt(0)<=128&&/[a-zA-Z0-9]/.exec(char)===null){is_type_ignore=true}}}if(is_type_ignore){t.push(Token("TYPE_IGNORE",comment,line_num,start-line_start,line_num,pos-line_start+1,line))}else{t.push(Token("TYPE_COMMENT",comment,line_num,start-line_start,line_num,pos-line_start+1,line))}return{t:t,pos:pos}}}t.push(Token("COMMENT",src.substring(start-1,pos),line_num,start-line_start,line_num,pos-line_start+1,line));if(ix!==undefined){var nb=1;if(src[pos]=="\r"&&src[pos+1]=="\n"){nb++}else if(src[pos]===undefined){nb=0}t.push(Token(token_name,src.substr(pos,nb),line_num,pos-line_start+1,line_num,pos-line_start+nb+1,line));if(src[pos]===undefined){t.push(Token("NEWLINE","\n",line_num,pos-line_start+1,line_num,pos-line_start+2,""))}pos+=nb}return{t:t,pos:pos}}pos++}}function test_num(num_type,char){switch(num_type){case"":return 
$B.in_unicode_category("Nd",ord(char));case"x":return"0123456789abcdef".includes(char.toLowerCase());case"b":return"01".includes(char);case"o":return"01234567".includes(char);default:throw Error("unknown num type "+num_type)}}function nesting_level(token_modes){var ix=token_modes.length-1;while(ix>=0){var mode=token_modes[ix];if(mode.nesting!==undefined){return mode.nesting}ix--}}$B.tokenizer=function*(src,filename,mode,parser){var string_prefix=/^(r|u|R|U|f|F|fr|Fr|fR|FR|rf|rF|Rf|RF)$/,bytes_prefix=/^(b|B|br|Br|bR|BR|rb|rB|Rb|RB)$/;src=src.replace(/\r\n/g,"\n").replace(/\r/g,"\n");if(mode!="eval"&&!src.endsWith("\n")){src+="\n"}var lines=src.split("\n"),linenum=0,line_at={};for(let i=0,len=src.length;i=55296&&cp<=56319){cp=ord(src.substr(pos,2));char=src.substr(pos,2);pos++}pos++;if(token_mode!=save_mode){if(token_mode=="fstring"){fstring_buffer="";fstring_escape=false}else if(token_mode=="format_specifier"){format_specifier=""}}save_mode=token_mode;if(token_mode=="fstring"){if(char==token_mode.quote){if(fstring_escape){fstring_buffer+="\\"+char;fstring_escape=false;continue}if(token_mode.triple_quote){if(src.substr(pos,2)!=token_mode.quote.repeat(2)){fstring_buffer+=char;continue}char=token_mode.quote.repeat(3);pos+=2}if(fstring_buffer.length>0){yield Token(FSTRING_MIDDLE,fstring_buffer,line_num,fstring_start,line_num,fstring_start+fstring_buffer.length,line)}yield Token(FSTRING_END,char,line_num,pos,line_num,pos,line);token_modes.pop();token_mode=$B.last(token_modes);state=null;continue}else if(char=="{"){if(src.charAt(pos)=="{"){fstring_buffer+=char;pos++;continue}else{if(fstring_buffer.length>0){yield Token(FSTRING_MIDDLE,fstring_buffer,line_num,fstring_start,line_num,fstring_start+fstring_buffer.length,line)}token_mode="regular_within_fstring";fstring_expr_start=pos-line_start;state=null;token_modes.push(token_mode)}}else if(char=="}"){if(src.charAt(pos)=="}"){fstring_buffer+=char;pos++;continue}else{yield Token("OP",char,line_num,pos-line_start,line_num,pos-line_start+1,line);continue}}else if(char=="\\"){if(token_mode.raw){fstring_buffer+=char+char}else{if(fstring_escape){fstring_buffer+="\\"+char}fstring_escape=!fstring_escape}continue}else{if(fstring_escape){fstring_buffer+="\\"}fstring_buffer+=char;fstring_escape=false;if(char=="\n"){line_num++}continue}}else if(token_mode=="format_specifier"){if(char==quote){if(format_specifier.length>0){yield Token(FSTRING_MIDDLE,format_specifier,line_num,fstring_start,line_num,fstring_start+format_specifier.length,line);token_modes.pop();token_mode=$B.last(token_modes);continue}}else if(char=="{"){yield Token(FSTRING_MIDDLE,format_specifier,line_num,fstring_start,line_num,fstring_start+format_specifier.length,line);token_mode="regular_within_fstring";fstring_expr_start=pos-line_start;state=null;token_modes.push(token_mode)}else if(char=="}"){yield Token(FSTRING_MIDDLE,format_specifier,line_num,fstring_start,line_num,fstring_start+format_specifier.length,line);yield Token("OP",char,line_num,pos-line_start,line_num,pos-line_start+1,line);if(braces.length==0||$B.last(braces).char!=="{"){throw Error("wrong braces")}braces.pop();token_modes.pop();token_mode=$B.last(token_modes);continue}else{format_specifier+=char;continue}}switch(state){case"line_start":line=get_line_at(pos-1);line_start=pos;line_num++;if(mo=/^\f?(\r\n|\r|\n)/.exec(src.substr(pos-1))){yield Token("NL",mo[0],line_num,0,line_num,mo[0].length,line);pos+=mo[0].length-1;continue}else if(char=="#"){comment=get_comment(parser,src,pos,line_num,line_start,"NL",line);for(var item of 
comment.t){yield item}pos=comment.pos;state="line_start";continue}indent=0;if(char==" "){indent=1}else if(char=="\t"){indent=8}if(indent){var broken=false;while(pos0&&" \t".includes(src[pos])){console.log("indentation error 479");$B.raise_error_known_location(_b_.IndentationError,filename,line_num,pos-line_start,line_num,pos-line_start+1,line,"unindent does not match any outer indentation level")}if(src[pos]==" "){indent++}else if(src[pos]=="\t"){indent+=8}else if(src[pos]=="\\"&&src[pos+1]=="\n"){pos++;line_start=pos+2;line_num++;line=get_line_at(pos+2);broken=true}else{break}pos++}if(pos==src.length){line_num--;break}if(src[pos]=="#"){comment=get_comment(parser,src,pos+1,line_num,line_start,"NL",line);for(var item of comment.t){yield item}pos=comment.pos;continue}else if(src[pos]=="\\"){if(/^\f?(\r\n|\r|\n)/.exec(src[pos+1])){line_num++;pos++;continue}else{$B.raise_error_known_location(_b_.SyntaxError,filename,line_num,pos+2-line_start,line_num,pos+3-line_start,line,"unexpected character after line continuation character")}}else if(mo=/^\f?(\r\n|\r|\n)/.exec(src.substr(pos))){yield Token("NL","",line_num,pos-line_start+1,line_num,pos-line_start+1+mo[0].length,line);pos+=mo[0].length;continue}if(indents.length==0||indent>$last(indents)){indents.push(indent);yield Token("INDENT","",line_num,0,line_num,indent,line)}else if(indent<$last(indents)){var ix=indents.indexOf(indent);if(ix==-1){var message="unindent does not match "+"any outer indentation level";$B.raise_error_known_location(_b_.IndentationError,filename,line_num,0,line_num,0,line,message)}for(var i=indents.length-1;i>ix;i--){indents.pop();yield Token("DEDENT","",line_num,indent,line_num,indent,line)}}state=null}else{while(indents.length>0){indents.pop();yield Token("DEDENT","",line_num,indent,line_num,indent,line)}state=null;pos--}break;case null:switch(char){case'"':case"'":quote=char;triple_quote=src[pos]==char&&src[pos+1]==char;string_start=[line_num,pos-line_start,line_start];if(triple_quote){pos+=2}escaped=false;state="STRING";string="";prefix="";break;case"#":var token_name=braces.length>0?"NL":"NEWLINE";comment=get_comment(parser,src,pos,line_num,line_start,token_name,line);for(var item of comment.t){yield item}pos=comment.pos;if(braces.length==0){state="line_start"}else{state=null;line_num++;line_start=pos+1;line=get_line_at(pos)}break;case"0":state="NUMBER";number=char;num_type="";if(src[pos]&&"xbo".includes(src[pos].toLowerCase())){number+=src[pos];num_type=src[pos].toLowerCase();pos++}else if(src[pos]){var pos1=pos;while(pos1=3){yield Token("OP","...",line_num,dot_pos,line_num,dot_pos+3,line);op=op.substr(3)}for(var i=0;i0?"NL":"NEWLINE";mo=/^\f?(\r\n|\r|\n)/.exec(src.substr(pos-1));yield Token(token_name,mo[0],line_num,pos-line_start,line_num,pos-line_start+mo[0].length,line);pos+=mo[0].length-1;if(token_name=="NEWLINE"){state="line_start"}else{line_num++;line_start=pos+1;line=get_line_at(pos)}break;default:if($B.is_XID_Start(ord(char))){state="NAME";name=char}else if($B.in_unicode_category("Nd",ord(char))){state="NUMBER";num_type="";number=char}else if(ops.includes(char)){if(token_mode=="regular_within_fstring"&&(char==":"||char=="}")){if(char==":"){if(nesting_level(token_modes)==braces.length-1){let colon=Token("OP",char,line_num,pos-line_start-op.length+1,line_num,pos-line_start+1,line);colon.metadata=src.substr(line_start+fstring_expr_start,pos-line_start-fstring_expr_start-1);yield colon;token_modes.pop();token_mode="format_specifier";token_modes.push(token_mode);continue}}else{let 
closing_brace=Token("OP",char,line_num,pos-line_start-op.length+1,line_num,pos-line_start+1,line);closing_brace.metadata=src.substring(line_start+fstring_expr_start,pos-1);yield closing_brace;token_modes.pop();token_mode=token_modes[token_modes.length-1];if(braces.length==0||$B.last(braces).char!=="{"){throw Error("wrong braces")}braces.pop();continue}}var op=char;if(op2.includes(char+src[pos])){op=char+src[pos];pos++}if(src[pos]=="="&&(op.length==2||augm_op.includes(op))){op+=src[pos];pos++}else if(char=="-"&&src[pos]==">"||char==":"&&src[pos]=="="){op+=src[pos];pos++}if("[({".includes(char)){braces.push({char:char,pos:pos,line_num:line_num,line_start:line_start,line:line})}else if("])}".includes(char)){if(braces.length&&$last(braces).char==closing[char]){braces.pop()}else{braces.push({char:char,pos:pos,line_num:line_num,line_start:line_start,line:line})}}yield Token("OP",op,line_num,pos-line_start-op.length+1,line_num,pos-line_start+1,line)}else if(char=="!"){if(src[pos]=="="){yield Token("OP","!=",line_num,pos-line_start,line_num,pos-line_start+2,line);pos++}else{let token=Token("OP",char,line_num,pos-line_start,line_num,pos-line_start+1,line);token.metadata=src.substring(line_start+fstring_start+2,pos-1);yield token}}else if(char==" "||char=="\t"){}else{var cp=char.codePointAt(0),err_msg="invalid";if(unprintable_re.exec(char)){err_msg+=" non-printable"}var unicode=cp.toString(16).toUpperCase();while(unicode.length<4){unicode="0"+unicode}err_msg+=` character '${char}' (U+${unicode})`;if(char=="$"||char=="`"){err_msg="invalid syntax"}var err_token=Token("ERRORTOKEN",char,line_num,pos-line_start,line_num,pos-line_start+1,line);$B.raise_error_known_token(_b_.SyntaxError,filename,err_token,err_msg)}}break;case"NAME":if($B.is_XID_Continue(ord(char))){name+=char}else if(char=='"'||char=="'"){if(string_prefix.exec(name)||bytes_prefix.exec(name)){state="STRING";quote=char;triple_quote=src[pos]==quote&&src[pos+1]==quote;prefix=name;if(triple_quote){pos+=2}if(prefix.toLowerCase().includes("f")){fstring_start=pos-line_start-name.length;token_mode=new String("fstring");token_mode.nesting=braces.length;token_mode.quote=quote;token_mode.triple_quote=triple_quote;token_mode.raw=prefix.toLowerCase().includes("r");token_modes.push(token_mode);var s=triple_quote?quote.repeat(3):quote;var end_col=fstring_start+name.length+s.length;yield Token(FSTRING_START,prefix+s,line_num,fstring_start,line_num,end_col,line);continue}escaped=false;string_start=[line_num,pos-line_start-name.length,line_start];string=""}else{yield Token("NAME",name,line_num,pos-line_start-name.length,line_num,pos-line_start,line);state=null;pos--}}else{yield Token("NAME",name,line_num,pos-line_start-name.length,line_num,pos-line_start,line);state=null;pos--}break;case"STRING":switch(char){case quote:if(!escaped){var string_line=line;if(line_num>string_start[0]){string_line=src.substring(string_start[2]-1,pos+2)}var full_string;if(!triple_quote){full_string=prefix+quote+string+quote;yield Token("STRING",full_string,string_start[0],string_start[1],line_num,pos-line_start+1,string_line);state=null}else if(char+src.substr(pos,2)==quote.repeat(3)){full_string=prefix+quote.repeat(3)+string+quote.repeat(3);yield Token("STRING",full_string,string_start[0],string_start[1],line_num,pos-line_start+3,string_line);pos+=2;state=null}else{string+=char}}else{string+=char}escaped=false;break;case"\r":case"\n":if(!escaped&&!triple_quote){var msg=`unterminated string literal `+`(detected at line 
${line_num})`,line_num=string_start[0],col_offset=string_start[1];$B.raise_error_known_location(_b_.SyntaxError,filename,line_num,col_offset,line_num,col_offset,line,msg)}string+=char;line_num++;line_start=pos+1;if(char=="\r"&&src[pos]=="\n"){string+=src[pos];line_start++;pos++}line=get_line_at(pos);escaped=false;break;case"\\":string+=char;escaped=!escaped;break;default:escaped=false;string+=char;break}break;case"NUMBER":if(test_num(num_type,char)){number+=char}else if(char=="_"&&!number.endsWith(".")){if(number.endsWith("_")){throw SyntaxError("consecutive _ in number")}else if(src[pos]===undefined||!test_num(num_type,src[pos])){yield Token("NUMBER",number,line_num,pos-line_start-number.length,line_num,pos-line_start,line);state=null;pos--}else{number+=char}}else if(char=="."&&!number.includes(char)){number+=char}else if(char.toLowerCase()=="e"&&!number.toLowerCase().includes("e")){if("+-".includes(src[pos])||$B.in_unicode_category("Nd",ord(src[pos]))){number+=char}else{yield Token("NUMBER",number,line_num,pos-line_start-number.length,line_num,pos-line_start,line);state=null;pos--}}else if((char=="+"||char=="-")&&number.toLowerCase().endsWith("e")){number+=char}else if(char.toLowerCase()=="j"){number+=char;yield Token("NUMBER",number,line_num,pos-line_start-number.length+1,line_num,pos-line_start+1,line);state=null}else if(char.match(/\p{Letter}/u)){$B.raise_error_known_location(_b_.SyntaxError,filename,line_num,pos-line_start-number.length,line_num,pos-line_start,line,"invalid decimal literal")}else{yield Token("NUMBER",number,line_num,pos-line_start-number.length,line_num,pos-line_start,line);state=null;pos--}break}}switch(state){case"line_start":line_num++;break;case"NAME":yield Token("NAME",name,line_num,pos-line_start-name.length+1,line_num,pos-line_start+1,line);break;case"NUMBER":yield Token("NUMBER",number,line_num,pos-line_start-number.length+1,line_num,pos-line_start+1,line);break;case"STRING":line_num=string_start[0];line=lines[line_num-1];var msg=`unterminated ${triple_quote?"triple-quoted ":""}`+`string literal (detected at line ${line_num})`,col_offset=string_start[1];$B.raise_error_known_location(_b_.SyntaxError,filename,line_num,col_offset,line_num,col_offset,line,msg)}if(!src.endsWith("\n")&&state!=line_start){yield Token("NEWLINE","",line_num,pos-line_start+1,line_num,pos-line_start+1,line+"\n");line_num++}while(indents.length>0){indents.pop();yield Token("DEDENT","",line_num,0,line_num,0,"")}yield Token("ENDMARKER","",line_num,0,line_num,0,"")}})(__BRYTHON__);(function($B){var binary_ops={"+":"Add","-":"Sub","*":"Mult","/":"Div","//":"FloorDiv","%":"Mod","**":"Pow","<<":"LShift",">>":"RShift","|":"BitOr","^":"BitXor","&":"BitAnd","@":"MatMult"};var boolean_ops={and:"And",or:"Or"};var comparison_ops={"==":"Eq","!=":"NotEq","<":"Lt","<=":"LtE",">":"Gt",">=":"GtE",is:"Is",is_not:"IsNot",in:"In",not_in:"NotIn"};var unary_ops={unary_inv:"Invert",unary_pos:"UAdd",unary_neg:"USub",unary_not:"Not"};var op_types=$B.op_types=[binary_ops,boolean_ops,comparison_ops,unary_ops];var _b_=$B.builtins;var ast=$B.ast={};for(var kl in $B.ast_classes){var args=$B.ast_classes[kl],body="";if(typeof args=="string"){if(args.length>0){for(var arg of args.split(",")){if(arg.endsWith("*")){arg=arg.substr(0,arg.length-1);body+=` this.${arg} = ${arg} === undefined ? 
[] : ${arg}\n`}else if(arg.endsWith("?")){arg=arg.substr(0,arg.length-1);body+=` this.${arg} = ${arg}\n`}else{body+=` this.${arg} = ${arg}\n`}}}var arg_list=args.replace(/[*?]/g,"").split(",");ast[kl]=Function(...arg_list,body);ast[kl]._fields=args.split(",")}else{ast[kl]=args.map((x=>ast[x]))}ast[kl].$name=kl}$B.ast_js_to_py=function(obj){$B.create_python_ast_classes();if(obj===undefined){return _b_.None}else if(Array.isArray(obj)){return obj.map($B.ast_js_to_py)}else{var class_name=obj.constructor.$name,py_class=$B.python_ast_classes[class_name],py_ast_obj={__class__:py_class};if(py_class===undefined){return obj}for(var field of py_class._fields){py_ast_obj[field]=$B.ast_js_to_py(obj[field])}py_ast_obj._attributes=$B.fast_tuple([]);for(var loc of["lineno","col_offset","end_lineno","end_col_offset"]){if(obj[loc]!==undefined){py_ast_obj[loc]=obj[loc];py_ast_obj._attributes.push(loc)}}return py_ast_obj}};$B.ast_py_to_js=function(obj){if(obj===undefined||obj===_b_.None){return undefined}else if(Array.isArray(obj)){return obj.map($B.ast_py_to_js)}else if(typeof obj=="string"){return obj}else{var class_name=$B.class_name(obj),js_class=$B.ast[class_name];if(js_class===undefined){return obj}var js_ast_obj=new js_class;for(var field of js_class._fields){if(field.endsWith("?")||field.endsWith("*")){field=field.substr(0,field.length-1)}js_ast_obj[field]=$B.ast_py_to_js(obj[field])}for(var loc of["lineno","col_offset","end_lineno","end_col_offset"]){if(obj[loc]!==undefined){js_ast_obj[loc]=obj[loc]}}return js_ast_obj}};$B.create_python_ast_classes=function(){if($B.python_ast_classes){return}$B.python_ast_classes={};for(var klass in $B.ast_classes){$B.python_ast_classes[klass]=function(kl){var _fields,raw_fields;if(typeof $B.ast_classes[kl]=="string"){if($B.ast_classes[kl]==""){raw_fields=_fields=[]}else{raw_fields=$B.ast_classes[kl].split(",");_fields=raw_fields.map((x=>x.endsWith("*")||x.endsWith("?")?x.substr(0,x.length-1):x))}}var cls=$B.make_class(kl),$defaults={},slots={},nb_args=0;if(raw_fields){for(let i=0,len=_fields.length;i2){console.log("wrong js indent");console.log(res)}level=0}try{res+=(add_spaces?indentation.repeat(level):"")+line+"\n"}catch(err){console.log(res);throw err}if(line.endsWith("{")){level++}else if(add_closing_brace){level--;if(level<0){level=0}try{res+=indentation.repeat(level)+"}\n"}catch(err){console.log(res);throw err}}last_is_backslash=line.endsWith("\\");last_is_var_and_comma=line.endsWith(",")&&(line.startsWith("var ")||last_is_var_and_comma)}return res};function get_docstring(node){var doc_string=_b_.None;if(node.body.length>0){var firstchild=node.body[0];if(firstchild instanceof $B.ast.Constant&&typeof firstchild.value=="string"){doc_string=firstchild.value}}return doc_string}var s_escaped='abfnrtvxuU"0123456789'+"'"+"\\",is_escaped={};for(var i=0;i>10)+String.fromCharCode(56320|value&1023)}function test_escape(text,antislash_pos){var seq_end,mo;mo=/^[0-7]{1,3}/.exec(text.substr(antislash_pos+1));if(mo){return[String.fromCharCode(parseInt(mo[0],8)),1+mo[0].length]}switch(text[antislash_pos+1]){case"x":mo=/^[0-9A-F]{0,2}/i.exec(text.substr(antislash_pos+2));if(mo[0].length!=2){seq_end=antislash_pos+mo[0].length+1;$token.value.start[1]=seq_end;throw Error("(unicode error) 'unicodeescape' codec can't decode "+`bytes in position ${antislash_pos}-${seq_end}: truncated `+"\\xXX 
escape")}else{return[String.fromCharCode(parseInt(mo[0],16)),2+mo[0].length]}break;case"u":mo=/^[0-9A-F]{0,4}/i.exec(text.substr(antislash_pos+2));if(mo[0].length!=4){seq_end=antislash_pos+mo[0].length+1;$token.value.start[1]=seq_end;throw Error("(unicode error) 'unicodeescape' codec can't decode "+`bytes in position ${antislash_pos}-${seq_end}: truncated `+"\\uXXXX escape")}else{return[String.fromCharCode(parseInt(mo[0],16)),2+mo[0].length]}break;case"U":mo=/^[0-9A-F]{0,8}/i.exec(text.substr(antislash_pos+2));if(mo[0].length!=8){seq_end=antislash_pos+mo[0].length+1;$token.value.start[1]=seq_end;throw Error("(unicode error) 'unicodeescape' codec can't decode "+`bytes in position ${antislash_pos}-${seq_end}: truncated `+"\\uXXXX escape")}else{let value=parseInt(mo[0],16);if(value>1114111){throw Error("invalid unicode escape "+mo[0])}else if(value>=65536){return[SurrogatePair(value),2+mo[0].length]}else{return[String.fromCharCode(value),2+mo[0].length]}}}}$B.test_escape=test_escape;function unindent(src){var lines=src.split("\n"),line,global_indent,indent,first,unindented_lines=[];var min_indent;for(var line of lines){if(/^\s*$/.exec(line)){continue}indent=line.match(/^\s*/)[0].length;if(indent==0){return src}if(min_indent===undefined){min_indent=indent}if(indent0;options.python_extension=options.python_extension||".py";if($B.$options.args){$B.__ARGV=$B.$options.args}else{$B.__ARGV=_b_.list.$factory([])}$B.options_parsed=true;return options};if(!($B.isWebWorker||$B.isNode)){var startup_observer=new MutationObserver((function(mutations){for(var mutation of mutations){for(var addedNode of mutation.addedNodes){addPythonScript(addedNode)}}}));startup_observer.observe(document.documentElement,{childList:true,subtree:true})}var brython_options={};var python_scripts=[];if(!$B.isWebWorker){python_scripts=python_scripts.concat(Array.from(document.querySelectorAll('script[type="text/python"]'))).concat(Array.from(document.querySelectorAll('script[type="text/python3"]')));var onload;addEventListener("DOMContentLoaded",(function(ev){if(ev.target.body){onload=ev.target.body.onload}if(!onload){ev.target.body.onload=function(){return brython()}}else{ev.target.body.onload=function(){onload();if(!status.brython_called){brython()}}}}));class BrythonOptions extends HTMLElement{constructor(){super()}connectedCallback(){for(var attr of this.getAttributeNames()){brython_options[attr]=convert_option(attr,this.getAttribute(attr))}}}customElements.define("brython-options",BrythonOptions)}var defined_ids={},script_to_id=new Map,id_to_script={};function addPythonScript(addedNode){if(addedNode.tagName=="SCRIPT"&&(addedNode.type=="text/python"||addedNode.type=="text/python3")){python_scripts.push(addedNode)}}var status={brython_called:false,first_unnamed_script:true};$B.dispatch_load_event=function(script){script.dispatchEvent(new Event("load"))};function injectPythonScript(addedNode){if(addedNode.tagName=="SCRIPT"&&addedNode.type=="text/python"){set_script_id(addedNode);run_scripts([addedNode])}}function set_script_id(script){if(script_to_id.has(script)){}else if(script.id){if(defined_ids[script.id]){throw Error("Brython error : Found 2 scripts with the "+"same id '"+script.id+"'")}else{defined_ids[script.id]=true}script_to_id.set(script,script.id)}else{if(script.className==="webworker"){throw _b_.AttributeError.$factory("webworker script has no attribute 'id'")}if(status.first_unnamed_script){script_to_id.set(script,"__main__");status.first_unnamed_script=false}else{script_to_id.set(script,"__main__"+$B.UUID())}}var 
id=script_to_id.get(script);id_to_script[id]=script;return id}var brython=$B.parser.brython=function(options){$B.$options=$B.parse_options(options);if(!($B.isWebWorker||$B.isNode)){if(!status.brython_called){status.brython_called=true;startup_observer.disconnect();var inject_observer=new MutationObserver((function(mutations){for(var mutation of mutations){for(var addedNode of mutation.addedNodes){injectPythonScript(addedNode)}}}));inject_observer.observe(document.documentElement,{childList:true,subtree:true})}}else if($B.isNode){return}for(var python_script of python_scripts){set_script_id(python_script)}var scripts=[];$B.script_path=_window.location.href.split("#")[0];var $href=$B.script_path=_window.location.href.split("#")[0],$href_elts=$href.split("/");$href_elts.pop();if($B.isWebWorker||$B.isNode){$href_elts.pop()}$B.curdir=$href_elts.join("/");var kk=Object.keys(_window);var ids=$B.get_page_option("ids");if(ids!==undefined){if(!Array.isArray(ids)){throw _b_.ValueError.$factory("ids is not a list")}if(ids.length==0){}for(var id of ids){var script=document.querySelector(`script[id="${id}"]`);if(script){set_script_id(script);scripts.push(script)}else{console.log(`no script with id '${id}'`);throw _b_.KeyError.$factory(`no script with id '${id}'`)}}}else if($B.isWebWorker){}else{scripts=python_scripts.slice()}run_scripts(scripts)};function convert_option(option,value){if(option=="debug"){if(typeof value=="string"&&value.match(/^\d+$/)){return parseInt(value)}else{if(value!==null&&value!==undefined){console.debug(`Invalid value for debug: ${value}`)}}}else if(option=="cache"||option=="indexeddb"||option=="static_stdlib_import"){if(value=="1"||value.toLowerCase()=="true"){return true}else if(value=="0"||value.toLowerCase()=="false"){return false}else{console.debug(`Invalid value for ${option}: ${value}`)}}else if(option=="ids"||option=="pythonpath"||option=="args"){if(typeof value=="string"){if(value.trim().length==0){return[]}return value.trim().split(/\s+/)}}return value}const default_option={args:[],cache:false,debug:1,indexeddb:true,python_extension:".py",static_stdlib_import:true};$B.get_filename=function(){if($B.count_frames()>0){return $B.get_frame_at(0).__file__}};$B.get_filename_for_import=function(){var filename=$B.get_filename();if($B.import_info[filename]===undefined){$B.make_import_paths(filename)}return filename};$B.get_page_option=function(option){option=option.toLowerCase();if($B.$options.hasOwnProperty(option)){return $B.$options[option]}else if(brython_options.hasOwnProperty(option)){return brython_options[option]}else{return default_option[option]}};$B.get_option=function(option,err){var filename=$B.script_filename;if(err&&err.$frame_obj){filename=$B.get_frame_at(0,err.$frame_obj).__file__}else{filename=$B.get_filename()}return $B.get_option_from_filename(option,filename)};$B.get_option_from_filename=function(option,filename){if(!filename||!$B.scripts[filename]){return $B.get_page_option(option)}var value=$B.scripts[filename].getAttribute(option);if(value!==null){return convert_option(option,value)}else{return $B.get_page_option(option)}};function run_scripts(_scripts){var webworkers=_scripts.filter((script=>script.className==="webworker")),scripts=_scripts.filter((script=>script.className!=="webworker"));var module_name,filename;if(scripts.length>0||$B.isWebWorker){if($B.get_page_option("indexedDB")&&$B.has_indexedDB&&$B.hasOwnProperty("VFS")){$B.tasks.push([$B.idb_open])}}var src;for(var worker of 
webworkers){if(worker.src){$B.tasks.push([$B.ajax_load_script,{script:worker,name:worker.id,url:worker.src,is_ww:true}])}else{$B.webworkers[worker.id]=worker;filename=$B.script_filename=$B.strip_host($B.script_path+"#"+worker.id);var source=worker.innerText||worker.textContent;source=unindent(source);source=source.replace(/^\n/,"");$B.url2name[filename]=worker.id;$B.file_cache[filename]=source;$B.scripts[filename]=worker;$B.dispatch_load_event(worker)}}for(var script of scripts){module_name=script_to_id.get(script);if(script.src){$B.tasks.push([$B.ajax_load_script,{script:script,name:module_name,url:script.src,id:script.id}])}else{filename=$B.script_filename=$B.strip_host($B.script_path+"#"+module_name);src=script.innerHTML||script.textContent;src=unindent(src);src=src.replace(/^\n/,"");if(src.endsWith("\n")){src=src.substr(0,src.length-1)}$B.file_cache[filename]=src;$B.url2name[filename]=module_name;$B.scripts[filename]=script;$B.tasks.push([$B.run_script,script,src,module_name,$B.script_path,true])}}$B.loop()}$B.run_script=function(script,src,name,url,run_loop){var filename=$B.script_filename=$B.strip_host(url);var script_elts=url.split("/");script_elts.pop();$B.script_dir=script_elts.join("/");$B.file_cache[filename]=src;$B.url2name[filename]=name;$B.scripts[filename]=script;$B.make_import_paths(filename);_b_.__debug__=$B.get_option("debug")>0;var root,js;try{root=$B.py2js({src:src,filename:filename},name,name);js=root.to_js();if($B.get_option_from_filename("debug",filename)>1){console.log($B.format_indent(js,0))}}catch(err){console.log("err",err);return $B.handle_error($B.exception(err))}var _script={__doc__:get_docstring(root._ast),js:js,__name__:name,__file__:url,script_element:script};$B.tasks.push(["execute",_script]);if(run_loop){$B.loop()}};$B.brython=brython})(__BRYTHON__);globalThis.brython=__BRYTHON__.brython;if(__BRYTHON__.isNode){global.__BRYTHON__=__BRYTHON__;module.exports={__BRYTHON__:__BRYTHON__}}(function($B){var _b_=$B.builtins;if($B.VFS_timestamp&&$B.VFS_timestamp>$B.timestamp){$B.timestamp=$B.VFS_timestamp}function idb_load(evt,module){var res=evt.target.result;var debug=$B.get_page_option("debug");if(res===undefined||res.timestamp!=$B.timestamp||$B.VFS[module]&&res.source_ts!==$B.VFS[module].timestamp){if($B.VFS[module]!==undefined){var elts=$B.VFS[module],ext=elts[0],source=elts[1];if(ext==".py"){var is_package=elts.length==4,__package__;if(is_package){__package__=module}else{var parts=module.split(".");parts.pop();__package__=parts.join(".")}$B.imported[module]=$B.module.$factory(module,"",__package__);$B.url2name[module]=module;try{$B.py2js({src:source,filename:module},module,module)}catch(err){$B.handle_error(err)}delete $B.imported[module];if(debug>1){console.log("precompile",module)}}else{console.log("bizarre",module,ext)}}else{}}else{if(res.is_package){$B.precompiled[module]=[res.content]}else{$B.precompiled[module]=res.content}if(res.imports.length>0){if(debug>1){console.log(module,"imports",res.imports)}var subimports=res.imports.split(",");for(var i=0;i1){console.info("using indexedDB for stdlib modules cache")}var tx=db.transaction("modules","readwrite"),store=tx.objectStore("modules"),record,outdated=[];var openCursor=store.openCursor();openCursor.onerror=function(evt){console.log("open cursor error",evt)};openCursor.onsuccess=function(evt){var 
cursor=evt.target.result;if(cursor){record=cursor.value;if(record.timestamp==$B.timestamp){if(!$B.VFS||!$B.VFS[record.name]||$B.VFS[record.name].timestamp==record.source_ts){if(record.is_package){$B.precompiled[record.name]=[record.content]}else{$B.precompiled[record.name]=record.content}if($B.get_page_option("debug")>1){console.info("load from cache",record.name)}}else{outdated.push(record.name)}}else{outdated.push(record.name)}cursor.continue()}else{if($B.get_page_option("debug")>1){console.log("done")}$B.outdated=outdated;loop()}}}};idb_cx.onupgradeneeded=function(){console.info("upgrade needed");var db=idb_cx.result,store=db.createObjectStore("modules",{keyPath:"name"});store.onsuccess=loop};idb_cx.onerror=function(){console.info("could not open indexedDB database");$B.idb_cx=null;$B.idb_name=null;$B.$options.indexeddb=false;loop()}};$B.ajax_load_script=function(s){var script=s.script,url=s.url,name=s.name,rel_path=url.substr($B.script_dir.length+1);if($B.files&&$B.files.hasOwnProperty(rel_path)){var src=atob($B.files[rel_path].content);$B.tasks.splice(0,0,[$B.run_script,script,src,name,url,true]);loop()}else if($B.protocol!="file"){var filename=$B.script_filename=$B.strip_host(url);$B.scripts[filename]=script;var req=new XMLHttpRequest,cache=$B.get_option("cache"),qs=cache?"":(url.search(/\?/)>-1?"&":"?")+Date.now();req.open("GET",url+qs,true);req.onreadystatechange=function(){if(this.readyState==4){if(this.status==200){var src=this.responseText;if(s.is_ww){$B.webworkers[name]=script;$B.file_cache[url]=src;$B.dispatch_load_event(script)}else{$B.tasks.splice(0,0,[$B.run_script,script,src,name,url,true])}loop()}else if(this.status==404){throw Error(url+" not found")}}};req.send()}else{throw _b_.IOError.$factory("can't load external script at "+script.url+" (Ajax calls not supported with protocol file:///)")}};function add_jsmodule(module,source){source+="\nvar $locals_"+module.replace(/\./g,"_")+" = $module";$B.precompiled[module]=source}$B.inImported=function(module){if($B.imported.hasOwnProperty(module)){}else if(__BRYTHON__.VFS&&__BRYTHON__.VFS.hasOwnProperty(module)){var elts=__BRYTHON__.VFS[module];if(elts===undefined){console.log("bizarre",module)}var ext=elts[0],source=elts[1];if(ext==".py"){if($B.idb_cx&&!$B.idb_cx.$closed){$B.tasks.splice(0,0,[idb_get,module])}}else{add_jsmodule(module,source)}}else{console.log("bizarre",module)}loop()};function report_precompile(mod){if(!$B.isWebWorker){document.dispatchEvent(new CustomEvent("precompile",{detail:"remove outdated "+mod+" from cache"}))}}function report_close(){if(!$B.isWebWorker){document.dispatchEvent(new CustomEvent("precompile",{detail:"close"}))}}function report_done(){if(!$B.isWebWorker){document.dispatchEvent(new CustomEvent("brython_done",{detail:_b_.dict.$from_js($B.$options)}))}}var loop=$B.loop=function(){if($B.tasks.length==0){if($B.idb_cx&&!$B.idb_cx.$closed){var db=$B.idb_cx.result,tx=db.transaction("modules","readwrite"),store=tx.objectStore("modules");while($B.outdated.length>0){let module=$B.outdated.pop(),req=store.delete(module);req.onsuccess=function(mod){return function(){if($B.get_page_option("debug")>1){console.info("delete outdated",mod)}report_precompile(mod)}}(module)}report_close();$B.idb_cx.result.close();$B.idb_cx.$closed=true}report_done();return}var task=$B.tasks.shift(),func=task[0],args=task.slice(1);if(func=="execute"){let 
script=task[1],script_id=script.__name__.replace(/\./g,"_"),module=$B.module.$factory(script.__name__);module.__file__=script.__file__;module.__doc__=script.__doc__;$B.imported[script_id]=module;try{var modobj=new Function(script.js+`\nreturn locals`)();for(var key in modobj){if(!key.startsWith("$")){module[key]=modobj[key]}}$B.dispatch_load_event(script.script_element)}catch(err){if(err.__class__===undefined){if(err.$py_exc){err=err.$py_exc}else{$B.freeze(err);var stack=err.$stack,frame_obj=err.$frame_obj,linenums=err.$linenums;var lineNumber=err.lineNumber;if(lineNumber!==undefined){console.log("around line",lineNumber);console.log(script.js.split("\n").slice(lineNumber-4,lineNumber).join("\n"))}$B.print_stack();err=_b_.RuntimeError.$factory(err+"");err.$stack=stack;err.$frame_obj=frame_obj;err.$linenums=linenums}}$B.handle_error(err)}loop()}else{try{func.apply(null,args)}catch(err){$B.handle_error(err)}}};$B.tasks=[];$B.has_indexedDB=self.indexedDB!==undefined})(__BRYTHON__);(function($B){var _b_=$B.builtins,_window=globalThis,isWebWorker="undefined"!==typeof WorkerGlobalScope&&"function"===typeof importScripts&&navigator instanceof WorkerNavigator;function missing_required_kwonly(fname,args){var plural=args.length==1?"":"s",arg_list;args=args.map((x=>`'${x}'`));if(args.length==1){arg_list=args[0]}else if(args.length==2){arg_list=args[0]+" and "+args[1]}else{arg_list=args.slice(0,args.length-1).join(", ")+", and "+args[args.length-1]}throw _b_.TypeError.$factory(fname+"() "+`missing ${args.length} required keyword-only argument${plural}: `+arg_list)}function missing_required_pos(fname,args){var plural=args.length==1?"":"s",arg_list;args=args.map((x=>`'${x}'`));if(args.length==1){arg_list=args[0]}else if(args.length==2){arg_list=args[0]+" and "+args[1]}else{arg_list=args.slice(0,args.length-1).join(", ")+", and "+args[args.length-1]}throw _b_.TypeError.$factory(fname+"() "+`missing ${args.length} required positional argument${plural}: `+arg_list)}function multiple_values(fname,arg){throw _b_.TypeError.$factory(fname+"() "+`got multiple values for argument '${arg}'`)}function pos_only_passed_as_keyword(fname,arg){return _b_.TypeError.$factory(fname+`() got some positional-only arguments passed as keyword arguments:`+` '${arg}'`)}function too_many_pos_args(fname,kwarg,arg_names,nb_kwonly,defaults,args,slots){var nb_pos=args.length,last=$B.last(args);if(last.$kw){if(!kwarg){var kw=$B.parse_kwargs(last.$kw,fname);for(var k in kw){if(!slots.hasOwnProperty(k)){var suggestion=$B.offer_suggestions_for_unexpected_keyword_error(arg_names,k);throw unexpected_keyword(fname,k,suggestion)}}}nb_pos--}var nb_def=defaults.length;var expected=arg_names.length-nb_kwonly,plural=expected==1?"":"s";if(nb_def){expected=`from ${expected-nb_def} to ${expected}`;plural="s"}var verb=nb_pos==1?"was":"were";return _b_.TypeError.$factory(fname+"() takes "+`${expected} positional argument${plural} but ${nb_pos} ${verb} given`)}function unexpected_keyword(fname,k,suggestion){var msg=`${fname}() got an unexpected keyword argument '${k}'`;if(suggestion!==_b_.None){msg+=`. 
Did you mean: '${suggestion}'?`}return _b_.TypeError.$factory(msg)}var empty={};function args0(f,args){var arg_names=f.$infos.arg_names,code=f.$infos.__code__,slots={};for(var arg_name of arg_names){slots[arg_name]=empty}return $B.parse_args(args,f.$infos.__name__,code.co_argcount,slots,arg_names,f.$infos.__defaults__,f.$infos.__kwdefaults__,f.$infos.vararg,f.$infos.kwarg,code.co_posonlyargcount,code.co_kwonlyargcount)}function args0_NEW(fct,args){const LAST_ARGS=args[args.length-1];const HAS_KW=LAST_ARGS!==undefined&&LAST_ARGS!==null&&LAST_ARGS.$kw!==undefined;let ARGS_POS_COUNT=args.length,ARGS_NAMED=null;if(HAS_KW){--ARGS_POS_COUNT;ARGS_NAMED=LAST_ARGS.$kw}const result={};const $INFOS=fct.$infos,$CODE=$INFOS.__code__,PARAMS_NAMES=$INFOS.arg_names,PARAMS_POS_COUNT=$CODE.co_argcount,PARAMS_NAMED_COUNT=$CODE.co_kwonlyargcount,PARAMS_VARARGS_NAME=$INFOS.vararg,PARAMS_KWARGS_NAME=$INFOS.kwarg,PARAMS_POS_DEFAULTS=$INFOS.__defaults__,PARAMS_POS_DEFAULTS_COUNT=PARAMS_POS_DEFAULTS.length,PARAMS_POS_DEFAULTS_OFFSET=PARAMS_POS_COUNT-PARAMS_POS_DEFAULTS_COUNT;const min=Math.min(ARGS_POS_COUNT,PARAMS_POS_COUNT);let offset=0;for(;offsetPARAMS_POS_COUNT){args0(fct,args);throw new Error("Too much positional arguments given (args0 should have raised an error) !")}if(ARGS_NAMED===null){if(offset=nb_pos_or_kw){if(vararg){varargs.push(arg)}else{throw too_many_pos_args(fname,kwarg,arg_names,nb_kwonly,defaults,args,slots)}}else{if(i0){throw missing_required_kwonly(fname,missing_kwonly)}if(!kwarg){for(var k in kw){if(!slots.hasOwnProperty(k)){var suggestion=$B.offer_suggestions_for_unexpected_keyword_error(arg_names,k);throw unexpected_keyword(fname,k,suggestion)}}}for(var k in kw){if(kw[k]===empty){continue}if(!slots.hasOwnProperty(k)){if(kwarg){extra_kw[k]=kw[k]}}else if(slots[k]!==empty){if(posonly_set[k]&&kwarg){extra_kw[k]=kw[k]}else{throw multiple_values(fname,k)}}else{slots[k]=kw[k]}}if(kwarg){slots[kwarg]=_b_.dict.$from_js(extra_kw)}if(vararg){slots[vararg]=$B.fast_tuple(varargs)}return slots};$B.parse_kwargs=function(kw_args,fname){var kwa=kw_args[0];for(var i=1,len=kw_args.length;i0||y!==undefined&&y.$kw){throw _b_.TypeError.$factory(name+"() takes no keyword arguments")}};$B.check_nb_args_no_kw=function(name,expected,args){var len=args.length,last=args[len-1];if(last&&last.$kw){if(last.$kw.length==2&&Object.keys(last.$kw[0]).length==0){len--}else{throw _b_.TypeError.$factory(name+"() takes no keyword arguments")}}if(len!=expected){if(expected==0){throw _b_.TypeError.$factory(name+"() takes no argument"+" ("+len+" given)")}else{throw _b_.TypeError.$factory(name+"() takes exactly "+expected+" argument"+(expected<2?"":"s")+" ("+len+" given)")}}};$B.get_class=function(obj){if(obj===null){return $B.imported.javascript.NullType}if(obj===undefined){return $B.imported.javascript.UndefinedType}var klass=obj.__class__||obj.$tp_class;if(klass===undefined){switch(typeof obj){case"number":if(Number.isInteger(obj)){return _b_.int}break;case"string":return _b_.str;case"boolean":return _b_.bool;case"function":if(!obj.$js_func){return $B.function}case"object":if(Array.isArray(obj)){if(obj.$is_js_array){return $B.js_array}else if(Object.getPrototypeOf(obj)===Array.prototype){obj.__class__=_b_.list;return _b_.list}}else if(obj instanceof $B.str_dict){return _b_.dict}else if(typeof Node!=="undefined"&&obj instanceof Node){if(obj.tagName){return $B.imported["browser.html"][obj.tagName]||$B.DOMNode}return $B.DOMNode}else if(obj instanceof Event){return $B.DOMEvent}break}}if(klass===undefined){return 
$B.get_jsobj_class(obj)}return klass};$B.class_name=function(obj){var klass=$B.get_class(obj);if(klass===$B.JSObj){return"Javascript "+obj.constructor.name}else{return klass.__name__}};$B.make_js_iterator=function(iterator,frame,lineno){var set_lineno=$B.set_lineno;if(frame===undefined){if($B.frame_obj===null){function set_lineno(){}}else{frame=$B.frame_obj.frame;lineno=frame.$lineno}}if(iterator.__class__===_b_.range){var obj={ix:iterator.start};if(iterator.step>0){return{[Symbol.iterator](){return this},next(){set_lineno(frame,lineno);if(obj.ix>=iterator.stop){return{done:true,value:null}}var value=obj.ix;obj.ix+=iterator.step;return{done:false,value:value}}}}else{return{[Symbol.iterator](){return this},next(){set_lineno(frame,lineno);if(obj.ix<=iterator.stop){return{done:true,value:null}}var value=obj.ix;obj.ix+=iterator.step;return{done:false,value:value}}}}}if(iterator[Symbol.iterator]&&!iterator.$is_js_array){var it=iterator[Symbol.iterator]();return{[Symbol.iterator](){return this},next(){set_lineno(frame,lineno);return it.next()}}}var next_func=$B.$call($B.$getattr(_b_.iter(iterator),"__next__"));return{[Symbol.iterator](){return this},next(){set_lineno(frame,lineno);try{var value=next_func();return{done:false,value:value}}catch(err){if($B.is_exc(err,[_b_.StopIteration])){return{done:true,value:null}}throw err}}}};$B.unpacker=function(obj,nb_targets,has_starred){var position,position_rank=3;if(has_starred){var nb_after_starred=arguments[3];position_rank++}position=$B.decode_position(arguments[position_rank]);var t=_b_.list.$factory(obj),right_length=t.length,left_length=nb_targets+(has_starred?nb_after_starred-1:0);if(!has_starred&&right_lengthleft_length){var exc=_b_.ValueError.$factory("too many values to unpack "+`(expected ${left_length})`);if(position){$B.set_exception_offsets(exc,position)}throw exc}t.index=-1;t.read_one=function(){t.index++;return t[t.index]};t.read_rest=function(){t.index++;var res=t.slice(t.index,t.length-nb_after_starred);t.index=t.length-nb_after_starred-1;return res};return t};$B.set_lineno=function(frame,lineno){frame.$lineno=lineno;if(frame.$f_trace!==_b_.None){$B.trace_line()}return true};$B.get_method_class=function(method,ns,qualname,refs){var klass=ns;if(method.$infos&&method.$infos.$class){return method.$infos.$class}for(var ref of refs){if(klass[ref]===undefined){return $B.make_class(qualname)}klass=klass[ref]}return klass};$B.warn=function(klass,message,filename,token){var warning=klass.$factory(message);warning.filename=filename;if(klass===_b_.SyntaxWarning){warning.lineno=token.start[0];warning.offset=token.start[1];warning.end_lineno=token.end[0];warning.end_offset=token.end[1];warning.text=token.line;warning.args[1]=$B.fast_tuple([filename,warning.lineno,warning.offset,warning.text,warning.end_lineno,warning.end_offset])}$B.imported._warnings.warn(warning)};function index_error(obj){var type=typeof obj=="string"?"string":"list";throw _b_.IndexError.$factory(type+" index out of range")}$B.$getitem=function(obj,item,position){var is_list=Array.isArray(obj)&&obj.__class__===_b_.list,is_dict=obj.__class__===_b_.dict&&!obj.$jsobj;if(typeof item=="number"){if(is_list||typeof obj=="string"){item=item>=0?item:obj.length+item;if(obj[item]!==undefined){return obj[item]}else{index_error(obj)}}}else if(item.valueOf&&typeof item.valueOf()=="string"&&is_dict){return _b_.dict.$getitem(obj,item)}if(obj.$is_class){var class_gi=$B.$getattr(obj,"__class_getitem__",_b_.None);if(class_gi!==_b_.None){return $B.$call(class_gi)(item)}else 
if(obj.__class__){class_gi=$B.$getattr(obj.__class__,"__getitem__",_b_.None);if(class_gi!==_b_.None){return class_gi(obj,item)}else{throw _b_.TypeError.$factory("'"+$B.class_name(obj.__class__)+"' object is not subscriptable")}}}if(is_list){return _b_.list.$getitem(obj,item)}if(is_dict){return _b_.dict.$getitem(obj,item)}var gi=$B.$getattr(obj.__class__||$B.get_class(obj),"__getitem__",_b_.None);if(gi!==_b_.None){return gi(obj,item)}var exc=_b_.TypeError.$factory("'"+$B.class_name(obj)+"' object is not subscriptable");if(position){$B.set_exception_offsets(exc,$B.decode_position(position))}throw exc};$B.getitem_slice=function(obj,slice){var res;if(Array.isArray(obj)&&obj.__class__===_b_.list){if(slice.start===_b_.None&&slice.stop===_b_.None){if(slice.step===_b_.None||slice.step==1){res=obj.slice()}else if(slice.step==-1){res=obj.slice().reverse()}}else if(slice.step===_b_.None){if(slice.start===_b_.None){slice.start=0}if(slice.stop===_b_.None){slice.stop=obj.length}if(typeof slice.start=="number"&&typeof slice.stop=="number"){if(slice.start<0){slice.start+=obj.length}if(slice.stop<0){slice.stop+=obj.length}res=obj.slice(slice.start,slice.stop)}}if(res){res.__class__=obj.__class__;return res}else{return _b_.list.$getitem(obj,slice)}}else if(typeof obj=="string"){return _b_.str.__getitem__(obj,slice)}return $B.$getattr($B.get_class(obj),"__getitem__")(obj,slice)};$B.$getattr_pep657=function(obj,attr,position){try{return $B.$getattr(obj,attr)}catch(err){$B.set_exception_offsets(err,$B.decode_position(position));throw err}};$B.set_list_slice=function(obj,start,stop,value){if(start===null){start=0}else{start=$B.$GetInt(start);if(start<0){start=Math.max(0,start+obj.length)}}if(stop===null){stop=obj.length}stop=$B.$GetInt(stop);if(stop<0){stop=Math.max(0,stop+obj.length)}var res=_b_.list.$factory(value);obj.splice.apply(obj,[start,stop-start].concat(res))};$B.set_list_slice_step=function(obj,start,stop,step,value){if(step===null||step==1){return $B.set_list_slice(obj,start,stop,value)}if(step==0){throw _b_.ValueError.$factory("slice step cannot be zero")}step=$B.$GetInt(step);if(start===null){start=step>0?0:obj.length-1}else{start=$B.$GetInt(start)}if(stop===null){stop=step>0?obj.length:-1}else{stop=$B.$GetInt(stop)}var repl=_b_.list.$factory(value),j=0,test,nb=0;if(step>0){test=function(i){return istop}}for(var i=start;test(i);i+=step){nb++}if(nb!=repl.length){throw _b_.ValueError.$factory("attempt to assign sequence of size "+repl.length+" to extended slice of size "+nb)}for(var i=start;test(i);i+=step){obj[i]=repl[j];j++}};$B.$setitem=function(obj,item,value){if(Array.isArray(obj)&&obj.__class__===undefined&&!obj.$is_js_array&&typeof item=="number"&&!$B.$isinstance(obj,_b_.tuple)){if(item<0){item+=obj.length}if(obj[item]===undefined){throw _b_.IndexError.$factory("list assignment index out of range")}obj[item]=value;return}else if(obj.__class__===_b_.dict){_b_.dict.$setitem(obj,item,value);return}else if(obj.__class__===_b_.list){return _b_.list.$setitem(obj,item,value)}var si=$B.$getattr(obj.__class__||$B.get_class(obj),"__setitem__",null);if(si===null||typeof si!="function"){throw _b_.TypeError.$factory("'"+$B.class_name(obj)+"' object does not support item assignment")}return si(obj,item,value)};$B.$delitem=function(obj,item){if(Array.isArray(obj)&&obj.__class__===_b_.list&&typeof item=="number"&&!$B.$isinstance(obj,_b_.tuple)){if(item<0){item+=obj.length}if(obj[item]===undefined){throw _b_.IndexError.$factory("list deletion index out of range")}obj.splice(item,1);return}else 
if(obj.__class__===_b_.dict){_b_.dict.__delitem__(obj,item);return}else if(obj.__class__===_b_.list){return _b_.list.__delitem__(obj,item)}var di=$B.$getattr(obj.__class__||$B.get_class(obj),"__delitem__",null);if(di===null){throw _b_.TypeError.$factory("'"+$B.class_name(obj)+"' object doesn't support item deletion")}return di(obj,item)};function num_result_type(x,y){var is_int,is_float,x_num,y_num;if(typeof x=="number"){x_num=x;if(typeof y=="number"){is_int=true;y_num=y}else if(y.__class__===_b_.float){is_float=true;y_num=y.value}}else if(x.__class__===_b_.float){x_num=x.value;if(typeof y=="number"){y_num=y;is_float=true}else if(y.__class__===_b_.float){is_float=true;y_num=y.value}}return{is_int:is_int,is_float:is_float,x:x_num,y:y_num}}$B.augm_assign=function(left,op,right){var res_type=num_result_type(left,right);if(res_type.is_int||res_type.is_float){var z;switch(op){case"+=":z=res_type.x+res_type.y;break;case"-=":z=res_type.x-res_type.y;break;case"*=":z=res_type.x*res_type.y;break;case"/=":z=res_type.x/res_type.y;break}if(z){if(res_type.is_int&&Number.isSafeInteger(z)){return z}else if(res_type.res_is_float){return $B.fast_float(z)}}}else if(op=="*="){if(typeof left=="number"&&typeof right=="string"){return left<=0?"":right.repeat(left)}else if(typeof left=="string"&&typeof right=="number"){return right<=0?"":left.repeat(right)}}else if(op=="+="){if(typeof left=="string"&&typeof right=="string"){return left+right}}var op1=op.substr(0,op.length-1),method=$B.op2method.augmented_assigns[op],augm_func=$B.$getattr(left,"__"+method+"__",null);if(augm_func!==null){var res=$B.$call(augm_func)(right);if(res===_b_.NotImplemented){throw _b_.TypeError.$factory(`unsupported operand type(s)`+` for ${op}: '${$B.class_name(left)}' `+`and '${$B.class_name(right)}'`)}return res}else{var method1=$B.op2method.operations[op1];if(method1===undefined){method1=$B.op2method.binary[op1]}return $B.rich_op(`__${method1}__`,left,right)}};$B.$is=function(a,b){if((a===undefined||a===$B.Undefined)&&(b===undefined||b===$B.Undefined)){return true}if(a===null){return b===null}if(b===null){return a===null}if(a.__class__===_b_.float&&b.__class__===_b_.float){if(isNaN(a.value)&&isNaN(b.value)){return true}return a.value==b.value}if(a===_b_.int&&b==$B.long_int||a===$B.long_int&&b===_b_.int){return true}return a===b};$B.is_or_equals=function(x,y){return $B.$is(x,y)||$B.rich_comp("__eq__",x,y)};$B.member_func=function(obj){var klass=$B.get_class(obj),contains=$B.$getattr(klass,"__contains__",null);if(contains!==null){contains=$B.$call(contains);return contains.bind(null,obj)}try{var iterator=$B.make_js_iterator(obj);return function(key){try{for(var item of iterator){if($B.is_or_equals(key,item)){return true}}return false}catch(err){return false}}}catch(err){var getitem=$B.$getattr(klass,"__getitem__",null);if(getitem!==null){return function(key){var i=-1;while(true){i++;try{var item=getitem(obj,i);if($B.is_or_equals(key,item)){return true}}catch(err){if($B.$is_exc(err,[_b_.StopIteration])){return false}throw err}}}}else{throw _b_.TypeError.$factory("argument of type "+`'${$B.class_name(obj)}' is not iterable`)}}};$B.$is_member=function(item,_set){return $B.member_func(_set)(item)};$B.$call=function(callable,position){callable=$B.$call1(callable);if(position){return function(){try{return callable.apply(null,arguments)}catch(exc){position=$B.decode_position(position);$B.set_exception_offsets(exc,position);throw exc}}}return callable};$B.$call1=function(callable){if(callable.__class__===$B.method){return callable}else 
if(callable.$factory){return callable.$factory}else if(callable.$is_class){return callable.$factory=$B.$instance_creator(callable)}else if(callable.$is_js_class){return callable.$factory=function(){return new callable(...arguments)}}else if(callable.$in_js_module){return function(){var res=callable(...arguments);return res===undefined?_b_.None:res}}else if(callable.$is_func||typeof callable=="function"){if(callable.$infos&&callable.$infos.__code__&&callable.$infos.__code__.co_flags&32){$B.frame_obj.frame.$has_generators=true}return callable}try{return $B.$getattr(callable,"__call__")}catch(err){throw _b_.TypeError.$factory("'"+$B.class_name(callable)+"' object is not callable")}};var r_opnames=["add","sub","mul","truediv","floordiv","mod","pow","lshift","rshift","and","xor","or"];var ropsigns=["+","-","*","/","//","%","**","<<",">>","&","^","|"];$B.make_rmethods=function(klass){for(var r_opname of r_opnames){if(klass["__r"+r_opname+"__"]===undefined&&klass["__"+r_opname+"__"]){klass["__r"+r_opname+"__"]=function(name){return function(self,other){return klass["__"+name+"__"](other,self)}}(r_opname)}}};$B.UUID=function(){return $B.$py_UUID++};$B.$GetInt=function(value){if(typeof value=="number"||value.constructor===Number){return value}else if(typeof value==="boolean"){return value?1:0}else if($B.$isinstance(value,_b_.int)){return value}else if($B.$isinstance(value,_b_.float)){return value.valueOf()}if(!value.$is_class){try{var v=$B.$getattr(value,"__int__")();return v}catch(e){}try{var v=$B.$getattr(value,"__index__")();return v}catch(e){}}throw _b_.TypeError.$factory("'"+$B.class_name(value)+"' object cannot be interpreted as an integer")};$B.to_num=function(obj,methods){var expected_class={__complex__:_b_.complex,__float__:_b_.float,__index__:_b_.int,__int__:_b_.int};var klass=obj.__class__||$B.get_class(obj);for(var i=0;i1e3){var exc=_b_.RecursionError.$factory("maximum recursion depth exceeded");$B.set_exc(exc,frame);throw exc}frame.__class__=$B.frame;$B.frame_obj={prev:$B.frame_obj,frame:frame,count:count+1};if($B.tracefunc!==_b_.None){if(frame[4]===$B.tracefunc||$B.tracefunc.$infos&&frame[4]&&frame[4]===$B.tracefunc.$infos.__func__){$B.tracefunc.$frame_id=frame[0];return _b_.None}else{var frame_obj=$B.frame_obj;while(frame_obj!==null){if(frame_obj.frame[0]==$B.tracefunc.$frame_id){return _b_.None}frame_obj=frame_obj.prev}try{var res=$B.tracefunc(frame,"call",_b_.None);var frame_obj=$B.frame_obj;while(frame_obj!==null){if(frame_obj.frame[4]==res){return _b_.None}frame_obj=frame_obj.prev}return res}catch(err){$B.set_exc(err,frame);$B.frame_obj=$B.frame_obj.prev;err.$in_trace_func=true;throw err}}}return _b_.None};$B.trace_exception=function(){var frame=$B.frame_obj.frame;if(frame[0]==$B.tracefunc.$current_frame_id){return _b_.None}var trace_func=frame.$f_trace,exc=frame[1].$current_exception;return trace_func(frame,"exception",$B.fast_tuple([exc.__class__,exc,$B.traceback.$factory(exc)]))};$B.trace_line=function(){var frame=$B.frame_obj.frame;if(frame[0]==$B.tracefunc.$current_frame_id){return _b_.None}var trace_func=frame.$f_trace;if(trace_func===undefined){console.log("trace line, frame",frame)}return trace_func(frame,"line",_b_.None)};$B.trace_return=function(value){var frame=$B.frame_obj.frame,trace_func=frame.$f_trace;if(frame[0]==$B.tracefunc.$current_frame_id){return 
_b_.None}trace_func(frame,"return",value)};$B.leave_frame=function(arg){if($B.frame_obj===null){return}if(arg&&arg.value!==undefined&&$B.tracefunc!==_b_.None){if($B.frame_obj.frame.$f_trace===undefined){$B.frame_obj.frame.$f_trace=$B.tracefunc}if($B.frame_obj.frame.$f_trace!==_b_.None){$B.trace_return(arg.value)}}var frame=$B.frame_obj.frame;$B.frame_obj=$B.frame_obj.prev;if(frame.$has_generators){for(var key in frame[1]){if(frame[1][key]&&frame[1][key].__class__===$B.generator){var gen=frame[1][key];if(gen.$frame===undefined){continue}var ctx_managers=gen.$frame[1].$context_managers;if(ctx_managers){for(var cm of ctx_managers){$B.$call($B.$getattr(cm,"__exit__"))(_b_.None,_b_.None,_b_.None)}}}}}delete frame[1].$current_exception;return _b_.None};$B.trace_return_and_leave=function(frame,return_value){if(frame.$f_trace!==_b_.None){$B.trace_return(return_value)}return $B.leave_frame()};$B.push_frame=function(frame){var count=$B.frame_obj===null?0:$B.frame_obj.count;return{prev:$B.frame_obj,frame:frame,count:count+1}};var reversed_op={__lt__:"__gt__",__le__:"__ge__",__gt__:"__lt__",__ge__:"__le__"};var method2comp={__lt__:"<",__le__:"<=",__gt__:">",__ge__:">="};$B.rich_comp=function(op,x,y){if(x===undefined){throw _b_.RuntimeError.$factory("error in rich comp")}var x1=x!==null&&x.valueOf?x.valueOf():x,y1=y!==null&&y.valueOf?y.valueOf():y;if(typeof x1=="number"&&typeof y1=="number"&&x.__class__===undefined&&y.__class__===undefined){switch(op){case"__eq__":return x1==y1;case"__ne__":return x1!=y1;case"__le__":return x1<=y1;case"__lt__":return x1=y1;case"__gt__":return x1>y1}}var res;if(x!==null&&(x.$is_class||x.$factory)){if(op=="__eq__"){return x===y}else if(op=="__ne__"){return!(x===y)}else{throw _b_.TypeError.$factory("'"+method2comp[op]+"' not supported between instances of '"+$B.class_name(x)+"' and '"+$B.class_name(y)+"'")}}var x_class_op=$B.$call($B.$getattr($B.get_class(x),op)),rev_op=reversed_op[op]||op,y_rev_func;if(x!==null&&x.__class__&&y!==null&&y.__class__){if(y.__class__.__mro__.indexOf(x.__class__)>-1){y_rev_func=$B.$getattr(y,rev_op);res=$B.$call(y_rev_func)(x);if(res!==_b_.NotImplemented){return res}}}res=x_class_op(x,y);if(res!==_b_.NotImplemented){return res}if(y_rev_func===undefined){y_rev_func=$B.$call($B.$getattr($B.get_class(y),rev_op));res=y_rev_func(y,x);if(res!==_b_.NotImplemented){return res}}if(op=="__eq__"){return _b_.False}else if(op=="__ne__"){return _b_.True}throw _b_.TypeError.$factory("'"+method2comp[op]+"' not supported between instances of '"+$B.class_name(x)+"' and '"+$B.class_name(y)+"'")};var opname2opsign={__sub__:"-",__xor__:"^",__mul__:"*"};$B.rich_op=function(op,x,y,position){try{return $B.rich_op1(op,x,y)}catch(exc){if(position){$B.set_exception_offsets(exc,$B.decode_position(position))}throw exc}};$B.rich_op1=function(op,x,y){var res_is_int,res_is_float,x_num,y_num;if(typeof x=="number"){x_num=x;if(typeof y=="number"){res_is_int=true;y_num=y}else if(y.__class__===_b_.float){res_is_float=true;y_num=y.value}}else if(x.__class__===_b_.float){x_num=x.value;if(typeof y=="number"){y_num=y;res_is_float=true}else if(y.__class__===_b_.float){res_is_float=true;y_num=y.value}}if(res_is_int||res_is_float){var z;switch(op){case"__add__":z=x_num+y_num;break;case"__sub__":z=x_num-y_num;break;case"__mul__":z=x_num*y_num;break;case"__pow__":if(res_is_int&&y_num>=0){return _b_.int.$int_or_long(BigInt(x_num)**BigInt(y_num))}break;case"__truediv__":if(y_num==0){throw _b_.ZeroDivisionError.$factory("division by 
zero")}z=x_num/y_num;return{__class__:_b_.float,value:z}}if(z){if(res_is_int&&Number.isSafeInteger(z)){return z}else if(res_is_float){return{__class__:_b_.float,value:z}}}}else if(typeof x=="string"&&typeof y=="string"&&op=="__add__"){return x+y}var x_class=x.__class__||$B.get_class(x),y_class=y.__class__||$B.get_class(y),rop="__r"+op.substr(2),method;if(x_class===y_class){if(x_class===_b_.int){return _b_.int[op](x,y)}else if(x_class===_b_.bool){return(_b_.bool[op]||_b_.int[op])(x,y)}try{method=$B.$call($B.$getattr(x_class,op))}catch(err){if(err.__class__===_b_.AttributeError){var kl_name=$B.class_name(x);throw _b_.TypeError.$factory("unsupported operand type(s) "+"for "+opname2opsign[op]+": '"+kl_name+"' and '"+kl_name+"'")}throw err}return method(x,y)}if(_b_.issubclass(y_class,x_class)){var reflected_left=$B.$getattr(x_class,rop,false),reflected_right=$B.$getattr(y_class,rop,false);if(reflected_right&&reflected_left&&reflected_right!==reflected_left){return reflected_right(y,x)}}var res;try{var attr=$B.$getattr(x,op);method=$B.$getattr(x_class,op)}catch(err){if(err.__class__!==_b_.AttributeError){throw err}res=$B.$call($B.$getattr(y,rop))(x);if(res!==_b_.NotImplemented){return res}throw _b_.TypeError.$factory(`unsupported operand type(s) for ${$B.method_to_op[op]}:`+` '${$B.class_name(x)}' and '${$B.class_name(y)}'`)}if((op=="__add__"||op=="__mul__")&&(Array.isArray(x)||typeof x=="string"||$B.$isinstance(x,[_b_.str,_b_.bytes,_b_.bytearray,_b_.memoryview]))){try{res=method(x,y)}catch(err){res=_b_.NotImplemented}}else{res=method(x,y)}if(res===_b_.NotImplemented){try{method=$B.$getattr(y_class,rop)}catch(err){if(err.__class__!==_b_.AttributeError){throw err}throw _b_.TypeError.$factory(`unsupported operand type(s) for ${$B.method_to_op[op]}:`+` '${$B.class_name(x)}' and '${$B.class_name(y)}'`)}res=method(y,x);if(res===_b_.NotImplemented){throw _b_.TypeError.$factory(`unsupported operand type(s) for ${$B.method_to_op[op]}:`+` '${$B.class_name(x)}' and '${$B.class_name(y)}'`)}return res}else{return res}};$B.is_none=function(o){return o===undefined||o===null||o==_b_.None};var repr_stack=new Set;$B.repr={enter:function(obj){var obj_id=_b_.id(obj);if(repr_stack.has(obj_id)){return true}else{repr_stack.add(obj_id);if(repr_stack.size>$B.recursion_limit){repr_stack.clear();throw _b_.RecursionError.$factory("maximum recursion depth "+"exceeded while getting the repr of an object")}}},leave:function(obj){repr_stack.delete(_b_.id(obj))}}})(__BRYTHON__);(function($B){var _b_=$B.builtins;var object={__name__:"object",__qualname__:"object",$is_class:true,$native:true};object.__delattr__=function(self,attr){if(self.__dict__&&$B.$isinstance(self.__dict__,_b_.dict)&&_b_.dict.$contains_string(self.__dict__,attr)){_b_.dict.$delete_string(self.__dict__,attr);return _b_.None}else if(self.__dict__===undefined&&self[attr]!==undefined){delete self[attr];return _b_.None}else{var klass=self.__class__;if(klass){var prop=$B.$getattr(klass,attr);if(prop.__class__===_b_.property){if(prop.__delete__!==undefined){prop.__delete__(self);return _b_.None}}}}throw $B.attr_error(attr,self)};object.__dir__=function(self){var objects;if(self.$is_class){objects=[self].concat(self.__mro__)}else{var klass=self.__class__||$B.get_class(self);objects=[self,klass].concat(klass.__mro__)}var res=[];for(var i=0,len=objects.length;i2){console.log("error in get.apply",err);console.log("get attr",attr,"of",obj);console.log("res",res);console.log("__get__",__get__);console.log(__get__+"")}throw err}}if(__get__===null&&typeof 
res=="function"){__get__=function(x){return x}}if(__get__!==null){res.__name__=attr;if(attr=="__new__"||res.__class__===$B.builtin_function_or_method){res.$type="staticmethod"}var res1=__get__.apply(null,[res,obj,klass]);if($test){console.log("res",res,"res1",res1)}if(typeof res1=="function"){if(res1.__class__===$B.method){return res}if(res.$type=="staticmethod"){return res}else{var self=res.__class__===$B.method?klass:obj,method=function(){var args=[self];for(var i=0,len=arguments.length;i0){throw _b_.TypeError.$factory("object() takes no parameters")}var res=Object.create(null);res.__class__=cls;res.__dict__=$B.obj_dict({});return res}};object.$no_new_init=function(cls){var res=Object.create(null);res.__class__=cls;res.__dict__=$B.obj_dict({});return res};object.__new__=function(cls,...args){if(cls===undefined){throw _b_.TypeError.$factory("object.__new__(): not enough arguments")}var init_func=$B.$getattr(cls,"__init__");if(init_func===object.__init__){if(args.length>0){throw _b_.TypeError.$factory("object() takes no parameters")}}var res=Object.create(null);$B.update_obj(res,{__class__:cls,__dict__:$B.obj_dict({})});return res};object.__ne__=function(self,other){if(self===other){return false}var eq=$B.$getattr(self.__class__||$B.get_class(self),"__eq__",null);if(eq!==null){var res=$B.$call(eq)(self,other);if(res===_b_.NotImplemented){return res}return!$B.$bool(res)}return _b_.NotImplemented};object.__reduce__=function(self){if(!self.__dict__){throw _b_.TypeError.$factory(`cannot pickle '${$B.class_name(self)}' object`)}if($B.imported.copyreg===undefined){$B.$import("copyreg")}var res=[$B.imported.copyreg._reconstructor];var D=$B.get_class(self),B=object;for(var klass of D.__mro__){if(klass.__module__=="builtins"){B=klass;break}}var args=[D,B];if(B===object){args.push(_b_.None)}else{args.push($B.$call(B)(self))}res.push($B.fast_tuple(args));var d=$B.empty_dict();for(var attr of _b_.dict.$keys_string(self.__dict__)){_b_.dict.$setitem(d,attr,_b_.dict.$getitem_string(self.__dict__,attr))}res.push(d);return _b_.tuple.$factory(res)};function getNewArguments(self,klass){var newargs_ex=$B.$getattr(self,"__getnewargs_ex__",null);if(newargs_ex!==null){let newargs=newargs_ex();if(!newargs||newargs.__class__!==_b_.tuple){throw _b_.TypeError.$factory("__getnewargs_ex__ should "+`return a tuple, not '${$B.class_name(newargs)}'`)}if(newargs.length!=2){throw _b_.ValueError.$factory("__getnewargs_ex__ should "+`return a tuple of length 2, not ${newargs.length}`)}let args=newargs[0],kwargs=newargs[1];if(!args||args.__class__!==_b_.tuple){throw _b_.TypeError.$factory("first item of the tuple returned "+`by __getnewargs_ex__ must be a tuple, not '${$B.class_name(args)}'`)}if(!kwargs||kwargs.__class__!==_b_.dict){throw _b_.TypeError.$factory("second item of the tuple returned "+`by __getnewargs_ex__ must be a dict, not '${$B.class_name(kwargs)}'`)}return{args:args,kwargs:kwargs}}let newargs=klass.$getnewargs,args;if(!newargs){newargs=$B.$getattr(klass,"__getnewargs__",null)}if(newargs){args=newargs(self);if(!args||args.__class__!==_b_.tuple){throw _b_.TypeError.$factory("__getnewargs__ should "+`return a tuple, not '${$B.class_name(args)}'`)}return{args:args}}}object.__reduce_ex__=function(self,protocol){var klass=$B.get_class(self);if($B.imported.copyreg===undefined){$B.$import("copyreg")}if(protocol<2){return $B.$call($B.imported.copyreg._reduce_ex)(self,protocol)}var reduce=$B.$getattr(klass,"__reduce__");if(reduce!==object.__reduce__){return $B.$call(reduce)(self)}var 
res=[$B.imported.copyreg.__newobj__];var arg2=[klass];var newargs=getNewArguments(self,klass);if(newargs){arg2=arg2.concat(newargs.args)}res.push($B.fast_tuple(arg2));var d=$B.empty_dict(),nb=0;if(self.__dict__){for(var item of _b_.dict.$iter_items(self.__dict__)){if(item.key=="__class__"||item.key.startsWith("$")){continue}_b_.dict.$setitem(d,item.key,item.value);nb++}}if(nb==0){d=_b_.None}res.push(d);var list_like_iterator=_b_.None;if($B.$getattr(klass,"append",null)!==null&&$B.$getattr(klass,"extend",null)!==null){list_like_iterator=_b_.iter(self)}res.push(list_like_iterator);var key_value_iterator=_b_.None;if($B.$isinstance(self,_b_.dict)){key_value_iterator=_b_.dict.items(self)}res.push(key_value_iterator);return _b_.tuple.$factory(res)};object.__repr__=function(self){if(self===object){return""}if(self.__class__===_b_.type){return""}var module=self.__class__.__module__;if(module!==undefined&&!module.startsWith("$")&&module!=="builtins"){return"<"+self.__class__.__module__+"."+$B.class_name(self)+" object>"}else{return"<"+$B.class_name(self)+" object>"}};object.__setattr__=function(self,attr,val){if(val===undefined){throw _b_.TypeError.$factory("can't set attributes of built-in/extension type 'object'")}else if(self.__class__===object){if(object[attr]===undefined){throw $B.attr_error(attr,self)}else{throw _b_.AttributeError.$factory("'object' object attribute '"+attr+"' is read-only")}}if(self.__dict__){_b_.dict.$setitem(self.__dict__,attr,val)}else{self[attr]=val}return _b_.None};object.__setattr__.__get__=function(obj){return function(attr,val){object.__setattr__(obj,attr,val)}};object.__setattr__.__str__=function(){return"method object.setattr"};object.__str__=function(self){if(self===undefined||self.$kw){throw _b_.TypeError.$factory("descriptor '__str__' of 'object' "+"object needs an argument")}var klass=self.__class__||$B.get_class(self);var repr_func=$B.$getattr(klass,"__repr__");return $B.$call(repr_func).apply(null,arguments)};object.__subclasshook__=function(){return _b_.NotImplemented};object.$factory=function(){if(arguments.length>0||arguments.length==1&&arguments[0].$kw&&Object.keys(arguments[0].$kw).length>0){throw _b_.TypeError.$factory("object() takes no arguments")}var res={__class__:object},args=[res];object.__init__.apply(null,args);return res};$B.set_func_names(object,"builtins");_b_.object=object})(__BRYTHON__);(function($B){var _b_=$B.builtins;const TPFLAGS={STATIC_BUILTIN:1<<1,MANAGED_WEAKREF:1<<3,MANAGED_DICT:1<<4,SEQUENCE:1<<5,MAPPING:1<<6,DISALLOW_INSTANTIATION:1<<7,IMMUTABLETYPE:1<<8,HEAPTYPE:1<<9,BASETYPE:1<<10,HAVE_VECTORCALL:1<<11,READY:1<<12,READYING:1<<13,HAVE_GC:1<<14,METHOD_DESCRIPTOR:1<<17,VALID_VERSION_TAG:1<<19,IS_ABSTRACT:1<<20,MATCH_SELF:1<<22,LONG_SUBCLASS:1<<24,LIST_SUBCLASS:1<<25,TUPLE_SUBCLASS:1<<26,BYTES_SUBCLASS:1<<27,UNICODE_SUBCLASS:1<<28,DICT_SUBCLASS:1<<29,BASE_EXC_SUBCLASS:1<<30,TYPE_SUBCLASS:1<<31,HAVE_FINALIZE:1<<0,HAVE_VERSION_TAG:1<<18};$B.$class_constructor=function(class_name,class_obj_proxy,metaclass,resolved_bases,bases,kwargs){var dict;if(class_obj_proxy instanceof $B.str_dict){dict=$B.empty_dict();dict.$strings=class_obj_proxy}else{dict=class_obj_proxy.$target}var module=class_obj_proxy.__module__;for(var base of bases){if(base.__flags__!==undefined&&!(base.__flags__&TPFLAGS.BASETYPE)){throw _b_.TypeError.$factory("type 'bool' is not an acceptable base type")}}var extra_kwargs={};if(kwargs){for(let i=0;i0){if(bases[0].__class__===undefined){if(typeof bases[0]=="function"){if(bases.length!=1){throw _b_.TypeError.$factory("A 
Brython class "+"can inherit at most 1 Javascript constructor")}metaclass=bases[0].__class__=$B.JSMeta;$B.set_func_names(bases[0],module)}else{throw _b_.TypeError.$factory("Argument of "+class_name+" is not a class (type '"+$B.class_name(bases[0])+"')")}}for(var base of bases){var mc=base.__class__;if(metaclass===undefined){metaclass=mc}else if(mc===metaclass||_b_.issubclass(metaclass,mc)){}else if(_b_.issubclass(mc,metaclass)){metaclass=mc}else if(metaclass.__bases__&&metaclass.__bases__.indexOf(mc)==-1){throw _b_.TypeError.$factory("metaclass conflict: the "+"metaclass of a derived class must be a (non-"+"strict) subclass of the metaclasses of all its bases")}}}else{metaclass=metaclass||_b_.type}return metaclass};function set_attr_if_absent(dict,attr,value){try{$B.$getitem(dict,attr)}catch(err){$B.$setitem(dict,attr,value)}}$B.make_class_namespace=function(metaclass,class_name,module,qualname,bases){var class_dict=_b_.dict.$literal([["__module__",module],["__qualname__",qualname]]);if(metaclass!==_b_.type){var prepare=$B.$getattr(metaclass,"__prepare__",_b_.None);if(prepare!==_b_.None){class_dict=$B.$call(prepare)(class_name,bases);set_attr_if_absent(class_dict,"__module__",module);set_attr_if_absent(class_dict,"__qualname__",qualname)}}if(class_dict.__class__===_b_.dict){if(class_dict.$all_str){return class_dict.$strings}return new Proxy(class_dict,{get:function(target,prop){if(prop=="__class__"){return _b_.dict}else if(prop=="$target"){return target}if(_b_.dict.$contains_string(target,prop)){return _b_.dict.$getitem_string(target,prop)}return undefined},set:function(target,prop,value){_b_.dict.$setitem(target,prop,value)}})}else{var setitem=$B.$getattr(class_dict,"__setitem__"),getitem=$B.$getattr(class_dict,"__getitem__");return new Proxy(class_dict,{get:function(target,prop){if(prop=="__class__"){return $B.get_class(target)}else if(prop=="$target"){return target}try{return getitem(prop)}catch(err){return undefined}},set:function(target,prop,value){setitem(prop,value);return _b_.None}})}};$B.resolve_mro_entries=function(bases){var new_bases=[],has_mro_entries=false;for(var base of bases){if(!$B.$isinstance(base,_b_.type)){var mro_entries=$B.$getattr(base,"__mro_entries__",_b_.None);if(mro_entries!==_b_.None){has_mro_entries=true;var entries=_b_.list.$factory(mro_entries(bases));new_bases=new_bases.concat(entries)}else{new_bases.push(base)}}else{new_bases.push(base)}}return has_mro_entries?new_bases:bases};$B.make_class=function(qualname,factory){var A={__class__:type,__bases__:[_b_.object],__mro__:[_b_.object],__name__:qualname,__qualname__:qualname,$is_class:true};A.$factory=factory;return A};var type=$B.make_class("type",(function(){var missing={},$=$B.args("type",3,{kls:null,bases:null,cl_dict:null},["kls","bases","cl_dict"],arguments,{bases:missing,cl_dict:missing},null,"kw"),kls=$.kls,bases=$.bases,cl_dict=$.cl_dict,kw=$.kw;var kwarg={};for(var item of _b_.dict.$iter_items(kw)){kwarg[item.key]=item.value}var kwargs={$kw:[kwarg]};if(cl_dict===missing){if(bases!==missing){throw _b_.TypeError.$factory("type() takes 1 or 3 arguments")}return $B.get_class(kls)}else{var module=$B.frame_obj.frame[2],resolved_bases=$B.resolve_mro_entries(bases),metaclass=$B.get_metaclass(kls,module,resolved_bases);return type.__call__(metaclass,kls,resolved_bases,cl_dict,kwargs)}}));type.__class__=type;var 
classmethod=_b_.classmethod=$B.make_class("classmethod",(function(func){$B.check_nb_args_no_kw("classmethod",1,arguments);return{__class__:classmethod,__func__:func}}));classmethod.__get__=function(){var $=$B.args("classmethod",3,{self:null,obj:null,cls:null},["self","obj","cls"],arguments,{cls:_b_.None},null,null),self=$.self,obj=$.obj,cls=$.cls;if(cls===_b_.None||cls===undefined){cls=$B.get_class(obj)}var func_class=$B.get_class(self.__func__),candidates=[func_class].concat(func_class.__mro__);for(var candidate of candidates){if(candidate===$B.function){break}if(candidate.__get__){return candidate.__get__(self.__func__,cls,cls)}}return $B.method.$factory(self.__func__,cls)};$B.set_func_names(classmethod,"builtins");var staticmethod=_b_.staticmethod=$B.make_class("staticmethod",(function(func){return{__class__:staticmethod,__func__:func}}));staticmethod.__call__=function(self){return $B.$call(self.__func__)};staticmethod.__get__=function(self){return self.__func__};$B.set_func_names(staticmethod,"builtins");$B.getset_descriptor=$B.make_class("getset_descriptor",(function(klass,attr,getter,setter){var res={__class__:$B.getset_descriptor,__doc__:_b_.None,cls:klass,attr:attr,getter:getter,setter:setter};return res}));$B.getset_descriptor.__get__=function(self,obj,klass){console.log("__get__",self,obj,klass);if(obj===_b_.None){return self}return self.getter(self,obj,klass)};$B.getset_descriptor.__set__=function(self,klass,value){return self.setter(self,klass,value)};$B.getset_descriptor.__repr__=function(self){return``};$B.set_func_names($B.getset_descriptor,"builtins");type.$call=function(klass,new_func,init_func){return function(){var instance=new_func.bind(null,klass).apply(null,arguments);if($B.$isinstance(instance,klass)){init_func.bind(null,instance).apply(null,arguments)}return instance}};type.$call_no_new_init=function(klass,init_func){return function(){var instance=_b_.object.$no_new_init(klass);init_func(instance,...arguments);return instance}};type.$call_no_init=function(klass,new_func){return new_func.bind(null,klass)};type.__call__=function(){var extra_args=[],klass=arguments[0];for(var i=1,len=arguments.length;i1){console.log("warning: no attribute $infos for",res,"klass",klass,"attr",attr)}if($test){console.log("res is function",res)}if(attr=="__new__"||res.__class__===$B.builtin_function_or_method){res.$type="staticmethod"}if((attr=="__class_getitem__"||attr=="__init_subclass__")&&res.__class__!==_b_.classmethod){res=_b_.classmethod.$factory(res);return _b_.classmethod.__get__(res,_b_.None,klass)}if(res.__class__===$B.method){return res.__get__(null,klass)}else{if($test){console.log("return res",res)}return res}}else{return res}}};type.__hash__=function(cls){return _b_.hash(cls)};type.__init__=function(){if(arguments.length==0){throw _b_.TypeError.$factory("descriptor '__init__' of 'type' "+"object needs an argument")}};type.__init_subclass__=function(){var $=$B.args("__init_subclass__",1,{cls:null},["cls"],arguments,{},"args","kwargs");if($.args.length>0){throw _b_.TypeError.$factory(`${$.cls.__qualname__}.__init_subclass__ takes no arguments `+`(${$.args.length} given)`)}if(_b_.dict.__len__($.kwargs)>0){throw _b_.TypeError.$factory(`${$.cls.__qualname__}.__init_subclass__() `+`takes no keyword arguments`)}return _b_.None};_b_.object.__init_subclass__=type.__init_subclass__;type.__instancecheck__=function(cls,instance){var kl=instance.__class__||$B.get_class(instance);if(kl===cls){return true}else{for(var i=0;i"};type.__ror__=function(){var 
len=arguments.length;if(len!=1){throw _b_.TypeError.$factory(`expected 1 argument, got ${len}`)}return _b_.NotImplemented};function update_subclasses(kls,name,alias,value){for(var subclass of kls.$subclasses){if(!subclass.hasOwnProperty(name)){subclass[alias]=value;update_subclasses(subclass,name,alias,value)}}}type.__setattr__=function(kls,attr,value){var $test=false;if($test){console.log("kls is class",type)}if(type[attr]&&type[attr].__get__&&type[attr].__set__){type[attr].__set__(kls,value);return _b_.None}if(kls.__module__=="builtins"){throw _b_.TypeError.$factory(`cannot set '${attr}' attribute of immutable type '`+kls.__qualname__+"'")}kls[attr]=value;var mp=kls.__dict__||$B.$getattr(kls,"__dict__");_b_.dict.$setitem(mp,attr,value);switch(attr){case"__init__":case"__new__":kls.$factory=$B.$instance_creator(kls);break;case"__bases__":kls.__mro__=_b_.type.mro(kls);break;case"__setattr__":var initial_value=kls.$tp_setattr;kls.$tp_setattr=value;update_subclasses(kls,"__setattr__","$tp_setattr",value);break}if($test){console.log("after setattr",kls)}return _b_.None};type.mro=function(cls){if(cls===undefined){throw _b_.TypeError.$factory("unbound method type.mro() needs an argument")}var bases=cls.__bases__,seqs=[],pos1=0;for(var base of bases){let bmro=[],pos=0;if(base===undefined||base.__mro__===undefined){if(base.__class__===undefined){return[_b_.object]}else{console.log("error for base",base);console.log("cls",cls)}}bmro[pos++]=base;var _tmp=base.__mro__;if(_tmp){if(_tmp[0]===base){_tmp.splice(0,1)}for(var k=0;k<_tmp.length;k++){bmro[pos++]=_tmp[k]}}seqs[pos1++]=bmro}seqs[pos1++]=bases.slice();var mro=[cls],mpos=1;while(1){let non_empty=[],pos=0;for(let i=0;i0){non_empty[pos++]=seqs[i]}}if(non_empty.length==0){break}let candidate;for(let i=0;i-1){not_head[pos++]=s}}if(not_head.length>0){candidate=null}else{break}}if(candidate===null){throw _b_.TypeError.$factory("inconsistent hierarchy, no C3 MRO is possible")}mro[mpos++]=candidate;for(let i=0;i-1};$B.set_func_names(type,"builtins");type.__init_subclass__=_b_.classmethod.$factory(type.__init_subclass__);_b_.type=type;var property=_b_.property=$B.make_class("property",(function(fget,fset,fdel,doc){var res={__class__:property};property.__init__(res,fget,fset,fdel,doc);return res}));property.__init__=function(){var $=$B.args("__init__",5,{self:null,fget:null,fset:null,fdel:null,doc:null},["self","fget","fset","fdel","doc"],arguments,{fget:_b_.None,fset:_b_.None,fdel:_b_.None,doc:_b_.None},null,null),self=$.self,fget=$.fget,fset=$.fset,fdel=$.fdel,doc=$.doc;self.__doc__=doc||"";self.$type=fget.$type;self.fget=fget;self.fset=fset;self.fdel=fdel;self.$is_property=true;if(fget&&fget.$attrs){for(var key in fget.$attrs){self[key]=fget.$attrs[key]}}self.__delete__=fdel;self.getter=function(fget){return property.$factory(fget,self.fset,self.fdel,self.__doc__)};self.setter=function(fset){return property.$factory(self.fget,fset,self.fdel,self.__doc__)};self.deleter=function(fdel){return property.$factory(self.fget,self.fset,fdel,self.__doc__)}};property.__get__=function(self,kls){if(self.fget===undefined){throw _b_.AttributeError.$factory("unreadable attribute")}return $B.$call(self.fget)(kls)};property.__new__=function(cls){return{__class__:cls}};property.__set__=function(self,obj,value){if(self.fset===undefined){var name=self.fget.$infos.__name__;var msg=`property '${name}' of '${$B.class_name(obj)}' object `+"has no setter";throw 
_b_.AttributeError.$factory(msg)}$B.$getattr(self.fset,"__call__")(obj,value)};$B.set_func_names(property,"builtins");var wrapper_descriptor=$B.wrapper_descriptor=$B.make_class("wrapper_descriptor");wrapper_descriptor.__text_signature__={__get__:function(){return"(self, /, *args, **kwargs)"}};$B.set_func_names(wrapper_descriptor,"builtins");type.__call__.__class__=wrapper_descriptor;$B.$instance_creator=function(klass){var test=false;if(test){console.log("instance creator of",klass)}if(klass.prototype&&klass.prototype.constructor==klass){return function(){return new klass(...arguments)}}if(klass.__abstractmethods__&&$B.$bool(klass.__abstractmethods__)){return function(){var ams=Array.from($B.make_js_iterator(klass.__abstractmethods__));ams.sort();var msg=(ams.length>1?"s ":" ")+ams.join(", ");throw _b_.TypeError.$factory("Can't instantiate abstract class interface "+"with abstract method"+msg)}}var metaclass=klass.__class__||$B.get_class(klass),call_func,factory;if(metaclass===_b_.type){var new_func=type.__getattribute__(klass,"__new__"),init_func=type.__getattribute__(klass,"__init__");if(init_func===_b_.object.__init__){if(new_func===_b_.object.__new__){factory=_b_.object.$new(klass)}else{factory=new_func.bind(null,klass)}}else if(new_func===_b_.object.__new__){factory=type.$call_no_new_init(klass,init_func)}else{factory=type.$call(klass,new_func,init_func)}}else{call_func=_b_.type.__getattribute__(metaclass,"__call__");if(call_func.$is_class){factory=$B.$call(call_func)}else{factory=call_func.bind(null,klass)}}factory.__class__=$B.function;factory.$infos={__name__:klass.__name__,__module__:klass.__module__};return factory};var method_wrapper=$B.method_wrapper=$B.make_class("method_wrapper",(function(attr,klass,method){var f=function(){return method.apply(null,arguments)};f.$infos={__name__:attr,__module__:klass.__module__};return f}));method_wrapper.__str__=method_wrapper.__repr__=function(self){return""};var member_descriptor=$B.member_descriptor=$B.make_class("member_descriptor",(function(attr,cls){return{__class__:member_descriptor,cls:cls,attr:attr}}));member_descriptor.__delete__=function(self,kls){if(kls.$slot_values===undefined||!kls.$slot_values.hasOwnProperty(self.attr)){throw _b_.AttributeError.$factory(self.attr)}kls.$slot_values.delete(self.attr)};member_descriptor.__get__=function(self,kls){if(kls===_b_.None){return self}if(kls.$slot_values===undefined||!kls.$slot_values.has(self.attr)){throw $B.attr_error(self.attr,kls)}return kls.$slot_values.get(self.attr)};member_descriptor.__set__=function(self,kls,value){if(kls.$slot_values===undefined){kls.$slot_values=new Map}kls.$slot_values.set(self.attr,value)};member_descriptor.__str__=member_descriptor.__repr__=function(self){return""};$B.set_func_names(member_descriptor,"builtins");var method=$B.method=$B.make_class("method",(function(func,cls){var f=function(){return $B.$call(func).bind(null,cls).apply(null,arguments)};f.__class__=method;if(typeof func!=="function"){console.log("method from func w-o $infos",func,"all",$B.$call(func))}f.$infos=func.$infos||{};f.$infos.__func__=func;f.$infos.__self__=cls;f.$infos.__dict__=$B.empty_dict();return f}));method.__eq__=function(self,other){return self.$infos!==undefined&&other.$infos!==undefined&&self.$infos.__func__===other.$infos.__func__&&self.$infos.__self__===other.$infos.__self__};method.__ne__=function(self,other){return!$B.method.__eq__(self,other)};method.__get__=function(self){var f=function(){return 
self(arguments)};f.__class__=$B.method_wrapper;f.$infos=method.$infos;return f};method.__getattribute__=function(self,attr){var infos=self.$infos;if(infos&&infos[attr]){if(attr=="__code__"){var res={__class__:$B.Code};for(var key in infos.__code__){res[key]=infos.__code__[key]}return res}else{return infos[attr]}}else if(method.hasOwnProperty(attr)){return _b_.object.__getattribute__(self,attr)}else{return $B.function.__getattribute__(self.$infos.__func__,attr)}};method.__repr__=method.__str__=function(self){return""};method.__setattr__=function(self,key){if(key=="__class__"){throw _b_.TypeError.$factory("__class__ assignment only supported "+"for heap types or ModuleType subclasses")}throw $B.attr_error(key,self)};$B.set_func_names(method,"builtins");$B.method_descriptor=$B.make_class("method_descriptor");$B.classmethod_descriptor=$B.make_class("classmethod_descriptor");_b_.object.__class__=type;$B.make_iterator_class=function(name){var klass={__class__:_b_.type,__mro__:[_b_.object],__name__:name,__qualname__:name,$factory:function(items){return{__class__:klass,__dict__:$B.empty_dict(),counter:-1,items:items,len:items.length,$builtin_iterator:true}},$is_class:true,$iterator_class:true,__iter__:function(self){self.counter=self.counter===undefined?-1:self.counter;self.len=self.items.length;return self},__len__:function(self){return self.items.length},__next__:function(self){if(typeof self.test_change=="function"){var message=self.test_change();if(message){throw _b_.RuntimeError.$factory(message)}}self.counter++;if(self.counter$B.fast_tuple(self.items)));$B.GenericAlias.__call__=function(self,...args){return self.origin_class.$factory.apply(null,args)};$B.GenericAlias.__eq__=function(self,other){if(!$B.$isinstance(other,$B.GenericAlias)){return false}return $B.rich_comp("__eq__",self.origin_class,other.origin_class)&&$B.rich_comp("__eq__",self.items,other.items)};$B.GenericAlias.__getitem__=function(self,item){throw _b_.TypeError.$factory("descriptor '__getitem__' for '"+self.origin_class.__name__+"' objects doesn't apply to a '"+$B.class_name(item)+"' object")};$B.GenericAlias.__mro_entries__=function(self){return $B.fast_tuple([self.origin_class])};$B.GenericAlias.__new__=function(origin_class,items){var res={__class__:$B.GenericAlias,__mro__:[origin_class],origin_class:origin_class,items:items,$is_class:true};return res};$B.GenericAlias.__or__=function(){var $=$B.args("__or__",2,{self:null,other:null},["self","other"],arguments,{},null,null);return $B.UnionType.$factory([$.self,$.other])};$B.GenericAlias.__origin__=_b_.property.$factory((self=>self.origin_class));$B.GenericAlias.__parameters__=_b_.property.$factory((function(){return $B.fast_tuple([])}));$B.GenericAlias.__repr__=function(self){var items=Array.isArray(self.items)?self.items:[self.items];var reprs=[];for(var item of items){if(item===_b_.Ellipsis){reprs.push("...")}else{if(item.$is_class){reprs.push(item.__name__)}else{reprs.push(_b_.repr(item))}}}return self.origin_class.__qualname__+"["+reprs.join(", ")+"]"};$B.set_func_names($B.GenericAlias,"types");$B.UnionType=$B.make_class("UnionType",(function(items){return{__class__:$B.UnionType,items:items}}));$B.UnionType.__args__=_b_.property.$factory((self=>$B.fast_tuple(self.items)));$B.UnionType.__eq__=function(self,other){if(!$B.$isinstance(other,$B.UnionType)){return _b_.NotImplemented}return _b_.list.__eq__(self.items,other.items)};$B.UnionType.__or__=function(self,other){var items=self.items.slice();if(!items.includes(other)){items.push(other)}return 
$B.UnionType.$factory(items)};$B.UnionType.__parameters__=_b_.property.$factory((()=>$B.fast_tuple([])));$B.UnionType.__repr__=function(self){var t=[];for(var item of self.items){if(item.$is_class){var s=item.__name__;if(item.__module__!=="builtins"){s=item.__module__+"."+s}t.push(s)}else{t.push(_b_.repr(item))}}return t.join(" | ")};$B.set_func_names($B.UnionType,"types")})(__BRYTHON__);(function($B){var _b_=$B.builtins;_b_.__debug__=false;$B.$comps={">":"gt",">=":"ge","<":"lt","<=":"le"};$B.$inv_comps={">":"lt",">=":"le","<":"gt","<=":"ge"};var check_nb_args=$B.check_nb_args,check_no_kw=$B.check_no_kw,check_nb_args_no_kw=$B.check_nb_args_no_kw;var NoneType=$B.NoneType={$factory:function(){return None},__bool__:function(){return False},__class__:_b_.type,__hash__:function(){return 0},__module__:"builtins",__mro__:[_b_.object],__name__:"NoneType",__qualname__:"NoneType",__repr__:function(){return"None"},__str__:function(){return"None"},$is_class:true};NoneType.__setattr__=function(self,attr){return no_set_attr(NoneType,attr)};var None=_b_.None={__class__:NoneType};None.__doc__=None;NoneType.__doc__=None;for(var $op in $B.$comps){var key=$B.$comps[$op];switch(key){case"ge":case"gt":case"le":case"lt":NoneType["__"+key+"__"]=function(){return function(){return _b_.NotImplemented}}($op)}}for(var $func in None){if(typeof None[$func]=="function"){None[$func].__str__=function(f){return function(){return""}}($func)}}$B.set_func_names(NoneType,"builtins");_b_.__build_class__=function(){throw _b_.NotImplementedError.$factory("__build_class__")};_b_.abs=function(obj){check_nb_args_no_kw("abs",1,arguments);var klass=obj.__class__||$B.get_class(obj);try{var method=$B.$getattr(klass,"__abs__")}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("Bad operand type for abs(): '"+$B.class_name(obj)+"'")}throw err}return $B.$call(method)(obj)};_b_.aiter=function(async_iterable){return $B.$call($B.$getattr(async_iterable,"__aiter__"))()};_b_.all=function(obj){check_nb_args_no_kw("all",1,arguments);var iterable=iter(obj);while(1){try{var elt=next(iterable);if(!$B.$bool(elt)){return false}}catch(err){return true}}};_b_.anext=function(){var missing={},$=$B.args("anext",2,{async_iterator:null,_default:null},["async_iterator","_default"],arguments,{_default:missing},null,null);var awaitable=$B.$call($B.$getattr($.async_iterator,"__anext__"))();return awaitable.catch((function(err){if($B.is_exc(err,[_b_.StopAsyncIteration])){if($._default!==missing){return $._default}}throw err}))};_b_.any=function(obj){check_nb_args_no_kw("any",1,arguments);for(var elt of $B.make_js_iterator(obj)){if($B.$bool(elt)){return true}}return false};_b_.ascii=function(obj){check_nb_args_no_kw("ascii",1,arguments);var res=repr(obj),res1="",cp;for(var i=0;i=0){return prefix+value.toString(base)}return"-"+prefix+(-value).toString(base)}function bin_hex_oct(base,obj){if($B.$isinstance(obj,_b_.int)){return $builtin_base_convert_helper(obj,base)}else{try{var klass=obj.__class__||$B.get_class(obj),method=$B.$getattr(klass,"__index__")}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("'"+$B.class_name(obj)+"' object cannot be interpreted as an integer")}throw err}var res=$B.$call(method)(obj);return $builtin_base_convert_helper(res,base)}}_b_.bin=function(obj){check_nb_args_no_kw("bin",1,arguments);return bin_hex_oct(2,obj)};_b_.breakpoint=function(){$B.$import("sys",[]);var missing={},hook=$B.$getattr($B.imported.sys,"breakpointhook",missing);if(hook===missing){throw 
_b_.RuntimeError.$factory("lost sys.breakpointhook")}return $B.$call(hook).apply(null,arguments)};_b_.callable=function(obj){check_nb_args_no_kw("callable",1,arguments);return _b_.hasattr(obj,"__call__")};_b_.chr=function(i){check_nb_args_no_kw("chr",1,arguments);i=$B.PyNumber_Index(i);if(i<0||i>1114111){throw _b_.ValueError.$factory("Outside valid range")}else if(i>=65536&&i<=1114111){var code=i-65536,s=String.fromCodePoint(55296|code>>10)+String.fromCodePoint(56320|code&1023);return $B.make_String(s,[0])}else{return String.fromCodePoint(i)}};var code=_b_.code=$B.make_class("code");code.__repr__=code.__str__=function(_self){return``};code.__getattribute__=function(self,attr){return self[attr]};$B.set_func_names(code,"builtins");_b_.compile=function(){var $=$B.args("compile",7,{source:null,filename:null,mode:null,flags:null,dont_inherit:null,optimize:null,_feature_version:null},["source","filename","mode","flags","dont_inherit","optimize","_feature_version"],arguments,{flags:0,dont_inherit:false,optimize:-1,_feature_version:0},null,null);var module_name="$exec_"+$B.UUID();$.__class__=code;$.co_flags=$.flags;$.co_name="";var filename=$.co_filename=$.filename;var interactive=$.mode=="single"&&$.flags&512;$B.file_cache[filename]=$.source;$B.url2name[filename]=module_name;if($.flags&$B.PyCF_TYPE_COMMENTS){}if($B.$isinstance($.source,_b_.bytes)){var encoding="utf-8",lfpos=$.source.source.indexOf(10),first_line,second_line;if(lfpos==-1){first_line=$.source}else{first_line=_b_.bytes.$factory($.source.source.slice(0,lfpos))}first_line=_b_.bytes.decode(first_line,"latin-1");var encoding_re=/^[\t\f]*#.*?coding[:=][\t]*([-_.a-zA-Z0-9]+)/;var mo=first_line.match(encoding_re);if(mo){encoding=mo[1]}else if(lfpos>-1){var rest=$.source.source.slice(lfpos+1);lfpos=rest.indexOf(10);if(lfpos>-1){second_line=_b_.bytes.$factory(rest.slice(0,lfpos))}else{second_line=_b_.bytes.$factory(rest)}second_line=_b_.bytes.decode(second_line,"latin-1");mo=second_line.match(encoding_re);if(mo){encoding=mo[1]}}$.source=_b_.bytes.decode($.source,encoding)}if(!$B.$isinstance(filename,[_b_.bytes,_b_.str])){$B.warn(_b_.DeprecationWarning,`path should be string, bytes, or os.PathLike, `+`not ${$B.class_name(filename)}`)}if(interactive&&!$.source.endsWith("\n")){var lines=$.source.split("\n"),last_line=$B.last(lines);if(last_line.startsWith(" ")){var msg="unexpected EOF while parsing",exc=_b_.SyntaxError.$factory();exc.filename=filename;exc.lineno=exc.end_lineno=lines.length-1;exc.offset=0;exc.end_offset=last_line.length-1;exc.text=last_line;exc.args=[msg,$B.fast_tuple([filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset])];throw exc}}if($.source.__class__&&$.source.__class__.__module__=="ast"){$B.imported._ast._validate($.source);$._ast=$.source;delete $.source;return $}var _ast,parser;try{var parser_mode=$.mode=="eval"?"eval":"file";parser=new $B.Parser($.source,filename,parser_mode);parser.flags=$.flags;_ast=$B._PyPegen.run_parser(parser)}catch(err){if($.mode=="single"){var tester=parser.tokens[parser.tokens.length-2];if(tester&&(tester.type=="NEWLINE"&&$.flags&16384||tester.type=="DEDENT"&&$.flags&512)){err.__class__=_b_.SyntaxError;err.args[0]="incomplete input"}}throw err}if($.mode=="single"&&_ast.body.length==1&&_ast.body[0]instanceof $B.ast.Expr){parser=new $B.Parser($.source,filename,"eval");_ast=$B._PyPegen.run_parser(parser);$.single_expression=true}if($.flags==$B.PyCF_ONLY_AST){delete $B.url2name[filename];let res=$B.ast_js_to_py(_ast);res.$js_ast=_ast;return res}delete 
$B.url2name[filename];$._ast=$B.ast_js_to_py(_ast);$._ast.$js_ast=_ast;var future=$B.future_features(_ast,filename);var symtable=$B._PySymtable_Build(_ast,filename,future);$B.js_from_root({ast:_ast,symtable:symtable,filename:filename,src:$.source});return $};_b_.debug=$B.debug>0;_b_.delattr=function(obj,attr){check_nb_args_no_kw("delattr",2,arguments);if(typeof attr!="string"){throw _b_.TypeError.$factory("attribute name must be string, not '"+$B.class_name(attr)+"'")}return $B.$getattr(obj,"__delattr__")(attr)};$B.$delete=function(name,is_global){function del(obj){if(obj.__class__===$B.generator){obj.js_gen.return()}}var found=false,frame=$B.frame_obj.frame;if(!is_global){if(frame[1][name]!==undefined){found=true;del(frame[1][name]);delete frame[1][name]}}else{if(frame[2]!=frame[0]&&frame[3][name]!==undefined){found=true;del(frame[3][name]);delete frame[3][name]}}if(!found){throw $B.name_error(name)}};_b_.dir=function(obj){if(obj===undefined){var locals=_b_.locals();return _b_.sorted(locals)}check_nb_args_no_kw("dir",1,arguments);var klass=obj.__class__||$B.get_class(obj);if(obj.$is_class){var dir_func=$B.$getattr(obj.__class__,"__dir__");return $B.$call(dir_func)(obj)}try{let res=$B.$call($B.$getattr(klass,"__dir__"))(obj);res=_b_.list.$factory(res);return res}catch(err){if($B.get_option("debug")>2){console.log("error in dir, obj",obj,"klass",klass,$B.$getattr(klass,"__dir__"),err.message)}throw err}};_b_.divmod=function(x,y){check_nb_args_no_kw("divmod",2,arguments);try{return $B.rich_op("__divmod__",x,y)}catch(err){if($B.is_exc(err,[_b_.TypeError])){return _b_.tuple.$factory([$B.rich_op("__floordiv__",x,y),$B.rich_op("__mod__",x,y)])}throw err}};var enumerate=_b_.enumerate=$B.make_class("enumerate",(function(){var $ns=$B.args("enumerate",2,{iterable:null,start:null},["iterable","start"],arguments,{start:0},null,null),_iter=iter($ns["iterable"]),start=$ns["start"];return{__class__:enumerate,__name__:"enumerate iterator",counter:start-1,iter:_iter,start:start}}));enumerate.__iter__=function(self){self.counter=self.start-1;return self};enumerate.__next__=function(self){self.counter++;return $B.fast_tuple([self.counter,next(self.iter)])};$B.set_func_names(enumerate,"builtins");var $$eval=_b_.eval=function(){var $=$B.args("eval",4,{src:null,globals:null,locals:null,mode:null},["src","globals","locals","mode"],arguments,{globals:_b_.None,locals:_b_.None,mode:"eval"},null,null,4),src=$.src,_globals=$.globals,_locals=$.locals,mode=$.mode;if($.src.mode&&$.src.mode=="single"&&["",""].indexOf($.src.filename)>-1){_b_.print(">",$.src.source.trim())}var filename="";if(src.__class__===code){filename=src.filename}else if(!src.valueOf||typeof src.valueOf()!=="string"){throw _b_.TypeError.$factory(`${mode}() arg 1 must be a string,`+" bytes or code object")}else{src=src.valueOf()}var __name__="exec";if(_globals!==_b_.None&&_globals.__class__==_b_.dict&&_b_.dict.$contains_string(_globals,"__name__")){__name__=_b_.dict.$getitem_string(_globals,"__name__")}$B.url2name[filename]=__name__;var frame=$B.frame_obj.frame;$B.exec_scope=$B.exec_scope||{};if(typeof src=="string"&&src.endsWith("\\\n")){var exc=_b_.SyntaxError.$factory("unexpected EOF while parsing");var lines=src.split("\n"),line=lines[lines.length-2];exc.args=["unexpected EOF while parsing",[filename,lines.length-1,1,line]];exc.filename=filename;exc.text=line;throw exc}var 
local_name=("locals_"+__name__).replace(/\./g,"_"),global_name=("globals_"+__name__).replace(/\./g,"_"),exec_locals={},exec_globals={};if(_globals===_b_.None){if(frame[1]===frame[3]){global_name+="_globals";exec_locals=exec_globals=frame[3]}else{if(mode=="exec"){exec_locals=$B.clone(frame[1]);for(var attr in frame[3]){exec_locals[attr]=frame[3][attr]}exec_globals=exec_locals}else{exec_locals=frame[1];exec_globals=frame[3]}}}else{if(_globals.__class__!==_b_.dict){throw _b_.TypeError.$factory(`${mode}() globals must be `+"a dict, not "+$B.class_name(_globals))}exec_globals={};if(_globals.$jsobj){exec_globals=_globals.$jsobj}else{exec_globals=_globals.$jsobj={};for(var key of _b_.dict.$keys_string(_globals)){_globals.$jsobj[key]=_b_.dict.$getitem_string(_globals,key);if(key=="__name__"){__name__=_globals.$jsobj[key]}}_globals.$all_str=false}if(exec_globals.__builtins__===undefined){exec_globals.__builtins__=_b_.__builtins__}if(_locals===_b_.None){exec_locals=exec_globals}else{if(_locals===_globals){global_name+="_globals";exec_locals=exec_globals}else if(_locals.$jsobj){for(let key in _locals.$jsobj){exec_globals[key]=_locals.$jsobj[key]}}else{if(_locals.$jsobj){exec_locals=_locals.$jsobj}else{var klass=$B.get_class(_locals),getitem=$B.$call($B.$getattr(klass,"__getitem__")),setitem=$B.$call($B.$getattr(klass,"__setitem__"));exec_locals=new Proxy(_locals,{get(target,prop){if(prop=="$target"){return target}try{return getitem(target,prop)}catch(err){return undefined}},set(target,prop,value){return setitem(target,prop,value)}})}}}}var save_frame_obj=$B.frame_obj;var _ast;frame=[__name__,exec_locals,__name__,exec_globals];frame.is_exec_top=true;frame.__file__=filename;frame.$f_trace=$B.enter_frame(frame);var _frame_obj=$B.frame_obj;frame.$lineno=1;if(src.__class__===code){_ast=src._ast;if(_ast.$js_ast){_ast=_ast.$js_ast}else{_ast=$B.ast_py_to_js(_ast)}}try{if(!_ast){var _mode=mode=="eval"?"eval":"file";var parser=new $B.Parser(src,filename,_mode);_ast=$B._PyPegen.run_parser(parser)}var future=$B.future_features(_ast,filename),symtable=$B._PySymtable_Build(_ast,filename,future),js_obj=$B.js_from_root({ast:_ast,symtable:symtable,filename:filename,src:src,namespaces:{local_name:local_name,exec_locals:exec_locals,global_name:global_name,exec_globals:exec_globals}}),js=js_obj.js}catch(err){if(err.args){if(err.args[1]){exec_locals.$lineno=err.args[1][1]}}else{console.log("JS Error",err.message)}$B.frame_obj=save_frame_obj;throw err}if(mode=="eval"){js=`var __file__ = '${filename}'\n`+`var locals = ${local_name}\nreturn ${js}`}else if(src.single_expression){js=`var __file__ = '${filename}'\n`+`var result = ${js}\n`+`if(result !== _b_.None){\n`+`_b_.print(result)\n`+`}`}try{var exec_func=new Function("$B","_b_",local_name,global_name,"frame","_frame_obj",js)}catch(err){if($B.get_option("debug")>1){console.log("eval() error\n",$B.format_indent(js,0));console.log("-- python source\n",src)}$B.frame_obj=save_frame_obj;throw err}try{var res=exec_func($B,_b_,exec_locals,exec_globals,frame,_frame_obj)}catch(err){if($B.get_option("debug")>2){console.log("Python code\n",src,"\nexec func",$B.format_indent(exec_func+"",0),"\n filename",filename,"\n name from filename",$B.url2name[filename],"\n local_name",local_name,"\n exec_locals",exec_locals,"\n global_name",global_name,"\n exec_globals",exec_globals,"\n frame",frame,"\n _ast",_ast,"\n js",js)}$B.frame_obj=save_frame_obj;throw err}if(_globals!==_b_.None&&!_globals.$jsobj){for(var _key in 
exec_globals){if(!_key.startsWith("$")){_b_.dict.$setitem(_globals,_key,exec_globals[_key])}}}$B.frame_obj=save_frame_obj;return res};$$eval.$is_func=true;var exec=_b_.exec=function(){var $=$B.args("exec",3,{src:null,globals:null,locals:null},["src","globals","locals"],arguments,{globals:_b_.None,locals:_b_.None},null,null,3),src=$.src,globals=$.globals,locals=$.locals;$$eval(src,globals,locals,"exec");return _b_.None};exec.$is_func=true;var exit=_b_.exit=function(){throw _b_.SystemExit};exit.__repr__=exit.__str__=function(){return"Use exit() or Ctrl-Z plus Return to exit"};var filter=_b_.filter=$B.make_class("filter",(function(func,iterable){check_nb_args_no_kw("filter",2,arguments);iterable=iter(iterable);if(func===_b_.None){func=$B.$bool}return{__class__:filter,func:func,iterable:iterable}}));filter.__iter__=function(self){return self};filter.__next__=function(self){while(true){var _item=next(self.iterable);if(self.func(_item)){return _item}}};$B.set_func_names(filter,"builtins");_b_.format=function(){var $=$B.args("format",2,{value:null,format_spec:null},["value","format_spec"],arguments,{format_spec:""},null,null),value=$.value;var klass=value.__class__||$B.get_class(value);try{var method=$B.$getattr(klass,"__format__")}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.NotImplementedError("__format__ is not implemented "+"for object '"+_b_.str.$factory(value)+"'")}throw err}return $B.$call(method)(value,$.format_spec)};function attr_error(attr,obj){var cname=$B.get_class(obj);var msg="bad operand type for unary #: '"+cname+"'";switch(attr){case"__neg__":throw _b_.TypeError.$factory(msg.replace("#","-"));case"__pos__":throw _b_.TypeError.$factory(msg.replace("#","+"));case"__invert__":throw _b_.TypeError.$factory(msg.replace("#","~"));case"__call__":throw _b_.TypeError.$factory("'"+cname+"'"+" object is not callable");default:throw $B.attr_error(attr,obj)}}_b_.getattr=function(){var missing={};var $=$B.args("getattr",3,{obj:null,attr:null,_default:null},["obj","attr","_default"],arguments,{_default:missing},null,null);if(!$B.$isinstance($.attr,_b_.str)){throw _b_.TypeError.$factory("attribute name must be string, "+`not '${$B.class_name($.attr)}'`)}return $B.$getattr($.obj,_b_.str.$to_string($.attr),$._default===missing?undefined:$._default)};$B.search_in_mro=function(klass,attr){if(klass.hasOwnProperty(attr)){return klass[attr]}var mro=klass.__mro__;for(var i=0,len=mro.length;i-1}))}break;case"__mro__":if(obj.__mro__){return _b_.tuple.$factory([obj].concat(obj.__mro__))}else if(obj.__dict__&&_b_.dict.$contains_string(obj.__dict__,"__mro__")){return _b_.dict.$getitem_string(obj.__dict__,"__mro__")}throw $B.attr_error(attr,obj);case"__subclasses__":if(klass.$factory||klass.$is_class){var subclasses=obj.$subclasses||[];return function(){return subclasses}}break}if(typeof obj=="function"){var value=obj[attr];if(value!==undefined){if(attr=="__module__"){return value}}}if(!is_class&&klass.$native){if(obj.$method_cache&&obj.$method_cache[attr]){return obj.$method_cache[attr]}if($test){console.log("native class",klass,klass[attr])}if(klass[attr]===undefined){var object_attr=_b_.object[attr];if($test){console.log("object attr",object_attr)}if(object_attr!==undefined){klass[attr]=object_attr}else{if($test){console.log("obj[attr]",obj[attr])}var attrs=obj.__dict__;if(attrs&&_b_.dict.$contains_string(attrs,attr)){return _b_.dict.$getitem_string(attrs,attr)}if(_default===undefined){throw $B.attr_error(attr,obj)}return 
_default}}if(klass.$descriptors&&klass.$descriptors[attr]!==undefined){return klass[attr](obj)}if(typeof klass[attr]=="function"){var func=klass[attr];if(attr=="__new__"){func.$type="staticmethod"}if(func.$type=="staticmethod"){return func}var self=klass[attr].__class__==$B.method?klass:obj,method=klass[attr].bind(null,self);method.__class__=$B.method;method.$infos={__func__:func,__name__:attr,__self__:self,__qualname__:klass.__qualname__+"."+attr};if(typeof obj=="object"){obj.__class__=klass;obj.$method_cache=obj.$method_cache||{};if(obj.$method_cache){obj.$method_cache[attr]=method}}return method}else if(klass[attr].__class__===_b_.classmethod){return _b_.classmethod.__get__(klass[attr],obj,klass)}else if(klass[attr]!==undefined){return klass[attr]}attr_error(rawname,klass)}var attr_func;if(is_class){if($test){console.log("obj is class",obj);console.log("is a type ?",_b_.isinstance(klass,_b_.type));console.log("is type",klass===_b_.type)}if(klass===_b_.type){attr_func=_b_.type.__getattribute__}else{attr_func=$B.$call($B.$getattr(klass,"__getattribute__"))}if($test){console.log("attr func",attr_func)}}else{attr_func=klass.__getattribute__;if(attr_func===undefined){for(var cls of klass.__mro__){attr_func=cls["__getattribute__"];if(attr_func!==undefined){break}}}if($test){console.log("attr func",attr_func)}}if(typeof attr_func!=="function"){console.log(attr+" is not a function "+attr_func,klass)}var odga=_b_.object.__getattribute__;if($test){console.log("attr_func is odga ?",attr_func,attr_func===odga,"\n","\nobj[attr]",obj[attr])}if(attr_func===odga){res=obj[attr];if(Array.isArray(obj)&&Array.prototype[attr]!==undefined){res=undefined}else if(res===null){return null}else if(res!==undefined){if($test){console.log(obj,attr,obj[attr],res.__set__||res.$is_class)}if(res.$is_property){return _b_.property.__get__(res)}if(res.__set__===undefined||res.$is_class){if($test){console.log("return",res,res+"",res.__set__,res.$is_class)}return res}}}var getattr;try{res=attr_func(obj,attr);if($test){console.log("result of attr_func",res)}}catch(err){if($test){console.log("attr_func raised error",err.args,err.name)}if(klass===$B.module){getattr=obj.__getattr__;if($test){console.log("use module getattr",getattr);console.log(getattr+"")}if(getattr){try{return getattr(attr)}catch(err){if($test){console.log("encore erreur",err)}if(_default!==undefined){return _default}throw err}}}getattr=$B.search_in_mro(klass,"__getattr__");if($test){console.log("try getattr",getattr)}if(getattr){if($test){console.log("try with getattr",getattr)}try{return getattr(obj,attr)}catch(err){if(_default!==undefined){return _default}throw err}}if(_default!==undefined){return _default}throw err}if(res!==undefined){return res}if(_default!==undefined){return _default}attr_error(rawname,is_class?obj:klass)};_b_.globals=function(){check_nb_args_no_kw("globals",0,arguments);var res=$B.obj_dict($B.frame_obj.frame[3]);res.$jsobj.__BRYTHON__=$B.jsobj2pyobj($B);res.$is_namespace=true;return res};_b_.hasattr=function(obj,attr){check_nb_args_no_kw("hasattr",2,arguments);try{$B.$getattr(obj,attr);return true}catch(err){return false}};_b_.hash=function(obj){check_nb_args_no_kw("hash",1,arguments);return $B.$hash(obj)};$B.$hash=function(obj){if(obj.__hashvalue__!==undefined){return obj.__hashvalue__}if(typeof obj==="boolean"){return obj?1:0}if(obj.$is_class||obj.__class__===_b_.type||obj.__class__===$B.function){return obj.__hashvalue__=$B.$py_next_hash--}if(typeof obj=="string"){return _b_.str.__hash__(obj)}else if(typeof obj=="number"){return 
obj}else if(typeof obj=="boolean"){return obj?1:0}else if(obj.__class__===_b_.float){return _b_.float.$hash_func(obj)}var klass=obj.__class__||$B.get_class(obj);if(klass===undefined){throw _b_.TypeError.$factory("unhashable type: '"+_b_.str.$factory($B.jsobj2pyobj(obj))+"'")}var hash_method=_b_.type.__getattribute__(klass,"__hash__",_b_.None);if(hash_method===_b_.None){throw _b_.TypeError.$factory("unhashable type: '"+$B.class_name(obj)+"'")}if(hash_method.$infos.__func__===_b_.object.__hash__){if(_b_.type.__getattribute__(klass,"__eq__")!==_b_.object.__eq__){throw _b_.TypeError.$factory("unhashable type: '"+$B.class_name(obj)+"'","hash")}else{return obj.__hashvalue__=_b_.object.__hash__(obj)}}else{return $B.$call(hash_method)(obj)}};var help=_b_.help=function(obj){if(obj===undefined){obj="help"}if(typeof obj=="string"){var lib_url="https://docs.python.org/3/library";var parts=obj.split("."),head=[],url;while(parts.length>0){head.push(parts.shift());if($B.stdlib[head.join(".")]){url=head.join(".")}else{break}}if(url){var doc_url;if(["browser","javascript","interpreter"].indexOf(obj.split(".")[0])>-1){doc_url="/static_doc/"+($B.language=="fr"?"fr":"en")}else{doc_url=lib_url}window.open(`${doc_url}/${url}.html#`+obj);return}if(_b_[obj]){if(obj==obj.toLowerCase()){url=lib_url+`/functions.html#${obj}`}else if(["False","True","None","NotImplemented","Ellipsis","__debug__"].indexOf(obj)>-1){url=lib_url+`/constants.html#${obj}`}else if(_b_[obj].$is_class&&_b_[obj].__bases__.indexOf(_b_.Exception)>-1){url=lib_url+`/exceptions.html#${obj}`}if(url){window.open(url);return}}$B.$import("pydoc");return $B.$call($B.$getattr($B.imported.pydoc,"help"))(obj)}if(obj.__class__===$B.module){return help(obj.__name__)}try{_b_.print($B.$getattr(obj,"__doc__"))}catch(err){return""}};help.__repr__=help.__str__=function(){return"Type help() for interactive help, or help(object) "+"for help about object."};_b_.hex=function(obj){check_nb_args_no_kw("hex",1,arguments);return bin_hex_oct(16,obj)};_b_.id=function(obj){check_nb_args_no_kw("id",1,arguments);if(obj.$id!==undefined){return obj.$id}else if($B.$isinstance(obj,[_b_.str,_b_.int,_b_.float])&&!$B.$isinstance(obj,$B.long_int)){return $B.$getattr(_b_.str.$factory(obj),"__hash__")()}else{return obj.$id=$B.UUID()}};_b_.__import__=function(){var $=$B.args("__import__",5,{name:null,globals:null,locals:null,fromlist:null,level:null},["name","globals","locals","fromlist","level"],arguments,{globals:None,locals:None,fromlist:_b_.tuple.$factory(),level:0},null,null);return $B.$__import__($.name,$.globals,$.locals,$.fromlist)};_b_.input=function(msg){var res=prompt(msg||"")||"";if($B.imported["sys"]&&$B.imported["sys"].ps1){var ps1=$B.imported["sys"].ps1,ps2=$B.imported["sys"].ps2;if(msg==ps1||msg==ps2){console.log(msg,res)}}return res};_b_.isinstance=function(obj,cls){check_nb_args_no_kw("isinstance",2,arguments);return $B.$isinstance(obj,cls)};$B.$isinstance=function(obj,cls){if(obj===null){return cls===$B.imported.javascript.NullType}if(obj===undefined){return false}var kls;if(Array.isArray(cls)){for(kls of cls){if($B.$isinstance(obj,kls)){return true}}return false}if(cls.__class__===$B.UnionType){for(kls of cls.items){if($B.$isinstance(obj,kls)){return true}}return false}if(cls.__class__===$B.GenericAlias){throw _b_.TypeError.$factory("isinstance() arg 2 cannot be a parameterized generic")}if(!cls.__class__&&!cls.$is_class){if(!$B.$getattr(cls,"__instancecheck__",false)){throw _b_.TypeError.$factory("isinstance() arg 2 must be a type "+"or tuple of 
types")}}if(cls===_b_.int&&(obj===True||obj===False)){return True}if(cls===_b_.bool){switch(typeof obj){case"string":return false;case"number":return false;case"boolean":return true}}var klass=obj.__class__;if(klass==undefined){if(typeof obj=="string"){if(cls==_b_.str){return true}else if($B.builtin_classes.indexOf(cls)>-1){return false}}else if(typeof obj=="number"&&Number.isFinite(obj)){if(Number.isFinite(obj)&&cls==_b_.int){return true}}klass=$B.get_class(obj)}if(klass===undefined){return false}if(klass===cls){return true}var mro=klass.__mro__;for(var i=0;i-1){return true}var sch=$B.$getattr(classinfo.__class__||$B.get_class(classinfo),"__subclasscheck__",_b_.None);if(sch==_b_.None){return false}return sch(classinfo,klass)};var iterator_class=$B.make_class("iterator",(function(getitem,len){return{__class__:iterator_class,getitem:getitem,len:len,counter:-1}}));iterator_class.__next__=function(self){self.counter++;if(self.len!==null&&self.counter==self.len){throw _b_.StopIteration.$factory("")}try{return self.getitem(self.counter)}catch(err){throw _b_.StopIteration.$factory("")}};$B.set_func_names(iterator_class,"builtins");const callable_iterator=$B.make_class("callable_iterator",(function(func,sentinel){return{__class__:callable_iterator,func:func,sentinel:sentinel}}));callable_iterator.__iter__=function(self){return self};callable_iterator.__next__=function(self){var res=self.func();if($B.rich_comp("__eq__",res,self.sentinel)){throw _b_.StopIteration.$factory()}return res};$B.set_func_names(callable_iterator,"builtins");$B.$iter=function(obj,sentinel){if(sentinel===undefined){var klass=obj.__class__||$B.get_class(obj);try{var _iter=$B.$call($B.$getattr(klass,"__iter__"))}catch(err){if(err.__class__===_b_.AttributeError){try{var gi_method=$B.$call($B.$getattr(klass,"__getitem__")),gi=function(i){return gi_method(obj,i)},len;try{len=len(obj)}catch(err){throw _b_.TypeError.$factory("'"+$B.class_name(obj)+"' object is not iterable")}return iterator_class.$factory(gi,len)}catch(err){throw _b_.TypeError.$factory("'"+$B.class_name(obj)+"' object is not iterable")}}throw err}var res=$B.$call(_iter)(obj);try{$B.$getattr(res,"__next__")}catch(err){if($B.$isinstance(err,_b_.AttributeError)){throw _b_.TypeError.$factory("iter() returned non-iterator of type '"+$B.class_name(res)+"'")}}return res}else{return callable_iterator.$factory(obj,sentinel)}};var iter=_b_.iter=function(){var $=$B.args("iter",1,{obj:null},["obj"],arguments,{},"args","kw"),sentinel;if($.args.length>0){sentinel=$.args[0]}return $B.$iter($.obj,sentinel)};var len=_b_.len=function(obj){check_nb_args_no_kw("len",1,arguments);var klass=obj.__class__||$B.get_class(obj);try{var method=$B.$getattr(klass,"__len__")}catch(err){throw _b_.TypeError.$factory("object of type '"+$B.class_name(obj)+"' has no len()")}let res=$B.$call(method)(obj);if(!$B.$isinstance(res,_b_.int)){throw _b_.TypeError.$factory(`'${$B.class_name(res)}' object cannot be interpreted as an integer`)}if(!$B.rich_comp("__ge__",res,0)){throw _b_.ValueError.$factory("ValueError: __len__() should return >= 0")}return res};_b_.locals=function(){check_nb_args("locals",0,arguments);var locals_obj=$B.frame_obj.frame[1];var class_locals=locals_obj.$target;if(class_locals){return class_locals}var res=$B.obj_dict($B.clone(locals_obj),(function(key){return key.startsWith("$")}));res.$is_namespace=true;return res};var map=_b_.map=$B.make_class("map",(function(){var $=$B.args("map",2,{func:null,it1:null},["func","it1"],arguments,{},"args",null),func=$B.$call($.func);var 
iter_args=[$B.make_js_iterator($.it1)];for(var arg of $.args){iter_args.push($B.make_js_iterator(arg))}var obj={__class__:map,args:iter_args,func:func};obj[Symbol.iterator]=function(){this.iters=[];for(var arg of this.args){this.iters.push(arg[Symbol.iterator]())}return this};obj.next=function(){var args=[];for(var iter of this.iters){var arg=iter.next();if(arg.done){return{done:true,value:null}}args.push(arg.value)}return{done:false,value:this.func.apply(null,args)}};return obj}));map.__iter__=function(self){self[Symbol.iterator]();return self};map.__next__=function(self){var args=[];for(var iter of self.iters){var arg=iter.next();if(arg.done){throw _b_.StopIteration.$factory("")}args.push(arg.value)}return self.func.apply(null,args)};$B.set_func_names(map,"builtins");function $extreme(args,op){var $op_name="min";if(op==="__gt__"){$op_name="max"}var $=$B.args($op_name,0,{},[],args,{},"args","kw");var has_default=false,func=false;for(var item of _b_.dict.$iter_items($.kw)){switch(item.key){case"key":func=item.value;func=func===_b_.None?func:$B.$call(func);break;case"default":var default_value=item.value;has_default=true;break;default:throw _b_.TypeError.$factory("'"+item.key+"' is an invalid keyword argument for this function")}}if(!func||func===_b_.None){func=x=>x}if($.args.length==0){throw _b_.TypeError.$factory($op_name+" expected 1 arguments, got 0")}else if($.args.length==1){var $iter=$B.make_js_iterator($.args[0]),res=null,x_value,extr_value;for(var x of $iter){if(res===null){extr_value=func(x);res=x}else{x_value=func(x);if($B.rich_comp(op,x_value,extr_value)){res=x;extr_value=x_value}}}if(res===null){if(has_default){return default_value}else{throw _b_.ValueError.$factory($op_name+"() arg is an empty sequence")}}else{return res}}else{if(has_default){throw _b_.TypeError.$factory("Cannot specify a default for "+$op_name+"() with multiple positional arguments")}var _args;if($B.last(args).$kw){_args=[$.args].concat($B.last(args))}else{_args=[$.args]}return $extreme.call(null,_args,op)}}_b_.max=function(){return $extreme(arguments,"__gt__")};var memoryview=_b_.memoryview=$B.make_class("memoryview",(function(obj){check_nb_args_no_kw("memoryview",1,arguments);if(obj.__class__===memoryview){return obj}if($B.get_class(obj).$buffer_protocol){return{__class__:memoryview,obj:obj,format:"B",itemsize:1,ndim:1,shape:_b_.tuple.$factory([_b_.len(obj)]),strides:_b_.tuple.$factory([1]),suboffsets:_b_.tuple.$factory([]),c_contiguous:true,f_contiguous:true,contiguous:true}}else{throw _b_.TypeError.$factory("memoryview: a bytes-like object "+"is required, not '"+$B.class_name(obj)+"'")}}));memoryview.$match_sequence_pattern=true,memoryview.$buffer_protocol=true;memoryview.$not_basetype=true;memoryview.__eq__=function(self,other){if(other.__class__!==memoryview){return false}return $B.$getattr(self.obj,"__eq__")(other.obj)};memoryview.__getitem__=function(self,key){var res;if($B.$isinstance(key,_b_.int)){var start=key*self.itemsize;if(self.format=="I"){res=self.obj.source[start];var coef=256;for(var i=1;i<4;i++){res+=self.obj.source[start+i]*coef;coef*=256}return res}else if("B".indexOf(self.format)>-1){if(key>self.obj.source.length-1){throw _b_.KeyError.$factory(key)}return self.obj.source[key]}else{return self.obj.source[key]}}res=self.obj.__class__.__getitem__(self.obj,key);if(key.__class__===_b_.slice){return memoryview.$factory(res)}};memoryview.__len__=function(self){return 
len(self.obj)/self.itemsize};memoryview.__setitem__=function(self,key,value){try{$B.$setitem(self.obj,key,value)}catch(err){throw _b_.TypeError.$factory("cannot modify read-only memory")}};memoryview.cast=function(self,format){switch(format){case"B":return memoryview.$factory(self.obj);case"I":var res=memoryview.$factory(self.obj),objlen=len(self.obj);res.itemsize=4;res.format="I";if(objlen%4!=0){throw _b_.TypeError.$factory("memoryview: length is not "+"a multiple of itemsize")}return res}};memoryview.hex=function(self){var res="",bytes=_b_.bytes.$factory(self);bytes.source.forEach((function(item){res+=item.toString(16)}));return res};memoryview.tobytes=function(self){return{__class__:_b_.bytes,source:self.obj.source}};memoryview.tolist=function(self){if(self.itemsize==1){return _b_.list.$factory(_b_.bytes.$factory(self.obj))}else if(self.itemsize==4){if(self.format=="I"){var res=[];for(var i=0;i=65536&&code<=131071||code>=131072&&code<=196607||code>=196608&&code<=262143||code>=851968&&code<=917503||code>=917504&&code<=1048575){return code}}throw _b_.TypeError.$factory("ord() expected a character, but "+"string of length "+c.length+" found")}switch($B.get_class(c)){case _b_.str:if(c.length==1){return c.charCodeAt(0)}throw _b_.TypeError.$factory("ord() expected a character, but "+"string of length "+c.length+" found");case _b_.bytes:case _b_.bytearray:if(c.source.length==1){return c.source[0]}throw _b_.TypeError.$factory("ord() expected a character, but "+"string of length "+c.source.length+" found");default:throw _b_.TypeError.$factory("ord() expected a character, but "+$B.class_name(c)+" was found")}};var complex_modulo=()=>_b_.ValueError.$factory("complex modulo");var all_ints=()=>_b_.TypeError.$factory("pow() 3rd argument not "+"allowed unless all arguments are integers");_b_.pow=function(){var $=$B.args("pow",3,{x:null,y:null,mod:null},["x","y","mod"],arguments,{mod:None},null,null),x=$.x,y=$.y,z=$.mod;if(z===_b_.None){return $B.rich_op("__pow__",x,y)}else{if($B.$isinstance(x,_b_.int)){if($B.$isinstance(y,_b_.float)){throw all_ints()}else if($B.$isinstance(y,_b_.complex)){throw complex_modulo()}else if($B.$isinstance(y,_b_.int)){if($B.$isinstance(z,_b_.complex)){throw complex_modulo()}else if(!$B.$isinstance(z,_b_.int)){throw all_ints()}}return _b_.int.__pow__(x,y,z)}else if($B.$isinstance(x,_b_.float)){throw all_ints()}else if($B.$isinstance(x,_b_.complex)){throw complex_modulo()}}};var $print=_b_.print=function(){var $ns=$B.args("print",0,{},[],arguments,{},"args","kw");var kw=$ns["kw"],end=_b_.dict.get(kw,"end","\n"),sep=_b_.dict.get(kw,"sep"," "),file=_b_.dict.get(kw,"file",$B.get_stdout());var args=$ns["args"],writer=$B.$getattr(file,"write");for(var i=0,len=args.length;i-1){has_slot=true;break}}}if(!has_slot){throw $B.attr_error(attr,klass)}}}if($test){console.log("attr",attr,"use _setattr",_setattr)}if(!_setattr){if(obj.__dict__===undefined){obj[attr]=value}else{_b_.dict.$setitem(obj.__dict__,attr,value)}if($test){console.log("no setattr, obj",obj)}}else{if($test){console.log("apply _setattr",obj,attr)}_setattr(obj,attr,value)}return None};_b_.sorted=function(){var $=$B.args("sorted",1,{iterable:null},["iterable"],arguments,{},null,"kw");var _list=_b_.list.$factory($.iterable),args=[_list].concat(Array.from(arguments).slice(1));_b_.list.sort.apply(null,args);return _list};_b_.sum=function(){var $=$B.args("sum",2,{iterable:null,start:null},["iterable","start"],arguments,{start:0},null,null),iterable=$.iterable,start=$.start;if($B.$isinstance(start,[_b_.str,_b_.bytes])){throw 
_b_.TypeError.$factory("sum() can't sum bytes"+" [use b''.join(seq) instead]")}var res=start;iterable=iter(iterable);while(true){try{var _item=next(iterable);res=$B.rich_op("__add__",res,_item)}catch(err){if(err.__class__===_b_.StopIteration){break}else{throw err}}}return res};$B.missing_super2=function(obj){obj.$missing=true;return obj};var $$super=_b_.super=$B.make_class("super",(function(_type,object_or_type){var no_object_or_type=object_or_type===undefined;if(_type===undefined&&object_or_type===undefined){var frame=$B.frame_obj.frame,pyframe=$B.imported["_sys"]._getframe(),code=$B.frame.f_code.__get__(pyframe),co_varnames=code.co_varnames;if(co_varnames.length>0){_type=frame[1].__class__;if(_type===undefined){throw _b_.RuntimeError.$factory("super(): no arguments")}object_or_type=frame[1][code.co_varnames[0]]}else{throw _b_.RuntimeError.$factory("super(): no arguments")}}if(!no_object_or_type&&Array.isArray(object_or_type)){object_or_type=object_or_type[0]}var $arg2;if(object_or_type!==undefined){if(object_or_type===_type||object_or_type.$is_class&&_b_.issubclass(object_or_type,_type)){$arg2="type"}else if($B.$isinstance(object_or_type,_type)){$arg2="object"}else{throw _b_.TypeError.$factory("super(type, obj): obj must be an instance "+"or subtype of type")}}return{__class__:$$super,__thisclass__:_type,__self_class__:object_or_type,$arg2:$arg2}}));$$super.__get__=function(self,instance){return $$super.$factory(self.__thisclass__,instance)};$$super.__getattribute__=function(self,attr){if(self.__thisclass__.$is_js_class){if(attr=="__init__"){return function(){mro[0].$js_func.call(self.__self_class__,...arguments)}}}var object_or_type=self.__self_class__,mro=self.$arg2=="type"?object_or_type.__mro__:$B.get_class(object_or_type).__mro__;var search_start=mro.indexOf(self.__thisclass__)+1,search_classes=mro.slice(search_start);var $test=attr=="new";if($test){console.log("super.__ga__, self",self,"search classes",search_classes)}var f;for(var klass of search_classes){if(klass===undefined){console.log("klass undef in super",self);console.log("mro",mro)}if(klass[attr]!==undefined){f=klass[attr];break}}if(f===undefined){if($$super[attr]!==undefined){return function(x){return function(){var args=[x];for(var i=0,len=arguments.length;i";if(self.__self_class__!==undefined){res+=", <"+self.__self_class__.__class__.__name__+" object>"}else{res+=", NULL"}return res+">"};$B.set_func_names($$super,"builtins");_b_.vars=function(){var def={},$=$B.args("vars",1,{obj:null},["obj"],arguments,{obj:def},null,null);if($.obj===def){return _b_.locals()}else{try{return $B.$getattr($.obj,"__dict__")}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("vars() argument must have __dict__ attribute")}throw err}}};var $Reader=$B.make_class("Reader");$Reader.__bool__=function(){return true};$Reader.__enter__=function(self){return self};$Reader.__exit__=function(self){$Reader.close(self)};$Reader.__init__=function(_self,initial_value=""){_self.$content=initial_value;_self.$counter=0};$Reader.__iter__=function(self){return iter($Reader.readlines(self))};$Reader.__len__=function(self){return self.lines.length};$Reader.__new__=function(cls){return{__class__:cls}};$Reader.close=function(self){self.closed=true};$Reader.flush=function(){return None};$Reader.read=function(){var $=$B.args("read",2,{self:null,size:null},["self","size"],arguments,{size:-1},null,null),self=$.self,size=$B.$GetInt($.size);if(self.closed===true){throw _b_.ValueError.$factory("I/O operation on closed 
file")}if(size<0){size=self.$length-self.$counter}var res;if(self.$binary){res=_b_.bytes.$factory(self.$content.source.slice(self.$counter,self.$counter+size))}else{res=self.$content.substr(self.$counter,size)}self.$counter+=size;return res};$Reader.readable=function(){return true};function make_lines(self){if(self.$lines===undefined){if(!self.$binary){self.$lines=self.$content.split("\n");if($B.last(self.$lines)==""){self.$lines.pop()}self.$lines=self.$lines.map((x=>x+"\n"))}else{var lines=[],pos=0,source=self.$content.source;while(pos-1){rest=rest.slice(0,size)}self.$counter=self.$content.source.length;return _b_.bytes.$factory(rest)}else{var line_source=self.$content.source.slice(self.$counter,ix+1);if(size>-1){line_source=line_source.slice(0,size)}result={__class__:_b_.bytes,source:line_source};self.$counter=ix+1;return result}}else{if(self.$counter==self.$content.length){return""}ix=self.$content.indexOf("\n",self.$counter);if(ix==-1){rest=self.$content.substr(self.$counter);if(size>-1){rest=rest.substr(0,size)}self.$counter=self.$content.length;return rest}else{result=self.$content.substring(self.$counter,ix+1);if(size>-1){result=result.substr(0,size)}self.$counter=ix+1;self.$lc+=1;return result}}};$Reader.readlines=function(){var $=$B.args("readlines",2,{self:null,hint:null},["self","hint"],arguments,{hint:-1},null,null),self=$.self,hint=$B.$GetInt($.hint);var nb_read=0;if(self.closed===true){throw _b_.ValueError.$factory("I/O operation on closed file")}self.$lc=self.$lc===undefined?-1:self.$lc;make_lines(self);var lines;if(hint<0){lines=self.$lines.slice(self.$lc+1)}else{lines=[];while(self.$lc-1;if(mode.search("w")>-1){result={$binary:is_binary,$content:is_binary?_b_.bytes.$factory():"",$encoding:encoding,closed:False,mode:mode,name:file};result.__class__=is_binary?$BufferedReader:$TextIOWrapper;$B.file_cache[file]=result.$content;return result}else if(["r","rb"].indexOf(mode)==-1){throw _b_.ValueError.$factory("Invalid mode '"+mode+"'")}if($B.$isinstance(file,_b_.str)){if($B.file_cache.hasOwnProperty($.file)){var f=$B.file_cache[$.file];result.content=f;if(is_binary&&typeof f=="string"){result.content=_b_.str.encode(f,"utf-8")}else if(f.__class__===_b_.bytes&&!is_binary){result.content=_b_.bytes.decode(f,encoding)}}else if($B.files&&$B.files.hasOwnProperty($.file)){var $res=atob($B.files[$.file].content);var source=[];for(const char of $res){source.push(char.charCodeAt(0))}result.content=_b_.bytes.$factory(source);if(!is_binary){try{result.content=_b_.bytes.decode(result.content,encoding)}catch(error){result.error=error}}}else if($B.protocol!="file"){var req=new XMLHttpRequest;req.overrideMimeType("text/plain;charset=x-user-defined");req.onreadystatechange=function(){if(this.readyState!=4){return}var status=this.status;if(status==404){result.error=_b_.FileNotFoundError.$factory(file)}else if(status!=200){result.error=_b_.IOError.$factory("Could not open file "+file+" : status "+status)}else{var bytes=[];for(var i=0,len=this.response.length;i63232){cp-=63232}bytes.push(cp)}result.content=_b_.bytes.$factory(bytes);if(!is_binary){try{result.content=_b_.bytes.decode(result.content,encoding)}catch(error){result.error=error}}}};var cache=$B.get_option("cache"),fake_qs=cache?"":"?foo="+(new Date).getTime();req.open("GET",encodeURI(file+fake_qs),false);req.send()}else{throw _b_.FileNotFoundError.$factory("cannot use 'open()' with protocol 'file'")}if(result.error!==undefined){throw result.error}var 
res={$binary:is_binary,$content:result.content,$counter:0,$encoding:encoding,$length:is_binary?result.content.source.length:result.content.length,closed:False,mode:mode,name:file};res.__class__=is_binary?$BufferedReader:$TextIOWrapper;return res}else{throw _b_.TypeError.$factory("invalid argument for open(): "+_b_.str.$factory(file))}};var zip=_b_.zip=$B.make_class("zip",(function(){var res={__class__:zip,items:[]};if(arguments.length==0){return res}var $ns=$B.args("zip",0,{},[],arguments,{},"args","kw");var _args=$ns["args"],strict=$B.$bool(_b_.dict.get($ns.kw,"strict",false));var iters=[];for(var arg of _args){iters.push($B.make_js_iterator(arg))}return{__class__:zip,iters:iters,strict:strict}}));zip.__iter__=function(self){return self};zip.__next__=function(self){var res=[],len=self.iters.length;for(var i=0;i0){throw _b_.ValueError.$factory(`zip() argument ${i+1} is longer than argument ${i}`)}else{for(var j=1;j!x.startsWith("$")))};$B.function.__get__=function(self,obj){if(obj===_b_.None){return self}return $B.method.$factory(self,obj)};$B.function.__getattribute__=function(self,attr){if(self.$infos&&self.$infos[attr]!==undefined){if(attr=="__code__"){var res={__class__:code};for(var _attr in self.$infos.__code__){res[_attr]=self.$infos.__code__[_attr]}res.name=self.$infos.__name__;res.filename=self.$infos.__code__.co_filename;res.co_code=self+"";return res}else if(attr=="__annotations__"){return $B.obj_dict(self.$infos[attr])}else if(self.$infos.hasOwnProperty(attr)){return self.$infos[attr]}}else if(self.$infos&&self.$infos.__dict__&&_b_.dict.$contains_string(self.$infos.__dict__,attr)){return _b_.dict.$getitem_string(self.$infos.__dict__,attr)}else if(attr=="__closure__"){var free_vars=self.$infos.__code__.co_freevars;if(free_vars.length==0){return None}var cells=[];for(var i=0;i"}else{return""}};$B.function.__mro__=[_b_.object];$B.make_function_infos=function(f,__module__,__defaults__,__kwdefaults__,__doc__,arg_names,vararg,kwarg,co_argcount,co_filename,co_firstlineno,co_flags,co_freevars,co_kwonlyargcount,co_name,co_nlocals,co_posonlyargcount,co_qualname,co_varnames){f.$is_func=true;f.$infos={__module__:__module__,__defaults__:__defaults__,__kwdefaults__:__kwdefaults__,__doc__:__doc__,arg_names:arg_names,vararg:vararg,kwarg:kwarg};f.$infos.__name__=co_name;f.$infos.__qualname__=co_qualname;co_freevars.__class__=_b_.tuple;co_varnames.__class__=_b_.tuple;f.$infos.__code__={co_argcount:co_argcount,co_filename:co_filename,co_firstlineno:co_firstlineno,co_flags:co_flags,co_freevars:co_freevars,co_kwonlyargcount:co_kwonlyargcount,co_name:co_name,co_nlocals:co_nlocals,co_posonlyargcount:co_posonlyargcount,co_qualname:co_qualname,co_varnames:co_varnames}};$B.make_args_parser=function(f){if(f.$infos===undefined||f.$infos.__code__===undefined){throw _b_.AttributeError.$factory(`cannot set defauts to ${_b_.str.$factory(f)}`)}const varnames=f.$infos.__code__.co_varnames,value=f.$infos.__defaults__,offset=f.$infos.__code__.co_argcount-value.length,$kwdefaults=new Map;var nb_kw_defaults=f.$infos.__kwdefaults__===_b_.None?0:_b_.dict.__len__(f.$infos.__kwdefaults__);if(f.$infos.__kwdefaults__!==_b_.None){const kwdef=f.$infos.__kwdefaults__;for(let kw of $B.make_js_iterator(kwdef)){$kwdefaults.set(kw,$B.$getitem(kwdef,kw))}}f.$kwdefaults=$kwdefaults;f.$kwdefaults_values=[...$kwdefaults.values()];f.$hasParams=new Set;var nb_args=f.$infos.__code__.co_argcount+f.$infos.__code__.co_kwonlyargcount+(f.$infos.kwargs?1:0);for(let 
i=0;i0){named_defaults=PARAMS_NAMED_DEFAULTS_COUNT>=PARAMS_NAMED_COUNT?DEFAULTS.ALL:DEFAULTS.SOME}const PARAMS_POSONLY_COUNT=$CODE.co_posonlyargcount;const PARAMS_POS_COUNT=$CODE.co_argcount-PARAMS_POSONLY_COUNT;let pos_defaults=DEFAULTS.NONE;if(PARAMS_POS_COUNT!==0&&value.length>0){pos_defaults=value.length>=PARAMS_POS_COUNT?DEFAULTS.ALL:DEFAULTS.SOME}let posonly_defaults=DEFAULTS.NONE;if(value.length>PARAMS_POS_COUNT){posonly_defaults=value.length>=$CODE.co_argcount?DEFAULTS.ALL:DEFAULTS.SOME}f.$args_parser=f.$infos.args_parser=$B.getArgs0(PARAMS_POSONLY_COUNT!==0,posonly_defaults,PARAMS_POS_COUNT!==0,pos_defaults,$INFOS.vararg!==null,PARAMS_NAMED_COUNT!==0,named_defaults,$INFOS.kwarg!==null);return f.$args_parser};$B.function.__setattr__=function(self,attr,value){if(attr=="__closure__"){throw _b_.AttributeError.$factory("readonly attribute")}else if(attr=="__defaults__"){if(value===_b_.None){value=[]}else if(!$B.$isinstance(value,_b_.tuple)){throw _b_.TypeError.$factory("__defaults__ must be set to a tuple object")}if(self.$infos){self.$infos.__defaults__=value;$B.make_args_parser(self)}else{throw _b_.AttributeError.$factory("cannot set attribute "+attr+" of "+_b_.str.$factory(self))}}else if(attr=="__kwdefaults__"){if(value===_b_.None){value=$B.empty_dict}else if(!$B.$isinstance(value,_b_.dict)){throw _b_.TypeError.$factory("__kwdefaults__ must be set to a dict object")}if(self.$infos){self.$infos.__kwdefaults__=value;$B.make_args_parser(self)}else{throw _b_.AttributeError.$factory("cannot set attribute "+attr+" of "+_b_.str.$factory(self))}}if(self.$infos[attr]!==undefined){self.$infos[attr]=value}else{self.$attrs=self.$attrs||{};self.$attrs[attr]=value}};$B.function.$factory=function(){};$B.set_func_names($B.function,"builtins");_b_.__BRYTHON__=__BRYTHON__;$B.builtin_funcs=["__build_class__","abs","aiter","all","anext","any","ascii","bin","breakpoint","callable","chr","compile","delattr","dir","divmod","eval","exec","exit","format","getattr","globals","hasattr","hash","help","hex","id","input","isinstance","issubclass","iter","len","locals","max","min","next","oct","open","ord","pow","print","quit","repr","round","setattr","sorted","sum","vars"];var builtin_function=$B.builtin_function_or_method=$B.make_class("builtin_function_or_method",(function(f){f.__class__=builtin_function;return f}));builtin_function.__getattribute__=$B.function.__getattribute__;builtin_function.__reduce_ex__=builtin_function.__reduce__=function(self){return self.$infos.__name__};builtin_function.__repr__=builtin_function.__str__=function(self){return""};$B.set_func_names(builtin_function,"builtins");var method_wrapper=$B.make_class("method_wrapper");method_wrapper.__repr__=method_wrapper.__str__=function(self){return""};$B.set_func_names(method_wrapper,"builtins");$B.builtin_classes=["bool","bytearray","bytes","classmethod","complex","dict","enumerate","filter","float","frozenset","int","list","map","memoryview","object","property","range","reversed","set","slice","staticmethod","str","super","tuple","type","zip"];var other_builtins=["Ellipsis","False","None","True","__debug__","__import__","copyright","credits","license","NotImplemented"];var builtin_names=$B.builtin_funcs.concat($B.builtin_classes).concat(other_builtins);for(var name of 
builtin_names){try{if($B.builtin_funcs.indexOf(name)>-1){_b_[name].__class__=builtin_function;_b_[name].$infos={__module__:"builtins",__name__:name,__qualname__:name}}}catch(err){}}_b_.object.__init__.__class__=$B.wrapper_descriptor;_b_.object.__new__.__class__=builtin_function})(__BRYTHON__);(function($B){var _b_=$B.builtins;var DEFAULT_MIN_MERGE=32;var DEFAULT_MIN_GALLOPING=7;var DEFAULT_TMP_STORAGE_LENGTH=256;var POWERS_OF_TEN=[1,10,100,1e3,1e4,1e5,1e6,1e7,1e8,1e9];function log10(x){if(x<1e5){if(x<100){return x<10?0:1}if(x<1e4){return x<1e3?2:3}return 4}if(x<1e7){return x<1e6?5:6}if(x<1e9){return x<1e8?7:8}return 9}function alphabeticalCompare(a,b){if(a===b){return 0}if(~~a===a&&~~b===b){if(a===0||b===0){return a=0){return-1}if(a>=0){return 1}a=-a;b=-b}var al=log10(a),bl=log10(b);var t=0;if(albl){b*=POWERS_OF_TEN[al-bl-1];a/=10;t=1}if(a===b){return t}return a=DEFAULT_MIN_MERGE){r|=n&1;n>>=1}return n+r}function makeAscendingRun(array,lo,hi,compare){var runHi=lo+1;if(runHi===hi){return 1}if(compare(array[runHi++],array[lo])<0){while(runHi=0){runHi++}}return runHi-lo}function reverseRun(array,lo,hi){hi--;while(lo>>1;if(compare(pivot,array[mid])<0){right=mid}else{left=mid+1}}var n=start-left;switch(n){case 3:array[left+3]=array[left+2];case 2:array[left+2]=array[left+1];case 1:array[left+1]=array[left];break;default:while(n>0){array[left+n]=array[left+n-1];n--}}array[left]=pivot}}function gallopLeft(value,array,start,length,hint,compare){var lastOffset=0,maxOffset=0,offset=1;if(compare(value,array[start+hint])>0){maxOffset=length-hint;while(offset0){lastOffset=offset;offset=(offset<<1)+1;if(offset<=0){offset=maxOffset}}if(offset>maxOffset){offset=maxOffset}lastOffset+=hint;offset+=hint}else{maxOffset=hint+1;while(offsetmaxOffset){offset=maxOffset}var tmp=lastOffset;lastOffset=hint-offset;offset=hint-tmp}lastOffset++;while(lastOffset>>1);if(compare(value,array[start+m])>0){lastOffset=m+1}else{offset=m}}return offset}function gallopRight(value,array,start,length,hint,compare){var lastOffset=0,maxOffset=0,offset=1;if(compare(value,array[start+hint])<0){maxOffset=hint+1;while(offsetmaxOffset){offset=maxOffset}var tmp=lastOffset;lastOffset=hint-offset;offset=hint-tmp}else{maxOffset=length-hint;while(offset=0){lastOffset=offset;offset=(offset<<1)+1;if(offset<=0){offset=maxOffset}}if(offset>maxOffset){offset=maxOffset}lastOffset+=hint;offset+=hint}lastOffset++;while(lastOffset>>1);if(compare(value,array[start+m])<0){offset=m}else{lastOffset=m+1}}return offset}var TIM_SORT_ASSERTION="TimSortAssertion";var TimSortAssertion=function(message){this.name=TIM_SORT_ASSERTION;this.message=message};var TimSort=function(array,compare){var self={array:array,compare:compare,minGallop:DEFAULT_MIN_GALLOPING,length:array.length,tmpStorageLength:DEFAULT_TMP_STORAGE_LENGTH,stackLength:0,runStart:null,runLength:null,stackSize:0,pushRun:function(runStart,runLength){this.runStart[this.stackSize]=runStart;this.runLength[this.stackSize]=runLength;this.stackSize+=1},mergeRuns:function(){while(this.stackSize>1){var n=this.stackSize-2;if(n>=1&&this.runLength[n-1]<=this.runLength[n]+this.runLength[n+1]||n>=2&&this.runLength[n-2]<=this.runLength[n]+this.runLength[n-1]){if(this.runLength[n-1]this.runLength[n+1]){break}this.mergeAt(n)}},forceMergeRuns:function(){while(this.stackSize>1){var n=this.stackSize-2;if(n>0&&this.runLength[n-1]=DEFAULT_MIN_GALLOPING||count2>=DEFAULT_MIN_GALLOPING);if(exit){break}if(minGallop<0){minGallop=0}minGallop+=2}this.minGallop=minGallop;if(minGallop<1){this.minGallop=1}if(length1===1){for(let 
i=0;i=0;i--){array[customDest+i]=array[customCursor+i]}array[dest]=tmp[cursor2];return}var minGallop=this.minGallop;while(true){let count1=0,count2=0,exit=false;do{if(compare(tmp[cursor2],array[cursor1])<0){array[dest--]=array[cursor1--];count1++;count2=0;if(--length1===0){exit=true;break}}else{array[dest--]=tmp[cursor2--];count2++;count1=0;if(--length2===1){exit=true;break}}}while((count1|count2)=0;i--){array[customDest+i]=array[customCursor+i]}if(length1===0){exit=true;break}}array[dest--]=tmp[cursor2--];if(--length2===1){exit=true;break}count2=length2-gallopLeft(array[cursor1],tmp,0,length2,length2-1,compare);if(count2!==0){dest-=count2;cursor2-=count2;length2-=count2;customDest=dest+1;customCursor=cursor2+1;for(let i=0;i=DEFAULT_MIN_GALLOPING||count2>=DEFAULT_MIN_GALLOPING);if(exit){break}if(minGallop<0){minGallop=0}minGallop+=2}this.minGallop=minGallop;if(minGallop<1){this.minGallop=1}if(length2===1){dest-=length1;cursor1-=length1;customDest=dest+1;customCursor=cursor1+1;for(let i=length1-1;i>=0;i--){array[customDest+i]=array[customCursor+i]}array[dest]=tmp[cursor2]}else if(length2==0){throw new TimSortAssertion("mergeHigh preconditions were not respected")}else{customCursor=dest-(length2-1);for(let i=0;i>>1}self.tmp=new Array(self.tmpStorageLength);self.stackLength=self.length<120?5:self.length<1542?10:self.length<119151?19:40;self.runStart=new Array(self.stackLength);self.runLength=new Array(self.stackLength);return self};function tim_sort(array,compare,lo,hi){if(!Array.isArray(array)){throw _b_.TypeError.$factory("Can only sort arrays")}if(!compare){compare=alphabeticalCompare}else if(typeof compare!=="function"){hi=lo;lo=compare;compare=alphabeticalCompare}if(!lo){lo=0}if(!hi){hi=array.length}var remaining=hi-lo;if(remaining<2){return}var runLength=0;if(remainingminRun){force=minRun}binaryInsertionSort(array,lo,lo+force,lo+runLength,compare);runLength=force}ts.pushRun(lo,runLength);ts.mergeRuns();remaining-=runLength;lo+=runLength}while(remaining!==0);ts.forceMergeRuns()}function tim_sort_safe(array,compare){try{tim_sort(array,compare,0,array.length)}catch(e){if(e.name==TIM_SORT_ASSERTION){array.sort(compare)}else{throw e}}}$B.$TimSort=tim_sort_safe;$B.$AlphabeticalCompare=alphabeticalCompare})(__BRYTHON__);(function($B){var _b_=$B.builtins;$B.del_exc=function(frame){delete frame[1].$current_exception};$B.set_exc=function(exc,frame){if(frame===undefined){var msg="Internal error: no frame for exception "+_b_.repr(exc);console.error(["Traceback (most recent call last):",$B.print_stack(exc.$frame_obj),msg].join("\n"));if($B.get_option("debug",exc)>1){console.log(exc.args);console.log(exc.stack)}throw Error(msg)}else{frame[1].$current_exception=$B.exception(exc)}};$B.set_exc_and_trace=function(frame,exc){$B.set_exc(exc,frame);if(!exc.$in_trace_func&&frame.$f_trace!==_b_.None){frame.$f_trace=$B.trace_exception()}};$B.set_exc_and_leave=function(frame,exc){$B.set_exc_and_trace(frame,exc);$B.leave_frame()};$B.get_exc=function(){var frame=$B.frame_obj.frame;return frame[1].$current_exception};$B.set_exception_offsets=function(exc,position){exc.$positions=exc.$positions||{};exc.$positions[$B.frame_obj.count-1]=position;return exc};$B.$raise=function(arg,cause){var active_exc=$B.get_exc();if(arg===undefined){if(active_exc!==undefined){throw active_exc}throw _b_.RuntimeError.$factory("No active exception to reraise")}else{if($B.$isinstance(arg,_b_.BaseException)){if(arg.__class__===_b_.StopIteration&&$B.frame_obj.frame.$is_generator){arg=_b_.RuntimeError.$factory("generator raised 
StopIteration")}arg.__context__=active_exc===undefined?_b_.None:active_exc;arg.__cause__=cause||_b_.None;arg.__suppress_context__=cause!==undefined;throw arg}else if(arg.$is_class&&_b_.issubclass(arg,_b_.BaseException)){if(arg===_b_.StopIteration){if($B.frame_obj.frame[1].$is_generator){throw _b_.RuntimeError.$factory("generator raised StopIteration")}}var exc=$B.$call(arg)();exc.__context__=active_exc===undefined?_b_.None:active_exc;exc.__cause__=cause||_b_.None;exc.__suppress_context__=cause!==undefined;throw exc}else{throw _b_.TypeError.$factory("exceptions must derive from BaseException")}}};$B.print_stack=function(frame_obj){var stack=make_frames_stack(frame_obj||$B.frame_obj);var trace=[];for(var frame of stack){var lineno=frame.$lineno,filename=frame.__file__;if(lineno!==undefined){var local=frame[0]==frame[2]?"":frame[0];trace.push(` File "${filename}" line ${lineno}, in ${local}`);var src=$B.file_cache[filename];if(src){var lines=src.split("\n"),line=lines[lineno-1];trace.push(" "+line.trim())}}}return trace.join("\n")};$B.last_frame=function(){var frame=$B.frame_obj.frame;return`file ${frame.__file__} line ${frame.$lineno}`};$B.count_frames=function(frame_obj){frame_obj=frame_obj||$B.frame_obj;return frame_obj==null?0:frame_obj.count};$B.get_frame_at=function(pos,frame_obj){frame_obj=frame_obj||$B.frame_obj;var nb=frame_obj.count-pos-1;for(var i=0;i"};frame.f_builtins={__get__:function(_self){return $B.$getattr(_self[3].__builtins__,"__dict__")}};frame.f_code={__get__:function(_self){var res;if(_self[4]){res=_self[4].$infos.__code__}else if(_self.f_code){res=_self.f_code}else{res={co_name:_self[0]==_self[2]?"":_self[0],co_filename:_self.__file__,co_varnames:$B.fast_tuple([])};res.co_qualname=res.co_name}res.__class__=_b_.code;return res}};frame.f_globals={__get__:function(_self){if(_self.f_globals){return _self.f_globals}else if(_self.f_locals&&_self[1]==_self[3]){return _self.f_globals=_self.f_locals}else{return _self.f_globals=$B.obj_dict(_self[3])}}};frame.f_lineno={__get__:function(_self){return _self.$lineno}};frame.f_locals={__get__:function(_self){if(_self.f_locals){return _self.f_locals}else if(_self.f_globals&&_self[1]==_self[3]){return _self.f_locals=_self.f_globals}else{return _self.f_locals=$B.obj_dict(_self[1])}}};frame.f_trace={__get__:function(_self){return _self.$f_trace}};$B.set_func_names(frame,"builtins");$B._frame=frame;$B.deep_copy=function(stack){var res=[];for(const s of stack){var item=[s[0],{},s[2],{}];if(s[4]!==undefined){item.push(s[4])}for(const i of[1,3]){for(var key in s[i]){item[i][key]=s[i][key]}}res.push(item)}return res};$B.restore_frame_obj=function(frame_obj,locals){$B.frame_obj=frame_obj;$B.frame_obj.frame[1]=locals};$B.make_linenums=function(frame_obj){var res=[];frame_obj=frame_obj||$B.frame_obj;while(frame_obj!==null){res.push(frame_obj.frame.$lineno);frame_obj=frame_obj.prev}return res.reverse()};var make_frames_stack=$B.make_frames_stack=function(frame_obj){var stack=[];while(frame_obj!==null){stack[stack.length]=frame_obj.frame;frame_obj=frame_obj.prev}stack.reverse();return stack};$B.freeze=function(err){if(err.$frame_obj===undefined){err.$frame_obj=$B.frame_obj;err.$linenums=$B.make_linenums()}err.__traceback__=traceback.$factory(err)};$B.exception=function(js_exc){var exc;if(!js_exc.__class__){if(js_exc.$py_exc){return js_exc.$py_exc}var msg=js_exc.name+": "+js_exc.message;exc=_b_.JavascriptError.$factory(msg);exc.$js_exc=js_exc;if($B.is_recursion_error(js_exc)){return _b_.RecursionError.$factory("too much 
recursion")}exc.__cause__=_b_.None;exc.__context__=_b_.None;exc.__suppress_context__=false;exc.args=_b_.tuple.$factory([msg]);exc.$py_error=true;js_exc.$py_exc=exc;$B.freeze(exc)}else{exc=js_exc;$B.freeze(exc)}return exc};$B.is_exc=function(exc,exc_list){if(exc.__class__===undefined){exc=$B.exception(exc)}var this_exc_class=exc.$is_class?exc:exc.__class__;for(var i=0;i1){res+=", "+_b_.repr($B.fast_tuple(self.args.slice(1)))}return res+")"};_b_.BaseException.__str__=function(self){if(self.args.length>0&&self.args[0]!==_b_.None){return _b_.str.$factory(self.args[0])}return""};_b_.BaseException.__new__=function(cls){var err=_b_.BaseException.$factory();err.__class__=cls;err.__dict__=$B.empty_dict();return err};_b_.BaseException.__getattr__=function(self,attr){if(attr=="__context__"){var frame=$B.frame_obj.frame,ctx=frame[1].$current_exception;return ctx||_b_.None}else{throw $B.attr_error(attr,self)}};_b_.BaseException.add_note=function(self,note){if(!$B.$isinstance(note,_b_.str)){throw _b_.TypeError.$factory("note must be a str, not "+`'${$B.class_name(note)}'`)}if(self.__notes__!==undefined){self.__notes__.push(note)}else{self.__notes__=[note]}};_b_.BaseException.with_traceback=function(_self,tb){_self.__traceback__=tb;return _self};$B.set_func_names(_b_.BaseException,"builtins");make_builtin_exception(["SystemExit","KeyboardInterrupt","GeneratorExit","Exception"],_b_.BaseException);make_builtin_exception("JavascriptError",_b_.Exception);make_builtin_exception(["ArithmeticError","AssertionError","BufferError","EOFError","LookupError","MemoryError","OSError","ReferenceError","RuntimeError","SystemError","TypeError","ValueError","Warning"],_b_.Exception);make_builtin_exception("StopIteration",_b_.Exception,"value");make_builtin_exception("StopAsyncIteration",_b_.Exception,"value");make_builtin_exception("ImportError",_b_.Exception,"name");make_builtin_exception("SyntaxError",_b_.Exception,"msg");make_builtin_exception(["FloatingPointError","OverflowError","ZeroDivisionError"],_b_.ArithmeticError);make_builtin_exception("ModuleNotFoundError",_b_.ImportError,"name");make_builtin_exception(["IndexError","KeyError"],_b_.LookupError);make_builtin_exception(["BlockingIOError","ChildProcessError","ConnectionError","FileExistsError","FileNotFoundError","InterruptedError","IsADirectoryError","NotADirectoryError","PermissionError","ProcessLookupError","TimeoutError"],_b_.OSError);make_builtin_exception(["BrokenPipeError","ConnectionAbortedError","ConnectionRefusedError","ConnectionResetError"],_b_.ConnectionError);make_builtin_exception(["NotImplementedError","RecursionError"],_b_.RuntimeError);make_builtin_exception("IndentationError",_b_.SyntaxError,"msg");make_builtin_exception("TabError",_b_.IndentationError);make_builtin_exception("UnicodeError",_b_.ValueError);make_builtin_exception(["UnicodeDecodeError","UnicodeEncodeError","UnicodeTranslateError"],_b_.UnicodeError);make_builtin_exception(["DeprecationWarning","PendingDeprecationWarning","RuntimeWarning","SyntaxWarning","UserWarning","FutureWarning","ImportWarning","UnicodeWarning","BytesWarning","ResourceWarning","EncodingWarning"],_b_.Warning);make_builtin_exception(["EnvironmentError","IOError","VMSError","WindowsError"],_b_.OSError);_b_.AttributeError=$B.make_class("AttributeError",(function(){var $=$B.args("AttributeError",3,{msg:null,name:null,obj:null},["msg","name","obj"],arguments,{msg:_b_.None,name:_b_.None,obj:_b_.None},"*",null);var 
err=Error();err.__class__=_b_.AttributeError;err.__traceback__=_b_.None;err.$py_error=true;err.$frame_obj=$B.frame_obj;err.$linenums=$B.make_linenums();err.args=$B.fast_tuple($.msg===_b_.None?[]:[$.msg]);err.name=$.name;err.obj=$.obj;if(err.obj===undefined){console.log("pas de obj",$)}err.__cause__=_b_.None;err.__context__=_b_.None;err.__suppress_context__=false;return err}));_b_.AttributeError.__bases__=[_b_.Exception];_b_.AttributeError.__mro__=_b_.type.mro(_b_.AttributeError);_b_.AttributeError.__str__=function(self){return self.args[0]};$B.set_func_names(_b_.AttributeError,"builtins");$B.attr_error=function(name,obj){var msg;if(obj.$is_class){msg=`type object '${obj.__name__}'`}else{msg=`'${$B.class_name(obj)}' object`}msg+=` has no attribute '${name}'`;return _b_.AttributeError.$factory({$kw:[{name:name,obj:obj,msg:msg}]})};_b_.NameError=$B.make_class("NameError",(function(){var $=$B.args("NameError",2,{message:null,name:null},["message","name"],arguments,{message:_b_.None,name:_b_.None},"*",null,1);var err=Error();err.__class__=_b_.NameError;err.__traceback__=_b_.None;err.$py_error=true;err.$frame_obj=$B.frame_obj;err.$linenums=$B.make_linenums();err.args=$B.fast_tuple($.message===_b_.None?[]:[$.message]);err.name=$.name;err.__cause__=_b_.None;err.__context__=_b_.None;err.__suppress_context__=false;return err}));_b_.NameError.__bases__=[_b_.Exception];_b_.NameError.__mro__=_b_.type.mro(_b_.NameError).slice(1);_b_.NameError.__str__=function(self){return self.args[0]};$B.set_func_names(_b_.NameError,"builtins");make_builtin_exception("UnboundLocalError",_b_.NameError);_b_.UnboundLocalError.__str__=function(self){return self.args[0]};$B.set_func_names(_b_.UnboundLocalError,"builtins");$B.name_error=function(name){var exc=_b_.NameError.$factory(`name '${name}' is not defined`);exc.name=name;exc.$frame_obj=$B.frame_obj;return exc};$B.recursion_error=function(frame){var exc=_b_.RecursionError.$factory("maximum recursion depth exceeded");$B.set_exc(exc,frame);return exc};var MAX_CANDIDATE_ITEMS=750,MOVE_COST=2,CASE_COST=1,SIZE_MAX=65535;function LEAST_FIVE_BITS(n){return n&31}function levenshtein_distance(a,b,max_cost){if(a==b){return 0}if(a.lengthmax_cost){return max_cost+1}var buffer=[];for(var i=0;imax_cost){return max_cost+1}}return result}function substitution_cost(a,b){if(LEAST_FIVE_BITS(a)!=LEAST_FIVE_BITS(b)){return MOVE_COST}if(a==b){return 0}if(a.toLowerCase()==b.toLowerCase()){return CASE_COST}return MOVE_COST}function calculate_suggestions(dir,name){if(dir.length>=MAX_CANDIDATE_ITEMS){return null}var suggestion_distance=2**52,suggestion=null;for(var item of dir){var max_distance=(name.length+item.length+3)*MOVE_COST/6;max_distance=Math.min(max_distance,suggestion_distance-1);var current_distance=levenshtein_distance(name,item,max_distance);if(current_distance>max_distance){continue}if(!suggestion||current_distance!x.startsWith("$")));var suggestion=calculate_suggestions(locals,name);if(suggestion){return suggestion}if(frame[2]!=frame[0]){var globals=Object.keys(frame[3]).filter((x=>!x.startsWith("$")));suggestion=calculate_suggestions(globals,name);if(suggestion){return suggestion}}if(frame[4]&&frame[4].$is_method){var instance_name=frame[4].$infos.__code__.co_varnames[0],instance=frame[1][instance_name];if(_b_.hasattr(instance,name)){return`self.${name}`}}return _b_.None};$B.offer_suggestions_for_unexpected_keyword_error=function(arg_names,key){if(key===_b_.None){return _b_.None}var suggestions=calculate_suggestions(arg_names,key);return 
suggestions||_b_.None};_b_.BaseExceptionGroup=$B.make_class("BaseExceptionGFroup",(function(){var missing={},$=$B.args("BaseExceptionGroup",2,{message:null,exceptions:null},["message","exceptions"],arguments,{exceptions:missing},null,null);var err=Error();err.args=$B.fast_tuple(Array.from(arguments));err.__class__=_b_.BaseExceptionGroup;err.__traceback__=_b_.None;err.$py_error=true;err.$frame_obj=$B.frame_obj;err.$linenums=$B.make_linenums();err.message=$.message;err.exceptions=$.exceptions===missing?[]:$.exceptions;if(err.exceptions!==_b_.None){var exc_list=_b_.list.$factory(err.exceptions);var all_exceptions=true;for(var exc of exc_list){if(!$B.$isinstance(exc,_b_.Exception)){all_exceptions=false;break}}if(all_exceptions){err.__class__=_b_.ExceptionGroup}}err.__cause__=_b_.None;err.__context__=_b_.None;err.__suppress_context__=false;return err}));_b_.BaseExceptionGroup.__bases__=[_b_.BaseException];_b_.BaseExceptionGroup.__mro__=_b_.type.mro(_b_.BaseExceptionGroup);_b_.BaseExceptionGroup.__str__=function(self){return`${self.message} (${self.exceptions.length} sub-exception`+`${self.exceptions.length>1?"s":""})`};_b_.BaseExceptionGroup.split=function(self,condition){var matching_excs=[],non_matching_excs=[];for(var exc of self.exceptions){if($B.$isinstance(exc,_b_.BaseExceptionGroup)){var subsplit=_b_.BaseExceptionGroup.split(exc,condition),matching=subsplit[0],non_matching=subsplit[1];if(matching===_b_.None){non_matching_excs.push(exc)}else if(matching.exceptions.length==exc.exceptions.length){matching_excs.push(exc)}else{if(matching.exceptions.length>0){matching_excs=matching_excs.concat(matching)}if(non_matching.exceptions.length>0){non_matching_excs=non_matching_excs.concat(non_matching)}}}else if(condition(exc)){matching_excs.push(exc)}else{non_matching_excs.push(exc)}}if(matching_excs.length==0){matching_excs=_b_.None}if(non_matching_excs.length==0){non_matching_excs=_b_.None}var res=[];for(var item of[matching_excs,non_matching_excs]){var eg=_b_.BaseExceptionGroup.$factory(self.message,item);eg.__cause__=self.__cause__;eg.__context__=self.__context__;eg.__traceback__=self.__traceback__;res.push(eg)}return $B.fast_tuple(res)};_b_.BaseExceptionGroup.subgroup=function(self,condition){return _b_.BaseExceptionGroup.split(self,condition)[0]};$B.set_func_names(_b_.BaseExceptionGroup,"builtins");_b_.ExceptionGroup=$B.make_class("ExceptionGFroup",(function(){var missing={},$=$B.args("ExceptionGroup",2,{message:null,exceptions:null},["message","exceptions"],arguments,{exceptions:missing},null,null);var err=Error();err.args=$B.fast_tuple(Array.from(arguments));err.__class__=_b_.ExceptionGroup;err.__traceback__=_b_.None;err.$py_error=true;err.$frame_obj=$B.frame_obj;err.$linenums=$B.make_linenums();err.message=$.message;err.exceptions=$.exceptions===missing?[]:$.exceptions;if(err.exceptions!==_b_.None){var exc_list=_b_.list.$factory(err.exceptions);for(var exc of exc_list){if(!$B.$isinstance(exc,_b_.Exception)){throw _b_.TypeError.$factory("Cannot nest BaseExceptions in an ExceptionGroup")}}}err.__cause__=_b_.None;err.__context__=_b_.None;err.__suppress_context__=false;return err}));_b_.ExceptionGroup.__bases__=[_b_.BaseExceptionGroup,_b_.Exception];_b_.ExceptionGroup.__mro__=_b_.type.mro(_b_.ExceptionGroup);$B.set_func_names(_b_.ExceptionGroup,"builtins");function trace_from_stack(err){function handle_repeats(src,count_repeats){if(count_repeats>0){var len=trace.length;for(var 
i=0;i<2;i++){if(src){trace.push(trace[len-2]);trace.push(trace[len-1])}else{trace.push(trace[len-1])}count_repeats--;if(count_repeats==0){break}}if(count_repeats>0){trace.push(`[Previous line repeated ${count_repeats} more`+` time${count_repeats>1?"s":""}]`)}}}var trace=[],save_filename,save_lineno,save_scope,count_repeats=0,stack=err.$frame_obj===undefined?[]:make_frames_stack(err.$frame_obj),linenos=err.$linenums;for(let frame_num=0,len=stack.length;frame_num":frame[0];if(filename==save_filename&&scope==save_scope&&lineno==save_lineno){count_repeats++;continue}handle_repeats(src,count_repeats);save_filename=filename;save_lineno=lineno;save_scope=scope;count_repeats=0;var src=$B.file_cache[filename];trace.push(` File "${filename}", line ${lineno}, in `+(frame[0]==frame[2]?"":frame[0]));if(src){var lines=src.split("\n"),line=lines[lineno-1];if(line){trace.push(" "+line.trim())}else{console.log("no line",line)}if(err.$positions!==undefined){var position=err.$positions[frame_num],trace_line="";if(position&&(position[1]!=position[0]||position[2]-position[1]!=line.trim().length||position[3])){var indent=line.length-line.trimLeft().length;var paddings=[position[0]-indent,position[1]-position[0],position[2]-position[1]];for(var padding in paddings){if(padding<0){console.log("wrong values, position",position,"indent",indent);paddings[paddings.indexOf(padding)]=0}}trace_line+=" "+" ".repeat(paddings[0])+"~".repeat(paddings[1])+"^".repeat(paddings[2]);if(position[3]!==undefined){trace_line+="~".repeat(position[3]-position[2])}trace.push(trace_line)}}}else{console.log("no src for filename",filename);console.log("in file_cache",Object.keys($B.file_cache).join("\n"))}}if(count_repeats>1){let len=trace.length;for(let i=0;i<2;i++){if(src){trace.push(trace[len-2]);trace.push(trace[len-1])}else{trace.push(trace[len-1])}}trace.push(`[Previous line repeated ${count_repeats-2} more times]`)}return trace.join("\n")+"\n"}$B.error_trace=function(err){var trace="",stack=err.$frame_obj===undefined?[]:make_frames_stack(err.$frame_obj);if($B.get_option("debug",err)>1){console.log("handle error",err.__class__,err.args);console.log("stack",stack);console.log(err.stack)}if(stack.length>0){trace="Traceback (most recent call last):\n"}if(err.__class__===_b_.SyntaxError||err.__class__===_b_.IndentationError){err.$frame_obj=err.$frame_obj===null?null:err.$frame_obj.prev;trace+=trace_from_stack(err);var filename=err.filename,line=err.text,indent=line.length-line.trimLeft().length;trace+=` File "${filename}", line ${err.args[1][1]}\n`+` ${line.trim()}\n`;if(err.__class__!==_b_.IndentationError&&err.text){if($B.get_option("debug",err)>1){console.log("error args",err.args[1]);console.log("err line",line);console.log("indent",indent)}var start=err.offset-indent-1,end_offset=err.end_offset-1+(err.end_offset==err.offset?1:0),marks=" "+" ".repeat(Math.max(0,start)),nb_marks=1;if(err.end_lineno){if(err.end_lineno>err.lineno){nb_marks=line.length-start-indent}else{nb_marks=end_offset-start-indent}if(nb_marks==0&&err.end_offset==line.substr(indent).length){nb_marks=1}}marks+="^".repeat(nb_marks)+"\n";trace+=marks}trace+=`${err.__class__.__name__}: ${err.args[0]}`}else if(err.__class__!==undefined){var name=$B.class_name(err);trace+=trace_from_stack(err);var args_str=_b_.str.$factory(err);trace+=name+(args_str?": "+args_str:"");var save_frame_obj=$B.frame_obj;$B.frame_obj=err.$frame_obj;if(err.__class__===_b_.NameError){let suggestion=$B.offer_suggestions_for_name_error(err);if(suggestion!==_b_.None){trace+=`. 
Did you mean: '${suggestion}'?`}if($B.stdlib_module_names.indexOf(err.name)>-1){trace+=`. Did you forget to import '${err.name}'?`}}else if(err.__class__===_b_.AttributeError){let suggestion=$B.offer_suggestions_for_attribute_error(err);if(suggestion!==_b_.None){trace+=`. Did you mean: '${suggestion}'?`}}else if(err.__class__===_b_.ImportError){if(err.$suggestion!==_b_.None){trace+=`. Did you mean: '${err.$suggestion}'?`}}$B.frame_obj=save_frame_obj}else{trace=err+""}if(err.$js_exc){trace+="\n";if($B.get_option("debug",err)>1){trace+=err.$js_exc.stack}}return trace};$B.get_stderr=function(){if($B.imported.sys){return $B.imported.sys.stderr}return $B.imported._sys.stderr};$B.get_stdout=function(){if($B.imported.sys){return $B.imported.sys.stdout}return $B.imported._sys.stdout};$B.show_error=function(err){var trace=$B.error_trace($B.exception(err));try{var stderr=$B.get_stderr();$B.$getattr(stderr,"write")(trace);var flush=$B.$getattr(stderr,"flush",_b_.None);if(flush!==_b_.None){flush()}}catch(print_exc_err){console.debug(trace)}};$B.handle_error=function(err){if(err.$handled){return}err.$handled=true;$B.show_error(err);throw err}})(__BRYTHON__);(function($B){var _b_=$B.builtins,None=_b_.None,range={__class__:_b_.type,__mro__:[_b_.object],__qualname__:"range",$is_class:true,$native:true,$match_sequence_pattern:true,$not_basetype:true,$descriptors:{start:true,step:true,stop:true}};range.__contains__=function(self,other){if(range.__len__(self)==0){return false}try{other=$B.int_or_bool(other)}catch(err){try{range.index(self,other);return true}catch(err){return false}}var start=_b_.int.$to_bigint(self.start),stop=_b_.int.$to_bigint(self.stop),step=_b_.int.$to_bigint(self.step);other=_b_.int.$to_bigint(other);var sub=other-start,fl=sub/step,res=step*fl;if(res==sub){if(stop>start){return other>=start&&stop>other}else{return start>=other&&other>stop}}else{return false}};range.__delattr__=function(){throw _b_.AttributeError.$factory("readonly attribute")};range.__eq__=function(self,other){if($B.$isinstance(other,range)){var len=range.__len__(self);if(!$B.rich_comp("__eq__",len,range.__len__(other))){return false}if(len==0){return true}if(!$B.rich_comp("__eq__",self.start,other.start)){return false}if(len==1){return true}return $B.rich_comp("__eq__",self.step,other.step)}return false};function compute_item(r,i){var len=range.__len__(r);if(len==0){return r.start}else if(i>len){return r.stop}return $B.rich_op("__add__",r.start,$B.rich_op("__mul__",r.step,i))}range.__getitem__=function(self,rank){if($B.$isinstance(rank,_b_.slice)){var norm=_b_.slice.$conv_for_seq(rank,range.__len__(self)),substep=$B.rich_op("__mul__",self.step,norm.step),substart=compute_item(self,norm.start),substop=compute_item(self,norm.stop);return range.$factory(substart,substop,substep)}if(typeof rank!="number"){rank=$B.$GetInt(rank)}if($B.rich_comp("__gt__",0,rank)){rank=$B.rich_op("__add__",rank,range.__len__(self))}var res=$B.rich_op("__add__",self.start,$B.rich_op("__mul__",rank,self.step));if($B.rich_comp("__gt__",self.step,0)&&($B.rich_comp("__ge__",res,self.stop)||$B.rich_comp("__gt__",self.start,res))||$B.rich_comp("__gt__",0,self.step)&&($B.rich_comp("__ge__",self.stop,res)||$B.rich_comp("__gt__",res,self.start))){throw _b_.IndexError.$factory("range object index out of range")}return res};range.__hash__=function(self){var len=range.__len__(self);if(len==0){return _b_.hash(_b_.tuple.$factory([0,None,None]))}if(len==1){return _b_.hash(_b_.tuple.$factory([1,self.start,None]))}return 
_b_.hash(_b_.tuple.$factory([len,self.start,self.step]))};var RangeIterator=$B.make_class("range_iterator",(function(obj){return{__class__:RangeIterator,obj:obj}}));RangeIterator.__iter__=function(self){return self};RangeIterator.__next__=function(self){return _b_.next(self.obj)};$B.set_func_names(RangeIterator,"builtins");range.__iter__=function(self){var res={__class__:range,start:self.start,stop:self.stop,step:self.step};if(self.$safe){res.$counter=self.start-self.step}else{res.$counter=$B.rich_op("__sub__",self.start,self.step)}return RangeIterator.$factory(res)};range.__len__=function(self){var len,start=_b_.int.$to_bigint(self.start),stop=_b_.int.$to_bigint(self.stop),step=_b_.int.$to_bigint(self.step);if(self.step>0){if(self.start>=self.stop){return 0}len=1n+(stop-start-1n)/step}else{if(self.stop>=self.start){return 0}len=1n+(start-stop-1n)/-step}return _b_.int.$int_or_long(len)};range.__next__=function(self){if(self.$safe){self.$counter+=self.step;if(self.step>0&&self.$counter>=self.stop||self.step<0&&self.$counter<=self.stop){throw _b_.StopIteration.$factory("")}}else{self.$counter=$B.rich_op("__add__",self.$counter,self.step);if($B.rich_comp("__gt__",self.step,0)&&$B.rich_comp("__ge__",self.$counter,self.stop)||$B.rich_comp("__gt__",0,self.step)&&$B.rich_comp("__ge__",self.stop,self.$counter)){throw _b_.StopIteration.$factory("")}}return self.$counter};range.__reversed__=function(self){var n=$B.rich_op("__sub__",range.__len__(self),1);return range.$factory($B.rich_op("__add__",self.start,$B.rich_op("__mul__",n,self.step)),$B.rich_op("__sub__",self.start,self.step),$B.rich_op("__mul__",-1,self.step))};range.__repr__=function(self){$B.builtins_repr_check(range,arguments);var res="range("+_b_.str.$factory(self.start)+", "+_b_.str.$factory(self.stop);if(self.step!=1){res+=", "+_b_.str.$factory(self.step)}return res+")"};range.__setattr__=function(){throw _b_.AttributeError.$factory("readonly attribute")};range.start=function(self){return self.start};range.step=function(self){return self.step},range.stop=function(self){return self.stop};range.count=function(self,ob){if($B.$isinstance(ob,[_b_.int,_b_.float,_b_.bool])){return _b_.int.$factory(range.__contains__(self,ob))}else{var comp=function(other){return $B.rich_comp("__eq__",ob,other)},it=range.__iter__(self),_next=RangeIterator.__next__,nb=0;while(true){try{if(comp(_next(it))){nb++}}catch(err){if($B.$isinstance(err,_b_.StopIteration)){return nb}throw err}}}};range.index=function(){var $=$B.args("index",2,{self:null,other:null},["self","other"],arguments,{},null,null),self=$.self,other=$.other;try{other=$B.int_or_bool(other)}catch(err){var comp=function(x){return $B.rich_comp("__eq__",other,x)},it=range.__iter__(self),_next=RangeIterator.__next__,nb=0;while(true){try{if(comp(_next(it))){return nb}nb++}catch(err){if($B.$isinstance(err,_b_.StopIteration)){throw _b_.ValueError.$factory(_b_.str.$factory(other)+" not in range")}throw err}}}var sub=$B.rich_op("__sub__",other,self.start),fl=$B.rich_op("__floordiv__",sub,self.step),res=$B.rich_op("__mul__",self.step,fl);if($B.rich_comp("__eq__",res,sub)){if($B.rich_comp("__gt__",self.stop,self.start)&&$B.rich_comp("__ge__",other,self.start)&&$B.rich_comp("__gt__",self.stop,other)||$B.rich_comp("__ge__",self.start,self.stop)&&$B.rich_comp("__ge__",self.start,other)&&$B.rich_comp("__gt__",other,self.stop)){return fl}else{throw _b_.ValueError.$factory(_b_.str.$factory(other)+" not in range")}}else{throw _b_.ValueError.$factory(_b_.str.$factory(other)+" not in 
range")}};range.$factory=function(){var $=$B.args("range",3,{start:null,stop:null,step:null},["start","stop","step"],arguments,{start:null,stop:null,step:null},null,null),start=$.start,stop=$.stop,step=$.step,safe;if(stop===null&&step===null){if(start==null){throw _b_.TypeError.$factory("range expected 1 arguments, got 0")}stop=$B.PyNumber_Index(start);safe=typeof stop==="number";return{__class__:range,start:0,stop:stop,step:1,$is_range:true,$safe:safe}}if(step===null){step=1}start=$B.PyNumber_Index(start);stop=$B.PyNumber_Index(stop);step=$B.PyNumber_Index(step);if(step==0){throw _b_.ValueError.$factory("range arg 3 must not be zero")}safe=typeof start=="number"&&typeof stop=="number"&&typeof step=="number";return{__class__:range,start:start,stop:stop,step:step,$is_range:true,$safe:safe}};$B.set_func_names(range,"builtins");var slice={__class__:_b_.type,__mro__:[_b_.object],__qualname__:"slice",$is_class:true,$native:true,$not_basetype:true,$descriptors:{start:true,step:true,stop:true}};slice.__eq__=function(self,other){var conv1=conv_slice(self),conv2=conv_slice(other);return conv1[0]==conv2[0]&&conv1[1]==conv2[1]&&conv1[2]==conv2[2]};slice.__repr__=function(self){$B.builtins_repr_check(slice,arguments);return"slice("+_b_.str.$factory(self.start)+", "+_b_.str.$factory(self.stop)+", "+_b_.str.$factory(self.step)+")"};slice.__setattr__=function(){throw _b_.AttributeError.$factory("readonly attribute")};function conv_slice(self){var attrs=["start","stop","step"],res=[];for(var i=0;i=0;i--){if(cars.indexOf(self.source[i])==-1){break}}return bytes.$factory(self.source.slice(0,i+1))}function invalid(other){return!$B.$isinstance(other,[bytes,bytearray])}var bytearray={__class__:_b_.type,__mro__:[_b_.object],__qualname__:"bytearray",$buffer_protocol:true,$is_class:true};var mutable_methods=["__delitem__","clear","copy","count","index","pop","remove","reverse"];for(var method of mutable_methods){bytearray[method]=function(m){return function(self){var args=[self.source],pos=1;for(var i=1,len=arguments.length;i255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")}var pos=arg;if(arg<0){pos=self.source.length+pos}if(pos>=0&&pos=0;i--){if(!$B.$isinstance($temp[i],_b_.int)){throw _b_.TypeError.$factory("an integer is required")}else if($temp[i]>255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")}self.source.splice(start,0,$temp[i])}}catch(err){throw _b_.TypeError.$factory("can only assign an iterable")}}else{throw _b_.TypeError.$factory("list indices must be integer, not "+$B.class_name(arg))}};bytearray.append=function(self,b){if(arguments.length!=2){throw _b_.TypeError.$factory("append takes exactly one argument ("+(arguments.length-1)+" given)")}if(!$B.$isinstance(b,_b_.int)){throw _b_.TypeError.$factory("an integer is required")}if(b>255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")}self.source[self.source.length]=b};bytearray.extend=function(self,b){if(self.in_iteration){throw _b_.BufferError.$factory("Existing exports of data: object "+"cannot be re-sized")}if(b.__class__===bytearray||b.__class__===bytes){self.source=self.source.concat(b.source);return _b_.None}for(var item of $B.make_js_iterator(b)){bytearray.append(self,$B.PyNumber_Index(item))}return _b_.None};bytearray.insert=function(self,pos,b){if(arguments.length!=3){throw _b_.TypeError.$factory("insert takes exactly 2 arguments ("+(arguments.length-1)+" given)")}if(!$B.$isinstance(b,_b_.int)){throw _b_.TypeError.$factory("an integer is required")}if(b>255){throw _b_.ValueError.$factory("byte 
must be in range(0, 256)")}_b_.list.insert(self.source,pos,b)};bytearray.$factory=function(){var args=[bytearray];for(var i=0,len=arguments.length;i-1}if(self.source.length=0&&pos0){stop=Math.min(stop,self.source.length);if(stop<=start){return bytes.$factory([])}for(let i=start;i=start){return bytes.$factory([])}stop=Math.max(0,stop);for(let i=start;i>=stop;i+=step){res[pos++]=self.source[i]}}return bytes.$factory(res)}else if($B.$isinstance(arg,_b_.bool)){return self.source.__getitem__(_b_.int.$factory(arg))}};bytes.$getnewargs=function(self){return $B.fast_tuple([bytes_value(self)])};bytes.__getnewargs__=function(){return bytes.$getnewargs($B.single_arg("__getnewargs__","self",arguments))};bytes.__gt__=function(self,other){if(invalid(other)){return _b_.NotImplemented}return _b_.list.__gt__(self.source,other.source)};bytes.__hash__=function(self){if(self===undefined){return bytes.__hashvalue__||$B.$py_next_hash--}var hash=1;for(var i=0,len=self.source.length;i=0&&item<256){source.push(item)}else{throw _b_.ValueError.$factory("bytes must be in range (0, 256)")}}}return{__class__:$.cls,source:source}};bytes.$new=function(cls,source,encoding,errors){var self={__class__:cls},int_list=[],pos=0;if(source===undefined){}else if(typeof source=="number"||$B.$isinstance(source,_b_.int)){let i=source;while(i--){int_list[pos++]=0}}else{if(typeof source=="string"||$B.$isinstance(source,_b_.str)){if(encoding===undefined){throw _b_.TypeError.$factory("string argument without an encoding")}int_list=encode(source,encoding||"utf-8",errors||"strict")}else{if(encoding!==undefined){console.log("encoding",encoding);throw _b_.TypeError.$factory("encoding without a string argument")}if(Array.isArray(source)){int_list=source}else{try{int_list=_b_.list.$factory(source)}catch(err){var bytes_method=$B.$getattr(source,"__bytes__",_b_.None);if(bytes_method===_b_.None){throw _b_.TypeError.$factory("cannot convert "+`'${$B.class_name(source)}' object to bytes`)}var res=$B.$call(bytes_method)();if(!$B.$isinstance(res,_b_.bytes)){throw _b_.TypeError.$factory(`__bytes__ returned `+`non-bytes (type ${$B.class_name(res)})`)}return res}for(let i=0;i255){throw _b_.ValueError.$factory("bytes must be in range"+"(0, 256)")}}}}}self.source=int_list;self.encoding=encoding;self.errors=errors;return self};bytes.__repr__=bytes.__str__=function(self){var t=$B.special_string_repr,res="";for(var i=0,len=self.source.length;i=128){var hx=s.toString(16);hx=(hx.length==1?"0":"")+hx;res+="\\x"+hx}else if(s=="\\".charCodeAt(0)){res+="\\\\"}else{res+=String.fromCharCode(s)}}if(res.indexOf("'")>-1&&res.indexOf('"')==-1){return'b"'+res+'"'}else{return"b'"+res.replace(new RegExp("'","g"),"\\'")+"'"}};bytes.capitalize=function(self){var src=self.source,len=src.length,buffer=src.slice();if(buffer[0]>96&&buffer[0]<123){buffer[0]-=32}for(var i=1;i64&&buffer[i]<91){buffer[i]+=32}}return bytes.$factory(buffer)};bytes.center=function(){var $=$B.args("center",3,{self:null,width:null,fillbyte:null},["self","width","fillbyte"],arguments,{fillbyte:bytes.$factory([32])},null,null);var diff=$.width-$.self.source.length;if(diff<=0){return bytes.$factory($.self.source)}var ljust=bytes.ljust($.self,$.self.source.length+Math.floor(diff/2),$.fillbyte);return bytes.rjust(ljust,$.width,$.fillbyte)};bytes.count=function(){var $=$B.args("count",4,{self:null,sub:null,start:null,end:null},["self","sub","start","end"],arguments,{start:0,end:-1},null,null);var n=0,index=-1,len=0;if(typeof $.sub=="number"){if($.sub<0||$.sub>255)throw _b_.ValueError.$factory("byte must be in 
range(0, 256)");len=1}else if(!$.sub.__class__){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name($.sub)+"'")}else if(!$.sub.__class__.$buffer_protocol){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name($.sub)+"'")}else{len=$.sub.source.length}do{index=bytes.find($.self,$.sub,Math.max(index+len,$.start),$.end);if(index!=-1){n++}}while(index!=-1);return n};bytes.decode=function(){var $=$B.args("decode",3,{self:null,encoding:null,errors:null},["self","encoding","errors"],arguments,{encoding:"utf-8",errors:"strict"},null,null);switch($.errors){case"strict":case"ignore":case"replace":case"surrogateescape":case"surrogatepass":case"xmlcharrefreplace":case"backslashreplace":return decode($.self,$.encoding,$.errors);default:}};bytes.endswith=function(){var $=$B.args("endswith",4,{self:null,suffix:null,start:null,end:null},["self","suffix","start","end"],arguments,{start:-1,end:-1},null,null);if($B.$isinstance($.suffix,bytes)){var end=$.end==-1?$.self.source.length:$.end;var res=true;for(let i=$.suffix.source.length-1,len=$.suffix.source.length;i>=0&&res;--i){res=$.self.source[end-len+i]==$.suffix.source[i]}return res}else if($B.$isinstance($.suffix,_b_.tuple)){for(let i=0;i<$.suffix.length;++i){if($B.$isinstance($.suffix[i],bytes)){if(bytes.endswith($.self,$.suffix[i],$.start,$.end)){return true}}else{throw _b_.TypeError.$factory("endswith first arg must be "+"bytes or a tuple of bytes, not "+$B.class_name($.suffix))}}return false}else{throw _b_.TypeError.$factory("endswith first arg must be bytes "+"or a tuple of bytes, not "+$B.class_name($.suffix))}};bytes.expandtabs=function(){var $=$B.args("expandtabs",2,{self:null,tabsize:null},["self","tabsize"],arguments,{tabsize:8},null,null);var tab_spaces=[];for(let i=0;i<$.tabsize;++i){tab_spaces.push(32)}var buffer=$.self.source.slice();for(let i=0;i255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")}return self.source.slice(0,end==-1?undefined:end).indexOf(sub,start)}else if(!sub.__class__){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name(sub)+"'")}else if(!sub.__class__.$buffer_protocol){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name(sub)+"'")}end=end==-1?self.source.length:Math.min(self.source.length,end);var len=sub.source.length;for(var i=start;i<=end-len;i++){var chunk=self.source.slice(i,i+len),found=true;for(var j=0;jstring.length){throw _b_.ValueError.$factory("non-hexadecimal number found "+"in fromhex() arg")}source.push(_b_.int.$factory(string.substr(i,2),16))}return $.cls.$factory(source)};bytes.hex=function(){var $=$B.args("hex",3,{self:null,sep:null,bytes_per_sep:null},["self","sep","bytes_per_sep"],arguments,{sep:"",bytes_per_sep:1},null,null),self=$.self,sep=$.sep,bytes_per_sep=$.bytes_per_sep,res="",digits="0123456789abcdef",bps=bytes_per_sep,jstart=bps,len=self.source.length;if(bytes_per_sep<0){bps=-bytes_per_sep;jstart=bps}else if(bytes_per_sep==0){sep=""}else{jstart=len%bps;if(jstart==0){jstart=bps}}for(var i=0,j=jstart;i>4];res+=digits[c&15]}return res};bytes.index=function(){var $=$B.args("index",4,{self:null,sub:null,start:null,end:null},["self","sub","start","end"],arguments,{start:0,end:-1},null,null);var index=bytes.find($.self,$.sub,$.start,$.end);console.log("index",index);if(index==-1){throw _b_.ValueError.$factory("subsection not found")}return index};bytes.isalnum=function(){var 
$=$B.args("isalnum",1,{self:null},["self"],arguments,{},null,null),self=$.self;var src=self.source,len=src.length,res=len>0;for(var i=0;i96&&src[i]<123||src[i]>64&&src[i]<91||src[i]>47&&src[i]<58}return res};bytes.isalpha=function(){var $=$B.args("isalpha",1,{self:null},["self"],arguments,{},null,null),self=$.self;var src=self.source,len=src.length,res=len>0;for(var i=0;i96&&src[i]<123||src[i]>64&&src[i]<91}return res};bytes.isdigit=function(){var $=$B.args("isdigit",1,{self:null},["self"],arguments,{},null,null),self=$.self;var src=self.source,len=src.length,res=len>0;for(let i=0;i47&&src[i]<58}return res};bytes.islower=function(){var $=$B.args("islower",1,{self:null},["self"],arguments,{},null,null),self=$.self;var src=self.source,len=src.length,res=false;for(let i=0;i96&&src[i]<123;if(src[i]>64&&src[i]<91){return false}}return res};bytes.isspace=function(){var $=$B.args("isspace",1,{self:null},["self"],arguments,{},null,null),self=$.self;var src=self.source,len=src.length;for(let i=0;i64&&src[i]<91;if(src[i]>96&&src[i]<123){return false}}return res};bytes.istitle=function(){var $=$B.args("istitle",1,{self:null},["self"],arguments,{},null,null),self=$.self;var src=self.source,len=src.length,current_char_is_letter=false,prev_char_was_letter=false,is_uppercase=false,is_lowercase=false;for(var i=0;i96&&src[i]<123;is_uppercase=src[i]>64&&src[i]<91;current_char_is_letter=is_lowercase||is_uppercase;if(current_char_is_letter&&(prev_char_was_letter&&is_uppercase)||!prev_char_was_letter&&is_lowercase){return false}prev_char_was_letter=current_char_is_letter}return true};bytes.join=function(){var $ns=$B.args("join",2,{self:null,iterable:null},["self","iterable"],arguments,{}),self=$ns["self"],iterable=$ns["iterable"];var next_func=$B.$getattr(_b_.iter(iterable),"__next__"),res=self.__class__.$factory(),empty=true;while(true){try{var item=next_func();if(empty){empty=false}else{res=bytes.__add__(res,self)}res=bytes.__add__(res,item)}catch(err){if($B.$isinstance(err,_b_.StopIteration)){break}throw err}}return res};var _lower=function(char_code){if(char_code>=65&&char_code<=90){return char_code+32}else{return char_code}};bytes.lower=function(self){var _res=[],pos=0;for(var i=0,len=self.source.length;i=0?$.count:src.length;if(!$.old.__class__){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name($.old)+"'")}else if(!$.old.__class__.$buffer_protocol){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name($.sep)+"'")}if(!$.new.__class__){throw _b_.TypeError.$factory("second argument must be a bytes-like "+"object, not '"+$B.class_name($.old)+"'")}else if(!$.new.__class__.$buffer_protocol){throw _b_.TypeError.$factory("second argument must be a bytes-like "+"object, not '"+$B.class_name($.sep)+"'")}for(var i=0;i255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")}return $.self.source.slice(start,$.end==-1?undefined:$.end).lastIndexOf(sub)+start}else if(!sub.__class__){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name($.sub)+"'")}else if(!sub.__class__.$buffer_protocol){throw _b_.TypeError.$factory("first argument must be a bytes-like "+"object, not '"+$B.class_name(sub)+"'")}end=end==-1?self.source.length:Math.min(self.source.length,end);var len=sub.source.length;for(var i=end-len;i>=start;--i){var chunk=self.source.slice(i,i+len),found=true;for(var j=0;jstart){res.push(bytes.$factory(src.slice(start,stop)))}return res};bytes.splitlines=function(){var 
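/* bytes.splitlines: first validates that keepends is a bool or int, then walks the byte sequence and slices it at line-break bytes, keeping the terminators when keepends is true (the str.splitlines code further down follows the same pattern). */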
$=$B.args("splitlines",2,{self:null,keepends:null},["self","keepends"],arguments,{keepends:false},null,null);if(!$B.$isinstance($.keepends,[_b_.bool,_b_.int])){throw _b_.TypeError("integer argument expected, got "+$B.get_class($.keepends).__name)}var keepends=_b_.int.$factory($.keepends),res=[],source=$.self.source,start=0,pos=0;if(!source.length){return res}while(pos96&&buffer[i]<123){buffer[i]-=32}else if(buffer[i]>64&&buffer[i]<91){buffer[i]+=32}}return bytes.$factory(buffer)};bytes.title=function(self){var src=self.source,len=src.length,buffer=src.slice(),current_char_is_letter=false,prev_char_was_letter=false,is_uppercase=false,is_lowercase=false;for(var i=0;i96&&buffer[i]<123;is_uppercase=buffer[i]>64&&buffer[i]<91;current_char_is_letter=is_lowercase||is_uppercase;if(current_char_is_letter){if(prev_char_was_letter&&is_uppercase){buffer[i]+=32}else if(!prev_char_was_letter&&is_lowercase){buffer[i]-=32}}prev_char_was_letter=current_char_is_letter}return bytes.$factory(buffer)};bytes.translate=function(self,table,_delete){if(_delete===undefined){_delete=[]}else if($B.$isinstance(_delete,bytes)){_delete=_delete.source}else{throw _b_.TypeError.$factory("Type "+$B.get_class(_delete).__name+" doesn't support the buffer API")}var res=[],pos=0;if($B.$isinstance(table,bytes)&&table.source.length==256){for(var i=0,len=self.source.length;i-1){continue}res[pos++]=table.source[self.source[i]]}}return bytes.$factory(res)};var _upper=function(char_code){if(char_code>=97&&char_code<=122){return char_code-32}else{return char_code}};bytes.upper=function(self){var _res=[],pos=0;for(var i=0,len=self.source.length;i>5==6){if(b[pos+1]===undefined){err_info=[byte,pos,"end"]}else if((b[pos+1]&192)!=128){err_info=[byte,pos,"continuation"]}if(err_info!==null){if(errors=="ignore"){pos++}else{throw _b_.UnicodeDecodeError.$factory("'utf-8' codec can't decode byte 0x"+err_info[0].toString(16)+" in position "+err_info[1]+(err_info[2]=="end"?": unexpected end of data":": invalid continuation byte"))}}else{let cp=byte&31;cp<<=6;cp+=b[pos+1]&63;s+=String.fromCodePoint(cp);pos+=2}}else if(byte>>4==14){if(b[pos+1]===undefined){err_info=[byte,pos,"end",pos+1]}else if((b[pos+1]&192)!=128){err_info=[byte,pos,"continuation",pos+2]}else if(b[pos+2]===undefined){err_info=[byte,pos+"-"+(pos+1),"end",pos+2]}else if((b[pos+2]&192)!=128){err_info=[byte,pos,"continuation",pos+3]}if(err_info!==null){if(errors=="ignore"){pos=err_info[3]}else if(errors=="surrogateescape"){for(let i=pos;i>3==30){if(b[pos+1]===undefined){err_info=[byte,pos,"end",pos+1]}else if((b[pos+1]&192)!=128){err_info=[byte,pos,"continuation",pos+2]}else if(b[pos+2]===undefined){err_info=[byte,pos+"-"+(pos+1),"end",pos+2]}else if((b[pos+2]&192)!=128){err_info=[byte,pos,"continuation",pos+3]}else if(b[pos+3]===undefined){err_info=[byte,pos+"-"+(pos+1)+"-"+(pos+2),"end",pos+3]}if(err_info!==null){if(errors=="ignore"){pos=err_info[3]}else if(errors=="surrogateescape"){for(let i=pos;i")}}return decoded}return s};var encode=$B.encode=function(){var $=$B.args("encode",3,{s:null,encoding:null,errors:null},["s","encoding","errors"],arguments,{encoding:"utf-8",errors:"strict"},null,null),s=$.s,encoding=$.encoding,errors=$.errors;var t=[],pos=0,enc=normalise(encoding);switch(enc){case"utf-8":case"utf_8":case"utf8":if(globalThis.TextEncoder){var encoder=new TextEncoder("utf-8",{fatal:true});try{var array=encoder.encode(s);return fast_bytes(Array.from(array))}catch(err){}}for(let i=0,len=s.length;i>6),128+(cp&63))}else 
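/* UTF-8 fallback encoder (used when TextEncoder is unavailable or fails): the branch below emits the three-byte form 224 + (cp >> 12), 128 + ((cp & 4095) >> 6), 128 + (cp & 63) for code points up to 0xFFFF; the four-byte case is only logged here. */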
if(cp<=65535){t.push(224+(cp>>12),128+((cp&4095)>>6),128+(cp&63))}else{console.log("4 bytes")}}break;case"latin":case"latin1":case"latin-1":case"latin_1":case"L1":case"iso8859_1":case"iso_8859_1":case"8859":case"cp819":case"windows1252":for(let i=0,len=s.length;i>2>other_size){return set_copy_and_difference(so,other)}var result=make_new_set();if(other_is_dict){for(let entry of set_iter_with_hash(so)){if(!_b_.dict.$lookup_by_key(other,entry.item,entry.hash).found){set_add(result,entry.item,entry.hash)}}return result}for(let entry of set_iter_with_hash(so)){if(!set_contains(other,entry.item,entry.hash)){set_add(result,entry.item,entry.hash)}}result.__class__=so.__class__;return result}function set_difference_update(so,other){if(so===other){return set.clear(so)}if($B.$isinstance(other,[set,frozenset])){for(let entry of set_iter_with_hash(other)){set_discard_entry(so,entry.item,entry.hash)}}else if($B.$isinstance(other,_b_.dict)){for(let entry of _b_.dict.$iter_items(other)){set_discard_entry(so,entry.key,entry.hash)}}else{var iterator=$B.make_js_iterator(other);for(let key of iterator){set_discard_key(so,key)}}}const DISCARD_NOTFOUND=0;function set_discard_entry(so,key,hash){var entry=set_lookkey(so,key,hash);if(!entry){return DISCARD_NOTFOUND}if(so.$store[entry.hash]!==undefined){set_remove(so,entry.hash,entry.index)}}function set_discard_key(so,key){return set_discard_entry(so,key)}function*set_iter(so){var ordered_keys=Object.keys(so.$store).sort();for(var hash of ordered_keys){if(so.$store[hash]!==undefined){for(var item of so.$store[hash]){yield item}}}}function*set_iter_with_hash(so){for(var hash in so.$store){if(so.$store[hash]!==undefined){for(var item of so.$store[hash]){yield{item:item,hash:hash}}}}}function set_remove(so,hash,index){so.$store[hash].splice(index,1);if(so.$store[hash].length==0){delete so.$store[hash]}so.$used--}function set_intersection(so,other){if(so===other){return set_copy(so)}var result=make_new_set_base_type(so);if($B.$isinstance(other,[set,frozenset])){if(other.$used>so.$used){var tmp=so;so=other;other=tmp}for(let entry of set_iter_with_hash(other)){if(set_contains(so,entry.item,entry.hash)){set_add(result,entry.item,entry.hash)}}}else if($B.$isinstance(other,_b_.dict)){for(let entry of _b_.dict.$iter_items(other)){if(set_contains(so,entry.key,entry.hash)){set_add(result,entry.key,entry.hash)}}}else{let iterator=$B.make_js_iterator(other);for(var other_item of iterator){var test=set_contains(so,other_item);if(test){set_add(result,other_item)}}}return result}function set_intersection_multi(so,args){var result=set_copy(so);if(args.length==0){return result}for(var other of args){result=set_intersection(result,other)}return result}function set_lookkey(so,key,hash){if(hash===undefined){try{hash=$B.$hash(key)}catch(err){if($B.$isinstance(key,set)){hash=$B.$hash(frozenset.$factory(key))}else{throw err}}}var items=so.$store[hash];if(items===undefined){return false}for(var index=0,len=so.$store[hash].length;index0){set.clear(self)}set.update(self,iterable);return _b_.None};var set_iterator=$B.make_class("set_iterator",(function(so){return{__class__:set_iterator,so:so,it:set_iter(so),version:so.$version}}));set_iterator.__iter__=function(self){return self};set_iterator.__length_hint__=function(self){return self.so.$used};set_iterator.__next__=function(self){var res=self.it.next();if(res.done){throw _b_.StopIteration.$factory()}if(self.so.$version!=self.version){throw _b_.RuntimeError.$factory("Set changed size during iteration")}return 
res.value};set_iterator.__reduce_ex__=function(self){return $B.fast_tuple([_b_.iter,$B.fast_tuple([set_make_items(self.so)])])};$B.set_func_names(set_iterator,"builtins");set.__iter__=function(self){return set_iterator.$factory(self)};function set_make_items(so){var items=[];for(var hash in so.$store){items=items.concat(so.$store[hash])}return items}set.__le__=function(self,other){if($B.$isinstance(other,[set,frozenset])){return set.issubset(self,other)}return _b_.NotImplemented};set.__len__=function(self){return self.$used};set.__lt__=function(self,other){if($B.$isinstance(other,[set,frozenset])){return set.__le__(self,other)&&set.__len__(self)set.__len__(other)){return false}for(let entry of set_iter_with_hash(self)){if(!set_lookkey(other,entry.item,entry.hash)){return false}}return true}else if($B.$isinstance(other,_b_.dict)){for(let entry of _b_.dict.$iter_items(self)){if(!set_lookkey(other,entry.key,entry.hash)){return false}}return true}else{var member_func=$B.member_func(other);for(let entry of set_iter_with_hash(self)){if(!member_func(entry.item)){return false}}return true}};set.issuperset=function(){var $=$B.args("issuperset",2,{self:null,other:null},["self","other"],arguments,{},"args",null),self=$.self,other=$.other;if($B.$isinstance(other,[set,frozenset])){return set.issubset(other,self)}else{return set.issubset(set.$factory(other),self)}};set.__iand__=function(self,other){if(!$B.$isinstance(other,[set,frozenset])){return _b_.NotImplemented}set.intersection_update(self,other);return self};set.__isub__=function(self,other){if(!$B.$isinstance(other,[set,frozenset])){return _b_.NotImplemented}set_difference_update(self,other);return self};set.__ixor__=function(self,other){if(!$B.$isinstance(other,[set,frozenset])){return _b_.NotImplemented}set.symmetric_difference_update(self,other);return self};set.__ior__=function(self,other){if(!$B.$isinstance(other,[set,frozenset])){return _b_.NotImplemented}set.update(self,other);return self};set.$literal=function(items){let res=make_new_set(set);for(let item of items){if(item.constant){set_add(res,item.constant[0],item.constant[1])}else if(item.starred){for(let _item of $B.make_js_iterator(item.starred)){set_add(res,_item)}}else{set_add(res,item.item)}}return res};set.$factory=function(){var args=[set].concat(Array.from(arguments)),self=set.__new__.apply(null,args);set.__init__(self,...arguments);return self};$B.set_func_names(set,"builtins");set.__class_getitem__=_b_.classmethod.$factory(set.__class_getitem__);var frozenset=$B.make_class("frozenset");frozenset.$native=true;for(var attr in set){switch(attr){case"add":case"clear":case"discard":case"pop":case"remove":case"update":break;default:if(frozenset[attr]==undefined){if(typeof set[attr]=="function"){frozenset[attr]=function(x){return function(){return set[x].apply(null,arguments)}}(attr)}else{frozenset[attr]=set[attr]}}}}frozenset.__hash__=function(self){if(self===undefined){return frozenset.__hashvalue__||$B.$py_next_hash--}if(self.__hashvalue__!==undefined){return self.__hashvalue__}var _hash=1927868237;_hash*=self.$used;for(var entry of set_iter_with_hash(self)){var _h=entry.hash;_hash^=(_h^89869747^_h<<16)*3644798167}_hash=_hash*69069+907133923;if(_hash==-1){_hash=590923713}return self.__hashvalue__=_hash};frozenset.__init__=function(){return _b_.None};frozenset.__new__=function(cls,iterable){if(cls===undefined){throw _b_.TypeError.$factory("frozenset.__new__(): not enough arguments")}var self=make_new_set(cls);if(iterable===undefined){return 
self}$B.check_nb_args_no_kw("__new__",2,arguments);if(cls===frozenset&&iterable.__class__===frozenset){return iterable}set.update(self,iterable);return self};frozenset.__repr__=function(self){$B.builtins_repr_check(frozenset,arguments);return set_repr(self)};frozenset.copy=function(self){if(self.__class__===frozenset){return self}return set_copy(self)};frozenset.$factory=function(){var args=[frozenset].concat(Array.from(arguments)),self=frozenset.__new__.apply(null,args);frozenset.__init__(self,...arguments);return self};$B.set_func_names(frozenset,"builtins");_b_.set=set;_b_.frozenset=frozenset})(__BRYTHON__);(function($B){var _b_=$B.builtins,_window=globalThis;var Module=$B.module=$B.make_class("module",(function(name,doc,$package){return{$tp_class:Module,__builtins__:_b_.__builtins__,__name__:name,__doc__:doc||_b_.None,__package__:$package||_b_.None}}));Module.__dir__=function(self){if(self.__dir__){return $B.$call(self.__dir__)()}var res=[];for(var key in self){if(key.startsWith("$")||key=="__class__"){continue}res[res.length]=key}return res.sort()};Module.__new__=function(cls,name,doc,$package){return{__class__:cls,__builtins__:_b_.__builtins__,__name__:name,__doc__:doc||_b_.None,__package__:$package||_b_.None}};Module.__repr__=Module.__str__=function(self){var res=""};Module.__setattr__=function(self,attr,value){if(self.__name__=="__builtins__"){$B.builtins[attr]=value}else{self[attr]=value}};$B.set_func_names(Module,"builtins");$B.make_import_paths=function(filename){var filepath=$B.script_domain?$B.script_domain+"/"+filename:filename;var elts=filepath.split("/");elts.pop();var script_dir=elts.join("/"),path=[$B.brython_path+"Lib",$B.brython_path+"libs",script_dir,$B.brython_path+"Lib/site-packages"];var meta_path=[],path_hooks=[];if($B.use_VFS){meta_path.push($B.finders.VFS)}var static_stdlib_import=$B.get_option_from_filename("static_stdlib_import",filename);if(static_stdlib_import!==false&&$B.protocol!="file"){meta_path.push($B.finders.stdlib_static);if(path.length>3){path.shift();path.shift()}}var pythonpath=$B.get_option_from_filename("pythonpath",filename);if(pythonpath){var ix=path.indexOf(script_dir);if(ix===-1){console.log("bizarre, script_dir",script_dir,"not in path",path)}else{path.splice(ix,1,...pythonpath)}}if($B.protocol!=="file"){meta_path.push($B.finders.path);path_hooks.push($B.url_hook)}$B.import_info[filename]={meta_path:meta_path,path_hooks:path_hooks,path:path}};function $download_module(mod,url){var xhr=new XMLHttpRequest,fake_qs="?v="+(new Date).getTime(),res=null,mod_name=mod.__name__;var timer=_window.setTimeout((function(){xhr.abort()}),5e3);if($B.get_option("cache")){xhr.open("GET",url,false)}else{xhr.open("GET",url+fake_qs,false)}xhr.send();if($B.$CORS){if(xhr.status==200||xhr.status==0){res=xhr.responseText}else{res=_b_.ModuleNotFoundError.$factory("No module named '"+mod_name+"'")}}else{if(xhr.readyState==4){if(xhr.status==200){res=xhr.responseText;mod.$last_modified=xhr.getResponseHeader("Last-Modified")}else{console.info("Trying to import "+mod_name+", not found at url "+url);res=_b_.ModuleNotFoundError.$factory("No module named '"+mod_name+"'")}}}_window.clearTimeout(timer);if(res==null){throw _b_.ModuleNotFoundError.$factory("No module named '"+mod_name+"' (res is null)")}if(res.constructor===Error){throw res}return res}$B.$download_module=$download_module;$B.addToImported=function(name,modobj){$B.imported[name]=modobj;if(modobj===undefined){throw _b_.ImportError.$factory("imported not set by 
module")}modobj.__class__=Module;modobj.__name__=name;for(var attr in modobj){if(typeof modobj[attr]=="function"){modobj[attr].$infos={__module__:name,__name__:attr,__qualname__:attr};modobj[attr].$in_js_module=true}else if($B.$isinstance(modobj[attr],_b_.type)&&!modobj[attr].hasOwnProperty("__module__")){modobj[attr].__module__=name}}};function run_js(module_contents,path,_module){try{new Function(module_contents)()}catch(err){throw $B.exception(err)}var modobj=$B.imported[_module.__name__];if(modobj===undefined){throw _b_.ImportError.$factory("imported not set by module")}modobj.__class__=Module;modobj.__name__=_module.__name__;for(var attr in modobj){if(typeof modobj[attr]=="function"){modobj[attr].$infos={__module__:_module.__name__,__name__:attr,__qualname__:attr};modobj[attr].$in_js_module=true}else if($B.$isinstance(modobj[attr],_b_.type)&&!modobj[attr].hasOwnProperty("__module__")){modobj[attr].__module__=_module.__name__}}return true}function run_py(module_contents,path,module,compiled){var filename=module.__file__;$B.file_cache[filename]=module_contents;$B.url2name[filename]=module.__name__;var root,js,mod_name=module.__name__,src;if(!compiled){src={src:module_contents,filename:filename,imported:true};try{root=$B.py2js(src,module,module.__name__,$B.builtins_scope)}catch(err){err.$frame_obj=$B.frame_obj;if($B.get_option("debug",err)>1){console.log("error in imported module",module);console.log("stack",$B.make_frames_stack(err.$frame_obj))}throw err}}try{js=compiled?module_contents:root.to_js();if($B.get_option("debug")==10){console.log("code for module "+module.__name__);console.log($B.format_indent(js,0))}src=js;js="var $module = (function(){\n"+js;var prefix="locals_";js+="return "+prefix;js+=module.__name__.replace(/\./g,"_")+"})(__BRYTHON__)\n"+"return $module";var module_id=prefix+module.__name__.replace(/\./g,"_");var mod=new Function(module_id,js)(module)}catch(err){err.$frame_obj=err.$frame_obj||$B.frame_obj;if($B.get_option("debug",err)>2){console.log(err+" for module "+module.__name__);console.log("module",module);console.log(root);if($B.get_option("debug",err)>1){console.log($B.format_indent(js,0))}for(let attr in err){console.log(attr,err[attr])}console.log("message: "+err.$message);console.log("filename: "+err.fileName);console.log("linenum: "+err.lineNumber);console.log(js.split("\n").slice(err.lineNumber-3,err.lineNumber+3).join("\n"));console.log(err.stack)}throw err}var imports=Object.keys(root.imports).join(",");try{for(let attr in mod){module[attr]=mod[attr]}module.__initializing__=false;$B.imported[module.__name__]=module;return{content:src,name:mod_name,imports:imports,is_package:module.$is_package,path:path,timestamp:$B.timestamp,source_ts:module.__spec__.loader_state.timestamp}}catch(err){console.log(""+err+" "+" for module "+module.__name__);for(let attr in err){console.log(attr+" "+err[attr])}if($B.get_option("debug")>0){console.log("line info "+__BRYTHON__.line_info)}throw err}}$B.run_py=run_py;$B.run_js=run_js;function save_in_indexedDB(record){if(dbUpdater&&$B.get_page_option("indexeddb")&&$B.indexedDB){dbUpdater.postMessage(record)}}var ModuleSpec=$B.make_class("ModuleSpec",(function(fields){fields.__class__=ModuleSpec;return fields}));ModuleSpec.__str__=ModuleSpec.__repr__=function(self){var res=`ModuleSpec(name='${self.name}', `+`loader=${_b_.str.$factory(self.loader)}, `+`origin='${self.origin}'`;if(self.submodule_search_locations!==_b_.None){res+=`, submodule_search_locations=`+`${_b_.str.$factory(self.submodule_search_locations)}`}return 
res+")"};$B.set_func_names(ModuleSpec,"builtins");function parent_package(mod_name){var parts=mod_name.split(".");parts.pop();return parts.join(".")}var VFSFinder=$B.make_class("VFSFinder",(function(){return{__class__:VFSFinder}}));VFSFinder.find_spec=function(cls,fullname){var stored,is_package,timestamp;if(!$B.use_VFS){return _b_.None}stored=$B.VFS[fullname];if(stored===undefined){return _b_.None}is_package=stored[3]||false;timestamp=stored.timestamp;if(stored){var is_builtin=$B.builtin_module_names.indexOf(fullname)>-1;return ModuleSpec.$factory({name:fullname,loader:VFSLoader.$factory(),origin:is_builtin?"built-in":"brython_stdlib",submodule_search_locations:is_package?[]:_b_.None,loader_state:{stored:stored,timestamp:timestamp},cached:_b_.None,parent:is_package?fullname:parent_package(fullname),has_location:_b_.False})}};$B.set_func_names(VFSFinder,"");for(let method in VFSFinder){if(typeof VFSFinder[method]=="function"){VFSFinder[method]=_b_.classmethod.$factory(VFSFinder[method])}}const VFSLoader=$B.make_class("VFSLoader",(function(){return{__class__:VFSLoader}}));VFSLoader.create_module=function(){return _b_.None};VFSLoader.exec_module=function(self,modobj){var stored=modobj.__spec__.loader_state.stored,timestamp=modobj.__spec__.loader_state.timestamp;var ext=stored[0],module_contents=stored[1],imports=stored[2];modobj.$is_package=stored[3]||false;var path="VFS."+modobj.__name__;path+=modobj.$is_package?"/__init__.py":ext;modobj.__file__=path;$B.file_cache[modobj.__file__]=$B.VFS[modobj.__name__][1];$B.url2name[modobj.__file__]=modobj.__name__;if(ext==".js"){run_js(module_contents,modobj.__path__,modobj)}else if($B.precompiled.hasOwnProperty(modobj.__name__)){if($B.get_option("debug")>1){console.info("load",modobj.__name__,"from precompiled")}var parts=modobj.__name__.split(".");for(var i=0;i";mod.__package__=parent;mod.$is_package=true}else{let elts=parent.split(".");elts.pop();mod.__package__=elts.join(".")}mod.__file__=path;try{var parent_id=parent.replace(/\./g,"_"),prefix="locals_";mod_js+="return "+prefix+parent_id;var $module=new Function(prefix+parent_id,mod_js)(mod)}catch(err){if($B.get_option("debug")>1){console.log("error in module",mod);console.log(err);for(var k in err){console.log(k,err[k])}console.log(Object.keys($B.imported));console.log(modobj,"mod_js",mod_js)}throw err}for(var attr in $module){mod[attr]=$module[attr]}$module.__file__=path;if(i>0){$B.builtins.setattr($B.imported[parts.slice(0,i).join(".")],parts[i],$module)}}return $module}else{var mod_name=modobj.__name__;if($B.get_option("debug")>1){console.log("run Python code from VFS",mod_name)}var path=$B.brython_path+"/"+modobj.__file__;var record=run_py(module_contents,path,modobj);record.imports=imports.join(",");record.is_package=modobj.$is_package;record.timestamp=$B.timestamp;record.source_ts=timestamp;$B.precompiled[mod_name]=record.is_package?[record.content]:record.content;let elts=mod_name.split(".");if(elts.length>1){elts.pop()}if($B.get_page_option("indexeddb")&&$B.indexedDB&&$B.idb_name){var idb_cx=indexedDB.open($B.idb_name);idb_cx.onsuccess=function(evt){var db=evt.target.result,tx=db.transaction("modules","readwrite"),store=tx.objectStore("modules"),request=store.put(record);request.onsuccess=function(){if($B.get_option("debug")>1){console.info(modobj.__name__,"stored in db")}};request.onerror=function(){console.info("could not store "+modobj.__name__)}}}}};$B.set_func_names(VFSLoader,"builtins");var 
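/* StdlibStaticFinder: meta-path finder enabled by the static_stdlib_import option; it resolves a stdlib module name to a URL under brython_path (Lib/ for Python sources, libs/ for .js), downloads it synchronously with $download_module and returns a ModuleSpec whose loader is PathLoader. */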
StdlibStaticFinder=$B.make_class("StdlibStaticFinder",(function(){return{__class__:StdlibStaticFinder}}));StdlibStaticFinder.find_spec=function(self,fullname){if($B.stdlib&&$B.get_option("static_stdlib_import")){var address=$B.stdlib[fullname];if(address===undefined){var elts=fullname.split(".");if(elts.length>1){elts.pop();var $package=$B.stdlib[elts.join(".")];if($package&&$package[1]){address=["py"]}}}if(address!==undefined){var ext=address[0],is_pkg=address[1]!==undefined,path=$B.brython_path+(ext=="py"?"Lib/":"libs/")+fullname.replace(/\./g,"/"),metadata={ext:ext,is_package:is_pkg,path:path+(is_pkg?"/__init__.py":ext=="py"?".py":".js"),address:address},_module=Module.$factory(fullname);metadata.code=$download_module(_module,metadata.path);var res=ModuleSpec.$factory({name:fullname,loader:PathLoader.$factory(),origin:metadata.path,submodule_search_locations:is_pkg?[path]:_b_.None,loader_state:metadata,cached:_b_.None,parent:is_pkg?fullname:parent_package(fullname),has_location:_b_.True});return res}}return _b_.None};$B.set_func_names(StdlibStaticFinder,"");for(let method in StdlibStaticFinder){if(typeof StdlibStaticFinder[method]=="function"){StdlibStaticFinder[method]=_b_.classmethod.$factory(StdlibStaticFinder[method])}}StdlibStaticFinder.$factory=function(){return{__class__:StdlibStaticFinder}};var PathFinder=$B.make_class("PathFinder",(function(){return{__class__:PathFinder}}));PathFinder.find_spec=function(cls,fullname,path){if($B.VFS&&$B.VFS[fullname]){return _b_.None}if($B.is_none(path)){path=get_info("path")}for(var i=0,li=path.length;i");for(let method in PathFinder){if(typeof PathFinder[method]=="function"){PathFinder[method]=_b_.classmethod.$factory(PathFinder[method])}}var PathEntryFinder=$B.make_class("PathEntryFinder",(function(path_entry,hint){return{__class__:PathEntryFinder,path_entry:path_entry,hint:hint}}));PathEntryFinder.find_spec=function(self,fullname){var loader_data={},notfound=true,hint=self.hint,base_path=self.path_entry+fullname.match(/[^.]+$/g)[0],modpaths=[],py_ext=$B.get_option("python_extension");var tryall=hint===undefined;if(tryall||hint=="py"){modpaths=modpaths.concat([[base_path+py_ext,"py",false],[base_path+"/__init__"+py_ext,"py",true]])}for(var j=0;notfound&&j-1){meta_path.splice(path_ix,1)}}for(var i=0,len=meta_path.length;i0;if(modobj==_b_.None){import_error(mod_name)}if(modobj===undefined){if($B.is_none(fromlist)){fromlist=[]}for(var i=0,modsep="",_mod_name="",len=parsed_name.length-1,__path__=_b_.None;i<=len;++i){var _parent_name=_mod_name;_mod_name+=modsep+parsed_name[i];modsep=".";modobj=$B.imported[_mod_name];if($test){console.log("iter",i,_mod_name,"\nmodobj",modobj,"\n__path__",__path__,Array.isArray(__path__));alert()}if(modobj==_b_.None){import_error(_mod_name)}else if(modobj===undefined){try{import_engine(_mod_name,__path__,from_stdlib)}catch(err){delete $B.imported[_mod_name];throw err}if($B.is_none($B.imported[_mod_name])){import_error(_mod_name)}else{if(_parent_name){_b_.setattr($B.imported[_parent_name],parsed_name[i],$B.imported[_mod_name])}}}else if($B.imported[_parent_name]&&$B.imported[_parent_name][parsed_name[i]]===undefined){_b_.setattr($B.imported[_parent_name],parsed_name[i],$B.imported[_mod_name])}if(i0){return $B.imported[mod_name]}else{let package_name=mod_name;while(parsed_name.length>1){var module=parsed_name.pop();package_name=parsed_name.join(".");if($B.imported[package_name]===undefined){$B.$import(package_name,[],{},locals);$B.imported[package_name][module]=$B.imported[mod_name];mod_name=module}}return 
$B.imported[package_name]}};$B.$import=function(mod_name,fromlist,aliases,locals){var test=false;if(test){console.log("import",mod_name,fromlist,aliases);alert()}if(mod_name=="_frozen_importlib_external"){let alias=aliases[mod_name]||mod_name;$B.$import_from("importlib",["_bootstrap_external"],{_bootstrap_external:alias},0,locals);let _bootstrap=$B.imported.importlib._bootstrap,_bootstrap_external=$B.imported.importlib["_bootstrap_external"];_bootstrap_external._set_bootstrap_module(_bootstrap);_bootstrap._bootstap_external=_bootstrap_external;let _frozen_importlib=$B.imported._frozen_importlib;if(_frozen_importlib){_frozen_importlib._bootstrap_external=_bootstrap_external}return}var level=0,frame=$B.frame_obj.frame,current_module=frame[2],parts=current_module.split(".");while(mod_name.length>0&&mod_name.startsWith(".")){level++;mod_name=mod_name.substr(1);if(parts.length==0){throw _b_.ImportError.$factory("Parent module '' not loaded, "+"cannot perform relative import")}current_module=parts.join(".");parts.pop()}if(level>0){mod_name=current_module+(mod_name.length>0?"."+mod_name:"")}parts=mod_name.split(".");if(mod_name[mod_name.length-1]=="."){parts.pop()}var norm_parts=[],prefix=true;for(var p of parts){if(prefix&&p==""){var elt=norm_parts.pop();if(elt===undefined){throw _b_.ImportError.$factory("Parent module '' not loaded, "+"cannot perform relative import")}}else{prefix=false;norm_parts.push(p)}}mod_name=norm_parts.join(".");fromlist=fromlist===undefined?[]:fromlist;aliases=aliases===undefined?{}:aliases;locals=locals===undefined?{}:locals;if(test){console.log("step 2, mod_name",mod_name,"fromlist",fromlist);alert()}if($B.get_option("debug")==10){console.log("$import "+mod_name);console.log("use VFS ? "+$B.use_VFS);console.log("use static stdlib paths ? 
"+$B.get_option("static_stdlib_import"))}var current_frame=$B.frame_obj.frame,_globals=current_frame[3],__import__=_globals["__import__"],globals=$B.obj_dict(_globals);if(__import__===undefined){__import__=$B.$__import__}var importer=typeof __import__=="function"?__import__:$B.$getattr(__import__,"__call__");if(test){console.log("use importer",importer,"mod_name",mod_name,"fromlist",fromlist);alert()}var modobj=importer(mod_name,globals,undefined,fromlist,0);if(test){console.log("step 3, mod_name",mod_name,"fromlist",fromlist);console.log("modobj",modobj);alert()}if(!fromlist||fromlist.length==0){let alias=aliases[mod_name];if(alias){locals[alias]=$B.imported[mod_name]}else{locals[norm_parts[0]]=modobj}}else{var __all__=fromlist,thunk={};if(fromlist&&fromlist[0]=="*"){if(test){console.log("import *",modobj);alert()}__all__=$B.$getattr(modobj,"__all__",thunk);if(__all__!==thunk){aliases={}}}if(__all__===thunk){for(var attr in modobj){if(attr[0]!=="_"){locals[attr]=modobj[attr]}}}else{for(let name of __all__){var alias=aliases[name]||name;try{locals[alias]=$B.$getattr(modobj,name)}catch($err1){if(!$B.is_exc($err1,[_b_.AttributeError])){throw $err1}try{$B.$getattr(__import__,"__call__")(mod_name+"."+name,globals,undefined,[],0);locals[alias]=$B.$getattr(modobj,name)}catch($err3){if(mod_name==="__future__"){var exc=_b_.SyntaxError.$factory("future feature "+name+" is not defined");throw exc}var $frame=[mod_name,modobj,mod_name,modobj],suggestion=$B.offer_suggestions_for_name_error({name:name},$frame);if($err3.$py_error){$err3.__class__=_b_.ImportError;$err3.args[0]=`cannot import name '${name}' `+`from '${mod_name}'`;if(modobj.__file__){$err3.args[0]+=` (${modobj.__file__})`}$err3.$suggestion=suggestion;throw $err3}if($B.get_option("debug")>1){console.log($err3);console.log($B.frame_obj.frame)}throw _b_.ImportError.$factory("cannot import name '"+name+"'")}}}}return locals}};$B.$import_from=function(module,names,aliases,level,locals){var current_module_name=$B.frame_obj.frame[2],parts=current_module_name.split("."),relative=level>0,current_module;if(relative){current_module=$B.imported[parts.join(".")];if(current_module===undefined){throw _b_.ImportError.$factory("attempted relative import with no known parent package")}if(!current_module.$is_package){if(parts.length==1){throw _b_.ImportError.$factory("attempted relative import with no known parent package")}else{parts.pop();current_module=$B.imported[parts.join(".")]}}while(level>0){current_module=$B.imported[parts.join(".")];if(!current_module.$is_package){throw _b_.ImportError.$factory("attempted relative import with no known parent package")}level--;parts.pop()}if(module){var submodule=current_module.__name__+"."+module;$B.$import(submodule,[],{},{});current_module=$B.imported[submodule]}if(names.length>0&&names[0]=="*"){for(var key in current_module){if(key.startsWith("$")||key.startsWith("_")){continue}locals[key]=current_module[key]}}else{for(var name of names){var alias=aliases[name]||name;if(current_module[name]!==undefined){locals[alias]=current_module[name]}else{var sub_module=current_module.__name__+"."+name;$B.$import(sub_module,[],{},{});locals[alias]=$B.imported[sub_module]}}}}else{$B.$import(module,names,aliases,locals)}};$B.import_all=function(locals,module){for(var attr in module){if("_$".indexOf(attr.charAt(0))==-1){locals[attr]=module[attr]}}};$B.$meta_path=[VFSFinder,StdlibStaticFinder,PathFinder];$B.finders={VFS:VFSFinder,stdlib_static:StdlibStaticFinder,path:PathFinder};function 
optimize_import_for_path(path,filetype){if(path.slice(-1)!="/"){path=path+"/"}var value=filetype=="none"?_b_.None:url_hook(path,filetype);$B.path_importer_cache[path]=value}var Loader={__class__:$B.$type,__mro__:[_b_.object],__name__:"Loader"};var _importlib_module={__class__:Module,__name__:"_importlib",Loader:Loader,VFSFinder:VFSFinder,StdlibStatic:StdlibStaticFinder,ImporterPath:PathFinder,UrlPathFinder:url_hook,optimize_import_for_path:optimize_import_for_path};_importlib_module.__repr__=_importlib_module.__str__=function(){return""};$B.imported["_importlib"]=_importlib_module})(__BRYTHON__);(function($B){var _b_=$B.builtins;var escape2cp={b:"\b",f:"\f",n:"\n",r:"\r",t:"\t",v:"\v"};$B.surrogates=function(s){var s1="",escaped=false;for(var char of s){if(escaped){var echar=escape2cp[char];if(echar!==undefined){s1+=echar}else{s1+="\\"+char}escaped=false}else if(char=="\\"){escaped=true}else{s1+=char}}var surrogates=[],j=0;for(var i=0,len=s1.length;i=65536){surrogates.push(j);i++}j++}return surrogates};$B.String=function(s){var srg=$B.surrogates(s);return srg.length==0?s:$B.make_String(s,srg)};$B.make_String=function(s,surrogates){var res=new String(s);res.__class__=str;res.surrogates=surrogates;return res};function pypos2jspos(s,pypos){if(s.surrogates===undefined){return pypos}var nb=0;while(s.surrogates[nb]' requires "+"string as left operand, not "+$B.class_name(item))}[_self,item]=to_string([_self,item]);return _self.includes(item)};str.__delitem__=function(){throw _b_.TypeError.$factory("'str' object doesn't support item deletion")};str.__dir__=_b_.object.__dir__;str.__eq__=function(_self,other){if($B.$isinstance(other,str)){[_self,other]=to_string([_self,other]);return _self+""==other+""}return _b_.NotImplemented};function preformat(_self,fmt){if(fmt.empty){return _b_.str.$factory(_self)}if(fmt.type&&fmt.type!="s"){throw _b_.ValueError.$factory("Unknown format code '"+fmt.type+"' for object of type 'str'")}return _self}str.__format__=function(_self,format_spec){[_self,format_spec]=to_string([_self,format_spec]);var fmt=new $B.parse_format_spec(format_spec,_self);if(fmt.sign!==undefined){throw _b_.ValueError.$factory("Sign not allowed in string format specifier")}if(fmt.precision){_self=_self.substr(0,fmt.precision)}fmt.align=fmt.align||"<";return $B.format_width(preformat(_self,fmt),fmt)};str.__getitem__=function(_self,arg){_self=to_string(_self);if($B.$isinstance(arg,_b_.int)){var len=str.__len__(_self);var pos=arg;if(arg<0){pos+=len}if(pos>=0&&pos=65536){return $B.String(_self.substr(jspos,2))}else{return _self[jspos]}}throw _b_.IndexError.$factory("string index out of range")}if($B.$isinstance(arg,_b_.slice)){return _b_.str.$getitem_slice(_self,arg)}if($B.$isinstance(arg,_b_.bool)){return _self.__getitem__(_b_.int.$factory(arg))}throw _b_.TypeError.$factory("string indices must be integers")};str.$getitem_slice=function(_self,slice){var len=str.__len__(_self),s=_b_.slice.$conv_for_seq(slice,len),start=pypos2jspos(_self,s.start),stop=pypos2jspos(_self,s.stop),step=s.step;var res="";if(step>0){if(stop<=start){return""}for(let i=start;i=start){return""}for(let i=start;i>stop;i+=step){res+=_self[i]}}return $B.String(res)};var prefix=2,suffix=3;str.$getnewargs=function(self){return $B.fast_tuple([to_string(self)])};str.__getnewargs__=function(){return str.$getnewargs($B.single_arg("__getnewargs__","self",arguments))};str.__hash__=function(_self){var s=to_string(_self);for(var i=0,h=0,len=s.length;imax_precision){throw _b_.OverflowError.$factory("precision too large")}var 
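/* integer %-formatting helper: renders the value in base 10 (long_int values via $B.long_int.to_base) and left-pads with "0" up to the requested precision, keeping a leading "-" in front of the padding; an excessive precision raised OverflowError just above. */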
s;if(val.__class__===$B.long_int){s=$B.long_int.to_base(val,10)}else{s=val.toString()}if(precision-s.length>max_repeat){throw _b_.OverflowError.$factory("precision too large")}if(s[0]==="-"){return"-"+"0".repeat(Math.max(0,precision-s.length+1))+s.slice(1)}return"0".repeat(Math.max(0,precision-s.length))+s};var format_float_precision=function(val,upper,flags,modifier){var precision=flags.precision;if(isFinite(val)){return modifier(val,precision,flags,upper)}if(val===Infinity){val="inf"}else if(val===-Infinity){val="-inf"}else{val="nan"}if(upper){return val.toUpperCase()}return val};var format_sign=function(val,flags){if(flags.sign){if(val>=0||isNaN(val)||val===Number.POSITIVE_INFINITY){return"+"}}else if(flags.space){if(val>=0||isNaN(val)){return" "}}return""};var str_format=function(val,flags){flags.pad_char=" ";return format_padding(str.$factory(val),flags)};var num_format=function(val,flags){number_check(val,flags);if($B.$isinstance(val,_b_.float)){val=parseInt(val.value)}else if(!$B.$isinstance(val,_b_.int)){val=parseInt(val)}else if($B.$isinstance(val,_b_.bool)){val=val?1:0}var s=format_int_precision(val,flags);if(flags.pad_char==="0"){if(val<0){s=s.substring(1);return"-"+format_padding(s,flags,true)}var sign=format_sign(val,flags);if(sign!==""){return sign+format_padding(s,flags,true)}}return format_padding(format_sign(val,flags)+s,flags)};var repr_format=function(val,flags){flags.pad_char=" ";return format_padding(_b_.repr(val),flags)};var ascii_format=function(val,flags,type){flags.pad_char=" ";var ascii;if(type=="bytes"){var repr=_b_.repr(val);ascii=_b_.str.encode(repr,"ascii","backslashreplace");ascii=_b_.bytes.decode(ascii,"ascii")}else{ascii=_b_.ascii(val)}return format_padding(ascii,flags)};var _float_helper=function(val,flags){number_check(val,flags);if(flags.precision===undefined){if(!flags.decimal_point){flags.precision=6}else{flags.precision=0}}else{flags.precision=parseInt(flags.precision,10);validate_precision(flags.precision)}return $B.$isinstance(val,_b_.int)?val:val.value};var validate_precision=function(precision){if(precision>20){precision=20}};function handle_special_values(value,upper){var special;if(isNaN(value)){special=upper?"NAN":"nan"}else if(value==Number.POSITIVE_INFINITY){special=upper?"INF":"inf"}else if(value==Number.NEGATIVE_INFINITY){special=upper?"-INF":"-inf"}return special}var floating_point_format=function(val,upper,flags){val=_float_helper(val,flags);var special=handle_special_values(val,upper);if(special){return format_padding(format_sign(val,flags)+special,flags)}var p=flags.precision;if(p==0){p=1}var exp_format=val.toExponential(p-1),e_index=exp_format.indexOf("e"),exp=parseInt(exp_format.substr(e_index+1)),res;function remove_zeros(v){if(flags.alternate){return v}if(v.indexOf(".")>-1){while(v.endsWith("0")){v=v.substr(0,v.length-1)}if(v.endsWith(".")){v=v.substr(0,v.length-1)}}return v}if(-4<=exp&&exp-1){return BigInt(v).toString()}const mul=Math.pow(10,d);var is_neg=v<0;if(is_neg){v=-v}var res_floor=(Math.floor(v*mul)/mul).toFixed(d),res_ceil=(Math.ceil(v*mul)/mul).toFixed(d),res;if(v-res_floor==res_ceil-v){var last=res_floor[res_floor.length-1];res=last.match(/[02468]/)?res_floor:res_ceil}else{res=v-res_floor1){mant+="."+parts[0].substr(1)+(parts[1]||"")}else 
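/* %e / %E helper: the branches around this point rebuild the mantissa digits, round them down to the requested precision with roundDownToFixed, and renormalise a mantissa that rounds up to 10 by resetting it to 1 and incrementing the exponent. */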
if(parts[1]){mant+="."+parts[1]}}mant=parseFloat(mant);mant=roundDownToFixed(parseFloat(mant),precision);if(parseFloat(mant)==10){parts=mant.split(".");parts[0]="1";mant=parts.join(".");exp=parseInt(exp)+1}if(flags.alternate&&mant.indexOf(".")==-1){mant+="."}if(exp.toString().length==1){exp="0"+exp}return`${is_neg?"-":""}${mant}${upper?"E":"e"}${exp_sign}${exp}`};var floating_point_exponential_format=function(val,upper,flags){val=_float_helper(val,flags);return format_padding(format_sign(val,flags)+format_float_precision(val,upper,flags,_floating_exp_helper),flags)};$B.formatters={floating_point_format:floating_point_format,floating_point_decimal_format:floating_point_decimal_format,floating_point_exponential_format:floating_point_exponential_format};var signed_hex_format=function(val,upper,flags){var ret;if(!$B.$isinstance(val,_b_.int)){throw _b_.TypeError.$factory(`%X format: an integer is required, not ${$B.class_name(val)}`)}else if($B.$isinstance(val,_b_.bool)){val=val?1:0}if(val.__class__===$B.long_int){ret=val.value.toString(16)}else{ret=parseInt(val);ret=ret.toString(16)}ret=format_int_precision(ret,flags);if(upper){ret=ret.toUpperCase()}if(flags.pad_char==="0"){if(val<0){ret=ret.substring(1);ret="-"+format_padding(ret,flags,true)}var sign=format_sign(val,flags);if(sign!==""){ret=sign+format_padding(ret,flags,true)}}if(flags.alternate){if(ret.charAt(0)==="-"){if(upper){ret="-0X"+ret.slice(1)}else{ret="-0x"+ret.slice(1)}}else{if(upper){ret="0X"+ret}else{ret="0x"+ret}}}return format_padding(format_sign(val,flags)+ret,flags)};var octal_format=function(val,flags){number_check(val,flags);var ret;if(val.__class__===$B.long_int){ret=$B.long_int.to_base(8)}else{ret=parseInt(val);ret=ret.toString(8)}ret=format_int_precision(ret,flags);if(flags.pad_char==="0"){if(val<0){ret=ret.substring(1);ret="-"+format_padding(ret,flags,true)}var sign=format_sign(val,flags);if(sign!==""){ret=sign+format_padding(ret,flags,true)}}if(flags.alternate){if(ret.charAt(0)==="-"){ret="-0o"+ret.slice(1)}else{ret="0o"+ret}}return format_padding(ret,flags)};function series_of_bytes(val,flags){if(val.__class__&&val.__class__.$buffer_protocol){var it=_b_.iter(val),ints=[];while(true){try{ints.push(_b_.next(it))}catch(err){if(err.__class__===_b_.StopIteration){var b=_b_.bytes.$factory(ints);return format_padding(_b_.bytes.decode(b,"ascii"),flags)}throw err}}}else{try{var bytes_obj=$B.$getattr(val,"__bytes__")();return format_padding(_b_.bytes.decode(bytes_obj),flags)}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("%b does not accept '"+$B.class_name(val)+"'")}throw err}}}var single_char_format=function(val,flags,type){if(type=="bytes"){if($B.$isinstance(val,_b_.int)){if(val.__class__===$B.long_int||val<0||val>255){throw _b_.OverflowError.$factory("%c arg not in range(256)")}}else if($B.$isinstance(val,[_b_.bytes,_b_.bytearray])){if(val.source.length>1){throw _b_.TypeError.$factory("%c requires an integer in range(256) or a single byte")}val=val.source[0]}}else{if($B.$isinstance(val,_b_.str)){if(_b_.str.__len__(val)==1){return val}throw _b_.TypeError.$factory("%c requires int or char")}else if(!$B.$isinstance(val,_b_.int)){throw _b_.TypeError.$factory("%c requires int or char")}if(val.__class__===$B.long_int&&(val.value<0||val.value>=1114112)||(val<0||val>=1114112)){throw _b_.OverflowError.$factory("%c arg not in range(0x110000)")}}return format_padding(_b_.chr(val),flags)};var 
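/* num_flag: printf-style width and precision digits; a leading "0" with no width yet selects zero padding, other digits accumulate into flags.padding or, once a "." has been seen, into flags.precision. */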
num_flag=function(c,flags){if(c==="0"&&!flags.padding&&!flags.decimal_point&&!flags.left){flags.pad_char="0";return}if(!flags.decimal_point){flags.padding=(flags.padding||"")+c}else{flags.precision=(flags.precision||"")+c}};var decimal_point_flag=function(val,flags){if(flags.decimal_point){throw new UnsupportedChar}flags.decimal_point=true};var neg_flag=function(val,flags){flags.pad_char=" ";flags.left=true};var space_flag=function(val,flags){flags.space=true};var sign_flag=function(val,flags){flags.sign=true};var alternate_flag=function(val,flags){flags.alternate=true};var char_mapping={b:series_of_bytes,s:str_format,d:num_format,i:num_format,u:num_format,o:octal_format,r:repr_format,a:ascii_format,g:function(val,flags){return floating_point_format(val,false,flags)},G:function(val,flags){return floating_point_format(val,true,flags)},f:function(val,flags){return floating_point_decimal_format(val,false,flags)},F:function(val,flags){return floating_point_decimal_format(val,true,flags)},e:function(val,flags){return floating_point_exponential_format(val,false,flags)},E:function(val,flags){return floating_point_exponential_format(val,true,flags)},x:function(val,flags){return signed_hex_format(val,false,flags)},X:function(val,flags){return signed_hex_format(val,true,flags)},c:single_char_format,0:function(val,flags){return num_flag("0",flags)},1:function(val,flags){return num_flag("1",flags)},2:function(val,flags){return num_flag("2",flags)},3:function(val,flags){return num_flag("3",flags)},4:function(val,flags){return num_flag("4",flags)},5:function(val,flags){return num_flag("5",flags)},6:function(val,flags){return num_flag("6",flags)},7:function(val,flags){return num_flag("7",flags)},8:function(val,flags){return num_flag("8",flags)},9:function(val,flags){return num_flag("9",flags)},"-":neg_flag," ":space_flag,"+":sign_flag,".":decimal_point_flag,"#":alternate_flag};var UnsupportedChar=function(){this.name="UnsupportedChar"};const conversion_flags="#0- +",length_modifiers="hlL",conversion_types="diouxXeEfFgGcrsa";function parse_mod_format(s,type,pos){var flags={pad_char:" "},len=s.length,start_pos=pos,mo;pos++;while(pos-1){flags.conversion_flag=char;if(char=="#"){flags.alternate=true}else if(char=="-"){flags.left=true}else if(char=="+"){flags.sign="+"}else if(char=="0"){flags.pad_char="0"}else if(char==" "){flags.space=true}pos++}else if(char=="*"){flags.padding="*";pos++}else if(mo=/^\d+/.exec(s.substr(pos))){flags.padding=mo[0];pos+=mo[0].length}else if(char=="."){pos++;if(s[pos]=="*"){flags.precision="*";pos++}else if(mo=/^\d+/.exec(s.substr(pos))){flags.precision=mo[0];pos+=mo[0].length}else{flags.precision="0"}}else if(length_modifiers.indexOf(char)>-1){flags.length_modifier=char;pos++}else if(conversion_types.indexOf(char)>-1||char=="b"&&type=="bytes"){if(type=="bytes"){if(char=="s"){char="b"}else if(char=="r"){char="a"}}flags.conversion_type=char;flags.end=pos;flags.string=s.substring(start_pos,pos+1);if(flags.left&&flags.pad_char=="0"){flags.pad_char=" "}return flags}else{throw _b_.ValueError.$factory(`invalid character in format: ${char}`)}}throw _b_.ValueError.$factory("invalid format")}function is_mapping(obj){return _b_.hasattr(obj,"keys")&&_b_.hasattr(obj,"__getitem__")}$B.printf_format=function(s,type,args){var argpos=null,getitem;if($B.$isinstance(args,_b_.tuple)){argpos=0}else{getitem=$B.$getattr(args,"__getitem__",_b_.None)}var ret="",nbph=0,pos=0,len=s.length;while(pos1){if(!$B.$isinstance(args,_b_.tuple)&&!is_mapping(args)){throw _b_.TypeError.$factory("not enough arguments 
for format string")}}var fmt=parse_mod_format(s,type,pos);pos=fmt.end+1;if(fmt.padding=="*"){if(args[argpos]===undefined){throw _b_.ValueError.$factory("no value for field width *")}fmt.padding=args[argpos];argpos++}if(fmt.precision=="*"){if(args[argpos]===undefined){throw _b_.ValueError.$factory("no value for precision *")}fmt.precision=args[argpos];argpos++}var func=char_mapping[fmt.conversion_type],value;if(fmt.mapping_key!==undefined){value=getitem(fmt.mapping_key)}else{if(argpos===null){value=args}else{value=args[argpos];if(value===undefined){throw _b_.TypeError.$factory("not enough arguments for format string")}argpos++}}ret+=func(value,fmt,type)}}if(argpos!==null){if(args.length>argpos){throw _b_.TypeError.$factory("not enough arguments for format string")}else if(args.length=127&&cp<160){cp=cp.toString(16);if(cp.length<2){cp="0"+cp}repl+="\\x"+cp}else if(cp>=768&&cp<=879){repl+="​"+chars[i]+" "}else if(cp.toString(16)=="feff"){repl+="\\ufeff"}else{repl+=chars[i]}}var res=repl;if(res.search('"')==-1&&res.search("'")==-1){return"'"+res+"'"}else if(_self.search('"')==-1){return'"'+res+'"'}var qesc=new RegExp("'","g");res="'"+res.replace(qesc,"\\'")+"'";return res};str.__rmod__=function(){var $=$B.args("__rmod__",2,{self:null,other:null},["self","other"],arguments,{},null,null);if(!$B.$isinstance($.other,str)){return _b_.NotImplemented}return str.__mod__($.other,$.self)};str.__rmul__=function(_self,other){_self=to_string(_self);if($B.$isinstance(other,_b_.int)){other=_b_.int.numerator(other);var res="";while(other>0){res+=_self;other--}return res}return _b_.NotImplemented};str.__setattr__=function(_self,attr,value){if(typeof _self==="string"){if(str.hasOwnProperty(attr)){throw _b_.AttributeError.$factory("'str' object attribute '"+attr+"' is read-only")}else{throw _b_.AttributeError.$factory("'str' object has no attribute '"+attr+"'")}}_b_.dict.$setitem(_self.__dict__,attr,value);return _b_.None};str.__setitem__=function(){throw _b_.TypeError.$factory("'str' object does not support item assignment")};var combining=[];for(var cp=768;cp<=879;cp++){combining.push(String.fromCharCode(cp))}var combining_re=new RegExp("("+combining.join("|")+")","g");str.__str__=function(_self){_self=to_string(_self);var repl="",chars=to_chars(_self);if(chars.length==_self.length){return _self.replace(combining_re,"​$1")}for(var i=0;i=768&&cp<=879){repl+="​"+chars[i]}else{repl+=chars[i]}}return repl};var body=`var _b_ = __BRYTHON__.builtins\nif(typeof other !== typeof _self){\n return _b_.NotImplemented}else if(typeof _self == "string"){\n return _self > other}else{\n return _self.$brython_value > other.$brython_value}`;var comps={">":"gt",">=":"ge","<":"lt","<=":"le"};for(var op in comps){str[`__${comps[op]}__`]=Function("_self","other",body.replace(/>/gm,op))}str.capitalize=function(){var $=$B.args("capitalize",1,{self:self},["self"],arguments,{},null,null),_self=to_string($.self);if(_self.length==0){return""}return _self.charAt(0).toUpperCase()+_self.substr(1).toLowerCase()};str.casefold=function(){var $=$B.args("casefold",1,{self:self},["self"],arguments,{},null,null),res="",char,cf,_self=to_string($.self),chars=to_chars(_self);for(var i=0,len=chars.length;i=0){n++;pos+=sub.length}else{break}}return n};str.encode=function(){var $=$B.args("encode",3,{self:null,encoding:null,errors:null},["self","encoding","errors"],arguments,{encoding:"utf-8",errors:"strict"},null,null),_self=to_string($.self);if($.encoding=="rot13"||$.encoding=="rot_13"){var res="";for(var i=0,len=_self.length;i0){res+=" 
";col++}break;case"\r":case"\n":res+=car;col=0;break;default:res+=car;col++;break}pos++}return res};str.find=function(){var $=$B.args("str.find",4,{self:null,sub:null,start:null,end:null},["self","sub","start","end"],arguments,{start:0,end:null},null,null),_self,sub;check_str($.sub);normalize_start_end($);[_self,sub]=to_string([$.self,$.sub]);var len=str.__len__(_self),sub_len=str.__len__(sub);if(sub_len==0&&$.start==len){return len}if(len+sub_len==0){return-1}var js_start=pypos2jspos(_self,$.start),js_end=pypos2jspos(_self,$.end),ix=_self.slice(js_start,js_end).indexOf(sub);if(ix==-1){return-1}return jspos2pypos(_self,js_start+ix)};$B.parse_format=function(fmt_string){var elts=fmt_string.split(":"),name,conv,spec,name_ext=[];if(elts.length==1){name=fmt_string}else{name=elts[0];spec=elts.splice(1).join(":")}elts=name.split("!");if(elts.length>1){name=elts[0];conv=elts[1]}if(name!==undefined){function name_repl(match){name_ext.push(match);return""}var name_ext_re=/\.[_a-zA-Z][_a-zA-Z0-9]*|\[[_a-zA-Z][_a-zA-Z0-9]*\]|\[[0-9]+\]/g;name=name.replace(name_ext_re,name_repl)}return{name:name,name_ext:name_ext,conv:conv,spec:spec||"",string:fmt_string}};$B.split_format=function(s){var pos=0,_len=s.length,car,text="",parts=[],rank=0;while(pos<_len){car=s.charAt(pos);if(car=="{"&&s.charAt(pos+1)=="{"){text+="{";pos+=2}else if(car=="}"&&s.charAt(pos+1)=="}"){text+="}";pos+=2}else if(car=="{"){parts.push(text);var end=pos+1,nb=1;while(end<_len){if(s.charAt(end)=="{"){nb++;end++}else if(s.charAt(end)=="}"){nb--;end++;if(nb==0){var fmt_string=s.substring(pos+1,end-1);var fmt_obj=$B.parse_format(fmt_string);fmt_obj.raw_name=fmt_obj.name;fmt_obj.raw_spec=fmt_obj.spec;if(!fmt_obj.name){fmt_obj.name=rank+"";rank++}if(fmt_obj.spec!==undefined){function replace_nested(name,key){if(key==""){return"{"+rank+++"}"}return"{"+key+"}"}fmt_obj.spec=fmt_obj.spec.replace(/\{(.*?)\}/g,replace_nested)}parts.push(fmt_obj);text="";break}}else{end++}}if(nb>0){throw _b_.ValueError.$factory("wrong format "+s)}pos=end}else{text+=car;pos++}}if(text){parts.push(text)}return parts};str.format=function(){var last_arg=$B.last(arguments),$,mapping,getitem;if(last_arg.$nat=="mapping"){mapping=last_arg.mapping;getitem=$B.$getattr(mapping,"__getitem__");var args=[];for(let i=0,len=arguments.length-1;i-1){let pos=parseInt(fmt.name);value=_b_.tuple.__getitem__($.$args,pos)}else{value=getitem(fmt.name)}for(var j=0;j-1){key=parseInt(key)}value=$B.$getattr(value,"__getitem__")(key)}}if(fmt.conv=="a"){value=_b_.ascii(value)}else if(fmt.conv=="r"){value=_b_.repr(value)}else if(fmt.conv=="s"){value=_b_.str.$factory(value)}if(value.$is_class||value.$factory){res+=value.__class__.__format__(value,fmt.spec)}else{res+=$B.$getattr(value,"__format__")(fmt.spec)}}return res};str.format_map=function(){var $=$B.args("format_map",2,{self:null,mapping:null},["self","mapping"],arguments,{},null,null),_self=to_string($.self);return str.format(_self,{$nat:"mapping",mapping:$.mapping})};str.index=function(){var res=str.find.apply(null,arguments);if(res===-1){throw _b_.ValueError.$factory("substring not found")}return res};str.isascii=function(){var $=$B.args("isascii",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);for(var i=0,len=_self.length;i127){return false}}return true};var unicode_categories_contain_character=function(categories,cp){for(var cat of categories){console.log(cat,cp);if($B.in_unicode_category(cat,cp)){return true}}return false};var alpha_categories=["Ll","Lu","Lm","Lt","Lo"];var 
alnum_categories=["Ll","Lu","Lm","Lt","Lo","Nd"];str.isalnum=function(){var $=$B.args("isalnum",1,{self:null},["self"],arguments,{},null,null);var _self=to_string($.self);if(_self.length==0){return false}for(var char of _self){if(!unicode_categories_contain_character(alnum_categories,_b_.ord(char))){return false}}return true};str.isalpha=function(){var $=$B.args("isalpha",1,{self:null},["self"],arguments,{},null,null);var _self=to_string($.self);if(_self.length==0){return false}for(var char of _self){if(!unicode_categories_contain_character(alpha_categories,_b_.ord(char))){return false}}return true};str.isdecimal=function(){var $=$B.args("isdecimal",1,{self:null},["self"],arguments,{},null,null),cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);if(!$B.in_unicode_category("Nd",cp)){return false}}return _self.length>0};str.isdigit=function(){var $=$B.args("isdigit",1,{self:null},["self"],arguments,{},null,null),cp,_self=to_string($.self);for(var char of _self){if(/\p{Nd}/u.test(char)){continue}cp=_b_.ord(char);if(!$B.in_unicode_category("No_digits",cp)){return false}}return _self.length>0};str.isidentifier=function(){var $=$B.args("isidentifier",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);if(_self.length==0){return false}var chars=to_chars(_self);if(!$B.is_XID_Start(_b_.ord(chars[0]))){return false}else{for(var char of chars){var cp=_b_.ord(char);if(!$B.is_XID_Continue(cp)){return false}}}return true};str.islower=function(){var $=$B.args("islower",1,{self:null},["self"],arguments,{},null,null),has_cased=false,cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);if($B.in_unicode_category("Ll",cp)){has_cased=true;continue}else if($B.in_unicode_category("Lu",cp)||$B.in_unicode_category("Lt",cp)){return false}}return has_cased};const numeric_re=/\p{Nd}|\p{Nl}|\p{No}/u;str.isnumeric=function(){var $=$B.args("isnumeric",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);for(var char of _self){if(!numeric_re.test(char)&&!$B.in_unicode_category("Lo_numeric",_b_.ord(char))){return false}}return _self.length>0};var unprintable_re=/\p{Cc}|\p{Cf}|\p{Co}|\p{Cs}|\p{Zl}|\p{Zp}|\p{Zs}/u;str.isprintable=function(){var $=$B.args("isprintable",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);for(var char of _self){if(char==" "){continue}if(unprintable_re.test(char)){return false}}return true};str.isspace=function(){var $=$B.args("isspace",1,{self:null},["self"],arguments,{},null,null),cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);if(!$B.in_unicode_category("Zs",cp)&&$B.unicode_bidi_whitespace.indexOf(cp)==-1){return false}}return _self.length>0};str.istitle=function(){var $=$B.args("istitle",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);return _self.length>0&&str.title(_self)==_self};str.isupper=function(){var $=$B.args("islower",1,{self:null},["self"],arguments,{},null,null),is_upper=false,cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);if($B.in_unicode_category("Lu",cp)){is_upper=true;continue}else if($B.in_unicode_category("Ll",cp)||$B.in_unicode_category("Lt",cp)){return false}}return is_upper};str.join=function(){var $=$B.args("join",2,{self:null,iterable:null},["self","iterable"],arguments,{},null,null),_self=to_string($.self);var iterable=_b_.iter($.iterable),res=[],count=0;while(1){try{var obj2=_b_.next(iterable);if(!$B.$isinstance(obj2,str)){throw _b_.TypeError.$factory("sequence item "+count+": expected str instance, 
"+$B.class_name(obj2)+" found")}res.push(obj2)}catch(err){if($B.$isinstance(err,_b_.StopIteration)){break}else{throw err}}}return res.join(_self)};str.ljust=function(){var $=$B.args("ljust",3,{self:null,width:null,fillchar:null},["self","width","fillchar"],arguments,{fillchar:" "},null,null),_self=to_string($.self),len=str.__len__(_self);if($.width<=len){return _self}return _self+$.fillchar.repeat($.width-len)};str.lower=function(){var $=$B.args("lower",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);return _self.toLowerCase()};str.lstrip=function(){var $=$B.args("lstrip",2,{self:null,chars:null},["self","chars"],arguments,{chars:_b_.None},null,null),_self=$.self,chars=$.chars;if(chars===_b_.None){return _self.trimStart()}[_self,chars]=to_string([_self,chars]);while(_self.length>0){var flag=false;for(var char of chars){if(_self.startsWith(char)){_self=_self.substr(char.length);flag=true;break}}if(!flag){return $.self.surrogates?$B.String(_self):_self}}return""};str.maketrans=function(){var $=$B.args("maketrans",3,{x:null,y:null,z:null},["x","y","z"],arguments,{y:null,z:null},null,null);var _t=$B.empty_dict();if($.y===null&&$.z===null){if(!$B.$isinstance($.x,_b_.dict)){throw _b_.TypeError.$factory("maketrans only argument must be a dict")}var items=_b_.list.$factory(_b_.dict.items($.x));for(let i=0,len=items.length;i0&&str.endswith(_self,suffix)){return _self.substr(0,_self.length-suffix.length)}return _self.substr(0)};str.replace=function(){var $=$B.args("replace",4,{self:null,old:null,new:null,count:null},["self","old","new","count"],arguments,{count:-1},null,null),count=$.count,_self=$.self,old=$.old,_new=$.new;check_str(old,"replace() argument 1 ");check_str(_new,"replace() argument 2 ");if(!$B.$isinstance(count,[_b_.int,_b_.float])){throw _b_.TypeError.$factory("'"+$B.class_name(count)+"' object cannot be interpreted as an integer")}else if($B.$isinstance(count,_b_.float)){throw _b_.TypeError.$factory("integer argument expected, got float")}if(count==0){return _self}if(count.__class__==$B.long_int){count=parseInt(count.value)}[old,_new]=to_string([old,_new]);var elts;if(old==""){if(_new==""){return _self}if(_self==""){return _new}elts=_self.split("");if(count>-1&&elts.length>=count){var rest=elts.slice(count).join("");return _new+elts.slice(0,count).join(_new)+rest}else{return _new+elts.join(_new)+_new}}else{elts=str.split(_self,old,count)}var res=_self,pos=-1;if(old.length==0){res=_new;for(var i=0;i0){pos=res.indexOf(old,pos);if(pos<0){break}res=res.substr(0,pos)+_new+res.substr(pos+old.length);pos=pos+_new.length;count--}return res};str.rfind=function(){var $=$B.args("rfind",4,{self:null,sub:null,start:null,end:null},["self","sub","start","end"],arguments,{start:0,end:null},null,null),_self,sub;normalize_start_end($);check_str($.sub);[_self,sub]=to_string([$.self,$.sub]);var len=str.__len__(_self),sub_len=str.__len__(sub);if(sub_len==0){if($.js_start>len){return-1}else{return str.__len__(_self)}}var js_start=pypos2jspos(_self,$.start),js_end=pypos2jspos(_self,$.end),ix=_self.substring(js_start,js_end).lastIndexOf(sub);if(ix==-1){return-1}return jspos2pypos(_self,js_start+ix)-$.start};str.rindex=function(){var res=str.rfind.apply(null,arguments);if(res==-1){throw _b_.ValueError.$factory("substring not found")}return res};str.rjust=function(){var $=$B.args("rjust",3,{self:null,width:null,fillchar:null},["self","width","fillchar"],arguments,{fillchar:" "},null,null),_self=to_string($.self);var len=str.__len__(_self);if($.width<=len){return _self}return 
$B.String($.fillchar.repeat($.width-len)+_self)};str.rpartition=function(self,sep){var $=$B.args("rpartition",2,{self:null,sep:null},["self","sep"],arguments,{},null,null),_self;check_str($.sep);[_self,sep]=[$.self,$.sep];_self=reverse(_self),sep=reverse(sep);var items=str.partition(_self,sep).reverse();for(var i=0;i0){var flag=false;for(var char of chars){if(_self.endsWith(char)){_self=_self.substr(0,_self.length-char.length);flag=true;break}}if(!flag){return _self.surrogates?$B.String(_self):_self}}return""};str.split=function(){var $=$B.args("split",3,{self:null,sep:null,maxsplit:null},["self","sep","maxsplit"],arguments,{sep:_b_.None,maxsplit:-1},null,null),maxsplit=$.maxsplit,sep=$.sep,pos=0,_self=to_string($.self);if(maxsplit.__class__===$B.long_int){maxsplit=parseInt(maxsplit.value)}if(sep==""){throw _b_.ValueError.$factory("empty separator")}if(sep===_b_.None){let res=[];while(pos<_self.length&&_self.charAt(pos).search(/\s/)>-1){pos++}if(pos===_self.length-1){return[_self]}let name="";while(1){if(_self.charAt(pos).search(/\s/)==-1){if(name==""){name=_self.charAt(pos)}else{name+=_self.charAt(pos)}}else{if(name!==""){res.push(name);if(maxsplit!==-1&&res.length==maxsplit+1){res.pop();res.push(name+_self.substr(pos));return res}name=""}}pos++;if(pos>_self.length-1){if(name){res.push(name)}break}}return res.map($B.String)}else{sep=to_string(sep);let res=[],s="",seplen=sep.length;if(maxsplit==0){return[$.self]}while(pos<_self.length){if(_self.substr(pos,seplen)==sep){res.push(s);pos+=seplen;if(maxsplit>-1&&res.length>=maxsplit){res.push(_self.substr(pos));return res.map($B.String)}s=""}else{s+=_self.charAt(pos);pos++}}res.push(s);return res.map($B.String)}};str.splitlines=function(){var $=$B.args("splitlines",2,{self:null,keepends:null},["self","keepends"],arguments,{keepends:false},null,null);if(!$B.$isinstance($.keepends,[_b_.bool,_b_.int])){throw _b_.TypeError("integer argument expected, got "+$B.get_class($.keepends).__name)}var keepends=_b_.int.$factory($.keepends),res=[],start=0,pos=0,_self=to_string($.self);if(!_self.length){return res}while(pos<_self.length){if(_self.substr(pos,2)=="\r\n"){res.push(_self.slice(start,keepends?pos+2:pos));start=pos=pos+2}else if(_self[pos]=="\r"||_self[pos]=="\n"){res.push(_self.slice(start,keepends?pos+1:pos));start=pos=pos+1}else{pos++}}if(start<_self.length){res.push(_self.slice(start))}return res.map($B.String)};str.startswith=function(){var $=$B.args("startswith",4,{self:null,prefix:null,start:null,end:null},["self","prefix","start","end"],arguments,{start:0,end:null},null,null),_self;normalize_start_end($);var prefixes=$.prefix;if(!$B.$isinstance(prefixes,_b_.tuple)){prefixes=[prefixes]}_self=to_string($.self);prefixes=to_string(prefixes);var s=_self.substring($.start,$.end);for(var prefix of prefixes){if(!$B.$isinstance(prefix,str)){throw _b_.TypeError.$factory("endswith first arg must be str "+"or a tuple of str, not int")}if(s.substr(0,prefix.length)==prefix){return true}}return false};str.strip=function(){var $=$B.args("strip",2,{self:null,chars:null},["self","chars"],arguments,{chars:_b_.None},null,null);if($.chars===_b_.None){return $.self.trim()}return str.rstrip(str.lstrip($.self,$.chars),$.chars)};str.swapcase=function(self){var $=$B.args("swapcase",1,{self:self},["self"],arguments,{},null,null),res="",cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);if($B.in_unicode_category("Ll",cp)){res+=char.toUpperCase()}else if($B.in_unicode_category("Lu",cp)){res+=char.toLowerCase()}else{res+=char}}return 
res};str.title=function(self){var $=$B.args("title",1,{self:self},["self"],arguments,{},null,null),state,cp,res="",_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);if($B.in_unicode_category("Ll",cp)){if(!state){res+=char.toUpperCase();state="word"}else{res+=char}}else if($B.in_unicode_category("Lu",cp)||$B.in_unicode_category("Lt",cp)){res+=state?char.toLowerCase():char;state="word"}else{state=null;res+=char}}return res};str.translate=function(){var $=$B.args("translate",2,{self:null,table:null},["self","table"],arguments,{},null,null),table=$.table,res=[],getitem=$B.$getattr(table,"__getitem__"),cp,_self=to_string($.self);for(var char of _self){cp=_b_.ord(char);try{var repl=getitem(cp);if(repl!==_b_.None){if(typeof repl=="string"){res.push(repl)}else if(typeof repl=="number"){res.push(String.fromCharCode(repl))}}}catch(err){res.push(char)}}return res.join("")};str.upper=function(){var $=$B.args("upper",1,{self:null},["self"],arguments,{},null,null),_self=to_string($.self);return _self.toUpperCase()};str.zfill=function(){var $=$B.args("zfill",2,{self:null,width:null},["self","width"],arguments,{},null,null),_self=to_string($.self);var len=str.__len__(_self);if($.width<=len){return _self}switch(_self.charAt(0)){case"+":case"-":return _self.charAt(0)+"0".repeat($.width-len)+_self.substr(1);default:return"0".repeat($.width-len)+_self}};str.$factory=function(arg,encoding){if(arguments.length==0){return""}if(arg===undefined){return $B.UndefinedType.__str__()}else if(arg===null){return""}if(encoding!==undefined){var $=$B.args("str",3,{arg:null,encoding:null,errors:null},["arg","encoding","errors"],arguments,{encoding:"utf-8",errors:"strict"},null,null);encoding=$.encoding}if(typeof arg=="string"||arg instanceof String){return arg.toString()}else if(typeof arg=="number"&&Number.isInteger(arg)){return arg.toString()}try{if(arg.__class__&&arg.__class__===_b_.bytes&&encoding!==undefined){return _b_.bytes.decode(arg,$.encoding,$.errors)}var klass=arg.__class__||$B.get_class(arg);if(klass===undefined){return $B.JSObj.__str__($B.jsobj2pyobj(arg))}var method=$B.$getattr(klass,"__str__",null);if(method===null){method=$B.$getattr(klass,"__repr__")}}catch(err){console.log("no __str__ for",arg);console.log("err ",err);if($B.get_option("debug")>1){console.log(err)}console.log("Warning - no method __str__ or __repr__, "+"default to toString",arg);throw err}var res=$B.$call(method)(arg);if(typeof res=="string"||$B.$isinstance(res,str)){return res}throw _b_.TypeError.$factory("__str__ returned non-string "+`(type ${$B.class_name(res)})`)};$B.set_func_names(str,"builtins");_b_.str=str;$B.parse_format_spec=function(spec,obj){if(spec==""){this.empty=true}else{var pos=0,aligns="<>=^",digits="0123456789",types="bcdeEfFgGnosxX%",align_pos=aligns.indexOf(spec.charAt(0));if(align_pos!=-1){if(spec.charAt(1)&&aligns.indexOf(spec.charAt(1))!=-1){this.fill=spec.charAt(0);this.align=spec.charAt(1);pos=2}else{this.align=aligns[align_pos];this.fill=" ";pos++}}else{align_pos=aligns.indexOf(spec.charAt(1));if(spec.charAt(1)&&align_pos!=-1){this.align=aligns[align_pos];this.fill=spec.charAt(0);pos=2}}var car=spec.charAt(pos);if(car=="+"||car=="-"||car==" 
"){this.sign=car;pos++;car=spec.charAt(pos)}if(car=="z"){this.z=true;pos++;car=spec.charAt(pos)}if(car=="#"){this.alternate=true;pos++;car=spec.charAt(pos)}if(car=="0"){this.fill="0";if(align_pos==-1){this.align="="}pos++;car=spec.charAt(pos)}while(car&&digits.indexOf(car)>-1){if(this.width===undefined){this.width=car}else{this.width+=car}pos++;car=spec.charAt(pos)}if(this.width!==undefined){this.width=parseInt(this.width)}if(this.width===undefined&&car=="{"){var end_param_pos=spec.substr(pos).search("}");this.width=spec.substring(pos,end_param_pos);pos+=end_param_pos+1}if(car==","||car=="_"){this.comma=true;this.grouping_option=car;pos++;car=spec.charAt(pos);if(car==","||car=="_"){if(car==this.grouping_option){throw _b_.ValueError.$factory(`Cannot specify '${car}' with '${car}'.`)}else{throw _b_.ValueError.$factory("Cannot specify both ',' and '_'.")}}}if(car=="."){if(digits.indexOf(spec.charAt(pos+1))==-1){throw _b_.ValueError.$factory("Missing precision in format spec")}this.precision=spec.charAt(pos+1);pos+=2;car=spec.charAt(pos);while(car&&digits.indexOf(car)>-1){this.precision+=car;pos++;car=spec.charAt(pos)}this.precision=parseInt(this.precision)}if(car&&types.indexOf(car)>-1){this.type=car;pos++;car=spec.charAt(pos)}if(pos!==spec.length){var err_msg=`Invalid format specifier '${spec}'`;if(obj){err_msg+=` for object of type '${$B.class_name(obj)}'`}throw _b_.ValueError.$factory(err_msg)}}this.toString=function(){return(this.fill===undefined?"":_b_.str.$factory(this.fill))+(this.align||"")+(this.sign||"")+(this.alternate?"#":"")+(this.sign_aware?"0":"")+(this.width||"")+(this.comma?",":"")+(this.precision?"."+this.precision:"")+(this.type||"")}};$B.format_width=function(s,fmt){if(fmt.width&&s.length":return fill.repeat(missing)+s;case"=":if("+-".indexOf(s.charAt(0))>-1){return s.charAt(0)+fill.repeat(missing)+s.substr(1)}else{return fill.repeat(missing)+s}case"^":var left=parseInt(missing/2);return fill.repeat(left)+s+fill.repeat(missing-left)}}return s};function fstring_expression(start){this.type="expression";this.start=start;this.expression="";this.conversion=null;this.fmt=null}function fstring_error(msg,pos){var error=Error(msg);error.position=pos;throw error}$B.parse_fstring=function(string){var elts=[],pos=0,current="",ctype=null,nb_braces=0,expr_start,car;while(pos-1){if(current.expression.length==0){throw Error("f-string: empty expression not allowed")}if("ars".indexOf(string.charAt(i+1))==-1){throw Error("f-string: invalid conversion character:"+" expected 's', 'r', or 'a'")}else{current.conversion=string.charAt(i+1);i+=2}}else if(car=="("||car=="["){nb_paren++;current.expression+=car;i++}else if(car==")"||car=="]"){nb_paren--;current.expression+=car;i++}else if(car=='"'){if(string.substr(i,3)=='"""'){let end=string.indexOf('"""',i+3);if(end==-1){fstring_error("f-string: unterminated string",pos)}else{var trs=string.substring(i,end+3);trs=trs.replace("\n","\\n\\");current.expression+=trs;i=end+3}}else{let end=string.indexOf('"',i+1);if(end==-1){fstring_error("f-string: unterminated string",pos)}else{current.expression+=string.substring(i,end+1);i=end+1}}}else if(nb_paren==0&&car==":"){current.fmt=true;var cb=0,fmt_complete=false;for(var j=i+1;j-1?"\\":"")+last_char;if(ce.length==0||nb_paren>0||string.charAt(i+1)=="="||"=!<>:".search(last_char_re)>-1){current.expression+=car;i+=1}else{var 
tail=car;while(string.charAt(i+1).match(/\s/)){tail+=string.charAt(i+1);i++}elts.push(current.expression+tail);while(ce.match(/\s$/)){ce=ce.substr(0,ce.length-1)}current.expression=ce;ctype="debug";i++}}else{current.expression+=car;i++}}if(nb_braces>0){fstring_error("f-string: expected '}'",pos)}}}if(current.length>0){elts.push(current)}for(var elt of elts){if(typeof elt=="object"){if(elt.fmt_pos!==undefined&&elt.expression.charAt(elt.fmt_pos)!=":"){throw Error()}}}return elts};$B.codepoint2jsstring=function(i){if(i>=65536&&i<=1114111){var code=i-65536;return String.fromCodePoint(55296|code>>10)+String.fromCodePoint(56320|code&1023)}else{return String.fromCodePoint(i)}};$B.jsstring2codepoint=function(c){if(c.length==1){return c.charCodeAt(0)}var code=65536;code+=(c.charCodeAt(0)&1023)<<10;code+=c.charCodeAt(1)&1023;return code}})(__BRYTHON__);(function($B){var _b_=$B.builtins;function $err(op,other){var msg="unsupported operand type(s) for "+op+" : 'int' and '"+$B.class_name(other)+"'";throw _b_.TypeError.$factory(msg)}function int_value(obj){if(typeof obj=="boolean"){return obj?1:0}return obj.$brython_value!==undefined?obj.$brython_value:obj}function bigint_value(obj){if(typeof obj=="boolean"){return obj?1n:0n}else if(typeof obj=="number"){return BigInt(obj)}else if(obj.__class__===$B.long_int){return obj.value}else if($B.$isinstance(obj,_b_.int)){return bigint_value(obj.$brython_value)}}var int={__class__:_b_.type,__dir__:_b_.object.__dir__,__mro__:[_b_.object],__qualname__:"int",$is_class:true,$native:true,$descriptors:{numerator:true,denominator:true,imag:true,real:true},$is_int_subclass:true};var int_or_long=int.$int_or_long=function(bigint){var res=Number(bigint);return Number.isSafeInteger(res)?res:$B.fast_long_int(bigint)};int.$to_js_number=function(obj){if(typeof obj=="number"){return obj}else if(obj.__class__===$B.long_int){return Number(obj.value)}else if($B.$isinstance(obj,_b_.int)){return int.$to_js_value(obj.$brython_value)}return null};int.$to_bigint=bigint_value;int.$int_value=int_value;int.as_integer_ratio=function(){var $=$B.args("as_integer_ratio",1,{self:null},["self"],arguments,{},null,null);return $B.fast_tuple([$.self,1])};int.from_bytes=function(){var $=$B.args("from_bytes",3,{bytes:null,byteorder:null,signed:null},["bytes","byteorder","signed"],arguments,{byteorder:"big",signed:false},null,null);var x=$.bytes,byteorder=$.byteorder,signed=$.signed,_bytes,_len;if($B.$isinstance(x,[_b_.bytes,_b_.bytearray])){_bytes=x.source;_len=x.source.length}else{_bytes=_b_.list.$factory(x);_len=_bytes.length;for(let i=0;i<_len;i++){_b_.bytes.$factory([_bytes[i]])}}if(byteorder=="big"){_bytes.reverse()}else if(byteorder!="little"){throw _b_.ValueError.$factory("byteorder must be either 'little' or 'big'")}var num=_bytes[0];if(signed&&num>=128){num=num-256}num=BigInt(num);var _mult=256n;for(let i=1;i<_len;i++){num+=_mult*BigInt(_bytes[i]);_mult*=256n}if(!signed){return int_or_long(num)}if(_bytes[_len-1]<128){return int_or_long(num)}return int_or_long(num-_mult)};int.to_bytes=function(){var $=$B.args("to_bytes",3,{self:null,len:null,byteorder:null,signed:null},["self","len","byteorder","signed"],arguments,{len:1,byteorder:"big",signed:false},null,null),self=$.self,len=$.len,byteorder=$.byteorder,signed=$.signed;if(!$B.$isinstance(len,_b_.int)){throw _b_.TypeError.$factory("integer argument expected, got "+$B.class_name(len))}if(["little","big"].indexOf(byteorder)==-1){throw _b_.ValueError.$factory("byteorder must be either 'little' or 
'big'")}if($B.$isinstance(self,$B.long_int)){return $B.long_int.to_bytes(self,len,byteorder,signed)}if(self<0){if(!signed){throw _b_.OverflowError.$factory("can't convert negative int to unsigned")}self=Math.pow(256,len)+self}var res=[],value=self;while(value>0){var quotient=Math.floor(value/256),rest=value-256*quotient;res.push(rest);if(res.length>len){throw _b_.OverflowError.$factory("int too big to convert")}value=quotient}while(res.length=0){res=fmt.sign+res}}return res}int.__format__=function(self,format_spec){var fmt=new $B.parse_format_spec(format_spec,self);if(fmt.type&&"eEfFgG%".indexOf(fmt.type)!=-1){return _b_.float.__format__($B.fast_float(self),format_spec)}fmt.align=fmt.align||">";var res=preformat(self,fmt);if(fmt.comma){var sign=res[0]=="-"?"-":"",rest=res.substr(sign.length),len=rest.length,nb=Math.ceil(rest.length/3),chunks=[];for(var i=0;iint_value(self);int.__init__=()=>_b_.None;int.__int__=self=>self;int.__invert__=function(self){if(Math.abs(self)<2**31){return~self}return $B.rich_op("__sub__",$B.rich_op("__mul__",self,-1),1)};int.__mod__=function(self,other){if($B.$isinstance(other,_b_.tuple)&&other.length==1){other=other[0]}if(other.__class__===$B.long_int){self=BigInt(self);other=other.value;if(other==0){throw _b_.ZeroDivisionError.$factory("integer division or modulo by zero")}return int_or_long((self%other+other)%other)}if($B.$isinstance(other,int)){other=int_value(other);if(other===false){other=0}else if(other===true){other=1}if(other==0){throw _b_.ZeroDivisionError.$factory("integer division or modulo by zero")}return(self%other+other)%other}return _b_.NotImplemented};int.__mul__=Function("self","other",op_model.replace(/\+/g,"*").replace(/add/g,"mul"));int.__ne__=function(self,other){var res=int.__eq__(self,other);return res===_b_.NotImplemented?res:!res};int.__neg__=function(self){var self_as_int=int_value(self);if(self_as_int.__class__===$B.long_int){return $B.long_int.__neg__(self_as_int)}return-self};int.__new__=function(cls,value,base){if(cls===undefined){throw _b_.TypeError.$factory("int.__new__(): not enough arguments")}else if(!$B.$isinstance(cls,_b_.type)){throw _b_.TypeError.$factory("int.__new__(X): X is not a type object")}if(cls===int){return int.$factory(value,base)}if(cls===bool){throw _b_.TypeError.$factory("int.__new__(bool) is not safe, use bool.__new__()")}return{__class__:cls,__dict__:$B.empty_dict(),$brython_value:int.$factory(value,base),toString:function(){return value}}};int.__pos__=function(self){return self};function extended_euclidean(a,b){var d,u,v;if(b==0){return[a,1n,0n]}else{[d,u,v]=extended_euclidean(b,a%b);return[d,v,u-a/b*v]}}int.__pow__=function(self,other,z){if(!$B.$isinstance(other,int)){return _b_.NotImplemented}if(typeof other=="boolean"){other=other?1:0}if(typeof other=="number"||$B.$isinstance(other,int)){if(z!==undefined&&z!==_b_.None){self=bigint_value(self);other=bigint_value(other);z=bigint_value(z);if(z==1){return 0}var result=1n,base=self%z,exponent=other;if(exponent<0){var gcd,inv,_;[gcd,inv,_]=extended_euclidean(self,z);if(gcd!=1){throw _b_.ValueError.$factory("not relative primes: "+self+" and "+z)}return int.__pow__(int_or_long(inv),int_or_long(-exponent),int_or_long(z))}while(exponent>0){if(exponent%2n==1n){result=result*base%z}exponent=exponent>>1n;base=base*base%z}return int_or_long(result)}else{if(typeof other=="number"){if(other>=0){return int_or_long(BigInt(self)**BigInt(other))}else{return $B.fast_float(Math.pow(self,other))}}else if(other.__class__===$B.long_int){if(other.value>=0){return 
int_or_long(BigInt(self)**other.value)}else{return $B.fast_float(Math.pow(self,other))}}else if($B.$isinstance(other,_b_.int)){return int_or_long(int.__pow__(self,other.$brython_value))}return _b_.NotImplemented}}if($B.$isinstance(other,_b_.float)){other=_b_.float.numerator(other);if(self>=0){return $B.fast_float(Math.pow(self,other))}else{return _b_.complex.__pow__($B.make_complex(self,0),other)}}else if($B.$isinstance(other,_b_.complex)){var preal=Math.pow(self,other.$real),ln=Math.log(self);return $B.make_complex(preal*Math.cos(ln),preal*Math.sin(ln))}var rpow=$B.$getattr(other,"__rpow__",_b_.None);if(rpow!==_b_.None){return rpow(self)}$err("**",other)};int.__repr__=function(self){$B.builtins_repr_check(int,arguments);var value=int_value(self),x=value.__class__===$B.long_int?value.value:value;if($B.int_max_str_digits!=0&&x>=10n**BigInt($B.int_max_str_digits)){throw _b_.ValueError.$factory(`Exceeds the limit `+`(${$B.int_max_str_digits}) for integer string conversion`)}return x.toString()};int.__setattr__=function(self,attr,value){if(typeof self=="number"||typeof self=="boolean"){var cl_name=$B.class_name(self);if(_b_.dir(self).indexOf(attr)>-1){throw _b_.AttributeError.$factory("attribute '"+attr+`' of '${cl_name}' objects is not writable`)}else{throw _b_.AttributeError.$factory(`'${cl_name}' object`+` has no attribute '${attr}'`)}}_b_.dict.$setitem(self.__dict__,attr,value);return _b_.None};int.__sub__=Function("self","other",op_model.replace(/\+/g,"-").replace(/__add__/g,"__sub__"));int.__truediv__=function(self,other){if($B.$isinstance(other,int)){other=int_value(other);if(other==0){throw _b_.ZeroDivisionError.$factory("division by zero")}if(other.__class__===$B.long_int){return $B.fast_float(self/parseInt(other.value))}return $B.fast_float(self/other)}return _b_.NotImplemented};int.bit_count=function(self){var s=_b_.bin(_b_.abs(self)),nb=0;for(var x of s){if(x=="1"){nb++}}return nb};int.bit_length=function(self){var s=_b_.bin(self);s=$B.$getattr(s,"lstrip")("-0b");return s.length};int.numerator=self=>int_value(self);int.denominator=()=>1;int.imag=()=>0;int.real=self=>self;for(var attr of["numerator","denominator","imag","real"]){int[attr].setter=function(x){return function(self){throw _b_.AttributeError.$factory(`attribute '${x}' of `+`'${$B.class_name(self)}' objects is not writable`)}}(attr)}var model=`var _b_ = __BRYTHON__.builtins\nif(typeof other == "number"){\n // transform into BigInt: JS converts numbers to 32 bits\n return _b_.int.$int_or_long(BigInt(self) & BigInt(other))}else if(typeof other == "boolean"){\n return self & (other ? 
1 : 0)}else if(other.__class__ === $B.long_int){\n return _b_.int.$int_or_long(BigInt(self) & other.value)}else if($B.$isinstance(other, _b_.int)){\n // int subclass\n return _b_.int.__and__(self, other.$brython_value)}\nreturn _b_.NotImplemented`;int.__and__=Function("self","other",model);int.__lshift__=Function("self","other",model.replace(/&/g,"<<").replace(/__and__/g,"__lshift__"));int.__rshift__=Function("self","other",model.replace(/&/g,">>").replace(/__and__/g,"__rshift__"));int.__or__=Function("self","other",model.replace(/&/g,"|").replace(/__and__/g,"__or__"));int.__xor__=Function("self","other",model.replace(/&/g,"^").replace(/__and__/g,"__xor__"));int.__ge__=function(self,other){self=int_value(self);if(typeof other=="number"){return self>=other}else if(other!==null&&other.__class__===$B.long_int){return self>=other.value}else if(typeof other=="boolean"){return self>=other?1:0}else if($B.$isinstance(other,_b_.int)){return self>=other.$brython_value}return _b_.NotImplemented};int.__gt__=function(self,other){var res=int.__le__(self,other);return res===_b_.NotImplemented?res:!res};int.__le__=function(self,other){self=int_value(self);if(typeof other=="number"){return self<=other}else if(other!==null&&other.__class__===$B.long_int){return self<=other.value}else if(typeof other=="boolean"){return self<=other?1:0}else if($B.$isinstance(other,_b_.int)){return self<=other.$brython_value}return _b_.NotImplemented};int.__lt__=function(self,other){var res=int.__ge__(self,other);return res===_b_.NotImplemented?res:!res};var r_opnames=["add","sub","mul","truediv","floordiv","mod","pow","lshift","rshift","and","xor","or","divmod"];for(var r_opname of r_opnames){if(int["__r"+r_opname+"__"]===undefined&&int["__"+r_opname+"__"]){int["__r"+r_opname+"__"]=function(name){return function(self,other){if($B.$isinstance(other,int)){other=int_value(other);return int["__"+name+"__"](other,self)}return _b_.NotImplemented}}(r_opname)}}var $valid_digits=function(base){var digits="";if(base===0){return"0"}if(base<10){for(let i=0;i=2&&base<=36)){if(base!=0){throw _b_.ValueError.$factory("invalid base")}}function invalid(base){throw _b_.ValueError.$factory("invalid literal for int() with base "+base+": "+_b_.repr(initial_value))}if(typeof value!="string"){value=_b_.str.$to_string(value)}var _value=value.trim(),sign="";if(_value.startsWith("+")||_value.startsWith("-")){sign=_value[0];_value=_value.substr(1)}if(_value.length==2&&base==0&&(_value=="0b"||_value=="0o"||_value=="0x")){throw _b_.ValueError.$factory("invalid value")}if(_value.endsWith("_")){invalid(base)}if(value.indexOf("__")>-1){invalid(base)}if(_value.length>2){let _pre=_value.substr(0,2).toUpperCase();if(base==0){if(_pre=="0B"){base=2}else if(_pre=="0O"){base=8}else if(_pre=="0X"){base=16}else if(_value.startsWith("0")){_value=_value.replace(/_/g,"");if(_value.match(/^0+$/)){return 0}invalid(base)}}else if(_pre=="0X"&&base!=16){invalid(base)}else if(_pre=="0O"&&base!=8){invalid(base)}if(_pre=="0B"&&base==2||_pre=="0O"||_pre=="0X"){_value=_value.substr(2);if(_value.startsWith("_")){_value=_value.substr(1)}}}if(base==0){base=10}var _digits=$valid_digits(base),_re=new RegExp("^[+-]?["+_digits+"]"+"["+_digits+"_]*$","i"),match=_re.exec(_value),res;if(match===null){res=0;var digit;for(var char of _value){if(/\p{Nd}/u.test(char)){let cp=char.codePointAt(0);for(let start of 
$B.digits_starts){if(cp-start<10){digit=cp-start;break}}}else{if(base>10&&_digits.indexOf(char.toUpperCase())>-1){digit=char.toUpperCase().charCodeAt(0)-55}else{invalid(base)}}if(digit$B.int_max_str_digits){throw _b_.ValueError.$factory("Exceeds the limit "+`(${$B.int_max_str_digits}) for integer string conversion: `+`value has ${value.length} digits; use `+"sys.set_int_max_str_digits() to increase the limit.")}if(base==10){res=BigInt(_value)}else{base=BigInt(base);res=0n;let coef=1n,char;for(let i=_value.length-1;i>=0;i--){char=_value[i].toUpperCase();res+=coef*BigInt(_digits.indexOf(char));coef*=base}}}if(sign=="-"){res=-res}return int_or_long(res)};$B.set_func_names(int,"builtins");_b_.int=int;$B.$bool=function(obj,bool_class){if(obj===null||obj===undefined){return false}switch(typeof obj){case"boolean":return obj;case"number":case"string":if(obj){return true}return false;default:if(obj.$is_class){return true}var klass=$B.get_class(obj),missing={},bool_method=bool_class?$B.$getattr(klass,"__bool__",missing):$B.$getattr(obj,"__bool__",missing);var test=false;if(test){console.log("bool(obj)",obj,"bool_class",bool_class,"klass",klass,"apply bool method",bool_method);console.log("$B.$call(bool_method)",bool_method+"")}if(bool_method===missing){var len_method=$B.$getattr(klass,"__len__",missing);if(len_method===missing){return true}return _b_.len(obj)>0}else{var res=bool_class?$B.$call(bool_method)(obj):$B.$call(bool_method)();if(res!==true&&res!==false){throw _b_.TypeError.$factory("__bool__ should return "+"bool, returned "+$B.class_name(res))}if(test){console.log("bool method returns",res)}return res}}};var bool={__bases__:[int],__class__:_b_.type,__mro__:[int,_b_.object],__qualname__:"bool",$is_class:true,$not_basetype:true,$native:true,$descriptors:{numerator:true,denominator:true,imag:true,real:true}};bool.__and__=function(self,other){if($B.$isinstance(other,bool)){return self&&other}else if($B.$isinstance(other,int)){return int.__and__(bool.__index__(self),int.__index__(other))}return _b_.NotImplemented};bool.__float__=function(self){return self?$B.fast_float(1):$B.fast_float(0)};bool.__hash__=bool.__index__=bool.__int__=function(self){if(self.valueOf())return 1;return 0};bool.__neg__=function(self){return-$B.int_or_bool(self)};bool.__or__=function(self,other){if($B.$isinstance(other,bool)){return self||other}else if($B.$isinstance(other,int)){return int.__or__(bool.__index__(self),int.__index__(other))}return _b_.NotImplemented};bool.__pos__=$B.int_or_bool;bool.__repr__=function(self){$B.builtins_repr_check(bool,arguments);return self?"True":"False"};bool.__xor__=function(self,other){if($B.$isinstance(other,bool)){return self^other?true:false}else if($B.$isinstance(other,int)){return int.__xor__(bool.__index__(self),int.__index__(other))}return _b_.NotImplemented};bool.__invert__=function(self){$B.warn(_b_.DeprecationWarning,`Bitwise inversion '~' on bool is deprecated.This returns the bitwise inversion of the underlying int object and is usually not what you expect from negating a bool.Use the 'not' operator for boolean negation or ~int(x) if you really want the bitwise inversion of the underlying int.`);return int.__invert__(self)};bool.$factory=function(){var $=$B.args("bool",1,{x:null},["x"],arguments,{x:false},null,null,1);return $B.$bool($.x,true)};bool.__new__=function(cls,value){if(cls===undefined){throw _b_.TypeError.$factory("bool.__new__(): not enough arguments")}else if(!$B.$isinstance(cls,_b_.type)){throw _b_.TypeError.$factory(`bool.__new__(X): X is not a type object 
(${$B.class_name(cls)})`)}else if(!_b_.issubclass(cls,bool)){let class_name=$B.class_name(cls);throw _b_.TypeError.$factory(`bool.__new__(${class_name}): ${class_name} is not a subtype of bool`)}if(arguments.length>2){throw _b_.TypeError.$factory(`bool expected at most 1 argument, got ${arguments.length-1}`)}return bool.$factory(value)};bool.from_bytes=function(){var $=$B.args("from_bytes",3,{bytes:null,byteorder:null,signed:null},["bytes","byteorder","signed"],arguments,{byteorder:"big",signed:false},null,null);let int_result=int.from_bytes($.bytes,$.byteorder,$.signed);return bool.$factory(int_result)};bool.numerator=int.numerator;bool.denominator=int.denominator;bool.real=self=>self?1:0;bool.imag=int.imag;for(var attr of["real"]){bool[attr].setter=function(x){return function(self){throw _b_.AttributeError.$factory(`attribute '${x}' of `+`'${$B.class_name(self)}' objects is not writable`)}}(attr)}_b_.bool=bool;$B.set_func_names(bool,"builtins")})(__BRYTHON__);(function($B){var _b_=$B.builtins;var long_int={__class__:_b_.type,__mro__:[_b_.int,_b_.object],__qualname__:"int",$infos:{__module__:"builtins",__name__:"int"},$is_class:true,$native:true,$descriptors:{numerator:true,denominator:true,imag:true,real:true}};var int_or_long=_b_.int.$int_or_long;function preformat(self,fmt){if(fmt.empty){return _b_.str.$factory(self)}if(fmt.type&&"bcdoxXn".indexOf(fmt.type)==-1){throw _b_.ValueError.$factory("Unknown format code '"+fmt.type+"' for object of type 'int'")}var res;switch(fmt.type){case undefined:case"d":res=self.toString();break;case"b":res=(fmt.alternate?"0b":"")+BigInt(self.value).toString(2);break;case"c":res=_b_.chr(self);break;case"o":res=(fmt.alternate?"0o":"")+BigInt(self.value).toString(8);break;case"x":res=(fmt.alternate?"0x":"")+BigInt(self.value).toString(16);break;case"X":res=(fmt.alternate?"0X":"")+BigInt(self.value).toString(16).toUpperCase();break;case"n":return self}if(fmt.sign!==undefined){if((fmt.sign==" "||fmt.sign=="+")&&self>=0){res=fmt.sign+res}}return res}long_int.$to_js_number=function(self){return Number(self.value)};long_int.__format__=function(self,format_spec){var fmt=new $B.parse_format_spec(format_spec,self);if(fmt.type&&"eEfFgG%".indexOf(fmt.type)!=-1){return _b_.float.__format__(self,format_spec)}fmt.align=fmt.align||">";var res=preformat(self,fmt);if(fmt.comma){var sign=res[0]=="-"?"-":"",rest=res.substr(sign.length),len=rest.length,nb=Math.ceil(rest.length/3),chunks=[];for(var i=0;i0?self.value:-self.value)};long_int.__add__=function(self,other){if(typeof other=="number"){return int_or_long(self.value+BigInt(other))}else if(other.__class__===$B.long_int){return int_or_long(self.value+other.value)}else if(typeof other=="boolean"){return int_or_long(self.value+(other?1n:0n))}else if($B.$isinstance(other,_b_.int)){return long_int.__add__(self,other.$brython_value)}return _b_.NotImplemented};long_int.__divmod__=function(self,other){var a=self.value,b=_b_.int.$to_bigint(other),quotient;if(a>=0&&b>0||a<=0&&b<0){quotient=a/b}else{quotient=a/b-1n}var rest=a-quotient*b;return $B.fast_tuple([int_or_long(quotient),int_or_long(rest)])};long_int.__eq__=function(self,other){if(other.__class__===$B.long_int){return self.value==other.value}else if(typeof other=="number"||typeof other=="boolean"){return false}else if($B.$isinstance(other,_b_.int)){return long_int.__eq__(self,other.$brython_value)}return _b_.NotImplemented};long_int.__float__=function(self){if(!isFinite(Number(self.value))){throw _b_.OverflowError.$factory("int too large to convert to float")}return 
$B.fast_float(Number(self.value))};long_int.__floordiv__=function(self,other){if(typeof other=="number"){return int_or_long(self.value/BigInt(other))}else if(other.__class__===$B.long_int){return int_or_long(self.value/other.value)}else if(typeof other=="boolean"){return int_or_long(self.value/(other?1n:0n))}else if($B.$isinstance(other,_b_.int)){return int_or_long(self.value/other.$brython_value)}return _b_.NotImplemented};long_int.__ge__=function(self,other){if(typeof other=="number"){return self.value>=other}else if(other.__class__===$B.long_int){return self.value>=other.value}else if(typeof other=="boolean"){return self.value>=(other?1:0)}else if($B.$isinstance(other,_b_.int)){return self.value>=other.$brython_value}return _b_.NotImplemented};long_int.__gt__=function(self,other){var res=long_int.__le__(self,other);return res===_b_.NotImplemented?res:!res};long_int.__hash__=function(self){var modulus=2305843009213693951n,sign=self.value>=0?1n:-1n,self_pos=self.value*sign;var _hash=sign*(self_pos%modulus);return self.__hashvalue__=int_or_long(_hash)};long_int.__index__=function(self){return self};long_int.__invert__=function(self){return int_or_long(-1n-self.value)};long_int.__le__=function(self,other){if(typeof other=="number"){return self.value<=other}else if(other.__class__===$B.long_int){return self.value<=other.value}else if(typeof other=="boolean"){return self.value<=(other?1:0)}else if($B.$isinstance(other,_b_.int)){return self.value<=other.$brython_value}return _b_.NotImplemented};long_int.__lt__=function(self,other){var res=long_int.__ge__(self,other);return res===_b_.NotImplemented?res:!res};long_int.__lshift__=function(self,other){if(typeof other=="number"){return int_or_long(self.value<>BigInt(other))}else if(other.__class__===$B.long_int){return int_or_long(self.value>>other.value)}else if(typeof other=="boolean"){return int_or_long(self.value>>(other?1n:0n))}else if($B.$isinstance(other,_b_.int)){return long_int.__rshift__(self,other.$brython_value)}return _b_.NotImplemented};long_int.__repr__=function(self){$B.builtins_repr_check($B.long_int,arguments);if($B.int_max_str_digits!=0&&self.value>=10n**BigInt($B.int_max_str_digits)){throw _b_.ValueError.$factory(`Exceeds the limit `+`(${$B.int_max_str_digits}) for integer string conversion`)}return self.value.toString()};long_int.__sub__=function(self,other){if(typeof other=="number"){return int_or_long(self.value-BigInt(other))}else if(typeof other=="boolean"){return int_or_long(self.value-(other?1n:0n))}else if(other.__class__===$B.long_int){return int_or_long(self.value-other.value)}else if($B.$isinstance(other,_b_.int)){return long_int.__sub__(self,other.$brython_value)}return _b_.NotImplemented};long_int.__truediv__=function(self,other){if(typeof other=="number"){return $B.fast_float(Number(self.value)/other)}else if(typeof other=="boolean"){return $B.fast_float(Number(self.value)*(other?1:0))}else if(other.__class__===$B.long_int){return $B.fast_float(Number(self.value)/Number(other.value))}else if($B.$isinstance(other,_b_.int)){return long_int.__truediv__(self,other.$brython_value)}return _b_.NotImplemented};long_int.bit_count=function(self){var s=self.value.toString(2),nb=0;for(var x of s){if(x=="1"){nb++}}return nb};long_int.bit_length=function(self){return self.value.toString(2).length};function _infos(self){var nbits=$B.long_int.bit_length(self),pow2=2n**BigInt(nbits-1),rest=BigInt(self.value)-pow2,relative_rest=new 
Number(rest)/Number(pow2);return{nbits:nbits,pow2:pow2,rest:rest,relative_rest:relative_rest}}long_int.$log2=function(x){if(x.value<0){throw _b_.ValueError.$factory("math domain error")}var infos=_infos(x);return _b_.float.$factory(infos.nbits-1+Math.log(1+infos.relative_rest)/Math.LN2)};long_int.$log10=function(x){if(x.value<0){throw _b_.ValueError.$factory("math domain error")}var x_string=x.value.toString(),exp=x_string.length-1,mant=parseFloat(x_string[0]+"."+x_string.substr(1));return _b_.float.$factory(exp+Math.log10(mant))};long_int.numerator=self=>self;long_int.denominator=()=>1;long_int.imag=()=>0;long_int.real=self=>self;var body=`var $B = __BRYTHON__,\n _b_ = $B.builtins\nif(typeof other == "number"){\n return _b_.int.$int_or_long(self.value & BigInt(other))}else if(typeof other == "boolean"){\n return _b_.int.$int_or_long(self.value & (other ? 1n : 0n))}else if(other.__class__ === $B.long_int){\n return _b_.int.$int_or_long(self.value & other.value)}else if($B.$isinstance(other, _b_.int)){\n // int subclass\n return $B.long_int.__and__(self, other.$brython_value)}\nreturn _b_.NotImplemented`;long_int.__and__=Function("self","other",body);long_int.__or__=Function("self","other",body.replace(/&/g,"|").replace(/__and__/g,"__or__"));long_int.__xor__=Function("self","other",body.replace(/&/g,"^").replace(/__and__/g,"__xor__"));long_int.to_bytes=function(self,len,byteorder,signed){var res=[],v=self.value;if(!$B.$bool(signed)&&v<0){throw _b_.OverflowError.$factory("can't convert negative int to unsigned")}while(v>0){var quot=v/256n,rest=v-quot*256n;v=quot;res.push(Number(rest));if(res.length>len){throw _b_.OverflowError.$factory("int too big to convert")}}while(res.length10){for(let i=0;i0}};long_int.$factory=function(value,base){var is_digits=digits(base);for(let i=0;i=0;i--){char=value[i].toUpperCase();res+=coef*BigInt(is_digits[char]);coef*=base}}return{__class__:$B.long_int,value:res}};function extended_euclidean_algorithm(a,b){var s=0,old_s=1,t=1,old_t=0,r=b,old_r=a,quotient,tmp;while($B.rich_comp("__ne__",r,0)){quotient=$B.rich_op("__floordiv__",old_r,r);tmp=$B.rich_op("__sub__",old_r,$B.rich_op("__mul__",quotient,r));old_r=r;r=tmp;tmp=$B.rich_op("__sub__",old_s,$B.rich_op("__mul__",quotient,s));old_s=s;s=tmp;tmp=$B.rich_op("__sub__",old_t,$B.rich_op("__mul__",quotient,t));old_t=t;t=tmp}return[old_r,old_s,old_t]}function inverse_of(n,p){var gcd,x,y;[gcd,x,y]=extended_euclidean_algorithm(n,p);if($B.rich_comp("__ne__",gcd,1)){throw Error(`${n} has no multiplicative inverse '\n 'modulo ${p}`)}else{return $B.rich_op("__mod__",x,p)}}$B.inverse_of=inverse_of;$B.set_func_names(long_int,"builtins");$B.long_int=long_int;$B.fast_long_int=function(value){if(typeof value!=="bigint"){console.log("expected bigint, got",value);throw Error("not a big int")}return{__class__:$B.long_int,value:value}}})(__BRYTHON__);(function($B){var _b_=$B.builtins;function float_value(obj){return obj.__class__===float?obj:fast_float(obj.value)}var float={__class__:_b_.type,__dir__:_b_.object.__dir__,__qualname__:"float",$is_class:true,$native:true,$descriptors:{numerator:true,denominator:true,imag:true,real:true}};float.$float_value=float_value;float.$to_js_number=function(self){if(self.__class__===float){return self.value}else{return float.$to_js_number(self.value)}};float.numerator=self=>self;float.denominator=()=>1;float.imag=()=>0;float.real=self=>self;float.__float__=function(self){return self};$B.shift1_cache={};float.as_integer_ratio=function(self){if(isinf(self)){throw _b_.OverflowError.$factory("Cannot pass 
infinity to "+"float.as_integer_ratio.")}if(isnan(self)){throw _b_.ValueError.$factory("Cannot pass NaN to "+"float.as_integer_ratio.")}var tmp=frexp(self),fp=tmp[0],exponent=tmp[1];for(var i=0;i<300;i++){if(fp==Math.floor(fp)){break}else{fp*=2;exponent--}}var numerator=_b_.int.$factory(fp),py_exponent=_b_.abs(exponent),denominator=1,x;if($B.shift1_cache[py_exponent]!==undefined){x=$B.shift1_cache[py_exponent]}else{x=$B.$getattr(1,"__lshift__")(py_exponent);$B.shift1_cache[py_exponent]=x}py_exponent=x;if(exponent>0){numerator=$B.rich_op("__mul__",numerator,py_exponent)}else{denominator=py_exponent}return $B.fast_tuple([_b_.int.$factory(numerator),_b_.int.$factory(denominator)])};function check_self_is_float(x,method){if(x.__class__===_b_.float||$B.$isinstance(x,_b_.float)){return true}throw _b_.TypeError.$factory(`descriptor '${method}' requires a `+`'float' object but received a '${$B.class_name(x)}'`)}float.__abs__=function(self){check_self_is_float(self,"__abs__");return fast_float(Math.abs(self.value))};float.__bool__=function(self){check_self_is_float(self,"__bool__");return _b_.bool.$factory(self.value)};float.__ceil__=function(self){check_self_is_float(self,"__ceil__");if(isnan(self)){throw _b_.ValueError.$factory("cannot convert float NaN to integer")}else if(isinf(self)){throw _b_.OverflowError.$factory("cannot convert float infinity to integer")}return Math.ceil(self.value)};float.__divmod__=function(self,other){check_self_is_float(self,"__divmod__");if(!$B.$isinstance(other,[_b_.int,float])){return _b_.NotImplemented}return $B.fast_tuple([float.__floordiv__(self,other),float.__mod__(self,other)])};float.__eq__=function(self,other){check_self_is_float(self,"__eq__");if(isNaN(self.value)&&($B.$isinstance(other,float)&&isNaN(other.value))){return false}if($B.$isinstance(other,_b_.int)){return self.value==other}if($B.$isinstance(other,float)){return self.value==other.value}if($B.$isinstance(other,_b_.complex)){if(!$B.rich_comp("__eq__",0,other.$imag)){return false}return float.__eq__(self,other.$real)}return _b_.NotImplemented};float.__floor__=function(self){check_self_is_float(self,"__floor__");if(isnan(self)){throw _b_.ValueError.$factory("cannot convert float NaN to integer")}else if(isinf(self)){throw _b_.OverflowError.$factory("cannot convert float infinity to integer")}return Math.floor(self.value)};float.__floordiv__=function(self,other){check_self_is_float(self,"__floordiv__");if($B.$isinstance(other,float)){if(other.value==0){throw _b_.ZeroDivisionError.$factory("division by zero")}return fast_float(Math.floor(self.value/other.value))}if($B.$isinstance(other,_b_.int)){if(other.valueOf()==0){throw _b_.ZeroDivisionError.$factory("division by zero")}return fast_float(Math.floor(self.value/other))}return _b_.NotImplemented};const DBL_MANT_DIG=53,LONG_MAX=$B.MAX_VALUE,DBL_MAX_EXP=2**10,LONG_MIN=$B.MIN_VALUE,DBL_MIN_EXP=-1021;float.fromhex=function(klass,s){function hex_from_char(char){return parseInt(char,16)}function finished(){while(s[pos]&&s[pos].match(/\s/)){pos++}if(pos!=s.length){throw parse_error()}if(negate){x=float.__neg__(x)}return klass===_b_.float?x:$B.$call(klass)(x)}function overflow_error(){throw _b_.OverflowError.$factory("hexadecimal value too large to represent as a float")}function parse_error(){throw _b_.ValueError.$factory("invalid hexadecimal floating-point string")}function insane_length_error(){throw _b_.ValueError.$factory("hexadecimal string too long to convert")}s=s.trim();if(s.match(/^\+?inf(inity)?$/i)){return INF}else 
if(s.match(/^-inf(inity)?$/i)){return NINF}else if(s.match(/^[+-]?nan$/i)){return NAN}var pos=0,negate,ldexp=_b_.float.$funcs.ldexp;if(s[pos]=="-"){pos++;negate=1}else if(s[pos]=="+"){pos++}if(s.substr(pos,2).toLowerCase()=="0x"){pos+=2}var coeff_start=pos,coeff_end;while(hex_from_char(s[pos])>=0){pos++}var save_pos=pos;if(s[pos]=="."){pos++;while(hex_from_char(s[pos])>=0){pos++}coeff_end=pos-1}else{coeff_end=pos}var ndigits=coeff_end-coeff_start,fdigits=coeff_end-save_pos;if(ndigits==0){throw parse_error()}if(ndigits>Math.min(DBL_MIN_EXP-DBL_MANT_DIG-LONG_MIN/2,LONG_MAX/2+1-DBL_MAX_EXP)/4){throw insane_length_error()}var exp;if(s[pos]=="p"||s[pos]=="P"){pos++;var exp_start=pos;if(s[pos]=="-"||s[pos]=="+"){pos++}if(!("0"<=s[pos]&&s[pos]<="9")){throw parse_error()}pos++;while("0"<=s[pos]&&s[pos]<="9"){pos++}exp=parseInt(s.substr(exp_start))}else{exp=0}function HEX_DIGIT(j){if(!Number.isInteger(j)){throw Error("j pas entier")}return hex_from_char(s[j0&&HEX_DIGIT(ndigits-1)==0){ndigits--}if(ndigits==0||expLONG_MAX/2){console.log("overflow, exp",exp);throw overflow_error()}exp=exp-4*fdigits;var top_exp=exp+4*(ndigits-1);for(let digit=BigInt(HEX_DIGIT(ndigits-1));digit!=0;digit/=2n){top_exp++}if(top_expDBL_MAX_EXP){throw overflow_error()}var lsb=Math.max(top_exp,DBL_MIN_EXP)-DBL_MANT_DIG;var x=0;if(exp>=lsb){for(let i=ndigits-1;i>=0;i--){x=16*x+HEX_DIGIT(i)}x=ldexp($B.fast_float(x),exp);return finished()}var half_eps=1<<(lsb-exp-1)%4,key_digit=parseInt((lsb-exp-1)/4);for(let i=ndigits-1;i>key_digit;i--){x=16*x+HEX_DIGIT(i)}let digit=HEX_DIGIT(key_digit);x=16*x+(digit&16-2*half_eps);if((digit&half_eps)!=0){var round_up=0;if((digit&3*half_eps-1)!=0||half_eps==8&&key_digit+1=0;i--){if(HEX_DIGIT(i)!=0){round_up=1;break}}}if(round_up){x+=2*half_eps;if(top_exp==DBL_MAX_EXP&&x==ldexp(2*half_eps,DBL_MANT_DIG).value){throw overflow_error()}}}x=ldexp(x,exp+4*key_digit);return finished()};float.__getformat__=function(arg){if(arg=="double"||arg=="float"){return"IEEE, little-endian"}if(typeof arg!=="string"){throw _b_.TypeError.$factory(" __getformat__() argument must be str, not "+$B.class_name(arg))}throw _b_.ValueError.$factory("__getformat__() argument 1 must be "+"'double' or 'float'")};var format_sign=function(val,flags){switch(flags.sign){case"+":return val>=0||isNaN(val)?"+":"";case"-":return"";case" ":return val>=0||isNaN(val)?" 
":""}if(flags.space){if(val>=0){return" "}}return""};function preformat(self,fmt){var value=self.value;if(fmt.empty){return _b_.str.$factory(self)}if(fmt.type&&"eEfFgGn%".indexOf(fmt.type)==-1){throw _b_.ValueError.$factory("Unknown format code '"+fmt.type+"' for object of type 'float'")}var special;if(isNaN(value)){special="efg".indexOf(fmt.type)>-1?"nan":"NAN"}else if(value==Number.POSITIVE_INFINITY){special="efg".indexOf(fmt.type)>-1?"inf":"INF"}else if(value==Number.NEGATIVE_INFINITY){special="efg".indexOf(fmt.type)>-1?"-inf":"-INF"}if(special){return format_sign(value,fmt)+special}if(fmt.precision===undefined&&fmt.type!==undefined){fmt.precision=6}if(fmt.type=="%"){value*=100}if(fmt.type=="e"){let res=value.toExponential(fmt.precision),exp=parseInt(res.substr(res.search("e")+1));if(Math.abs(exp)<10){res=res.substr(0,res.length-1)+"0"+res.charAt(res.length-1)}return res}var res;if(fmt.precision!==undefined){let prec=fmt.precision;if(prec==0){return Math.round(value)+""}res=$B.roundDownToFixed(value,prec);let pt_pos=res.indexOf(".");if(fmt.type!==undefined&&(fmt.type=="%"||fmt.type.toLowerCase()=="f")){if(pt_pos==-1){res+="."+"0".repeat(fmt.precision)}else{var missing=fmt.precision-res.length+pt_pos+1;if(missing>0){res+="0".repeat(missing)}}}else if(fmt.type&&fmt.type.toLowerCase()=="g"){let exp_fmt=preformat(self,{type:"e"}).split("e"),exp=parseInt(exp_fmt[1]);if(-4<=exp&&exp0){while(signif.endsWith("0")){signif=signif.substr(0,signif.length-1)}}if(signif.endsWith(".")){signif=signif.substr(0,signif.length-1)}parts[0]=signif}res=parts.join("e");if(fmt.type=="G"){res=res.toUpperCase()}return res}else if(fmt.type===undefined){fmt.type="g";res=preformat(self,fmt);if(res.indexOf(".")==-1){let exp=res.length-1;exp=exp<10?"0"+exp:exp;let is_neg=res.startsWith("-"),point_pos=is_neg?2:1,mant=res.substr(0,point_pos)+"."+res.substr(point_pos);return`${mant}e+${exp}`}fmt.type=undefined}else{let res1=value.toExponential(fmt.precision-1),exp=parseInt(res1.substr(res1.search("e")+1));if(exp<-4||exp>=fmt.precision-1){var elts=res1.split("e");while(elts[0].endsWith("0")){elts[0]=elts[0].substr(0,elts[0].length-1)}res=elts.join("e")}}}else{res=_b_.str.$factory(self)}if(fmt.type===undefined||"gGn".indexOf(fmt.type)!=-1){if(res.search("e")==-1){while(res.charAt(res.length-1)=="0"){res=res.substr(0,res.length-1)}}if(res.charAt(res.length-1)=="."){if(fmt.type===undefined){res+="0"}else{res=res.substr(0,res.length-1)}}}if(fmt.sign!==undefined){if((fmt.sign==" "||fmt.sign=="+")&&value>0){res=fmt.sign+res}}if(fmt.type=="%"){res+="%"}return res}float.__format__=function(self,format_spec){check_self_is_float(self,"__format__");var fmt=new $B.parse_format_spec(format_spec,self);return float.$format(self,fmt)};float.$format=function(self,fmt){fmt.align=fmt.align||">";var pf=preformat(self,fmt);if(fmt.z&&Object.is(parseFloat(pf),-0)){pf=pf.substr(1)}var raw=pf.split("."),_int=raw[0];if(fmt.comma){var len=_int.length,nb=Math.ceil(_int.length/3),chunks=[];for(var i=0;i0?float.$factory(x):float.$factory(-x)}function frexp(x){var x1=x;if($B.$isinstance(x,float)){if(isnan(x)||isinf(x)){return[x,0]}x1=float_value(x).value}else if($B.$isinstance(x,$B.long_int)){var exp=x.value.toString(2).length,power=2n**BigInt(exp);return[$B.fast_float(Number(x.value)/Number(power)),exp]}if(x1==0){return[0,0]}var sign=1,ex=0,man=x1;if(man<0){sign=-sign;man=-man}while(man<.5){man*=2;ex--}while(man>=1){man*=.5;ex++}man*=sign;return[man,ex]}function ldexp(mantissa,exponent){if(isninf(mantissa)){return NINF}else if(isinf(mantissa)){return 
INF}if($B.$isinstance(mantissa,_b_.float)){mantissa=mantissa.value}if(mantissa==0){return ZERO}else if(isNaN(mantissa)){return NAN}if($B.$isinstance(exponent,$B.long_int)){if(exponent.value<0){return ZERO}else{throw _b_.OverflowError.$factory("overflow")}}else if(!isFinite(mantissa*Math.pow(2,exponent))){throw _b_.OverflowError.$factory("overflow")}var steps=Math.min(3,Math.ceil(Math.abs(exponent)/1023));var result=mantissa;for(var i=0;i=0){if(Number.isInteger(other)&&other%2==1){return self}return fast_float(0)}else if(self.value==Number.NEGATIVE_INFINITY&&!isNaN(other)){if(other%2==-1){return fast_float(-0)}else if(other<0){return fast_float(0)}else if(other%2==1){return fast_float(Number.NEGATIVE_INFINITY)}else{return fast_float(Number.POSITIVE_INFINITY)}}else if(self.value==Number.POSITIVE_INFINITY&&!isNaN(other)){return other>0?self:fast_float(0)}if(other==Number.NEGATIVE_INFINITY&&!isNaN(self.value)){return Math.abs(self.value)<1?fast_float(Number.POSITIVE_INFINITY):fast_float(0)}else if(other==Number.POSITIVE_INFINITY&&!isNaN(self.value)){return Math.abs(self.value)<1?fast_float(0):fast_float(Number.POSITIVE_INFINITY)}if(self.value<0&&!Number.isInteger(other)){return _b_.complex.__pow__($B.make_complex(self.value,0),fast_float(other))}return fast_float(Math.pow(self.value,other))}return _b_.NotImplemented};float.__repr__=function(self){$B.builtins_repr_check(float,arguments);self=self.value;if(self==Infinity){return"inf"}else if(self==-Infinity){return"-inf"}else if(isNaN(self)){return"nan"}else if(self===0){if(1/self===-Infinity){return"-0.0"}return"0.0"}var res=self+"";if(res.search(/[.eE]/)==-1){res+=".0"}var split_e=res.split(/e/i);if(split_e.length==2){let mant=split_e[0],exp=split_e[1];if(exp.startsWith("-")){let exp_str=parseInt(exp.substr(1))+"";if(exp_str.length<2){exp_str="0"+exp_str}return mant+"e-"+exp_str}}var x,y;[x,y]=res.split(".");var sign="";if(x[0]=="-"){x=x.substr(1);sign="-"}if(x.length>16){let exp=x.length-1,int_part=x[0],dec_part=x.substr(1)+y;while(dec_part.endsWith("0")){dec_part=dec_part.substr(0,dec_part.length-1)}let mant=int_part;if(dec_part.length>0){mant+="."+dec_part}return sign+mant+"e+"+exp}else if(x=="0"){let exp=0;while(exp3){let rest=y.substr(exp);exp=(exp+1).toString();while(rest.endsWith("0")){rest=rest.substr(0,res.length-1)}let mant=rest[0];if(rest.length>1){mant+="."+rest.substr(1)}if(exp.length==1){exp="0"+exp}return sign+mant+"e-"+exp}}return _b_.str.$factory(res)};float.__round__=function(){var $=$B.args("__round__",2,{self:null,ndigits:null},["self","ndigits"],arguments,{ndigits:_b_.None},null,null);return float.$round($.self,$.ndigits)};float.$round=function(x,ndigits){function overflow(){throw _b_.OverflowError.$factory("cannot convert float infinity to integer")}var no_digits=ndigits===_b_.None;if(isnan(x)){if(ndigits===_b_.None){throw _b_.ValueError.$factory("cannot convert float NaN to integer")}return NAN}else if(isninf(x)){return ndigits===_b_.None?overflow():NINF}else if(isinf(x)){return ndigits===_b_.None?overflow():INF}x=float_value(x);ndigits=ndigits===_b_.None?0:ndigits;if(ndigits==0){var res=Math.round(x.value);if(Math.abs(x.value-res)==.5){if(res%2){return res-1}}if(no_digits){return res}return $B.fast_float(res)}if(ndigits.__class__===$B.long_int){ndigits=Number(ndigits.value)}var pow1,pow2,y,z;if(ndigits>=0){if(ndigits>22){pow1=10**(ndigits-22);pow2=1e22}else{pow1=10**ndigits;pow2=1}y=x.value*pow1*pow2;if(!isFinite(y)){return x}}else{pow1=10**-ndigits;pow2=1;if(isFinite(pow1)){y=x.value/pow1}else{return 
ZERO}}z=Math.round(y);if(fabs(y-z).value==.5){z=2*Math.round(y/2)}if(ndigits>=0){z=z/pow2/pow1}else{z*=pow1}if(!isFinite(z)){throw _b_.OverflowError.$factory("overflow occurred during round")}return fast_float(z)};float.__setattr__=function(self,attr,value){if(self.__class__===float){if(float[attr]===undefined){throw _b_.AttributeError.$factory("'float' object has no attribute '"+attr+"'")}else{throw _b_.AttributeError.$factory("'float' object attribute '"+attr+"' is read-only")}}self[attr]=value;return _b_.None};float.__truediv__=function(self,other){if($B.$isinstance(other,_b_.int)){if(other.valueOf()==0){throw _b_.ZeroDivisionError.$factory("division by zero")}else if($B.$isinstance(other,$B.long_int)){return float.$factory(self.value/Number(other.value))}return float.$factory(self.value/other)}else if($B.$isinstance(other,float)){if(other.value==0){throw _b_.ZeroDivisionError.$factory("division by zero")}return float.$factory(self.value/other.value)}return _b_.NotImplemented};var op_func_body=`var $B = __BRYTHON__,\n _b_ = __BRYTHON__.builtins\n if($B.$isinstance(other, _b_.int)){\n if(typeof other == "boolean"){\n return other ? $B.fast_float(self.value - 1) : self\n }else if(other.__class__ === $B.long_int){\n return _b_.float.$factory(self.value - parseInt(other.value))\n }else{\n return $B.fast_float(self.value - other)\n }\n }\n if($B.$isinstance(other, _b_.float)){\n return $B.fast_float(self.value - other.value)\n }\n return _b_.NotImplemented`;var ops={"+":"add","-":"sub"};for(let op in ops){let body=op_func_body.replace(/-/gm,op);float[`__${ops[op]}__`]=Function("self","other",body)}var comp_func_body=`\nvar $B = __BRYTHON__,\n _b_ = $B.builtins\nif($B.$isinstance(other, _b_.int)){\n if(other.__class__ === $B.long_int){\n return self.value > parseInt(other.value)\n }\n return self.value > other.valueOf()}\nif($B.$isinstance(other, _b_.float)){\n return self.value > other.value}\nif($B.$isinstance(other, _b_.bool)) {\n return self.value > _b_.bool.__hash__(other)}\nif(_b_.hasattr(other, "__int__") || _b_.hasattr(other, "__index__")) {\n return _b_.int.__gt__(self.value, $B.$GetInt(other))}\n// See if other has the opposite operator, eg <= for >\nvar inv_op = $B.$getattr(other, "__le__", _b_.None)\nif(inv_op !== _b_.None){\n return inv_op(self)}\nthrow _b_.TypeError.$factory(\n "unorderable types: float() > " + $B.class_name(other) + "()")\n`;for(let op in $B.$comps){let body=comp_func_body.replace(/>/gm,op).replace(/__gt__/gm,`__${$B.$comps[op]}__`).replace(/__le__/,`__${$B.$inv_comps[op]}__`);float[`__${$B.$comps[op]}__`]=Function("self","other",body)}var r_opnames=["add","sub","mul","truediv","floordiv","mod","pow","lshift","rshift","and","xor","or","divmod"];for(var r_opname of r_opnames){if(float["__r"+r_opname+"__"]===undefined&&float["__"+r_opname+"__"]){float["__r"+r_opname+"__"]=function(name){return function(self,other){var other_as_num=_b_.int.$to_js_number(other);if(other_as_num!==null){var other_as_float=$B.fast_float(other_as_num);return float["__"+name+"__"](other_as_float,self)}return _b_.NotImplemented}}(r_opname)}}function $FloatClass(value){return new Number(value)}function to_digits(s){var arabic_digits="٠١٢٣٤٥٦٧٨٩",res="";for(var i=0;i-1){res+=x}else{res+=s[i]}}return res}const fast_float=$B.fast_float=function(value){return{__class__:_b_.float,value:value}};float.$factory=function(value){if(value===undefined){return fast_float(0)}$B.check_nb_args_no_kw("float",1,arguments);switch(value){case true:return fast_float(1);case false:return fast_float(0)}var 
original_value=value;if(typeof value=="number"){return fast_float(value)}if(value.__class__===float){return value}if($B.$isinstance(value,_b_.memoryview)){value=_b_.memoryview.tobytes(value)}if($B.$isinstance(value,_b_.bytes)){try{value=$B.$getattr(value,"decode")("utf-8")}catch(err){throw _b_.ValueError.$factory("could not convert string to float: "+_b_.repr(original_value))}}if(typeof value=="string"){if(value.trim().length==0){throw _b_.ValueError.$factory(`could not convert string to float: ${_b_.repr(value)}`)}value=value.trim();switch(value.toLowerCase()){case"+inf":case"inf":case"+infinity":case"infinity":return fast_float(Number.POSITIVE_INFINITY);case"-inf":case"-infinity":return fast_float(Number.NEGATIVE_INFINITY);case"+nan":case"nan":return fast_float(Number.NaN);case"-nan":return fast_float(-Number.NaN);default:var parts=value.split("e");if(parts[1]){if(parts[1].startsWith("+")||parts[1].startsWith("-")){parts[1]=parts[1].substr(1)}}parts=parts[0].split(".").concat(parts.splice(1));for(var part of parts){if(part.startsWith("_")||part.endsWith("_")){throw _b_.ValueError.$factory("invalid float literal "+value)}}if(value.indexOf("__")>-1){throw _b_.ValueError.$factory("invalid float literal "+value)}value=value.charAt(0)+value.substr(1).replace(/_/g,"");value=to_digits(value);if(isFinite(value)){return fast_float(parseFloat(value))}else{throw _b_.TypeError.$factory("could not convert string to float: "+_b_.repr(original_value))}}}let klass=$B.get_class(value),float_method=$B.$getattr(klass,"__float__",null);if(float_method===null){var index_method=$B.$getattr(klass,"__index__",null);if(index_method===null){throw _b_.TypeError.$factory("float() argument must be a string or a "+"real number, not '"+$B.class_name(value)+"'")}let index=$B.$call(index_method)(value),index_klass=$B.get_class(index);if(index_klass===_b_.int){return fast_float(index)}else if(index_klass===$B.long_int){return $B.long_int.__float__(index)}else if(index_klass.__mro__.indexOf(_b_.int)>-1){let msg=`${$B.class_name(value)}.__index__ returned `+`non-int (type ${$B.class_name(index)}). The `+"ability to return an instance of a strict subclass"+" of int is deprecated, and may be removed in a "+"future version of Python.";$B.warn(_b_.DeprecationWarning,msg);return fast_float(index)}throw _b_.TypeError.$factory("__index__ returned non-int"+` (type ${$B.class_name(index)})`)}let res=$B.$call(float_method)(value);klass=$B.get_class(res);if(klass!==_b_.float){if(klass.__mro__.indexOf(_b_.float)>-1){let msg=`${$B.class_name(value)}.__float__ returned `+`non-float (type ${$B.class_name(res)}). 
The `+"ability to return an instance of a strict subclass"+" of float is deprecated, and may be removed in a "+"future version of Python.";$B.warn(_b_.DeprecationWarning,msg);return float.$factory(res.value)}throw _b_.TypeError.$factory("__float__ returned non-float"+` (type ${$B.class_name(res)})`)}return res};$B.$FloatClass=$FloatClass;$B.set_func_names(float,"builtins");float.fromhex=_b_.classmethod.$factory(float.fromhex);_b_.float=float;$B.MAX_VALUE=fast_float(Number.MAX_VALUE);$B.MIN_VALUE=fast_float(22250738585072014e-324);const NINF=fast_float(Number.NEGATIVE_INFINITY),INF=fast_float(Number.POSITIVE_INFINITY),NAN=fast_float(Number.NaN),ZERO=fast_float(0)})(__BRYTHON__);(function($B){var _b_=$B.builtins;function $UnsupportedOpType(op,class1,class2){throw _b_.TypeError.$factory("unsupported operand type(s) for "+op+": '"+class1+"' and '"+class2+"'")}var complex={__class__:_b_.type,__dir__:_b_.object.__dir__,__qualname__:"complex",$is_class:true,$native:true,$descriptors:{real:true,imag:true}};complex.__abs__=function(self){var _rf=isFinite(self.$real.value),_if=isFinite(self.$imag.value);if(_rf&&isNaN(self.$imag.value)||_if&&isNaN(self.$real.value)||isNaN(self.$imag.value)&&isNaN(self.$real.value)){return $B.fast_float(NaN)}if(!_rf||!_if){return $B.fast_float(Infinity)}var mag=Math.sqrt(Math.pow(self.$real.value,2)+Math.pow(self.$imag.value,2));if(!isFinite(mag)&&_rf&&_if){throw _b_.OverflowError.$factory("absolute value too large")}return $B.fast_float(mag)};complex.__add__=function(self,other){if($B.$isinstance(other,complex)){return make_complex(self.$real.value+other.$real.value,self.$imag.value+other.$imag.value)}if($B.$isinstance(other,_b_.int)){other=_b_.int.numerator(other);return make_complex($B.rich_op("__add__",self.$real.value,other.valueOf()),self.$imag.value)}if($B.$isinstance(other,_b_.float)){return make_complex(self.$real.value+other.value,self.$imag.value)}return _b_.NotImplemented};complex.__bool__=function(self){return!$B.rich_comp("__eq__",self.$real,0)||!$B.rich_comp("__eq__",self.$imag,0)};complex.__complex__=function(self){if(self.__class__===complex){return self}return $B.make_complex(self.$real,self.$imag)};complex.__eq__=function(self,other){if($B.$isinstance(other,complex)){return self.$real.value==other.$real.value&&self.$imag.value==other.$imag.value}if($B.$isinstance(other,_b_.int)){if(self.$imag.value!=0){return false}return self.$real.value==other.valueOf()}if($B.$isinstance(other,_b_.float)){if(!$B.rich_comp("__eq__",0,self.$imag)){return false}return self.$real.value==other.value}return _b_.NotImplemented};const max_precision=2**31-4;complex.__format__=function(self,format_spec){if(format_spec.length==0){return _b_.str.$factory(self)}var fmt=new $B.parse_format_spec(format_spec,self),type=fmt.conversion_type;var skip_re,add_parens;if(type===undefined||"eEfFgGn".indexOf(type)>-1){if(fmt.precision>max_precision){throw _b_.ValueError.$factory("precision too big")}if(fmt.fill_char=="0"){throw _b_.ValueError.$factory("Zero padding is not allowed in complex format specifier")}if(fmt.align=="="){throw _b_.ValueError.$factory("'=' alignment flag is not allowed in complex format "+"specifier")}var re=self.$real.value,precision=parseInt(fmt.precision,10);if(type===undefined){type="r";if(re==0&&Object.is(re,0)){skip_re=1}else{add_parens=1}}else if(type=="n"){type="g"}if(precision<0){precision=6}else if(type=="r"){type="g"}var format=$B.clone(fmt);format.conversion_type=type;format.precision=precision;var 
res="";if(!skip_re){res+=_b_.float.$format(self.$real,format);if(self.$imag.value>=0){res+="+"}}var formatted_im=_b_.float.$format(self.$imag,format);var pos=-1,last_num;for(var char of formatted_im){pos++;if(char.match(/\d/)){last_num=pos}}formatted_im=formatted_im.substr(0,last_num+1)+"j"+formatted_im.substr(last_num+1);res+=formatted_im;if(add_parens){res="("+res+")"}return res}throw _b_.ValueError.$factory(`invalid type for complex: ${type}`)};complex.$getnewargs=function(self){return $B.fast_tuple([self.$real,self.$imag])};complex.__getnewargs__=function(){return complex.$getnewargs($B.single_arg("__getnewargs__","self",arguments))};complex.__hash__=function(self){return $B.$hash(self.$real)+$B.$hash(self.$imag)*1000003};complex.__init__=function(){return _b_.None};complex.__invert__=function(self){return~self};complex.__mro__=[_b_.object];complex.__mul__=function(self,other){if($B.$isinstance(other,complex)){return make_complex(self.$real.value*other.$real.value-self.$imag.value*other.$imag.value,self.$imag.value*other.$real.value+self.$real.value*other.$imag.value)}else if($B.$isinstance(other,_b_.int)){return make_complex(self.$real.value*other.valueOf(),self.$imag.value*other.valueOf())}else if($B.$isinstance(other,_b_.float)){return make_complex(self.$real.value*other.value,self.$imag.value*other.value)}else if($B.$isinstance(other,_b_.bool)){if(other.valueOf()){return self}return make_complex(0,0)}$UnsupportedOpType("*",complex,other)};complex.__ne__=function(self,other){var res=complex.__eq__(self,other);return res===_b_.NotImplemented?res:!res};complex.__neg__=function(self){return make_complex(-self.$real.value,-self.$imag.value)};complex.__new__=function(cls){if(cls===undefined){throw _b_.TypeError.$factory("complex.__new__(): not enough arguments")}var res,missing={},$=$B.args("complex",3,{cls:null,real:null,imag:null},["cls","real","imag"],arguments,{real:0,imag:missing},null,null);cls=$.cls;var first=$.real,second=$.imag;if(typeof first=="string"){if(second!==missing){throw _b_.TypeError.$factory("complex() can't take second arg "+"if first is a string")}else{var arg=first;first=first.trim();if(first.startsWith("(")&&first.endsWith(")")){first=first.substr(1);first=first.substr(0,first.length-1)}var complex_re=/^\s*([+-]*[0-9_]*\.?[0-9_]*(e[+-]*[0-9_]*)?)([+-]?)([0-9_]*\.?[0-9_]*(e[+-]*[0-9_]*)?)(j?)\s*$/i;var parts=complex_re.exec(first);function to_num(s){var res=parseFloat(s.charAt(0)+s.substr(1).replace(/_/g,""));if(isNaN(res)){throw _b_.ValueError.$factory("could not convert string "+"to complex: '"+arg+"'")}return res}if(parts===null){throw _b_.ValueError.$factory("complex() arg is a malformed string")}if(parts[_real]&&parts[_imag].startsWith(".")&&parts[_sign]==""){throw _b_.ValueError.$factory("complex() arg is a malformed string")}else if(parts[_real]=="."||parts[_imag]=="."||parts[_real]==".e"||parts[_imag]==".e"||parts[_real]=="e"||parts[_imag]=="e"){throw _b_.ValueError.$factory("complex() arg is a malformed string")}else if(parts[_j]!=""){if(parts[_sign]==""){first=0;if(parts[_real]=="+"||parts[_real]==""){second=1}else if(parts[_real]=="-"){second=-1}else{second=to_num(parts[_real])}}else{first=to_num(parts[_real]);second=parts[_imag]==""?1:to_num(parts[_imag]);second=parts[_sign]=="-"?-second:second}}else{if(parts[_sign]&&parts[_imag]==""){throw _b_.ValueError.$factory("complex() arg is a malformed string")}first=to_num(parts[_real]);second=0}res=make_complex(first,second);res.__class__=cls;res.__dict__=$B.empty_dict();return 
res}}if(first.__class__===complex&&cls===complex&&second===missing){return first}var arg1=_convert(first),r,i;if(arg1===null){throw _b_.TypeError.$factory("complex() first argument must be a "+`string or a number, not '${$B.class_name(first)}'`)}if(typeof second=="string"){throw _b_.TypeError.$factory("complex() second arg can't be a string")}var arg2=_convert(second===missing?0:second);if(arg2===null){throw _b_.TypeError.$factory("complex() second argument must be a "+`number, not '${$B.class_name(second)}'`)}if(arg1.method=="__complex__"){if(arg2.method=="__complex__"){r=$B.rich_op("__sub__",arg1.result.$real,arg2.result.$imag);i=$B.rich_op("__add__",arg1.result.$imag,arg2.result.$real)}else{r=arg1.result.$real;i=$B.rich_op("__add__",arg1.result.$imag,arg2.result)}}else{if(arg2.method=="__complex__"){r=$B.rich_op("__sub__",arg1.result,arg2.result.$imag);i=arg2.result.$real}else{r=arg1.result;i=arg2.result}}res=make_complex(r,i);res.__class__=cls;res.__dict__=$B.empty_dict();return res};complex.__pos__=function(self){return self};function complex2expo(cx){var norm=Math.sqrt(cx.$real.value*cx.$real.value+cx.$imag.value*cx.$imag.value),sin=cx.$imag.value/norm,cos=cx.$real.value/norm,angle;if(cos==0){angle=sin==1?Math.PI/2:3*Math.PI/2}else if(sin==0){angle=cos==1?0:Math.PI}else{angle=Math.atan(sin/cos)}return{norm:norm,angle:angle}}function c_powi(x,n){if(n>0){return c_powu(x,n)}else{return c_quot(c_1,c_powu(x,-n))}}function c_powu(x,n){var mask=1,r=c_1,p=x;while(mask>0&&n>=mask){if(n&mask){r=c_prod(r,p)}mask<<=1;p=c_prod(p,p)}return r}function c_prod(a,b){return make_complex(a.$real.value*b.$real.value-a.$imag.value*b.$imag.value,a.$real.value*b.$imag.value+a.$imag.value*b.$real.value)}function c_quot(a,b){var abs_breal=Math.abs(b.$real.value),abs_bimag=Math.abs(b.$imag.value);if($B.rich_comp("__ge__",abs_breal,abs_bimag)){if(abs_breal==0){throw _b_.ZeroDivisionError.$factory()}else{let ratio=b.$imag.value/b.$real.value,denom=b.$real.value+b.$imag.value*ratio;return make_complex((a.$real.value+a.$imag.value*ratio)/denom,(a.$imag.value-a.$real.value*ratio)/denom)}}else if(abs_bimag>=abs_breal){let ratio=b.$real.value/b.$imag.value,denom=b.$real.value*ratio+b.$imag.value;if(b.$imag.value==0){throw _b_.ZeroDivisionError.$factory()}return make_complex((a.$real.value*ratio+a.$imag.value)/denom,(a.$imag.value*ratio-a.$real.value)/denom)}else{return $B.make_complex("nan","nan")}}complex.__pow__=function(self,other,mod){if(mod!==undefined&&mod!==_b_.None){throw _b_.ValueError.$factory("complex modulo")}if($B.rich_comp("__eq__",other,1)){var funcs=_b_.float.$funcs;if(funcs.isinf(self.$real)||funcs.isninf(self.$real)||funcs.isinf(self.$imag)||funcs.isninf(self.$imag)){throw _b_.OverflowError.$factory("complex exponentiation")}return self}var small_int=null;if($B.$isinstance(other,_b_.int)&&_b_.abs(other)<100){small_int=other}else if($B.$isinstance(other,_b_.float)&&Number.isInteger(other.value)&&Math.abs(other.value<100)){small_int=other.value}else if($B.$isinstance(other,complex)&&other.$imag.value==0&&Number.isInteger(other.$real.value)&&Math.abs(other.$real.value)<100){small_int=other.$real.value}if(small_int!==null){return c_powi(self,small_int)}if($B.$isinstance(other,_b_.float)){other=_b_.float.$to_js_number(other)}if(self.$real.value==0&&self.$imag.value==0){if($B.$isinstance(other,complex)&&(other.$imag.value!=0||other.$real.value<0)){throw _b_.ZeroDivisionError.$factory("0.0 to a negative or complex power")}return $B.make_complex(0,0)}var 
exp=complex2expo(self),angle=exp.angle,res=Math.pow(exp.norm,other);if($B.$isinstance(other,_b_.int)){return make_complex(res*Math.cos(angle*other),res*Math.sin(angle*other))}else if($B.$isinstance(other,_b_.float)){return make_complex(res*Math.cos(angle*other.value),res*Math.sin(angle*other.value))}else if($B.$isinstance(other,complex)){var x=other.$real.value,y=other.$imag.value;var pw=Math.pow(exp.norm,x)*Math.pow(Math.E,-y*angle),theta=y*Math.log(exp.norm)-x*angle;if(pw==Number.POSITIVE_INFINITY||pw===Number.NEGATIVE_INFINITY){throw _b_.OverflowError.$factory("complex exponentiation")}return make_complex(pw*Math.cos(theta),pw*Math.sin(theta))}else{throw _b_.TypeError.$factory("unsupported operand type(s) "+"for ** or pow(): 'complex' and '"+$B.class_name(other)+"'")}};complex.__radd__=function(self,other){if($B.$isinstance(other,_b_.bool)){other=other?1:0}if($B.$isinstance(other,_b_.int)){return make_complex(other+self.$real.value,self.$imag.value)}else if($B.$isinstance(other,_b_.float)){return make_complex(other.value+self.$real.value,self.$imag.value)}return _b_.NotImplemented};complex.__repr__=function(self){$B.builtins_repr_check(complex,arguments);var real=Number.isInteger(self.$real.value)?self.$real.value+"":_b_.str.$factory(self.$real),imag=Number.isInteger(self.$imag.value)?self.$imag.value+"":_b_.str.$factory(self.$imag);if(imag.endsWith(".0")){imag=imag.substr(0,imag.length-2)}if(Object.is(self.$imag.value,-0)){imag="-0"}var sign=imag.startsWith("-")?"":"+";if(self.$real.value==0){if(Object.is(self.$real.value,-0)){return"(-0"+sign+imag+"j)"}else{return imag+"j"}}if(self.$imag.value>0||isNaN(self.$imag.value)){return"("+real+"+"+imag+"j)"}if(self.$imag.value==0){if(1/self.$imag.value<0){return"("+real+"-0j)"}return"("+real+"+0j)"}return"("+real+sign+imag+"j)"};complex.__rmul__=function(self,other){if($B.$isinstance(other,_b_.bool)){other=other?1:0}if($B.$isinstance(other,_b_.int)){return make_complex(other*self.$real.value,other*self.$imag.value)}else if($B.$isinstance(other,_b_.float)){return make_complex(other.value*self.$real.value,other.value*self.$imag.value)}return _b_.NotImplemented};complex.__sub__=function(self,other){if($B.$isinstance(other,complex)){return make_complex(self.$real.value-other.$real.value,self.$imag.value-other.$imag.value)}if($B.$isinstance(other,_b_.int)){other=_b_.int.numerator(other);return make_complex(self.$real.value-other.valueOf(),self.$imag.value)}if($B.$isinstance(other,_b_.float)){return make_complex(self.$real.value-other.value,self.$imag.value)}return _b_.NotImplemented};complex.__truediv__=function(self,other){if($B.$isinstance(other,complex)){if(other.$real.value==0&&other.$imag.value==0){throw _b_.ZeroDivisionError.$factory("division by zero")}var _num=self.$real.value*other.$real.value+self.$imag.value*other.$imag.value,_div=other.$real.value*other.$real.value+other.$imag.value*other.$imag.value;var _num2=self.$imag.value*other.$real.value-self.$real.value*other.$imag.value;return make_complex(_num/_div,_num2/_div)}if($B.$isinstance(other,_b_.int)){if(!other.valueOf()){throw _b_.ZeroDivisionError.$factory("division by zero")}return complex.__truediv__(self,complex.$factory(other.valueOf()))}if($B.$isinstance(other,_b_.float)){if(!other.value){throw _b_.ZeroDivisionError.$factory("division by zero")}return complex.__truediv__(self,complex.$factory(other.value))}$UnsupportedOpType("//","complex",other.__class__)};complex.conjugate=function(self){return 
make_complex(self.$real.value,-self.$imag.value)};complex.__ior__=complex.__or__;var r_opnames=["add","sub","mul","truediv","floordiv","mod","pow","lshift","rshift","and","xor","or"];for(var r_opname of r_opnames){if(complex["__r"+r_opname+"__"]===undefined&&complex["__"+r_opname+"__"]){complex["__r"+r_opname+"__"]=function(name){return function(self,other){if($B.$isinstance(other,_b_.int)){other=make_complex(other,0);return complex["__"+name+"__"](other,self)}else if($B.$isinstance(other,_b_.float)){other=make_complex(other.value,0);return complex["__"+name+"__"](other,self)}else if($B.$isinstance(other,complex)){return complex["__"+name+"__"](other,self)}return _b_.NotImplemented}}(r_opname)}}var comp_func_body=`\n var _b_ = __BRYTHON__.builtins\n if(other === undefined || other == _b_.None){\n return _b_.NotImplemented\n }\n throw _b_.TypeError.$factory("no ordering relation " +\n "is defined for complex numbers")`;for(var $op in $B.$comps){complex["__"+$B.$comps[$op]+"__"]=Function("self","other",comp_func_body.replace(/>/gm,$op))}complex.real=function(self){return self.$real};complex.real.setter=function(){throw _b_.AttributeError.$factory("readonly attribute")};complex.imag=function(self){return self.$imag};complex.imag.setter=function(){throw _b_.AttributeError.$factory("readonly attribute")};var _real=1,_real_mantissa=2,_sign=3,_imag=4,_imag_mantissa=5,_j=6;var expected_class={__complex__:complex,__float__:_b_.float,__index__:_b_.int};function _convert(obj){var klass=obj.__class__||$B.get_class(obj);for(var method_name in expected_class){var missing={},method=$B.$getattr(klass,method_name,missing);if(method!==missing){var res=method(obj);if(!$B.$isinstance(res,expected_class[method_name])){throw _b_.TypeError.$factory(method_name+"returned non-"+expected_class[method_name].__name__+"(type "+$B.get_class(res)+")")}if(method_name=="__index__"&&$B.rich_comp("__gt__",res,__BRYTHON__.MAX_VALUE)){throw _b_.OverflowError.$factory("int too large to convert to float")}if(method_name=="__complex__"&&res.__class__!==complex){$B.warn(_b_.DeprecationWarning,"__complex__ returned "+`non-complex (type ${$B.class_name(res)}). 
`+"The ability to return an instance of a strict subclass "+"of complex is deprecated, and may be removed in a future "+"version of Python.")}return{result:res,method:method_name}}}return null}var make_complex=$B.make_complex=function(real,imag){return{__class__:complex,$real:_b_.float.$factory(real),$imag:_b_.float.$factory(imag)}};var c_1=make_complex(1,0);complex.$factory=function(){return complex.__new__(complex,...arguments)};$B.set_func_names(complex,"builtins");_b_.complex=complex})(__BRYTHON__);(function($B){var _b_=$B.builtins;var set_ops=["eq","le","lt","ge","gt","sub","rsub","and","rand","or","ror","xor","rxor"];function is_sublist(t1,t2){for(var i=0,ilen=t1.length;i-1){continue}else if(!_b_.hasattr(v.__class__,"__hash__")){return false}}return true};dict.$iter_items=function*(d){if(d.$all_str){for(let key in d.$strings){if(key!="$dict_strings"){yield{key:key,value:d.$strings[key]}}}}if(d.$jsobj){for(let key in d.$jsobj){if(!d.$exclude||!d.$exclude(key)){yield{key:key,value:d.$jsobj[key]}}}}else{var version=d.$version;for(var i=0,len=d._keys.length;i0};dict.__class_getitem__=function(cls,item){if(!Array.isArray(item)){item=[item]}return $B.GenericAlias.$factory(cls,item)};dict.$lookup_by_key=function(d,key,hash){hash=hash===undefined?_b_.hash(key):hash;var indices=d.table[hash],index;if(indices!==undefined){for(var i=0,len=indices.length;ix!==undefined)))}return res};dict.$setitem_string=function(self,key,value){if(self.$all_str){self.$strings[key]=value;return _b_.None}else{var h=_b_.hash(key),indices=self.table[h];if(indices!==undefined){self._values[indices[0]]=value;return _b_.None}}var index=self._keys.length;self.$strings[key]=index;self._keys.push(key);self._values.push(value);self.$version++;return _b_.None};dict.$getitem=function(self,key,ignore_missing){if(self.$all_str){if(typeof key=="string"){if(self.$strings.hasOwnProperty(key)){return self.$strings[key]}}else{var hash_method=$B.$getattr($B.get_class(key),"__hash__");if(hash_method!==_b_.object.__hash__){convert_all_str(self);let lookup=dict.$lookup_by_key(self,key);if(lookup.found){return lookup.value}}}}else if(self.$jsobj){if(self.$exclude&&self.$exclude(key)){throw _b_.KeyError.$factory(key)}if(self.$jsobj.hasOwnProperty(key)){return self.$jsobj[key]}if(!self.table){throw _b_.KeyError.$factory(key)}}else{let lookup=dict.$lookup_by_key(self,key);if(lookup.found){return lookup.value}}if(!ignore_missing){if(self.__class__!==dict&&!ignore_missing){try{var missing_method=$B.$getattr(self.__class__,"__missing__",_b_.None)}catch(err){console.log(err)}if(missing_method!==_b_.None){return missing_method(self,key)}}}throw _b_.KeyError.$factory(key)};dict.__hash__=_b_.None;function init_from_list(self,args){var i=0;for(var item of args){if(item.length!=2){throw _b_.ValueError.$factory("dictionary "+`update sequence element #${i} has length ${item.length}; 2 is required`)}dict.$setitem(self,item[0],item[1]);i++}}dict.$set_string_no_duplicate=function(d,keys,string,value){if(typeof string!=="string"){throw _b_.TypeError.$factory("keywords must be strings")}if(keys.has(string)){throw _b_.TypeError.$factory("dict() got multiple values for keyword "+`argument '${string}'`)}d.$strings[string]=value;keys.add(string)};function add_mapping(d,obj){for(var entry of _b_.dict.$iter_items(obj)){dict.$setitem(d,entry.key,entry.value,entry.hash)}}function add_iterable(d,js_iterable){var i=0;for(var entry of js_iterable){var items=Array.from($B.make_js_iterator(entry));if(items.length!==2){throw _b_.ValueError.$factory("dictionary 
"+`update sequence element #${i} has length ${items.length}; 2 is required`)}dict.$setitem(d,items[0],items[1]);i++}}dict.__init__=function(self,first,second){if(first===undefined){return _b_.None}if(second===undefined){if(!first.$kw&&$B.$isinstance(first,$B.JSObj)){for(let key in first){dict.$setitem(self,key,first[key])}return _b_.None}else if(first.$kw){var keys=new Set;for(let item of first.$kw){if($B.$isinstance(item,dict)){for(let subitem of dict.$iter_items(item)){dict.$set_string_no_duplicate(self,keys,subitem.key,subitem.value)}}else{for(let key in item){dict.$set_string_no_duplicate(self,keys,key,item[key])}}}return _b_.None}else if(first[Symbol.iterator]){init_from_list(self,first);return _b_.None}else if(first.__class__===$B.generator){init_from_list(self,first.js_gen);return _b_.None}}var $=$B.args("dict",1,{self:null},["self"],arguments,{},"first","second");var args=$.first;if(args.length>1){if($B._experimental_dict){console.log("try dict(*args)");for(var arg of args){if(_b_.isinstance(arg,_b_.dict)){add_mapping(self,arg)}else{try{var js_iterable=$B.make_js_iterator(arg)}catch(err){console.log(arg);console.log(err);throw _b_.TypeError.$factory("expected mapping or "+`iterable, got ${$B.class_name(arg)}`)}add_iterable(self,js_iterable)}}}else{throw _b_.TypeError.$factory("dict expected at most 1 argument"+`, got ${args.length}`)}}else if(args.length==1){args=args[0];if(args.__class__===dict){for(let entry of dict.$iter_items(args)){dict.$setitem(self,entry.key,entry.value,entry.hash)}}else{var keys=$B.$getattr(args,"keys",null);if(keys!==null){var gi=$B.$getattr(args,"__getitem__",null);if(gi!==null){gi=$B.$call(gi);let kiter=_b_.iter($B.$call(keys)());while(true){try{let key=_b_.next(kiter),value=gi(key);dict.__setitem__(self,key,value)}catch(err){if(err.__class__===_b_.StopIteration){break}throw err}}return _b_.None}}if(!Array.isArray(args)){args=_b_.list.$factory(args)}init_from_list(self,args)}}for(let item of _b_.dict.$iter_items($.second)){dict.$setitem(self,item.key,item.value)}return _b_.None};dict.__iter__=function(self){return _b_.iter(dict.keys(self))};dict.__ior__=function(self,other){dict.update(self,other);return self};dict.__len__=function(self){var _count=0;if(self.$all_str){return Object.keys(self.$strings).length}if(self.$jsobj){for(var attr in self.$jsobj){if(attr.charAt(0)!="$"&&(!self.$exclude||!self.$exclude(attr))){_count++}}return _count}for(var d of self._keys){if(d!==undefined){_count++}}return _count};dict.__ne__=function(self,other){var res=dict.__eq__(self,other);return res===_b_.NotImplemented?res:!res};dict.__new__=function(cls){if(cls===undefined){throw _b_.TypeError.$factory("int.__new__(): not enough arguments")}var instance=$B.empty_dict();instance.__class__=cls;if(cls!==dict){instance.__dict__=$B.empty_dict()}return instance};dict.__or__=function(self,other){if(!$B.$isinstance(other,dict)){return _b_.NotImplemented}var res=dict.copy(self);dict.update(res,other);return res};dict.__repr__=function(self){$B.builtins_repr_check(dict,arguments);if(self.$jsobj){return dict.__repr__(jsobj2dict(self.$jsobj,self.$exclude))}if($B.repr.enter(self)){return"{...}"}let res=[];for(let entry of dict.$iter_items(self)){res.push(_b_.repr(entry.key)+": "+_b_.repr(entry.value))}$B.repr.leave(self);return"{"+res.join(", ")+"}"};dict.$iter_items_reversed=function*(d){var version=d.$version;if(d.$all_str){for(var item of Object.entries(d.$strings).reverse()){yield $B.fast_tuple(item);if(d.$version!==version){throw _b_.RuntimeError.$factory("changed in 
iteration")}}}else{for(var i=d._keys.length-1;i>=0;i--){var key=d._keys[i];if(key!==undefined){yield $B.fast_tuple([key,d._values[i]]);if(d.$version!==version){throw _b_.RuntimeError.$factory("changed in iteration")}}}}if(d.$version!==version){throw _b_.RuntimeError.$factory("changed in iteration")}};dict.$iter_keys_reversed=function*(d){for(var entry of dict.$iter_items_reversed(d)){yield entry[0]}};dict.$iter_values_reversed=function*(d){for(var entry of dict.$iter_items_reversed(d)){yield entry[1]}};function make_reverse_iterator(name,iter_func){var klass=$B.make_class(name,(function(d){return{__class__:klass,d:d,iter:iter_func(d),make_iter:function(){return iter_func(d)}}}));klass.__iter__=function(self){self[Symbol.iterator]=self.make_iter;return self};klass.__next__=function(self){var res=self.iter.next();if(res.done){throw _b_.StopIteration.$factory("")}return res.value};klass.__reduce_ex__=function(self){return $B.fast_tuple([_b_.iter,$B.fast_tuple([Array.from(self.make_iter())])])};$B.set_func_names(klass,"builtins");return klass}const dict_reversekeyiterator=make_reverse_iterator("dict_reversekeyiterator",dict.$iter_keys_reversed);dict.__reversed__=function(self){return dict_reversekeyiterator.$factory(self)};dict.__ror__=function(self,other){if(!$B.$isinstance(other,dict)){return _b_.NotImplemented}var res=dict.copy(other);dict.update(res,self);return res};dict.__setitem__=function(){var $=$B.args("__setitem__",3,{self:null,key:null,value:null},["self","key","value"],arguments,{},null,null);return dict.$setitem($.self,$.key,$.value)};function convert_all_str(d){d.$all_str=false;for(var key in d.$strings){dict.$setitem(d,key,d.$strings[key])}}dict.$setitem=function(self,key,value,$hash,from_setdefault){if(self.$all_str){if(typeof key=="string"){var int=parseInt(key);if(isNaN(int)||int>=0){self.$strings[key]=value;return _b_.None}else{convert_all_str(self)}}else{convert_all_str(self)}}if(self.$jsobj){if(self.$from_js){value=$B.pyobj2jsobj(value)}if(self.$jsobj.__class__===_b_.type){self.$jsobj[key]=value;if(key=="__init__"||key=="__new__"){self.$jsobj.$factory=$B.$instance_creator(self.$jsobj)}}else{self.$jsobj[key]=value}return _b_.None}if(key instanceof String){key=key.valueOf()}var hash=$hash!==undefined?$hash:$B.$hash(key);var index;if(self.table[hash]===undefined){index=self._keys.length;self.table[hash]=[index]}else{if(!from_setdefault){var lookup=dict.$lookup_by_key(self,key,hash);if(lookup.found){self._values[lookup.index]=value;return _b_.None}}index=self._keys.length;if(self.table[hash]===undefined){self.table[hash]=[index]}else{self.table[hash].push(index)}}self._keys.push(key);self._values.push(value);self._hashes.push(hash);self.$version++;return _b_.None};$B.make_rmethods(dict);dict.clear=function(){var $=$B.args("clear",1,{self:null},["self"],arguments,{},null,null),self=$.self;self.table=Object.create(null);self._keys=[];self._values=[];self.$all_str=true;self.$strings=new $B.str_dict;if(self.$jsobj){for(var attr in self.$jsobj){if(attr.charAt(0)!=="$"&&attr!=="__class__"){delete self.$jsobj[attr]}}}self.$version++;return _b_.None};dict.copy=function(){var $=$B.args("copy",1,{self:null},["self"],arguments,{},null,null),self=$.self,res=$B.empty_dict();if(self.__class__===_b_.dict){$copy_dict(res,self);return res}var it=$B.make_js_iterator(self);for(var k of it){console.log("iteration yields key",k)}return res};dict.fromkeys=function(){var 
$=$B.args("fromkeys",3,{cls:null,keys:null,value:null},["cls","keys","value"],arguments,{value:_b_.None},null,null),keys=$.keys,value=$.value;var cls=$.cls,res=$B.$call(cls)(),klass=$B.get_class(res),keys_iter=$B.$iter(keys),setitem=klass===dict?dict.$setitem:$B.$getattr(klass,"__setitem__");while(1){try{var key=_b_.next(keys_iter);setitem(res,key,value)}catch(err){if($B.is_exc(err,[_b_.StopIteration])){return res}throw err}}};dict.get=function(){var $=$B.args("get",3,{self:null,key:null,_default:null},["self","key","_default"],arguments,{_default:_b_.None},null,null);try{return dict.$getitem($.self,$.key,true)}catch(err){if($B.$isinstance(err,_b_.KeyError)){return $._default}else{throw err}}};var dict_items=$B.make_class("dict_items",(function(d){return{__class__:dict_items,dict:d,make_iter:function*(){for(var entry of dict.$iter_items(d)){yield $B.fast_tuple([entry.key,entry.value])}}}}));dict_items.__iter__=function(self){return dict_itemiterator.$factory(self.make_iter)};dict_items.__len__=function(self){return dict.__len__(self.dict)};dict_items.__reduce__=function(self){var items=Array.from(self.make_iter());return $B.fast_tuple([_b_.iter,$B.fast_tuple([items])])};dict_items.__repr__=function(self){var items=Array.from(self.make_iter());items=items.map($B.fast_tuple);return"dict_items("+_b_.repr(items)+")"};const dict_reverseitemiterator=make_reverse_iterator("dict_reverseitemiterator",dict.$iter_items_reversed);dict_items.__reversed__=function(self){return dict_reverseitemiterator.$factory(self.dict)};make_view_comparison_methods(dict_items);$B.set_func_names(dict_items,"builtins");var dict_itemiterator=$B.make_class("dict_itemiterator",(function(make_iter){return{__class__:dict_itemiterator,iter:make_iter(),make_iter:make_iter}}));dict_itemiterator.__iter__=function(self){self[Symbol.iterator]=function(){return self.iter};return self};dict_itemiterator.__next__=function(self){var res=self.iter.next();if(res.done){throw _b_.StopIteration.$factory("")}return $B.fast_tuple(res.value)};dict_itemiterator.__reduce_ex__=function(self){return $B.fast_tuple([_b_.iter,$B.fast_tuple([Array.from(self.make_iter())])])};$B.set_func_names(dict_itemiterator,"builtins");dict.items=function(self){$B.args("items",1,{self:null},["self"],arguments,{},null,null);return dict_items.$factory(self)};var dict_keys=$B.make_class("dict_keys",(function(d){return{__class__:dict_keys,dict:d,make_iter:function(){return dict.$iter_keys_check(d)}}}));dict_keys.__iter__=function(self){return dict_keyiterator.$factory(self.make_iter)};dict_keys.__len__=function(self){return dict.__len__(self.dict)};dict_keys.__reduce__=function(self){var items=Array.from(self.make_iter());return $B.fast_tuple([_b_.iter,$B.fast_tuple([items])])};dict_keys.__repr__=function(self){var items=Array.from(self.make_iter());return"dict_keys("+_b_.repr(items)+")"};dict_keys.__reversed__=function(self){return dict_reversekeyiterator.$factory(self.dict)};make_view_comparison_methods(dict_keys);$B.set_func_names(dict_keys,"builtins");var dict_keyiterator=$B.make_class("dict_keyiterator",(function(make_iter){return{__class__:dict_keyiterator,iter:make_iter(),make_iter:make_iter}}));dict_keyiterator.__iter__=function(self){self[Symbol.iterator]=function(){return self.iter};return self};dict_keyiterator.__next__=function(self){var res=self.iter.next();if(res.done){throw _b_.StopIteration.$factory("")}return res.value};dict_keyiterator.__reduce_ex__=function(self){return 
$B.fast_tuple([_b_.iter,$B.fast_tuple([Array.from(self.make_iter())])])};$B.set_func_names(dict_keyiterator,"builtins");dict.keys=function(self){$B.args("keys",1,{self:null},["self"],arguments,{},null,null);return dict_keys.$factory(self)};dict.pop=function(){var missing={},$=$B.args("pop",3,{self:null,key:null,_default:null},["self","key","_default"],arguments,{_default:missing},null,null),self=$.self,key=$.key,_default=$._default;try{var res=dict.__getitem__(self,key);dict.__delitem__(self,key);return res}catch(err){if(err.__class__===_b_.KeyError){if(_default!==missing){return _default}throw err}throw err}};dict.popitem=function(self){$B.check_nb_args_no_kw("popitem",1,arguments);if(dict.__len__(self)==0){throw _b_.KeyError.$factory("'popitem(): dictionary is empty'")}if(self.$all_str){for(var key in self.$strings){}let res=$B.fast_tuple([key,self.$strings[key]]);delete self.$strings[key];self.$version++;return res}var index=self._keys.length-1;while(index>=0){if(self._keys[index]!==undefined){let res=$B.fast_tuple([self._keys[index],self._values[index]]);delete self._keys[index];delete self._values[index];self.$version++;return res}index--}};dict.setdefault=function(){var $=$B.args("setdefault",3,{self:null,key:null,_default:null},["self","key","_default"],arguments,{_default:_b_.None},null,null),self=$.self,key=$.key,_default=$._default;_default=_default===undefined?_b_.None:_default;if(self.$all_str){if(!self.$strings.hasOwnProperty(key)){self.$strings[key]=_default}return self.$strings[key]}if(self.$jsobj){if(!self.$jsobj.hasOwnProperty(key)){self.$jsobj[key]=_default}return self.$jsobj[key]}var lookup=dict.$lookup_by_key(self,key);if(lookup.found){return lookup.value}var hash=lookup.hash;dict.$setitem(self,key,_default,hash,true);return _default};dict.update=function(){var $=$B.args("update",1,{self:null},["self"],arguments,{},"args","kw"),self=$.self,args=$.args,kw=$.kw;if(args.length>0){var o=args[0];if($B.$isinstance(o,dict)){if(o.$jsobj){o=jsobj2dict(o.$jsobj)}$copy_dict(self,o)}else if(_b_.hasattr(o,"keys")){var _keys=_b_.list.$factory($B.$call($B.$getattr(o,"keys"))());for(let i=0,len=_keys.length;i-1){continue}if(typeof dict[attr]=="function"){mappingproxy[attr]=function(key){return function(){return dict[key].apply(null,arguments)}}(attr)}else{mappingproxy[attr]=dict[attr]}}$B.set_func_names(mappingproxy,"builtins");function jsobj2dict(x,exclude){exclude=exclude||function(){return false};var d=$B.empty_dict();for(var attr in x){if(attr.charAt(0)!="$"&&!exclude(attr)){if(x[attr]===null){dict.$setitem(d,attr,_b_.None)}else if(x[attr]===undefined){continue}else if(x[attr].$jsobj===x){dict.$setitem(d,attr,d)}else{dict.$setitem(d,attr,$B.jsobj2pyobj(x[attr]))}}}return d}$B.obj_dict=function(obj,exclude){var klass=obj.__class__||$B.get_class(obj);if(klass!==undefined&&klass.$native){throw $B.attr_error("__dict__",obj)}var res={__class__:dict,$jsobj:obj,$exclude:exclude||function(){return false}};return res}})(__BRYTHON__);(function($B){var _b_=$B.builtins,isinstance=$B.$isinstance;function check_not_tuple(self,attr){if(self.__class__===tuple){throw $B.attr_error(attr,self)}}var list={__class__:_b_.type,__qualname__:"list",__mro__:[_b_.object],$is_class:true,$native:true,$match_sequence_pattern:true,__dir__:_b_.object.__dir__};list.__add__=function(self,other){if($B.get_class(self)!==$B.get_class(other)){var this_name=$B.class_name(self);var radd=$B.$getattr(other,"__radd__",null);if(radd===null){throw _b_.TypeError.$factory("can only concatenate "+this_name+' (not 
"'+$B.class_name(other)+'") to '+this_name)}return _b_.NotImplemented}var res=self.slice(),is_js=other.$is_js_array;for(const item of other){res.push(is_js?$B.$jsobj2pyobj(item):item)}if(isinstance(self,tuple)){res=tuple.$factory(res)}return res};list.__bool__=function(self){return list.__len__(self)>0};list.__class_getitem__=function(cls,item){if(!Array.isArray(item)){item=[item]}return $B.GenericAlias.$factory(cls,item)};list.__contains__=function(){var $=$B.args("__contains__",2,{self:null,item:null},["self","item"],arguments,{},null,null),self=$.self,item=$.item;for(var _item of self){if($B.is_or_equals(_item,item)){return true}}return false};list.__delitem__=function(self,arg){if(isinstance(arg,_b_.int)){let pos=arg;if(arg<0){pos=self.length+pos}if(pos>=0&&pos0?0:self.length}var stop=arg.stop;if(stop===_b_.None){stop=step>0?self.length:0}if(start<0){start=self.length+start}if(stop<0){stop=self.length+stop}let res=[],pos=0;if(step>0){if(stop>start){for(let i=start;istop;i+=step){if(self[i]!==undefined){res[pos++]=i}}res.reverse()}}let i=res.length;while(i--){self.splice(res[i],1)}return _b_.None}if(_b_.hasattr(arg,"__int__")||_b_.hasattr(arg,"__index__")){list.__delitem__(self,_b_.int.$factory(arg));return _b_.None}throw _b_.TypeError.$factory($B.class_name(self)+" indices must be integer, not "+$B.class_name(arg))};list.__eq__=function(self,other){var klass=isinstance(self,list)?list:tuple;if(isinstance(other,klass)){if(other.length==self.length){var i=self.length;while(i--){if(!$B.is_or_equals(self[i],other[i])){return false}}return true}return false}return _b_.NotImplemented};list.__getitem__=function(self,key){$B.check_nb_args_no_kw("__getitem__",2,arguments);return list.$getitem(self,key)};list.$getitem=function(self,key){var klass=self.__class__||$B.get_class(self);var factory=function(list_res){list_res.__class__=klass;return list_res};var int_key;try{int_key=$B.PyNumber_Index(key)}catch(err){}if(int_key!==undefined){let items=self.valueOf(),pos=int_key;if(int_key<0){pos=items.length+pos}if(pos>=0&&pos0){if(stop<=start){return factory(res)}for(let i=start;istart){return factory(res)}for(let i=start;i>stop;i+=step){res[pos++]=items[i]}return factory(res)}}throw _b_.TypeError.$factory($B.class_name(self)+" indices must be integer, not "+$B.class_name(key))};list.__ge__=function(self,other){if(!isinstance(other,list)){return _b_.NotImplemented}var res=list.__le__(other,self);if(res===_b_.NotImplemented){return res}return res};list.__gt__=function(self,other){if(!isinstance(other,list)){return _b_.NotImplemented}var res=list.__lt__(other,self);if(res===_b_.NotImplemented){return res}return res};list.__hash__=_b_.None;list.__iadd__=function(){var $=$B.args("__iadd__",2,{self:null,x:null},["self","x"],arguments,{},null,null);var x=list.$factory($B.$iter($.x));for(var i=0;i1){throw _b_.TypeError.$factory("expected at most 1 argument, got "+args.length)}if(_b_.dict.__len__(kw)>0){throw _b_.TypeError.$factory("list() takes no keyword arguments")}while(self.length>0){self.pop()}var arg=args[0];if(arg===undefined){return _b_.None}var pos=0;for(var item of $B.make_js_iterator(arg)){self[pos++]=item}return _b_.None};var list_iterator=$B.make_iterator_class("list_iterator");list_iterator.__reduce__=list_iterator.__reduce_ex__=function(self){return $B.fast_tuple([_b_.iter,$B.fast_tuple([list.$factory(self)]),0])};list.__iter__=function(self){return list_iterator.$factory(self)};list.__le__=function(self,other){if(!isinstance(other,[list,_b_.tuple])){return _b_.NotImplemented}var 
i=0;while(i$B.max_array_size/other){throw _b_.OverflowError.$factory(`cannot fit `+`'${$B.class_name(other)}' into an index-sized integer`)}var res=[],$temp=self.slice(),len=$temp.length;for(var i=0;i=0&&pos0){if(args.length==1){for(var item of $B.make_js_iterator(args[0])){self.push(item)}}else{throw _b_.TypeError.$factory("tuple expected at most 1 "+`argument, got ${args.length}`)}}if(cls===tuple&&_b_.dict.__len__(kw)>0){throw _b_.TypeError.$factory("tuple() takes no keyword arguments")}return self};tuple.__repr__=function(self){$B.builtins_repr_check(tuple,arguments);return list_repr(self)};$B.set_func_names(tuple,"builtins");_b_.list=list;_b_.tuple=tuple;_b_.object.__bases__=tuple.$factory();_b_.type.__bases__=$B.fast_tuple([_b_.object])})(__BRYTHON__);(function($B){var _b_=$B.builtins;function to_simple(value){switch(typeof value){case"string":case"number":return value;case"boolean":return value?"true":"false";case"object":if(value===_b_.None){return"null"}else if(value instanceof Number){return value.valueOf()}else if(value instanceof String){return value.valueOf()}break;default:throw _b_.TypeError.$factory("keys must be str, int, "+"float, bool or None, not "+$B.class_name(value))}}$B.pyobj2structuredclone=function(obj,strict){strict=strict===undefined?true:strict;if(typeof obj=="boolean"||typeof obj=="number"||typeof obj=="string"||obj instanceof String){return obj}else if(obj.__class__===_b_.float){return obj.value}else if(obj===_b_.None){return null}else if(Array.isArray(obj)||obj.__class__===_b_.list||obj.__class__===_b_.tuple){let res=new Array(obj.length);for(var i=0,len=obj.length;i`}var js_repr=Object.prototype.toString.call(_self);return``};$B.JSObj.bind=function(_self,evt,func){var js_func=function(ev){try{return func(jsobj2pyobj(ev))}catch(err){if(err.__class__!==undefined){$B.handle_error(err)}else{try{$B.$getattr($B.get_stderr(),"write")(err)}catch(err1){console.log(err)}}}};Object.defineProperty(_self,"$brython_events",{value:_self.$brython_events||{},writable:true});if(_self.$brython_events){_self.$brython_events[evt]=_self.$brython_events[evt]||[];_self.$brython_events[evt].push([func,js_func])}_self.addEventListener(evt,js_func);return _b_.None};$B.JSObj.bindings=function(_self){var res=$B.empty_dict();if(_self.$brython_events){for(var key in _self.$brython_events){_b_.dict.$setitem(res,key,$B.fast_tuple(_self.$brython_events[key].map((x=>x[0]))))}}return res};$B.JSObj.unbind=function(_self,evt,func){if(!_self.$brython_events){return _b_.None}if(!_self.$brython_events[evt]){return _b_.None}var events=_self.$brython_events[evt];if(func===undefined){for(var item of events){_self.removeEventListener(evt,item[1])}delete _self.$brython_events[evt]}else{for(var i=0,len=events.length;i-1){return function(){var args=new Array(arguments.length);args[0]=arguments[0];for(var i=1,len=arguments.length;i-1){return function(){var pylist=$B.$list(arguments[0].map(jsobj2pyobj));return jsobj2pyobj(_b_.list[attr].call(null,pylist,...Array.from(arguments).slice(1)))}}return function(){var js_array=arguments[0],t=jsobj2pyobj(js_array),args=[t];return _b_.list[attr].apply(null,args)}};$B.set_func_names(js_list_meta,"builtins");$B.SizedJSObj=$B.make_class("SizedJavascriptObject");$B.SizedJSObj.__bases__=[$B.JSObj];$B.SizedJSObj.__mro__=[$B.JSObj,_b_.object];$B.SizedJSObj.__len__=function(_self){return 
_self.length};$B.set_func_names($B.SizedJSObj,"builtins");$B.IterableJSObj=$B.make_class("IterableJavascriptObject");$B.IterableJSObj.__bases__=[$B.JSObj];$B.IterableJSObj.__mro__=[$B.JSObj,_b_.object];$B.IterableJSObj.__contains__=function(self,key){if(self.contains!==undefined&&typeof self.contains=="function"){return self.contains(key)}else{for(var item of $B.IterableJSObj.__iter__(self).it){if($B.is_or_equals(item,key)){return true}}return false}};$B.IterableJSObj.__iter__=function(_self){return{__class__:$B.IterableJSObj,it:_self[Symbol.iterator]()}};$B.IterableJSObj.__len__=function(_self){return _self.length};$B.IterableJSObj.__next__=function(_self){var value=_self.it.next();if(!value.done){return jsobj2pyobj(value.value)}throw _b_.StopIteration.$factory("")};$B.set_func_names($B.IterableJSObj,"builtins");var js_array=$B.js_array=$B.make_class("Array");js_array.__class__=js_list_meta;js_array.__mro__=[$B.JSObj,_b_.object];js_array.__getattribute__=function(_self,attr){if(_b_.list[attr]===undefined){var proto=Object.getPrototypeOf(_self),res=proto[attr];if(res!==undefined){return jsobj2pyobj(res,_self)}if(_self.hasOwnProperty(attr)){return jsobj2pyobj(_self[attr])}throw $B.attr_error(attr,_self)}return function(){var args=pyobj2jsobj(Array.from(arguments));return _b_.list[attr].call(null,_self,...args)}};js_array.__getitem__=function(_self,i){i=$B.PyNumber_Index(i);return jsobj2pyobj(_self[i])};var js_array_iterator=$B.make_class("JSArray_iterator",(function(obj){return{__class__:js_array_iterator,it:obj[Symbol.iterator]()}}));js_array_iterator.__next__=function(_self){var v=_self.it.next();if(v.done){throw _b_.StopIteration.$factory("")}return jsobj2pyobj(v.value)};$B.set_func_names(js_array_iterator,"builtins");js_array.__iter__=function(_self){return js_array_iterator.$factory(_self)};js_array.__repr__=function(_self){if($B.repr.enter(_self)){return"[...]"}var _r=new Array(_self.length),res;for(var i=0;i<_self.length;++i){_r[i]=_b_.str.$factory(_self[i])}res="["+_r.join(", ")+"]";$B.repr.leave(_self);return res};$B.set_func_names(js_array,"javascript");$B.get_jsobj_class=function(obj){if(typeof obj=="function"){return $B.JSObj}var proto=Object.getPrototypeOf(obj);if(proto===null){return $B.JSObj}if(proto[Symbol.iterator]!==undefined){return $B.IterableJSObj}else if(Object.getOwnPropertyNames(proto).indexOf("length")>-1){return $B.SizedJSObj}return $B.JSObj};$B.JSMeta=$B.make_class("JSMeta");$B.JSMeta.__call__=function(cls){var extra_args=new Array(arguments.length-1),klass=arguments[0];for(var i=1,len=arguments.length;i`};$B.generator.close=function(self){var save_frame_obj=$B.frame_obj;if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)}try{$B.generator.throw(self,_b_.GeneratorExit.$factory())}catch(err){if(!$B.is_exc(err,[_b_.GeneratorExit,_b_.StopIteration])){$B.frame_obj=save_frame_obj;throw _b_.RuntimeError.$factory("generator ignored GeneratorExit")}}$B.frame_obj=save_frame_obj};$B.generator.send=function(self,value){var gen=self.js_gen;gen.$has_run=true;if(gen.$finished){throw _b_.StopIteration.$factory(value)}if(gen.gi_running===true){throw _b_.ValueError.$factory("generator already executing")}gen.gi_running=true;var save_frame_obj=$B.frame_obj;if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)}try{var res=gen.next(value)}catch(err){gen.$finished=true;$B.frame_obj=save_frame_obj;throw 
err}if($B.frame_obj!==null&&$B.frame_obj.frame===self.$frame){$B.leave_frame()}$B.frame_obj=save_frame_obj;if(res.value&&res.value.__class__===$GeneratorReturn){gen.$finished=true;throw _b_.StopIteration.$factory(res.value.value)}gen.gi_running=false;if(res.done){throw _b_.StopIteration.$factory(res.value)}return res.value};$B.generator.throw=function(){var $=$B.args("throw",4,{self:null,type:null,value:null,traceback:null},["self","type","value","traceback"],arguments,{value:_b_.None,traceback:_b_.None},null,null),self=$.self,type=$.type,value=$.value,traceback=$.traceback;var gen=self.js_gen,exc=type;if(exc.$is_class){if(!_b_.issubclass(type,_b_.BaseException)){throw _b_.TypeError.$factory("exception value must be an "+"instance of BaseException")}else if(value===undefined||value===_b_.None){exc=$B.$call(exc)()}else if($B.$isinstance(value,type)){exc=value}}else{if(value===_b_.None){value=exc}else{exc=$B.$call(exc)(value)}}if(traceback!==_b_.None){exc.$traceback=traceback}var save_frame_obj=$B.frame_obj;if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)}var res=gen.throw(exc);$B.frame_obj=save_frame_obj;if(res.done){throw _b_.StopIteration.$factory(res.value)}return res.value};$B.set_func_names($B.generator,"builtins");$B.async_generator=$B.make_class("async_generator",(function(func){var f=function(){var gen=func.apply(null,arguments);var res=Object.create(null);res.__class__=$B.async_generator;res.js_gen=gen;return res};return f}));$B.async_generator.__aiter__=function(self){return self};$B.async_generator.__anext__=function(self){return $B.async_generator.asend(self,_b_.None)};$B.async_generator.aclose=function(self){self.js_gen.$finished=true;return _b_.None};$B.async_generator.asend=async function(self,value){var gen=self.js_gen;if(gen.$finished){throw _b_.StopAsyncIteration.$factory(value)}if(gen.ag_running===true){throw _b_.ValueError.$factory("generator already executing")}gen.ag_running=true;var save_frame_obj=$B.frame_obj;if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)}try{var res=await gen.next(value)}catch(err){gen.$finished=true;$B.frame_obj=save_frame_obj;throw err}if($B.frame_obj!==null&&$B.frame_obj.frame===self.$frame){$B.leave_frame()}$B.frame_obj=save_frame_obj;if(res.done){throw _b_.StopAsyncIteration.$factory(value)}if(res.value.__class__===$GeneratorReturn){gen.$finished=true;throw _b_.StopAsyncIteration.$factory(res.value.value)}gen.ag_running=false;return res.value};$B.async_generator.athrow=async function(self,type,value,traceback){var gen=self.js_gen,exc=type;if(exc.$is_class){if(!_b_.issubclass(type,_b_.BaseException)){throw _b_.TypeError.$factory("exception value must be an "+"instance of BaseException")}else if(value===undefined){value=$B.$call(exc)()}}else{if(value===undefined){value=exc}else{exc=$B.$call(exc)(value)}}if(traceback!==undefined){exc.$traceback=traceback}var save_frame_obj=$B.frame_obj;if(self.$frame){$B.frame_obj=$B.push_frame(self.$frame)}await gen.throw(value);$B.frame_obj=save_frame_obj};$B.set_func_names($B.async_generator,"builtins")})(__BRYTHON__);(function($B){var _b_=$B.builtins,object=_b_.object,_window=globalThis;function convertDomValue(v){if(v===null||v===undefined){return _b_.None}return $B.jsobj2pyobj(v)}var py_immutable_to_js=$B.py_immutable_to_js=function(pyobj){if($B.$isinstance(pyobj,_b_.float)){return pyobj.value}else if($B.$isinstance(pyobj,$B.long_int)){return $B.long_int.$to_js_number(pyobj)}return pyobj};function js_immutable_to_py(jsobj){if(typeof jsobj=="number"){if(Number.isSafeInteger(jsobj)){return 
jsobj}else if(Number.isInteger(jsobj)){return $B.fast_long_int(BigInt(jsobj+""))}else{return $B.fast_float(jsobj)}}return jsobj}function $getPosition(e){var left=0,top=0,width=e.width||e.offsetWidth,height=e.height||e.offsetHeight;while(e.offsetParent){left+=e.offsetLeft;top+=e.offsetTop;e=e.offsetParent}left+=e.offsetLeft||0;top+=e.offsetTop||0;if(e.parentElement){var parent_pos=$getPosition(e.parentElement);left+=parent_pos.left;top+=parent_pos.top}return{left:left,top:top,width:width,height:height}}var $mouseCoords=$B.$mouseCoords=function(ev){if(ev.type.startsWith("touch")){let res={};res.x=_b_.int.$factory(ev.touches[0].screenX);res.y=_b_.int.$factory(ev.touches[0].screenY);res.__getattr__=function(attr){return this[attr]};res.__class__="MouseCoords";return res}var posx=0,posy=0;if(!ev){ev=_window.event}if(ev.pageX||ev.pageY){posx=ev.pageX;posy=ev.pageY}else if(ev.clientX||ev.clientY){posx=ev.clientX+document.body.scrollLeft+document.documentElement.scrollLeft;posy=ev.clientY+document.body.scrollTop+document.documentElement.scrollTop}let res={};res.x=_b_.int.$factory(posx);res.y=_b_.int.$factory(posy);res.__getattr__=function(attr){return this[attr]};res.__class__="MouseCoords";return res};$B.$isNode=function(o){return typeof Node==="object"?o instanceof Node:o&&typeof o==="object"&&typeof o.nodeType==="number"&&typeof o.nodeName==="string"};$B.$isNodeList=function(nodes){try{var result=Object.prototype.toString.call(nodes);var re=new RegExp("^\\[object (HTMLCollection|NodeList)\\]$");return typeof nodes==="object"&&re.exec(result)!==null&&nodes.length!==undefined&&(nodes.length==0||typeof nodes[0]==="object"&&nodes[0].nodeType>0)}catch(err){return false}};var $DOMEventAttrs_W3C=["NONE","CAPTURING_PHASE","AT_TARGET","BUBBLING_PHASE","type","target","currentTarget","eventPhase","bubbles","cancelable","timeStamp","stopPropagation","preventDefault","initEvent"];var $DOMEventAttrs_IE=["altKey","altLeft","button","cancelBubble","clientX","clientY","contentOverflow","ctrlKey","ctrlLeft","data","dataFld","dataTransfer","fromElement","keyCode","nextPage","offsetX","offsetY","origin","propertyName","reason","recordset","repeat","screenX","screenY","shiftKey","shiftLeft","source","srcElement","srcFilter","srcUrn","toElement","type","url","wheelDelta","x","y"];$B.$isEvent=function(obj){var flag=true;for(let attr of $DOMEventAttrs_W3C){if(obj[attr]===undefined){flag=false;break}}if(flag){return true}for(let attr of $DOMEventAttrs_IE){if(obj[attr]===undefined){return false}}return true};var $NodeTypes={1:"ELEMENT",2:"ATTRIBUTE",3:"TEXT",4:"CDATA_SECTION",5:"ENTITY_REFERENCE",6:"ENTITY",7:"PROCESSING_INSTRUCTION",8:"COMMENT",9:"DOCUMENT",10:"DOCUMENT_TYPE",11:"DOCUMENT_FRAGMENT",12:"NOTATION"};var Attributes=$B.make_class("Attributes",(function(elt){return{__class__:Attributes,elt:elt}}));Attributes.__contains__=function(){var $=$B.args("__getitem__",2,{self:null,key:null},["self","key"],arguments,{},null,null);if($.self.elt instanceof SVGElement){return $.self.elt.hasAttributeNS(null,$.key)}else if(typeof $.self.elt.hasAttribute=="function"){return $.self.elt.hasAttribute($.key)}return false};Attributes.__delitem__=function(){var $=$B.args("__getitem__",2,{self:null,key:null},["self","key"],arguments,{},null,null);if(!Attributes.__contains__($.self,$.key)){throw _b_.KeyError.$factory($.key)}if($.self.elt instanceof SVGElement){$.self.elt.removeAttributeNS(null,$.key);return _b_.None}else if(typeof $.self.elt.hasAttribute=="function"){$.self.elt.removeAttribute($.key);return 
_b_.None}};Attributes.__getitem__=function(){var $=$B.args("__getitem__",2,{self:null,key:null},["self","key"],arguments,{},null,null);if($.self.elt instanceof SVGElement&&$.self.elt.hasAttributeNS(null,$.key)){return $.self.elt.getAttributeNS(null,$.key)}else if(typeof $.self.elt.hasAttribute=="function"&&$.self.elt.hasAttribute($.key)){return $.self.elt.getAttribute($.key)}throw _b_.KeyError.$factory($.key)};Attributes.__iter__=function(self){self.$counter=0;var attrs=self.elt.attributes,items=[];for(var i=0;i");var DOMEvent=$B.DOMEvent=$B.make_class("DOMEvent",(function(evt_name){return DOMEvent.__new__(DOMEvent,evt_name)}));DOMEvent.__new__=function(cls,evt_name){var ev=new Event(evt_name);ev.__class__=DOMEvent;if(ev.preventDefault===undefined){ev.preventDefault=function(){ev.returnValue=false}}if(ev.stopPropagation===undefined){ev.stopPropagation=function(){ev.cancelBubble=true}}return ev};function dom2svg(svg_elt,coords){var pt=svg_elt.createSVGPoint();pt.x=coords.x;pt.y=coords.y;return pt.matrixTransform(svg_elt.getScreenCTM().inverse())}DOMEvent.__getattribute__=function(ev,attr){switch(attr){case"__repr__":case"__str__":return function(){return""};case"x":return $mouseCoords(ev).x;case"y":return $mouseCoords(ev).y;case"data":if(ev.dataTransfer!==null&&ev.dataTransfer!==undefined){return Clipboard.$factory(ev.dataTransfer)}else if(ev.target instanceof Worker){return $B.structuredclone2pyobj(ev.data)}else if(typeof DedicatedWorkerGlobalScope!=="undefined"&&ev.target instanceof DedicatedWorkerGlobalScope){return $B.structuredclone2pyobj(ev.data)}return convertDomValue(ev.data);case"target":if(ev.target!==undefined){return DOMNode.$factory(ev.target)}break;case"char":return String.fromCharCode(ev.which);case"svgX":if(ev.target instanceof SVGSVGElement){return Math.floor(dom2svg(ev.target,$mouseCoords(ev)).x)}throw _b_.AttributeError.$factory("event target is not an SVG "+"element");case"svgY":if(ev.target instanceof SVGSVGElement){return Math.floor(dom2svg(ev.target,$mouseCoords(self)).y)}throw _b_.AttributeError.$factory("event target is not an SVG "+"element")}var res=ev[attr];if(res!==undefined){if(typeof res=="function"){var func=function(){var args=[];for(var i=0;i");var dom={File:function(){},FileReader:function(){}};dom.File.__class__=_b_.type;dom.File.__str__=function(){return""};dom.FileReader.__class__=_b_.type;dom.FileReader.__str__=function(){return""};var DOMNode=$B.make_class("DOMNode",(function(elt){return elt}));DOMNode.__add__=function(self,other){var res=TagSum.$factory();res.children=[self];var pos=1;if($B.$isinstance(other,TagSum)){res.children=res.children.concat(other.children)}else if($B.$isinstance(other,[_b_.str,_b_.int,_b_.float,_b_.list,_b_.dict,_b_.set,_b_.tuple])){res.children[pos++]=DOMNode.$factory(document.createTextNode(_b_.str.$factory(other)))}else if($B.$isinstance(other,DOMNode)){res.children[pos++]=other}else{try{res.children=res.children.concat(_b_.list.$factory(other))}catch(err){throw _b_.TypeError.$factory("can't add '"+$B.class_name(other)+"' object to DOMNode instance")}}return res};DOMNode.__bool__=function(){return true};DOMNode.__contains__=function(self,key){if(self.nodeType==9&&typeof key=="string"){return document.getElementById(key)!==null}if(self.length!==undefined&&typeof self.item=="function"){for(var i=0,len=self.length;i-1){return function(selector){if(selector===undefined){self.select();return _b_.None}return DOMNode.select(self,selector)}}if(attr=="query"&&self.nodeType==9){let res={__class__:Query,_keys:[],_values:{}};let 
qs=location.search.substr(1).split("&");if(location.search!=""){for(let i=0;i-1){res._values[key].push(value)}else{res._keys.push(key);res._values[key]=[value]}}}return res}var klass=$B.get_class(self);var property=self[attr];if(property!==undefined&&self.__class__&&klass.__module__!="browser.html"&&klass.__module__!="browser.svg"&&!klass.$webcomponent){var from_class=$B.$getattr(klass,attr,null);if(from_class!==null){property=from_class;if(typeof from_class==="function"){return property.bind(self,self)}}else{var bases=self.__class__.__bases__;var show_message=true;for(var base of bases){if(base.__module__=="browser.html"){show_message=false;break}}if(show_message){from_class=$B.$getattr(self.__class__,attr,_b_.None);if(from_class!==_b_.None){var frame=$B.frame_obj.frame,line=frame.$lineno;console.info("Warning: line "+line+", "+self.tagName+" element has instance attribute '"+attr+"' set."+" Attribute of class "+$B.class_name(self)+" is ignored.")}}}}if(property===undefined){if(self.tagName){var ce=customElements.get(self.tagName.toLowerCase());if(ce!==undefined&&ce.$cls!==undefined){var save_class=self.__class__;self.__class__=ce.$cls;try{let res=_b_.object.__getattribute__(self,attr);self.__class__=save_class;return res}catch(err){self.__class__=save_class;if(!$B.is_exc(err,[_b_.AttributeError])){throw err}}}}else{return object.__getattribute__(self,attr)}}var res=property;if(res!==undefined){if(res===null){return res}if(typeof res==="function"){if(self.__class__&&self.__class__.$webcomponent){var method=$B.$getattr(self.__class__,attr,null);if(method!==null){return res.bind(self)}}if(res.$is_func){return res}var func=function(f,elt){return function(){var args=[];for(var i=0;i0){var res=TagSum.$factory();var pos=res.children.length;for(var i=0;i"}var res=""};DOMNode.__setattr__=function(self,attr,value){switch(attr){case"left":case"top":case"width":case"height":if($B.$isinstance(value,[_b_.int,_b_.float])&&self.nodeType==1){self.style[attr]=value+"px";return _b_.None}else{throw _b_.ValueError.$factory(attr+" value should be"+" an integer or float, not "+$B.class_name(value))}}if(DOMNode["set_"+attr]!==undefined){return DOMNode["set_"+attr](self,value)}function warn(msg){console.log(msg);var frame=$B.frame_obj.frame;if(!frame){return}if($B.get_option("debug")>0){var file=frame.__file__,lineno=frame.$lineno;console.log("module",frame[2],"line",lineno);if($B.file_cache.hasOwnProperty(file)){var src=$B.file_cache[file];console.log(src.split("\n")[lineno-1])}}else{console.log("module",frame[2])}}var proto=Object.getPrototypeOf(self),nb=0;while(!!proto&&proto!==Object.prototype&&nb++<10){var descriptors=Object.getOwnPropertyDescriptors(proto);if(!!descriptors&&typeof descriptors.hasOwnProperty=="function"){if(descriptors.hasOwnProperty(attr)){if(!descriptors[attr].writable&&descriptors[attr].set===undefined){warn("Warning: property '"+attr+"' is not writable. 
Use element.attrs['"+attr+"'] instead.")}break}}else{break}proto=Object.getPrototypeOf(proto)}if(self.style&&self.style[attr]!==undefined&&attr!="src"){warn("Warning: '"+attr+"' is a property of element.style")}self[attr]=py_immutable_to_js(value);return _b_.None};DOMNode.__setitem__=function(self,key,value){if(typeof key=="number"){self.childNodes[key]=value}else if(typeof key=="string"){if(self.attributes){if(self instanceof SVGElement){self.setAttributeNS(null,key,value)}else if(typeof self.setAttribute=="function"){self.setAttribute(key,value)}}}};DOMNode.abs_left={__get__:function(self){return $getPosition(self).left},__set__:function(){throw _b_.AttributeError.$factory("'DOMNode' objectattribute "+"'abs_left' is read-only")}};DOMNode.abs_top={__get__:function(self){return $getPosition(self).top},__set__:function(){throw _b_.AttributeError.$factory("'DOMNode' objectattribute "+"'abs_top' is read-only")}};DOMNode.attach=DOMNode.__le__;DOMNode.bind=function(){var $=$B.args("bind",4,{self:null,event:null,func:null,options:null},["self","event","func","options"],arguments,{func:_b_.None,options:_b_.None},null,null),self=$.self,event=$.event,func=$.func,options=$.options;if(func===_b_.None){return function(f){return DOMNode.bind(self,event,f)}}var callback=function(f){return function(ev){try{return $B.$call(f)($DOMEvent(ev))}catch(err){if(err.__class__!==undefined){$B.handle_error(err)}else{try{$B.$getattr($B.get_stderr(),"write")(err)}catch(err1){console.log(err)}}}}}(func);callback.$infos=func.$infos;callback.$attrs=func.$attrs||{};callback.$func=func;if(typeof options=="boolean"){self.addEventListener(event,callback,options)}else if(options.__class__===_b_.dict){self.addEventListener(event,callback,_b_.dict.$to_obj(options))}else if(options===_b_.None){self.addEventListener(event,callback,false)}self.$events=self.$events||{};self.$events[event]=self.$events[event]||[];self.$events[event].push([func,callback]);return self};DOMNode.children=function(self){var res=[];if(self.nodeType==9){self=self.body}for(var child of self.children){res.push(DOMNode.$factory(child))}return res};DOMNode.child_nodes=function(self){var res=[];if(self.nodeType==9){self=self.body}for(var child of self.childNodes){res.push(DOMNode.$factory(child))}return res};DOMNode.clear=function(){var $=$B.args("clear",1,{self:null},["self"],arguments,{},null,null),self=$.self;if(self.nodeType==9){self=self.body}while(self.firstChild){self.removeChild(self.firstChild)}};DOMNode.Class=function(self){if(self.className!==undefined){return self.className}return _b_.None};DOMNode.class_name=function(self){return DOMNode.Class(self)};DOMNode.clone=function(self){var res=DOMNode.$factory(self.cloneNode(true));var events=self.$events||{};for(var event in events){var evt_list=events[event];evt_list.forEach((function(evt){var func=evt[0];DOMNode.bind(res,event,func)}))}return res};DOMNode.closest=function(){var $=$B.args("closest",2,{self:null,selector:null},["self","selector"],arguments,{},null,null),self=$.self,selector=$.selector;var res=self.closest(selector);if(res===null){throw _b_.KeyError.$factory("no parent with selector "+selector)}return DOMNode.$factory(res)};DOMNode.bindings=function(self){var res=$B.empty_dict();for(var key in self.$events){_b_.dict.$setitem(res,key,self.$events[key].map((x=>x[1])))}return res};DOMNode.events=function(self,event){self.$events=self.$events||{};var evt_list=self.$events[event]=self.$events[event]||[],callbacks=[];evt_list.forEach((function(evt){callbacks.push(evt[1])}));return 
callbacks};function make_list(node_list){var res=[];for(var i=0;i-1};Query.__getitem__=function(self,key){var result=self._values[key];if(result===undefined){throw _b_.KeyError.$factory(key)}else if(result.length==1){return result[0]}return result};var Query_iterator=$B.make_iterator_class("query string iterator");Query.__iter__=function(self){return Query_iterator.$factory(self._keys)};Query.__setitem__=function(self,key,value){self._values[key]=[value];return _b_.None};Query.__str__=Query.__repr__=function(self){var elts=[];for(var key in self._values){for(const val of self._values[key]){elts.push(encodeURIComponent(key)+"="+encodeURIComponent(val))}}if(elts.length==0){return""}else{return"?"+elts.join("&")}};Query.getfirst=function(self,key,_default){var result=self._values[key];if(result===undefined){if(_default===undefined){return _b_.None}return _default}return result[0]};Query.getlist=function(self,key){var result=self._values[key];if(result===undefined){return[]}return result};Query.getvalue=function(self,key,_default){try{return Query.__getitem__(self,key)}catch(err){if(_default===undefined){return _b_.None}return _default}};Query.keys=function(self){return self._keys};$B.set_func_names(Query,"");var TagSum=$B.make_class("TagSum",(function(){return{__class__:TagSum,children:[],toString:function(){return"(TagSum)"}}}));TagSum.appendChild=function(self,child){self.children.push(child)};TagSum.__add__=function(self,other){if($B.get_class(other)===TagSum){self.children=self.children.concat(other.children)}else if($B.$isinstance(other,[_b_.str,_b_.int,_b_.float,_b_.dict,_b_.set,_b_.list])){self.children=self.children.concat(DOMNode.$factory(document.createTextNode(other)))}else{self.children.push(other)}return self};TagSum.__radd__=function(self,other){var res=TagSum.$factory();res.children=self.children.slice();res.children.splice(0,0,DOMNode.$factory(document.createTextNode(other)));return res};TagSum.__repr__=function(self){var res=" ";for(var i=0;i");$B.TagSum=TagSum;$B.DOMNode=DOMNode})(__BRYTHON__);(function($B){$B.pattern_match=function(subject,pattern){var _b_=$B.builtins,frame=$B.frame_obj.frame,locals=frame[1];function bind(pattern,subject){if(pattern.alias){locals[pattern.alias]=subject}}if(pattern.sequence){if($B.$isinstance(subject,[_b_.str,_b_.bytes,_b_.bytearray])){return false}let Sequence;if($B.imported["collections.abc"]){Sequence=$B.imported["collections.abc"].Sequence}let deque;if($B.imported["collections"]){deque=$B.imported["collections"].deque}let supported=false;let klass=subject.__class__||$B.get_class(subject);for(let base of[klass].concat(klass.__bases__||[])){if(base.$match_sequence_pattern){supported=true;break}else if(base===Sequence||base==deque){supported=true;break}}if(!supported&&Sequence){supported=_b_.issubclass(klass,Sequence)}if(!supported){return false}if(pattern.sequence.length==1&&pattern.sequence[0].capture_starred=="_"){return true}let subject_length=_b_.len(subject),nb_fixed_length=0;for(let item of pattern.sequence){if(!item.capture_starred){nb_fixed_length++}}if(subject_length0){if([_b_.bool,_b_.bytearray,_b_.bytes,_b_.dict,_b_.float,_b_.frozenset,_b_.int,_b_.list,_b_.set,_b_.str,_b_.tuple].indexOf(klass)>-1){if(pattern.args.length>1){throw _b_.TypeError.$factory("for builtin type "+$B.class_name(subject)+", a single positional "+"subpattern is accepted")}return $B.pattern_match(subject,pattern.args[0])}else{let match_args=$B.$getattr(klass,"__match_args__",$B.fast_tuple([]));if(!$B.$isinstance(match_args,_b_.tuple)){throw 
_b_.TypeError.$factory("__match_args__() did not return a tuple")}if(pattern.args.length>match_args.length){throw _b_.TypeError.$factory("__match_args__() returns "+match_args.length+" names but "+pattern.args.length+" positional "+"arguments were passed")}for(let i=0,len=pattern.args.length;i"}else{return""}};$B.set_func_names(coroutine,"builtins");$B.make_async=func=>{if(func.$is_genfunc){return func}var f=function(){var args=arguments;return{__class__:coroutine,$args:args,$func:func}};f.$infos=func.$infos;f.$is_func=true;f.$is_async=true;return f};$B.promise=function(obj){if(obj.__class__===coroutine){obj.$frame_obj=$B.frame_obj;return coroutine.send(obj)}if(typeof obj=="function"){return obj()}if(obj instanceof Promise||typeof obj.then=="function"){return obj}var awaitable=$B.$getattr(obj,"__await__",null);if(awaitable!==null){awaitable=$B.$call(awaitable)();if($B.$getattr(awaitable,"__next__",null)===null){throw _b_.TypeError.$factory("__await__() returned non-iterator"+` of type '${$B.class_name(awaitable)}'`)}return awaitable}throw _b_.TypeError.$factory(`object ${$B.class_name(obj)} `+`can't be used in 'await' expression`)}})(__BRYTHON__);(function($B){$B.builtin_class_flags={builtins:{1074287874:["FloatingPointError","TabError","BytesWarning","UnboundLocalError","PermissionError","PendingDeprecationWarning","IndentationError","OverflowError","UnicodeEncodeError","MemoryError","ConnectionRefusedError","UnicodeDecodeError","DeprecationWarning","RuntimeError","NameError","IOError","StopIteration","StopAsyncIteration","RecursionError","IndexError","SystemError","UnicodeTranslateError","ImportWarning","FileNotFoundError","KeyError","ProcessLookupError","Warning","BufferError","IsADirectoryError","InterruptedError","UserWarning","LookupError","KeyboardInterrupt","ReferenceError","Exception","TimeoutError","SyntaxWarning","NotImplementedError","UnicodeWarning","ResourceWarning","FileExistsError","BlockingIOError","AssertionError","BaseExceptionGroup","ArithmeticError","ConnectionError","ChildProcessError","ConnectionResetError","GeneratorExit","ModuleNotFoundError","NotADirectoryError","EOFError","ValueError","EncodingWarning","EnvironmentError","FutureWarning","BaseException","ImportError","WindowsError","RuntimeWarning","SyntaxError","SystemExit","TypeError","UnicodeError","ConnectionAbortedError","AttributeError","OSError","ZeroDivisionError","BrokenPipeError"],1073763848:["ExceptionGroup"],21500162:["bool"],4723970:["float","bytearray"],138941698:["bytes"],546050:["zip","super","classmethod","reversed","filter","staticmethod","enumerate","property","map"],529666:["object","complex"],541611330:["dict"],4740354:["frozenset","set"],21501186:["int"],38294818:["list"],545058:["memoryview"],528674:["range"],545026:["slice"],273159426:["str"],71849250:["tuple"],2156420354:["type"]},types:{545154:["getset_descriptor","frame","async_generator","coroutine","member_descriptor","method-wrapper","generator","classmethod_descriptor"],547202:["builtin_function_or_method"],545026:["traceback","cell"],528642:["NoneType","code","ellipsis","NotImplementedType"],678146:["function"],545090:["mappingproxy"],678274:["method_descriptor"],547074:["method"],546050:["module"],676226:["wrapper_descriptor"]}}})(__BRYTHON__);(function($B){var _b_=$B.builtins;var update=$B.update_obj=function(mod,data){for(let attr in data){mod[attr]=data[attr]}};var modules={};var win=$B.jsobj2pyobj(globalThis);var 
browser={$package:true,$is_package:true,__initialized__:true,__package__:"browser",__file__:$B.brython_path.replace(new RegExp("/*$","g"),"")+"/Lib/browser/__init__.py",bind:function(){var $=$B.args("bind",3,{elt:null,evt:null,options:null},["elt","evt","options"],arguments,{options:_b_.None},null,null);var options=$.options;if(typeof options=="boolean"){}else if(options.__class__===_b_.dict){var _options={};for(var key of _b_.dict.$keys_string(options)){_options[key]=_b_.dict.$getitem_string(options,key)}options=_options}else{options==false}return function(callback){if($B.get_class($.elt)===$B.JSObj){function f(ev){try{return callback($B.jsobj2pyobj(ev))}catch(err){$B.handle_error(err)}}$.elt.addEventListener($.evt,f,options);return callback}else if($B.$isinstance($.elt,$B.DOMNode)){$B.DOMNode.bind($.elt,$.evt,callback,options);return callback}else if($B.$isinstance($.elt,_b_.str)){var items=document.querySelectorAll($.elt);for(var i=0;i1){console.log(err,err.__class__,err.args);console.log("first",first);console.log(arguments)}throw err}}}}for(var item of _b_.dict.$iter_items($ns.kw)){var arg=item.key,value=item.value;if(arg.toLowerCase().substr(0,2)=="on"){$B.DOMNode.__setattr__(self,arg,value)}else if(arg.toLowerCase()=="style"){$B.DOMNode.set_style(self,value)}else{if(value!==false){try{arg=$B.imported["browser.html"].attribute_mapper(arg);self.setAttribute(arg,$B.pyobj2jsobj(value))}catch(err){throw _b_.ValueError.$factory("can't set attribute "+arg)}}}}};dict.__mro__=[$B.DOMNode,$B.builtins.object];dict.__new__=function(cls){var res=document.createElement(tagName);if(cls!==html[tagName]){res.__class__=cls}return res};dict.__rmul__=function(self,num){return $B.DOMNode.__mul__(self,num)};$B.set_func_names(dict,"browser.html");return dict}function makeFactory(klass){return function(k){return function(){var res;if(k.__name__=="SVG"){res=$B.DOMNode.$factory(document.createElementNS("http://www.w3.org/2000/svg","svg"),true)}else{try{res=document.createElement(k.__name__)}catch(err){console.log("error "+err);console.log("creating element",k.__name__);throw err}}var init=$B.$getattr(k,"__init__",null);if(init!==null){init(res,...arguments)}return res}}(klass)}var tags=["A","ABBR","ACRONYM","ADDRESS","APPLET","AREA","B","BASE","BASEFONT","BDO","BIG","BLOCKQUOTE","BODY","BR","BUTTON","CAPTION","CENTER","CITE","CODE","COL","COLGROUP","DD","DEL","DFN","DIR","DIV","DL","DT","EM","FIELDSET","FONT","FORM","FRAME","FRAMESET","H1","H2","H3","H4","H5","H6","HEAD","HR","HTML","I","IFRAME","IMG","INPUT","INS","ISINDEX","KBD","LABEL","LEGEND","LI","LINK","MAP","MENU","META","NOFRAMES","NOSCRIPT","OBJECT","OL","OPTGROUP","OPTION","P","PARAM","PRE","Q","S","SAMP","SCRIPT","SELECT","SMALL","SPAN","STRIKE","STRONG","STYLE","SUB","SUP","SVG","TABLE","TBODY","TD","TEXTAREA","TFOOT","TH","THEAD","TITLE","TR","TT","U","UL","VAR","ARTICLE","ASIDE","AUDIO","BDI","CANVAS","COMMAND","DATA","DATALIST","EMBED","FIGCAPTION","FIGURE","FOOTER","HEADER","KEYGEN","MAIN","MARK","MATH","METER","NAV","OUTPUT","PROGRESS","RB","RP","RT","RTC","RUBY","SECTION","SOURCE","TEMPLATE","TIME","TRACK","VIDEO","WBR","DETAILS","DIALOG","MENUITEM","PICTURE","SUMMARY"];var html={};html.tags=$B.empty_dict();function maketag(tagName,ComponentClass){if(!(typeof tagName=="string")){throw _b_.TypeError.$factory("html.maketag expects a string as argument")}if(html[tagName]!==undefined){throw _b_.ValueError.$factory("cannot reset class for "+tagName)}var 
klass=makeTagDict(tagName);klass.$factory=makeFactory(klass,ComponentClass);html[tagName]=klass;_b_.dict.$setitem(html.tags,tagName,html[tagName]);return klass}for(var tagName of tags){maketag(tagName)}html.maketag=maketag;html.attribute_mapper=function(attr){return attr.replace(/_/g,"-")};return html}(__BRYTHON__)}modules["browser"]=browser;$B.UndefinedType=$B.make_class("UndefinedType",(function(){return $B.Undefined}));$B.UndefinedType.__mro__=[_b_.object];$B.UndefinedType.__bool__=function(){return false};$B.UndefinedType.__repr__=function(){return""};$B.UndefinedType.__str__=$B.UndefinedType.__repr__;$B.Undefined={__class__:$B.UndefinedType};$B.set_func_names($B.UndefinedType,"javascript");var super_class=$B.make_class("JavascriptSuper",(function(){var res=_b_.super.$factory();var js_constr=res.__thisclass__.__bases__[0];return function(){var obj=new js_constr.$js_func(...arguments);console.log("obj from js constr",obj);for(var attr in obj){console.log("attr",attr);res.__self_class__.__dict__[attr]=$B.jsobj2pyobj(obj[attr])}return obj}}));super_class.__getattribute__=function(self,attr){if(attr=="__init__"||attr=="__call__"){return self.__init__}return $B.$getattr(self.__self_class__,attr)};$B.set_func_names(super_class,"javascript");modules["javascript"]={this:function(){if($B.js_this===undefined){return $B.builtins.None}return $B.jsobj2pyobj($B.js_this)},Array:$B.js_array,Date:self.Date&&$B.jsobj2pyobj(self.Date),extends:function(js_constr){if(!js_constr.$js_func||!js_constr.$js_func.toString().startsWith("class ")){console.log(js_constr);throw _b_.TypeError.$factory("argument of extend must be a Javascript class")}js_constr.__class__=_b_.type;return function(obj){obj.__bases__.splice(0,0,js_constr);obj.__mro__.splice(0,0,js_constr);return obj}},import_js:function(){var $=$B.args("import_js",2,{url:null,name:null},["url","name"],arguments,{name:_b_.None},null,null),url=$.url,name=$.name;var xhr=new XMLHttpRequest,result;xhr.open("GET",url,false);xhr.onreadystatechange=function(){if(this.readyState==4){if(this.status==200){var js=this.responseText+"\nreturn $module",f=new Function(js);console.log("f",f,f+"");var $module=f();if(typeof $module!=="undefined"){result=$B.module.$factory(name);for(var key in $module){result[key]=$B.jsobj2pyobj($module[key])}result.__file__=url}else{console.log(this.responseText);result=_b_.ImportError.$factory("Javascript "+`module at ${url} doesn't define $module`)}}else{result=_b_.ModuleNotFoundError.$factory(name)}}};xhr.send();if($B.$isinstance(result,_b_.BaseException)){$B.handle_error(result)}else{if(name===_b_.None){name=url.split(".");if(name.length>1){name.pop()}name=name.join(".");result.__name__=name}$B.imported[name]=result;var frame=$B.frame_obj.frame;frame[1][name]=result}},import_modules:function(refs,callback,loaded){if(loaded===undefined){loaded=[]}if(!Array.isArray(refs)){throw _b_.TypeError.$factory(`first argument must be a list, got ${$B.class_name(refs)}`)}if(refs.length>1){var ref=refs.shift();import(ref).then((function(module){loaded.push(module);$B.imported.javascript.import_modules(refs,callback,loaded)})).catch($B.show_error)}else{import(refs[0]).then((function(module){loaded.push(module);return $B.$call(callback).apply(null,loaded)})).catch($B.show_error)}},import_scripts:function(refs,callback,loaded){console.log("import scripts",refs);if(loaded===undefined){loaded=[]}if(!Array.isArray(refs)){throw _b_.TypeError.$factory(`first argument must be a list, got ${$B.class_name(refs)}`)}if(refs.length>0){var ref=refs.shift();var 
script=document.createElement("script");script.src=ref;script.addEventListener("load",(function(){loaded.push(script);$B.imported.javascript.import_scripts(refs,callback,loaded)}));document.body.appendChild(script)}else{console.log("appel callback",loaded);return $B.$call(callback).apply(null,loaded)}},JSObject:$B.JSObj,JSON:{__class__:$B.make_class("JSON"),parse:function(){return $B.structuredclone2pyobj(JSON.parse.apply(this,arguments))},stringify:function(obj,replacer,space){return JSON.stringify($B.pyobj2structuredclone(obj,false),$B.jsobj2pyobj(replacer),space)}},jsobj2pyobj:function(obj){return $B.jsobj2pyobj(obj)},load:function(script_url){console.log('"javascript.load" is deprecrated. '+"Use browser.load instead.");var file_obj=$B.builtins.open(script_url);var content=$B.$getattr(file_obj,"read")();eval(content)},Math:self.Math&&$B.jsobj2pyobj(self.Math),NULL:null,NullType:$B.make_class("NullType"),Number:self.Number&&$B.jsobj2pyobj(self.Number),py2js:function(src,module_name){if(module_name===undefined){module_name="__main__"+$B.UUID()}var js=$B.py2js({src:src,filename:""},module_name,module_name,$B.builtins_scope).to_js();return $B.format_indent(js,0)},pyobj2jsobj:function(obj){return $B.pyobj2jsobj(obj)},RegExp:self.RegExp&&$B.jsobj2pyobj(self.RegExp),String:self.String&&$B.jsobj2pyobj(self.String),super:super_class,UNDEFINED:$B.Undefined,UndefinedType:$B.UndefinedType};modules.javascript.NullType.__module__="javascript";modules.javascript.NullType.__eq__=function(_self,other){return other===null||other===$B.Undefined};modules.javascript.NullType.__repr__=function(_self){return""};$B.set_func_names(modules.javascript.NullType,"javascript");modules.javascript.UndefinedType.__module__="javascript";var $io=$B.$io=$B.make_class("io",(function(out){return{__class__:$io,out:out,encoding:"utf-8"}}));$io.flush=function(self){if(self.buf){console[self.out](self.buf.join(""));self.buf=[]}};$io.write=function(self,msg){if(self.buf===undefined){self.buf=[]}if(typeof msg!="string"){throw _b_.TypeError.$factory("write() argument must be str, not "+$B.class_name(msg))}self.buf.push(msg);return _b_.None};modules["_sys"]={_getframe:function(){var $=$B.args("_getframe",1,{depth:null},["depth"],arguments,{depth:0},null,null),depth=$.depth,frame_obj=$B.frame_obj;for(var i=0;i0){var lines=headers.trim().split(/[\r\n]+/);lines.forEach((function(line){var parts=line.split(": ");var header=parts.shift();var value=parts.join(": ");_b_.dict.$setitem(res,header,value)}))}return res}));var Future=$B.make_class("Future",(function(){var methods={};var promise=new Promise((function(resolve,reject){methods.resolve=resolve;methods.reject=reject}));promise._methods=methods;promise._done=false;promise.__class__=Future;return promise}));Future.done=function(){var $=$B.args("done",1,{self:null},["self"],arguments,{},null,null);return!!$.self._done};Future.set_result=function(){var $=$B.args("set_result",2,{self:null,value:null},["self","value"],arguments,{},null,null);$.self._done=true;$.self._methods.resolve($.value);return _b_.None};Future.set_exception=function(){var $=$B.args("set_exception",2,{self:null,exception:null},["self","exception"],arguments,{},null,null);$.self._done=true;$.self._methods.reject($.exception);return _b_.None};$B.set_func_names(Future,"browser.aio");modules["browser.aio"]={ajax:function(){var $=$B.args("ajax",2,{method:null,url:null},["method","url"],arguments,{},null,"kw"),method=$.method.toUpperCase(),url=$.url,kw=$.kw;var 
args=handle_kwargs(kw,"get");if(method=="GET"&&!args.cache){url=url+"?ts"+(new Date).getTime()+"=0"}if(args.body&&method=="GET"){url=url+(args.cache?"?":"&")+args.body}var func=function(){return new Promise((function(resolve){var xhr=new XMLHttpRequest;xhr.open(method,url,true);for(var key in args.headers){xhr.setRequestHeader(key,args.headers[key])}xhr.format=args.format;xhr.responseType=responseType[args.format];xhr.onreadystatechange=function(){if(this.readyState==4){this.__class__=HTTPRequest;resolve(this)}};if(args.body&&["POST","PUT","DELETE","PATCH"].indexOf(method)>-1){xhr.send(args.body)}else{xhr.send()}}))};func.$infos={__name__:"ajax_"+method};return{__class__:$B.coroutine,$args:[url,args],$func:func}},event:function(){var $=$B.args("event",1,{element:null},["element"],arguments,{},"names",null),element=$.element,names=$.names;return new Promise((function(resolve){var callbacks=[];names.forEach((function(name){var callback=function(evt){callbacks.forEach((function(items){$B.DOMNode.unbind(element,items[0],items[1])}));resolve($B.$DOMEvent(evt))};callbacks.push([name,callback]);$B.DOMNode.bind(element,name,callback)}))}))},get:function(){return $B.imported["browser.aio"].ajax.bind(null,"GET").apply(null,arguments)},iscoroutine:function(f){return f.__class__===$B.coroutine},iscoroutinefunction:function(f){return(f.$infos.__code__.co_flags&128)!=0},post:function(){return $B.imported["browser.aio"].ajax.bind(null,"POST").apply(null,arguments)},run:function(){var handle_success=function(){$B.leave_frame()},handle_error=$B.show_error;var $=$B.args("run",3,{coro:null,onsuccess:null,onerror:null},["coro","onsuccess","onerror"],arguments,{onsuccess:handle_success,onerror:handle_error},null,null),coro=$.coro,onsuccess=$.onsuccess,onerror=$.onerror;var save_frame_obj=$B.frame_obj;$B.coroutine.send(coro).then(onsuccess).catch(onerror);$B.frame_obj=save_frame_obj;return _b_.None},sleep:function(seconds){if(seconds.__class__===_b_.float){seconds=seconds.value}else if(typeof seconds!="number"){throw _b_.TypeError.$factory("'sleep' argument must be "+"int or float, not "+$B.class_name(seconds))}var func=function(){return new Promise((resolve=>setTimeout((function(){resolve(_b_.None)}),1e3*seconds)))};func.$infos={__name__:"sleep"};return{__class__:$B.coroutine,$args:[seconds],$func:func}},Future:Future,__getattr__:function(attr){$B.$import("_aio");return $B.$getattr($B.imported._aio,attr)}};function load(name,module_obj){module_obj.__class__=$B.module;module_obj.__name__=name;$B.imported[name]=module_obj;for(var attr in module_obj){if(typeof module_obj[attr]=="function"){module_obj[attr].$infos={__module__:name,__name__:attr,__qualname__:name+"."+attr}}}}for(let attr in modules){load(attr,modules[attr])}if(!($B.isWebWorker||$B.isNode)){modules["browser"].html=modules["browser.html"];modules["browser"].aio=modules["browser.aio"]}_b_.__builtins__=$B.module.$factory("__builtins__","Python builtins");for(let attr in _b_){_b_.__builtins__[attr]=_b_[attr];$B.builtins_scope.binding[attr]=true;if(_b_[attr].$is_class){if(_b_[attr].__bases__){_b_[attr].__bases__.__class__=_b_.tuple}else{_b_[attr].__bases__=$B.fast_tuple([_b_.object])}}}_b_.__builtins__.__setattr__=function(attr,value){_b_[attr]=value};$B.method_descriptor.__getattribute__=$B.function.__getattribute__;$B.wrapper_descriptor.__getattribute__=$B.function.__getattribute__;var tp_dict=_b_.type.__dict__=$B.empty_dict(),setitem=_b_.dict.$setitem;for(let method in 
_b_.type){if(method.startsWith("__")&&method.endsWith("__")){setitem(tp_dict,method,_b_.type[method])}}setitem(tp_dict,"__mro__",{__get__:function(cls){return $B.fast_tuple([cls].concat(cls.__mro__))}});for(var name in _b_){var builtin=_b_[name];if(_b_[name].__class__===_b_.type){_b_[name].__qualname__=name;_b_[name].__module__="builtins";_b_[name].__name__=name;_b_[name].$is_builtin_class=true;$B.builtin_classes.push(_b_[name]);for(var key in _b_[name]){var value=_b_[name][key];if(value===undefined||value.__class__||typeof value!="function"){continue}else if(key=="__new__"){value.__class__=$B.builtin_function_or_method}else if(key.startsWith("__")){value.__class__=$B.wrapper_descriptor}else{value.__class__=$B.method_descriptor}value.__objclass__=_b_[name]}}else if(typeof builtin=="function"){builtin.$infos={__name__:name,__qualname__:name}}}for(let attr in $B){if(Array.isArray($B[attr])){$B[attr].__class__=_b_.list}}$B.cell=$B.make_class("cell",(function(value){return{__class__:$B.cell,$cell_contents:value}}));$B.cell.cell_contents=$B.$call(_b_.property)((function(self){if(self.$cell_contents===null){throw _b_.ValueError.$factory("empty cell")}return self.$cell_contents}),(function(self,value){self.$cell_contents=value}));var $comps=Object.values($B.$comps).concat(["eq","ne"]);$comps.forEach((function(comp){var op="__"+comp+"__";$B.cell[op]=function(op){return function(self,other){if(!$B.$isinstance(other,$B.cell)){return _b_.NotImplemented}if(self.$cell_contents===null){if(other.$cell_contents===null){return op=="__eq__"}else{return["__ne__","__lt__","__le__"].indexOf(op)>-1}}else if(other.$cell_contents===null){return["__ne__","__gt__","__ge__"].indexOf(op)>-1}return $B.rich_comp(op,self.$cell_contents,other.$cell_contents)}}(op)}));$B.set_func_names($B.cell,"builtins");for(let flag in $B.builtin_class_flags.builtins){for(let key of $B.builtin_class_flags.builtins[flag]){if(_b_[key]){_b_[key].__flags__=parseInt(flag)}else{console.log("not in _b_",key)}}}for(let flag in $B.builtin_class_flags.types){for(let key of $B.builtin_class_flags.types[flag]){if($B[key]){$B[key].__flags__=parseInt(flag)}}}$B.AST={__class__:_b_.type,__mro__:[_b_.object],__name__:"AST",__qualname__:"AST",$is_class:true,$convert:function(js_node){if(js_node===undefined){return _b_.None}var constr=js_node.constructor;if(constr&&constr.$name){$B.create_python_ast_classes();return $B.python_ast_classes[constr.$name].$factory(js_node)}else if(Array.isArray(js_node)){return js_node.map($B.AST.$convert)}else if(js_node.type){switch(js_node.type){case"int":var value=js_node.value[1],base=js_node.value[0];var res=parseInt(value,base);if(!Number.isSafeInteger(res)){res=$B.long_int.$factory(value,base)}return res;case"float":return $B.fast_float(parseFloat(js_node.value));case"imaginary":return $B.make_complex(0,$B.AST.$convert(js_node.value));case"ellipsis":return _b_.Ellipsis;case"str":if(js_node.is_bytes){return _b_.bytes.$factory(js_node.value,"latin-1")}return js_node.value;case"id":if(["False","None","True"].indexOf(js_node.value)>-1){return _b_[js_node.value]}break}}else if(["string","number"].indexOf(typeof js_node)>-1){return js_node}else if(js_node.$name){return js_node.$name+"()"}else if([_b_.None,_b_.True,_b_.False].indexOf(js_node)>-1){return js_node}else if(js_node.__class__){return js_node}else{console.log("cannot handle",js_node);return 
js_node}}};$B.stdin={__class__:$io,__original__:true,closed:false,len:1,pos:0,read:function(){return""},readline:function(){return""}};$B.tracefunc=_b_.None})(__BRYTHON__);(function($B){var _b_=$B.builtins;function ast_dump(tree,indent){var attr,value;indent=indent||0;if(tree===_b_.None){return"None"}else if(typeof tree=="string"){return`'${tree}'`}else if(typeof tree=="number"){return tree+""}else if(tree.imaginary){return tree.value+"j"}else if(Array.isArray(tree)){if(tree.length==0){return"[]"}res="[\n";var items=[];for(var x of tree){try{items.push(ast_dump(x,indent+1))}catch(err){console.log("error",tree);console.log("for item",x);throw err}}res+=items.join(",\n");return res+"]"}else if(tree.$name){return tree.$name+"()"}else if(tree instanceof ast.MatchSingleton){return`MatchSingleton(value=${$B.AST.$convert(tree.value)})`}else if(tree instanceof ast.Constant){value=tree.value;if(value.imaginary){return`Constant(value=${_b_.repr(value.value)}j)`}return`Constant(value=${$B.AST.$convert(value)})`}var proto=Object.getPrototypeOf(tree).constructor;var res=" ".repeat(indent)+proto.$name+"(";if($B.ast_classes[proto.$name]===undefined){console.log("no ast class",proto)}var attr_names=$B.ast_classes[proto.$name].split(","),attrs=[];attr_names=attr_names.map((x=>x.endsWith("*")||x.endsWith("?")?x.substr(0,x.length-1):x));if([ast.Name].indexOf(proto)>-1){for(attr of attr_names){if(tree[attr]!==undefined){attrs.push(`${attr}=${ast_dump(tree[attr])}`)}}return res+attrs.join(", ")+")"}for(attr of attr_names){if(tree[attr]!==undefined){value=tree[attr];attrs.push(attr+"="+ast_dump(tree[attr],indent+1).trimStart())}}if(attrs.length>0){res+="\n";res+=attrs.map((x=>" ".repeat(indent+1)+x)).join(",\n")}res+=")";return res}function string_from_ast_value(value){return value.replace(new RegExp("\\\\'","g"),"'")}function compiler_error(ast_obj,message,end){var exc=_b_.SyntaxError.$factory(message);exc.filename=state.filename;if(exc.filename!=""){var src=$B.file_cache[exc.filename],lines=src.split("\n"),line=lines[ast_obj.lineno-1];exc.text=line}else{exc.text=_b_.None}exc.lineno=ast_obj.lineno;exc.offset=ast_obj.col_offset+1;end=end||ast_obj;exc.end_lineno=end.end_lineno;exc.end_offset=end.end_col_offset+1;exc.args[1]=[exc.filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset];exc.$frame_obj=$B.frame_obj;if($B.frame_obj===null){}throw exc}function fast_id(obj){if(obj.$id!==undefined){return obj.$id}return obj.$id=$B.UUID()}function copy_position(target,origin){target.lineno=origin.lineno;target.col_offset=origin.col_offset;target.end_lineno=origin.end_lineno;target.end_col_offset=origin.end_col_offset}function encode_position(a,b,c,d){if(d===undefined){return`[${[a,b,c]}]`}else{return`[${[a,b,c,d]}]`}}$B.decode_position=function(pos){return pos};function get_source_from_position(src,ast_obj){var lines=src.split("\n"),start_line=lines[ast_obj.lineno-1];if(ast_obj.end_lineno==ast_obj.lineno){return start_line.substring(ast_obj.col_offset,ast_obj.end_col_offset)}else{var res=start_line.substr(ast_obj.col_offset),line_num=ast_obj.lineno+1;while(line_num-1){_scopes=scopes.slice(0,ix+1)}else{_scopes=scopes.concat(scope)}}var names=[];for(var _scope of _scopes){if(!_scope.parent){names.push(_scope.name)}}return names.join("_").replace(/\./g,"_")}function module_name(scopes){var _scopes=scopes.slice();var names=[];for(var _scope of _scopes){if(!_scope.parent){names.push(_scope.name)}}return names.join(".")}function 
make_scope_name(scopes,scope){if(scope===builtins_scope){return`_b_`}return"locals_"+qualified_scope_name(scopes,scope)}function make_search_namespaces(scopes){var namespaces=[];for(var scope of scopes.slice().reverse()){if(scope.parent||scope.type=="class"){continue}else if(scope.is_exec_scope){namespaces.push("$B.exec_scope")}namespaces.push(make_scope_name(scopes,scope))}namespaces.push("_b_");return namespaces}function mangle(scopes,scope,name){if(name.startsWith("__")&&!name.endsWith("__")){var ix=scopes.indexOf(scope);while(ix>=0){if(scopes[ix].ast instanceof $B.ast.ClassDef){var scope_name=scopes[ix].name;while(scope_name.length>0&&scope_name.startsWith("_")){scope_name=scope_name.substr(1)}if(scope_name.length==0){return name}return"_"+scope_name+name}ix--}}return name}function reference(scopes,scope,name){return make_scope_name(scopes,scope)+"."+mangle(scopes,scope,name)}function bind(name,scopes){var scope=$B.last(scopes),up_scope=last_scope(scopes);name=mangle(scopes,up_scope,name);if(up_scope.globals&&up_scope.globals.has(name)){scope=scopes[0]}else if(up_scope.nonlocals.has(name)){for(var i=scopes.indexOf(up_scope)-1;i>=0;i--){if(scopes[i].locals.has(name)){return scopes[i]}}}scope.locals.add(name);return scope}var CELL=5,FREE=4,LOCAL=1,SCOPE_MASK=15,SCOPE_OFF=12;var TYPE_CLASS=1,TYPE_MODULE=2;var DEF_LOCAL=2,DEF_PARAM=2<<1,DEF_COMP_ITER=2<<8;function name_reference(name,scopes,position){var scope=name_scope(name,scopes);return make_ref(name,scopes,scope,position)}function make_ref(name,scopes,scope,position){if(scope.found){return reference(scopes,scope.found,name)}else if(scope.resolve=="all"){var scope_names=make_search_namespaces(scopes);return`$B.resolve_in_scopes('${name}', [${scope_names}], [${position}])`}else if(scope.resolve=="local"){return`$B.resolve_local('${name}', [${position}])`}else if(scope.resolve=="global"){return`$B.resolve_global('${name}', _frame_obj)`}else if(Array.isArray(scope.resolve)){return`$B.resolve_in_scopes('${name}', [${scope.resolve}], [${position}])`}else if(scope.resolve=="own_class_name"){return`$B.own_class_name('${name}')`}}function local_scope(name,scope){var s=scope;while(true){if(s.locals.has(name)){return{found:true,scope:s}}if(!s.parent){return{found:false}}s=s.parent}}function name_scope(name,scopes){var test=false;if(test){console.log("name scope",name,scopes.slice());alert()}var flags,block;if(scopes.length==0){return{found:false,resolve:"all"}}var scope=$B.last(scopes),up_scope=last_scope(scopes);name=mangle(scopes,scope,name);if(up_scope.ast===undefined){console.log("no ast",scope)}block=scopes.symtable.table.blocks.get(fast_id(up_scope.ast));if(block===undefined){console.log("no block",scope,scope.ast,"id",fast_id(up_scope.ast));console.log("scopes",scopes.slice());console.log("symtable",scopes.symtable)}try{flags=_b_.dict.$getitem_string(block.symbols,name)}catch(err){console.log("name",name,"not in symbols of block",block);console.log("symtables",scopes.symtable);console.log("scopes",scopes.slice());return{found:false,resolve:"all"}}let __scope=flags>>SCOPE_OFF&SCOPE_MASK,is_local=[LOCAL,CELL].indexOf(__scope)>-1;if(test){console.log("block",block,"is local",is_local,"__scope",__scope)}if(up_scope.ast instanceof $B.ast.ClassDef&&name==up_scope.name){return{found:false,resolve:"own_class_name"}}if(name=="__annotations__"){if(block.type==TYPE_CLASS&&up_scope.has_annotation){is_local=true}else if(block.type==TYPE_MODULE){is_local=true}}if(is_local){var 
l_scope=local_scope(name,scope);if(!l_scope.found){if(block.type==TYPE_CLASS){scope.needs_frames=true;return{found:false,resolve:"global"}}else if(block.type==TYPE_MODULE){scope.needs_frames=true;return{found:false,resolve:"global"}}return{found:false,resolve:"local"}}else{return{found:l_scope.scope}}}else if(scope.globals.has(name)){var global_scope=scopes[0];if(global_scope.locals.has(name)){return{found:global_scope}}scope.needs_frames=true;return{found:false,resolve:"global"}}else if(scope.nonlocals.has(name)){for(let i=scopes.length-2;i>=0;i--){block=scopes.symtable.table.blocks.get(fast_id(scopes[i].ast));if(block&&_b_.dict.$contains_string(block.symbols,name)){var fl=_b_.dict.$getitem_string(block.symbols,name),local_to_block=[LOCAL,CELL].indexOf(fl>>SCOPE_OFF&SCOPE_MASK)>-1;if(!local_to_block){continue}return{found:scopes[i]}}}}if(scope.has_import_star){if(!is_local){scope.needs_frames=true}return{found:false,resolve:is_local?"all":"global"}}for(let i=scopes.length-2;i>=0;i--){block=undefined;if(scopes[i].ast){block=scopes.symtable.table.blocks.get(fast_id(scopes[i].ast))}if(scopes[i].globals.has(name)){scope.needs_frames=true;return{found:false,resolve:"global"}}if(scopes[i].locals.has(name)&&scopes[i].type!="class"){return{found:scopes[i]}}else if(block&&_b_.dict.$contains_string(block.symbols,name)){flags=_b_.dict.$getitem_string(block.symbols,name);let __scope=flags>>SCOPE_OFF&SCOPE_MASK;if([LOCAL,CELL].indexOf(__scope)>-1){return{found:false,resolve:"all"}}}if(scopes[i].has_import_star){return{found:false,resolve:"all"}}}if(builtins_scope.locals.has(name)){return{found:builtins_scope}}var scope_names=make_search_namespaces(scopes);return{found:false,resolve:scope_names}}function resolve_in_namespace(name,ns){if(ns.$proxy){return ns[name]===undefined?{found:false}:{found:true,value:ns[name]}}if(!ns.hasOwnProperty){if(ns[name]!==undefined){return{found:true,value:ns[name]}}}else if(ns.hasOwnProperty(name)){return{found:true,value:ns[name]}}else if(ns.$dict){try{return{found:true,value:ns.$getitem(ns.$dict,name)}}catch(err){if(ns.$missing){try{return{found:true,value:$B.$call(ns.$missing)(ns.$dict,name)}}catch(err){if(!$B.is_exc(err,[_b_.KeyError])){throw err}}}}}return{found:false}}$B.resolve=function(name){var checked=new Set,current_globals,frame_obj=$B.frame_obj,frame;while(frame_obj!==null){frame=frame_obj.frame;if(current_globals===undefined){current_globals=frame[3]}else if(frame[3]!==current_globals){let v=resolve_in_namespace(name,current_globals);if(v.found){return v.value}checked.add(current_globals);current_globals=frame[3]}let v=resolve_in_namespace(name,frame[1]);if(v.found){return v.value}frame_obj=frame_obj.prev}if(!checked.has(frame[3])){var v=resolve_in_namespace(name,frame[3]);if(v.found){return v.value}}if(builtins_scope.locals.has(name)){return _b_[name]}throw $B.name_error(name)};$B.resolve_local=function(name,position){if($B.frame_obj!==null){var frame=$B.frame_obj.frame;if(frame[1].hasOwnProperty){if(frame[1].hasOwnProperty(name)){return frame[1][name]}}else{var value=frame[1][name];if(value!==undefined){return value}}}var exc=_b_.UnboundLocalError.$factory(`cannot access local variable `+`'${name}' where it is not associated with a value`);if(position&&$B.frame_obj){$B.set_exception_offsets(exc,position)}throw exc};$B.resolve_in_scopes=function(name,namespaces,position){for(var ns of namespaces){if(ns===$B.exec_scope){var 
exec_top,frame_obj=$B.frame_obj,frame;while(frame_obj!==null){frame=frame_obj.frame;if(frame.is_exec_top){exec_top=frame;break}frame_obj=frame_obj.prev}if(exec_top){for(var ns1 of[exec_top[1],exec_top[3]]){let v=resolve_in_namespace(name,ns1);if(v.found){return v.value}}}}else{let v=resolve_in_namespace(name,ns);if(v.found){return v.value}}}var exc=$B.name_error(name);if(position){$B.set_exception_offsets(exc,position)}throw exc};$B.resolve_global=function(name,frame_obj){while(frame_obj!==null){var frame=frame_obj.frame,v=resolve_in_namespace(name,frame[3]);if(v.found){return v.value}if(frame.is_exec_top){break}frame_obj=frame_obj.prev}if(builtins_scope.locals.has(name)){return _b_[name]}throw $B.name_error(name)};$B.own_class_name=function(name){throw $B.name_error(name)};var $operators=$B.op2method.subset("all");var opname2opsign={};for(var key in $operators){opname2opsign[$operators[key]]=key}var opclass2dunder={};for(var op_type of $B.op_types){for(var operator in op_type){opclass2dunder[op_type[operator]]="__"+$operators[operator]+"__"}}opclass2dunder["UAdd"]="__pos__";opclass2dunder["USub"]="__neg__";opclass2dunder["Invert"]="__invert__";var builtins_scope=new Scope("__builtins__");for(var name in $B.builtins){builtins_scope.locals.add(name)}function mark_parents(node){if(node.body&&node.body instanceof Array){for(let child of node.body){child.$parent=node;mark_parents(child)}}else if(node.handlers){var p={$parent:node,type:"except_handler"};for(let child of node.handlers){child.$parent=p;mark_parents(child)}}}function add_body(body,scopes){var res="";let js;for(var item of body){js=$B.js_from_ast(item,scopes);if(js.length>0){res+=js+"\n"}}return res.trimRight()}function extract_docstring(ast_obj,scopes){var js="_b_.None";if(ast_obj.body.length&&ast_obj.body[0]instanceof $B.ast.Expr&&ast_obj.body[0].value instanceof $B.ast.Constant){var value=ast_obj.body[0].value.value;if(typeof value=="string"){js=ast_obj.body[0].value.to_js(scopes);ast_obj.body.shift()}}return js}function init_comprehension(comp,scopes){if(comp.type=="genexpr"){return init_genexpr(comp,scopes)}return`var next_func_${comp.id} = $B.make_js_iterator(expr, frame, ${comp.ast.lineno})\n`}function init_genexpr(comp,scopes){var varnames=Object.keys(comp.varnames||{}).map((x=>`'${x}'`)).join(", ");return`var ${comp.locals_name} = {},\n`+`locals = ${comp.locals_name}\n`+`locals['.0'] = expr\n`+`var frame = ["<${comp.type.toLowerCase()}>", ${comp.locals_name}, `+`"${comp.module_name}", ${comp.globals_name}]\n`+`frame.$has_generators = true\n`+`frame.__file__ = '${scopes.filename}'\n`+`frame.$lineno = ${comp.ast.lineno}\n`+`frame.f_code = {\n`+`co_argcount: 1,\n`+`co_firstlineno:${comp.ast.lineno},\n`+`co_name: "<${comp.type.toLowerCase()}>",\n`+`co_filename: "${scopes.filename}",\n`+`co_flags: ${comp.type=="genexpr"?115:83},\n`+`co_freevars: $B.fast_tuple([]),\n`+`co_kwonlyargcount: 0,\n`+`co_posonlyargount: 0,\n`+`co_qualname: "<${comp.type.toLowerCase()}>",\n`+`co_varnames: $B.fast_tuple(['.0', ${varnames}])\n`+`}\n`+`var next_func_${comp.id} = $B.make_js_iterator(expr, frame, ${comp.ast.lineno})\n`+`frame.$f_trace = _b_.None\n`+`var _frame_obj = $B.frame_obj\n`}function make_comp(scopes){var id=$B.UUID(),type=this.constructor.$name,symtable_block=scopes.symtable.table.blocks.get(fast_id(this)),varnames=symtable_block.varnames.map((x=>`"${x}"`)),comp_iter,comp_scope=$B.last(scopes),upper_comp_scope=comp_scope;while(upper_comp_scope.parent){upper_comp_scope=upper_comp_scope.parent}var 
initial_nb_await_in_scope=upper_comp_scope.nb_await===undefined?0:upper_comp_scope.nb_await;for(var symbol of _b_.dict.$iter_items(symtable_block.symbols)){if(symbol.value&DEF_COMP_ITER){comp_iter=symbol.key}}var comp_iter_scope=name_scope(comp_iter,scopes);var first_for=this.generators[0],outmost_expr=$B.js_from_ast(first_for.iter,scopes),nb_paren=1;var comp={ast:this,id:id,type:type,varnames:varnames,module_name:scopes[0].name,locals_name:make_scope_name(scopes),globals_name:make_scope_name(scopes,scopes[0])};var js=init_comprehension(comp,scopes);if(comp_iter_scope.found){js+=`var save_comp_iter = ${name_reference(comp_iter,scopes)}\n`}if(this instanceof $B.ast.ListComp){js+=`var result_${id} = []\n`}else if(this instanceof $B.ast.SetComp){js+=`var result_${id} = _b_.set.$factory()\n`}else if(this instanceof $B.ast.DictComp){js+=`var result_${id} = $B.empty_dict()\n`}var first=this.generators[0];js+=`try{\n`+`for(var next_${id} of next_func_${id}){\n`;var name=new $B.ast.Name(`next_${id}`,new $B.ast.Load);copy_position(name,first_for.iter);name.to_js=function(){return`next_${id}`};var assign=new $B.ast.Assign([first.target],name);assign.lineno=this.lineno;js+=assign.to_js(scopes)+"\n";for(let _if of first.ifs){nb_paren++;js+=`if($B.$bool(${$B.js_from_ast(_if,scopes)})){\n`}for(var comprehension of this.generators.slice(1)){js+=comprehension.to_js(scopes);nb_paren++;for(let _if of comprehension.ifs){nb_paren++}}if(this instanceof $B.ast.DictComp){var key=$B.js_from_ast(this.key,scopes),value=$B.js_from_ast(this.value,scopes)}else{var elt=$B.js_from_ast(this.elt,scopes)}var final_nb_await_in_scope=upper_comp_scope.nb_await===undefined?0:upper_comp_scope.nb_await;var has_await=final_nb_await_in_scope>initial_nb_await_in_scope;js=`(${has_await?"async ":""}function(expr){\n`+js;js+=has_await?"var save_frame_obj = $B.frame_obj;\n":"";if(this instanceof $B.ast.ListComp){js+=`result_${id}.push(${elt})\n`}else if(this instanceof $B.ast.SetComp){js+=`_b_.set.add(result_${id}, ${elt})\n`}else if(this instanceof $B.ast.DictComp){js+=`_b_.dict.$setitem(result_${id}, ${key}, ${value})\n`}for(var i=0;i")){name="exec"}else{name=filename.replace(/\./g,"_")}var top_scope=new Scope(name,`${type}`,this),block=scopes.symtable.table.blocks.get(fast_id(this));if(block&&block.$has_import_star){top_scope.has_import_star=true}scopes.push(top_scope);var namespaces=scopes.namespaces;if(namespaces){top_scope.is_exec_scope=true;for(let key in namespaces.exec_globals){if(!key.startsWith("$")){top_scope.globals.add(key)}}if(namespaces.exec_locals!==namespaces.exec_globals){for(let key in namespaces.exec_locals){if(!key.startsWith("$")){top_scope.locals.add(key)}}}}return name}function compiler_check(obj){var check_func=Object.getPrototypeOf(obj)._check;if(check_func){obj._check()}}function check_assign_or_delete(obj,target,action){action=action??"assign to";if(target instanceof $B.ast.Attribute){if(target.attr=="__debug__"){compiler_error(obj,`cannot ${action} __debug__`,target)}}else if(target instanceof $B.ast.Name){if(target.id=="__debug__"){compiler_error(obj,`cannot ${action} __debug__`,target)}}else if(target instanceof $B.ast.Tuple){for(var elt of target.elts){check_assign_or_delete(elt,elt,action)}}else if(target instanceof $B.ast.Starred){check_assign_or_delete(obj,target.value,action)}}$B.ast.Assert.prototype.to_js=function(scopes){var test=$B.js_from_ast(this.test,scopes),msg=this.msg?$B.js_from_ast(this.msg,scopes):"";return`if($B.set_lineno(frame, ${this.lineno}) && !$B.$bool(${test})){\n`+`throw 
_b_.AssertionError.$factory(${msg})}\n`};function annotation_to_str(obj,scopes){return get_source_from_position(scopes.src,obj)}$B.ast.AnnAssign.prototype.to_js=function(scopes){compiler_check(this);var postpone_annotation=scopes.symtable.table.future.features&$B.CO_FUTURE_ANNOTATIONS;var scope=last_scope(scopes);var js="";if(!scope.has_annotation){js+="locals.__annotations__ = locals.__annotations__ || $B.empty_dict()\n";scope.has_annotation=true;scope.locals.add("__annotations__")}if(this.target instanceof $B.ast.Name){var ann_value=postpone_annotation?`'${annotation_to_str(this.annotation,scopes)}'`:$B.js_from_ast(this.annotation,scopes)}if(this.value){js+=`var ann = ${$B.js_from_ast(this.value,scopes)}\n`;if(this.target instanceof $B.ast.Name&&this.simple){let scope=bind(this.target.id,scopes),mangled=mangle(scopes,scope,this.target.id);if(scope.type!="def"){js+=`$B.$setitem(locals.__annotations__, `+`'${mangled}', ${ann_value})\n`}let target_ref=name_reference(this.target.id,scopes);js+=`${target_ref} = ann`}else if(this.target instanceof $B.ast.Attribute){js+=`$B.$setattr(${$B.js_from_ast(this.target.value,scopes)}`+`, "${this.target.attr}", ann)`}else if(this.target instanceof $B.ast.Subscript){js+=`$B.$setitem(${$B.js_from_ast(this.target.value,scopes)}`+`, ${$B.js_from_ast(this.target.slice,scopes)}, ann)`}}else{if(this.target instanceof $B.ast.Name){if(this.simple&&scope.type!="def"){let mangled=mangle(scopes,scope,this.target.id);js+=`$B.$setitem(locals.__annotations__, `+`'${mangled}', ${ann_value})`}}}return`$B.set_lineno(frame, ${this.lineno})\n`+js};$B.ast.AnnAssign.prototype._check=function(){check_assign_or_delete(this,this.target)};$B.ast.Assign.prototype.to_js=function(scopes){compiler_check(this);var js=this.lineno?`$B.set_lineno(frame, ${this.lineno})\n`:"",value=$B.js_from_ast(this.value,scopes);function assign_one(target,value){if(target instanceof $B.ast.Name){return $B.js_from_ast(target,scopes)+" = "+value}else if(target instanceof $B.ast.Starred){return assign_one(target.value,value)}else if(target instanceof $B.ast.Subscript){return`$B.$setitem(${$B.js_from_ast(target.value,scopes)}`+`, ${$B.js_from_ast(target.slice,scopes)}, ${value})`}else if(target instanceof $B.ast.Attribute){var attr=mangle(scopes,last_scope(scopes),target.attr);return`$B.$setattr(${$B.js_from_ast(target.value,scopes)}`+`, "${attr}", ${value})`}}function assign_many(target,value){var js="";var nb_targets=target.elts.length,has_starred=false,nb_after_starred;for(var i=0,len=nb_targets;i0){let arg_list=not_starred.map((x=>$B.js_from_ast(x,scopes)));if(start){args+=`[${arg_list.join(", ")}]`}else{args+=`.concat([${arg_list.join(", ")}])`}not_starred=[]}else if(args==""){args="[]"}var starred_arg=$B.js_from_ast(arg.value,scopes);args+=`.concat(_b_.list.$factory(${starred_arg}))`;start=false}else{not_starred.push(arg)}}if(not_starred.length>0){let arg_list=not_starred.map((x=>$B.js_from_ast(x,scopes)));if(start){args+=`[${arg_list.join(", ")}]`;start=false}else{args+=`.concat([${arg_list.join(", ")}])`}}if(args[0]=="."){console.log("bizarre",args)}}if(named_kwargs.length+starred_kwargs.length==0){return{has_starred:has_starred,js:js+`${args}`}}else{var kw=`{${named_kwargs.join(", ")}}`;for(var starred_kwarg of starred_kwargs){kw+=`, ${starred_kwarg}`}kw=`{$kw:[${kw}]}`;if(args.length>0){if(has_starred){kw=`.concat([${kw}])`}else{kw=", "+kw}}return{has_starred:has_starred,js:js+`${args}${kw}`}}}$B.ast.ClassDef.prototype.to_js=function(scopes){var enclosing_scope=bind(this.name,scopes);var 
class_scope=new Scope(this.name,"class",this);var js="",locals_name=make_scope_name(scopes,class_scope),ref=this.name+$B.UUID(),glob=scopes[0].name,globals_name=make_scope_name(scopes,scopes[0]),decorators=[],decorated=false;for(let dec of this.decorator_list){decorated=true;var dec_id="decorator"+$B.UUID();decorators.push(dec_id);js+=`$B.set_lineno(frame, ${dec.lineno})\n`+`var ${dec_id} = ${$B.js_from_ast(dec,scopes)}\n`}js+=`$B.set_lineno(frame, ${this.lineno})\n`;var qualname=this.name;var ix=scopes.length-1;while(ix>=0){if(scopes[ix].parent){ix--}else if(scopes[ix].ast instanceof $B.ast.ClassDef){qualname=scopes[ix].name+"."+qualname;ix--}else{break}}var bases=this.bases.map((x=>$B.js_from_ast(x,scopes)));var has_type_params=this.type_params.length>0;if(has_type_params){js+=`$B.$import('_typing')\n`+`var _typing = $B.imported._typing\n`;var params=[];for(let item of this.type_params){if(item instanceof $B.ast.TypeVar){params.push(`$B.$call(_typing.TypeVar)('${item.name}')`)}else if(item instanceof $B.ast.TypeVarTuple){params.push(`$B.$call($B.$getattr(_typing.Unpack, '__getitem__'))($B.$call(_typing.TypeVarTuple)('${item.name.id}'))`)}else if(item instanceof $B.ast.ParamSpec){params.push(`$B.$call(_typing.ParamSpec)('${item.name.id}')`)}}bases.push(`_typing.Generic.__class_getitem__(_typing.Generic,`+` $B.fast_tuple([${params}]))`);for(let item of this.type_params){var name,param_type=item.constructor.$name;if(param_type=="TypeVar"){name=item.name}else{name=item.name.id}js+=`locals.${name} = $B.$call(_typing.${param_type})('${name}')\n`}}var keywords=[],metaclass;for(var keyword of this.keywords){if(keyword.arg=="metaclass"){metaclass=keyword.value}keywords.push(`["${keyword.arg}", `+$B.js_from_ast(keyword.value,scopes)+"]")}var docstring=extract_docstring(this,scopes);js+=`var ${ref} = (function(name, module, bases){\n`+`var _frame_obj = $B.frame_obj,\n`+`resolved_bases = $B.resolve_mro_entries(bases),\n`+`metaclass = $B.get_metaclass(name, module, `+`resolved_bases`;if(metaclass){js+=`, ${metaclass.to_js(scopes)}`}js+=")\n";js+=`var ${locals_name} = $B.make_class_namespace(metaclass, `+`name, module ,"${qualname}", resolved_bases),\n`;js+=`locals = ${locals_name}\n`+`if(resolved_bases !== bases){\nlocals.__orig_bases__ = bases}\n`+`locals.__doc__ = ${docstring}\n`+`var frame = [name, locals, module, ${globals_name}]\n`+`frame.__file__ = __file__\n`+`frame.$lineno = ${this.lineno}\n`+`frame.$f_trace = $B.enter_frame(frame)\n`+`var _frame_obj = $B.frame_obj\n`+`if(frame.$f_trace !== _b_.None){\n$B.trace_line()}\n`;scopes.push(class_scope);js+=add_body(this.body,scopes);scopes.pop();js+="\n$B.trace_return_and_leave(frame, _b_.None)\n"+`return $B.$class_constructor('${this.name}', locals, metaclass, `+`resolved_bases, bases, [${keywords.join(", ")}])\n`+`})('${this.name}',${globals_name}.__name__ ?? 
'${glob}', $B.fast_tuple([${bases}]))\n`;var class_ref=reference(scopes,enclosing_scope,this.name);if(decorated){class_ref=`decorated${$B.UUID()}`;js+="var "}js+=`${class_ref} = ${ref}\n`;if(decorated){js+=reference(scopes,enclosing_scope,this.name)+" = ";var decorate=class_ref;for(let dec of decorators.reverse()){decorate=`$B.$call(${dec})(${decorate})`}js+=decorate+"\n"}return js};$B.ast.Compare.prototype.to_js=function(scopes){var left=$B.js_from_ast(this.left,scopes),comps=[];var len=this.ops.length,prefix=len>1?"locals.$op = ":"";for(var i=0;i1){left="locals.$op"}}return comps.join(" && ")};$B.ast.comprehension.prototype.to_js=function(scopes){var id=$B.UUID(),iter=$B.js_from_ast(this.iter,scopes);var js=`var next_func_${id} = $B.make_js_iterator(${iter}, frame, ${this.lineno})\n`+`for(var next_${id} of next_func_${id}){\n`;var name=new $B.ast.Name(`next_${id}`,new $B.ast.Load);copy_position(name,this.target);name.to_js=function(){return`next_${id}`};var assign=new $B.ast.Assign([this.target],name);copy_position(assign,this.target);js+=assign.to_js(scopes)+" // assign to target\n";for(var _if of this.ifs){js+=`if($B.$bool(${$B.js_from_ast(_if,scopes)})){\n`}return js};$B.ast.Constant.prototype.to_js=function(){if(this.value===true||this.value===false){return this.value+""}else if(this.value===_b_.None){return"_b_.None"}else if(typeof this.value=="string"){var s=this.value,srg=$B.surrogates(s);if(srg.length==0){return`'${s}'`}return`$B.make_String('${s}', [${srg}])`}else if(this.value.__class__===_b_.bytes){return`_b_.bytes.$factory([${this.value.source}])`}else if(typeof this.value=="number"){if(Number.isInteger(this.value)){return this.value}else{return`({__class__: _b_.float, value: ${this.value}})`}}else if(this.value.__class__===$B.long_int){return`$B.fast_long_int(${this.value.value}n)`}else if(this.value.__class__===_b_.float){return`({__class__: _b_.float, value: ${this.value.value}})`}else if(this.value.__class__===_b_.complex){return`$B.make_complex(${this.value.$real.value}, ${this.value.$imag.value})`}else if(this.value===_b_.Ellipsis){return`_b_.Ellipsis`}else{console.log("invalid value",this.value);throw SyntaxError("bad value",this.value)}};$B.ast.Continue.prototype.to_js=function(scopes){if(!in_loop(scopes)){compiler_error(this,"'continue' not properly in loop")}return"continue"};$B.ast.Delete.prototype.to_js=function(scopes){compiler_check(this);var js="";for(var target of this.targets){if(target instanceof $B.ast.Name){var scope=name_scope(target.id,scopes);if(scope.found){scope.found.locals.delete(target.id)}js+=`$B.$delete("${target.id}")\n`}else if(target instanceof $B.ast.Subscript){js+=`$B.$delitem(${$B.js_from_ast(target.value,scopes)}, `+`${$B.js_from_ast(target.slice,scopes)})\n`}else if(target instanceof $B.ast.Attribute){js+=`_b_.delattr(${$B.js_from_ast(target.value,scopes)}, `+`'${target.attr}')\n`}}return`$B.set_lineno(frame, ${this.lineno})\n`+js};$B.ast.Delete.prototype._check=function(){for(var target of this.targets){check_assign_or_delete(this,target,"delete")}};$B.ast.Dict.prototype.to_js=function(scopes){var items=[],keys=this.keys,has_packed=false;function no_key(i){return keys[i]===_b_.None||keys[i]===undefined}for(let i=0,len=this.keys.length;i0){js+=`\nif(no_break_${id}){\n`+add_body(this.orelse,scopes)+"}\n"}return js};$B.ast.FormattedValue.prototype.to_js=function(scopes){var value=$B.js_from_ast(this.value,scopes);if(this.conversion==114){value=`_b_.repr(${value})`}else if(this.conversion==115){value=`_b_.str.$factory(${value})`}else 
if(this.conversion==97){value=`_b_.ascii(${value})`}if(this.format_spec){value=`_b_.str.format('{0:' + `+$B.js_from_ast(this.format_spec,scopes)+` + '}', ${value})`}else if(this.conversion==-1){value=`_b_.str.$factory(${value})`}return value};function transform_args(scopes){var has_posonlyargs=this.args.posonlyargs.length>0,_defaults=[],nb_defaults=this.args.defaults.length,positional=this.args.posonlyargs.concat(this.args.args),ix=positional.length-nb_defaults,default_names=[],kw_defaults=[],annotations;for(let arg of positional.concat(this.args.kwonlyargs).concat([this.args.vararg,this.args.kwarg])){if(arg&&arg.annotation){annotations=annotations||{};annotations[arg.arg]=arg.annotation}}for(var i=ix;i ${PARAMS_POS_COUNT} ) {\n $B.args0_old(fct, args);\n throw new Error('Too much positional arguments given (args0 should have raised an error) !');\n }\n`;if(hasPosOnly||hasPos){fct+=`\n for( ; offset < ARGS_POS_COUNT ; ++offset)\n result[ PARAMS_NAMES[offset] ] = args[offset];\n`}}if(!hasPos&&!hasNamedOnly&&!hasKWargs){fct+=`\n if( HAS_KW === true ) {\n for(let argname in ARGS_NAMED[0] ) {\n $B.args0_old(fct, args);\n throw new Error('No named arguments expected !!!');\n }\n for(let id = 1; id < ARGS_NAMED.length; ++id ) {\n const kargs = ARGS_NAMED[id];\n for(let argname of $B.make_js_iterator( $B.$getattr(kargs.__class__, "keys")(kargs) ) ) { //TODO: not optimal\n $B.args0_old(fct, args);\n throw new Error('No named arguments expected !!!');\n }\n }\n }\n`}else{fct+=`\n if( HAS_KW === false ) {\n `}if(hasPos||hasPosOnly){if(posOnlyDefaults!==DEFAULTS.ALL&&posDefaults!==DEFAULTS.ALL){fct+=`\n if( offset < ${PARAMS_POS_DEFAULTS_OFFSET} ) {\n $B.args0_old(fct, args);\n throw new Error('Not enough positional arguments given (args0 should have raised an error) !');\n }\n`}if(posOnlyDefaults!==DEFAULTS.NONE||posDefaults!==DEFAULTS.NONE){fct+=`\n for(let i = offset - PARAMS_POS_DEFAULTS_OFFSET;\n i < PARAMS_POS_DEFAULTS_COUNT;\n ++i)\n result[ PARAMS_NAMES[offset++] ] = PARAMS_POS_DEFAULTS[i];`}}if(hasKWargs){fct+=`\n result[$INFOS.kwarg] = __BRYTHON__.empty_dict();`}if(hasNamedOnly&&namedOnlyDefaults!==DEFAULTS.ALL){fct+=`\n $B.args0_old(fct, args);\n throw new Error('Named argument expected (args0 should have raised an error) !');\n`}else if(namedOnlyDefaults!==DEFAULTS.NONE){fct+=`\n const kwargs_defaults_values = fct.$kwdefaults_values;\n for(let i = 0; i < kwargs_defaults_values.length; ++i )\n result[ PARAMS_NAMES[offset++] ] = kwargs_defaults_values[i];\n`}fct+=`\n return result;\n`;if(!hasPos&&!hasNamedOnly&&!hasKWargs){return fct}else{fct+=`\n }\n`}if(namedOnlyDefaults!==DEFAULTS.NONE){fct+=`\n const kwargs_defaults = fct.$kwdefaults;\n`}if(hasPosOnly){fct+=`\n const PARAMS_POSONLY_COUNT = $CODE.co_posonlyargcount;\n if( offset < PARAMS_POSONLY_COUNT ) {\n `;if(posOnlyDefaults!==DEFAULTS.SOME){fct+=`\n if( offset < ${PARAMS_POS_DEFAULTS_OFFSET} ) {\n $B.args0_old(fct, args);\n throw new Error('Not enough positional parameters given (args0 should have raised an error) !');\n }\n`}if(posOnlyDefaults===DEFAULTS.NONE){fct+=`\n $B.args0_old(fct, args);\n throw new Error('Not enough positional parameters given (args0 should have raised an error) !');\n`}fct+=`\n const max = ${PARAMS_POS_DEFAULTS_COUNT} - (${PARAMS_POS_COUNT} - PARAMS_POSONLY_COUNT);\n // default parameters\n for(let i = offset - ${PARAMS_POS_DEFAULTS_OFFSET};\n i < max;\n ++i)\n result[ PARAMS_NAMES[offset++] ] = PARAMS_POS_DEFAULTS[i];\n }\n`}if(hasKWargs){fct+=`\n const extra = {};\n let nb_extra_args = 
0;\n`;if(hasPos||hasNamedOnly){fct+=`\n const HAS_PARAMS = fct.$hasParams;\n`}}fct+=`\n let nb_named_args = 0;\n const kargs = ARGS_NAMED[0];\n for(let argname in kargs) {\n `;if(!hasKWargs){fct+=`\n result[ argname ] = kargs[argname];\n ++nb_named_args;\n`}if(hasKWargs){if(!hasNamedOnly&&!hasPos){fct+=`\n extra[ argname ] = kargs[argname];\n ++nb_extra_args;\n`}else{fct+=`\n if( HAS_PARAMS.has(argname) ) {\n result[ argname ] = kargs[argname];\n ++nb_named_args;\n } else {\n extra[ argname ] = kargs[argname];\n ++nb_extra_args;\n }\n`}}fct+=`\n }\n for(let id = 1; id < ARGS_NAMED.length; ++id ) {\n const kargs = ARGS_NAMED[id];\n for(let argname of $B.make_js_iterator($B.$getattr(kargs.__class__, "keys")(kargs)) ) {\n if( typeof argname !== "string") {\n $B.args0_old(fct, args);\n throw new Error('Non string key passed in **kargs');\n }\n `;if(!hasKWargs){fct+=`\n result[ argname ] = $B.$getitem(kargs, argname);\n ++nb_named_args;\n`}if(hasKWargs){if(!hasNamedOnly&&!hasPos){fct+=`\n extra[ argname ] = $B.$getitem(kargs, argname);\n ++nb_extra_args;\n`}else{fct+=`\n if( HAS_PARAMS.has(argname) ) {\n result[ argname ] = $B.$getitem(kargs, argname);\n ++nb_named_args;\n } else {\n extra[ argname ] = $B.$getitem(kargs, argname);\n ++nb_extra_args;\n }\n`}}fct+=`\n }\n }\n`;fct+=`\n let found = 0;\n let ioffset = offset;\n`;if((hasPosOnly||hasPos)&&(!hasPosOnly||posOnlyDefaults!==DEFAULTS.ALL)&&(!hasPos||posDefaults!==DEFAULTS.ALL)){fct+=`\n for( ; ioffset < ${PARAMS_POS_DEFAULTS_OFFSET}; ++ioffset) {\n const key = PARAMS_NAMES[ioffset];\n if( key in result ) // maybe could be speed up using "!(key in result)"\n continue;\n $B.args0_old(fct, args);\n throw new Error('Missing a named arguments (args0 should have raised an error) !');\n }\n`}if(hasPosOnly&&posOnlyDefaults!==DEFAULTS.NONE||hasPos&&posDefaults!==DEFAULTS.NONE){fct+=`\n for( ; ioffset < PARAMS_POS_COUNT; ++ioffset) {\n const key = PARAMS_NAMES[ioffset];\n if( key in result )\n continue;\n result[key] = PARAMS_POS_DEFAULTS[ioffset - ${PARAMS_POS_DEFAULTS_OFFSET}];\n ++found;\n }\n`}if(hasNamedOnly){fct+=`\n for( ; ioffset < PARAMS_NAMES.length; ++ioffset) {\n const key = PARAMS_NAMES[ioffset];\n if( key in result )\n continue;\n`;if(namedOnlyDefaults===DEFAULTS.SOME){fct+=`\n if( ! 
kwargs_defaults.has(key) ) {\n $B.args0_old(fct, args);\n throw new Error('Missing a named arguments (args0 should have raised an error) !');\n }\n`}if(namedOnlyDefaults===DEFAULTS.NONE){fct+=`\n $B.args0_old(fct, args);\n throw new Error('Missing a named arguments (args0 should have raised an error) !');\n`}if(namedOnlyDefaults!==DEFAULTS.NONE){fct+=`\n result[key] = kwargs_defaults.get(key);\n ++found;\n`}fct+=`\n }\n`}if(hasNamedOnly||hasPos)fct+=`\n if( found + nb_named_args !== PARAMS_NAMES.length - offset) {\n $B.args0_old(fct, args);\n throw new Error('Inexistant or duplicate named arguments (args0 should have raised an error) !');\n }\n`;if(hasKWargs){fct+=`\n if( Object.keys(extra).length !== nb_extra_args ) {\n $B.args0_old(fct, args);\n throw new Error('Duplicate name given to **kargs parameter (args0 should have raised an error) !');\n }\n result[$INFOS.kwarg] = __BRYTHON__.builtins.dict.$from_js(extra);\n`}fct+=`\n return result\n `;return fct}function type_param_in_def(tp,ref,scopes){var gname=scopes[0].name,globals_name=make_scope_name(scopes,scopes[0]);var js="";var name,param_type=tp.constructor.$name;if(param_type=="TypeVar"){name=tp.name}else{name=tp.name.id}bind(name,scopes);if(tp.bound){var typevarscope=new Scope(name,"typevarbound",tp);scopes.push(typevarscope);js+=`function BOUND_OF_${name}(){\n`+`var current_frame = $B.frame_obj.frame,\n`+`frame = ['BOUND_OF_${name}', {}, '${gname}', ${globals_name}]\n`+`frame.$f_trace = $B.enter_frame(frame)\n`+`frame.__file__ = '${scopes.filename}'\n`+`frame.$lineno = ${tp.bound.lineno}\n`+`try{\n`+`var res = ${tp.bound.to_js(scopes)}\n`+`$B.leave_frame()\nreturn res\n`+`}catch(err){\n`+`$B.leave_frame()\n`+`throw err\n}\n}\n`;scopes.pop()}js+=`locals_${ref}.${name} = `+`$B.$call(_typing.${param_type})('${name}')\n`+`type_params.push(locals_${ref}.${name})\n`;if(tp.bound){if(!tp.bound.elts){js+=`_typing.${param_type}._set_lazy_eval(locals_${ref}.${name}, `+`'__bound__', BOUND_OF_${name})\n`}else{js+=`_typing.${param_type}._set_lazy_eval(locals_${ref}.${name}, `+`'__constraints__', BOUND_OF_${name})\n`}}return js}$B.make_args_parser_and_parse=function make_args_parser_and_parse(fct,args){return $B.make_args_parser(fct)(fct,args)};$B.ast.FunctionDef.prototype.to_js=function(scopes){compiler_check(this);var symtable_block=scopes.symtable.table.blocks.get(fast_id(this));var in_class=last_scope(scopes).ast instanceof $B.ast.ClassDef,is_async=this instanceof $B.ast.AsyncFunctionDef;if(in_class){var class_scope=last_scope(scopes)}var func_name_scope=bind(this.name,scopes);var gname=scopes[0].name,globals_name=make_scope_name(scopes,scopes[0]);var decorators=[],decorated=false,decs_declare=this.decorator_list.length>0?"// declare decorators\n":"";for(let dec of this.decorator_list){decorated=true;var dec_id="decorator"+$B.UUID();decorators.push(dec_id);decs_declare+=`$B.set_lineno(frame, ${dec.lineno})\n`;decs_declare+=`var ${dec_id} = ${$B.js_from_ast(dec,scopes)}\n`}var docstring=extract_docstring(this,scopes);var parsed_args=transform_args.bind(this)(scopes),positional=parsed_args.positional,kw_defaults=parsed_args.kw_defaults,kw_default_names=parsed_args.kw_default_names;var defaults=`$B.fast_tuple([${this.args.defaults.map((x=>x.to_js(scopes)))}])`;kw_defaults=kw_default_names.length==0?"_b_.None":`_b_.dict.$from_js({${kw_defaults.join(", ")}})`;var id=$B.UUID(),name2=this.name+id;var has_type_params=this.type_params.length>0,type_params="";if(has_type_params){var tp_name=`type_params_${name2}`;var type_params_scope=new 
Scope(tp_name,"type_params",this.type_params);scopes.push(type_params_scope);var type_params_ref=qualified_scope_name(scopes,type_params_scope);var type_params_func=`function TYPE_PARAMS_OF_${name2}(){\n`;type_params=`$B.$import('_typing')\n`+`var _typing = $B.imported._typing\n`+`var locals_${type_params_ref} = {\n},\n`+`locals = locals_${type_params_ref},\n`+`frame = ['${type_params_ref}', locals, '${gname}', ${globals_name}],\n`+`type_params = []\n`+`frame.$f_trace = $B.enter_frame(frame)\n`+`frame.__file__ = '${scopes.filename}'\n`;for(var item of this.type_params){type_params+=type_param_in_def(item,type_params_ref,scopes)}type_params_func+=type_params}var func_scope=new Scope(this.name,"def",this);scopes.push(func_scope);var args=positional.concat(this.args.kwonlyargs),slots=[],arg_names=[];for(let arg of args){slots.push(arg.arg+": null");bind(arg.arg,scopes)}for(let arg of this.args.posonlyargs){arg_names.push(`'${arg.arg}'`)}for(let arg of this.args.args.concat(this.args.kwonlyargs)){arg_names.push(`'${arg.arg}'`)}if(this.args.vararg){bind(this.args.vararg.arg,scopes)}if(this.args.kwarg){bind(this.args.kwarg.arg,scopes)}var function_body;if(this.$is_lambda){var _return=new $B.ast.Return(this.body);copy_position(_return,this.body);var body=[_return];function_body=add_body(body,scopes)}else{function_body=add_body(this.body,scopes)}var is_generator=symtable_block.generator;var parse_args=[name2];var js=`$B.set_lineno(frame, ${this.lineno})\n`;if(is_async&&!is_generator){js+="async "}js+=`function ${name2}(){\n`;var locals_name=make_scope_name(scopes,func_scope);js+=`var ${locals_name},\n locals\n`;parse_args.push("arguments");var args_vararg=this.args.vararg===undefined?"null":"'"+this.args.vararg.arg+"'",args_kwarg=this.args.kwarg===undefined?"null":"'"+this.args.kwarg.arg+"'";if(positional.length==0&&slots.length==0&&this.args.vararg===undefined&&this.args.kwarg===undefined){js+=`${locals_name} = locals = {};\n`;js+=`if(arguments.length !== 0) ${name2}.$args_parser(${parse_args.join(", ")})\n;`}else{js+=`${locals_name} = locals = ${name2}.$args_parser(${parse_args.join(", ")})\n`}js+=`var frame = ["${this.$is_lambda?"":this.name}", `+`locals, "${gname}", ${globals_name}, ${name2}]\n if(locals.$has_generators){\n frame.$has_generators = true\n }\n frame.__file__ = __file__\n frame.$lineno = ${this.lineno}\n frame.$f_trace = $B.enter_frame(frame)\n`;if(func_scope.needs_stack_length){js+=`var stack_length = $B.count_frames()\n`}if(func_scope.needs_frames||is_async){js+=`var _frame_obj = $B.frame_obj\n`+`_linenums = $B.make_linenums()\n`}if(is_async){js+="frame.$async = true\n"}if(is_generator){js+=`locals.$is_generator = true\n`;if(is_async){js+=`var gen_${id} = $B.async_generator.$factory(async function*(){\n`}else{js+=`var gen_${id} = $B.generator.$factory(function*(){\n`}}js+=`try{\n$B.js_this = this\n`;if(in_class){var ix=scopes.indexOf(class_scope),parent=scopes[ix-1];var scope_ref=make_scope_name(scopes,parent),class_ref=class_scope.name,refs=class_ref.split(".").map((x=>`'${x}'`));bind("__class__",scopes);js+=`locals.__class__ = `+`$B.get_method_class(${name2}, ${scope_ref}, "${class_ref}", [${refs}])\n`}js+=function_body+"\n";if(!this.$is_lambda&&!($B.last(this.body)instanceof $B.ast.Return)){js+="var result = _b_.None\n"+"$B.trace_return_and_leave(frame, result)\n"+"return result\n"}js+=`}catch(err){\n`;if(func_scope.needs_frames){js+=`$B.set_exc_and_trace(frame, err)\n`+`err.$frame_obj = _frame_obj\n`+`_linenums[_linenums.length - 1] = frame.$lineno\n`+`err.$linenums = 
_linenums\n`+`$B.leave_frame()\n`}else{js+=`$B.set_exc_and_leave(frame, err)\n`}js+=`throw err\n }\n }\n`;if(is_generator){js+=`, '${this.name}')\n`+`var _gen_${id} = gen_${id}()\n`+`_gen_${id}.$frame = frame\n`+`$B.leave_frame()\n`+`return _gen_${id}}\n`}scopes.pop();var qualname=in_class?`${func_name_scope.name}.${this.name}`:this.name;var flags=3;if(this.args.vararg){flags|=4}if(this.args.kwarg){flags|=8}if(is_generator){flags|=32}if(is_async){flags|=128}var parameters=[],locals=[],identifiers=_b_.dict.$keys_string(symtable_block.symbols);var free_vars=[];for(var ident of identifiers){var flag=_b_.dict.$getitem_string(symtable_block.symbols,ident),_scope=flag>>SCOPE_OFF&SCOPE_MASK;if(_scope==FREE){free_vars.push(`'${ident}'`)}if(flag&DEF_PARAM){parameters.push(`'${ident}'`)}else if(flag&DEF_LOCAL){locals.push(`'${ident}'`)}}var varnames=parameters.concat(locals);if(in_class){js+=`${name2}.$is_method = true\n`}if(is_async){js+=`${name2}.$is_async = true\n`}js+=`$B.make_function_infos(${name2}, `+`'${gname}', `+`${defaults}, `+`${kw_defaults}, `+`${docstring}, `+`[${arg_names}], `+`${args_vararg}, `+`${args_kwarg},\n`+`${positional.length}, `+`__file__, `+`${this.lineno}, `+`${flags}, `+`[${free_vars}], `+`${this.args.kwonlyargs.length}, `+`'${this.$is_lambda?"":this.name}', `+`${varnames.length}, `+`${this.args.posonlyargs.length}, `+`'${this.$is_lambda?"":qualname}', `+`[${varnames}])\n`;if(is_async&&!is_generator){js+=`${name2} = $B.make_async(${name2})\n`}js+=`${name2}.$args_parser = $B.make_args_parser_and_parse\n`;var mangled=mangle(scopes,func_name_scope,this.name),func_ref=`${make_scope_name(scopes,func_name_scope)}.${mangled}`;if(decorated){func_ref=`decorated${$B.UUID()}`;js+="var "}js+=`${func_ref} = ${name2}\n`;if(this.returns||parsed_args.annotations){var features=scopes.symtable.table.future.features,postponed=features&$B.CO_FUTURE_ANNOTATIONS;if(postponed){var src=scopes.src;if(src===undefined){console.log("no src, filename",scopes)}}var ann_items=[];if(parsed_args.annotations){for(var arg_ann in parsed_args.annotations){var ann_ast=parsed_args.annotations[arg_ann];if(in_class){arg_ann=mangle(scopes,class_scope,arg_ann)}if(postponed){var ann_str=annotation_to_str(ann_ast,scopes);ann_items.push(`['${arg_ann}', '${ann_str}']`)}else{var value=ann_ast.to_js(scopes);ann_items.push(`['${arg_ann}', ${value}]`)}}}if(this.returns){if(postponed){var ann_str=annotation_to_str(this.returns,scopes);ann_items.push(`['return', '${ann_str}']`)}else{ann_items.push(`['return', ${this.returns.to_js(scopes)}]`)}}js+=`${func_ref}.__annotations__ = _b_.dict.$factory([${ann_items.join(", ")}])\n`}else{js+=`${func_ref}.__annotations__ = $B.empty_dict()\n`}if(has_type_params){scopes.pop()}if(decorated&&!has_type_params){js+=`${make_scope_name(scopes,func_name_scope)}.${mangled} = `;let decorate=func_ref;for(let dec of decorators.reverse()){decorate=`$B.$call(${dec})(${decorate})`}js+=decorate}if(has_type_params){type_params_func+="\n"+js+"\n"+`${name2}.__type_params__ = $B.fast_tuple(type_params)\n`+`$B.leave_frame()\n`+`return ${name2}\n}\n`;js=type_params_func;if(decorated){js+=`var ${func_ref} = TYPE_PARAMS_OF_${name2}()\n`+`${make_scope_name(scopes,func_name_scope)}.${mangled} = `;let decorate=func_ref;for(let dec of decorators.reverse()){decorate=`$B.$call(${dec})(${decorate})`}js+=decorate}else{js+=`var locals_${type_params_ref} = TYPE_PARAMS_OF_${name2}()\n`}}js=decs_declare+js;return js};$B.ast.FunctionDef.prototype._check=function(){for(var arg of this.args.args){if(arg instanceof 
$B.ast.arg){if(arg.arg=="__debug__"){compiler_error(arg,"cannot assign to __debug__")}}}for(var arg of this.args.kwonlyargs){if(arg instanceof $B.ast.arg){if(arg.arg=="__debug__"){compiler_error(arg,"cannot assign to __debug__")}}}if(this.args.kwarg&&this.args.kwarg.arg=="__debug__"){compiler_error(this.args.kwarg,"cannot assign to __debug__")}};$B.ast.GeneratorExp.prototype.to_js=function(scopes){var id=$B.UUID(),symtable_block=scopes.symtable.table.blocks.get(fast_id(this)),varnames=symtable_block.varnames.map((x=>`"${x}"`));var first_for=this.generators[0],outmost_expr=$B.js_from_ast(first_for.iter,scopes),nb_paren=1;var comp_scope=new Scope(`genexpr_${id}`,"comprehension",this);scopes.push(comp_scope);var comp={ast:this,id:id,type:"genexpr",varnames:varnames,module_name:scopes[0].name,locals_name:make_scope_name(scopes),globals_name:make_scope_name(scopes,scopes[0])};var head=init_comprehension(comp,scopes);var first=this.generators[0];var js=`$B.enter_frame(frame)\n`+`var next_func_${id} = $B.make_js_iterator(expr, frame, ${this.lineno})\n`+`for(var next_${id} of next_func_${id}){\n`+`frame.$f_trace = $B.enter_frame(frame)\n`;var name=new $B.ast.Name(`next_${id}`,new $B.ast.Load);copy_position(name,first_for.iter);name.to_js=function(){return`next_${id}`};var assign=new $B.ast.Assign([first.target],name);assign.lineno=this.lineno;js+=assign.to_js(scopes)+"\n";for(let _if of first.ifs){nb_paren++;js+=`if($B.$bool(${$B.js_from_ast(_if,scopes)})){\n`}for(var comprehension of this.generators.slice(1)){js+=comprehension.to_js(scopes);nb_paren++;for(let _if of comprehension.ifs){nb_paren++}}var elt=$B.js_from_ast(this.elt,scopes),has_await=comp_scope.has_await;js=`var gen${id} = $B.generator.$factory(${has_await?"async ":""}function*(expr){\n`+js;js+=has_await?"var save_frame_obj = $B.frame_obj;\n":"";js+=`try{\n`+` yield ${elt}\n`+`}catch(err){\n`+(has_await?"$B.restore_frame_obj(save_frame_obj, locals)\n":"")+`$B.leave_frame()\nthrow err\n}\n`+(has_await?"\n$B.restore_frame_obj(save_frame_obj, locals);":"");for(var i=0;i")(expr)\n';scopes.pop();var func=`${head}\n${js}\nreturn gen${id}`;return`(function(expr){\n${func}\n})(${outmost_expr})\n`};$B.ast.Global.prototype.to_js=function(scopes){var scope=last_scope(scopes);for(var name of this.names){scope.globals.add(name)}return""};$B.ast.If.prototype.to_js=function(scopes){var scope=$B.last(scopes),new_scope=copy_scope(scope,this);var js=`if($B.set_lineno(frame, ${this.lineno}) && `;if(this.test instanceof $B.ast.BoolOp){this.test.$dont_evaluate=true;js+=`${$B.js_from_ast(this.test,scopes)}){\n`}else{js+=`$B.$bool(${$B.js_from_ast(this.test,scopes)})){\n`}scopes.push(new_scope);js+=add_body(this.body,scopes)+"\n}";scopes.pop();if(this.orelse.length>0){if(this.orelse[0]instanceof $B.ast.If&&this.orelse.length==1){js+="else "+$B.js_from_ast(this.orelse[0],scopes)+add_body(this.orelse.slice(1),scopes)}else{js+="\nelse{\n"+add_body(this.orelse,scopes)+"\n}"}}return js};$B.ast.IfExp.prototype.to_js=function(scopes){return"($B.$bool("+$B.js_from_ast(this.test,scopes)+") ? 
"+$B.js_from_ast(this.body,scopes)+": "+$B.js_from_ast(this.orelse,scopes)+")"};$B.ast.Import.prototype.to_js=function(scopes){var js=`$B.set_lineno(frame, ${this.lineno})\n`;for(var alias of this.names){js+=`$B.$import("${alias.name}", [], `;if(alias.asname){js+=`{'${alias.name}' : '${alias.asname}'}, `;bind(alias.asname,scopes)}else{js+="{}, ";bind(alias.name,scopes)}var parts=alias.name.split(".");for(var i=0;i`"${x.name}"`)).join(", "),aliases=[];for(var name of this.names){if(name.asname){aliases.push(`${name.name}: '${name.asname}'`)}}js+=`[${names}], {${aliases.join(", ")}}, ${this.level}, locals);`;for(var alias of this.names){if(alias.asname){bind(alias.asname,scopes)}else if(alias.name=="*"){last_scope(scopes).blurred=true;js+=`\n$B.import_all(locals, module)`}else{bind(alias.name,scopes)}}return js};$B.ast.Interactive.prototype.to_js=function(scopes){mark_parents(this);var name=init_scopes.bind(this)("module",scopes);var module_id=name,global_name=make_scope_name(scopes),mod_name=module_name(scopes);var js=`// Javascript code generated from ast\n`+`var $B = __BRYTHON__,\n_b_ = $B.builtins,\n`;js+=`${global_name} = {}, // $B.imported["${mod_name}"],\n`+`locals = ${global_name},\n`+`frame = ["${module_id}", locals, "${module_id}", locals]`;js+=`\nvar __file__ = frame.__file__ = '${scopes.filename||""}'\n`+`locals.__name__ = '${name}'\n`+`locals.__doc__ = ${extract_docstring(this,scopes)}\n`;if(!scopes.imported){js+=`locals.__annotations__ = locals.__annotations__ || $B.empty_dict()\n`}js+=`frame.$f_trace = $B.enter_frame(frame)\n`;js+=`$B.set_lineno(frame, 1)\n`+"\nvar _frame_obj = $B.frame_obj\n";js+="var stack_length = $B.count_frames()\n";js+=`try{\n`+add_body(this.body,scopes)+"\n"+`$B.leave_frame({locals, value: _b_.None})\n`+`}catch(err){\n`+`$B.set_exc_and_trace(frame, err)\n`+`$B.leave_frame({locals, value: _b_.None})\n`+"throw err\n"+`}`;scopes.pop();console.log("Interactive",js);return js};$B.ast.JoinedStr.prototype.to_js=function(scopes){var items=this.values.map((s=>$B.js_from_ast(s,scopes)));if(items.length==0){return"''"}return items.join(" + ")};$B.ast.Lambda.prototype.to_js=function(scopes){var id=$B.UUID(),name="lambda_"+$B.lambda_magic+"_"+id;var f=new $B.ast.FunctionDef(name,this.args,this.body,[]);f.lineno=this.lineno;f.$id=fast_id(this);f.$is_lambda=true;var js=f.to_js(scopes),lambda_ref=reference(scopes,last_scope(scopes),name);return`(function(){ ${js}\n`+`return ${lambda_ref}\n})()`};function list_or_tuple_to_js(func,scopes){if(this.elts.filter((x=>x instanceof $B.ast.Starred)).length>0){var parts=[],simple=[];for(var elt of this.elts){if(elt instanceof $B.ast.Starred){elt.$handled=true;parts.push(`[${simple.join(", ")}]`);simple=[];parts.push(`_b_.list.$factory(${$B.js_from_ast(elt,scopes)})`)}else{simple.push($B.js_from_ast(elt,scopes))}}if(simple.length>0){parts.push(`[${simple.join(", ")}]`)}var js=parts[0];for(var part of parts.slice(1)){js+=`.concat(${part})`}return`${func}(${js})`}var elts=this.elts.map((x=>$B.js_from_ast(x,scopes)));return`${func}([${elts.join(", ")}])`}$B.ast.List.prototype.to_js=function(scopes){return list_or_tuple_to_js.bind(this)("$B.$list",scopes)};$B.ast.ListComp.prototype.to_js=function(scopes){compiler_check(this);return make_comp.bind(this)(scopes)};$B.ast.match_case.prototype.to_js=function(scopes){var js=`($B.set_lineno(frame, ${this.lineno}) && `+`$B.pattern_match(subject, {`+`${$B.js_from_ast(this.pattern,scopes)}})`;if(this.guard){js+=` && 
$B.$bool(${$B.js_from_ast(this.guard,scopes)})`}js+=`){\n`;js+=add_body(this.body,scopes)+"\n}";return js};function is_irrefutable(pattern){switch(pattern.constructor){case $B.ast.MatchAs:if(pattern.pattern===undefined){return pattern}else{return is_irrefutable(pattern.pattern)}case $B.ast.MatchOr:for(var i=0;i-1){compiler_error(this,`multiple assignment to name '${name}' in pattern`)}scope.bindings.push(name)}return params};$B.ast.MatchClass.prototype.to_js=function(scopes){var names=[];for(let pattern of this.patterns.concat(this.kwd_patterns)){let name=pattern.name;if(name){if(names.indexOf(name)>-1){compiler_error(pattern,`multiple assignment to name '${name}' in pattern`)}names.push(name)}}names=[];for(let i=0;i-1){compiler_error(this.kwd_patterns[i],`attribute name repeated in class pattern: ${kwd_attr}`)}names.push(kwd_attr)}var cls=$B.js_from_ast(this.cls,scopes),patterns=this.patterns.map((x=>`{${$B.js_from_ast(x,scopes)}}`));var kw=[];for(let i=0,len=this.kwd_patterns.length;i-1){compiler_error(pattern,`multiple assignments to name '${pattern.name}' in pattern`)}names.push(pattern.name)}}var items=[];for(let i=0,len=this.keys.length;i-1){compiler_error(pattern,`multiple assignments to name '${pattern.name}' in pattern`)}names.push(pattern.name)}items.push("{"+$B.js_from_ast(pattern,scopes)+"}")}return`sequence: [${items.join(", ")}]`};$B.ast.MatchSingleton.prototype.to_js=function(){var value=this.value===true?"_b_.True":this.value===false?"_b_.False":"_b_.None";return`literal: ${value}`};$B.ast.MatchStar.prototype.to_js=function(){var name=this.name===undefined?"_":this.name;return`capture_starred: '${name}'`};$B.ast.MatchValue.prototype.to_js=function(scopes){if(this.value instanceof $B.ast.Constant){return`literal: ${$B.js_from_ast(this.value,scopes)}`}else if(this.value instanceof $B.ast.Constant||this.value instanceof $B.ast.UnaryOp||this.value instanceof $B.ast.BinOp||this.value instanceof $B.ast.Attribute){return`value: ${$B.js_from_ast(this.value,scopes)}`}else{compiler_error(this,"patterns may only match literals and attribute lookups")}};$B.ast.Module.prototype.to_js=function(scopes){mark_parents(this);var name=init_scopes.bind(this)("module",scopes),namespaces=scopes.namespaces;var module_id=name,global_name=make_scope_name(scopes),mod_name=module_name(scopes);var js=`// Javascript code generated from ast\n`+`var $B = __BRYTHON__,\n_b_ = $B.builtins,\n`;if(!namespaces){js+=`${global_name} = $B.imported["${mod_name}"],\n`+`locals = ${global_name},\n`+`frame = ["${module_id}", locals, "${module_id}", locals]`}else{js+=`locals = ${namespaces.local_name},\n`+`globals = ${namespaces.global_name}`;if(name){let local_name=("locals_"+name).replace(/\./g,"_");js+=`,\n${local_name} = locals`}}js+=`\nvar __file__ = frame.__file__ = '${scopes.filename||""}'\n`+`locals.__name__ = '${name}'\n`+`locals.__doc__ = ${extract_docstring(this,scopes)}\n`;if(!scopes.imported){js+=`locals.__annotations__ = locals.__annotations__ || $B.empty_dict()\n`}if(!namespaces){js+=`frame.$f_trace = $B.enter_frame(frame)\n`;js+=`$B.set_lineno(frame, 1)\n`+"\nvar _frame_obj = $B.frame_obj\n"}js+="var stack_length = $B.count_frames()\n";js+=`try{\n`+add_body(this.body,scopes)+"\n"+`$B.leave_frame({locals, value: _b_.None})\n`+`}catch(err){\n`+`$B.set_exc_and_trace(frame, err)\n`+`$B.leave_frame({locals, value: _b_.None})\n`+"throw err\n"+`}`;scopes.pop();return js};$B.ast.Name.prototype.to_js=function(scopes){if(this.ctx instanceof $B.ast.Store){var 
scope=bind(this.id,scopes);if(scope===$B.last(scopes)&&scope.freevars.has(this.id)){scope.freevars.delete(this.id)}return reference(scopes,scope,this.id)}else if(this.ctx instanceof $B.ast.Load){var res=name_reference(this.id,scopes,[this.col_offset,this.col_offset,this.end_col_offset]);if(this.id=="__debugger__"&&res.startsWith("$B.resolve_in_scopes")){return"debugger"}return res}};$B.ast.NamedExpr.prototype.to_js=function(scopes){compiler_check(this);var i=scopes.length-1;while(scopes[i].type=="comprehension"){i--}var enclosing_scopes=scopes.slice(0,i+1);enclosing_scopes.symtable=scopes.symtable;bind(this.target.id,enclosing_scopes);return"("+$B.js_from_ast(this.target,enclosing_scopes)+" = "+$B.js_from_ast(this.value,scopes)+")"};$B.ast.NamedExpr.prototype._check=function(){check_assign_or_delete(this,this.target)};$B.ast.Nonlocal.prototype.to_js=function(scopes){var scope=$B.last(scopes);for(var name of this.names){scope.nonlocals.add(name)}return""};$B.ast.Pass.prototype.to_js=function(){return`$B.set_lineno(frame, ${this.lineno})\n`+"void(0)"};$B.ast.Raise.prototype.to_js=function(scopes){var js=`$B.set_lineno(frame, ${this.lineno})\n`+"$B.$raise(";if(this.exc){js+=$B.js_from_ast(this.exc,scopes)}if(this.cause){js+=", "+$B.js_from_ast(this.cause,scopes)}return js+")"};$B.ast.Return.prototype.to_js=function(scopes){if(last_scope(scopes).type!="def"){compiler_error(this,"'return' outside function")}compiler_check(this);var js=`$B.set_lineno(frame, ${this.lineno})\n`+"var result = "+(this.value?$B.js_from_ast(this.value,scopes):" _b_.None")+"\n"+`$B.trace_return_and_leave(frame, result)\nreturn result\n`;return js};$B.ast.Set.prototype.to_js=function(scopes){var elts=[];for(var elt of this.elts){var js;if(elt instanceof $B.ast.Constant){js=`{constant: [${$B.js_from_ast(elt,scopes)}, `+`${$B.$hash(elt.value)}]}`}else if(elt instanceof $B.ast.Starred){js=`{starred: ${$B.js_from_ast(elt.value,scopes)}}`}else{js=`{item: ${$B.js_from_ast(elt,scopes)}}`}elts.push(js)}return`_b_.set.$literal([${elts.join(", ")}])`};$B.ast.SetComp.prototype.to_js=function(scopes){return make_comp.bind(this)(scopes)};$B.ast.Slice.prototype.to_js=function(scopes){var lower=this.lower?$B.js_from_ast(this.lower,scopes):"_b_.None",upper=this.upper?$B.js_from_ast(this.upper,scopes):"_b_.None",step=this.step?$B.js_from_ast(this.step,scopes):"_b_.None";return`_b_.slice.$fast_slice(${lower}, ${upper}, ${step})`};$B.ast.Starred.prototype.to_js=function(scopes){if(this.$handled){return`_b_.list.$unpack(${$B.js_from_ast(this.value,scopes)})`}if(this.ctx instanceof $B.ast.Store){compiler_error(this,"starred assignment target must be in a list or tuple")}else{compiler_error(this,"can't use starred expression here")}};$B.ast.Subscript.prototype.to_js=function(scopes){var value=$B.js_from_ast(this.value,scopes),slice=$B.js_from_ast(this.slice,scopes);if(this.slice instanceof $B.ast.Slice){return`$B.getitem_slice(${value}, ${slice})`}else{var position=encode_position(this.value.col_offset,this.slice.col_offset,this.slice.end_col_offset);return`$B.$getitem(${value}, ${slice},${position})`}};$B.ast.Try.prototype.to_js=function(scopes){compiler_check(this);var id=$B.UUID(),has_except_handlers=this.handlers.length>0,has_else=this.orelse.length>0,has_finally=this.finalbody.length>0;var js=`$B.set_lineno(frame, ${this.lineno})\ntry{\n`;js+=`var stack_length_${id} = $B.count_frames()\n`;if(has_finally){js+=`var save_frame_obj_${id} = $B.frames_obj\n`}if(has_else){js+=`var failed${id} = false\n`}var 
try_scope=copy_scope($B.last(scopes));scopes.push(try_scope);js+=add_body(this.body,scopes)+"\n";if(has_except_handlers){var err="err"+id;js+="}\n";js+=`catch(${err}){\n`+`$B.set_exc_and_trace(frame, ${err})\n`;if(has_else){js+=`failed${id} = true\n`}var first=true,has_untyped_except=false;for(var handler of this.handlers){if(first){js+="if";first=false}else{js+="}else if"}js+=`($B.set_lineno(frame, ${handler.lineno})`;if(handler.type){js+=` && $B.is_exc(${err}, `;if(handler.type instanceof $B.ast.Tuple){js+=`${$B.js_from_ast(handler.type,scopes)}`}else{js+=`[${$B.js_from_ast(handler.type,scopes)}]`}js+=`)){\n`}else{has_untyped_except=true;js+="){\n"}if(handler.name){bind(handler.name,scopes);var mangled=mangle(scopes,try_scope,handler.name);js+=`locals.${mangled} = ${err}\n`}js+=add_body(handler.body,scopes)+"\n";if(!($B.last(handler.body)instanceof $B.ast.Return)){js+="$B.del_exc(frame)\n"}}if(!has_untyped_except){js+=`}else{\nthrow ${err}\n`}js+="}\n"}if(has_else||has_finally){js+="}\n";js+="finally{\n";var finalbody=`var exit = false\n`+`if($B.count_frames() < stack_length_${id}){\n`+`exit = true\n`+`$B.frame_obj = $B.push_frame(frame)\n`+`}\n`+add_body(this.finalbody,scopes);if(this.finalbody.length>0&&!($B.last(this.finalbody)instanceof $B.ast.Return)){finalbody+=`\nif(exit){\n`+`$B.leave_frame()\n`+`}`}var elsebody=`if($B.count_frames() == stack_length_${id} `+`&& ! failed${id}){\n`+add_body(this.orelse,scopes)+"\n}";if(has_else&&has_finally){js+=`try{\n`+elsebody+"\n}\n"+`finally{\n`+finalbody+"}\n"}else if(has_else&&!has_finally){js+=elsebody}else{js+=finalbody}js+="\n}\n"}else{js+="}\n"}scopes.pop();return js};$B.ast.TryStar.prototype.to_js=function(scopes){var id=$B.UUID(),has_except_handlers=this.handlers.length>0,has_else=this.orelse.length>0,has_finally=this.finalbody.length>0;var js=`$B.set_lineno(frame, ${this.lineno})\ntry{\n`;js+=`var stack_length_${id} = $B.count_frames()\n`;if(has_finally){js+=`var save_frame_obj_${id} = $B.frame_obj\n`}if(has_else){js+=`var failed${id} = false\n`}var try_scope=copy_scope($B.last(scopes));scopes.push(try_scope);js+=add_body(this.body,scopes)+"\n";if(has_except_handlers){var err="err"+id;js+="}\n";js+=`catch(${err}){\n`+`$B.set_exc_and_trace(frame, ${err})\n`+`if(! $B.$isinstance(${err}, _b_.BaseExceptionGroup)){\n`+`${err} = _b_.BaseExceptionGroup.$factory(_b_.None, [${err}])\n`+"}\n"+`function fake_split(exc, condition){\n`+`return condition(exc) ? 
`+`$B.fast_tuple([exc, _b_.None]) : $B.fast_tuple([_b_.None, exc])\n`+"}\n";if(has_else){js+=`failed${id} = true\n`}for(var handler of this.handlers){js+=`$B.set_lineno(frame, ${handler.lineno})\n`;if(handler.type){js+="var condition = function(exc){\n"+" return $B.$isinstance(exc, "+`${$B.js_from_ast(handler.type,scopes)})\n`+"}\n"+`var klass = $B.get_class(${err}),\n`+`split_method = $B.$getattr(klass, 'split'),\n`+`split = $B.$call(split_method)(${err}, condition),\n`+" matching = split[0],\n"+" rest = split[1]\n"+"if(matching.exceptions !== _b_.None){\n"+" for(var err of matching.exceptions){\n";if(handler.name){bind(handler.name,scopes);var mangled=mangle(scopes,try_scope,handler.name);js+=`locals.${mangled} = ${err}\n`}js+=add_body(handler.body,scopes)+"\n";if(!($B.last(handler.body)instanceof $B.ast.Return)){js+="$B.del_exc(frame)\n"}js+="}\n";js+="}\n";js+=`${err} = rest\n`}}js+=`if(${err}.exceptions !== _b_.None){\n`+`throw ${err}\n`+"}\n"}if(has_else||has_finally){js+="}\n";js+="finally{\n";var finalbody=`var exit = false\n`+`if($B.count_frames() < stack_length_${id}){\n`+`exit = true\n`+`$B.frame_obj = $B.push_frame(frame)\n`+`}\n`+add_body(this.finalbody,scopes);if(this.finalbody.length>0&&!($B.last(this.finalbody)instanceof $B.ast.Return)){finalbody+=`\nif(exit){\n`+`$B.leave_frame(locals)\n`+`}`}var elsebody=`if($B.count_frames() == stack_length_${id} `+`&& ! failed${id}){\n`+add_body(this.orelse,scopes)+"\n}";if(has_else&&has_finally){js+=`try{\n`+elsebody+"\n}\n"+`finally{\n`+finalbody+"}\n"}else if(has_else&&!has_finally){js+=elsebody}else{js+=finalbody}js+="\n}\n"}else{js+="}\n"}scopes.pop();return js};$B.ast.Tuple.prototype.to_js=function(scopes){return list_or_tuple_to_js.bind(this)("$B.fast_tuple",scopes)};$B.ast.TypeAlias.prototype.to_js=function(scopes){var type_param_scope=new Scope("type_params","type_params",this.type_params);scopes.push(type_param_scope);var type_alias_scope=new Scope("type_alias","type_alias",this);scopes.push(type_alias_scope);var type_params_names=[];for(var type_param of this.type_params){if(type_param instanceof $B.ast.TypeVar){type_params_names.push(type_param.name)}else if(type_param instanceof $B.ast.TypeVarTuple||type_param instanceof $B.ast.ParamSpec){type_params_names.push(type_param.name.id)}}for(var name of type_params_names){bind(name,scopes)}var qualified_name=qualified_scope_name(scopes,type_alias_scope);var value=this.value.to_js(scopes);scopes.pop();scopes.pop();var js=`$B.$import('_typing')\n`;js+=`var locals_${qualified_scope_name(scopes,type_param_scope)} = {}\n`;js+=`function TYPE_PARAMS_OF_${this.name.id}(){\n`+`var locals_${qualified_name} = {},\n`+` locals = locals_${qualified_name}, \n`+` type_params = $B.fast_tuple([])\n`;for(var i=0,len=this.type_params.length;i0){js+=`\nif(no_break_${id}){\n`+add_body(this.orelse,scopes)+"}\n"}return js};$B.ast.With.prototype.to_js=function(scopes){function add_item(item,js){var id=$B.UUID();var s=`var mgr_${id} = `+$B.js_from_ast(item.context_expr,scopes)+",\n"+`klass = $B.get_class(mgr_${id})\n`+`try{\n`+`var exit_${id} = $B.$getattr(klass, '__exit__'),\n`+`enter_${id} = $B.$getattr(klass, '__enter__')\n`+`}catch(err){\n`+`var klass_name = $B.class_name(mgr_${id})\n`+`throw _b_.TypeError.$factory("'" + klass_name + `+`"' object does not support the con`+`text manager protocol")\n`+`}\n`+`var value_${id} = $B.$call(enter_${id})(mgr_${id}),\n`+`exc_${id} = true\n`;if(in_generator){s+=`locals.$context_managers = locals.$context_managers || 
[]\n`+`locals.$context_managers.push(mgr_${id})\n`}s+="try{\ntry{\n";if(item.optional_vars){var value={to_js:function(){return`value_${id}`}};copy_position(value,_with);var assign=new $B.ast.Assign([item.optional_vars],value);copy_position(assign,_with);s+=assign.to_js(scopes)+"\n"}s+=js;s+=`}catch(err_${id}){\n`+`frame.$lineno = ${lineno}\n`+`exc_${id} = false\n`+`err_${id} = $B.exception(err_${id}, frame)\n`+`var $b = exit_${id}(mgr_${id}, err_${id}.__class__, `+`err_${id}, $B.$getattr(err_${id}, '__traceback__'))\n`+`if(! $B.$bool($b)){\n`+`throw err_${id}\n`+`}\n`+`}\n`;s+=`}\nfinally{\n`+`frame.$lineno = ${lineno}\n`+(in_generator?`locals.$context_managers.pop()\n`:"")+`if(exc_${id}){\n`+`try{\n`+`exit_${id}(mgr_${id}, _b_.None, _b_.None, _b_.None)\n`+`}catch(err){\n`+`if($B.count_frames() < stack_length){\n`+`$B.frame_obj = $B.push_frame(frame)\n`+`}\n`+`throw err\n`+`}\n`+`}\n`+`}\n`;return s}var _with=this,scope=last_scope(scopes),lineno=this.lineno;scope.needs_stack_length=true;var js=add_body(this.body,scopes)+"\n";var in_generator=scopes.symtable.table.blocks.get(fast_id(scope.ast)).generator;for(var item of this.items.slice().reverse()){js=add_item(item,js)}return`$B.set_lineno(frame, ${this.lineno})\n`+js};$B.ast.Yield.prototype.to_js=function(scopes){var scope=last_scope(scopes);if(scope.type!="def"){compiler_error(this,"'yield' outside function")}last_scope(scopes).is_generator=true;var value=this.value?$B.js_from_ast(this.value,scopes):"_b_.None";return`yield ${value}`};$B.ast.YieldFrom.prototype.to_js=function(scopes){var scope=last_scope(scopes);if(scope.type!="def"){compiler_error(this,"'yield' outside function")}scope.is_generator=true;var value=$B.js_from_ast(this.value,scopes);var n=$B.UUID();return`yield* (function* f(){\n var _i${n} = _b_.iter(${value}),\n _r${n}\n var failed${n} = false\n try{\n var _y${n} = _b_.next(_i${n})\n }catch(_e){\n $B.set_exc(_e, frame)\n failed${n} = true\n $B.pmframe = $B.frame_obj.frame\n _e = $B.exception(_e)\n if(_e.__class__ === _b_.StopIteration){\n var _r${n} = $B.$getattr(_e, "value")\n }else{\n throw _e\n }\n }\n if(! failed${n}){\n while(true){\n var failed1${n} = false\n try{\n $B.leave_frame()\n var _s${n} = yield _y${n}\n $B.frame_obj = $B.push_frame(frame)\n }catch(_e){\n $B.set_exc(_e, frame)\n if(_e.__class__ === _b_.GeneratorExit){\n var failed2${n} = false\n try{\n var _m${n} = $B.$getattr(_i${n}, "close")\n }catch(_e1){\n failed2${n} = true\n if(_e1.__class__ !== _b_.AttributeError){\n throw _e1\n }\n }\n if(! failed2${n}){\n $B.$call(_m${n})()\n }\n throw _e\n }else if($B.is_exc(_e, [_b_.BaseException])){\n var sys_module = $B.imported._sys,\n _x${n} = sys_module.exc_info()\n var failed3${n} = false\n try{\n var _m${n} = $B.$getattr(_i${n}, "throw")\n }catch(err){\n failed3${n} = true\n if($B.is_exc(err, [_b_.AttributeError])){\n throw err\n }\n }\n if(! failed3${n}){\n try{\n _y${n} = $B.$call(_m${n}).apply(null,\n _b_.list.$factory(_x${n}))\n }catch(err){\n if($B.is_exc(err, [_b_.StopIteration])){\n _r${n} = $B.$getattr(err, "value")\n break\n }\n throw err\n }\n }\n }\n }\n if(! 
failed1${n}){\n try{\n if(_s${n} === _b_.None){\n _y${n} = _b_.next(_i${n})\n }else{\n _y${n} = $B.$call($B.$getattr(_i${n}, "send"))(_s${n})\n }\n }catch(err){\n if($B.is_exc(err, [_b_.StopIteration])){\n _r${n} = $B.$getattr(err, "value")\n break\n }\n throw err\n }\n }\n }\n }\n return _r${n}\n })()`};var state={};$B.js_from_root=function(arg){var ast_root=arg.ast,symtable=arg.symtable,filename=arg.filename,src=arg.src,namespaces=arg.namespaces,imported=arg.imported;if($B.show_ast_dump){console.log($B.ast_dump(ast_root))}if($B.compiler_check){$B.compiler_check(ast_root,symtable)}var scopes=[];state.filename=filename;scopes.symtable=symtable;scopes.filename=filename;scopes.src=src;scopes.namespaces=namespaces;scopes.imported=imported;scopes.imports={};var js=ast_root.to_js(scopes);return{js:js,imports:scopes.imports}};$B.js_from_ast=function(ast,scopes){if(!scopes.symtable){throw Error("perdu symtable")}scopes=scopes||[];if(ast.to_js!==undefined){if(ast.col_offset===undefined){var klass=ast.constructor.$name;if(["match_case"].indexOf(klass)==-1){console.log("no col_offset for",klass);console.log(ast);throw Error("no col offset")}}return ast.to_js(scopes)}console.log("unhandled",ast.constructor.$name,ast,typeof ast);return"// unhandled class ast."+ast.constructor.$name}})(__BRYTHON__);(function($B){var _b_=$B.builtins;var GLOBAL_PARAM="name '%s' is parameter and global",NONLOCAL_PARAM="name '%s' is parameter and nonlocal",GLOBAL_AFTER_ASSIGN="name '%s' is assigned to before global declaration",NONLOCAL_AFTER_ASSIGN="name '%s' is assigned to before nonlocal declaration",GLOBAL_AFTER_USE="name '%s' is used prior to global declaration",NONLOCAL_AFTER_USE="name '%s' is used prior to nonlocal declaration",GLOBAL_ANNOT="annotated name '%s' can't be global",NONLOCAL_ANNOT="annotated name '%s' can't be nonlocal",IMPORT_STAR_WARNING="import * only allowed at module level",NAMED_EXPR_COMP_IN_CLASS="assignment expression within a comprehension cannot be used in a class body",NAMED_EXPR_COMP_CONFLICT="assignment expression cannot rebind comprehension iteration variable '%s'",NAMED_EXPR_COMP_INNER_LOOP_CONFLICT="comprehension inner loop cannot rebind assignment expression target '%s'",NAMED_EXPR_COMP_ITER_EXPR="assignment expression cannot be used in a comprehension iterable expression",ANNOTATION_NOT_ALLOWED="'%s' can not be used within an annotation",DUPLICATE_ARGUMENT="duplicate argument '%s' in function definition",TYPEVAR_BOUND_NOT_ALLOWED="%s cannot be used within a TypeVar bound",TYPEALIAS_NOT_ALLOWED="%s cannot be used within a type alias",TYPEPARAM_NOT_ALLOWED="%s cannot be used within the definition of a generic",DUPLICATE_TYPE_PARAM="duplicate type parameter '%s'";var DEF_GLOBAL=1,DEF_LOCAL=2,DEF_PARAM=2<<1,DEF_NONLOCAL=2<<2,USE=2<<3,DEF_FREE=2<<4,DEF_FREE_CLASS=2<<5,DEF_IMPORT=2<<6,DEF_ANNOT=2<<7,DEF_COMP_ITER=2<<8,DEF_TYPE_PARAM=2<<9,DEF_COMP_CELL=2<<10;var DEF_BOUND=DEF_LOCAL|DEF_PARAM|DEF_IMPORT;var SCOPE_OFFSET=12,SCOPE_MASK=DEF_GLOBAL|DEF_LOCAL|DEF_PARAM|DEF_NONLOCAL;var LOCAL=1,GLOBAL_EXPLICIT=2,GLOBAL_IMPLICIT=3,FREE=4,CELL=5;var TYPE_MODULE=2;var NULL=undefined;var ModuleBlock=2,ClassBlock=1,FunctionBlock=0,AnnotationBlock=4,TypeVarBoundBlock=5,TypeAliasBlock=6,TypeParamBlock=7;var PyExc_SyntaxError=_b_.SyntaxError;function assert(test){if(!$B.$bool(test)){console.log("test fails",test);throw Error("test fails")}}function LOCATION(x){return[x.lineno,x.col_offset,x.end_lineno,x.end_col_offset]}function 
ST_LOCATION(x){return[x.lineno,x.col_offset,x.end_lineno,x.end_col_offset]}function _Py_Mangle(privateobj,ident){var plen,ipriv;if(privateobj==NULL||!ident.startsWith("__")){return ident}plen=privateobj.length;if(ident.endsWith("__")||ident.search(/\./)!=-1){return ident}ipriv=0;while(privateobj[ipriv]=="_"){ipriv++}if(ipriv==plen){return ident}var prefix=privateobj.substr(ipriv);return"_"+prefix+ident}var lambda=NULL;var NoComprehension=0,ListComprehension=1,DictComprehension=2,SetComprehension=3,GeneratorExpression=4;function GET_IDENTIFIER(VAR){return VAR}function Symtable(){this.filename=NULL;this.stack=[];this.blocks=new Map;this.cur=NULL;this.private=NULL}function id(obj){if(obj.$id!==undefined){return obj.$id}return obj.$id=$B.UUID()}function ste_new(st,name,block,key,lineno,col_offset,end_lineno,end_col_offset){var ste;ste={table:st,id:id(key),name:name,directives:NULL,type:block,nested:0,free:0,varargs:0,varkeywords:0,opt_lineno:0,opt_col_offset:0,lineno:lineno,col_offset:col_offset,end_lineno:end_lineno,end_col_offset:end_col_offset};if(st.cur!=NULL&&(st.cur.nested||st.cur.type==FunctionBlock)){ste.nested=1}ste.child_free=0;ste.generator=0;ste.coroutine=0;ste.comprehension=NoComprehension;ste.returns_value=0;ste.needs_class_closure=0;ste.comp_inlined=0;ste.comp_iter_target=0;ste.comp_iter_expr=0;ste.symbols=$B.empty_dict();ste.varnames=[];ste.children=[];st.blocks.set(ste.id,ste);return ste}$B._PySymtable_Build=function(mod,filename,future){var st=new Symtable,seq;st.filename=filename;st.future=future||{};st.type=TYPE_MODULE;if(!symtable_enter_block(st,"top",ModuleBlock,mod,0,0,0,0)){return NULL}st.top=st.cur;switch(mod.constructor){case $B.ast.Module:seq=mod.body;for(let item of seq){visitor.stmt(st,item)}break;case $B.ast.Expression:visitor.expr(st,mod.body);break;case $B.ast.Interactive:seq=mod.body;for(let item of seq){visitor.stmt(st,item)}break}symtable_analyze(st);return st.top};function _PyST_GetSymbol(ste,name){if(!_b_.dict.$contains_string(ste.symbols,name)){return 0}return _b_.dict.$getitem_string(ste.symbols,name)}function _PyST_GetScope(ste,name){var symbol=_PyST_GetSymbol(ste,name);return symbol>>SCOPE_OFFSET&SCOPE_MASK}function _PyST_IsFunctionLike(ste){return ste.type==FunctionBlock||ste.type==TypeVarBoundBlock||ste.type==TypeAliasBlock||ste.type==TypeParamBlock}function PyErr_Format(exc_type,message,arg){if(arg){message=_b_.str.__mod__(message,arg)}return exc_type.$factory(message)}function PyErr_SetString(exc_type,message){return exc_type.$factory(message)}function set_exc_info(exc,filename,lineno,offset,end_lineno,end_offset){exc.filename=filename;exc.lineno=lineno;exc.offset=offset+1;exc.end_lineno=end_lineno;exc.end_offset=end_offset+1;var src=$B.file_cache[filename];if(src!==undefined){var lines=src.split("\n");exc.text=lines[lineno-1]}else{exc.text=""}exc.args[1]=[filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset]}function error_at_directive(exc,ste,name){assert(ste.directives);for(var data of ste.directives){if(data[0]==name){set_exc_info(exc,ste.table.filename,data[1],data[2],data[3],data[4]);return 0}}throw _b_.RuntimeError.$factory("BUG: internal directive bookkeeping broken")}function SET_SCOPE(DICT,NAME,I){DICT[NAME]=I}function is_free_in_any_child(entry,key){for(var child_ste of entry.ste_children){var scope=_PyST_GetScope(child_ste,key);if(scope==FREE){return 1}}return 0}function inline_comprehension(ste,comp,scopes,comp_free,inlined_cells){for(var item of _b_.dict.$iter_items(comp.symbols)){var 
k=item.key,comp_flags=item.value;if(comp_flags&DEF_PARAM){continue}var scope=comp_flags>>SCOPE_OFFSET&SCOPE_MASK;var only_flags=comp_flags&(1<=0;i--){let entry=ste.children[i];if(entry.comp_inlined){ste.children.splice(i,0,...entry.children)}}if(_PyST_IsFunctionLike(ste)&&!analyze_cells(scopes,newfree,inlined_cells)){return 0}else if(ste.type===ClassBlock&&!drop_class_free(ste,newfree)){return 0}if(!update_symbols(ste.symbols,scopes,bound,newfree,inlined_cells,ste.type===ClassBlock||ste.can_see_class_scope)){return 0}Set_Union(free,newfree);success=1;return success}function PySet_New(arg){if(arg===NULL){return new Set}return new Set(arg)}function Set_Union(setA,setB){for(let elem of setB){setA.add(elem)}}function analyze_child_block(entry,bound,free,global,typeparams,class_entry,child_free){var temp_bound=PySet_New(bound),temp_free=PySet_New(free),temp_global=PySet_New(global),temp_typeparams=PySet_New(typeparams);if(!analyze_block(entry,temp_bound,temp_free,temp_global,temp_typeparams,class_entry)){return 0}Set_Union(child_free,temp_free);return 1}function symtable_analyze(st){var free=new Set,global=new Set,typeparams=new Set;return analyze_block(st.top,NULL,free,global,typeparams,NULL)}function symtable_exit_block(st){var size=st.stack.length;st.cur=NULL;if(size){st.stack.pop();if(--size){st.cur=st.stack[size-1]}}return 1}function symtable_enter_block(st,name,block,ast,lineno,col_offset,end_lineno,end_col_offset){var prev;if(ast===undefined){console.log("call ste new, key undef",st,name)}var ste=ste_new(st,name,block,ast,lineno,col_offset,end_lineno,end_col_offset);st.stack.push(ste);prev=st.cur;if(prev){ste.comp_iter_expr=prev.comp_iter_expr}st.cur=ste;if(block===AnnotationBlock){return 1}if(block===ModuleBlock){st.global=st.cur.symbols}if(prev){prev.children.push(ste)}return 1}function symtable_lookup(st,name){var mangled=_Py_Mangle(st.private,name);if(!mangled){return 0}var ret=_PyST_GetSymbol(st.cur,mangled);return ret}function symtable_add_def_helper(st,name,flag,ste,_location){var o,dict,val,mangled=_Py_Mangle(st.private,name);if(!mangled){return 0}dict=ste.symbols;if(_b_.dict.$contains_string(dict,mangled)){o=_b_.dict.$getitem_string(dict,mangled);val=o;if(flag&DEF_PARAM&&val&DEF_PARAM){let exc=PyErr_Format(_b_.SyntaxError,DUPLICATE_ARGUMENT,name);set_exc_info(exc,st.filename,..._location);throw exc}if(flag&DEF_TYPE_PARAM&&val&DEF_TYPE_PARAM){let exc=PyErr_Format(_b_.SyntaxError,DUPLICATE_TYPE_PARAM,name);set_exc_info(exc,st.filename,...location);throw exc}val|=flag}else{val=flag}if(ste.comp_iter_target){if(val&(DEF_GLOBAL|DEF_NONLOCAL)){let exc=PyErr_Format(_b_.SyntaxError,NAMED_EXPR_COMP_INNER_LOOP_CONFLICT,name);set_exc_info(exc,st.filename,..._location);throw exc}val|=DEF_COMP_ITER}o=val;if(o==NULL){return 0}_b_.dict.$setitem(dict,mangled,o);if(flag&DEF_PARAM){ste.varnames.push(mangled)}else if(flag&DEF_GLOBAL){val=flag;if(st.global.hasOwnProperty(mangled)){val|=st.global[mangled]}o=val;if(o==NULL){return 0}st.global[mangled]=o}return 1}function symtable_add_def(st,name,flag,_location){return symtable_add_def_helper(st,name,flag,st.cur,_location)}function symtable_enter_type_param_block(st,name,ast,has_defaults,has_kwdefaults,kind,_location){var prev=st.cur,current_type=st.cur.type;if(!symtable_enter_block(st,name,TypeParamBlock,ast,..._location)){return 0}prev.$type_param=st.cur;if(current_type===ClassBlock){st.cur.can_see_class_scope=1;if(!symtable_add_def(st,"__classdict__",USE,_location)){return 
0}}if(kind==$B.ast.ClassDef){if(!symtable_add_def(st,"type_params",DEF_LOCAL,_location)){return 0}if(!symtable_add_def(st,"type_params",USE,_location)){return 0}st.st_private=name;var generic_base=".generic_base";if(!symtable_add_def(st,generic_base,DEF_LOCAL,_location)){return 0}if(!symtable_add_def(st,generic_base,USE,_location)){return 0}}if(has_defaults){var defaults=".defaults";if(!symtable_add_def(st,defaults,DEF_PARAM,_location)){return 0}}if(has_kwdefaults){var kwdefaults=".kwdefaults";if(!symtable_add_def(st,kwdefaults,DEF_PARAM,_location)){return 0}}return 1}function VISIT_QUIT(ST,X){return X}function VISIT(ST,TYPE,V){var f=visitor[TYPE];if(!f(ST,V)){VISIT_QUIT(ST,0)}}function VISIT_SEQ(ST,TYPE,SEQ){for(var elt of SEQ){if(!visitor[TYPE](ST,elt)){VISIT_QUIT(ST,0)}}}function VISIT_SEQ_TAIL(ST,TYPE,SEQ,START){for(var i=START,len=SEQ.length;i0){if(!symtable_enter_type_param_block(st,s.name,s.type_params,s.args.defaults!=NULL,has_kwonlydefaults(s.args.kwonlyargs,s.args.kw_defaults),s.constructor,LOCATION(s))){VISIT_QUIT(st,0)}VISIT_SEQ(st,type_param,s.type_params)}if(!visitor.annotations(st,s,s.args,s.returns))VISIT_QUIT(st,0);if(s.decorator_list){VISIT_SEQ(st,expr,s.decorator_list)}if(!symtable_enter_block(st,s.name,FunctionBlock,s,...LOCATION(s))){VISIT_QUIT(st,0)}VISIT(st,"arguments",s.args);VISIT_SEQ(st,stmt,s.body);if(!symtable_exit_block(st)){VISIT_QUIT(st,0)}if(s.type_params.length>0){if(!symtable_exit_block(st)){VISIT_QUIT(st,0)}}break;case $B.ast.ClassDef:var tmp;if(!symtable_add_def(st,s.name,DEF_LOCAL,LOCATION(s)))VISIT_QUIT(st,0);VISIT_SEQ(st,expr,s.bases);VISIT_SEQ(st,keyword,s.keywords);if(s.decorator_list)VISIT_SEQ(st,expr,s.decorator_list);if(s.type_params.length>0){if(!symtable_enter_type_param_block(st,s.name,s.type_params,false,false,s.constructor,LOCATION(s))){VISIT_QUIT(st,0)}VISIT_SEQ(st,type_param,s.type_params)}VISIT_SEQ(st,expr,s.bases);VISIT_SEQ(st,keyword,s.keywords);if(!symtable_enter_block(st,s.name,ClassBlock,s,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset))VISIT_QUIT(st,0);tmp=st.private;st.private=s.name;if(s.type_params.length>0){if(!symtable_add_def(st,"__type_params__",DEF_LOCAL,LOCATION(s))){VISIT_QUIT(st,0)}if(!symtable_add_def(st,"type_params",USE,LOCATION(s))){VISIT_QUIT(st,0)}}VISIT_SEQ(st,stmt,s.body);st.private=tmp;if(!symtable_exit_block(st))VISIT_QUIT(st,0);if(s.type_params.length>0){if(!symtable_exit_block(st))VISIT_QUIT(st,0)}break;case $B.ast.TypeAlias:VISIT(st,expr,s.name);assert(s.name instanceof $B.ast.Name);var name=s.name.id,is_in_class=st.cur.type===ClassBlock,is_generic=s.type_params.length>0;if(is_generic){if(!symtable_enter_type_param_block(st,name,s.type_params,false,false,s.kind,LOCATION(s))){VISIT_QUIT(st,0)}VISIT_SEQ(st,type_param,s.type_params)}if(!symtable_enter_block(st,name,TypeAliasBlock,s,LOCATION(s))){VISIT_QUIT(st,0)}st.cur.can_see_class_scope=is_in_class;if(is_in_class&&!symtable_add_def(st,"__classdict__",USE,LOCATION(s.value))){VISIT_QUIT(st,0)}VISIT(st,expr,s.value);if(!symtable_exit_block(st)){VISIT_QUIT(st,0)}if(is_generic){if(!symtable_exit_block(st))VISIT_QUIT(st,0)}break;case $B.ast.Return:if(s.value){VISIT(st,expr,s.value);st.cur.returns_value=1}break;case $B.ast.Delete:VISIT_SEQ(st,expr,s.targets);break;case $B.ast.Assign:VISIT_SEQ(st,expr,s.targets);VISIT(st,expr,s.value);break;case $B.ast.AnnAssign:if(s.target instanceof $B.ast.Name){var e_name=s.target;var cur=symtable_lookup(st,e_name.id);if(cur<0){VISIT_QUIT(st,0)}if(cur&(DEF_GLOBAL|DEF_NONLOCAL)&&st.cur.symbols!=st.global&&s.simple){var 
exc=PyErr_Format(_b_.SyntaxError,cur&DEF_GLOBAL?GLOBAL_ANNOT:NONLOCAL_ANNOT,e_name.id);exc.args[1]=[st.filename,s.lineno,s.col_offset+1,s.end_lineno,s.end_col_offset+1];throw exc}if(s.simple&&!symtable_add_def(st,e_name.id,DEF_ANNOT|DEF_LOCAL,LOCATION(e_name))){VISIT_QUIT(st,0)}else{if(s.value&&!symtable_add_def(st,e_name.id,DEF_LOCAL,LOCATION(e_name))){VISIT_QUIT(st,0)}}}else{VISIT(st,expr,s.target)}if(!visitor.annotation(st,s.annotation)){VISIT_QUIT(st,0)}if(s.value){VISIT(st,expr,s.value)}break;case $B.ast.AugAssign:VISIT(st,expr,s.target);VISIT(st,expr,s.value);break;case $B.ast.For:VISIT(st,expr,s.target);VISIT(st,expr,s.iter);VISIT_SEQ(st,stmt,s.body);if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)}break;case $B.ast.While:VISIT(st,expr,s.test);VISIT_SEQ(st,stmt,s.body);if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)}break;case $B.ast.If:VISIT(st,expr,s.test);VISIT_SEQ(st,stmt,s.body);if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)}break;case $B.ast.Match:VISIT(st,expr,s.subject);VISIT_SEQ(st,match_case,s.cases);break;case $B.ast.Raise:if(s.exc){VISIT(st,expr,s.exc);if(s.cause){VISIT(st,expr,s.cause)}}break;case $B.ast.Try:VISIT_SEQ(st,stmt,s.body);VISIT_SEQ(st,stmt,s.orelse);VISIT_SEQ(st,excepthandler,s.handlers);VISIT_SEQ(st,stmt,s.finalbody);break;case $B.ast.TryStar:VISIT_SEQ(st,stmt,s.body);VISIT_SEQ(st,stmt,s.orelse);VISIT_SEQ(st,excepthandler,s.handlers);VISIT_SEQ(st,stmt,s.finalbody);break;case $B.ast.Assert:VISIT(st,expr,s.test);if(s.msg){VISIT(st,expr,s.msg)}break;case $B.ast.Import:VISIT_SEQ(st,alias,s.names);break;case $B.ast.ImportFrom:VISIT_SEQ(st,alias,s.names);break;case $B.ast.Global:var seq=s.names;for(var name of seq){var cur=symtable_lookup(st,name);if(cur<0){VISIT_QUIT(st,0)}if(cur&(DEF_PARAM|DEF_LOCAL|USE|DEF_ANNOT)){var msg;if(cur&DEF_PARAM){msg=GLOBAL_PARAM}else if(cur&USE){msg=GLOBAL_AFTER_USE}else if(cur&DEF_ANNOT){msg=GLOBAL_ANNOT}else{msg=GLOBAL_AFTER_ASSIGN}var exc=PyErr_Format(_b_.SyntaxError,msg,name);set_exc_info(exc,st.filename,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset);throw exc}if(!symtable_add_def(st,name,DEF_GLOBAL,LOCATION(s)))VISIT_QUIT(st,0);if(!symtable_record_directive(st,name,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset))VISIT_QUIT(st,0)}break;case $B.ast.Nonlocal:var seq=s.names;for(var name of seq){var cur=symtable_lookup(st,name);if(cur<0){VISIT_QUIT(st,0)}if(cur&(DEF_PARAM|DEF_LOCAL|USE|DEF_ANNOT)){var msg;if(cur&DEF_PARAM){msg=NONLOCAL_PARAM}else if(cur&USE){msg=NONLOCAL_AFTER_USE}else if(cur&DEF_ANNOT){msg=NONLOCAL_ANNOT}else{msg=NONLOCAL_AFTER_ASSIGN}var exc=PyErr_Format(_b_.SyntaxError,msg,name);set_exc_info(exc,st.filename,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset);throw exc}if(!symtable_add_def(st,name,DEF_NONLOCAL,LOCATION(s)))VISIT_QUIT(st,0);if(!symtable_record_directive(st,name,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset))VISIT_QUIT(st,0)}break;case $B.ast.Expr:VISIT(st,expr,s.value);break;case $B.ast.Pass:case $B.ast.Break:case $B.ast.Continue:break;case $B.ast.With:VISIT_SEQ(st,"withitem",s.items);VISIT_SEQ(st,stmt,s.body);break;case 
$B.ast.AsyncFunctionDef:if(!symtable_add_def(st,s.name,DEF_LOCAL,LOCATION(s)))VISIT_QUIT(st,0);if(s.args.defaults)VISIT_SEQ(st,expr,s.args.defaults);if(s.args.kw_defaults)VISIT_SEQ_WITH_NULL(st,expr,s.args.kw_defaults);if(!visitor.annotations(st,s,s.args,s.returns))VISIT_QUIT(st,0);if(s.decorator_list)VISIT_SEQ(st,expr,s.decorator_list);if(s.type_params.length>0){if(!symtable_enter_type_param_block(st,s.name,s.type_params,s.args.defaults!=NULL,has_kwonlydefaults(s.args.kwonlyargs,s.args.kw_defaults),s.constructor,LOCATION(s))){VISIT_QUIT(st,0)}VISIT_SEQ(st,type_param,s.type_params)}if(!visitor.annotations(st,s,s.args,s.returns))VISIT_QUIT(st,0);if(!symtable_enter_block(st,s.name,FunctionBlock,s,s.lineno,s.col_offset,s.end_lineno,s.end_col_offset))VISIT_QUIT(st,0);st.cur.coroutine=1;VISIT(st,"arguments",s.args);VISIT_SEQ(st,stmt,s.body);if(!symtable_exit_block(st))VISIT_QUIT(st,0);if(s.type_params.length>0){if(!symtable_exit_block(st))VISIT_QUIT(st,0)}break;case $B.ast.AsyncWith:VISIT_SEQ(st,withitem,s.items);VISIT_SEQ(st,stmt,s.body);break;case $B.ast.AsyncFor:VISIT(st,expr,s.target);VISIT(st,expr,s.iter);VISIT_SEQ(st,stmt,s.body);if(s.orelse){VISIT_SEQ(st,stmt,s.orelse)}break;default:console.log("unhandled",s);break}VISIT_QUIT(st,1)};function symtable_extend_namedexpr_scope(st,e){assert(st.stack);assert(e instanceof $B.ast.Name);var target_name=e.id;var i,size,ste;size=st.stack.length;assert(size);for(i=size-1;i>=0;i--){ste=st.stack[i];if(ste.comprehension){let target_in_scope=_PyST_GetSymbol(ste,target_name);if(target_in_scope&DEF_COMP_ITER){let exc=PyErr_Format(_b_.SyntaxError,NAMED_EXPR_COMP_CONFLICT,target_name);set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.ed_lineno,e.end_col_offset);throw exc}continue}if(_PyST_IsFunctionLike(ste)){let target_in_scope=_PyST_GetSymbol(ste,target_name);if(target_in_scope&DEF_GLOBAL){if(!symtable_add_def(st,target_name,DEF_GLOBAL,LOCATION(e)))VISIT_QUIT(st,0)}else{if(!symtable_add_def(st,target_name,DEF_NONLOCAL,LOCATION(e)))VISIT_QUIT(st,0)}if(!symtable_record_directive(st,target_name,LOCATION(e)))VISIT_QUIT(st,0);return symtable_add_def_helper(st,target_name,DEF_LOCAL,ste,LOCATION(e))}if(ste.type==ModuleBlock){if(!symtable_add_def(st,target_name,DEF_GLOBAL,LOCATION(e)))VISIT_QUIT(st,0);if(!symtable_record_directive(st,target_name,LOCATION(e)))VISIT_QUIT(st,0);return symtable_add_def_helper(st,target_name,DEF_GLOBAL,ste,LOCATION(e))}if(ste.type==ClassBlock){let exc=PyErr_Format(_b_.SyntaxError,NAMED_EXPR_COMP_IN_CLASS);set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset);throw exc}}assert(0);return 0}function symtable_handle_namedexpr(st,e){if(st.cur.comp_iter_expr>0){var exc=PyErr_Format(PyExc_SyntaxError,NAMED_EXPR_COMP_ITER_EXPR);set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset);throw exc}if(st.cur.comprehension){if(!symtable_extend_namedexpr_scope(st,e.target))return 0}VISIT(st,expr,e.value);VISIT(st,expr,e.target);return 1}const alias="alias",comprehension="comprehension",excepthandler="excepthandler",expr="expr",keyword="keyword",match_case="match_case",pattern="pattern",stmt="stmt",type_param="type_param",withitem="withitem";visitor.expr=function(st,e){switch(e.constructor){case $B.ast.NamedExpr:if(!symtable_raise_if_annotation_block(st,"named expression",e)){VISIT_QUIT(st,0)}if(!symtable_handle_namedexpr(st,e))VISIT_QUIT(st,0);break;case $B.ast.BoolOp:VISIT_SEQ(st,"expr",e.values);break;case $B.ast.BinOp:VISIT(st,"expr",e.left);VISIT(st,"expr",e.right);break;case 
$B.ast.UnaryOp:VISIT(st,"expr",e.operand);break;case $B.ast.Lambda:{if(!GET_IDENTIFIER("lambda"))VISIT_QUIT(st,0);if(e.args.defaults)VISIT_SEQ(st,"expr",e.args.defaults);if(e.args.kw_defaults)VISIT_SEQ_WITH_NULL(st,"expr",e.args.kw_defaults);if(!symtable_enter_block(st,lambda,FunctionBlock,e,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset))VISIT_QUIT(st,0);VISIT(st,"arguments",e.args);VISIT(st,"expr",e.body);if(!symtable_exit_block(st))VISIT_QUIT(st,0);break}case $B.ast.IfExp:VISIT(st,"expr",e.test);VISIT(st,"expr",e.body);VISIT(st,"expr",e.orelse);break;case $B.ast.Dict:VISIT_SEQ_WITH_NULL(st,"expr",e.keys);VISIT_SEQ(st,"expr",e.values);break;case $B.ast.Set:VISIT_SEQ(st,"expr",e.elts);break;case $B.ast.GeneratorExp:if(!visitor.genexp(st,e))VISIT_QUIT(st,0);break;case $B.ast.ListComp:if(!visitor.listcomp(st,e))VISIT_QUIT(st,0);break;case $B.ast.SetComp:if(!visitor.setcomp(st,e))VISIT_QUIT(st,0);break;case $B.ast.DictComp:if(!visitor.dictcomp(st,e))VISIT_QUIT(st,0);break;case $B.ast.Yield:if(!symtable_raise_if_annotation_block(st,"yield expression",e)){VISIT_QUIT(st,0)}if(e.value)VISIT(st,"expr",e.value);st.cur.generator=1;if(st.cur.comprehension){return symtable_raise_if_comprehension_block(st,e)}break;case $B.ast.YieldFrom:if(!symtable_raise_if_annotation_block(st,"yield expression",e)){VISIT_QUIT(st,0)}VISIT(st,"expr",e.value);st.cur.generator=1;if(st.cur.comprehension){return symtable_raise_if_comprehension_block(st,e)}break;case $B.ast.Await:if(!symtable_raise_if_annotation_block(st,"await expression",e)){VISIT_QUIT(st,0)}VISIT(st,"expr",e.value);st.cur.coroutine=1;break;case $B.ast.Compare:VISIT(st,"expr",e.left);VISIT_SEQ(st,"expr",e.comparators);break;case $B.ast.Call:VISIT(st,"expr",e.func);VISIT_SEQ(st,"expr",e.args);VISIT_SEQ_WITH_NULL(st,"keyword",e.keywords);break;case $B.ast.FormattedValue:VISIT(st,"expr",e.value);if(e.format_spec)VISIT(st,"expr",e.format_spec);break;case $B.ast.JoinedStr:VISIT_SEQ(st,"expr",e.values);break;case $B.ast.Constant:break;case $B.ast.Attribute:VISIT(st,"expr",e.value);break;case $B.ast.Subscript:VISIT(st,"expr",e.value);VISIT(st,"expr",e.slice);break;case $B.ast.Starred:VISIT(st,"expr",e.value);break;case $B.ast.Slice:if(e.lower)VISIT(st,expr,e.lower);if(e.upper)VISIT(st,expr,e.upper);if(e.step)VISIT(st,expr,e.step);break;case $B.ast.Name:var flag=e.ctx instanceof $B.ast.Load?USE:DEF_LOCAL;if(!symtable_add_def(st,e.id,flag,LOCATION(e)))VISIT_QUIT(st,0);if(e.ctx instanceof $B.ast.Load&&_PyST_IsFunctionLike(st.cur)&&e.id=="super"){if(!GET_IDENTIFIER("__class__")||!symtable_add_def(st,"__class__",USE,LOCATION(e)))VISIT_QUIT(st,0)}break;case $B.ast.List:VISIT_SEQ(st,expr,e.elts);break;case $B.ast.Tuple:VISIT_SEQ(st,expr,e.elts);break}VISIT_QUIT(st,1)};visitor.type_param=function(st,tp){switch(tp.constructor){case $B.ast.TypeVar:if(!symtable_add_def(st,tp.name,DEF_TYPE_PARAM|DEF_LOCAL,LOCATION(tp)))VISIT_QUIT(st,0);if(tp.bound){var is_in_class=st.cur.can_see_class_scope;if(!symtable_enter_block(st,tp.name,TypeVarBoundBlock,tp,LOCATION(tp)))VISIT_QUIT(st,0);st.cur.can_see_class_scope=is_in_class;if(is_in_class&&!symtable_add_def(st,"__classdict__",USE,LOCATION(tp.bound))){VISIT_QUIT(st,0)}VISIT(st,expr,tp.bound);if(!symtable_exit_block(st))VISIT_QUIT(st,0)}break;case $B.ast.TypeVarTuple:if(!symtable_add_def(st,tp.name,DEF_TYPE_PARAM|DEF_LOCAL,LOCATION(tp)))VISIT_QUIT(st,0);break;case 
$B.ast.ParamSpec:if(!symtable_add_def(st,tp.name,DEF_TYPE_PARAM|DEF_LOCAL,LOCATION(tp)))VISIT_QUIT(st,0);break}VISIT_QUIT(st,1)};visitor.pattern=function(st,p){switch(p.constructor){case $B.ast.MatchValue:VISIT(st,expr,p.value);break;case $B.ast.MatchSingleton:break;case $B.ast.MatchSequence:VISIT_SEQ(st,pattern,p.patterns);break;case $B.ast.MatchStar:if(p.name){symtable_add_def(st,p.name,DEF_LOCAL,LOCATION(p))}break;case $B.ast.MatchMapping:VISIT_SEQ(st,expr,p.keys);VISIT_SEQ(st,pattern,p.patterns);if(p.rest){symtable_add_def(st,p.rest,DEF_LOCAL,LOCATION(p))}break;case $B.ast.MatchClass:VISIT(st,expr,p.cls);VISIT_SEQ(st,pattern,p.patterns);VISIT_SEQ(st,pattern,p.kwd_patterns);break;case $B.ast.MatchAs:if(p.pattern){VISIT(st,pattern,p.pattern)}if(p.name){symtable_add_def(st,p.name,DEF_LOCAL,LOCATION(p))}break;case $B.ast.MatchOr:VISIT_SEQ(st,pattern,p.patterns);break}VISIT_QUIT(st,1)};function symtable_implicit_arg(st,pos){var id="."+pos;if(!symtable_add_def(st,id,DEF_PARAM,ST_LOCATION(st.cur))){return 0}return 1}visitor.params=function(st,args){if(!args){return-1}for(var arg of args){if(!symtable_add_def(st,arg.arg,DEF_PARAM,LOCATION(arg)))return 0}return 1};visitor.annotation=function(st,annotation){var future_annotations=st.future.features&$B.CO_FUTURE_ANNOTATIONS;if(future_annotations&&!symtable_enter_block(st,"_annotation",AnnotationBlock,annotation,annotation.lineno,annotation.col_offset,annotation.end_lineno,annotation.end_col_offset)){VISIT_QUIT(st,0)}VISIT(st,expr,annotation);if(future_annotations&&!symtable_exit_block(st)){VISIT_QUIT(st,0)}return 1};visitor.argannotations=function(st,args){if(!args){return-1}for(var arg of args){if(arg.annotation){VISIT(st,expr,arg.annotation)}}return 1};visitor.annotations=function(st,o,a,returns){var future_annotations=st.future.ff_features&$B.CO_FUTURE_ANNOTATIONS;if(future_annotations&&!symtable_enter_block(st,"_annotation",AnnotationBlock,o,o.lineno,o.col_offset,o.end_lineno,o.end_col_offset)){VISIT_QUIT(st,0)}if(a.posonlyargs&&!visitor.argannotations(st,a.posonlyargs))return 0;if(a.args&&!visitor.argannotations(st,a.args))return 0;if(a.vararg&&a.vararg.annotation)VISIT(st,expr,a.vararg.annotation);if(a.kwarg&&a.kwarg.annotation)VISIT(st,expr,a.kwarg.annotation);if(a.kwonlyargs&&!visitor.argannotations(st,a.kwonlyargs))return 0;if(future_annotations&&!symtable_exit_block(st)){VISIT_QUIT(st,0)}if(returns&&!visitor.annotation(st,returns)){VISIT_QUIT(st,0)}return 1};visitor.arguments=function(st,a){if(a.posonlyargs&&!visitor.params(st,a.posonlyargs))return 0;if(a.args&&!visitor.params(st,a.args))return 0;if(a.kwonlyargs&&!visitor.params(st,a.kwonlyargs))return 0;if(a.vararg){if(!symtable_add_def(st,a.vararg.arg,DEF_PARAM,LOCATION(a.vararg)))return 0;st.cur.varargs=1}if(a.kwarg){if(!symtable_add_def(st,a.kwarg.arg,DEF_PARAM,LOCATION(a.kwarg)))return 0;st.cur.varkeywords=1}return 1};visitor.excepthandler=function(st,eh){if(eh.type)VISIT(st,expr,eh.type);if(eh.name)if(!symtable_add_def(st,eh.name,DEF_LOCAL,LOCATION(eh)))return 0;VISIT_SEQ(st,stmt,eh.body);return 1};visitor.withitem=function(st,item){VISIT(st,"expr",item.context_expr);if(item.optional_vars){VISIT(st,"expr",item.optional_vars)}return 1};visitor.match_case=function(st,m){VISIT(st,pattern,m.pattern);if(m.guard){VISIT(st,expr,m.guard)}VISIT_SEQ(st,stmt,m.body);return 1};visitor.alias=function(st,a){var store_name,name=a.asname==NULL?a.name:a.asname;var dot=name.search("\\.");if(dot!=-1){store_name=name.substring(0,dot);if(!store_name)return 0}else{store_name=name}if(name!="*"){var 
r=symtable_add_def(st,store_name,DEF_IMPORT,LOCATION(a));return r}else{if(st.cur.type!=ModuleBlock){var lineno=a.lineno,col_offset=a.col_offset,end_lineno=a.end_lineno,end_col_offset=a.end_col_offset;var exc=PyErr_SetString(PyExc_SyntaxError,IMPORT_STAR_WARNING);set_exc_info(exc,st.filename,lineno,col_offset,end_lineno,end_col_offset);throw exc}st.cur.$has_import_star=true;return 1}};visitor.comprehension=function(st,lc){st.cur.comp_iter_target=1;VISIT(st,expr,lc.target);st.cur.comp_iter_target=0;st.cur.comp_iter_expr++;VISIT(st,expr,lc.iter);st.cur.comp_iter_expr--;VISIT_SEQ(st,expr,lc.ifs);if(lc.is_async){st.cur.coroutine=1}return 1};visitor.keyword=function(st,k){VISIT(st,expr,k.value);return 1};function symtable_handle_comprehension(st,e,scope_name,generators,elt,value){var is_generator=e.constructor===$B.ast.GeneratorExp;var outermost=generators[0];st.cur.comp_iter_expr++;VISIT(st,expr,outermost.iter);st.cur.comp_iter_expr--;if(!scope_name||!symtable_enter_block(st,scope_name,FunctionBlock,e,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset)){return 0}switch(e.constructor){case $B.ast.ListComp:st.cur.comprehension=ListComprehension;break;case $B.ast.SetComp:st.cur.comprehension=SetComprehension;break;case $B.ast.DictComp:st.cur.comprehension=DictComprehension;break;default:st.cur.comprehension=GeneratorExpression;break}if(outermost.is_async){st.cur.coroutine=1}if(!symtable_implicit_arg(st,0)){symtable_exit_block(st);return 0}st.cur.comp_iter_target=1;VISIT(st,expr,outermost.target);st.cur.comp_iter_target=0;VISIT_SEQ(st,expr,outermost.ifs);VISIT_SEQ_TAIL(st,comprehension,generators,1);if(value)VISIT(st,expr,value);VISIT(st,expr,elt);st.cur.generator=is_generator;var is_async=st.cur.coroutine&&!is_generator;if(!symtable_exit_block(st)){return 0}if(is_async){st.cur.coroutine=1}return 1}visitor.genexp=function(st,e){return symtable_handle_comprehension(st,e,"genexpr",e.generators,e.elt,NULL)};visitor.listcomp=function(st,e){return symtable_handle_comprehension(st,e,"listcomp",e.generators,e.elt,NULL)};visitor.setcomp=function(st,e){return symtable_handle_comprehension(st,e,"setcomp",e.generators,e.elt,NULL)};visitor.dictcomp=function(st,e){return symtable_handle_comprehension(st,e,"dictcomp",e.generators,e.key,e.value)};function symtable_raise_if_annotation_block(st,name,e){var type=st.cur.type,exc;if(type==AnnotationBlock)exc=PyErr_Format(PyExc_SyntaxError,ANNOTATION_NOT_ALLOWED,name);else if(type==TypeVarBoundBlock)exc=PyErr_Format(PyExc_SyntaxError,TYPEVAR_BOUND_NOT_ALLOWED,name);else if(type==TypeAliasBlock)exc=PyErr_Format(PyExc_SyntaxError,TYPEALIAS_NOT_ALLOWED,name);else if(type==TypeParamBlock)exc=PyErr_Format(PyExc_SyntaxError,TYPEPARAM_NOT_ALLOWED,name);else return 1;set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset);throw exc}function symtable_raise_if_comprehension_block(st,e){var type=st.cur.comprehension;var exc=PyErr_SetString(PyExc_SyntaxError,type==ListComprehension?"'yield' inside list comprehension":type==SetComprehension?"'yield' inside set comprehension":type==DictComprehension?"'yield' inside dict comprehension":"'yield' inside generator expression");exc.$frame_obj=$B.frame_obj;set_exc_info(exc,st.filename,e.lineno,e.col_offset,e.end_lineno,e.end_col_offset);throw exc}})(__BRYTHON__);(function($B){var _b_=$B.builtins,NULL=undefined,DOT=".",ELLIPSIS="...";const STAR_TARGETS=1,DEL_TARGETS=2,FOR_TARGETS=3;function 
make_string_for_ast_value(value){value=value.replace(/\n/g,"\\n\\\n");value=value.replace(/\r/g,"\\r\\\r");if(value[0]=="'"){var unquoted=value.substr(1,value.length-2);return unquoted}if(value.indexOf("'")>-1){var s="",escaped=false;for(var char of value){if(char=="\\"){if(escaped){s+="\\\\"}escaped=!escaped}else{if(char=="'"&&!escaped){s+="\\"}else if(escaped){s+="\\"}s+=char;escaped=false}}value=s}return value.substr(1,value.length-2)}function encode_bytestring(s){s=s.replace(/\\t/g,"\t").replace(/\\n/g,"\n").replace(/\\r/g,"\r").replace(/\\f/g,"\f").replace(/\\v/g,"\v").replace(/\\\\/g,"\\");var t=[];for(var i=0,len=s.length;i255){throw Error()}t.push(cp)}return t}function EXTRA_EXPR(head,tail){return{lineno:head.lineno,col_offset:head.col_offset,end_lineno:tail.end_lineno,end_col_offset:tail.end_col_offset}}function set_list(list,other){for(var item of other){list.push(item)}}var positions=["lineno","col_offset","end_lineno","end_col_offset"];function set_position_from_list(ast_obj,EXTRA){for(var i=0;i<4;i++){ast_obj[positions[i]]=EXTRA[i]}}function set_position_from_token(ast_obj,token){ast_obj.lineno=token.lineno;ast_obj.col_offset=token.col_offset;ast_obj.end_lineno=token.end_lineno;ast_obj.end_col_offset=token.end_col_offset}function set_position_from_obj(ast_obj,obj){for(var position of positions){ast_obj[position]=obj[position]}}function _get_names(p,names_with_defaults){var seq=[];for(var pair of names_with_defaults){seq.push(pair.arg)}return seq}function _get_defaults(p,names_with_defaults){var seq=[];for(var pair of names_with_defaults){seq.push(pair.value)}return seq}function _make_posonlyargs(p,slash_without_default,slash_with_default,posonlyargs){if(slash_without_default!=NULL){set_list(posonlyargs,slash_without_default)}else if(slash_with_default!=NULL){var slash_with_default_names=_get_names(p,slash_with_default.names_with_defaults);if(!slash_with_default_names){return-1}set_list(posonlyargs,$B._PyPegen.join_sequences(p,slash_with_default.plain_names,slash_with_default_names))}return posonlyargs==NULL?-1:0}function _make_posargs(p,plain_names,names_with_default,posargs){if(plain_names!=NULL&&names_with_default!=NULL){var names_with_default_names=_get_names(p,names_with_default);if(!names_with_default_names){return-1}var seqs=$B._PyPegen.join_sequences(p,plain_names,names_with_default_names);set_list(posargs,seqs)}else if(plain_names==NULL&&names_with_default!=NULL){set_list(posargs,_get_names(p,names_with_default))}else if(plain_names!=NULL&&names_with_default==NULL){set_list(posargs,plain_names)}return posargs==NULL?-1:0}function _make_posdefaults(p,slash_with_default,names_with_default,posdefaults){if(slash_with_default!=NULL&&names_with_default!=NULL){var slash_with_default_values=_get_defaults(p,slash_with_default.names_with_defaults);if(!slash_with_default_values){return-1}var names_with_default_values=_get_defaults(p,names_with_default);if(!names_with_default_values){return-1}set_list(posdefaults,$B._PyPegen.join_sequences(p,slash_with_default_values,names_with_default_values))}else if(slash_with_default==NULL&&names_with_default!=NULL){set_list(posdefaults,_get_defaults(p,names_with_default))}else if(slash_with_default!=NULL&&names_with_default==NULL){set_list(posdefaults,_get_defaults(p,slash_with_default.names_with_defaults))}return posdefaults==NULL?-1:0}function 
_make_kwargs(p,star_etc,kwonlyargs,kwdefaults){if(star_etc!=NULL&&star_etc.kwonlyargs!=NULL){set_list(kwonlyargs,_get_names(p,star_etc.kwonlyargs))}else{set_list(kwonlyargs,[])}if(kwonlyargs==NULL){return-1}if(star_etc!=NULL&&star_etc.kwonlyargs!=NULL){set_list(kwdefaults,_get_defaults(p,star_etc.kwonlyargs))}else{set_list(kwdefaults,[])}if(kwdefaults==NULL){return-1}return 0}function _seq_number_of_starred_exprs(seq){var n=0;for(var k of seq){if(!k.is_keyword){n++}}return n}$B._PyPegen={};$B._PyPegen.constant_from_string=function(p,token){var prepared=$B.prepare_string(token);var is_bytes=prepared.value.startsWith("b");if(!is_bytes){var value=make_string_for_ast_value(prepared.value)}else{value=prepared.value.substr(2,prepared.value.length-3);try{value=_b_.bytes.$factory(encode_bytestring(value))}catch(err){$B._PyPegen.raise_error_known_location(p,_b_.SyntaxError,token.lineno,token.col_offset,token.end_lineno,token.end_col_offset,"bytes can only contain ASCII literal characters")}}var ast_obj=new $B.ast.Constant(value);set_position_from_token(ast_obj,token);return ast_obj};$B._PyPegen.constant_from_token=function(p,t){var ast_obj=new $B.ast.Constant(t.string);set_position_from_token(ast_obj,t);return ast_obj};$B._PyPegen.decoded_constant_from_token=function(p,t){var ast_obj=new $B.ast.Constant(t.string);set_position_from_token(ast_obj,t);return ast_obj};$B._PyPegen.formatted_value=function(p,expression,debug,conversion,format,closing_brace,arena){var conversion_val=-1;if(conversion){var conversion_expr=conversion.result,first=conversion_expr.id;if(first.length>1||!"sra".includes(first)){$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(conversion_expr,`f-string: invalid conversion character {first}: `+"expected 's', 'r', or 'a'")}var conversion_val=first.charCodeAt(0)}var formatted_value=new $B.ast.FormattedValue(expression,conversion_val,format===undefined?format:format.result);set_position_from_obj(formatted_value,arena);if(debug){var debug_end_line,debug_end_offset,debug_metadata;if(conversion){debug_end_line=conversion.result.lineno;debug_end_offset=conversion.result.col_offset;debug_metadata=conversion.metadata}else if(format){debug_end_line=format.result.lineno;debug_end_offset=format.result.col_offset+1;debug_metadata=format.metadata}else{debug_end_line=p.end_lineno;debug_end_offset=p.end_col_offset;debug_metadata=closing_brace.metadata}var debug=new $B.ast.Constant(debug_metadata);debug.lineno=p.lineno;debug.col_offset=p.col_offset+1;debug.end_lineno=debug_end_line;debug.end_col_offset=debug_end_offset;var joined_str=new $B.ast.JoinedStr([debug,formatted_value]);set_position_from_obj(joined_str,arena);return joined_str}return formatted_value};$B._PyPegen.joined_str=function(p,a,items,c){var ast_obj=new $B.ast.JoinedStr(items);ast_obj.lineno=a.lineno;ast_obj.col_offset=a.col_offset;ast_obj.end_lineno=c.end_lineno;ast_obj.end_col_offset=c.end_col_offset;return ast_obj};$B._PyPegen.setup_full_format_spec=function(p,colon,spec,arena){var ast_obj=new $B.ast.JoinedStr(spec);set_position_from_obj(ast_obj,arena);return result_token_with_metadata(p,ast_obj,colon.metadata)};function result_token_with_metadata(p,result,metadata){return{result:result,metadata:metadata}}$B._PyPegen.check_fstring_conversion=function(p,conv_token,conv){if(conv_token.lineno!=conv.lineno||conv_token.end_col_offset!=conv.col_offset){$B._PyPegen.raise_error_known_location(p,_b_.SyntaxError,conv.lineno,conv.col_offset,conv.end_lineno,conv.end_col_offset,"f-string: conversion type must come right after the 
exclamanation mark")}return result_token_with_metadata(p,conv,conv_token.metadata)};$B._PyPegen.seq_count_dots=function(seq){if(seq===undefined){return 0}var number_of_dots=0;for(var token of seq){if(token.num_type==$B.py_tokens.DOT){number_of_dots+=token.string.length}else if(token.num_type==$B.py_tokens.ELLIPSIS){number_of_dots+=3}}return number_of_dots};$B._PyPegen.map_names_to_ids=function(p,seq){return seq.map((e=>e.id))};$B._PyPegen.alias_for_star=function(p,lineno,col_offset,end_lineno,end_col_offset,arena){var str="*";return $B._PyAST.alias(str,NULL,lineno,col_offset,end_lineno,end_col_offset,arena)};$B._PyPegen.cmpop_expr_pair=function(p,cmpop,expr){return{cmpop:cmpop,expr:expr}};$B._PyPegen.get_cmpops=function(p,seq){var new_seq=[];for(var pair of seq){new_seq.push(pair.cmpop)}return new_seq};$B._PyPegen.get_exprs=function(p,seq){var new_seq=[];for(var pair of seq){new_seq.push(pair.expr)}return new_seq};function _set_seq_context(p,seq,ctx){var new_seq=[];for(var e of seq){new_seq.push($B._PyPegen.set_expr_context(p,e,ctx))}return new_seq}function _set_name_context(p,e,ctx){return $B._PyAST.Name(e.id,ctx,EXTRA_EXPR(e,e))}function _set_tuple_context(p,e,ctx){return $B._PyAST.Tuple(_set_seq_context(p,e.elts,ctx),ctx,EXTRA_EXPR(e,e))}function _set_list_context(p,e,ctx){return $B._PyAST.List(_set_seq_context(p,e.elts,ctx),ctx,EXTRA_EXPR(e,e))}function _set_subscript_context(p,e,ctx){console.log("set subscritp cntext",p,e);return $B._PyAST.Subscript(e.value,e.slice,ctx,EXTRA_EXPR(e,e))}function _set_attribute_context(p,e,ctx){return $B._PyAST.Attribute(e.value,e.attr,ctx,EXTRA_EXPR(e,e))}function _set_starred_context(p,e,ctx){return $B._PyAST.Starred($B._PyPegen.set_expr_context(p,e.value,ctx),ctx,EXTRA_EXPR(e,e))}$B._PyPegen.set_expr_context=function(p,expr,ctx){var _new=NULL;switch(expr.constructor){case $B.ast.Name:_new=_set_name_context(p,expr,ctx);break;case $B.ast.Tuple:_new=_set_tuple_context(p,expr,ctx);break;case $B.ast.List:_new=_set_list_context(p,expr,ctx);break;case $B.ast.Subscript:_new=_set_subscript_context(p,expr,ctx);break;case $B.ast.Attribute:_new=_set_attribute_context(p,expr,ctx);break;case $B.ast.Starred:_new=_set_starred_context(p,expr,ctx);break;default:_new=expr}return _new};$B._PyPegen.key_value_pair=function(p,key,value){return{key:key,value:value}};$B._PyPegen.get_expr_name=function(e){switch(e.constructor.$name){case"Attribute":case"Subscript":case"Starred":case"Name":case"List":case"Tuple":case"Lambda":return e.constructor.$name.toLowerCase();case"Call":return"function call";case"BoolOp":case"BinOp":case"UnaryOp":return"expression";case"GeneratorExp":return"generator expression";case"Yield":case"YieldFrom":return"yield expression";case"Await":return"await expression";case"ListComp":return"list comprehension";case"SetComp":return"set comprehension";case"DictComp":return"dict comprehension";case"Dict":return"dict literal";case"Set":return"set display";case"JoinedStr":case"FormattedValue":return"f-string expression";case"Constant":var value=e.value;if(value===_b_.None){return"None"}if(value===false){return"False"}if(value===true){return"True"}if(value===_b_.Ellipsis){return"ellipsis"}return"literal";case"Compare":return"comparison";case"IfExp":return"conditional expression";case"NamedExpr":return"named expression";default:return NULL}};$B._PyPegen.get_keys=function(p,seq){return seq===undefined?[]:seq.map((pair=>pair.key))};$B._PyPegen.get_values=function(p,seq){return 
seq===undefined?[]:seq.map((pair=>pair.value))};$B._PyPegen.key_pattern_pair=function(p,key,pattern){return{key:key,pattern:pattern}};$B._PyPegen.get_pattern_keys=function(p,seq){return seq===undefined?[]:seq.map((x=>x.key))};$B._PyPegen.get_patterns=function(p,seq){return seq===undefined?[]:seq.map((x=>x.pattern))};$B._PyPegen.check_legacy_stmt=function(p,name){return["print","exec"].includes(name)};$B._PyPegen.dummy_name=function(p){var cache=NULL;if(cache!=NULL){return cache}var id="dummy"+Math.random().toString(36).substr(2),ast_obj=new $B.ast.Name(id,new $B.ast.Load);set_position_from_list(ast_obj,[1,0,1,0]);return ast_obj};$B._PyPegen.add_type_comment_to_arg=function(p,a,tc){if(tc==NULL){return a}var bytes=_b_.bytes.$factory(tc),tco=$B._PyPegen.new_type_comment(p,bytes);var ast_obj=$B._PyAST.arg(a.arg,a.annotation,tco,a.lineno,a.col_offset,a.end_lineno,a.end_col_offset,p.arena);console.log("arg with type comment",ast_obj);return ast_obj};$B._PyPegen.check_barry_as_flufl=function(p,t){return false};$B._PyPegen.empty_arguments=function(p){return $B._PyAST.arguments([],[],NULL,[],[],NULL,[],p.arena)};$B._PyPegen.augoperator=function(p,kind){return{kind:kind}};$B._PyPegen.function_def_decorators=function(p,decorators,function_def){var constr=function_def instanceof $B.ast.AsyncFunctionDef?$B.ast.AsyncFunctionDef:$B.ast.FunctionDef;var ast_obj=new constr(function_def.name,function_def.args,function_def.body,decorators,function_def.returns,function_def.type_comment,function_def.type_params);for(var position of positions){ast_obj[position]=function_def[position]}return ast_obj};$B._PyPegen.class_def_decorators=function(p,decorators,class_def){var ast_obj=$B._PyAST.ClassDef(class_def.name,class_def.bases,class_def.keywords,class_def.body,decorators,class_def.type_params);set_position_from_obj(ast_obj,class_def);return ast_obj};$B._PyPegen.keyword_or_starred=function(p,element,is_keyword){return{element:element,is_keyword:is_keyword}};$B._PyPegen.make_arguments=function(p,slash_without_default,slash_with_default,plain_names,names_with_default,star_etc){var posonlyargs=[];if(_make_posonlyargs(p,slash_without_default,slash_with_default,posonlyargs)==-1){return NULL}var posargs=[];if(_make_posargs(p,plain_names,names_with_default,posargs)==-1){return NULL}var posdefaults=[];if(_make_posdefaults(p,slash_with_default,names_with_default,posdefaults)==-1){return NULL}var vararg=NULL;if(star_etc!=NULL&&star_etc.vararg!=NULL){vararg=star_etc.vararg}var kwonlyargs=[],kwdefaults=[];if(_make_kwargs(p,star_etc,kwonlyargs,kwdefaults)==-1){return NULL}var kwarg=NULL;if(star_etc!=NULL&&star_etc.kwarg!=NULL){kwarg=star_etc.kwarg}var ast_obj=$B._PyAST.arguments(posonlyargs,posargs,vararg,kwonlyargs,kwdefaults,kwarg,posdefaults,p.arena);if(ast_obj.posonlyargs===undefined){console.log("pas de posonlyargs",ast_bj);alert()}return ast_obj};$B._PyPegen.name_default_pair=function(p,arg,value,tc){return{arg:$B._PyPegen.add_type_comment_to_arg(p,arg,tc),value:value}};$B._PyPegen.raise_error=function(p,errtype,errmsg){if(p.fill==0){var va=[errmsg];$B._PyPegen.raise_error_known_location(p,errtype,0,0,0,-1,errmsg,va);return NULL}var t=p.known_err_token!=NULL?p.known_err_token:p.tokens[p.fill-1];var va=errmsg;$B._PyPegen.raise_error_known_location(p,errtype,t.lineno,t.col_offset,t.end_lineno,t.end_col_offset,errmsg,va)};$B._PyPegen.raise_error_known_location=function(p,errtype,lineno,col_offset,end_lineno,end_col_offset,errmsg,va){var exc=errtype.$factory(errmsg);exc.filename=p.filename;if(p.known_err_token){var 
token=p.known_err_token;exc.lineno=token.lineno;exc.offset=token.col_offset+1;exc.end_lineno=token.end_lineno;exc.end_offset=token.end_col_offset;exc.text=token.line}else{exc.lineno=lineno;exc.offset=col_offset+1;exc.end_lineno=end_lineno;exc.end_offset=end_col_offset+1;var src=$B.file_cache[p.filename];if(src!==undefined){var lines=src.split("\n"),line=lines[exc.lineno-1];exc.text=line+"\n"}else{exc.text=_b_.None}}exc.args[1]=$B.fast_tuple([p.filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset]);throw exc};$B._PyPegen.seq_delete_starred_exprs=function(p,kwargs){var len=kwargs.length,new_len=len-_seq_number_of_starred_exprs(kwargs);if(new_len==0){return NULL}var new_seq=[];for(var k of kwargs){if(k.is_keyword){new_seq.push(k.element)}}return new_seq};$B._PyPegen.seq_extract_starred_exprs=function(p,kwargs){var new_len=_seq_number_of_starred_exprs(kwargs);if(new_len==0){return NULL}var new_seq=[];var idx=0;for(var k of kwargs){if(!k.is_keyword){new_seq[idx++]=k.element}}return new_seq};$B._PyPegen.slash_with_default=function(p,plain_names,names_with_defaults){return{plain_names:plain_names,names_with_defaults:names_with_defaults}};$B._PyPegen.star_etc=function(p,vararg,kwonlyargs,kwarg){return{vararg:vararg,kwonlyargs:kwonlyargs,kwarg:kwarg}};$B._PyPegen.collect_call_seqs=function(p,a,b,lineno,col_offset,end_lineno,end_col_offset,arena){var args_len=a.length,total_len=args_len;if(b==NULL){return $B._PyAST.Call($B._PyPegen.dummy_name(p),a,[],lineno,col_offset,end_lineno,end_col_offset,arena)}var starreds=$B._PyPegen.seq_extract_starred_exprs(p,b),keywords=$B._PyPegen.seq_delete_starred_exprs(p,b);if(starreds){total_len+=starreds.length}var args=[];for(var i=0;ix.replace(new RegExp("'","g"),"\\'")));fs_item.value=parts.join("\\'");fs_item.value=fs_item.value.replace(/\n/g,"\\n").replace(/\r/g,"\\r")}items.push(fs_item)}state="string"}else{items.push(token);var is_bytes=token.value.__class__===_b_.bytes;if(is_bytes&&state=="string"||state=="bytestring"&&!is_bytes){error("cannot mix bytes and nonbytes literals")}state=is_bytes?"bytestring":"string"}}if(state=="bytestring"){var bytes=[];for(var item of items){bytes=bytes.concat(item.value.source)}value=_b_.bytes.$factory(bytes);var ast_obj=new $B.ast.Constant(value);set_position_from_list(ast_obj,items);return ast_obj}function group_consec_strings(items){if(items.length==1){return items[0]}var values=items.map((x=>x.value));let ast_obj=new $B.ast.Constant(values.join(""));set_position_from_list(ast_obj,items);return ast_obj}var items1=[],consec_strs=[],item_type=null;for(var i=0,len=items.length;i0){items1.push(group_consec_strings(consec_strs))}consec_strs=[];items1.push(item)}}if(consec_strs.length>0){items1.push(group_consec_strings(consec_strs))}if(!has_fstring){return items1[0]}var jstr_values=items1;var ast_obj=new $B.ast.JoinedStr(jstr_values);set_position_from_list(ast_obj,strings);return ast_obj};$B._PyPegen.ensure_imaginary=function(p,exp){if(!(exp instanceof $B.ast.Constant)||exp.value.__class__!=_b_.complex){$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(exp,"imaginary number required in complex literal");return NULL}return exp};$B._PyPegen.ensure_real=function(p,exp){if(!(exp instanceof $B.ast.Constant)||exp.value.type=="imaginary"){$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(exp,"real number required in complex literal");return NULL}return exp};$B._PyPegen.set_expr_context=function(p,a,ctx){a.ctx=ctx;return 
a};$B._PyPegen.singleton_seq=function(p,a){return[a]};$B._PyPegen.seq_insert_in_front=function(p,a,seq){return seq?[a].concat(seq):[a]};$B._PyPegen.seq_flatten=function(p,seqs){var res=[];for(var seq of seqs){for(var item of seq){res.push(item)}}return res};$B._PyPegen.join_names_with_dot=function(p,first_name,second_name){var str=first_name.id+"."+second_name.id;return $B._PyAST.Name(str,new $B.ast.Load,EXTRA_EXPR(first_name,second_name))};$B._PyPegen.make_module=function(p,a){return new $B.ast.Module(a)};$B._PyPegen.new_type_comment=function(p,s){if(s.length===0){return NULL}return s};$B._PyPegen.get_last_comprehension_item=function(comprehension){if(comprehension.ifs==NULL||comprehension.ifs.length==0){return comprehension.iter}return $B.last(comprehension.ifs)};$B._PyPegen.arguments_parsing_error=function(p,e){var kwarg_unpacking=0;for(let keyword of e.keywords){if(!keyword.arg){kwarg_unpacking=1}}var msg=NULL;if(kwarg_unpacking){msg="positional argument follows keyword argument unpacking"}else{msg="positional argument follows keyword argument"}return $B.helper_functions.RAISE_SYNTAX_ERROR(p,msg)};$B._PyPegen.nonparen_genexp_in_call=function(p,args,comprehensions){var len=args.args.length;if(len<=1){return NULL}var last_comprehension=$B.last(comprehensions);return $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p,args.args[len-1],$B._PyPegen.get_last_comprehension_item(last_comprehension),"Generator expression must be parenthesized")};$B._PyPegen.get_invalid_target=function(e,targets_type){if(e==NULL){return NULL}function VISIT_CONTAINER(CONTAINER,TYPE){for(var elt of CONTAINER.elts){var child=$B._PyPegen.get_invalid_target(elt,targets_type);if(child!=NULL){return child}}}switch(e.constructor){case $B.ast.List:case $B.ast.Tuple:return VISIT_CONTAINER(e,e.constructor);case $B.ast.Starred:if(targets_type==DEL_TARGETS){return e}return $B._PyPegen.get_invalid_target(e.value,targets_type);case $B.ast.Compare:if(targets_type==FOR_TARGETS){var cmpop=e.ops[0];if(cmpop instanceof $B.ast.In){return $B._PyPegen.get_invalid_target(e.left,targets_type)}return NULL}return e;case $B.ast.Name:case $B.ast.Subscript:case $B.ast.Attribute:return NULL;default:return e}}})(__BRYTHON__);(function($B){var _b_=$B.builtins;var s_escaped='abfnrtvxuU"0123456789'+"'"+"\\",is_escaped={};for(var i=0;i1114111){string_error(token,"invalid unicode escape "+mo[0])}else if(value>=65536){return[SurrogatePair(value),2+mo[0].length]}else{return[String.fromCharCode(value),2+mo[0].length]}}}}$B.prepare_string=function(token){var s=token.string,len=s.length,pos=0,string_modifier,_type="string",quote,C={type:"str"};while(pos-1){elt.value+=char;pos++}else{return check(elt)}}else if(char.match(/j/i)){if(elt&&(!elt.subtype||elt.subtype=="float")){elt.imaginary=true;check(elt);elt.length++;return elt}else{error("invalid syntax")}}else{break}}return check(elt)}$B.prepare_number=function(n){n=n.replace(/_/g,"");if(n.startsWith(".")){if(n.endsWith("j")){return{type:"imaginary",value:$B.prepare_number(n.substr(0,n.length-1))}}else{return{type:"float",value:n+""}}pos=j}else if(n.startsWith("0")&&n!="0"){var num=test_num(n),base;if(num.imaginary){return{type:"imaginary",value:$B.prepare_number(num.value)}}if(num.subtype=="float"){return{type:num.subtype,value:num.value+""}}if(num.subtype===undefined){base=10}else{base={b:2,o:8,x:16}[num.subtype]}if(base!==undefined){return{type:"int",value:[base,num.value]}}}else{var 
num=test_num(n);if(num.subtype=="float"){if(num.imaginary){return{type:"imaginary",value:$B.prepare_number(num.value)}}else{return{type:"float",value:num.value+""}}}else{if(num.imaginary){return{type:"imaginary",value:$B.prepare_number(num.value)}}else{return{type:"int",value:[10,num.value]}}}}}})(__BRYTHON__);(function($B){var _b_=$B.builtins,debug=0;var p={feature_version:$B.version_info[1]};$B.parser_constants={Store:new $B.ast.Store,Load:new $B.ast.Load,Del:new $B.ast.Del,NULL:undefined,alias_ty:$B.ast.alias,keyword_ty:$B.ast.keyword,arguments_ty:$B.ast.arguments,expr_ty:$B.ast.expr,asdl_stmt_seq:Array,asdl_int_seq:Array,asdl_expr_seq:Array,asdl_keyword_seq:Array,asdl_identifier_seq:Array,asdl_pattern_seq:Array,asdl_type_param_seq:Array,AugOperator:$B.ast.AugAssign,IsNot:$B.ast.IsNot,Py_Ellipsis:_b_.Ellipsis,Py_False:false,Py_True:true,Py_None:_b_.None,PyExc_SyntaxError:_b_.SyntaxError,STAR_TARGETS:1,DEL_TARGETS:2,FOR_TARGETS:3,PyBytes_AS_STRING:b=>b};for(var op_type of $B.op_types){for(var key in op_type){var klass_name=op_type[key];$B.parser_constants[klass_name]=new $B.ast[klass_name]}}var NULL=$B.parser_constants.NULL;$B._PyAST={};for(var ast_class in $B.ast_classes){var args=$B.ast_classes[ast_class];if(Array.isArray(args)){continue}args=args.replace(/\*/g,"").replace(/\?/g,"");var arg_names=args.split(",");$B._PyAST[ast_class]=function(ast_name,ast_args){return function(){var _args=Array.from(arguments).slice(0,ast_args.length+1);var EXTRA=_args.pop();var ast_obj=new $B.ast[ast_name](..._args);set_position_from_EXTRA(ast_obj,EXTRA);return ast_obj}}(ast_class,arg_names)}function get_last_token(p){var last_token=$B.last(p.tokens);if(last_token.type=="ENDMARKER"){var src=$B.file_cache[p.filename];if(src){for(var token of $B.tokenizer(src)){if(token.type=="ENDMARKER"){break}if(token.type!="DEDENT"){last_token=token}}}else{last_token=undefined}}p.known_err_token=last_token}var helper_functions={CHECK:function(type,obj){if(Array.isArray(type)){var check;for(var t of type){check=helper_functions.CHECK(t,obj);if(check){return check}}return undefined}if(obj instanceof type){return obj}return undefined},CHECK_VERSION:function(type,version,msg,node){return helper_functions.INVALID_VERSION_CHECK(p,version,msg,node)},CHECK_NULL_ALLOWED:function(type,obj){if(obj!==NULL){if(type instanceof Array){for(var t of type){if(obj instanceof t){return obj}}return}else{return obj instanceof type?obj:undefined}}return obj},INVALID_VERSION_CHECK:function(p,version,msg,node){if(node==NULL){p.error_indicator=1;return NULL}if(p.feature_versiont.length,asdl_seq_GET:(t,i)=>t[i]};$B.helper_functions=helper_functions;function raise_error_known_location(type,filename,lineno,col_offset,end_lineno,end_col_offset,line,message){var exc=type.$factory(message);exc.filename=filename;exc.lineno=lineno;exc.offset=col_offset+1;exc.end_lineno=end_lineno;exc.end_offset=end_col_offset+1;exc.text=line;exc.args[1]=$B.fast_tuple([filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset]);exc.$frame_obj=$B.frame_obj;throw exc}$B.raise_error_known_location=raise_error_known_location;function raise_error_known_token(type,filename,token,message){var exc=type.$factory(message);exc.filename=filename;exc.lineno=token.lineno;exc.offset=token.col_offset+1;exc.end_lineno=token.end_lineno;exc.end_offset=token.end_col_offset+1;exc.text=token.line;exc.args[1]=$B.fast_tuple([filename,exc.lineno,exc.offset,exc.text,exc.end_lineno,exc.end_offset]);exc.$frame_obj=$B.frame_obj;throw 
exc}$B.raise_error_known_token=raise_error_known_token;function set_position_from_EXTRA(ast_obj,EXTRA){for(var key in EXTRA){ast_obj[key]=EXTRA[key]}}var Parser=$B.Parser=function(src,filename,mode){src=src.replace(/\r\n/gm,"\n");var tokenizer=$B.tokenizer(src,filename,mode,this);this.tokenizer=tokenizer;this.tok=tokenizer;this.mark=0;this.fill=0;this.level=0;this.size=1;this.starting_lineno=0;this.starting_col_offset=0;this.tokens=[];this.src=src;this.filename=filename;this.mode=mode;this.memo={};this.arena={a_objects:[]};if(filename){p.filename=filename}};var ignored=[$B.py_tokens.ENCODING,$B.py_tokens.NL,$B.py_tokens.COMMENT];Parser.prototype.read_token=function(){while(true){var next=this.tokenizer.next();if(!next.done){var value=next.value;if(!ignored.includes(value.num_type)){this.tokens.push(value);return value}}else{throw Error("tokenizer exhausted")}}}})(__BRYTHON__);(function($B){var _b_=__BRYTHON__.builtins;const Load=new $B.ast.Load;const NULL=undefined;const ENDMARKER=0,NAME=1,NUMBER=2,STRING=3;function strchr(s,char){return s.includes(char)}function strlen(s){return s.length}function strncmp(a,b){return ab?1:0}function PyOS_strtol(s,end,base){return parseFloat(s)}function PyOS_strtoul(s,end,base){return parseFloat(s)}function PyOS_string_to_double(s,x,y){return parseFloat(s)}function PyFloat_FromDouble(x){return x}const NSTATISTICS=2e3,memo_statistics={},TYPE_IGNORE="TYPE_IGNORE",ERRORTOKEN="ERRORTOKEN",NEWLINE=$B.py_tokens.NEWLINE,DEDENT=$B.py_tokens.DEDENT,Py_single_input="py_single_input",PyPARSE_ALLOW_INCOMPLETE_INPUT=256;function PyUnicode_IS_ASCII(char){return char.codePointAt(0)<128}function set_position_from_token(ast_obj,token){for(var attr of["lineno","col_offset","end_lineno","end_col_offset"]){ast_obj[attr]=token[attr]}}$B._PyPegen.interactive_exit=function(p){if(p.errcode){p.errcode=E_EOF}return NULL};$B._PyPegen.byte_offset_to_character_offset_raw=function(str,col_offset){var len=str.length;if(col_offset>len+1){col_offset=len+1}var text=PyUnicode_DecodeUTF8(str,col_offset,"replace");if(!text){return-1}return text.length};$B._PyPegen.calculate_display_width=function(line,character_offset){var segment=line.substring(0,character_offset);if(!segment){return-1}if(PyUnicode_IS_ASCII(segment)){return character_offset}var width_fn=_PyImport_GetModuleAttrString("unicodedata","east_asian_width");if(!width_fn){return-1}var width=0;var len=segment.length;for(let i=0;i=0;m--){token=p.tokens[m];if(token.num_type!=ENDMARKER&&(token.num_typeDEDENT)){break}}return token};$B._PyPegen.new_identifier=function(p,n){var id=n;if(!PyUnicode_IS_ASCII(id)){var id2;if(!init_normalization(p)){return error()}var form=PyUnicode_InternFromString("NFKC");if(form==NULL){return error()}var args={form:form,id:id};id2=_PyObject_FastCall(p.normalize,args,2);if(!id2){return error()}if(!PyUnicode_Check(id2)){PyErr_Format(PyExc_TypeError,"unicodedata.normalize() must return a string, not "+"%.200s",_PyType_Name(Py_TYPE(id2)));return error()}id=id2}PyUnicode_InternInPlace(id);return id;function error(){p.error_indicator=1;return NULL}};$B._PyPegen.name_from_token=function(p,t){if(t==NULL){return NULL}var s=t.string;if(!s){p.error_indicator=1;return NULL}var res=new $B.ast.Name(s,Load);set_position_from_token(res,t);return res};$B._PyPegen.name_token=function(p){var t=$B._PyPegen.expect_token(p,NAME);return $B._PyPegen.name_from_token(p,t)};$B._PyPegen.string_token=function(p){return $B._PyPegen.expect_token(p,STRING)};$B._PyPegen.soft_keyword_token=function(p){var 
t=$B._PyPegen.expect_token(p,NAME);if(t==NULL){return NULL}var the_token;var size;the_token=t.string;for(let keyword=p.soft_keywords;keyword!=NULL;keyword++){if(strncmp(keyword,the_token,size)==0){return $B._PyPegen.name_from_token(p,t)}}return NULL};function prepared_number_value(prepared){switch(prepared.type){case"float":return $B.fast_float(prepared.value);case"imaginary":return $B.make_complex(0,prepared_number_value(prepared.value));case"int":var res=parseInt(prepared.value[1],prepared.value[0]);if(!Number.isSafeInteger(res)){var base=prepared.value[0],num_str=prepared.value[1];switch(base){case 8:return $B.fast_long_int(BigInt("0x"+num_str));case 10:return $B.fast_long_int(BigInt(num_str));case 16:return $B.fast_long_int(BigInt("0x"+num_str))}}return res}}function parsenumber_raw(s){var prepared=$B.prepare_number(s);return prepared_number_value(prepared)}function parsenumber(s){var dup;var end;var res=NULL;if(strchr(s,"_")==NULL){return parsenumber_raw(s)}dup=s.replace(/_/g,"");res=parsenumber_raw(dup);return res}$B._PyPegen.number_token=function(p){var t=$B._PyPegen.expect_token(p,NUMBER);if(t==NULL){return NULL}var num_raw=t.string;if(num_raw==NULL){p.error_indicator=1;return NULL}if(p.feature_version<6&&strchr(num_raw,"_")!=NULL){p.error_indicator=1;return RAISE_SYNTAX_ERROR("Underscores in numeric literals are only supported "+"in Python 3.6 and greater")}var c=parsenumber(num_raw);if(c==NULL){p.error_indicator=1;var tstate=_PyThreadState_GET();if(tstate.current_exception!=NULL&&Py_TYPE(tstate.current_exception)==PyExc_ValueError){var exc=PyErr_GetRaisedException();RAISE_ERROR_KNOWN_LOCATION(p,PyExc_SyntaxError,t.lineno,-1,t.end_lineno,-1,"%S - Consider hexadecimal for huge integer literals "+"to avoid decimal conversion limits.",exc)}return NULL}var res=new $B.ast.Constant(c,NULL);set_position_from_token(res,t);return res};function bad_single_statement(p){var cur=p.tok.cur;var c=cur;var pos=0;for(;;){while(c==" "||c=="\t"||c=="\n"||c=="\f"){c=cur[pos++]}if(!c){return 0}if(c!="#"){return 1}while(c&&c!="\n"){c=cur[pos++]}}}function compute_parser_flags(flags){var parser_flags=0;if(!flags){return 0}if(flags.cf_flags&PyCF_DONT_IMPLY_DEDENT){parser_flags|=PyPARSE_DONT_IMPLY_DEDENT}if(flags.cf_flags&PyCF_IGNORE_COOKIE){parser_flags|=PyPARSE_IGNORE_COOKIE}if(flags.cf_flags&CO_FUTURE_BARRY_AS_BDFL){parser_flags|=PyPARSE_BARRY_AS_BDFL}if(flags.cf_flags&PyCF_TYPE_COMMENTS){parser_flags|=PyPARSE_TYPE_COMMENTS}if(flags.cf_flags&PyCF_ONLY_AST&&flags.cf_feature_version<7){parser_flags|=PyPARSE_ASYNC_HACKS}if(flags.cf_flags&PyCF_ALLOW_INCOMPLETE_INPUT){parser_flags|=PyPARSE_ALLOW_INCOMPLETE_INPUT}return parser_flags}$B._PyPegen.Parser_New=function(tok,start_rule,flags,feature_version,errcode,arena){var p={};if(p==NULL){return PyErr_NoMemory()}tok.type_comments=(flags&PyPARSE_TYPE_COMMENTS)>0;tok.async_hacks=(flags&PyPARSE_ASYNC_HACKS)>0;p.tok=tok;p.keywords=NULL;p.n_keyword_lists=-1;p.soft_keywords=NULL;p.tokens=[];if(!p.tokens){PyMem_Free(p);return PyErr_NoMemory()}p.tokens[0]=PyMem_Calloc(1,sizeof(Token));p.mark=0;p.fill=0;p.size=1;p.errcode=errcode;p.arena=arena;p.start_rule=start_rule;p.parsing_started=0;p.normalize=NULL;p.error_indicator=0;p.starting_lineno=0;p.starting_col_offset=0;p.flags=flags;p.feature_version=feature_version;p.known_err_token=NULL;p.level=0;p.call_invalid_rules=0;p.debug=_Py_GetConfig().parser_debug;return p};$B._PyPegen.Parser_Free=function(p){};function reset_parser_state_for_error_pass(p){for(let i=0;i0){var 
brace=$B.last(p.braces),err_lineno,msg;if("([{".includes(brace.char)){err_lineno=brace.line_num}else{if(p.braces.length>1){err_lineno=p.braces[p.braces.length-2].line_num}else{err_lineno=brace.line_num}}if(p.tokens.length==0||$B.last(p.tokens).lineno>=err_lineno){if("([{".includes(brace.char)){msg=`'${brace.char}' was never closed`}else if(p.braces.length>1){var closing=brace.char,opening=p.braces[p.braces.length-2].char;msg=`closing parenthesis '${closing}' does not match `+`opening parenthesis '${opening}'`}else{msg=`unmatched '${brace.char}'`}$B.raise_error_known_location(_b_.SyntaxError,p.filename,brace.line_num,brace.pos-brace.line_start,brace.line_num,brace.pos-brace.line_start+1,brace.line,msg)}}};$B._PyPegen.set_syntax_error=function(p,last_token){if(p.fill==0){$B.helper_functions.RAISE_SYNTAX_ERROR(p,"error at start before reading any input")}$B._PyPegen.tokenize_full_source_to_check_for_errors(p);if(last_token.num_type==ERRORTOKEN&&p.tok.done==E_EOF){if(p.tok.level){raise_unclosed_parentheses_error(p)}else{$B.helper_functions.RAISE_SYNTAX_ERROR(p,"unexpected EOF while parsing")}return}if(last_token.num_type==INDENT||last_token.num_type==DEDENT){$B.helper_functions.RAISE_INDENTATION_ERROR(p,last_token.num_type==INDENT?"unexpected indent":"unexpected unindent");return}$B._PyPegen.tokenize_full_source_to_check_for_errors(p);$B.raise_error_known_token(_b_.SyntaxError,p.filename,last_token,"invalid syntax")};$B._PyPegen.run_parser=function(p){var res=$B._PyPegen.parse(p);if(res==NULL){if(p.flags&PyPARSE_ALLOW_INCOMPLETE_INPUT&&_is_end_of_source(p)){PyErr_Clear();return RAISE_SYNTAX_ERROR("incomplete input")}var last_token=p.tokens[p.fill-1];reset_parser_state_for_error_pass(p);try{$B._PyPegen.parse(p)}catch(err){last_token=p.tokens[p.fill-1];$B._PyPegen.tokenize_full_source_to_check_for_errors(p);throw err}$B._PyPegen.set_syntax_error(p,last_token)}if(p.start_rule==Py_single_input&&bad_single_statement(p)){p.tok.done=E_BADSINGLE;return RAISE_SYNTAX_ERROR("multiple statements found while compiling a single statement")}return res};$B._PyPegen.run_parser_from_file_pointer=function(fp,start_rule,filename_ob,enc,ps1,ps2,flags,errcode,arena){var tok=_PyTokenizer_FromFile(fp,enc,ps1,ps2);if(tok==NULL){if(PyErr_Occurred()){_PyPegen_raise_tokenizer_init_error(filename_ob);return NULL}return NULL}if(!tok.fp||ps1!=NULL||ps2!=NULL||PyUnicode_CompareWithASCIIString(filename_ob,"")==0){tok.fp_interactive=1}tok.filename=Py_NewRef(filename_ob);var result=NULL;var parser_flags=compute_parser_flags(flags);var p=$B._PyPegen.Parser_New(tok,start_rule,parser_flags,PY_MINOR_VERSION,errcode,arena);if(p==NULL){return error()}result=_PyPegen_run_parser(p);_PyPegen_Parser_Free(p);function error(){_PyTokenizer_Free(tok);return result}};$B._PyPegen.run_parser_from_string=function(str,start_rule,filename_ob,flags,arena){var exec_input=start_rule==Py_file_input;var tok;if(flags!=NULL&&flags.cf_flags&PyCF_IGNORE_COOKIE){tok=_PyTokenizer_FromUTF8(str,exec_input,0)}else{tok=_PyTokenizer_FromString(str,exec_input,0)}if(tok==NULL){if(PyErr_Occurred()){_PyPegen_raise_tokenizer_init_error(filename_ob)}return NULL}tok.filename=Py_NewRef(filename_ob);var result=NULL;var parser_flags=compute_parser_flags(flags);var feature_version=flags&&flags.cf_flags&PyCF_ONLY_AST?flags.cf_feature_version:PY_MINOR_VERSION;var p=$B._PyPegen.Parser_New(tok,start_rule,parser_flags,feature_version,NULL,arena);if(p==NULL){return error()}result=_PyPegen_run_parser(p);_PyPegen_Parser_Free(p);function error(){return 
result}};$B.PyPegen={first_item:function(a,type){return a[0]},last_item:function(a,ptype){return a[a.length-1]}}})(__BRYTHON__);function fprintf(dest,format){var args=Array.from(arguments).slice(2);for(var arg of args){format=format.replace(/%\*?[a-z]/,arg)}return format}const stderr=null;function D(x){console.log(x)}function UNUSED(){}function strcmp(x,y){return x==y?0:x>": "rshift", + "+": "add", "-": "sub", "*": "mul", "/": "truediv", "%": "mod", + "@": "matmul" // PEP 465 + }, + augmented_assigns: { + "//=": "ifloordiv", ">>=": "irshift", "<<=": "ilshift", "**=": "ipow", + "+=": "iadd","-=": "isub", "*=": "imul", "/=": "itruediv", + "%=": "imod", "&=": "iand","|=": "ior","^=": "ixor", "@=": "imatmul" + }, + binary: { + "&": "and", "|": "or", "~": "invert", "^": "xor" + }, + comparisons: { + "<": "lt", ">": "gt", "<=": "le", ">=": "ge", "==": "eq", "!=": "ne" + }, + boolean: { + "or": "or", "and": "and", "in": "in", "not": "not", "is": "is" + }, + subset: function(){ + var res = {}, + keys = [] + if(arguments[0] == "all"){ + keys = Object.keys($B.op2method) + keys.splice(keys.indexOf("subset"), 1) + }else{ + for(var arg of arguments){ + keys.push(arg) + } + } + for(var key of keys){ + var ops = $B.op2method[key] + if(ops === undefined){ + throw Error(key) + } + for(var attr in ops){ + res[attr] = ops[attr] + } + } + return res + } +} + +$B.method_to_op = {} +for(var category in $B.op2method){ + for(var op in $B.op2method[category]){ + var method = `__${$B.op2method[category][op]}__` + $B.method_to_op[method] = op + } +} + // special repr() for some codepoints, used in py_string.js and py_bytes.js $B.special_string_repr = { diff --git a/www/src/brython_stdlib.js b/www/src/brython_stdlib.js index d2a3e0ea9..7e2a04876 100644 --- a/www/src/brython_stdlib.js +++ b/www/src/brython_stdlib.js @@ -1,3 +1,3 @@ __BRYTHON__.use_VFS = true; -var scripts = {"$timestamp": 1707022507807, "pyexpat": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nconst XML_PARAM_ENTITY_PARSING_NEVER = 0,\n XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE = 1,\n XML_PARAM_ENTITY_PARSING_ALWAYS = 2\n\nconst xml_entities = {\n '>': '>',\n '<': '<',\n '"': '\"',\n ''': \"'\",\n '&': '&'\n }\n\nvar xmlparser = $B.make_class('xmlparser',\n function(encoding, namespace_separator, intern){\n return {\n __class__: xmlparser,\n encoding,\n namespace_separator,\n intern,\n buffer_text: false,\n _buffer: '',\n _state: 'data',\n _data_buffer: '',\n _initialized: false,\n _maybe_entity: null,\n _element_stack: [],\n _chunk_size: 2 << 14\n }\n }\n)\n\nxmlparser._handle_stack = function(self){\n if(! (self._element instanceof ELEMENT)){\n return\n }\n if(self._element.name === undefined){\n console.log('name undefined', self._element)\n alert()\n }\n if(self._element.is_end){\n if(self._element_stack.length == 0){\n raise_error(self, 'no opening tag for closing ' + self._element.name)\n }else{\n var expected = $B.last(self._element_stack)\n if(expected !== self._element.name){\n console.log('error handle stack, stack', self._element_stack, self._element)\n raise_error(self, `tag mismatch, ` +\n `expected closing tag ${expected}, ` +\n `got: ${self._element.name}`)\n }\n self._element_stack.pop()\n if(self._element_stack.length == 0){\n flush_char_data(self)\n }\n }\n }else if(! 
self._element.self_closing){\n self._element_stack.push(self._element.name)\n }\n}\n\nxmlparser.CharacterDataHandler = _b_.None\n\nxmlparser.CommentHandler = _b_.None\n\nxmlparser.EndElementHandler = _b_.None\n\nfunction check_entity(parser, pos){\n var entity = parser._maybe_entity\n var decimal = /&#(\\d+);$/.exec(entity)\n if(decimal){\n return _b_.chr(parseInt(decimal[1]))\n }\n var hexa = /&#x(\\d+);$/.exec(entity)\n if(hexa){\n return _b_.chr(parseInt(hexa[1], 16))\n }\n var xml_entity = xml_entities[entity]\n if(xml_entity){\n return xml_entity\n }\n raise_error_known_position(parser, `unknown entity: \"${entity}\"`, pos)\n}\n\nfunction flush_char_data(parser){\n var buf = parser._data_buffer\n if(buf.length > 0){\n let handler = parser._handlers.CharacterDataHandler\n if(handler !== _b_.None){\n handler(buf)\n }\n }\n parser._data_buffer = ''\n}\n\nfunction flush_final_char_data(parser){\n var buf = parser._data_buffer\n for(var i = 0; i < buf.length; i++){\n if(! buf[i].match(/\\s/)){\n var pos = parser._pos - buf.length + i - 1\n var msg = `junk after document element: line 1, column ${pos}`\n raise_error(parser, msg)\n }\n }\n}\n\nconst encoding_re = /<\\?xml .*encoding\\s*=\\s*\"(.*?)\"/\n\nconst handler_names = [\n 'CharacterDataHandler',\n 'CommentHandler',\n 'StartElementHandler',\n 'EndElementHandler'\n ]\n\nxmlparser.Parse = function(){\n var $ = $B.args('Parse', 3,\n {self: null, data: null, isfinal: null},\n ['self', 'data', 'isfinal'], arguments,\n {}, null, null),\n self = $.self,\n data = $.data,\n isfinal = $.isfinal,\n decoder,\n array\n if(self.finished){\n throw Error('parsing finished')\n }\n if(_b_.isinstance(data, _b_.bytes)){\n if(self.encoding === _b_.None){\n // try getting encoding from prolog\n decoder = new TextDecoder('iso-8859-1')\n array = new Uint8Array(data.source.slice(0, 200))\n var head = decoder.decode(array)\n var mo = encoding_re.exec(head)\n if(mo){\n self.encoding = mo[1]\n }else{\n self.encoding = 'utf-8' // default\n }\n }\n // decode bytes\n decoder = new TextDecoder(self.encoding)\n array = new Uint8Array(data.source)\n data = decoder.decode(array)\n }\n if(! self._initialized){\n if(data[0] != '<'){\n throw Error(\"XML or text declaration not at start of entity\")\n }\n self._initialized = true\n }\n self._buffer = data\n self._buffer_length = _b_.len(data)\n self._pos = 0\n\n var handlers = self._handlers = {}\n for(var handler_name of handler_names){\n let handler = $B.$getattr(self, handler_name)\n if(handler !== _b_.None){\n handlers[handler_name] = $B.$call(handler)\n }else{\n handlers[handler_name] = _b_.None\n }\n }\n\n for(var token of xmlparser.xml_tokenizer(self)){\n if(token instanceof ELEMENT){\n if(! token.is_declaration && ! 
token.is_end){\n if(handlers.StartElementHandler !== _b_.None){\n flush_char_data(self)\n handlers.StartElementHandler(token.name, token.attrs)\n }\n if(token.self_closing &&\n handlers.EndElementHandler !== _b_.None){\n handlers.EndElementHandler(token.name)\n }\n }else if(token.is_end &&\n handlers.EndElementHandler !== _b_.None){\n flush_char_data(self)\n handlers.EndElementHandler(token.name)\n }\n }else if(token instanceof DATA &&\n handlers.CharacterDataHandler !== _b_.None){\n handlers.CharacterDataHandler(token.value)\n }else if(token instanceof COMMENT &&\n handlers.CommentHandler !== _b_.None){\n flush_char_data(self)\n handlers.CommentHandler(token.value)\n }\n }\n flush_final_char_data(self)\n if(isfinal){\n self.finished = true\n }\n}\n\nxmlparser.ParseFile = function(){\n var $ = $B.args('ParseFile', 2,\n {self: null, file: null},\n ['self', 'file'], arguments,\n {}, null, null),\n self = $.self,\n file = $.file\n var reader = $B.$call($B.$getattr(file, 'read'))\n while(true){\n var data = reader(self._chunk_size)\n if(data.length == 0){\n return xmlparser.Parse(self, data, true)\n }else{\n xmlparser.Parse(self, data, false)\n }\n }\n}\n\nxmlparser.SetBase = function(self, base){\n self._base = base\n return _b_.None\n}\n\nxmlparser.SetParamEntityParsing = function(self, peParsing){\n self._peParsing = peParsing\n return peParsing\n}\n\nxmlparser.StartElementHandler = _b_.None\n\nxmlparser.xml_tokenizer = function*(self){\n // convert bytes to string\n while(self._pos < self._buffer_length){\n\n var char = self._buffer[self._pos]\n if(self._state == 'data' && char == '<'){\n self._maybe_entity = null\n self._state = 'element'\n self._tag_state = 'tag_name'\n self._element = new ELEMENT(self)\n self._pos++\n }else if(self._state == 'data'){\n if(char == '\\n'){\n if(! 
self.buffer_text){\n flush_char_data(self)\n self._data_buffer = char\n flush_char_data(self)\n }else{\n self._data_buffer += char\n }\n self._maybe_entity = null\n }else{\n self._data_buffer += char\n if(char == '&'){\n // maybe start entity\n self._maybe_entity = char\n }else if(self._maybe_entity !== null){\n self._maybe_entity += char\n if(char == ';'){\n var entity_pos = self._pos - self._maybe_entity.length + 1\n var replacement = check_entity(self, entity_pos)\n self._data_buffer = self._data_buffer.replace(\n self._maybe_entity, replacement)\n self._maybe_entity = null\n }\n }\n }\n self._pos++\n }else if(self._state == 'element' &&\n self._element.expect == 'name_start'\n && char == '!'){\n self._element = new DTD(self)\n self._pos++\n }else if(self._state == 'element'){\n self._element = self._element.feed(char)\n if(self._element === undefined){\n console.log('undefined after char', char,\n self._buffer.substring(self._pos - 10, self._pos + 10))\n }\n if(self._element.closed){\n xmlparser._handle_stack(self)\n if(self._element instanceof DOCTYPE){\n if(self._element.declarations){\n var parser = xmlparser.$factory()\n xmlparser.Parse(parser,\n self._element.declarations.trim(), \n true)\n console.log('parser', parser)\n }\n }\n yield self._element\n self._state = 'data'\n // self._data_buffer = ''\n }else if(self._element.is_comment){\n self._state = 'comment'\n self._comment = new COMMENT(self)\n }\n self._pos++\n }else if(self._state == 'comment'){\n self._comment.feed(char)\n if(self._comment.closed){\n yield self._comment\n self._state = 'data'\n self._data_buffer = ''\n }\n self._pos++\n }else{\n self._pos++\n }\n }\n}\n\n$B.set_func_names(xmlparser, 'expat')\n\nfunction raise_error_known_position(parser, message, pos){\n message += ' at position ' + pos\n var ix = pos\n while(ix >= 0 && parser._buffer[ix] !== '\\n'){\n ix--\n }\n message += '\\n' + parser._buffer.substring(ix, pos + 1)\n throw error.$factory(message)\n}\n\nfunction raise_error(parser, message){\n throw error.$factory(message)\n}\n\nvar error = $B.make_class(\"error\",\n function(message){\n return {\n __class__: error,\n msg: message,\n args: $B.fast_tuple([message]),\n __cause__: _b_.None,\n __context__: _b_.None,\n __suppress_context__: false\n }\n })\nerror.__bases__ = [_b_.Exception, _b_.object]\nerror.__mro__ = [_b_.Exception, _b_.BaseException, _b_.object]\n\n$B.set_func_names(error, \"expat\")\n\nfunction DOCTYPE(parser){\n this.parser = parser\n this.expect = 'element_start'\n}\n\nDOCTYPE.prototype.feed = function(char){\n if(this.expect == 'element_start'){\n if(is_id_start(char)){\n this.root_element = char\n this.expect = 'element_continue'\n }else if(! is_whitespace(char)){\n throw Error('expected element start, got: ' + char)\n }\n }else if(this.expect == 'element_continue'){\n if(is_id_continue(char)){\n this.root_element += char\n }else{\n if(is_whitespace(char)){\n this.expect = 'rest'\n }else{\n throw Error('expected whitespace after root element, got: ' + char)\n }\n }\n }else if(this.expect == 'rest'){\n if(! 
is_whitespace(char)){\n if(is_id_start(char)){\n // external DTD\n this.type = 'external'\n this.decl = char\n this.expect = 'decl_continue'\n }else if(char == '['){\n this.type = 'internal'\n this.expect = ']'\n this.declarations = ''\n }else{\n throw Error('unexpected in DOCTYPE: ' + char)\n }\n }\n }else if(this.expect == 'decl_continue'){\n if(is_id_continue(char)){\n this.decl += char\n }else{\n if(is_whitespace(char)){\n this.expect = 'string_start'\n this.strings = []\n }else{\n throw Error('unexpected after declaration: ' + char)\n }\n }\n }else if(this.expect == 'string_start'){\n if(char == '['){\n this.type = 'mixed'\n this.declarations = ''\n this.expect = ']'\n }else if(! is_whitespace(char)){\n if(char == '\"' || char == \"'\"){\n this.quote = char\n this.string = ''\n this.expect = 'string_end'\n }else{\n raise_error(this.parser, 'expected quote, got: ' + char)\n }\n }\n }else if(this.expect == 'string_end'){\n if(char == this.quote){\n this.strings.push(this.string)\n if(this.strings.length == 1){\n this.fpi = this.strings[0]\n this.expect = 'string_start'\n this.string = ''\n }else{\n this.url = this.strings[1]\n this.expect = '>'\n }\n }else{\n this.string += char\n }\n }else if(this.expect == '>'){\n if(! is_whitespace(char)){\n if(char == '>'){\n this.closed = true\n }else{\n throw Error('expected >, ggot: ' + char)\n }\n }\n }else if(this.expect == ']'){\n if(char == ']'){\n this.expect = '>'\n }else{\n this.declarations += char\n }\n }else{\n throw Error('wrong expect: ' + this.expect)\n }\n return this\n}\n\nfunction CDATA(){\n this.content = ''\n this.expect = ']'\n this.level = 1\n}\n\nCDATA.prototype.feed = function(char){\n switch(this.expect){\n case ']':\n if(char == '>'){\n throw Error('closed without closing ]')\n }else if(char == '['){\n this.level++\n }else if(char == ']'){\n if(this.level == 1){\n this.expect = '>'\n }else{\n this.level--\n }\n }else{\n this.content += char\n }\n break\n case '>':\n if(char != '>'){\n console.log('-- error', this, 'char', char)\n throw Error('expected \">\", got: ' + char)\n }\n this.closed = true\n break\n }\n return this\n}\n\nfunction DTD(parser){\n this.parser = parser\n this.expect = 'name_start'\n this.items = []\n}\n\nDTD.prototype.feed = function(char){\n if(this.expect == 'name_start'){\n if(is_id_start(char)){\n this.name = char\n this.expect = 'name_continue'\n }else if(char == '-'){\n this.expect = '-' // maybe comment start\n }else if(char == '['){\n return new CDATA()\n }else{\n throw Error('expected name, got ' + char)\n }\n }else if(this.expect == 'name_continue'){\n if(is_id_continue(char)){\n this.name += char\n }else{\n console.log('DD, name', this.name)\n if(this.name == 'DOCTYPE'){\n return new DOCTYPE(this.parser)\n }else if(this.name == 'ENTITY'){\n return new ENTITY(this.parser)\n }\n if(char == '>'){\n this.closed = true\n }else{\n this.expect == 'any'\n }\n }\n }else if(this.expect == '-'){\n if(char == '-'){\n // comment\n this.is_comment = true\n }else{\n throw Error('expected -, got: ' + char)\n }\n }else{\n if(char == '>'){\n this.closed = true\n }else{\n this.items.push(char)\n }\n }\n return this\n}\n\nDTD.prototype.toString = function(){\n var res = ` 0){\n res += ' '\n var items = this.items.map(x => x.toString())\n res += items.join(' ')\n }\n return res + '>'\n}\n\nfunction COMMENT(parser){\n this.parser = parser\n this.value = ''\n this.expect = '-'\n}\n\nCOMMENT.prototype.feed = function(char){\n if(this.expect == '-'){\n if(char == '-'){\n this.expect = '--'\n }else{\n this.value += 
char\n }\n }else if(this.expect == '--'){\n if(char == '-'){\n this.expect = '>'\n }else{\n this.value += '-' + char\n this.expect = '-'\n }\n }else if(this.expect == '>'){\n if(char == '>'){\n this.closed = true\n }else{\n throw Error('comment, expected >, got: ' + char)\n }\n }\n}\n\nfunction ELEMENT(parser) {\n this.parser = parser\n this.expect = 'name_start'\n this.attrs = $B.empty_dict()\n}\n\nELEMENT.prototype.add_attribute_name = function(attr_name){\n if(_b_.dict.$contains(this.attrs, attr_name)){\n throw Error(`duplicate attribute name: ${attr_name}`)\n }\n _b_.dict.$setitem(this.attrs, attr_name, _b_.None)\n}\n\nELEMENT.prototype.set_attribute_value = function(value){\n _b_.dict.$setitem(this.attrs, this.attr_name, value)\n}\n\nELEMENT.prototype.feed = function(item){\n if(this.expect == 'name_start'){\n if(item == '?'){\n if(this.is_declaration){\n throw Error('already got ?')\n }\n this.is_declaration = true\n }else if(item == '/'){\n if(this.is_end){\n throw Error('already got /')\n }\n this.is_end = true\n }else if(is_id_start(item)){\n this.name = item\n this.expect = 'name_continue'\n }\n }else if(this.expect == 'name_continue'){\n if(is_id_continue(item)){\n this.name += item\n }else{\n // end of element name\n if(this.is_declaration){\n if(this.name == 'xml'){\n this.is_xml_header = true\n }else{\n return new PROCESSING_INSTRUCTION(this.parser, this.name)\n }\n }\n if(is_whitespace(item)){\n this.expect = 'attr_name_start'\n }else if(item == '>'){\n this.closed = true\n }else if(item == '/'){\n this.self_closing = true\n this.expect = '>'\n }else{\n throw Error('unexpected at end of element name: ' + item)\n }\n }\n }else if(this.expect == 'attr_name_start'){\n if(item == '/'){\n this.self_closing = true\n }else if(item == '>'){\n this.closed = true\n }else if(is_id_start(item)){\n this.attr_name = item\n this.expect = 'attr_name_continue'\n }else if(item == '?' && this.is_declaration){\n this.expect = '>'\n }else if(! is_whitespace(item)){\n throw Error('expected attribute name, got: ' + item)\n }\n }else if(this.expect == 'attr_name_continue'){\n if(is_id_continue(item)){\n this.attr_name += item\n }else if(item == '='){\n this.add_attribute_name(this.attr_name)\n this.expect = 'attr_value_start'\n this.attr_value = ''\n }else if(is_whitespace(item)){\n this.add_attribute_name(this.attr_name)\n this.expect = '='\n }else if(item == '>'){\n this.add_attribute_name(this.attr_name)\n this.closed = true\n }else{\n throw Error('unexpected character in attribute name: ' + item)\n }\n }else if(this.expect == '='){\n if(item == '='){\n this.expect = 'attr_value_start'\n }else if(! is_whitespace(item)){\n throw Error('expected =, got: ' + item)\n }\n }else if(this.expect == 'attr_value'){\n if(item == '='){\n this.expect = 'attr_value_start'\n this.attr_value = ''\n }else if(item == '>'){\n this.closed = true\n }else if(is_id_start(item)){\n this.attr_name = item\n this.expect = 'attr_name_continue'\n }else if(! is_whitespace(item)){\n throw Error('expected attribute value or name, got: ' + item)\n }\n }else if(this.expect == 'attr_value_start'){\n if(item == '\"' || item == \"'\"){\n this.expect = 'quote'\n this.quote = item\n this.attr_value = ''\n }else if(! 
is_whitespace(item)){\n throw Error('unexpect attribute value start: ' + item)\n }\n }else if(this.expect == \"quote\"){\n if(item == this.quote){\n this.set_attribute_value(this.attr_value)\n this.expect = 'attr_name_start'\n }else{\n this.attr_value += item\n }\n }else if(this.expect == '>'){\n if(item == '>'){\n this.closed = true\n }else{\n throw Error('expected >, got: ' + item)\n }\n }else if(this.expect == 'attr_name'){\n if(item instanceof Name){\n if(_b_.dict.__contains__(this.attrs, item.value)){\n throw Error('duplicate value ' + item.value)\n }\n _b_.dict.$setitem(this.attrs, item.value, _b_.None)\n this.last_attr = item.value\n }else if(item.value == '?' && this.is_declaration){\n if(this.question_mark){\n throw Error('already ?')\n }\n this.question_mark = true\n }else if(item == END){\n if(this.is_declaration && ! this.question_mark){\n throw Error('missing ')\n }\n }else if(item instanceof Punctuation && item.value == '/'){\n this.no_end = true\n this.expect = END\n }else{\n throw Error('expected attribute name, got ' + item)\n }\n }else if(this.expect == 'attr_value'){\n _b_.dict.$setitem(this.attrs, this.last_attr, item)\n this.expect = 'attr_name'\n }else if(this.expect == END){\n // after \"/\"\n if(item != END){\n throw Error('nothing after /')\n }\n }\n return this\n}\n\nELEMENT.prototype.toString = function() {\n var res = `<`\n res += this.is_end ? '/' : ''\n res += this.name\n if(this.attrs.length > 0){\n res += ' '\n }\n var attrs = []\n for(var item of _b_.dict.$iter_items(this.attrs)){\n console.log('item', item)\n attrs.push(`${item.key}: ${item.value.toString()}`)\n }\n res += attrs.join(' ')\n if(this.no_end){\n res += '/'\n }\n return res + '>'\n}\n\nfunction PROCESSING_INSTRUCTION(parser, name){\n this.parser = parser\n this.name = name\n this.expect = '?'\n this.content = ''\n}\n\nPROCESSING_INSTRUCTION.prototype.feed = function(char){\n // capture everything until the sequence ?>\n if(this.expect == '?'){\n if(char == '?'){\n this.expect = '>'\n }else{\n this.content += char\n }\n }else if(this.expect == '>'){\n if(char == '>'){\n this.closed = true\n }else{\n this.content += '?' + char\n this.expect = '-'\n }\n }\n return this\n}\n\nfunction ATTR(name){\n this.name = name\n}\n\nATTR.prototype.toString = function(){\n var res = this.name\n if(this.hasOwnProperty('value')){\n res += '=' + this.value\n }\n return res\n}\n\nfunction DATA(value) {\n this.value = value\n}\n\nDATA.prototype.toString = function() {\n return `${this.value}`\n}\n\nvar START = 'START'\nvar END = 'END'\n\n\nfunction Name(value){\n this.value = value\n}\n\nName.prototype.toString = function(){\n return this.value\n}\n\nfunction Punctuation(value){\n this.value = value\n}\n\nfunction String(quote, value){\n this.quote = quote\n this.value = value\n}\n\nString.prototype.toString = function(){\n return this.quote + this.value + this.quote\n}\n\nconst punctuations = '!?/'\n\nfunction open(url){\n var xhr = new XMLHttpRequest()\n xhr.open('GET', url, false)\n xhr.onreadystatechange = function(ev){\n if(this.readyState == 4){\n process(this.responseText)\n }\n }\n xhr.send()\n}\n\nfunction create_parser(){\n var $ = $B.args('ParserCreate', 3,\n {encoding: null, namespace_separator: null, intern: null},\n ['encoding', 'namespace_separator', 'intern'], arguments,\n {encoding: _b_.None, namespace_separator: _b_.None, intern: _b_.None},\n null, null),\n encoding = $.encoding,\n ns_sep = $.namespace_separator,\n intern = $.intern\n if(encoding !== _b_.None && ! 
_b_.isinstance(encoding, _b_.str)){\n throw _b_.TypeError.$factory(\n `ParserCreate() argument 'encoding' must be ` +\n `str or None, not ${$B.class_name(encoding)}`)\n }\n if(ns_sep !== _b_.None){\n if(! _b_.isinstance(ns_sep, _b_.str)){\n throw _b_.TypeError.$factory(\n `ParserCreate() argument 'namespace_separator' must be ` +\n `str or None, not ${$B.class_name(ns_sep)}`)\n }\n if(ns_sep.length != 1){\n throw _b_.ValueError.$factory(\"namespace_separator must be at \" +\n \"most one character, omitted, or None\")\n }\n }\n if(intern === _b_.None){\n intern = $B.empty_dict()\n }else if(! _b_.isinstance(intern, _b_.dict)){\n throw _b_.TypeError.$factory('intern must be a dictionary')\n }\n return xmlparser.$factory(encoding, ns_sep, intern)\n}\n\nfunction display(text){\n report.value += text + '\\n'\n}\n\nfunction process(src){\n var indent = 0\n for(var token of xml_tokenizer(src)){\n if(indent > 50){\n break\n }\n var head = ' '.repeat(indent)\n if(token instanceof DATA){\n display(head + ' ' + token.toString())\n }else if(token instanceof ELEMENT){\n if(token.is_end){\n indent--\n }\n head = ' '.repeat(indent)\n display(head + token.toString())\n if(token.is_end || token.self_closing || token.is_declaration){\n //\n }else{\n indent++\n }\n }else if(token instanceof DECLARATION){\n display(head + token.toString())\n }else{\n console.log(head + 'token', token, token.toString())\n }\n }\n}\n\nfunction is_id_start(char){\n return char.match(/\\p{L}/u) || char == \"_\"\n}\n\nfunction is_id_continue(char){\n return char.match(/\\p{L}/u) || \"-_:\".includes(char) || char.match(/\\d/)\n}\n\nfunction is_whitespace(s){\n for(let char of s){\n if(! ' \\n\\r\\t'.includes(char)){\n return false\n }\n }\n return s.length > 0\n}\n\nvar model = 'model',\n errors = 'errors'\n\n$B.addToImported('pyexpat',\n {\n create_parser,\n ParserCreate: create_parser,\n model,\n error,\n errors,\n XML_PARAM_ENTITY_PARSING_NEVER,\n XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE,\n XML_PARAM_ENTITY_PARSING_ALWAYS\n }\n)\n\n})(__BRYTHON__)"], "_svg": [".js", "// creation of a SVG element\n(function($B){\n\nvar _b_ = $B.builtins\nvar TagSum = $B.TagSum // defined in py_dom.js\n\nvar $svgNS = \"http://www.w3.org/2000/svg\"\nvar $xlinkNS = \"http://www.w3.org/1999/xlink\"\n\nfunction makeTagDict(tagName){\n // return the dictionary for the class associated with tagName\n var dict = $B.make_class(tagName)\n\n dict.__init__ = function(){\n var $ns = $B.args('__init__', 1, {self: null}, ['self'],\n arguments, {}, 'args', 'kw'),\n self = $ns['self'],\n args = $ns['args']\n if(args.length == 1){\n var first = args[0]\n if($B.$isinstance(first, [_b_.str, _b_.int, _b_.float])){\n self.appendChild(document.createTextNode(_b_.str.$factory(first)))\n }else if(first.__class__ === TagSum){\n for(var i = 0, len = first.children.length; i < len; i++){\n self.appendChild(first.children[i].elt)\n }\n }else{ // argument is another DOMNode instance\n try{self.appendChild(first.elt)}\n catch(err){throw _b_.ValueError.$factory('wrong element ' + first)}\n }\n }\n\n // attributes\n var items = _b_.list.$factory(_b_.dict.items($ns['kw']))\n for(var item of _b_.dict.$iter_items($ns.kw)){\n // keyword arguments\n var arg = item.key,\n value = $B.py_immutable_to_js(item.value)\n if(arg.toLowerCase().substr(0,2) == \"on\"){\n // Event binding passed as argument \"onclick\", \"onfocus\"...\n // Better use method bind of DOMNode objects\n $B.DOMNode.bind(self,\n arg.toLowerCase().substr(2),\n value)\n }else if(arg.toLowerCase() == \"style\"){\n 
$B.DOMNode.set_style(self, value)\n }else if(arg.toLowerCase().indexOf(\"href\") !== -1){ // xlink:href\n self.setAttributeNS( \"http://www.w3.org/1999/xlink\",\n \"href\",value)\n }else{\n if(value !== false){\n // option.selected=false sets it to true :-)\n try{\n arg = arg.replace('_', '-')\n self.setAttributeNS(null, arg, value)\n }catch(err){\n throw _b_.ValueError.$factory(\"can't set attribute \" + arg)\n }\n }\n }\n }\n }\n\n dict.__mro__ = [$B.DOMNode, $B.builtins.object]\n\n dict.__new__ = function(cls){\n var res = $B.DOMNode.$factory(document.createElementNS($svgNS, tagName))\n res.__class__ = cls\n return res\n }\n\n dict.$factory = function(){\n var res = $B.DOMNode.$factory(\n document.createElementNS($svgNS, tagName))\n res.__class__ = dict\n // apply __init__\n dict.__init__(res, ...arguments)\n return res\n }\n\n $B.set_func_names(dict, \"browser.svg\")\n\n return dict\n}\n\n\n// SVG\nvar $svg_tags = ['a',\n'altGlyph',\n'altGlyphDef',\n'altGlyphItem',\n'animate',\n'animateColor',\n'animateMotion',\n'animateTransform',\n'circle',\n'clipPath',\n'color_profile', // instead of color-profile\n'cursor',\n'defs',\n'desc',\n'ellipse',\n'feBlend',\n'foreignObject', //patch to enable foreign objects\n'g',\n'image',\n'line',\n'linearGradient',\n'marker',\n'mask',\n'path',\n'pattern',\n'polygon',\n'polyline',\n'radialGradient',\n'rect',\n'set',\n'stop',\n'svg',\n'text',\n'tref',\n'tspan',\n'use']\n\n// create classes\nvar obj = new Object()\nvar dicts = {}\nfor(var i = 0, len = $svg_tags.length; i < len; i++){\n var tag = $svg_tags[i]\n obj[tag] = makeTagDict(tag)\n}\n\n$B.imported._svg = obj\n})(__BRYTHON__)\n"], "_locale": [".js", "var am = {\n \"C\": \"AM\",\n \"aa\": \"saaku\",\n \"ab\": \"AM\",\n \"ae\": \"AM\",\n \"af\": \"vm.\",\n \"ak\": \"AN\",\n \"am\": \"\\u1325\\u12cb\\u1275\",\n \"an\": \"AM\",\n \"ar\": \"\\u0635\",\n \"as\": \"\\u09f0\\u09be\\u09a4\\u09bf\\u09aa\\u09c1\",\n \"av\": \"AM\",\n \"ay\": \"AM\",\n \"az\": \"AM\",\n \"ba\": \"\",\n \"be\": \"\",\n \"bg\": \"\",\n \"bh\": \"AM\",\n \"bi\": \"AM\",\n \"bm\": \"AM\",\n \"bn\": \"AM\",\n \"bo\": \"\\u0f66\\u0f94\\u0f0b\\u0f51\\u0fb2\\u0f7c\",\n \"br\": \"A.M.\",\n \"bs\": \"prijepodne\",\n \"ca\": \"a. 
m.\",\n \"ce\": \"AM\",\n \"ch\": \"AM\",\n \"co\": \"\",\n \"cr\": \"AM\",\n \"cs\": \"dop.\",\n \"cu\": \"\\u0414\\u041f\",\n \"cv\": \"AM\",\n \"cy\": \"yb\",\n \"da\": \"\",\n \"de\": \"\",\n \"dv\": \"\\u0789\\u0786\",\n \"dz\": \"\\u0f66\\u0f94\\u0f0b\\u0f46\\u0f0b\",\n \"ee\": \"\\u014bdi\",\n \"el\": \"\\u03c0\\u03bc\",\n \"en\": \"AM\",\n \"eo\": \"atm\",\n \"es\": \"\",\n \"et\": \"AM\",\n \"eu\": \"AM\",\n \"fa\": \"\\u0642.\\u0638\",\n \"ff\": \"\",\n \"fi\": \"ap.\",\n \"fj\": \"AM\",\n \"fo\": \"um fyr.\",\n \"fr\": \"\",\n \"fy\": \"AM\",\n \"ga\": \"r.n.\",\n \"gd\": \"m\",\n \"gl\": \"a.m.\",\n \"gn\": \"a.m.\",\n \"gu\": \"\\u0aaa\\u0ac2\\u0ab0\\u0acd\\u0ab5\\u00a0\\u0aae\\u0aa7\\u0acd\\u0aaf\\u0abe\\u0ab9\\u0acd\\u0aa8\",\n \"gv\": \"a.m.\",\n \"ha\": \"AM\",\n \"he\": \"AM\",\n \"hi\": \"\\u092a\\u0942\\u0930\\u094d\\u0935\\u093e\\u0939\\u094d\\u0928\",\n \"ho\": \"AM\",\n \"hr\": \"\",\n \"ht\": \"AM\",\n \"hu\": \"de.\",\n \"hy\": \"\",\n \"hz\": \"AM\",\n \"ia\": \"a.m.\",\n \"id\": \"AM\",\n \"ie\": \"AM\",\n \"ig\": \"A.M.\",\n \"ii\": \"\\ua0b5\\ua1aa\\ua20c\\ua210\",\n \"ik\": \"AM\",\n \"io\": \"AM\",\n \"is\": \"f.h.\",\n \"it\": \"\",\n \"iu\": \"AM\",\n \"ja\": \"\\u5348\\u524d\",\n \"jv\": \"\",\n \"ka\": \"AM\",\n \"kg\": \"AM\",\n \"ki\": \"Kiroko\",\n \"kj\": \"AM\",\n \"kk\": \"AM\",\n \"kl\": \"\",\n \"km\": \"\\u1796\\u17d2\\u179a\\u17b9\\u1780\",\n \"kn\": \"\\u0caa\\u0cc2\\u0cb0\\u0ccd\\u0cb5\\u0cbe\\u0cb9\\u0ccd\\u0ca8\",\n \"ko\": \"\\uc624\\uc804\",\n \"kr\": \"AM\",\n \"ks\": \"AM\",\n \"ku\": \"\\u067e.\\u0646\",\n \"kv\": \"AM\",\n \"kw\": \"a.m.\",\n \"ky\": \"\",\n \"la\": \"\",\n \"lb\": \"\",\n \"lg\": \"AM\",\n \"li\": \"AM\",\n \"ln\": \"nt\\u0254\\u0301ng\\u0254\\u0301\",\n \"lo\": \"\\u0e81\\u0ec8\\u0ead\\u0e99\\u0e97\\u0ec8\\u0ebd\\u0e87\",\n \"lt\": \"prie\\u0161piet\",\n \"lu\": \"Dinda\",\n \"lv\": \"priek\\u0161p.\",\n \"mg\": \"AM\",\n \"mh\": \"AM\",\n \"mi\": \"a.m.\",\n \"mk\": \"\\u043f\\u0440\\u0435\\u0442\\u043f\\u043b.\",\n \"ml\": \"AM\",\n \"mn\": \"??\",\n \"mo\": \"AM\",\n \"mr\": \"\\u092e.\\u092a\\u0942.\",\n \"ms\": \"PG\",\n \"mt\": \"AM\",\n \"my\": \"\\u1014\\u1036\\u1014\\u1000\\u103a\",\n \"na\": \"AM\",\n \"nb\": \"a.m.\",\n \"nd\": \"AM\",\n \"ne\": \"\\u092a\\u0942\\u0930\\u094d\\u0935\\u093e\\u0939\\u094d\\u0928\",\n \"ng\": \"AM\",\n \"nl\": \"\",\n \"nn\": \"f.m.\",\n \"no\": \"a.m.\",\n \"nr\": \"AM\",\n \"nv\": \"AM\",\n \"ny\": \"AM\",\n \"oc\": \"AM\",\n \"oj\": \"AM\",\n \"om\": \"WD\",\n \"or\": \"AM\",\n \"os\": \"AM\",\n \"pa\": \"\\u0a38\\u0a35\\u0a47\\u0a30\",\n \"pi\": \"AM\",\n \"pl\": \"AM\",\n \"ps\": \"\\u063a.\\u0645.\",\n \"pt\": \"\",\n \"qu\": \"a.m.\",\n \"rc\": \"AM\",\n \"rm\": \"AM\",\n \"rn\": \"Z.MU.\",\n \"ro\": \"a.m.\",\n \"ru\": \"\",\n \"rw\": \"AM\",\n \"sa\": \"\\u092e\\u0927\\u094d\\u092f\\u093e\\u0928\\u092a\\u0942\\u0930\\u094d\\u0935\",\n \"sc\": \"AM\",\n \"sd\": \"AM\",\n \"se\": \"i.b.\",\n \"sg\": \"ND\",\n \"sh\": \"AM\",\n \"si\": \"\\u0db4\\u0dd9.\\u0dc0.\",\n \"sk\": \"AM\",\n \"sl\": \"dop.\",\n \"sm\": \"AM\",\n \"sn\": \"AM\",\n \"so\": \"sn.\",\n \"sq\": \"e paradites\",\n \"sr\": \"pre podne\",\n \"ss\": \"AM\",\n \"st\": \"AM\",\n \"su\": \"AM\",\n \"sv\": \"\",\n \"sw\": \"AM\",\n \"ta\": \"\\u0b95\\u0bbe\\u0bb2\\u0bc8\",\n \"te\": \"\\u0c2a\\u0c42\\u0c30\\u0c4d\\u0c35\\u0c3e\\u0c39\\u0c4d\\u0c28\",\n \"tg\": \"\",\n \"th\": \"AM\",\n \"ti\": \"\\u1295\\u1309\\u1206 \\u1230\\u12d3\\u1270\",\n \"tk\": \"\",\n \"tl\": \"AM\",\n \"tn\": \"AM\",\n \"to\": 
\"AM\",\n \"tr\": \"\\u00d6\\u00d6\",\n \"ts\": \"AM\",\n \"tt\": \"\",\n \"tw\": \"AM\",\n \"ty\": \"AM\",\n \"ug\": \"\\u0686?\\u0634\\u062a\\u0649\\u0646 \\u0628?\\u0631?\\u0646\",\n \"uk\": \"AM\",\n \"ur\": \"AM\",\n \"uz\": \"TO\",\n \"ve\": \"AM\",\n \"vi\": \"SA\",\n \"vo\": \"AM\",\n \"wa\": \"AM\",\n \"wo\": \"\",\n \"xh\": \"AM\",\n \"yi\": \"\\ua0b5\\ua1aa\\ua20c\\ua210\",\n \"yo\": \"\\u00c0\\u00e1r?`\",\n \"za\": \"AM\",\n \"zh\": \"\\u4e0a\\u5348\",\n \"zu\": \"AM\"\n}\nvar pm = {\n \"C\": \"PM\",\n \"aa\": \"carra\",\n \"ab\": \"PM\",\n \"ae\": \"PM\",\n \"af\": \"nm.\",\n \"ak\": \"EW\",\n \"am\": \"\\u12a8\\u1230\\u12d3\\u1275\",\n \"an\": \"PM\",\n \"ar\": \"\\u0645\",\n \"as\": \"\\u0986\\u09ac\\u09c7\\u09b2\\u09bf\",\n \"av\": \"PM\",\n \"ay\": \"PM\",\n \"az\": \"PM\",\n \"ba\": \"\",\n \"be\": \"\",\n \"bg\": \"\",\n \"bh\": \"PM\",\n \"bi\": \"PM\",\n \"bm\": \"PM\",\n \"bn\": \"PM\",\n \"bo\": \"\\u0f55\\u0fb1\\u0f72\\u0f0b\\u0f51\\u0fb2\\u0f7c\",\n \"br\": \"G.M.\",\n \"bs\": \"popodne\",\n \"ca\": \"p. m.\",\n \"ce\": \"PM\",\n \"ch\": \"PM\",\n \"co\": \"\",\n \"cr\": \"PM\",\n \"cs\": \"odp.\",\n \"cu\": \"\\u041f\\u041f\",\n \"cv\": \"PM\",\n \"cy\": \"yh\",\n \"da\": \"\",\n \"de\": \"\",\n \"dv\": \"\\u0789\\u078a\",\n \"dz\": \"\\u0f55\\u0fb1\\u0f72\\u0f0b\\u0f46\\u0f0b\",\n \"ee\": \"\\u0263etr\\u0254\",\n \"el\": \"\\u03bc\\u03bc\",\n \"en\": \"PM\",\n \"eo\": \"ptm\",\n \"es\": \"\",\n \"et\": \"PM\",\n \"eu\": \"PM\",\n \"fa\": \"\\u0628.\\u0638\",\n \"ff\": \"\",\n \"fi\": \"ip.\",\n \"fj\": \"PM\",\n \"fo\": \"um sein.\",\n \"fr\": \"\",\n \"fy\": \"PM\",\n \"ga\": \"i.n.\",\n \"gd\": \"f\",\n \"gl\": \"p.m.\",\n \"gn\": \"p.m.\",\n \"gu\": \"\\u0a89\\u0aa4\\u0acd\\u0aa4\\u0ab0\\u00a0\\u0aae\\u0aa7\\u0acd\\u0aaf\\u0abe\\u0ab9\\u0acd\\u0aa8\",\n \"gv\": \"p.m.\",\n \"ha\": \"PM\",\n \"he\": \"PM\",\n \"hi\": \"\\u0905\\u092a\\u0930\\u093e\\u0939\\u094d\\u0928\",\n \"ho\": \"PM\",\n \"hr\": \"\",\n \"ht\": \"PM\",\n \"hu\": \"du.\",\n \"hy\": \"\",\n \"hz\": \"PM\",\n \"ia\": \"p.m.\",\n \"id\": \"PM\",\n \"ie\": \"PM\",\n \"ig\": \"P.M.\",\n \"ii\": \"\\ua0b5\\ua1aa\\ua20c\\ua248\",\n \"ik\": \"PM\",\n \"io\": \"PM\",\n \"is\": \"e.h.\",\n \"it\": \"\",\n \"iu\": \"PM\",\n \"ja\": \"\\u5348\\u5f8c\",\n \"jv\": \"\",\n \"ka\": \"PM\",\n \"kg\": \"PM\",\n \"ki\": \"Hwa\\u0129-in\\u0129\",\n \"kj\": \"PM\",\n \"kk\": \"PM\",\n \"kl\": \"\",\n \"km\": \"\\u179b\\u17d2\\u1784\\u17b6\\u1785\",\n \"kn\": \"\\u0c85\\u0caa\\u0cb0\\u0cbe\\u0cb9\\u0ccd\\u0ca8\",\n \"ko\": \"\\uc624\\ud6c4\",\n \"kr\": \"PM\",\n \"ks\": \"PM\",\n \"ku\": \"\\u062f.\\u0646\",\n \"kv\": \"PM\",\n \"kw\": \"p.m.\",\n \"ky\": \"\",\n \"la\": \"\",\n \"lb\": \"\",\n \"lg\": \"PM\",\n \"li\": \"PM\",\n \"ln\": \"mp\\u00f3kwa\",\n \"lo\": \"\\u0eab\\u0ebc\\u0eb1\\u0e87\\u0e97\\u0ec8\\u0ebd\\u0e87\",\n \"lt\": \"popiet\",\n \"lu\": \"Dilolo\",\n \"lv\": \"p\\u0113cp.\",\n \"mg\": \"PM\",\n \"mh\": \"PM\",\n \"mi\": \"p.m.\",\n \"mk\": \"\\u043f\\u043e\\u043f\\u043b.\",\n \"ml\": \"PM\",\n \"mn\": \"?\\u0425\",\n \"mo\": \"PM\",\n \"mr\": \"\\u092e.\\u0928\\u0902.\",\n \"ms\": \"PTG\",\n \"mt\": \"PM\",\n \"my\": \"\\u100a\\u1014\\u1031\",\n \"na\": \"PM\",\n \"nb\": \"p.m.\",\n \"nd\": \"PM\",\n \"ne\": \"\\u0905\\u092a\\u0930\\u093e\\u0939\\u094d\\u0928\",\n \"ng\": \"PM\",\n \"nl\": \"\",\n \"nn\": \"e.m.\",\n \"no\": \"p.m.\",\n \"nr\": \"PM\",\n \"nv\": \"PM\",\n \"ny\": \"PM\",\n \"oc\": \"PM\",\n \"oj\": \"PM\",\n \"om\": \"WB\",\n \"or\": \"PM\",\n \"os\": \"PM\",\n \"pa\": 
\"\\u0a36\\u0a3e\\u0a2e\",\n \"pi\": \"PM\",\n \"pl\": \"PM\",\n \"ps\": \"\\u063a.\\u0648.\",\n \"pt\": \"\",\n \"qu\": \"p.m.\",\n \"rc\": \"PM\",\n \"rm\": \"PM\",\n \"rn\": \"Z.MW.\",\n \"ro\": \"p.m.\",\n \"ru\": \"\",\n \"rw\": \"PM\",\n \"sa\": \"\\u092e\\u0927\\u094d\\u092f\\u093e\\u0928\\u092a\\u091a\\u094d\\u092f\\u093e\\u0924\",\n \"sc\": \"PM\",\n \"sd\": \"PM\",\n \"se\": \"e.b.\",\n \"sg\": \"LK\",\n \"sh\": \"PM\",\n \"si\": \"\\u0db4.\\u0dc0.\",\n \"sk\": \"PM\",\n \"sl\": \"pop.\",\n \"sm\": \"PM\",\n \"sn\": \"PM\",\n \"so\": \"gn.\",\n \"sq\": \"e pasdites\",\n \"sr\": \"po podne\",\n \"ss\": \"PM\",\n \"st\": \"PM\",\n \"su\": \"PM\",\n \"sv\": \"\",\n \"sw\": \"PM\",\n \"ta\": \"\\u0bae\\u0bbe\\u0bb2\\u0bc8\",\n \"te\": \"\\u0c05\\u0c2a\\u0c30\\u0c3e\\u0c39\\u0c4d\\u0c28\",\n \"tg\": \"\",\n \"th\": \"PM\",\n \"ti\": \"\\u12f5\\u1215\\u122d \\u1230\\u12d3\\u1275\",\n \"tk\": \"\",\n \"tl\": \"PM\",\n \"tn\": \"PM\",\n \"to\": \"PM\",\n \"tr\": \"\\u00d6S\",\n \"ts\": \"PM\",\n \"tt\": \"\",\n \"tw\": \"PM\",\n \"ty\": \"PM\",\n \"ug\": \"\\u0686?\\u0634\\u062a\\u0649\\u0646 \\u0643?\\u064a\\u0649\\u0646\",\n \"uk\": \"PM\",\n \"ur\": \"PM\",\n \"uz\": \"TK\",\n \"ve\": \"PM\",\n \"vi\": \"CH\",\n \"vo\": \"PM\",\n \"wa\": \"PM\",\n \"wo\": \"\",\n \"xh\": \"PM\",\n \"yi\": \"\\ua0b5\\ua1aa\\ua20c\\ua248\",\n \"yo\": \"?`s\\u00e1n\",\n \"za\": \"PM\",\n \"zh\": \"\\u4e0b\\u5348\",\n \"zu\": \"PM\"\n}\n\nvar X_format = {\n \"%H:%M:%S\": [\n \"C\",\n \"ab\",\n \"ae\",\n \"af\",\n \"an\",\n \"av\",\n \"ay\",\n \"az\",\n \"ba\",\n \"be\",\n \"bg\",\n \"bh\",\n \"bi\",\n \"bm\",\n \"bo\",\n \"br\",\n \"bs\",\n \"ca\",\n \"ce\",\n \"ch\",\n \"co\",\n \"cr\",\n \"cs\",\n \"cu\",\n \"cv\",\n \"cy\",\n \"da\",\n \"de\",\n \"dv\",\n \"eo\",\n \"es\",\n \"et\",\n \"eu\",\n \"ff\",\n \"fj\",\n \"fo\",\n \"fr\",\n \"fy\",\n \"ga\",\n \"gd\",\n \"gl\",\n \"gn\",\n \"gu\",\n \"gv\",\n \"ha\",\n \"he\",\n \"hi\",\n \"ho\",\n \"hr\",\n \"ht\",\n \"hu\",\n \"hy\",\n \"hz\",\n \"ia\",\n \"ie\",\n \"ig\",\n \"ik\",\n \"io\",\n \"is\",\n \"it\",\n \"ja\",\n \"ka\",\n \"kg\",\n \"ki\",\n \"kj\",\n \"kk\",\n \"kl\",\n \"km\",\n \"kn\",\n \"kv\",\n \"kw\",\n \"ky\",\n \"la\",\n \"lb\",\n \"lg\",\n \"li\",\n \"ln\",\n \"lo\",\n \"lt\",\n \"lu\",\n \"lv\",\n \"mg\",\n \"mh\",\n \"mk\",\n \"mn\",\n \"mo\",\n \"mr\",\n \"mt\",\n \"my\",\n \"na\",\n \"nb\",\n \"nd\",\n \"ng\",\n \"nl\",\n \"nn\",\n \"no\",\n \"nr\",\n \"nv\",\n \"ny\",\n \"oj\",\n \"or\",\n \"os\",\n \"pi\",\n \"pl\",\n \"ps\",\n \"pt\",\n \"rc\",\n \"rm\",\n \"rn\",\n \"ro\",\n \"ru\",\n \"rw\",\n \"sa\",\n \"sc\",\n \"se\",\n \"sg\",\n \"sh\",\n \"sk\",\n \"sl\",\n \"sm\",\n \"sn\",\n \"sr\",\n \"ss\",\n \"st\",\n \"su\",\n \"sv\",\n \"sw\",\n \"ta\",\n \"te\",\n \"tg\",\n \"th\",\n \"tk\",\n \"tl\",\n \"tn\",\n \"tr\",\n \"ts\",\n \"tt\",\n \"tw\",\n \"ty\",\n \"ug\",\n \"uk\",\n \"uz\",\n \"ve\",\n \"vo\",\n \"wa\",\n \"wo\",\n \"xh\",\n \"yo\",\n \"za\",\n \"zh\",\n \"zu\"\n ],\n \"%i:%M:%S %p\": [\n \"aa\",\n \"ak\",\n \"am\",\n \"bn\",\n \"el\",\n \"en\",\n \"iu\",\n \"kr\",\n \"ks\",\n \"mi\",\n \"ml\",\n \"ms\",\n \"ne\",\n \"om\",\n \"sd\",\n \"so\",\n \"sq\",\n \"ti\",\n \"to\",\n \"ur\",\n \"vi\"\n ],\n \"%I:%M:%S %p\": [\n \"ar\",\n \"fa\",\n \"ku\",\n \"qu\"\n ],\n \"%p %i:%M:%S\": [\n \"as\",\n \"ii\",\n \"ko\",\n \"yi\"\n ],\n \"\\u0f46\\u0f74\\u0f0b\\u0f5a\\u0f7c\\u0f51\\u0f0b%i:%M:%S %p\": [\n \"dz\"\n ],\n \"%p ga %i:%M:%S\": [\n \"ee\"\n ],\n \"%H.%M.%S\": [\n \"fi\",\n \"id\",\n \"jv\",\n \"oc\",\n \"si\"\n 
],\n \"%p %I:%M:%S\": [\n \"pa\"\n ]\n}\nvar x_format = {\n \"%m/%d/%y\": [\n \"C\"\n ],\n \"%d/%m/%Y\": [\n \"aa\",\n \"am\",\n \"bm\",\n \"bn\",\n \"ca\",\n \"co\",\n \"cy\",\n \"el\",\n \"es\",\n \"ff\",\n \"fr\",\n \"ga\",\n \"gd\",\n \"gl\",\n \"gn\",\n \"gv\",\n \"ha\",\n \"he\",\n \"id\",\n \"ig\",\n \"it\",\n \"iu\",\n \"jv\",\n \"ki\",\n \"kr\",\n \"kw\",\n \"la\",\n \"lg\",\n \"ln\",\n \"lo\",\n \"lu\",\n \"mi\",\n \"ml\",\n \"ms\",\n \"mt\",\n \"nd\",\n \"oc\",\n \"om\",\n \"pt\",\n \"qu\",\n \"rn\",\n \"sd\",\n \"sg\",\n \"so\",\n \"sw\",\n \"ti\",\n \"to\",\n \"uk\",\n \"ur\",\n \"uz\",\n \"vi\",\n \"wo\",\n \"yo\"\n ],\n \"%m/%d/%Y\": [\n \"ab\",\n \"ae\",\n \"an\",\n \"av\",\n \"ay\",\n \"bh\",\n \"bi\",\n \"ch\",\n \"cr\",\n \"cv\",\n \"ee\",\n \"en\",\n \"fj\",\n \"ho\",\n \"ht\",\n \"hz\",\n \"ie\",\n \"ik\",\n \"io\",\n \"kg\",\n \"kj\",\n \"ks\",\n \"kv\",\n \"li\",\n \"mh\",\n \"mo\",\n \"na\",\n \"ne\",\n \"ng\",\n \"nv\",\n \"ny\",\n \"oj\",\n \"pi\",\n \"rc\",\n \"sc\",\n \"sh\",\n \"sm\",\n \"su\",\n \"tl\",\n \"tw\",\n \"ty\",\n \"wa\",\n \"za\",\n \"zu\"\n ],\n \"%Y-%m-%d\": [\n \"af\",\n \"br\",\n \"ce\",\n \"dz\",\n \"eo\",\n \"ko\",\n \"lt\",\n \"mg\",\n \"nr\",\n \"rw\",\n \"se\",\n \"si\",\n \"sn\",\n \"ss\",\n \"st\",\n \"sv\",\n \"tn\",\n \"ts\",\n \"ug\",\n \"ve\",\n \"vo\",\n \"xh\"\n ],\n \"%Y/%m/%d\": [\n \"ak\",\n \"bo\",\n \"eu\",\n \"ia\",\n \"ii\",\n \"ja\",\n \"ku\",\n \"yi\",\n \"zh\"\n ],\n \"null\": [\n \"ar\",\n \"fa\",\n \"ps\",\n \"th\"\n ],\n \"%d-%m-%Y\": [\n \"as\",\n \"da\",\n \"fy\",\n \"hi\",\n \"kl\",\n \"mr\",\n \"my\",\n \"nl\",\n \"rm\",\n \"sa\",\n \"ta\"\n ],\n \"%d.%m.%Y\": [\n \"az\",\n \"cs\",\n \"de\",\n \"et\",\n \"fi\",\n \"fo\",\n \"hy\",\n \"is\",\n \"ka\",\n \"kk\",\n \"lv\",\n \"mk\",\n \"nb\",\n \"nn\",\n \"no\",\n \"os\",\n \"pl\",\n \"ro\",\n \"ru\",\n \"sq\",\n \"tg\",\n \"tr\",\n \"tt\"\n ],\n \"%d.%m.%y\": [\n \"ba\",\n \"be\",\n \"lb\"\n ],\n \"%d.%m.%Y \\u0433.\": [\n \"bg\"\n ],\n \"%d.%m.%Y.\": [\n \"bs\",\n \"hr\",\n \"sr\"\n ],\n \"%Y.%m.%d\": [\n \"cu\",\n \"mn\"\n ],\n \"%d/%m/%y\": [\n \"dv\",\n \"km\"\n ],\n \"%d-%m-%y\": [\n \"gu\",\n \"kn\",\n \"or\",\n \"pa\",\n \"te\"\n ],\n \"%Y. %m. %d.\": [\n \"hu\"\n ],\n \"%d-%b %y\": [\n \"ky\"\n ],\n \"%d. %m. %Y\": [\n \"sk\",\n \"sl\"\n ],\n \"%d.%m.%y \\u00fd.\": [\n \"tk\"\n ]\n}\n\n\n\n__BRYTHON__.imported._locale = (function($B){\n var _b_ = $B.builtins\n return {\n CHAR_MAX: 127,\n LC_ALL: 6,\n LC_COLLATE: 3,\n LC_CTYPE: 0,\n LC_MESSAGES: 5,\n LC_MONETARY: 4,\n LC_NUMERIC: 1,\n LC_TIME: 2,\n Error: _b_.ValueError,\n\n _date_format: function(spec, hour){\n var t,\n locale = __BRYTHON__.locale.substr(0, 2)\n\n if(spec == \"p\"){\n var res = hours < 12 ? 
am[locale] : pm[locale]\n if(res === undefined){\n throw _b_.ValueError.$factory(\"no format \" + spec + \" for locale \" +\n locale)\n }\n return res\n }\n else if(spec == \"x\"){\n t = x_format\n }else if(spec == \"X\"){\n t = X_format\n }else{\n throw _b_.ValueError.$factory(\"invalid format\", spec)\n }\n for(var key in t){\n if(t[key].indexOf(locale) > -1){\n return key\n }\n }\n throw _b_.ValueError.$factory(\"no format \" + spec + \" for locale \" +\n locale)\n },\n\n localeconv: function(){\n var conv = {'grouping': [127],\n 'currency_symbol': '',\n 'n_sign_posn': 127,\n 'p_cs_precedes': 127,\n 'n_cs_precedes': 127,\n 'mon_grouping': [],\n 'n_sep_by_space': 127,\n 'decimal_point': '.',\n 'negative_sign': '',\n 'positive_sign': '',\n 'p_sep_by_space': 127,\n 'int_curr_symbol': '',\n 'p_sign_posn': 127,\n 'thousands_sep': '',\n 'mon_thousands_sep': '',\n 'frac_digits': 127,\n 'mon_decimal_point': '',\n 'int_frac_digits': 127\n }\n var res = $B.empty_dict()\n for(var key in conv){\n _b_.dict.$setitem(res, key, conv[key])\n }\n\n return res\n },\n\n setlocale : function(){\n var $ = $B.args(\"setlocale\", 2, {category: null, locale: null},\n [\"category\", \"locale\"], arguments, {locale: _b_.None},\n null, null)\n /// XXX category is currently ignored\n if($.locale == \"\"){\n // use browser language setting, if it is set\n var LANG = ($B.language || \"\").substr(0, 2)\n if(am.hasOwnProperty(LANG)){\n $B.locale = LANG\n return LANG\n }else{\n console.log(\"Unknown locale: \" + LANG)\n }\n }else if($.locale === _b_.None){\n // return current locale\n return $B.locale\n }else{\n // Only use 2 first characters\n try{$.locale.substr(0, 2)}\n catch(err){\n throw $module.Error.$factory(\"Invalid locale: \" + $.locale)\n }\n if(am.hasOwnProperty($.locale.substr(0, 2))){\n $B.locale = $.locale\n return $.locale\n }else{\n throw $module.Error.$factory(\"Unknown locale: \" + $.locale)\n }\n }\n }\n }\n})(__BRYTHON__)\n"], "modulefinder": [".js", "(function($B){\n\nvar _b_=$B.builtins\nvar _mod = {}\n\n$ModuleFinderDict = {__class__:_b_.type,__name__:'ModuleFinder'}\n$ModuleFinderDict.__mro__ = [_b_.object]\n\n$ModuleFinderDict.run_script = function(self, pathname){\n // pathname is the url of a Python script\n var py_src = _b_.$open(pathname).read()\n // transform into internal Brython tree structure\n var root = $B.py2js(py_src)\n // walk the tree to find occurences of imports\n function walk(node){\n var modules = []\n var ctx = node.context\n if(ctx && ctx.type=='node'){ctx = ctx.tree[0]}\n\n if(ctx && ctx.type==\"import\"){\n for(var i=0, _len_i = ctx.tree.length; i < _len_i;i++){\n if(modules.indexOf(ctx.tree[i].name)==-1){\n modules.push(ctx.tree[i].name)\n }\n }\n }else if(ctx && ctx.type==\"from\"){\n if(modules.indexOf(ctx.module)==-1){\n modules.push(ctx.module)\n }\n }\n\n for(var i=0, _len_i = node.children.length; i < _len_i;i++){\n mods = walk(node.children[i])\n for(var j=0, _len_j = mods.length; j < _len_j;j++){\n if(modules.indexOf(mods[j])==-1){modules.push(mods[j])}\n }\n }\n return modules\n }\n self.modules = walk(root)\n}\n\n_mod.ModuleFinder = function(){return {__class__:$ModuleFinderDict}\n}\n_mod.ModuleFinder.$dict = $ModuleFinderDict\n_mod.ModuleFinder.__class__ = $B.$factory\n$ModuleFinderDict.$factory = _mod.ModuleFinder\n\n$B.addToImported('modulefinder', _mod)\n\n})(__BRYTHON__)\n"], "_profile": [".js", "// Private interface to the profiling instrumentation implemented in py_utils.js.\n// Uses local a copy of the eval function from py_builtin_functions.js\n\nvar 
$module=(function($B) {\n eval($B.InjectBuiltins());\n return {\n brython:$B,\n data:$B.$profile_data,\n start:$B.$profile.start,\n stop:$B.$profile.stop,\n pause:$B.$profile.pause,\n status:$B.$profile.status,\n clear:$B.$profile.clear,\n elapsed:$B.$profile.elapsed,\n run:function(src,_globals,_locals,nruns) {\n var current_frame = $B.frames_stack[$B.frames_stack.length-1]\n if(current_frame!==undefined){\n var current_locals_id = current_frame[0].replace(/\\./,'_'),\n current_globals_id = current_frame[2].replace(/\\./,'_')\n }\n\n var is_exec = true,\n leave = false\n\n // code will be run in a specific block\n var globals_id = '$profile_'+$B.UUID(),\n locals_id\n\n if(_locals===_globals){\n locals_id = globals_id\n }else{\n locals_id = '$profile_'+$B.UUID()\n }\n // Initialise the object for block namespaces\n eval('var $locals_'+globals_id+' = {}\\nvar $locals_'+locals_id+' = {}')\n\n // Initialise block globals\n\n // A _globals dictionary is provided, set or reuse its attribute\n // globals_id\n _globals.globals_id = _globals.globals_id || globals_id\n globals_id = _globals.globals_id\n\n if(_locals === _globals || _locals === undefined){\n locals_id = globals_id\n parent_scope = $B.builtins_scope\n }else{\n // The parent block of locals must be set to globals\n parent_scope = {\n id: globals_id,\n parent_block: $B.builtins_scope,\n binding: {}\n }\n for(var attr of _b_.dict.$keys_string(_globals)){\n parent_scope.binding[attr] = true\n }\n }\n\n // Initialise block globals\n if(_globals.$jsobj){\n var items = _globals.$jsobj\n }else{\n var items = {}\n for(var key of _b_.dict.$keys_string(_globals)){\n items[key] = _b_.dict.$getitem_string(_globals, key)\n }\n }\n for(var item in items){\n item1 = to_alias(item)\n try{\n eval('$locals_' + globals_id + '[\"' + item1 +\n '\"] = items[item]')\n }catch(err){\n console.log(err)\n console.log('error setting', item)\n break\n }\n }\n\n // Initialise block locals\n var items = _b_.dict.items(_locals), item\n if(_locals.$jsobj){\n var items = _locals.$jsobj\n }else{\n var items = {}\n for(var key of _b_.dict.$keys_string(_locals)){\n items[key] = _b_.dict.$getitem_string(_locals, key)\n } }\n for(var item in items){\n item1 = to_alias(item)\n try{\n eval('$locals_' + locals_id + '[\"' + item[0] + '\"] = item[1]')\n }catch(err){\n console.log(err)\n console.log('error setting', item)\n break\n }\n }\n //var nb_modules = Object.keys(__BRYTHON__.modules).length\n //console.log('before exec', nb_modules)\n\n console.log(\"call py2js\", src, globals_id, locals_id, parent_scope)\n var root = $B.py2js(src, globals_id, locals_id, parent_scope),\n js, gns, lns\n\n try{\n\n var js = root.to_js()\n\n var i,res,gns;\n for(i=0;i 2){\n var brython_scripts = [\n 'brython_builtins',\n\n 'py_ast_classes',\n 'unicode_data',\n 'stdlib_paths',\n 'version_info',\n\n 'python_tokenizer',\n 'py_ast',\n 'py2js',\n 'loaders',\n 'py_utils',\n 'py_object',\n 'py_type',\n 'py_builtin_functions',\n 'py_sort',\n 'py_exceptions',\n 'py_range_slice',\n 'py_bytes',\n 'py_set',\n 'js_objects',\n 'py_import',\n 'py_string',\n 'py_int',\n 'py_long_int',\n 'py_float',\n 'py_complex',\n 'py_dict',\n 'py_list',\n 'py_generator',\n 'py_dom',\n 'py_pattern_matching',\n 'async',\n 'py_flags',\n 'builtin_modules',\n 'ast_to_js',\n 'symtable',\n 'builtins_docstrings'\n ]\n }else{\n var brython_scripts = ['brython']\n }\n\n if(VFS !== null){\n brython_scripts.push(VFS)\n }\n return brython_scripts\n}\n\nvar wclass = $B.make_class(\"Worker\",\n function(worker){\n return {\n __class__: 
wclass,\n worker\n }\n }\n)\n\nwclass.send = function(){\n var $ = $B.args('send', 2, {self: null, message: null}, ['self', 'message'],\n arguments, {}, 'args', null)\n var message = $B.pyobj2structuredclone($.message)\n return $.self.worker.postMessage(message, ...$.args)\n}\n\nwclass.__mro__ = [$B.JSObj, _b_.object]\n\n$B.set_func_names(wclass, \"browser.worker\")\n\n\nvar _Worker = $B.make_class(\"Worker\", function(id, onmessage, onerror){\n $B.warn(_b_.DeprecationWarning,\n \"worker.Worker is deprecated in version 3.12. \" +\n \"Use worker.create_worker instead\")\n var $ = $B.args(\"__init__\", 3, {id: null, onmessage: null, onerror: null},\n ['id', 'onmessage', 'onerror'], arguments,\n {onmessage: _b_.None, onerror: _b_.None}, null, null),\n id = $.id,\n worker_script = $B.webworkers[id]\n\n if(worker_script === undefined){\n throw _b_.KeyError.$factory(id)\n }\n var filepath = worker_script.src ? worker_script.src : $B.script_path + \"#\" + id,\n filename = $B.strip_host(filepath),\n src = $B.file_cache[filename]\n\n var indexedDB = worker_script.attributes &&\n worker_script.attributes.getNamedItem('indexedDB')\n var script_id = \"worker\" + $B.UUID(),\n filename = $B.script_path + \"#\" + id\n $B.url2name[filename] = script_id\n\n var js = $B.py2js({src, filename}, script_id).to_js(),\n header = '';\n var brython_scripts = scripts_to_load(\n $B.get_option_from_filename('debug', filename))\n brython_scripts.forEach(function(script){\n if(script != VFS || VFS == \"brython_stdlib\"){\n var url = $B.brython_path + script + \".js\"\n }else{\n // attribute $B.brython_modules is set to the path of\n // brython_modules.js by the script itself\n var url = $B.brython_modules\n }\n if(! $B.get_option('cache')){ // cf. issue 1954\n url += '?' + (new Date()).getTime()\n }\n header += 'importScripts(\"' + url + '\")\\n'\n })\n // set __BRYTHON__.imported[script_id]\n header += `\n var $B = __BRYTHON__,\n _b_ = $B.builtins\n var module = $B.module.$factory(\"${script_id}\")\n module.__file__ = \"${filename}\"\n module.__doc__ = _b_.None\n $B.imported[\"${script_id}\"] = module\\n`\n // restore brython_path\n header += `$B.brython_path = \"${$B.brython_path}\"\\n`\n // restore path for imports (cf. issue #1305)\n header += `$B.make_import_paths(\"${filename}\")\\n`\n // Call brython() to initialize internal Brython values\n header += `brython(${JSON.stringify($B.$options)})\\n`\n js = header + js\n js = `try{${js}}catch(err){$B.handle_error(err)}`\n\n var blob = new Blob([js], {type: \"application/js\"}),\n url = URL.createObjectURL(blob),\n w = new Worker(url),\n res = wclass.$factory(w)\n return res\n})\n\nfunction create_worker(){\n var $ = $B.args(\"__init__\", 4,\n {id: null, onready: null, onmessage: null, onerror: null},\n ['id', 'onready', 'onmessage', 'onerror'], arguments,\n {onready: _b_.None, onmessage: _b_.None, onerror: _b_.None},\n null, null),\n id = $.id,\n worker_script = $B.webworkers[id],\n onready = $.onready === _b_.None ? _b_.None : $B.$call($.onready),\n onmessage = $.onmessage === _b_.None ? _b_.None : $B.$call($.onmessage),\n onerror = $.onerror === _b_.None ? _b_.None : $B.$call($.onerror)\n\n if(worker_script === undefined){\n throw _b_.RuntimeError.$factory(`No webworker with id '${id}'`)\n }\n var script_id = \"worker\" + $B.UUID(),\n filepath = worker_script.src ? 
worker_script.src : $B.script_path + \"#\" + id,\n filename = $B.strip_host(filepath),\n src = $B.file_cache[filename]\n $B.url2name[filename] = script_id\n\n var brython_scripts = scripts_to_load(\n $B.get_option_from_filename('debug', filename))\n\n var js = $B.py2js({src, filename}, script_id).to_js(),\n header = '';\n for(var script of brython_scripts){\n if(script != VFS || VFS == \"brython_stdlib\"){\n var url = $B.brython_path + script + \".js\"\n }else{\n // attribute $B.brython_modules is set to the path of\n // brython_modules.js by the script itself\n var url = $B.brython_modules\n }\n if(! $B.get_option('cache')){ // cf. issue 1954\n url += '?' + (new Date()).getTime()\n }\n header += 'importScripts(\"' + url + '\")\\n'\n }\n // set __BRYTHON__.imported[script_id]\n header += `\n var $B = __BRYTHON__,\n _b_ = $B.builtins\n var module = $B.module.$factory(\"${script_id}\")\n module.__file__ = \"${filename}\"\n module.__doc__ = _b_.None\n $B.imported[\"${script_id}\"] = module\\n`\n\n header += '$B.file_cache[module.__file__] = `' + src + '`\\n'\n // restore brython_path\n header += `$B.brython_path = \"${$B.brython_path}\"\\n`\n // restore path for imports (cf. issue #1305)\n header += `$B.make_import_paths(\"${filename}\")\\n`\n\n // Call brython() to initialize internal Brython values\n var save_option = JSON.stringify($B.save_options)\n header += `brython(${save_option})\\n`\n\n // send dummy message to trigger resolution of Promise\n var ok_token = Math.random().toString(36).substr(2, 8),\n error_token = Math.random().toString(36).substr(2, 8)\n\n // open indexedDB cache before running worker code\n js = `$B.idb_open_promise().then(function(){\\n` +\n `try{\\n` +\n `${js}\\n` +\n `self.postMessage('${ok_token}')\\n` +\n `}catch(err){\\n` +\n `self.postMessage(\"${error_token}Error in worker ${id}\\\\n\" + $B.error_trace(err))\\n` +\n `}\\n})`\n js = header + js\n\n var p = new Promise(function(resolve, reject){\n try{\n var blob = new Blob([js], {type: \"application/js\"}),\n url = URL.createObjectURL(blob),\n w = new Worker(url),\n res = wclass.$factory(w)\n }catch(err){\n reject(err)\n }\n\n w.onmessage = function(ev){\n if(ev.data == ok_token){\n resolve(res)\n }else if(typeof ev.data == 'string' &&\n ev.data.startsWith(error_token)){\n reject(ev.data.substr(error_token.length))\n }else{\n if(onmessage !== _b_.None){\n onmessage(ev)\n }\n try{\n resolve(res)\n }catch(err){\n reject(err)\n }\n }\n }\n\n return res\n })\n\n var error_func = onerror === _b_.None ? 
$B.handle_error : onerror\n\n if(onready !== _b_.None){\n p.then(onready).catch(error_func)\n }else{\n p.catch(error_func)\n }\n return _b_.None\n}\n\nvar module = {\n Worker: _Worker,\n create_worker\n}\n\n$B.addToImported('_webworker', module)\n\n})(__BRYTHON__)\n"], "_ast": [".js", "(function($B){\n\nvar _b_ = $B.builtins,\n ast = $B.ast, // created in py2js\n mod = {}\nmod.PyCF_ONLY_AST = $B.PyCF_ONLY_AST\nmod.PyCF_TYPE_COMMENTS = $B.PyCF_TYPE_COMMENTS\nmod.AST = $B.AST // in builtin_modules.js\n$B.create_python_ast_classes() // in py_ast.js\nfor(var klass in ast){\n mod[klass] = $B.python_ast_classes[klass]\n}\n\nvar Load = 'Load',\n Store = 'Store',\n Del = 'Del'\n\n// Note: the ensure_literal_* functions are only used to validate a restricted\n// set of non-recursive literals that have already been checked with\n// validate_expr, so they don't accept the validator state\nfunction ensure_literal_number(exp, allow_real, allow_imaginary){\n if(exp.__class__ !== mod.Constant){\n return false\n }\n var value = exp.value\n if(allow_real && $B.$isinstance(value, [_b_.int, _b_.float])){\n return true\n }\n if(allow_imaginary && $B.$isinstance(value, _b_.complex)){\n return true\n }\n return false\n}\n\nfunction ensure_literal_negative(exp, allow_real, allow_imaginary){\n if(exp.__class__ !== mod.UnaryOp){\n return false\n }\n // Must be negation ...\n if(exp.op !== mod.USub) {\n return false\n }\n // ... of a constant ...\n var operand = exp.operand\n if(operand.__class__ !== mod.Constant){\n return false\n }\n // ... number\n return ensure_literal_number(operand, allow_real, allow_imaginary)\n}\n\nfunction ensure_literal_complex(exp){\n if(exp.__class__ !== mod.BinOp){\n return false\n }\n var left = exp.left,\n right = exp.right;\n // Ensure op is addition or subtraction\n if(exp.op !== mod.Add && exp.op !== mod.Sub){\n return false\n }\n // Check LHS is a real number (potentially signed)\n switch(left.__class__){\n case mod.Constant:\n if(!ensure_literal_number(left, true, false)){\n return false\n }\n break;\n case mod.UnaryOp:\n if(!ensure_literal_negative(left, true, false)){\n return false\n }\n break;\n default:\n return false\n }\n // Check RHS is an imaginary number (no separate sign allowed)\n switch(right.__class__){\n case mod.Constant:\n if(!ensure_literal_number(right, false, true)){\n return false\n }\n break;\n default:\n return false\n }\n return true\n}\n\nfunction validate_arguments(args){\n validate_args(args.posonlyargs)\n validate_args(args.args)\n if(args.vararg && args.vararg.annotation){\n validate_expr(args.vararg.annotation, Load)\n }\n validate_args(args.kwonlyargs)\n if(args.kwarg && args.kwarg.annotation){\n validate_expr(args.kwarg.annotation, Load)\n }\n if(args.defaults.length > args.posonlyargs.length + args.args.length){\n throw _b_.ValueError.$factory(\n \"more positional defaults than args on arguments\")\n }\n if(args.kw_defaults.length != args.kwonlyargs.length){\n throw _b_.ValueError.$factory(\n \"length of kwonlyargs is not the same as \" +\n \"kw_defaults on arguments\")\n }\n validate_exprs(args.defaults, Load, 0)\n validate_exprs(args.kw_defaults, Load, 1)\n}\n\nfunction validate_pattern(p, star_ok){\n var ret = -1\n switch(p.__class__) {\n case mod.MatchValue:\n validate_pattern_match_value(p.value)\n break;\n case mod.MatchSingleton:\n if([_b_.None, _b_.True, _b_.False].indexOf(p.value) == -1){\n throw _b_.ValueError(\n \"MatchSingleton can only contain True, False and None\")\n }\n break;\n case mod.MatchSequence:\n 
validate_patterns(p.patterns, 1);\n break;\n case mod.MatchMapping:\n if(p.keys.length != p.patterns.length){\n throw _b_.ValueError.$factory(\n \"MatchMapping doesn't have the same number of keys as patterns\");\n }\n if(p.rest){\n validate_capture(p.rest)\n }\n\n var keys = p.keys;\n for(var key of keys){\n if(key.__class__ === mod.Constant) {\n var literal = key.value;\n if([_b_.None, _b_.True, _b_.False].indexOf(literal) > -1){\n /* validate_pattern_match_value will ensure the key\n doesn't contain True, False and None but it is\n syntactically valid, so we will pass those on in\n a special case. */\n continue;\n }\n }\n validate_pattern_match_value(key)\n }\n validate_patterns(p.patterns, 0);\n break;\n case mod.MatchClass:\n if(p.kwd_attrs.length != p.kwd_patterns.length){\n throw _b_.ValueError.$factory(\n \"MatchClass doesn't have the same number of \" +\n \"keyword attributes as patterns\")\n }\n validate_expr(p.cls, Load)\n var cls = p.cls;\n while(true){\n if(cls.__class__ === mod.Name){\n break\n }else if(cls.__class__ === mod.Attribute) {\n cls = cls.value;\n continue;\n }else {\n throw _b_.ValueError.$factory(\n \"MatchClass cls field can only contain Name \" +\n \"or Attribute nodes.\")\n }\n }\n\n for(var identifier of p.kwd_attrs){\n validate_name(identifier)\n }\n\n validate_patterns(p.patterns, 0)\n validate_patterns(p.kwd_patterns, 0);\n break;\n case mod.MatchStar:\n if (!star_ok) {\n throw _b_.ValueError.$factory(\"can't use MatchStar here\")\n }\n if(p.name === undefined){\n validate_capture(p.name)\n }\n break;\n case mod.MatchAs:\n if(p.name){\n validate_capture(p.name)\n }\n if(p.pattern == undefined){\n ret = 1;\n }else if(p.name == undefined){\n throw _b_.ValueError.$factory(\n \"MatchAs must specify a target name if a pattern is given\")\n }else{\n validate_pattern(p.pattern, 0);\n }\n break;\n case mod.MatchOr:\n if(p.patterns.length < 2){\n throw _b_.ValueError.$factory(\n \"MatchOr requires at least 2 patterns\")\n }\n validate_patterns(p.patterns, 0)\n break;\n // No default case, so the compiler will emit a warning if new pattern\n // kinds are added without being handled here\n }\n if(ret < 0){\n throw _b_.SystemError.$factory(\"unexpected pattern\")\n }\n return true\n}\n\nfunction validate_patterns(patterns, star_ok){\n for(var pattern of patterns){\n validate_pattern(pattern, star_ok)\n }\n return true\n}\n\nfunction validate_pattern_match_value(exp){\n validate_expr(exp, Load)\n switch (exp.__class__){\n case mod.Constant:\n /* Ellipsis and immutable sequences are not allowed.\n For True, False and None, MatchSingleton() should\n be used */\n validate_expr(exp, Load)\n var literal = exp.value\n if($B.$isinstance(literal, [_b_.int, _b_.float, _b_.bytes,\n _b_.complex, _b_.str])){\n return true\n }\n throw _b_.ValueError.$factory(\n \"unexpected constant inside of a literal pattern\")\n case mod.Attribute:\n // Constants and attribute lookups are always permitted\n return true\n case mod.UnaryOp:\n // Negated numbers are permitted (whether real or imaginary)\n // Compiler will complain if AST folding doesn't create a constant\n if(ensure_literal_negative(exp, true, true)){\n return true\n }\n break;\n case mod.BinOp:\n // Complex literals are permitted\n // Compiler will complain if AST folding doesn't create a constant\n if(ensure_literal_complex(exp)){\n return true\n }\n break;\n case mod.JoinedStr:\n // Handled in the later stages\n return 1;\n default:\n break;\n }\n throw _b_.ValueError.$factory(\n \"patterns may only match literals and attribute 
lookups\")\n}\n\nfunction validate_capture(name){\n if(name == \"_\"){\n throw _b_.ValueError.$factory(\"can't capture name '_' in patterns\")\n }\n validate_name(name)\n}\n\nfunction validate_name(name){\n var forbidden = [\"None\", \"True\", \"False\"]\n if(forbidden.indexOf(name) > -1){\n throw _b_.ValueError.$factory(`identifier field can't represent` +\n ` '${name}' constant\", forbidden[i]`)\n }\n return true\n}\n\nfunction validate_comprehension(gens){\n if(gens.length == 0) {\n throw _b_.ValueError.$factory(\"comprehension with no generators\")\n }\n for(var comp of gens){\n validate_expr(comp.target, Store)\n validate_expr(comp.iter, Load)\n validate_exprs(comp.ifs, Load, 0)\n }\n return true\n}\n\nfunction validate_keywords(keywords){\n for(var keyword of keywords){\n validate_expr(keyword.value, Load)\n }\n return true\n}\n\nfunction validate_args(args){\n for(var arg of args){\n if(arg.annotation){\n validate_expr(arg.annotation, Load)\n }\n }\n return true\n}\n\nfunction validate_nonempty_seq(seq, what, owner){\n if(seq.length > 0){\n return true\n }\n throw _b_.ValueError.$factory(`empty ${what} on ${owner}`)\n}\n\nfunction validate_assignlist(targets, ctx){\n validate_nonempty_seq(targets, \"targets\", ctx == Del ? \"Delete\" : \"Assign\")\n validate_exprs(targets, ctx, 0)\n}\n\nfunction validate_body(body, owner){\n validate_nonempty_seq(body, \"body\", owner)\n validate_stmts(body)\n}\n\nfunction validate_exprs(exprs, ctx, null_ok){\n for(var expr of exprs){\n if(expr !== _b_.None){\n validate_expr(expr, ctx)\n }else if(!null_ok){\n throw _b_.ValueError.$factory(\n \"None disallowed in expression list\")\n }\n\n }\n return true\n}\n\nfunction validate_expr(exp, ctx){\n var check_ctx = 1,\n actual_ctx;\n\n /* First check expression context. */\n switch (exp.__class__) {\n case mod.Name:\n validate_name(exp.id)\n actual_ctx = exp.ctx\n break;\n case mod.Attribute:\n case mod.Subscript:\n case mod.Starred:\n case mod.List:\n case mod.Tuple:\n actual_ctx = exp.ctx;\n break\n default:\n if(ctx != Load){\n throw _b_.ValueError.$factory(\"expression which can't be \" +\n `assigned to in ${ctx} context`)\n }\n check_ctx = 0;\n /* set actual_ctx to prevent gcc warning */\n actual_ctx = 0;\n }\n actual_ctx = actual_ctx === 0 ? actual_ctx :\n actual_ctx.__class__.__name__\n if(check_ctx && actual_ctx != ctx){\n throw _b_.ValueError.$factory(`expression must have ` +\n `${ctx} context but has ${actual_ctx} instead`)\n }\n\n /* Now validate expression. */\n switch (exp.__class__) {\n case mod.BoolOp:\n if(exp.values.length < 2){\n throw _b_.ValueError.$factory(\"BoolOp with less than 2 values\")\n }\n validate_exprs(exp.values, Load, 0);\n break;\n case mod.BinOp:\n validate_expr(exp.left, Load)\n validate_expr(exp.right, Load)\n break;\n case mod.UnaryOp:\n validate_expr(exp.operand, Load);\n break;\n case mod.Lambda:\n validate_arguments(exp.args)\n validate_expr(exp.body, Load);\n break;\n case mod.IfExp:\n validate_expr(exp.test, Load)\n validate_expr(exp.body, Load)\n validate_expr(exp.orelse, Load)\n break;\n case mod.Dict:\n if(exp.keys.length != exp.values.length){\n throw _b_.ValueError.$factory(\n \"Dict doesn't have the same number of keys as values\");\n }\n /* null_ok=1 for keys expressions to allow dict unpacking to work in\n dict literals, i.e. 
``{**{a:b}}`` */\n validate_exprs(exp.keys, Load, 1)\n validate_exprs(exp.values, Load, 0);\n break;\n case mod.Set:\n validate_exprs(exp.elts, Load, 0);\n break;\n case mod.ListComp:\n case mod.SetComp:\n case mod.GeneratorExp:\n validate_comprehension(exp.generators)\n validate_expr(exp.elt, Load)\n break;\n case mod.DictComp:\n validate_comprehension(exp.generators)\n validate_expr(exp.key, Load)\n validate_expr(exp.value, Load)\n break;\n case mod.Yield:\n if(exp.value){\n validate_expr(exp.value, Load)\n }\n break;\n case mod.YieldFrom:\n validate_expr(exp.value, Load)\n break;\n case mod.Await:\n validate_expr(exp.value, Load)\n break;\n case mod.Compare:\n if(exp.comparators.length == 0){\n throw _b_.ValueError.$factory(\"Compare with no comparators\")\n }\n if(exp.comparators.length != exp.ops){\n throw _b_.ValueError.$factory(\"Compare has a different number \" +\n \"of comparators and operands\")\n }\n validate_exprs(exp.comparators, Load, 0)\n validate_expr(exp.left, Load)\n break;\n case mod.Call:\n validate_expr(exp.func, Load)\n validate_exprs(exp.args, Load, 0)\n validate_keywords(exp.keywords)\n break;\n case mod.Constant:\n validate_constant(exp.value)\n break;\n case mod.JoinedStr:\n validate_exprs(exp.values, Load, 0)\n break;\n case mod.FormattedValue:\n validate_expr(exp.value, Load)\n if (exp.format_spec) {\n validate_expr(exp.format_spec, Load)\n break;\n }\n break;\n case mod.Attribute:\n validate_expr(exp.value, Load)\n break;\n case mod.Subscript:\n validate_expr(exp.slice, Load)\n validate_expr(exp.value, Load)\n break;\n case mod.Starred:\n validate_expr(exp.value, ctx)\n break;\n case mod.Slice:\n if(exp.lower){\n validate_expr(exp.lower, Load)\n }\n if(exp.upper){\n validate_expr(exp.upper, Load)\n }\n if(exp.step){\n validate_expr(exp.step, Load)\n }\n break;\n case mod.List:\n validate_exprs(exp.elts, ctx, 0)\n break;\n case mod.Tuple:\n validate_exprs(exp.elts, ctx, 0)\n break;\n case mod.NamedExpr:\n validate_expr(exp.value, Load)\n break;\n /* This last case doesn't have any checking. 
*/\n case mod.Name:\n ret = 1;\n break;\n // No default case mod.so compiler emits warning for unhandled cases\n }\n return true\n}\n\nfunction validate_constant(value){\n if (value == _b_.None || value == _b_.Ellipsis){\n return true\n }\n if($B.$isinstance(value,\n [_b_.int, _b_.float, _b_.complex, _b_.bool, _b_.bytes, _b_.str])){\n return true\n }\n\n if($B.$isinstance(value, [_b_.tuple, _b_.frozenset])){\n var it = _b_.iter(value)\n while(true){\n try{\n var item = _b_.next(it)\n validate_constant(item)\n }catch(err){\n if($B.is_exc(err, [_b_.StopIteration])){\n return true\n }\n throw err\n }\n }\n }\n}\n\nfunction validate_stmts(seq){\n for(var stmt of seq) {\n if(stmt !== _b_.None){\n validate_stmt(stmt)\n }else{\n throw _b_.ValueError.$factory(\"None disallowed in statement list\");\n }\n }\n}\n\nfunction validate_stmt(stmt){\n switch (stmt.__class__) {\n case mod.FunctionDef:\n validate_body(stmt.body, \"FunctionDef\")\n validate_arguments(stmt.args)\n validate_exprs(stmt.decorator_list, Load, 0)\n if(stmt.returns){\n validate_expr(stmt.returns, Load)\n }\n break;\n case mod.ClassDef:\n validate_body(stmt.body, \"ClassDef\")\n validate_exprs(stmt.bases, Load, 0)\n validate_keywords(stmt.keywords)\n validate_exprs(stmtdecorator_list, Load, 0)\n break;\n case mod.Return:\n if(stmt.value){\n validate_expr(stmt.value, Load)\n }\n break;\n case mod.Delete:\n validate_assignlist(stmt.targets, Del);\n break;\n case mod.Assign:\n validate_assignlist(stmt.targets, Store)\n validate_expr(stmt.value, Load)\n break;\n case mod.AugAssign:\n validate_expr(stmt.target, Store) &&\n validate_expr(stmt.value, Load);\n break;\n case mod.AnnAssign:\n if(stmt.target.__class__ != mod.Name && stmt.simple){\n throw _b_.TypeError.$factory(\n \"AnnAssign with simple non-Name target\")\n }\n validate_expr(stmt.target, Store)\n if(stmt.value){\n validate_expr(stmt.value, Load)\n validate_expr(stmt.annotation, Load);\n }\n break;\n case mod.For:\n validate_expr(stmt.target, Store)\n validate_expr(stmt.iter, Load)\n validate_body(stmt.body, \"For\")\n validate_stmts(stmt.orelse)\n break;\n case mod.AsyncFor:\n validate_expr(stmt.target, Store)\n validate_expr(stmt.iter, Load)\n validate_body(stmt.body, \"AsyncFor\")\n validate_stmts(stmt.orelse)\n break;\n case mod.While:\n validate_expr(stmt.test, Load)\n validate_body(stmt.body, \"While\")\n validate_stmts(stmt.orelse)\n break;\n case mod.If:\n validate_expr(stmt.test, Load)\n validate_body(stmt.body, \"If\")\n validate_stmts(stmt.orelse)\n break;\n case mod.With:\n validate_nonempty_seq(stmt.items, \"items\", \"With\")\n for (var item of stmt.items){\n validate_expr(item.context_expr, Load) &&\n (! 
item.optional_vars || validate_expr(item.optional_vars, Store))\n }\n validate_body(stmt.body, \"With\");\n break;\n case mod.AsyncWith:\n validate_nonempty_seq(stmt.items, \"items\", \"AsyncWith\")\n for(var item of stmt.items){\n validate_expr(item.context_expr, Load)\n if(item.optional_vars){\n validate_expr(item.optional_vars, Store)\n }\n }\n validate_body(stmt.body, \"AsyncWith\");\n break;\n case mod.Match:\n validate_expr(stmt.subject, Load)\n validate_nonempty_seq(stmt.cases, \"cases\", \"Match\")\n for(var m of stmt.cases){\n validate_pattern(m.pattern, 0)\n if(m.guard){\n validate_expr(m.guard, Load)\n }\n validate_body(m.body, \"match_case\")\n }\n break;\n case mod.Raise:\n if(stmt.exc){\n validate_expr(stmt.exc, Load)\n if(stmt.cause){\n validate_expr(stmt.cause, Load)\n }\n break;\n }\n if(stmt.cause) {\n throw _b_.ValueError.$factory(\"Raise with cause but no exception\");\n }\n break;\n case mod.Try:\n validate_body(stmt.body, \"Try\")\n if(stmt.handlers.length == 0 + stmt.finalbody.length == 0){\n throw _b_.ValueError.$factor(\n \"Try has neither except handlers nor finalbody\");\n }\n if(stmt.handlers.length == 0 && stmt.orelse.length > 0){\n throw _b_.ValueError.$factory(\n \"Try has orelse but no except handlers\");\n }\n for(var handler of stmt.handlers){\n if(handler.type){\n validate_expr(handler.type, Load)\n validate_body(handler.body, \"ExceptHandler\")\n }\n }\n if(stmt.finalbody.length > 0){\n validate_stmts(stmt.finalbody)\n }\n if(stmt.orelse.length > 0){\n validate_stmts(stmt.orelse)\n }\n break;\n case mod.TryStar:\n validate_body(stmt.body, \"TryStar\")\n if(stmt.handlers.length + stmt.finalbody.length == 0){\n throw _b_.ValueError.$factory(\n \"TryStar has neither except handlers nor finalbody\");\n }\n if(stmt.handlers.length == 0 && stmt.orelse.length > 0){\n throw _b_.ValueError.$factory(\n \"TryStar has orelse but no except handlers\");\n }\n for(var handler of stm.handlers){\n if(handler.type){\n validate_expr(handler.type, Load)\n validate_body(handler.body, \"ExceptHandler\")\n }\n }\n if(stmt.finalbody.length > 0){\n validate_stmts(stmt.finalbody)\n }\n if(stmt.orelse.length > 0){\n validate_stmts(stmt.orelse)\n }\n break;\n case mod.Assert:\n validate_expr(stmt.test, Load)\n if(stmt.msg){\n validate_expr(stmt.msg, Load)\n }\n break;\n case mod.Import:\n validate_nonempty_seq(stmt.names, \"names\", \"Import\");\n break;\n case mod.ImportFrom:\n if(stmt.level < 0) {\n throw _b_.ValueError.$factory(\"Negative ImportFrom level\")\n }\n validate_nonempty_seq(stmt.names, \"names\", \"ImportFrom\");\n break;\n case mod.Global:\n validate_nonempty_seq(stmt.names, \"names\", \"Global\");\n break;\n case mod.Nonlocal:\n validate_nonempty_seq(stmt.names, \"names\", \"Nonlocal\");\n break;\n case mod.Expr:\n validate_expr(stmt.value, Load);\n break;\n case mod.AsyncFunctionDef:\n validate_body(stmt.body, \"AsyncFunctionDef\")\n validate_arguments(stmt.args)\n validate_exprs(stmt.decorator_list, Load, 0)\n if(stmt.returns){\n validate_expr(stmt.returns, Load)\n }\n break;\n case mod.Pass:\n case mod.Break:\n case mod.Continue:\n break;\n // No default case so compiler emits warning for unhandled cases\n }\n}\n\n\nmod._validate = function(ast_obj){\n switch (ast_obj.__class__) {\n case mod.Module:\n validate_stmts(ast_obj.body);\n break;\n case mod.Interactive:\n validate_stmts(ast_obj.body);\n break;\n case mod.Expression:\n validate_expr(ast_obj.body, Load);\n break;\n case mod.FunctionType:\n validate_exprs(ast_obj.argtypes, Load, 0) &&\n 
validate_expr(ast_obj.returns, Load);\n break;\n // No default case so compiler emits warning for unhandled cases\n }\n}\n\n$B.imported._ast = mod\n\n}\n)(__BRYTHON__)\n"], "_strptime": [".js", "\n(function($B){\n var _b_ = __BRYTHON__.builtins\n $B.imported._strptime = {\n _strptime_datetime: function(cls, s, fmt){\n var pos_s = 0,\n pos_fmt = 0,\n dt = {}\n function error(time_data, format){\n throw _b_.ValueError.$factory(\n `time data '${time_data}' does not match format '${format}'`)\n }\n\n var locale = __BRYTHON__.locale,\n shortdays = [],\n longdays = [],\n conv_func = locale == \"C\" ?\n function(d, options){\n return d.toLocaleDateString('en-EN', options)\n } :\n function(d, options){\n return d.toLocaleDateString(locale, options)\n }\n\n for(var day = 16; day < 23; day++){\n var d = new Date(Date.UTC(2012, 11, day, 3, 0, 0))\n shortdays.push(conv_func(d, {weekday: 'short'}))\n longdays.push(conv_func(d, {weekday: 'long'}))\n }\n\n var shortmonths = [],\n longmonths = []\n\n for(var month = 0; month < 12; month++){\n var d = new Date(Date.UTC(2012, month, 11, 3, 0, 0))\n shortmonths.push(conv_func(d, {month: 'short'}))\n longmonths.push(conv_func(d, {month: 'long'}))\n }\n\n var shortdays_re = new RegExp(shortdays.join(\"|\").replace(\".\", \"\\\\.\")),\n longdays_re = new RegExp(longdays.join(\"|\")),\n shortmonths_re = new RegExp(shortmonths.join(\"|\").replace(\".\", \"\\\\.\")),\n longmonths_re = new RegExp(longmonths.join(\"|\"))\n\n var regexps = {\n d: [\"day\", new RegExp(\"^[123][0-9]|0?[1-9]\")],\n f: [\"microsecond\", new RegExp(\"^\\\\d{1,6}\")],\n H: [\"hour\", new RegExp(\"^[01][0-9]|2[0-3]|\\\\d\")],\n I: [\"hour\", new RegExp(\"^1[0-2]|0?[0-9]\")],\n m: [\"month\", new RegExp(\"^1[012]|0?[1-9]\")],\n M: [\"minute\", new RegExp(\"^[1-5][0-9]|0?[0-9]\")],\n S: [\"second\", new RegExp(\"^[1-5]\\\\d|0?\\\\d\")],\n y: [\"year\", new RegExp(\"^0{0,2}\\\\d{2}\")],\n Y: [\"year\", new RegExp(\"^\\\\d{4}\")],\n z: [\"tzinfo\", new RegExp(\"Z\")]\n }\n\n for(var key in regexps){\n var re = new RegExp('%' + key, \"g\"),\n mo = fmt.match(re)\n if(mo && mo.length > 1){\n throw _b_.ValueError.$factory('strptime directive %' +\n key + ' defined more than once')\n }\n }\n\n while(pos_fmt < fmt.length){\n var car = fmt.charAt(pos_fmt)\n if(car == \"%\"){\n var spec = fmt.charAt(pos_fmt + 1),\n regexp = regexps[spec]\n if(regexp !== undefined){\n var re = regexp[1],\n attr = regexp[0],\n res = re.exec(s.substr(pos_s))\n if(res === null){\n error(s, fmt)\n }else{\n dt[attr] = parseInt(res[0])\n if(attr == \"microsecond\"){\n while(dt[attr] < 100000){\n dt[attr] *= 10\n }\n }else if(attr == \"tzinfo\"){\n // Only value supported for the moment : Z\n // (UTC)\n var dt_module = $B.imported[cls.__module__]\n dt.tzinfo = dt_module.timezone.utc\n }\n pos_fmt += 2\n pos_s += res[0].length\n }\n }else if(spec == \"a\" || spec == \"A\"){\n // Locale's abbreviated (a) or full (A) weekday name\n var attr = \"weekday\",\n re = spec == \"a\" ? shortdays_re : longdays_re,\n t = spec == \"a\" ? shortdays : longdays\n res = re.exec(s.substr(pos_s))\n if(res === null){\n console.log('error', re, 'string', s.substr(pos_s), 'fmt', fmt)\n error(s, fmt)\n }else{\n var match = res[0],\n ix = t.indexOf(match)\n }\n dt.weekday = ix\n pos_fmt += 2\n pos_s += match.length\n }else if(spec == \"b\" || spec == \"B\"){\n // Locales's abbreviated (b) or full (B) month\n var attr = \"month\",\n re = spec == \"b\" ? shortmonths_re : longmonths_re,\n t = spec == \"b\" ? 
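// --- Illustrative sketch (not part of the packaged _strptime module) ---
// _strptime_datetime above matches each "%x" directive against a dedicated
// regular expression while walking the data string and the format string in
// step. A minimal standalone version of that idea, limited to %Y, %m and %d
// (the function name and the reduced directive table are hypothetical,
// chosen only for this example):
function sketchStrptime(s, fmt){
    var directives = {
        Y: ["year",  /^\d{4}/],
        m: ["month", /^1[012]|0?[1-9]/],
        d: ["day",   /^[123][0-9]|0?[1-9]/]
    }
    var result = {}, posS = 0, posFmt = 0
    while(posFmt < fmt.length){
        if(fmt[posFmt] == "%"){
            var spec = directives[fmt[posFmt + 1]]
            if(spec === undefined){
                throw new Error("unsupported directive %" + fmt[posFmt + 1])
            }
            var mo = spec[1].exec(s.substr(posS))
            if(mo === null){
                throw new Error(`time data '${s}' does not match format '${fmt}'`)
            }
            result[spec[0]] = parseInt(mo[0])
            posS += mo[0].length
            posFmt += 2
        }else if(fmt[posFmt] == s[posS]){
            // literal characters in the format must match the data exactly
            posFmt++
            posS++
        }else{
            throw new Error(`time data '${s}' does not match format '${fmt}'`)
        }
    }
    return result
}
// sketchStrptime("2024-05-17", "%Y-%m-%d") -> {year: 2024, month: 5, day: 17}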
shortmonths : longmonths,\n res = re.exec(s.substr(pos_s))\n if(res === null){\n error(s, fmt)\n }else{\n var match = res[0],\n ix = t.indexOf(match)\n }\n dt.month = ix + 1\n pos_fmt += 2\n pos_s += match.length\n }else if(spec == \"c\"){\n // Locale's appropriate date and time representation\n var fmt1 = fmt.substr(0, pos_fmt - 1) + _locale_c_format() +\n fmt.substr(pos_fmt + 2)\n fmt = fmt1\n }else if(spec == \"%\"){\n if(s.charAt(pos_s) == \"%\"){\n pos_fmt++\n pos_s++\n }else{\n error(s, fmt)\n }\n }else{\n pos_fmt++\n }\n }else{\n if(car == s.charAt(pos_s)){\n pos_fmt++\n pos_s++\n }else{\n error(s, fmt)\n }\n }\n }\n\n if(pos_s < s.length){\n throw _b_.ValueError.$factory('unconverted data remains: ' +\n s.substr(pos_s))\n }\n\n return $B.$call(cls)(dt.year, dt.month, dt.day,\n dt.hour || 0, dt.minute || 0, dt.second || 0,\n dt.microsecond || 0, dt.tzinfo || _b_.None)\n }\n }\n})(__BRYTHON__)\n"], "dis": [".js", "(function($B){\n\nvar dict = $B.builtins.dict\nvar mod = {\n dis:function(src){\n $B.$py_module_path['__main__'] = $B.brython_path\n return __BRYTHON__.py2js(src,'__main__','__main__',\n $B.builtins_scope).to_js()\n },\n OPTIMIZED: 1,\n NEWLOCALS: 2,\n VARARGS: 4,\n VARKEYWORDS: 8,\n NESTED: 16,\n GENERATOR: 32,\n NOFREE: 64,\n COROUTINE: 128,\n ITERABLE_COROUTINE: 256,\n ASYNC_GENERATOR: 512,\n COMPILER_FLAG_NAMES: $B.builtins.dict.$factory()\n}\nmod.COMPILER_FLAG_NAMES = dict.$factory([\n [1, \"OPTIMIZED\"],\n [2, \"NEWLOCALS\"],\n [4, \"VARARGS\"],\n [8, \"VARKEYWORDS\"],\n [16, \"NESTED\"],\n [32, \"GENERATOR\"],\n [64, \"NOFREE\"],\n [128, \"COROUTINE\"],\n [256, \"ITERABLE_COROUTINE\"],\n [512, \"ASYNC_GENERATOR\"]\n])\n\n$B.addToImported('dis', mod)\n\n})(__BRYTHON__)"], "_zlib_utils": [".js", "\n\n(function($B){\n\n\nfunction rfind(buf, seq){\n var buflen = buf.length,\n len = seq.length\n for(var i = buflen - len; i >= 0; i--){\n var chunk = buf.slice(i, i + len),\n found = true\n for(var j = 0; j < len; j++){\n if(chunk[j] != seq[j]){\n found = false\n break\n }\n }\n if(found){return i}\n }\n return -1\n}\n\n\nvar c;\nvar crcTable = [];\nfor(var n =0; n < 256; n++){\n c = n;\n for(var k =0; k < 8; k++){\n c = ((c&1) ? 
(0xEDB88320 ^ (c >>> 1)) : (c >>> 1));\n }\n crcTable[n] = c;\n}\n\nvar mod = {\n crc32: function(bytes, crc) {\n var crc = crc ^ (-1);\n\n for (var byte of bytes.source) {\n crc = (crc >>> 8) ^ crcTable[(crc ^ byte) & 0xFF];\n }\n\n return (crc ^ (-1)) >>> 0;\n },\n\n lz_generator: function(text, size, min_len){\n /*\n Returns a list of items based on the LZ algorithm, using the\n specified window size and a minimum match length.\n The items are a tuple (length, distance) if a match has been\n found, and a byte otherwise.\n */\n // 'text' is an instance of Python 'bytes' class, the actual\n // bytes are in text.source\n text = text.source\n if(min_len === undefined){\n min_len = 3\n }\n var pos = 0, // position in text\n items = [] // returned items\n while(pos < text.length){\n sequence = text.slice(pos, pos + min_len)\n if(sequence.length < 3){\n for(var i = pos; i < text.length; i++){\n items.push(text[i])\n }\n break\n }\n // Search the sequence in the 'size' previous bytes\n buf = text.slice(pos - size, pos)\n buf_pos = rfind(buf, sequence)\n if(buf_pos > -1){\n // Match of length 3 found; search a longer one\n var len = 1\n while(len < 259 &&\n buf_pos + len < buf.length &&\n pos + len < text.length &&\n text[pos + len] == buf[buf_pos + len]){\n len += 1\n }\n match = text.slice(pos, pos + len)\n // \"Lazy matching\": search longer match starting at next\n // position\n longer_match = false\n if(pos + len < text.length - 2){\n match2 = text.slice(pos + 1, pos + len + 2)\n longer_buf_pos = rfind(buf, match2)\n if(longer_buf_pos > -1){\n // found longer match : emit current byte as\n // literal and move 1 byte forward\n longer_match = true\n char = text[pos]\n items.push(char)\n pos += 1\n }\n }\n if(! longer_match){\n distance = buf.length - buf_pos\n items.push($B.fast_tuple([len, distance]))\n if(pos + len == text.length){\n break\n }else{\n pos += len\n items.push(text[pos])\n pos += 1\n }\n }\n }else{\n char = text[pos]\n items.push(char)\n pos += 1\n }\n }\n return items\n }\n}\n\n$B.addToImported('_zlib_utils', mod)\n\n})(__BRYTHON__)"], "marshal": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nvar module = {\n loads: function(){\n var $ = $B.args('loads', 1, {obj:null}, ['obj'], arguments, {},\n null, null)\n return $B.structuredclone2pyobj(JSON.parse($.obj))\n },\n load: function(){\n var $ = $B.args('load', 1, {file:null}, ['file'], arguments, {},\n null, null)\n var content = $B.$call($B.$getattr($.file, \"read\"))()\n return $module.loads(_b_.bytes.decode(content, \"latin-1\"));\n },\n dump: function(){\n var $ = $B.args('dump', 2, {value:null, file: null},\n ['value', 'file'], arguments, {}, null, null)\n var s = JSON.stringify($B.pyobj2structuredclone($.value))\n $B.$getattr($.file, \"write\")(_b_.str.encode(s, 'latin-1'))\n var flush = $B.$getattr($.file, \"flush\", null)\n if(flush !== null){\n $B.$call(flush)()\n }\n return _b_.None\n },\n dumps: function(){\n var $ = $B.args('dumps', 1, {obj:null}, ['obj'], arguments, {},\n null, null)\n return JSON.stringify($B.pyobj2structuredclone($.obj))\n }\n}\n\n$B.addToImported('marshal', module)\n\n})(__BRYTHON__)\n"], "_json": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nfunction simple(obj){\n switch(typeof obj){\n case 'string':\n case 'number':\n case 'boolean':\n return true\n }\n if(obj instanceof Number ||\n Array.isArray(obj) ||\n $B.$isinstance(obj, [_b_.list, _b_.tuple, _b_.dict])){\n return true\n }\n return false\n}\n\nfunction to_json(obj, level){\n var $defaults = {skipkeys:_b_.False, 
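// --- Illustrative sketch (not part of the packaged _zlib_utils module) ---
// crc32() above relies on a 256-entry table built once from the reflected
// polynomial 0xEDB88320; each input byte then costs one table lookup and one
// unsigned shift. The same computation as a self-contained function
// (the name is hypothetical):
function sketchCrc32(bytes){
    var table = []
    for(var n = 0; n < 256; n++){
        var c = n
        for(var k = 0; k < 8; k++){
            c = (c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1)
        }
        table[n] = c
    }
    var crc = 0xFFFFFFFF  // same starting state as crc ^ (-1) with crc = 0
    for(var byte of bytes){
        crc = (crc >>> 8) ^ table[(crc ^ byte) & 0xFF]
    }
    return (crc ^ 0xFFFFFFFF) >>> 0
}
// The usual check value: for the ASCII bytes of "123456789" this should
// return 0xCBF43926.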
ensure_ascii:_b_.True,\n check_circular:_b_.True, allow_nan:_b_.True, cls:_b_.None,\n indent:_b_.None, separators:_b_.None, \"default\":_b_.None,\n sort_keys:_b_.False},\n $ = $B.args(\"to_json\", 2, {obj: null, level: null}, ['obj', 'level'],\n arguments, {level: 1}, null, \"kw\")\n\n var kw = _b_.dict.$to_obj($.kw)\n for(var key in $defaults){\n if(! kw.hasOwnProperty(key)){\n kw[key] = $defaults[key]\n }\n }\n\n var indent = kw.indent,\n ensure_ascii = kw.ensure_ascii,\n separators = kw.separators === _b_.None ?\n kw.indent === _b_.None ? [', ', ': '] : [',', ': '] :\n kw.separators,\n skipkeys = kw.skipkeys,\n _default = kw.default,\n sort_keys = kw.sort_keys,\n allow_nan = kw.allow_nan,\n check_circular = kw.check_circular\n\n var item_separator = separators[0],\n key_separator = separators[1]\n if(indent !== _b_.None){\n var indent_str\n if(typeof indent == \"string\"){\n indent_str = indent\n }else if(typeof indent == \"number\" && indent >= 1){\n indent_str = \" \".repeat(indent)\n }else{\n throw _b_.ValueError.$factory(\"invalid indent: \" +\n _b_.str.$factory(indent))\n }\n }\n var kwarg = {$kw: [{}]}\n for(var key in kw){\n kwarg.$kw[0][key] = kw[key]\n }\n\n switch(typeof obj){\n case 'string':\n var res = JSON.stringify(obj)\n if(ensure_ascii){\n var escaped = ''\n for(var i = 0, len = res.length; i < len; i++){\n var u = res.codePointAt(i)\n if(u > 127){\n u = u.toString(16)\n while(u.length < 4){\n u = \"0\" + u\n }\n escaped += '\\\\u' + u\n }else{\n escaped += res.charAt(i)\n }\n }\n return escaped\n }\n return res\n case 'boolean':\n return obj.toString()\n case 'number':\n if([Infinity, -Infinity].indexOf(obj) > -1 ||\n isNaN(obj)){\n if(! allow_nan){\n throw _b_.ValueError.$factory(\n 'Out of range float values are not JSON compliant')\n }\n }\n return obj.toString()\n }\n if(obj instanceof String){\n if(! ensure_ascii){\n return $B.String(obj)\n }\n // string with surrogate pairs. cf. 
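// --- Illustrative sketch (not part of the packaged _json module) ---
// With ensure_ascii, to_json() above keeps JSON.stringify's output for the
// ASCII range and rewrites any other code point as a \uXXXX escape. The
// escaping step on its own (BMP characters only; the module has a separate
// branch for surrogate pairs, cf. issue #1903 mentioned below; the function
// name is hypothetical):
function sketchEnsureAscii(s){
    var res = JSON.stringify(s),
        escaped = ''
    for(var i = 0; i < res.length; i++){
        var u = res.codePointAt(i)
        if(u > 127){
            var hex = u.toString(16)
            while(hex.length < 4){
                hex = '0' + hex
            }
            escaped += '\\u' + hex
        }else{
            escaped += res.charAt(i)
        }
    }
    return escaped
}
// sketchEnsureAscii("été") escapes each "é" (U+00E9) as \u00e9, keeping the
// surrounding quotes produced by JSON.stringify.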
issue #1903.\n var res = ''\n if(obj.surrogates){\n var s_ix = 0,\n s_pos = obj.surrogates[s_ix]\n for(var i = 0, len = obj.length; i < len; i++){\n if(i == s_pos){\n var code = obj.codePointAt(i) - 0x10000\n res += '\\\\u' + (0xD800 | (code >> 10)).toString(16) +\n '\\\\u' + (0xDC00 | (code & 0x3FF)).toString(16)\n i++\n s_ix++\n s_pos = obj.surrogates[s_ix]\n }else{\n var code = obj.charCodeAt(i)\n if(code < 127){\n var x = _b_.repr(obj[i])\n res += x.substr(1, x.length - 2)\n }else{\n var x = code.toString(16)\n while(x.length < 4){\n x = '0' + x\n }\n res += '\\\\u' + x\n }\n }\n }\n }\n return '\"' + res.replace(new RegExp('\"', \"g\"), '\\\\\"') + '\"'\n }\n\n if($B.$isinstance(obj, _b_.list)){\n var res = []\n var sep = item_separator,\n first = '[',\n last = ']'\n if(indent !== _b_.None){\n sep += \"\\n\" + indent_str.repeat(level)\n first = '[' + '\\n' + indent_str.repeat(level)\n last = '\\n' + indent_str.repeat(level - 1) + ']'\n level++\n }\n for(var i = 0, len = obj.length; i < len; i++){\n res.push(to_json(obj[i], level, kwarg))\n }\n return first + res.join(sep) + last\n }else if($B.$isinstance(obj, _b_.float)){\n return obj.value\n }else if(obj.__class__ === $B.long_int){\n return obj.value.toString()\n }else if(obj === _b_.None){\n return \"null\"\n }else if($B.$isinstance(obj, _b_.dict)){\n var res = [],\n items = Array.from($B.make_js_iterator(_b_.dict.items(obj)))\n if(sort_keys){\n // Sort keys by alphabetical order\n items.sort()\n }\n var sep = item_separator,\n first = '{',\n last = '}'\n if(indent !== _b_.None){\n sep += \"\\n\" + indent_str.repeat(level)\n first = '{' + '\\n' + indent_str.repeat(level)\n last = '\\n' + indent_str.repeat(level - 1) + '}'\n level++\n }\n for(var i = 0, len = items.length; i < len; i++){\n var item = items[i]\n if(! simple(item[0])){\n if(! skipkeys){\n throw _b_.TypeError.$factory(\"keys must be str, int, \" +\n \"float, bool or None, not \" + $B.class_name(obj))\n }\n }else{\n // In the result, key must be a string\n var key = _b_.str.$factory(item[0])\n // Check circular reference\n if(check_circular && $B.repr.enter(item[1])){\n throw _b_.ValueError.$factory(\"Circular reference detected\")\n }\n res.push(\n [to_json(key, level, kwarg), to_json(item[1], level, kwarg)].\n join(key_separator))\n if(check_circular){\n $B.repr.leave(item[1])\n }\n }\n }\n return first + res.join(sep) + last\n }\n // For other types, use function default if provided\n if(_default == _b_.None){\n throw _b_.TypeError.$factory(\"Object of type \" + $B.class_name(obj) +\n \" is not JSON serializable\")\n }else{\n return to_json($B.$call(_default)(obj), level, kwarg)\n }\n}\n\nfunction loads(s){\n var args = []\n for(var i = 1, len = arguments.length; i < len; i++){\n args.push(arguments[i])\n }\n var decoder = JSONDecoder.$factory.apply(null, args)\n return JSONDecoder.decode(decoder, s)\n}\n\nfunction to_py(obj, kw){\n // Conversion to Python objects\n // kw are the keyword arguments to loads()\n var res\n if(obj instanceof List){\n return obj.items.map(x => to_py(x, kw))\n }else if(obj instanceof Dict){\n if(kw.object_pairs_hook !== _b_.None){\n var pairs = []\n for(var i = 0, len = obj.keys.length; i < len; i++){\n pairs.push($B.fast_tuple([obj.keys[i],\n to_py(obj.values[i], kw)]))\n }\n return $B.$call(kw.object_pairs_hook)(pairs)\n }else{\n var dict = $B.empty_dict()\n for(var i = 0, len = obj.keys.length; i < len; i++){\n _b_.dict.$setitem(dict, obj.keys[i], to_py(obj.values[i], kw))\n }\n return kw.object_hook === _b_.None ? 
dict :\n $B.$call(kw.object_hook)(dict)\n }\n }else if(obj.type == 'str'){\n return obj.value\n }else if(obj.type == 'num'){\n if(obj.value.search(/[.eE]/) > -1){\n // float\n if(kw.parse_float !== _b_.None){\n return $B.$call(kw.parse_float)(obj.value)\n }\n return $B.fast_float(parseFloat(obj.value))\n }else{\n // integer\n if(kw.parse_int !== _b_.None){\n return $B.$call(kw.parse_int)(obj.value)\n }\n var int = parseInt(obj.value)\n if(Math.abs(int) < $B.max_int){\n return int\n }else{\n return $B.fast_long_int(BigInt(obj.value))\n }\n }\n }else{\n if(obj instanceof Number && kw.parse_float !== _b_.None){\n return $B.$call(kw.parse_float)(obj)\n }else if(kw.parse_int !== _b_.None &&\n (typeof obj == 'number' || obj.__class__ === $B.long_int)){\n return $B.$call(kw.parse_int)(obj)\n }else if(kw.parse_constant !== _b_.None && ! isFinite(obj)){\n return kw.parse_constant(obj)\n }\n return obj\n }\n}\n\nvar escapes = {'n': '\\n',\n 't': '\\t',\n 'b': '\\b',\n 'r': '\\r',\n 'f': '\\f',\n '\\\\': '\\\\',\n '\"': '\\\"',\n \"'\": \"\\\\'\",\n '/': '/'\n }\n\nfunction string_at(s, i){\n var error = $B.$call($B.imported[\"json\"].JSONDecodeError)\n\n var j = i + 1,\n escaped = false,\n len = s.length,\n value = ''\n while(j < len){\n if(s[j] == '\"' && ! escaped){\n return [{type: 'str', value}, j + 1]\n }else if(! escaped && s[j] == '\\\\'){\n escaped = ! escaped\n j++\n }else if(escaped){\n var esc = escapes[s[j]]\n if(esc){\n value += esc\n j++\n escaped = false\n }else if(s[j] == 'u' &&\n s.substr(j + 1, 4).match(/[0-9a-fA-f]{4}/)){\n // unicode escape\n value += String.fromCharCode(parseInt(s.substr(j + 1, 4), 16))\n j += 5\n escaped = ! escaped\n }else{\n throw error('invalid escape \"' + s[j] + '\"', s, j)\n }\n }else{\n value += s[j]\n j++\n }\n }\n}\n\nfunction to_num(num_string, nb_dots, exp){\n // convert to correct Brython type\n if(exp || nb_dots){\n return new Number(num_string)\n }else{\n var int = parseInt(num_string)\n if(Math.abs(int) < $B.max_int){\n return int\n }else{\n if(num_string.startsWith('-')){\n return $B.fast_long_int(num_string.substr(1), false)\n }else{\n return $B.fast_long_int(num_string, true)\n }\n }\n }\n}\n\nfunction num_at(s, i){\n var res = s[i],\n j = i + 1,\n nb_dots = 0,\n exp = false,\n len = s.length\n while(j < len){\n if(s[j].match(/\\d/)){\n j++\n }else if(s[j] == '.' && nb_dots == 0){\n nb_dots++\n j++\n }else if('eE'.indexOf(s[j]) > -1 && ! exp){\n exp = ! 
exp\n j++\n }else if(s[j] == '-' && 'eE'.includes(s[j-1])){\n j++\n }else{\n return [{type: 'num', value: s.substring(i, j)}, j]\n }\n }\n return [{type: 'num', value: s.substring(i, j)}, j]\n}\n\nvar JSONError = $B.make_class('json.decoder.JSONError')\nJSONError.__bases__ = [_b_.Exception]\nJSONError.__mro__ = _b_.type.mro(JSONError)\n\n\nfunction* tokenize(s){\n var i = 0,\n len = s.length,\n line_num = 1,\n column_start = 0,\n value,\n end\n while(i < len){\n if(s[i] == \" \" || s[i] == '\\r' || s[i] == '\\n' || s[i] == '\\t'){\n i++\n line_num++\n column_start = i\n }else if('[]{}:,'.indexOf(s[i]) > -1){\n yield [s[i], i]\n i++\n }else if(s.substr(i, 4) == 'null'){\n yield [_b_.None , i]\n i += 4\n }else if(s.substr(i, 4) == 'true'){\n yield [true, i]\n i += 4\n }else if(s.substr(i, 5) == 'false'){\n yield [false, i]\n i += 5\n }else if(s.substr(i, 8) == 'Infinity'){\n yield [{type: 'num', value: 'Infinity'}, i]\n i += 8\n }else if(s.substr(i, 9) == '-Infinity'){\n yield [{type: 'num', value: '-Infinity'}, i]\n i += 9\n }else if(s.substr(i, 3) == 'NaN'){\n yield [{type: 'num', value: 'NaN'}, i]\n i += 3\n }else if(s[i] == '\"'){\n value = string_at(s, i)\n yield value\n i = value[1]\n }else if(s[i].match(/\\d/) || s[i] == '-'){\n value = num_at(s, i)\n yield value\n i = value[1]\n }else{\n throw $B.$call(JSONError)('Extra data: ' +\n `line ${line_num} column ${1 + i - column_start}`)\n }\n }\n}\n\nfunction Node(parent){\n this.parent = parent\n if(parent instanceof List){\n this.list = parent.items\n }else if(parent instanceof Dict){\n this.list = parent.values\n }else if(parent === undefined){\n this.list = []\n }\n}\n\nNode.prototype.transition = function(token){\n if([true, false, _b_.None].includes(token) ||\n ['str', 'num'].includes(token.type)){\n if(this.parent === undefined &&\n (this.list.length > 0 || this.content)){\n throw Error('Extra data')\n }\n this.list.push(token)\n return this.parent ? 
this.parent : this\n }else if(token == '{'){\n if(this.parent === undefined){\n this.content = new Dict(this)\n return this.content\n }\n return new Dict(this.parent)\n }else if(token == '['){\n if(this.parent === undefined){\n this.content = new List(this)\n return this.content\n }\n return new List(this.parent)\n }else{\n throw Error('unexpected item:' + token)\n }\n}\n\nfunction Dict(parent){\n this.parent = parent\n this.keys = []\n this.values = []\n this.expect = 'key'\n if(parent instanceof List){\n parent.items.push(this)\n }else if(parent instanceof Dict){\n parent.values.push(this)\n }\n}\n\nDict.prototype.transition = function(token){\n if(this.expect == 'key'){\n if(token.type == 'str'){\n this.keys.push(token.value)\n this.expect = ':'\n return this\n }else if(token == '}' && this.keys.length == 0){\n return this.parent\n }else{\n throw Error('expected str')\n }\n }else if(this.expect == ':'){\n if(token == ':'){\n this.expect = '}'\n return new Node(this)\n }else{\n throw Error('expected :')\n }\n }else if(this.expect == '}'){\n if(token == '}'){\n return this.parent\n }else if(token == ','){\n this.expect = 'key'\n return this\n }\n throw Error('expected }')\n }\n}\n\nfunction List(parent){\n if(parent instanceof List){\n parent.items.push(this)\n }\n this.parent = parent\n this.items = []\n this.expect = 'item'\n}\n\nList.prototype.transition = function(token){\n if(this.expect == 'item'){\n this.expect = ','\n if([true, false, _b_.None].indexOf(token) > -1){\n this.items.push(token)\n return this\n }else if(token.type == 'num' || token.type == 'str'){\n this.items.push(token)\n return this\n }else if(token == '{'){\n return new Dict(this)\n }else if(token == '['){\n return new List(this)\n }else if(token == ']'){\n if(this.items.length == 0){\n if(this.parent instanceof Dict){\n this.parent.values.push(this)\n }\n return this.parent\n }\n throw Error('unexpected ]')\n }else{\n console.log('token', token)\n throw Error('unexpected item:' + token)\n }\n\n }else if(this.expect == ','){\n this.expect = 'item'\n if(token == ','){\n return this\n }else if(token == ']'){\n if(this.parent instanceof Dict){\n this.parent.values.push(this)\n }\n return this.parent\n }else{\n throw Error('expected :')\n }\n }\n}\n\nfunction parse(s){\n var res,\n state,\n node = new Node(),\n root = node,\n token\n for(var item of tokenize(s)){\n token = item[0]\n try{\n node = node.transition(token)\n }catch(err){\n console.log('error, item', item)\n console.log(err, err.message)\n console.log('node', node)\n if(err.__class__){\n throw err\n }else{\n var error = $B.$call($B.imported[\"json\"].JSONDecodeError)\n throw error(err.message, s, item[1])\n }\n }\n }\n return root.content ? 
root.content : root.list[0]\n}\n\nvar JSONDecoder = $B.make_class(\"JSONDecoder\",\n function(){\n var $defaults = {cls: _b_.None, object_hook: _b_.None,\n parse_float: _b_.None, parse_int: _b_.None,\n parse_constant: _b_.None, object_pairs_hook: _b_.None},\n $ = $B.args(\"decode\", 0, {}, [], arguments, {}, null, \"kw\")\n var kw = _b_.dict.$to_obj($.kw)\n for(var key in $defaults){\n if(kw[key] === undefined){\n kw[key] = $defaults[key]\n }\n }\n return {\n __class__: JSONDecoder,\n object_hook: kw.object_hook,\n parse_float: kw.parse_float,\n parse_int: kw.parse_int,\n parse_constant: kw.parse_constant,\n object_pairs_hook: kw.object_pairs_hook,\n memo: $B.empty_dict()\n }\n }\n)\n\nJSONDecoder.decode = function(self, s){\n return to_py(parse(s), self)\n}\n\n$B.imported._json = {\n dumps: function(){\n return _b_.str.$factory(to_json.apply(null, arguments))\n },\n loads,\n JSONDecoder\n}\n\n})(__BRYTHON__)"], "_binascii": [".js", "(function($B){\n\nvar _b_ = $B.builtins,\n _keyStr = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\"\n\nvar error = $B.make_class(\"error\", _b_.Exception.$factory)\nerror.__bases__ = [_b_.Exception]\n$B.set_func_names(error, \"binascii\")\n\nfunction decode(bytes, altchars, validate){\n var output = [],\n chr1, chr2, chr3,\n enc1, enc2, enc3, enc4\n\n var alphabet = make_alphabet(altchars)\n\n var input = bytes.source\n\n // If validate is set, check that all characters in input\n // are in the alphabet\n var _input = ''\n var padding = 0\n for(var i = 0, len = input.length; i < len; i++){\n var car = String.fromCharCode(input[i])\n var char_num = alphabet.indexOf(car)\n if(char_num == -1){\n if(validate){throw error.$factory(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i < input.length - 2){\n if(validate){throw error.$factory(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i >= input.length - 2){\n padding++\n _input += car\n }else{\n _input += car\n }\n }\n input = _input\n if(_input.length == padding){return _b_.bytes.$factory([])}\n if( _input.length % 4 > 0){throw error.$factory(\"Incorrect padding\")}\n\n var i = 0\n while(i < input.length){\n\n enc1 = alphabet.indexOf(input.charAt(i++))\n enc2 = alphabet.indexOf(input.charAt(i++))\n enc3 = alphabet.indexOf(input.charAt(i++))\n enc4 = alphabet.indexOf(input.charAt(i++))\n\n chr1 = (enc1 << 2) | (enc2 >> 4)\n chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)\n chr3 = ((enc3 & 3) << 6) | enc4\n\n output.push(chr1)\n\n if(enc3 != 64){output.push(chr2)}\n if(enc4 != 64){output.push(chr3)}\n\n }\n // return Python bytes\n return _b_.bytes.$factory(output)\n}\n\n\nvar hex2int = {},\n hex = '0123456789abcdef'\nfor(var i = 0; i < hex.length; i++){\n hex2int[hex[i]] = i\n hex2int[hex[i].toUpperCase()] = i\n}\n\nfunction make_alphabet(altchars){\n var alphabet = _keyStr\n if(altchars !== undefined && altchars !== _b_.None){\n // altchars is an instance of Python bytes\n var source = altchars.source\n alphabet = alphabet.substr(0,alphabet.length-3) +\n _b_.chr(source[0]) + _b_.chr(source[1]) + '='\n }\n return alphabet\n}\n\nvar module = {\n a2b_base64: function(){\n var $ = $B.args(\"a2b_base64\", 2, {s: null, strict_mode: null}, \n ['s', 'strict_mode'],\n arguments, {strict_mode: false}, null, null)\n var bytes\n if($B.$isinstance($.s, _b_.str)){\n bytes = _b_.str.encode($.s, 'ascii')\n }else if($B.$isinstance($.s, [_b_.bytes, _b_.bytearray])){\n bytes = $.s\n }else{\n throw _b_.TypeError.$factory('wrong type: ' + $B.class_name($.s))\n }\n return 
decode(bytes)\n },\n a2b_hex: function(){\n var $ = $B.args(\"a2b_hex\", 1, {s: null}, ['s'],\n arguments, {}, null, null),\n s = $.s\n if($B.$isinstance(s, _b_.bytes)){\n s = _b_.bytes.decode(s, 'ascii')\n }\n if(typeof s !== \"string\"){\n throw _b_.TypeError.$factory(\"argument should be bytes, \" +\n \"buffer or ASCII string, not '\" + $B.class_name(s) + \"'\")\n }\n\n var len = s.length\n if(len % 2 == 1){\n throw _b_.TypeError.$factory('Odd-length string')\n }\n\n var res = []\n for(var i = 0; i < len; i += 2){\n res.push((hex2int[s.charAt(i)] << 4) + hex2int[s.charAt(i + 1)])\n }\n return _b_.bytes.$factory(res)\n },\n b2a_base64: function(){\n var $ = $B.args(\"b2a_base64\", 1, {data: null}, ['data'],\n arguments, {}, null, \"kw\")\n var newline = _b_.dict.$get_string($.kw, 'newline', false)\n\n var string = $B.to_bytes($.data),\n res = btoa(String.fromCharCode.apply(null, string))\n if(newline){res += \"\\n\"}\n return _b_.bytes.$factory(res, \"ascii\")\n },\n b2a_hex: function(obj){\n var string = $B.to_bytes(obj),\n res = []\n function conv(c){\n if(c > 9){\n c = c + 'a'.charCodeAt(0) - 10\n }else{\n c = c + '0'.charCodeAt(0)\n }\n return c\n }\n string.forEach(function(char){\n res.push(conv((char >> 4) & 0xf))\n res.push(conv(char & 0xf))\n })\n return _b_.bytes.$factory(res)\n },\n b2a_uu: function(obj){\n var string = _b_.bytes.decode(obj, 'ascii')\n var len = string.length,\n res = String.fromCharCode((0x20 + len) & 0x3F)\n while(string.length > 0){\n var s = string.slice(0, 3)\n while(s.length < 3){s.push(String.fromCharCode(0))}\n var A = s[0],\n B = s[1],\n C = s[2]\n var a = (A >> 2) & 0x3F,\n b = ((A << 4) | ((B >> 4) & 0xF)) & 0x3F,\n c = (((B << 2) | ((C >> 6) & 0x3)) & 0x3F),\n d = C & 0x3F\n res += String.fromCharCode(0x20 + a, 0x20 + b, 0x20 + c, 0x20 + d)\n string = string.slice(3)\n }\n return _b_.bytes.$factory(res + \"\\n\", \"ascii\")\n },\n error: error\n}\n\nmodule.hexlify = module.b2a_hex\nmodule.unhexlify = module.a2b_hex\n\n$B.imported._binascii = module\n}\n)(__BRYTHON__)"], "_string": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nfunction parts(format_string){\n var result = [],\n _parts = $B.split_format(format_string) // defined in py_string.js\n for(var i = 0; i < _parts.length; i+= 2){\n result.push({pre: _parts[i], fmt: _parts[i + 1]})\n }\n return result\n}\n\nfunction Tuple(){\n var args = []\n for(var i=0, len=arguments.length; i < len; i++){\n args.push(arguments[i])\n }\n return _b_.tuple.$factory(args)\n}\n\n$B.imported._string = {\n\n formatter_field_name_split: function(fieldname){\n // Split the argument as a field name\n var parsed = $B.parse_format(fieldname),\n first = parsed.name,\n rest = []\n if(first.match(/\\d+/)){first = parseInt(first)}\n parsed.name_ext.forEach(function(ext){\n if(ext.startsWith(\"[\")){\n var item = ext.substr(1, ext.length - 2)\n if(item.match(/\\d+/)){\n rest.push(Tuple(false, parseInt(item)))\n }else{\n rest.push(Tuple(false, item))\n }\n }else{\n rest.push(Tuple(true, ext.substr(1)))\n }\n })\n return Tuple(first, _b_.iter(rest))\n },\n formatter_parser: function(format_string){\n // Parse the argument as a format string\n\n if(! _b_.isinstance(format_string, _b_.str)){\n throw _b_.ValueError.$factory(\"Invalid format string type: \" +\n $B.class_name(format_string))\n }\n\n var result = []\n parts(format_string).forEach(function(item){\n var pre = item.pre === undefined ? 
\"\" : item.pre,\n fmt = item.fmt\n if(fmt === undefined){\n result.push(Tuple(pre, _b_.None, _b_.None, _b_.None))\n }else if(fmt.string == ''){\n result.push(Tuple(pre, '', '', _b_.None))\n }else{\n result.push(Tuple(pre,\n fmt.raw_name + fmt.name_ext.join(\"\"),\n fmt.raw_spec,\n fmt.conv || _b_.None))\n }\n })\n return result\n }\n}\n})(__BRYTHON__)"], "_webcomponent": [".js", "// module for Web Components\n(function($B){\n\nvar _b_ = $B.builtins\n\nfunction define(tag_name, cls, options){\n var $ = $B.args(\"define\", 3, {tag_name: null, cls: null, options: null},\n [\"tag_name\", \"cls\", \"options\"], arguments, {options: _b_.None},\n null, null),\n tag_name = $.tag_name,\n cls = $.cls,\n options = $.options,\n _extends,\n extend_dom_name = 'HTMLElement'\n if(options !== _b_.None){\n if(! $B.$isinstance(options, _b_.dict)){\n throw _b_.TypeError.$factory('options can only be None or a ' +\n `dict, not '${$B.class_name(options)}'`)\n }\n try{\n _extends = _b_.dict.$getitem(options, 'extends')\n }catch(err){\n // ignore\n }\n }else{\n let stack = [...cls.__bases__];\n while(stack.length) {\n base = stack.pop();\n if(base.__module__ === 'browser.html'){\n _extends = base.__name__.toLowerCase()\n break\n }\n\n stack.push(...base.__bases__);\n }\n }\n\n if(_extends){\n if(typeof _extends != 'string'){\n throw _b_.TypeError.$factory('value for extends must be a ' +\n `string, not '${$B.class_name(_extends)}'`)\n }\n var elt = document.createElement(_extends)\n if(elt instanceof HTMLUnknownElement){\n throw _b_.ValueError.$factory(`'${_extends}' is not a valid ` +\n 'tag name')\n }\n var extend_tag = _extends.toLowerCase()\n extend_dom_name = Object.getPrototypeOf(elt).constructor.name\n }\n if(typeof tag_name != \"string\"){\n throw _b_.TypeError.$factory(\"first argument of define() \" +\n \"must be a string, not '\" + $B.class_name(tag_name) + \"'\")\n }else if(tag_name.indexOf(\"-\") == -1){\n throw _b_.ValueError.$factory(\"custom tag name must \" +\n \"contain a hyphen (-)\")\n }\n if(!$B.$isinstance(cls, _b_.type)){\n throw _b_.TypeError.$factory(\"second argument of define() \" +\n \"must be a class, not '\" + $B.class_name(tag_name) + \"'\")\n }\n cls.$webcomponent = true\n\n // Create the Javascript class used for the component. It must have\n // the same name as the Python class\n var src = String.raw`var WebComponent = class extends HTMLElement {\n constructor(){\n // Always call super first in constructor\n super()\n var html = $B.imported['browser.html']\n // Create tag in module html\n if(html['tag_name'] === undefined){\n html.maketag('tag_name', WebComponent)\n }\n var init = $B.$getattr(cls, \"__init__\", _b_.None)\n if(init !== _b_.None){\n try{\n var _self = $B.DOMNode.$factory(this),\n attrs_before_init = []\n for(var i = 0, len = _self.attributes.length; i < len; i++){\n attrs_before_init.push(_self.attributes.item(i))\n }\n _self.__class__ = cls\n $B.$call(init)(_self)\n if(WebComponent.initialized){\n // Check that init() did not introduce new attributes,\n // which is illegal\n // cf. 
https://html.spec.whatwg.org/multipage/custom-elements.html#custom-element-conformance\n for(var i = 0, len = _self.attributes.length; i < len; i++){\n var item = _self.attributes.item(i)\n if(attrs_before_init.indexOf(item) == -1){\n throw _b_.TypeError.$factory(\"Custom element \" +\n \"must not create attributes, found: \" +\n item.name + '=\"' + item.value + '\"')\n }\n }\n }\n }catch(err){\n $B.handle_error(err)\n }\n }\n }\n static get observedAttributes(){\n var obs_attr = $B.$getattr(cls, \"observedAttributes\", null)\n if(obs_attr === null){\n return []\n }else if(typeof obs_attr == \"function\"){\n var warning = _b_.DeprecationWarning.$factory(\n \"Setting observedAttributes as a method \" +\n \"is deprecated. Set it as a class attribute.\")\n // module _warning is in builtin_modules.js\n $B.imported._warnings.warn(warning)\n return $B.$call(obs_attr)(this)\n }else if(Array.isArray(obs_attr)){\n return obs_attr\n }else{\n throw _b_.TypeError.$factory(\n \"wrong type for observedAttributes: \" +\n $B.class_name(obs_attr))\n }\n }\n }\n `\n var name = cls.__name__,\n code = src.replace(/WebComponent/g, name).\n replace(/tag_name/g, tag_name).\n replace(/HTMLElement/, extend_dom_name)\n var src = eval(code)\n var webcomp = eval(name) // JS class for component\n webcomp.$cls = cls\n\n // Override __getattribute__ to handle DOMNode attributes such as\n // attachShadow\n cls.__getattribute__ = function(self, attr){\n try{\n return $B.DOMNode.__getattribute__(self, attr)\n }catch(err){\n if($B.DOMNode[attr]){\n if(typeof $B.DOMNode[attr] == 'function'){\n return function(){\n var args = [self]\n for(var i = 0, len = arguments.length; i < len; i++){\n args.push(arguments[i])\n }\n return $B.DOMNode[attr].apply(null, args)\n }\n }else{\n return $B.DOMNode[attr]\n }\n }\n throw err\n }\n }\n\n var mro = [cls].concat(cls.__mro__).reverse()\n for(var i = 0, len = mro.length; i < len; i++){\n var pcls = mro[i]\n for(var key in pcls){\n if((! 
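// --- Illustrative sketch (not part of the packaged _webcomponent module) ---
// define() above ultimately registers a JavaScript class with
// customElements.define(), forwarding the Python class's methods and its
// observedAttributes. The underlying browser API it builds on looks like
// this (tag and class names are hypothetical):
class SketchGreeting extends HTMLElement {
    static get observedAttributes(){
        return ['name']
    }
    connectedCallback(){
        // called when the element is inserted into the document
        this.textContent = 'Hello ' + (this.getAttribute('name') || 'world')
    }
    attributeChangedCallback(attr, oldValue, newValue){
        if(attr == 'name'){
            this.textContent = 'Hello ' + newValue
        }
    }
}
// A custom tag name must contain a hyphen, as the module checks above.
customElements.define('sketch-greeting', SketchGreeting)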
webcomp.hasOwnProperty(key)) &&\n typeof pcls[key] == \"function\" &&\n // don't set $factory (would make it a class)\n key !== '$factory'\n ){\n webcomp.prototype[key] = (function(attr, klass){\n return function(){\n try{\n return $B.$call(klass[attr])($B.DOMNode.$factory(this), ...arguments)\n }catch(err){\n $B.show_error(err)\n }\n }\n })(key, pcls)\n }\n }\n }\n\n // define WebComp as the class to use for the specified tag name\n if(_extends){\n customElements.define(tag_name, webcomp, {extends: extend_tag})\n }else{\n customElements.define(tag_name, webcomp)\n }\n webcomp.initialized = true\n}\n\nfunction get(name){\n var ce = customElements.get(name)\n if(ce && ce.$cls){return ce.$cls}\n return _b_.None\n}\n\nvar module = {\n define: define,\n get: get\n}\n\n$B.addToImported('_webcomponent', module)\n\n})(__BRYTHON__)\n"], "html_parser": [".js", "(function($B){\n\n_b_ = $B.builtins\n\nvar ELEMENT_NODE = 1,\n TEXT_NODE = 3,\n COMMENT_NODE = 8,\n DOCUMENT_TYPE_NODE = 10\n\nvar HTMLNode = $B.make_class(\"HTMLNode\",\n function(){\n return {\n __class__: HTMLNode,\n nodeType: TEXT_NODE,\n text: \"\"\n }\n }\n)\n\nHTMLNode.__str__ = function(self){\n return self.text\n}\n\n$B.set_func_names(HTMLNode, \"_html_parser\")\n\nfunction* tokenize(src){\n var node = HTMLNode.$factory(),\n pos = 0,\n tag = \"\",\n type = \"text\"\n while(pos < src.length){\n var char = src[pos]\n switch(type){\n case \"text\":\n if(char == \"<\"){\n // starts a tag if immediately followed by a letter or by /\n var tag_mo = /^(\\/?)[a-zA-Z]+/.exec(src.substr(pos + 1))\n if(tag_mo){\n yield node\n node = HTMLNode.$factory()\n type = \"tag\"\n node.tagName = \"\"\n node.nodeType = ELEMENT_NODE\n node.closing = tag_mo[1] != \"\"\n node.attrs = []\n }else{\n // doctype declaration\n var decl_mo = /^/i.exec(src.substr(pos))\n if(decl_mo){\n yield node\n node = HTMLNode.$factory()\n node.text = decl_mo[0]\n node.doctype = decl_mo[1]\n node.nodeType = DOCUMENT_TYPE_NODE\n yield node\n node = HTMLNode.$factory()\n type = \"text\"\n pos += decl_mo[0].length\n break\n }else{\n // comment\n var comment_mo = /^\\/.exec(src.substr(pos))\n if(comment_mo){\n yield node\n node = HTMLNode.$factory()\n node.text = comment_mo[0]\n node.comment = comment_mo[1]\n node.nodeType = COMMENT_NODE\n yield node\n node = HTMLNode.$factory()\n type = \"text\"\n pos += comment_mo[0].length\n break\n }\n }\n }\n }\n pos++\n node.text += char\n break\n case \"tag\":\n if(char.search(/[_a-zA-Z]/) > -1){\n var mo = /\\w+/.exec(src.substr(pos))\n if(mo !== null){\n pos += mo[0].length\n if(node.tagName == \"\"){\n node.tagName = mo[0].toUpperCase()\n }\n node.text += mo[0]\n }else{\n pos++\n }\n }else if(char == \">\"){\n node.text += char\n yield node\n node = HTMLNode.$factory()\n type = \"text\"\n pos++\n }else if(char == \"=\"){\n node.text += char\n pos++\n }else if(char == \"'\" || char == '\"'){\n var i = pos + 1,\n found_string_end = false\n while(i < src.length){\n if(src[i] == char){\n var nb_escape = 0\n while(src[i - 1 - nb_escape] == '/'){\n nb_escape++\n }\n if(nb_escape % 2 == 0){\n node.text += src.substr(pos, i + 1 - pos)\n pos = i + 1\n found_string_end = true\n break\n }else{\n i++\n }\n }else if(src[i] == '>'){\n break\n }else{\n i++\n }\n }\n if(! 
found_string_end){\n // unterminated string: ignore\n pos++\n }\n }else{\n node.text += char\n pos++\n }\n break\n default:\n pos++\n }\n }\n yield node\n}\nvar module = {\n ELEMENT_NODE: 1,\n TEXT_NODE: 3,\n COMMENT_NODE: 8,\n DOCUMENT_TYPE_NODE: 10,\n tokenize: tokenize\n}\n\n$B.addToImported('html_parser', module)\n\n})(__BRYTHON__)\n"], "_ajax": [".js", "// ajax\n__BRYTHON__.imported._ajax = (function($B){\n\n\nvar $N = $B.builtins.None,\n _b_ = $B.builtins\n\nvar add_to_res = function(res, key, val) {\n if($B.$isinstance(val, _b_.list)){\n for (j = 0; j < val.length; j++) {\n add_to_res(res, key, val[j])\n }\n }else if (val instanceof File || val instanceof Blob){\n res.append(key, val)\n }else{res.append(key, _b_.str.$factory(val))}\n}\n\nfunction set_timeout(self, timeout){\n if(timeout.seconds !== undefined){\n self.js.$requestTimer = setTimeout(\n function() {\n self.js.abort()\n if(timeout.func){\n timeout.func()\n }\n },\n timeout.seconds * 1000)\n }\n}\n\nfunction _read(req){\n var xhr = req.js\n if(xhr.responseType == \"json\"){\n return $B.structuredclone2pyobj(xhr.response)\n }\n if(req.charset_user_defined){\n // on blocking mode, xhr.response is a string\n var bytes = []\n for(var i = 0, len = xhr.response.length; i < len; i++){\n var cp = xhr.response.codePointAt(i)\n if(cp > 0xf700){\n bytes.push(cp - 0xf700)\n }else{\n bytes.push(cp)\n }\n }\n }else if(typeof xhr.response == \"string\"){\n if(req.mode == 'binary'){\n return _b_.str.encode(xhr.response, req.encoding || 'utf-8')\n }\n return xhr.response\n }else{\n // else it's an ArrayBuffer\n var buf = new Uint8Array(xhr.response),\n bytes = Array.from(buf.values())\n }\n var b = _b_.bytes.$factory(bytes)\n if(req.mode == \"binary\"){\n return b\n }else if(req.mode == \"document\"){\n return $B.jsobj2pyobj(xhr.response)\n }else{\n var encoding = req.encoding || \"utf-8\"\n return _b_.bytes.decode(b, encoding)\n }\n}\n\nfunction stringify(d){\n var items = []\n for(var entry of _b_.dict.$iter_items(d)){\n items.push(encodeURIComponent(entry.key) + \"=\" +\n encodeURIComponent(entry.value))\n }\n return items.join(\"&\")\n}\n\nfunction handle_kwargs(self, kw, method){\n // kw was created with $B.obj_dict(), its keys/values are in kw.$jsobj\n var data,\n encoding,\n headers={},\n cache,\n mode = \"text\",\n timeout = {}\n\n for(var item of _b_.dict.$iter_items(kw)){\n var key = item.key\n if(key == \"data\"){\n var params = item.value\n if(typeof params == \"string\" || params instanceof FormData){\n data = params\n }else if(params.__class__ === _b_.dict){\n data = stringify(params)\n }else{\n throw _b_.TypeError.$factory(\"wrong type for data: \" +\n $B.class_name(params))\n }\n }else if(key == \"encoding\"){\n encoding = item.value\n }else if(key == \"headers\"){\n var value = item.value\n if(! 
$B.$isinstance(value, _b_.dict)){\n throw _b_.ValueError.$factory(\n \"headers must be a dict, not \" + $B.class_name(value))\n }\n for(var subitem of _b_.dict.$iter_items(value)){\n headers[subitem.key.toLowerCase()] = subitem.value\n }\n }else if(key.startsWith(\"on\")){\n var event = key.substr(2)\n if(event == \"timeout\"){\n timeout.func = item.value\n }else{\n var f = item.value\n ajax.bind(self, event, f)\n }\n }else if(key == \"mode\"){\n var mode = item.value\n }else if(key == \"timeout\"){\n timeout.seconds = item.value\n }else if(key == \"cache\"){\n cache = item.value\n }\n }\n if(encoding && mode != \"text\"){\n throw _b_.ValueError.$factory(\"encoding not supported for mode \" +\n mode)\n }\n if((method == \"post\" || method == \"put\") && ! headers){\n // For POST requests, set default header\n self.js.setRequestHeader(\"Content-type\",\n \"application/x-www-form-urlencoded\")\n }\n\n return {cache, data, encoding, headers, mode, timeout}\n}\n\nvar ajax = $B.make_class('ajax')\n\najax.__repr__ = function(self){\n return ''\n}\n\najax.__getattribute__ = function(self, attr){\n if(ajax[attr] !== undefined){\n return function(){\n return ajax[attr].call(null, self, ...arguments)\n }\n }else if(attr == \"text\"){\n return _read(self)\n }else if(attr == \"json\"){\n if(self.js.responseType == \"json\"){\n return _read(self)\n }else{\n var resp = _read(self)\n try{\n return $B.structuredclone2pyobj(JSON.parse(resp))\n }catch(err){\n console.log('attr json, invalid resp', resp)\n throw err\n }\n }\n }else if(self.js[attr] !== undefined){\n if(typeof self.js[attr] == \"function\"){\n return function(){\n if(attr == \"setRequestHeader\"){\n ajax.set_header.call(null, self, ...arguments)\n }else{\n if(attr == 'overrideMimeType'){\n console.log('override mime type')\n self.hasMimeType = true\n }\n return self.js[attr](...arguments)\n }\n }\n }else{\n return self.js[attr]\n }\n }else if(attr == \"xml\"){\n return $B.jsobj2pyobj(self.js.responseXML)\n }\n}\n\najax.bind = function(self, evt, func){\n // req.bind(evt,func) is the same as req.onevt = func\n self.js['on' + evt] = function(){\n try{\n return func.apply(null, arguments)\n }catch(err){\n $B.handle_error(err)\n }\n }\n return _b_.None\n}\n\najax.open = function(){\n var $ = $B.args('open', 4,\n {self: null, method: null, url: null, async: null},\n ['self', 'method', 'url', 'async'], arguments,\n {async: true}, null, null),\n self = $.self,\n method = $.method,\n url = $.url,\n async = $.async\n if(typeof method !== \"string\"){\n throw _b_.TypeError.$factory(\n 'open() argument method should be string, got ' +\n $B.class_name(method))\n }\n if(typeof url !== \"string\"){\n throw _b_.TypeError.$factory(\n 'open() argument url should be string, got ' +\n $B.class_name(url))\n }\n self.$method = method\n self.blocking = ! self.async\n self.js.open(method, url, async)\n}\n\najax.read = function(self){\n return _read(self)\n}\n\najax.send = function(self, params){\n // params can be Python dictionary or string\n var content_type\n for(var key in self.headers){\n var value = self.headers[key]\n self.js.setRequestHeader(key, value)\n if(key == 'content-type'){\n content_type = value\n }\n }\n if((self.encoding || self.blocking) && ! 
self.hasMimeType){\n // On blocking mode, or if an encoding has been specified,\n // override Mime type so that bytes are not processed\n // (unless the Mime type has been explicitely set)\n self.js.overrideMimeType('text/plain;charset=x-user-defined')\n self.charset_user_defined = true\n }\n var res = ''\n if(! params){\n self.js.send()\n return _b_.None\n }\n if($B.$isinstance(params, _b_.str)){\n res = params\n }else if($B.$isinstance(params, _b_.dict)){\n if(content_type == 'multipart/form-data'){\n // The FormData object serializes the data in the 'multipart/form-data'\n // content-type so we may as well override that header if it was set\n // by the user.\n res = new FormData()\n var items = _b_.list.$factory(_b_.dict.items(params))\n for(var i = 0, len = items.length; i < len; i++){\n add_to_res(res, _b_.str.$factory(items[i][0]), items[i][1])\n }\n }else{\n if(self.$method && self.$method.toUpperCase() == \"POST\" &&\n ! content_type){\n // Set default Content-Type for POST requests\n self.js.setRequestHeader(\"Content-Type\",\n \"application/x-www-form-urlencoded\")\n }\n var items = _b_.list.$factory(_b_.dict.items(params))\n for(var i = 0, len = items.length; i < len; i++){\n var key = encodeURIComponent(_b_.str.$factory(items[i][0]));\n if($B.$isinstance(items[i][1], _b_.list)){\n for (j = 0; j < items[i][1].length; j++) {\n res += key +'=' +\n encodeURIComponent(_b_.str.$factory(items[i][1][j])) + '&'\n }\n }else{\n res += key + '=' +\n encodeURIComponent(_b_.str.$factory(items[i][1])) + '&'\n }\n }\n res = res.substr(0, res.length - 1)\n }\n }else if(params instanceof FormData){\n res = params\n }else{\n throw _b_.TypeError.$factory(\n \"send() argument must be string or dictionary, not '\" +\n _b_.str.$factory(params.__class__) + \"'\")\n }\n self.js.send(res)\n return _b_.None\n}\n\najax.set_header = function(self, key, value){\n self.headers[key.toLowerCase()] = value\n}\n\najax.set_timeout = function(self, seconds, func){\n self.js.$requestTimer = setTimeout(\n function() {\n self.js.abort()\n func()\n },\n seconds * 1000)\n}\n\najax.$factory = function(){\n\n var xmlhttp = new XMLHttpRequest()\n\n xmlhttp.onreadystatechange = function(){\n // here, \"this\" refers to xmlhttp\n var state = this.readyState\n if(this.responseType == \"\" || this.responseType == \"text\"){\n res.js.text = this.responseText\n }\n var timer = this.$requestTimer\n if(state == 0 && this.onuninitialized){\n this.onuninitialized(res)\n }else if(state == 1 && this.onloading){\n this.onloading(res)\n }else if(state == 2 && this.onloaded){\n this.onloaded(res)\n }else if(state == 3 && this.oninteractive){\n this.oninteractive(res)\n }else if(state == 4 && this.oncomplete){\n if(timer !== null){\n globalThis.clearTimeout(timer)\n }\n this.oncomplete(res)\n }\n }\n var res = {\n __class__: ajax,\n js: xmlhttp,\n headers: {}\n }\n return res\n}\n\n\nfunction _request_without_body(method){\n var $ = $B.args(method, 3, {method: null, url: null, blocking: null},\n [\"method\", \"url\", \"blocking\"], arguments, {blocking: false},\n null, \"kw\"),\n method = $.method,\n url = $.url,\n async = !$.blocking,\n kw = $.kw\n\n var self = ajax.$factory()\n self.blocking = $.blocking\n var items = handle_kwargs(self, kw, method),\n mode = self.mode = items.mode,\n encoding = self.encoding = items.encoding,\n qs = items.data\n if(qs){\n url += \"?\" + qs\n }\n if(! (items.cache === true)){\n url += (qs ? 
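// --- Illustrative sketch (not part of the packaged _ajax module) ---
// For requests without a body, the module above serializes the data dict as
// an application/x-www-form-urlencoded query string and, unless cache is
// True, appends a timestamp so the browser cannot serve a cached response.
// The same URL construction with a plain JavaScript object (the function
// name is hypothetical):
function sketchBuildUrl(url, data, cache){
    var parts = []
    for(var key in data){
        parts.push(encodeURIComponent(key) + '=' + encodeURIComponent(data[key]))
    }
    var qs = parts.join('&')
    if(qs){
        url += '?' + qs
    }
    if(! cache){
        // cache buster, mirroring the (new Date()).getTime() suffix used above
        url += (qs ? '&' : '?') + (new Date()).getTime()
    }
    return url
}
// sketchBuildUrl('/api/items', {q: 'brython', page: 2}, false)
// -> '/api/items?q=brython&page=2&<timestamp>'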
\"&\" : \"?\") + (new Date()).getTime()\n }\n self.js.open(method.toUpperCase(), url, async)\n\n if(async){\n if(mode == \"json\" || mode == \"document\"){\n self.js.responseType = mode\n }else{\n self.js.responseType = \"arraybuffer\"\n if(mode != \"text\" && mode != \"binary\"){\n throw _b_.ValueError.$factory(\"invalid mode: \" + mode)\n }\n }\n }else{\n self.js.overrideMimeType('text/plain;charset=x-user-defined')\n self.charset_user_defined = true\n }\n for(var key in items.headers){\n self.js.setRequestHeader(key, items.headers[key])\n }\n var timeout = items.timeout\n if(timeout.seconds){\n ajax.set_timeout(self, timeout.seconds, timeout.func)\n }\n // Add function read() to return str or bytes according to mode\n self.js.send()\n}\n\nfunction _request_with_body(method){\n var $ = $B.args(method, 3, {method: null, url: null, blocking: null},\n [\"method\", \"url\", \"blocking\"], arguments, {blocking: false},\n null, \"kw\"),\n method = $.method,\n url = $.url,\n async = !$.blocking,\n kw = $.kw,\n content_type\n var self = ajax.$factory()\n self.js.open(method.toUpperCase(), url, async)\n var items = handle_kwargs(self, kw, method), // common with browser.aio\n data = items.data\n\n if($B.$isinstance(data, _b_.dict)){\n data = stringify(data)\n }\n for(var key in items.headers){\n var value = items.headers[key]\n self.js.setRequestHeader(key, value)\n if(key == 'content-type'){\n content_type = value\n }\n }\n if(method.toUpperCase() == 'POST' && !content_type){\n // set default Content-Type for POST requests\n self.js.setRequestHeader('Content-Type',\n 'application/x-www-form-urlencoded')\n }\n\n // Add function read() to return str or bytes according to mode\n self.js.read = function(){\n return _read(self)\n }\n self.js.send(data)\n}\n\nfunction form_data(form){\n var missing = {},\n $ = $B.args('form_data', 1, {form: null}, ['form'], arguments,\n {form: missing}, null, null)\n if($.form === missing){\n return new FormData()\n }else{\n return new FormData($.form)\n }\n}\n\nfunction connect(){\n _request_without_body.call(null, \"connect\", ...arguments)\n}\n\nfunction _delete(){\n _request_without_body.call(null, \"delete\", ...arguments)\n}\n\nfunction get(){\n _request_without_body.call(null, \"get\", ...arguments)\n}\n\nfunction head(){\n _request_without_body.call(null, \"head\", ...arguments)\n}\n\nfunction options(){\n _request_without_body.call(null, \"options\", ...arguments)\n}\n\nfunction patch(){\n _request_with_body.call(null, \"put\", ...arguments)\n}\n\nfunction post(){\n _request_with_body.call(null, \"post\", ...arguments)\n}\n\nfunction put(){\n _request_with_body.call(null, \"put\", ...arguments)\n}\n\nfunction trace(){\n _request_without_body.call(null, \"trace\", ...arguments)\n}\n\nfunction file_upload(){\n // ajax.file_upload(url, file, method=\"POST\", **callbacks)\n var $ = $B.args(\"file_upload\", 2, {url: null, \"file\": file},\n [\"url\", \"file\"], arguments, {}, null, \"kw\"),\n url = $.url,\n file = $.file,\n kw = $.kw\n\n var self = ajax.$factory()\n\n var items = handle_kwargs(self, kw, method),\n data = items.data,\n headers = items.headers\n\n for(var key in headers){\n var value = headers[key]\n self.js.setRequestHeader(key, value)\n if(key == 'content-type'){\n content_type = value\n }\n }\n\n var timeout = items.timeout\n if(timeout.seconds){\n ajax.set_timeout(self, timeout.seconds, timeout.func)\n }\n\n var method = _b_.dict.$get_string(kw, 'method', 'POST'),\n field_name = _b_.dict.$get_string(kw, 'field_name', 'filetosave')\n\n var 
formdata = new FormData()\n formdata.append(field_name, file, file.name)\n\n if(data){\n if(data instanceof FormData){\n // append additional data\n for(var d of data){\n formdata.append(d[0], d[1])\n }\n }else if($B.$isinstance(data, _b_.dict)){\n for(var item of _b_.dict.$iter_items(data)){\n formdata.append(item.key, item.value)\n }\n }else{\n throw _b_.ValueError.$factory(\n 'data value must be a dict of form_data')\n }\n }\n\n self.js.open(method, url, _b_.True)\n self.js.send(formdata)\n\n}\n\n$B.set_func_names(ajax)\n\nreturn {\n ajax: ajax,\n Ajax: ajax,\n delete: _delete,\n file_upload: file_upload,\n connect,\n form_data,\n get,\n head,\n options,\n patch,\n post,\n put,\n trace\n}\n\n})(__BRYTHON__)\n"], "array": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nvar typecodes = {\n 'b': Int8Array, // signed char, 1 byte\n 'B': Uint8Array, // unsigned char, 1\n 'u': Uint32Array, // Py_UNICODE Unicode character, 2 (deprecated)\n 'h': Int16Array, // signed short, 2\n 'H': Uint16Array, // unsigned short, 2\n 'i': Int16Array, // signed int, 2\n 'I': Uint16Array, // unsigned int, 2\n 'l': Int32Array, // signed long, 4\n 'L': Uint32Array, // unsigned long, 4\n 'q': null, // signed long, 8 (not implemented)\n 'Q': null, // unsigned long, 8 (not implemented)\n 'f': Float32Array, // float, 4\n 'd': Float64Array // double float, 8\n}\n\nvar array = $B.make_class(\"array\",\n function(){\n var missing = {},\n $ = $B.args(\"array\", 2, {typecode: null, initializer: null},\n [\"typecode\", \"initializer\"], arguments, {initializer: missing},\n null, null),\n typecode = $.typecode,\n initializer = $.initializer\n if(! typecodes.hasOwnProperty(typecode)){\n throw _b_.ValueError.$factory(\"bad typecode (must be b, \" +\n \"B, u, h, H, i, I, l, L, q, Q, f or d)\")\n }\n if(typecodes[typecode] === null){\n console.log(\"array factory, $\", $, typecode)\n throw _b_.NotImplementedError.$factory(\"type code \" +\n typecode + \" is not implemented\")\n }\n var res = {\n __class__: array,\n typecode: typecode,\n obj: null\n }\n if(initializer !== missing){\n if(Array.isArray(initializer)){\n array.fromlist(res, initializer)\n }else if($B.$isinstance(initializer, _b_.bytes)){\n array.frombytes(res, initializer)\n }else{\n array.extend(res, initializer)\n }\n }\n return res\n }\n)\n\narray.$buffer_protocol = true\narray.$match_sequence_pattern = true // for Pattern Matching (PEP 634)\n\narray.__getitem__ = function(self, key){\n if(self.obj && self.obj[key] !== undefined){\n return self.obj[key]\n }\n throw _b_.IndexError.$factory(\"array index out of range\")\n}\n\nvar array_iterator = $B.make_iterator_class(\"array_iterator\")\narray.__iter__ = function(self){\n return array_iterator.$factory(self.obj === null ? [] : self.obj)\n}\n\narray.__len__ = function(self){\n return self.obj === null ? 
0 : self.obj.length\n}\n\narray.__mul__ = function(self, nb){\n if(typeof nb == \"number\" || $B.$isinstance(nb, _b_.int)){\n var t = [],\n copy = self.obj.slice()\n for(var i = 0; i < nb; i++){\n t = t.concat(copy)\n }\n return {\n __class__: array,\n typecode: self.typecode,\n obj: t\n }\n }\n throw _b_.ValueError.$factory(\"cannot multiply array by \" +\n $B.class_name(nb))\n}\n\narray.__setitem__ = function(_self, index, value){\n if(_self.obj[index] === undefined){\n throw _b_.IndexError.$factory(\"array index out of range\")\n }\n _self.obj[index] = value\n}\n\narray.__str__ = function(self){\n $B.args(\"__str__\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n var res = \"array('\" + self.typecode + \"'\"\n if(self.obj !== null){\n res += \", [\" + self.obj + \"]\"\n }\n return res + \")\"\n}\n\nfunction normalize_index(self, i){\n // return an index i between 0 and self.obj.length - 1\n if(i < 0){\n i = self.obj.length + i\n }\n if(i < 0){i = 0}\n else if(i > self.obj.length - 1){\n i = self.obj.length\n }\n return i\n}\n\narray.append = function(self, value){\n $B.args(\"append\", 2, {self: null, value: null},\n [\"self\", \"value\"], arguments, {}, null, null)\n var pos = self.obj === null ? 0 : self.obj.length\n return array.insert(self, pos, value)\n}\n\narray.count = function(self, x){\n $B.args(\"count\", 2, {self: null, x: null},\n [\"self\", \"x\"], arguments, {}, null, null)\n if(self.obj === null){return 0}\n return self.obj.filter(function(item){return item == x}).length\n}\n\narray.extend = function(self, iterable){\n $B.args(\"extend\", 2, {self: null, iterable: null},\n [\"self\", \"iterable\"], arguments, {}, null, null)\n if(iterable.__class__ === array){\n if(iterable.typecode !== self.typecode){\n throw _b_.TypeError.$factory(\"can only extend with array \" +\n \"of same kind\")\n }\n if(iterable.obj === null){return _b_.None}\n // create new object with length = sum of lengths\n var newobj = new typecodes[self.typecode](self.obj.length +\n iterable.obj.length)\n // copy self.obj\n newobj.set(self.obj)\n // copy iterable.obj\n newobj.set(iterable.obj, self.obj.length)\n self.obj = newobj\n }else{\n var it = _b_.iter(iterable)\n while(true){\n try{\n var item = _b_.next(it)\n array.append(self, item)\n }catch(err){\n if(err.__class__ !== _b_.StopIteration){\n throw err\n }\n break\n }\n }\n }\n return _b_.None\n}\n\narray.frombytes = function(self, s){\n $B.args(\"frombytes\", 2, {self: null, s: null},\n [\"self\", \"s\"], arguments, {}, null, null)\n if(! 
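// --- Illustrative sketch (not part of the packaged array module) ---
// The array class above stores its items in a fixed-size TypedArray, so
// operations like extend() and pop() allocate a new buffer of the right
// length and copy into it with set(). The growth step in isolation
// (the function name is hypothetical):
function sketchExtendTyped(obj, extra){
    // obj and extra are TypedArrays of the same kind, e.g. Float64Array
    var combined = new obj.constructor(obj.length + extra.length)
    combined.set(obj)                 // copy the existing items
    combined.set(extra, obj.length)   // copy the new items after them
    return combined
}
// sketchExtendTyped(new Float64Array([1, 2]), new Float64Array([3]))
// -> Float64Array [1, 2, 3]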
$B.$isinstance(s, _b_.bytes)){\n throw _b_.TypeError.$factory(\"a bytes-like object is required, \" +\n \"not '\" + $B.class_name(s) + \"'\")\n }\n self.obj = new typecodes[self.typecode](s.source)\n return _b_.None\n}\n\narray.fromlist = function(self, list){\n $B.args(\"fromlist\", 2, {self: null, list: null},\n [\"self\", \"list\"], arguments, {}, null, null)\n var it = _b_.iter(list)\n while(true){\n try{\n var item = _b_.next(it)\n try{\n array.append(self, item)\n }catch(err){\n console.log(err)\n return _b_.None\n }\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n return _b_.None\n }\n throw err\n }\n }\n}\n\narray.fromstring = array.frombytes\n\narray.index = function(self, x){\n $B.args(\"index\", 2, {self: null, x: null},\n [\"self\", \"x\"], arguments, {}, null, null)\n var res = self.obj.findIndex(function(item){return x == item})\n if(res == -1){\n throw _b_.ValueError.$factory(\"array.index(x): x not in array\")\n }\n return res\n}\n\narray.insert = function(self, i, value){\n $B.args(\"insert\", 3, {self: null, i: null, value: null},\n [\"self\", \"i\", \"value\"], arguments, {}, null, null)\n if(self.obj === null){\n self.obj = [value]\n }else{\n self.obj.splice(i, 0, value)\n }\n return _b_.None\n}\n\narray.itemsize = function(self){\n return typecodes[self.typecode].BYTES_PER_ELEMENT\n}\n\narray.pop = function(self, i){\n var $ = $B.args(\"count\", 2, {self: null, i: null},\n [\"self\", \"i\"], arguments, {i: -1}, null, null)\n i = $.i\n if(self.obj === null){\n throw _b_.IndexError.$factory(\"pop from empty array\")\n }else if(self.obj.length == 1){\n var res = self.obj[0]\n self.obj = null\n return res\n }\n i = normalize_index(self, i)\n // store value to return\n var res = self.obj[i]\n // create new array, size = previous size - 1\n var newobj = new typecodes[self.typecode](self.obj.length - 1)\n // fill new array with values until i excluded\n newobj.set(self.obj.slice(0, i))\n // fill with values after i\n newobj.set(self.obj.slice(i + 1), i)\n // set self.obj to new array\n self.obj = newobj\n // return stored value\n return res\n}\n\narray.remove = function(self, x){\n $B.args(\"remove\", 2, {self: null, x: null},\n [\"self\", \"x\"], arguments, {}, null, null)\n var res = self.obj.findIndex(function(item){return x == item})\n if(res == -1){\n throw _b_.ValueError.$factory(\"array.remove(x): x not in array\")\n }\n array.pop(self, res)\n return _b_.None\n}\n\narray.reverse = function(self){\n $B.args(\"reverse\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n if(self.obj === null){return _b_.None}\n self.obj.reverse()\n return _b_.None\n}\n\narray.tobytes = function(self){\n $B.args(\"tobytes\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n var items = Array.prototype.slice.call(self.obj),\n res = []\n items.forEach(function(item){\n while(item > 256){\n res.push(item % 256)\n item = Math.floor(item / 256)\n }\n res.push(item)\n })\n return _b_.bytes.$factory(res)\n}\n\narray.tolist = function(self){\n $B.args(\"tolist\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n if(self.obj === null){\n return $B.$list([])\n }\n return Array.prototype.slice.call(self.obj)\n}\n\narray.tostring = array.tobytes\n\narray.typecode = function(self){\n return self.typecode\n}\n\n$B.set_func_names(array, \"array\")\n\nvar module = {\n array: array,\n typecodes: Object.keys(typecodes).join('')\n}\n\n$B.addToImported('array', module)\n\n})(__BRYTHON__)\n"], "_tokenize": [".js", "(function($B){\n\nvar _b_ = 
$B.builtins\n\n$B.$import('token')\n\nvar TokenizerIter = $B.make_class('TokenizerIter',\n function(it){\n return {\n __class__: TokenizerIter,\n it\n }\n }\n)\n\nTokenizerIter.__iter__ = function(self){\n var js_iter = function*(){\n var line_num = 0\n while(true){\n try{\n var bytes = self.it()\n }catch(err){\n if($B.is_exc(err, [_b_.StopIteration])){\n token = endmarker\n token.start[0]++\n token.end[0]++\n var type_code = $B.imported.token[token.type]\n yield $B.fast_tuple([type_code, token.string,\n $B.fast_tuple(token.start),\n $B.fast_tuple(token.end),\n token.line])\n }\n throw err\n }\n line_num++\n var line = _b_.bytes.decode(bytes, 'utf-8')\n for(var token of $B.tokenizer(line, 'test')){\n if(token.type == 'ENCODING'){ // skip encoding token\n continue\n }else if(token.type == 'ENDMARKER'){\n var endmarker = token\n continue\n }\n token.start[0] = line_num\n token.end[0] = line_num\n var type_code = $B.imported.token[token.type]\n yield $B.fast_tuple([type_code, token.string,\n $B.fast_tuple(token.start),\n $B.fast_tuple(token.end),\n token.line])\n }\n }\n\n }\n return $B.generator.$factory(js_iter)()\n}\n\nTokenizerIter.__next__ = function*(self){\n\n}\n\n$B.set_func_names(TokenizerIter, '_tokenize')\n\n$B.addToImported('_tokenize', {TokenizerIter})\n\n\n})(__BRYTHON__)"], "_base64": [".js", "(function($B){\n\nvar _b_ = $B.builtins,\n _keyStr = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\"\n\nfunction make_alphabet(altchars){\n var alphabet = _keyStr\n if(altchars !== undefined && altchars !== _b_.None){\n // altchars is an instance of Python bytes\n var source = altchars.source\n alphabet = alphabet.substr(0,alphabet.length-3) +\n _b_.chr(source[0]) + _b_.chr(source[1]) + '='\n }\n return alphabet\n}\n\nvar Base64 = {\n error: function(){return 'binascii_error'},\n\n encode: function(bytes, altchars){\n\n var input = bytes.source,\n output = \"\",\n chr1, chr2, chr3, enc1, enc2, enc3, enc4\n var i = 0\n\n var alphabet = make_alphabet(altchars)\n\n while(i < input.length){\n\n chr1 = input[i++]\n chr2 = input[i++]\n chr3 = input[i++]\n\n enc1 = chr1 >> 2\n enc2 = ((chr1 & 3) << 4) | (chr2 >> 4)\n enc3 = ((chr2 & 15) << 2) | (chr3 >> 6)\n enc4 = chr3 & 63\n\n if(isNaN(chr2)){\n enc3 = enc4 = 64\n }else if(isNaN(chr3)){\n enc4 = 64\n }\n\n output = output + alphabet.charAt(enc1) +\n alphabet.charAt(enc2) +\n alphabet.charAt(enc3) +\n alphabet.charAt(enc4)\n\n }\n return _b_.bytes.$factory(output, 'utf-8', 'strict')\n },\n\n\n decode: function(bytes, altchars, validate){\n var output = [],\n chr1, chr2, chr3,\n enc1, enc2, enc3, enc4\n\n var alphabet = make_alphabet(altchars)\n\n var input = bytes.source\n\n // If validate is set, check that all characters in input\n // are in the alphabet\n var _input = ''\n var padding = 0\n for(var i = 0, len = input.length; i < len; i++){\n var car = String.fromCharCode(input[i])\n var char_num = alphabet.indexOf(car)\n if(char_num == -1){\n if(validate){throw Base64.error(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i < input.length - 2){\n if(validate){throw Base64.error(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i >= input.length - 2){\n padding++\n _input += car\n }else{\n _input += car\n }\n }\n input = _input\n if(_input.length == padding){return _b_.bytes.$factory([])}\n if( _input.length % 4 > 0){throw Base64.error(\"Incorrect padding\")}\n\n var i = 0\n while(i < input.length){\n\n enc1 = alphabet.indexOf(input.charAt(i++))\n enc2 = 
alphabet.indexOf(input.charAt(i++))\n enc3 = alphabet.indexOf(input.charAt(i++))\n enc4 = alphabet.indexOf(input.charAt(i++))\n\n chr1 = (enc1 << 2) | (enc2 >> 4)\n chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)\n chr3 = ((enc3 & 3) << 6) | enc4\n\n output.push(chr1)\n\n if(enc3 != 64){output.push(chr2)}\n if(enc4 != 64){output.push(chr3)}\n\n }\n // return Python bytes\n return _b_.bytes.$factory(output, 'utf-8', 'strict')\n\n },\n\n _utf8_encode: function(string) {\n string = string.replace(/\\r\\n/g, \"\\n\")\n var utftext = \"\";\n\n for(var n = 0; n < string.length; n++){\n\n var c = string.charCodeAt(n)\n\n if(c < 128){\n utftext += String.fromCharCode(c)\n }else if((c > 127) && (c < 2048)){\n utftext += String.fromCharCode((c >> 6) | 192)\n utftext += String.fromCharCode((c & 63) | 128)\n }else{\n utftext += String.fromCharCode((c >> 12) | 224)\n utftext += String.fromCharCode(((c >> 6) & 63) | 128)\n utftext += String.fromCharCode((c & 63) | 128)\n }\n\n }\n\n return utftext\n },\n\n _utf8_decode: function(utftext) {\n var string = \"\",\n i = 0,\n c = c1 = c2 = 0\n\n while(i < utftext.length){\n\n c = utftext.charCodeAt(i)\n\n if(c < 128){\n string += String.fromCharCode(c)\n i++\n }else if((c > 191) && (c < 224)){\n c2 = utftext.charCodeAt(i + 1)\n string += String.fromCharCode(((c & 31) << 6) | (c2 & 63))\n i += 2\n }else{\n c2 = utftext.charCodeAt(i + 1)\n c3 = utftext.charCodeAt(i + 2)\n string += String.fromCharCode(\n ((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63))\n i += 3\n }\n\n }\n\n return string\n }\n\n}\n\n$B.addToImported('_base64', {Base64:Base64})\n}\n\n)(__BRYTHON__)"], "posix": [".js", "/*\nThis module provides access to operating system functionality that is\nstandardized by the C Standard and the POSIX standard (a thinly\ndisguised Unix interface). Refer to the library manual and\ncorresponding Unix manual entries for more information on calls.\n*/\nvar $B = __BRYTHON__,\n _b_ = $B.builtins\n\nfunction _randint(a, b){\n return parseInt(Math.random() * (b - a + 1) + a)\n}\n\nvar stat_result = $B.make_class(\"stat_result\",\n function(filename){\n filename = _b_.str.$factory(filename)\n if($B.file_cache && $B.file_cache.hasOwnProperty(filename)){\n var f = $B.file_cache[filename],\n res = {\n __class__: stat_result,\n st_atime: __BRYTHON__.timestamp,\n st_ctime: f.ctime,\n st_mtime: f.mtime,\n st_uid: -1,\n st_gid: -1,\n st_ino: -1,\n st_mode: 0,\n st_size: f.length\n };\n [\"mtime\", \"ctime\", \"atime_ns\", \"mtime_ns\", \"ctime_ns\"].\n forEach(function(item){\n res[\"st_\" + item] = res.st_atime\n });\n return res\n }else if($B.files && $B.files.hasOwnProperty(filename)){\n var f = $B.files[filename],\n res = {\n __class__: stat_result,\n st_atime: __BRYTHON__.timestamp,\n st_ctime: f.ctime,\n st_mtime: f.mtime,\n st_uid: -1,\n st_gid: -1,\n st_ino: -1,\n st_mode: 0,\n st_size: f.content.length\n };\n for(var item of [\"mtime\", \"ctime\", \"atime_ns\", \"mtime_ns\", \"ctime_ns\"]){\n res[\"st_\" + item] = res.st_atime\n }\n return res\n\n }else{\n var res = {\n __class__: stat_result,\n st_atime: __BRYTHON__.timestamp,\n st_uid: -1,\n st_gid: -1,\n st_ino: -1,\n st_mode: filename.endsWith('/') ? 
16895 : 33206,\n st_size: 1 // fake\n };\n [\"mtime\", \"ctime\", \"atime_ns\", \"mtime_ns\", \"ctime_ns\"].\n forEach(function(item){\n res[\"st_\" + item] = res.st_atime\n });\n return res\n }\n }\n)\n$B.set_func_names(stat_result, \"posix\")\n\nvar module = {\n F_OK: 0,\n O_APPEND: 8,\n O_BINARY: 32768,\n O_CREAT: 256,\n O_EXCL: 1024,\n O_NOINHERIT: 128,\n O_RANDOM: 16,\n O_RDONLY: 0,\n O_RDWR: 2,\n O_SEQUENTIAL: 32,\n O_SHORT_LIVED: 4096,\n O_TEMPORARY: 64,\n O_TEXT: 16384,\n O_TRUNC: 512,\n O_WRONLY: 1,\n P_DETACH: 4,\n P_NOWAIT: 1,\n P_NOWAITO: 3,\n P_OVERLAY: 2,\n P_WAIT: 0,\n R_OK: 4,\n TMP_MAX: 32767,\n W_OK: 2,\n X_OK: 1,\n _have_functions: ['MS_WINDOWS'],\n environ: _b_.dict.$factory(\n [['PYTHONPATH', $B.brython_path],\n ['PYTHONUSERBASE', ' ']]),\n error: _b_.OSError,\n fspath: function(path){\n return path\n },\n getcwd: function(){return $B.brython_path},\n getpid: function(){return 0},\n lstat: function(filename){\n return stat_result.$factory(filename)\n },\n open: function(path, flags){return _b_.open(path, flags)},\n stat: function(filename){return stat_result.$factory(filename)},\n stat_result: function(filename){return stat_result.$factory(filename)},\n urandom: function(n){\n const randbytes = new Uint8Array(n);\n crypto.getRandomValues(randbytes);\n return _b_.bytes.$factory(Array.from(randbytes));\n },\n WTERMSIG: function(){return 0},\n WNOHANG: function(){return _b_.tuple.$factory([0, 0])}\n};\n\n[\"WCOREDUMP\", \"WIFCONTINUED\", \"WIFSTOPPED\", \"WIFSIGNALED\", \"WIFEXITED\"].forEach(function(funcname){\n module[funcname] = function(){return false}\n });\n\n[\"WEXITSTATUS\", \"WSTOPSIG\", \"WTERMSIG\"].\n forEach(function(funcname){\n module[funcname] = function(){return _b_.None}\n });\n\n[\"_exit\", \"_getdiskusage\", \"_getfileinformation\", \"_getfinalpathname\",\n \"_getfullpathname\", \"_isdir\", \"abort\", \"access\", \"chdir\", \"chmod\",\n \"close\", \"closerange\", \"device_encoding\", \"dup\", \"dup2\",\n \"execv\", \"execve\", \"fsat\", \"fsync\", \"get_terminal_size\", \"getcwdb\",\n \"getlogin\", \"getppid\", \"isatty\", \"kill\", \"link\", \"listdir\", \"lseek\",\n \"mkdir\", \"pipe\", \"putenv\", \"read\", \"readlink\", \"remove\", \"rename\",\n \"replace\", \"rmdir\", \"spawnv\", \"spawnve\", \"startfile\", \"stat_float_times\",\n \"statvfs_result\", \"strerror\", \"symlink\", \"system\", \"terminal_size\",\n \"times\", \"times_result\", \"umask\", \"uname_result\", \"unlink\", \"utime\",\n \"waitpid\", \"write\"].forEach(function(funcname){\n module[funcname] = function(){\n throw _b_.NotImplementedError.$factory(\"posix.\" + funcname +\n \" is not implemented\")\n }\n });\n\n$B.addToImported('posix', module)"], "_io_classes": [".js", "var _b_ = __BRYTHON__.builtins\n\nfunction get_self(name, args){\n return $B.args(name, 1, {self: null}, [\"self\"], args, {}, null, null).self\n}\n\nvar _IOBase = $B.make_class(\"_IOBase\")\n_IOBase.__mro__ = [_b_.object]\n\n_IOBase.close = function(){\n get_self(\"close\", arguments).__closed = true\n}\n\n_IOBase.flush = function(){\n get_self(\"flush\", arguments)\n return _b_.None\n}\n\n$B.set_func_names(_IOBase, '_io')\n\n// Base class for binary streams that support some kind of buffering.\nvar _BufferedIOBase = $B.make_class(\"_BufferedIOBase\")\n_BufferedIOBase.__mro__ = [_IOBase, _b_.object]\n\n_BufferedIOBase.__enter__ = function(self){\n return self\n}\n_BufferedIOBase.__exit__ = function(self, type, value, traceback){\n try{\n $B.$call($B.$getattr(self, 'close'))()\n self.__closed = true\n return 
true\n }catch(err){\n return false\n }\n}\n\n$B.set_func_names(_BufferedIOBase, '_io')\n\n// Base class for raw binary I/O.\nvar _RawIOBase = $B.make_class(\"_RawIOBase\")\n\n_RawIOBase.__mro__ = [_IOBase, _b_.object]\n\n_RawIOBase.read = function(){\n var $ = $B.args(\"read\", 2, {self: null, size: null}, [\"self\", \"size\"],\n arguments, {size: -1}, null, null),\n self = $.self,\n size = $.size,\n res\n self.$pos = self.$pos || 0\n if(size == -1){\n if(self.$pos == 0){\n res = self.$content\n }else{\n res = _b_.bytes.$factory(self.$content.source.slice(self.$pos))\n }\n self.$pos = self.$content.source.length - 1\n }else{\n res = _b_.bytes.$factory(self.$content.source.slice(self.$pos, size))\n self.$pos += size\n }\n return res\n}\n\n_RawIOBase.readall = function(){\n return _RawIOBase.read(get_self(\"readall\", arguments))\n}\n\n$B.set_func_names(_RawIOBase, '_io')\n\n// Base class for text streams.\n_TextIOBase = $B.make_class(\"_TextIOBase\")\n_TextIOBase.__mro__ = [_IOBase, _b_.object]\n\nvar StringIO = $B.make_class(\"StringIO\",\n function(){\n var $ = $B.args(\"StringIO\", 2, {value: null, newline: null},\n [\"value\", \"newline\"], arguments, {value: '', newline: \"\\n\"},\n null, null)\n return {\n __class__: StringIO,\n $counter: 0,\n $content: $.value\n }\n }\n)\n\nStringIO.__mro__ = [$B.Reader, _b_.object]\n\nStringIO.getvalue = function(){\n var $ = $B.args(\"getvalue\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n return $.self.$content.substr(0) // copy\n}\n\nStringIO.truncate = function(self, size){\n var $ = $B.args('truncate', 2, {self: null, size: null}, ['self', 'size'],\n arguments, {size: _b_.None}, null, null),\n self = $.self,\n size = $.size\n if(size === _b_.None){\n size = self.$counter\n }\n self.$content = self.$content.substr(0, size)\n self.$counter = self.$content.length\n return self.$counter\n}\n\nStringIO.write = function(){\n var $ = $B.args(\"write\", 2, {self: null, data: null},\n [\"self\", \"data\"], arguments, {}, null, null)\n if(! $B.$isinstance($.data, _b_.str)){\n throw _b_.TypeError.$factory('string argument expected, got ' +\n `'${$B.class_name($.data)}'`)\n }\n var text = $.self.$content,\n position = $.self.$counter\n text = text.substr(0, position) + $.data +\n text.substr(position + $.data.length)\n $.self.$content = text\n $.self.$counter = position + $.data.length\n return $.data.length\n}\n\n$B.set_func_names(StringIO, \"_io\")\n\nvar BytesIO = $B.make_class(\"BytesIO\",\n function(){\n var $ = $B.args(\"BytesIO\", 1, {value: null},\n [\"value\"], arguments, {value: _b_.bytes.$factory()},\n null, null)\n return {\n __class__: BytesIO,\n $binary: true,\n $content: $.value,\n $length: $.value.source.length,\n $counter: 0\n }\n }\n)\nBytesIO.__mro__ = [$B.Reader, _b_.object]\n\nBytesIO.getbuffer = function(){\n var self = get_self(\"getbuffer\", arguments)\n return self.$content\n}\n\nBytesIO.getvalue = function(){\n var self = get_self(\"getvalue\", arguments)\n return self.$content\n}\n\nBytesIO.read = function(){\n var $ = $B.args(\"read\", 2, {self: null, nbytes: null},\n [\"self\", \"nbytes\"], arguments, {nbytes: _b_.None}, null, null),\n self = $.self,\n nbytes = $.nbytes,\n res\n var source = self.$content.source\n if(nbytes === _b_.None){\n res = $B.fast_bytes(source.slice(self.$counter))\n self.$counter = source.length\n }else if(! 
_b_.isinstance(nbytes, _b_.int)){\n throw _b_.TypeError.$factory('number of bytes should be int, not ' +\n $B.class_name(nbytes))\n }else{\n res = $B.fast_bytes(source.slice(self.$counter,\n self.$counter + nbytes))\n self.$counter = Math.min(self.$counter + nbytes, source.length)\n }\n return res\n}\n\nBytesIO.write = function(){\n var $ = $B.args(\"write\", 2, {self: null, data: null},\n [\"self\", \"data\"], arguments, {}, null, null)\n $.self.$content.source = $.self.$content.source.concat(\n $.data.source)\n $.self.$counter += $.data.source.length\n return _b_.None\n}\n\n$B.set_func_names(BytesIO, \"_io\")\n\nvar BlockingIOError = $B.make_class('BlockingIOError')\nBlockingIOError.__bases__ = [_b_.OSError]\n\n$B.set_func_names(BlockingIOError, '_io')\n\nvar $module = (function($B){\n return {\n _BufferedIOBase,\n _IOBase,\n _RawIOBase,\n _TextIOBase: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n BlockingIOError,\n BytesIO: BytesIO,\n FileIO: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n StringIO: StringIO,\n BufferedReader: $B.BufferedReader,\n BufferedWriter: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n BufferedRWPair: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n BufferedRandom: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n IncrementalNewlineDecoder: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n TextIOWrapper: $B.TextIOWrapper\n }\n})(__BRYTHON__)\n$module._IOBase.__doc__ = \"_IOBase\"\n\n__BRYTHON__.imported._io_classes = $module"], "math": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nconst INF = $B.fast_float(Number.POSITIVE_INFINITY),\n NINF = $B.fast_float(Number.NEGATIVE_INFINITY),\n ZERO = $B.fast_float(0),\n NAN = $B.fast_float(Number.NaN)\n\nvar float_check = function(x) {\n // Returns a Javascript number\n if(x.__class__ === $B.long_int){\n var res = parseInt(x.value)\n if(! isFinite(res)){\n throw _b_.OverflowError.$factory('int too big for float')\n }\n return res\n }else if(x.__class__ === _b_.float){\n return x.value\n }\n try{\n return _b_.float.$factory(x).value\n }catch(err){\n throw _b_.TypeError.$factory('must be real number, not ' +\n $B.class_name(x))\n }\n}\n\nfunction check_int(x){\n if(! 
$B.$isinstance(x, _b_.int)){\n throw _b_.TypeError.$factory(\"'\" + $B.class_name(x) +\n \"' object cannot be interpreted as an integer\")\n }\n}\n\nfunction check_int_or_round_float(x){\n return ($B.$isinstance(x, _b_.float) && Number.isInteger(x.value)) ||\n $B.$isinstance(x, _b_.int)\n}\n\nvar isWholeNumber = function(x){return (x * 10) % 10 == 0}\n\nvar isOdd = function(x) {return isWholeNumber(x) && 2 * Math.floor(x / 2) != x}\n\nvar isNegZero = function(x) {return x === 0 && Math.atan2(x,x) < 0}\n\nfunction overflow(){\n throw _b_.OverflowError.$factory(\"math range error\")\n}\n\nfunction value_error(){\n throw _b_.ValueError.$factory(\"math range error\")\n}\n\nvar EPSILON = Math.pow(2, -52),\n MAX_VALUE = (2 - EPSILON) * Math.pow(2, 1023),\n MIN_VALUE = Math.pow(2, -1022),\n Py_HUGE_VAL = Number.POSITIVE_INFINITY,\n logpi = 1.144729885849400174143427351353058711647,\n sqrtpi = 1.772453850905516027298167483341145182798\n\nfunction nextUp(x){\n if(x !== x){ // NaN\n return x\n }\n if(_b_.float.$funcs.isinf(x)){\n if(_b_.float.$funcs.isninf(x)){\n return -MAX_VALUE\n }\n return _mod.inf\n }\n if($B.$isinstance(x, $B.long_int)){\n x = Number(x.value)\n }else if($B.$isinstance(x, _b_.float)){\n x = x.value\n }\n\n if(x == +MAX_VALUE){\n return +1 / 0\n }\n if(typeof x == \"number\"){\n var y = x * (x < 0 ? 1 - EPSILON / 2 : 1 + EPSILON)\n if(y == x){\n y = MIN_VALUE * EPSILON > 0 ? x + MIN_VALUE * EPSILON : x + MIN_VALUE\n }\n if(y === +1 / 0){\n y = +MAX_VALUE\n }\n var b = x + (y - x) / 2\n if(x < b && b < y){\n y = b;\n }\n var c = (y + x) / 2\n if(x < c && c < y){\n y = c;\n }\n return y === 0 ? -0 : y\n }else{\n var factor = $B.rich_comp('__lt__', x, 0) ? 1 - EPSILON / 2 :\n 1 + EPSILON\n var y = $B.rich_op(\"__mul__\", x , factor)\n if(y == x){\n y = MIN_VALUE * EPSILON > 0 ?\n $B.rich_op('__add__', x, MIN_VALUE * EPSILON) :\n $B.rich_op('__add__', x, MIN_VALUE)\n }\n if(y === +1 / 0){\n y = +MAX_VALUE\n }\n var y_minus_x = $B.rich_op('__sub__', y, x)\n var z = $B.rich_op('__truediv__', y_minus_x, 2) // (y - x) / 2\n\n var b = $B.rich_op('__add__', x, z)\n if($B.rich_comp('__lt__', x, b) && $B.rich_comp('__lt__', b, y)){\n y = b;\n }\n var c = $B.rich_op('__truediv__', $B.rich_op('__add__', y, x), 2)\n if($B.rich_comp('__lt__', x, c) && $B.rich_comp('__lt__', c, y)){\n y = c;\n }\n return y === 0 ? 
-0 : y\n }\n}\n\nfunction gcd2(a, b){\n // GCD of 2 factors\n if($B.rich_comp(\"__gt__\", b, a)){\n var temp = a\n a = b\n b = temp\n }\n while(true){\n if(b == 0){\n return a\n }\n a = $B.rich_op(\"__mod__\", a, b)\n if(a == 0){\n return b\n }\n b = $B.rich_op(\"__mod__\", b, a)\n }\n}\n\nconst LANCZOS_N = 13,\n lanczos_g = 6.024680040776729583740234375,\n lanczos_g_minus_half = 5.524680040776729583740234375,\n lanczos_num_coeffs = [\n 23531376880.410759688572007674451636754734846804940,\n 42919803642.649098768957899047001988850926355848959,\n 35711959237.355668049440185451547166705960488635843,\n 17921034426.037209699919755754458931112671403265390,\n 6039542586.3520280050642916443072979210699388420708,\n 1439720407.3117216736632230727949123939715485786772,\n 248874557.86205415651146038641322942321632125127801,\n 31426415.585400194380614231628318205362874684987640,\n 2876370.6289353724412254090516208496135991145378768,\n 186056.26539522349504029498971604569928220784236328,\n 8071.6720023658162106380029022722506138218516325024,\n 210.82427775157934587250973392071336271166969580291,\n 2.5066282746310002701649081771338373386264310793408\n ],\n /* denominator is x*(x+1)*...*(x+LANCZOS_N-2) */\n lanczos_den_coeffs = [\n 0.0, 39916800.0, 120543840.0, 150917976.0, 105258076.0, 45995730.0,\n 13339535.0, 2637558.0, 357423.0, 32670.0, 1925.0, 66.0, 1.0],\n /* gamma values for small positive integers, 1 though NGAMMA_INTEGRAL */\n NGAMMA_INTEGRAL = 23,\n gamma_integral = [\n 1.0, 1.0, 2.0, 6.0, 24.0, 120.0, 720.0, 5040.0, 40320.0, 362880.0,\n 3628800.0, 39916800.0, 479001600.0, 6227020800.0, 87178291200.0,\n 1307674368000.0, 20922789888000.0, 355687428096000.0,\n 6402373705728000.0, 121645100408832000.0, 2432902008176640000.0,\n 51090942171709440000.0, 1124000727777607680000.0]\n\n/* Lanczos' sum L_g(x), for positive x */\nfunction lanczos_sum(x){\n var num = 0.0,\n den = 0.0,\n i\n /* evaluate the rational function lanczos_sum(x). For large\n x, the obvious algorithm risks overflow, so we instead\n rescale the denominator and numerator of the rational\n function by x**(1-LANCZOS_N) and treat this as a\n rational function in 1/x. This also reduces the error for\n larger x values. The choice of cutoff point (5.0 below) is\n somewhat arbitrary; in tests, smaller cutoff values than\n this resulted in lower accuracy. */\n if (x < 5.0) {\n for (i = LANCZOS_N; --i >= 0; ) {\n num = num * x + lanczos_num_coeffs[i];\n den = den * x + lanczos_den_coeffs[i];\n }\n }else{\n for (i = 0; i < LANCZOS_N; i++) {\n num = num / x + lanczos_num_coeffs[i];\n den = den / x + lanczos_den_coeffs[i];\n }\n }\n return num/den;\n}\n\nfunction m_sinpi(x){\n // x is float\n // returns a float\n var r,\n y = fmod(fabs(x), 2.0), // float\n n = _b_.round($B.fast_float(2.0 * y.value)) // int\n switch(n){\n case 0:\n r = sin(pi.value * y.value);\n break;\n case 1:\n r = cos(pi.value * (y.value - 0.5));\n break;\n case 2:\n /* N.B. -sin(pi*(y-1.0)) is *not* equivalent: it would give\n -0.0 instead of 0.0 when y == 1.0. */\n r = sin(pi.value * (1.0 - y.value));\n break;\n case 3:\n r = _b_.float.__neg__(cos(pi.value *(y.value - 1.5)))\n break;\n case 4:\n r = sin(pi.value * (y.value - 2.0));\n break;\n }\n return $B.fast_float(copysign(1.0, x).value * r.value);\n}\n\n/*\n lgamma: natural log of the absolute value of the Gamma function.\n For large arguments, Lanczos' formula works extremely well here.\n*/\nfunction m_lgamma(x){\n var r,\n absx\n\n /* special cases */\n if(! 
isfinite(x)){\n if(isnan(x)){\n return x; /* lgamma(nan) = nan */\n }else{\n return $B.fast_float(Number.POSITIVE_INFINITY); /* lgamma(+-inf) = +inf */\n }\n }\n\n /* integer arguments */\n var x1 = float_check(x)\n if(Number.isInteger(x1) && x1 <= 2.0){\n if(x1 <= 0.0){\n value_error()\n }else{\n return $B.fast_float(0.0); /* lgamma(1) = lgamma(2) = 0.0 */\n }\n }\n\n absx = fabs(x)\n /* tiny arguments: lgamma(x) ~ -log(fabs(x)) for small x */\n if (absx.value < 1e-20){\n return $B.fast_float(-log(absx).value);\n }\n /* Lanczos' formula. We could save a fraction of a ulp in accuracy by\n having a second set of numerator coefficients for lanczos_sum that\n absorbed the exp(-lanczos_g) term, and throwing out the lanczos_g\n subtraction below; it's probably not worth it. */\n var lsum = $B.fast_float(lanczos_sum(absx.value))\n r = log(lsum).value - lanczos_g;\n r += (absx.value - 0.5) *\n (log($B.fast_float(absx.value + lanczos_g - 0.5)).value - 1)\n if (x1 < 0.0){\n /* Use reflection formula to get value for negative x. */\n r = logpi - log(fabs(m_sinpi(absx))).value - log(absx).value - r\n }\n r = $B.fast_float(r)\n if(isinf(r)){\n overflow()\n }\n return r;\n}\n\nfunction acos(x){\n $B.check_nb_args('acos', 1, arguments)\n $B.check_no_kw('acos', x)\n if(_mod.isinf(x)){\n throw _b_.ValueError.$factory(\"math domain error\")\n }else if(_mod.isnan(x)){\n return _mod.nan\n }else{\n x = float_check(x)\n if(x > 1 || x < -1){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n return _b_.float.$factory(Math.acos(x))\n }\n}\n\nfunction acosh(x){\n $B.check_nb_args('acosh', 1, arguments)\n $B.check_no_kw('acosh', x)\n\n if(_b_.float.$funcs.isinf(x)){\n if(_b_.float.$funcs.isninf(x)){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n return _mod.inf\n }else if(_mod.isnan(x)){\n return _mod.nan\n }\n var y = float_check(x)\n if(y <= 0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n if(y > Math.pow(2, 28)){ // issue 1590\n return _b_.float.$factory(_mod.log(y).value + _mod.log(2).value)\n }\n return _b_.float.$factory(Math.log(y + Math.sqrt(y * y - 1)))\n}\n\nfunction asin(x){\n $B.check_nb_args('asin', 1, arguments)\n $B.check_no_kw('asin', x)\n if(_mod.isinf(x)){\n throw _b_.ValueError.$factory(\"math domain error\")\n }else if(_mod.isnan(x)){\n return _mod.nan\n }else{\n x = float_check(x)\n if(x > 1 || x < -1){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n return _b_.float.$factory(Math.asin(x))\n }\n}\n\nfunction asinh(x){\n $B.check_nb_args('asinh', 1, arguments)\n $B.check_no_kw('asinh', x)\n\n var y = float_check(x)\n if(_b_.float.$funcs.isninf(x)){\n return NINF\n }else if(_b_.float.$funcs.isinf(x)){\n return INF\n }\n if(y == 0 && 1 / y === -Infinity){\n return $B.fast_float(-0.0)\n }\n return _b_.float.$factory(Math.asinh(y))\n}\n\nfunction atan(x){\n $B.check_nb_args('atan', 1, arguments)\n $B.check_no_kw('atan', x)\n\n if(_b_.float.$funcs.isninf(x)){return _b_.float.$factory(-Math.PI / 2)}\n if(_b_.float.$funcs.isinf(x)){return _b_.float.$factory(Math.PI / 2)}\n return _b_.float.$factory(Math.atan(float_check(x)))\n}\n\nfunction atan2(x, y){\n $B.check_nb_args('atan2', 2, arguments)\n $B.check_no_kw('atan2', x, y)\n\n return _b_.float.$factory(Math.atan2(float_check(x), float_check(y)))\n}\n\nfunction atanh(x){\n $B.check_nb_args('atanh', 1, arguments)\n $B.check_no_kw('atanh', x)\n if(_b_.float.$funcs.isinf(x)){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n var y = float_check(x)\n if(y == 0){\n return 0\n }else if(y <= -1 
|| y >= 1){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n return _b_.float.$factory(0.5 * Math.log((1 / y + 1)/(1 / y - 1)));\n}\n\nfunction cbrt(x){\n // Cubic root\n $B.check_nb_args('cbrt ', 1, arguments)\n $B.check_no_kw('cbrt ', x)\n\n var y = float_check(x)\n if(_b_.float.$funcs.isninf(x)){\n return NINF\n }else if(_b_.float.$funcs.isinf(x)){\n return INF\n }\n var _r = $B.fast_float(Math.cbrt(y))\n if(_b_.float.$funcs.isinf(_r)){\n throw _b_.OverflowError.$factory(\"math range error\")\n }\n return _r\n}\n\nfunction ceil(x){\n $B.check_nb_args('ceil', 1, arguments)\n $B.check_no_kw('ceil', x)\n\n var res\n\n if($B.$isinstance(x, _b_.float)){\n if(_b_.float.$funcs.isinf(x)){\n throw _b_.OverflowError.$factory(\n \"cannot convert float infinity to integer\")\n }else if(_mod.isnan(x)){\n throw _b_.OverflowError.$factory(\n \"cannot convert float NaN to integer\")\n }\n }\n\n var klass = x.__class__ || $B.get_class(x)\n\n try{\n // Use attribute of the object's class, not of the object\n // itself (special method)\n return $B.$call($B.$getattr(klass, '__ceil__'))(x)\n }catch(err){\n if(! $B.is_exc(err, [_b_.AttributeError])){\n throw err\n }\n }\n\n try{\n x = $B.$call($B.$getattr(klass, '__float__'))(x)\n }catch(err){\n if(! $B.is_exc(err, [_b_.AttributeError])){\n throw err\n }else{\n throw _b_.TypeError.$factory(\"must be real number, not \" +\n $B.class_name(x))\n }\n }\n return _mod.ceil(x)\n}\n\nconst ULLONG_MAX = 2n ** 64n - 1n,\n LONG_MAX = 2147483647,\n LONG_MIN = -2147483647,\n LLONG_MAX = 9223372036854775807n,\n LLONG_MIN = -9223372036854775807n,\n p2_64 = 2n ** 64n\n\nconst reduced_factorial_odd_part = [\n 0x0000000000000001n, 0x0000000000000001n, 0x0000000000000001n, 0x0000000000000003n,\n 0x0000000000000003n, 0x000000000000000fn, 0x000000000000002dn, 0x000000000000013bn,\n 0x000000000000013bn, 0x0000000000000b13n, 0x000000000000375fn, 0x0000000000026115n,\n 0x000000000007233fn, 0x00000000005cca33n, 0x0000000002898765n, 0x00000000260eeeebn,\n 0x00000000260eeeebn, 0x0000000286fddd9bn, 0x00000016beecca73n, 0x000001b02b930689n,\n 0x00000870d9df20adn, 0x0000b141df4dae31n, 0x00079dd498567c1bn, 0x00af2e19afc5266dn,\n 0x020d8a4d0f4f7347n, 0x335281867ec241efn, 0x9b3093d46fdd5923n, 0x5e1f9767cc5866b1n,\n 0x92dd23d6966aced7n, 0xa30d0f4f0a196e5bn, 0x8dc3e5a1977d7755n, 0x2ab8ce915831734bn,\n 0x2ab8ce915831734bn, 0x81d2a0bc5e5fdcabn, 0x9efcac82445da75bn, 0xbc8b95cf58cde171n,\n 0xa0e8444a1f3cecf9n, 0x4191deb683ce3ffdn, 0xddd3878bc84ebfc7n, 0xcb39a64b83ff3751n,\n 0xf8203f7993fc1495n, 0xbd2a2a78b35f4bddn, 0x84757be6b6d13921n, 0x3fbbcfc0b524988bn,\n 0xbd11ed47c8928df9n, 0x3c26b59e41c2f4c5n, 0x677a5137e883fdb3n, 0xff74e943b03b93ddn,\n 0xfe5ebbcb10b2bb97n, 0xb021f1de3235e7e7n, 0x33509eb2e743a58fn, 0x390f9da41279fb7dn,\n 0xe5cb0154f031c559n, 0x93074695ba4ddb6dn, 0x81c471caa636247fn, 0xe1347289b5a1d749n,\n 0x286f21c3f76ce2ffn, 0x00be84a2173e8ac7n, 0x1595065ca215b88bn, 0xf95877595b018809n,\n 0x9c2efe3c5516f887n, 0x373294604679382bn, 0xaf1ff7a888adcd35n, 0x18ddf279a2c5800bn,\n 0x18ddf279a2c5800bn, 0x505a90e2542582cbn, 0x5bacad2cd8d5dc2bn, 0xfe3152bcbff89f41n,\n 0xe1467e88bf829351n, 0xb8001adb9e31b4d5n, 0x2803ac06a0cbb91fn, 0x1904b5d698805799n,\n 0xe12a648b5c831461n, 0x3516abbd6160cfa9n, 0xac46d25f12fe036dn, 0x78bfa1da906b00efn,\n 0xf6390338b7f111bdn, 0x0f25f80f538255d9n, 0x4ec8ca55b8db140fn, 0x4ff670740b9b30a1n,\n 0x8fd032443a07f325n, 0x80dfe7965c83eeb5n, 0xa3dc1714d1213afdn, 0x205b7bbfcdc62007n,\n 0xa78126bbe140a093n, 0x9de1dc61ca7550cfn, 0x84f0046d01b492c5n, 
0x2d91810b945de0f3n,\n 0xf5408b7f6008aa71n, 0x43707f4863034149n, 0xdac65fb9679279d5n, 0xc48406e7d1114eb7n,\n 0xa7dc9ed3c88e1271n, 0xfb25b2efdb9cb30dn, 0x1bebda0951c4df63n, 0x5c85e975580ee5bdn,\n 0x1591bc60082cb137n, 0x2c38606318ef25d7n, 0x76ca72f7c5c63e27n, 0xf04a75d17baa0915n,\n 0x77458175139ae30dn, 0x0e6c1330bc1b9421n, 0xdf87d2b5797e8293n, 0xefa5c703e1e68925n,\n 0x2b6b1b3278b4f6e1n, 0xceee27b382394249n, 0xd74e3829f5dab91dn, 0xfdb17989c26b5f1fn,\n 0xc1b7d18781530845n, 0x7b4436b2105a8561n, 0x7ba7c0418372a7d7n, 0x9dbc5c67feb6c639n,\n 0x502686d7f6ff6b8fn, 0x6101855406be7a1fn, 0x9956afb5806930e7n, 0xe1f0ee88af40f7c5n,\n 0x984b057bda5c1151n, 0x9a49819acc13ea05n, 0x8ef0dead0896ef27n, 0x71f7826efe292b21n,\n 0xad80a480e46986efn, 0x01cdc0ebf5e0c6f7n, 0x6e06f839968f68dbn, 0xdd5943ab56e76139n,\n 0xcdcf31bf8604c5e7n, 0x7e2b4a847054a1cbn, 0x0ca75697a4d3d0f5n, 0x4703f53ac514a98bn,\n];\n\nconst inverted_factorial_odd_part = [\n 0x0000000000000001n, 0x0000000000000001n, 0x0000000000000001n, 0xaaaaaaaaaaaaaaabn,\n 0xaaaaaaaaaaaaaaabn, 0xeeeeeeeeeeeeeeefn, 0x4fa4fa4fa4fa4fa5n, 0x2ff2ff2ff2ff2ff3n,\n 0x2ff2ff2ff2ff2ff3n, 0x938cc70553e3771bn, 0xb71c27cddd93e49fn, 0xb38e3229fcdee63dn,\n 0xe684bb63544a4cbfn, 0xc2f684917ca340fbn, 0xf747c9cba417526dn, 0xbb26eb51d7bd49c3n,\n 0xbb26eb51d7bd49c3n, 0xb0a7efb985294093n, 0xbe4b8c69f259eabbn, 0x6854d17ed6dc4fb9n,\n 0xe1aa904c915f4325n, 0x3b8206df131cead1n, 0x79c6009fea76fe13n, 0xd8c5d381633cd365n,\n 0x4841f12b21144677n, 0x4a91ff68200b0d0fn, 0x8f9513a58c4f9e8bn, 0x2b3e690621a42251n,\n 0x4f520f00e03c04e7n, 0x2edf84ee600211d3n, 0xadcaa2764aaacdfdn, 0x161f4f9033f4fe63n,\n 0x161f4f9033f4fe63n, 0xbada2932ea4d3e03n, 0xcec189f3efaa30d3n, 0xf7475bb68330bf91n,\n 0x37eb7bf7d5b01549n, 0x46b35660a4e91555n, 0xa567c12d81f151f7n, 0x4c724007bb2071b1n,\n 0x0f4a0cce58a016bdn, 0xfa21068e66106475n, 0x244ab72b5a318ae1n, 0x366ce67e080d0f23n,\n 0xd666fdae5dd2a449n, 0xd740ddd0acc06a0dn, 0xb050bbbb28e6f97bn, 0x70b003fe890a5c75n,\n 0xd03aabff83037427n, 0x13ec4ca72c783bd7n, 0x90282c06afdbd96fn, 0x4414ddb9db4a95d5n,\n 0xa2c68735ae6832e9n, 0xbf72d71455676665n, 0xa8469fab6b759b7fn, 0xc1e55b56e606caf9n,\n 0x40455630fc4a1cffn, 0x0120a7b0046d16f7n, 0xa7c3553b08faef23n, 0x9f0bfd1b08d48639n,\n 0xa433ffce9a304d37n, 0xa22ad1d53915c683n, 0xcb6cbc723ba5dd1dn, 0x547fb1b8ab9d0ba3n,\n 0x547fb1b8ab9d0ba3n, 0x8f15a826498852e3n, 0x32e1a03f38880283n, 0x3de4cce63283f0c1n,\n 0x5dfe6667e4da95b1n, 0xfda6eeeef479e47dn, 0xf14de991cc7882dfn, 0xe68db79247630ca9n,\n 0xa7d6db8207ee8fa1n, 0x255e1f0fcf034499n, 0xc9a8990e43dd7e65n, 0x3279b6f289702e0fn,\n 0xe7b5905d9b71b195n, 0x03025ba41ff0da69n, 0xb7df3d6d3be55aefn, 0xf89b212ebff2b361n,\n 0xfe856d095996f0adn, 0xd6e533e9fdf20f9dn, 0xf8c0e84a63da3255n, 0xa677876cd91b4db7n,\n 0x07ed4f97780d7d9bn, 0x90a8705f258db62fn, 0xa41bbb2be31b1c0dn, 0x6ec28690b038383bn,\n 0xdb860c3bb2edd691n, 0x0838286838a980f9n, 0x558417a74b36f77dn, 0x71779afc3646ef07n,\n 0x743cda377ccb6e91n, 0x7fdf9f3fe89153c5n, 0xdc97d25df49b9a4bn, 0x76321a778eb37d95n,\n 0x7cbb5e27da3bd487n, 0x9cff4ade1a009de7n, 0x70eb166d05c15197n, 0xdcf0460b71d5fe3dn,\n 0x5ac1ee5260b6a3c5n, 0xc922dedfdd78efe1n, 0xe5d381dc3b8eeb9bn, 0xd57e5347bafc6aadn,\n 0x86939040983acd21n, 0x395b9d69740a4ff9n, 0x1467299c8e43d135n, 0x5fe440fcad975cdfn,\n 0xcaa9a39794a6ca8dn, 0xf61dbd640868dea1n, 0xac09d98d74843be7n, 0x2b103b9e1a6b4809n,\n 0x2ab92d16960f536fn, 0x6653323d5e3681dfn, 0xefd48c1c0624e2d7n, 0xa496fefe04816f0dn,\n 0x1754a7b07bbdd7b1n, 0x23353c829a3852cdn, 0xbf831261abd59097n, 0x57a8e656df0618e1n,\n 0x16e9206c3100680fn, 0xadad4c6ee921dac7n, 
0x635f2b3860265353n, 0xdd6d0059f44b3d09n,\n 0xac4dd6b894447dd7n, 0x42ea183eeaa87be3n, 0x15612d1550ee5b5dn, 0x226fa19d656cb623n,\n]\n\nconst factorial_trailing_zeros = [\n 0, 0, 1, 1, 3, 3, 4, 4, 7, 7, 8, 8, 10, 10, 11, 11, // 0-15\n 15, 15, 16, 16, 18, 18, 19, 19, 22, 22, 23, 23, 25, 25, 26, 26, // 16-31\n 31, 31, 32, 32, 34, 34, 35, 35, 38, 38, 39, 39, 41, 41, 42, 42, // 32-47\n 46, 46, 47, 47, 49, 49, 50, 50, 53, 53, 54, 54, 56, 56, 57, 57, // 48-63\n 63, 63, 64, 64, 66, 66, 67, 67, 70, 70, 71, 71, 73, 73, 74, 74, // 64-79\n 78, 78, 79, 79, 81, 81, 82, 82, 85, 85, 86, 86, 88, 88, 89, 89, // 80-95\n 94, 94, 95, 95, 97, 97, 98, 98, 101, 101, 102, 102, 104, 104, 105, 105, // 96-111\n 109, 109, 110, 110, 112, 112, 113, 113, 116, 116, 117, 117, 119, 119, 120, 120, // 112-127\n].map(BigInt)\n\nconst NULL = undefined\n\n/* Calculate C(n, k) for n in the 63-bit range. */\n\nfunction perm_comb_small(n, k, iscomb){\n if(k == 0){\n return 1n\n }\n\n /* For small enough n and k the result fits in the 64-bit range and can\n * be calculated without allocating intermediate PyLong objects. */\n if(iscomb){\n /* Maps k to the maximal n so that 2*k-1 <= n <= 127 and C(n, k)\n * fits into a uint64_t. Exclude k = 1, because the second fast\n * path is faster for this case.*/\n var fast_comb_limits1 = [\n 0, 0, 127, 127, 127, 127, 127, 127, // 0-7\n 127, 127, 127, 127, 127, 127, 127, 127, // 8-15\n 116, 105, 97, 91, 86, 82, 78, 76, // 16-23\n 74, 72, 71, 70, 69, 68, 68, 67, // 24-31\n 67, 67, 67 // 32-34\n ];\n if(k < fast_comb_limits1.length && n <= fast_comb_limits1[k]){\n /*\n comb(n, k) fits into a uint64_t. We compute it as\n comb_odd_part << shift\n where 2**shift is the largest power of two dividing comb(n, k)\n and comb_odd_part is comb(n, k) >> shift. comb_odd_part can be\n calculated efficiently via arithmetic modulo 2**64, using three\n lookups and two uint64_t multiplications.\n */\n var comb_odd_part = reduced_factorial_odd_part[n]\n * inverted_factorial_odd_part[k]\n * inverted_factorial_odd_part[n - k];\n comb_odd_part %= p2_64\n var shift = factorial_trailing_zeros[n]\n - factorial_trailing_zeros[k]\n - factorial_trailing_zeros[n - k];\n return comb_odd_part << shift;\n }\n\n /* Maps k to the maximal n so that 2*k-1 <= n <= 127 and C(n, k)*k\n * fits into a long long (which is at least 64 bit). Only contains\n * items larger than in fast_comb_limits1. */\n var fast_comb_limits2 = [\n 0, ULLONG_MAX, 4294967296, 3329022, 102570, 13467, 3612, 1449, // 0-7\n 746, 453, 308, 227, 178, 147 // 8-13\n ];\n if (k < fast_comb_limits2.length && n <= fast_comb_limits2[k]) {\n /* C(n, k) = C(n, k-1) * (n-k+1) / k */\n var result = n,\n i = 1n;\n while(i < k){\n result *= --n;\n result /= ++i;\n }\n return result;\n }\n }else{\n /* Maps k to the maximal n so that k <= n and P(n, k)\n * fits into a long long (which is at least 64 bit). */\n var fast_perm_limits = [\n 0, ULLONG_MAX, 4294967296, 2642246, 65537, 7133, 1627, 568, // 0-7\n 259, 142, 88, 61, 45, 36, 30, 26, // 8-15\n 24, 22, 21, 20, 20 // 16-20\n ];\n if (k < fast_perm_limits.length && n <= fast_perm_limits[k]) {\n if(n <= 127){\n /* P(n, k) fits into a uint64_t. 
*/\n var perm_odd_part = reduced_factorial_odd_part[n]\n * inverted_factorial_odd_part[n - k];\n perm_odd_part %= p2_64\n var shift = factorial_trailing_zeros[n]\n - factorial_trailing_zeros[n - k];\n var res = perm_odd_part << shift\n\n return res;\n }\n\n /* P(n, k) = P(n, k-1) * (n-k+1) */\n var result = n;\n for (var i = 1; i < k; i++) {\n result *= --n;\n }\n return result\n }\n }\n\n /* For larger n use recursive formulas:\n *\n * P(n, k) = P(n, j) * P(n-j, k-j)\n * C(n, k) = C(n, j) * C(n-j, k-j) // C(k, j)\n */\n var j = k / 2n;\n var a = perm_comb_small(n, j, iscomb);\n var b = perm_comb_small(n - j, k - j, iscomb);\n a = a * b;\n if(iscomb){\n b = perm_comb_small(k, j, 1);\n a = a / b;\n }\n return a;\n}\n\n/* Calculate P(n, k) or C(n, k) using recursive formulas.\n * It is more efficient than sequential multiplication thanks to\n * Karatsuba multiplication.\n */\nfunction perm_comb(n, k, iscomb){\n if(k == 0){\n return 1;\n }\n if(k == 1){\n return n;\n }\n\n /* P(n, k) = P(n, j) * P(n-j, k-j) */\n /* C(n, k) = C(n, j) * C(n-j, k-j) // C(k, j) */\n var j = k / 2n\n var a = perm_comb(n, j, iscomb);\n //var t = j\n //n = n - t;\n var b = perm_comb(n - j, k - j, iscomb);\n a = a * b;\n if(iscomb){\n b = perm_comb_small(k, j, 1);\n a = a / b;\n }\n return a;\n}\n\nfunction comb(n, k){\n var $ = $B.args('comb', 2, {n: null, k: null}, ['n', 'k'],\n arguments, {}, null, null),\n n = $.n,\n k = $.k\n\n var result = NULL,\n temp,\n overflow, cmp;\n\n // accept integers or objects with __index__\n n = $B.PyNumber_Index(n)\n k = $B.PyNumber_Index(k)\n\n n = _b_.int.$to_bigint(n);\n k = _b_.int.$to_bigint(k);\n\n if(n < 0){\n throw _b_.ValueError.$factory(\n \"n must be a non-negative integer\");\n }\n if(k < 0){\n throw _b_.ValueError.$factory(\n \"k must be a non-negative integer\");\n }\n\n overflow = n > LLONG_MAX || n < LLONG_MIN\n if(! overflow){\n overflow = k > LLONG_MAX || k < LLONG_MIN\n if (overflow || k > n) {\n result = 0n;\n }else{\n if(n - k < k){\n k = n - k\n }\n if (k > 1) {\n result = perm_comb_small(n, k, 1);\n }\n }\n /* For k == 1 just return the original n in perm_comb(). */\n }else{\n /* k = min(k, n - k) */\n temp = n - k\n if(temp < 0) {\n result = 0n;\n }\n if (temp < k) {\n k = temp\n }\n\n overflow = k > LLONG_MAX || k < LLONG_MIN\n if (overflow) {\n throw _b_.OverflowError.$factory(\n \"min(n - k, k) must not exceed \" +\n LLONG_MAX);\n }\n }\n if(result === undefined){\n result = perm_comb(n, k, 1);\n }\n\n return _b_.int.$int_or_long(result)\n}\n\n\nfunction copysign(x, y){\n $B.check_nb_args_no_kw('copysign', 2, arguments)\n\n var x1 = Math.abs(float_check(x))\n var y1 = float_check(y)\n var sign = Math.sign(y1)\n sign = (sign == 1 || Object.is(sign, +0)) ? 
1 : - 1\n return _b_.float.$factory(x1 * sign)\n}\n\nfunction cos(x){\n $B.check_nb_args('cos ', 1, arguments)\n $B.check_no_kw('cos ', x)\n return _b_.float.$factory(Math.cos(float_check(x)))\n}\n\nfunction cosh(x){\n $B.check_nb_args('cosh', 1, arguments)\n $B.check_no_kw('cosh', x)\n\n if(_b_.float.$funcs.isinf(x)){return INF}\n var y = float_check(x)\n if(Math.cosh !== undefined){return _b_.float.$factory(Math.cosh(y))}\n return _b_.float.$factory((Math.pow(Math.E, y) +\n Math.pow(Math.E, -y)) / 2)\n}\n\nfunction degrees(x){\n $B.check_nb_args('degrees', 1, arguments)\n $B.check_no_kw('degrees', x)\n return _b_.float.$factory(float_check(x) * 180 / Math.PI)\n}\n\nfunction dist(p, q){\n $B.check_nb_args_no_kw('dist', 2, arguments)\n\n function test(x){\n if(typeof x === \"number\"){\n return x\n }else if(x.__class__ === _b_.float){\n return x.value\n }\n var y = $B.$getattr(x, '__float__', null)\n if(y === null){\n throw _b_.TypeError.$factory('not a float')\n }\n return $B.$call(y)().value\n }\n\n // build list of differences (as floats) between coordinates of p and q\n var diffs = [],\n diff\n\n if(Array.isArray(p) && Array.isArray(q)){\n // simple case : p and q are lists of tuples\n if(p.length != q.length){\n throw _b_.ValueError.$factory(\"both points must have \" +\n \"the same number of dimensions\")\n }\n p = p.map(test)\n q = q.map(test)\n for(var i = 0, len = p.length; i < len; i++){\n var next_p = p[i],\n next_q = q[i]\n var diff = Math.abs(next_p - next_q)\n diffs.push(diff)\n }\n }else{\n var itp = _b_.iter(p),\n itq = _b_.iter(q),\n res = 0\n\n while(true){\n try{\n var next_p = _b_.next(itp)\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n // check that the other iterator is also exhausted\n try{\n var next_q = _b_.next(itq)\n throw _b_.ValueError.$factory(\"both points must have \" +\n \"the same number of dimensions\")\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n break\n }\n throw err\n }\n }\n throw err\n }\n next_p = test(next_p)\n try{\n var next_q = _b_.next(itq)\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n throw _b_.ValueError.$factory(\"both points must have \" +\n \"the same number of dimensions\")\n }\n throw err\n }\n next_q = test(next_q)\n diff = Math.abs(next_p - next_q)\n diffs.push(diff)\n }\n }\n for(var diff of diffs){\n if(! isFinite(diff) && ! 
isNaN(diff)){\n return _mod.inf\n }\n }\n for(var diff of diffs){\n if(isNaN(diff)){\n return _mod.nan\n }\n }\n\n var res = 0,\n scale = 1,\n max_diff = Math.max(...diffs),\n min_diff = Math.min(...diffs)\n max_value = Math.sqrt(Number.MAX_VALUE) / p.length,\n min_value = Math.sqrt(Number.MIN_VALUE) * p.length\n if(max_diff > max_value){\n var nb = 0\n while(max_diff > max_value){\n scale *= 2\n max_diff /= 2\n nb++\n }\n for(var diff of diffs){\n diff = diff / scale\n res += diff * diff\n }\n return $B.fast_float(scale * Math.sqrt(res))\n }else if(min_diff !== 0 && min_diff < min_value){\n while(min_diff < min_value){\n scale *= 2\n min_diff *= 2\n }\n for(var diff of diffs){\n diff = diff * scale\n res += diff * diff\n }\n return $B.fast_float(Math.sqrt(res) / scale)\n }else{\n for(var diff of diffs){\n res += Math.pow(diff, 2)\n }\n return $B.fast_float(Math.sqrt(res))\n }\n}\n\nconst e = _b_.float.$factory(Math.E)\n\nconst ERF_SERIES_CUTOFF = 1.5,\n ERF_SERIES_TERMS = 25,\n ERFC_CONTFRAC_CUTOFF = 30.0,\n ERFC_CONTFRAC_TERMS = 50\n\n/*\n Error function, via power series.\n Given a finite float x, return an approximation to erf(x).\n Converges reasonably fast for small x.\n*/\n\nfunction m_erf_series(x){\n var x2, acc, fk, result\n var i\n\n x2 = x * x\n acc = 0.0\n fk = ERF_SERIES_TERMS + 0.5\n for(i = 0; i < ERF_SERIES_TERMS; i++){\n acc = 2.0 + x2 * acc / fk\n fk -= 1.0\n }\n result = acc * x * exp(-x2).value / sqrtpi\n return result\n}\n\nfunction m_erfc_contfrac(x){\n var x2, a, da, p, p_last, q, q_last, b, result;\n var i\n\n if(x >= ERFC_CONTFRAC_CUTOFF){\n return 0.0\n }\n\n x2 = x * x\n a = 0.0\n da = 0.5\n p = 1.0\n p_last = 0.0\n q = da + x2\n q_last = 1.0\n for(i = 0; i < ERFC_CONTFRAC_TERMS; i++){\n var temp\n a += da\n da += 2.0\n b = da + x2\n temp = p; p = b * p - a * p_last; p_last = temp\n temp = q; q = b * q - a * q_last; q_last = temp\n }\n result = p / q * x * exp(-x2).value / sqrtpi\n return result\n}\n\n\nfunction erf(x){\n var absx,\n cf\n var x1 = float_check(x)\n if(isNaN(x1)){\n return x\n }\n absx = fabs(x)\n if(absx.value < ERF_SERIES_CUTOFF){\n return $B.fast_float(m_erf_series(x1))\n }else{\n cf = m_erfc_contfrac(absx.value)\n return $B.fast_float(x1 > 0.0 ? 1.0 - cf : cf - 1.0)\n }\n}\n\nfunction erfc(x){\n\n // inspired from\n // http://stackoverflow.com/questions/457408/is-there-an-easily-available-implementation-of-erf-for-python\n var y = float_check(x)\n var t = 1.0 / (1.0 + 0.5 * Math.abs(y))\n var ans = 1 - t * Math.exp( -y * y - 1.26551223 +\n t * ( 1.00002368 +\n t * ( 0.37409196 +\n t * ( 0.09678418 +\n t * (-0.18628806 +\n t * ( 0.27886807 +\n t * (-1.13520398 +\n t * ( 1.48851587 +\n t * (-0.82215223 +\n t * 0.17087277)))))))))\n if(y >= 0.0){return 1 - ans}\n return 1 + ans\n}\n\nfunction erfc(x){\n $B.check_nb_args_no_kw('erfc', 1, arguments)\n var absx, cf;\n\n var x1 = float_check(x)\n if(isNaN(x1)){\n return x\n }\n absx = fabs(x);\n if(absx.value < ERF_SERIES_CUTOFF){\n return $B.fast_float(1.0 - m_erf_series(x1))\n }else{\n cf = m_erfc_contfrac(absx.value)\n return $B.fast_float(x1 > 0.0 ? cf : 2.0 - cf)\n }\n}\n\nfunction exp(x){\n $B.check_nb_args('exp', 1, arguments)\n $B.check_no_kw('exp', x)\n\n if(_b_.float.$funcs.isninf(x)){\n return _b_.float.$factory(0)\n }\n if(_b_.float.$funcs.isinf(x)){\n return INF\n }\n var _r = Math.exp(float_check(x))\n if(! isNaN(_r) && ! 
isFinite(_r)){\n throw _b_.OverflowError.$factory(\"math range error\")\n }\n return _b_.float.$factory(_r)\n}\n\nfunction exp2(x){\n return pow(2, x)\n}\n\nfunction expm1(x){\n $B.check_nb_args('expm1', 1, arguments)\n $B.check_no_kw('expm1', x)\n\n if(_b_.float.$funcs.isninf(x)){\n return $B.fast_float(-1)\n }else if(_b_.float.$funcs.isinf(x)){\n return INF\n }\n var _r = Math.expm1(float_check(x))\n if((! isNaN(_r)) && ! isFinite(_r)){\n overflow()\n }\n return $B.fast_float(_r)\n}\n\nfunction fabs(x){\n $B.check_nb_args_no_kw('fabs', 1, arguments)\n return _b_.float.$funcs.fabs(float_check(x)) // located in py_float.js\n}\n\n// factorial implementation, adapted from CPython's mathmodule.c\n\nconst SmallFactorials = [\n 1n, 1n, 2n, 6n, 24n, 120n, 720n, 5040n, 40320n,\n 362880n, 3628800n, 39916800n, 479001600n,\n 6227020800n, 87178291200n, 1307674368000n,\n 20922789888000n, 355687428096000n, 6402373705728000n,\n 121645100408832000n, 2432902008176640000n\n ]\n\nconst SIZEOF_LONG = 4\n\nfunction _Py_bit_length(x){\n const BIT_LENGTH_TABLE = [\n 0, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4,\n 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5\n ]\n var msb = 0;\n while(x >= 32n){\n msb += 6;\n x >>= 6n;\n }\n msb += BIT_LENGTH_TABLE[parseInt(x)];\n return msb\n}\nfunction count_set_bits(n){\n var count = 0n;\n while(n != 0){\n ++count;\n n &= n - 1n; /* clear least significant bit */\n }\n return count;\n}\n\nfunction factorial_partial_product(start, stop, max_bits){\n var midpoint,\n num_operands,\n left,\n right,\n result\n\n /* If the return value will fit an unsigned long, then we can\n * multiply in a tight, fast loop where each multiply is O(1).\n * Compute an upper bound on the number of bits required to store\n * the answer.\n *\n * Storing some integer z requires floor(lg(z))+1 bits, which is\n * conveniently the value returned by bit_length(z). The\n * product x*y will require at most\n * bit_length(x) + bit_length(y) bits to store, based\n * on the idea that lg product = lg x + lg y.\n *\n * We know that stop - 2 is the largest number to be multiplied. From\n * there, we have: bit_length(answer) <= num_operands *\n * bit_length(stop - 2)\n */\n\n num_operands = (stop - start) / 2n;\n max_bits = BigInt(max_bits)\n /* The \"num_operands <= 8 * SIZEOF_LONG\" check guards against the\n * unlikely case of an overflow in num_operands * max_bits. */\n if(num_operands <= 8 * SIZEOF_LONG &&\n num_operands * max_bits <= 8 * SIZEOF_LONG) {\n var j,\n total;\n for (total = start, j = start + 2n; j < stop; j += 2n){\n total *= j;\n }\n return total\n }\n\n /* find midpoint of range(start, stop), rounded up to next odd number. */\n midpoint = (start + num_operands) | 1n;\n left = factorial_partial_product(start, midpoint,\n _Py_bit_length(midpoint - 2n));\n right = factorial_partial_product(midpoint, stop, max_bits);\n result = left * right\n return result;\n}\n\n\nfunction factorial_odd_part(n){\n var i,\n v, lower, upper,\n partial, tmp, inner, outer;\n\n inner = 1n\n outer = inner;\n upper = 3n;\n for (i = BigInt(_Py_bit_length(n)) - 2n; i >= 0; i--) {\n v = n >> i;\n if (v <= 2){\n continue\n }\n lower = upper;\n /* (v + 1) | 1 = least odd integer strictly larger than n / 2**i */\n upper = (v + 1n) | 1n;\n /* Here inner is the product of all odd integers j in the range (0,\n n/2**(i+1)]. The factorial_partial_product call below gives the\n product of all odd integers j in the range (n/2**(i+1), n/2**i]. 
*/\n partial = factorial_partial_product(lower, upper,\n _Py_bit_length(upper-2n));\n /* inner *= partial */\n tmp = inner * partial\n inner = tmp;\n /* Now inner is the product of all odd integers j in the range (0,\n n/2**i], giving the inner product in the formula above. */\n\n /* outer *= inner; */\n tmp = outer * inner\n outer = tmp;\n }\n return outer;\n}\n\nfunction factorial(arg){\n var x,\n two_valuation,\n overflow,\n result,\n odd_part;\n // Check that arg can be converted to an integer, and transform it to\n // a bigint\n x = _b_.int.$to_bigint($B.PyNumber_Index(arg))\n overflow = x > LONG_MAX || x < LONG_MIN\n if(x > LONG_MAX) {\n throw _b_.OverflowError.$factory(\n \"factorial() argument should not exceed \" +\n LONG_MAX)\n }else if(x < 0) {\n throw _b_.ValueError.$factory(\n \"factorial() not defined for negative values\");\n }\n\n /* use lookup table if x is small */\n if (x < SmallFactorials.length){\n return _b_.int.$int_or_long(SmallFactorials[x]);\n }\n /* else express in the form odd_part * 2**two_valuation, and compute as\n odd_part << two_valuation. */\n odd_part = factorial_odd_part(x);\n two_valuation = x - count_set_bits(x);\n return _b_.int.$int_or_long(odd_part << two_valuation);\n}\n\nfunction floor(x){\n $B.check_nb_args_no_kw('floor', 1, arguments)\n\n if(typeof x == \"number\" || x.__class__ === _b_.float){\n return Math.floor(float_check(x))\n }\n var klass = $B.get_class(x)\n try{\n return $B.$call($B.$getattr(klass, \"__floor__\"))(x)\n }catch(err){\n if($B.is_exc(err, [_b_.AttributeError])){\n try{\n var float = $B.$call($B.$getattr(klass, \"__float__\"))(x)\n return floor(float)\n }catch(err){\n if($B.is_exc(err, [_b_.AttributeError])){\n throw _b_.TypeError.$factory(\"no __float__\")\n }\n throw err\n }\n }\n }\n}\n\nfunction fmod(x, y){\n $B.check_nb_args_no_kw('fmod', 2, arguments)\n if($B.$isinstance(x, _b_.float)){\n if(_b_.float.$funcs.isinf(x)){\n throw _b_.ValueError.$factory('math domain error')\n }\n }\n y = float_check(y)\n if(y == 0){\n throw _b_.ValueError.$factory('math domain error')\n }\n return _b_.float.$factory(float_check(x) % float_check(y))\n}\n\nfunction frexp(x){\n $B.check_nb_args_no_kw('frexp', 1, arguments)\n\n var _l = _b_.float.$funcs.frexp(x)\n return _b_.tuple.$factory([_b_.float.$factory(_l[0]), _l[1]])\n}\n\nfunction fsum(x){\n $B.check_nb_args_no_kw('fsum', 1, arguments)\n\n /* Translation into Javascript of the function msum in an Active\n State Cookbook recipe : https://code.activestate.com/recipes/393090/\n by Raymond Hettinger\n */\n var partials = [],\n res = new Number(),\n _it = _b_.iter(x)\n while(true){\n try{\n var x = _b_.next(_it),\n i = 0\n x = float_check(x)\n for(var j = 0, len = partials.length; j < len; j++){\n var y = float_check(partials[j])\n if(Math.abs(x) < Math.abs(y)){\n var z = x\n x = y\n y = z\n }\n var hi = x + y,\n lo = y - (hi - x)\n if(lo){\n partials[i] = lo\n i++\n }\n x = hi\n }\n partials = partials.slice(0, i).concat([x])\n }catch(err){\n if($B.$isinstance(err, _b_.StopIteration)){break}\n throw err\n }\n }\n var res = 0\n for(var i = 0; i < partials.length; i++){\n res += partials[i]\n }\n return $B.fast_float(res)\n}\n\nfunction gamma(x){\n $B.check_nb_args('gamma', 1, arguments)\n $B.check_no_kw('gamma', x)\n var x_as_number = x,\n r,\n y,\n z,\n sqrtpow\n\n /* special cases */\n if($B.$isinstance(x, _b_.float)){\n x_as_number = x.value\n }else if(! 
$B.$isinstance(x, _b_.int)){\n throw _b_.TypeError.$factory(\"must be real number, not \" +\n $B.class_name(x))\n }\n if(x_as_number === Number.POSITIVE_INFINITY || isNaN(x_as_number)){\n return x\n }else if(x_as_number === Number.NEGATIVE_INFINITY || x_as_number == 0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n\n /* integer arguments */\n if(Number.isInteger(x_as_number)){\n if($B.rich_comp('__lt__', x, 0.0)){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n if($B.rich_comp('__le__', x, NGAMMA_INTEGRAL)){\n return $B.fast_float(gamma_integral[x_as_number - 1])\n }\n }\n var absx = fabs(x)\n\n /* tiny arguments: tgamma(x) ~ 1/x for x near 0 */\n if(absx.value < 1e-20){\n r = 1.0 / x_as_number\n if(r === Infinity || r === -Infinity){\n overflow()\n }\n return $B.fast_float(r)\n }\n\n /* large arguments: assuming IEEE 754 doubles, tgamma(x) overflows for\n x > 200, and underflows to +-0.0 for x < -200, not a negative\n integer. */\n if(absx.value > 200.0){\n if(x_as_number < 0.0){\n return $B.fast_float(0.0 / m_sinpi(x).value);\n }else{\n overflow()\n }\n }\n\n y = absx.value + lanczos_g_minus_half;\n /* compute error in sum */\n if (absx.value > lanczos_g_minus_half) {\n /* note: the correction can be foiled by an optimizing\n compiler that (incorrectly) thinks that an expression like\n a + b - a - b can be optimized to 0.0. This shouldn't\n happen in a standards-conforming compiler. */\n var q = y - absx.value;\n z = q - lanczos_g_minus_half;\n }else{\n var q = y - lanczos_g_minus_half;\n z = q - absx.value;\n }\n z = z * lanczos_g / y;\n if (x_as_number < 0.0) {\n r = -pi.value / m_sinpi(absx).value /\n absx.value * _mod.exp(y).value /\n lanczos_sum(absx.value);\n r -= z * r;\n if(absx.value < 140.0){\n r /= pow(y, absx.value - 0.5).value;\n }else{\n sqrtpow = pow(y, absx.value / 2.0 - 0.25);\n r /= sqrtpow.value;\n r /= sqrtpow.value;\n }\n }else{\n r = lanczos_sum(absx.value) / exp(y).value;\n r += z * r;\n if(absx.value < 140.0){\n r *= pow(y, absx.value - 0.5).value;\n }else{\n sqrtpow = pow(y, absx.value / 2.0 - 0.25);\n r *= sqrtpow.value;\n r *= sqrtpow.value;\n }\n }\n if(r === Number.POSITIVE_INFINITY){\n overflow()\n }\n return $B.fast_float(r);\n}\n\n\n// GCD algorithm. Javascript adaptation of Python script at\n// https://gist.github.com/cmpute/baa545f0c2b6be8b628e9ded3c19f6c1\n// by Jacob Zhong\nfunction bit_length(x){\n return x.toString(2).length\n}\n\n$B.nb_simple_gcd = 0\n\nfunction simple_gcd(a, b){\n /* a fits into a long, so b must too */\n $B.nb_simple_gcd++\n var x = a >= 0 ? a : -a,\n y = b >= 0 ? b : -b\n\n /* usual Euclidean algorithm for longs */\n while (y != 0) {\n t = y;\n y = x % y;\n x = t;\n }\n return x\n}\n\nfunction lgcd(x, y){\n var a, b, c, d\n if(x < y){\n return lgcd(y, x)\n }\n var shift = BigInt(Math.max(Math.floor(bit_length(x) / 64),\n Math.floor(bit_length(y) / 64))),\n xbar = x >> (shift * 64n),\n ybar = y >> (shift * 64n)\n while(y > p2_64){\n [a, b, c, d] = [1n, 0n, 0n, 1n]\n while(ybar + c != 0 && ybar + d != 0){\n q = (xbar + a) / (ybar + c)\n p = (xbar + b) / (ybar + d)\n if(q != p){\n break\n }\n [a, c] = [c, a - q * c]\n [b, d] = [d, b - q * d]\n [xbar, ybar] = [ybar, xbar - q * ybar]\n }\n if(b == 0){\n [x, y] = [y, x % y]\n }else{\n [x, y] = [a * x + b * y, c * x + d * y]\n }\n }\n return simple_gcd(x, y)\n}\n\nfunction xgcd(x, y){\n var xneg = x < 0 ? -1n : 1n,\n yneg = y < 0 ? -1n : 1n,\n last_r,\n last_s,\n last_t,\n q, r, s, t;\n\n [x, y] = [x >= 0 ? x : -x, y >= 0 ? 
y : -y];\n\n // it's maintained that r = s * x + t * y, last_r = last_s * x + last_t * y\n [last_r, r] = [x, y];\n [last_s, s] = [1n, 0n];\n [last_t, t] = [0n, 1n];\n\n while(r > 0){\n q = last_r / r;\n [last_r, r] = [r, last_r - q * r];\n [last_s, s] = [s, last_s - q * s];\n [last_t, t] = [t, last_t - q * t];\n }\n return [last_r, last_s * xneg, last_t * yneg]\n}\n\nfunction lxgcd(x, y){\n var g, cy, cx,\n s, last_s,\n t, last_t,\n a, b, c, d\n x = x >= 0 ? x : -x\n y = y >= 0 ? y : -y\n\n if(x < y){\n [g, cy, cx] = xgcd(y, x)\n return [g, cx, cy]\n }\n\n var shift = BigInt(Math.max(Math.floor(bit_length(x) / 64),\n Math.floor(bit_length(y) / 64))),\n xbar = x >> (shift * 64n),\n ybar = y >> (shift * 64n);\n\n [last_s, s] = [1n, 0n];\n [last_t, t] = [0n, 1n];\n\n while(y > p2_64){\n [a, b, c, d] = [1n, 0n, 0n, 1n]\n while(ybar + c != 0 && ybar + d != 0){\n q = (xbar + a) / (ybar + c)\n p = (xbar + b) / (ybar + d)\n if(q != p){\n break\n };\n [a, c = c], [a - q * c];\n [b, d = d], [b - q * d];\n [xbar, ybar] = [ybar, xbar - q * ybar];\n }\n if(b == 0){\n q = x / y;\n [x, y] = [y, x % y];\n [last_s, s] = [s, last_s - q * s];\n [last_t, t] = [t, last_t - q * t];\n }else{\n [x, y] = [a * x + b * y, c * x + d * y];\n [last_s, s] = [a * last_s + b * s, c * last_s + d * s];\n [last_t, t] = [a * last_t + b * t, c * last_t + d * t];\n }\n }\n // notice that here x, y could be negative\n [g, cx, cy] = xgcd(x, y)\n\n return [g, cx * last_s + cy * s, cx * last_t + cy * t]\n}\n\nfunction gcd(x, y){\n var $ = $B.args(\"gcd\", 0, {}, [], arguments, {}, 'args', null)\n var args = $.args.map($B.PyNumber_Index)\n if(args.length == 0){\n return 0\n }else if(args.length == 1){\n return _b_.abs(args[0])\n }\n x = _b_.int.$to_bigint(args[0])\n y = _b_.int.$to_bigint(args[1])\n var res = lxgcd(x, y)[0],\n i = 2\n while(i < args.length){\n res = lxgcd(res, _b_.int.$to_bigint(args[i]))[0]\n i++\n }\n return _b_.int.$int_or_long(res)\n}\n\n\nfunction hypot(x, y){\n var $ = $B.args(\"hypot\", 0, {}, [],\n arguments, {}, \"args\", null)\n var args = []\n for(var arg of $.args){\n try{\n args.push(float_check(arg))\n }catch(err){\n if($B.is_exc(err, [_b_.ValueError])){\n throw _b_.TypeError.$factory('must be real number, not ' +\n $B.class_name(arg))\n }\n throw err\n }\n }\n return $B.fast_float(Math.hypot(...args))\n}\n\nvar inf = INF\n\nfunction isclose(){\n var $ = $B.args(\"isclose\",\n 4,\n {a: null, b: null, rel_tol: null, abs_tol: null},\n ['a', 'b', 'rel_tol', 'abs_tol'],\n arguments,\n {rel_tol: $B.fast_float(1e-09),\n abs_tol: $B.fast_float(0.0)},\n '*',\n null)\n var a = float_check($.a),\n b = float_check($.b),\n rel_tol = float_check($.rel_tol),\n abs_tol = float_check($.abs_tol)\n\n if(rel_tol < 0.0 || abs_tol < 0.0){\n throw _b_.ValueError.$factory('tolerances must be non-negative')\n }\n\n if(a == b){\n return _b_.True\n }\n if(_b_.float.$funcs.isinf(a) || _b_.float.$funcs.isinf(b)){\n return a === b\n }\n // isclose(a, b, rel_tol, abs_tol) is the same as\n // abs_diff = abs(a - b)\n // max_ab = max(abs(a), abs(b))\n // abs_diff <= abs_tol or abs_diff / max_ab <= rel_tol\n // This is more correct than in Python docs:\n // \"abs(a-b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)\"\n // because this fails for Decimal instances, which do not support\n // multiplication by floats\n\n var diff = b - a,\n abs_diff = Math.abs(diff)\n if(abs_diff <= abs_tol){\n return true\n }\n var abs_a = Math.abs(a),\n abs_b = Math.abs(b),\n max_ab = Math.max(abs_a, abs_b)\n return abs_diff / max_ab <= 
rel_tol\n}\n\nfunction isfinite(x){\n $B.check_nb_args('isfinite', 1, arguments)\n $B.check_no_kw('isfinite', x)\n return isFinite(float_check(x))\n}\n\nfunction isinf(x){\n $B.check_nb_args('isinf', 1, arguments)\n $B.check_no_kw('isinf', x)\n return _b_.float.$funcs.isinf(x)\n}\n\nfunction isnan(x){\n $B.check_nb_args('isnan', 1, arguments)\n $B.check_no_kw('isnan', x)\n return isNaN(float_check(x))\n}\n\nfunction isqrt(x){\n $B.check_nb_args_no_kw('isqrt', 1, arguments)\n\n x = $B.PyNumber_Index(x)\n if($B.rich_comp(\"__lt__\", x, 0)){\n throw _b_.ValueError.$factory(\n \"isqrt() argument must be nonnegative\")\n }\n if(typeof x == \"number\"){\n return Math.floor(Math.sqrt(x))\n }else{ // big integer\n // adapted from code in mathmodule.c\n var n = x.value,\n bit_length = n.toString(2).length,\n c = BigInt(Math.floor((bit_length - 1) / 2)),\n c_bit_length = c.toString(2).length,\n a = 1n,\n d = 0n,\n e\n\n for(var s = BigInt(c_bit_length - 1); s >= 0; s--){\n // Loop invariant: (a-1)**2 < (n >> 2*(c - d)) < (a+1)**2\n e = d\n d = c >> s\n a = (a << d - e - 1n) + (n >> 2n*c - e - d + 1n) / a\n }\n return _b_.int.$int_or_long(a - (a * a > n ? 1n : 0n))\n }\n}\n\nfunction lcm(){\n var $ = $B.args(\"lcm\", 0, {}, [], arguments, {}, 'args', null),\n product = 1\n\n var args = $.args.map($B.PyNumber_Index)\n if(args.length == 0){\n return 1\n }else if(args.length == 1){\n return _b_.abs(args[0])\n }\n var a = _b_.abs(args[0]),\n b,\n product, gcd\n for(var i = 0, len = args.length; i < len; i++){\n b = _b_.abs(args[i])\n if(b == 0){\n return 0\n }\n gcd = gcd2(a, b)\n product = $B.rich_op('__mul__', a, b)\n a = $B.$getattr(product, \"__floordiv__\")(gcd)\n }\n return a\n}\n\nfunction ldexp(x, i){\n $B.check_nb_args('ldexp', 2, arguments)\n $B.check_no_kw('ldexp', x, i)\n return _b_.float.$funcs.ldexp(x, i) // in py_float.js\n}\n\nfunction lgamma(x){\n $B.check_nb_args('lgamma', 1, arguments)\n $B.check_no_kw('lgamma', x)\n\n return m_lgamma(x)\n}\n\nfunction longint_mant_exp(long_int){\n // Returns mantissa and exponent of a long integer\n var value = long_int.value,\n exp = value.toString(2).length,\n exp1 = exp,\n nb = 0n\n // 2 ** exp is infinite if n > 1023\n var nb = Math.floor(exp / 1023),\n exp1 = BigInt(exp - 1023 * nb)\n nb = BigInt(nb)\n var reduced_value = long_int.value / 2n ** (nb * 1023n)\n var mant = Number(reduced_value) / Number(2n ** exp1)\n return [mant, exp]\n}\n\nvar log10_func = Math.log10 || (x => Math.log(x) / Math.log(10)),\n log2_func = Math.log2 || (x => Math.log(x) / Math.log(2))\n\nfunction log(x, base){\n var $ = $B.args(\"log\", 2, {x: null, base: null}, ['x', 'base'],\n arguments, {base: _b_.None}, null, null),\n x = $.x,\n base = $.base\n if(base == 10){\n return log10(x)\n }else if(base == 2){\n return log2(x)\n }\n var log\n if($B.$isinstance(x, $B.long_int)){\n if(x.value <= 0){\n throw _b_.ValueError.$factory('math domain error')\n }\n var mant_exp = longint_mant_exp(x)\n log = Math.log(mant_exp[0]) + Math.log(2) * mant_exp[1]\n }else if($B.$isinstance(x, _b_.int)){\n x = _b_.int.$int_value(x)\n if(x <= 0){\n throw _b_.ValueError.$factory('math domain error')\n }\n log = Math.log(x)\n }else{\n var x1 = float_check(x)\n if(x1 <= 0){\n throw _b_.ValueError.$factory('math domain error')\n }\n log = Math.log(x1)\n }\n if(x1 <= 0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n if(base === _b_.None){\n return $B.fast_float(log)\n }\n var denom = _mod.log(base).value\n if(denom == 0){\n throw _b_.ZeroDivisionError.$factory('float division by 
zero')\n }\n return $B.fast_float(log / denom)\n}\n\nfunction log1p(x){\n $B.check_nb_args('log1p', 1, arguments)\n $B.check_no_kw('log1p', x)\n if($B.$isinstance(x, $B.long_int)){\n if($B.long_int.bit_length(x) > 1024){\n throw _b_.OverflowError.$factory(\n \"int too large to convert to float\")\n }\n x = $B.long_int.$log2($B.fast_long_int(x.value + 1n))\n return $B.fast_float(Number(x.value) * Math.LN2)\n }\n x = float_check(x)\n if(x + 1 <= 0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n return $B.fast_float(Math.log1p(x))\n}\n\nfunction log2(x){\n $B.check_nb_args('log2', 1, arguments)\n $B.check_no_kw('log2', x)\n var log2_func = Math.log2 || (x => Math.log(x) / Math.LN2)\n if($B.$isinstance(x, $B.long_int)){\n if(x.value <= 0){\n throw _b_.ValueError.$factory('math domain error')\n }\n var mant_exp = longint_mant_exp(x)\n return $B.fast_float(log2_func(mant_exp[0]) + mant_exp[1])\n }\n if(_b_.float.$funcs.isninf(x)){\n throw _b_.ValueError.$factory('')\n }\n x = float_check(x)\n if(x == 0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n if(isNaN(x)){\n return _b_.float.$factory('nan')\n }\n if(x < 0.0){\n throw _b_.ValueError.$factory('math domain error')\n }\n return $B.fast_float(log2_func(x))\n}\n\nfunction log10(x){\n $B.check_nb_args('log10', 1, arguments)\n $B.check_no_kw('log10', x)\n if($B.$isinstance(x, $B.long_int)){\n return $B.fast_float($B.long_int.$log10(x).value)\n }\n x = float_check(x)\n if(x <= 0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n return $B.fast_float(Math.log10(x))\n}\n\nfunction modf(x){\n $B.check_nb_args('modf', 1, arguments)\n $B.check_no_kw('modf', x)\n\n if(_b_.float.$funcs.isninf(x)){\n return _b_.tuple.$factory([0.0, NINF])\n }\n if(_b_.float.$funcs.isinf(x)){\n return _b_.tuple.$factory([0.0, INF])\n }\n var x1 = float_check(x)\n\n if(isNaN(x1)){\n return _b_.tuple.$factory([_b_.float.$factory('nan'),\n _b_.float.$factory('nan')])\n }\n\n if(x1 > 0){\n var i = _b_.float.$factory(x1 - Math.floor(x1))\n return _b_.tuple.$factory([i, _b_.float.$factory(x1 - i.value)])\n }\n\n var x2 = Math.ceil(x1)\n var i = _b_.float.$factory(x1 - x2)\n return _b_.tuple.$factory([i, _b_.float.$factory(x2)])\n}\n\nvar nan = _b_.float.$factory('nan')\n\nfunction _nextafter(x, y){\n // always returns a Javascript number\n if($B.rich_comp('__lt__', y, x)){\n var nu = nextUp($B.rich_op('__mul__', -1, x))\n return -nu\n }else if($B.rich_comp('__gt__', y, x)){\n return nextUp(x)\n }else{\n var res = x !== x ? x : y\n res = typeof res == 'number' ? res : res.value\n return res\n }\n}\n\nfunction make_float(x){\n return typeof x == 'number' ? $B.fast_float(x) : x\n}\n\nfunction make_number(x){\n return typeof x == 'number' ? 
x : x.value\n}\n\nfunction doubleToByteArray(number) {\n // adapted from https://stackoverflow.com/questions/\n // 25942516/double-to-byte-array-conversion-in-javascript\n var buffer = new ArrayBuffer(8); // JS numbers are 8 bytes long, or 64 bits\n var longNum = new Float64Array(buffer); // so equivalent to Float64\n\n longNum[0] = number;\n\n return Array.from(new Uint8Array(buffer)).reverse(); // reverse to get little endian\n}\n\nfunction byteArrayToDouble(bytearray) {\n // adapted from https://stackoverflow.com/questions/\n // 42699162/javascript-convert-array-of-4-bytes-into-a-float-value-from-modbustcp-read\n // Create a buffer\n var buf = new ArrayBuffer(8);\n // Create a data view of it\n var view = new DataView(buf);\n\n // set bytes\n bytearray.forEach(function (b, i) {\n view.setUint8(i, b);\n });\n\n // Read the bits as a float\n var num = view.getFloat64(0);\n // Done\n return num\n}\n\nfunction addSteps(array, steps){\n // convert to BigInt, avoids issue when steps >= 2 ** 32\n if(steps.__class__ == $B.long_int){\n steps = steps.value\n }else{\n steps = BigInt(steps)\n }\n var positive = steps > 0n\n if(steps < 0n){\n steps = -steps\n }\n var x1 = steps >> 32n,\n x2 = steps - x1 * 2n ** 32n\n var buffer = new ArrayBuffer(8)\n var longStep = new BigInt64Array(buffer)\n longStep[0] = steps\n var stepArray = Array.from(new Uint8Array(buffer)).reverse()\n if(positive){\n var carry = 0\n for(var i = 7; i >= 0; i--){\n array[i] += stepArray[i] + carry\n if(array[i] > 255){\n carry = 1\n array[i] -= 256\n }else{\n carry = 0\n }\n }\n }else{\n var carry = 0\n for(var i = 7; i >= 0; i--){\n array[i] -= stepArray[i] - carry\n if(array[i] < 0){\n carry = -1\n array[i] += 256\n }else{\n carry = 0\n }\n }\n }\n}\n\nfunction nextafter(){\n var $ = $B.args(\"nextafter\", 3, {x: null, y: null, steps: null},\n ['x', 'y', 'steps'], arguments, {steps: _b_.None}, null, null),\n x = $.x,\n y = $.y,\n steps = $.steps\n if(! $B.$isinstance(x, [_b_.int, _b_.float])){\n throw _b_.TypeError.$factory('must be a real number, not ' +\n $B.class_name(x))\n }\n if(! $B.$isinstance(y, [_b_.int, _b_.float])){\n throw _b_.TypeError.$factory('must be a real number, not ' +\n $B.class_name(y))\n }\n if(isnan(x)){\n return make_float(x)\n }\n if(isnan(y)){\n return make_float(y)\n }\n if(steps === _b_.None){\n return $B.fast_float(_nextafter(x, y))\n }\n steps = $B.PyNumber_Index(steps);\n if(steps < 0) {\n throw _b_.ValueError.$factory(\n \"steps must be a non-negative integer\");\n }\n if(steps == 0){\n return make_float(x)\n }\n if(isnan(x)){\n return make_float(x)\n }\n if(isnan(y)){\n return make_float(y)\n }\n var x1 = make_number(x),\n y1 = make_number(y)\n\n if(y1 == x1){\n return make_float(y)\n }else if(y1 > x1){\n var x_uint64 = doubleToByteArray(x1)\n addSteps(x_uint64, steps)\n var res = byteArrayToDouble(x_uint64)\n return res >= y1 ? y : make_float(res)\n }else{\n var x_uint64 = doubleToByteArray(x1)\n addSteps(x_uint64, -steps)\n var res = byteArrayToDouble(x_uint64)\n return res <= y1 ? 
y : make_float(res)\n }\n}\n\nfunction perm(n, k){\n var $ = $B.args(\"perm\", 2, {n: null, k: null}, ['n', 'k'],\n arguments, {k: _b_.None}, null, null),\n n = $.n,\n k = $.k\n\n if(k === _b_.None){\n check_int(n)\n return _mod.factorial(n)\n }\n // raise TypeError if n or k is not an integer\n n = $B.PyNumber_Index(n)\n k = $B.PyNumber_Index(k)\n\n // transform to Javascript BigInt\n var n1 = _b_.int.$to_bigint(n),\n k1 = _b_.int.$to_bigint(k);\n\n if(k1 < 0){\n throw _b_.ValueError.$factory(\"k must be a non-negative integer\")\n }\n if(n1 < 0){\n throw _b_.ValueError.$factory(\"n must be a non-negative integer\")\n }\n if(k1 == 0){\n return 1\n }\n if(k1 == 1){\n return n\n }\n if(k1 == 2){\n return _b_.int.$int_or_long(n1 * (n1 - 1n))\n }\n if(k1 > n1){\n return 0\n }\n // Evaluates to n! / (n - k)!\n var fn = _mod.factorial(n),\n fn_k = _mod.factorial(n - k)\n return $B.rich_op('__floordiv__', fn, fn_k)\n}\n\nconst pi = $B.fast_float(Math.PI)\n\nfunction pow(){\n var $ = $B.args(\"pow\", 2, {base: null, exp: null}, ['base', 'exp'],\n arguments, {}, null, null),\n x = $.base,\n y = $.exp\n\n var x1 = float_check(x)\n var y1 = float_check(y)\n\n if(y1 == 0){\n return _b_.float.$factory(1)\n }\n if(x1 == 0 && y1 < 0){\n if(y1 === -Infinity){\n return INF\n }\n throw _b_.ValueError.$factory('math domain error')\n }\n if(isFinite(x1) && x1 < 0 && isFinite(y1) && ! Number.isInteger(y1)){\n throw _b_.ValueError.$factory('math domain error')\n }\n\n if(isNaN(y1)){\n if(x1 == 1){return _b_.float.$factory(1)}\n return NAN\n }\n if(x1 == 0){\n return ZERO\n }\n\n if(_b_.float.$funcs.isninf(y)){\n if(_b_.float.$funcs.isinf(x)){ // pow(INF, NINF) = 0.0\n return ZERO\n }else if(_b_.float.$funcs.isninf(x)){ // pow(NINF, NINF) = 0.0\n return ZERO\n }\n if(x1 == 1 || x1 == -1){return _b_.float.$factory(1)}\n if(x1 < 1 && x1 > -1){return INF}\n return ZERO\n }\n if(_b_.float.$funcs.isinf(y)){\n if(_b_.float.$funcs.isinf(x)){ // pow(INF, INF)\n return INF\n }\n if(_b_.float.$funcs.isninf(x)){\n return INF\n }\n if(x1 == 1 || x1 == -1){return _b_.float.$factory(1)}\n if(x1 < 1 && x1 > -1){return ZERO}\n return INF\n }\n\n if(isNaN(x1)){return _b_.float.$factory('nan')}\n if(_b_.float.$funcs.isninf(x)){\n if(y1 > 0 && isOdd(y1)){return NINF}\n if(y1 > 0){return INF} // this is even or a float\n if(y1 < 0){return ZERO}\n if(_b_.float.$float.isinf(y)){return INF}\n return _b_.float.$factory(1)\n }\n\n if(_b_.float.$funcs.isinf(x)){\n if(y1 > 0){return INF}\n if(y1 < 0){return ZERO}\n return _b_.float.$factory(1)\n }\n\n var r = Math.pow(x1, y1)\n if(isNaN(r)){\n return NAN\n }\n if(! 
isFinite(r)){\n overflow()\n }\n return _b_.float.$factory(r)\n}\n\nfunction prod(){\n var $ = $B.args(\"prod\", 1, {iterable:null, start:null},\n [\"iterable\", \"start\"], arguments, {start: 1}, \"*\",\n null),\n iterable = $.iterable,\n start = $.start\n var res = start,\n it = _b_.iter(iterable),\n x\n while(true){\n try{\n x = _b_.next(it)\n if(x == 0){\n return 0\n }\n res = $B.rich_op('__mul__', res, x)\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n return res\n }\n throw err\n }\n }\n}\n\nfunction radians(x){\n $B.check_nb_args('radians', 1, arguments)\n $B.check_no_kw('radians', x)\n\n return _b_.float.$factory(float_check(x) * Math.PI / 180)\n}\n\nfunction is_finite(x){\n return typeof x == \"number\" ||\n (x.__class__ === _b_.floar && isFinite(x.value)) ||\n $B.$isinstance(x, _b_.int) ||\n ($B.$isinstance(x, _b_.float) && isFinite(x.value))\n}\n\nfunction remainder(x, y){\n $B.check_nb_args_no_kw('remainder', 2, arguments)\n float_check(x) // might raise TypeError\n /* Deal with most common case first. */\n if(is_finite(x) && is_finite(y)){\n var absx,\n absy,\n c,\n m,\n r;\n\n if(float_check(y) == 0.0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n\n absx = fabs(x);\n absy = fabs(y);\n m = fmod(absx, absy);\n\n c = absy.value - m.value\n if(m.value < c){\n r = m.value\n }else if(m.value > c){\n r = -c\n }else{\n r = m.value -\n 2.0 * fmod($B.fast_float(0.5 * (absx.value - m.value)), absy).value;\n }\n return $B.fast_float(copysign(1.0, x).value * r);\n }\n\n /* Special values. */\n if(float_check(y) == 0){\n if(isnan(x)){\n return x\n }\n }\n if(isinf(x)){\n if(isnan(y)){\n return y\n }\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n if(isnan(y)){\n return y;\n }\n return x;\n}\n\nfunction sin(x){\n $B.check_nb_args('sin ', 1, arguments)\n $B.check_no_kw('sin ', x)\n return _b_.float.$factory(Math.sin(float_check(x)))\n}\n\nfunction sinh(x) {\n $B.check_nb_args('sinh', 1, arguments)\n $B.check_no_kw('sinh', x)\n\n var y = float_check(x)\n if(Math.sinh !== undefined){\n return _b_.float.$factory(Math.sinh(y))\n }\n return _b_.float.$factory(\n (Math.pow(Math.E, y) - Math.pow(Math.E, -y)) / 2)\n}\n\nfunction sqrt(x){\n $B.check_nb_args('sqrt ', 1, arguments)\n $B.check_no_kw('sqrt ', x)\n\n if(_b_.float.$funcs.isninf(x)){\n value_error()\n }else if(_b_.float.$funcs.isinf(x)){\n return INF\n }\n var y = float_check(x)\n if(y < 0){\n value_error()\n }\n var _r = $B.fast_float(Math.sqrt(y))\n if(_b_.float.$funcs.isinf(_r)){\n overflow()\n }\n return _r\n}\n\n/*[clinic input]\nmath.sumprod\n\n p: object\n q: object\n /\n\nReturn the sum of products of values from two iterables p and q.\n\nRoughly equivalent to:\n\n sum(itertools.starmap(operator.mul, zip(p, q, strict=True)))\n\nFor float and mixed int/float inputs, the intermediate products\nand sums are computed with extended precision.\n[clinic start generated code]*/\n\nconst tl_zero = {hi: 0, lo: 0, tiny: 0}\n\nfunction _check_long_mult_overflow(a, b) {\n\n /* From Python2's int_mul code:\n\n Integer overflow checking for * is painful: Python tried a couple ways, but\n they didn't work on all platforms, or failed in endcases (a product of\n -sys.maxint-1 has been a particular pain).\n\n Here's another way:\n\n The native long product x*y is either exactly right or *way* off, being\n just the last n bits of the true product, where n is the number of bits\n in a long (the delivered product is the true product plus i*2**n for\n some integer i).\n\n The native double product (double)x * 
(double)y is subject to three\n rounding errors: on a sizeof(long)==8 box, each cast to double can lose\n info, and even on a sizeof(long)==4 box, the multiplication can lose info.\n But, unlike the native long product, it's not in *range* trouble: even\n if sizeof(long)==32 (256-bit longs), the product easily fits in the\n dynamic range of a double. So the leading 50 (or so) bits of the double\n product are correct.\n\n We check these two ways against each other, and declare victory if they're\n approximately the same. Else, because the native long product is the only\n one that can lose catastrophic amounts of information, it's the native long\n product that must have overflowed.\n\n */\n\n /*\n\n var longprod = (long)((unsigned long)a * b);\n double doubleprod = (double)a * (double)b;\n double doubled_longprod = (double)longprod;\n\n if (doubled_longprod == doubleprod) {\n return 0;\n }\n\n const double diff = doubled_longprod - doubleprod;\n const double absdiff = diff >= 0.0 ? diff : -diff;\n const double absprod = doubleprod >= 0.0 ? doubleprod : -doubleprod;\n\n if (32.0 * absdiff <= absprod) {\n return 0;\n }\n\n return 1;\n */\n return 0\n}\n\nfunction long_add_would_overflow(a, b){\n return (a > 0n) ? (b > BigInt(LONG_MAX) - a) : (b < BigInt(LONG_MIN) - a);\n}\n\nfunction PyLong_CheckExact(n){\n return typeof n == 'number' || n.__class__ === $B.long_int\n}\n\n/*\n The default implementation of dl_mul() depends on the C math library\n having an accurate fma() function as required by \u00a7 7.12.13.1 of the\n C99 standard.\n\n The UNRELIABLE_FMA option is provided as a slower but accurate\n alternative for builds where the fma() function is found wanting.\n The speed penalty may be modest (17% slower on an Apple M1 Max),\n so don't hesitate to enable this build option.\n\n The algorithms are from the T. J. 
Dekker paper:\n A Floating-Point Technique for Extending the Available Precision\n https://csclub.uwaterloo.ca/~pbarfuss/dekker1971.pdf\n*/\n\nfunction dl_split(x) {\n // Dekker (5.5) and (5.6).\n var t = x * 134217729.0; // Veltkamp constant = 2.0 ** 27 + 1\n var hi = t - (t - x);\n var lo = x - hi;\n return {hi, lo};\n}\n\nfunction dl_mul(x, y){\n // Dekker (5.12) and mul12()\n var xx = dl_split(x);\n var yy = dl_split(y);\n var p = xx.hi * yy.hi;\n var q = xx.hi * yy.lo + xx.lo * yy.hi;\n var z = p + q;\n var zz = p - z + q + xx.lo * yy.lo;\n return {hi: z, lo: zz};\n}\n\nfunction dl_sum(a, b){\n /* Algorithm 3.1 Error-free transformation of the sum */\n var x = a + b;\n var z = x - a;\n var y = (a - (x - z)) + (b - z);\n return {hi: x, lo: y};\n}\n\nfunction tl_fma(x, y, total){\n /* Algorithm 5.10 with SumKVert for K=3 */\n var pr = dl_mul(x, y);\n var sm = dl_sum(total.hi, pr.hi);\n var r1 = dl_sum(total.lo, pr.lo);\n var r2 = dl_sum(r1.hi, sm.lo);\n return {hi: sm.hi, lo: r2.hi, tiny: total.tiny + r1.lo + r2.lo}\n}\n\nfunction tl_to_d(total){\n var last = dl_sum(total.lo, total.hi);\n return total.tiny + last.lo + last.hi;\n}\n\nfunction sumprod(p, q){\n var $ = $B.args('sumprod', 2, {p: null, q: null}, ['p', 'q'],\n arguments, {}, null, null)\n var p_i = NULL,\n q_i = NULL,\n term_i = NULL,\n new_total = NULL;\n var p_it, q_it, total;\n var p_next, q_next;\n var p_stopped = false, q_stopped = false;\n var int_path_enabled = true,\n int_total_in_use = false;\n var flt_path_enabled = true,\n flt_total_in_use = false;\n var int_total = 0n;\n var flt_total = tl_zero;\n\n p_it = $B.make_js_iterator(p);\n q_it = $B.make_js_iterator(q);\n total = 0\n p_next = p_it.next\n q_next = q_it.next\n while (1) {\n var finished;\n p_i = p_it.next()\n if (p_i.done) {\n /*\n if (PyErr_Occurred()) {\n if (!PyErr_ExceptionMatches(PyExc_StopIteration)) {\n goto err_exit;\n }\n PyErr_Clear();\n }\n */\n p_stopped = true;\n }else{\n p_i = p_i.value\n }\n q_i = q_it.next()\n if (q_i.done) {\n /*\n if (PyErr_Occurred()) {\n if (!PyErr_ExceptionMatches(PyExc_StopIteration)) {\n goto err_exit;\n }\n PyErr_Clear();\n }\n */\n q_stopped = true;\n }else{\n q_i = q_i.value\n }\n if (p_stopped != q_stopped) {\n throw _b_.ValueError.$factory(\"Inputs are not the same length\");\n }\n\n finished = p_stopped & q_stopped;\n\n if (int_path_enabled) {\n\n if (! 
finished && PyLong_CheckExact(p_i) & PyLong_CheckExact(q_i)) {\n var overflow;\n var int_p, int_q, int_prod;\n\n int_p = _b_.int.$to_bigint($B.PyNumber_Index(p_i))\n overflow = int_p > LONG_MAX || int_p < LONG_MIN\n\n if (overflow) {\n finalize_int_path()\n }\n int_q = _b_.int.$to_bigint($B.PyNumber_Index(q_i));\n overflow = int_q > LONG_MAX || int_q < LONG_MIN\n if (overflow) {\n finalize_int_path()\n }\n if (_check_long_mult_overflow(int_p, int_q)) {\n finalize_int_path()\n }\n int_prod = int_p * int_q;\n if (long_add_would_overflow(int_total, int_prod)) {\n finalize_int_path()\n }\n if(int_path_enabled){\n int_total = int_total + int_prod;\n int_total_in_use = true;\n continue;\n }\n }\n\n if(finished){\n finalize_int_path()\n }\n\n function finalize_int_path(){\n // We're finished, overflowed, or have a non-int\n int_path_enabled = false;\n if (int_total_in_use) {\n term_i = _b_.int.$int_or_long(int_total);\n new_total = $B.rich_op('__add__', total, term_i);\n total = new_total\n new_total = NULL;\n int_total = 0; // An ounce of prevention, ...\n int_total_in_use = false;\n }\n }\n }\n\n if (flt_path_enabled) {\n\n if (!finished) {\n var flt_p, flt_q;\n var p_type_float = p_i.__class__ === _b_.float;\n var q_type_float = q_i.__class__ === _b_.float\n if(p_type_float && q_type_float) {\n flt_p = p_i;\n flt_q = q_i;\n }else if (p_type_float && (PyLong_CheckExact(q_i) ||\n typeof q_i == 'boolean')){\n /* We care about float/int pairs and int/float pairs because\n they arise naturally in several use cases such as price\n times quantity, measurements with integer weights, or\n data selected by a vector of bools. */\n flt_p = p_i\n flt_q = _b_.int.$int_value(q_i)\n }else if(q_type_float && (PyLong_CheckExact(p_i) ||\n typeof p_i == 'boolean')) {\n flt_q = q_i\n flt_p = _b_.int.$int_value(p_i)\n }else{\n finalize_flt_path()\n }\n if(flt_path_enabled){\n var new_flt_total = tl_fma(flt_p.value, flt_q.value, flt_total);\n if (isfinite(new_flt_total.hi)) {\n flt_total = new_flt_total;\n flt_total_in_use = true;\n continue;\n }\n }\n }\n if(finished){\n finalize_flt_path()\n }\n\n function finalize_flt_path(){\n // We're finished, overflowed, have a non-float, or got a non-finite value\n flt_path_enabled = false;\n if(flt_total_in_use){\n term_i = $B.fast_float(tl_to_d(flt_total));\n if (term_i == NULL) {\n err_exit()\n }\n new_total = $B.rich_op('__add__', total, term_i);\n total = new_total\n new_total = NULL\n flt_total = tl_zero;\n flt_total_in_use = false;\n }\n }\n }\n\n if (finished) {\n return total\n }\n term_i = $B.rich_op('__mul__', p_i, q_i);\n new_total = $B.rich_op('__add__', total, term_i);\n total = new_total\n new_total = NULL;\n }\n\n}\n\n\n\nfunction tan(x) {\n $B.check_nb_args('tan', 1, arguments)\n $B.check_no_kw('tan', x)\n\n var y = float_check(x)\n return _b_.float.$factory(Math.tan(y))\n}\n\nfunction tanh(x) {\n $B.check_nb_args('tanh', 1, arguments)\n $B.check_no_kw('tanh', x)\n\n var y = float_check(x)\n if(Math.tanh !== undefined){return _b_.float.$factory(Math.tanh(y))}\n return _b_.float.$factory((Math.pow(Math.E, y) - Math.pow(Math.E, -y))/\n (Math.pow(Math.E, y) + Math.pow(Math.E, -y)))\n}\n\nconst tau = $B.fast_float(2 * Math.PI)\n\nfunction trunc(x) {\n $B.check_nb_args('trunc', 1, arguments)\n $B.check_no_kw('trunc', x)\n\n try{return $B.$getattr(x, '__trunc__')()}catch(err){}\n var x1 = float_check(x)\n if(!isNaN(parseFloat(x1)) && isFinite(x1)){\n if(Math.trunc !== undefined){return _b_.int.$factory(Math.trunc(x1))}\n if(x1 > 0){return 
_b_.int.$factory(Math.floor(x1))}\n return _b_.int.$factory(Math.ceil(x1)) // x1 < 0\n }\n throw _b_.ValueError.$factory(\n 'object is not a number and does not contain __trunc__')\n}\n\nfunction ulp(){\n var $ = $B.args(\"ulp\", 1, {x: null}, ['x'], arguments, {}, null, null),\n x = $.x\n if($B.$isinstance(x, _b_.float)){\n if(_b_.float.$funcs.isinf(x)){\n return _mod.inf\n }else if(_b_.float.$funcs.isnan(x)){\n return _mod.nan\n }\n }\n if(typeof x == \"number\"){\n return x >= 0 ? $B.fast_float(nextUp(x) - x) :\n $B.fast_float(x - (-nextUp(-x)))\n }else if($B.$isinstance(x, $B.long_int)){\n x = Number(_b_.int.$to_bigint(x))\n return x > 0 ? $B.fast_float(nextUp(x) - x) :\n $B.fast_float(x - (-nextUp(-x)))\n }else{\n if($B.rich_comp('__ge__', x, 0)){\n return $B.rich_op('__sub__', $B.fast_float(nextUp(x.value)), x)\n }else{\n var neg_x = $B.$call($B.$getattr(x, \"__neg__\"))()\n return $B.rich_op('__sub__', x,\n $B.$call($B.$getattr($B.fast_float(nextUp(neg_x.value)), '__neg__'))())\n }\n }\n}\n\nvar _mod = {\n acos,\n acosh,\n asin,\n asinh,\n atan,\n atan2,\n atanh,\n cbrt,\n ceil,\n comb,\n copysign,\n cos,\n cosh,\n degrees,\n dist,\n e,\n erf,\n erfc,\n exp,\n exp2,\n expm1,\n fabs,\n factorial,\n floor,\n fmod,\n frexp,\n fsum,\n gamma,\n gcd,\n hypot,\n inf,\n isclose,\n isfinite,\n isinf,\n isnan,\n isqrt,\n lcm,\n ldexp,\n lgamma,\n log,\n log1p,\n log2,\n log10,\n modf,\n nan,\n nextafter,\n perm,\n pi,\n pow,\n prod,\n radians,\n remainder,\n sin,\n sinh,\n sqrt,\n sumprod,\n tan,\n tanh,\n tau,\n trunc,\n ulp\n}\n\nfor(var $attr in _mod){\n if(typeof _mod[$attr] === 'function'){\n _mod[$attr].__class__ = $B.builtin_function_or_method\n }\n}\n\n$B.addToImported('math', _mod)\n\n})(__BRYTHON__)\n"], "python_re": [".js", "// Regular expression\n(function($B){\n\nvar _debug = {value: 0}\n\nvar _b_ = $B.builtins\n\nvar MAXGROUPS = 2147483647,\n MAXREPEAT = 2147483648\n\nvar word_gcs = ['Ll', 'Lu', 'Lm', 'Lt', 'Lo',\n 'Nd',\n 'Mc', 'Me', 'Mn',\n 'Pc']\n\nfunction is_word(cp){\n if((cp >= 97 && cp <= 122) // a-z\n || (cp >= 65 && cp <= 90) // A-Z\n ){\n return true\n }\n for(var word_gc of word_gcs){\n if($B.in_unicode_category(word_gc, cp)){\n return true\n }\n }\n return false\n}\n\nvar ascii_word = {}\n\nfor(var cp = 0; cp <= 127; cp++){\n if(is_word(cp)){\n ascii_word[cp] = true\n }\n}\n\nfunction is_ascii_word(cp){\n return ascii_word[cp] !== undefined\n}\n\nfunction is_digit(cp){\n if(cp >= 48 && cp <= 57){\n return true\n }\n return $B.in_unicode_category('Nd', cp)\n}\n\nfunction is_ascii_digit(cp){\n return cp <= 127 && is_digit(cp)\n}\n\nvar $error_2 = {\n $name: \"error\",\n $qualname: \"error\",\n $is_class: true,\n __module__: \"re\"\n}\n\nvar error = $B.make_class(\"error\",\n function(message){\n return {\n __class__: error,\n msg: message,\n args: $B.fast_tuple([]),\n __cause__: _b_.None,\n __context__: _b_.None,\n __suppress_context__: false\n }\n })\nerror.__bases__ = [_b_.Exception, _b_.object]\nerror.__mro__ = [_b_.Exception, _b_.BaseException, _b_.object]\n\nerror.__str__ = function(self){\n var s = self.msg + ' at position ' + self.pos\n if(self.lineno > 1){\n s += ` (line ${self.lineno}, column ${self.colno})`\n }\n return s\n}\n\n$B.set_func_names(error, \"re\")\n\nfunction $last(t){\n return t[t.length - 1]\n}\n\nfunction fail(message, pos, pattern){\n var err = error.$factory(message)\n err.msg = message\n err.pos = pos\n if(pattern){\n err.pattern = pattern.py_obj // Python object passed to compile()\n err.lineno = 1\n var linestart = 0\n for(var i = 0, len 
= pattern.string.length; i < pos; i++){\n if(pattern.string[i] == '\\n'){\n err.lineno++\n linestart = i + 1\n }\n }\n err.colno = pos - linestart + 1\n }\n throw err\n}\n\nfunction warn(klass, message, pos, text){\n var frame = $B.frame_obj.frame,\n file = frame[3].__file__,\n src = $B.file_cache[file]\n if(text === undefined){\n var lineno = frame[1].$lineno\n var lines = src.split('\\n'),\n line = lines[lineno - 1]\n }else{\n if(Array.isArray(text)){\n text = from_codepoint_list(text)\n }\n var lineno = 1,\n line_start = 0\n for(var i = 0; i < pos; i++){\n if(text[i] == '\\n'){\n lineno++\n line_start = i + 1\n }\n }\n var line_end = text.substr(line_start).search('\\n'),\n line\n if(line_end == -1){\n line = text.substr(line_start)\n }else{\n line = text.substr(line_start, line_end)\n }\n var col_offset = pos - line_start\n }\n var warning = klass.$factory(message)\n warning.pos = pos\n warning.args[1] = [file, lineno, col_offset, lineno, col_offset,\n line]\n warning.filename = file\n warning.lineno = warning.end_lineno = lineno\n warning.offset = warning.end_offset = col_offset\n warning.line = line\n // module _warning is in builtin_modules.js\n $B.imported._warnings.warn(warning)\n}\n\nfunction chr(i){\n if(i < 0 || i > 1114111){\n throw _b_.ValueError.$factory('Outside valid range')\n }else if(i >= 0x10000 && i <= 0x10FFFF){\n var code = (i - 0x10000)\n return String.fromCodePoint(0xD800 | (code >> 10)) +\n String.fromCodePoint(0xDC00 | (code & 0x3FF))\n }else{\n return String.fromCodePoint(i)\n }\n}\n\nfunction ord(char){\n return char.charCodeAt(0)\n}\n\nconst LETTERS = {\n b: ord('b'),\n N: ord('N'),\n P: ord('P'),\n u: ord('u'),\n U: ord('U'),\n x: ord('x')\n}\n\nconst PARENTH_OPEN = ord('('),\n PARENTH_CLOSE = ord(')'),\n BRACKET_OPEN = ord('['),\n BRACKET_CLOSE = ord(']'),\n BRACE_OPEN = ord('{'),\n BRACE_CLOSE = ord('}'),\n EQUAL = ord('='),\n SUP = ord('>'),\n INF = ord('<'),\n MINUS = ord('-'),\n PLUS = ord('+'),\n OR = ord('|'),\n DOT = ord('.'),\n QUESTION_MARK = ord('?'),\n EXCLAMATION_MARK = ord('!'),\n COLON = ord(':'),\n BACKSLASH = ord('\\\\'),\n DOLLAR = ord('$'),\n CARET = ord('^'),\n LINEFEED = ord('\\n')\n\n// pattern tokenizer\n\nfunction is_ascii(name){\n return /^[\\x00-\\x7F]*$/.test(name)\n}\n\nfunction open_unicode_db(){\n if($B.unicodedb === undefined){\n var xhr = new XMLHttpRequest\n xhr.open(\"GET\",\n $B.brython_path + \"unicode.txt?\" + (new Date()).getTime(), false)\n xhr.onreadystatechange = function(){\n if(this.readyState == 4){\n if(this.status == 200){\n $B.unicodedb = this.responseText\n }else{\n console.log(\n \"Warning - could not load unicode.txt\")\n }\n }\n }\n xhr.send()\n }\n}\n\nfunction validate_named_char(description, pos){\n // validate that \\N{} is in the Unicode db\n // Load unicode table if not already loaded\n if(description.length == 0){\n fail(\"missing character name\", pos)\n }\n open_unicode_db()\n if($B.unicodedb !== undefined){\n var re = new RegExp(\"^([0-9A-F]+);\" +\n description.toUpperCase() + \";.*$\", \"m\")\n search = re.exec($B.unicodedb)\n if(search === null){\n fail(`undefined character name '${description}'`, pos)\n }\n return parseInt(search[1], 16)\n }else{\n fail(\"could not load unicode.txt\", pos)\n }\n}\n\nfunction validate_group_name(sname, pos, is_bytes){\n // sname is an instance of StringObj\n if(! _b_.str.isidentifier(sname.string)){\n fail(`bad character in group name '${sname.string}'`, pos + 4)\n }\n if(is_bytes && ! 
is_ascii(sname.string)){\n var s = _b_.bytes.decode(_b_.bytes.$factory(sname.codepoints),\n 'ascii', 'backslashreplace')\n warn(_b_.DeprecationWarning,\n `bad character in group name '${s}' at position ${pos + 4}`)\n }\n return true\n}\n\nfunction validate_group_num(so, pos){\n var s = so.string\n if(s.match(/^\\d+$/)){\n return true\n }\n try{\n var num = _b_.int.$factory(s)\n warn(_b_.DeprecationWarning,\n `bad character in group name '${s}' at position ${pos + 3}`,\n pos + 3, s)\n so.string = num + ''\n return true\n }catch(err){\n return false\n }\n}\n\nfunction validate_num_or_name(so, pos, is_bytes){\n return validate_group_num(so, pos, is_bytes) ||\n validate_group_name(so, pos - 1, is_bytes)\n}\n\nvar character_classes = {\n in_charset: to_codepoint_list('dDsSwW'),\n in_re: to_codepoint_list('AbBdDsSwWZ')\n}\n\nfunction escaped_char(args){\n var cps = args.codepoints,\n pos = args.pos,\n in_charset = args.in_charset,\n is_bytes = args.is_bytes // if pattern is bytes\n var special = cps[pos + 1]\n if(special === undefined){\n fail('bad escape (end of pattern)', pos)\n }\n var key = in_charset ? 'in_charset' : 'in_re'\n if(in_charset && special == LETTERS.b){\n // Inside a character range, \\b represents the backspace character,\n // for compatibility with Python\u2019s string literals.\n return '\\b'\n }\n if(character_classes[key].indexOf(special) > -1){\n return new CharacterClass(pos, special, 2)\n }else if(special == LETTERS.N && ! is_bytes){\n if(cps[pos + 2] != BRACE_OPEN){\n fail('missing {', pos)\n }\n var i = pos + 3,\n description = []\n while(i < cps.length){\n if(cps[i] == BRACE_CLOSE){\n break\n }\n description.push(cps[i])\n i++\n }\n if(description.length == 0){\n fail(\"missing character name\", pos)\n }\n if(i == cps.length){\n fail(\"missing }, unterminated name\", pos)\n }\n var cp = validate_named_char(from_codepoint_list(description), pos)\n return {\n type: 'N',\n ord: cp,\n char: chr(cp),\n length: i - pos + 1\n }\n }else if(special == LETTERS.x){\n // \\xhh = character with hex value hh\n var rest = from_codepoint_list(cps.slice(pos + 2)),\n mo = /^[0-9a-fA-F]{0,2}/.exec(rest),\n hh = mo ? mo[0] : ''\n if(mo && mo[0].length == 2){\n var cp = parseInt(mo[0], 16)\n return {\n type: 'x',\n ord: cp,\n char: chr(cp),\n length: 2 + mo[0].length\n }\n }\n fail('incomplete escape \\\\x' + hh, pos)\n }else if(special == LETTERS.u){\n // \\uxxxx = character with 16-bit hex value xxxx\n var rest = from_codepoint_list(cps.slice(pos + 2)),\n mo = /^[0-9a-fA-F]{0,4}/.exec(rest),\n xx = mo ? mo[0] : ''\n if(mo && mo[0].length == 4){\n var cp = parseInt(mo[0], 16)\n return {\n type: 'u',\n ord: cp,\n char: chr(cp),\n length: 2 + mo[0].length\n }\n }\n fail('incomplete escape \\\\u' + xx, pos)\n }else if(special == LETTERS.U){\n // \\Uxxxxxxxx = character with 32-bit hex value xxxxxxxx\n var rest = from_codepoint_list(cps.slice(pos + 2)),\n mo = /^[0-9a-fA-F]{0,8}/.exec(rest),\n xx = mo ? 
mo[0] : ''\n if(mo && mo[0].length == 8){\n var cp = parseInt(mo[0], 16)\n if(cp > 0x10FFFF){\n fail(`bad escape \\\\U${mo[0]}`, pos)\n }\n return {\n type: 'U',\n ord: cp,\n char: chr(cp),\n length: 2 + mo[0].length\n }\n }\n fail('incomplete escape \\\\U' + xx, pos)\n }else{\n // octal ?\n // If the first digit of number is 0, or number is 3 octal digits\n // long, it will not be interpreted as a group match, but as the\n // character with octal value number\n var rest = from_codepoint_list(cps.slice(pos + 1)),\n mo = /^[0-7]{3}/.exec(rest)\n if(in_charset){\n try{\n var res = $B.test_escape(rest, -1)\n if(res){\n return {\n type: 'u',\n ord: res[0].codePointAt(0),\n char: res[0],\n length: res[1]\n }\n }\n }catch(err){\n // ignore\n }\n }\n if(mo == null){\n mo = /^0[0-7]*/.exec(rest)\n }\n if(mo){\n var octal_value = parseInt(mo[0], 8)\n if(octal_value > 0o377){\n fail(`octal escape value \\\\` +\n `${mo[0]} outside of range 0-0o377`, pos)\n }\n return {\n type: 'o',\n ord: octal_value,\n char: chr(octal_value),\n length: 1 + mo[0].length\n }\n }\n var mo = /^\\d{1,2}/.exec(rest) // backref is at most 99\n if(mo){\n return {\n type: 'backref',\n value: parseInt(mo[0]),\n length: 1 + mo[0].length\n }\n }\n var trans = {a: chr(7), f: '\\f', n: '\\n', r: '\\r', t: '\\t', v: '\\v'},\n res = trans[chr(special)]\n if(res){\n return ord(res)\n }\n if(chr(special).match(/[a-zA-Z]/)){\n fail(\"bad escape \\\\\" + chr(special), pos)\n }else{\n return special\n }\n }\n}\n\nfunction check_character_range(t, positions){\n // Check if last 2 items in t are a valid character range\n var start = t[t.length - 2],\n end = t[t.length - 1]\n if(start instanceof CharacterClass || end instanceof CharacterClass){\n fail(`bad character range ${start}-${end}`,\n positions[positions.length - 2])\n }else if(end < start){\n fail(`bad character range ${start}-${end}`,\n positions[positions.length - 2])\n }\n t.splice(t.length - 2, 2, {\n type: 'character_range',\n start: start,\n end: end,\n ord: [start.ord, end.ord]\n })\n}\n\nfunction parse_character_set(text, pos, is_bytes){\n // Parse character set starting at position \"pos\" in \"text\"\n // pos is the position of the leading \"[\"\n var start = pos,\n result = {items: []},\n positions = []\n pos++\n if(text[pos] == CARET){\n result.neg = true\n pos++\n }else if(text[pos] == BRACKET_CLOSE){\n // a leading ] is the character \"]\", not the set end\n result.items.push(']')\n positions.push(pos)\n pos++\n }else if(text[pos] == BRACKET_OPEN){\n // send FutureWarning\n warn(_b_.FutureWarning, \"Possible nested set\", pos, text)\n }\n var range = false\n while(pos < text.length){\n var cp = text[pos],\n char = chr(cp)\n if(char == ']'){\n if(pos == start + 2 && result.neg){\n // in \"[^]]\", the first ] is the character \"]\"\n result.items.push(']')\n }else{\n return [result, pos]\n }\n }\n if(char == '\\\\'){\n var escape = escaped_char({\n codepoints: text,\n pos,\n in_charset: true,\n is_bytes\n })\n if(typeof escape == \"number\"){\n var s = chr(escape)\n escape = {\n ord: escape,\n length: 2,\n toString: function(){\n return s\n }\n }\n }\n if(escape.type == \"num\"){\n // [\\9] is invalid\n fail(\"bad escape 1 \\\\\" +\n escape.value.toString()[0], pos)\n }\n result.items.push(escape)\n positions.push(pos)\n if(range){\n check_character_range(result.items, positions)\n }\n range = false\n pos += escape.length\n }else if(char == '-'){\n // Character range, or character \"-\"\n if(pos == start + 1 ||\n (result.neg && pos == start + 2) ||\n pos == text.length 
- 2 || // [a-]\n range ||\n (result.items.length > 0 &&\n result.items[result.items.length - 1].type ==\n \"character_range\")){\n result.items.push({\n ord: cp,\n char,\n toString: function(){\n return this.char\n }\n })\n if(text[pos + 1] == cp){\n warn(_b_.FutureWarning, \"Possible set difference\", pos, text)\n }\n pos++\n if(range){\n check_character_range(result.items, positions)\n }\n range = false\n }else{\n range = true\n if(text[pos + 1] == cp){\n warn(_b_.FutureWarning, \"Possible set difference\", pos, text)\n }\n pos++\n }\n }else{\n positions.push(pos)\n result.items.push({\n ord: cp,\n char,\n toString: function(){\n return this.char\n }\n })\n if(range){\n check_character_range(result.items, positions)\n }\n range = false\n // FutureWarning for consecutive \"&\", \"|\" or \"~\"\n if(char == \"&\" && text[pos + 1] == cp){\n warn(_b_.FutureWarning, \"Possible set intersection\", pos, text)\n }else if(char == \"|\" && text[pos + 1] == cp){\n warn(_b_.FutureWarning, \"Possible set union\", pos, text)\n }else if(char == \"~\" && text[pos + 1] == cp){\n warn(_b_.FutureWarning, \"Possible set symmetric difference\",\n pos, text)\n }\n pos++\n }\n }\n fail(\"unterminated character set\", start)\n}\n\nfunction* tokenize(pattern, type, _verbose){\n // pattern is a list of codepoints\n var is_bytes = type == \"bytes\"\n // verbose_stack is the stack of verbose state for each group in the regex\n var verbose_stack = [_verbose],\n verbose = _verbose,\n parenth_pos\n var pos = 0\n while(pos < pattern.length){\n var cp = pattern[pos],\n char = String.fromCharCode(cp)\n if(verbose){\n // current group is in verbose mode\n if(char == \"#\"){\n // skip until next line feed\n while(pos < pattern.length && pattern[pos] != 10){\n pos++\n }\n pos++\n continue\n }else{\n while(pos < pattern.length &&\n [9, 10, 11, 12, 13, 32].indexOf(pattern[pos]) > -1){\n pos++\n }\n }\n cp = pattern[pos]\n if(cp === undefined){\n break\n }\n char = String.fromCharCode(cp)\n if(char == '#'){\n continue\n }\n }\n if(char == '('){\n parenth_pos = pos\n if(pattern[pos + 1] == QUESTION_MARK){\n if(pattern[pos + 2] == LETTERS.P){\n if(pattern[pos + 3] == INF){\n var name = [],\n i = pos + 4\n while(i < pattern.length){\n if(pattern[i] == SUP){\n break\n }else if(pattern[i] == PARENTH_CLOSE){\n fail(\"missing >, unterminated name\", pos)\n }\n name.push(pattern[i])\n i++\n }\n var sname = StringObj.from_codepoints(name)\n validate_group_name(sname, pos, is_bytes)\n name = sname\n if(i == pattern.length){\n fail(\"missing >, unterminated name\", pos)\n }\n yield new Group(pos, {type: 'name_def', value: name})\n verbose_stack.push(verbose)\n pos = i + 1\n continue\n }else if(pattern[pos + 3] == EQUAL){\n var name = [],\n i = pos + 4\n while(i < pattern.length){\n if(pattern[i] == PARENTH_CLOSE){\n break\n }\n name.push(pattern[i])\n i++\n }\n name = StringObj.from_codepoints(name)\n validate_group_name(name, pos, is_bytes)\n if(i == pattern.length){\n fail(\"missing ), unterminated name\", pos)\n }\n yield new BackReference(pos, 'name', name.string)\n pos = i + 1\n continue\n }else if(pattern[pos + 3] === undefined){\n fail(\"unexpected end of pattern\", pos)\n }else{\n fail(\"unknown extension ?P\" + chr(pattern[pos + 3]), pos)\n }\n }else if(pattern[pos + 2] == PARENTH_OPEN){\n var ref = [],\n i = pos + 3\n while(i < pattern.length){\n if(pattern[i] == PARENTH_CLOSE){\n break\n }\n ref.push(pattern[i])\n i++\n }\n var sref = StringObj.from_codepoints(ref)\n if(sref.string.match(/^\\d+$/)){\n ref = 
parseInt(sref.string)\n }else{\n validate_num_or_name(sref, pos, is_bytes)\n ref = sref.string\n }\n if(i == pattern.length){\n fail(\"missing ), unterminated name\", pos)\n }\n yield new ConditionalBackref(pos, ref)\n pos = i + 1\n continue\n }else if(pattern[pos + 2] == EQUAL){\n // (?=...) : lookahead assertion\n yield new Group(pos, {type: 'lookahead_assertion'})\n verbose_stack.push(verbose)\n pos += 3\n continue\n }else if(pattern[pos + 2] == EXCLAMATION_MARK){\n // (?!...) : negative lookahead assertion\n yield new Group(pos, {type: 'negative_lookahead_assertion'})\n verbose_stack.push(verbose)\n pos += 3\n continue\n }else if(from_codepoint_list(pattern.slice(pos + 2, pos + 4)) == ' -1){\n if(pattern[pos + 2] == MINUS){\n var on_flags = [],\n has_off = true,\n off_flags = []\n pos += 3\n }else{\n var on_flags = [chr(pattern[pos + 2])],\n has_off = false,\n off_flags = [],\n auL = auL_flags.indexOf(pattern[pos + 2]) > -1 ?\n 1 : 0,\n closed = false\n pos += 3\n while(pos < pattern.length){\n if(flags.indexOf(pattern[pos]) > -1){\n if(auL_flags.indexOf(pattern[pos]) > -1){\n auL++\n if(auL > 1){\n fail(\"bad inline flags: flags 'a', 'u'\" +\n \" and 'L' are incompatible\", pos)\n }\n }\n on_flags.push(chr(pattern[pos]))\n pos++\n }else if(pattern[pos] == MINUS){\n has_off = true\n closed = true\n pos++\n break\n }else if(String.fromCharCode(pattern[pos]).\n match(/[a-zA-Z]/)){\n fail(\"unknown flag\", pos)\n }else if(pattern[pos] == PARENTH_CLOSE){\n closed = true\n break\n }else if(pattern[pos] == COLON){\n yield new Group(pos, {name: \"Group\", type: \"flags\"})\n verbose_stack.push(verbose)\n closed = true\n break\n }else{\n fail(\"missing -, : or )\", pos)\n }\n }\n if(! closed){\n fail(\"missing -, : or )\", pos)\n }\n }\n if(has_off){\n while(pos < pattern.length){\n if(flags.indexOf(pattern[pos]) > -1){\n if(auL_flags.indexOf(pattern[pos]) > -1){\n fail(\"bad inline flags: cannot turn off \" +\n \"flags 'a', 'u' and 'L'\", pos)\n }\n if(on_flags.indexOf(chr(pattern[pos])) > -1){\n fail(\"bad inline flags: flag turned on and off\", pos)\n }\n off_flags.push(chr(pattern[pos]))\n pos++\n }else if(pattern[pos] == COLON){\n yield new Group(pos, {name: \"Group\", type: \"flags\"})\n verbose_stack.push(verbose)\n break\n }else if(String.fromCharCode(pattern[pos]).\n match(/[a-zA-Z]/)){\n fail(\"unknown flag\", pos)\n }else if(off_flags.length == 0){\n fail(\"missing flag\", pos)\n }else{\n fail(\"missing :\", pos)\n }\n }\n if(off_flags.length == 0){\n fail(\"missing flag\", pos)\n }\n }\n if(has_off && pattern[pos] != COLON){\n fail(\"missing :\", pos)\n }\n if(on_flags.length == 0 && off_flags.length == 0){\n fail(\"missing flag\", pos)\n }\n var set_flags = new SetFlags(flags_start,\n {on_flags, off_flags})\n\n yield set_flags\n // reset verbose\n if(on_flags.indexOf('x') > -1){\n verbose = true\n verbose_stack.push(verbose)\n }\n if(off_flags.indexOf('x') > -1){\n verbose = false\n }\n if(! 
closed){\n node = set_flags\n }\n pos++\n }else if(pattern[pos + 2] == ord('#')){\n pos += 3\n while(pos < pattern.length){\n if(pattern[pos] == PARENTH_CLOSE){\n break\n }\n pos++\n }\n if(pos == pattern.length){\n fail(\"missing ), unterminated comment\", pos)\n }\n pos++\n continue\n }else{\n fail(\"unknown extension ?\" + _b_.chr(pattern[pos + 2]),\n pos)\n }\n }else{\n yield new Group(pos)\n verbose_stack.push(verbose)\n pos++\n }\n }else if(cp == PARENTH_CLOSE){\n yield new GroupEnd(pos)\n verbose_stack.pop()\n verbose = $last(verbose_stack)\n pos++\n }else if(cp == BACKSLASH){\n var escape = escaped_char({codepoints: pattern, pos, is_bytes})\n if(escape instanceof CharacterClass){\n yield escape\n pos += escape.length\n }else if(escape.char !== undefined){\n yield new Char(pos, escape.ord)\n pos += escape.length\n }else if(escape.type == \"backref\"){\n var len = escape.length\n if(escape.value.length > 2){\n escape.value = escape.value.substr(0, 2)\n len = 2\n }\n yield new BackReference(pos, \"num\", escape.value)\n pos += len\n }else if(typeof escape == \"number\"){\n // eg \"\\.\"\n var esc = new Char(pos, escape)\n esc.escaped = true\n yield esc\n pos += 2\n }else{\n yield new Char(pos, escape)\n pos += escape.length\n }\n }else if(cp == BRACKET_OPEN){\n // Set of characters\n var set,\n end_pos\n [set, end_pos] = parse_character_set(pattern, pos, is_bytes)\n yield new CharacterSet(pos, set)\n pos = end_pos + 1\n }else if('+?*'.indexOf(char) > -1){\n yield new Repeater(pos, char)\n pos++\n }else if(cp == BRACE_OPEN){\n var reps = /\\{(\\d*)((,)(\\d*))?\\}/.exec(\n from_codepoint_list(pattern.slice(pos)))\n if(reps && reps[0] != '{}'){\n if(reps[1] == \"\"){\n var limits = [0]\n }else{\n var limits = [parseInt(reps[1])]\n }\n if(reps[4] !== undefined){\n if(reps[4] == \"\"){\n var max = Number.POSITIVE_INFINITY\n }else{\n var max = parseInt(reps[4])\n }\n limits.push(max)\n }\n yield new Repeater(pos, limits)\n pos += reps[0].length\n }else if(pattern[pos + 1] == BRACE_CLOSE){\n // {} is the characters \"{\" and \"}\"\n yield new Char(pos, BRACE_OPEN)\n pos++\n }else{\n yield new Char(pos, BRACE_OPEN)\n pos++\n }\n }else if(cp == OR){\n yield new Or(pos)\n pos++\n }else if(cp == DOT){\n yield new CharacterClass(pos, cp, 1)\n pos++\n }else if(cp == CARET){\n yield new StringStart(pos)\n pos++\n }else if(cp == DOLLAR){\n yield new StringEnd(pos)\n pos++\n }else{\n yield new Char(pos, cp)\n pos++\n }\n }\n}\n\nfunction transform_repl(data, pattern){\n // data.repl is a StringObj instance\n var repl = data.repl.string\n repl = repl.replace(/\\\\n/g, '\\n')\n repl = repl.replace(/\\\\r/g, '\\r')\n repl = repl.replace(/\\\\t/g, '\\t')\n repl = repl.replace(/\\\\b/g, '\\b')\n repl = repl.replace(/\\\\v/g, '\\v')\n repl = repl.replace(/\\\\f/g, '\\f')\n repl = repl.replace(/\\\\a/g, '\\x07')\n // detect backreferences\n var pos = 0,\n escaped = false,\n br = false,\n repl1 = \"\",\n has_backref = false\n while(pos < repl.length){\n br = false\n if(repl[pos] == \"\\\\\"){\n escaped = ! 
escaped\n if(escaped){\n pos++\n continue\n }\n }else if(escaped){\n escaped = false\n var mo = /^\\d+/.exec(repl.substr(pos))\n if(mo){\n var cps = to_codepoint_list(repl)\n var escape = escaped_char({\n codepoints: cps,\n pos: pos - 1,\n is_bytes: cps.type == \"bytes\"\n })\n if(escape.type == \"o\"){\n if(escape.ord > 0o377){\n fail(`octal escape value \\\\${mo[0]} ` +\n \" outside of range 0-0o377\", pos)\n }\n repl1 += escape.char\n pos += escape.length - 1\n continue\n }else if(escape.type != \"backref\"){\n var group_num = mo[0].substr(0,\n Math.min(2, mo[0].length))\n fail(`invalid group reference ${group_num}`, pos)\n }else{\n // only keep first 2 digits\n var group_num = mo[0].substr(0,\n Math.min(2, mo[0].length))\n // check that pattern has the specified group num\n if(pattern.groups === undefined){\n throw _b_.AttributeError.$factory(\"$groups\")\n }\n if(pattern.groups[group_num] === undefined){\n fail(`invalid group reference ${group_num}`,\n pos)\n }else{\n mo[0] = group_num\n }\n }\n if(! has_backref){\n var parts = [repl.substr(0, pos - 1),\n parseInt(mo[0])]\n }else{\n parts.push(repl.substring(next_pos, pos - 1))\n parts.push(parseInt(mo[0]))\n }\n has_backref = true\n var next_pos = pos + mo[0].length\n br = true\n pos += mo[0].length\n }else if(repl[pos] == \"g\"){\n pos++\n if(repl[pos] != '<'){\n fail(\"missing <\", pos)\n }\n pos++\n mo = /(.*?)>/.exec(repl.substr(pos))\n if(mo){\n if(mo[1] == \"\"){\n pos += mo[0].length\n fail(\"missing group name\", pos - 1)\n }\n var group_name = mo[1]\n if(group_name == '0'){\n // The backreference \\g<0> substitutes in the entire\n // substring matched by the RE.\n }else if(/^\\d+$/.exec(group_name)){\n if(pattern.groups[group_name] === undefined){\n fail(`invalid group reference ${group_name}`,\n pos)\n }\n }else{\n try{\n var group_num = _b_.int.$factory(group_name)\n if(group_num < 0){\n fail(`bad character in group name ` +\n `'${group_name}' at position ${pos}`, pos)\n }\n warn(_b_.DeprecationWarning,\n `bad character in group name '${group_name}' ` +\n `at position ${pos}`)\n mo[1] = group_name = group_num + ''\n }catch(err){\n if(! _b_.str.isidentifier(group_name)){\n var cps = to_codepoint_list(group_name)\n if(! $B.is_XID_Start(cps[0])){\n fail(\"bad character in group name '\" +\n group_name + \"'\", pos)\n }else{\n for(cp of cps.slice(1)){\n if(! $B.is_XID_Continue(cp)){\n fail(\"bad character in group name '\" +\n group_name + \"'\", pos)\n }\n }\n }\n }else if(data.type == \"bytes\" && ! is_ascii(group_name)){\n var b = _b_.bytes.$factory(group_name, 'latin-1'),\n s = _b_.bytes.decode(b, 'ascii', 'backslashreplace')\n warn(_b_.DeprecationWarning,\n `bad character in group name '${s}'` +\n ` at position ${pos}`)\n }\n }\n if(pattern.groups[group_name] === undefined){\n throw _b_.IndexError.$factory(\n `unknown group name '${group_name}'`,\n pos)\n }\n }\n if(! has_backref){\n var parts = [repl.substr(0, pos - 3),\n mo[1]]\n }else{\n parts.push(repl.substring(next_pos, pos - 3))\n parts.push(mo[1])\n }\n has_backref = true\n var next_pos = pos + mo[0].length\n br = true\n pos = next_pos\n }else{\n if(repl.substr(pos).length > 0){\n fail(\"missing >, unterminated name\", pos)\n }else{\n fail(\"missing group name\", pos)\n }\n }\n }else{\n if(/[a-zA-Z]/.exec(repl[pos])){\n fail(\"unknown escape\", pos)\n }\n pos += repl[pos]\n }\n }\n if(! 
br){\n repl1 += repl[pos]\n pos ++\n }\n }\n data.repl1 = repl1\n if(has_backref){\n parts.push(repl.substr(next_pos))\n data.repl = function(bmo){\n var mo = bmo.mo,\n res = parts[0],\n groups = mo.$groups,\n s = mo.string,\n group,\n is_bytes = s.type == 'bytes'\n for(var i = 1, len = parts.length; i < len; i += 2){\n if(parts[i] == 0){\n var x = s.substring(mo.start, mo.end)\n if(is_bytes){\n x = _b_.bytes.decode(x, 'latin-1')\n }\n res += x\n }else if(groups[parts[i]] === undefined){\n if(mo.node.$groups[parts[i]] !== undefined){\n // group is defined in the RE, but didn't contribute\n // to the match\n // groups[parts[i]] = ''\n }else{\n // group is not defined in the RE\n pos++\n group_num = parts[i].toString().substr(0, 2)\n fail(`invalid group reference ${group_num}`, pos)\n }\n }else{\n group = groups[parts[i]]\n var x = s.substring(group.start, group.end)\n if(is_bytes){\n x = _b_.bytes.decode(x, 'latin-1')\n }\n res += x\n }\n res += parts[i + 1]\n }\n return res\n }\n }else{\n data.repl = new StringObj(repl)\n }\n return data\n}\n\n\n\nvar Flag = $B.make_class(\"Flag\",\n function(value){\n return {\n __class__: Flag,\n value\n }\n }\n)\n\nFlag.__and__ = function(self, other){\n if(other.__class__ === Flag){\n return Flag.$factory(self.value & other.value)\n }else if(typeof other == \"number\" || typeof other == \"boolean\"){\n return Flag.$factory(self.value & other)\n }\n return _b_.NotImplemented\n}\n\nFlag.__index__ = function(self){\n return self.value\n}\n\nFlag.__invert__ = function(self){\n return Flag.$factory(~self.value)\n}\n\nFlag.__eq__ = function(self, other){\n return self.value == other.value\n}\n\nFlag.__or__ = function(self, other){\n if(other.__class__ === Flag){\n return Flag.$factory(self.value | other.value)\n }else if(typeof other == \"number\" || typeof other == \"boolean\"){\n return Flag.$factory(self.value | other)\n }\n return _b_.NotImplemented\n}\n\nFlag.__rand__ = function(self, other){\n if(typeof other == \"number\" || $B.$isinstance(other, _b_.int)){\n if(other == 0){\n return false // Flag.$factory(self.value)\n }\n return self.value & other\n }\n return _b_.NotImplemented\n}\n\nFlag.__ror__ = function(self, other){\n if(typeof other == \"number\" || $B.$isinstance(other, _b_.int)){\n if(other == 0){\n return self.value\n }\n return self.value | other\n }\n return _b_.NotImplemented\n}\n\nFlag.__repr__ = Flag.__str__ = function(self){\n if(self.value == 0){\n return \"re.none\"\n }\n var inverted = self.value < 0\n\n var t = [],\n value = inverted ? ~self.value : self.value\n for(var flag in inline_flags){\n if(value & inline_flags[flag].value){\n t.push('re.' + flag_names[flag])\n value &= ~inline_flags[flag].value\n }\n }\n if(value > 0){\n t.push('0x' + value.toString(16))\n }\n var res = t.join('|')\n if(inverted){\n if(t.length > 1){\n return '~(' + res + ')'\n }else{\n return '~' + res\n }\n }\n return res\n}\n\nFlag.__xor__ = function(self, other){\n return Flag.$factory(self.value ^ other.value)\n}\n\n$B.set_func_names(Flag, \"re\")\n\nvar no_flag = {}\n\nvar Scanner = $B.make_class(\"Scanner\",\n function(pattern, string, pos, endpos){\n var $ = $B.args('__init__', 4,\n {pattern: null, string: null, pos: null, endpos:null},\n ['pattern', 'string', 'pos', 'endpos'],\n arguments, {pos: 0, endpos: _b_.None}, null, null),\n endpos = endpos === _b_.None ? 
$.string.length : endpos\n return {\n __class__: Scanner,\n $string: $.string,\n pattern: $.pattern,\n pos: $.pos,\n endpos\n }\n }\n)\n\nScanner.match = function(self){\n return Pattern.match(self.pattern, self.$string)\n}\n\nScanner.search = function(self){\n if(! self.$iterator){\n self.$iterator = module.finditer(self.pattern, self.$string)\n }\n // return last match\n var mo = _b_.None\n for(mo of self.$iterator.js_gen){\n // set mo\n }\n return mo\n}\n\nvar GroupIndex = $B.make_class(\"GroupIndex\",\n function(self, _default){\n var res = $B.empty_dict()\n res.__class__ = GroupIndex\n for(var key in self.$groups){\n if(isNaN(parseInt(key))){\n _b_.dict.$setitem(res, key, self.$groups[key].num)\n }\n }\n return res\n }\n)\nGroupIndex.__mro__ = [_b_.dict, _b_.object]\nGroupIndex.__setitem__ = function(){\n throw _b_.TypeError.$factory(\"read only\")\n}\n\n$B.set_func_names(GroupIndex, \"re\")\n\nvar Pattern = $B.make_class(\"Pattern\",\n function(pattern){\n var nb_groups = 0\n for(var key in pattern.groups){\n if(isFinite(key)){\n nb_groups++\n }\n }\n return {\n __class__: Pattern,\n pattern: pattern.text,\n groups: nb_groups,\n flags: pattern.flags,\n $groups: pattern.groups,\n $pattern: pattern\n }\n }\n)\n\nPattern.__copy__ = function(self){\n return self\n}\n\nPattern.__deepcopy__ = function(self){\n return self\n}\n\nPattern.__eq__ = function(self, other){\n if(other.$pattern && self.$pattern.type != other.$pattern.$type){\n // warn(_b_.BytesWarning, \"cannot compare str and bytes pattern\", 1)\n }\n return self.pattern == other.pattern &&\n self.flags.value == other.flags.value\n}\n\nPattern.__hash__ = function(self){\n // best effort ;-)\n return _b_.hash(self.pattern) + self.flags.value\n}\n\nPattern.__new__ = Pattern.$factory\n\nPattern.__reduce__ = function(self){\n return Pattern.__reduce_ex__(self, 4)\n}\n\nPattern.__reduce_ex__ = function(self, protocol){\n var res = _reconstructor,\n state = [self.__class__].concat(self.__class__.__mro__)\n var d = $B.empty_dict()\n _b_.dict.$setitem(d, 'pattern', self.pattern)\n _b_.dict.$setitem(d, 'flags', self.flags.value)\n state.push(d)\n return $B.fast_tuple([res, $B.fast_tuple(state)])\n}\n\nfunction _reconstructor(cls, base, state){\n var pattern = _b_.dict.$getitem(state, 'pattern'),\n flags = Flag.$factory(_b_.dict.$getitem(state, 'flags'))\n return module.compile(pattern, flags)\n}\n\nPattern.__repr__ = Pattern.__str__ = function(self){\n var text = self.$pattern.text,\n s = text\n if(self.$pattern.type == \"bytes\"){\n s = _b_.str.$factory(_b_.str.encode(s, 'latin-1'))\n }else{\n s = _b_.repr(s)\n }\n s = s.substr(0, 200)\n var res = `re.compile(${s}`,\n flags = self.$pattern.flags\n if(flags === no_flag){\n return res + ')'\n }\n // mask UNICODE flag\n if(flags.__class__ === Flag){\n // copy flag, otherwise U.value would become 0\n flags = Flag.$factory(flags.value)\n flags.value &= ~U.value\n }else if(typeof flags == \"number\"){\n flags &= ~U.value\n }\n if(flags != 0 && flags.value != 0){\n res += `, ${_b_.str.$factory(flags)}`\n }\n return res + ')'\n}\n\nPattern.findall = function(self){\n var iter = Pattern.finditer.apply(null, arguments).js_gen,\n res = []\n\n while(true){\n var next = iter.next()\n if(next.done){\n return res\n }\n var bmo = next.value,\n mo = bmo.mo,\n groups = MatchObject.groups(bmo)\n\n // replace None by the empty string\n for(var i = 0, len = groups.length; i < len; i++){\n groups[i] = groups[i] === _b_.None ? 
\"\" : groups[i]\n }\n if(groups.length > 0){\n if(groups.length == 1){\n res.push(groups[0])\n }else{\n res.push($B.fast_tuple(groups))\n }\n }else{\n res.push(mo.string.substring(mo.start, mo.end))\n }\n }\n}\n\nPattern.finditer = function(self){\n var $ = $B.args(\"finditer\", 4,\n {self: null, string: null, pos: null, endpos: null},\n 'self string pos endpos'.split(' '), arguments,\n {pos: 0, endpos: _b_.None}, null, null)\n var data = prepare({string: $.string})\n var endpos = $.endpos === _b_.None ? data.string.length : $.endpos\n return $B.generator.$factory(iterator)(self.$pattern, data.string,\n self.flags, $.string, $.pos, endpos)\n}\n\nPattern.fullmatch = function(self, string){\n var $ = $B.args(\"match\", 4,\n {self: null, string: null, pos: null, endpos: null},\n [\"self\", \"string\", \"pos\", \"endpos\"], arguments,\n {pos: 0, endpos: _b_.None}, null, null)\n if($.endpos === _b_.None){\n $.endpos = $.string.length\n }\n var data = prepare({string: $.string})\n if(self.$pattern.type != data.string.type){\n throw _b_.TypeError.$factory(\"not the same type for pattern \" +\n \"and string\")\n }\n var fullmatch_pattern = create_fullmatch_pattern($.self.$pattern)\n var mo = match(fullmatch_pattern, data.string, $.pos, $.endpos)\n if(mo && mo.end - mo.start == $.endpos - $.pos){\n return MatchObject.$factory(mo)\n }else{\n return _b_.None\n }\n}\n\nPattern.groupindex = {\n __get__: function(self){\n return GroupIndex.$factory(self)\n }\n}\n\nPattern.match = function(self, string){\n var $ = $B.args(\"match\", 4,\n {self: null, string: null, pos: null, endpos: null},\n [\"self\", \"string\", \"pos\", \"endpos\"], arguments,\n {pos: 0, endpos: _b_.None}, null, null)\n if($.endpos === _b_.None){\n $.endpos = $.string.length\n }\n var data = prepare({string: $.string})\n if(self.$pattern.type != data.string.type){\n throw _b_.TypeError.$factory(\"not the same type for pattern \" +\n \"and string\")\n }\n var mo = match($.self.$pattern, data.string, $.pos,\n $.endpos)\n return mo ? 
MatchObject.$factory(mo) : _b_.None\n}\n\nPattern.scanner = function(self, string, pos, endpos){\n return Scanner.$factory.apply(null, arguments) // self, string, pos, endpos)\n}\n\nPattern.search = function(self, string){\n var $ = $B.args(\"match\", 4,\n {self: null, string: null, pos: null, endpos: null},\n [\"self\", \"string\", \"pos\", \"endpos\"], arguments,\n {pos: 0, endpos: _b_.None}, null, null)\n var data = prepare({string: $.string})\n if(self.$pattern.type != data.string.type){\n throw _b_.TypeError.$factory(\"not the same type for pattern \" +\n \"and string\")\n }\n if($.endpos === _b_.None){\n $.endpos = data.string.length\n }\n var pos = $.pos\n while(pos <= $.endpos){\n var mo = match(self.$pattern, data.string, pos)\n if(mo){\n return MatchObject.$factory(mo)\n }else{\n pos++\n }\n }\n return _b_.None\n}\n\nPattern.split = function(){\n return module.split.apply(null, arguments)\n}\n\nPattern.sub = function(){\n var $ = $B.args(\"match\", 4,\n {self: null, repl: null, string: null, count: null},\n \"self repl string count\".split(' '), arguments,\n {count: 0}, null, null)\n var data = prepare({string: $.string})\n if($.self.$pattern.type != data.string.type){\n throw _b_.TypeError.$factory(\"not the same type for pattern \" +\n \"and string\")\n }\n\n return module.sub($.self, $.repl, $.string, $.count)\n}\n\n$B.set_func_names(Pattern, \"re\")\n\nfunction Node(parent){\n this.parent = parent\n this.items = []\n}\n\nNode.prototype.add = function(item){\n this.items.push(item)\n item.parent = this\n}\n\nNode.prototype.fixed_length = function(){\n // Return the sum of items lengths if fixed, else undefined\n if(this.repeat){\n return false\n }\n var len = 0\n for(var item of this.items){\n if(item.fixed_length === undefined){\n console.log(\"pas de fixed length\", item)\n alert()\n }\n var sublen = item.fixed_length()\n if(sublen === false){\n return false\n }\n len += sublen\n }\n return len\n}\n\nfunction get_top(node){\n var top = node.parent\n while(top.parent){\n top = top.parent\n }\n return top\n}\n\nvar BackReference = function(pos, type, value){\n // for \"\\number\"\n this.name = \"BackReference\"\n this.pos = pos\n this.type = type // \"name\" or \"num\"\n this.value = value\n this.groups = []\n}\n\nBackReference.prototype.fixed_length = function(){\n // Return length of referenced group if it is fixed, else undefined\n if(this.repeat){\n return undefined\n }\n var group = this.get_group()\n if(group.fixed_length === undefined){\n console.log(\"group\", group, \"no fixed length\")\n }\n return group === undefined ? 
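/*
  Pattern.search above advances the starting position one codepoint at a time
  until match() succeeds, mirroring CPython where match() is anchored at pos but
  search() is not. Assumed reference behaviour:

      >>> import re
      >>> p = re.compile(r'\d+')
      >>> p.match('abc123') is None        # anchored at position 0
      True
      >>> p.search('abc123').group()
      '123'
      >>> p.search('abc123', 4).group()    # optional pos argument
      '23'
*/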
false : group.fixed_length()\n}\n\nBackReference.prototype.get_group = function(){\n var top = get_top(this)\n return top.$groups[this.value]\n}\n\nBackReference.prototype.match = function(string, pos, endpos, groups){\n this.repeat = this.repeat || {min: 1, max: 1}\n\n var group = groups[this.value]\n if(group === undefined){\n if(this.repeat.min == 0){\n return {\n nb_min: 0,\n nb_max: 0\n }\n }\n return false\n }\n\n // Get the codepoints matched by the referenced group\n group_cps = string.codepoints.slice(group.start, group.end)\n\n // search (repetitions of) the matched group codepoints\n var _pos = pos,\n nb = 0,\n group_len = group_cps.length,\n flag,\n cp\n while(string.cp_at(_pos) !== undefined && nb < this.repeat.max){\n flag = true\n for(var i = 0; i < group_len; i++){\n cp = string.cp_at(_pos + i)\n if(cp != group_cps[i]){\n flag = false\n break\n }\n }\n if(flag){\n nb++\n _pos += group_len\n }else{\n break\n }\n }\n if(nb >= this.repeat.min){\n // Returns the accepted minimum and maximum number of repeats\n // and the length of each repeat\n return {\n nb_min: this.repeat.min,\n nb_max: nb,\n group_len\n }\n }\n return false\n}\n\nBackReference.prototype.toString = function(){\n return \"BackRef to group\" + this.value\n}\n\nvar Case = function(){\n this.name = \"Case\"\n this.items = []\n this.groups = []\n this.text = 'Case '\n}\n\nCase.prototype.add = function(item){\n this.items.push(item)\n item.parent = this\n}\n\nCase.prototype.fixed_length = function(){\n var len\n for(var item of this.items){\n var fl = item.fixed_length()\n if(fl === false){\n return false\n }else if(len === undefined){\n len = fl\n }else{\n len += fl\n }\n }\n return len\n}\n\nCase.prototype.toString = function(){\n var res = 'Case '\n res += this.items.map(x => x + '').join(' ')\n return this.text = res\n}\n\nvar Choice = function(){\n this.type = \"choice\"\n this.items = []\n this.groups = []\n}\n\nChoice.prototype.add = Node.prototype.add\n\nChoice.prototype.fixed_length = function(){\n var len\n for(var item of this.items){\n var fl = item.fixed_length()\n if(fl === false){\n return false\n }else if(len === undefined){\n len = fl\n }else if(len != fl){\n return false\n }\n }\n return len\n}\n\nChoice.prototype.toString = function(){\n return 'Choice'\n}\n\nvar EmptyString = {\n toString: function(){\n return ''\n },\n match: function(string, pos, endpos){\n return {nb_min: 0, nb_max: 0}\n },\n fixed_length: function(){\n return 1\n },\n length: 0\n },\n Flags = function(flags){\n this.flags = flags\n },\n GroupEnd = function(pos){\n this.name = \"GroupEnd\"\n this.pos = pos\n this.text = ')'\n this.toString = function(){\n return '[end of group #' + this.group.num + ']'\n }\n },\n Or = function(pos){\n this.name = \"Or\"\n this.pos = pos\n this.text = '|'\n this.toString = function(){\n return '|'\n }\n },\n Repeater = function(pos, op){\n this.name = \"Repeater\"\n this.pos = pos\n this.op = op\n }\n\nfunction cased_cps(cp, ignore_case, ascii){\n // If cp is the codepoint of a cased Unicode character, return the list\n // of the codepoints that match the character in a case-insensitive way\n\n // ignore_case = this.flags && this.flags.value & IGNORECASE.value\n // ascii = this.flags.value & ASCII.value\n var cps,\n char = $B.codepoint2jsstring(cp)\n if(! 
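/*
  BackReference.match re-reads the codepoints captured by the referenced group:
  a backreference matches the same text as the group, not the same pattern.
  Reference behaviour (standard `re`):

      >>> import re
      >>> re.match(r'(\w+) \1', 'hello hello').group()
      'hello hello'
      >>> re.match(r'(\w+) \1', 'hello world') is None
      True
*/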
ignore_case){\n return [cp]\n }\n if(ascii){\n // only test ASCII letters\n ignore_case = ignore_case && (\n (char >= 'a' && char <= 'z') ||\n (char >= 'A' && char <= 'Z'))\n }\n if(ignore_case){\n var char_up = char.toUpperCase(),\n char_low = char.toLowerCase(),\n cps = new Set([cp, $B.jsstring2codepoint(char_low),\n $B.jsstring2codepoint(char_up)])\n // special cases\n if(char.toLowerCase() == \"k\"){\n cps.add(0x212a) // Kelvin sign\n }\n if(cp == 0x212a){\n cps.add(ord('k'))\n cps.add(ord('K'))\n }\n if(char.toLowerCase() == \"s\"){\n cps.add(0x017f) // (Latin small letter long s)\n }\n if(cp == 0x017f){\n cps.add(ord('s'))\n cps.add(ord('S'))\n }\n if(char.toLowerCase() == 'i'){\n cps.add(0x0130) // (Latin capital letter I with dot above)\n cps.add(0x0131) // (Latin small letter dotless i)\n }\n if(cp == 0x0130 || cp == 0x0131){\n cps.add(ord('i'))\n cps.add(ord('I'))\n }\n return Array.from(cps)\n }else{\n cps = [cp]\n }\n return cps\n}\n\nvar Char = function(pos, cp, groups){\n // character in a regular expression or in a character set\n // pos : position of the character in the pattern string\n // cp : the character's codepoint\n // groups (optional) : the groups that contain the character\n this.pos = pos\n this.cp = cp\n this.char = chr(this.cp)\n this.text = this.char\n}\n\nChar.prototype.fixed_length = function(){\n if(this.repeat){\n return this.repeat.min\n }\n return this.char === EmptyString ? 0 : 1\n}\n\nChar.prototype.match = function(string, pos, endpos){\n // Returns {pos1, pos2} such that \"this\" matches all the substrings\n // string[pos:i] with pos1 <= i < pos2, or false if no match\n this.repeat = this.repeat || {min: 1, max: 1}\n\n var i = 0\n\n // browse string codepoints until they don't match, or the number of\n // matches is above the maximum allowed\n if(this.flags){\n if(this.flags.value & ASCII.value){\n if(this.cp > 127){\n return false\n }\n }\n if(this.flags.value & IGNORECASE.value &&\n (! this.is_bytes || this.cp <= 127)){\n // Flag IGNORECASE set\n // For bytes pattern, case insensitive matching only works\n // for ASCII characters\n var char_upper = this.char.toUpperCase(),\n char_lower = this.char.toLowerCase(),\n cp\n while(i < this.repeat.max && pos + i < endpos){\n cp = string.cp_at(pos + i)\n var char = chr(cp)\n if(char.toUpperCase() != char_upper &&\n char.toLowerCase() != char_lower){\n break\n }\n i++\n }\n }else{\n while(pos + i < endpos &&\n string.cp_at(pos + i) == this.cp &&\n i < this.repeat.max){\n i++\n }\n }\n }else{\n while(pos + i < endpos &&\n string.cp_at(pos + i) == this.cp &&\n i < this.repeat.max){\n i++\n }\n }\n var nb = i\n if(nb >= this.repeat.min){\n // Number of repeats ok\n return {\n nb_min: this.repeat.min,\n nb_max: nb\n }\n }else{\n return false\n }\n}\n\nChar.prototype.toString = function(){\n var res = 'Char ' + this.text\n if(this.repeat !== undefined){\n res += ' repeat {' + this.repeat.min + ',' + this.repeat.max + '}'\n if(this.non_greedy){\n res += '?'\n }\n }\n return res\n}\n\nfunction CharSeq(chars, flags){\n // sequence of consecutive characters\n this.chars = chars\n this.flags = flags\n this.merge_same_chars()\n}\n\nCharSeq.prototype.add_char = function(char){\n this.chars.push(char)\n this.merge_same_chars()\n}\n\nCharSeq.prototype.fixed_length = function(){\n var len = 0,\n cps = [],\n char_len\n for(var char of this.chars){\n if(! 
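/*
  The special cases above mirror CPython's extra case-insensitive equivalences,
  e.g. 'k' ~ U+212A (KELVIN SIGN) and 's' ~ U+017F (LONG S), while re.ASCII
  restricts IGNORECASE to ASCII letters. Assumed reference behaviour:

      >>> import re
      >>> re.match(r'k', '\u212a', re.IGNORECASE) is not None
      True
      >>> re.match(r's', '\u017f', re.IGNORECASE) is not None
      True
      >>> re.match(r'k', '\u212a', re.IGNORECASE | re.ASCII) is None
      True
*/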
char.repeat){\n char_len = 1\n }else if(char.repeat.min == char.repeat.max){\n char_len = char.repeat.min\n }else{\n len = false\n break\n }\n for(var i = 0; i < char_len; i++){\n cps.push(char.cp)\n }\n len += char_len\n }\n this.cps = cps\n return this.len = len\n}\n\nCharSeq.prototype.match = function(string, pos, endpos){\n var mos = [],\n i = 0,\n backtrack,\n nb\n this.len = this.len === undefined ? this.fixed_length() : this.len\n // optimization if character sequence has a fixed length\n if(this.len !== false && ! (this.flags.value & IGNORECASE.value)){\n for(var i = 0; i < this.len; i++){\n if(string.cp_at(pos + i) !== this.cps[i]){\n return false\n }\n }\n return {nb_min: this.len, nb_max: this.len}\n }\n for(var i = 0, len = this.chars.length; i < len; i++){\n var char = this.chars[i],\n mo = char.match(string, pos, endpos) // form {nb_min, nb_max}\n if(_debug.value){\n console.log('CharSeq match, pos', pos, 'char', char, 'mo', mo)\n alert()\n }\n if(mo){\n nb = char.non_greedy ? mo.nb_min : mo.nb_max\n mos.push({nb,\n nb_min: mo.nb_min,\n nb_max: mo.nb_max,\n non_greedy: !!char.non_greedy\n })\n pos += nb\n }else{\n // backtrack\n backtrack = false\n while(mos.length > 0){\n i--\n mo = mos.pop()\n pos -= mo.nb\n nb = mo.nb\n if(mo.non_greedy && nb < mo.nb_max){\n nb += 1\n backtrack = true\n }else if(! mo.non_greedy && nb - 1 >= mo.nb_min){\n nb -= 1\n backtrack = true\n }\n if(backtrack){\n pos += nb\n mo.nb = nb\n mos.push(mo)\n break\n }\n }\n if(mos.length == 0){\n return false\n }\n }\n }\n var nb = 0,\n last_mo = $B.last(mos)\n for(var mo of mos.slice(0, mos.length - 1)){\n nb += mo.nb\n }\n var res = {\n nb_min: nb + last_mo.nb_min,\n nb_max: nb + last_mo.nb_max\n }\n return res\n}\n\nCharSeq.prototype.merge_same_chars = function(){\n // b?b merged into b+ etc.\n var current,\n chars = [],\n merged\n for(var item of this.chars){\n if(current && current.char == item.char &&\n current.non_greedy === item.non_greedy){\n if(! current.repeat){\n current.repeat = {min: 1, max: 1}\n }\n if(item.repeat){\n current.repeat.min += item.repeat.min\n current.repeat.max += item.repeat.max\n }else{\n current.repeat.min += 1\n current.repeat.max += 1\n }\n merged = true\n }else{\n chars.push(item)\n }\n current = item\n }\n if(merged){\n this.chars = chars\n }\n}\n\nCharSeq.prototype.toString = function(){\n var res = ''\n for(var char of this.chars){\n res += char.text\n }\n return 'CharSeq ' + res\n}\n\nfunction CharacterClass(pos, cp, length, groups){\n this.cp = cp\n this.value = chr(cp)\n this.length = length\n this.pos = pos\n\n var flags = this.flags\n\n // Test function : test(string, pos) returns:\n // - true if \"this\" matches 1 character string[pos]\n // - [true, 0] if \"this\" matches the empty string at pos\n // - false or undefined if \"this\" doesn't match\n switch(this.value){\n case 'A':\n this.test_func = function(string, pos){\n if(pos == 0){\n return [true, 0]\n }\n }\n break\n case 's':\n this.test_func = function(string, pos){\n var cp = string.cp_at(pos)\n return $B.in_unicode_category('Zs', cp) ||\n $B.unicode_bidi_whitespace.indexOf(cp) > -1\n }\n break\n case 'S':\n this.test_func = function(string, pos){\n var cp = string.cp_at(pos)\n return cp !== undefined &&\n ! 
$B.in_unicode_category('Zs', cp) &&\n $B.unicode_bidi_whitespace.indexOf(cp) == -1\n }\n break\n case '.':\n this.test_func = function(string, pos){\n if(string.cp_at(pos) === undefined){\n return false\n }\n if(this.flags.value & DOTALL.value){\n return true\n }else{\n return string.cp_at(pos) != 10\n }\n }\n break\n case 'd':\n this.test_func = function(string, pos){\n if(this.flags === undefined){\n console.log(\"\\\\d, no flags\", this)\n }\n var cp = string.cp_at(pos),\n tester = (this.flags.value & ASCII.value) ?\n is_ascii_digit : is_digit\n return tester(cp)\n }\n break\n case 'D':\n this.test_func = function(string, pos){\n var cp = string.cp_at(pos),\n tester = (this.flags.value & ASCII.value) ?\n is_ascii_digit : is_digit\n return ! tester(cp)\n }\n break\n case 'b':\n this.test_func = function(string, pos){\n var tester = is_word\n if(this.is_bytes || (this.flags.value & ASCII.value)){\n tester = is_ascii_word\n }\n var cp = string.cp_at(pos),\n ok = {nb_min: 0, nb_max: 0}\n\n // return true if char at pos is at the beginning or start\n // of a word\n if(pos == 0 && tester(cp)){\n return ok\n }\n if(string.cp_at(pos) === undefined && tester(string.cp_at(pos - 1))){\n return ok\n }\n if(pos > 0 && string.cp_at(pos) !== undefined){\n if((tester(string.cp_at(pos - 1))) !==\n tester(cp)){\n return ok\n }\n }\n return false\n }\n break\n case 'B':\n this.test_func = function(string, pos){\n var tester = is_word\n if(this.is_bytes || (this.flags.value & ASCII.value)){\n tester = is_ascii_word\n }\n\n var cp = string.cp_at(pos),\n ok = {nb_min: 0, nb_max: 0}\n // test is true if char at pos is not at the beginning or\n // start of a word\n if(pos == 0 && cp === undefined){\n // empty string\n return false\n }\n if(pos == 0 && tester(cp)){\n return false\n }\n if(cp === undefined &&\n tester(string.cp_at(pos - 1))){\n return false\n }\n if(pos > 0 && cp !== undefined){\n if(tester(string.cp_at(pos - 1)) !== tester(cp)){\n return false\n }\n }\n return ok\n }\n break\n case 'w':\n this.test_func = function(string, pos){\n var tester = is_word\n if(this.is_bytes || (this.flags.value & ASCII.value)){\n tester = is_ascii_word\n }\n return tester(string.cp_at(pos))\n }\n break\n case 'W':\n this.test_func = function(string, pos){\n var tester = is_word\n if(this.is_bytes || (this.flags.value & ASCII.value)){\n tester = is_ascii_word\n }\n return ! tester(string.cp_at(pos))\n }\n break\n case 'Z':\n this.test_func = function(string, pos){\n if(string.cp_at(pos) === undefined){\n return {nb_min: 0, nb_max: 0}\n }\n }\n break\n }\n}\n\nCharacterClass.prototype.fixed_length = function(){\n return this.repeat ? false : 1\n}\n\nCharacterClass.prototype.match = function(string, pos, endpos){\n // Returns {pos1, pos2} such that \"this\" matches all the substrings\n // string[pos:i] with pos1 <= i < pos2, or false if no match\n if(pos === undefined){\n console.log('no pos')\n throw Error()\n }\n var len = string.length\n this.repeat = this.repeat || {min: 1, max: 1}\n\n // browse string codepoints until they don't match, or the number of\n // matches is above the maximum allowed\n var i = 0\n while(i < this.repeat.max && i < len){\n var test = this.test_func(string, pos + i, this.flags)\n if(! 
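/*
  Reference behaviour for the \b / \B tests implemented above: a word boundary is
  a transition between word and non-word characters, or a word character at either
  end of the string.

      >>> import re
      >>> re.search(r'\bcat\b', 'concatenate') is None
      True
      >>> re.search(r'\bcat\b', 'a cat sat').group()
      'cat'
      >>> re.findall(r'\B\w', 'ab cd')
      ['b', 'd']
*/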
test){\n break\n }\n i++\n }\n\n var nb = i\n if(nb >= this.repeat.min){\n // Number of repeats ok\n if('bBAZ'.indexOf(this.value) > -1 ){\n return {nb_min: 0, nb_max: 0}\n }\n return {\n nb_min: this.repeat.min,\n nb_max: nb\n }\n }else{\n return false\n }\n}\n\nCharacterClass.prototype.nb_repeats = Char.prototype.nb_repeats\n\nCharacterClass.prototype.toString = function(){\n return '\\\\' + this.value\n}\n\nvar CharacterSet = function(pos, set, groups){\n // character set\n this.pos = pos\n this.set = set\n this.neg = set.neg\n}\n\nCharacterSet.prototype.fixed_length = function(){\n return 1\n}\n\nCharacterSet.prototype.match = function(string, pos, endpos){\n var ignore_case = this.flags && (this.flags.value & IGNORECASE.value),\n test,\n match = false,\n i = 0,\n cp\n\n this.repeat = this.repeat || {min: 1, max: 1}\n\n while(i < this.repeat.max && (cp = string.cp_at(pos + i)) !== undefined){\n test = false\n\n if(string.cp_at(pos) === undefined){\n cp = EmptyString\n }\n try{\n $B.codepoint2jsstring(cp)\n }catch(err){\n console.log(err.message)\n console.log('cp', cp, '\\nstring', string, 'pos', pos)\n console.log($B.print_stack())\n throw _b_.Exception.$factory('bad codepoint')\n }\n var char = $B.codepoint2jsstring(cp),\n cps = cased_cps(cp, ignore_case, this.flags.value & ASCII.value),\n char_is_cased = cps.length > 1\n\n for(var cp1 of cps){\n for(var item of this.set.items){\n if(typeof item == 'string'){\n\n }\n if(Array.isArray(item.ord)){\n if(cp1 >= item.ord[0] &&\n cp1 <= item.ord[1]){\n test = true\n break\n }else if(ignore_case && char_is_cased){\n var start1 = chr(item.ord[0]).toUpperCase(),\n end1 = chr(item.ord[1]).toUpperCase(),\n char1 = char.toUpperCase()\n if(char1 >= start1 && char1 <= end1){\n test = true\n }\n var start1 = chr(item.ord[0]).toLowerCase(),\n end1 = chr(item.ord[1]).toLowerCase(),\n char1 = char.toLowerCase()\n if(char1 >= start1 && char1 <= end1){\n test = true\n }\n }\n }else if(item instanceof CharacterClass){\n test = !! item.match(string, pos + i, endpos) // boolean\n }else{\n if(item.ord == cp1){\n test = true\n break\n }\n item_str = typeof item == 'string' ? item : chr(item.ord)\n if(item_str == char){\n test = true\n break\n }\n if(ignore_case && char_is_cased &&\n (char.toUpperCase() == item_str.toUpperCase() ||\n char.toLowerCase() == item_str.toLowerCase())){\n test = true\n break\n }\n }\n }\n }\n if(this.neg){\n test = ! 
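/*
  CharacterSet.match handles [...] sets: literal members, ranges, embedded classes
  such as \d, negation with ^, and case folding under IGNORECASE. Reference
  behaviour (standard `re`):

      >>> import re
      >>> re.findall(r'[a-cx-z]', 'abcdxyz')
      ['a', 'b', 'c', 'x', 'y', 'z']
      >>> re.findall(r'[^0-9]', 'a1b2')
      ['a', 'b']
      >>> re.findall(r'[a-z]', 'ABC', re.IGNORECASE)
      ['A', 'B', 'C']
*/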
test\n }\n if(test){\n i++\n }else{\n break\n }\n }\n var nb = i\n if(nb >= this.repeat.min){\n // Number of repeats ok\n return {\n nb_min: this.repeat.min,\n nb_max: nb\n }\n }else{\n return false\n }\n\n}\n\nCharacterSet.prototype.nb_repeats = Char.prototype.nb_repeats\n\nCharacterSet.prototype.toString = function(){\n return 'CharSet'\n}\n\nvar ConditionalBackref = function(pos, group_ref){\n this.type = \"conditional backref\"\n this.pos = pos\n this.group_ref = group_ref\n this.chars = []\n this.match_codepoints = []\n this.nb_success = 0\n this.re_if_exists = new Group(pos)\n this.re_if_not_exists = new Group(pos)\n this.nb_options = 1\n}\n\nConditionalBackref.prototype.add = function(item){\n if(this.nb_options == 1){\n this.re_if_exists.add(item)\n }else if(this.nb_options == 2){\n this.re_if_not_exists.add(item)\n }\n item.parent = this\n}\n\nConditionalBackref.prototype.fixed_length = function(){\n var len = this.re_if_exists.fixed_length()\n if(len !== false && len == this.re_if_not_exists.fixed_length()){\n return len\n }\n return false\n}\n\nConditionalBackref.prototype.match = function(string, pos, endpos, groups){\n var re = groups[this.group_ref] ? this.re_if_exists :\n this.re_if_not_exists,\n pattern = {node: re, text: re + ''},\n mo = match(pattern, string, pos, endpos, false, groups)\n if(mo){\n return {nb_min: mo.end - mo.start, nb_max: mo.end - mo.start}\n }\n return false\n}\n\nConditionalBackref.prototype.toString = function(){\n return 'ConditionalBackref'\n}\n\nvar Group = function(pos, extension){\n this.type = \"group\"\n this.pos = pos\n this.items = []\n this.chars = []\n this.groups = []\n for(var key in extension){\n this[key] = extension[key]\n }\n if(extension && extension.type){\n if(extension.type.indexOf('lookahead') > -1){\n this.is_lookahead = true\n }else if(extension.type.indexOf('lookbehind') > -1){\n this.is_lookbehind = true\n }\n }\n}\n\nGroup.prototype.add = Node.prototype.add\n\nGroup.prototype.toString = function(){\n if(this.num === undefined){\n var res = 'Group ' + this.type + ' ' + this.pattern\n }else{\n var res = 'Group #' + this.num + ' ' + this.pattern\n }\n if(this.repeat !== undefined){\n res += ' repeat {' + this.repeat.min + ',' + this.repeat.max + '}'\n if(this.non_greedy){\n res += '?'\n }\n }\n return res\n}\n\nBackReference.prototype.nb_repeats = Group.prototype.nb_repeats\n\nGroup.prototype.fixed_length = Node.prototype.fixed_length\n\nfunction groups_in(pattern, group_list){\n if(group_list === undefined){\n group_list = new Set()\n }\n if(pattern instanceof Group && pattern.hasOwnProperty('num')){\n group_list.add(pattern.num)\n }\n if(pattern.items){\n for(var subpattern of pattern.items){\n for(var group of groups_in(subpattern, group_list)){\n group_list.add(group)\n }\n }\n }\n return group_list\n}\n\nfunction GroupRef(group_num, item){\n this.num = group_num\n this.item = item\n}\n\nGroupRef.prototype.fixed_length = function(){\n return this.item.fixed_length()\n}\n\nfunction Lookbehind(item){\n this.re = item\n this.neg = this.re.type == \"negative_lookbehind\"\n}\n\nLookbehind.prototype.match = function(string, pos, endpos, groups){\n var ok = {nb_min: 0, nb_max: 0},\n pattern = {node: this.re, text: this.re + ''},\n length = this.re.length,\n mo\n if(pos - length < 0){\n mo = false\n }else{\n mo = match(pattern, string, pos - length, endpos, false, groups)\n }\n if(mo){\n return this.neg ? false : ok\n }else{\n return this.neg ? 
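/*
  ConditionalBackref implements (?(id)yes|no): take the "yes" branch if group `id`
  participated in the match, otherwise the "no" branch. Assumed reference behaviour:

      >>> import re
      >>> bool(re.fullmatch(r'(a)?b(?(1)c|d)', 'abc'))
      True
      >>> bool(re.fullmatch(r'(a)?b(?(1)c|d)', 'bd'))
      True
      >>> bool(re.fullmatch(r'(a)?b(?(1)c|d)', 'bc'))
      False
*/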
ok : false\n }\n}\n\nLookbehind.prototype.fixed_length = function(){\n return this.re.fixed_length()\n}\n\nLookbehind.prototype.toString = function(){\n return \"Lookbehind\"\n}\n\nfunction SetFlags(pos, flags){\n this.pos = pos\n this.on_flags = flags.on_flags\n this.off_flags = flags.off_flags\n this.items = []\n}\n\nSetFlags.prototype.add = Node.prototype.add\n\nfunction StringStart(pos){\n this.pos = pos\n}\n\nStringStart.prototype.match = function(string, pos, endpos){\n var ok = {nb_min:0, nb_max: 0}\n if(this.flags.value & MULTILINE.value){\n return (pos == 0 || string.cp_at(pos - 1) == 10) ? ok : false\n }\n return pos == 0 ? ok : false\n}\n\nStringStart.prototype.fixed_length = function(){\n return 0\n}\n\nStringStart.prototype.toString = function(){\n return '^'\n}\n\nfunction StringEnd(pos){\n this.pos = pos\n}\n\nStringEnd.prototype.match = function(string, pos, endpos){\n var ok = {nb_min:0, nb_max: 0},\n cp = string.cp_at(pos)\n if(this.flags.value & MULTILINE.value){\n return (pos > string.codepoints.length - 1 ||\n cp == 10) ? ok : false\n }\n return pos > endpos - 1 ? ok :\n (pos == endpos - 1 && cp == 10) ? ok : false\n}\n\nStringEnd.prototype.fixed_length = function(){\n return 0\n}\n\nStringEnd.prototype.toString = function(){\n return '$'\n}\n\nvar cache = new Map()\n\nfunction compile(pattern, flags){\n if(pattern.__class__ === Pattern){\n if(flags !== no_flag){\n throw _b_.ValueError.$factory(\"no flags\")\n }\n return pattern\n }\n if(cache.has(pattern.py_obj)){\n if(cache.get(pattern.py_obj).has(flags.value || 0)){\n return cache.get(pattern.py_obj).get(flags.value || 0)\n }\n }\n var original_pattern = pattern,\n original_flags = flags,\n type = pattern.type,\n choices,\n allow_global_flags = true\n pattern = pattern.codepoints\n var is_bytes = type !== \"str\"\n if(is_bytes && flags && (flags.value & U.value)){\n throw _b_.ValueError.$factory(\"cannot use UNICODE flag with \" +\n \"a bytes pattern\")\n }\n if(flags && (flags.value & U.value) &&\n (flags.value & ASCII.value)){\n throw _b_.ValueError.$factory(\"ASCII and UNICODE flags \" +\n \"are incompatible\")\n }\n if(is_bytes){\n // bytes patterns ignore re.ASCII flag\n flags = Flag.$factory(flags.value || 0)\n //flags.value &= ~ASCII.value\n }\n var group_num = 0,\n group_stack = [],\n groups = {},\n pos,\n lookbehind,\n node = new Node(),\n accept_inline_flag = true,\n verbose = (flags.value || 0) & VERBOSE.value,\n comment = false,\n backrefs = {}\n node.$groups = groups\n for(var item of tokenize(pattern, type, verbose)){\n item.flags = flags\n item.is_bytes = is_bytes\n if(lookbehind){\n item.lookbehind = lookbehind\n lookbehind.parent = item\n lookbehind = false\n }\n if(allow_global_flags &&\n (group_stack.length > 0 || ! 
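/*
  Lookbehind.match rewinds by the (necessarily fixed) length of the lookbehind
  pattern and tries to match there; compile() rejects variable-width lookbehinds,
  as CPython does. Reference behaviour:

      >>> import re
      >>> re.search(r'(?<=USD )\d+', 'price: USD 120').group()
      '120'
      >>> re.compile(r'(?<=ab*)c')   # re.error: look-behind requires fixed-width pattern
*/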
(item instanceof SetFlags))){\n allow_global_flags = false\n }\n if(item instanceof Group){\n group_stack.push(item)\n node.add(item)\n item.state = \"open\"\n group_num++\n item.num = group_num\n node = item // next items will be stored as group's items\n pos = item.pos\n if(item.non_capturing){\n delete item.num\n group_num--\n }else if(item.type == \"name_def\"){\n var value = item.value\n if(groups[value.string] !== undefined){\n fail(`redefinition of group name` +\n ` '${value.string}' as group ${group_num}; was group` +\n ` ${groups[value.string].num}`, pos)\n }\n item.name = value.string\n groups[value.string] = groups[group_num] =\n new GroupRef(group_num, item)\n }else if(item.is_lookahead){\n // a lookahead assertion is relative to the previous regexp\n group_num--\n while(node.items.length > 0){\n item.add(node.items.shift())\n }\n node = item\n }else if(item.is_lookbehind){\n // a lookbehind assertion is relative to the next regexp\n node.parent.items.pop() // remove from node items\n // temporarily create a group\n groups[group_num] = new GroupRef(group_num, item)\n }else if(item.type == \"flags\"){\n // save flags before a group with inline flags, eg \"(?i:a)\"\n item.flags_before = Flag.$factory(flags.value | 0)\n }else{\n groups[group_num] = new GroupRef(group_num, item)\n }\n }else if(item instanceof GroupEnd){\n end_pos = item.pos\n if(group_stack.length == 0){\n fail(\"unbalanced parenthesis\", end_pos, original_pattern)\n }\n var item = group_stack.pop()\n item.end_pos = end_pos\n try{\n item.pattern = from_codepoint_list(\n pattern.slice(item.pos, end_pos + 1))\n }catch(err){\n console.log(\"err avec pattern substring\", pattern)\n throw err\n }\n if(item.is_lookbehind){\n delete groups[group_num]\n group_num--\n // check that all elements have a fixed length\n item.length = item.fixed_length()\n if(item.length === false){\n fail(\"look-behind requires fixed-width pattern\", pos)\n }\n item.parent.add(new Lookbehind(item))\n item.non_capturing = true\n // store in variable \"lookbehind\", will be applied to next item\n lookbehind = item\n }else if(item.is_lookahead){\n delete item.num\n }\n if(item instanceof Group && item.items.length == 0){\n item.add(EmptyString)\n }else if(item instanceof ConditionalBackref){\n if(groups[item.group_ref] === undefined){\n // might be defined later; store in backrefs and check\n // when all items have been processed\n backrefs[item.group_ref] = backrefs[item.group_ref] | pos + 3\n }\n if(item.re_if_exists.items.length == 0){\n item.re_if_exists.add(EmptyString)\n }else if(item.re_if_not_exists.items.length == 0){\n item.re_if_not_exists.pos = pos\n item.re_if_not_exists.add(EmptyString)\n }\n }else if(item.type == \"flags\"){\n // restore flags when entering the group\n flags = Flag.$factory(item.flags_before.value)\n }\n item.state = 'closed'\n node = item.parent\n }else if(item instanceof ConditionalBackref){\n var pos = item.pos,\n group_ref = item.group_ref\n if(typeof group_ref == \"number\"){\n if(group_ref == 0){\n fail(`bad group number`, pos + 3)\n }else if(group_ref >= MAXGROUPS){\n fail(`invalid group reference ${group_ref}`, pos + 1)\n }else if(groups[group_ref] &&\n groups[group_ref].item.state == \"open\"){\n fail(\"cannot refer to an open group\", pos)\n }\n }else if(groups[group_ref] !== undefined){\n if(groups[group_ref].item.state == \"open\"){\n fail(\"cannot refer to an open group\", pos)\n }\n }else{\n fail(`unknown group name '${group_ref}'`, pos)\n }\n group_stack.push(item)\n node.add(item)\n item.state = 
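/*
  The checks above reproduce CPython's compile-time errors for group handling;
  exact messages may vary slightly between versions. Sketch of the expected
  behaviour:

      >>> import re
      >>> re.fullmatch(r'(?P<word>\w+) (?P=word)', 'go go').group('word')
      'go'
      >>> re.compile(r'(?P<x>a)(?P<x>b)')
      # re.error: redefinition of group name 'x' as group 2; was group 1
      >>> re.compile(r'a)b')
      # re.error: unbalanced parenthesis
*/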
\"open\"\n node = item // next items will be stored as group's items\n }else if(item instanceof BackReference){\n pos = item.pos\n if(item.type == \"num\" && item.value > 99){\n var head = item.value.toString().substr(0, 2)\n fail(`invalid group reference ${head}`, pos + 1)\n }\n if(groups[item.value] !== undefined){\n if(groups[item.value].item.state == \"open\"){\n fail(\"cannot refer to an open group\", pos)\n }\n var ref_item = groups[item.value].item.parent\n while(ref_item){\n if(ref_item.is_lookbehind){\n fail(\"cannot refer to group defined in the same lookbehind subpattern\", pos)\n }\n ref_item = ref_item.parent\n }\n }else if(item.type == \"name\"){\n fail(`unknown group name '${item.value}'`, pos)\n }else if(item.type == \"num\"){\n fail(`invalid group reference ${item.value}`, pos)\n }\n node.add(item)\n }else if(item instanceof Char ||\n item instanceof CharacterClass ||\n item instanceof CharacterSet){\n if(item instanceof CharacterSet){\n for(var elt of item.set.items){\n elt.flags = flags\n }\n }\n var added_to_charseq = false\n if(item instanceof Char){\n if(node.items && node.items.length > 0){\n var previous = $last(node.items)\n if(previous instanceof CharSeq){\n previous.add_char(item)\n added_to_charseq = true\n }else if(previous instanceof Char && ! previous.repeater){\n node.items.pop()\n node.items.push(new CharSeq([previous, item], flags))\n added_to_charseq = true\n }\n }\n }\n if(! added_to_charseq){\n node.add(item)\n }\n }else if(item instanceof Repeater){\n // check that item is not in a lookbehind group\n var pnode = node\n while(pnode){\n if(pnode.extension && pnode.extension.type &&\n pnode.extension.type.indexOf(\"lookbehind\") > -1){\n fail(\"look-behind requires fixed-width pattern\", pos)\n }\n pnode = pnode.parent\n }\n pos = item.pos\n if(node.items.length == 0){\n fail(\"nothing to repeat\", pos)\n }\n previous = $last(node.items)\n if(previous instanceof Char ||\n previous instanceof CharSeq ||\n previous instanceof CharacterClass ||\n previous instanceof CharacterSet ||\n previous instanceof Group ||\n previous instanceof BackReference){\n if(previous instanceof GroupEnd){\n // associate repeat with Group\n previous = previous.group\n }else if(previous instanceof CharSeq){\n previous = $last(previous.chars)\n }\n if(previous.repeater){\n if(item.op == '?' && ! previous.non_greedy){\n if(previous.possessive){\n fail('multiple repeat', pos)\n }\n previous.non_greedy = true\n if(previous instanceof CharacterClass &&\n previous.value == '.'){\n previous.min_repeat_one = true\n }\n }else{\n if(item instanceof Repeater && item.op == '+'){\n if(previous.possessive || previous.non_greedy){\n fail('multiple repeat', pos)\n }\n previous.possessive = true\n }else{\n fail(\"multiple repeat\", pos)\n }\n }\n }else{\n // convert to minimum and maximum number of repeats\n var min = 1,\n max = 1\n if(Array.isArray(item.op)){\n min = item.op[0]\n if(min >= MAXREPEAT){\n throw _b_.OverflowError.$factory(\n \"the repetition number is too large\")\n }\n max = item.op[1] === undefined ? 
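/*
  The Repeater handling above converts ?, * , + and {m,n} into {min, max} pairs,
  supports non-greedy '?' suffixes, and rejects stacked quantifiers. Reference
  behaviour:

      >>> import re
      >>> re.match(r'a{2,4}', 'aaaaa').group()
      'aaaa'
      >>> re.match(r'a{2,4}?', 'aaaaa').group()
      'aa'
      >>> re.compile(r'a**')   # re.error: multiple repeat
*/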
min : item.op[1]\n if(isFinite(max) && max >= MAXREPEAT){\n throw _b_.OverflowError.$factory(\n \"the repetition number is too large\")\n }\n if(max < min){\n fail('min repeat greater than max repeat', pos)\n }\n }else if(item.op == \"?\"){\n min = 0\n max = 1\n }else if(item.op == \"*\"){\n min = 0\n max = Number.POSITIVE_INFINITY\n }else if(item.op == \"+\"){\n min = 1\n max = Number.POSITIVE_INFINITY\n }\n previous.repeater = item\n previous.repeat = {min, max}\n // mark all parents of item as no fixed length\n var parent = item\n while(parent){\n parent.fixed_length = false\n parent = parent.parent\n }\n }\n }else{\n fail(\"nothing to repeat\", pos)\n }\n }else if(item instanceof Or){\n if(group_stack.length > 0){\n item.group = group_stack[group_stack.length - 1]\n }else{\n item.group = false\n }\n pos = item.pos\n if(node instanceof ConditionalBackref){\n // case '(?(num)a|'\n if(node.nb_options == 1){\n node.nb_options++\n }else{\n fail('conditional backref with more than ' +\n 'two branches', pos)\n }\n }else if(node.items.length == 0){\n // token \"|\" in \"(|...)\" : first option is the empty string\n var choice = new Choice(),\n case1 = new Case()\n case1.add(new Char(pos, EmptyString))\n choice.add(case1)\n node.add(choice)\n var case2 = new Case()\n choice.add(case2)\n node = case2\n }else if(node instanceof Case){\n // node.parent is already a Choice\n var new_case = new Case()\n node.parent.add(new_case)\n node = new_case\n }else{\n // token \"|\" in \"(ab|...)\"\n var previous = node.items[node.items.length - 1]\n if(previous instanceof Case){\n var new_case = new Case()\n previous.add(new_case)\n node = new_case\n }else{\n var choice = new Choice(),\n case1 = new Case(),\n first_rank = node.items[0].rank\n while(node.items.length > 0){\n case1.add(node.items.shift())\n }\n case1.groups = node.$groups\n for(var group of group_stack){\n choice.groups.push(group)\n }\n choice.add(case1)\n node.add(choice)\n var case2 = new Case()\n choice.add(case2)\n node = case2\n }\n }\n }else if(item instanceof StringStart ||\n item instanceof StringEnd){\n node.add(item)\n }else if(item instanceof SetFlags){\n if(group_stack.length == 0 && ! allow_global_flags){\n // pattern like (?x) only allowed as first in reg exp\n fail('global flags not at the start of the ' +\n 'expression', item.pos)\n }\n // copy flags, otherwise re.ASCII etc might be modified\n flags = Flag.$factory(flags.value || U.value)\n if(item.on_flags.indexOf('u') > -1){\n if(is_bytes){\n fail(\"re.error: bad inline flags: cannot use 'u' flag \" +\n \"with a bytes pattern\", pos)\n }\n if(flags && flags.value & ASCII.value){\n // switch to Unicode\n flags.value ^= ASCII.value\n }\n if(group_stack.length == 0 &&\n original_flags && original_flags.value & ASCII.value){\n throw _b_.ValueError.$factory(\"ASCII and UNICODE flags \" +\n \"are incompatible\")\n }\n if(item.on_flags.indexOf('a') > -1){\n throw _b_.ValueError.$factory(\"ASCII and UNICODE flags \" +\n \"are incompatible\")\n }\n }\n if(item.on_flags.indexOf('a') > -1){\n if(group_stack.length == 0 &&\n original_flags && original_flags.value & U.value){\n throw _b_.ValueError.$factory(\"ASCII and UNICODE flags \" +\n \"are incompatible\")\n }\n if(flags && flags.value & U.value){\n // switch to ASCII\n flags.value ^= U.value\n }\n if(item.on_flags.indexOf('u') > -1){\n throw _b_.ValueError.$factory(\"ASCII and UNICODE flags \" +\n \"are incompatible\")\n }\n }\n if(flags.value === undefined){\n flags.value = 32\n }\n if(item.items.length == 0){\n if(! 
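/*
  SetFlags handles inline flags: scoped forms like (?i:...) apply only inside the
  group, while global forms like (?i) are only accepted at the start of the
  pattern (older CPython versions warn, recent ones raise re.error; both paths
  appear in this code). Assumed reference behaviour:

      >>> import re
      >>> re.findall(r'(?i)ab', 'AB ab')
      ['AB', 'ab']
      >>> re.findall(r'(?i:a)b', 'Ab AB')
      ['Ab']
      >>> re.compile(r'ab(?i)')
      # recent CPython: re.error: global flags not at the start of the expression
*/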
accept_inline_flag && group_stack.length == 0){\n var s = from_codepoint_list(pattern)\n warn(_b_.DeprecationWarning,\n `Flags not at the start of the expression '${s}'`,\n pos)\n }\n for(var on_flag of item.on_flags){\n if(! is_bytes || on_flag !== 'a'){\n flags.value |= inline_flags[on_flag].value\n }\n }\n for(var off_flag of item.off_flags){\n if(! is_bytes || off_flag !== 'a'){\n flags.value ^= inline_flags[off_flag].value\n }\n }\n }else{\n node.add(item)\n }\n }else{\n fail(\"unknown item type \" + item, pos)\n }\n if(! (item instanceof SetFlags) &&\n ! (item instanceof Group && item.type == \"flags\")){\n accept_inline_flag = false\n }\n }\n for(ref in backrefs){\n if(groups[ref] === undefined){\n fail('invalid group name ' + ref, backrefs[ref])\n }\n }\n if(group_stack.length > 0){\n var last = group_stack[group_stack.length - 1]\n fail(\"missing ), unterminated subpattern\", last.pos)\n }\n while(node.parent){\n node = node.parent\n }\n node.pattern = from_codepoint_list(pattern)\n node.groups = group_num\n flags = flags === no_flag ? 32 : flags\n node.flags = flags\n var res = {\n node,\n groups,\n flags,\n original_flags,\n text: from_codepoint_list(pattern),\n type, // \"str\" or \"bytes\"\n fixed_length: node.fixed_length()\n }\n if(! cache.has(original_pattern.py_obj)){\n cache.set(original_pattern.py_obj, new Map())\n }\n cache.get(original_pattern.py_obj).set(original_flags.value || 0, res)\n if(_debug.value){\n show(node)\n }\n return res\n}\n\nfunction show(node, indent){\n indent = indent === undefined ? 0 : indent\n if(indent == 0){\n log('root', node)\n }\n log(' '.repeat(indent) + node)\n if(node.items !== undefined){\n for(var item of node.items){\n show(item, indent + 1)\n }\n }\n}\n\nfunction to_codepoint_list(s){\n var items = []\n if(typeof s == \"string\" || $B.$isinstance(s, _b_.str)){\n if(typeof s != \"string\"){\n s = s.valueOf()\n }\n for(var char of s){\n items.push(char.codePointAt(0))\n }\n items.type = \"unicode\"\n }else if($B.$isinstance(s, [_b_.bytes, _b_.bytearray, _b_.memoryview])){\n if($B.$isinstance(s, _b_.memoryview)){\n items = s.obj.source\n }else{\n items = s.source\n }\n items.type = \"bytes\"\n }else{\n throw Error('invalid type ' + $B.class_name(s))\n }\n return items\n}\n\n$B.nb_from_cp = 0\nfunction from_codepoint_list(codepoints, type){\n $B.nb_from_cp++\n // Return a string\n if(type == \"bytes\"){\n return _b_.bytes.$factory(codepoints)\n }\n var s = ''\n for(var cp of codepoints){\n s += _b_.chr(cp)\n }\n return $B.String(s)\n}\n\nfunction string2bytes(s){\n var t = []\n for(var i = 0, len = s.length; i < len; i++){\n t.push(s.charCodeAt(i))\n }\n return _b_.bytes.$factory(t)\n}\n\nfunction check_pattern_flags(pattern, flags){\n if(pattern.__class__ === Pattern){\n if(flags !== no_flag){\n throw _b_.ValueError.$factory(\n \"cannot process flags argument with a compiled pattern\")\n }\n }\n return pattern\n}\n\nfunction StringObj(obj){\n // A StringObj object is a bridge between a Python string or bytes-like\n // object and Javascript\n // obj is the Python object\n // this.string is a Javascript string\n this.py_obj = obj\n this.codepoints = []\n this.type = \"str\"\n this.is_string = typeof obj == 'string'\n if(typeof obj == \"string\" ||\n (obj instanceof String && ! 
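/*
  check_pattern_flags mirrors CPython: passing a flags argument together with an
  already-compiled pattern is an error. Reference behaviour:

      >>> import re
      >>> p = re.compile(r'a')
      >>> re.search(p, 'xa', re.IGNORECASE)
      # ValueError: cannot process flags argument with a compiled pattern
*/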
obj.codepoints)){\n // Python object represented as a Javascript string\n this.string = obj\n // Maps a position in codepoints to position in string\n this.index_map = {}\n for(var i = 0, len = obj.length; i < len; i++){\n this.index_map[this.codepoints.length] = i\n var cp = obj.codePointAt(i)\n this.codepoints.push(cp)\n if(cp >= 0x10000){\n i++\n }\n }\n this.length = _b_.str.__len__(obj)\n if(obj instanceof String){\n // store for next use\n obj.codepoints = this.codepoints\n obj.index_map = this.index_map\n }\n }else if(obj instanceof String){\n // string with surrogate pairs\n this.string = obj.string\n this.codepoints = obj.codepoints\n this.index_map = obj.index_map\n this.length = _b_.str.__len__(obj)\n }else if($B.$isinstance(obj, _b_.str)){ // str subclass\n var so = new StringObj(_b_.str.$factory(obj))\n this.string = so.string\n this.codepoints = so.codepoints\n this.length = _b_.str.__len__(obj)\n }else if($B.$isinstance(obj, [_b_.bytes, _b_.bytearray])){\n this.string = _b_.bytes.decode(obj, 'latin1')\n this.codepoints = obj.source\n this.type = \"bytes\"\n }else if($B.$isinstance(obj, _b_.memoryview)){\n this.string = _b_.bytes.decode(obj.obj, 'latin1')\n this.codepoints = obj.obj.source\n this.type = \"bytes\"\n }else if(obj.__class__ && obj.__class__.$buffer_protocol){\n // eg array.array\n this.codepoints = _b_.list.$factory(obj)\n this.string = from_codepoint_list(this.codepoints, \"bytes\")\n this.type = \"bytes\"\n }else if(Array.isArray(obj)){\n // list of codepoints\n this.codepoints = obj\n }else{\n throw _b_.TypeError.$factory(\n `expected string or bytes-like object, got '${$B.class_name(obj)}'`)\n }\n if(this.length === undefined){\n this.length = this.codepoints.length\n }\n}\n\nStringObj.prototype.cp_at = function(pos){\n if(pos >= this.length){\n return undefined\n }\n /*\n if(typeof this.string == 'string'){\n return this.string.charCodeAt(pos)\n }\n */\n var res = this.codepoints[pos]\n if(res !== undefined){\n return res\n }\n}\n\nStringObj.prototype.substring = function(start, end){\n // Returns a string\n var s\n if(this.string && this.index_map){\n if(this.index_map[start] === undefined){\n return ''\n }\n if(end === undefined){\n return this.string.substr(this.index_map[start])\n }\n return this.string.substring(this.index_map[start],\n this.index_map[end])\n }\n var codepoints,\n res = ''\n if(end === undefined){\n codepoints = this.codepoints.slice(start)\n }else{\n codepoints = this.codepoints.slice(start, end)\n }\n return from_codepoint_list(codepoints, this.type)\n}\n\nStringObj.prototype.to_str = function(){\n if(this.hasOwnProperty('string')){\n return this.string\n }\n return from_codepoint_list(this.codepoints, this.type)\n}\n\nStringObj.from_codepoints = function(cps){\n var res = new StringObj('')\n res.codepoints = cps\n for(var cp of cps){\n res.string += _b_.chr(cp)\n }\n return res\n}\n\nfunction prepare(args){\n // Check that all arguments are of the same type (string or bytes-like).\n // Return an object with all attributes transformed into StringObj\n // instances\n var res = {},\n keys = Object.keys(args),\n first = keys[0]\n res[first] = new StringObj(args[first])\n res.type = res[first].type\n for(var key of keys.slice(1)){\n res[key] = new StringObj(args[key])\n if(res[key].type != res.type){\n throw _b_.TypeError.$factory(`not the same type for ${first} and ${key}`)\n }\n }\n return res\n}\n\n\nfunction subn(pattern, repl, string, count, flags){\n // string is a StringObj instance\n // pattern is either a Pattern instance or a 
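/*
  StringObj keeps an index_map from codepoint positions to JavaScript string
  positions because non-BMP characters occupy two UTF-16 units in JS but count as
  a single position in Python. Expected Python-level behaviour:

      >>> import re
      >>> s = 'a\U0001D11Eb'            # MUSICAL SYMBOL G CLEF, non-BMP
      >>> len(s)
      3
      >>> m = re.search(r'.b', s)
      >>> m.start(), m.end()
      (1, 3)
*/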
StringObj instance\n var res = '',\n pos = 0,\n nb_sub = 0\n\n if(pattern instanceof StringObj){\n pattern = compile(pattern, flags)\n }\n if(typeof repl != \"function\"){\n var data1 = transform_repl({repl}, pattern)\n repl1 = data1.repl1\n }\n pos = 0\n var s = string.to_str()\n for(var bmo of module.finditer(Pattern.$factory(pattern), s).js_gen){\n // finditer yields instances of MatchObject\n var mo = bmo.mo // instance of MO\n res += from_codepoint_list(string.codepoints.slice(pos, mo.start))\n if(typeof repl == \"function\"){\n var x = $B.$call(repl)(bmo)\n if(x.__class__ === _b_.bytes){\n x = _b_.bytes.decode(x, 'latin-1')\n }\n res += x // $B.$call(repl)(bmo)\n }else{\n res += repl1\n }\n nb_sub++\n pos = mo.end\n if(count != 0 && nb_sub >= count){\n break\n }\n }\n if(string.is_string){\n res += string.string.substr(pos)\n }else{\n res += from_codepoint_list(string.codepoints.slice(pos))\n }\n if(pattern.type === \"bytes\"){\n res = _b_.str.encode(res, \"latin-1\")\n }\n return [res, nb_sub]\n}\n\n// escaped chars : '\\t\\n\\x0b\\x0c\\r #$&()*+-.?[\\\\]^{|}~'\nvar escaped = [9, 10, 11, 12, 13, 32, 35, 36, 38, 40, 41, 42, 43, 45, 46, 63,\n 91, 92, 93, 94, 123, 124, 125, 126]\n\nfunction starts_with_string_start(pattern){\n // returns true if the pattern starts with ^ or \\A\n if(pattern.node){\n pattern = pattern.node\n }\n if(pattern.items){\n if(pattern.items.length == 0){\n return false\n }\n return starts_with_string_start(pattern.items[0])\n }else if(pattern instanceof CharacterClass){\n return pattern.value == 'A'\n }else if(pattern instanceof StringStart){\n return true\n }else{\n return false\n }\n}\n\nfunction* iterator(pattern, string, flags, original_string, pos, endpos){\n var result = [],\n pos = pos | 0,\n cp,\n accept_one = true // used to test one position after string end\n while((cp = string.cp_at(pos)) !== undefined || accept_one){\n var mo = match(pattern, string, pos, endpos)\n if(mo){\n yield MatchObject.$factory(mo)\n if(mo.end == mo.start){\n // If match has zero with, retry at the same position but\n // with the flag no_zero_width set, to avoid infinite loops\n mo = match(pattern, string, pos, endpos, true)\n if(mo){\n yield MatchObject.$factory(mo)\n pos = mo.end\n }else{\n pos++ // at least 1, else infinite loop\n }\n }else{\n pos = mo.end\n }\n }else{\n pos++\n }\n if(cp === undefined){\n accept_one = false\n }\n if (starts_with_string_start(pattern) && !(flags.value & MULTILINE.value)) {\n break\n }\n }\n delete original_string.in_iteration\n}\n\n\nfunction MO(node, pos, mo, len){\n // Match Object\n this.node = node\n this.start = pos\n this.mo = mo\n this.nb_min = mo.nb_min\n this.nb_max = mo.nb_max\n this.len = len\n this.nb = this.node.non_greedy ? mo.nb_min : mo.nb_max\n this.end = pos + len * this.nb\n}\n\nMO.prototype.backtrack = function(string, groups){\n if(this.node.possessive){\n return false\n }\n if(this.node.non_greedy && this.nb < this.nb_max){\n this.nb++\n this.end = this.start + this.len * this.nb\n return true\n }else if((! 
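/*
  The iterator above retries zero-width matches with no_zero_width set to avoid
  infinite loops, which reproduces CPython's (3.7+) handling of empty matches:

      >>> import re
      >>> re.findall(r'x*', 'abc')
      ['', '', '', '']
      >>> [m.span() for m in re.finditer(r'x*', 'abc')]
      [(0, 0), (1, 1), (2, 2), (3, 3)]
      >>> re.sub(r'x*', '-', 'abc')
      '-a-b-c-'
*/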
this.node.non_greedy) && this.nb > this.nb_min){\n this.nb--\n this.end = this.start + this.len * this.nb\n return true\n }else{\n return false\n }\n}\n\nfunction del_groups(groups, node){\n if(node.num !== undefined){\n delete groups[node.num]\n groups.$last.splice(groups.$last.indexOf(node.num), 1)\n if(node.name !== undefined){\n delete groups[node.name]\n }\n }\n for(var child of node.items){\n if(child instanceof Group){\n del_groups(groups, child)\n }\n }\n}\n\nfunction GroupMO(node, start, matches, string, groups, endpos){\n // Match Object for Groups\n this.node = node\n this.start = start\n this.matches = matches\n this.string = string\n this.end = matches.length > 0 ? $last(matches).end : start\n this.endpos = endpos === undefined ? this.end : endpos\n this.$groups = groups\n}\n\nGroupMO.prototype.backtrack = function(string, groups){\n if(_debug.value){\n console.log('group MO backtrack, this', this)\n alert()\n }\n // Try backtracking in the last match\n if(this.node.possessive || this.node.atomic){\n return false\n }\n if(this.matches.length > 0){\n var _match = $last(this.matches),\n mos = _match.mos,\n nb0 = mos.length\n while(mos.length > 0){\n var mo = mos.pop()\n if(mo.node instanceof Case){\n var rank = mo.node.parent.items.indexOf(mo.node)\n for(var _case of mo.node.parent.items.slice(rank + 1)){\n var _mo = match({node: _case, text: _case.text},\n string, mo.start)\n if(_mo){\n // update GroupMO object\n mos.push(_mo)\n this.end = _mo.end\n if(this.$groups.$last.length > 0){\n var ix = this.$groups.$last[this.$groups.$last.length - 1]\n this.$groups[ix].end = _mo.end\n }\n return true\n }\n }\n }\n if(mo.backtrack(string, groups)){\n mos.push(mo)\n if(this.node.num !== undefined){\n groups[this.node.num].end = mo.end\n }\n this.end = mo.end\n return true\n }\n }\n }\n // Else, remove last match if possible\n if(this.matches.length > this.node.repeat.min &&\n this.matches.length >= 1){\n this.matches.pop()\n if(this.matches.length > 0){\n this.end = $last(this.matches).end\n }else{\n // remove this group and its children from groups\n del_groups(groups, this.node)\n this.end = this.start\n }\n return true\n }\n // Group fails; if some of its subgroups succeded, remove them from\n // groups\n if(this.node.repeat.min > 0){\n del_groups(groups, this.node)\n }\n return false\n}\n\nGroupMO.prototype.toString = function(){\n var repr = _b_.repr(this.string.substring(this.start, this.end))\n repr = repr.substring(0, 50)\n return ''\n}\n\nGroupMO.prototype.groups = function(_default){\n var res = [],\n groupobj = this.$groups\n\n for(var key in this.node.$groups){\n if(isFinite(key)){\n res[key] = groupobj[key] === undefined ? 
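/*
  MO.backtrack gives back one repetition for greedy quantifiers (or consumes one
  more for non-greedy ones) so that the rest of the pattern can still match.
  Reference behaviour:

      >>> import re
      >>> re.match(r'(a+)(a+)', 'aaaa').groups()
      ('aaa', 'a')
      >>> re.match(r'(a+?)(a+)', 'aaaa').groups()
      ('a', 'aaa')
*/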
_default :\n this.string.substring(groupobj[key].start, groupobj[key].end)\n }\n }\n res.shift()\n return $B.fast_tuple(res)\n}\n\n// Brython MatchObject\nvar MatchObject = $B.make_class(\"Match\",\n function(mo){\n return {\n __class__: MatchObject,\n mo\n }\n }\n)\n\nMatchObject.__copy__ = function(self){\n return self\n}\n\nMatchObject.__deepcopy__ = function(self){\n return self\n}\n\nMatchObject.__getitem__ = function(){\n var $ = $B.args(\"__getitem__\", 2, {self: null, key: null},\n ['self', 'key'], arguments, {}, null, null),\n self = $.self,\n key = $.key\n if(Array.isArray(key)){\n throw _b_.IndexError.$factory(\"no such group\")\n }\n if(key == 0){\n return self.mo.string.substring(self.mo.start, self.mo.end)\n }\n var match = self.mo.$groups[key]\n if(match !== undefined){\n return self.mo.string.substring(match.start, match.end)\n }else if(self.mo.node.$groups[key] !== undefined){\n return _b_.None\n }\n throw _b_.IndexError.$factory(\"no such group\")\n}\n\nMatchObject.__repr__ = MatchObject.__str__ = function(self){\n return self.mo.toString()\n}\n\nMatchObject.end = function(self){\n var $ = $B.args('end', 2, {self: null, group: null}, ['self', 'group'],\n arguments, {group: 0}, null, null)\n var group = MatchObject.group(self, $.group)\n if(group === _b_.None){\n return -1\n }else if($.group == 0){\n return self.mo.end\n }else{\n return self.mo.$groups[$.group].end\n }\n}\n\nMatchObject.endpos = _b_.property.$factory(\n function(self){\n return self.mo.endpos\n }\n)\n\nMatchObject.expand = function(){\n var $ = $B.args(\"expand\", 2, {self: null, template: null},\n ['self', 'template'], arguments, {}, null, null)\n var data = {\n repl: new StringObj($.template),\n }\n data = transform_repl(data, {groups: $.self.mo.node.$groups})\n if(typeof data.repl == \"function\"){\n return $B.$call(data.repl)(MatchObject.$factory($.self.mo))\n }else{\n return data.repl1\n }\n}\n\nMatchObject.group = function(self){\n var $ = $B.args(\"group\", 1, {self: null}, ['self'], arguments,\n {}, 'args', null),\n self = $.self,\n args = $.args\n if(args.length == 0){\n args[0] = 0\n }\n var groupobj = self.mo.$groups,\n result = []\n for(var group_id of args){\n if($B.rich_comp('__eq__', group_id, 0)){\n result.push(self.mo.string.substring(self.mo.start, self.mo.end))\n continue\n }\n try{\n // Convert group_id to int if possible\n group_id = $B.PyNumber_Index(group_id) // in py_utils.js\n }catch(err){\n // group_id can be an identifier\n }\n if(self.mo.node.$groups[group_id] === undefined){\n throw _b_.IndexError.$factory(\"no such group\")\n }\n var group = groupobj[group_id] // found in match\n result.push(group === undefined ?\n _b_.None :\n self.mo.string.substring(group.start, group.end))\n }\n if(args.length == 1){\n return result[0]\n }\n return $B.fast_tuple(result)\n}\n\nMatchObject.groupdict = function(){\n /*\n Return a dictionary containing all the named subgroups of the match, keyed\n by the subgroup name. The default argument is used for groups that did not\n participate in the match; it defaults to None.\n */\n var $ = $B.args(\"groupdict\", 2, {self: null, default: null},\n ['self', 'default'], arguments, {default: _b_.None},\n null, null),\n self = $.self,\n groupobj = $.self.mo.$groups,\n d = $B.empty_dict()\n for(var key in $.self.mo.node.$groups){\n if(! isFinite(key)){\n var value = groupobj[key] === undefined ? 
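/*
  Reference behaviour for the Match methods implemented here: indexing by number
  or name, group() with several arguments, and expand() with numeric and named
  references:

      >>> import re
      >>> m = re.match(r'(?P<word>\w+) (\d+)', 'abc 42')
      >>> m[0], m['word'], m.group(2)
      ('abc 42', 'abc', '42')
      >>> m.group(1, 2)
      ('abc', '42')
      >>> m.expand(r'\2-\g<word>')
      '42-abc'
*/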
$.default :\n groupobj[key]\n if(value !== $.default){\n value = self.mo.string.substring(value.start, value.end)\n }\n _b_.dict.$setitem(d, key, value)\n }\n }\n return d\n}\n\nMatchObject.groups = function(self){\n var $ = $B.args(\"group\", 2, {self: null, default: null},\n ['self', 'default'], arguments,\n {default: _b_.None}, null, null),\n self = $.self,\n _default = $.default\n return self.mo.groups(_default)\n}\n\nMatchObject.lastindex = _b_.property.$factory(\n function(self){\n /* The integer index of the last matched capturing group, or None if\n no group was matched at all.\n */\n var last = self.mo.$groups.$last\n if(last.length == 0){\n return _b_.None\n }\n return parseInt($last(last))\n }\n)\n\nMatchObject.lastgroup = _b_.property.$factory(\n function(self){\n /* The name of the last matched capturing group, or None if the group\n didn't have a name, or if no group was matched at all.\n */\n var lastindex = MatchObject.lastindex.fget(self)\n if(lastindex === _b_.None){\n return _b_.None\n }\n var group = self.mo.node.$groups[lastindex],\n name = group.item.name\n return name === undefined ? _b_.None : name\n }\n)\n\nMatchObject.pos = _b_.property.$factory(\n function(self){\n return self.mo.start\n }\n)\n\nMatchObject.re = _b_.property.$factory(\n function(self){\n return self.mo.node.pattern\n }\n)\n\nMatchObject.regs = _b_.property.$factory(\n function(self){\n var res = [$B.fast_tuple($B.fast_tuple([self.mo.start, self.mo.end]))]\n for(var group_num in self.mo.node.$groups){\n if(isFinite(group_num)){\n var group = self.mo.node.$groups[group_num].item\n // group.pattern includes the opening and closing brackets\n res.push($B.fast_tuple([group.pos,\n group.pos + group.pattern.length - 2]))\n }\n }\n return $B.fast_tuple(res)\n }\n)\n\nMatchObject.span = function(){\n /*\n Match.span([group])\n\n For a match m, return the 2-tuple (m.start(group), m.end(group)). Note\n that if group did not contribute to the match, this is (-1, -1). 
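/*
  Reference behaviour for groupdict(), lastindex and lastgroup (the last two refer
  to the last *matched* capturing group; lastgroup is None when that group has no
  name):

      >>> import re
      >>> m = re.match(r'(a)(?P<mid>b)?(c)?', 'ac')
      >>> m.groupdict()
      {'mid': None}
      >>> m.groupdict('')
      {'mid': ''}
      >>> m.lastindex, m.lastgroup
      (3, None)
*/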
group\n defaults to zero, the entire match.\n */\n var $ = $B.args(\"span\", 2, {self: null, group: null},\n ['self', 'group'], arguments,\n {group: 0}, null, null),\n self = $.self,\n group = $.group\n if(group == 0){\n return $B.fast_tuple([self.mo.start, self.mo.end])\n }\n var span = self.mo.$groups[group]\n if(span === undefined){\n return $B.fast_tuple([-1, -1])\n }\n return $B.fast_tuple([span.start, span.end])\n}\n\nMatchObject.start = function(self){\n var $ = $B.args('end', 2, {self: null, group: null}, ['self', 'group'],\n arguments, {group: 0}, null, null)\n var group = MatchObject.group(self, $.group)\n if(group === _b_.None){\n return -1\n }else if($.group == 0){\n return self.mo.start\n }else{\n return self.mo.$groups[$.group].start\n }\n}\n\nMatchObject.string = _b_.property.$factory(\n function(self){\n return self.mo.string.to_str()\n }\n)\n\n$B.set_func_names(MatchObject, 're')\n\nfunction log(){\n if(_debug.value){\n console.log.apply(null, arguments)\n }\n}\n\nfunction create_fullmatch_pattern(pattern){\n // transform into \"(?:)$\"\n // use a new pattern object, otherwise if pattern is in cache the\n // value in cache would be changed\n var new_pattern = {}\n for(var key in pattern){\n if(key == 'node'){\n continue\n }\n new_pattern[key] = pattern[key]\n }\n\n var ncgroup = new Group() // non-capturing group\n ncgroup.pos = 0\n ncgroup.non_capturing = true\n for(var item of pattern.node.items){\n ncgroup.add(item)\n }\n var se = new StringEnd()\n se.flags = Flag.$factory(32)\n new_pattern.node = new Node()\n new_pattern.node.add(ncgroup)\n new_pattern.node.add(se)\n return new_pattern\n}\n\nfunction match(pattern, string, pos, endpos, no_zero_width, groups){\n // Follow the pattern tree structure\n if(_debug.value){\n console.log('match pattern', pattern.text, 'pos', pos, string.substring(pos))\n alert()\n }\n if(endpos !== undefined){\n if(endpos < pos){\n return false\n }\n }else{\n endpos = string.length\n }\n if(pattern.node instanceof Node){\n show(pattern.node)\n }\n if(groups === undefined){\n groups = {$last:[]}\n }\n if(pattern.text === undefined){\n console.log('no text', pattern)\n }\n var node = pattern.node,\n mo\n if(node.items){\n // node is either a Choice between several items, or a sequence of\n // items\n if(node instanceof Choice){\n mo = false\n for(var _case of node.items){\n mo = match({node: _case, text: _case.text}, string, pos,\n endpos, no_zero_width, groups)\n if(mo){\n // remove groups inside choice and before successful case\n // that did not contribute to the match\n var groups_succeed = groups_in(_case),\n min_num = Math.min(Array.from(groups_succeed))\n for(var group_num of groups_in(node)){\n if(group_num < min_num){\n delete groups[group_num]\n }\n }\n if(_debug.value){\n console.log('case', _case + '', 'of choice', node +\n ' succeeds, groups', groups)\n }\n return mo\n }else{\n if(_debug.value){\n console.log('case', _case + '', 'of choice', node +\n ' fails')\n }\n }\n }\n return false\n }else{\n // sequence of items\n node.repeat = node.repeat === undefined ? {min: 1, max: 1} :\n node.repeat\n var start = pos,\n nb_repeat = 0,\n nb_zerolength_repeat = 0,\n matches = [],\n mos,\n match_start,\n empty_matches = {}\n // loop until we get enough repetitions\n while(true){\n if(empty_matches[pos]){\n // no use trying again\n return matches.length == 0 ? 
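/*
  span(), start() and end() report (-1, -1) / -1 for a group that exists in the
  pattern but did not participate in the match, as in CPython:

      >>> import re
      >>> m = re.match(r'(a)?b', 'b')
      >>> m.group(1) is None
      True
      >>> m.span(1)
      (-1, -1)
*/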
false :\n new GroupMO(node, start, matches, string, groups,\n endpos)\n }\n var initial_groups = Object.keys(groups)\n mos = []\n match_start = pos\n if(_debug.value){\n console.log(\"pattern\", pattern.text,\n \"loop in group match, match start\", match_start)\n }\n var i = 0\n while(i < node.items.length){\n var item = node.items[i]\n if(_debug.value){\n console.log('item', i, '/', node.items.length - 1,\n 'of pattern', pattern.text)\n }\n var mo = match({node: item, text: item + ''}, string, pos,\n endpos, no_zero_width, groups)\n if(mo){\n if(item instanceof Group &&\n item.type == \"lookahead_assertion\"){\n log(\"lookahead assertion\", item + '',\n \"succeeds, mo\", mo)\n }else{\n mos.push(mo)\n pos = mo.end\n }\n i++\n }else if(false && item instanceof Group &&\n item.type == \"negative_lookahead_assertion\"){\n log(\"negative lookahead assertion\", item, \"fails : ok !\")\n i++\n }else{\n if(_debug.value){\n console.log('item ' + item, 'of group fails, nb_repeat',\n nb_repeat, 'node repeat', node.repeat)\n }\n var backtrack = false\n while(mos.length > 0){\n var mo = mos.pop()\n if(mo.backtrack === undefined){\n log('no backtrack for', mo)\n }\n if(_debug.value){\n console.log('try backtrack on mo', mo)\n }\n if(mo.backtrack(string, groups)){\n log('can backtrack, mo', mo)\n mos.push(mo)\n i = mos.length\n log('mos', mos, 'restart at item', i)\n pos = mo.end\n backtrack = true\n break\n }\n }\n if(backtrack){\n log('backtrack ok')\n continue\n }else{\n if(node.type == \"negative_lookahead_assertion\"){\n // If a negative lookahead assertion fails,\n // return a match\n var res = new GroupMO(node, start, matches,\n string, groups, endpos)\n return res\n }\n if(nb_repeat == 0){\n // remove the groups introduced before\n // reaching this point\n for(var key in groups){\n if(initial_groups.indexOf(key) == -1){\n delete groups[key]\n }\n }\n }\n if(nb_repeat >= node.repeat.min){\n log(\"enough repetitions for node\", node)\n if(node.type == \"negative_lookahead_assertion\"){\n return false\n }\n return new GroupMO(node, start, matches, string,\n groups, endpos)\n }\n return false\n }\n }\n }\n if(node.type == \"negative_lookahead_assertion\"){\n // If a negative lookahead succeeds, return false\n return false\n }\n nb_repeat++\n if(pos > match_start){\n nb_zerolength_repeat = 0\n }else{\n nb_zerolength_repeat++\n empty_matches[pos] = true\n }\n matches.push({start: match_start, end: pos, mos})\n if(node.num !== undefined){\n groups[node.num] = $last(matches)\n if(node.name !== undefined){\n groups[node.name] = groups[node.num]\n }\n if(node.num != $last(groups.$last)){\n var ix = groups.$last.indexOf(node.num)\n if(ix > -1){\n groups.$last.splice(ix, 1)\n }\n groups.$last.push(node.num)\n }\n }\n if(nb_repeat >= node.repeat.max){\n var res = new GroupMO(node, start, matches, string,\n groups, endpos)\n if(res.start == res.end && no_zero_width){\n // no_zero_width is set when previous match in\n // iterator() had length 0; avoids infinite loops\n return false\n }\n return res\n }\n log('loop on group', pattern.text, 'nb repeats', nb_repeat,\n 'nb zero length', nb_zerolength_repeat, 'groups', groups)\n if(nb_zerolength_repeat == 65535){\n return matches.length == 0 ? 
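/*
  The branch above makes a failed body inside (?!...) count as a success for the
  enclosing negative lookahead, matching standard `re` semantics:

      >>> import re
      >>> re.search(r'foo(?!bar)', 'foobaz').group()
      'foo'
      >>> re.search(r'foo(?!bar)', 'foobar') is None
      True
*/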
false :\n new GroupMO(node, start, matches, string, groups,\n endpos)\n }\n }\n }\n }else{\n // for BackReference, Char, CharSeq, CharacterClass, CharacterSet,\n // ConditionalBackref, Lookbehind, StringStart, StringEnd\n var mo = node.match(string, pos, endpos, groups)\n if(_debug.value){\n console.log(node + '', \"mo\", mo)\n }\n if(mo){\n var len = mo.group_len === undefined ? 1 : mo.group_len,\n ix = node.non_greedy ? mo.nb_min : mo.nb_max,\n end = pos + len * ix\n return new MO(node, pos, mo, len)\n }else{\n return false\n }\n }\n}\n\n// expose re module API\nvar module = {\n cache: cache,\n compile: function(){\n var $ = $B.args(\"compile\", 2, {pattern: null, flags: null},\n ['pattern', 'flags'], arguments, {flags: no_flag},\n null, null)\n if($.pattern && $.pattern.__class__ === Pattern){\n if($.flags !== no_flag){\n throw _b_.ValueError.$factory(\n \"cannot process flags argument with a compiled pattern\")\n }\n return $.pattern\n }\n $.pattern = check_pattern_flags($.pattern, $.flags)\n var data = prepare({pattern: $.pattern})\n if(typeof $.flags == \"number\"){\n $.flags = Flag.$factory($.flags)\n }\n var jspat = compile(data.pattern, $.flags)\n return Pattern.$factory(jspat)\n },\n error: error,\n escape: function(){\n var $ = $B.args(\"escape\", 1, {pattern: null}, ['pattern'], arguments,\n {}, null, null),\n data = prepare({pattern: $.pattern}),\n pattern = data.pattern,\n res = []\n for(var cp of pattern.codepoints){\n if(escaped.indexOf(cp) > -1){\n res.push(BACKSLASH)\n }\n res.push(cp)\n }\n res = from_codepoint_list(res, data.type)\n if(data.type == \"bytes\" && $B.$isinstance(res, _b_.str)){\n res = _b_.str.encode(res, 'latin1')\n }\n return res\n },\n findall: function(){\n /* Return all non-overlapping matches of pattern in string, as a list\n of strings. The string is scanned left-to-right, and matches are\n returned in the order found. If one or more groups are present in\n the pattern, return a list of groups; this will be a list of tuples\n if the pattern has more than one group. Empty matches are included\n in the result.\n */\n var $ = $B.args(\"findall\", 3,\n {pattern: null, string: null, flags: null},\n ['pattern', 'string', 'flags'], arguments,\n {flags: no_flag}, null, null),\n pattern = $.pattern,\n string = $.string,\n flags = $.flags,\n data\n pattern = check_pattern_flags(pattern, flags)\n if(pattern.__class__ === Pattern){\n data = prepare({string})\n }else{\n data = prepare({string, pattern})\n pattern = Pattern.$factory(compile(data.pattern, flags))\n }\n if(data.type === \"str\"){\n function conv(s){\n return s === EmptyString ? '' : s\n }\n }else{\n function conv(s){\n return string2bytes(s)\n }\n }\n\n var iter = module.finditer.apply(null, arguments).js_gen,\n res = []\n while(true){\n var next = iter.next()\n if(next.done){\n return res\n }\n var bmo = next.value,\n mo = bmo.mo,\n groups = MatchObject.groups(bmo)\n\n // replace None by the empty string\n for(var i = 0, len = groups.length; i < len; i++){\n groups[i] = groups[i] === _b_.None ? 
\"\" : groups[i]\n }\n if(groups.length > 0){\n if(groups.length == 1){\n res.push(groups[0])\n }else{\n res.push($B.fast_tuple(groups))\n }\n }else{\n res.push(mo.string.substring(mo.start, mo.end))\n }\n }\n console.log(\"end findall\")\n },\n finditer: function(){\n var $ = $B.args(\"finditer\", 3,\n {pattern: null, string: null, flags: null},\n ['pattern', 'string', 'flags'], arguments,\n {flags: no_flag}, null, null),\n pattern = $.pattern,\n string = $.string,\n flags = $.flags\n if($B.$isinstance(string, [_b_.bytearray, _b_.memoryview])){\n string.in_iteration = true\n }\n var original_string = string,\n data\n pattern = check_pattern_flags(pattern, flags)\n if(pattern.__class__ === Pattern){\n data = prepare({string})\n flags = pattern.flags\n }else{\n data = prepare({string, pattern})\n pattern = Pattern.$factory(compile(data.pattern, flags))\n }\n if(pattern.__class__ !== Pattern){\n throw Error(\"pattern not a Python object\")\n }\n return $B.generator.$factory(iterator)(pattern.$pattern, data.string,\n flags, original_string)\n },\n fullmatch: function(){\n var $ = $B.args(\"fullmatch\", 3, {pattern: null, string: null, flags: null},\n ['pattern', 'string', 'flags'], arguments,\n {flags: no_flag}, null, null),\n pattern = $.pattern,\n string = $.string,\n flags = $.flags\n pattern = check_pattern_flags(pattern, flags)\n var data\n if(pattern.__class__ === Pattern){\n data = prepare({string})\n pattern = pattern.$pattern\n }else{\n data = prepare({pattern, string})\n pattern = compile(data.pattern, flags)\n }\n\n var new_pattern = create_fullmatch_pattern(pattern)\n\n // match transformed RE\n var res = match(new_pattern, data.string, 0)\n var bmo = res === false ? _b_.None : MatchObject.$factory(res)\n if(bmo !== _b_.None){\n if(bmo.mo.string.codepoints.length != bmo.mo.end - bmo.mo.start){\n return _b_.None\n }else{\n return bmo\n }\n }\n return _b_.None\n },\n Match: MatchObject,\n match: function(){\n var $ = $B.args(\"match\", 3, {pattern: null, string: null, flags: null},\n ['pattern', 'string', 'flags'], arguments,\n {flags: no_flag}, null, null),\n pattern = $.pattern,\n string = $.string,\n flags = $.flags\n pattern = check_pattern_flags(pattern, flags)\n var data\n if(pattern.__class__ === Pattern){\n data = prepare({string})\n pattern = pattern.$pattern\n }else{\n data = prepare({pattern, string})\n pattern = compile(data.pattern, flags)\n }\n var res = match(pattern, data.string, 0)\n return res === false ? _b_.None : MatchObject.$factory(res)\n },\n Pattern,\n purge: function(){\n var $ = $B.args(\"purge\", 0, {}, [], arguments, {}, null, null)\n cache.clear()\n return _b_.None\n },\n _reconstructor,\n Scanner,\n search: function(){\n var $ = $B.args(\"search\", 3, {pattern: null, string: null, flags: null},\n ['pattern', 'string', 'flags'], arguments,\n {flags: no_flag}, null, null),\n pattern = $.pattern,\n string = $.string,\n flags = $.flags,\n data\n pattern = check_pattern_flags(pattern, flags)\n if(pattern.__class__ === Pattern){\n data = prepare({string})\n }else{\n data = prepare({string, pattern})\n pattern = Pattern.$factory(compile(data.pattern, flags))\n }\n data.pattern = pattern\n // optimizations\n if(pattern.pattern.startsWith('\\\\A') ||\n pattern.pattern.startsWith('^')){\n if(! 
(pattern.$pattern.node.items[0] instanceof Choice)){\n var mo = match(data.pattern.$pattern, data.string, 0)\n if(mo){\n return MatchObject.$factory(mo)\n }else if(pattern.flags.value & MULTILINE.value){\n var pos = 0,\n cp\n while((cp = data.string.cp_at(pos)) !== undefined){\n if(cp == LINEFEED){\n mo = match(data.pattern.$pattern, data.string, pos + 1)\n if(mo){\n return MatchObject.$factory(mo)\n }\n }\n pos++\n }\n }else{\n return _b_.None\n }\n }\n }\n if(pattern.$pattern.fixed_length !== false &&\n isFinite(pattern.$pattern.fixed_length) &&\n pattern.pattern.endsWith('$') &&\n ! (pattern.flags.value & MULTILINE.value)){\n var mo = match(data.pattern.$pattern, data.string,\n data.string.length - pattern.$pattern.fixed_length)\n if(mo){\n return MatchObject.$factory(mo)\n }\n return _b_.None\n }\n var pos = 0\n if(data.string.codepoints.length == 0){\n mo = match(data.pattern.$pattern, data.string, 0)\n if(mo){\n mo.start = mo.end = 0\n }\n return mo ? MatchObject.$factory(mo) : _b_.None\n }\n while(pos < data.string.codepoints.length){\n var mo = match(data.pattern.$pattern, data.string, pos)\n if(mo){\n return MatchObject.$factory(mo)\n }else{\n pos++\n }\n }\n return _b_.None\n },\n set_debug: function(value){\n _debug.value = value\n },\n split: function(){\n var $ = $B.args(\"split\", 4,\n {pattern: null, string: null, maxsplit: null, flags: null},\n ['pattern', 'string', 'maxsplit', 'flags'],\n arguments, {maxsplit: 0, flags: no_flag}, null, null)\n var res = [],\n pattern = $.pattern,\n string = $.string,\n flags = $.flags,\n pos = 0,\n nb_split = 0,\n data\n if(pattern.__class__ !== Pattern){\n data = prepare({pattern, string})\n var comp = compile(data.pattern, flags)\n pattern = Pattern.$factory(comp)\n }else{\n data = {pattern, string}\n }\n for(var bmo of module.finditer(pattern, $.string).js_gen){\n var mo = bmo.mo, // finditer returns instances of MatchObject\n groupobj = mo.$groups\n res.push(data.string.substring(pos, mo.start))\n for(var key in mo.node.$groups){\n if(isFinite(key)){\n if(groupobj[key] !== undefined){\n res.push(data.string.substring(groupobj[key].start,\n groupobj[key].end))\n }else{\n res.push(_b_.None)\n }\n }\n }\n nb_split++\n pos = mo.end\n if(pos >= $.string.length){\n break\n }\n if($.maxsplit != 0 && nb_split >= $.maxsplit){\n break\n }\n }\n res.push(data.string.substring(pos))\n if(data.type === \"bytes\"){\n res = res.map(\n function(x){\n return $B.$isinstance(x, _b_.bytes) ?\n x :\n _b_.str.encode(x, \"latin-1\")\n }\n )\n }\n return res\n },\n sub: function(){\n var $ = $B.args(\"sub\", 5,\n {pattern: null, repl: null, string: null, count: null, flags: null},\n ['pattern', 'repl', 'string', 'count', 'flags'],\n arguments, {count: 0, flags: no_flag}, null, null),\n pattern = $.pattern,\n repl = $.repl,\n string = $.string,\n count = $.count,\n flags = $.flags,\n data\n check_pattern_flags(pattern, flags)\n if(typeof repl != \"function\"){\n if(pattern.__class__ != Pattern){\n data = prepare({pattern, string, repl})\n pattern = compile(data.pattern, flags)\n }else{\n data = prepare({string, repl})\n flags = pattern.flags\n pattern = pattern.$pattern\n }\n data = transform_repl(data, pattern)\n }else{\n if(pattern.__class__ != Pattern){\n data = prepare({pattern, string})\n pattern = compile(data.pattern, flags)\n }else{\n data = prepare({string})\n flags = pattern.flags\n pattern = pattern.$pattern\n }\n data.repl = repl\n }\n return subn(pattern, data.repl, data.string, count, flags)[0]\n },\n subn: function(){\n var $ = $B.args(\"sub\", 
5,\n {pattern: null, repl: null, string: null, count: null, flags: null},\n ['pattern', 'repl', 'string', 'count', 'flags'],\n arguments, {count: 0, flags: no_flag}, null, null),\n pattern = $.pattern,\n repl = $.repl,\n string = $.string,\n count = $.count,\n flags = $.flags,\n data\n if(pattern.__class__ != Pattern){\n data = prepare({pattern, repl, string})\n }else{\n data = prepare({repl, string})\n data.pattern = pattern.$pattern\n }\n return $B.fast_tuple(subn(data.pattern, data.repl, data.string, count,\n flags))\n }\n\n}\n\nvar ASCII = module.A = module.ASCII = Flag.$factory(256)\nvar IGNORECASE = module.I = module.IGNORECASE = Flag.$factory(2)\nvar LOCALE = module.L = module.LOCALE = Flag.$factory(4)\nvar MULTILINE = module.M = module.MULTILINE = Flag.$factory(8)\nvar DOTALL = module.S = module.DOTALL = Flag.$factory(16)\nvar U = module.U = module.UNICODE = Flag.$factory(32)\nvar VERBOSE = module.X = module.VERBOSE = Flag.$factory(64)\nmodule.cache = cache\nmodule._compile = module.compile\n\n\nvar inline_flags = {\n i: IGNORECASE,\n L: LOCALE,\n m: MULTILINE,\n s: DOTALL,\n u: U,\n x: VERBOSE,\n a: ASCII\n}\n\nvar flag_names = {\n i: 'IGNORECASE',\n L: 'LOCALE',\n m: 'MULTILINE',\n s: 'DOTALL',\n u: 'U',\n x: 'VERBOSE',\n a: 'ASCII'\n}\n\n$B.addToImported('python_re', module)\n\n})(__BRYTHON__)"], "_sre_utils": [".js", "var $module=(function($B){\n\n function unicode_iscased(cp){\n // cp : Unicode code point\n var letter = String.fromCodePoint(cp)\n return (letter != letter.toLowerCase() ||\n letter != letter.toUpperCase())\n }\n\n function ascii_iscased(cp){\n if(cp > 255){return false}\n return unicode_iscased(cp)\n }\n\n function unicode_tolower(cp){\n var letter = String.fromCodePoint(cp),\n lower = letter.toLowerCase()\n return lower.charCodeAt(0)\n }\n\n function ascii_tolower(cp){\n return unicode_tolower(cp)\n }\n\nreturn {\n unicode_iscased: unicode_iscased,\n ascii_iscased: ascii_iscased,\n unicode_tolower: unicode_tolower,\n ascii_tolower: ascii_tolower\n}\n\n}\n\n)(__BRYTHON__)"], "_multiprocessing": [".js", "// multiprocessing\n(function($B){\n\nvar _b_ = $B.builtins\n\nvar Process = $B.make_class('Process')\n\nvar $convert_args=function(args) {\n var _list=[]\n for(var i=0, _len_i = args.length; i < _len_i; i++) {\n var _a=args[i]\n if($B.$isinstance(_a, _b_.str)){_list.push(\"'\"+_a+\"'\")} else {_list.push(_a)}\n }\n\n return _list.join(',')\n}\n\nProcess.is_alive = function(self){return self.$alive}\n\nProcess.join = function(self, timeout){\n // need to block until process is complete\n // could probably use a addEventListener to execute all existing code\n // after this join statement\n\n self.$worker.addEventListener('message', function (e) {\n var data=e.data\n if (data.stdout != '') { // output stdout from process\n $B.stdout.write(data.stdout)\n }\n }, false);\n}\n\nProcess.run = function(self){\n //fix me\n}\n\nProcess.start = function(self){\n self.$worker.postMessage({target: self.$target,\n args: $convert_args(self.$args),\n // kwargs: self.$kwargs\n })\n self.$worker.addEventListener('error', function(e) { throw e})\n self.$alive=true\n}\n\nProcess.terminate = function(self){\n self.$worker.terminate()\n self.$alive=false\n}\n\n// variables\n//name\n//daemon\n//pid\n//exitcode\n\nProcess. 
$factory = function(){\n //arguments group=None, target=None, name=None, args=(), kwargs=()\n\n var $ns=$B.args('Process',0,{},[],arguments,{},null,'kw')\n var kw=$ns['kw']\n\n var target=_b_.dict.get($ns['kw'],'target', _b_.None)\n var args=_b_.dict.get($ns['kw'],'args', _b_.tuple.$factory())\n\n var worker = new Worker('/src/web_workers/multiprocessing.js')\n\n var res = {\n __class__:Process,\n $worker: worker,\n name: $ns['name'] || _b_.None,\n $target: target+'',\n $args: args,\n //$kwargs: $ns['kw'],\n $alive: false\n }\n return res\n}\n\n$B.set_func_names(Process, \"multiprocessing\")\n\nvar Pool = $B.make_class(\"Pool\")\n\nPool.__enter__ = function(self){}\nPool.__exit__ = function(self){}\n\nPool.__str__ = Pool.toString = Pool.__repr__=function(self){\n return ''\n}\n\nPool.map = function(){\n\n var $ns=$B.args('Pool.map', 3,\n {self:null, func:null, fargs:null}, ['self', 'func', 'fargs'],\n arguments,{},'args','kw')\n var func = $ns['func']\n var fargs = $ns['fargs']\n\n var _results = []\n\n fargs = _b_.iter(fargs)\n\n var _pos = 0\n console.log(self.$processes)\n _workers =[]\n for(var i=0; i < self.$processes; i++) {\n _workers[i] = new Worker('/src/web_workers/multiprocessing.js')\n var arg\n\n try{arg = $B.$getattr(fargs, '__next__')()}\n catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n }\n console.log(arg)\n _workers[i].finished=false\n _workers[i].postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n\n _workers[i].addEventListener('message', function(e) {\n _results[e.data.pos]=e.data.result\n if (_results.length == args.length) return _results\n\n try {\n arg = $B.$getattr(fargs, '__next__')()\n e.currentTarget.postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n } catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n this.finished=true\n }\n }, false);\n }\n}\n\nPool.apply_async = function(){\n\n var $ns=$B.$MakeArgs('apply_async', 3,\n {self:null, func:null, fargs:null}, ['self', 'func', 'fargs'],\n arguments,{},'args','kw')\n var func = $ns['func']\n var fargs = $ns['fargs']\n\n fargs = _b_.iter(fargs)\n\n async_result = {}\n async_result.get = function(timeout){\n console.log(results)\n console.log(fargs)\n return this.results}\n async_result.results=[]\n\n var _pos=0\n\n _workers=[]\n for(var i=0; i < self.$processes; i++) {\n _workers[i] = new Worker('/src/web_workers/multiprocessing.js')\n var arg\n\n try{arg = $B.$getattr(fargs, '__next__')()}\n catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n }\n //console.log(arg)\n //_workers[i].finished=false\n _workers[i].postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n\n _workers[i].addEventListener('message', function(e) {\n async_result.results[e.data.pos]=e.data.result\n //if (_results.length == args.length) return _results\n\n try {\n arg = $B.$getattr(fargs, '__next__')()\n e.currentTarget.postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n } catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n this.finished=true\n }\n }, false);\n }\n\n console.log(\"return\", async_result)\n return async_result\n}\n\nPool.$factory = function(){\n console.log(\"pool\")\n console.log(arguments)\n var $ns=$B.args('Pool',1,\n {processes:null},['processes'],arguments,{},'args','kw')\n\n var processes = $ns['processes']\n\n if (processes === _b_.None) {\n // look to see if we have stored cpu_count in local storage\n // maybe we should create a brython 
config file with settings,etc..??\n\n // if not there use a tool such as Core Estimator to calculate number of cpu's\n // http://eligrey.com/blog/post/cpu-core-estimation-with-javascript\n }\n\n console.log(processes)\n var res = {\n __class__:Pool,\n $processes:processes\n }\n return res\n}\n\n$B.set_func_names(Pool, \"multiprocessing\")\n\n$B.imported._multiprocessing = {Process:Process, Pool:Pool}\n\n})(__BRYTHON__)\n"], "unicodedata": [".js", "// Implementation of unicodedata\n(function($B){\n\n var _b_ = $B.builtins\n\n // Load unicode table if not already loaded\n if($B.unicodedb === undefined){\n var xhr = new XMLHttpRequest\n xhr.open(\"GET\",\n $B.brython_path + \"unicode.txt\", false)\n xhr.onreadystatechange = function(){\n if(this.readyState == 4){\n if(this.status == 200){\n $B.unicodedb = this.responseText\n }else{\n console.log(\"Warning - could not \" +\n \"load unicode.txt\")\n }\n }\n }\n xhr.send()\n }\n\n function _info(chr){\n var ord = _b_.ord(chr),\n hex = ord.toString(16).toUpperCase()\n while(hex.length < 4){hex = \"0\" + hex}\n var re = new RegExp(\"^\" + hex +\";(.+?);(.*?);(.*?);(.*?);(.*?);(.*);(.*);(.*)$\",\n \"m\"),\n search = re.exec($B.unicodedb)\n if(search === null){\n return null\n }else{\n return {\n name: search[1],\n category: search[2],\n combining: search[3],\n bidirectional: search[4],\n decomposition: search[5],\n decimal: search[6],\n digit: search[7],\n numeric: search[8]\n }\n }\n }\n\n function bidirectional(chr){\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr, hex)\n throw _b_.KeyError.$factory(chr)\n }\n return search.bidirectional\n }\n\n function category(chr){\n // Returns the general category assigned to the character chr as\n // string.\n if(/\\p{Cn}/u.test(chr.charAt(0))){\n return \"Cn\"\n }\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return search.category\n }\n\n function combining(chr){\n // Returns the general category assigned to the character chr as\n // string.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return parseInt(search.combining)\n }\n\n function decimal(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return parseInt(search.decimal)\n }\n\n function decomposition(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return search.decomposition\n }\n\n function digit(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return parseInt(search.digit)\n }\n\n function lookup(name){\n // Look up character by name. If a character with the given name is\n // found, return the corresponding character. 
If not found, KeyError\n // is raised.\n var re = new RegExp(\"^([0-9A-F]+);\" +\n name + \";(.*)$\", \"m\")\n search = re.exec($B.unicodedb)\n if(search === null){\n throw _b_.KeyError.$factory(\"undefined character name '\" +\n name + \"'\")\n }\n var res = parseInt(search[1], 16)\n return _b_.chr(res)\n }\n\n function name(chr, _default){\n // Returns the name assigned to the character chr as a string. If no\n // name is defined, default is returned, or, if not given, ValueError\n // is raised.\n var search = _info(chr)\n if(search === null){\n if(_default){return _default}\n throw _b_.KeyError.$factory(\"undefined character name '\" +\n chr + \"'\")\n }\n return search.name\n }\n\n function _norm(form, chr){\n var search = _info(chr)\n if(search === null){\n throw _b_.KeyError.$factory(chr)\n }\n switch(form){\n case \"NFC\":\n return chr\n case \"NFD\":\n var decomp = decomposition(chr),\n parts = decomp.split(\" \"),\n res = \"\"\n if(parts[0].startsWith(\"<\")){\n return chr\n }\n parts.forEach(function(part){\n if(! part.startsWith(\"<\")){\n res += _b_.chr(parseInt(part, 16))\n }\n })\n return res\n case \"NFKC\":\n var decomp = decomposition(chr),\n parts = decomp.split(\" \")\n if(parts[0] == \"\"){\n var res = \"\"\n parts.slice(1).forEach(function(part){\n res += _b_.chr(parseInt(part, 16))\n })\n return res\n }\n return chr\n case \"NFKD\":\n var decomp = decomposition(chr),\n parts = decomp.split(\" \")\n if(parts[0] == \"\"){\n var res = \"\"\n parts.slice(1).forEach(function(part){\n res += _b_.chr(parseInt(part, 16))\n })\n return res\n }\n return chr\n\n default:\n throw _b_.ValueError.$factory(\"invalid normalization form\")\n }\n }\n\n function normalize(form, unistr){\n var res = \"\"\n for(var i = 0, len = unistr.length; i < len; i++){\n res += _norm(form, unistr.charAt(i))\n }\n return res\n }\n\n function numeric(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n if(_default){return _default}\n throw _b_.KeyError.$factory(chr)\n }\n var parts = search.numeric.split('/'),\n value\n if(parts.length == 1){\n value = parseFloat(search.numeric)\n }else{\n value = parseInt(parts[0]) / parseInt(parts[1])\n }\n return $B.fast_float(value)\n }\n\n var module = {\n bidirectional: bidirectional,\n category: category,\n combining: combining,\n decimal: decimal,\n decomposition: decomposition,\n digit: digit,\n lookup: lookup,\n name: name,\n normalize: normalize,\n numeric: numeric,\n unidata_version: \"11.0.0\"\n }\n module.ucd_3_2_0 = {}\n for(var key in module){\n if(key == \"unidata_version\"){\n module.ucd_3_2_0[key] = '3.2.0'\n }else{\n module.ucd_3_2_0[key] = module[key] // approximation...\n }\n }\n $B.addToImported('unicodedata', module)\n\n})(__BRYTHON__)"], "_random": [".js", "// Javascript implementation of the _random module\n// Based on Ian Bicking's implementation of the Mersenne twister\n\n(function($B){\n\nvar _b_ = $B.builtins\n\n// Code copied from https://github.com/ianb/whrandom/blob/master/mersenne.js\n// by Ian Bicking\n\n// this program is a JavaScript version of Mersenne Twister,\n// a straight conversion from the original program, mt19937ar.c,\n// translated by y. 
okada on july 17, 2006.\n// and modified a little at july 20, 2006, but there are not any substantial differences.\n// modularized by Ian Bicking, March 25, 2013 (found original version at http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/VERSIONS/JAVASCRIPT/java-script.html)\n// in this program, procedure descriptions and comments of original source code were not removed.\n// lines commented with //c// were originally descriptions of c procedure. and a few following lines are appropriate JavaScript descriptions.\n// lines commented with /* and */ are original comments.\n// lines commented with // are additional comments in this JavaScript version.\n/*\n A C-program for MT19937, with initialization improved 2002/1/26.\n Coded by Takuji Nishimura and Makoto Matsumoto.\n\n Before using, initialize the state by using init_genrand(seed)\n or init_by_array(init_key, key_length).\n\n Copyright (C) 1997 - 2002, Makoto Matsumoto and Takuji Nishimura,\n All rights reserved.\n\n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions\n are met:\n\n 1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n 2. Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n\n 3. The names of its contributors may not be used to endorse or promote\n products derived from this software without specific prior written\n permission.\n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\n EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\n PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\n PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\n LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n Any feedback is very welcome.\n http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html\n email: m-mat @ math.sci.hiroshima-u.ac.jp (remove space)\n*/\n\nfunction RandomStream(seed) {\n /*jshint bitwise:false */\n /* Period parameters */\n //c//#define N 624\n //c//#define M 397\n //c//#define MATRIX_A 0x9908b0dfUL /* constant vector a */\n //c//#define UPPER_MASK 0x80000000UL /* most significant w-r bits */\n //c//#define LOWER_MASK 0x7fffffffUL /* least significant r bits */\n var N = 624\n var M = 397\n var MATRIX_A = 0x9908b0df /* constant vector a */\n var UPPER_MASK = 0x80000000 /* most significant w-r bits */\n var LOWER_MASK = 0x7fffffff /* least significant r bits */\n //c//static unsigned long mt[N]; /* the array for the state vector */\n //c//static int mti=N+1; /* mti==N+1 means mt[N] is not initialized */\n var mt = new Array(N) /* the array for the state vector */\n var mti = N + 1 /* mti==N+1 means mt[N] is not initialized */\n\n function unsigned32(n1){\n // returns a 32-bits unsiged integer from an operand to which applied a\n // bit operator.\n return n1 < 0 ? 
(n1 ^ UPPER_MASK) + UPPER_MASK : n1\n }\n\n function subtraction32(n1, n2){\n // emulates lowerflow of a c 32-bits unsiged integer variable, instead of\n // the operator -. these both arguments must be non-negative integers\n // expressible using unsigned 32 bits.\n return n1 < n2 ? unsigned32((0x100000000 - (n2 - n1)) & 0xffffffff) :\n n1 - n2\n }\n\n function addition32(n1, n2){\n // emulates overflow of a c 32-bits unsiged integer variable, instead of\n // the operator +. these both arguments must be non-negative integers\n // expressible using unsigned 32 bits.\n return unsigned32((n1 + n2) & 0xffffffff)\n }\n\n function multiplication32(n1, n2){\n // emulates overflow of a c 32-bits unsiged integer variable, instead of the\n // operator *. these both arguments must be non-negative integers\n // expressible using unsigned 32 bits.\n var sum = 0\n for (var i = 0; i < 32; ++i){\n if((n1 >>> i) & 0x1){\n sum = addition32(sum, unsigned32(n2 << i))\n }\n }\n return sum\n }\n\n /* initializes mt[N] with a seed */\n //c//void init_genrand(unsigned long s)\n function init_genrand(s) {\n //c//mt[0]= s & 0xffffffff;\n mt[0] = unsigned32(s & 0xffffffff)\n for(mti = 1; mti < N; mti++){\n mt[mti] =\n //c//(1812433253 * (mt[mti-1] ^ (mt[mti-1] >> 30)) + mti);\n addition32(multiplication32(1812433253,\n unsigned32(mt[mti - 1] ^ (mt[mti - 1] >>> 30))), mti)\n /* See Knuth TAOCP Vol2. 3rd Ed. P.106 for multiplier. */\n /* In the previous versions, MSBs of the seed affect */\n /* only MSBs of the array mt[]. */\n /* 2002/01/09 modified by Makoto Matsumoto */\n //c//mt[mti] &= 0xffffffff;\n mt[mti] = unsigned32(mt[mti] & 0xffffffff);\n /* for >32 bit machines */\n }\n }\n\n /* initialize by an array with array-length */\n /* init_key is the array for initializing keys */\n /* key_length is its length */\n /* slight change for C++, 2004/2/26 */\n //c//void init_by_array(unsigned long init_key[], int key_length)\n function init_by_array(init_key, key_length) {\n //c//int i, j, k;\n var i, j, k\n init_genrand(19650218)\n i = 1\n j = 0\n k = (N > key_length ? 
N : key_length)\n for(; k; k--){\n //c//mt[i] = (mt[i] ^ ((mt[i-1] ^ (mt[i-1] >> 30)) * 1664525))\n //c// + init_key[j] + j; /* non linear */\n mt[i] = addition32(\n addition32(unsigned32(mt[i] ^\n multiplication32(unsigned32(mt[i - 1] ^ (mt[i - 1] >>> 30)),\n 1664525)),\n init_key[j]), j)\n mt[i] =\n //c//mt[i] &= 0xffffffff; /* for WORDSIZE > 32 machines */\n unsigned32(mt[i] & 0xffffffff)\n i++\n j++\n if(i >= N){mt[0] = mt[N - 1]; i = 1}\n if(j >= key_length){j = 0}\n }\n for(k = N - 1; k; k--){\n //c//mt[i] = (mt[i] ^ ((mt[i-1] ^ (mt[i-1] >> 30)) * 1566083941))\n //c//- i; /* non linear */\n mt[i] = subtraction32(\n unsigned32(\n (mt[i]) ^\n multiplication32(\n unsigned32(mt[i - 1] ^ (mt[i - 1] >>> 30)),\n 1566083941)),\n i\n )\n //c//mt[i] &= 0xffffffff; /* for WORDSIZE > 32 machines */\n mt[i] = unsigned32(mt[i] & 0xffffffff)\n i++\n if(i >= N){mt[0] = mt[N - 1]; i = 1}\n }\n mt[0] = 0x80000000; /* MSB is 1; assuring non-zero initial array */\n }\n\n /* generates a random number on [0,0xffffffff]-interval */\n //c//unsigned long genrand_int32(void)\n function genrand_int32() {\n //c//unsigned long y;\n //c//static unsigned long mag01[2]={0x0UL, MATRIX_A};\n var y;\n var mag01 = [0x0, MATRIX_A];\n /* mag01[x] = x * MATRIX_A for x=0,1 */\n\n if(mti >= N){ /* generate N words at one time */\n //c//int kk;\n var kk\n\n if(mti == N + 1){ /* if init_genrand() has not been called, */\n init_genrand(Date.now()) /* a default initial seed is used */\n }\n\n for(kk = 0; kk < N - M; kk++){\n //c//y = (mt[kk]&UPPER_MASK)|(mt[kk+1]&LOWER_MASK);\n //c//mt[kk] = mt[kk+M] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[kk]&UPPER_MASK) | (mt[kk + 1]&LOWER_MASK))\n mt[kk] = unsigned32(mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1])\n }\n for(;kk < N - 1; kk++){\n //c//y = (mt[kk]&UPPER_MASK)|(mt[kk+1]&LOWER_MASK);\n //c//mt[kk] = mt[kk+(M-N)] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[kk]&UPPER_MASK) | (mt[kk + 1]&LOWER_MASK))\n mt[kk] = unsigned32(mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1])\n }\n //c//y = (mt[N-1]&UPPER_MASK)|(mt[0]&LOWER_MASK);\n //c//mt[N-1] = mt[M-1] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK))\n mt[N - 1] = unsigned32(mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1])\n mti = 0\n }\n\n y = mt[mti++]\n\n /* Tempering */\n //c//y ^= (y >> 11);\n //c//y ^= (y << 7) & 0x9d2c5680;\n //c//y ^= (y << 15) & 0xefc60000;\n //c//y ^= (y >> 18);\n y = unsigned32(y ^ (y >>> 11))\n y = unsigned32(y ^ ((y << 7) & 0x9d2c5680))\n y = unsigned32(y ^ ((y << 15) & 0xefc60000))\n y = unsigned32(y ^ (y >>> 18))\n\n return y\n }\n\n /* generates a random number on [0,0x7fffffff]-interval */\n //c//long genrand_int31(void)\n function genrand_int31(){\n //c//return (genrand_int32()>>1);\n return (genrand_int32()>>>1)\n }\n\n /* generates a random number on [0,1]-real-interval */\n //c//double genrand_real1(void)\n function genrand_real1(){\n return genrand_int32()*(1.0/4294967295.0)\n /* divided by 2^32-1 */\n }\n\n /* generates a random number on [0,1)-real-interval */\n //c//double genrand_real2(void)\n function genrand_real2(){\n return genrand_int32() * (1.0 / 4294967296.0)\n /* divided by 2^32 */\n }\n\n /* generates a random number on (0,1)-real-interval */\n //c//double genrand_real3(void)\n function genrand_real3() {\n return ((genrand_int32()) + 0.5) * (1.0 / 4294967296.0)\n /* divided by 2^32 */\n }\n\n /* generates a random number on [0,1) with 53-bit resolution*/\n //c//double genrand_res53(void)\n function genrand_res53() {\n //c//unsigned long 
a=genrand_int32()>>5, b=genrand_int32()>>6;\n var a = genrand_int32() >>> 5,\n b = genrand_int32() >>> 6\n return (a * 67108864.0 + b) * (1.0 / 9007199254740992.0)\n }\n /* These real versions are due to Isaku Wada, 2002/01/09 added */\n\n var random = genrand_res53\n\n random.seed = function(seed){\n if(seed === undefined || $B.is_none(seed)){\n const entries = new Uint32Array(N)\n crypto.getRandomValues(entries)\n init_by_array(Array.from(entries), N)\n return\n }\n\n if(!$B.$isinstance(seed, _b_.int)){\n seed = _b_.hash(seed)\n }\n\n // Transform to long integer\n if(typeof seed == \"number\"){\n seed = BigInt(seed)\n }else if(seed.__class__ === $B.long_int){\n seed = seed.value\n }else{\n return random.seed(seed.$brython_value)\n }\n\n // Take abs(seed)\n seed = seed > 0 ? seed : -seed\n\n var keys = []\n var int32_1 = 2n ** 32n - 1n\n\n // decomposition in factors of 2 ** 32\n while(seed >= int32_1){\n var quot = seed / int32_1,\n rest = seed % int32_1\n // Rest is a JS number (< 2 ** 32)\n keys.push(Number(rest))\n // Quotient is either a JS number or a instance of long_int\n // but seed must be long_int\n seed = quot\n }\n keys.push(Number(seed))\n\n init_by_array(keys, keys.length)\n }\n\n random.seed(seed)\n\n random.int31 = genrand_int31\n random.int32 = genrand_int32\n random.real1 = genrand_real1\n random.real2 = genrand_real2\n random.real3 = genrand_real3\n random.res53 = genrand_res53\n\n // Added for compatibility with Python\n random.getstate = function(){\n return $B.fast_tuple(mt.concat([mti]))\n }\n\n random.setstate = function(state){\n mt = state.slice(0, state.length - 1)\n mti = state[state.length - 1]\n }\n\n return random\n\n}\n\nvar Random = $B.make_class(\"Random\",\n function(){\n return {\n __class__: Random,\n _random: RandomStream(Date.now())\n }\n }\n)\n\nRandom.getrandbits = function(){\n var $ = $B.args(\"getrandbits\", 2, {self: null, k:null}, [\"self\", \"k\"],\n arguments, {}, null, null),\n self = $.self,\n k = $B.$GetInt($.k)\n\n if(k < 0)\n throw _b_.ValueError.$factory('number of bits must be non-negative')\n\n if(k === 0)\n return 0\n\n const words = Math.floor((k - 1) / 32) + 1\n const wordarray = new ArrayBuffer(words * 4)\n const wordarray_view = new DataView(wordarray)\n\n /* Fill-out bits of long integer, by 32-bit words, from least significant\n to most significant. 
*/\n for(i = 0; i < words; i++, k -= 32){\n r = self._random.int32()\n if (k < 32)\n r >>>= (32 - k) /* Drop least significant bits */\n wordarray_view.setUint32(i * 4, r, true)\n }\n\n return _b_.int.from_bytes(_b_.bytes.$factory(Array.from(new Uint8Array(wordarray))), \"little\")\n}\n\nRandom.getstate = function(){\n var $ = $B.args('getstate', 1, {self: null},\n [\"self\"], arguments, {}, null, null),\n self = $.self\n return self._random.getstate()\n}\n\nRandom.random = function(){\n var $ = $B.args('random', 1, {self: null}, [\"self\"],\n arguments, {}, null, null),\n self = $.self\n return $B.fast_float(self._random())\n}\n\nRandom.seed = function(){\n var $ = $B.args('seed', 2, {self: null, n: null}, ['self', 'n'],\n arguments, {}, null, null),\n self = $.self,\n n = $.n\n\n if (self._random === undefined)\n self._random = RandomStream(n)\n else\n self._random.seed(n)\n}\n\nRandom.setstate = function(){\n var $ = $B.args('setstate', 2, {self: null, state:null}, ['self', 'state'],\n arguments, {}, null, null),\n self = $.self,\n state = $.state\n return self._random.setstate(state)\n}\n\n$B.set_func_names(Random, \"_random\")\n\n$B.imported._random = { Random }\n\n})(__BRYTHON__)\n"], "_symtable": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nvar module = {\n CELL: 5,\n DEF_ANNOT: 256,\n DEF_BOUND: 134,\n DEF_FREE: 32,\n DEF_FREE_CLASS: 64,\n DEF_GLOBAL: 1,\n DEF_IMPORT: 128,\n DEF_LOCAL: 2,\n DEF_NONLOCAL: 8,\n DEF_PARAM: 4,\n FREE: 4,\n GLOBAL_EXPLICIT: 2,\n GLOBAL_IMPLICIT: 3,\n LOCAL: 1,\n SCOPE_MASK: 15,\n SCOPE_OFF: 11,\n TYPE_CLASS: 1,\n TYPE_FUNCTION: 0,\n TYPE_MODULE: 2,\n USE: 16,\n symtable: function(){\n var $ = $B.args('symtable', 3,\n {code: null, filename: null, compile_type: null},\n ['code', 'filename', 'compile_type'], arguments,\n {}, null, null)\n var ast = _b_.compile($.code, $.filename, $.compile_type,\n $B.PyCF_ONLY_AST)\n // ast is an instance of Python class\n // _Py_Symtable_Build in symtable.js uses the underlying JS object\n return $B._PySymtable_Build(ast.$js_ast, $.filename)\n }\n}\n\n$B.addToImported('_symtable', module)\n\n})(__BRYTHON__)"], "_sre": [".py", "\n''\n\n\n\n\n\n\n\nMAXREPEAT=2147483648\nMAXGROUPS=2147483647\n\nimport array\nimport operator,sys\nfrom sre_constants import ATCODES,OPCODES,CHCODES\nfrom sre_constants import SRE_INFO_PREFIX,SRE_INFO_LITERAL\nfrom sre_constants import SRE_FLAG_UNICODE,SRE_FLAG_LOCALE\n\n\nfrom _sre_utils import (unicode_iscased,ascii_iscased,unicode_tolower,\nascii_tolower)\n\nimport sys\n\n\n\nMAGIC=20171005\n\n\n\n\n\n\n\n\n\n\n\n\n\nCODESIZE=4\n\ncopyright=\"_sre.py 2.4c Copyright 2005 by Nik Haldimann\"\n\n\ndef getcodesize():\n return CODESIZE\n \ndef compile(pattern,flags,code,groups=0,groupindex={},indexgroup=[None ]):\n ''\n \n return SRE_Pattern(pattern,flags,code,groups,groupindex,indexgroup)\n \ndef getlower(char_ord,flags):\n if (char_ord <128)or (flags&SRE_FLAG_UNICODE)\\\n or (flags&SRE_FLAG_LOCALE and char_ord <256):\n \n return ord(chr(char_ord).lower())\n else :\n return char_ord\n \n \nclass SRE_Pattern:\n\n def __init__(self,pattern,flags,code,groups=0,groupindex={},indexgroup=[None ]):\n self.pattern=pattern\n self.flags=flags\n self.groups=groups\n self.groupindex=groupindex\n self._indexgroup=indexgroup\n self._code=code\n \n def match(self,string,pos=0,endpos=sys.maxsize):\n ''\n\n \n state=_State(string,pos,endpos,self.flags)\n if state.match(self._code):\n return SRE_Match(self,state)\n return None\n \n def fullmatch(self,string,pos=0,endpos=sys.maxsize):\n ''\n\n \n end=\"$\"if 
isinstance(string,str)else b\"$\"\n if not string.endswith(end):\n string +=end\n state=_State(string,pos,endpos,self.flags)\n if state.match(self._code):\n return SRE_Match(self,state)\n return None\n \n def search(self,string,pos=0,endpos=sys.maxsize):\n ''\n\n\n \n state=_State(string,pos,endpos,self.flags)\n if state.search(self._code):\n return SRE_Match(self,state)\n else :\n return None\n \n def findall(self,string,pos=0,endpos=sys.maxsize):\n ''\n matchlist=[]\n state=_State(string,pos,endpos,self.flags)\n while state.start <=state.end:\n state.reset()\n state.string_position=state.start\n if not state.search(self._code):\n break\n match=SRE_Match(self,state)\n if self.groups ==0 or self.groups ==1:\n item=match.group(self.groups)\n else :\n item=match.groups(\"\")\n matchlist.append(item)\n if state.string_position ==state.start:\n state.start +=1\n else :\n state.start=state.string_position\n return matchlist\n \n def _subx(self,template,string,count=0,subn=False ):\n filter=template\n if not callable(template)and \"\\\\\"in template:\n \n \n \n \n import re as sre\n filter=sre._subx(self,template)\n state=_State(string,0,sys.maxsize,self.flags)\n sublist=[]\n \n n=last_pos=0\n while not count or n 0):\n \n if callable(filter):\n sublist.append(filter(SRE_Match(self,state)))\n else :\n sublist.append(filter)\n last_pos=state.string_position\n n +=1\n if state.string_position ==state.start:\n state.start +=1\n else :\n state.start=state.string_position\n \n if last_pos =0 and group <=self.re.groups:\n return group\n else :\n if group in self.re.groupindex:\n return self.re.groupindex[group]\n raise IndexError(\"no such group\")\n \n def _get_slice(self,group,default):\n group_indices=self.regs[group]\n if group_indices[0]>=0:\n return self.string[group_indices[0]:group_indices[1]]\n else :\n return default\n \n def start(self,group=0):\n ''\n\n \n return self.regs[self._get_index(group)][0]\n \n def end(self,group=0):\n ''\n\n \n return self.regs[self._get_index(group)][1]\n \n def span(self,group=0):\n ''\n return self.start(group),self.end(group)\n \n def expand(self,template):\n ''\n \n import sre\n return sre._expand(self.re,self,template)\n \n def groups(self,default=None ):\n ''\n\n \n groups=[]\n for indices in self.regs[1:]:\n if indices[0]>=0:\n groups.append(self.string[indices[0]:indices[1]])\n else :\n groups.append(default)\n return tuple(groups)\n \n def groupdict(self,default=None ):\n ''\n\n \n groupdict={}\n for key,value in self.re.groupindex.items():\n groupdict[key]=self._get_slice(value,default)\n return groupdict\n \n def group(self,*args):\n ''\n \n if len(args)==0:\n args=(0,)\n grouplist=[]\n for group in args:\n grouplist.append(self._get_slice(self._get_index(group),None ))\n if len(grouplist)==1:\n return grouplist[0]\n else :\n return tuple(grouplist)\n \n def __copy__():\n raise TypeError(\"cannot copy this pattern object\")\n \n def __deepcopy__():\n raise TypeError(\"cannot copy this pattern object\")\n \n def __str__(self):\n start,end=self.start(0),self.end(0)\n return (f\"\")\n \nclass _State:\n\n def __init__(self,string,start,end,flags):\n if isinstance(string,bytearray):\n string=str(bytes(string),\"latin1\")\n if isinstance(string,bytes):\n string=str(string,\"latin1\")\n self.string=string\n if start <0:\n start=0\n if end >len(string):\n end=len(string)\n self.start=start\n self.string_position=self.start\n self.end=end\n self.pos=start\n self.flags=flags\n self.reset()\n \n def reset(self):\n self.marks=[]\n self.lastindex=-1\n 
self.marks_stack=[]\n self.context_stack=[]\n self.repeat=None\n \n def match(self,pattern_codes):\n \n \n \n \n \n \n \n \n dispatcher=_OpcodeDispatcher()\n self.context_stack.append(_MatchContext(self,pattern_codes))\n has_matched=None\n while len(self.context_stack)>0:\n context=self.context_stack[-1]\n has_matched=dispatcher.match(context)\n if has_matched is not None :\n self.context_stack.pop()\n return has_matched\n \n def search(self,pattern_codes):\n flags=0\n if OPCODES[pattern_codes[0]].name ==\"info\":\n \n \n if pattern_codes[2]&SRE_INFO_PREFIX and pattern_codes[5]>1:\n return self.fast_search(pattern_codes)\n flags=pattern_codes[2]\n pattern_codes=pattern_codes[pattern_codes[1]+1:]\n \n string_position=self.start\n if OPCODES[pattern_codes[0]].name ==\"literal\":\n \n \n character=pattern_codes[1]\n while True :\n while string_position =self.end:\n return False\n self.start=string_position\n string_position +=1\n self.string_position=string_position\n if flags&SRE_INFO_LITERAL:\n return True\n if self.match(pattern_codes[2:]):\n return True\n return False\n \n \n while string_position <=self.end:\n self.reset()\n self.start=self.string_position=string_position\n if self.match(pattern_codes):\n return True\n string_position +=1\n return False\n \n def fast_search(self,pattern_codes):\n ''\n \n \n \n flags=pattern_codes[2]\n prefix_len=pattern_codes[5]\n prefix_skip=pattern_codes[6]\n prefix=pattern_codes[7:7+prefix_len]\n overlap=pattern_codes[7+prefix_len -1:pattern_codes[1]+1]\n pattern_codes=pattern_codes[pattern_codes[1]+1:]\n i=0\n string_position=self.string_position\n while string_position =len(self.marks):\n self.marks.extend([None ]*(mark_nr -len(self.marks)+1))\n self.marks[mark_nr]=position\n \n def get_marks(self,group_index):\n marks_index=2 *group_index\n if len(self.marks)>marks_index+1:\n return self.marks[marks_index],self.marks[marks_index+1]\n else :\n return None ,None\n \n def marks_push(self):\n self.marks_stack.append((self.marks[:],self.lastindex))\n \n def marks_pop(self):\n self.marks,self.lastindex=self.marks_stack.pop()\n \n def marks_pop_keep(self):\n self.marks,self.lastindex=self.marks_stack[-1]\n \n def marks_pop_discard(self):\n self.marks_stack.pop()\n \n def lower(self,char_ord):\n return getlower(char_ord,self.flags)\n \n \nclass _MatchContext:\n\n def __init__(self,state,pattern_codes):\n self.state=state\n self.pattern_codes=pattern_codes\n self.string_position=state.string_position\n self.code_position=0\n self.has_matched=None\n \n def push_new_context(self,pattern_offset):\n ''\n\n \n child_context=_MatchContext(self.state,\n self.pattern_codes[self.code_position+pattern_offset:])\n \n \n \n \n self.state.context_stack.append(child_context)\n return child_context\n \n def peek_char(self,peek=0):\n return self.state.string[self.string_position+peek]\n \n def skip_char(self,skip_count):\n self.string_position +=skip_count\n \n def remaining_chars(self):\n return self.state.end -self.string_position\n \n def peek_code(self,peek=0):\n return self.pattern_codes[self.code_position+peek]\n \n def skip_code(self,skip_count):\n self.code_position +=skip_count\n \n def remaining_codes(self):\n return len(self.pattern_codes)-self.code_position\n \n def at_beginning(self):\n return self.string_position ==0\n \n def at_end(self):\n return self.string_position ==self.state.end\n \n def at_linebreak(self):\n return not self.at_end()and _is_linebreak(self.peek_char())\n \n def at_boundary(self,word_checker):\n if self.at_beginning()and self.at_end():\n 
return False\n that=not self.at_beginning()and word_checker(self.peek_char(-1))\n this=not self.at_end()and word_checker(self.peek_char())\n return this !=that\n \n \nclass _RepeatContext(_MatchContext):\n\n def __init__(self,context):\n _MatchContext.__init__(self,context.state,\n context.pattern_codes[context.code_position:])\n self.count=-1\n \n self.previous=context.state.repeat\n self.last_position=None\n \n \nclass _Dispatcher:\n\n DISPATCH_TABLE=None\n \n def dispatch(self,code,context):\n method=self.DISPATCH_TABLE.get(code,self.__class__.unknown)\n return method(self,context)\n \n def unknown(self,code,ctx):\n raise NotImplementedError()\n \n def build_dispatch_table(cls,items,method_prefix):\n if cls.DISPATCH_TABLE is not None :\n return\n table={}\n for item in items:\n key,value=item.name.lower(),int(item)\n if hasattr(cls,\"%s%s\"%(method_prefix,key)):\n table[value]=getattr(cls,\"%s%s\"%(method_prefix,key))\n cls.DISPATCH_TABLE=table\n \n build_dispatch_table=classmethod(build_dispatch_table)\n \n \nclass _OpcodeDispatcher(_Dispatcher):\n\n def __init__(self):\n self.executing_contexts={}\n self.at_dispatcher=_AtcodeDispatcher()\n self.ch_dispatcher=_ChcodeDispatcher()\n self.set_dispatcher=_CharsetDispatcher()\n \n def match(self,context):\n ''\n\n \n while context.remaining_codes()>0 and context.has_matched is None :\n opcode=context.peek_code()\n if not self.dispatch(opcode,context):\n return None\n if context.has_matched is None :\n context.has_matched=False\n return context.has_matched\n \n def dispatch(self,opcode,context):\n ''\n \n \n if id(context)in self.executing_contexts:\n generator=self.executing_contexts[id(context)]\n del self.executing_contexts[id(context)]\n has_finished=next(generator)\n else :\n method=self.DISPATCH_TABLE.get(opcode,_OpcodeDispatcher.unknown)\n has_finished=method(self,context)\n if hasattr(has_finished,\"__next__\"):\n generator=has_finished\n has_finished=next(generator)\n if not has_finished:\n self.executing_contexts[id(context)]=generator\n return has_finished\n \n def op_success(self,ctx):\n \n \n ctx.state.string_position=ctx.string_position\n ctx.has_matched=True\n return True\n \n def op_failure(self,ctx):\n \n \n ctx.has_matched=False\n return True\n \n def general_op_literal(self,ctx,compare,decorate=lambda x:x):\n if ctx.at_end()or not compare(decorate(ord(ctx.peek_char())),\n decorate(ctx.peek_code(1))):\n ctx.has_matched=False\n ctx.skip_code(2)\n ctx.skip_char(1)\n \n def op_literal(self,ctx):\n \n \n \n self.general_op_literal(ctx,operator.eq)\n return True\n \n def op_not_literal(self,ctx):\n \n \n \n self.general_op_literal(ctx,operator.ne)\n return True\n \n def op_literal_ignore(self,ctx):\n \n \n \n self.general_op_literal(ctx,operator.eq,ctx.state.lower)\n return True\n \n def op_literal_uni_ignore(self,ctx):\n self.general_op_literal(ctx,operator.eq,ctx.state.lower)\n return True\n \n def op_not_literal_ignore(self,ctx):\n \n \n \n self.general_op_literal(ctx,operator.ne,ctx.state.lower)\n return True\n \n def op_at(self,ctx):\n \n \n \n if not self.at_dispatcher.dispatch(ctx.peek_code(1),ctx):\n ctx.has_matched=False\n \n return True\n ctx.skip_code(2)\n return True\n \n def op_category(self,ctx):\n \n \n \n if ctx.at_end()or not self.ch_dispatcher.dispatch(ctx.peek_code(1),ctx):\n ctx.has_matched=False\n \n return True\n ctx.skip_code(2)\n ctx.skip_char(1)\n return True\n \n def op_any(self,ctx):\n \n \n \n if ctx.at_end()or ctx.at_linebreak():\n ctx.has_matched=False\n \n return True\n ctx.skip_code(1)\n 
ctx.skip_char(1)\n return True\n \n def op_any_all(self,ctx):\n \n \n \n if ctx.at_end():\n ctx.has_matched=False\n \n return True\n ctx.skip_code(1)\n ctx.skip_char(1)\n return True\n \n def general_op_in(self,ctx,decorate=lambda x:x):\n \n \n if ctx.at_end():\n ctx.has_matched=False\n \n return\n skip=ctx.peek_code(1)\n ctx.skip_code(2)\n \n \n if not self.check_charset(ctx,decorate(ord(ctx.peek_char()))):\n \n ctx.has_matched=False\n return\n ctx.skip_code(skip -1)\n ctx.skip_char(1)\n \n \n def op_in(self,ctx):\n \n \n \n self.general_op_in(ctx)\n return True\n \n def op_in_ignore(self,ctx):\n \n \n \n self.general_op_in(ctx,ctx.state.lower)\n return True\n \n def op_in_uni_ignore(self,ctx):\n self.general_op_in(ctx,ctx.state.lower)\n return True\n \n def op_jump(self,ctx):\n \n \n \n ctx.skip_code(ctx.peek_code(1)+1)\n return True\n \n \n \n op_info=op_jump\n \n def op_mark(self,ctx):\n \n \n \n ctx.state.set_mark(ctx.peek_code(1),ctx.string_position)\n ctx.skip_code(2)\n return True\n \n def op_branch(self,ctx):\n \n \n \n ctx.state.marks_push()\n ctx.skip_code(1)\n current_branch_length=ctx.peek_code(0)\n while current_branch_length:\n \n \n if not (OPCODES[ctx.peek_code(1)].name ==\"literal\"and\\\n (ctx.at_end()or ctx.peek_code(2)!=ord(ctx.peek_char()))):\n ctx.state.string_position=ctx.string_position\n child_context=ctx.push_new_context(1)\n \n yield False\n if child_context.has_matched:\n ctx.has_matched=True\n yield True\n ctx.state.marks_pop_keep()\n ctx.skip_code(current_branch_length)\n current_branch_length=ctx.peek_code(0)\n ctx.state.marks_pop_discard()\n ctx.has_matched=False\n \n yield True\n \n def op_repeat_one(self,ctx):\n \n \n \n \n mincount=ctx.peek_code(2)\n maxcount=ctx.peek_code(3)\n \n \n \n if ctx.remaining_chars()=mincount and\\\n (ctx.at_end()or ord(ctx.peek_char())!=char):\n ctx.skip_char(-1)\n count -=1\n if count =mincount:\n ctx.state.string_position=ctx.string_position\n child_context=ctx.push_new_context(ctx.peek_code(1)+1)\n yield False\n if child_context.has_matched:\n ctx.has_matched=True\n yield True\n ctx.skip_char(-1)\n count -=1\n ctx.state.marks_pop_keep()\n \n ctx.state.marks_pop_discard()\n ctx.has_matched=False\n \n yield True\n \n def op_min_repeat_one(self,ctx):\n \n \n mincount=ctx.peek_code(2)\n maxcount=ctx.peek_code(3)\n \n \n if ctx.remaining_chars()=maxcount and maxcount !=MAXREPEAT:\n ctx.has_matched=False\n \n yield True\n repeat.count=count\n child_context=repeat.push_new_context(4)\n yield False\n ctx.has_matched=child_context.has_matched\n if not ctx.has_matched:\n repeat.count=count -1\n ctx.state.string_position=ctx.string_position\n yield True\n \n def general_op_groupref(self,ctx,decorate=lambda x:x):\n group_start,group_end=ctx.state.get_marks(ctx.peek_code(1))\n if group_start is None or group_end is None or group_end =0:\n child_context=ctx.push_new_context(3)\n yield False\n if child_context.has_matched:\n ctx.has_matched=False\n yield True\n ctx.skip_code(ctx.peek_code(1)+1)\n yield True\n \n def unknown(self,ctx):\n \n raise RuntimeError(\"Internal re error. 
Unknown opcode: %s\"%ctx.peek_code())\n \n def check_charset(self,ctx,char):\n ''\n \n self.set_dispatcher.reset(char)\n save_position=ctx.code_position\n result=None\n while result is None :\n result=self.set_dispatcher.dispatch(ctx.peek_code(),ctx)\n ctx.code_position=save_position\n \n return result\n \n def count_repetitions(self,ctx,maxcount):\n ''\n\n \n count=0\n real_maxcount=ctx.state.end -ctx.string_position\n if maxcount >4)\\\n &(1 <<(char_code&15)):\n return self.ok\n ctx.skip_code(16)\n else :\n if char_code <256 and ctx.peek_code(char_code >>5)\\\n &(1 <<(char_code&31)):\n return self.ok\n ctx.skip_code(8)\n def set_range(self,ctx):\n \n if ctx.peek_code(1)<=self.char <=ctx.peek_code(2):\n return self.ok\n ctx.skip_code(3)\n def set_negate(self,ctx):\n self.ok=not self.ok\n ctx.skip_code(1)\n \n def set_bigcharset(self,ctx):\n \n char_code=self.char\n count=ctx.peek_code(1)\n ctx.skip_code(2)\n if char_code <65536:\n block_index=char_code >>8\n \n a=array.array(\"B\")\n a.fromstring(array.array(CODESIZE ==2 and \"H\"or \"I\",\n [ctx.peek_code(block_index //CODESIZE)]).tostring())\n block=a[block_index %CODESIZE]\n ctx.skip_code(256 //CODESIZE)\n block_value=ctx.peek_code(block *(32 //CODESIZE)\n +((char_code&255)>>(CODESIZE ==2 and 4 or 5)))\n if block_value&(1 <<(char_code&((8 *CODESIZE)-1))):\n return self.ok\n else :\n ctx.skip_code(256 //CODESIZE)\n ctx.skip_code(count *(32 //CODESIZE))\n \n def unknown(self,ctx):\n return False\n \n_CharsetDispatcher.build_dispatch_table(OPCODES,\"set_\")\n\n\nclass _AtcodeDispatcher(_Dispatcher):\n\n def at_beginning(self,ctx):\n return ctx.at_beginning()\n at_beginning_string=at_beginning\n def at_beginning_line(self,ctx):\n return ctx.at_beginning()or _is_linebreak(ctx.peek_char(-1))\n def at_end(self,ctx):\n return (ctx.remaining_chars()==1 and ctx.at_linebreak())or ctx.at_end()\n def at_end_line(self,ctx):\n return ctx.at_linebreak()or ctx.at_end()\n def at_end_string(self,ctx):\n return ctx.at_end()\n def at_boundary(self,ctx):\n return ctx.at_boundary(_is_word)\n def at_non_boundary(self,ctx):\n return not ctx.at_boundary(_is_word)\n def at_loc_boundary(self,ctx):\n return ctx.at_boundary(_is_loc_word)\n def at_loc_non_boundary(self,ctx):\n return not ctx.at_boundary(_is_loc_word)\n def at_uni_boundary(self,ctx):\n return ctx.at_boundary(_is_uni_word)\n def at_uni_non_boundary(self,ctx):\n return not ctx.at_boundary(_is_uni_word)\n def unknown(self,ctx):\n return False\n \n_AtcodeDispatcher.build_dispatch_table(ATCODES,\"\")\n\n\nclass _ChcodeDispatcher(_Dispatcher):\n\n def category_digit(self,ctx):\n return _is_digit(ctx.peek_char())\n def category_not_digit(self,ctx):\n return not _is_digit(ctx.peek_char())\n def category_space(self,ctx):\n return _is_space(ctx.peek_char())\n def category_not_space(self,ctx):\n return not _is_space(ctx.peek_char())\n def category_word(self,ctx):\n return _is_word(ctx.peek_char())\n def category_not_word(self,ctx):\n return not _is_word(ctx.peek_char())\n def category_linebreak(self,ctx):\n return _is_linebreak(ctx.peek_char())\n def category_not_linebreak(self,ctx):\n return not _is_linebreak(ctx.peek_char())\n def category_loc_word(self,ctx):\n return _is_loc_word(ctx.peek_char())\n def category_loc_not_word(self,ctx):\n return not _is_loc_word(ctx.peek_char())\n def category_uni_digit(self,ctx):\n return ctx.peek_char().isdigit()\n def category_uni_not_digit(self,ctx):\n return not ctx.peek_char().isdigit()\n def category_uni_space(self,ctx):\n return ctx.peek_char().isspace()\n def 
category_uni_not_space(self,ctx):\n return not ctx.peek_char().isspace()\n def category_uni_word(self,ctx):\n return _is_uni_word(ctx.peek_char())\n def category_uni_not_word(self,ctx):\n return not _is_uni_word(ctx.peek_char())\n def category_uni_linebreak(self,ctx):\n return ord(ctx.peek_char())in _uni_linebreaks\n def category_uni_not_linebreak(self,ctx):\n return ord(ctx.peek_char())not in _uni_linebreaks\n def unknown(self,ctx):\n return False\n \n_ChcodeDispatcher.build_dispatch_table(CHCODES,\"\")\n\n\n_ascii_char_info=[0,0,0,0,0,0,0,0,0,2,6,2,\n2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,\n0,0,0,0,0,0,0,0,0,0,0,0,0,25,25,25,25,25,25,25,25,\n25,25,0,0,0,0,0,0,0,24,24,24,24,24,24,24,24,24,24,\n24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,0,0,\n0,0,16,0,24,24,24,24,24,24,24,24,24,24,24,24,24,24,\n24,24,24,24,24,24,24,24,24,24,24,24,0,0,0,0,0]\n\ndef _is_digit(char):\n code=ord(char)\n return code <128 and _ascii_char_info[code]&1\n \ndef _is_space(char):\n code=ord(char)\n return code <128 and _ascii_char_info[code]&2\n \ndef _is_word(char):\n\n code=ord(char)\n return code <128 and _ascii_char_info[code]&16\n \ndef _is_loc_word(char):\n return (not (ord(char)&~255)and char.isalnum())or char =='_'\n \ndef _is_uni_word(char):\n\n\n return chr(ord(char)).isalnum()or char =='_'\n \ndef _is_linebreak(char):\n return char ==\"\\n\"\n \n \n_uni_linebreaks=[10,13,28,29,30,133,8232,8233]\n\ndef _log(message):\n if 0:\n print(message)\n", ["_sre_utils", "array", "operator", "re", "sre", "sre_constants", "sys"]], "encoding_cp932": [".js", "var _table = [0x00, 0x0000,0x01, 0x0001,0x02, 0x0002,0x03, 0x0003,0x04, 0x0004,0x05, 0x0005,0x06, 0x0006,0x07, 0x0007,0x08, 0x0008,0x09, 0x0009,0x0A, 0x000A,0x0B, 0x000B,0x0C, 0x000C,0x0D, 0x000D,0x0E, 0x000E,0x0F, 0x000F,0x10, 0x0010,0x11, 0x0011,0x12, 0x0012,0x13, 0x0013,0x14, 0x0014,0x15, 0x0015,0x16, 0x0016,0x17, 0x0017,0x18, 0x0018,0x19, 0x0019,0x1A, 0x001A,0x1B, 0x001B,0x1C, 0x001C,0x1D, 0x001D,0x1E, 0x001E,0x1F, 0x001F,0x20, 0x0020,0x21, 0x0021,0x22, 0x0022,0x23, 0x0023,0x24, 0x0024,0x25, 0x0025,0x26, 0x0026,0x27, 0x0027,0x28, 0x0028,0x29, 0x0029,0x2A, 0x002A,0x2B, 0x002B,0x2C, 0x002C,0x2D, 0x002D,0x2E, 0x002E,0x2F, 0x002F,0x30, 0x0030,0x31, 0x0031,0x32, 0x0032,0x33, 0x0033,0x34, 0x0034,0x35, 0x0035,0x36, 0x0036,0x37, 0x0037,0x38, 0x0038,0x39, 0x0039,0x3A, 0x003A,0x3B, 0x003B,0x3C, 0x003C,0x3D, 0x003D,0x3E, 0x003E,0x3F, 0x003F,0x40, 0x0040,0x41, 0x0041,0x42, 0x0042,0x43, 0x0043,0x44, 0x0044,0x45, 0x0045,0x46, 0x0046,0x47, 0x0047,0x48, 0x0048,0x49, 0x0049,0x4A, 0x004A,0x4B, 0x004B,0x4C, 0x004C,0x4D, 0x004D,0x4E, 0x004E,0x4F, 0x004F,0x50, 0x0050,0x51, 0x0051,0x52, 0x0052,0x53, 0x0053,0x54, 0x0054,0x55, 0x0055,0x56, 0x0056,0x57, 0x0057,0x58, 0x0058,0x59, 0x0059,0x5A, 0x005A,0x5B, 0x005B,0x5C, 0x005C,0x5D, 0x005D,0x5E, 0x005E,0x5F, 0x005F,0x60, 0x0060,0x61, 0x0061,0x62, 0x0062,0x63, 0x0063,0x64, 0x0064,0x65, 0x0065,0x66, 0x0066,0x67, 0x0067,0x68, 0x0068,0x69, 0x0069,0x6A, 0x006A,0x6B, 0x006B,0x6C, 0x006C,0x6D, 0x006D,0x6E, 0x006E,0x6F, 0x006F,0x70, 0x0070,0x71, 0x0071,0x72, 0x0072,0x73, 0x0073,0x74, 0x0074,0x75, 0x0075,0x76, 0x0076,0x77, 0x0077,0x78, 0x0078,0x79, 0x0079,0x7A, 0x007A,0x7B, 0x007B,0x7C, 0x007C,0x7D, 0x007D,0x7E, 0x007E,0x7F, 0x007F,0x80, -1,0x81, -1,0x82, -1,0x83, -1,0x84, -1,0x85, -1,0x86, -1,0x87, -1,0x88, -1,0x89, -1,0x8A, -1,0x8B, -1,0x8C, -1,0x8D, -1,0x8E, -1,0x8F, -1,0x90, -1,0x91, -1,0x92, -1,0x93, -1,0x94, -1,0x95, -1,0x96, -1,0x97, -1,0x98, -1,0x99, -1,0x9A, -1,0x9B, -1,0x9C, -1,0x9D, -1,0x9E, -1,0x9F, -1,0xA0, -1,0xA1, 
0xFF61,0xA2, 0xFF62,0xA3, 0xFF63,0xA4, 0xFF64,0xA5, 0xFF65,0xA6, 0xFF66,0xA7, 0xFF67,0xA8, 0xFF68,0xA9, 0xFF69,0xAA, 0xFF6A,0xAB, 0xFF6B,0xAC, 0xFF6C,0xAD, 0xFF6D,0xAE, 0xFF6E,0xAF, 0xFF6F,0xB0, 0xFF70,0xB1, 0xFF71,0xB2, 0xFF72,0xB3, 0xFF73,0xB4, 0xFF74,0xB5, 0xFF75,0xB6, 0xFF76,0xB7, 0xFF77,0xB8, 0xFF78,0xB9, 0xFF79,0xBA, 0xFF7A,0xBB, 0xFF7B,0xBC, 0xFF7C,0xBD, 0xFF7D,0xBE, 0xFF7E,0xBF, 0xFF7F,0xC0, 0xFF80,0xC1, 0xFF81,0xC2, 0xFF82,0xC3, 0xFF83,0xC4, 0xFF84,0xC5, 0xFF85,0xC6, 0xFF86,0xC7, 0xFF87,0xC8, 0xFF88,0xC9, 0xFF89,0xCA, 0xFF8A,0xCB, 0xFF8B,0xCC, 0xFF8C,0xCD, 0xFF8D,0xCE, 0xFF8E,0xCF, 0xFF8F,0xD0, 0xFF90,0xD1, 0xFF91,0xD2, 0xFF92,0xD3, 0xFF93,0xD4, 0xFF94,0xD5, 0xFF95,0xD6, 0xFF96,0xD7, 0xFF97,0xD8, 0xFF98,0xD9, 0xFF99,0xDA, 0xFF9A,0xDB, 0xFF9B,0xDC, 0xFF9C,0xDD, 0xFF9D,0xDE, 0xFF9E,0xDF, 0xFF9F,0xE0, -1,0xE1, -1,0xE2, -1,0xE3, -1,0xE4, -1,0xE5, -1,0xE6, -1,0xE7, -1,0xE8, -1,0xE9, -1,0xEA, -1,0xEB, -1,0xEC, -1,0xED, -1,0xEE, -1,0xEF, -1,0xF0, -1,0xF1, -1,0xF2, -1,0xF3, -1,0xF4, -1,0xF5, -1,0xF6, -1,0xF7, -1,0xF8, -1,0xF9, -1,0xFA, -1,0xFB, -1,0xFC, -1,0xFD, -1,0xFE, -1,0xFF, -1,0x8140, 0x3000,0x8141, 0x3001,0x8142, 0x3002,0x8143, 0xFF0C,0x8144, 0xFF0E,0x8145, 0x30FB,0x8146, 0xFF1A,0x8147, 0xFF1B,0x8148, 0xFF1F,0x8149, 0xFF01,0x814A, 0x309B,0x814B, 0x309C,0x814C, 0x00B4,0x814D, 0xFF40,0x814E, 0x00A8,0x814F, 0xFF3E,0x8150, 0xFFE3,0x8151, 0xFF3F,0x8152, 0x30FD,0x8153, 0x30FE,0x8154, 0x309D,0x8155, 0x309E,0x8156, 0x3003,0x8157, 0x4EDD,0x8158, 0x3005,0x8159, 0x3006,0x815A, 0x3007,0x815B, 0x30FC,0x815C, 0x2015,0x815D, 0x2010,0x815E, 0xFF0F,0x815F, 0xFF3C,0x8160, 0xFF5E,0x8161, 0x2225,0x8162, 0xFF5C,0x8163, 0x2026,0x8164, 0x2025,0x8165, 0x2018,0x8166, 0x2019,0x8167, 0x201C,0x8168, 0x201D,0x8169, 0xFF08,0x816A, 0xFF09,0x816B, 0x3014,0x816C, 0x3015,0x816D, 0xFF3B,0x816E, 0xFF3D,0x816F, 0xFF5B,0x8170, 0xFF5D,0x8171, 0x3008,0x8172, 0x3009,0x8173, 0x300A,0x8174, 0x300B,0x8175, 0x300C,0x8176, 0x300D,0x8177, 0x300E,0x8178, 0x300F,0x8179, 0x3010,0x817A, 0x3011,0x817B, 0xFF0B,0x817C, 0xFF0D,0x817D, 0x00B1,0x817E, 0x00D7,0x8180, 0x00F7,0x8181, 0xFF1D,0x8182, 0x2260,0x8183, 0xFF1C,0x8184, 0xFF1E,0x8185, 0x2266,0x8186, 0x2267,0x8187, 0x221E,0x8188, 0x2234,0x8189, 0x2642,0x818A, 0x2640,0x818B, 0x00B0,0x818C, 0x2032,0x818D, 0x2033,0x818E, 0x2103,0x818F, 0xFFE5,0x8190, 0xFF04,0x8191, 0xFFE0,0x8192, 0xFFE1,0x8193, 0xFF05,0x8194, 0xFF03,0x8195, 0xFF06,0x8196, 0xFF0A,0x8197, 0xFF20,0x8198, 0x00A7,0x8199, 0x2606,0x819A, 0x2605,0x819B, 0x25CB,0x819C, 0x25CF,0x819D, 0x25CE,0x819E, 0x25C7,0x819F, 0x25C6,0x81A0, 0x25A1,0x81A1, 0x25A0,0x81A2, 0x25B3,0x81A3, 0x25B2,0x81A4, 0x25BD,0x81A5, 0x25BC,0x81A6, 0x203B,0x81A7, 0x3012,0x81A8, 0x2192,0x81A9, 0x2190,0x81AA, 0x2191,0x81AB, 0x2193,0x81AC, 0x3013,0x81B8, 0x2208,0x81B9, 0x220B,0x81BA, 0x2286,0x81BB, 0x2287,0x81BC, 0x2282,0x81BD, 0x2283,0x81BE, 0x222A,0x81BF, 0x2229,0x81C8, 0x2227,0x81C9, 0x2228,0x81CA, 0xFFE2,0x81CB, 0x21D2,0x81CC, 0x21D4,0x81CD, 0x2200,0x81CE, 0x2203,0x81DA, 0x2220,0x81DB, 0x22A5,0x81DC, 0x2312,0x81DD, 0x2202,0x81DE, 0x2207,0x81DF, 0x2261,0x81E0, 0x2252,0x81E1, 0x226A,0x81E2, 0x226B,0x81E3, 0x221A,0x81E4, 0x223D,0x81E5, 0x221D,0x81E6, 0x2235,0x81E7, 0x222B,0x81E8, 0x222C,0x81F0, 0x212B,0x81F1, 0x2030,0x81F2, 0x266F,0x81F3, 0x266D,0x81F4, 0x266A,0x81F5, 0x2020,0x81F6, 0x2021,0x81F7, 0x00B6,0x81FC, 0x25EF,0x824F, 0xFF10,0x8250, 0xFF11,0x8251, 0xFF12,0x8252, 0xFF13,0x8253, 0xFF14,0x8254, 0xFF15,0x8255, 0xFF16,0x8256, 0xFF17,0x8257, 0xFF18,0x8258, 0xFF19,0x8260, 0xFF21,0x8261, 0xFF22,0x8262, 0xFF23,0x8263, 0xFF24,0x8264, 0xFF25,0x8265, 0xFF26,0x8266, 
0xFF27,0x8267, 0xFF28,0x8268, 0xFF29,0x8269, 0xFF2A,0x826A, 0xFF2B,0x826B, 0xFF2C,0x826C, 0xFF2D,0x826D, 0xFF2E,0x826E, 0xFF2F,0x826F, 0xFF30,0x8270, 0xFF31,0x8271, 0xFF32,0x8272, 0xFF33,0x8273, 0xFF34,0x8274, 0xFF35,0x8275, 0xFF36,0x8276, 0xFF37,0x8277, 0xFF38,0x8278, 0xFF39,0x8279, 0xFF3A,0x8281, 0xFF41,0x8282, 0xFF42,0x8283, 0xFF43,0x8284, 0xFF44,0x8285, 0xFF45,0x8286, 0xFF46,0x8287, 0xFF47,0x8288, 0xFF48,0x8289, 0xFF49,0x828A, 0xFF4A,0x828B, 0xFF4B,0x828C, 0xFF4C,0x828D, 0xFF4D,0x828E, 0xFF4E,0x828F, 0xFF4F,0x8290, 0xFF50,0x8291, 0xFF51,0x8292, 0xFF52,0x8293, 0xFF53,0x8294, 0xFF54,0x8295, 0xFF55,0x8296, 0xFF56,0x8297, 0xFF57,0x8298, 0xFF58,0x8299, 0xFF59,0x829A, 0xFF5A,0x829F, 0x3041,0x82A0, 0x3042,0x82A1, 0x3043,0x82A2, 0x3044,0x82A3, 0x3045,0x82A4, 0x3046,0x82A5, 0x3047,0x82A6, 0x3048,0x82A7, 0x3049,0x82A8, 0x304A,0x82A9, 0x304B,0x82AA, 0x304C,0x82AB, 0x304D,0x82AC, 0x304E,0x82AD, 0x304F,0x82AE, 0x3050,0x82AF, 0x3051,0x82B0, 0x3052,0x82B1, 0x3053,0x82B2, 0x3054,0x82B3, 0x3055,0x82B4, 0x3056,0x82B5, 0x3057,0x82B6, 0x3058,0x82B7, 0x3059,0x82B8, 0x305A,0x82B9, 0x305B,0x82BA, 0x305C,0x82BB, 0x305D,0x82BC, 0x305E,0x82BD, 0x305F,0x82BE, 0x3060,0x82BF, 0x3061,0x82C0, 0x3062,0x82C1, 0x3063,0x82C2, 0x3064,0x82C3, 0x3065,0x82C4, 0x3066,0x82C5, 0x3067,0x82C6, 0x3068,0x82C7, 0x3069,0x82C8, 0x306A,0x82C9, 0x306B,0x82CA, 0x306C,0x82CB, 0x306D,0x82CC, 0x306E,0x82CD, 0x306F,0x82CE, 0x3070,0x82CF, 0x3071,0x82D0, 0x3072,0x82D1, 0x3073,0x82D2, 0x3074,0x82D3, 0x3075,0x82D4, 0x3076,0x82D5, 0x3077,0x82D6, 0x3078,0x82D7, 0x3079,0x82D8, 0x307A,0x82D9, 0x307B,0x82DA, 0x307C,0x82DB, 0x307D,0x82DC, 0x307E,0x82DD, 0x307F,0x82DE, 0x3080,0x82DF, 0x3081,0x82E0, 0x3082,0x82E1, 0x3083,0x82E2, 0x3084,0x82E3, 0x3085,0x82E4, 0x3086,0x82E5, 0x3087,0x82E6, 0x3088,0x82E7, 0x3089,0x82E8, 0x308A,0x82E9, 0x308B,0x82EA, 0x308C,0x82EB, 0x308D,0x82EC, 0x308E,0x82ED, 0x308F,0x82EE, 0x3090,0x82EF, 0x3091,0x82F0, 0x3092,0x82F1, 0x3093,0x8340, 0x30A1,0x8341, 0x30A2,0x8342, 0x30A3,0x8343, 0x30A4,0x8344, 0x30A5,0x8345, 0x30A6,0x8346, 0x30A7,0x8347, 0x30A8,0x8348, 0x30A9,0x8349, 0x30AA,0x834A, 0x30AB,0x834B, 0x30AC,0x834C, 0x30AD,0x834D, 0x30AE,0x834E, 0x30AF,0x834F, 0x30B0,0x8350, 0x30B1,0x8351, 0x30B2,0x8352, 0x30B3,0x8353, 0x30B4,0x8354, 0x30B5,0x8355, 0x30B6,0x8356, 0x30B7,0x8357, 0x30B8,0x8358, 0x30B9,0x8359, 0x30BA,0x835A, 0x30BB,0x835B, 0x30BC,0x835C, 0x30BD,0x835D, 0x30BE,0x835E, 0x30BF,0x835F, 0x30C0,0x8360, 0x30C1,0x8361, 0x30C2,0x8362, 0x30C3,0x8363, 0x30C4,0x8364, 0x30C5,0x8365, 0x30C6,0x8366, 0x30C7,0x8367, 0x30C8,0x8368, 0x30C9,0x8369, 0x30CA,0x836A, 0x30CB,0x836B, 0x30CC,0x836C, 0x30CD,0x836D, 0x30CE,0x836E, 0x30CF,0x836F, 0x30D0,0x8370, 0x30D1,0x8371, 0x30D2,0x8372, 0x30D3,0x8373, 0x30D4,0x8374, 0x30D5,0x8375, 0x30D6,0x8376, 0x30D7,0x8377, 0x30D8,0x8378, 0x30D9,0x8379, 0x30DA,0x837A, 0x30DB,0x837B, 0x30DC,0x837C, 0x30DD,0x837D, 0x30DE,0x837E, 0x30DF,0x8380, 0x30E0,0x8381, 0x30E1,0x8382, 0x30E2,0x8383, 0x30E3,0x8384, 0x30E4,0x8385, 0x30E5,0x8386, 0x30E6,0x8387, 0x30E7,0x8388, 0x30E8,0x8389, 0x30E9,0x838A, 0x30EA,0x838B, 0x30EB,0x838C, 0x30EC,0x838D, 0x30ED,0x838E, 0x30EE,0x838F, 0x30EF,0x8390, 0x30F0,0x8391, 0x30F1,0x8392, 0x30F2,0x8393, 0x30F3,0x8394, 0x30F4,0x8395, 0x30F5,0x8396, 0x30F6,0x839F, 0x0391,0x83A0, 0x0392,0x83A1, 0x0393,0x83A2, 0x0394,0x83A3, 0x0395,0x83A4, 0x0396,0x83A5, 0x0397,0x83A6, 0x0398,0x83A7, 0x0399,0x83A8, 0x039A,0x83A9, 0x039B,0x83AA, 0x039C,0x83AB, 0x039D,0x83AC, 0x039E,0x83AD, 0x039F,0x83AE, 0x03A0,0x83AF, 0x03A1,0x83B0, 0x03A3,0x83B1, 0x03A4,0x83B2, 0x03A5,0x83B3, 0x03A6,0x83B4, 0x03A7,0x83B5, 
0x03A8,0x83B6, 0x03A9,0x83BF, 0x03B1,0x83C0, 0x03B2,0x83C1, 0x03B3,0x83C2, 0x03B4,0x83C3, 0x03B5,0x83C4, 0x03B6,0x83C5, 0x03B7,0x83C6, 0x03B8,0x83C7, 0x03B9,0x83C8, 0x03BA,0x83C9, 0x03BB,0x83CA, 0x03BC,0x83CB, 0x03BD,0x83CC, 0x03BE,0x83CD, 0x03BF,0x83CE, 0x03C0,0x83CF, 0x03C1,0x83D0, 0x03C3,0x83D1, 0x03C4,0x83D2, 0x03C5,0x83D3, 0x03C6,0x83D4, 0x03C7,0x83D5, 0x03C8,0x83D6, 0x03C9,0x8440, 0x0410,0x8441, 0x0411,0x8442, 0x0412,0x8443, 0x0413,0x8444, 0x0414,0x8445, 0x0415,0x8446, 0x0401,0x8447, 0x0416,0x8448, 0x0417,0x8449, 0x0418,0x844A, 0x0419,0x844B, 0x041A,0x844C, 0x041B,0x844D, 0x041C,0x844E, 0x041D,0x844F, 0x041E,0x8450, 0x041F,0x8451, 0x0420,0x8452, 0x0421,0x8453, 0x0422,0x8454, 0x0423,0x8455, 0x0424,0x8456, 0x0425,0x8457, 0x0426,0x8458, 0x0427,0x8459, 0x0428,0x845A, 0x0429,0x845B, 0x042A,0x845C, 0x042B,0x845D, 0x042C,0x845E, 0x042D,0x845F, 0x042E,0x8460, 0x042F,0x8470, 0x0430,0x8471, 0x0431,0x8472, 0x0432,0x8473, 0x0433,0x8474, 0x0434,0x8475, 0x0435,0x8476, 0x0451,0x8477, 0x0436,0x8478, 0x0437,0x8479, 0x0438,0x847A, 0x0439,0x847B, 0x043A,0x847C, 0x043B,0x847D, 0x043C,0x847E, 0x043D,0x8480, 0x043E,0x8481, 0x043F,0x8482, 0x0440,0x8483, 0x0441,0x8484, 0x0442,0x8485, 0x0443,0x8486, 0x0444,0x8487, 0x0445,0x8488, 0x0446,0x8489, 0x0447,0x848A, 0x0448,0x848B, 0x0449,0x848C, 0x044A,0x848D, 0x044B,0x848E, 0x044C,0x848F, 0x044D,0x8490, 0x044E,0x8491, 0x044F,0x849F, 0x2500,0x84A0, 0x2502,0x84A1, 0x250C,0x84A2, 0x2510,0x84A3, 0x2518,0x84A4, 0x2514,0x84A5, 0x251C,0x84A6, 0x252C,0x84A7, 0x2524,0x84A8, 0x2534,0x84A9, 0x253C,0x84AA, 0x2501,0x84AB, 0x2503,0x84AC, 0x250F,0x84AD, 0x2513,0x84AE, 0x251B,0x84AF, 0x2517,0x84B0, 0x2523,0x84B1, 0x2533,0x84B2, 0x252B,0x84B3, 0x253B,0x84B4, 0x254B,0x84B5, 0x2520,0x84B6, 0x252F,0x84B7, 0x2528,0x84B8, 0x2537,0x84B9, 0x253F,0x84BA, 0x251D,0x84BB, 0x2530,0x84BC, 0x2525,0x84BD, 0x2538,0x84BE, 0x2542,0x8740, 0x2460,0x8741, 0x2461,0x8742, 0x2462,0x8743, 0x2463,0x8744, 0x2464,0x8745, 0x2465,0x8746, 0x2466,0x8747, 0x2467,0x8748, 0x2468,0x8749, 0x2469,0x874A, 0x246A,0x874B, 0x246B,0x874C, 0x246C,0x874D, 0x246D,0x874E, 0x246E,0x874F, 0x246F,0x8750, 0x2470,0x8751, 0x2471,0x8752, 0x2472,0x8753, 0x2473,0x8754, 0x2160,0x8755, 0x2161,0x8756, 0x2162,0x8757, 0x2163,0x8758, 0x2164,0x8759, 0x2165,0x875A, 0x2166,0x875B, 0x2167,0x875C, 0x2168,0x875D, 0x2169,0x875F, 0x3349,0x8760, 0x3314,0x8761, 0x3322,0x8762, 0x334D,0x8763, 0x3318,0x8764, 0x3327,0x8765, 0x3303,0x8766, 0x3336,0x8767, 0x3351,0x8768, 0x3357,0x8769, 0x330D,0x876A, 0x3326,0x876B, 0x3323,0x876C, 0x332B,0x876D, 0x334A,0x876E, 0x333B,0x876F, 0x339C,0x8770, 0x339D,0x8771, 0x339E,0x8772, 0x338E,0x8773, 0x338F,0x8774, 0x33C4,0x8775, 0x33A1,0x877E, 0x337B,0x8780, 0x301D,0x8781, 0x301F,0x8782, 0x2116,0x8783, 0x33CD,0x8784, 0x2121,0x8785, 0x32A4,0x8786, 0x32A5,0x8787, 0x32A6,0x8788, 0x32A7,0x8789, 0x32A8,0x878A, 0x3231,0x878B, 0x3232,0x878C, 0x3239,0x878D, 0x337E,0x878E, 0x337D,0x878F, 0x337C,0x8790, 0x2252,0x8791, 0x2261,0x8792, 0x222B,0x8793, 0x222E,0x8794, 0x2211,0x8795, 0x221A,0x8796, 0x22A5,0x8797, 0x2220,0x8798, 0x221F,0x8799, 0x22BF,0x879A, 0x2235,0x879B, 0x2229,0x879C, 0x222A,0x889F, 0x4E9C,0x88A0, 0x5516,0x88A1, 0x5A03,0x88A2, 0x963F,0x88A3, 0x54C0,0x88A4, 0x611B,0x88A5, 0x6328,0x88A6, 0x59F6,0x88A7, 0x9022,0x88A8, 0x8475,0x88A9, 0x831C,0x88AA, 0x7A50,0x88AB, 0x60AA,0x88AC, 0x63E1,0x88AD, 0x6E25,0x88AE, 0x65ED,0x88AF, 0x8466,0x88B0, 0x82A6,0x88B1, 0x9BF5,0x88B2, 0x6893,0x88B3, 0x5727,0x88B4, 0x65A1,0x88B5, 0x6271,0x88B6, 0x5B9B,0x88B7, 0x59D0,0x88B8, 0x867B,0x88B9, 0x98F4,0x88BA, 0x7D62,0x88BB, 0x7DBE,0x88BC, 0x9B8E,0x88BD, 
0x6216,0x88BE, 0x7C9F,0x88BF, 0x88B7,0x88C0, 0x5B89,0x88C1, 0x5EB5,0x88C2, 0x6309,0x88C3, 0x6697,0x88C4, 0x6848,0x88C5, 0x95C7,0x88C6, 0x978D,0x88C7, 0x674F,0x88C8, 0x4EE5,0x88C9, 0x4F0A,0x88CA, 0x4F4D,0x88CB, 0x4F9D,0x88CC, 0x5049,0x88CD, 0x56F2,0x88CE, 0x5937,0x88CF, 0x59D4,0x88D0, 0x5A01,0x88D1, 0x5C09,0x88D2, 0x60DF,0x88D3, 0x610F,0x88D4, 0x6170,0x88D5, 0x6613,0x88D6, 0x6905,0x88D7, 0x70BA,0x88D8, 0x754F,0x88D9, 0x7570,0x88DA, 0x79FB,0x88DB, 0x7DAD,0x88DC, 0x7DEF,0x88DD, 0x80C3,0x88DE, 0x840E,0x88DF, 0x8863,0x88E0, 0x8B02,0x88E1, 0x9055,0x88E2, 0x907A,0x88E3, 0x533B,0x88E4, 0x4E95,0x88E5, 0x4EA5,0x88E6, 0x57DF,0x88E7, 0x80B2,0x88E8, 0x90C1,0x88E9, 0x78EF,0x88EA, 0x4E00,0x88EB, 0x58F1,0x88EC, 0x6EA2,0x88ED, 0x9038,0x88EE, 0x7A32,0x88EF, 0x8328,0x88F0, 0x828B,0x88F1, 0x9C2F,0x88F2, 0x5141,0x88F3, 0x5370,0x88F4, 0x54BD,0x88F5, 0x54E1,0x88F6, 0x56E0,0x88F7, 0x59FB,0x88F8, 0x5F15,0x88F9, 0x98F2,0x88FA, 0x6DEB,0x88FB, 0x80E4,0x88FC, 0x852D,0x8940, 0x9662,0x8941, 0x9670,0x8942, 0x96A0,0x8943, 0x97FB,0x8944, 0x540B,0x8945, 0x53F3,0x8946, 0x5B87,0x8947, 0x70CF,0x8948, 0x7FBD,0x8949, 0x8FC2,0x894A, 0x96E8,0x894B, 0x536F,0x894C, 0x9D5C,0x894D, 0x7ABA,0x894E, 0x4E11,0x894F, 0x7893,0x8950, 0x81FC,0x8951, 0x6E26,0x8952, 0x5618,0x8953, 0x5504,0x8954, 0x6B1D,0x8955, 0x851A,0x8956, 0x9C3B,0x8957, 0x59E5,0x8958, 0x53A9,0x8959, 0x6D66,0x895A, 0x74DC,0x895B, 0x958F,0x895C, 0x5642,0x895D, 0x4E91,0x895E, 0x904B,0x895F, 0x96F2,0x8960, 0x834F,0x8961, 0x990C,0x8962, 0x53E1,0x8963, 0x55B6,0x8964, 0x5B30,0x8965, 0x5F71,0x8966, 0x6620,0x8967, 0x66F3,0x8968, 0x6804,0x8969, 0x6C38,0x896A, 0x6CF3,0x896B, 0x6D29,0x896C, 0x745B,0x896D, 0x76C8,0x896E, 0x7A4E,0x896F, 0x9834,0x8970, 0x82F1,0x8971, 0x885B,0x8972, 0x8A60,0x8973, 0x92ED,0x8974, 0x6DB2,0x8975, 0x75AB,0x8976, 0x76CA,0x8977, 0x99C5,0x8978, 0x60A6,0x8979, 0x8B01,0x897A, 0x8D8A,0x897B, 0x95B2,0x897C, 0x698E,0x897D, 0x53AD,0x897E, 0x5186,0x8980, 0x5712,0x8981, 0x5830,0x8982, 0x5944,0x8983, 0x5BB4,0x8984, 0x5EF6,0x8985, 0x6028,0x8986, 0x63A9,0x8987, 0x63F4,0x8988, 0x6CBF,0x8989, 0x6F14,0x898A, 0x708E,0x898B, 0x7114,0x898C, 0x7159,0x898D, 0x71D5,0x898E, 0x733F,0x898F, 0x7E01,0x8990, 0x8276,0x8991, 0x82D1,0x8992, 0x8597,0x8993, 0x9060,0x8994, 0x925B,0x8995, 0x9D1B,0x8996, 0x5869,0x8997, 0x65BC,0x8998, 0x6C5A,0x8999, 0x7525,0x899A, 0x51F9,0x899B, 0x592E,0x899C, 0x5965,0x899D, 0x5F80,0x899E, 0x5FDC,0x899F, 0x62BC,0x89A0, 0x65FA,0x89A1, 0x6A2A,0x89A2, 0x6B27,0x89A3, 0x6BB4,0x89A4, 0x738B,0x89A5, 0x7FC1,0x89A6, 0x8956,0x89A7, 0x9D2C,0x89A8, 0x9D0E,0x89A9, 0x9EC4,0x89AA, 0x5CA1,0x89AB, 0x6C96,0x89AC, 0x837B,0x89AD, 0x5104,0x89AE, 0x5C4B,0x89AF, 0x61B6,0x89B0, 0x81C6,0x89B1, 0x6876,0x89B2, 0x7261,0x89B3, 0x4E59,0x89B4, 0x4FFA,0x89B5, 0x5378,0x89B6, 0x6069,0x89B7, 0x6E29,0x89B8, 0x7A4F,0x89B9, 0x97F3,0x89BA, 0x4E0B,0x89BB, 0x5316,0x89BC, 0x4EEE,0x89BD, 0x4F55,0x89BE, 0x4F3D,0x89BF, 0x4FA1,0x89C0, 0x4F73,0x89C1, 0x52A0,0x89C2, 0x53EF,0x89C3, 0x5609,0x89C4, 0x590F,0x89C5, 0x5AC1,0x89C6, 0x5BB6,0x89C7, 0x5BE1,0x89C8, 0x79D1,0x89C9, 0x6687,0x89CA, 0x679C,0x89CB, 0x67B6,0x89CC, 0x6B4C,0x89CD, 0x6CB3,0x89CE, 0x706B,0x89CF, 0x73C2,0x89D0, 0x798D,0x89D1, 0x79BE,0x89D2, 0x7A3C,0x89D3, 0x7B87,0x89D4, 0x82B1,0x89D5, 0x82DB,0x89D6, 0x8304,0x89D7, 0x8377,0x89D8, 0x83EF,0x89D9, 0x83D3,0x89DA, 0x8766,0x89DB, 0x8AB2,0x89DC, 0x5629,0x89DD, 0x8CA8,0x89DE, 0x8FE6,0x89DF, 0x904E,0x89E0, 0x971E,0x89E1, 0x868A,0x89E2, 0x4FC4,0x89E3, 0x5CE8,0x89E4, 0x6211,0x89E5, 0x7259,0x89E6, 0x753B,0x89E7, 0x81E5,0x89E8, 0x82BD,0x89E9, 0x86FE,0x89EA, 0x8CC0,0x89EB, 0x96C5,0x89EC, 0x9913,0x89ED, 0x99D5,0x89EE, 
0x4ECB,0x89EF, 0x4F1A,0x89F0, 0x89E3,0x89F1, 0x56DE,0x89F2, 0x584A,0x89F3, 0x58CA,0x89F4, 0x5EFB,0x89F5, 0x5FEB,0x89F6, 0x602A,0x89F7, 0x6094,0x89F8, 0x6062,0x89F9, 0x61D0,0x89FA, 0x6212,0x89FB, 0x62D0,0x89FC, 0x6539,0x8A40, 0x9B41,0x8A41, 0x6666,0x8A42, 0x68B0,0x8A43, 0x6D77,0x8A44, 0x7070,0x8A45, 0x754C,0x8A46, 0x7686,0x8A47, 0x7D75,0x8A48, 0x82A5,0x8A49, 0x87F9,0x8A4A, 0x958B,0x8A4B, 0x968E,0x8A4C, 0x8C9D,0x8A4D, 0x51F1,0x8A4E, 0x52BE,0x8A4F, 0x5916,0x8A50, 0x54B3,0x8A51, 0x5BB3,0x8A52, 0x5D16,0x8A53, 0x6168,0x8A54, 0x6982,0x8A55, 0x6DAF,0x8A56, 0x788D,0x8A57, 0x84CB,0x8A58, 0x8857,0x8A59, 0x8A72,0x8A5A, 0x93A7,0x8A5B, 0x9AB8,0x8A5C, 0x6D6C,0x8A5D, 0x99A8,0x8A5E, 0x86D9,0x8A5F, 0x57A3,0x8A60, 0x67FF,0x8A61, 0x86CE,0x8A62, 0x920E,0x8A63, 0x5283,0x8A64, 0x5687,0x8A65, 0x5404,0x8A66, 0x5ED3,0x8A67, 0x62E1,0x8A68, 0x64B9,0x8A69, 0x683C,0x8A6A, 0x6838,0x8A6B, 0x6BBB,0x8A6C, 0x7372,0x8A6D, 0x78BA,0x8A6E, 0x7A6B,0x8A6F, 0x899A,0x8A70, 0x89D2,0x8A71, 0x8D6B,0x8A72, 0x8F03,0x8A73, 0x90ED,0x8A74, 0x95A3,0x8A75, 0x9694,0x8A76, 0x9769,0x8A77, 0x5B66,0x8A78, 0x5CB3,0x8A79, 0x697D,0x8A7A, 0x984D,0x8A7B, 0x984E,0x8A7C, 0x639B,0x8A7D, 0x7B20,0x8A7E, 0x6A2B,0x8A80, 0x6A7F,0x8A81, 0x68B6,0x8A82, 0x9C0D,0x8A83, 0x6F5F,0x8A84, 0x5272,0x8A85, 0x559D,0x8A86, 0x6070,0x8A87, 0x62EC,0x8A88, 0x6D3B,0x8A89, 0x6E07,0x8A8A, 0x6ED1,0x8A8B, 0x845B,0x8A8C, 0x8910,0x8A8D, 0x8F44,0x8A8E, 0x4E14,0x8A8F, 0x9C39,0x8A90, 0x53F6,0x8A91, 0x691B,0x8A92, 0x6A3A,0x8A93, 0x9784,0x8A94, 0x682A,0x8A95, 0x515C,0x8A96, 0x7AC3,0x8A97, 0x84B2,0x8A98, 0x91DC,0x8A99, 0x938C,0x8A9A, 0x565B,0x8A9B, 0x9D28,0x8A9C, 0x6822,0x8A9D, 0x8305,0x8A9E, 0x8431,0x8A9F, 0x7CA5,0x8AA0, 0x5208,0x8AA1, 0x82C5,0x8AA2, 0x74E6,0x8AA3, 0x4E7E,0x8AA4, 0x4F83,0x8AA5, 0x51A0,0x8AA6, 0x5BD2,0x8AA7, 0x520A,0x8AA8, 0x52D8,0x8AA9, 0x52E7,0x8AAA, 0x5DFB,0x8AAB, 0x559A,0x8AAC, 0x582A,0x8AAD, 0x59E6,0x8AAE, 0x5B8C,0x8AAF, 0x5B98,0x8AB0, 0x5BDB,0x8AB1, 0x5E72,0x8AB2, 0x5E79,0x8AB3, 0x60A3,0x8AB4, 0x611F,0x8AB5, 0x6163,0x8AB6, 0x61BE,0x8AB7, 0x63DB,0x8AB8, 0x6562,0x8AB9, 0x67D1,0x8ABA, 0x6853,0x8ABB, 0x68FA,0x8ABC, 0x6B3E,0x8ABD, 0x6B53,0x8ABE, 0x6C57,0x8ABF, 0x6F22,0x8AC0, 0x6F97,0x8AC1, 0x6F45,0x8AC2, 0x74B0,0x8AC3, 0x7518,0x8AC4, 0x76E3,0x8AC5, 0x770B,0x8AC6, 0x7AFF,0x8AC7, 0x7BA1,0x8AC8, 0x7C21,0x8AC9, 0x7DE9,0x8ACA, 0x7F36,0x8ACB, 0x7FF0,0x8ACC, 0x809D,0x8ACD, 0x8266,0x8ACE, 0x839E,0x8ACF, 0x89B3,0x8AD0, 0x8ACC,0x8AD1, 0x8CAB,0x8AD2, 0x9084,0x8AD3, 0x9451,0x8AD4, 0x9593,0x8AD5, 0x9591,0x8AD6, 0x95A2,0x8AD7, 0x9665,0x8AD8, 0x97D3,0x8AD9, 0x9928,0x8ADA, 0x8218,0x8ADB, 0x4E38,0x8ADC, 0x542B,0x8ADD, 0x5CB8,0x8ADE, 0x5DCC,0x8ADF, 0x73A9,0x8AE0, 0x764C,0x8AE1, 0x773C,0x8AE2, 0x5CA9,0x8AE3, 0x7FEB,0x8AE4, 0x8D0B,0x8AE5, 0x96C1,0x8AE6, 0x9811,0x8AE7, 0x9854,0x8AE8, 0x9858,0x8AE9, 0x4F01,0x8AEA, 0x4F0E,0x8AEB, 0x5371,0x8AEC, 0x559C,0x8AED, 0x5668,0x8AEE, 0x57FA,0x8AEF, 0x5947,0x8AF0, 0x5B09,0x8AF1, 0x5BC4,0x8AF2, 0x5C90,0x8AF3, 0x5E0C,0x8AF4, 0x5E7E,0x8AF5, 0x5FCC,0x8AF6, 0x63EE,0x8AF7, 0x673A,0x8AF8, 0x65D7,0x8AF9, 0x65E2,0x8AFA, 0x671F,0x8AFB, 0x68CB,0x8AFC, 0x68C4,0x8B40, 0x6A5F,0x8B41, 0x5E30,0x8B42, 0x6BC5,0x8B43, 0x6C17,0x8B44, 0x6C7D,0x8B45, 0x757F,0x8B46, 0x7948,0x8B47, 0x5B63,0x8B48, 0x7A00,0x8B49, 0x7D00,0x8B4A, 0x5FBD,0x8B4B, 0x898F,0x8B4C, 0x8A18,0x8B4D, 0x8CB4,0x8B4E, 0x8D77,0x8B4F, 0x8ECC,0x8B50, 0x8F1D,0x8B51, 0x98E2,0x8B52, 0x9A0E,0x8B53, 0x9B3C,0x8B54, 0x4E80,0x8B55, 0x507D,0x8B56, 0x5100,0x8B57, 0x5993,0x8B58, 0x5B9C,0x8B59, 0x622F,0x8B5A, 0x6280,0x8B5B, 0x64EC,0x8B5C, 0x6B3A,0x8B5D, 0x72A0,0x8B5E, 0x7591,0x8B5F, 0x7947,0x8B60, 0x7FA9,0x8B61, 0x87FB,0x8B62, 
0x8ABC,0x8B63, 0x8B70,0x8B64, 0x63AC,0x8B65, 0x83CA,0x8B66, 0x97A0,0x8B67, 0x5409,0x8B68, 0x5403,0x8B69, 0x55AB,0x8B6A, 0x6854,0x8B6B, 0x6A58,0x8B6C, 0x8A70,0x8B6D, 0x7827,0x8B6E, 0x6775,0x8B6F, 0x9ECD,0x8B70, 0x5374,0x8B71, 0x5BA2,0x8B72, 0x811A,0x8B73, 0x8650,0x8B74, 0x9006,0x8B75, 0x4E18,0x8B76, 0x4E45,0x8B77, 0x4EC7,0x8B78, 0x4F11,0x8B79, 0x53CA,0x8B7A, 0x5438,0x8B7B, 0x5BAE,0x8B7C, 0x5F13,0x8B7D, 0x6025,0x8B7E, 0x6551,0x8B80, 0x673D,0x8B81, 0x6C42,0x8B82, 0x6C72,0x8B83, 0x6CE3,0x8B84, 0x7078,0x8B85, 0x7403,0x8B86, 0x7A76,0x8B87, 0x7AAE,0x8B88, 0x7B08,0x8B89, 0x7D1A,0x8B8A, 0x7CFE,0x8B8B, 0x7D66,0x8B8C, 0x65E7,0x8B8D, 0x725B,0x8B8E, 0x53BB,0x8B8F, 0x5C45,0x8B90, 0x5DE8,0x8B91, 0x62D2,0x8B92, 0x62E0,0x8B93, 0x6319,0x8B94, 0x6E20,0x8B95, 0x865A,0x8B96, 0x8A31,0x8B97, 0x8DDD,0x8B98, 0x92F8,0x8B99, 0x6F01,0x8B9A, 0x79A6,0x8B9B, 0x9B5A,0x8B9C, 0x4EA8,0x8B9D, 0x4EAB,0x8B9E, 0x4EAC,0x8B9F, 0x4F9B,0x8BA0, 0x4FA0,0x8BA1, 0x50D1,0x8BA2, 0x5147,0x8BA3, 0x7AF6,0x8BA4, 0x5171,0x8BA5, 0x51F6,0x8BA6, 0x5354,0x8BA7, 0x5321,0x8BA8, 0x537F,0x8BA9, 0x53EB,0x8BAA, 0x55AC,0x8BAB, 0x5883,0x8BAC, 0x5CE1,0x8BAD, 0x5F37,0x8BAE, 0x5F4A,0x8BAF, 0x602F,0x8BB0, 0x6050,0x8BB1, 0x606D,0x8BB2, 0x631F,0x8BB3, 0x6559,0x8BB4, 0x6A4B,0x8BB5, 0x6CC1,0x8BB6, 0x72C2,0x8BB7, 0x72ED,0x8BB8, 0x77EF,0x8BB9, 0x80F8,0x8BBA, 0x8105,0x8BBB, 0x8208,0x8BBC, 0x854E,0x8BBD, 0x90F7,0x8BBE, 0x93E1,0x8BBF, 0x97FF,0x8BC0, 0x9957,0x8BC1, 0x9A5A,0x8BC2, 0x4EF0,0x8BC3, 0x51DD,0x8BC4, 0x5C2D,0x8BC5, 0x6681,0x8BC6, 0x696D,0x8BC7, 0x5C40,0x8BC8, 0x66F2,0x8BC9, 0x6975,0x8BCA, 0x7389,0x8BCB, 0x6850,0x8BCC, 0x7C81,0x8BCD, 0x50C5,0x8BCE, 0x52E4,0x8BCF, 0x5747,0x8BD0, 0x5DFE,0x8BD1, 0x9326,0x8BD2, 0x65A4,0x8BD3, 0x6B23,0x8BD4, 0x6B3D,0x8BD5, 0x7434,0x8BD6, 0x7981,0x8BD7, 0x79BD,0x8BD8, 0x7B4B,0x8BD9, 0x7DCA,0x8BDA, 0x82B9,0x8BDB, 0x83CC,0x8BDC, 0x887F,0x8BDD, 0x895F,0x8BDE, 0x8B39,0x8BDF, 0x8FD1,0x8BE0, 0x91D1,0x8BE1, 0x541F,0x8BE2, 0x9280,0x8BE3, 0x4E5D,0x8BE4, 0x5036,0x8BE5, 0x53E5,0x8BE6, 0x533A,0x8BE7, 0x72D7,0x8BE8, 0x7396,0x8BE9, 0x77E9,0x8BEA, 0x82E6,0x8BEB, 0x8EAF,0x8BEC, 0x99C6,0x8BED, 0x99C8,0x8BEE, 0x99D2,0x8BEF, 0x5177,0x8BF0, 0x611A,0x8BF1, 0x865E,0x8BF2, 0x55B0,0x8BF3, 0x7A7A,0x8BF4, 0x5076,0x8BF5, 0x5BD3,0x8BF6, 0x9047,0x8BF7, 0x9685,0x8BF8, 0x4E32,0x8BF9, 0x6ADB,0x8BFA, 0x91E7,0x8BFB, 0x5C51,0x8BFC, 0x5C48,0x8C40, 0x6398,0x8C41, 0x7A9F,0x8C42, 0x6C93,0x8C43, 0x9774,0x8C44, 0x8F61,0x8C45, 0x7AAA,0x8C46, 0x718A,0x8C47, 0x9688,0x8C48, 0x7C82,0x8C49, 0x6817,0x8C4A, 0x7E70,0x8C4B, 0x6851,0x8C4C, 0x936C,0x8C4D, 0x52F2,0x8C4E, 0x541B,0x8C4F, 0x85AB,0x8C50, 0x8A13,0x8C51, 0x7FA4,0x8C52, 0x8ECD,0x8C53, 0x90E1,0x8C54, 0x5366,0x8C55, 0x8888,0x8C56, 0x7941,0x8C57, 0x4FC2,0x8C58, 0x50BE,0x8C59, 0x5211,0x8C5A, 0x5144,0x8C5B, 0x5553,0x8C5C, 0x572D,0x8C5D, 0x73EA,0x8C5E, 0x578B,0x8C5F, 0x5951,0x8C60, 0x5F62,0x8C61, 0x5F84,0x8C62, 0x6075,0x8C63, 0x6176,0x8C64, 0x6167,0x8C65, 0x61A9,0x8C66, 0x63B2,0x8C67, 0x643A,0x8C68, 0x656C,0x8C69, 0x666F,0x8C6A, 0x6842,0x8C6B, 0x6E13,0x8C6C, 0x7566,0x8C6D, 0x7A3D,0x8C6E, 0x7CFB,0x8C6F, 0x7D4C,0x8C70, 0x7D99,0x8C71, 0x7E4B,0x8C72, 0x7F6B,0x8C73, 0x830E,0x8C74, 0x834A,0x8C75, 0x86CD,0x8C76, 0x8A08,0x8C77, 0x8A63,0x8C78, 0x8B66,0x8C79, 0x8EFD,0x8C7A, 0x981A,0x8C7B, 0x9D8F,0x8C7C, 0x82B8,0x8C7D, 0x8FCE,0x8C7E, 0x9BE8,0x8C80, 0x5287,0x8C81, 0x621F,0x8C82, 0x6483,0x8C83, 0x6FC0,0x8C84, 0x9699,0x8C85, 0x6841,0x8C86, 0x5091,0x8C87, 0x6B20,0x8C88, 0x6C7A,0x8C89, 0x6F54,0x8C8A, 0x7A74,0x8C8B, 0x7D50,0x8C8C, 0x8840,0x8C8D, 0x8A23,0x8C8E, 0x6708,0x8C8F, 0x4EF6,0x8C90, 0x5039,0x8C91, 0x5026,0x8C92, 0x5065,0x8C93, 0x517C,0x8C94, 
0x5238,0x8C95, 0x5263,0x8C96, 0x55A7,0x8C97, 0x570F,0x8C98, 0x5805,0x8C99, 0x5ACC,0x8C9A, 0x5EFA,0x8C9B, 0x61B2,0x8C9C, 0x61F8,0x8C9D, 0x62F3,0x8C9E, 0x6372,0x8C9F, 0x691C,0x8CA0, 0x6A29,0x8CA1, 0x727D,0x8CA2, 0x72AC,0x8CA3, 0x732E,0x8CA4, 0x7814,0x8CA5, 0x786F,0x8CA6, 0x7D79,0x8CA7, 0x770C,0x8CA8, 0x80A9,0x8CA9, 0x898B,0x8CAA, 0x8B19,0x8CAB, 0x8CE2,0x8CAC, 0x8ED2,0x8CAD, 0x9063,0x8CAE, 0x9375,0x8CAF, 0x967A,0x8CB0, 0x9855,0x8CB1, 0x9A13,0x8CB2, 0x9E78,0x8CB3, 0x5143,0x8CB4, 0x539F,0x8CB5, 0x53B3,0x8CB6, 0x5E7B,0x8CB7, 0x5F26,0x8CB8, 0x6E1B,0x8CB9, 0x6E90,0x8CBA, 0x7384,0x8CBB, 0x73FE,0x8CBC, 0x7D43,0x8CBD, 0x8237,0x8CBE, 0x8A00,0x8CBF, 0x8AFA,0x8CC0, 0x9650,0x8CC1, 0x4E4E,0x8CC2, 0x500B,0x8CC3, 0x53E4,0x8CC4, 0x547C,0x8CC5, 0x56FA,0x8CC6, 0x59D1,0x8CC7, 0x5B64,0x8CC8, 0x5DF1,0x8CC9, 0x5EAB,0x8CCA, 0x5F27,0x8CCB, 0x6238,0x8CCC, 0x6545,0x8CCD, 0x67AF,0x8CCE, 0x6E56,0x8CCF, 0x72D0,0x8CD0, 0x7CCA,0x8CD1, 0x88B4,0x8CD2, 0x80A1,0x8CD3, 0x80E1,0x8CD4, 0x83F0,0x8CD5, 0x864E,0x8CD6, 0x8A87,0x8CD7, 0x8DE8,0x8CD8, 0x9237,0x8CD9, 0x96C7,0x8CDA, 0x9867,0x8CDB, 0x9F13,0x8CDC, 0x4E94,0x8CDD, 0x4E92,0x8CDE, 0x4F0D,0x8CDF, 0x5348,0x8CE0, 0x5449,0x8CE1, 0x543E,0x8CE2, 0x5A2F,0x8CE3, 0x5F8C,0x8CE4, 0x5FA1,0x8CE5, 0x609F,0x8CE6, 0x68A7,0x8CE7, 0x6A8E,0x8CE8, 0x745A,0x8CE9, 0x7881,0x8CEA, 0x8A9E,0x8CEB, 0x8AA4,0x8CEC, 0x8B77,0x8CED, 0x9190,0x8CEE, 0x4E5E,0x8CEF, 0x9BC9,0x8CF0, 0x4EA4,0x8CF1, 0x4F7C,0x8CF2, 0x4FAF,0x8CF3, 0x5019,0x8CF4, 0x5016,0x8CF5, 0x5149,0x8CF6, 0x516C,0x8CF7, 0x529F,0x8CF8, 0x52B9,0x8CF9, 0x52FE,0x8CFA, 0x539A,0x8CFB, 0x53E3,0x8CFC, 0x5411,0x8D40, 0x540E,0x8D41, 0x5589,0x8D42, 0x5751,0x8D43, 0x57A2,0x8D44, 0x597D,0x8D45, 0x5B54,0x8D46, 0x5B5D,0x8D47, 0x5B8F,0x8D48, 0x5DE5,0x8D49, 0x5DE7,0x8D4A, 0x5DF7,0x8D4B, 0x5E78,0x8D4C, 0x5E83,0x8D4D, 0x5E9A,0x8D4E, 0x5EB7,0x8D4F, 0x5F18,0x8D50, 0x6052,0x8D51, 0x614C,0x8D52, 0x6297,0x8D53, 0x62D8,0x8D54, 0x63A7,0x8D55, 0x653B,0x8D56, 0x6602,0x8D57, 0x6643,0x8D58, 0x66F4,0x8D59, 0x676D,0x8D5A, 0x6821,0x8D5B, 0x6897,0x8D5C, 0x69CB,0x8D5D, 0x6C5F,0x8D5E, 0x6D2A,0x8D5F, 0x6D69,0x8D60, 0x6E2F,0x8D61, 0x6E9D,0x8D62, 0x7532,0x8D63, 0x7687,0x8D64, 0x786C,0x8D65, 0x7A3F,0x8D66, 0x7CE0,0x8D67, 0x7D05,0x8D68, 0x7D18,0x8D69, 0x7D5E,0x8D6A, 0x7DB1,0x8D6B, 0x8015,0x8D6C, 0x8003,0x8D6D, 0x80AF,0x8D6E, 0x80B1,0x8D6F, 0x8154,0x8D70, 0x818F,0x8D71, 0x822A,0x8D72, 0x8352,0x8D73, 0x884C,0x8D74, 0x8861,0x8D75, 0x8B1B,0x8D76, 0x8CA2,0x8D77, 0x8CFC,0x8D78, 0x90CA,0x8D79, 0x9175,0x8D7A, 0x9271,0x8D7B, 0x783F,0x8D7C, 0x92FC,0x8D7D, 0x95A4,0x8D7E, 0x964D,0x8D80, 0x9805,0x8D81, 0x9999,0x8D82, 0x9AD8,0x8D83, 0x9D3B,0x8D84, 0x525B,0x8D85, 0x52AB,0x8D86, 0x53F7,0x8D87, 0x5408,0x8D88, 0x58D5,0x8D89, 0x62F7,0x8D8A, 0x6FE0,0x8D8B, 0x8C6A,0x8D8C, 0x8F5F,0x8D8D, 0x9EB9,0x8D8E, 0x514B,0x8D8F, 0x523B,0x8D90, 0x544A,0x8D91, 0x56FD,0x8D92, 0x7A40,0x8D93, 0x9177,0x8D94, 0x9D60,0x8D95, 0x9ED2,0x8D96, 0x7344,0x8D97, 0x6F09,0x8D98, 0x8170,0x8D99, 0x7511,0x8D9A, 0x5FFD,0x8D9B, 0x60DA,0x8D9C, 0x9AA8,0x8D9D, 0x72DB,0x8D9E, 0x8FBC,0x8D9F, 0x6B64,0x8DA0, 0x9803,0x8DA1, 0x4ECA,0x8DA2, 0x56F0,0x8DA3, 0x5764,0x8DA4, 0x58BE,0x8DA5, 0x5A5A,0x8DA6, 0x6068,0x8DA7, 0x61C7,0x8DA8, 0x660F,0x8DA9, 0x6606,0x8DAA, 0x6839,0x8DAB, 0x68B1,0x8DAC, 0x6DF7,0x8DAD, 0x75D5,0x8DAE, 0x7D3A,0x8DAF, 0x826E,0x8DB0, 0x9B42,0x8DB1, 0x4E9B,0x8DB2, 0x4F50,0x8DB3, 0x53C9,0x8DB4, 0x5506,0x8DB5, 0x5D6F,0x8DB6, 0x5DE6,0x8DB7, 0x5DEE,0x8DB8, 0x67FB,0x8DB9, 0x6C99,0x8DBA, 0x7473,0x8DBB, 0x7802,0x8DBC, 0x8A50,0x8DBD, 0x9396,0x8DBE, 0x88DF,0x8DBF, 0x5750,0x8DC0, 0x5EA7,0x8DC1, 0x632B,0x8DC2, 0x50B5,0x8DC3, 0x50AC,0x8DC4, 0x518D,0x8DC5, 
0x6700,0x8DC6, 0x54C9,0x8DC7, 0x585E,0x8DC8, 0x59BB,0x8DC9, 0x5BB0,0x8DCA, 0x5F69,0x8DCB, 0x624D,0x8DCC, 0x63A1,0x8DCD, 0x683D,0x8DCE, 0x6B73,0x8DCF, 0x6E08,0x8DD0, 0x707D,0x8DD1, 0x91C7,0x8DD2, 0x7280,0x8DD3, 0x7815,0x8DD4, 0x7826,0x8DD5, 0x796D,0x8DD6, 0x658E,0x8DD7, 0x7D30,0x8DD8, 0x83DC,0x8DD9, 0x88C1,0x8DDA, 0x8F09,0x8DDB, 0x969B,0x8DDC, 0x5264,0x8DDD, 0x5728,0x8DDE, 0x6750,0x8DDF, 0x7F6A,0x8DE0, 0x8CA1,0x8DE1, 0x51B4,0x8DE2, 0x5742,0x8DE3, 0x962A,0x8DE4, 0x583A,0x8DE5, 0x698A,0x8DE6, 0x80B4,0x8DE7, 0x54B2,0x8DE8, 0x5D0E,0x8DE9, 0x57FC,0x8DEA, 0x7895,0x8DEB, 0x9DFA,0x8DEC, 0x4F5C,0x8DED, 0x524A,0x8DEE, 0x548B,0x8DEF, 0x643E,0x8DF0, 0x6628,0x8DF1, 0x6714,0x8DF2, 0x67F5,0x8DF3, 0x7A84,0x8DF4, 0x7B56,0x8DF5, 0x7D22,0x8DF6, 0x932F,0x8DF7, 0x685C,0x8DF8, 0x9BAD,0x8DF9, 0x7B39,0x8DFA, 0x5319,0x8DFB, 0x518A,0x8DFC, 0x5237,0x8E40, 0x5BDF,0x8E41, 0x62F6,0x8E42, 0x64AE,0x8E43, 0x64E6,0x8E44, 0x672D,0x8E45, 0x6BBA,0x8E46, 0x85A9,0x8E47, 0x96D1,0x8E48, 0x7690,0x8E49, 0x9BD6,0x8E4A, 0x634C,0x8E4B, 0x9306,0x8E4C, 0x9BAB,0x8E4D, 0x76BF,0x8E4E, 0x6652,0x8E4F, 0x4E09,0x8E50, 0x5098,0x8E51, 0x53C2,0x8E52, 0x5C71,0x8E53, 0x60E8,0x8E54, 0x6492,0x8E55, 0x6563,0x8E56, 0x685F,0x8E57, 0x71E6,0x8E58, 0x73CA,0x8E59, 0x7523,0x8E5A, 0x7B97,0x8E5B, 0x7E82,0x8E5C, 0x8695,0x8E5D, 0x8B83,0x8E5E, 0x8CDB,0x8E5F, 0x9178,0x8E60, 0x9910,0x8E61, 0x65AC,0x8E62, 0x66AB,0x8E63, 0x6B8B,0x8E64, 0x4ED5,0x8E65, 0x4ED4,0x8E66, 0x4F3A,0x8E67, 0x4F7F,0x8E68, 0x523A,0x8E69, 0x53F8,0x8E6A, 0x53F2,0x8E6B, 0x55E3,0x8E6C, 0x56DB,0x8E6D, 0x58EB,0x8E6E, 0x59CB,0x8E6F, 0x59C9,0x8E70, 0x59FF,0x8E71, 0x5B50,0x8E72, 0x5C4D,0x8E73, 0x5E02,0x8E74, 0x5E2B,0x8E75, 0x5FD7,0x8E76, 0x601D,0x8E77, 0x6307,0x8E78, 0x652F,0x8E79, 0x5B5C,0x8E7A, 0x65AF,0x8E7B, 0x65BD,0x8E7C, 0x65E8,0x8E7D, 0x679D,0x8E7E, 0x6B62,0x8E80, 0x6B7B,0x8E81, 0x6C0F,0x8E82, 0x7345,0x8E83, 0x7949,0x8E84, 0x79C1,0x8E85, 0x7CF8,0x8E86, 0x7D19,0x8E87, 0x7D2B,0x8E88, 0x80A2,0x8E89, 0x8102,0x8E8A, 0x81F3,0x8E8B, 0x8996,0x8E8C, 0x8A5E,0x8E8D, 0x8A69,0x8E8E, 0x8A66,0x8E8F, 0x8A8C,0x8E90, 0x8AEE,0x8E91, 0x8CC7,0x8E92, 0x8CDC,0x8E93, 0x96CC,0x8E94, 0x98FC,0x8E95, 0x6B6F,0x8E96, 0x4E8B,0x8E97, 0x4F3C,0x8E98, 0x4F8D,0x8E99, 0x5150,0x8E9A, 0x5B57,0x8E9B, 0x5BFA,0x8E9C, 0x6148,0x8E9D, 0x6301,0x8E9E, 0x6642,0x8E9F, 0x6B21,0x8EA0, 0x6ECB,0x8EA1, 0x6CBB,0x8EA2, 0x723E,0x8EA3, 0x74BD,0x8EA4, 0x75D4,0x8EA5, 0x78C1,0x8EA6, 0x793A,0x8EA7, 0x800C,0x8EA8, 0x8033,0x8EA9, 0x81EA,0x8EAA, 0x8494,0x8EAB, 0x8F9E,0x8EAC, 0x6C50,0x8EAD, 0x9E7F,0x8EAE, 0x5F0F,0x8EAF, 0x8B58,0x8EB0, 0x9D2B,0x8EB1, 0x7AFA,0x8EB2, 0x8EF8,0x8EB3, 0x5B8D,0x8EB4, 0x96EB,0x8EB5, 0x4E03,0x8EB6, 0x53F1,0x8EB7, 0x57F7,0x8EB8, 0x5931,0x8EB9, 0x5AC9,0x8EBA, 0x5BA4,0x8EBB, 0x6089,0x8EBC, 0x6E7F,0x8EBD, 0x6F06,0x8EBE, 0x75BE,0x8EBF, 0x8CEA,0x8EC0, 0x5B9F,0x8EC1, 0x8500,0x8EC2, 0x7BE0,0x8EC3, 0x5072,0x8EC4, 0x67F4,0x8EC5, 0x829D,0x8EC6, 0x5C61,0x8EC7, 0x854A,0x8EC8, 0x7E1E,0x8EC9, 0x820E,0x8ECA, 0x5199,0x8ECB, 0x5C04,0x8ECC, 0x6368,0x8ECD, 0x8D66,0x8ECE, 0x659C,0x8ECF, 0x716E,0x8ED0, 0x793E,0x8ED1, 0x7D17,0x8ED2, 0x8005,0x8ED3, 0x8B1D,0x8ED4, 0x8ECA,0x8ED5, 0x906E,0x8ED6, 0x86C7,0x8ED7, 0x90AA,0x8ED8, 0x501F,0x8ED9, 0x52FA,0x8EDA, 0x5C3A,0x8EDB, 0x6753,0x8EDC, 0x707C,0x8EDD, 0x7235,0x8EDE, 0x914C,0x8EDF, 0x91C8,0x8EE0, 0x932B,0x8EE1, 0x82E5,0x8EE2, 0x5BC2,0x8EE3, 0x5F31,0x8EE4, 0x60F9,0x8EE5, 0x4E3B,0x8EE6, 0x53D6,0x8EE7, 0x5B88,0x8EE8, 0x624B,0x8EE9, 0x6731,0x8EEA, 0x6B8A,0x8EEB, 0x72E9,0x8EEC, 0x73E0,0x8EED, 0x7A2E,0x8EEE, 0x816B,0x8EEF, 0x8DA3,0x8EF0, 0x9152,0x8EF1, 0x9996,0x8EF2, 0x5112,0x8EF3, 0x53D7,0x8EF4, 0x546A,0x8EF5, 0x5BFF,0x8EF6, 
0x6388,0x8EF7, 0x6A39,0x8EF8, 0x7DAC,0x8EF9, 0x9700,0x8EFA, 0x56DA,0x8EFB, 0x53CE,0x8EFC, 0x5468,0x8F40, 0x5B97,0x8F41, 0x5C31,0x8F42, 0x5DDE,0x8F43, 0x4FEE,0x8F44, 0x6101,0x8F45, 0x62FE,0x8F46, 0x6D32,0x8F47, 0x79C0,0x8F48, 0x79CB,0x8F49, 0x7D42,0x8F4A, 0x7E4D,0x8F4B, 0x7FD2,0x8F4C, 0x81ED,0x8F4D, 0x821F,0x8F4E, 0x8490,0x8F4F, 0x8846,0x8F50, 0x8972,0x8F51, 0x8B90,0x8F52, 0x8E74,0x8F53, 0x8F2F,0x8F54, 0x9031,0x8F55, 0x914B,0x8F56, 0x916C,0x8F57, 0x96C6,0x8F58, 0x919C,0x8F59, 0x4EC0,0x8F5A, 0x4F4F,0x8F5B, 0x5145,0x8F5C, 0x5341,0x8F5D, 0x5F93,0x8F5E, 0x620E,0x8F5F, 0x67D4,0x8F60, 0x6C41,0x8F61, 0x6E0B,0x8F62, 0x7363,0x8F63, 0x7E26,0x8F64, 0x91CD,0x8F65, 0x9283,0x8F66, 0x53D4,0x8F67, 0x5919,0x8F68, 0x5BBF,0x8F69, 0x6DD1,0x8F6A, 0x795D,0x8F6B, 0x7E2E,0x8F6C, 0x7C9B,0x8F6D, 0x587E,0x8F6E, 0x719F,0x8F6F, 0x51FA,0x8F70, 0x8853,0x8F71, 0x8FF0,0x8F72, 0x4FCA,0x8F73, 0x5CFB,0x8F74, 0x6625,0x8F75, 0x77AC,0x8F76, 0x7AE3,0x8F77, 0x821C,0x8F78, 0x99FF,0x8F79, 0x51C6,0x8F7A, 0x5FAA,0x8F7B, 0x65EC,0x8F7C, 0x696F,0x8F7D, 0x6B89,0x8F7E, 0x6DF3,0x8F80, 0x6E96,0x8F81, 0x6F64,0x8F82, 0x76FE,0x8F83, 0x7D14,0x8F84, 0x5DE1,0x8F85, 0x9075,0x8F86, 0x9187,0x8F87, 0x9806,0x8F88, 0x51E6,0x8F89, 0x521D,0x8F8A, 0x6240,0x8F8B, 0x6691,0x8F8C, 0x66D9,0x8F8D, 0x6E1A,0x8F8E, 0x5EB6,0x8F8F, 0x7DD2,0x8F90, 0x7F72,0x8F91, 0x66F8,0x8F92, 0x85AF,0x8F93, 0x85F7,0x8F94, 0x8AF8,0x8F95, 0x52A9,0x8F96, 0x53D9,0x8F97, 0x5973,0x8F98, 0x5E8F,0x8F99, 0x5F90,0x8F9A, 0x6055,0x8F9B, 0x92E4,0x8F9C, 0x9664,0x8F9D, 0x50B7,0x8F9E, 0x511F,0x8F9F, 0x52DD,0x8FA0, 0x5320,0x8FA1, 0x5347,0x8FA2, 0x53EC,0x8FA3, 0x54E8,0x8FA4, 0x5546,0x8FA5, 0x5531,0x8FA6, 0x5617,0x8FA7, 0x5968,0x8FA8, 0x59BE,0x8FA9, 0x5A3C,0x8FAA, 0x5BB5,0x8FAB, 0x5C06,0x8FAC, 0x5C0F,0x8FAD, 0x5C11,0x8FAE, 0x5C1A,0x8FAF, 0x5E84,0x8FB0, 0x5E8A,0x8FB1, 0x5EE0,0x8FB2, 0x5F70,0x8FB3, 0x627F,0x8FB4, 0x6284,0x8FB5, 0x62DB,0x8FB6, 0x638C,0x8FB7, 0x6377,0x8FB8, 0x6607,0x8FB9, 0x660C,0x8FBA, 0x662D,0x8FBB, 0x6676,0x8FBC, 0x677E,0x8FBD, 0x68A2,0x8FBE, 0x6A1F,0x8FBF, 0x6A35,0x8FC0, 0x6CBC,0x8FC1, 0x6D88,0x8FC2, 0x6E09,0x8FC3, 0x6E58,0x8FC4, 0x713C,0x8FC5, 0x7126,0x8FC6, 0x7167,0x8FC7, 0x75C7,0x8FC8, 0x7701,0x8FC9, 0x785D,0x8FCA, 0x7901,0x8FCB, 0x7965,0x8FCC, 0x79F0,0x8FCD, 0x7AE0,0x8FCE, 0x7B11,0x8FCF, 0x7CA7,0x8FD0, 0x7D39,0x8FD1, 0x8096,0x8FD2, 0x83D6,0x8FD3, 0x848B,0x8FD4, 0x8549,0x8FD5, 0x885D,0x8FD6, 0x88F3,0x8FD7, 0x8A1F,0x8FD8, 0x8A3C,0x8FD9, 0x8A54,0x8FDA, 0x8A73,0x8FDB, 0x8C61,0x8FDC, 0x8CDE,0x8FDD, 0x91A4,0x8FDE, 0x9266,0x8FDF, 0x937E,0x8FE0, 0x9418,0x8FE1, 0x969C,0x8FE2, 0x9798,0x8FE3, 0x4E0A,0x8FE4, 0x4E08,0x8FE5, 0x4E1E,0x8FE6, 0x4E57,0x8FE7, 0x5197,0x8FE8, 0x5270,0x8FE9, 0x57CE,0x8FEA, 0x5834,0x8FEB, 0x58CC,0x8FEC, 0x5B22,0x8FED, 0x5E38,0x8FEE, 0x60C5,0x8FEF, 0x64FE,0x8FF0, 0x6761,0x8FF1, 0x6756,0x8FF2, 0x6D44,0x8FF3, 0x72B6,0x8FF4, 0x7573,0x8FF5, 0x7A63,0x8FF6, 0x84B8,0x8FF7, 0x8B72,0x8FF8, 0x91B8,0x8FF9, 0x9320,0x8FFA, 0x5631,0x8FFB, 0x57F4,0x8FFC, 0x98FE,0x9040, 0x62ED,0x9041, 0x690D,0x9042, 0x6B96,0x9043, 0x71ED,0x9044, 0x7E54,0x9045, 0x8077,0x9046, 0x8272,0x9047, 0x89E6,0x9048, 0x98DF,0x9049, 0x8755,0x904A, 0x8FB1,0x904B, 0x5C3B,0x904C, 0x4F38,0x904D, 0x4FE1,0x904E, 0x4FB5,0x904F, 0x5507,0x9050, 0x5A20,0x9051, 0x5BDD,0x9052, 0x5BE9,0x9053, 0x5FC3,0x9054, 0x614E,0x9055, 0x632F,0x9056, 0x65B0,0x9057, 0x664B,0x9058, 0x68EE,0x9059, 0x699B,0x905A, 0x6D78,0x905B, 0x6DF1,0x905C, 0x7533,0x905D, 0x75B9,0x905E, 0x771F,0x905F, 0x795E,0x9060, 0x79E6,0x9061, 0x7D33,0x9062, 0x81E3,0x9063, 0x82AF,0x9064, 0x85AA,0x9065, 0x89AA,0x9066, 0x8A3A,0x9067, 0x8EAB,0x9068, 0x8F9B,0x9069, 0x9032,0x906A, 
0x91DD,0x906B, 0x9707,0x906C, 0x4EBA,0x906D, 0x4EC1,0x906E, 0x5203,0x906F, 0x5875,0x9070, 0x58EC,0x9071, 0x5C0B,0x9072, 0x751A,0x9073, 0x5C3D,0x9074, 0x814E,0x9075, 0x8A0A,0x9076, 0x8FC5,0x9077, 0x9663,0x9078, 0x976D,0x9079, 0x7B25,0x907A, 0x8ACF,0x907B, 0x9808,0x907C, 0x9162,0x907D, 0x56F3,0x907E, 0x53A8,0x9080, 0x9017,0x9081, 0x5439,0x9082, 0x5782,0x9083, 0x5E25,0x9084, 0x63A8,0x9085, 0x6C34,0x9086, 0x708A,0x9087, 0x7761,0x9088, 0x7C8B,0x9089, 0x7FE0,0x908A, 0x8870,0x908B, 0x9042,0x908C, 0x9154,0x908D, 0x9310,0x908E, 0x9318,0x908F, 0x968F,0x9090, 0x745E,0x9091, 0x9AC4,0x9092, 0x5D07,0x9093, 0x5D69,0x9094, 0x6570,0x9095, 0x67A2,0x9096, 0x8DA8,0x9097, 0x96DB,0x9098, 0x636E,0x9099, 0x6749,0x909A, 0x6919,0x909B, 0x83C5,0x909C, 0x9817,0x909D, 0x96C0,0x909E, 0x88FE,0x909F, 0x6F84,0x90A0, 0x647A,0x90A1, 0x5BF8,0x90A2, 0x4E16,0x90A3, 0x702C,0x90A4, 0x755D,0x90A5, 0x662F,0x90A6, 0x51C4,0x90A7, 0x5236,0x90A8, 0x52E2,0x90A9, 0x59D3,0x90AA, 0x5F81,0x90AB, 0x6027,0x90AC, 0x6210,0x90AD, 0x653F,0x90AE, 0x6574,0x90AF, 0x661F,0x90B0, 0x6674,0x90B1, 0x68F2,0x90B2, 0x6816,0x90B3, 0x6B63,0x90B4, 0x6E05,0x90B5, 0x7272,0x90B6, 0x751F,0x90B7, 0x76DB,0x90B8, 0x7CBE,0x90B9, 0x8056,0x90BA, 0x58F0,0x90BB, 0x88FD,0x90BC, 0x897F,0x90BD, 0x8AA0,0x90BE, 0x8A93,0x90BF, 0x8ACB,0x90C0, 0x901D,0x90C1, 0x9192,0x90C2, 0x9752,0x90C3, 0x9759,0x90C4, 0x6589,0x90C5, 0x7A0E,0x90C6, 0x8106,0x90C7, 0x96BB,0x90C8, 0x5E2D,0x90C9, 0x60DC,0x90CA, 0x621A,0x90CB, 0x65A5,0x90CC, 0x6614,0x90CD, 0x6790,0x90CE, 0x77F3,0x90CF, 0x7A4D,0x90D0, 0x7C4D,0x90D1, 0x7E3E,0x90D2, 0x810A,0x90D3, 0x8CAC,0x90D4, 0x8D64,0x90D5, 0x8DE1,0x90D6, 0x8E5F,0x90D7, 0x78A9,0x90D8, 0x5207,0x90D9, 0x62D9,0x90DA, 0x63A5,0x90DB, 0x6442,0x90DC, 0x6298,0x90DD, 0x8A2D,0x90DE, 0x7A83,0x90DF, 0x7BC0,0x90E0, 0x8AAC,0x90E1, 0x96EA,0x90E2, 0x7D76,0x90E3, 0x820C,0x90E4, 0x8749,0x90E5, 0x4ED9,0x90E6, 0x5148,0x90E7, 0x5343,0x90E8, 0x5360,0x90E9, 0x5BA3,0x90EA, 0x5C02,0x90EB, 0x5C16,0x90EC, 0x5DDD,0x90ED, 0x6226,0x90EE, 0x6247,0x90EF, 0x64B0,0x90F0, 0x6813,0x90F1, 0x6834,0x90F2, 0x6CC9,0x90F3, 0x6D45,0x90F4, 0x6D17,0x90F5, 0x67D3,0x90F6, 0x6F5C,0x90F7, 0x714E,0x90F8, 0x717D,0x90F9, 0x65CB,0x90FA, 0x7A7F,0x90FB, 0x7BAD,0x90FC, 0x7DDA,0x9140, 0x7E4A,0x9141, 0x7FA8,0x9142, 0x817A,0x9143, 0x821B,0x9144, 0x8239,0x9145, 0x85A6,0x9146, 0x8A6E,0x9147, 0x8CCE,0x9148, 0x8DF5,0x9149, 0x9078,0x914A, 0x9077,0x914B, 0x92AD,0x914C, 0x9291,0x914D, 0x9583,0x914E, 0x9BAE,0x914F, 0x524D,0x9150, 0x5584,0x9151, 0x6F38,0x9152, 0x7136,0x9153, 0x5168,0x9154, 0x7985,0x9155, 0x7E55,0x9156, 0x81B3,0x9157, 0x7CCE,0x9158, 0x564C,0x9159, 0x5851,0x915A, 0x5CA8,0x915B, 0x63AA,0x915C, 0x66FE,0x915D, 0x66FD,0x915E, 0x695A,0x915F, 0x72D9,0x9160, 0x758F,0x9161, 0x758E,0x9162, 0x790E,0x9163, 0x7956,0x9164, 0x79DF,0x9165, 0x7C97,0x9166, 0x7D20,0x9167, 0x7D44,0x9168, 0x8607,0x9169, 0x8A34,0x916A, 0x963B,0x916B, 0x9061,0x916C, 0x9F20,0x916D, 0x50E7,0x916E, 0x5275,0x916F, 0x53CC,0x9170, 0x53E2,0x9171, 0x5009,0x9172, 0x55AA,0x9173, 0x58EE,0x9174, 0x594F,0x9175, 0x723D,0x9176, 0x5B8B,0x9177, 0x5C64,0x9178, 0x531D,0x9179, 0x60E3,0x917A, 0x60F3,0x917B, 0x635C,0x917C, 0x6383,0x917D, 0x633F,0x917E, 0x63BB,0x9180, 0x64CD,0x9181, 0x65E9,0x9182, 0x66F9,0x9183, 0x5DE3,0x9184, 0x69CD,0x9185, 0x69FD,0x9186, 0x6F15,0x9187, 0x71E5,0x9188, 0x4E89,0x9189, 0x75E9,0x918A, 0x76F8,0x918B, 0x7A93,0x918C, 0x7CDF,0x918D, 0x7DCF,0x918E, 0x7D9C,0x918F, 0x8061,0x9190, 0x8349,0x9191, 0x8358,0x9192, 0x846C,0x9193, 0x84BC,0x9194, 0x85FB,0x9195, 0x88C5,0x9196, 0x8D70,0x9197, 0x9001,0x9198, 0x906D,0x9199, 0x9397,0x919A, 0x971C,0x919B, 0x9A12,0x919C, 
0x50CF,0x919D, 0x5897,0x919E, 0x618E,0x919F, 0x81D3,0x91A0, 0x8535,0x91A1, 0x8D08,0x91A2, 0x9020,0x91A3, 0x4FC3,0x91A4, 0x5074,0x91A5, 0x5247,0x91A6, 0x5373,0x91A7, 0x606F,0x91A8, 0x6349,0x91A9, 0x675F,0x91AA, 0x6E2C,0x91AB, 0x8DB3,0x91AC, 0x901F,0x91AD, 0x4FD7,0x91AE, 0x5C5E,0x91AF, 0x8CCA,0x91B0, 0x65CF,0x91B1, 0x7D9A,0x91B2, 0x5352,0x91B3, 0x8896,0x91B4, 0x5176,0x91B5, 0x63C3,0x91B6, 0x5B58,0x91B7, 0x5B6B,0x91B8, 0x5C0A,0x91B9, 0x640D,0x91BA, 0x6751,0x91BB, 0x905C,0x91BC, 0x4ED6,0x91BD, 0x591A,0x91BE, 0x592A,0x91BF, 0x6C70,0x91C0, 0x8A51,0x91C1, 0x553E,0x91C2, 0x5815,0x91C3, 0x59A5,0x91C4, 0x60F0,0x91C5, 0x6253,0x91C6, 0x67C1,0x91C7, 0x8235,0x91C8, 0x6955,0x91C9, 0x9640,0x91CA, 0x99C4,0x91CB, 0x9A28,0x91CC, 0x4F53,0x91CD, 0x5806,0x91CE, 0x5BFE,0x91CF, 0x8010,0x91D0, 0x5CB1,0x91D1, 0x5E2F,0x91D2, 0x5F85,0x91D3, 0x6020,0x91D4, 0x614B,0x91D5, 0x6234,0x91D6, 0x66FF,0x91D7, 0x6CF0,0x91D8, 0x6EDE,0x91D9, 0x80CE,0x91DA, 0x817F,0x91DB, 0x82D4,0x91DC, 0x888B,0x91DD, 0x8CB8,0x91DE, 0x9000,0x91DF, 0x902E,0x91E0, 0x968A,0x91E1, 0x9EDB,0x91E2, 0x9BDB,0x91E3, 0x4EE3,0x91E4, 0x53F0,0x91E5, 0x5927,0x91E6, 0x7B2C,0x91E7, 0x918D,0x91E8, 0x984C,0x91E9, 0x9DF9,0x91EA, 0x6EDD,0x91EB, 0x7027,0x91EC, 0x5353,0x91ED, 0x5544,0x91EE, 0x5B85,0x91EF, 0x6258,0x91F0, 0x629E,0x91F1, 0x62D3,0x91F2, 0x6CA2,0x91F3, 0x6FEF,0x91F4, 0x7422,0x91F5, 0x8A17,0x91F6, 0x9438,0x91F7, 0x6FC1,0x91F8, 0x8AFE,0x91F9, 0x8338,0x91FA, 0x51E7,0x91FB, 0x86F8,0x91FC, 0x53EA,0x9240, 0x53E9,0x9241, 0x4F46,0x9242, 0x9054,0x9243, 0x8FB0,0x9244, 0x596A,0x9245, 0x8131,0x9246, 0x5DFD,0x9247, 0x7AEA,0x9248, 0x8FBF,0x9249, 0x68DA,0x924A, 0x8C37,0x924B, 0x72F8,0x924C, 0x9C48,0x924D, 0x6A3D,0x924E, 0x8AB0,0x924F, 0x4E39,0x9250, 0x5358,0x9251, 0x5606,0x9252, 0x5766,0x9253, 0x62C5,0x9254, 0x63A2,0x9255, 0x65E6,0x9256, 0x6B4E,0x9257, 0x6DE1,0x9258, 0x6E5B,0x9259, 0x70AD,0x925A, 0x77ED,0x925B, 0x7AEF,0x925C, 0x7BAA,0x925D, 0x7DBB,0x925E, 0x803D,0x925F, 0x80C6,0x9260, 0x86CB,0x9261, 0x8A95,0x9262, 0x935B,0x9263, 0x56E3,0x9264, 0x58C7,0x9265, 0x5F3E,0x9266, 0x65AD,0x9267, 0x6696,0x9268, 0x6A80,0x9269, 0x6BB5,0x926A, 0x7537,0x926B, 0x8AC7,0x926C, 0x5024,0x926D, 0x77E5,0x926E, 0x5730,0x926F, 0x5F1B,0x9270, 0x6065,0x9271, 0x667A,0x9272, 0x6C60,0x9273, 0x75F4,0x9274, 0x7A1A,0x9275, 0x7F6E,0x9276, 0x81F4,0x9277, 0x8718,0x9278, 0x9045,0x9279, 0x99B3,0x927A, 0x7BC9,0x927B, 0x755C,0x927C, 0x7AF9,0x927D, 0x7B51,0x927E, 0x84C4,0x9280, 0x9010,0x9281, 0x79E9,0x9282, 0x7A92,0x9283, 0x8336,0x9284, 0x5AE1,0x9285, 0x7740,0x9286, 0x4E2D,0x9287, 0x4EF2,0x9288, 0x5B99,0x9289, 0x5FE0,0x928A, 0x62BD,0x928B, 0x663C,0x928C, 0x67F1,0x928D, 0x6CE8,0x928E, 0x866B,0x928F, 0x8877,0x9290, 0x8A3B,0x9291, 0x914E,0x9292, 0x92F3,0x9293, 0x99D0,0x9294, 0x6A17,0x9295, 0x7026,0x9296, 0x732A,0x9297, 0x82E7,0x9298, 0x8457,0x9299, 0x8CAF,0x929A, 0x4E01,0x929B, 0x5146,0x929C, 0x51CB,0x929D, 0x558B,0x929E, 0x5BF5,0x929F, 0x5E16,0x92A0, 0x5E33,0x92A1, 0x5E81,0x92A2, 0x5F14,0x92A3, 0x5F35,0x92A4, 0x5F6B,0x92A5, 0x5FB4,0x92A6, 0x61F2,0x92A7, 0x6311,0x92A8, 0x66A2,0x92A9, 0x671D,0x92AA, 0x6F6E,0x92AB, 0x7252,0x92AC, 0x753A,0x92AD, 0x773A,0x92AE, 0x8074,0x92AF, 0x8139,0x92B0, 0x8178,0x92B1, 0x8776,0x92B2, 0x8ABF,0x92B3, 0x8ADC,0x92B4, 0x8D85,0x92B5, 0x8DF3,0x92B6, 0x929A,0x92B7, 0x9577,0x92B8, 0x9802,0x92B9, 0x9CE5,0x92BA, 0x52C5,0x92BB, 0x6357,0x92BC, 0x76F4,0x92BD, 0x6715,0x92BE, 0x6C88,0x92BF, 0x73CD,0x92C0, 0x8CC3,0x92C1, 0x93AE,0x92C2, 0x9673,0x92C3, 0x6D25,0x92C4, 0x589C,0x92C5, 0x690E,0x92C6, 0x69CC,0x92C7, 0x8FFD,0x92C8, 0x939A,0x92C9, 0x75DB,0x92CA, 0x901A,0x92CB, 0x585A,0x92CC, 0x6802,0x92CD, 
0x63B4,0x92CE, 0x69FB,0x92CF, 0x4F43,0x92D0, 0x6F2C,0x92D1, 0x67D8,0x92D2, 0x8FBB,0x92D3, 0x8526,0x92D4, 0x7DB4,0x92D5, 0x9354,0x92D6, 0x693F,0x92D7, 0x6F70,0x92D8, 0x576A,0x92D9, 0x58F7,0x92DA, 0x5B2C,0x92DB, 0x7D2C,0x92DC, 0x722A,0x92DD, 0x540A,0x92DE, 0x91E3,0x92DF, 0x9DB4,0x92E0, 0x4EAD,0x92E1, 0x4F4E,0x92E2, 0x505C,0x92E3, 0x5075,0x92E4, 0x5243,0x92E5, 0x8C9E,0x92E6, 0x5448,0x92E7, 0x5824,0x92E8, 0x5B9A,0x92E9, 0x5E1D,0x92EA, 0x5E95,0x92EB, 0x5EAD,0x92EC, 0x5EF7,0x92ED, 0x5F1F,0x92EE, 0x608C,0x92EF, 0x62B5,0x92F0, 0x633A,0x92F1, 0x63D0,0x92F2, 0x68AF,0x92F3, 0x6C40,0x92F4, 0x7887,0x92F5, 0x798E,0x92F6, 0x7A0B,0x92F7, 0x7DE0,0x92F8, 0x8247,0x92F9, 0x8A02,0x92FA, 0x8AE6,0x92FB, 0x8E44,0x92FC, 0x9013,0x9340, 0x90B8,0x9341, 0x912D,0x9342, 0x91D8,0x9343, 0x9F0E,0x9344, 0x6CE5,0x9345, 0x6458,0x9346, 0x64E2,0x9347, 0x6575,0x9348, 0x6EF4,0x9349, 0x7684,0x934A, 0x7B1B,0x934B, 0x9069,0x934C, 0x93D1,0x934D, 0x6EBA,0x934E, 0x54F2,0x934F, 0x5FB9,0x9350, 0x64A4,0x9351, 0x8F4D,0x9352, 0x8FED,0x9353, 0x9244,0x9354, 0x5178,0x9355, 0x586B,0x9356, 0x5929,0x9357, 0x5C55,0x9358, 0x5E97,0x9359, 0x6DFB,0x935A, 0x7E8F,0x935B, 0x751C,0x935C, 0x8CBC,0x935D, 0x8EE2,0x935E, 0x985B,0x935F, 0x70B9,0x9360, 0x4F1D,0x9361, 0x6BBF,0x9362, 0x6FB1,0x9363, 0x7530,0x9364, 0x96FB,0x9365, 0x514E,0x9366, 0x5410,0x9367, 0x5835,0x9368, 0x5857,0x9369, 0x59AC,0x936A, 0x5C60,0x936B, 0x5F92,0x936C, 0x6597,0x936D, 0x675C,0x936E, 0x6E21,0x936F, 0x767B,0x9370, 0x83DF,0x9371, 0x8CED,0x9372, 0x9014,0x9373, 0x90FD,0x9374, 0x934D,0x9375, 0x7825,0x9376, 0x783A,0x9377, 0x52AA,0x9378, 0x5EA6,0x9379, 0x571F,0x937A, 0x5974,0x937B, 0x6012,0x937C, 0x5012,0x937D, 0x515A,0x937E, 0x51AC,0x9380, 0x51CD,0x9381, 0x5200,0x9382, 0x5510,0x9383, 0x5854,0x9384, 0x5858,0x9385, 0x5957,0x9386, 0x5B95,0x9387, 0x5CF6,0x9388, 0x5D8B,0x9389, 0x60BC,0x938A, 0x6295,0x938B, 0x642D,0x938C, 0x6771,0x938D, 0x6843,0x938E, 0x68BC,0x938F, 0x68DF,0x9390, 0x76D7,0x9391, 0x6DD8,0x9392, 0x6E6F,0x9393, 0x6D9B,0x9394, 0x706F,0x9395, 0x71C8,0x9396, 0x5F53,0x9397, 0x75D8,0x9398, 0x7977,0x9399, 0x7B49,0x939A, 0x7B54,0x939B, 0x7B52,0x939C, 0x7CD6,0x939D, 0x7D71,0x939E, 0x5230,0x939F, 0x8463,0x93A0, 0x8569,0x93A1, 0x85E4,0x93A2, 0x8A0E,0x93A3, 0x8B04,0x93A4, 0x8C46,0x93A5, 0x8E0F,0x93A6, 0x9003,0x93A7, 0x900F,0x93A8, 0x9419,0x93A9, 0x9676,0x93AA, 0x982D,0x93AB, 0x9A30,0x93AC, 0x95D8,0x93AD, 0x50CD,0x93AE, 0x52D5,0x93AF, 0x540C,0x93B0, 0x5802,0x93B1, 0x5C0E,0x93B2, 0x61A7,0x93B3, 0x649E,0x93B4, 0x6D1E,0x93B5, 0x77B3,0x93B6, 0x7AE5,0x93B7, 0x80F4,0x93B8, 0x8404,0x93B9, 0x9053,0x93BA, 0x9285,0x93BB, 0x5CE0,0x93BC, 0x9D07,0x93BD, 0x533F,0x93BE, 0x5F97,0x93BF, 0x5FB3,0x93C0, 0x6D9C,0x93C1, 0x7279,0x93C2, 0x7763,0x93C3, 0x79BF,0x93C4, 0x7BE4,0x93C5, 0x6BD2,0x93C6, 0x72EC,0x93C7, 0x8AAD,0x93C8, 0x6803,0x93C9, 0x6A61,0x93CA, 0x51F8,0x93CB, 0x7A81,0x93CC, 0x6934,0x93CD, 0x5C4A,0x93CE, 0x9CF6,0x93CF, 0x82EB,0x93D0, 0x5BC5,0x93D1, 0x9149,0x93D2, 0x701E,0x93D3, 0x5678,0x93D4, 0x5C6F,0x93D5, 0x60C7,0x93D6, 0x6566,0x93D7, 0x6C8C,0x93D8, 0x8C5A,0x93D9, 0x9041,0x93DA, 0x9813,0x93DB, 0x5451,0x93DC, 0x66C7,0x93DD, 0x920D,0x93DE, 0x5948,0x93DF, 0x90A3,0x93E0, 0x5185,0x93E1, 0x4E4D,0x93E2, 0x51EA,0x93E3, 0x8599,0x93E4, 0x8B0E,0x93E5, 0x7058,0x93E6, 0x637A,0x93E7, 0x934B,0x93E8, 0x6962,0x93E9, 0x99B4,0x93EA, 0x7E04,0x93EB, 0x7577,0x93EC, 0x5357,0x93ED, 0x6960,0x93EE, 0x8EDF,0x93EF, 0x96E3,0x93F0, 0x6C5D,0x93F1, 0x4E8C,0x93F2, 0x5C3C,0x93F3, 0x5F10,0x93F4, 0x8FE9,0x93F5, 0x5302,0x93F6, 0x8CD1,0x93F7, 0x8089,0x93F8, 0x8679,0x93F9, 0x5EFF,0x93FA, 0x65E5,0x93FB, 0x4E73,0x93FC, 0x5165,0x9440, 0x5982,0x9441, 
0x5C3F,0x9442, 0x97EE,0x9443, 0x4EFB,0x9444, 0x598A,0x9445, 0x5FCD,0x9446, 0x8A8D,0x9447, 0x6FE1,0x9448, 0x79B0,0x9449, 0x7962,0x944A, 0x5BE7,0x944B, 0x8471,0x944C, 0x732B,0x944D, 0x71B1,0x944E, 0x5E74,0x944F, 0x5FF5,0x9450, 0x637B,0x9451, 0x649A,0x9452, 0x71C3,0x9453, 0x7C98,0x9454, 0x4E43,0x9455, 0x5EFC,0x9456, 0x4E4B,0x9457, 0x57DC,0x9458, 0x56A2,0x9459, 0x60A9,0x945A, 0x6FC3,0x945B, 0x7D0D,0x945C, 0x80FD,0x945D, 0x8133,0x945E, 0x81BF,0x945F, 0x8FB2,0x9460, 0x8997,0x9461, 0x86A4,0x9462, 0x5DF4,0x9463, 0x628A,0x9464, 0x64AD,0x9465, 0x8987,0x9466, 0x6777,0x9467, 0x6CE2,0x9468, 0x6D3E,0x9469, 0x7436,0x946A, 0x7834,0x946B, 0x5A46,0x946C, 0x7F75,0x946D, 0x82AD,0x946E, 0x99AC,0x946F, 0x4FF3,0x9470, 0x5EC3,0x9471, 0x62DD,0x9472, 0x6392,0x9473, 0x6557,0x9474, 0x676F,0x9475, 0x76C3,0x9476, 0x724C,0x9477, 0x80CC,0x9478, 0x80BA,0x9479, 0x8F29,0x947A, 0x914D,0x947B, 0x500D,0x947C, 0x57F9,0x947D, 0x5A92,0x947E, 0x6885,0x9480, 0x6973,0x9481, 0x7164,0x9482, 0x72FD,0x9483, 0x8CB7,0x9484, 0x58F2,0x9485, 0x8CE0,0x9486, 0x966A,0x9487, 0x9019,0x9488, 0x877F,0x9489, 0x79E4,0x948A, 0x77E7,0x948B, 0x8429,0x948C, 0x4F2F,0x948D, 0x5265,0x948E, 0x535A,0x948F, 0x62CD,0x9490, 0x67CF,0x9491, 0x6CCA,0x9492, 0x767D,0x9493, 0x7B94,0x9494, 0x7C95,0x9495, 0x8236,0x9496, 0x8584,0x9497, 0x8FEB,0x9498, 0x66DD,0x9499, 0x6F20,0x949A, 0x7206,0x949B, 0x7E1B,0x949C, 0x83AB,0x949D, 0x99C1,0x949E, 0x9EA6,0x949F, 0x51FD,0x94A0, 0x7BB1,0x94A1, 0x7872,0x94A2, 0x7BB8,0x94A3, 0x8087,0x94A4, 0x7B48,0x94A5, 0x6AE8,0x94A6, 0x5E61,0x94A7, 0x808C,0x94A8, 0x7551,0x94A9, 0x7560,0x94AA, 0x516B,0x94AB, 0x9262,0x94AC, 0x6E8C,0x94AD, 0x767A,0x94AE, 0x9197,0x94AF, 0x9AEA,0x94B0, 0x4F10,0x94B1, 0x7F70,0x94B2, 0x629C,0x94B3, 0x7B4F,0x94B4, 0x95A5,0x94B5, 0x9CE9,0x94B6, 0x567A,0x94B7, 0x5859,0x94B8, 0x86E4,0x94B9, 0x96BC,0x94BA, 0x4F34,0x94BB, 0x5224,0x94BC, 0x534A,0x94BD, 0x53CD,0x94BE, 0x53DB,0x94BF, 0x5E06,0x94C0, 0x642C,0x94C1, 0x6591,0x94C2, 0x677F,0x94C3, 0x6C3E,0x94C4, 0x6C4E,0x94C5, 0x7248,0x94C6, 0x72AF,0x94C7, 0x73ED,0x94C8, 0x7554,0x94C9, 0x7E41,0x94CA, 0x822C,0x94CB, 0x85E9,0x94CC, 0x8CA9,0x94CD, 0x7BC4,0x94CE, 0x91C6,0x94CF, 0x7169,0x94D0, 0x9812,0x94D1, 0x98EF,0x94D2, 0x633D,0x94D3, 0x6669,0x94D4, 0x756A,0x94D5, 0x76E4,0x94D6, 0x78D0,0x94D7, 0x8543,0x94D8, 0x86EE,0x94D9, 0x532A,0x94DA, 0x5351,0x94DB, 0x5426,0x94DC, 0x5983,0x94DD, 0x5E87,0x94DE, 0x5F7C,0x94DF, 0x60B2,0x94E0, 0x6249,0x94E1, 0x6279,0x94E2, 0x62AB,0x94E3, 0x6590,0x94E4, 0x6BD4,0x94E5, 0x6CCC,0x94E6, 0x75B2,0x94E7, 0x76AE,0x94E8, 0x7891,0x94E9, 0x79D8,0x94EA, 0x7DCB,0x94EB, 0x7F77,0x94EC, 0x80A5,0x94ED, 0x88AB,0x94EE, 0x8AB9,0x94EF, 0x8CBB,0x94F0, 0x907F,0x94F1, 0x975E,0x94F2, 0x98DB,0x94F3, 0x6A0B,0x94F4, 0x7C38,0x94F5, 0x5099,0x94F6, 0x5C3E,0x94F7, 0x5FAE,0x94F8, 0x6787,0x94F9, 0x6BD8,0x94FA, 0x7435,0x94FB, 0x7709,0x94FC, 0x7F8E,0x9540, 0x9F3B,0x9541, 0x67CA,0x9542, 0x7A17,0x9543, 0x5339,0x9544, 0x758B,0x9545, 0x9AED,0x9546, 0x5F66,0x9547, 0x819D,0x9548, 0x83F1,0x9549, 0x8098,0x954A, 0x5F3C,0x954B, 0x5FC5,0x954C, 0x7562,0x954D, 0x7B46,0x954E, 0x903C,0x954F, 0x6867,0x9550, 0x59EB,0x9551, 0x5A9B,0x9552, 0x7D10,0x9553, 0x767E,0x9554, 0x8B2C,0x9555, 0x4FF5,0x9556, 0x5F6A,0x9557, 0x6A19,0x9558, 0x6C37,0x9559, 0x6F02,0x955A, 0x74E2,0x955B, 0x7968,0x955C, 0x8868,0x955D, 0x8A55,0x955E, 0x8C79,0x955F, 0x5EDF,0x9560, 0x63CF,0x9561, 0x75C5,0x9562, 0x79D2,0x9563, 0x82D7,0x9564, 0x9328,0x9565, 0x92F2,0x9566, 0x849C,0x9567, 0x86ED,0x9568, 0x9C2D,0x9569, 0x54C1,0x956A, 0x5F6C,0x956B, 0x658C,0x956C, 0x6D5C,0x956D, 0x7015,0x956E, 0x8CA7,0x956F, 0x8CD3,0x9570, 0x983B,0x9571, 0x654F,0x9572, 
0x74F6,0x9573, 0x4E0D,0x9574, 0x4ED8,0x9575, 0x57E0,0x9576, 0x592B,0x9577, 0x5A66,0x9578, 0x5BCC,0x9579, 0x51A8,0x957A, 0x5E03,0x957B, 0x5E9C,0x957C, 0x6016,0x957D, 0x6276,0x957E, 0x6577,0x9580, 0x65A7,0x9581, 0x666E,0x9582, 0x6D6E,0x9583, 0x7236,0x9584, 0x7B26,0x9585, 0x8150,0x9586, 0x819A,0x9587, 0x8299,0x9588, 0x8B5C,0x9589, 0x8CA0,0x958A, 0x8CE6,0x958B, 0x8D74,0x958C, 0x961C,0x958D, 0x9644,0x958E, 0x4FAE,0x958F, 0x64AB,0x9590, 0x6B66,0x9591, 0x821E,0x9592, 0x8461,0x9593, 0x856A,0x9594, 0x90E8,0x9595, 0x5C01,0x9596, 0x6953,0x9597, 0x98A8,0x9598, 0x847A,0x9599, 0x8557,0x959A, 0x4F0F,0x959B, 0x526F,0x959C, 0x5FA9,0x959D, 0x5E45,0x959E, 0x670D,0x959F, 0x798F,0x95A0, 0x8179,0x95A1, 0x8907,0x95A2, 0x8986,0x95A3, 0x6DF5,0x95A4, 0x5F17,0x95A5, 0x6255,0x95A6, 0x6CB8,0x95A7, 0x4ECF,0x95A8, 0x7269,0x95A9, 0x9B92,0x95AA, 0x5206,0x95AB, 0x543B,0x95AC, 0x5674,0x95AD, 0x58B3,0x95AE, 0x61A4,0x95AF, 0x626E,0x95B0, 0x711A,0x95B1, 0x596E,0x95B2, 0x7C89,0x95B3, 0x7CDE,0x95B4, 0x7D1B,0x95B5, 0x96F0,0x95B6, 0x6587,0x95B7, 0x805E,0x95B8, 0x4E19,0x95B9, 0x4F75,0x95BA, 0x5175,0x95BB, 0x5840,0x95BC, 0x5E63,0x95BD, 0x5E73,0x95BE, 0x5F0A,0x95BF, 0x67C4,0x95C0, 0x4E26,0x95C1, 0x853D,0x95C2, 0x9589,0x95C3, 0x965B,0x95C4, 0x7C73,0x95C5, 0x9801,0x95C6, 0x50FB,0x95C7, 0x58C1,0x95C8, 0x7656,0x95C9, 0x78A7,0x95CA, 0x5225,0x95CB, 0x77A5,0x95CC, 0x8511,0x95CD, 0x7B86,0x95CE, 0x504F,0x95CF, 0x5909,0x95D0, 0x7247,0x95D1, 0x7BC7,0x95D2, 0x7DE8,0x95D3, 0x8FBA,0x95D4, 0x8FD4,0x95D5, 0x904D,0x95D6, 0x4FBF,0x95D7, 0x52C9,0x95D8, 0x5A29,0x95D9, 0x5F01,0x95DA, 0x97AD,0x95DB, 0x4FDD,0x95DC, 0x8217,0x95DD, 0x92EA,0x95DE, 0x5703,0x95DF, 0x6355,0x95E0, 0x6B69,0x95E1, 0x752B,0x95E2, 0x88DC,0x95E3, 0x8F14,0x95E4, 0x7A42,0x95E5, 0x52DF,0x95E6, 0x5893,0x95E7, 0x6155,0x95E8, 0x620A,0x95E9, 0x66AE,0x95EA, 0x6BCD,0x95EB, 0x7C3F,0x95EC, 0x83E9,0x95ED, 0x5023,0x95EE, 0x4FF8,0x95EF, 0x5305,0x95F0, 0x5446,0x95F1, 0x5831,0x95F2, 0x5949,0x95F3, 0x5B9D,0x95F4, 0x5CF0,0x95F5, 0x5CEF,0x95F6, 0x5D29,0x95F7, 0x5E96,0x95F8, 0x62B1,0x95F9, 0x6367,0x95FA, 0x653E,0x95FB, 0x65B9,0x95FC, 0x670B,0x9640, 0x6CD5,0x9641, 0x6CE1,0x9642, 0x70F9,0x9643, 0x7832,0x9644, 0x7E2B,0x9645, 0x80DE,0x9646, 0x82B3,0x9647, 0x840C,0x9648, 0x84EC,0x9649, 0x8702,0x964A, 0x8912,0x964B, 0x8A2A,0x964C, 0x8C4A,0x964D, 0x90A6,0x964E, 0x92D2,0x964F, 0x98FD,0x9650, 0x9CF3,0x9651, 0x9D6C,0x9652, 0x4E4F,0x9653, 0x4EA1,0x9654, 0x508D,0x9655, 0x5256,0x9656, 0x574A,0x9657, 0x59A8,0x9658, 0x5E3D,0x9659, 0x5FD8,0x965A, 0x5FD9,0x965B, 0x623F,0x965C, 0x66B4,0x965D, 0x671B,0x965E, 0x67D0,0x965F, 0x68D2,0x9660, 0x5192,0x9661, 0x7D21,0x9662, 0x80AA,0x9663, 0x81A8,0x9664, 0x8B00,0x9665, 0x8C8C,0x9666, 0x8CBF,0x9667, 0x927E,0x9668, 0x9632,0x9669, 0x5420,0x966A, 0x982C,0x966B, 0x5317,0x966C, 0x50D5,0x966D, 0x535C,0x966E, 0x58A8,0x966F, 0x64B2,0x9670, 0x6734,0x9671, 0x7267,0x9672, 0x7766,0x9673, 0x7A46,0x9674, 0x91E6,0x9675, 0x52C3,0x9676, 0x6CA1,0x9677, 0x6B86,0x9678, 0x5800,0x9679, 0x5E4C,0x967A, 0x5954,0x967B, 0x672C,0x967C, 0x7FFB,0x967D, 0x51E1,0x967E, 0x76C6,0x9680, 0x6469,0x9681, 0x78E8,0x9682, 0x9B54,0x9683, 0x9EBB,0x9684, 0x57CB,0x9685, 0x59B9,0x9686, 0x6627,0x9687, 0x679A,0x9688, 0x6BCE,0x9689, 0x54E9,0x968A, 0x69D9,0x968B, 0x5E55,0x968C, 0x819C,0x968D, 0x6795,0x968E, 0x9BAA,0x968F, 0x67FE,0x9690, 0x9C52,0x9691, 0x685D,0x9692, 0x4EA6,0x9693, 0x4FE3,0x9694, 0x53C8,0x9695, 0x62B9,0x9696, 0x672B,0x9697, 0x6CAB,0x9698, 0x8FC4,0x9699, 0x4FAD,0x969A, 0x7E6D,0x969B, 0x9EBF,0x969C, 0x4E07,0x969D, 0x6162,0x969E, 0x6E80,0x969F, 0x6F2B,0x96A0, 0x8513,0x96A1, 0x5473,0x96A2, 0x672A,0x96A3, 0x9B45,0x96A4, 
0x5DF3,0x96A5, 0x7B95,0x96A6, 0x5CAC,0x96A7, 0x5BC6,0x96A8, 0x871C,0x96A9, 0x6E4A,0x96AA, 0x84D1,0x96AB, 0x7A14,0x96AC, 0x8108,0x96AD, 0x5999,0x96AE, 0x7C8D,0x96AF, 0x6C11,0x96B0, 0x7720,0x96B1, 0x52D9,0x96B2, 0x5922,0x96B3, 0x7121,0x96B4, 0x725F,0x96B5, 0x77DB,0x96B6, 0x9727,0x96B7, 0x9D61,0x96B8, 0x690B,0x96B9, 0x5A7F,0x96BA, 0x5A18,0x96BB, 0x51A5,0x96BC, 0x540D,0x96BD, 0x547D,0x96BE, 0x660E,0x96BF, 0x76DF,0x96C0, 0x8FF7,0x96C1, 0x9298,0x96C2, 0x9CF4,0x96C3, 0x59EA,0x96C4, 0x725D,0x96C5, 0x6EC5,0x96C6, 0x514D,0x96C7, 0x68C9,0x96C8, 0x7DBF,0x96C9, 0x7DEC,0x96CA, 0x9762,0x96CB, 0x9EBA,0x96CC, 0x6478,0x96CD, 0x6A21,0x96CE, 0x8302,0x96CF, 0x5984,0x96D0, 0x5B5F,0x96D1, 0x6BDB,0x96D2, 0x731B,0x96D3, 0x76F2,0x96D4, 0x7DB2,0x96D5, 0x8017,0x96D6, 0x8499,0x96D7, 0x5132,0x96D8, 0x6728,0x96D9, 0x9ED9,0x96DA, 0x76EE,0x96DB, 0x6762,0x96DC, 0x52FF,0x96DD, 0x9905,0x96DE, 0x5C24,0x96DF, 0x623B,0x96E0, 0x7C7E,0x96E1, 0x8CB0,0x96E2, 0x554F,0x96E3, 0x60B6,0x96E4, 0x7D0B,0x96E5, 0x9580,0x96E6, 0x5301,0x96E7, 0x4E5F,0x96E8, 0x51B6,0x96E9, 0x591C,0x96EA, 0x723A,0x96EB, 0x8036,0x96EC, 0x91CE,0x96ED, 0x5F25,0x96EE, 0x77E2,0x96EF, 0x5384,0x96F0, 0x5F79,0x96F1, 0x7D04,0x96F2, 0x85AC,0x96F3, 0x8A33,0x96F4, 0x8E8D,0x96F5, 0x9756,0x96F6, 0x67F3,0x96F7, 0x85AE,0x96F8, 0x9453,0x96F9, 0x6109,0x96FA, 0x6108,0x96FB, 0x6CB9,0x96FC, 0x7652,0x9740, 0x8AED,0x9741, 0x8F38,0x9742, 0x552F,0x9743, 0x4F51,0x9744, 0x512A,0x9745, 0x52C7,0x9746, 0x53CB,0x9747, 0x5BA5,0x9748, 0x5E7D,0x9749, 0x60A0,0x974A, 0x6182,0x974B, 0x63D6,0x974C, 0x6709,0x974D, 0x67DA,0x974E, 0x6E67,0x974F, 0x6D8C,0x9750, 0x7336,0x9751, 0x7337,0x9752, 0x7531,0x9753, 0x7950,0x9754, 0x88D5,0x9755, 0x8A98,0x9756, 0x904A,0x9757, 0x9091,0x9758, 0x90F5,0x9759, 0x96C4,0x975A, 0x878D,0x975B, 0x5915,0x975C, 0x4E88,0x975D, 0x4F59,0x975E, 0x4E0E,0x975F, 0x8A89,0x9760, 0x8F3F,0x9761, 0x9810,0x9762, 0x50AD,0x9763, 0x5E7C,0x9764, 0x5996,0x9765, 0x5BB9,0x9766, 0x5EB8,0x9767, 0x63DA,0x9768, 0x63FA,0x9769, 0x64C1,0x976A, 0x66DC,0x976B, 0x694A,0x976C, 0x69D8,0x976D, 0x6D0B,0x976E, 0x6EB6,0x976F, 0x7194,0x9770, 0x7528,0x9771, 0x7AAF,0x9772, 0x7F8A,0x9773, 0x8000,0x9774, 0x8449,0x9775, 0x84C9,0x9776, 0x8981,0x9777, 0x8B21,0x9778, 0x8E0A,0x9779, 0x9065,0x977A, 0x967D,0x977B, 0x990A,0x977C, 0x617E,0x977D, 0x6291,0x977E, 0x6B32,0x9780, 0x6C83,0x9781, 0x6D74,0x9782, 0x7FCC,0x9783, 0x7FFC,0x9784, 0x6DC0,0x9785, 0x7F85,0x9786, 0x87BA,0x9787, 0x88F8,0x9788, 0x6765,0x9789, 0x83B1,0x978A, 0x983C,0x978B, 0x96F7,0x978C, 0x6D1B,0x978D, 0x7D61,0x978E, 0x843D,0x978F, 0x916A,0x9790, 0x4E71,0x9791, 0x5375,0x9792, 0x5D50,0x9793, 0x6B04,0x9794, 0x6FEB,0x9795, 0x85CD,0x9796, 0x862D,0x9797, 0x89A7,0x9798, 0x5229,0x9799, 0x540F,0x979A, 0x5C65,0x979B, 0x674E,0x979C, 0x68A8,0x979D, 0x7406,0x979E, 0x7483,0x979F, 0x75E2,0x97A0, 0x88CF,0x97A1, 0x88E1,0x97A2, 0x91CC,0x97A3, 0x96E2,0x97A4, 0x9678,0x97A5, 0x5F8B,0x97A6, 0x7387,0x97A7, 0x7ACB,0x97A8, 0x844E,0x97A9, 0x63A0,0x97AA, 0x7565,0x97AB, 0x5289,0x97AC, 0x6D41,0x97AD, 0x6E9C,0x97AE, 0x7409,0x97AF, 0x7559,0x97B0, 0x786B,0x97B1, 0x7C92,0x97B2, 0x9686,0x97B3, 0x7ADC,0x97B4, 0x9F8D,0x97B5, 0x4FB6,0x97B6, 0x616E,0x97B7, 0x65C5,0x97B8, 0x865C,0x97B9, 0x4E86,0x97BA, 0x4EAE,0x97BB, 0x50DA,0x97BC, 0x4E21,0x97BD, 0x51CC,0x97BE, 0x5BEE,0x97BF, 0x6599,0x97C0, 0x6881,0x97C1, 0x6DBC,0x97C2, 0x731F,0x97C3, 0x7642,0x97C4, 0x77AD,0x97C5, 0x7A1C,0x97C6, 0x7CE7,0x97C7, 0x826F,0x97C8, 0x8AD2,0x97C9, 0x907C,0x97CA, 0x91CF,0x97CB, 0x9675,0x97CC, 0x9818,0x97CD, 0x529B,0x97CE, 0x7DD1,0x97CF, 0x502B,0x97D0, 0x5398,0x97D1, 0x6797,0x97D2, 0x6DCB,0x97D3, 0x71D0,0x97D4, 0x7433,0x97D5, 
0x81E8,0x97D6, 0x8F2A,0x97D7, 0x96A3,0x97D8, 0x9C57,0x97D9, 0x9E9F,0x97DA, 0x7460,0x97DB, 0x5841,0x97DC, 0x6D99,0x97DD, 0x7D2F,0x97DE, 0x985E,0x97DF, 0x4EE4,0x97E0, 0x4F36,0x97E1, 0x4F8B,0x97E2, 0x51B7,0x97E3, 0x52B1,0x97E4, 0x5DBA,0x97E5, 0x601C,0x97E6, 0x73B2,0x97E7, 0x793C,0x97E8, 0x82D3,0x97E9, 0x9234,0x97EA, 0x96B7,0x97EB, 0x96F6,0x97EC, 0x970A,0x97ED, 0x9E97,0x97EE, 0x9F62,0x97EF, 0x66A6,0x97F0, 0x6B74,0x97F1, 0x5217,0x97F2, 0x52A3,0x97F3, 0x70C8,0x97F4, 0x88C2,0x97F5, 0x5EC9,0x97F6, 0x604B,0x97F7, 0x6190,0x97F8, 0x6F23,0x97F9, 0x7149,0x97FA, 0x7C3E,0x97FB, 0x7DF4,0x97FC, 0x806F,0x9840, 0x84EE,0x9841, 0x9023,0x9842, 0x932C,0x9843, 0x5442,0x9844, 0x9B6F,0x9845, 0x6AD3,0x9846, 0x7089,0x9847, 0x8CC2,0x9848, 0x8DEF,0x9849, 0x9732,0x984A, 0x52B4,0x984B, 0x5A41,0x984C, 0x5ECA,0x984D, 0x5F04,0x984E, 0x6717,0x984F, 0x697C,0x9850, 0x6994,0x9851, 0x6D6A,0x9852, 0x6F0F,0x9853, 0x7262,0x9854, 0x72FC,0x9855, 0x7BED,0x9856, 0x8001,0x9857, 0x807E,0x9858, 0x874B,0x9859, 0x90CE,0x985A, 0x516D,0x985B, 0x9E93,0x985C, 0x7984,0x985D, 0x808B,0x985E, 0x9332,0x985F, 0x8AD6,0x9860, 0x502D,0x9861, 0x548C,0x9862, 0x8A71,0x9863, 0x6B6A,0x9864, 0x8CC4,0x9865, 0x8107,0x9866, 0x60D1,0x9867, 0x67A0,0x9868, 0x9DF2,0x9869, 0x4E99,0x986A, 0x4E98,0x986B, 0x9C10,0x986C, 0x8A6B,0x986D, 0x85C1,0x986E, 0x8568,0x986F, 0x6900,0x9870, 0x6E7E,0x9871, 0x7897,0x9872, 0x8155,0x989F, 0x5F0C,0x98A0, 0x4E10,0x98A1, 0x4E15,0x98A2, 0x4E2A,0x98A3, 0x4E31,0x98A4, 0x4E36,0x98A5, 0x4E3C,0x98A6, 0x4E3F,0x98A7, 0x4E42,0x98A8, 0x4E56,0x98A9, 0x4E58,0x98AA, 0x4E82,0x98AB, 0x4E85,0x98AC, 0x8C6B,0x98AD, 0x4E8A,0x98AE, 0x8212,0x98AF, 0x5F0D,0x98B0, 0x4E8E,0x98B1, 0x4E9E,0x98B2, 0x4E9F,0x98B3, 0x4EA0,0x98B4, 0x4EA2,0x98B5, 0x4EB0,0x98B6, 0x4EB3,0x98B7, 0x4EB6,0x98B8, 0x4ECE,0x98B9, 0x4ECD,0x98BA, 0x4EC4,0x98BB, 0x4EC6,0x98BC, 0x4EC2,0x98BD, 0x4ED7,0x98BE, 0x4EDE,0x98BF, 0x4EED,0x98C0, 0x4EDF,0x98C1, 0x4EF7,0x98C2, 0x4F09,0x98C3, 0x4F5A,0x98C4, 0x4F30,0x98C5, 0x4F5B,0x98C6, 0x4F5D,0x98C7, 0x4F57,0x98C8, 0x4F47,0x98C9, 0x4F76,0x98CA, 0x4F88,0x98CB, 0x4F8F,0x98CC, 0x4F98,0x98CD, 0x4F7B,0x98CE, 0x4F69,0x98CF, 0x4F70,0x98D0, 0x4F91,0x98D1, 0x4F6F,0x98D2, 0x4F86,0x98D3, 0x4F96,0x98D4, 0x5118,0x98D5, 0x4FD4,0x98D6, 0x4FDF,0x98D7, 0x4FCE,0x98D8, 0x4FD8,0x98D9, 0x4FDB,0x98DA, 0x4FD1,0x98DB, 0x4FDA,0x98DC, 0x4FD0,0x98DD, 0x4FE4,0x98DE, 0x4FE5,0x98DF, 0x501A,0x98E0, 0x5028,0x98E1, 0x5014,0x98E2, 0x502A,0x98E3, 0x5025,0x98E4, 0x5005,0x98E5, 0x4F1C,0x98E6, 0x4FF6,0x98E7, 0x5021,0x98E8, 0x5029,0x98E9, 0x502C,0x98EA, 0x4FFE,0x98EB, 0x4FEF,0x98EC, 0x5011,0x98ED, 0x5006,0x98EE, 0x5043,0x98EF, 0x5047,0x98F0, 0x6703,0x98F1, 0x5055,0x98F2, 0x5050,0x98F3, 0x5048,0x98F4, 0x505A,0x98F5, 0x5056,0x98F6, 0x506C,0x98F7, 0x5078,0x98F8, 0x5080,0x98F9, 0x509A,0x98FA, 0x5085,0x98FB, 0x50B4,0x98FC, 0x50B2,0x9940, 0x50C9,0x9941, 0x50CA,0x9942, 0x50B3,0x9943, 0x50C2,0x9944, 0x50D6,0x9945, 0x50DE,0x9946, 0x50E5,0x9947, 0x50ED,0x9948, 0x50E3,0x9949, 0x50EE,0x994A, 0x50F9,0x994B, 0x50F5,0x994C, 0x5109,0x994D, 0x5101,0x994E, 0x5102,0x994F, 0x5116,0x9950, 0x5115,0x9951, 0x5114,0x9952, 0x511A,0x9953, 0x5121,0x9954, 0x513A,0x9955, 0x5137,0x9956, 0x513C,0x9957, 0x513B,0x9958, 0x513F,0x9959, 0x5140,0x995A, 0x5152,0x995B, 0x514C,0x995C, 0x5154,0x995D, 0x5162,0x995E, 0x7AF8,0x995F, 0x5169,0x9960, 0x516A,0x9961, 0x516E,0x9962, 0x5180,0x9963, 0x5182,0x9964, 0x56D8,0x9965, 0x518C,0x9966, 0x5189,0x9967, 0x518F,0x9968, 0x5191,0x9969, 0x5193,0x996A, 0x5195,0x996B, 0x5196,0x996C, 0x51A4,0x996D, 0x51A6,0x996E, 0x51A2,0x996F, 0x51A9,0x9970, 0x51AA,0x9971, 0x51AB,0x9972, 0x51B3,0x9973, 0x51B1,0x9974, 
0x51B2,0x9975, 0x51B0,0x9976, 0x51B5,0x9977, 0x51BD,0x9978, 0x51C5,0x9979, 0x51C9,0x997A, 0x51DB,0x997B, 0x51E0,0x997C, 0x8655,0x997D, 0x51E9,0x997E, 0x51ED,0x9980, 0x51F0,0x9981, 0x51F5,0x9982, 0x51FE,0x9983, 0x5204,0x9984, 0x520B,0x9985, 0x5214,0x9986, 0x520E,0x9987, 0x5227,0x9988, 0x522A,0x9989, 0x522E,0x998A, 0x5233,0x998B, 0x5239,0x998C, 0x524F,0x998D, 0x5244,0x998E, 0x524B,0x998F, 0x524C,0x9990, 0x525E,0x9991, 0x5254,0x9992, 0x526A,0x9993, 0x5274,0x9994, 0x5269,0x9995, 0x5273,0x9996, 0x527F,0x9997, 0x527D,0x9998, 0x528D,0x9999, 0x5294,0x999A, 0x5292,0x999B, 0x5271,0x999C, 0x5288,0x999D, 0x5291,0x999E, 0x8FA8,0x999F, 0x8FA7,0x99A0, 0x52AC,0x99A1, 0x52AD,0x99A2, 0x52BC,0x99A3, 0x52B5,0x99A4, 0x52C1,0x99A5, 0x52CD,0x99A6, 0x52D7,0x99A7, 0x52DE,0x99A8, 0x52E3,0x99A9, 0x52E6,0x99AA, 0x98ED,0x99AB, 0x52E0,0x99AC, 0x52F3,0x99AD, 0x52F5,0x99AE, 0x52F8,0x99AF, 0x52F9,0x99B0, 0x5306,0x99B1, 0x5308,0x99B2, 0x7538,0x99B3, 0x530D,0x99B4, 0x5310,0x99B5, 0x530F,0x99B6, 0x5315,0x99B7, 0x531A,0x99B8, 0x5323,0x99B9, 0x532F,0x99BA, 0x5331,0x99BB, 0x5333,0x99BC, 0x5338,0x99BD, 0x5340,0x99BE, 0x5346,0x99BF, 0x5345,0x99C0, 0x4E17,0x99C1, 0x5349,0x99C2, 0x534D,0x99C3, 0x51D6,0x99C4, 0x535E,0x99C5, 0x5369,0x99C6, 0x536E,0x99C7, 0x5918,0x99C8, 0x537B,0x99C9, 0x5377,0x99CA, 0x5382,0x99CB, 0x5396,0x99CC, 0x53A0,0x99CD, 0x53A6,0x99CE, 0x53A5,0x99CF, 0x53AE,0x99D0, 0x53B0,0x99D1, 0x53B6,0x99D2, 0x53C3,0x99D3, 0x7C12,0x99D4, 0x96D9,0x99D5, 0x53DF,0x99D6, 0x66FC,0x99D7, 0x71EE,0x99D8, 0x53EE,0x99D9, 0x53E8,0x99DA, 0x53ED,0x99DB, 0x53FA,0x99DC, 0x5401,0x99DD, 0x543D,0x99DE, 0x5440,0x99DF, 0x542C,0x99E0, 0x542D,0x99E1, 0x543C,0x99E2, 0x542E,0x99E3, 0x5436,0x99E4, 0x5429,0x99E5, 0x541D,0x99E6, 0x544E,0x99E7, 0x548F,0x99E8, 0x5475,0x99E9, 0x548E,0x99EA, 0x545F,0x99EB, 0x5471,0x99EC, 0x5477,0x99ED, 0x5470,0x99EE, 0x5492,0x99EF, 0x547B,0x99F0, 0x5480,0x99F1, 0x5476,0x99F2, 0x5484,0x99F3, 0x5490,0x99F4, 0x5486,0x99F5, 0x54C7,0x99F6, 0x54A2,0x99F7, 0x54B8,0x99F8, 0x54A5,0x99F9, 0x54AC,0x99FA, 0x54C4,0x99FB, 0x54C8,0x99FC, 0x54A8,0x9A40, 0x54AB,0x9A41, 0x54C2,0x9A42, 0x54A4,0x9A43, 0x54BE,0x9A44, 0x54BC,0x9A45, 0x54D8,0x9A46, 0x54E5,0x9A47, 0x54E6,0x9A48, 0x550F,0x9A49, 0x5514,0x9A4A, 0x54FD,0x9A4B, 0x54EE,0x9A4C, 0x54ED,0x9A4D, 0x54FA,0x9A4E, 0x54E2,0x9A4F, 0x5539,0x9A50, 0x5540,0x9A51, 0x5563,0x9A52, 0x554C,0x9A53, 0x552E,0x9A54, 0x555C,0x9A55, 0x5545,0x9A56, 0x5556,0x9A57, 0x5557,0x9A58, 0x5538,0x9A59, 0x5533,0x9A5A, 0x555D,0x9A5B, 0x5599,0x9A5C, 0x5580,0x9A5D, 0x54AF,0x9A5E, 0x558A,0x9A5F, 0x559F,0x9A60, 0x557B,0x9A61, 0x557E,0x9A62, 0x5598,0x9A63, 0x559E,0x9A64, 0x55AE,0x9A65, 0x557C,0x9A66, 0x5583,0x9A67, 0x55A9,0x9A68, 0x5587,0x9A69, 0x55A8,0x9A6A, 0x55DA,0x9A6B, 0x55C5,0x9A6C, 0x55DF,0x9A6D, 0x55C4,0x9A6E, 0x55DC,0x9A6F, 0x55E4,0x9A70, 0x55D4,0x9A71, 0x5614,0x9A72, 0x55F7,0x9A73, 0x5616,0x9A74, 0x55FE,0x9A75, 0x55FD,0x9A76, 0x561B,0x9A77, 0x55F9,0x9A78, 0x564E,0x9A79, 0x5650,0x9A7A, 0x71DF,0x9A7B, 0x5634,0x9A7C, 0x5636,0x9A7D, 0x5632,0x9A7E, 0x5638,0x9A80, 0x566B,0x9A81, 0x5664,0x9A82, 0x562F,0x9A83, 0x566C,0x9A84, 0x566A,0x9A85, 0x5686,0x9A86, 0x5680,0x9A87, 0x568A,0x9A88, 0x56A0,0x9A89, 0x5694,0x9A8A, 0x568F,0x9A8B, 0x56A5,0x9A8C, 0x56AE,0x9A8D, 0x56B6,0x9A8E, 0x56B4,0x9A8F, 0x56C2,0x9A90, 0x56BC,0x9A91, 0x56C1,0x9A92, 0x56C3,0x9A93, 0x56C0,0x9A94, 0x56C8,0x9A95, 0x56CE,0x9A96, 0x56D1,0x9A97, 0x56D3,0x9A98, 0x56D7,0x9A99, 0x56EE,0x9A9A, 0x56F9,0x9A9B, 0x5700,0x9A9C, 0x56FF,0x9A9D, 0x5704,0x9A9E, 0x5709,0x9A9F, 0x5708,0x9AA0, 0x570B,0x9AA1, 0x570D,0x9AA2, 0x5713,0x9AA3, 0x5718,0x9AA4, 0x5716,0x9AA5, 0x55C7,0x9AA6, 
0x571C,0x9AA7, 0x5726,0x9AA8, 0x5737,0x9AA9, 0x5738,0x9AAA, 0x574E,0x9AAB, 0x573B,0x9AAC, 0x5740,0x9AAD, 0x574F,0x9AAE, 0x5769,0x9AAF, 0x57C0,0x9AB0, 0x5788,0x9AB1, 0x5761,0x9AB2, 0x577F,0x9AB3, 0x5789,0x9AB4, 0x5793,0x9AB5, 0x57A0,0x9AB6, 0x57B3,0x9AB7, 0x57A4,0x9AB8, 0x57AA,0x9AB9, 0x57B0,0x9ABA, 0x57C3,0x9ABB, 0x57C6,0x9ABC, 0x57D4,0x9ABD, 0x57D2,0x9ABE, 0x57D3,0x9ABF, 0x580A,0x9AC0, 0x57D6,0x9AC1, 0x57E3,0x9AC2, 0x580B,0x9AC3, 0x5819,0x9AC4, 0x581D,0x9AC5, 0x5872,0x9AC6, 0x5821,0x9AC7, 0x5862,0x9AC8, 0x584B,0x9AC9, 0x5870,0x9ACA, 0x6BC0,0x9ACB, 0x5852,0x9ACC, 0x583D,0x9ACD, 0x5879,0x9ACE, 0x5885,0x9ACF, 0x58B9,0x9AD0, 0x589F,0x9AD1, 0x58AB,0x9AD2, 0x58BA,0x9AD3, 0x58DE,0x9AD4, 0x58BB,0x9AD5, 0x58B8,0x9AD6, 0x58AE,0x9AD7, 0x58C5,0x9AD8, 0x58D3,0x9AD9, 0x58D1,0x9ADA, 0x58D7,0x9ADB, 0x58D9,0x9ADC, 0x58D8,0x9ADD, 0x58E5,0x9ADE, 0x58DC,0x9ADF, 0x58E4,0x9AE0, 0x58DF,0x9AE1, 0x58EF,0x9AE2, 0x58FA,0x9AE3, 0x58F9,0x9AE4, 0x58FB,0x9AE5, 0x58FC,0x9AE6, 0x58FD,0x9AE7, 0x5902,0x9AE8, 0x590A,0x9AE9, 0x5910,0x9AEA, 0x591B,0x9AEB, 0x68A6,0x9AEC, 0x5925,0x9AED, 0x592C,0x9AEE, 0x592D,0x9AEF, 0x5932,0x9AF0, 0x5938,0x9AF1, 0x593E,0x9AF2, 0x7AD2,0x9AF3, 0x5955,0x9AF4, 0x5950,0x9AF5, 0x594E,0x9AF6, 0x595A,0x9AF7, 0x5958,0x9AF8, 0x5962,0x9AF9, 0x5960,0x9AFA, 0x5967,0x9AFB, 0x596C,0x9AFC, 0x5969,0x9B40, 0x5978,0x9B41, 0x5981,0x9B42, 0x599D,0x9B43, 0x4F5E,0x9B44, 0x4FAB,0x9B45, 0x59A3,0x9B46, 0x59B2,0x9B47, 0x59C6,0x9B48, 0x59E8,0x9B49, 0x59DC,0x9B4A, 0x598D,0x9B4B, 0x59D9,0x9B4C, 0x59DA,0x9B4D, 0x5A25,0x9B4E, 0x5A1F,0x9B4F, 0x5A11,0x9B50, 0x5A1C,0x9B51, 0x5A09,0x9B52, 0x5A1A,0x9B53, 0x5A40,0x9B54, 0x5A6C,0x9B55, 0x5A49,0x9B56, 0x5A35,0x9B57, 0x5A36,0x9B58, 0x5A62,0x9B59, 0x5A6A,0x9B5A, 0x5A9A,0x9B5B, 0x5ABC,0x9B5C, 0x5ABE,0x9B5D, 0x5ACB,0x9B5E, 0x5AC2,0x9B5F, 0x5ABD,0x9B60, 0x5AE3,0x9B61, 0x5AD7,0x9B62, 0x5AE6,0x9B63, 0x5AE9,0x9B64, 0x5AD6,0x9B65, 0x5AFA,0x9B66, 0x5AFB,0x9B67, 0x5B0C,0x9B68, 0x5B0B,0x9B69, 0x5B16,0x9B6A, 0x5B32,0x9B6B, 0x5AD0,0x9B6C, 0x5B2A,0x9B6D, 0x5B36,0x9B6E, 0x5B3E,0x9B6F, 0x5B43,0x9B70, 0x5B45,0x9B71, 0x5B40,0x9B72, 0x5B51,0x9B73, 0x5B55,0x9B74, 0x5B5A,0x9B75, 0x5B5B,0x9B76, 0x5B65,0x9B77, 0x5B69,0x9B78, 0x5B70,0x9B79, 0x5B73,0x9B7A, 0x5B75,0x9B7B, 0x5B78,0x9B7C, 0x6588,0x9B7D, 0x5B7A,0x9B7E, 0x5B80,0x9B80, 0x5B83,0x9B81, 0x5BA6,0x9B82, 0x5BB8,0x9B83, 0x5BC3,0x9B84, 0x5BC7,0x9B85, 0x5BC9,0x9B86, 0x5BD4,0x9B87, 0x5BD0,0x9B88, 0x5BE4,0x9B89, 0x5BE6,0x9B8A, 0x5BE2,0x9B8B, 0x5BDE,0x9B8C, 0x5BE5,0x9B8D, 0x5BEB,0x9B8E, 0x5BF0,0x9B8F, 0x5BF6,0x9B90, 0x5BF3,0x9B91, 0x5C05,0x9B92, 0x5C07,0x9B93, 0x5C08,0x9B94, 0x5C0D,0x9B95, 0x5C13,0x9B96, 0x5C20,0x9B97, 0x5C22,0x9B98, 0x5C28,0x9B99, 0x5C38,0x9B9A, 0x5C39,0x9B9B, 0x5C41,0x9B9C, 0x5C46,0x9B9D, 0x5C4E,0x9B9E, 0x5C53,0x9B9F, 0x5C50,0x9BA0, 0x5C4F,0x9BA1, 0x5B71,0x9BA2, 0x5C6C,0x9BA3, 0x5C6E,0x9BA4, 0x4E62,0x9BA5, 0x5C76,0x9BA6, 0x5C79,0x9BA7, 0x5C8C,0x9BA8, 0x5C91,0x9BA9, 0x5C94,0x9BAA, 0x599B,0x9BAB, 0x5CAB,0x9BAC, 0x5CBB,0x9BAD, 0x5CB6,0x9BAE, 0x5CBC,0x9BAF, 0x5CB7,0x9BB0, 0x5CC5,0x9BB1, 0x5CBE,0x9BB2, 0x5CC7,0x9BB3, 0x5CD9,0x9BB4, 0x5CE9,0x9BB5, 0x5CFD,0x9BB6, 0x5CFA,0x9BB7, 0x5CED,0x9BB8, 0x5D8C,0x9BB9, 0x5CEA,0x9BBA, 0x5D0B,0x9BBB, 0x5D15,0x9BBC, 0x5D17,0x9BBD, 0x5D5C,0x9BBE, 0x5D1F,0x9BBF, 0x5D1B,0x9BC0, 0x5D11,0x9BC1, 0x5D14,0x9BC2, 0x5D22,0x9BC3, 0x5D1A,0x9BC4, 0x5D19,0x9BC5, 0x5D18,0x9BC6, 0x5D4C,0x9BC7, 0x5D52,0x9BC8, 0x5D4E,0x9BC9, 0x5D4B,0x9BCA, 0x5D6C,0x9BCB, 0x5D73,0x9BCC, 0x5D76,0x9BCD, 0x5D87,0x9BCE, 0x5D84,0x9BCF, 0x5D82,0x9BD0, 0x5DA2,0x9BD1, 0x5D9D,0x9BD2, 0x5DAC,0x9BD3, 0x5DAE,0x9BD4, 0x5DBD,0x9BD5, 0x5D90,0x9BD6, 0x5DB7,0x9BD7, 
0x5DBC,0x9BD8, 0x5DC9,0x9BD9, 0x5DCD,0x9BDA, 0x5DD3,0x9BDB, 0x5DD2,0x9BDC, 0x5DD6,0x9BDD, 0x5DDB,0x9BDE, 0x5DEB,0x9BDF, 0x5DF2,0x9BE0, 0x5DF5,0x9BE1, 0x5E0B,0x9BE2, 0x5E1A,0x9BE3, 0x5E19,0x9BE4, 0x5E11,0x9BE5, 0x5E1B,0x9BE6, 0x5E36,0x9BE7, 0x5E37,0x9BE8, 0x5E44,0x9BE9, 0x5E43,0x9BEA, 0x5E40,0x9BEB, 0x5E4E,0x9BEC, 0x5E57,0x9BED, 0x5E54,0x9BEE, 0x5E5F,0x9BEF, 0x5E62,0x9BF0, 0x5E64,0x9BF1, 0x5E47,0x9BF2, 0x5E75,0x9BF3, 0x5E76,0x9BF4, 0x5E7A,0x9BF5, 0x9EBC,0x9BF6, 0x5E7F,0x9BF7, 0x5EA0,0x9BF8, 0x5EC1,0x9BF9, 0x5EC2,0x9BFA, 0x5EC8,0x9BFB, 0x5ED0,0x9BFC, 0x5ECF,0x9C40, 0x5ED6,0x9C41, 0x5EE3,0x9C42, 0x5EDD,0x9C43, 0x5EDA,0x9C44, 0x5EDB,0x9C45, 0x5EE2,0x9C46, 0x5EE1,0x9C47, 0x5EE8,0x9C48, 0x5EE9,0x9C49, 0x5EEC,0x9C4A, 0x5EF1,0x9C4B, 0x5EF3,0x9C4C, 0x5EF0,0x9C4D, 0x5EF4,0x9C4E, 0x5EF8,0x9C4F, 0x5EFE,0x9C50, 0x5F03,0x9C51, 0x5F09,0x9C52, 0x5F5D,0x9C53, 0x5F5C,0x9C54, 0x5F0B,0x9C55, 0x5F11,0x9C56, 0x5F16,0x9C57, 0x5F29,0x9C58, 0x5F2D,0x9C59, 0x5F38,0x9C5A, 0x5F41,0x9C5B, 0x5F48,0x9C5C, 0x5F4C,0x9C5D, 0x5F4E,0x9C5E, 0x5F2F,0x9C5F, 0x5F51,0x9C60, 0x5F56,0x9C61, 0x5F57,0x9C62, 0x5F59,0x9C63, 0x5F61,0x9C64, 0x5F6D,0x9C65, 0x5F73,0x9C66, 0x5F77,0x9C67, 0x5F83,0x9C68, 0x5F82,0x9C69, 0x5F7F,0x9C6A, 0x5F8A,0x9C6B, 0x5F88,0x9C6C, 0x5F91,0x9C6D, 0x5F87,0x9C6E, 0x5F9E,0x9C6F, 0x5F99,0x9C70, 0x5F98,0x9C71, 0x5FA0,0x9C72, 0x5FA8,0x9C73, 0x5FAD,0x9C74, 0x5FBC,0x9C75, 0x5FD6,0x9C76, 0x5FFB,0x9C77, 0x5FE4,0x9C78, 0x5FF8,0x9C79, 0x5FF1,0x9C7A, 0x5FDD,0x9C7B, 0x60B3,0x9C7C, 0x5FFF,0x9C7D, 0x6021,0x9C7E, 0x6060,0x9C80, 0x6019,0x9C81, 0x6010,0x9C82, 0x6029,0x9C83, 0x600E,0x9C84, 0x6031,0x9C85, 0x601B,0x9C86, 0x6015,0x9C87, 0x602B,0x9C88, 0x6026,0x9C89, 0x600F,0x9C8A, 0x603A,0x9C8B, 0x605A,0x9C8C, 0x6041,0x9C8D, 0x606A,0x9C8E, 0x6077,0x9C8F, 0x605F,0x9C90, 0x604A,0x9C91, 0x6046,0x9C92, 0x604D,0x9C93, 0x6063,0x9C94, 0x6043,0x9C95, 0x6064,0x9C96, 0x6042,0x9C97, 0x606C,0x9C98, 0x606B,0x9C99, 0x6059,0x9C9A, 0x6081,0x9C9B, 0x608D,0x9C9C, 0x60E7,0x9C9D, 0x6083,0x9C9E, 0x609A,0x9C9F, 0x6084,0x9CA0, 0x609B,0x9CA1, 0x6096,0x9CA2, 0x6097,0x9CA3, 0x6092,0x9CA4, 0x60A7,0x9CA5, 0x608B,0x9CA6, 0x60E1,0x9CA7, 0x60B8,0x9CA8, 0x60E0,0x9CA9, 0x60D3,0x9CAA, 0x60B4,0x9CAB, 0x5FF0,0x9CAC, 0x60BD,0x9CAD, 0x60C6,0x9CAE, 0x60B5,0x9CAF, 0x60D8,0x9CB0, 0x614D,0x9CB1, 0x6115,0x9CB2, 0x6106,0x9CB3, 0x60F6,0x9CB4, 0x60F7,0x9CB5, 0x6100,0x9CB6, 0x60F4,0x9CB7, 0x60FA,0x9CB8, 0x6103,0x9CB9, 0x6121,0x9CBA, 0x60FB,0x9CBB, 0x60F1,0x9CBC, 0x610D,0x9CBD, 0x610E,0x9CBE, 0x6147,0x9CBF, 0x613E,0x9CC0, 0x6128,0x9CC1, 0x6127,0x9CC2, 0x614A,0x9CC3, 0x613F,0x9CC4, 0x613C,0x9CC5, 0x612C,0x9CC6, 0x6134,0x9CC7, 0x613D,0x9CC8, 0x6142,0x9CC9, 0x6144,0x9CCA, 0x6173,0x9CCB, 0x6177,0x9CCC, 0x6158,0x9CCD, 0x6159,0x9CCE, 0x615A,0x9CCF, 0x616B,0x9CD0, 0x6174,0x9CD1, 0x616F,0x9CD2, 0x6165,0x9CD3, 0x6171,0x9CD4, 0x615F,0x9CD5, 0x615D,0x9CD6, 0x6153,0x9CD7, 0x6175,0x9CD8, 0x6199,0x9CD9, 0x6196,0x9CDA, 0x6187,0x9CDB, 0x61AC,0x9CDC, 0x6194,0x9CDD, 0x619A,0x9CDE, 0x618A,0x9CDF, 0x6191,0x9CE0, 0x61AB,0x9CE1, 0x61AE,0x9CE2, 0x61CC,0x9CE3, 0x61CA,0x9CE4, 0x61C9,0x9CE5, 0x61F7,0x9CE6, 0x61C8,0x9CE7, 0x61C3,0x9CE8, 0x61C6,0x9CE9, 0x61BA,0x9CEA, 0x61CB,0x9CEB, 0x7F79,0x9CEC, 0x61CD,0x9CED, 0x61E6,0x9CEE, 0x61E3,0x9CEF, 0x61F6,0x9CF0, 0x61FA,0x9CF1, 0x61F4,0x9CF2, 0x61FF,0x9CF3, 0x61FD,0x9CF4, 0x61FC,0x9CF5, 0x61FE,0x9CF6, 0x6200,0x9CF7, 0x6208,0x9CF8, 0x6209,0x9CF9, 0x620D,0x9CFA, 0x620C,0x9CFB, 0x6214,0x9CFC, 0x621B,0x9D40, 0x621E,0x9D41, 0x6221,0x9D42, 0x622A,0x9D43, 0x622E,0x9D44, 0x6230,0x9D45, 0x6232,0x9D46, 0x6233,0x9D47, 0x6241,0x9D48, 0x624E,0x9D49, 0x625E,0x9D4A, 0x6263,0x9D4B, 
0x625B,0x9D4C, 0x6260,0x9D4D, 0x6268,0x9D4E, 0x627C,0x9D4F, 0x6282,0x9D50, 0x6289,0x9D51, 0x627E,0x9D52, 0x6292,0x9D53, 0x6293,0x9D54, 0x6296,0x9D55, 0x62D4,0x9D56, 0x6283,0x9D57, 0x6294,0x9D58, 0x62D7,0x9D59, 0x62D1,0x9D5A, 0x62BB,0x9D5B, 0x62CF,0x9D5C, 0x62FF,0x9D5D, 0x62C6,0x9D5E, 0x64D4,0x9D5F, 0x62C8,0x9D60, 0x62DC,0x9D61, 0x62CC,0x9D62, 0x62CA,0x9D63, 0x62C2,0x9D64, 0x62C7,0x9D65, 0x629B,0x9D66, 0x62C9,0x9D67, 0x630C,0x9D68, 0x62EE,0x9D69, 0x62F1,0x9D6A, 0x6327,0x9D6B, 0x6302,0x9D6C, 0x6308,0x9D6D, 0x62EF,0x9D6E, 0x62F5,0x9D6F, 0x6350,0x9D70, 0x633E,0x9D71, 0x634D,0x9D72, 0x641C,0x9D73, 0x634F,0x9D74, 0x6396,0x9D75, 0x638E,0x9D76, 0x6380,0x9D77, 0x63AB,0x9D78, 0x6376,0x9D79, 0x63A3,0x9D7A, 0x638F,0x9D7B, 0x6389,0x9D7C, 0x639F,0x9D7D, 0x63B5,0x9D7E, 0x636B,0x9D80, 0x6369,0x9D81, 0x63BE,0x9D82, 0x63E9,0x9D83, 0x63C0,0x9D84, 0x63C6,0x9D85, 0x63E3,0x9D86, 0x63C9,0x9D87, 0x63D2,0x9D88, 0x63F6,0x9D89, 0x63C4,0x9D8A, 0x6416,0x9D8B, 0x6434,0x9D8C, 0x6406,0x9D8D, 0x6413,0x9D8E, 0x6426,0x9D8F, 0x6436,0x9D90, 0x651D,0x9D91, 0x6417,0x9D92, 0x6428,0x9D93, 0x640F,0x9D94, 0x6467,0x9D95, 0x646F,0x9D96, 0x6476,0x9D97, 0x644E,0x9D98, 0x652A,0x9D99, 0x6495,0x9D9A, 0x6493,0x9D9B, 0x64A5,0x9D9C, 0x64A9,0x9D9D, 0x6488,0x9D9E, 0x64BC,0x9D9F, 0x64DA,0x9DA0, 0x64D2,0x9DA1, 0x64C5,0x9DA2, 0x64C7,0x9DA3, 0x64BB,0x9DA4, 0x64D8,0x9DA5, 0x64C2,0x9DA6, 0x64F1,0x9DA7, 0x64E7,0x9DA8, 0x8209,0x9DA9, 0x64E0,0x9DAA, 0x64E1,0x9DAB, 0x62AC,0x9DAC, 0x64E3,0x9DAD, 0x64EF,0x9DAE, 0x652C,0x9DAF, 0x64F6,0x9DB0, 0x64F4,0x9DB1, 0x64F2,0x9DB2, 0x64FA,0x9DB3, 0x6500,0x9DB4, 0x64FD,0x9DB5, 0x6518,0x9DB6, 0x651C,0x9DB7, 0x6505,0x9DB8, 0x6524,0x9DB9, 0x6523,0x9DBA, 0x652B,0x9DBB, 0x6534,0x9DBC, 0x6535,0x9DBD, 0x6537,0x9DBE, 0x6536,0x9DBF, 0x6538,0x9DC0, 0x754B,0x9DC1, 0x6548,0x9DC2, 0x6556,0x9DC3, 0x6555,0x9DC4, 0x654D,0x9DC5, 0x6558,0x9DC6, 0x655E,0x9DC7, 0x655D,0x9DC8, 0x6572,0x9DC9, 0x6578,0x9DCA, 0x6582,0x9DCB, 0x6583,0x9DCC, 0x8B8A,0x9DCD, 0x659B,0x9DCE, 0x659F,0x9DCF, 0x65AB,0x9DD0, 0x65B7,0x9DD1, 0x65C3,0x9DD2, 0x65C6,0x9DD3, 0x65C1,0x9DD4, 0x65C4,0x9DD5, 0x65CC,0x9DD6, 0x65D2,0x9DD7, 0x65DB,0x9DD8, 0x65D9,0x9DD9, 0x65E0,0x9DDA, 0x65E1,0x9DDB, 0x65F1,0x9DDC, 0x6772,0x9DDD, 0x660A,0x9DDE, 0x6603,0x9DDF, 0x65FB,0x9DE0, 0x6773,0x9DE1, 0x6635,0x9DE2, 0x6636,0x9DE3, 0x6634,0x9DE4, 0x661C,0x9DE5, 0x664F,0x9DE6, 0x6644,0x9DE7, 0x6649,0x9DE8, 0x6641,0x9DE9, 0x665E,0x9DEA, 0x665D,0x9DEB, 0x6664,0x9DEC, 0x6667,0x9DED, 0x6668,0x9DEE, 0x665F,0x9DEF, 0x6662,0x9DF0, 0x6670,0x9DF1, 0x6683,0x9DF2, 0x6688,0x9DF3, 0x668E,0x9DF4, 0x6689,0x9DF5, 0x6684,0x9DF6, 0x6698,0x9DF7, 0x669D,0x9DF8, 0x66C1,0x9DF9, 0x66B9,0x9DFA, 0x66C9,0x9DFB, 0x66BE,0x9DFC, 0x66BC,0x9E40, 0x66C4,0x9E41, 0x66B8,0x9E42, 0x66D6,0x9E43, 0x66DA,0x9E44, 0x66E0,0x9E45, 0x663F,0x9E46, 0x66E6,0x9E47, 0x66E9,0x9E48, 0x66F0,0x9E49, 0x66F5,0x9E4A, 0x66F7,0x9E4B, 0x670F,0x9E4C, 0x6716,0x9E4D, 0x671E,0x9E4E, 0x6726,0x9E4F, 0x6727,0x9E50, 0x9738,0x9E51, 0x672E,0x9E52, 0x673F,0x9E53, 0x6736,0x9E54, 0x6741,0x9E55, 0x6738,0x9E56, 0x6737,0x9E57, 0x6746,0x9E58, 0x675E,0x9E59, 0x6760,0x9E5A, 0x6759,0x9E5B, 0x6763,0x9E5C, 0x6764,0x9E5D, 0x6789,0x9E5E, 0x6770,0x9E5F, 0x67A9,0x9E60, 0x677C,0x9E61, 0x676A,0x9E62, 0x678C,0x9E63, 0x678B,0x9E64, 0x67A6,0x9E65, 0x67A1,0x9E66, 0x6785,0x9E67, 0x67B7,0x9E68, 0x67EF,0x9E69, 0x67B4,0x9E6A, 0x67EC,0x9E6B, 0x67B3,0x9E6C, 0x67E9,0x9E6D, 0x67B8,0x9E6E, 0x67E4,0x9E6F, 0x67DE,0x9E70, 0x67DD,0x9E71, 0x67E2,0x9E72, 0x67EE,0x9E73, 0x67B9,0x9E74, 0x67CE,0x9E75, 0x67C6,0x9E76, 0x67E7,0x9E77, 0x6A9C,0x9E78, 0x681E,0x9E79, 0x6846,0x9E7A, 0x6829,0x9E7B, 0x6840,0x9E7C, 
0x684D,0x9E7D, 0x6832,0x9E7E, 0x684E,0x9E80, 0x68B3,0x9E81, 0x682B,0x9E82, 0x6859,0x9E83, 0x6863,0x9E84, 0x6877,0x9E85, 0x687F,0x9E86, 0x689F,0x9E87, 0x688F,0x9E88, 0x68AD,0x9E89, 0x6894,0x9E8A, 0x689D,0x9E8B, 0x689B,0x9E8C, 0x6883,0x9E8D, 0x6AAE,0x9E8E, 0x68B9,0x9E8F, 0x6874,0x9E90, 0x68B5,0x9E91, 0x68A0,0x9E92, 0x68BA,0x9E93, 0x690F,0x9E94, 0x688D,0x9E95, 0x687E,0x9E96, 0x6901,0x9E97, 0x68CA,0x9E98, 0x6908,0x9E99, 0x68D8,0x9E9A, 0x6922,0x9E9B, 0x6926,0x9E9C, 0x68E1,0x9E9D, 0x690C,0x9E9E, 0x68CD,0x9E9F, 0x68D4,0x9EA0, 0x68E7,0x9EA1, 0x68D5,0x9EA2, 0x6936,0x9EA3, 0x6912,0x9EA4, 0x6904,0x9EA5, 0x68D7,0x9EA6, 0x68E3,0x9EA7, 0x6925,0x9EA8, 0x68F9,0x9EA9, 0x68E0,0x9EAA, 0x68EF,0x9EAB, 0x6928,0x9EAC, 0x692A,0x9EAD, 0x691A,0x9EAE, 0x6923,0x9EAF, 0x6921,0x9EB0, 0x68C6,0x9EB1, 0x6979,0x9EB2, 0x6977,0x9EB3, 0x695C,0x9EB4, 0x6978,0x9EB5, 0x696B,0x9EB6, 0x6954,0x9EB7, 0x697E,0x9EB8, 0x696E,0x9EB9, 0x6939,0x9EBA, 0x6974,0x9EBB, 0x693D,0x9EBC, 0x6959,0x9EBD, 0x6930,0x9EBE, 0x6961,0x9EBF, 0x695E,0x9EC0, 0x695D,0x9EC1, 0x6981,0x9EC2, 0x696A,0x9EC3, 0x69B2,0x9EC4, 0x69AE,0x9EC5, 0x69D0,0x9EC6, 0x69BF,0x9EC7, 0x69C1,0x9EC8, 0x69D3,0x9EC9, 0x69BE,0x9ECA, 0x69CE,0x9ECB, 0x5BE8,0x9ECC, 0x69CA,0x9ECD, 0x69DD,0x9ECE, 0x69BB,0x9ECF, 0x69C3,0x9ED0, 0x69A7,0x9ED1, 0x6A2E,0x9ED2, 0x6991,0x9ED3, 0x69A0,0x9ED4, 0x699C,0x9ED5, 0x6995,0x9ED6, 0x69B4,0x9ED7, 0x69DE,0x9ED8, 0x69E8,0x9ED9, 0x6A02,0x9EDA, 0x6A1B,0x9EDB, 0x69FF,0x9EDC, 0x6B0A,0x9EDD, 0x69F9,0x9EDE, 0x69F2,0x9EDF, 0x69E7,0x9EE0, 0x6A05,0x9EE1, 0x69B1,0x9EE2, 0x6A1E,0x9EE3, 0x69ED,0x9EE4, 0x6A14,0x9EE5, 0x69EB,0x9EE6, 0x6A0A,0x9EE7, 0x6A12,0x9EE8, 0x6AC1,0x9EE9, 0x6A23,0x9EEA, 0x6A13,0x9EEB, 0x6A44,0x9EEC, 0x6A0C,0x9EED, 0x6A72,0x9EEE, 0x6A36,0x9EEF, 0x6A78,0x9EF0, 0x6A47,0x9EF1, 0x6A62,0x9EF2, 0x6A59,0x9EF3, 0x6A66,0x9EF4, 0x6A48,0x9EF5, 0x6A38,0x9EF6, 0x6A22,0x9EF7, 0x6A90,0x9EF8, 0x6A8D,0x9EF9, 0x6AA0,0x9EFA, 0x6A84,0x9EFB, 0x6AA2,0x9EFC, 0x6AA3,0x9F40, 0x6A97,0x9F41, 0x8617,0x9F42, 0x6ABB,0x9F43, 0x6AC3,0x9F44, 0x6AC2,0x9F45, 0x6AB8,0x9F46, 0x6AB3,0x9F47, 0x6AAC,0x9F48, 0x6ADE,0x9F49, 0x6AD1,0x9F4A, 0x6ADF,0x9F4B, 0x6AAA,0x9F4C, 0x6ADA,0x9F4D, 0x6AEA,0x9F4E, 0x6AFB,0x9F4F, 0x6B05,0x9F50, 0x8616,0x9F51, 0x6AFA,0x9F52, 0x6B12,0x9F53, 0x6B16,0x9F54, 0x9B31,0x9F55, 0x6B1F,0x9F56, 0x6B38,0x9F57, 0x6B37,0x9F58, 0x76DC,0x9F59, 0x6B39,0x9F5A, 0x98EE,0x9F5B, 0x6B47,0x9F5C, 0x6B43,0x9F5D, 0x6B49,0x9F5E, 0x6B50,0x9F5F, 0x6B59,0x9F60, 0x6B54,0x9F61, 0x6B5B,0x9F62, 0x6B5F,0x9F63, 0x6B61,0x9F64, 0x6B78,0x9F65, 0x6B79,0x9F66, 0x6B7F,0x9F67, 0x6B80,0x9F68, 0x6B84,0x9F69, 0x6B83,0x9F6A, 0x6B8D,0x9F6B, 0x6B98,0x9F6C, 0x6B95,0x9F6D, 0x6B9E,0x9F6E, 0x6BA4,0x9F6F, 0x6BAA,0x9F70, 0x6BAB,0x9F71, 0x6BAF,0x9F72, 0x6BB2,0x9F73, 0x6BB1,0x9F74, 0x6BB3,0x9F75, 0x6BB7,0x9F76, 0x6BBC,0x9F77, 0x6BC6,0x9F78, 0x6BCB,0x9F79, 0x6BD3,0x9F7A, 0x6BDF,0x9F7B, 0x6BEC,0x9F7C, 0x6BEB,0x9F7D, 0x6BF3,0x9F7E, 0x6BEF,0x9F80, 0x9EBE,0x9F81, 0x6C08,0x9F82, 0x6C13,0x9F83, 0x6C14,0x9F84, 0x6C1B,0x9F85, 0x6C24,0x9F86, 0x6C23,0x9F87, 0x6C5E,0x9F88, 0x6C55,0x9F89, 0x6C62,0x9F8A, 0x6C6A,0x9F8B, 0x6C82,0x9F8C, 0x6C8D,0x9F8D, 0x6C9A,0x9F8E, 0x6C81,0x9F8F, 0x6C9B,0x9F90, 0x6C7E,0x9F91, 0x6C68,0x9F92, 0x6C73,0x9F93, 0x6C92,0x9F94, 0x6C90,0x9F95, 0x6CC4,0x9F96, 0x6CF1,0x9F97, 0x6CD3,0x9F98, 0x6CBD,0x9F99, 0x6CD7,0x9F9A, 0x6CC5,0x9F9B, 0x6CDD,0x9F9C, 0x6CAE,0x9F9D, 0x6CB1,0x9F9E, 0x6CBE,0x9F9F, 0x6CBA,0x9FA0, 0x6CDB,0x9FA1, 0x6CEF,0x9FA2, 0x6CD9,0x9FA3, 0x6CEA,0x9FA4, 0x6D1F,0x9FA5, 0x884D,0x9FA6, 0x6D36,0x9FA7, 0x6D2B,0x9FA8, 0x6D3D,0x9FA9, 0x6D38,0x9FAA, 0x6D19,0x9FAB, 0x6D35,0x9FAC, 0x6D33,0x9FAD, 0x6D12,0x9FAE, 
0x6D0C,0x9FAF, 0x6D63,0x9FB0, 0x6D93,0x9FB1, 0x6D64,0x9FB2, 0x6D5A,0x9FB3, 0x6D79,0x9FB4, 0x6D59,0x9FB5, 0x6D8E,0x9FB6, 0x6D95,0x9FB7, 0x6FE4,0x9FB8, 0x6D85,0x9FB9, 0x6DF9,0x9FBA, 0x6E15,0x9FBB, 0x6E0A,0x9FBC, 0x6DB5,0x9FBD, 0x6DC7,0x9FBE, 0x6DE6,0x9FBF, 0x6DB8,0x9FC0, 0x6DC6,0x9FC1, 0x6DEC,0x9FC2, 0x6DDE,0x9FC3, 0x6DCC,0x9FC4, 0x6DE8,0x9FC5, 0x6DD2,0x9FC6, 0x6DC5,0x9FC7, 0x6DFA,0x9FC8, 0x6DD9,0x9FC9, 0x6DE4,0x9FCA, 0x6DD5,0x9FCB, 0x6DEA,0x9FCC, 0x6DEE,0x9FCD, 0x6E2D,0x9FCE, 0x6E6E,0x9FCF, 0x6E2E,0x9FD0, 0x6E19,0x9FD1, 0x6E72,0x9FD2, 0x6E5F,0x9FD3, 0x6E3E,0x9FD4, 0x6E23,0x9FD5, 0x6E6B,0x9FD6, 0x6E2B,0x9FD7, 0x6E76,0x9FD8, 0x6E4D,0x9FD9, 0x6E1F,0x9FDA, 0x6E43,0x9FDB, 0x6E3A,0x9FDC, 0x6E4E,0x9FDD, 0x6E24,0x9FDE, 0x6EFF,0x9FDF, 0x6E1D,0x9FE0, 0x6E38,0x9FE1, 0x6E82,0x9FE2, 0x6EAA,0x9FE3, 0x6E98,0x9FE4, 0x6EC9,0x9FE5, 0x6EB7,0x9FE6, 0x6ED3,0x9FE7, 0x6EBD,0x9FE8, 0x6EAF,0x9FE9, 0x6EC4,0x9FEA, 0x6EB2,0x9FEB, 0x6ED4,0x9FEC, 0x6ED5,0x9FED, 0x6E8F,0x9FEE, 0x6EA5,0x9FEF, 0x6EC2,0x9FF0, 0x6E9F,0x9FF1, 0x6F41,0x9FF2, 0x6F11,0x9FF3, 0x704C,0x9FF4, 0x6EEC,0x9FF5, 0x6EF8,0x9FF6, 0x6EFE,0x9FF7, 0x6F3F,0x9FF8, 0x6EF2,0x9FF9, 0x6F31,0x9FFA, 0x6EEF,0x9FFB, 0x6F32,0x9FFC, 0x6ECC,0xE040, 0x6F3E,0xE041, 0x6F13,0xE042, 0x6EF7,0xE043, 0x6F86,0xE044, 0x6F7A,0xE045, 0x6F78,0xE046, 0x6F81,0xE047, 0x6F80,0xE048, 0x6F6F,0xE049, 0x6F5B,0xE04A, 0x6FF3,0xE04B, 0x6F6D,0xE04C, 0x6F82,0xE04D, 0x6F7C,0xE04E, 0x6F58,0xE04F, 0x6F8E,0xE050, 0x6F91,0xE051, 0x6FC2,0xE052, 0x6F66,0xE053, 0x6FB3,0xE054, 0x6FA3,0xE055, 0x6FA1,0xE056, 0x6FA4,0xE057, 0x6FB9,0xE058, 0x6FC6,0xE059, 0x6FAA,0xE05A, 0x6FDF,0xE05B, 0x6FD5,0xE05C, 0x6FEC,0xE05D, 0x6FD4,0xE05E, 0x6FD8,0xE05F, 0x6FF1,0xE060, 0x6FEE,0xE061, 0x6FDB,0xE062, 0x7009,0xE063, 0x700B,0xE064, 0x6FFA,0xE065, 0x7011,0xE066, 0x7001,0xE067, 0x700F,0xE068, 0x6FFE,0xE069, 0x701B,0xE06A, 0x701A,0xE06B, 0x6F74,0xE06C, 0x701D,0xE06D, 0x7018,0xE06E, 0x701F,0xE06F, 0x7030,0xE070, 0x703E,0xE071, 0x7032,0xE072, 0x7051,0xE073, 0x7063,0xE074, 0x7099,0xE075, 0x7092,0xE076, 0x70AF,0xE077, 0x70F1,0xE078, 0x70AC,0xE079, 0x70B8,0xE07A, 0x70B3,0xE07B, 0x70AE,0xE07C, 0x70DF,0xE07D, 0x70CB,0xE07E, 0x70DD,0xE080, 0x70D9,0xE081, 0x7109,0xE082, 0x70FD,0xE083, 0x711C,0xE084, 0x7119,0xE085, 0x7165,0xE086, 0x7155,0xE087, 0x7188,0xE088, 0x7166,0xE089, 0x7162,0xE08A, 0x714C,0xE08B, 0x7156,0xE08C, 0x716C,0xE08D, 0x718F,0xE08E, 0x71FB,0xE08F, 0x7184,0xE090, 0x7195,0xE091, 0x71A8,0xE092, 0x71AC,0xE093, 0x71D7,0xE094, 0x71B9,0xE095, 0x71BE,0xE096, 0x71D2,0xE097, 0x71C9,0xE098, 0x71D4,0xE099, 0x71CE,0xE09A, 0x71E0,0xE09B, 0x71EC,0xE09C, 0x71E7,0xE09D, 0x71F5,0xE09E, 0x71FC,0xE09F, 0x71F9,0xE0A0, 0x71FF,0xE0A1, 0x720D,0xE0A2, 0x7210,0xE0A3, 0x721B,0xE0A4, 0x7228,0xE0A5, 0x722D,0xE0A6, 0x722C,0xE0A7, 0x7230,0xE0A8, 0x7232,0xE0A9, 0x723B,0xE0AA, 0x723C,0xE0AB, 0x723F,0xE0AC, 0x7240,0xE0AD, 0x7246,0xE0AE, 0x724B,0xE0AF, 0x7258,0xE0B0, 0x7274,0xE0B1, 0x727E,0xE0B2, 0x7282,0xE0B3, 0x7281,0xE0B4, 0x7287,0xE0B5, 0x7292,0xE0B6, 0x7296,0xE0B7, 0x72A2,0xE0B8, 0x72A7,0xE0B9, 0x72B9,0xE0BA, 0x72B2,0xE0BB, 0x72C3,0xE0BC, 0x72C6,0xE0BD, 0x72C4,0xE0BE, 0x72CE,0xE0BF, 0x72D2,0xE0C0, 0x72E2,0xE0C1, 0x72E0,0xE0C2, 0x72E1,0xE0C3, 0x72F9,0xE0C4, 0x72F7,0xE0C5, 0x500F,0xE0C6, 0x7317,0xE0C7, 0x730A,0xE0C8, 0x731C,0xE0C9, 0x7316,0xE0CA, 0x731D,0xE0CB, 0x7334,0xE0CC, 0x732F,0xE0CD, 0x7329,0xE0CE, 0x7325,0xE0CF, 0x733E,0xE0D0, 0x734E,0xE0D1, 0x734F,0xE0D2, 0x9ED8,0xE0D3, 0x7357,0xE0D4, 0x736A,0xE0D5, 0x7368,0xE0D6, 0x7370,0xE0D7, 0x7378,0xE0D8, 0x7375,0xE0D9, 0x737B,0xE0DA, 0x737A,0xE0DB, 0x73C8,0xE0DC, 0x73B3,0xE0DD, 0x73CE,0xE0DE, 0x73BB,0xE0DF, 
0x73C0,0xE0E0, 0x73E5,0xE0E1, 0x73EE,0xE0E2, 0x73DE,0xE0E3, 0x74A2,0xE0E4, 0x7405,0xE0E5, 0x746F,0xE0E6, 0x7425,0xE0E7, 0x73F8,0xE0E8, 0x7432,0xE0E9, 0x743A,0xE0EA, 0x7455,0xE0EB, 0x743F,0xE0EC, 0x745F,0xE0ED, 0x7459,0xE0EE, 0x7441,0xE0EF, 0x745C,0xE0F0, 0x7469,0xE0F1, 0x7470,0xE0F2, 0x7463,0xE0F3, 0x746A,0xE0F4, 0x7476,0xE0F5, 0x747E,0xE0F6, 0x748B,0xE0F7, 0x749E,0xE0F8, 0x74A7,0xE0F9, 0x74CA,0xE0FA, 0x74CF,0xE0FB, 0x74D4,0xE0FC, 0x73F1,0xE140, 0x74E0,0xE141, 0x74E3,0xE142, 0x74E7,0xE143, 0x74E9,0xE144, 0x74EE,0xE145, 0x74F2,0xE146, 0x74F0,0xE147, 0x74F1,0xE148, 0x74F8,0xE149, 0x74F7,0xE14A, 0x7504,0xE14B, 0x7503,0xE14C, 0x7505,0xE14D, 0x750C,0xE14E, 0x750E,0xE14F, 0x750D,0xE150, 0x7515,0xE151, 0x7513,0xE152, 0x751E,0xE153, 0x7526,0xE154, 0x752C,0xE155, 0x753C,0xE156, 0x7544,0xE157, 0x754D,0xE158, 0x754A,0xE159, 0x7549,0xE15A, 0x755B,0xE15B, 0x7546,0xE15C, 0x755A,0xE15D, 0x7569,0xE15E, 0x7564,0xE15F, 0x7567,0xE160, 0x756B,0xE161, 0x756D,0xE162, 0x7578,0xE163, 0x7576,0xE164, 0x7586,0xE165, 0x7587,0xE166, 0x7574,0xE167, 0x758A,0xE168, 0x7589,0xE169, 0x7582,0xE16A, 0x7594,0xE16B, 0x759A,0xE16C, 0x759D,0xE16D, 0x75A5,0xE16E, 0x75A3,0xE16F, 0x75C2,0xE170, 0x75B3,0xE171, 0x75C3,0xE172, 0x75B5,0xE173, 0x75BD,0xE174, 0x75B8,0xE175, 0x75BC,0xE176, 0x75B1,0xE177, 0x75CD,0xE178, 0x75CA,0xE179, 0x75D2,0xE17A, 0x75D9,0xE17B, 0x75E3,0xE17C, 0x75DE,0xE17D, 0x75FE,0xE17E, 0x75FF,0xE180, 0x75FC,0xE181, 0x7601,0xE182, 0x75F0,0xE183, 0x75FA,0xE184, 0x75F2,0xE185, 0x75F3,0xE186, 0x760B,0xE187, 0x760D,0xE188, 0x7609,0xE189, 0x761F,0xE18A, 0x7627,0xE18B, 0x7620,0xE18C, 0x7621,0xE18D, 0x7622,0xE18E, 0x7624,0xE18F, 0x7634,0xE190, 0x7630,0xE191, 0x763B,0xE192, 0x7647,0xE193, 0x7648,0xE194, 0x7646,0xE195, 0x765C,0xE196, 0x7658,0xE197, 0x7661,0xE198, 0x7662,0xE199, 0x7668,0xE19A, 0x7669,0xE19B, 0x766A,0xE19C, 0x7667,0xE19D, 0x766C,0xE19E, 0x7670,0xE19F, 0x7672,0xE1A0, 0x7676,0xE1A1, 0x7678,0xE1A2, 0x767C,0xE1A3, 0x7680,0xE1A4, 0x7683,0xE1A5, 0x7688,0xE1A6, 0x768B,0xE1A7, 0x768E,0xE1A8, 0x7696,0xE1A9, 0x7693,0xE1AA, 0x7699,0xE1AB, 0x769A,0xE1AC, 0x76B0,0xE1AD, 0x76B4,0xE1AE, 0x76B8,0xE1AF, 0x76B9,0xE1B0, 0x76BA,0xE1B1, 0x76C2,0xE1B2, 0x76CD,0xE1B3, 0x76D6,0xE1B4, 0x76D2,0xE1B5, 0x76DE,0xE1B6, 0x76E1,0xE1B7, 0x76E5,0xE1B8, 0x76E7,0xE1B9, 0x76EA,0xE1BA, 0x862F,0xE1BB, 0x76FB,0xE1BC, 0x7708,0xE1BD, 0x7707,0xE1BE, 0x7704,0xE1BF, 0x7729,0xE1C0, 0x7724,0xE1C1, 0x771E,0xE1C2, 0x7725,0xE1C3, 0x7726,0xE1C4, 0x771B,0xE1C5, 0x7737,0xE1C6, 0x7738,0xE1C7, 0x7747,0xE1C8, 0x775A,0xE1C9, 0x7768,0xE1CA, 0x776B,0xE1CB, 0x775B,0xE1CC, 0x7765,0xE1CD, 0x777F,0xE1CE, 0x777E,0xE1CF, 0x7779,0xE1D0, 0x778E,0xE1D1, 0x778B,0xE1D2, 0x7791,0xE1D3, 0x77A0,0xE1D4, 0x779E,0xE1D5, 0x77B0,0xE1D6, 0x77B6,0xE1D7, 0x77B9,0xE1D8, 0x77BF,0xE1D9, 0x77BC,0xE1DA, 0x77BD,0xE1DB, 0x77BB,0xE1DC, 0x77C7,0xE1DD, 0x77CD,0xE1DE, 0x77D7,0xE1DF, 0x77DA,0xE1E0, 0x77DC,0xE1E1, 0x77E3,0xE1E2, 0x77EE,0xE1E3, 0x77FC,0xE1E4, 0x780C,0xE1E5, 0x7812,0xE1E6, 0x7926,0xE1E7, 0x7820,0xE1E8, 0x792A,0xE1E9, 0x7845,0xE1EA, 0x788E,0xE1EB, 0x7874,0xE1EC, 0x7886,0xE1ED, 0x787C,0xE1EE, 0x789A,0xE1EF, 0x788C,0xE1F0, 0x78A3,0xE1F1, 0x78B5,0xE1F2, 0x78AA,0xE1F3, 0x78AF,0xE1F4, 0x78D1,0xE1F5, 0x78C6,0xE1F6, 0x78CB,0xE1F7, 0x78D4,0xE1F8, 0x78BE,0xE1F9, 0x78BC,0xE1FA, 0x78C5,0xE1FB, 0x78CA,0xE1FC, 0x78EC,0xE240, 0x78E7,0xE241, 0x78DA,0xE242, 0x78FD,0xE243, 0x78F4,0xE244, 0x7907,0xE245, 0x7912,0xE246, 0x7911,0xE247, 0x7919,0xE248, 0x792C,0xE249, 0x792B,0xE24A, 0x7940,0xE24B, 0x7960,0xE24C, 0x7957,0xE24D, 0x795F,0xE24E, 0x795A,0xE24F, 0x7955,0xE250, 0x7953,0xE251, 0x797A,0xE252, 0x797F,0xE253, 
0x798A,0xE254, 0x799D,0xE255, 0x79A7,0xE256, 0x9F4B,0xE257, 0x79AA,0xE258, 0x79AE,0xE259, 0x79B3,0xE25A, 0x79B9,0xE25B, 0x79BA,0xE25C, 0x79C9,0xE25D, 0x79D5,0xE25E, 0x79E7,0xE25F, 0x79EC,0xE260, 0x79E1,0xE261, 0x79E3,0xE262, 0x7A08,0xE263, 0x7A0D,0xE264, 0x7A18,0xE265, 0x7A19,0xE266, 0x7A20,0xE267, 0x7A1F,0xE268, 0x7980,0xE269, 0x7A31,0xE26A, 0x7A3B,0xE26B, 0x7A3E,0xE26C, 0x7A37,0xE26D, 0x7A43,0xE26E, 0x7A57,0xE26F, 0x7A49,0xE270, 0x7A61,0xE271, 0x7A62,0xE272, 0x7A69,0xE273, 0x9F9D,0xE274, 0x7A70,0xE275, 0x7A79,0xE276, 0x7A7D,0xE277, 0x7A88,0xE278, 0x7A97,0xE279, 0x7A95,0xE27A, 0x7A98,0xE27B, 0x7A96,0xE27C, 0x7AA9,0xE27D, 0x7AC8,0xE27E, 0x7AB0,0xE280, 0x7AB6,0xE281, 0x7AC5,0xE282, 0x7AC4,0xE283, 0x7ABF,0xE284, 0x9083,0xE285, 0x7AC7,0xE286, 0x7ACA,0xE287, 0x7ACD,0xE288, 0x7ACF,0xE289, 0x7AD5,0xE28A, 0x7AD3,0xE28B, 0x7AD9,0xE28C, 0x7ADA,0xE28D, 0x7ADD,0xE28E, 0x7AE1,0xE28F, 0x7AE2,0xE290, 0x7AE6,0xE291, 0x7AED,0xE292, 0x7AF0,0xE293, 0x7B02,0xE294, 0x7B0F,0xE295, 0x7B0A,0xE296, 0x7B06,0xE297, 0x7B33,0xE298, 0x7B18,0xE299, 0x7B19,0xE29A, 0x7B1E,0xE29B, 0x7B35,0xE29C, 0x7B28,0xE29D, 0x7B36,0xE29E, 0x7B50,0xE29F, 0x7B7A,0xE2A0, 0x7B04,0xE2A1, 0x7B4D,0xE2A2, 0x7B0B,0xE2A3, 0x7B4C,0xE2A4, 0x7B45,0xE2A5, 0x7B75,0xE2A6, 0x7B65,0xE2A7, 0x7B74,0xE2A8, 0x7B67,0xE2A9, 0x7B70,0xE2AA, 0x7B71,0xE2AB, 0x7B6C,0xE2AC, 0x7B6E,0xE2AD, 0x7B9D,0xE2AE, 0x7B98,0xE2AF, 0x7B9F,0xE2B0, 0x7B8D,0xE2B1, 0x7B9C,0xE2B2, 0x7B9A,0xE2B3, 0x7B8B,0xE2B4, 0x7B92,0xE2B5, 0x7B8F,0xE2B6, 0x7B5D,0xE2B7, 0x7B99,0xE2B8, 0x7BCB,0xE2B9, 0x7BC1,0xE2BA, 0x7BCC,0xE2BB, 0x7BCF,0xE2BC, 0x7BB4,0xE2BD, 0x7BC6,0xE2BE, 0x7BDD,0xE2BF, 0x7BE9,0xE2C0, 0x7C11,0xE2C1, 0x7C14,0xE2C2, 0x7BE6,0xE2C3, 0x7BE5,0xE2C4, 0x7C60,0xE2C5, 0x7C00,0xE2C6, 0x7C07,0xE2C7, 0x7C13,0xE2C8, 0x7BF3,0xE2C9, 0x7BF7,0xE2CA, 0x7C17,0xE2CB, 0x7C0D,0xE2CC, 0x7BF6,0xE2CD, 0x7C23,0xE2CE, 0x7C27,0xE2CF, 0x7C2A,0xE2D0, 0x7C1F,0xE2D1, 0x7C37,0xE2D2, 0x7C2B,0xE2D3, 0x7C3D,0xE2D4, 0x7C4C,0xE2D5, 0x7C43,0xE2D6, 0x7C54,0xE2D7, 0x7C4F,0xE2D8, 0x7C40,0xE2D9, 0x7C50,0xE2DA, 0x7C58,0xE2DB, 0x7C5F,0xE2DC, 0x7C64,0xE2DD, 0x7C56,0xE2DE, 0x7C65,0xE2DF, 0x7C6C,0xE2E0, 0x7C75,0xE2E1, 0x7C83,0xE2E2, 0x7C90,0xE2E3, 0x7CA4,0xE2E4, 0x7CAD,0xE2E5, 0x7CA2,0xE2E6, 0x7CAB,0xE2E7, 0x7CA1,0xE2E8, 0x7CA8,0xE2E9, 0x7CB3,0xE2EA, 0x7CB2,0xE2EB, 0x7CB1,0xE2EC, 0x7CAE,0xE2ED, 0x7CB9,0xE2EE, 0x7CBD,0xE2EF, 0x7CC0,0xE2F0, 0x7CC5,0xE2F1, 0x7CC2,0xE2F2, 0x7CD8,0xE2F3, 0x7CD2,0xE2F4, 0x7CDC,0xE2F5, 0x7CE2,0xE2F6, 0x9B3B,0xE2F7, 0x7CEF,0xE2F8, 0x7CF2,0xE2F9, 0x7CF4,0xE2FA, 0x7CF6,0xE2FB, 0x7CFA,0xE2FC, 0x7D06,0xE340, 0x7D02,0xE341, 0x7D1C,0xE342, 0x7D15,0xE343, 0x7D0A,0xE344, 0x7D45,0xE345, 0x7D4B,0xE346, 0x7D2E,0xE347, 0x7D32,0xE348, 0x7D3F,0xE349, 0x7D35,0xE34A, 0x7D46,0xE34B, 0x7D73,0xE34C, 0x7D56,0xE34D, 0x7D4E,0xE34E, 0x7D72,0xE34F, 0x7D68,0xE350, 0x7D6E,0xE351, 0x7D4F,0xE352, 0x7D63,0xE353, 0x7D93,0xE354, 0x7D89,0xE355, 0x7D5B,0xE356, 0x7D8F,0xE357, 0x7D7D,0xE358, 0x7D9B,0xE359, 0x7DBA,0xE35A, 0x7DAE,0xE35B, 0x7DA3,0xE35C, 0x7DB5,0xE35D, 0x7DC7,0xE35E, 0x7DBD,0xE35F, 0x7DAB,0xE360, 0x7E3D,0xE361, 0x7DA2,0xE362, 0x7DAF,0xE363, 0x7DDC,0xE364, 0x7DB8,0xE365, 0x7D9F,0xE366, 0x7DB0,0xE367, 0x7DD8,0xE368, 0x7DDD,0xE369, 0x7DE4,0xE36A, 0x7DDE,0xE36B, 0x7DFB,0xE36C, 0x7DF2,0xE36D, 0x7DE1,0xE36E, 0x7E05,0xE36F, 0x7E0A,0xE370, 0x7E23,0xE371, 0x7E21,0xE372, 0x7E12,0xE373, 0x7E31,0xE374, 0x7E1F,0xE375, 0x7E09,0xE376, 0x7E0B,0xE377, 0x7E22,0xE378, 0x7E46,0xE379, 0x7E66,0xE37A, 0x7E3B,0xE37B, 0x7E35,0xE37C, 0x7E39,0xE37D, 0x7E43,0xE37E, 0x7E37,0xE380, 0x7E32,0xE381, 0x7E3A,0xE382, 0x7E67,0xE383, 0x7E5D,0xE384, 0x7E56,0xE385, 
0x7E5E,0xE386, 0x7E59,0xE387, 0x7E5A,0xE388, 0x7E79,0xE389, 0x7E6A,0xE38A, 0x7E69,0xE38B, 0x7E7C,0xE38C, 0x7E7B,0xE38D, 0x7E83,0xE38E, 0x7DD5,0xE38F, 0x7E7D,0xE390, 0x8FAE,0xE391, 0x7E7F,0xE392, 0x7E88,0xE393, 0x7E89,0xE394, 0x7E8C,0xE395, 0x7E92,0xE396, 0x7E90,0xE397, 0x7E93,0xE398, 0x7E94,0xE399, 0x7E96,0xE39A, 0x7E8E,0xE39B, 0x7E9B,0xE39C, 0x7E9C,0xE39D, 0x7F38,0xE39E, 0x7F3A,0xE39F, 0x7F45,0xE3A0, 0x7F4C,0xE3A1, 0x7F4D,0xE3A2, 0x7F4E,0xE3A3, 0x7F50,0xE3A4, 0x7F51,0xE3A5, 0x7F55,0xE3A6, 0x7F54,0xE3A7, 0x7F58,0xE3A8, 0x7F5F,0xE3A9, 0x7F60,0xE3AA, 0x7F68,0xE3AB, 0x7F69,0xE3AC, 0x7F67,0xE3AD, 0x7F78,0xE3AE, 0x7F82,0xE3AF, 0x7F86,0xE3B0, 0x7F83,0xE3B1, 0x7F88,0xE3B2, 0x7F87,0xE3B3, 0x7F8C,0xE3B4, 0x7F94,0xE3B5, 0x7F9E,0xE3B6, 0x7F9D,0xE3B7, 0x7F9A,0xE3B8, 0x7FA3,0xE3B9, 0x7FAF,0xE3BA, 0x7FB2,0xE3BB, 0x7FB9,0xE3BC, 0x7FAE,0xE3BD, 0x7FB6,0xE3BE, 0x7FB8,0xE3BF, 0x8B71,0xE3C0, 0x7FC5,0xE3C1, 0x7FC6,0xE3C2, 0x7FCA,0xE3C3, 0x7FD5,0xE3C4, 0x7FD4,0xE3C5, 0x7FE1,0xE3C6, 0x7FE6,0xE3C7, 0x7FE9,0xE3C8, 0x7FF3,0xE3C9, 0x7FF9,0xE3CA, 0x98DC,0xE3CB, 0x8006,0xE3CC, 0x8004,0xE3CD, 0x800B,0xE3CE, 0x8012,0xE3CF, 0x8018,0xE3D0, 0x8019,0xE3D1, 0x801C,0xE3D2, 0x8021,0xE3D3, 0x8028,0xE3D4, 0x803F,0xE3D5, 0x803B,0xE3D6, 0x804A,0xE3D7, 0x8046,0xE3D8, 0x8052,0xE3D9, 0x8058,0xE3DA, 0x805A,0xE3DB, 0x805F,0xE3DC, 0x8062,0xE3DD, 0x8068,0xE3DE, 0x8073,0xE3DF, 0x8072,0xE3E0, 0x8070,0xE3E1, 0x8076,0xE3E2, 0x8079,0xE3E3, 0x807D,0xE3E4, 0x807F,0xE3E5, 0x8084,0xE3E6, 0x8086,0xE3E7, 0x8085,0xE3E8, 0x809B,0xE3E9, 0x8093,0xE3EA, 0x809A,0xE3EB, 0x80AD,0xE3EC, 0x5190,0xE3ED, 0x80AC,0xE3EE, 0x80DB,0xE3EF, 0x80E5,0xE3F0, 0x80D9,0xE3F1, 0x80DD,0xE3F2, 0x80C4,0xE3F3, 0x80DA,0xE3F4, 0x80D6,0xE3F5, 0x8109,0xE3F6, 0x80EF,0xE3F7, 0x80F1,0xE3F8, 0x811B,0xE3F9, 0x8129,0xE3FA, 0x8123,0xE3FB, 0x812F,0xE3FC, 0x814B,0xE440, 0x968B,0xE441, 0x8146,0xE442, 0x813E,0xE443, 0x8153,0xE444, 0x8151,0xE445, 0x80FC,0xE446, 0x8171,0xE447, 0x816E,0xE448, 0x8165,0xE449, 0x8166,0xE44A, 0x8174,0xE44B, 0x8183,0xE44C, 0x8188,0xE44D, 0x818A,0xE44E, 0x8180,0xE44F, 0x8182,0xE450, 0x81A0,0xE451, 0x8195,0xE452, 0x81A4,0xE453, 0x81A3,0xE454, 0x815F,0xE455, 0x8193,0xE456, 0x81A9,0xE457, 0x81B0,0xE458, 0x81B5,0xE459, 0x81BE,0xE45A, 0x81B8,0xE45B, 0x81BD,0xE45C, 0x81C0,0xE45D, 0x81C2,0xE45E, 0x81BA,0xE45F, 0x81C9,0xE460, 0x81CD,0xE461, 0x81D1,0xE462, 0x81D9,0xE463, 0x81D8,0xE464, 0x81C8,0xE465, 0x81DA,0xE466, 0x81DF,0xE467, 0x81E0,0xE468, 0x81E7,0xE469, 0x81FA,0xE46A, 0x81FB,0xE46B, 0x81FE,0xE46C, 0x8201,0xE46D, 0x8202,0xE46E, 0x8205,0xE46F, 0x8207,0xE470, 0x820A,0xE471, 0x820D,0xE472, 0x8210,0xE473, 0x8216,0xE474, 0x8229,0xE475, 0x822B,0xE476, 0x8238,0xE477, 0x8233,0xE478, 0x8240,0xE479, 0x8259,0xE47A, 0x8258,0xE47B, 0x825D,0xE47C, 0x825A,0xE47D, 0x825F,0xE47E, 0x8264,0xE480, 0x8262,0xE481, 0x8268,0xE482, 0x826A,0xE483, 0x826B,0xE484, 0x822E,0xE485, 0x8271,0xE486, 0x8277,0xE487, 0x8278,0xE488, 0x827E,0xE489, 0x828D,0xE48A, 0x8292,0xE48B, 0x82AB,0xE48C, 0x829F,0xE48D, 0x82BB,0xE48E, 0x82AC,0xE48F, 0x82E1,0xE490, 0x82E3,0xE491, 0x82DF,0xE492, 0x82D2,0xE493, 0x82F4,0xE494, 0x82F3,0xE495, 0x82FA,0xE496, 0x8393,0xE497, 0x8303,0xE498, 0x82FB,0xE499, 0x82F9,0xE49A, 0x82DE,0xE49B, 0x8306,0xE49C, 0x82DC,0xE49D, 0x8309,0xE49E, 0x82D9,0xE49F, 0x8335,0xE4A0, 0x8334,0xE4A1, 0x8316,0xE4A2, 0x8332,0xE4A3, 0x8331,0xE4A4, 0x8340,0xE4A5, 0x8339,0xE4A6, 0x8350,0xE4A7, 0x8345,0xE4A8, 0x832F,0xE4A9, 0x832B,0xE4AA, 0x8317,0xE4AB, 0x8318,0xE4AC, 0x8385,0xE4AD, 0x839A,0xE4AE, 0x83AA,0xE4AF, 0x839F,0xE4B0, 0x83A2,0xE4B1, 0x8396,0xE4B2, 0x8323,0xE4B3, 0x838E,0xE4B4, 0x8387,0xE4B5, 0x838A,0xE4B6, 
0x837C,0xE4B7, 0x83B5,0xE4B8, 0x8373,0xE4B9, 0x8375,0xE4BA, 0x83A0,0xE4BB, 0x8389,0xE4BC, 0x83A8,0xE4BD, 0x83F4,0xE4BE, 0x8413,0xE4BF, 0x83EB,0xE4C0, 0x83CE,0xE4C1, 0x83FD,0xE4C2, 0x8403,0xE4C3, 0x83D8,0xE4C4, 0x840B,0xE4C5, 0x83C1,0xE4C6, 0x83F7,0xE4C7, 0x8407,0xE4C8, 0x83E0,0xE4C9, 0x83F2,0xE4CA, 0x840D,0xE4CB, 0x8422,0xE4CC, 0x8420,0xE4CD, 0x83BD,0xE4CE, 0x8438,0xE4CF, 0x8506,0xE4D0, 0x83FB,0xE4D1, 0x846D,0xE4D2, 0x842A,0xE4D3, 0x843C,0xE4D4, 0x855A,0xE4D5, 0x8484,0xE4D6, 0x8477,0xE4D7, 0x846B,0xE4D8, 0x84AD,0xE4D9, 0x846E,0xE4DA, 0x8482,0xE4DB, 0x8469,0xE4DC, 0x8446,0xE4DD, 0x842C,0xE4DE, 0x846F,0xE4DF, 0x8479,0xE4E0, 0x8435,0xE4E1, 0x84CA,0xE4E2, 0x8462,0xE4E3, 0x84B9,0xE4E4, 0x84BF,0xE4E5, 0x849F,0xE4E6, 0x84D9,0xE4E7, 0x84CD,0xE4E8, 0x84BB,0xE4E9, 0x84DA,0xE4EA, 0x84D0,0xE4EB, 0x84C1,0xE4EC, 0x84C6,0xE4ED, 0x84D6,0xE4EE, 0x84A1,0xE4EF, 0x8521,0xE4F0, 0x84FF,0xE4F1, 0x84F4,0xE4F2, 0x8517,0xE4F3, 0x8518,0xE4F4, 0x852C,0xE4F5, 0x851F,0xE4F6, 0x8515,0xE4F7, 0x8514,0xE4F8, 0x84FC,0xE4F9, 0x8540,0xE4FA, 0x8563,0xE4FB, 0x8558,0xE4FC, 0x8548,0xE540, 0x8541,0xE541, 0x8602,0xE542, 0x854B,0xE543, 0x8555,0xE544, 0x8580,0xE545, 0x85A4,0xE546, 0x8588,0xE547, 0x8591,0xE548, 0x858A,0xE549, 0x85A8,0xE54A, 0x856D,0xE54B, 0x8594,0xE54C, 0x859B,0xE54D, 0x85EA,0xE54E, 0x8587,0xE54F, 0x859C,0xE550, 0x8577,0xE551, 0x857E,0xE552, 0x8590,0xE553, 0x85C9,0xE554, 0x85BA,0xE555, 0x85CF,0xE556, 0x85B9,0xE557, 0x85D0,0xE558, 0x85D5,0xE559, 0x85DD,0xE55A, 0x85E5,0xE55B, 0x85DC,0xE55C, 0x85F9,0xE55D, 0x860A,0xE55E, 0x8613,0xE55F, 0x860B,0xE560, 0x85FE,0xE561, 0x85FA,0xE562, 0x8606,0xE563, 0x8622,0xE564, 0x861A,0xE565, 0x8630,0xE566, 0x863F,0xE567, 0x864D,0xE568, 0x4E55,0xE569, 0x8654,0xE56A, 0x865F,0xE56B, 0x8667,0xE56C, 0x8671,0xE56D, 0x8693,0xE56E, 0x86A3,0xE56F, 0x86A9,0xE570, 0x86AA,0xE571, 0x868B,0xE572, 0x868C,0xE573, 0x86B6,0xE574, 0x86AF,0xE575, 0x86C4,0xE576, 0x86C6,0xE577, 0x86B0,0xE578, 0x86C9,0xE579, 0x8823,0xE57A, 0x86AB,0xE57B, 0x86D4,0xE57C, 0x86DE,0xE57D, 0x86E9,0xE57E, 0x86EC,0xE580, 0x86DF,0xE581, 0x86DB,0xE582, 0x86EF,0xE583, 0x8712,0xE584, 0x8706,0xE585, 0x8708,0xE586, 0x8700,0xE587, 0x8703,0xE588, 0x86FB,0xE589, 0x8711,0xE58A, 0x8709,0xE58B, 0x870D,0xE58C, 0x86F9,0xE58D, 0x870A,0xE58E, 0x8734,0xE58F, 0x873F,0xE590, 0x8737,0xE591, 0x873B,0xE592, 0x8725,0xE593, 0x8729,0xE594, 0x871A,0xE595, 0x8760,0xE596, 0x875F,0xE597, 0x8778,0xE598, 0x874C,0xE599, 0x874E,0xE59A, 0x8774,0xE59B, 0x8757,0xE59C, 0x8768,0xE59D, 0x876E,0xE59E, 0x8759,0xE59F, 0x8753,0xE5A0, 0x8763,0xE5A1, 0x876A,0xE5A2, 0x8805,0xE5A3, 0x87A2,0xE5A4, 0x879F,0xE5A5, 0x8782,0xE5A6, 0x87AF,0xE5A7, 0x87CB,0xE5A8, 0x87BD,0xE5A9, 0x87C0,0xE5AA, 0x87D0,0xE5AB, 0x96D6,0xE5AC, 0x87AB,0xE5AD, 0x87C4,0xE5AE, 0x87B3,0xE5AF, 0x87C7,0xE5B0, 0x87C6,0xE5B1, 0x87BB,0xE5B2, 0x87EF,0xE5B3, 0x87F2,0xE5B4, 0x87E0,0xE5B5, 0x880F,0xE5B6, 0x880D,0xE5B7, 0x87FE,0xE5B8, 0x87F6,0xE5B9, 0x87F7,0xE5BA, 0x880E,0xE5BB, 0x87D2,0xE5BC, 0x8811,0xE5BD, 0x8816,0xE5BE, 0x8815,0xE5BF, 0x8822,0xE5C0, 0x8821,0xE5C1, 0x8831,0xE5C2, 0x8836,0xE5C3, 0x8839,0xE5C4, 0x8827,0xE5C5, 0x883B,0xE5C6, 0x8844,0xE5C7, 0x8842,0xE5C8, 0x8852,0xE5C9, 0x8859,0xE5CA, 0x885E,0xE5CB, 0x8862,0xE5CC, 0x886B,0xE5CD, 0x8881,0xE5CE, 0x887E,0xE5CF, 0x889E,0xE5D0, 0x8875,0xE5D1, 0x887D,0xE5D2, 0x88B5,0xE5D3, 0x8872,0xE5D4, 0x8882,0xE5D5, 0x8897,0xE5D6, 0x8892,0xE5D7, 0x88AE,0xE5D8, 0x8899,0xE5D9, 0x88A2,0xE5DA, 0x888D,0xE5DB, 0x88A4,0xE5DC, 0x88B0,0xE5DD, 0x88BF,0xE5DE, 0x88B1,0xE5DF, 0x88C3,0xE5E0, 0x88C4,0xE5E1, 0x88D4,0xE5E2, 0x88D8,0xE5E3, 0x88D9,0xE5E4, 0x88DD,0xE5E5, 0x88F9,0xE5E6, 0x8902,0xE5E7, 
0x88FC,0xE5E8, 0x88F4,0xE5E9, 0x88E8,0xE5EA, 0x88F2,0xE5EB, 0x8904,0xE5EC, 0x890C,0xE5ED, 0x890A,0xE5EE, 0x8913,0xE5EF, 0x8943,0xE5F0, 0x891E,0xE5F1, 0x8925,0xE5F2, 0x892A,0xE5F3, 0x892B,0xE5F4, 0x8941,0xE5F5, 0x8944,0xE5F6, 0x893B,0xE5F7, 0x8936,0xE5F8, 0x8938,0xE5F9, 0x894C,0xE5FA, 0x891D,0xE5FB, 0x8960,0xE5FC, 0x895E,0xE640, 0x8966,0xE641, 0x8964,0xE642, 0x896D,0xE643, 0x896A,0xE644, 0x896F,0xE645, 0x8974,0xE646, 0x8977,0xE647, 0x897E,0xE648, 0x8983,0xE649, 0x8988,0xE64A, 0x898A,0xE64B, 0x8993,0xE64C, 0x8998,0xE64D, 0x89A1,0xE64E, 0x89A9,0xE64F, 0x89A6,0xE650, 0x89AC,0xE651, 0x89AF,0xE652, 0x89B2,0xE653, 0x89BA,0xE654, 0x89BD,0xE655, 0x89BF,0xE656, 0x89C0,0xE657, 0x89DA,0xE658, 0x89DC,0xE659, 0x89DD,0xE65A, 0x89E7,0xE65B, 0x89F4,0xE65C, 0x89F8,0xE65D, 0x8A03,0xE65E, 0x8A16,0xE65F, 0x8A10,0xE660, 0x8A0C,0xE661, 0x8A1B,0xE662, 0x8A1D,0xE663, 0x8A25,0xE664, 0x8A36,0xE665, 0x8A41,0xE666, 0x8A5B,0xE667, 0x8A52,0xE668, 0x8A46,0xE669, 0x8A48,0xE66A, 0x8A7C,0xE66B, 0x8A6D,0xE66C, 0x8A6C,0xE66D, 0x8A62,0xE66E, 0x8A85,0xE66F, 0x8A82,0xE670, 0x8A84,0xE671, 0x8AA8,0xE672, 0x8AA1,0xE673, 0x8A91,0xE674, 0x8AA5,0xE675, 0x8AA6,0xE676, 0x8A9A,0xE677, 0x8AA3,0xE678, 0x8AC4,0xE679, 0x8ACD,0xE67A, 0x8AC2,0xE67B, 0x8ADA,0xE67C, 0x8AEB,0xE67D, 0x8AF3,0xE67E, 0x8AE7,0xE680, 0x8AE4,0xE681, 0x8AF1,0xE682, 0x8B14,0xE683, 0x8AE0,0xE684, 0x8AE2,0xE685, 0x8AF7,0xE686, 0x8ADE,0xE687, 0x8ADB,0xE688, 0x8B0C,0xE689, 0x8B07,0xE68A, 0x8B1A,0xE68B, 0x8AE1,0xE68C, 0x8B16,0xE68D, 0x8B10,0xE68E, 0x8B17,0xE68F, 0x8B20,0xE690, 0x8B33,0xE691, 0x97AB,0xE692, 0x8B26,0xE693, 0x8B2B,0xE694, 0x8B3E,0xE695, 0x8B28,0xE696, 0x8B41,0xE697, 0x8B4C,0xE698, 0x8B4F,0xE699, 0x8B4E,0xE69A, 0x8B49,0xE69B, 0x8B56,0xE69C, 0x8B5B,0xE69D, 0x8B5A,0xE69E, 0x8B6B,0xE69F, 0x8B5F,0xE6A0, 0x8B6C,0xE6A1, 0x8B6F,0xE6A2, 0x8B74,0xE6A3, 0x8B7D,0xE6A4, 0x8B80,0xE6A5, 0x8B8C,0xE6A6, 0x8B8E,0xE6A7, 0x8B92,0xE6A8, 0x8B93,0xE6A9, 0x8B96,0xE6AA, 0x8B99,0xE6AB, 0x8B9A,0xE6AC, 0x8C3A,0xE6AD, 0x8C41,0xE6AE, 0x8C3F,0xE6AF, 0x8C48,0xE6B0, 0x8C4C,0xE6B1, 0x8C4E,0xE6B2, 0x8C50,0xE6B3, 0x8C55,0xE6B4, 0x8C62,0xE6B5, 0x8C6C,0xE6B6, 0x8C78,0xE6B7, 0x8C7A,0xE6B8, 0x8C82,0xE6B9, 0x8C89,0xE6BA, 0x8C85,0xE6BB, 0x8C8A,0xE6BC, 0x8C8D,0xE6BD, 0x8C8E,0xE6BE, 0x8C94,0xE6BF, 0x8C7C,0xE6C0, 0x8C98,0xE6C1, 0x621D,0xE6C2, 0x8CAD,0xE6C3, 0x8CAA,0xE6C4, 0x8CBD,0xE6C5, 0x8CB2,0xE6C6, 0x8CB3,0xE6C7, 0x8CAE,0xE6C8, 0x8CB6,0xE6C9, 0x8CC8,0xE6CA, 0x8CC1,0xE6CB, 0x8CE4,0xE6CC, 0x8CE3,0xE6CD, 0x8CDA,0xE6CE, 0x8CFD,0xE6CF, 0x8CFA,0xE6D0, 0x8CFB,0xE6D1, 0x8D04,0xE6D2, 0x8D05,0xE6D3, 0x8D0A,0xE6D4, 0x8D07,0xE6D5, 0x8D0F,0xE6D6, 0x8D0D,0xE6D7, 0x8D10,0xE6D8, 0x9F4E,0xE6D9, 0x8D13,0xE6DA, 0x8CCD,0xE6DB, 0x8D14,0xE6DC, 0x8D16,0xE6DD, 0x8D67,0xE6DE, 0x8D6D,0xE6DF, 0x8D71,0xE6E0, 0x8D73,0xE6E1, 0x8D81,0xE6E2, 0x8D99,0xE6E3, 0x8DC2,0xE6E4, 0x8DBE,0xE6E5, 0x8DBA,0xE6E6, 0x8DCF,0xE6E7, 0x8DDA,0xE6E8, 0x8DD6,0xE6E9, 0x8DCC,0xE6EA, 0x8DDB,0xE6EB, 0x8DCB,0xE6EC, 0x8DEA,0xE6ED, 0x8DEB,0xE6EE, 0x8DDF,0xE6EF, 0x8DE3,0xE6F0, 0x8DFC,0xE6F1, 0x8E08,0xE6F2, 0x8E09,0xE6F3, 0x8DFF,0xE6F4, 0x8E1D,0xE6F5, 0x8E1E,0xE6F6, 0x8E10,0xE6F7, 0x8E1F,0xE6F8, 0x8E42,0xE6F9, 0x8E35,0xE6FA, 0x8E30,0xE6FB, 0x8E34,0xE6FC, 0x8E4A,0xE740, 0x8E47,0xE741, 0x8E49,0xE742, 0x8E4C,0xE743, 0x8E50,0xE744, 0x8E48,0xE745, 0x8E59,0xE746, 0x8E64,0xE747, 0x8E60,0xE748, 0x8E2A,0xE749, 0x8E63,0xE74A, 0x8E55,0xE74B, 0x8E76,0xE74C, 0x8E72,0xE74D, 0x8E7C,0xE74E, 0x8E81,0xE74F, 0x8E87,0xE750, 0x8E85,0xE751, 0x8E84,0xE752, 0x8E8B,0xE753, 0x8E8A,0xE754, 0x8E93,0xE755, 0x8E91,0xE756, 0x8E94,0xE757, 0x8E99,0xE758, 0x8EAA,0xE759, 0x8EA1,0xE75A, 0x8EAC,0xE75B, 
0x8EB0,0xE75C, 0x8EC6,0xE75D, 0x8EB1,0xE75E, 0x8EBE,0xE75F, 0x8EC5,0xE760, 0x8EC8,0xE761, 0x8ECB,0xE762, 0x8EDB,0xE763, 0x8EE3,0xE764, 0x8EFC,0xE765, 0x8EFB,0xE766, 0x8EEB,0xE767, 0x8EFE,0xE768, 0x8F0A,0xE769, 0x8F05,0xE76A, 0x8F15,0xE76B, 0x8F12,0xE76C, 0x8F19,0xE76D, 0x8F13,0xE76E, 0x8F1C,0xE76F, 0x8F1F,0xE770, 0x8F1B,0xE771, 0x8F0C,0xE772, 0x8F26,0xE773, 0x8F33,0xE774, 0x8F3B,0xE775, 0x8F39,0xE776, 0x8F45,0xE777, 0x8F42,0xE778, 0x8F3E,0xE779, 0x8F4C,0xE77A, 0x8F49,0xE77B, 0x8F46,0xE77C, 0x8F4E,0xE77D, 0x8F57,0xE77E, 0x8F5C,0xE780, 0x8F62,0xE781, 0x8F63,0xE782, 0x8F64,0xE783, 0x8F9C,0xE784, 0x8F9F,0xE785, 0x8FA3,0xE786, 0x8FAD,0xE787, 0x8FAF,0xE788, 0x8FB7,0xE789, 0x8FDA,0xE78A, 0x8FE5,0xE78B, 0x8FE2,0xE78C, 0x8FEA,0xE78D, 0x8FEF,0xE78E, 0x9087,0xE78F, 0x8FF4,0xE790, 0x9005,0xE791, 0x8FF9,0xE792, 0x8FFA,0xE793, 0x9011,0xE794, 0x9015,0xE795, 0x9021,0xE796, 0x900D,0xE797, 0x901E,0xE798, 0x9016,0xE799, 0x900B,0xE79A, 0x9027,0xE79B, 0x9036,0xE79C, 0x9035,0xE79D, 0x9039,0xE79E, 0x8FF8,0xE79F, 0x904F,0xE7A0, 0x9050,0xE7A1, 0x9051,0xE7A2, 0x9052,0xE7A3, 0x900E,0xE7A4, 0x9049,0xE7A5, 0x903E,0xE7A6, 0x9056,0xE7A7, 0x9058,0xE7A8, 0x905E,0xE7A9, 0x9068,0xE7AA, 0x906F,0xE7AB, 0x9076,0xE7AC, 0x96A8,0xE7AD, 0x9072,0xE7AE, 0x9082,0xE7AF, 0x907D,0xE7B0, 0x9081,0xE7B1, 0x9080,0xE7B2, 0x908A,0xE7B3, 0x9089,0xE7B4, 0x908F,0xE7B5, 0x90A8,0xE7B6, 0x90AF,0xE7B7, 0x90B1,0xE7B8, 0x90B5,0xE7B9, 0x90E2,0xE7BA, 0x90E4,0xE7BB, 0x6248,0xE7BC, 0x90DB,0xE7BD, 0x9102,0xE7BE, 0x9112,0xE7BF, 0x9119,0xE7C0, 0x9132,0xE7C1, 0x9130,0xE7C2, 0x914A,0xE7C3, 0x9156,0xE7C4, 0x9158,0xE7C5, 0x9163,0xE7C6, 0x9165,0xE7C7, 0x9169,0xE7C8, 0x9173,0xE7C9, 0x9172,0xE7CA, 0x918B,0xE7CB, 0x9189,0xE7CC, 0x9182,0xE7CD, 0x91A2,0xE7CE, 0x91AB,0xE7CF, 0x91AF,0xE7D0, 0x91AA,0xE7D1, 0x91B5,0xE7D2, 0x91B4,0xE7D3, 0x91BA,0xE7D4, 0x91C0,0xE7D5, 0x91C1,0xE7D6, 0x91C9,0xE7D7, 0x91CB,0xE7D8, 0x91D0,0xE7D9, 0x91D6,0xE7DA, 0x91DF,0xE7DB, 0x91E1,0xE7DC, 0x91DB,0xE7DD, 0x91FC,0xE7DE, 0x91F5,0xE7DF, 0x91F6,0xE7E0, 0x921E,0xE7E1, 0x91FF,0xE7E2, 0x9214,0xE7E3, 0x922C,0xE7E4, 0x9215,0xE7E5, 0x9211,0xE7E6, 0x925E,0xE7E7, 0x9257,0xE7E8, 0x9245,0xE7E9, 0x9249,0xE7EA, 0x9264,0xE7EB, 0x9248,0xE7EC, 0x9295,0xE7ED, 0x923F,0xE7EE, 0x924B,0xE7EF, 0x9250,0xE7F0, 0x929C,0xE7F1, 0x9296,0xE7F2, 0x9293,0xE7F3, 0x929B,0xE7F4, 0x925A,0xE7F5, 0x92CF,0xE7F6, 0x92B9,0xE7F7, 0x92B7,0xE7F8, 0x92E9,0xE7F9, 0x930F,0xE7FA, 0x92FA,0xE7FB, 0x9344,0xE7FC, 0x932E,0xE840, 0x9319,0xE841, 0x9322,0xE842, 0x931A,0xE843, 0x9323,0xE844, 0x933A,0xE845, 0x9335,0xE846, 0x933B,0xE847, 0x935C,0xE848, 0x9360,0xE849, 0x937C,0xE84A, 0x936E,0xE84B, 0x9356,0xE84C, 0x93B0,0xE84D, 0x93AC,0xE84E, 0x93AD,0xE84F, 0x9394,0xE850, 0x93B9,0xE851, 0x93D6,0xE852, 0x93D7,0xE853, 0x93E8,0xE854, 0x93E5,0xE855, 0x93D8,0xE856, 0x93C3,0xE857, 0x93DD,0xE858, 0x93D0,0xE859, 0x93C8,0xE85A, 0x93E4,0xE85B, 0x941A,0xE85C, 0x9414,0xE85D, 0x9413,0xE85E, 0x9403,0xE85F, 0x9407,0xE860, 0x9410,0xE861, 0x9436,0xE862, 0x942B,0xE863, 0x9435,0xE864, 0x9421,0xE865, 0x943A,0xE866, 0x9441,0xE867, 0x9452,0xE868, 0x9444,0xE869, 0x945B,0xE86A, 0x9460,0xE86B, 0x9462,0xE86C, 0x945E,0xE86D, 0x946A,0xE86E, 0x9229,0xE86F, 0x9470,0xE870, 0x9475,0xE871, 0x9477,0xE872, 0x947D,0xE873, 0x945A,0xE874, 0x947C,0xE875, 0x947E,0xE876, 0x9481,0xE877, 0x947F,0xE878, 0x9582,0xE879, 0x9587,0xE87A, 0x958A,0xE87B, 0x9594,0xE87C, 0x9596,0xE87D, 0x9598,0xE87E, 0x9599,0xE880, 0x95A0,0xE881, 0x95A8,0xE882, 0x95A7,0xE883, 0x95AD,0xE884, 0x95BC,0xE885, 0x95BB,0xE886, 0x95B9,0xE887, 0x95BE,0xE888, 0x95CA,0xE889, 0x6FF6,0xE88A, 0x95C3,0xE88B, 0x95CD,0xE88C, 0x95CC,0xE88D, 
0x95D5,0xE88E, 0x95D4,0xE88F, 0x95D6,0xE890, 0x95DC,0xE891, 0x95E1,0xE892, 0x95E5,0xE893, 0x95E2,0xE894, 0x9621,0xE895, 0x9628,0xE896, 0x962E,0xE897, 0x962F,0xE898, 0x9642,0xE899, 0x964C,0xE89A, 0x964F,0xE89B, 0x964B,0xE89C, 0x9677,0xE89D, 0x965C,0xE89E, 0x965E,0xE89F, 0x965D,0xE8A0, 0x965F,0xE8A1, 0x9666,0xE8A2, 0x9672,0xE8A3, 0x966C,0xE8A4, 0x968D,0xE8A5, 0x9698,0xE8A6, 0x9695,0xE8A7, 0x9697,0xE8A8, 0x96AA,0xE8A9, 0x96A7,0xE8AA, 0x96B1,0xE8AB, 0x96B2,0xE8AC, 0x96B0,0xE8AD, 0x96B4,0xE8AE, 0x96B6,0xE8AF, 0x96B8,0xE8B0, 0x96B9,0xE8B1, 0x96CE,0xE8B2, 0x96CB,0xE8B3, 0x96C9,0xE8B4, 0x96CD,0xE8B5, 0x894D,0xE8B6, 0x96DC,0xE8B7, 0x970D,0xE8B8, 0x96D5,0xE8B9, 0x96F9,0xE8BA, 0x9704,0xE8BB, 0x9706,0xE8BC, 0x9708,0xE8BD, 0x9713,0xE8BE, 0x970E,0xE8BF, 0x9711,0xE8C0, 0x970F,0xE8C1, 0x9716,0xE8C2, 0x9719,0xE8C3, 0x9724,0xE8C4, 0x972A,0xE8C5, 0x9730,0xE8C6, 0x9739,0xE8C7, 0x973D,0xE8C8, 0x973E,0xE8C9, 0x9744,0xE8CA, 0x9746,0xE8CB, 0x9748,0xE8CC, 0x9742,0xE8CD, 0x9749,0xE8CE, 0x975C,0xE8CF, 0x9760,0xE8D0, 0x9764,0xE8D1, 0x9766,0xE8D2, 0x9768,0xE8D3, 0x52D2,0xE8D4, 0x976B,0xE8D5, 0x9771,0xE8D6, 0x9779,0xE8D7, 0x9785,0xE8D8, 0x977C,0xE8D9, 0x9781,0xE8DA, 0x977A,0xE8DB, 0x9786,0xE8DC, 0x978B,0xE8DD, 0x978F,0xE8DE, 0x9790,0xE8DF, 0x979C,0xE8E0, 0x97A8,0xE8E1, 0x97A6,0xE8E2, 0x97A3,0xE8E3, 0x97B3,0xE8E4, 0x97B4,0xE8E5, 0x97C3,0xE8E6, 0x97C6,0xE8E7, 0x97C8,0xE8E8, 0x97CB,0xE8E9, 0x97DC,0xE8EA, 0x97ED,0xE8EB, 0x9F4F,0xE8EC, 0x97F2,0xE8ED, 0x7ADF,0xE8EE, 0x97F6,0xE8EF, 0x97F5,0xE8F0, 0x980F,0xE8F1, 0x980C,0xE8F2, 0x9838,0xE8F3, 0x9824,0xE8F4, 0x9821,0xE8F5, 0x9837,0xE8F6, 0x983D,0xE8F7, 0x9846,0xE8F8, 0x984F,0xE8F9, 0x984B,0xE8FA, 0x986B,0xE8FB, 0x986F,0xE8FC, 0x9870,0xE940, 0x9871,0xE941, 0x9874,0xE942, 0x9873,0xE943, 0x98AA,0xE944, 0x98AF,0xE945, 0x98B1,0xE946, 0x98B6,0xE947, 0x98C4,0xE948, 0x98C3,0xE949, 0x98C6,0xE94A, 0x98E9,0xE94B, 0x98EB,0xE94C, 0x9903,0xE94D, 0x9909,0xE94E, 0x9912,0xE94F, 0x9914,0xE950, 0x9918,0xE951, 0x9921,0xE952, 0x991D,0xE953, 0x991E,0xE954, 0x9924,0xE955, 0x9920,0xE956, 0x992C,0xE957, 0x992E,0xE958, 0x993D,0xE959, 0x993E,0xE95A, 0x9942,0xE95B, 0x9949,0xE95C, 0x9945,0xE95D, 0x9950,0xE95E, 0x994B,0xE95F, 0x9951,0xE960, 0x9952,0xE961, 0x994C,0xE962, 0x9955,0xE963, 0x9997,0xE964, 0x9998,0xE965, 0x99A5,0xE966, 0x99AD,0xE967, 0x99AE,0xE968, 0x99BC,0xE969, 0x99DF,0xE96A, 0x99DB,0xE96B, 0x99DD,0xE96C, 0x99D8,0xE96D, 0x99D1,0xE96E, 0x99ED,0xE96F, 0x99EE,0xE970, 0x99F1,0xE971, 0x99F2,0xE972, 0x99FB,0xE973, 0x99F8,0xE974, 0x9A01,0xE975, 0x9A0F,0xE976, 0x9A05,0xE977, 0x99E2,0xE978, 0x9A19,0xE979, 0x9A2B,0xE97A, 0x9A37,0xE97B, 0x9A45,0xE97C, 0x9A42,0xE97D, 0x9A40,0xE97E, 0x9A43,0xE980, 0x9A3E,0xE981, 0x9A55,0xE982, 0x9A4D,0xE983, 0x9A5B,0xE984, 0x9A57,0xE985, 0x9A5F,0xE986, 0x9A62,0xE987, 0x9A65,0xE988, 0x9A64,0xE989, 0x9A69,0xE98A, 0x9A6B,0xE98B, 0x9A6A,0xE98C, 0x9AAD,0xE98D, 0x9AB0,0xE98E, 0x9ABC,0xE98F, 0x9AC0,0xE990, 0x9ACF,0xE991, 0x9AD1,0xE992, 0x9AD3,0xE993, 0x9AD4,0xE994, 0x9ADE,0xE995, 0x9ADF,0xE996, 0x9AE2,0xE997, 0x9AE3,0xE998, 0x9AE6,0xE999, 0x9AEF,0xE99A, 0x9AEB,0xE99B, 0x9AEE,0xE99C, 0x9AF4,0xE99D, 0x9AF1,0xE99E, 0x9AF7,0xE99F, 0x9AFB,0xE9A0, 0x9B06,0xE9A1, 0x9B18,0xE9A2, 0x9B1A,0xE9A3, 0x9B1F,0xE9A4, 0x9B22,0xE9A5, 0x9B23,0xE9A6, 0x9B25,0xE9A7, 0x9B27,0xE9A8, 0x9B28,0xE9A9, 0x9B29,0xE9AA, 0x9B2A,0xE9AB, 0x9B2E,0xE9AC, 0x9B2F,0xE9AD, 0x9B32,0xE9AE, 0x9B44,0xE9AF, 0x9B43,0xE9B0, 0x9B4F,0xE9B1, 0x9B4D,0xE9B2, 0x9B4E,0xE9B3, 0x9B51,0xE9B4, 0x9B58,0xE9B5, 0x9B74,0xE9B6, 0x9B93,0xE9B7, 0x9B83,0xE9B8, 0x9B91,0xE9B9, 0x9B96,0xE9BA, 0x9B97,0xE9BB, 0x9B9F,0xE9BC, 0x9BA0,0xE9BD, 0x9BA8,0xE9BE, 
0x9BB4,0xE9BF, 0x9BC0,0xE9C0, 0x9BCA,0xE9C1, 0x9BB9,0xE9C2, 0x9BC6,0xE9C3, 0x9BCF,0xE9C4, 0x9BD1,0xE9C5, 0x9BD2,0xE9C6, 0x9BE3,0xE9C7, 0x9BE2,0xE9C8, 0x9BE4,0xE9C9, 0x9BD4,0xE9CA, 0x9BE1,0xE9CB, 0x9C3A,0xE9CC, 0x9BF2,0xE9CD, 0x9BF1,0xE9CE, 0x9BF0,0xE9CF, 0x9C15,0xE9D0, 0x9C14,0xE9D1, 0x9C09,0xE9D2, 0x9C13,0xE9D3, 0x9C0C,0xE9D4, 0x9C06,0xE9D5, 0x9C08,0xE9D6, 0x9C12,0xE9D7, 0x9C0A,0xE9D8, 0x9C04,0xE9D9, 0x9C2E,0xE9DA, 0x9C1B,0xE9DB, 0x9C25,0xE9DC, 0x9C24,0xE9DD, 0x9C21,0xE9DE, 0x9C30,0xE9DF, 0x9C47,0xE9E0, 0x9C32,0xE9E1, 0x9C46,0xE9E2, 0x9C3E,0xE9E3, 0x9C5A,0xE9E4, 0x9C60,0xE9E5, 0x9C67,0xE9E6, 0x9C76,0xE9E7, 0x9C78,0xE9E8, 0x9CE7,0xE9E9, 0x9CEC,0xE9EA, 0x9CF0,0xE9EB, 0x9D09,0xE9EC, 0x9D08,0xE9ED, 0x9CEB,0xE9EE, 0x9D03,0xE9EF, 0x9D06,0xE9F0, 0x9D2A,0xE9F1, 0x9D26,0xE9F2, 0x9DAF,0xE9F3, 0x9D23,0xE9F4, 0x9D1F,0xE9F5, 0x9D44,0xE9F6, 0x9D15,0xE9F7, 0x9D12,0xE9F8, 0x9D41,0xE9F9, 0x9D3F,0xE9FA, 0x9D3E,0xE9FB, 0x9D46,0xE9FC, 0x9D48,0xEA40, 0x9D5D,0xEA41, 0x9D5E,0xEA42, 0x9D64,0xEA43, 0x9D51,0xEA44, 0x9D50,0xEA45, 0x9D59,0xEA46, 0x9D72,0xEA47, 0x9D89,0xEA48, 0x9D87,0xEA49, 0x9DAB,0xEA4A, 0x9D6F,0xEA4B, 0x9D7A,0xEA4C, 0x9D9A,0xEA4D, 0x9DA4,0xEA4E, 0x9DA9,0xEA4F, 0x9DB2,0xEA50, 0x9DC4,0xEA51, 0x9DC1,0xEA52, 0x9DBB,0xEA53, 0x9DB8,0xEA54, 0x9DBA,0xEA55, 0x9DC6,0xEA56, 0x9DCF,0xEA57, 0x9DC2,0xEA58, 0x9DD9,0xEA59, 0x9DD3,0xEA5A, 0x9DF8,0xEA5B, 0x9DE6,0xEA5C, 0x9DED,0xEA5D, 0x9DEF,0xEA5E, 0x9DFD,0xEA5F, 0x9E1A,0xEA60, 0x9E1B,0xEA61, 0x9E1E,0xEA62, 0x9E75,0xEA63, 0x9E79,0xEA64, 0x9E7D,0xEA65, 0x9E81,0xEA66, 0x9E88,0xEA67, 0x9E8B,0xEA68, 0x9E8C,0xEA69, 0x9E92,0xEA6A, 0x9E95,0xEA6B, 0x9E91,0xEA6C, 0x9E9D,0xEA6D, 0x9EA5,0xEA6E, 0x9EA9,0xEA6F, 0x9EB8,0xEA70, 0x9EAA,0xEA71, 0x9EAD,0xEA72, 0x9761,0xEA73, 0x9ECC,0xEA74, 0x9ECE,0xEA75, 0x9ECF,0xEA76, 0x9ED0,0xEA77, 0x9ED4,0xEA78, 0x9EDC,0xEA79, 0x9EDE,0xEA7A, 0x9EDD,0xEA7B, 0x9EE0,0xEA7C, 0x9EE5,0xEA7D, 0x9EE8,0xEA7E, 0x9EEF,0xEA80, 0x9EF4,0xEA81, 0x9EF6,0xEA82, 0x9EF7,0xEA83, 0x9EF9,0xEA84, 0x9EFB,0xEA85, 0x9EFC,0xEA86, 0x9EFD,0xEA87, 0x9F07,0xEA88, 0x9F08,0xEA89, 0x76B7,0xEA8A, 0x9F15,0xEA8B, 0x9F21,0xEA8C, 0x9F2C,0xEA8D, 0x9F3E,0xEA8E, 0x9F4A,0xEA8F, 0x9F52,0xEA90, 0x9F54,0xEA91, 0x9F63,0xEA92, 0x9F5F,0xEA93, 0x9F60,0xEA94, 0x9F61,0xEA95, 0x9F66,0xEA96, 0x9F67,0xEA97, 0x9F6C,0xEA98, 0x9F6A,0xEA99, 0x9F77,0xEA9A, 0x9F72,0xEA9B, 0x9F76,0xEA9C, 0x9F95,0xEA9D, 0x9F9C,0xEA9E, 0x9FA0,0xEA9F, 0x582F,0xEAA0, 0x69C7,0xEAA1, 0x9059,0xEAA2, 0x7464,0xEAA3, 0x51DC,0xEAA4, 0x7199,0xED40, 0x7E8A,0xED41, 0x891C,0xED42, 0x9348,0xED43, 0x9288,0xED44, 0x84DC,0xED45, 0x4FC9,0xED46, 0x70BB,0xED47, 0x6631,0xED48, 0x68C8,0xED49, 0x92F9,0xED4A, 0x66FB,0xED4B, 0x5F45,0xED4C, 0x4E28,0xED4D, 0x4EE1,0xED4E, 0x4EFC,0xED4F, 0x4F00,0xED50, 0x4F03,0xED51, 0x4F39,0xED52, 0x4F56,0xED53, 0x4F92,0xED54, 0x4F8A,0xED55, 0x4F9A,0xED56, 0x4F94,0xED57, 0x4FCD,0xED58, 0x5040,0xED59, 0x5022,0xED5A, 0x4FFF,0xED5B, 0x501E,0xED5C, 0x5046,0xED5D, 0x5070,0xED5E, 0x5042,0xED5F, 0x5094,0xED60, 0x50F4,0xED61, 0x50D8,0xED62, 0x514A,0xED63, 0x5164,0xED64, 0x519D,0xED65, 0x51BE,0xED66, 0x51EC,0xED67, 0x5215,0xED68, 0x529C,0xED69, 0x52A6,0xED6A, 0x52C0,0xED6B, 0x52DB,0xED6C, 0x5300,0xED6D, 0x5307,0xED6E, 0x5324,0xED6F, 0x5372,0xED70, 0x5393,0xED71, 0x53B2,0xED72, 0x53DD,0xED73, 0xFA0E,0xED74, 0x549C,0xED75, 0x548A,0xED76, 0x54A9,0xED77, 0x54FF,0xED78, 0x5586,0xED79, 0x5759,0xED7A, 0x5765,0xED7B, 0x57AC,0xED7C, 0x57C8,0xED7D, 0x57C7,0xED7E, 0xFA0F,0xED80, 0xFA10,0xED81, 0x589E,0xED82, 0x58B2,0xED83, 0x590B,0xED84, 0x5953,0xED85, 0x595B,0xED86, 0x595D,0xED87, 0x5963,0xED88, 0x59A4,0xED89, 0x59BA,0xED8A, 0x5B56,0xED8B, 
0x5BC0,0xED8C, 0x752F,0xED8D, 0x5BD8,0xED8E, 0x5BEC,0xED8F, 0x5C1E,0xED90, 0x5CA6,0xED91, 0x5CBA,0xED92, 0x5CF5,0xED93, 0x5D27,0xED94, 0x5D53,0xED95, 0xFA11,0xED96, 0x5D42,0xED97, 0x5D6D,0xED98, 0x5DB8,0xED99, 0x5DB9,0xED9A, 0x5DD0,0xED9B, 0x5F21,0xED9C, 0x5F34,0xED9D, 0x5F67,0xED9E, 0x5FB7,0xED9F, 0x5FDE,0xEDA0, 0x605D,0xEDA1, 0x6085,0xEDA2, 0x608A,0xEDA3, 0x60DE,0xEDA4, 0x60D5,0xEDA5, 0x6120,0xEDA6, 0x60F2,0xEDA7, 0x6111,0xEDA8, 0x6137,0xEDA9, 0x6130,0xEDAA, 0x6198,0xEDAB, 0x6213,0xEDAC, 0x62A6,0xEDAD, 0x63F5,0xEDAE, 0x6460,0xEDAF, 0x649D,0xEDB0, 0x64CE,0xEDB1, 0x654E,0xEDB2, 0x6600,0xEDB3, 0x6615,0xEDB4, 0x663B,0xEDB5, 0x6609,0xEDB6, 0x662E,0xEDB7, 0x661E,0xEDB8, 0x6624,0xEDB9, 0x6665,0xEDBA, 0x6657,0xEDBB, 0x6659,0xEDBC, 0xFA12,0xEDBD, 0x6673,0xEDBE, 0x6699,0xEDBF, 0x66A0,0xEDC0, 0x66B2,0xEDC1, 0x66BF,0xEDC2, 0x66FA,0xEDC3, 0x670E,0xEDC4, 0xF929,0xEDC5, 0x6766,0xEDC6, 0x67BB,0xEDC7, 0x6852,0xEDC8, 0x67C0,0xEDC9, 0x6801,0xEDCA, 0x6844,0xEDCB, 0x68CF,0xEDCC, 0xFA13,0xEDCD, 0x6968,0xEDCE, 0xFA14,0xEDCF, 0x6998,0xEDD0, 0x69E2,0xEDD1, 0x6A30,0xEDD2, 0x6A6B,0xEDD3, 0x6A46,0xEDD4, 0x6A73,0xEDD5, 0x6A7E,0xEDD6, 0x6AE2,0xEDD7, 0x6AE4,0xEDD8, 0x6BD6,0xEDD9, 0x6C3F,0xEDDA, 0x6C5C,0xEDDB, 0x6C86,0xEDDC, 0x6C6F,0xEDDD, 0x6CDA,0xEDDE, 0x6D04,0xEDDF, 0x6D87,0xEDE0, 0x6D6F,0xEDE1, 0x6D96,0xEDE2, 0x6DAC,0xEDE3, 0x6DCF,0xEDE4, 0x6DF8,0xEDE5, 0x6DF2,0xEDE6, 0x6DFC,0xEDE7, 0x6E39,0xEDE8, 0x6E5C,0xEDE9, 0x6E27,0xEDEA, 0x6E3C,0xEDEB, 0x6EBF,0xEDEC, 0x6F88,0xEDED, 0x6FB5,0xEDEE, 0x6FF5,0xEDEF, 0x7005,0xEDF0, 0x7007,0xEDF1, 0x7028,0xEDF2, 0x7085,0xEDF3, 0x70AB,0xEDF4, 0x710F,0xEDF5, 0x7104,0xEDF6, 0x715C,0xEDF7, 0x7146,0xEDF8, 0x7147,0xEDF9, 0xFA15,0xEDFA, 0x71C1,0xEDFB, 0x71FE,0xEDFC, 0x72B1,0xEE40, 0x72BE,0xEE41, 0x7324,0xEE42, 0xFA16,0xEE43, 0x7377,0xEE44, 0x73BD,0xEE45, 0x73C9,0xEE46, 0x73D6,0xEE47, 0x73E3,0xEE48, 0x73D2,0xEE49, 0x7407,0xEE4A, 0x73F5,0xEE4B, 0x7426,0xEE4C, 0x742A,0xEE4D, 0x7429,0xEE4E, 0x742E,0xEE4F, 0x7462,0xEE50, 0x7489,0xEE51, 0x749F,0xEE52, 0x7501,0xEE53, 0x756F,0xEE54, 0x7682,0xEE55, 0x769C,0xEE56, 0x769E,0xEE57, 0x769B,0xEE58, 0x76A6,0xEE59, 0xFA17,0xEE5A, 0x7746,0xEE5B, 0x52AF,0xEE5C, 0x7821,0xEE5D, 0x784E,0xEE5E, 0x7864,0xEE5F, 0x787A,0xEE60, 0x7930,0xEE61, 0xFA18,0xEE62, 0xFA19,0xEE63, 0xFA1A,0xEE64, 0x7994,0xEE65, 0xFA1B,0xEE66, 0x799B,0xEE67, 0x7AD1,0xEE68, 0x7AE7,0xEE69, 0xFA1C,0xEE6A, 0x7AEB,0xEE6B, 0x7B9E,0xEE6C, 0xFA1D,0xEE6D, 0x7D48,0xEE6E, 0x7D5C,0xEE6F, 0x7DB7,0xEE70, 0x7DA0,0xEE71, 0x7DD6,0xEE72, 0x7E52,0xEE73, 0x7F47,0xEE74, 0x7FA1,0xEE75, 0xFA1E,0xEE76, 0x8301,0xEE77, 0x8362,0xEE78, 0x837F,0xEE79, 0x83C7,0xEE7A, 0x83F6,0xEE7B, 0x8448,0xEE7C, 0x84B4,0xEE7D, 0x8553,0xEE7E, 0x8559,0xEE80, 0x856B,0xEE81, 0xFA1F,0xEE82, 0x85B0,0xEE83, 0xFA20,0xEE84, 0xFA21,0xEE85, 0x8807,0xEE86, 0x88F5,0xEE87, 0x8A12,0xEE88, 0x8A37,0xEE89, 0x8A79,0xEE8A, 0x8AA7,0xEE8B, 0x8ABE,0xEE8C, 0x8ADF,0xEE8D, 0xFA22,0xEE8E, 0x8AF6,0xEE8F, 0x8B53,0xEE90, 0x8B7F,0xEE91, 0x8CF0,0xEE92, 0x8CF4,0xEE93, 0x8D12,0xEE94, 0x8D76,0xEE95, 0xFA23,0xEE96, 0x8ECF,0xEE97, 0xFA24,0xEE98, 0xFA25,0xEE99, 0x9067,0xEE9A, 0x90DE,0xEE9B, 0xFA26,0xEE9C, 0x9115,0xEE9D, 0x9127,0xEE9E, 0x91DA,0xEE9F, 0x91D7,0xEEA0, 0x91DE,0xEEA1, 0x91ED,0xEEA2, 0x91EE,0xEEA3, 0x91E4,0xEEA4, 0x91E5,0xEEA5, 0x9206,0xEEA6, 0x9210,0xEEA7, 0x920A,0xEEA8, 0x923A,0xEEA9, 0x9240,0xEEAA, 0x923C,0xEEAB, 0x924E,0xEEAC, 0x9259,0xEEAD, 0x9251,0xEEAE, 0x9239,0xEEAF, 0x9267,0xEEB0, 0x92A7,0xEEB1, 0x9277,0xEEB2, 0x9278,0xEEB3, 0x92E7,0xEEB4, 0x92D7,0xEEB5, 0x92D9,0xEEB6, 0x92D0,0xEEB7, 0xFA27,0xEEB8, 0x92D5,0xEEB9, 0x92E0,0xEEBA, 0x92D3,0xEEBB, 0x9325,0xEEBC, 
0x9321,0xEEBD, 0x92FB,0xEEBE, 0xFA28,0xEEBF, 0x931E,0xEEC0, 0x92FF,0xEEC1, 0x931D,0xEEC2, 0x9302,0xEEC3, 0x9370,0xEEC4, 0x9357,0xEEC5, 0x93A4,0xEEC6, 0x93C6,0xEEC7, 0x93DE,0xEEC8, 0x93F8,0xEEC9, 0x9431,0xEECA, 0x9445,0xEECB, 0x9448,0xEECC, 0x9592,0xEECD, 0xF9DC,0xEECE, 0xFA29,0xEECF, 0x969D,0xEED0, 0x96AF,0xEED1, 0x9733,0xEED2, 0x973B,0xEED3, 0x9743,0xEED4, 0x974D,0xEED5, 0x974F,0xEED6, 0x9751,0xEED7, 0x9755,0xEED8, 0x9857,0xEED9, 0x9865,0xEEDA, 0xFA2A,0xEEDB, 0xFA2B,0xEEDC, 0x9927,0xEEDD, 0xFA2C,0xEEDE, 0x999E,0xEEDF, 0x9A4E,0xEEE0, 0x9AD9,0xEEE1, 0x9ADC,0xEEE2, 0x9B75,0xEEE3, 0x9B72,0xEEE4, 0x9B8F,0xEEE5, 0x9BB1,0xEEE6, 0x9BBB,0xEEE7, 0x9C00,0xEEE8, 0x9D70,0xEEE9, 0x9D6B,0xEEEA, 0xFA2D,0xEEEB, 0x9E19,0xEEEC, 0x9ED1,0xEEEF, 0x2170,0xEEF0, 0x2171,0xEEF1, 0x2172,0xEEF2, 0x2173,0xEEF3, 0x2174,0xEEF4, 0x2175,0xEEF5, 0x2176,0xEEF6, 0x2177,0xEEF7, 0x2178,0xEEF8, 0x2179,0xEEF9, 0xFFE2,0xEEFA, 0xFFE4,0xEEFB, 0xFF07,0xEEFC, 0xFF02,0xFA40, 0x2170,0xFA41, 0x2171,0xFA42, 0x2172,0xFA43, 0x2173,0xFA44, 0x2174,0xFA45, 0x2175,0xFA46, 0x2176,0xFA47, 0x2177,0xFA48, 0x2178,0xFA49, 0x2179,0xFA4A, 0x2160,0xFA4B, 0x2161,0xFA4C, 0x2162,0xFA4D, 0x2163,0xFA4E, 0x2164,0xFA4F, 0x2165,0xFA50, 0x2166,0xFA51, 0x2167,0xFA52, 0x2168,0xFA53, 0x2169,0xFA54, 0xFFE2,0xFA55, 0xFFE4,0xFA56, 0xFF07,0xFA57, 0xFF02,0xFA58, 0x3231,0xFA59, 0x2116,0xFA5A, 0x2121,0xFA5B, 0x2235,0xFA5C, 0x7E8A,0xFA5D, 0x891C,0xFA5E, 0x9348,0xFA5F, 0x9288,0xFA60, 0x84DC,0xFA61, 0x4FC9,0xFA62, 0x70BB,0xFA63, 0x6631,0xFA64, 0x68C8,0xFA65, 0x92F9,0xFA66, 0x66FB,0xFA67, 0x5F45,0xFA68, 0x4E28,0xFA69, 0x4EE1,0xFA6A, 0x4EFC,0xFA6B, 0x4F00,0xFA6C, 0x4F03,0xFA6D, 0x4F39,0xFA6E, 0x4F56,0xFA6F, 0x4F92,0xFA70, 0x4F8A,0xFA71, 0x4F9A,0xFA72, 0x4F94,0xFA73, 0x4FCD,0xFA74, 0x5040,0xFA75, 0x5022,0xFA76, 0x4FFF,0xFA77, 0x501E,0xFA78, 0x5046,0xFA79, 0x5070,0xFA7A, 0x5042,0xFA7B, 0x5094,0xFA7C, 0x50F4,0xFA7D, 0x50D8,0xFA7E, 0x514A,0xFA80, 0x5164,0xFA81, 0x519D,0xFA82, 0x51BE,0xFA83, 0x51EC,0xFA84, 0x5215,0xFA85, 0x529C,0xFA86, 0x52A6,0xFA87, 0x52C0,0xFA88, 0x52DB,0xFA89, 0x5300,0xFA8A, 0x5307,0xFA8B, 0x5324,0xFA8C, 0x5372,0xFA8D, 0x5393,0xFA8E, 0x53B2,0xFA8F, 0x53DD,0xFA90, 0xFA0E,0xFA91, 0x549C,0xFA92, 0x548A,0xFA93, 0x54A9,0xFA94, 0x54FF,0xFA95, 0x5586,0xFA96, 0x5759,0xFA97, 0x5765,0xFA98, 0x57AC,0xFA99, 0x57C8,0xFA9A, 0x57C7,0xFA9B, 0xFA0F,0xFA9C, 0xFA10,0xFA9D, 0x589E,0xFA9E, 0x58B2,0xFA9F, 0x590B,0xFAA0, 0x5953,0xFAA1, 0x595B,0xFAA2, 0x595D,0xFAA3, 0x5963,0xFAA4, 0x59A4,0xFAA5, 0x59BA,0xFAA6, 0x5B56,0xFAA7, 0x5BC0,0xFAA8, 0x752F,0xFAA9, 0x5BD8,0xFAAA, 0x5BEC,0xFAAB, 0x5C1E,0xFAAC, 0x5CA6,0xFAAD, 0x5CBA,0xFAAE, 0x5CF5,0xFAAF, 0x5D27,0xFAB0, 0x5D53,0xFAB1, 0xFA11,0xFAB2, 0x5D42,0xFAB3, 0x5D6D,0xFAB4, 0x5DB8,0xFAB5, 0x5DB9,0xFAB6, 0x5DD0,0xFAB7, 0x5F21,0xFAB8, 0x5F34,0xFAB9, 0x5F67,0xFABA, 0x5FB7,0xFABB, 0x5FDE,0xFABC, 0x605D,0xFABD, 0x6085,0xFABE, 0x608A,0xFABF, 0x60DE,0xFAC0, 0x60D5,0xFAC1, 0x6120,0xFAC2, 0x60F2,0xFAC3, 0x6111,0xFAC4, 0x6137,0xFAC5, 0x6130,0xFAC6, 0x6198,0xFAC7, 0x6213,0xFAC8, 0x62A6,0xFAC9, 0x63F5,0xFACA, 0x6460,0xFACB, 0x649D,0xFACC, 0x64CE,0xFACD, 0x654E,0xFACE, 0x6600,0xFACF, 0x6615,0xFAD0, 0x663B,0xFAD1, 0x6609,0xFAD2, 0x662E,0xFAD3, 0x661E,0xFAD4, 0x6624,0xFAD5, 0x6665,0xFAD6, 0x6657,0xFAD7, 0x6659,0xFAD8, 0xFA12,0xFAD9, 0x6673,0xFADA, 0x6699,0xFADB, 0x66A0,0xFADC, 0x66B2,0xFADD, 0x66BF,0xFADE, 0x66FA,0xFADF, 0x670E,0xFAE0, 0xF929,0xFAE1, 0x6766,0xFAE2, 0x67BB,0xFAE3, 0x6852,0xFAE4, 0x67C0,0xFAE5, 0x6801,0xFAE6, 0x6844,0xFAE7, 0x68CF,0xFAE8, 0xFA13,0xFAE9, 0x6968,0xFAEA, 0xFA14,0xFAEB, 0x6998,0xFAEC, 0x69E2,0xFAED, 0x6A30,0xFAEE, 0x6A6B,0xFAEF, 
0x6A46,0xFAF0, 0x6A73,0xFAF1, 0x6A7E,0xFAF2, 0x6AE2,0xFAF3, 0x6AE4,0xFAF4, 0x6BD6,0xFAF5, 0x6C3F,0xFAF6, 0x6C5C,0xFAF7, 0x6C86,0xFAF8, 0x6C6F,0xFAF9, 0x6CDA,0xFAFA, 0x6D04,0xFAFB, 0x6D87,0xFAFC, 0x6D6F,0xFB40, 0x6D96,0xFB41, 0x6DAC,0xFB42, 0x6DCF,0xFB43, 0x6DF8,0xFB44, 0x6DF2,0xFB45, 0x6DFC,0xFB46, 0x6E39,0xFB47, 0x6E5C,0xFB48, 0x6E27,0xFB49, 0x6E3C,0xFB4A, 0x6EBF,0xFB4B, 0x6F88,0xFB4C, 0x6FB5,0xFB4D, 0x6FF5,0xFB4E, 0x7005,0xFB4F, 0x7007,0xFB50, 0x7028,0xFB51, 0x7085,0xFB52, 0x70AB,0xFB53, 0x710F,0xFB54, 0x7104,0xFB55, 0x715C,0xFB56, 0x7146,0xFB57, 0x7147,0xFB58, 0xFA15,0xFB59, 0x71C1,0xFB5A, 0x71FE,0xFB5B, 0x72B1,0xFB5C, 0x72BE,0xFB5D, 0x7324,0xFB5E, 0xFA16,0xFB5F, 0x7377,0xFB60, 0x73BD,0xFB61, 0x73C9,0xFB62, 0x73D6,0xFB63, 0x73E3,0xFB64, 0x73D2,0xFB65, 0x7407,0xFB66, 0x73F5,0xFB67, 0x7426,0xFB68, 0x742A,0xFB69, 0x7429,0xFB6A, 0x742E,0xFB6B, 0x7462,0xFB6C, 0x7489,0xFB6D, 0x749F,0xFB6E, 0x7501,0xFB6F, 0x756F,0xFB70, 0x7682,0xFB71, 0x769C,0xFB72, 0x769E,0xFB73, 0x769B,0xFB74, 0x76A6,0xFB75, 0xFA17,0xFB76, 0x7746,0xFB77, 0x52AF,0xFB78, 0x7821,0xFB79, 0x784E,0xFB7A, 0x7864,0xFB7B, 0x787A,0xFB7C, 0x7930,0xFB7D, 0xFA18,0xFB7E, 0xFA19,0xFB80, 0xFA1A,0xFB81, 0x7994,0xFB82, 0xFA1B,0xFB83, 0x799B,0xFB84, 0x7AD1,0xFB85, 0x7AE7,0xFB86, 0xFA1C,0xFB87, 0x7AEB,0xFB88, 0x7B9E,0xFB89, 0xFA1D,0xFB8A, 0x7D48,0xFB8B, 0x7D5C,0xFB8C, 0x7DB7,0xFB8D, 0x7DA0,0xFB8E, 0x7DD6,0xFB8F, 0x7E52,0xFB90, 0x7F47,0xFB91, 0x7FA1,0xFB92, 0xFA1E,0xFB93, 0x8301,0xFB94, 0x8362,0xFB95, 0x837F,0xFB96, 0x83C7,0xFB97, 0x83F6,0xFB98, 0x8448,0xFB99, 0x84B4,0xFB9A, 0x8553,0xFB9B, 0x8559,0xFB9C, 0x856B,0xFB9D, 0xFA1F,0xFB9E, 0x85B0,0xFB9F, 0xFA20,0xFBA0, 0xFA21,0xFBA1, 0x8807,0xFBA2, 0x88F5,0xFBA3, 0x8A12,0xFBA4, 0x8A37,0xFBA5, 0x8A79,0xFBA6, 0x8AA7,0xFBA7, 0x8ABE,0xFBA8, 0x8ADF,0xFBA9, 0xFA22,0xFBAA, 0x8AF6,0xFBAB, 0x8B53,0xFBAC, 0x8B7F,0xFBAD, 0x8CF0,0xFBAE, 0x8CF4,0xFBAF, 0x8D12,0xFBB0, 0x8D76,0xFBB1, 0xFA23,0xFBB2, 0x8ECF,0xFBB3, 0xFA24,0xFBB4, 0xFA25,0xFBB5, 0x9067,0xFBB6, 0x90DE,0xFBB7, 0xFA26,0xFBB8, 0x9115,0xFBB9, 0x9127,0xFBBA, 0x91DA,0xFBBB, 0x91D7,0xFBBC, 0x91DE,0xFBBD, 0x91ED,0xFBBE, 0x91EE,0xFBBF, 0x91E4,0xFBC0, 0x91E5,0xFBC1, 0x9206,0xFBC2, 0x9210,0xFBC3, 0x920A,0xFBC4, 0x923A,0xFBC5, 0x9240,0xFBC6, 0x923C,0xFBC7, 0x924E,0xFBC8, 0x9259,0xFBC9, 0x9251,0xFBCA, 0x9239,0xFBCB, 0x9267,0xFBCC, 0x92A7,0xFBCD, 0x9277,0xFBCE, 0x9278,0xFBCF, 0x92E7,0xFBD0, 0x92D7,0xFBD1, 0x92D9,0xFBD2, 0x92D0,0xFBD3, 0xFA27,0xFBD4, 0x92D5,0xFBD5, 0x92E0,0xFBD6, 0x92D3,0xFBD7, 0x9325,0xFBD8, 0x9321,0xFBD9, 0x92FB,0xFBDA, 0xFA28,0xFBDB, 0x931E,0xFBDC, 0x92FF,0xFBDD, 0x931D,0xFBDE, 0x9302,0xFBDF, 0x9370,0xFBE0, 0x9357,0xFBE1, 0x93A4,0xFBE2, 0x93C6,0xFBE3, 0x93DE,0xFBE4, 0x93F8,0xFBE5, 0x9431,0xFBE6, 0x9445,0xFBE7, 0x9448,0xFBE8, 0x9592,0xFBE9, 0xF9DC,0xFBEA, 0xFA29,0xFBEB, 0x969D,0xFBEC, 0x96AF,0xFBED, 0x9733,0xFBEE, 0x973B,0xFBEF, 0x9743,0xFBF0, 0x974D,0xFBF1, 0x974F,0xFBF2, 0x9751,0xFBF3, 0x9755,0xFBF4, 0x9857,0xFBF5, 0x9865,0xFBF6, 0xFA2A,0xFBF7, 0xFA2B,0xFBF8, 0x9927,0xFBF9, 0xFA2C,0xFBFA, 0x999E,0xFBFB, 0x9A4E,0xFBFC, 0x9AD9,0xFC40, 0x9ADC,0xFC41, 0x9B75,0xFC42, 0x9B72,0xFC43, 0x9B8F,0xFC44, 0x9BB1,0xFC45, 0x9BBB,0xFC46, 0x9C00,0xFC47, 0x9D70,0xFC48, 0x9D6B,0xFC49, 0xFA2D,0xFC4A, 0x9E19,0xFC4B, 0x9ED1,]\nvar decoding_table = [],\n encoding_table = []\nfor(var i = 0, len = _table.length; i < len; i += 2){\nvar value = _table[i + 1]\nif(value !== null){\n encoding_table[value] = _table[i]\n}\ndecoding_table[_table[i]] = _table[i + 1]\n}\n__BRYTHON__.imported.encoding_cp932 = {encoding_table, decoding_table}\n"], "_jsre": [".js", "(function($B){\n\n 
var _b_ = $B.builtins\n\n var MatchObject = $B.make_class(\"Match\",\n function(jsmatch, string, pattern){\n return {\n __class__: MatchObject,\n jsmatch: jsmatch,\n string: string\n }\n }\n )\n MatchObject.item = function(self, rank){\n return self.jsmatch[rank]\n }\n MatchObject.group = function(self){\n var res = []\n for(var i = 0, _len_i = arguments.length; i < _len_i; i++){\n if(self.jsmatch[arguments[i]] === undefined){res.push(_b_.None)}\n else{res.push(self.jsmatch[arguments[i]])}\n }\n if(arguments.length == 1){return res[0]}\n return _b_.tuple.$factory(res)\n }\n MatchObject.groups = function(self, _default){\n if(_default === undefined){_default = _b_.None}\n var res = []\n for(var i = 1, _len_i = self.length; i < _len_i; i++){\n if(self.jsmatch[i] === undefined){res.push(_default)}\n else{res.push(self.jsmatch[i])}\n }\n return _b_.tuple.$factory(res)\n }\n MatchObject.start = function(self){\n return self.index\n }\n MatchObject.end = function(self){\n return self.length - self.index\n }\n\n $B.set_func_names(MatchObject, '_jsre')\n\n var obj = {\n __str__: function(){return \"\"}\n }\n obj.A = obj.ASCII = 256\n obj.I = obj.IGNORECASE = 2 // 'i'\n obj.L = obj.LOCALE = 4\n obj.M = obj.MULTILINE = 8 // 'm'\n obj.S = obj.DOTALL = 16\n obj.U = obj.UNICODE = 32\n obj.X = obj.VERBOSE = 64\n obj._is_valid = function(pattern) {\n if ($B.$options.re == 'pyre'){return false} //force use of python's re module\n if ($B.$options.re == 'jsre'){return true} //force use of brythons re module\n // FIXME: Improve\n\n if(! $B.$isinstance(pattern, _b_.str)){\n // this is probably a SRE_PATTERN, so return false, and let\n // python's re module handle this.\n return false\n }\n var is_valid = false\n try{\n new RegExp(pattern)\n is_valid = true\n }\n catch(e){}\n if(! is_valid){return false} //if js won't parse the pattern return false\n\n // using reference http://www.regular-expressions.info/\n // to compare python re and javascript regex libraries\n\n // look for things javascript does not support\n // check for name capturing group\n var mylist = ['?P=', '?P<', '(?#', '(?<=', '(? -1) return false\n }\n\n var re_list=['\\{,\\d+\\}']\n for(var i=0, _len_i = re_list.length; i < _len_i; i++) {\n var _re = new RegExp(re_list[i])\n if (_re.test(pattern)){return false}\n }\n\n // it looks like the pattern has passed all our tests so lets assume\n // javascript can handle this pattern.\n return true\n }\n var $SRE_PatternDict = {\n __class__:_b_.type,\n $infos:{\n __name__:'SRE_Pattern'\n }\n }\n $SRE_PatternDict.__mro__ = [_b_.object]\n $SRE_PatternDict.findall = function(self, string){\n return obj.findall(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.finditer = function(self, string){\n return obj.finditer(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.match = function(self, string){\n return obj.match(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.search = function(self, string){\n return obj.search(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.sub = function(self,repl,string){\n return obj.sub(self.pattern,repl,string,self.flags)\n }\n $B.set_func_names($SRE_PatternDict, \"_jsre\")\n // TODO: groups\n // TODO: groupindex\n function normflags(flags){\n return ((flags & obj.I)? 'i' : '') + ((flags & obj.M)? 
'm' : '');\n }\n // TODO: fullmatch()\n // TODO: split()\n // TODO: subn()\n obj.compile = function(pattern, flags){\n return {\n __class__: $SRE_PatternDict,\n pattern: pattern,\n flags: normflags(flags)\n }\n }\n obj.escape = function(string){\n // Escape all the characters in pattern except ASCII letters, numbers\n // and '_'. This is useful if you want to match an arbitrary literal\n // string that may have regular expression metacharacters in it.\n var res = ''\n var ok = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'\n for(var i = 0, _len_i = string.length; i < _len_i; i++){\n if(ok.search(string.charAt(i))>-1){res += string.charAt(i)}\n }\n return res\n }\n obj.findall = function(pattern, string, flags){\n var $ns=$B.args('re.findall', 2,\n {pattern:null, string:null}, ['pattern', 'string'],\n arguments,{}, 'args', 'kw') ,\n args = $ns['args'] ,\n _flags = 0;\n if(args.length>0){var flags = args[0]}\n else{var _flags = $B.$getattr($ns['kw'], 'get')('flags', 0)}\n\n var flags = normflags()\n flags += 'gm'\n var jsp = new RegExp(pattern,flags),\n jsmatch = string.match(jsp)\n if(jsmatch === null){return []}\n return jsmatch\n }\n obj.finditer = function(pattern, string, flags){\n var $ns=$B.args('re.finditer', 2,\n {pattern:null, string:null}, ['pattern', 'string'],\n arguments,{},'args','kw'),\n args = $ns['args'],\n _flags = 0;\n if(args.length>0){var flags=args[0]}\n else{var _flags = $B.$getattr($ns['kw'], 'get')('flags', 0)}\n\n var flags = normflags()\n flags += 'gm'\n var jsp = new RegExp(pattern, flags),\n jsmatch = string.match(jsp);\n if(jsmatch === null){return []}\n\n var _list = []\n for(var j = 0, _len_j = jsmatch.length; j < _len_j; j++) {\n var mo = {}\n mo._match=jsmatch[j]\n mo.group = function(){\n var res = []\n for(var i=0, _len_i = arguments.length; i < _len_i;i++){\n if(jsmatch[arguments[i]] === undefined){res.push(_b_.None)}\n else{res.push(jsmatch[arguments[i]])}\n }\n if(arguments.length == 1){return res[0]}\n return _b_.tuple.$factory(res)\n }\n mo.groups = function(_default){\n if(_default === undefined){_default = _b_.None}\n var res = []\n for(var i = 1, _len_i = jsmatch.length; i < _len_i; i++){\n if(jsmatch[i] === undefined){res.push(_default)}\n else{res.push(jsmatch[i])}\n }\n return _b_.tuple.$factory(res)\n }\n mo.start = function(){return mo._match.index}\n mo.end = function(){return mo._match.length - mo._match.index}\n mo.string = string\n _list.push(mo)\n }\n return _list\n }\n obj.search = function(pattern, string){\n var $ns = $B.args('re.search', 2,\n {pattern:null, string:null},['pattern', 'string'],\n arguments, {}, 'args', 'kw')\n var args = $ns['args']\n if(args.length>0){var flags = args[0]}\n else{var flags = $B.$getattr($ns['kw'], 'get')('flags', '')}\n flags = normflags(flags)\n var jsp = new RegExp(pattern, flags)\n var jsmatch = string.match(jsp)\n if(jsmatch === null){return _b_.None}\n return MatchObject.$factory(jsmatch, string, pattern)\n }\n obj.sub = function(pattern, repl, string){\n var $ns=$B.args('re.search', 3,\n {pattern: null, repl: null, string: null},\n ['pattern', 'repl', 'string'],\n arguments,{}, 'args', 'kw')\n for($var in $ns){eval(\"var \" + $var + \"=$ns[$var]\")}\n var args = $ns['args']\n var count = _b_.dict.get($ns['kw'], 'count', 0)\n var flags = _b_.dict.get($ns['kw'], 'flags', '')\n if(args.length > 0){var count = args[0]}\n if(args.length > 1){var flags = args[1]}\n flags = normflags(flags)\n if(typeof repl == \"string\"){\n // backreferences are \\1, \\2... in Python but $1,$2... 
in Javascript\n repl = repl.replace(/\\\\(\\d+)/g, '$$$1')\n }else if(typeof repl == \"function\"){\n // the argument passed to the Python function is the match object\n // the arguments passed to the Javascript function are :\n // - the matched substring\n // - the matched groups\n // - the offset of the matched substring inside the string\n // - the string being examined\n var $repl1 = function(){\n var mo = Object()\n mo.string = arguments[arguments.length - 1]\n var matched = arguments[0];\n var start = arguments[arguments.length - 2]\n var end = start + matched.length\n mo.start = function(){return start}\n mo.end = function(){return end}\n groups = []\n for(var i = 1, _len_i = arguments.length-2; i < _len_i; i++){\n groups.push(arguments[i])\n }\n mo.groups = function(_default){\n if(_default === undefined){_default = _b_.None}\n var res = []\n for(var i = 0, _len_i = groups.length; i < _len_i; i++){\n if(groups[i] === undefined){res.push(_default)}\n else{res.push(groups[i])}\n }\n return res\n }\n mo.group = function(i){\n if(i==0){return matched}\n return groups[i-1]\n }\n return repl(mo)\n }\n }\n if(count == 0){flags += 'g'}\n var jsp = new RegExp(pattern, flags)\n if(typeof repl == 'function'){return string.replace(jsp, $repl1)}\n else{return string.replace(jsp, repl)}\n }\n obj.match = (function(search_func){\n return function(){\n // match is like search but pattern must start with ^\n var pattern = arguments[0]\n if(pattern.charAt(0) != '^'){pattern = '^'+pattern}\n var args = [pattern]\n for(var i = 1, _len_i = arguments.length; i < _len_i; i++){\n args.push(arguments[i])\n }\n return search_func.apply(null, args)\n }\n })(obj.search)\n\n $B.addToImported('_jsre', obj)\n}\n)(__BRYTHON__)\n"], "crypto_js": [".py", "", [], 1], "crypto_js.rollups": [".py", "", [], 1], "crypto_js.rollups.sha1": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(e,m){var p={},j=p.lib={},l=function(){},f=j.Base={extend:function(a){l.prototype=this;var c=new l;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nn=j.WordArray=f.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=m?c:4*a.length},toString:function(a){return(a||h).stringify(this)},concat:function(a){var c=this.words,q=a.words,d=this.sigBytes;a=a.sigBytes;this.clamp();if(d%4)for(var b=0;b>>2]|=(q[b>>>2]>>>24-8*(b%4)&255)<<24-8*((d+b)%4);else if(65535>>2]=q[b>>>2];else c.push.apply(c,q);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=e.ceil(c/4)},clone:function(){var a=f.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],b=0;b>>2]>>>24-8*(d%4)&255;b.push((f>>>4).toString(16));b.push((f&15).toString(16))}return b.join(\"\")},parse:function(a){for(var c=a.length,b=[],d=0;d>>3]|=parseInt(a.substr(d,\n2),16)<<24-4*(d%8);return new n.init(b,c/2)}},g=b.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var b=[],d=0;d>>2]>>>24-8*(d%4)&255));return b.join(\"\")},parse:function(a){for(var c=a.length,b=[],d=0;d>>2]|=(a.charCodeAt(d)&255)<<24-8*(d%4);return new n.init(b,c)}},r=b.Utf8={stringify:function(a){try{return decodeURIComponent(escape(g.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return g.parse(unescape(encodeURIComponent(a)))}},\nk=j.BufferedBlockAlgorithm=f.extend({reset:function(){this._data=new n.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=r.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,b=c.words,d=c.sigBytes,f=this.blockSize,h=d/(4*f),h=a?e.ceil(h):e.max((h|0)-this._minBufferSize,0);a=h*f;d=e.min(4*a,d);if(a){for(var g=0;ga;a++){if(16>a)l[a]=f[n+a]|0;else{var c=l[a-3]^l[a-8]^l[a-14]^l[a-16];l[a]=c<<1|c>>>31}c=(h<<5|h>>>27)+j+l[a];c=20>a?c+((g&e|~g&k)+1518500249):40>a?c+((g^e^k)+1859775393):60>a?c+((g&e|g&k|e&k)-1894007588):c+((g^e^\nk)-899497514);j=k;k=e;e=g<<30|g>>>2;g=h;h=c}b[0]=b[0]+h|0;b[1]=b[1]+g|0;b[2]=b[2]+e|0;b[3]=b[3]+k|0;b[4]=b[4]+j|0},_doFinalize:function(){var f=this._data,e=f.words,b=8*this._nDataBytes,h=8*f.sigBytes;e[h>>>5]|=128<<24-h%32;e[(h+64>>>9<<4)+14]=Math.floor(b/4294967296);e[(h+64>>>9<<4)+15]=b;f.sigBytes=4*e.length;this._process();return this._hash},clone:function(){var e=j.clone.call(this);e._hash=this._hash.clone();return e}});e.SHA1=j._createHelper(m);e.HmacSHA1=j._createHmacHelper(m)})();\n"], "crypto_js.rollups.sha256": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(h,s){var f={},t=f.lib={},g=function(){},j=t.Base={extend:function(a){g.prototype=this;var c=new g;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=t.WordArray=j.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=s?c:4*a.length},toString:function(a){return(a||u).stringify(this)},concat:function(a){var c=this.words,d=a.words,b=this.sigBytes;a=a.sigBytes;this.clamp();if(b%4)for(var e=0;e>>2]|=(d[e>>>2]>>>24-8*(e%4)&255)<<24-8*((b+e)%4);else if(65535>>2]=d[e>>>2];else c.push.apply(c,d);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=h.ceil(c/4)},clone:function(){var a=j.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],d=0;d>>2]>>>24-8*(b%4)&255;d.push((e>>>4).toString(16));d.push((e&15).toString(16))}return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>3]|=parseInt(a.substr(b,\n2),16)<<24-4*(b%8);return new q.init(d,c/2)}},k=v.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var d=[],b=0;b>>2]>>>24-8*(b%4)&255));return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>2]|=(a.charCodeAt(b)&255)<<24-8*(b%4);return new q.init(d,c)}},l=v.Utf8={stringify:function(a){try{return decodeURIComponent(escape(k.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return k.parse(unescape(encodeURIComponent(a)))}},\nx=t.BufferedBlockAlgorithm=j.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=l.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,d=c.words,b=c.sigBytes,e=this.blockSize,f=b/(4*e),f=a?h.ceil(f):h.max((f|0)-this._minBufferSize,0);a=f*e;b=h.min(4*a,b);if(a){for(var m=0;mk;){var l;a:{l=u;for(var x=h.sqrt(l),w=2;w<=x;w++)if(!(l%w)){l=!1;break a}l=!0}l&&(8>k&&(j[k]=v(h.pow(u,0.5))),q[k]=v(h.pow(u,1/3)),k++);u++}var a=[],f=f.SHA256=g.extend({_doReset:function(){this._hash=new t.init(j.slice(0))},_doProcessBlock:function(c,d){for(var b=this._hash.words,e=b[0],f=b[1],m=b[2],h=b[3],p=b[4],j=b[5],k=b[6],l=b[7],n=0;64>n;n++){if(16>n)a[n]=\nc[d+n]|0;else{var r=a[n-15],g=a[n-2];a[n]=((r<<25|r>>>7)^(r<<14|r>>>18)^r>>>3)+a[n-7]+((g<<15|g>>>17)^(g<<13|g>>>19)^g>>>10)+a[n-16]}r=l+((p<<26|p>>>6)^(p<<21|p>>>11)^(p<<7|p>>>25))+(p&j^~p&k)+q[n]+a[n];g=((e<<30|e>>>2)^(e<<19|e>>>13)^(e<<10|e>>>22))+(e&f^e&m^f&m);l=k;k=j;j=p;p=h+r|0;h=m;m=f;f=e;e=r+g|0}b[0]=b[0]+e|0;b[1]=b[1]+f|0;b[2]=b[2]+m|0;b[3]=b[3]+h|0;b[4]=b[4]+p|0;b[5]=b[5]+j|0;b[6]=b[6]+k|0;b[7]=b[7]+l|0},_doFinalize:function(){var a=this._data,d=a.words,b=8*this._nDataBytes,e=8*a.sigBytes;\nd[e>>>5]|=128<<24-e%32;d[(e+64>>>9<<4)+14]=h.floor(b/4294967296);d[(e+64>>>9<<4)+15]=b;a.sigBytes=4*d.length;this._process();return this._hash},clone:function(){var a=g.clone.call(this);a._hash=this._hash.clone();return a}});s.SHA256=g._createHelper(f);s.HmacSHA256=g._createHmacHelper(f)})(Math);\n"], "crypto_js.rollups.sha384": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by 
Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(a,c){var d={},j=d.lib={},f=function(){},m=j.Base={extend:function(a){f.prototype=this;var b=new f;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nB=j.WordArray=m.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=c?b:4*a.length},toString:function(a){return(a||y).stringify(this)},concat:function(a){var b=this.words,g=a.words,e=this.sigBytes;a=a.sigBytes;this.clamp();if(e%4)for(var k=0;k>>2]|=(g[k>>>2]>>>24-8*(k%4)&255)<<24-8*((e+k)%4);else if(65535>>2]=g[k>>>2];else b.push.apply(b,g);this.sigBytes+=a;return this},clamp:function(){var n=this.words,b=this.sigBytes;n[b>>>2]&=4294967295<<\n32-8*(b%4);n.length=a.ceil(b/4)},clone:function(){var a=m.clone.call(this);a.words=this.words.slice(0);return a},random:function(n){for(var b=[],g=0;g>>2]>>>24-8*(e%4)&255;g.push((k>>>4).toString(16));g.push((k&15).toString(16))}return g.join(\"\")},parse:function(a){for(var b=a.length,g=[],e=0;e>>3]|=parseInt(a.substr(e,\n2),16)<<24-4*(e%8);return new B.init(g,b/2)}},F=v.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var g=[],e=0;e>>2]>>>24-8*(e%4)&255));return g.join(\"\")},parse:function(a){for(var b=a.length,g=[],e=0;e>>2]|=(a.charCodeAt(e)&255)<<24-8*(e%4);return new B.init(g,b)}},ha=v.Utf8={stringify:function(a){try{return decodeURIComponent(escape(F.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return F.parse(unescape(encodeURIComponent(a)))}},\nZ=j.BufferedBlockAlgorithm=m.extend({reset:function(){this._data=new B.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=ha.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(n){var b=this._data,g=b.words,e=b.sigBytes,k=this.blockSize,m=e/(4*k),m=n?a.ceil(m):a.max((m|0)-this._minBufferSize,0);n=m*k;e=a.min(4*n,e);if(n){for(var c=0;cy;y++)v[y]=a();j=j.SHA512=d.extend({_doReset:function(){this._hash=new m.init([new f.init(1779033703,4089235720),new f.init(3144134277,2227873595),new f.init(1013904242,4271175723),new f.init(2773480762,1595750129),new f.init(1359893119,2917565137),new f.init(2600822924,725511199),new f.init(528734635,4215389547),new f.init(1541459225,327033209)])},_doProcessBlock:function(a,c){for(var d=this._hash.words,\nf=d[0],j=d[1],b=d[2],g=d[3],e=d[4],k=d[5],m=d[6],d=d[7],y=f.high,M=f.low,$=j.high,N=j.low,aa=b.high,O=b.low,ba=g.high,P=g.low,ca=e.high,Q=e.low,da=k.high,R=k.low,ea=m.high,S=m.low,fa=d.high,T=d.low,s=y,p=M,G=$,D=N,H=aa,E=O,W=ba,I=P,t=ca,q=Q,U=da,J=R,V=ea,K=S,X=fa,L=T,u=0;80>u;u++){var z=v[u];if(16>u)var r=z.high=a[c+2*u]|0,h=z.low=a[c+2*u+1]|0;else{var r=v[u-15],h=r.high,w=r.low,r=(h>>>1|w<<31)^(h>>>8|w<<24)^h>>>7,w=(w>>>1|h<<31)^(w>>>8|h<<24)^(w>>>7|h<<25),C=v[u-2],h=C.high,l=C.low,C=(h>>>19|l<<\n13)^(h<<3|l>>>29)^h>>>6,l=(l>>>19|h<<13)^(l<<3|h>>>29)^(l>>>6|h<<26),h=v[u-7],Y=h.high,A=v[u-16],x=A.high,A=A.low,h=w+h.low,r=r+Y+(h>>>0>>0?1:0),h=h+l,r=r+C+(h>>>0>>0?1:0),h=h+A,r=r+x+(h>>>0>>0?1:0);z.high=r;z.low=h}var 
Y=t&U^~t&V,A=q&J^~q&K,z=s&G^s&H^G&H,ja=p&D^p&E^D&E,w=(s>>>28|p<<4)^(s<<30|p>>>2)^(s<<25|p>>>7),C=(p>>>28|s<<4)^(p<<30|s>>>2)^(p<<25|s>>>7),l=B[u],ka=l.high,ga=l.low,l=L+((q>>>14|t<<18)^(q>>>18|t<<14)^(q<<23|t>>>9)),x=X+((t>>>14|q<<18)^(t>>>18|q<<14)^(t<<23|q>>>9))+(l>>>0<\nL>>>0?1:0),l=l+A,x=x+Y+(l>>>0>>0?1:0),l=l+ga,x=x+ka+(l>>>0>>0?1:0),l=l+h,x=x+r+(l>>>0>>0?1:0),h=C+ja,z=w+z+(h>>>0>>0?1:0),X=V,L=K,V=U,K=J,U=t,J=q,q=I+l|0,t=W+x+(q>>>0>>0?1:0)|0,W=H,I=E,H=G,E=D,G=s,D=p,p=l+h|0,s=x+z+(p>>>0>>0?1:0)|0}M=f.low=M+p;f.high=y+s+(M>>>0
>>0?1:0);N=j.low=N+D;j.high=$+G+(N>>>0>>0?1:0);O=b.low=O+E;b.high=aa+H+(O>>>0>>0?1:0);P=g.low=P+I;g.high=ba+W+(P>>>0>>0?1:0);Q=e.low=Q+q;e.high=ca+t+(Q>>>0>>0?1:0);R=k.low=R+J;k.high=da+U+(R>>>0>>0?1:0);\nS=m.low=S+K;m.high=ea+V+(S>>>0>>0?1:0);T=d.low=T+L;d.high=fa+X+(T>>>0>>0?1:0)},_doFinalize:function(){var a=this._data,c=a.words,d=8*this._nDataBytes,f=8*a.sigBytes;c[f>>>5]|=128<<24-f%32;c[(f+128>>>10<<5)+30]=Math.floor(d/4294967296);c[(f+128>>>10<<5)+31]=d;a.sigBytes=4*c.length;this._process();return this._hash.toX32()},clone:function(){var a=d.clone.call(this);a._hash=this._hash.clone();return a},blockSize:32});c.SHA512=d._createHelper(j);c.HmacSHA512=d._createHmacHelper(j)})();\n(function(){var a=CryptoJS,c=a.x64,d=c.Word,j=c.WordArray,c=a.algo,f=c.SHA512,c=c.SHA384=f.extend({_doReset:function(){this._hash=new j.init([new d.init(3418070365,3238371032),new d.init(1654270250,914150663),new d.init(2438529370,812702999),new d.init(355462360,4144912697),new d.init(1731405415,4290775857),new d.init(2394180231,1750603025),new d.init(3675008525,1694076839),new d.init(1203062813,3204075428)])},_doFinalize:function(){var a=f._doFinalize.call(this);a.sigBytes-=16;return a}});a.SHA384=\nf._createHelper(c);a.HmacSHA384=f._createHmacHelper(c)})();\n"], "crypto_js.rollups.sha512": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(a,m){var r={},f=r.lib={},g=function(){},l=f.Base={extend:function(a){g.prototype=this;var b=new g;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\np=f.WordArray=l.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=m?b:4*a.length},toString:function(a){return(a||q).stringify(this)},concat:function(a){var b=this.words,d=a.words,c=this.sigBytes;a=a.sigBytes;this.clamp();if(c%4)for(var j=0;j>>2]|=(d[j>>>2]>>>24-8*(j%4)&255)<<24-8*((c+j)%4);else if(65535>>2]=d[j>>>2];else b.push.apply(b,d);this.sigBytes+=a;return this},clamp:function(){var n=this.words,b=this.sigBytes;n[b>>>2]&=4294967295<<\n32-8*(b%4);n.length=a.ceil(b/4)},clone:function(){var a=l.clone.call(this);a.words=this.words.slice(0);return a},random:function(n){for(var b=[],d=0;d>>2]>>>24-8*(c%4)&255;d.push((j>>>4).toString(16));d.push((j&15).toString(16))}return d.join(\"\")},parse:function(a){for(var b=a.length,d=[],c=0;c>>3]|=parseInt(a.substr(c,\n2),16)<<24-4*(c%8);return new p.init(d,b/2)}},G=y.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var d=[],c=0;c>>2]>>>24-8*(c%4)&255));return d.join(\"\")},parse:function(a){for(var b=a.length,d=[],c=0;c>>2]|=(a.charCodeAt(c)&255)<<24-8*(c%4);return new p.init(d,b)}},fa=y.Utf8={stringify:function(a){try{return decodeURIComponent(escape(G.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return G.parse(unescape(encodeURIComponent(a)))}},\nh=f.BufferedBlockAlgorithm=l.extend({reset:function(){this._data=new p.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=fa.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(n){var 
b=this._data,d=b.words,c=b.sigBytes,j=this.blockSize,l=c/(4*j),l=n?a.ceil(l):a.max((l|0)-this._minBufferSize,0);n=l*j;c=a.min(4*n,c);if(n){for(var h=0;hq;q++)y[q]=a();f=f.SHA512=r.extend({_doReset:function(){this._hash=new l.init([new g.init(1779033703,4089235720),new g.init(3144134277,2227873595),new g.init(1013904242,4271175723),new g.init(2773480762,1595750129),new g.init(1359893119,2917565137),new g.init(2600822924,725511199),new g.init(528734635,4215389547),new g.init(1541459225,327033209)])},_doProcessBlock:function(a,f){for(var h=this._hash.words,\ng=h[0],n=h[1],b=h[2],d=h[3],c=h[4],j=h[5],l=h[6],h=h[7],q=g.high,m=g.low,r=n.high,N=n.low,Z=b.high,O=b.low,$=d.high,P=d.low,aa=c.high,Q=c.low,ba=j.high,R=j.low,ca=l.high,S=l.low,da=h.high,T=h.low,v=q,s=m,H=r,E=N,I=Z,F=O,W=$,J=P,w=aa,t=Q,U=ba,K=R,V=ca,L=S,X=da,M=T,x=0;80>x;x++){var B=y[x];if(16>x)var u=B.high=a[f+2*x]|0,e=B.low=a[f+2*x+1]|0;else{var u=y[x-15],e=u.high,z=u.low,u=(e>>>1|z<<31)^(e>>>8|z<<24)^e>>>7,z=(z>>>1|e<<31)^(z>>>8|e<<24)^(z>>>7|e<<25),D=y[x-2],e=D.high,k=D.low,D=(e>>>19|k<<13)^\n(e<<3|k>>>29)^e>>>6,k=(k>>>19|e<<13)^(k<<3|e>>>29)^(k>>>6|e<<26),e=y[x-7],Y=e.high,C=y[x-16],A=C.high,C=C.low,e=z+e.low,u=u+Y+(e>>>0>>0?1:0),e=e+k,u=u+D+(e>>>0>>0?1:0),e=e+C,u=u+A+(e>>>0>>0?1:0);B.high=u;B.low=e}var Y=w&U^~w&V,C=t&K^~t&L,B=v&H^v&I^H&I,ha=s&E^s&F^E&F,z=(v>>>28|s<<4)^(v<<30|s>>>2)^(v<<25|s>>>7),D=(s>>>28|v<<4)^(s<<30|v>>>2)^(s<<25|v>>>7),k=p[x],ia=k.high,ea=k.low,k=M+((t>>>14|w<<18)^(t>>>18|w<<14)^(t<<23|w>>>9)),A=X+((w>>>14|t<<18)^(w>>>18|t<<14)^(w<<23|t>>>9))+(k>>>0>>\n0?1:0),k=k+C,A=A+Y+(k>>>0>>0?1:0),k=k+ea,A=A+ia+(k>>>0>>0?1:0),k=k+e,A=A+u+(k>>>0>>0?1:0),e=D+ha,B=z+B+(e>>>0>>0?1:0),X=V,M=L,V=U,L=K,U=w,K=t,t=J+k|0,w=W+A+(t>>>0>>0?1:0)|0,W=I,J=F,I=H,F=E,H=v,E=s,s=k+e|0,v=A+B+(s>>>0>>0?1:0)|0}m=g.low=m+s;g.high=q+v+(m>>>0>>0?1:0);N=n.low=N+E;n.high=r+H+(N>>>0>>0?1:0);O=b.low=O+F;b.high=Z+I+(O>>>0>>0?1:0);P=d.low=P+J;d.high=$+W+(P>>>0>>0?1:0);Q=c.low=Q+t;c.high=aa+w+(Q>>>0>>0?1:0);R=j.low=R+K;j.high=ba+U+(R>>>0>>0?1:0);S=l.low=\nS+L;l.high=ca+V+(S>>>0>>0?1:0);T=h.low=T+M;h.high=da+X+(T>>>0>>0?1:0)},_doFinalize:function(){var a=this._data,f=a.words,h=8*this._nDataBytes,g=8*a.sigBytes;f[g>>>5]|=128<<24-g%32;f[(g+128>>>10<<5)+30]=Math.floor(h/4294967296);f[(g+128>>>10<<5)+31]=h;a.sigBytes=4*f.length;this._process();return this._hash.toX32()},clone:function(){var a=r.clone.call(this);a._hash=this._hash.clone();return a},blockSize:32});m.SHA512=r._createHelper(f);m.HmacSHA512=r._createHmacHelper(f)})();\n"], "crypto_js.rollups.md5": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(s,p){var m={},l=m.lib={},n=function(){},r=l.Base={extend:function(b){n.prototype=this;var h=new n;b&&h.mixIn(b);h.hasOwnProperty(\"init\")||(h.init=function(){h.$super.init.apply(this,arguments)});h.init.prototype=h;h.$super=this;return h},create:function(){var b=this.extend();b.init.apply(b,arguments);return b},init:function(){},mixIn:function(b){for(var h in b)b.hasOwnProperty(h)&&(this[h]=b[h]);b.hasOwnProperty(\"toString\")&&(this.toString=b.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=l.WordArray=r.extend({init:function(b,h){b=this.words=b||[];this.sigBytes=h!=p?h:4*b.length},toString:function(b){return(b||t).stringify(this)},concat:function(b){var h=this.words,a=b.words,j=this.sigBytes;b=b.sigBytes;this.clamp();if(j%4)for(var g=0;g>>2]|=(a[g>>>2]>>>24-8*(g%4)&255)<<24-8*((j+g)%4);else if(65535>>2]=a[g>>>2];else h.push.apply(h,a);this.sigBytes+=b;return this},clamp:function(){var b=this.words,h=this.sigBytes;b[h>>>2]&=4294967295<<\n32-8*(h%4);b.length=s.ceil(h/4)},clone:function(){var b=r.clone.call(this);b.words=this.words.slice(0);return b},random:function(b){for(var h=[],a=0;a>>2]>>>24-8*(j%4)&255;g.push((k>>>4).toString(16));g.push((k&15).toString(16))}return g.join(\"\")},parse:function(b){for(var a=b.length,g=[],j=0;j>>3]|=parseInt(b.substr(j,\n2),16)<<24-4*(j%8);return new q.init(g,a/2)}},a=v.Latin1={stringify:function(b){var a=b.words;b=b.sigBytes;for(var g=[],j=0;j>>2]>>>24-8*(j%4)&255));return g.join(\"\")},parse:function(b){for(var a=b.length,g=[],j=0;j>>2]|=(b.charCodeAt(j)&255)<<24-8*(j%4);return new q.init(g,a)}},u=v.Utf8={stringify:function(b){try{return decodeURIComponent(escape(a.stringify(b)))}catch(g){throw Error(\"Malformed UTF-8 data\");}},parse:function(b){return a.parse(unescape(encodeURIComponent(b)))}},\ng=l.BufferedBlockAlgorithm=r.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(b){\"string\"==typeof b&&(b=u.parse(b));this._data.concat(b);this._nDataBytes+=b.sigBytes},_process:function(b){var a=this._data,g=a.words,j=a.sigBytes,k=this.blockSize,m=j/(4*k),m=b?s.ceil(m):s.max((m|0)-this._minBufferSize,0);b=m*k;j=s.min(4*b,j);if(b){for(var l=0;l>>32-j)+k}function m(a,k,b,h,l,j,m){a=a+(k&h|b&~h)+l+m;return(a<>>32-j)+k}function l(a,k,b,h,l,j,m){a=a+(k^b^h)+l+m;return(a<>>32-j)+k}function n(a,k,b,h,l,j,m){a=a+(b^(k|~h))+l+m;return(a<>>32-j)+k}for(var r=CryptoJS,q=r.lib,v=q.WordArray,t=q.Hasher,q=r.algo,a=[],u=0;64>u;u++)a[u]=4294967296*s.abs(s.sin(u+1))|0;q=q.MD5=t.extend({_doReset:function(){this._hash=new v.init([1732584193,4023233417,2562383102,271733878])},\n_doProcessBlock:function(g,k){for(var b=0;16>b;b++){var h=k+b,w=g[h];g[h]=(w<<8|w>>>24)&16711935|(w<<24|w>>>8)&4278255360}var 
b=this._hash.words,h=g[k+0],w=g[k+1],j=g[k+2],q=g[k+3],r=g[k+4],s=g[k+5],t=g[k+6],u=g[k+7],v=g[k+8],x=g[k+9],y=g[k+10],z=g[k+11],A=g[k+12],B=g[k+13],C=g[k+14],D=g[k+15],c=b[0],d=b[1],e=b[2],f=b[3],c=p(c,d,e,f,h,7,a[0]),f=p(f,c,d,e,w,12,a[1]),e=p(e,f,c,d,j,17,a[2]),d=p(d,e,f,c,q,22,a[3]),c=p(c,d,e,f,r,7,a[4]),f=p(f,c,d,e,s,12,a[5]),e=p(e,f,c,d,t,17,a[6]),d=p(d,e,f,c,u,22,a[7]),\nc=p(c,d,e,f,v,7,a[8]),f=p(f,c,d,e,x,12,a[9]),e=p(e,f,c,d,y,17,a[10]),d=p(d,e,f,c,z,22,a[11]),c=p(c,d,e,f,A,7,a[12]),f=p(f,c,d,e,B,12,a[13]),e=p(e,f,c,d,C,17,a[14]),d=p(d,e,f,c,D,22,a[15]),c=m(c,d,e,f,w,5,a[16]),f=m(f,c,d,e,t,9,a[17]),e=m(e,f,c,d,z,14,a[18]),d=m(d,e,f,c,h,20,a[19]),c=m(c,d,e,f,s,5,a[20]),f=m(f,c,d,e,y,9,a[21]),e=m(e,f,c,d,D,14,a[22]),d=m(d,e,f,c,r,20,a[23]),c=m(c,d,e,f,x,5,a[24]),f=m(f,c,d,e,C,9,a[25]),e=m(e,f,c,d,q,14,a[26]),d=m(d,e,f,c,v,20,a[27]),c=m(c,d,e,f,B,5,a[28]),f=m(f,c,\nd,e,j,9,a[29]),e=m(e,f,c,d,u,14,a[30]),d=m(d,e,f,c,A,20,a[31]),c=l(c,d,e,f,s,4,a[32]),f=l(f,c,d,e,v,11,a[33]),e=l(e,f,c,d,z,16,a[34]),d=l(d,e,f,c,C,23,a[35]),c=l(c,d,e,f,w,4,a[36]),f=l(f,c,d,e,r,11,a[37]),e=l(e,f,c,d,u,16,a[38]),d=l(d,e,f,c,y,23,a[39]),c=l(c,d,e,f,B,4,a[40]),f=l(f,c,d,e,h,11,a[41]),e=l(e,f,c,d,q,16,a[42]),d=l(d,e,f,c,t,23,a[43]),c=l(c,d,e,f,x,4,a[44]),f=l(f,c,d,e,A,11,a[45]),e=l(e,f,c,d,D,16,a[46]),d=l(d,e,f,c,j,23,a[47]),c=n(c,d,e,f,h,6,a[48]),f=n(f,c,d,e,u,10,a[49]),e=n(e,f,c,d,\nC,15,a[50]),d=n(d,e,f,c,s,21,a[51]),c=n(c,d,e,f,A,6,a[52]),f=n(f,c,d,e,q,10,a[53]),e=n(e,f,c,d,y,15,a[54]),d=n(d,e,f,c,w,21,a[55]),c=n(c,d,e,f,v,6,a[56]),f=n(f,c,d,e,D,10,a[57]),e=n(e,f,c,d,t,15,a[58]),d=n(d,e,f,c,B,21,a[59]),c=n(c,d,e,f,r,6,a[60]),f=n(f,c,d,e,z,10,a[61]),e=n(e,f,c,d,j,15,a[62]),d=n(d,e,f,c,x,21,a[63]);b[0]=b[0]+c|0;b[1]=b[1]+d|0;b[2]=b[2]+e|0;b[3]=b[3]+f|0},_doFinalize:function(){var a=this._data,k=a.words,b=8*this._nDataBytes,h=8*a.sigBytes;k[h>>>5]|=128<<24-h%32;var l=s.floor(b/\n4294967296);k[(h+64>>>9<<4)+15]=(l<<8|l>>>24)&16711935|(l<<24|l>>>8)&4278255360;k[(h+64>>>9<<4)+14]=(b<<8|b>>>24)&16711935|(b<<24|b>>>8)&4278255360;a.sigBytes=4*(k.length+1);this._process();a=this._hash;k=a.words;for(b=0;4>b;b++)h=k[b],k[b]=(h<<8|h>>>24)&16711935|(h<<24|h>>>8)&4278255360;return a},clone:function(){var a=t.clone.call(this);a._hash=this._hash.clone();return a}});r.MD5=t._createHelper(q);r.HmacMD5=t._createHmacHelper(q)})(Math);\n"], "crypto_js.rollups.sha3": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(v,p){var d={},u=d.lib={},r=function(){},f=u.Base={extend:function(a){r.prototype=this;var b=new r;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\ns=u.WordArray=f.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=p?b:4*a.length},toString:function(a){return(a||y).stringify(this)},concat:function(a){var b=this.words,c=a.words,j=this.sigBytes;a=a.sigBytes;this.clamp();if(j%4)for(var n=0;n>>2]|=(c[n>>>2]>>>24-8*(n%4)&255)<<24-8*((j+n)%4);else if(65535>>2]=c[n>>>2];else b.push.apply(b,c);this.sigBytes+=a;return this},clamp:function(){var a=this.words,b=this.sigBytes;a[b>>>2]&=4294967295<<\n32-8*(b%4);a.length=v.ceil(b/4)},clone:function(){var a=f.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var b=[],c=0;c>>2]>>>24-8*(j%4)&255;c.push((n>>>4).toString(16));c.push((n&15).toString(16))}return c.join(\"\")},parse:function(a){for(var b=a.length,c=[],j=0;j>>3]|=parseInt(a.substr(j,\n2),16)<<24-4*(j%8);return new s.init(c,b/2)}},e=x.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var c=[],j=0;j>>2]>>>24-8*(j%4)&255));return c.join(\"\")},parse:function(a){for(var b=a.length,c=[],j=0;j>>2]|=(a.charCodeAt(j)&255)<<24-8*(j%4);return new s.init(c,b)}},q=x.Utf8={stringify:function(a){try{return decodeURIComponent(escape(e.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return e.parse(unescape(encodeURIComponent(a)))}},\nt=u.BufferedBlockAlgorithm=f.extend({reset:function(){this._data=new s.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=q.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var b=this._data,c=b.words,j=b.sigBytes,n=this.blockSize,e=j/(4*n),e=a?v.ceil(e):v.max((e|0)-this._minBufferSize,0);a=e*n;j=v.min(4*a,j);if(a){for(var f=0;ft;t++){s[e+5*q]=(t+1)*(t+2)/2%64;var w=(2*e+3*q)%5,e=q%5,q=w}for(e=0;5>e;e++)for(q=0;5>q;q++)x[e+5*q]=q+5*((2*e+3*q)%5);e=1;for(q=0;24>q;q++){for(var a=w=t=0;7>a;a++){if(e&1){var b=(1<b?w^=1<e;e++)c[e]=f.create();d=d.SHA3=r.extend({cfg:r.cfg.extend({outputLength:512}),_doReset:function(){for(var a=this._state=\n[],b=0;25>b;b++)a[b]=new f.init;this.blockSize=(1600-2*this.cfg.outputLength)/32},_doProcessBlock:function(a,b){for(var e=this._state,f=this.blockSize/2,h=0;h>>24)&16711935|(l<<24|l>>>8)&4278255360,m=(m<<8|m>>>24)&16711935|(m<<24|m>>>8)&4278255360,g=e[h];g.high^=m;g.low^=l}for(f=0;24>f;f++){for(h=0;5>h;h++){for(var d=l=0,k=0;5>k;k++)g=e[h+5*k],l^=g.high,d^=g.low;g=c[h];g.high=l;g.low=d}for(h=0;5>h;h++){g=c[(h+4)%5];l=c[(h+1)%5];m=l.high;k=l.low;l=g.high^\n(m<<1|k>>>31);d=g.low^(k<<1|m>>>31);for(k=0;5>k;k++)g=e[h+5*k],g.high^=l,g.low^=d}for(m=1;25>m;m++)g=e[m],h=g.high,g=g.low,k=s[m],32>k?(l=h<>>32-k,d=g<>>32-k):(l=g<>>64-k,d=h<>>64-k),g=c[x[m]],g.high=l,g.low=d;g=c[0];h=e[0];g.high=h.high;g.low=h.low;for(h=0;5>h;h++)for(k=0;5>k;k++)m=h+5*k,g=e[m],l=c[m],m=c[(h+1)%5+5*k],d=c[(h+2)%5+5*k],g.high=l.high^~m.high&d.high,g.low=l.low^~m.low&d.low;g=e[0];h=y[f];g.high^=h.high;g.low^=h.low}},_doFinalize:function(){var 
a=this._data,\nb=a.words,c=8*a.sigBytes,e=32*this.blockSize;b[c>>>5]|=1<<24-c%32;b[(v.ceil((c+1)/e)*e>>>5)-1]|=128;a.sigBytes=4*b.length;this._process();for(var a=this._state,b=this.cfg.outputLength/8,c=b/8,e=[],h=0;h>>24)&16711935|(f<<24|f>>>8)&4278255360,d=(d<<8|d>>>24)&16711935|(d<<24|d>>>8)&4278255360;e.push(d);e.push(f)}return new u.init(e,b)},clone:function(){for(var a=r.clone.call(this),b=a._state=this._state.slice(0),c=0;25>c;c++)b[c]=b[c].clone();return a}});\np.SHA3=r._createHelper(d);p.HmacSHA3=r._createHmacHelper(d)})(Math);\n"], "crypto_js.rollups.sha224": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(g,l){var f={},k=f.lib={},h=function(){},m=k.Base={extend:function(a){h.prototype=this;var c=new h;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=k.WordArray=m.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=l?c:4*a.length},toString:function(a){return(a||s).stringify(this)},concat:function(a){var c=this.words,d=a.words,b=this.sigBytes;a=a.sigBytes;this.clamp();if(b%4)for(var e=0;e>>2]|=(d[e>>>2]>>>24-8*(e%4)&255)<<24-8*((b+e)%4);else if(65535>>2]=d[e>>>2];else c.push.apply(c,d);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=g.ceil(c/4)},clone:function(){var a=m.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],d=0;d>>2]>>>24-8*(b%4)&255;d.push((e>>>4).toString(16));d.push((e&15).toString(16))}return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>3]|=parseInt(a.substr(b,\n2),16)<<24-4*(b%8);return new q.init(d,c/2)}},n=t.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var d=[],b=0;b>>2]>>>24-8*(b%4)&255));return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>2]|=(a.charCodeAt(b)&255)<<24-8*(b%4);return new q.init(d,c)}},j=t.Utf8={stringify:function(a){try{return decodeURIComponent(escape(n.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return n.parse(unescape(encodeURIComponent(a)))}},\nw=k.BufferedBlockAlgorithm=m.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=j.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,d=c.words,b=c.sigBytes,e=this.blockSize,f=b/(4*e),f=a?g.ceil(f):g.max((f|0)-this._minBufferSize,0);a=f*e;b=g.min(4*a,b);if(a){for(var u=0;un;){var j;a:{j=s;for(var w=g.sqrt(j),v=2;v<=w;v++)if(!(j%v)){j=!1;break a}j=!0}j&&(8>n&&(m[n]=t(g.pow(s,0.5))),q[n]=t(g.pow(s,1/3)),n++);s++}var a=[],f=f.SHA256=h.extend({_doReset:function(){this._hash=new k.init(m.slice(0))},_doProcessBlock:function(c,d){for(var b=this._hash.words,e=b[0],f=b[1],g=b[2],k=b[3],h=b[4],l=b[5],m=b[6],n=b[7],p=0;64>p;p++){if(16>p)a[p]=\nc[d+p]|0;else{var 
j=a[p-15],r=a[p-2];a[p]=((j<<25|j>>>7)^(j<<14|j>>>18)^j>>>3)+a[p-7]+((r<<15|r>>>17)^(r<<13|r>>>19)^r>>>10)+a[p-16]}j=n+((h<<26|h>>>6)^(h<<21|h>>>11)^(h<<7|h>>>25))+(h&l^~h&m)+q[p]+a[p];r=((e<<30|e>>>2)^(e<<19|e>>>13)^(e<<10|e>>>22))+(e&f^e&g^f&g);n=m;m=l;l=h;h=k+j|0;k=g;g=f;f=e;e=j+r|0}b[0]=b[0]+e|0;b[1]=b[1]+f|0;b[2]=b[2]+g|0;b[3]=b[3]+k|0;b[4]=b[4]+h|0;b[5]=b[5]+l|0;b[6]=b[6]+m|0;b[7]=b[7]+n|0},_doFinalize:function(){var a=this._data,d=a.words,b=8*this._nDataBytes,e=8*a.sigBytes;\nd[e>>>5]|=128<<24-e%32;d[(e+64>>>9<<4)+14]=g.floor(b/4294967296);d[(e+64>>>9<<4)+15]=b;a.sigBytes=4*d.length;this._process();return this._hash},clone:function(){var a=h.clone.call(this);a._hash=this._hash.clone();return a}});l.SHA256=h._createHelper(f);l.HmacSHA256=h._createHmacHelper(f)})(Math);\n(function(){var g=CryptoJS,l=g.lib.WordArray,f=g.algo,k=f.SHA256,f=f.SHA224=k.extend({_doReset:function(){this._hash=new l.init([3238371032,914150663,812702999,4144912697,4290775857,1750603025,1694076839,3204075428])},_doFinalize:function(){var f=k._doFinalize.call(this);f.sigBytes-=4;return f}});g.SHA224=k._createHelper(f);g.HmacSHA224=k._createHmacHelper(f)})();\n"], "zipfile": [".py", "''\n\n\n\n\nimport binascii\nimport importlib.util\nimport io\nimport itertools\nimport os\nimport posixpath\nimport shutil\nimport stat\nimport struct\nimport sys\nimport threading\nimport time\nimport contextlib\nimport pathlib\n\ntry:\n import zlib\n crc32=zlib.crc32\nexcept ImportError:\n zlib=None\n crc32=binascii.crc32\n \ntry:\n import bz2\nexcept ImportError:\n bz2=None\n \ntry:\n import lzma\nexcept ImportError:\n lzma=None\n \n__all__=[\"BadZipFile\",\"BadZipfile\",\"error\",\n\"ZIP_STORED\",\"ZIP_DEFLATED\",\"ZIP_BZIP2\",\"ZIP_LZMA\",\n\"is_zipfile\",\"ZipInfo\",\"ZipFile\",\"PyZipFile\",\"LargeZipFile\",\n\"Path\"]\n\nclass BadZipFile(Exception):\n pass\n \n \nclass LargeZipFile(Exception):\n ''\n\n\n \n \nerror=BadZipfile=BadZipFile\n\n\nZIP64_LIMIT=(1 <<31)-1\nZIP_FILECOUNT_LIMIT=(1 <<16)-1\nZIP_MAX_COMMENT=(1 <<16)-1\n\n\nZIP_STORED=0\nZIP_DEFLATED=8\nZIP_BZIP2=12\nZIP_LZMA=14\n\n\nDEFAULT_VERSION=20\nZIP64_VERSION=45\nBZIP2_VERSION=46\nLZMA_VERSION=63\n\nMAX_EXTRACT_VERSION=63\n\n\n\n\n\n\n\n\n\nstructEndArchive=b\"<4s4H2LH\"\nstringEndArchive=b\"PK\\005\\006\"\nsizeEndCentDir=struct.calcsize(structEndArchive)\n\n_ECD_SIGNATURE=0\n_ECD_DISK_NUMBER=1\n_ECD_DISK_START=2\n_ECD_ENTRIES_THIS_DISK=3\n_ECD_ENTRIES_TOTAL=4\n_ECD_SIZE=5\n_ECD_OFFSET=6\n_ECD_COMMENT_SIZE=7\n\n\n_ECD_COMMENT=8\n_ECD_LOCATION=9\n\n\n\nstructCentralDir=\"<4s4B4HL2L5H2L\"\nstringCentralDir=b\"PK\\001\\002\"\nsizeCentralDir=struct.calcsize(structCentralDir)\n\n\n_CD_SIGNATURE=0\n_CD_CREATE_VERSION=1\n_CD_CREATE_SYSTEM=2\n_CD_EXTRACT_VERSION=3\n_CD_EXTRACT_SYSTEM=4\n_CD_FLAG_BITS=5\n_CD_COMPRESS_TYPE=6\n_CD_TIME=7\n_CD_DATE=8\n_CD_CRC=9\n_CD_COMPRESSED_SIZE=10\n_CD_UNCOMPRESSED_SIZE=11\n_CD_FILENAME_LENGTH=12\n_CD_EXTRA_FIELD_LENGTH=13\n_CD_COMMENT_LENGTH=14\n_CD_DISK_NUMBER_START=15\n_CD_INTERNAL_FILE_ATTRIBUTES=16\n_CD_EXTERNAL_FILE_ATTRIBUTES=17\n_CD_LOCAL_HEADER_OFFSET=18\n\n\n\n_MASK_ENCRYPTED=1 <<0\n\n_MASK_COMPRESS_OPTION_1=1 <<1\n\n\n\n\n_MASK_USE_DATA_DESCRIPTOR=1 <<3\n\n\n_MASK_COMPRESSED_PATCH=1 <<5\n_MASK_STRONG_ENCRYPTION=1 <<6\n\n\n\n\n_MASK_UTF_FILENAME=1 
<<11\n\n\n\n\n\n\n\n\n\nstructFileHeader=\"<4s2B4HL2L2H\"\nstringFileHeader=b\"PK\\003\\004\"\nsizeFileHeader=struct.calcsize(structFileHeader)\n\n_FH_SIGNATURE=0\n_FH_EXTRACT_VERSION=1\n_FH_EXTRACT_SYSTEM=2\n_FH_GENERAL_PURPOSE_FLAG_BITS=3\n_FH_COMPRESSION_METHOD=4\n_FH_LAST_MOD_TIME=5\n_FH_LAST_MOD_DATE=6\n_FH_CRC=7\n_FH_COMPRESSED_SIZE=8\n_FH_UNCOMPRESSED_SIZE=9\n_FH_FILENAME_LENGTH=10\n_FH_EXTRA_FIELD_LENGTH=11\n\n\nstructEndArchive64Locator=\"<4sLQL\"\nstringEndArchive64Locator=b\"PK\\x06\\x07\"\nsizeEndCentDir64Locator=struct.calcsize(structEndArchive64Locator)\n\n\n\nstructEndArchive64=\"<4sQ2H2L4Q\"\nstringEndArchive64=b\"PK\\x06\\x06\"\nsizeEndCentDir64=struct.calcsize(structEndArchive64)\n\n_CD64_SIGNATURE=0\n_CD64_DIRECTORY_RECSIZE=1\n_CD64_CREATE_VERSION=2\n_CD64_EXTRACT_VERSION=3\n_CD64_DISK_NUMBER=4\n_CD64_DISK_NUMBER_START=5\n_CD64_NUMBER_ENTRIES_THIS_DISK=6\n_CD64_NUMBER_ENTRIES_TOTAL=7\n_CD64_DIRECTORY_SIZE=8\n_CD64_OFFSET_START_CENTDIR=9\n\n_DD_SIGNATURE=0x08074b50\n\n_EXTRA_FIELD_STRUCT=struct.Struct('1:\n raise BadZipFile(\"zipfiles that span multiple disks are not supported\")\n \n \n fpin.seek(offset -sizeEndCentDir64Locator -sizeEndCentDir64,2)\n data=fpin.read(sizeEndCentDir64)\n if len(data)!=sizeEndCentDir64:\n return endrec\n sig,sz,create_version,read_version,disk_num,disk_dir,\\\n dircount,dircount2,dirsize,diroffset=\\\n struct.unpack(structEndArchive64,data)\n if sig !=stringEndArchive64:\n return endrec\n \n \n endrec[_ECD_SIGNATURE]=sig\n endrec[_ECD_DISK_NUMBER]=disk_num\n endrec[_ECD_DISK_START]=disk_dir\n endrec[_ECD_ENTRIES_THIS_DISK]=dircount\n endrec[_ECD_ENTRIES_TOTAL]=dircount2\n endrec[_ECD_SIZE]=dirsize\n endrec[_ECD_OFFSET]=diroffset\n return endrec\n \n \ndef _EndRecData(fpin):\n ''\n\n\n \n \n \n fpin.seek(0,2)\n filesize=fpin.tell()\n \n \n \n \n try:\n fpin.seek(-sizeEndCentDir,2)\n except OSError:\n return None\n data=fpin.read()\n if(len(data)==sizeEndCentDir and\n data[0:4]==stringEndArchive and\n data[-2:]==b\"\\000\\000\"):\n \n endrec=struct.unpack(structEndArchive,data)\n endrec=list(endrec)\n \n \n endrec.append(b\"\")\n endrec.append(filesize -sizeEndCentDir)\n \n \n return _EndRecData64(fpin,-sizeEndCentDir,endrec)\n \n \n \n \n \n \n maxCommentStart=max(filesize -(1 <<16)-sizeEndCentDir,0)\n fpin.seek(maxCommentStart,0)\n data=fpin.read()\n start=data.rfind(stringEndArchive)\n if start >=0:\n \n recData=data[start:start+sizeEndCentDir]\n if len(recData)!=sizeEndCentDir:\n \n return None\n endrec=list(struct.unpack(structEndArchive,recData))\n commentSize=endrec[_ECD_COMMENT_SIZE]\n comment=data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]\n endrec.append(comment)\n endrec.append(maxCommentStart+start)\n \n \n return _EndRecData64(fpin,maxCommentStart+start -filesize,\n endrec)\n \n \n return None\n \n \nclass ZipInfo(object):\n ''\n \n __slots__=(\n 'orig_filename',\n 'filename',\n 'date_time',\n 'compress_type',\n '_compresslevel',\n 'comment',\n 'extra',\n 'create_system',\n 'create_version',\n 'extract_version',\n 'reserved',\n 'flag_bits',\n 'volume',\n 'internal_attr',\n 'external_attr',\n 'header_offset',\n 'CRC',\n 'compress_size',\n 'file_size',\n '_raw_time',\n )\n \n def __init__(self,filename=\"NoName\",date_time=(1980,1,1,0,0,0)):\n self.orig_filename=filename\n \n \n \n null_byte=filename.find(chr(0))\n if null_byte >=0:\n filename=filename[0:null_byte]\n \n \n \n if os.sep !=\"/\"and os.sep in filename:\n filename=filename.replace(os.sep,\"/\")\n \n self.filename=filename\n self.date_time=date_time\n \n if 
date_time[0]<1980:\n raise ValueError('ZIP does not support timestamps before 1980')\n \n \n self.compress_type=ZIP_STORED\n self._compresslevel=None\n self.comment=b\"\"\n self.extra=b\"\"\n if sys.platform =='win32':\n self.create_system=0\n else:\n \n self.create_system=3\n self.create_version=DEFAULT_VERSION\n self.extract_version=DEFAULT_VERSION\n self.reserved=0\n self.flag_bits=0\n self.volume=0\n self.internal_attr=0\n self.external_attr=0\n self.compress_size=0\n self.file_size=0\n \n \n \n \n def __repr__(self):\n result=['<%s filename=%r'%(self.__class__.__name__,self.filename)]\n if self.compress_type !=ZIP_STORED:\n result.append(' compress_type=%s'%\n compressor_names.get(self.compress_type,\n self.compress_type))\n hi=self.external_attr >>16\n lo=self.external_attr&0xFFFF\n if hi:\n result.append(' filemode=%r'%stat.filemode(hi))\n if lo:\n result.append(' external_attr=%#x'%lo)\n isdir=self.is_dir()\n if not isdir or self.file_size:\n result.append(' file_size=%r'%self.file_size)\n if((not isdir or self.compress_size)and\n (self.compress_type !=ZIP_STORED or\n self.file_size !=self.compress_size)):\n result.append(' compress_size=%r'%self.compress_size)\n result.append('>')\n return ''.join(result)\n \n def FileHeader(self,zip64=None):\n ''\n dt=self.date_time\n dosdate=(dt[0]-1980)<<9 |dt[1]<<5 |dt[2]\n dostime=dt[3]<<11 |dt[4]<<5 |(dt[5]//2)\n if self.flag_bits&_MASK_USE_DATA_DESCRIPTOR:\n \n CRC=compress_size=file_size=0\n else:\n CRC=self.CRC\n compress_size=self.compress_size\n file_size=self.file_size\n \n extra=self.extra\n \n min_version=0\n if zip64 is None:\n zip64=file_size >ZIP64_LIMIT or compress_size >ZIP64_LIMIT\n if zip64:\n fmt='ZIP64_LIMIT or compress_size >ZIP64_LIMIT:\n if not zip64:\n raise LargeZipFile(\"Filesize would require ZIP64 extensions\")\n \n \n file_size=0xffffffff\n compress_size=0xffffffff\n min_version=ZIP64_VERSION\n \n if self.compress_type ==ZIP_BZIP2:\n min_version=max(BZIP2_VERSION,min_version)\n elif self.compress_type ==ZIP_LZMA:\n min_version=max(LZMA_VERSION,min_version)\n \n self.extract_version=max(min_version,self.extract_version)\n self.create_version=max(min_version,self.create_version)\n filename,flag_bits=self._encodeFilenameFlags()\n header=struct.pack(structFileHeader,stringFileHeader,\n self.extract_version,self.reserved,flag_bits,\n self.compress_type,dostime,dosdate,CRC,\n compress_size,file_size,\n len(filename),len(extra))\n return header+filename+extra\n \n def _encodeFilenameFlags(self):\n try:\n return self.filename.encode('ascii'),self.flag_bits\n except UnicodeEncodeError:\n return self.filename.encode('utf-8'),self.flag_bits |_MASK_UTF_FILENAME\n \n def _decodeExtra(self):\n \n extra=self.extra\n unpack=struct.unpack\n while len(extra)>=4:\n tp,ln=unpack('len(extra):\n raise BadZipFile(\"Corrupt extra field %04x (size=%d)\"%(tp,ln))\n if tp ==0x0001:\n data=extra[4:ln+4]\n \n try:\n if self.file_size in(0xFFFF_FFFF_FFFF_FFFF,0xFFFF_FFFF):\n field=\"File size\"\n self.file_size,=unpack('2107:\n date_time=(2107,12,31,23,59,59)\n \n if arcname is None:\n arcname=filename\n arcname=os.path.normpath(os.path.splitdrive(arcname)[1])\n while arcname[0]in(os.sep,os.altsep):\n arcname=arcname[1:]\n if isdir:\n arcname +='/'\n zinfo=cls(arcname,date_time)\n zinfo.external_attr=(st.st_mode&0xFFFF)<<16\n if isdir:\n zinfo.file_size=0\n zinfo.external_attr |=0x10\n else:\n zinfo.file_size=st.st_size\n \n return zinfo\n \n def is_dir(self):\n ''\n return self.filename[-1]=='/'\n \n \n \n \n \n \n_crctable=None\ndef 
_gen_crc(crc):\n for j in range(8):\n if crc&1:\n crc=(crc >>1)^0xEDB88320\n else:\n crc >>=1\n return crc\n \n \n \n \n \n \n \n \n \ndef _ZipDecrypter(pwd):\n key0=305419896\n key1=591751049\n key2=878082192\n \n global _crctable\n if _crctable is None:\n _crctable=list(map(_gen_crc,range(256)))\n crctable=_crctable\n \n def crc32(ch,crc):\n ''\n return(crc >>8)^crctable[(crc ^ch)&0xFF]\n \n def update_keys(c):\n nonlocal key0,key1,key2\n key0=crc32(c,key0)\n key1=(key1+(key0&0xFF))&0xFFFFFFFF\n key1=(key1 *134775813+1)&0xFFFFFFFF\n key2=crc32(key1 >>24,key2)\n \n for p in pwd:\n update_keys(p)\n \n def decrypter(data):\n ''\n result=bytearray()\n append=result.append\n for c in data:\n k=key2 |2\n c ^=((k *(k ^1))>>8)&0xFF\n update_keys(c)\n append(c)\n return bytes(result)\n \n return decrypter\n \n \nclass LZMACompressor:\n\n def __init__(self):\n self._comp=None\n \n def _init(self):\n props=lzma._encode_filter_properties({'id':lzma.FILTER_LZMA1})\n self._comp=lzma.LZMACompressor(lzma.FORMAT_RAW,filters=[\n lzma._decode_filter_properties(lzma.FILTER_LZMA1,props)\n ])\n return struct.pack('>8)&0xff\n else:\n \n check_byte=(zipinfo.CRC >>24)&0xff\n h=self._init_decrypter()\n if h !=check_byte:\n raise RuntimeError(\"Bad password for file %r\"%zipinfo.orig_filename)\n \n \n def _init_decrypter(self):\n self._decrypter=_ZipDecrypter(self._pwd)\n \n \n \n \n \n header=self._fileobj.read(12)\n self._compress_left -=12\n return self._decrypter(header)[11]\n \n def __repr__(self):\n result=['<%s.%s'%(self.__class__.__module__,\n self.__class__.__qualname__)]\n if not self.closed:\n result.append(' name=%r mode=%r'%(self.name,self.mode))\n if self._compress_type !=ZIP_STORED:\n result.append(' compress_type=%s'%\n compressor_names.get(self._compress_type,\n self._compress_type))\n else:\n result.append(' [closed]')\n result.append('>')\n return ''.join(result)\n \n def readline(self,limit=-1):\n ''\n\n\n \n \n if limit <0:\n \n i=self._readbuffer.find(b'\\n',self._offset)+1\n if i >0:\n line=self._readbuffer[self._offset:i]\n self._offset=i\n return line\n \n return io.BufferedIOBase.readline(self,limit)\n \n def peek(self,n=1):\n ''\n if n >len(self._readbuffer)-self._offset:\n chunk=self.read(n)\n if len(chunk)>self._offset:\n self._readbuffer=chunk+self._readbuffer[self._offset:]\n self._offset=0\n else:\n self._offset -=len(chunk)\n \n \n return self._readbuffer[self._offset:self._offset+512]\n \n def readable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return True\n \n def read(self,n=-1):\n ''\n\n \n if self.closed:\n raise ValueError(\"read from closed file.\")\n if n is None or n <0:\n buf=self._readbuffer[self._offset:]\n self._readbuffer=b''\n self._offset=0\n while not self._eof:\n buf +=self._read1(self.MAX_N)\n return buf\n \n end=n+self._offset\n if end 0 and not self._eof:\n data=self._read1(n)\n if n 0:\n while not self._eof:\n data=self._read1(n)\n if n len(data):\n data +=self._read2(n -len(data))\n else:\n data=self._read2(n)\n \n if self._compress_type ==ZIP_STORED:\n self._eof=self._compress_left <=0\n elif self._compress_type ==ZIP_DEFLATED:\n n=max(n,self.MIN_READ_SIZE)\n data=self._decompressor.decompress(data,n)\n self._eof=(self._decompressor.eof or\n self._compress_left <=0 and\n not self._decompressor.unconsumed_tail)\n if self._eof:\n data +=self._decompressor.flush()\n else:\n data=self._decompressor.decompress(data)\n self._eof=self._decompressor.eof or self._compress_left <=0\n \n data=data[:self._left]\n self._left 
-=len(data)\n if self._left <=0:\n self._eof=True\n self._update_crc(data)\n return data\n \n def _read2(self,n):\n if self._compress_left <=0:\n return b''\n \n n=max(n,self.MIN_READ_SIZE)\n n=min(n,self._compress_left)\n \n data=self._fileobj.read(n)\n self._compress_left -=len(data)\n if not data:\n raise EOFError\n \n if self._decrypter is not None:\n data=self._decrypter(data)\n return data\n \n def close(self):\n try:\n if self._close_fileobj:\n self._fileobj.close()\n finally:\n super().close()\n \n def seekable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return self._seekable\n \n def seek(self,offset,whence=0):\n if self.closed:\n raise ValueError(\"seek on closed file.\")\n if not self._seekable:\n raise io.UnsupportedOperation(\"underlying stream is not seekable\")\n curr_pos=self.tell()\n if whence ==0:\n new_pos=offset\n elif whence ==1:\n new_pos=curr_pos+offset\n elif whence ==2:\n new_pos=self._orig_file_size+offset\n else:\n raise ValueError(\"whence must be os.SEEK_SET (0), \"\n \"os.SEEK_CUR (1), or os.SEEK_END (2)\")\n \n if new_pos >self._orig_file_size:\n new_pos=self._orig_file_size\n \n if new_pos <0:\n new_pos=0\n \n read_offset=new_pos -curr_pos\n buff_offset=read_offset+self._offset\n \n if buff_offset >=0 and buff_offset 0:\n read_len=min(self.MAX_SEEK_READ,read_offset)\n self.read(read_len)\n read_offset -=read_len\n \n return self.tell()\n \n def tell(self):\n if self.closed:\n raise ValueError(\"tell on closed file.\")\n if not self._seekable:\n raise io.UnsupportedOperation(\"underlying stream is not seekable\")\n filepos=self._orig_file_size -self._left -len(self._readbuffer)+self._offset\n return filepos\n \n \nclass _ZipWriteFile(io.BufferedIOBase):\n def __init__(self,zf,zinfo,zip64):\n self._zinfo=zinfo\n self._zip64=zip64\n self._zipfile=zf\n self._compressor=_get_compressor(zinfo.compress_type,\n zinfo._compresslevel)\n self._file_size=0\n self._compress_size=0\n self._crc=0\n \n @property\n def _fileobj(self):\n return self._zipfile.fp\n \n def writable(self):\n return True\n \n def write(self,data):\n if self.closed:\n raise ValueError('I/O operation on closed file.')\n \n \n if isinstance(data,(bytes,bytearray)):\n nbytes=len(data)\n else:\n data=memoryview(data)\n nbytes=data.nbytes\n self._file_size +=nbytes\n \n self._crc=crc32(data,self._crc)\n if self._compressor:\n data=self._compressor.compress(data)\n self._compress_size +=len(data)\n self._fileobj.write(data)\n return nbytes\n \n def close(self):\n if self.closed:\n return\n try:\n super().close()\n \n if self._compressor:\n buf=self._compressor.flush()\n self._compress_size +=len(buf)\n self._fileobj.write(buf)\n self._zinfo.compress_size=self._compress_size\n else:\n self._zinfo.compress_size=self._file_size\n self._zinfo.CRC=self._crc\n self._zinfo.file_size=self._file_size\n \n \n if self._zinfo.flag_bits&_MASK_USE_DATA_DESCRIPTOR:\n \n fmt='ZIP64_LIMIT:\n raise RuntimeError(\n 'File size unexpectedly exceeded ZIP64 limit')\n if self._compress_size >ZIP64_LIMIT:\n raise RuntimeError(\n 'Compressed size unexpectedly exceeded ZIP64 limit')\n \n \n \n \n self._zipfile.start_dir=self._fileobj.tell()\n self._fileobj.seek(self._zinfo.header_offset)\n self._fileobj.write(self._zinfo.FileHeader(self._zip64))\n self._fileobj.seek(self._zipfile.start_dir)\n \n \n self._zipfile.filelist.append(self._zinfo)\n self._zipfile.NameToInfo[self._zinfo.filename]=self._zinfo\n finally:\n self._zipfile._writing=False\n \n \n \nclass ZipFile:\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n fp=None\n _windows_illegal_name_trans_table=None\n \n def __init__(self,file,mode=\"r\",compression=ZIP_STORED,allowZip64=True,\n compresslevel=None,*,strict_timestamps=True,metadata_encoding=None):\n ''\n \n if mode not in('r','w','x','a'):\n raise ValueError(\"ZipFile requires mode 'r', 'w', 'x', or 'a'\")\n \n _check_compression(compression)\n \n self._allowZip64=allowZip64\n self._didModify=False\n self.debug=0\n self.NameToInfo={}\n self.filelist=[]\n self.compression=compression\n self.compresslevel=compresslevel\n self.mode=mode\n self.pwd=None\n self._comment=b''\n self._strict_timestamps=strict_timestamps\n self.metadata_encoding=metadata_encoding\n \n \n if self.metadata_encoding and mode !='r':\n raise ValueError(\n \"metadata_encoding is only supported for reading files\")\n \n \n if isinstance(file,os.PathLike):\n file=os.fspath(file)\n if isinstance(file,str):\n \n self._filePassed=0\n self.filename=file\n modeDict={'r':'rb','w':'w+b','x':'x+b','a':'r+b',\n 'r+b':'w+b','w+b':'wb','x+b':'xb'}\n filemode=modeDict[mode]\n while True:\n try:\n self.fp=io.open(file,filemode)\n except OSError:\n if filemode in modeDict:\n filemode=modeDict[filemode]\n continue\n raise\n break\n else:\n self._filePassed=1\n self.fp=file\n self.filename=getattr(file,'name',None)\n self._fileRefCnt=1\n self._lock=threading.RLock()\n self._seekable=True\n self._writing=False\n \n try:\n if mode =='r':\n self._RealGetContents()\n elif mode in('w','x'):\n \n \n self._didModify=True\n try:\n self.start_dir=self.fp.tell()\n except(AttributeError,OSError):\n self.fp=_Tellable(self.fp)\n self.start_dir=0\n self._seekable=False\n else:\n \n try:\n self.fp.seek(self.start_dir)\n except(AttributeError,OSError):\n self._seekable=False\n elif mode =='a':\n try:\n \n self._RealGetContents()\n \n self.fp.seek(self.start_dir)\n except BadZipFile:\n \n self.fp.seek(0,2)\n \n \n \n self._didModify=True\n self.start_dir=self.fp.tell()\n else:\n raise ValueError(\"Mode must be 'r', 'w', 'x', or 'a'\")\n except:\n fp=self.fp\n self.fp=None\n self._fpclose(fp)\n raise\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,traceback):\n self.close()\n \n def __repr__(self):\n result=['<%s.%s'%(self.__class__.__module__,\n self.__class__.__qualname__)]\n if self.fp is not None:\n if self._filePassed:\n result.append(' file=%r'%self.fp)\n elif self.filename is not None:\n result.append(' filename=%r'%self.filename)\n result.append(' mode=%r'%self.mode)\n else:\n result.append(' [closed]')\n result.append('>')\n return ''.join(result)\n \n def _RealGetContents(self):\n ''\n fp=self.fp\n try:\n endrec=_EndRecData(fp)\n except OSError:\n raise BadZipFile(\"File is not a zip file\")\n if not endrec:\n raise BadZipFile(\"File is not a zip file\")\n if self.debug >1:\n print(endrec)\n size_cd=endrec[_ECD_SIZE]\n offset_cd=endrec[_ECD_OFFSET]\n self._comment=endrec[_ECD_COMMENT]\n \n \n concat=endrec[_ECD_LOCATION]-size_cd -offset_cd\n if endrec[_ECD_SIGNATURE]==stringEndArchive64:\n \n concat -=(sizeEndCentDir64+sizeEndCentDir64Locator)\n \n if self.debug >2:\n inferred=concat+offset_cd\n print(\"given, inferred, offset\",offset_cd,inferred,concat)\n \n self.start_dir=offset_cd+concat\n if self.start_dir <0:\n raise BadZipFile(\"Bad offset for central directory\")\n fp.seek(self.start_dir,0)\n data=fp.read(size_cd)\n fp=io.BytesIO(data)\n total=0\n while total 2:\n print(centdir)\n filename=fp.read(centdir[_CD_FILENAME_LENGTH])\n flags=centdir[_CD_FLAG_BITS]\n if 
flags&_MASK_UTF_FILENAME:\n \n filename=filename.decode('utf-8')\n else:\n \n filename=filename.decode(self.metadata_encoding or 'cp437')\n \n x=ZipInfo(filename)\n x.extra=fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])\n x.comment=fp.read(centdir[_CD_COMMENT_LENGTH])\n x.header_offset=centdir[_CD_LOCAL_HEADER_OFFSET]\n (x.create_version,x.create_system,x.extract_version,x.reserved,\n x.flag_bits,x.compress_type,t,d,\n x.CRC,x.compress_size,x.file_size)=centdir[1:12]\n if x.extract_version >MAX_EXTRACT_VERSION:\n raise NotImplementedError(\"zip file version %.1f\"%\n (x.extract_version /10))\n x.volume,x.internal_attr,x.external_attr=centdir[15:18]\n \n x._raw_time=t\n x.date_time=((d >>9)+1980,(d >>5)&0xF,d&0x1F,\n t >>11,(t >>5)&0x3F,(t&0x1F)*2)\n \n x._decodeExtra()\n x.header_offset=x.header_offset+concat\n self.filelist.append(x)\n self.NameToInfo[x.filename]=x\n \n \n total=(total+sizeCentralDir+centdir[_CD_FILENAME_LENGTH]\n +centdir[_CD_EXTRA_FIELD_LENGTH]\n +centdir[_CD_COMMENT_LENGTH])\n \n if self.debug >2:\n print(\"total\",total)\n \n \n def namelist(self):\n ''\n return[data.filename for data in self.filelist]\n \n def infolist(self):\n ''\n \n return self.filelist\n \n def printdir(self,file=None):\n ''\n print(\"%-46s %19s %12s\"%(\"File Name\",\"Modified \",\"Size\"),\n file=file)\n for zinfo in self.filelist:\n date=\"%d-%02d-%02d %02d:%02d:%02d\"%zinfo.date_time[:6]\n print(\"%-46s %s %12d\"%(zinfo.filename,date,zinfo.file_size),\n file=file)\n \n def testzip(self):\n ''\n chunk_size=2 **20\n for zinfo in self.filelist:\n try:\n \n \n with self.open(zinfo.filename,\"r\")as f:\n while f.read(chunk_size):\n pass\n except BadZipFile:\n return zinfo.filename\n \n def getinfo(self,name):\n ''\n info=self.NameToInfo.get(name)\n if info is None:\n raise KeyError(\n 'There is no item named %r in the archive'%name)\n \n return info\n \n def setpassword(self,pwd):\n ''\n if pwd and not isinstance(pwd,bytes):\n raise TypeError(\"pwd: expected bytes, got %s\"%type(pwd).__name__)\n if pwd:\n self.pwd=pwd\n else:\n self.pwd=None\n \n @property\n def comment(self):\n ''\n return self._comment\n \n @comment.setter\n def comment(self,comment):\n if not isinstance(comment,bytes):\n raise TypeError(\"comment: expected bytes, got %s\"%type(comment).__name__)\n \n if len(comment)>ZIP_MAX_COMMENT:\n import warnings\n warnings.warn('Archive comment is too long; truncating to %d bytes'\n %ZIP_MAX_COMMENT,stacklevel=2)\n comment=comment[:ZIP_MAX_COMMENT]\n self._comment=comment\n self._didModify=True\n \n def read(self,name,pwd=None):\n ''\n with self.open(name,\"r\",pwd)as fp:\n return fp.read()\n \n def open(self,name,mode=\"r\",pwd=None,*,force_zip64=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if mode not in{\"r\",\"w\"}:\n raise ValueError('open() requires mode \"r\" or \"w\"')\n if pwd and(mode ==\"w\"):\n raise ValueError(\"pwd is only supported for reading files\")\n if not self.fp:\n raise ValueError(\n \"Attempt to use ZIP archive that was already closed\")\n \n \n if isinstance(name,ZipInfo):\n \n zinfo=name\n elif mode =='w':\n zinfo=ZipInfo(name)\n zinfo.compress_type=self.compression\n zinfo._compresslevel=self.compresslevel\n else:\n \n zinfo=self.getinfo(name)\n \n if mode =='w':\n return self._open_to_write(zinfo,force_zip64=force_zip64)\n \n if self._writing:\n raise ValueError(\"Can't read from the ZIP file while there \"\n \"is an open writing handle on it. 
\"\n \"Close the writing handle before trying to read.\")\n \n \n self._fileRefCnt +=1\n zef_file=_SharedFile(self.fp,zinfo.header_offset,\n self._fpclose,self._lock,lambda:self._writing)\n try:\n \n fheader=zef_file.read(sizeFileHeader)\n if len(fheader)!=sizeFileHeader:\n raise BadZipFile(\"Truncated file header\")\n fheader=struct.unpack(structFileHeader,fheader)\n if fheader[_FH_SIGNATURE]!=stringFileHeader:\n raise BadZipFile(\"Bad magic number for file header\")\n \n fname=zef_file.read(fheader[_FH_FILENAME_LENGTH])\n if fheader[_FH_EXTRA_FIELD_LENGTH]:\n zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])\n \n if zinfo.flag_bits&_MASK_COMPRESSED_PATCH:\n \n raise NotImplementedError(\"compressed patched data (flag bit 5)\")\n \n if zinfo.flag_bits&_MASK_STRONG_ENCRYPTION:\n \n raise NotImplementedError(\"strong encryption (flag bit 6)\")\n \n if fheader[_FH_GENERAL_PURPOSE_FLAG_BITS]&_MASK_UTF_FILENAME:\n \n fname_str=fname.decode(\"utf-8\")\n else:\n fname_str=fname.decode(self.metadata_encoding or \"cp437\")\n \n if fname_str !=zinfo.orig_filename:\n raise BadZipFile(\n 'File name in directory %r and header %r differ.'\n %(zinfo.orig_filename,fname))\n \n \n is_encrypted=zinfo.flag_bits&_MASK_ENCRYPTED\n if is_encrypted:\n if not pwd:\n pwd=self.pwd\n if pwd and not isinstance(pwd,bytes):\n raise TypeError(\"pwd: expected bytes, got %s\"%type(pwd).__name__)\n if not pwd:\n raise RuntimeError(\"File %r is encrypted, password \"\n \"required for extraction\"%name)\n else:\n pwd=None\n \n return ZipExtFile(zef_file,mode,zinfo,pwd,True)\n except:\n zef_file.close()\n raise\n \n def _open_to_write(self,zinfo,force_zip64=False):\n if force_zip64 and not self._allowZip64:\n raise ValueError(\n \"force_zip64 is True, but allowZip64 was False when opening \"\n \"the ZIP file.\"\n )\n if self._writing:\n raise ValueError(\"Can't write to the ZIP file while there is \"\n \"another write handle open on it. 
\"\n \"Close the first handle before opening another.\")\n \n \n zinfo.compress_size=0\n zinfo.CRC=0\n \n zinfo.flag_bits=0x00\n if zinfo.compress_type ==ZIP_LZMA:\n \n zinfo.flag_bits |=_MASK_COMPRESS_OPTION_1\n if not self._seekable:\n zinfo.flag_bits |=_MASK_USE_DATA_DESCRIPTOR\n \n if not zinfo.external_attr:\n zinfo.external_attr=0o600 <<16\n \n \n zip64=self._allowZip64 and\\\n (force_zip64 or zinfo.file_size *1.05 >ZIP64_LIMIT)\n \n if self._seekable:\n self.fp.seek(self.start_dir)\n zinfo.header_offset=self.fp.tell()\n \n self._writecheck(zinfo)\n self._didModify=True\n \n self.fp.write(zinfo.FileHeader(zip64))\n \n self._writing=True\n return _ZipWriteFile(self,zinfo,zip64)\n \n def extract(self,member,path=None,pwd=None):\n ''\n\n\n\n \n if path is None:\n path=os.getcwd()\n else:\n path=os.fspath(path)\n \n return self._extract_member(member,path,pwd)\n \n def extractall(self,path=None,members=None,pwd=None):\n ''\n\n\n\n \n if members is None:\n members=self.namelist()\n \n if path is None:\n path=os.getcwd()\n else:\n path=os.fspath(path)\n \n for zipinfo in members:\n self._extract_member(zipinfo,path,pwd)\n \n @classmethod\n def _sanitize_windows_name(cls,arcname,pathsep):\n ''\n table=cls._windows_illegal_name_trans_table\n if not table:\n illegal=':<>|\"?*'\n table=str.maketrans(illegal,'_'*len(illegal))\n cls._windows_illegal_name_trans_table=table\n arcname=arcname.translate(table)\n \n arcname=(x.rstrip('.')for x in arcname.split(pathsep))\n \n arcname=pathsep.join(x for x in arcname if x)\n return arcname\n \n def _extract_member(self,member,targetpath,pwd):\n ''\n\n \n if not isinstance(member,ZipInfo):\n member=self.getinfo(member)\n \n \n \n arcname=member.filename.replace('/',os.path.sep)\n \n if os.path.altsep:\n arcname=arcname.replace(os.path.altsep,os.path.sep)\n \n \n arcname=os.path.splitdrive(arcname)[1]\n invalid_path_parts=('',os.path.curdir,os.path.pardir)\n arcname=os.path.sep.join(x for x in arcname.split(os.path.sep)\n if x not in invalid_path_parts)\n if os.path.sep =='\\\\':\n \n arcname=self._sanitize_windows_name(arcname,os.path.sep)\n \n targetpath=os.path.join(targetpath,arcname)\n targetpath=os.path.normpath(targetpath)\n \n \n upperdirs=os.path.dirname(targetpath)\n if upperdirs and not os.path.exists(upperdirs):\n os.makedirs(upperdirs)\n \n if member.is_dir():\n if not os.path.isdir(targetpath):\n os.mkdir(targetpath)\n return targetpath\n \n with self.open(member,pwd=pwd)as source,\\\n open(targetpath,\"wb\")as target:\n shutil.copyfileobj(source,target)\n \n return targetpath\n \n def _writecheck(self,zinfo):\n ''\n if zinfo.filename in self.NameToInfo:\n import warnings\n warnings.warn('Duplicate name: %r'%zinfo.filename,stacklevel=3)\n if self.mode not in('w','x','a'):\n raise ValueError(\"write() requires mode 'w', 'x', or 'a'\")\n if not self.fp:\n raise ValueError(\n \"Attempt to write ZIP archive that was already closed\")\n _check_compression(zinfo.compress_type)\n if not self._allowZip64:\n requires_zip64=None\n if len(self.filelist)>=ZIP_FILECOUNT_LIMIT:\n requires_zip64=\"Files count\"\n elif zinfo.file_size >ZIP64_LIMIT:\n requires_zip64=\"Filesize\"\n elif zinfo.header_offset >ZIP64_LIMIT:\n requires_zip64=\"Zipfile size\"\n if requires_zip64:\n raise LargeZipFile(requires_zip64+\n \" would require ZIP64 extensions\")\n \n def write(self,filename,arcname=None,\n compress_type=None,compresslevel=None):\n ''\n \n if not self.fp:\n raise ValueError(\n \"Attempt to write to ZIP archive that was already closed\")\n if 
self._writing:\n raise ValueError(\n \"Can't write to ZIP archive while an open writing handle exists\"\n )\n \n zinfo=ZipInfo.from_file(filename,arcname,\n strict_timestamps=self._strict_timestamps)\n \n if zinfo.is_dir():\n zinfo.compress_size=0\n zinfo.CRC=0\n self.mkdir(zinfo)\n else:\n if compress_type is not None:\n zinfo.compress_type=compress_type\n else:\n zinfo.compress_type=self.compression\n \n if compresslevel is not None:\n zinfo._compresslevel=compresslevel\n else:\n zinfo._compresslevel=self.compresslevel\n \n with open(filename,\"rb\")as src,self.open(zinfo,'w')as dest:\n shutil.copyfileobj(src,dest,1024 *8)\n \n def writestr(self,zinfo_or_arcname,data,\n compress_type=None,compresslevel=None):\n ''\n\n\n\n \n if isinstance(data,str):\n data=data.encode(\"utf-8\")\n if not isinstance(zinfo_or_arcname,ZipInfo):\n zinfo=ZipInfo(filename=zinfo_or_arcname,\n date_time=time.localtime(time.time())[:6])\n zinfo.compress_type=self.compression\n zinfo._compresslevel=self.compresslevel\n if zinfo.filename[-1]=='/':\n zinfo.external_attr=0o40775 <<16\n zinfo.external_attr |=0x10\n else:\n zinfo.external_attr=0o600 <<16\n else:\n zinfo=zinfo_or_arcname\n \n if not self.fp:\n raise ValueError(\n \"Attempt to write to ZIP archive that was already closed\")\n if self._writing:\n raise ValueError(\n \"Can't write to ZIP archive while an open writing handle exists.\"\n )\n \n if compress_type is not None:\n zinfo.compress_type=compress_type\n \n if compresslevel is not None:\n zinfo._compresslevel=compresslevel\n \n zinfo.file_size=len(data)\n with self._lock:\n with self.open(zinfo,mode='w')as dest:\n dest.write(data)\n \n def mkdir(self,zinfo_or_directory_name,mode=511):\n ''\n if isinstance(zinfo_or_directory_name,ZipInfo):\n zinfo=zinfo_or_directory_name\n if not zinfo.is_dir():\n raise ValueError(\"The given ZipInfo does not describe a directory\")\n elif isinstance(zinfo_or_directory_name,str):\n directory_name=zinfo_or_directory_name\n if not directory_name.endswith(\"/\"):\n directory_name +=\"/\"\n zinfo=ZipInfo(directory_name)\n zinfo.compress_size=0\n zinfo.CRC=0\n zinfo.external_attr=((0o40000 |mode)&0xFFFF)<<16\n zinfo.file_size=0\n zinfo.external_attr |=0x10\n else:\n raise TypeError(\"Expected type str or ZipInfo\")\n \n with self._lock:\n if self._seekable:\n self.fp.seek(self.start_dir)\n zinfo.header_offset=self.fp.tell()\n if zinfo.compress_type ==ZIP_LZMA:\n \n zinfo.flag_bits |=_MASK_COMPRESS_OPTION_1\n \n self._writecheck(zinfo)\n self._didModify=True\n \n self.filelist.append(zinfo)\n self.NameToInfo[zinfo.filename]=zinfo\n self.fp.write(zinfo.FileHeader(False))\n self.start_dir=self.fp.tell()\n \n def __del__(self):\n ''\n self.close()\n \n def close(self):\n ''\n \n if self.fp is None:\n return\n \n if self._writing:\n raise ValueError(\"Can't close the ZIP file while there is \"\n \"an open writing handle on it. 
\"\n \"Close the writing handle before closing the zip.\")\n \n try:\n if self.mode in('w','x','a')and self._didModify:\n with self._lock:\n if self._seekable:\n self.fp.seek(self.start_dir)\n self._write_end_record()\n finally:\n fp=self.fp\n self.fp=None\n self._fpclose(fp)\n \n def _write_end_record(self):\n for zinfo in self.filelist:\n dt=zinfo.date_time\n dosdate=(dt[0]-1980)<<9 |dt[1]<<5 |dt[2]\n dostime=dt[3]<<11 |dt[4]<<5 |(dt[5]//2)\n extra=[]\n if zinfo.file_size >ZIP64_LIMIT\\\n or zinfo.compress_size >ZIP64_LIMIT:\n extra.append(zinfo.file_size)\n extra.append(zinfo.compress_size)\n file_size=0xffffffff\n compress_size=0xffffffff\n else:\n file_size=zinfo.file_size\n compress_size=zinfo.compress_size\n \n if zinfo.header_offset >ZIP64_LIMIT:\n extra.append(zinfo.header_offset)\n header_offset=0xffffffff\n else:\n header_offset=zinfo.header_offset\n \n extra_data=zinfo.extra\n min_version=0\n if extra:\n \n extra_data=_strip_extra(extra_data,(1,))\n extra_data=struct.pack(\n 'ZIP_FILECOUNT_LIMIT:\n requires_zip64=\"Files count\"\n elif centDirOffset >ZIP64_LIMIT:\n requires_zip64=\"Central directory offset\"\n elif centDirSize >ZIP64_LIMIT:\n requires_zip64=\"Central directory size\"\n if requires_zip64:\n \n if not self._allowZip64:\n raise LargeZipFile(requires_zip64+\n \" would require ZIP64 extensions\")\n zip64endrec=struct.pack(\n structEndArchive64,stringEndArchive64,\n 44,45,45,0,0,centDirCount,centDirCount,\n centDirSize,centDirOffset)\n self.fp.write(zip64endrec)\n \n zip64locrec=struct.pack(\n structEndArchive64Locator,\n stringEndArchive64Locator,0,pos2,1)\n self.fp.write(zip64locrec)\n centDirCount=min(centDirCount,0xFFFF)\n centDirSize=min(centDirSize,0xFFFFFFFF)\n centDirOffset=min(centDirOffset,0xFFFFFFFF)\n \n endrec=struct.pack(structEndArchive,stringEndArchive,\n 0,0,centDirCount,centDirCount,\n centDirSize,centDirOffset,len(self._comment))\n self.fp.write(endrec)\n self.fp.write(self._comment)\n if self.mode ==\"a\":\n self.fp.truncate()\n self.fp.flush()\n \n def _fpclose(self,fp):\n assert self._fileRefCnt >0\n self._fileRefCnt -=1\n if not self._fileRefCnt and not self._filePassed:\n fp.close()\n \n \nclass PyZipFile(ZipFile):\n ''\n \n def __init__(self,file,mode=\"r\",compression=ZIP_STORED,\n allowZip64=True,optimize=-1):\n ZipFile.__init__(self,file,mode=mode,compression=compression,\n allowZip64=allowZip64)\n self._optimize=optimize\n \n def writepy(self,pathname,basename=\"\",filterfunc=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n pathname=os.fspath(pathname)\n if filterfunc and not filterfunc(pathname):\n if self.debug:\n label='path'if os.path.isdir(pathname)else 'file'\n print('%s %r skipped by filterfunc'%(label,pathname))\n return\n dir,name=os.path.split(pathname)\n if os.path.isdir(pathname):\n initname=os.path.join(pathname,\"__init__.py\")\n if os.path.isfile(initname):\n \n if basename:\n basename=\"%s/%s\"%(basename,name)\n else:\n basename=name\n if self.debug:\n print(\"Adding package in\",pathname,\"as\",basename)\n fname,arcname=self._get_codename(initname[0:-3],basename)\n if self.debug:\n print(\"Adding\",arcname)\n self.write(fname,arcname)\n dirlist=sorted(os.listdir(pathname))\n dirlist.remove(\"__init__.py\")\n \n for filename in dirlist:\n path=os.path.join(pathname,filename)\n root,ext=os.path.splitext(filename)\n if os.path.isdir(path):\n if os.path.isfile(os.path.join(path,\"__init__.py\")):\n \n self.writepy(path,basename,\n filterfunc=filterfunc)\n elif ext ==\".py\":\n if filterfunc and not filterfunc(path):\n if self.debug:\n 
print('file %r skipped by filterfunc'%path)\n continue\n fname,arcname=self._get_codename(path[0:-3],\n basename)\n if self.debug:\n print(\"Adding\",arcname)\n self.write(fname,arcname)\n else:\n \n if self.debug:\n print(\"Adding files from directory\",pathname)\n for filename in sorted(os.listdir(pathname)):\n path=os.path.join(pathname,filename)\n root,ext=os.path.splitext(filename)\n if ext ==\".py\":\n if filterfunc and not filterfunc(path):\n if self.debug:\n print('file %r skipped by filterfunc'%path)\n continue\n fname,arcname=self._get_codename(path[0:-3],\n basename)\n if self.debug:\n print(\"Adding\",arcname)\n self.write(fname,arcname)\n else:\n if pathname[-3:]!=\".py\":\n raise RuntimeError(\n 'Files added with writepy() must end with \".py\"')\n fname,arcname=self._get_codename(pathname[0:-3],basename)\n if self.debug:\n print(\"Adding file\",arcname)\n self.write(fname,arcname)\n \n def _get_codename(self,pathname,basename):\n ''\n\n\n\n\n \n def _compile(file,optimize=-1):\n import py_compile\n if self.debug:\n print(\"Compiling\",file)\n try:\n py_compile.compile(file,doraise=True,optimize=optimize)\n except py_compile.PyCompileError as err:\n print(err.msg)\n return False\n return True\n \n file_py=pathname+\".py\"\n file_pyc=pathname+\".pyc\"\n pycache_opt0=importlib.util.cache_from_source(file_py,optimization='')\n pycache_opt1=importlib.util.cache_from_source(file_py,optimization=1)\n pycache_opt2=importlib.util.cache_from_source(file_py,optimization=2)\n if self._optimize ==-1:\n \n if(os.path.isfile(file_pyc)and\n os.stat(file_pyc).st_mtime >=os.stat(file_py).st_mtime):\n \n arcname=fname=file_pyc\n elif(os.path.isfile(pycache_opt0)and\n os.stat(pycache_opt0).st_mtime >=os.stat(file_py).st_mtime):\n \n \n fname=pycache_opt0\n arcname=file_pyc\n elif(os.path.isfile(pycache_opt1)and\n os.stat(pycache_opt1).st_mtime >=os.stat(file_py).st_mtime):\n \n \n fname=pycache_opt1\n arcname=file_pyc\n elif(os.path.isfile(pycache_opt2)and\n os.stat(pycache_opt2).st_mtime >=os.stat(file_py).st_mtime):\n \n \n fname=pycache_opt2\n arcname=file_pyc\n else:\n \n if _compile(file_py):\n if sys.flags.optimize ==0:\n fname=pycache_opt0\n elif sys.flags.optimize ==1:\n fname=pycache_opt1\n else:\n fname=pycache_opt2\n arcname=file_pyc\n else:\n fname=arcname=file_py\n else:\n \n if self._optimize ==0:\n fname=pycache_opt0\n arcname=file_pyc\n else:\n arcname=file_pyc\n if self._optimize ==1:\n fname=pycache_opt1\n elif self._optimize ==2:\n fname=pycache_opt2\n else:\n msg=\"invalid value for 'optimize': {!r}\".format(self._optimize)\n raise ValueError(msg)\n if not(os.path.isfile(fname)and\n os.stat(fname).st_mtime >=os.stat(file_py).st_mtime):\n if not _compile(file_py,optimize=self._optimize):\n fname=arcname=file_py\n archivename=os.path.split(arcname)[1]\n if basename:\n archivename=\"%s/%s\"%(basename,archivename)\n return(fname,archivename)\n \n \ndef _parents(path):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return itertools.islice(_ancestry(path),1,None)\n \n \ndef _ancestry(path):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n path=path.rstrip(posixpath.sep)\n while path and path !=posixpath.sep:\n yield path\n path,tail=posixpath.split(path)\n \n \n_dedupe=dict.fromkeys\n''\n\n\ndef _difference(minuend,subtrahend):\n ''\n\n\n \n return itertools.filterfalse(set(subtrahend).__contains__,minuend)\n \n \nclass CompleteDirs(ZipFile):\n ''\n\n\n \n \n @staticmethod\n def _implied_dirs(names):\n parents=itertools.chain.from_iterable(map(_parents,names))\n as_dirs=(p+posixpath.sep for p in 
parents)\n return _dedupe(_difference(as_dirs,names))\n \n def namelist(self):\n names=super(CompleteDirs,self).namelist()\n return names+list(self._implied_dirs(names))\n \n def _name_set(self):\n return set(self.namelist())\n \n def resolve_dir(self,name):\n ''\n\n\n \n names=self._name_set()\n dirname=name+'/'\n dir_match=name not in names and dirname in names\n return dirname if dir_match else name\n \n @classmethod\n def make(cls,source):\n ''\n\n\n \n if isinstance(source,CompleteDirs):\n return source\n \n if not isinstance(source,ZipFile):\n return cls(source)\n \n \n if 'r'not in source.mode:\n cls=CompleteDirs\n \n source.__class__=cls\n return source\n \n \nclass FastLookup(CompleteDirs):\n ''\n\n\n \n \n def namelist(self):\n with contextlib.suppress(AttributeError):\n return self.__names\n self.__names=super(FastLookup,self).namelist()\n return self.__names\n \n def _name_set(self):\n with contextlib.suppress(AttributeError):\n return self.__lookup\n self.__lookup=super(FastLookup,self)._name_set()\n return self.__lookup\n \n \nclass Path:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __repr=\"{self.__class__.__name__}({self.root.filename!r}, {self.at!r})\"\n \n def __init__(self,root,at=\"\"):\n ''\n\n\n\n\n\n\n\n \n self.root=FastLookup.make(root)\n self.at=at\n \n def open(self,mode='r',*args,pwd=None,**kwargs):\n ''\n\n\n\n \n if self.is_dir():\n raise IsADirectoryError(self)\n zip_mode=mode[0]\n if not self.exists()and zip_mode =='r':\n raise FileNotFoundError(self)\n stream=self.root.open(self.at,zip_mode,pwd=pwd)\n if 'b'in mode:\n if args or kwargs:\n raise ValueError(\"encoding args invalid for binary operation\")\n return stream\n else:\n kwargs[\"encoding\"]=io.text_encoding(kwargs.get(\"encoding\"))\n return io.TextIOWrapper(stream,*args,**kwargs)\n \n @property\n def name(self):\n return pathlib.Path(self.at).name or self.filename.name\n \n @property\n def suffix(self):\n return pathlib.Path(self.at).suffix or self.filename.suffix\n \n @property\n def suffixes(self):\n return pathlib.Path(self.at).suffixes or self.filename.suffixes\n \n @property\n def stem(self):\n return pathlib.Path(self.at).stem or self.filename.stem\n \n @property\n def filename(self):\n return pathlib.Path(self.root.filename).joinpath(self.at)\n \n def read_text(self,*args,**kwargs):\n kwargs[\"encoding\"]=io.text_encoding(kwargs.get(\"encoding\"))\n with self.open('r',*args,**kwargs)as strm:\n return strm.read()\n \n def read_bytes(self):\n with self.open('rb')as strm:\n return strm.read()\n \n def _is_child(self,path):\n return posixpath.dirname(path.at.rstrip(\"/\"))==self.at.rstrip(\"/\")\n \n def _next(self,at):\n return self.__class__(self.root,at)\n \n def is_dir(self):\n return not self.at or self.at.endswith(\"/\")\n \n def is_file(self):\n return self.exists()and not self.is_dir()\n \n def exists(self):\n return self.at in self.root._name_set()\n \n def iterdir(self):\n if not self.is_dir():\n raise ValueError(\"Can't listdir a file\")\n subs=map(self._next,self.root.namelist())\n return filter(self._is_child,subs)\n \n def __str__(self):\n return posixpath.join(self.root.filename,self.at)\n \n def __repr__(self):\n return self.__repr.format(self=self)\n \n def joinpath(self,*other):\n next=posixpath.join(self.at,*other)\n return self._next(self.root.resolve_dir(next))\n \n __truediv__=joinpath\n \n @property\n def parent(self):\n if not self.at:\n return 
self.filename.parent\n parent_at=posixpath.dirname(self.at.rstrip('/'))\n if parent_at:\n parent_at +='/'\n return self._next(parent_at)\n \n \ndef main(args=None):\n import argparse\n \n description='A simple command-line interface for zipfile module.'\n parser=argparse.ArgumentParser(description=description)\n group=parser.add_mutually_exclusive_group(required=True)\n group.add_argument('-l','--list',metavar='',\n help='Show listing of a zipfile')\n group.add_argument('-e','--extract',nargs=2,\n metavar=('',''),\n help='Extract zipfile into target dir')\n group.add_argument('-c','--create',nargs='+',\n metavar=('',''),\n help='Create zipfile from sources')\n group.add_argument('-t','--test',metavar='',\n help='Test if a zipfile is valid')\n parser.add_argument('--metadata-encoding',metavar='',\n help='Specify encoding of member names for -l, -e and -t')\n args=parser.parse_args(args)\n \n encoding=args.metadata_encoding\n \n if args.test is not None:\n src=args.test\n with ZipFile(src,'r',metadata_encoding=encoding)as zf:\n badfile=zf.testzip()\n if badfile:\n print(\"The following enclosed file is corrupted: {!r}\".format(badfile))\n print(\"Done testing\")\n \n elif args.list is not None:\n src=args.list\n with ZipFile(src,'r',metadata_encoding=encoding)as zf:\n zf.printdir()\n \n elif args.extract is not None:\n src,curdir=args.extract\n with ZipFile(src,'r',metadata_encoding=encoding)as zf:\n zf.extractall(curdir)\n \n elif args.create is not None:\n if encoding:\n print(\"Non-conforming encodings not supported with -c.\",\n file=sys.stderr)\n sys.exit(1)\n \n zip_name=args.create.pop(0)\n files=args.create\n \n def addToZip(zf,path,zippath):\n if os.path.isfile(path):\n zf.write(path,zippath,ZIP_DEFLATED)\n elif os.path.isdir(path):\n if zippath:\n zf.write(path,zippath)\n for nm in sorted(os.listdir(path)):\n addToZip(zf,\n os.path.join(path,nm),os.path.join(zippath,nm))\n \n \n with ZipFile(zip_name,'w')as zf:\n for path in files:\n zippath=os.path.basename(path)\n if not zippath:\n zippath=os.path.basename(os.path.dirname(path))\n if zippath in('',os.curdir,os.pardir):\n zippath=''\n addToZip(zf,path,zippath)\n \n \nif __name__ ==\"__main__\":\n main()\n", ["argparse", "binascii", "bz2", "contextlib", "importlib.util", "io", "itertools", "lzma", "os", "pathlib", "posixpath", "py_compile", "shutil", "stat", "struct", "sys", "threading", "time", "warnings", "zlib"]], "shutil": [".py", "''\n\n\n\n\n\nimport os\nimport sys\nimport stat\nimport fnmatch\nimport collections\nimport errno\nimport warnings\n\ntry:\n import zlib\n del zlib\n _ZLIB_SUPPORTED=True\nexcept ImportError:\n _ZLIB_SUPPORTED=False\n \ntry:\n import bz2\n del bz2\n _BZ2_SUPPORTED=True\nexcept ImportError:\n _BZ2_SUPPORTED=False\n \ntry:\n import lzma\n del lzma\n _LZMA_SUPPORTED=True\nexcept ImportError:\n _LZMA_SUPPORTED=False\n \n_WINDOWS=os.name =='nt'\nposix=nt=None\nif os.name =='posix':\n import posix\nelif _WINDOWS:\n import nt\n \nif sys.platform =='win32':\n import _winapi\nelse:\n _winapi=None\n \nCOPY_BUFSIZE=1024 *1024 if _WINDOWS else 64 *1024\n\n\n_USE_CP_SENDFILE=hasattr(os,\"sendfile\")and sys.platform.startswith(\"linux\")\n_HAS_FCOPYFILE=posix and 
hasattr(posix,\"_fcopyfile\")\n\n\n_WIN_DEFAULT_PATHEXT=\".COM;.EXE;.BAT;.CMD;.VBS;.JS;.WS;.MSC\"\n\n__all__=[\"copyfileobj\",\"copyfile\",\"copymode\",\"copystat\",\"copy\",\"copy2\",\n\"copytree\",\"move\",\"rmtree\",\"Error\",\"SpecialFileError\",\n\"ExecError\",\"make_archive\",\"get_archive_formats\",\n\"register_archive_format\",\"unregister_archive_format\",\n\"get_unpack_formats\",\"register_unpack_format\",\n\"unregister_unpack_format\",\"unpack_archive\",\n\"ignore_patterns\",\"chown\",\"which\",\"get_terminal_size\",\n\"SameFileError\"]\n\n\nclass Error(OSError):\n pass\n \nclass SameFileError(Error):\n ''\n \nclass SpecialFileError(OSError):\n ''\n \n \nclass ExecError(OSError):\n ''\n \nclass ReadError(OSError):\n ''\n \nclass RegistryError(Exception):\n ''\n \n \nclass _GiveupOnFastCopy(Exception):\n ''\n\n \n \ndef _fastcopy_fcopyfile(fsrc,fdst,flags):\n ''\n\n \n try:\n infd=fsrc.fileno()\n outfd=fdst.fileno()\n except Exception as err:\n raise _GiveupOnFastCopy(err)\n \n try:\n posix._fcopyfile(infd,outfd,flags)\n except OSError as err:\n err.filename=fsrc.name\n err.filename2=fdst.name\n if err.errno in{errno.EINVAL,errno.ENOTSUP}:\n raise _GiveupOnFastCopy(err)\n else:\n raise err from None\n \ndef _fastcopy_sendfile(fsrc,fdst):\n ''\n\n\n \n \n \n \n \n \n \n \n \n \n global _USE_CP_SENDFILE\n try:\n infd=fsrc.fileno()\n outfd=fdst.fileno()\n except Exception as err:\n raise _GiveupOnFastCopy(err)\n \n \n \n \n \n \n try:\n blocksize=max(os.fstat(infd).st_size,2 **23)\n except OSError:\n blocksize=2 **27\n \n \n if sys.maxsize <2 **32:\n blocksize=min(blocksize,2 **30)\n \n offset=0\n while True:\n try:\n sent=os.sendfile(outfd,infd,offset,blocksize)\n except OSError as err:\n \n err.filename=fsrc.name\n err.filename2=fdst.name\n \n if err.errno ==errno.ENOTSOCK:\n \n \n \n _USE_CP_SENDFILE=False\n raise _GiveupOnFastCopy(err)\n \n if err.errno ==errno.ENOSPC:\n raise err from None\n \n \n if offset ==0 and os.lseek(outfd,0,os.SEEK_CUR)==0:\n raise _GiveupOnFastCopy(err)\n \n raise err\n else:\n if sent ==0:\n break\n offset +=sent\n \ndef _copyfileobj_readinto(fsrc,fdst,length=COPY_BUFSIZE):\n ''\n\n\n \n \n fsrc_readinto=fsrc.readinto\n fdst_write=fdst.write\n with memoryview(bytearray(length))as mv:\n while True:\n n=fsrc_readinto(mv)\n if not n:\n break\n elif n 0:\n _copyfileobj_readinto(fsrc,fdst,min(file_size,COPY_BUFSIZE))\n return dst\n \n copyfileobj(fsrc,fdst)\n \n \n except IsADirectoryError as e:\n if not os.path.exists(dst):\n raise FileNotFoundError(f'Directory does not exist: {dst}')from e\n else:\n raise\n \n return dst\n \ndef copymode(src,dst,*,follow_symlinks=True):\n ''\n\n\n\n\n\n \n sys.audit(\"shutil.copymode\",src,dst)\n \n if not follow_symlinks and _islink(src)and os.path.islink(dst):\n if hasattr(os,'lchmod'):\n stat_func,chmod_func=os.lstat,os.lchmod\n else:\n return\n else:\n stat_func,chmod_func=_stat,os.chmod\n \n st=stat_func(src)\n chmod_func(dst,stat.S_IMODE(st.st_mode))\n \nif hasattr(os,'listxattr'):\n def _copyxattr(src,dst,*,follow_symlinks=True):\n ''\n\n\n\n\n\n \n \n try:\n names=os.listxattr(src,follow_symlinks=follow_symlinks)\n except OSError as e:\n if e.errno not in(errno.ENOTSUP,errno.ENODATA,errno.EINVAL):\n raise\n return\n for name in names:\n try:\n value=os.getxattr(src,name,follow_symlinks=follow_symlinks)\n os.setxattr(dst,name,value,follow_symlinks=follow_symlinks)\n except OSError as e:\n if e.errno not in(errno.EPERM,errno.ENOTSUP,errno.ENODATA,\n errno.EINVAL,errno.EACCES):\n raise\nelse:\n def 
_copyxattr(*args,**kwargs):\n pass\n \ndef copystat(src,dst,*,follow_symlinks=True):\n ''\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.copystat\",src,dst)\n \n def _nop(*args,ns=None,follow_symlinks=None):\n pass\n \n \n follow=follow_symlinks or not(_islink(src)and os.path.islink(dst))\n if follow:\n \n def lookup(name):\n return getattr(os,name,_nop)\n else:\n \n \n def lookup(name):\n fn=getattr(os,name,_nop)\n if fn in os.supports_follow_symlinks:\n return fn\n return _nop\n \n if isinstance(src,os.DirEntry):\n st=src.stat(follow_symlinks=follow)\n else:\n st=lookup(\"stat\")(src,follow_symlinks=follow)\n mode=stat.S_IMODE(st.st_mode)\n lookup(\"utime\")(dst,ns=(st.st_atime_ns,st.st_mtime_ns),\n follow_symlinks=follow)\n \n \n _copyxattr(src,dst,follow_symlinks=follow)\n try:\n lookup(\"chmod\")(dst,mode,follow_symlinks=follow)\n except NotImplementedError:\n \n \n \n \n \n \n \n \n \n \n pass\n if hasattr(st,'st_flags'):\n try:\n lookup(\"chflags\")(dst,st.st_flags,follow_symlinks=follow)\n except OSError as why:\n for err in 'EOPNOTSUPP','ENOTSUP':\n if hasattr(errno,err)and why.errno ==getattr(errno,err):\n break\n else:\n raise\n \ndef copy(src,dst,*,follow_symlinks=True):\n ''\n\n\n\n\n\n\n\n\n\n \n if os.path.isdir(dst):\n dst=os.path.join(dst,os.path.basename(src))\n copyfile(src,dst,follow_symlinks=follow_symlinks)\n copymode(src,dst,follow_symlinks=follow_symlinks)\n return dst\n \ndef copy2(src,dst,*,follow_symlinks=True):\n ''\n\n\n\n\n\n\n\n\n \n if os.path.isdir(dst):\n dst=os.path.join(dst,os.path.basename(src))\n \n if hasattr(_winapi,\"CopyFile2\"):\n src_=os.fsdecode(src)\n dst_=os.fsdecode(dst)\n flags=_winapi.COPY_FILE_ALLOW_DECRYPTED_DESTINATION\n if not follow_symlinks:\n flags |=_winapi.COPY_FILE_COPY_SYMLINK\n try:\n _winapi.CopyFile2(src_,dst_,flags)\n return dst\n except OSError as exc:\n if(exc.winerror ==_winapi.ERROR_PRIVILEGE_NOT_HELD\n and not follow_symlinks):\n \n \n pass\n elif exc.winerror ==_winapi.ERROR_ACCESS_DENIED:\n \n \n pass\n else:\n raise\n \n copyfile(src,dst,follow_symlinks=follow_symlinks)\n copystat(src,dst,follow_symlinks=follow_symlinks)\n return dst\n \ndef ignore_patterns(*patterns):\n ''\n\n\n \n def _ignore_patterns(path,names):\n ignored_names=[]\n for pattern in patterns:\n ignored_names.extend(fnmatch.filter(names,pattern))\n return set(ignored_names)\n return _ignore_patterns\n \ndef _copytree(entries,src,dst,symlinks,ignore,copy_function,\nignore_dangling_symlinks,dirs_exist_ok=False):\n if ignore is not None:\n ignored_names=ignore(os.fspath(src),[x.name for x in entries])\n else:\n ignored_names=set()\n \n os.makedirs(dst,exist_ok=dirs_exist_ok)\n errors=[]\n use_srcentry=copy_function is copy2 or copy_function is copy\n \n for srcentry in entries:\n if srcentry.name in ignored_names:\n continue\n srcname=os.path.join(src,srcentry.name)\n dstname=os.path.join(dst,srcentry.name)\n srcobj=srcentry if use_srcentry else srcname\n try:\n is_symlink=srcentry.is_symlink()\n if is_symlink and os.name =='nt':\n \n \n lstat=srcentry.stat(follow_symlinks=False)\n if lstat.st_reparse_tag ==stat.IO_REPARSE_TAG_MOUNT_POINT:\n is_symlink=False\n if is_symlink:\n linkto=os.readlink(srcname)\n if symlinks:\n \n \n \n os.symlink(linkto,dstname)\n copystat(srcobj,dstname,follow_symlinks=not symlinks)\n else:\n \n if not os.path.exists(linkto)and ignore_dangling_symlinks:\n continue\n \n if srcentry.is_dir():\n copytree(srcobj,dstname,symlinks,ignore,\n copy_function,ignore_dangling_symlinks,\n dirs_exist_ok)\n else:\n 
copy_function(srcobj,dstname)\n elif srcentry.is_dir():\n copytree(srcobj,dstname,symlinks,ignore,copy_function,\n ignore_dangling_symlinks,dirs_exist_ok)\n else:\n \n copy_function(srcobj,dstname)\n \n \n except Error as err:\n errors.extend(err.args[0])\n except OSError as why:\n errors.append((srcname,dstname,str(why)))\n try:\n copystat(src,dst)\n except OSError as why:\n \n if getattr(why,'winerror',None)is None:\n errors.append((src,dst,str(why)))\n if errors:\n raise Error(errors)\n return dst\n \ndef copytree(src,dst,symlinks=False,ignore=None,copy_function=copy2,\nignore_dangling_symlinks=False,dirs_exist_ok=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.copytree\",src,dst)\n with os.scandir(src)as itr:\n entries=list(itr)\n return _copytree(entries=entries,src=src,dst=dst,symlinks=symlinks,\n ignore=ignore,copy_function=copy_function,\n ignore_dangling_symlinks=ignore_dangling_symlinks,\n dirs_exist_ok=dirs_exist_ok)\n \nif hasattr(os.stat_result,'st_file_attributes'):\n def _rmtree_islink(path):\n try:\n st=os.lstat(path)\n return(stat.S_ISLNK(st.st_mode)or\n (st.st_file_attributes&stat.FILE_ATTRIBUTE_REPARSE_POINT\n and st.st_reparse_tag ==stat.IO_REPARSE_TAG_MOUNT_POINT))\n except OSError:\n return False\nelse:\n def _rmtree_islink(path):\n return os.path.islink(path)\n \n \ndef _rmtree_unsafe(path,onexc):\n try:\n with os.scandir(path)as scandir_it:\n entries=list(scandir_it)\n except OSError as err:\n onexc(os.scandir,path,err)\n entries=[]\n for entry in entries:\n fullname=entry.path\n try:\n is_dir=entry.is_dir(follow_symlinks=False)\n except OSError:\n is_dir=False\n \n if is_dir and not entry.is_junction():\n try:\n if entry.is_symlink():\n \n \n \n raise OSError(\"Cannot call rmtree on a symbolic link\")\n except OSError as err:\n onexc(os.path.islink,fullname,err)\n continue\n _rmtree_unsafe(fullname,onexc)\n else:\n try:\n os.unlink(fullname)\n except OSError as err:\n onexc(os.unlink,fullname,err)\n try:\n os.rmdir(path)\n except OSError as err:\n onexc(os.rmdir,path,err)\n \n \ndef _rmtree_safe_fd(topfd,path,onexc):\n try:\n with os.scandir(topfd)as scandir_it:\n entries=list(scandir_it)\n except OSError as err:\n err.filename=path\n onexc(os.scandir,path,err)\n return\n for entry in entries:\n fullname=os.path.join(path,entry.name)\n try:\n is_dir=entry.is_dir(follow_symlinks=False)\n except OSError:\n is_dir=False\n else:\n if is_dir:\n try:\n orig_st=entry.stat(follow_symlinks=False)\n is_dir=stat.S_ISDIR(orig_st.st_mode)\n except OSError as err:\n onexc(os.lstat,fullname,err)\n continue\n if is_dir:\n try:\n dirfd=os.open(entry.name,os.O_RDONLY,dir_fd=topfd)\n dirfd_closed=False\n except OSError as err:\n onexc(os.open,fullname,err)\n else:\n try:\n if os.path.samestat(orig_st,os.fstat(dirfd)):\n _rmtree_safe_fd(dirfd,fullname,onexc)\n try:\n os.close(dirfd)\n dirfd_closed=True\n os.rmdir(entry.name,dir_fd=topfd)\n except OSError as err:\n onexc(os.rmdir,fullname,err)\n else:\n try:\n \n \n \n raise OSError(\"Cannot call rmtree on a symbolic \"\n \"link\")\n except OSError as err:\n onexc(os.path.islink,fullname,err)\n finally:\n if not dirfd_closed:\n os.close(dirfd)\n else:\n try:\n os.unlink(entry.name,dir_fd=topfd)\n except OSError as err:\n onexc(os.unlink,fullname,err)\n \n_use_fd_functions=({os.open,os.stat,os.unlink,os.rmdir}<=\nos.supports_dir_fd and\nos.scandir in os.supports_fd and\nos.stat in os.supports_follow_symlinks)\n\ndef 
rmtree(path,ignore_errors=False,onerror=None,*,onexc=None,dir_fd=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if onerror is not None:\n warnings.warn(\"onerror argument is deprecated, use onexc instead\",\n DeprecationWarning,stacklevel=2)\n \n sys.audit(\"shutil.rmtree\",path,dir_fd)\n if ignore_errors:\n def onexc(*args):\n pass\n elif onerror is None and onexc is None:\n def onexc(*args):\n raise\n elif onexc is None:\n if onerror is None:\n def onexc(*args):\n raise\n else:\n \n def onexc(*args):\n func,path,exc=args\n if exc is None:\n exc_info=None,None,None\n else:\n exc_info=type(exc),exc,exc.__traceback__\n return onerror(func,path,exc_info)\n \n if _use_fd_functions:\n \n if isinstance(path,bytes):\n path=os.fsdecode(path)\n \n \n try:\n orig_st=os.lstat(path,dir_fd=dir_fd)\n except Exception as err:\n onexc(os.lstat,path,err)\n return\n try:\n fd=os.open(path,os.O_RDONLY,dir_fd=dir_fd)\n fd_closed=False\n except Exception as err:\n onexc(os.open,path,err)\n return\n try:\n if os.path.samestat(orig_st,os.fstat(fd)):\n _rmtree_safe_fd(fd,path,onexc)\n try:\n os.close(fd)\n fd_closed=True\n os.rmdir(path,dir_fd=dir_fd)\n except OSError as err:\n onexc(os.rmdir,path,err)\n else:\n try:\n \n raise OSError(\"Cannot call rmtree on a symbolic link\")\n except OSError as err:\n onexc(os.path.islink,path,err)\n finally:\n if not fd_closed:\n os.close(fd)\n else:\n if dir_fd is not None:\n raise NotImplementedError(\"dir_fd unavailable on this platform\")\n try:\n if _rmtree_islink(path):\n \n raise OSError(\"Cannot call rmtree on a symbolic link\")\n except OSError as err:\n onexc(os.path.islink,path,err)\n \n return\n return _rmtree_unsafe(path,onexc)\n \n \n \nrmtree.avoids_symlink_attacks=_use_fd_functions\n\ndef _basename(path):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n path=os.fspath(path)\n sep=os.path.sep+(os.path.altsep or '')\n return os.path.basename(path.rstrip(sep))\n \ndef move(src,dst,copy_function=copy2):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.move\",src,dst)\n real_dst=dst\n if os.path.isdir(dst):\n if _samefile(src,dst):\n \n \n os.rename(src,dst)\n return\n \n \n \n real_dst=os.path.join(dst,_basename(src))\n \n if os.path.exists(real_dst):\n raise Error(\"Destination path '%s' already exists\"%real_dst)\n try:\n os.rename(src,real_dst)\n except OSError:\n if os.path.islink(src):\n linkto=os.readlink(src)\n os.symlink(linkto,real_dst)\n os.unlink(src)\n elif os.path.isdir(src):\n if _destinsrc(src,dst):\n raise Error(\"Cannot move a directory '%s' into itself\"\n \" '%s'.\"%(src,dst))\n if(_is_immutable(src)\n or(not os.access(src,os.W_OK)and os.listdir(src)\n and sys.platform =='darwin')):\n raise PermissionError(\"Cannot move the non-empty directory \"\n \"'%s': Lacking write permission to '%s'.\"\n %(src,src))\n copytree(src,real_dst,copy_function=copy_function,\n symlinks=True)\n rmtree(src)\n else:\n copy_function(src,real_dst)\n os.unlink(src)\n return real_dst\n \ndef _destinsrc(src,dst):\n src=os.path.abspath(src)\n dst=os.path.abspath(dst)\n if not src.endswith(os.path.sep):\n src +=os.path.sep\n if not dst.endswith(os.path.sep):\n dst +=os.path.sep\n return dst.startswith(src)\n \ndef _is_immutable(src):\n st=_stat(src)\n immutable_states=[stat.UF_IMMUTABLE,stat.SF_IMMUTABLE]\n return hasattr(st,'st_flags')and st.st_flags in immutable_states\n \ndef _get_gid(name):\n ''\n if name is None:\n return None\n \n try:\n from grp import getgrnam\n except ImportError:\n return None\n \n try:\n result=getgrnam(name)\n except 
KeyError:\n result=None\n if result is not None:\n return result[2]\n return None\n \ndef _get_uid(name):\n ''\n if name is None:\n return None\n \n try:\n from pwd import getpwnam\n except ImportError:\n return None\n \n try:\n result=getpwnam(name)\n except KeyError:\n result=None\n if result is not None:\n return result[2]\n return None\n \ndef _make_tarball(base_name,base_dir,compress=\"gzip\",verbose=0,dry_run=0,\nowner=None,group=None,logger=None,root_dir=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if compress is None:\n tar_compression=''\n elif _ZLIB_SUPPORTED and compress =='gzip':\n tar_compression='gz'\n elif _BZ2_SUPPORTED and compress =='bzip2':\n tar_compression='bz2'\n elif _LZMA_SUPPORTED and compress =='xz':\n tar_compression='xz'\n else:\n raise ValueError(\"bad value for 'compress', or compression format not \"\n \"supported : {0}\".format(compress))\n \n import tarfile\n \n compress_ext='.'+tar_compression if compress else ''\n archive_name=base_name+'.tar'+compress_ext\n archive_dir=os.path.dirname(archive_name)\n \n if archive_dir and not os.path.exists(archive_dir):\n if logger is not None:\n logger.info(\"creating %s\",archive_dir)\n if not dry_run:\n os.makedirs(archive_dir)\n \n \n if logger is not None:\n logger.info('Creating tar archive')\n \n uid=_get_uid(owner)\n gid=_get_gid(group)\n \n def _set_uid_gid(tarinfo):\n if gid is not None:\n tarinfo.gid=gid\n tarinfo.gname=group\n if uid is not None:\n tarinfo.uid=uid\n tarinfo.uname=owner\n return tarinfo\n \n if not dry_run:\n tar=tarfile.open(archive_name,'w|%s'%tar_compression)\n arcname=base_dir\n if root_dir is not None:\n base_dir=os.path.join(root_dir,base_dir)\n try:\n tar.add(base_dir,arcname,filter=_set_uid_gid)\n finally:\n tar.close()\n \n if root_dir is not None:\n archive_name=os.path.abspath(archive_name)\n return archive_name\n \ndef _make_zipfile(base_name,base_dir,verbose=0,dry_run=0,\nlogger=None,owner=None,group=None,root_dir=None):\n ''\n\n\n\n \n import zipfile\n \n zip_filename=base_name+\".zip\"\n archive_dir=os.path.dirname(base_name)\n \n if archive_dir and not os.path.exists(archive_dir):\n if logger is not None:\n logger.info(\"creating %s\",archive_dir)\n if not dry_run:\n os.makedirs(archive_dir)\n \n if logger is not None:\n logger.info(\"creating '%s' and adding '%s' to it\",\n zip_filename,base_dir)\n \n if not dry_run:\n with zipfile.ZipFile(zip_filename,\"w\",\n compression=zipfile.ZIP_DEFLATED)as zf:\n arcname=os.path.normpath(base_dir)\n if root_dir is not None:\n base_dir=os.path.join(root_dir,base_dir)\n base_dir=os.path.normpath(base_dir)\n if arcname !=os.curdir:\n zf.write(base_dir,arcname)\n if logger is not None:\n logger.info(\"adding '%s'\",base_dir)\n for dirpath,dirnames,filenames in os.walk(base_dir):\n arcdirpath=dirpath\n if root_dir is not None:\n arcdirpath=os.path.relpath(arcdirpath,root_dir)\n arcdirpath=os.path.normpath(arcdirpath)\n for name in sorted(dirnames):\n path=os.path.join(dirpath,name)\n arcname=os.path.join(arcdirpath,name)\n zf.write(path,arcname)\n if logger is not None:\n logger.info(\"adding '%s'\",path)\n for name in filenames:\n path=os.path.join(dirpath,name)\n path=os.path.normpath(path)\n if os.path.isfile(path):\n arcname=os.path.join(arcdirpath,name)\n zf.write(path,arcname)\n if logger is not None:\n logger.info(\"adding '%s'\",path)\n \n if root_dir is not None:\n zip_filename=os.path.abspath(zip_filename)\n return zip_filename\n 
\n_make_tarball.supports_root_dir=True\n_make_zipfile.supports_root_dir=True\n\n\n\n\n\n_ARCHIVE_FORMATS={\n'tar':(_make_tarball,[('compress',None)],\n\"uncompressed tar file\"),\n}\n\nif _ZLIB_SUPPORTED:\n _ARCHIVE_FORMATS['gztar']=(_make_tarball,[('compress','gzip')],\n \"gzip'ed tar-file\")\n _ARCHIVE_FORMATS['zip']=(_make_zipfile,[],\"ZIP file\")\n \nif _BZ2_SUPPORTED:\n _ARCHIVE_FORMATS['bztar']=(_make_tarball,[('compress','bzip2')],\n \"bzip2'ed tar-file\")\n \nif _LZMA_SUPPORTED:\n _ARCHIVE_FORMATS['xztar']=(_make_tarball,[('compress','xz')],\n \"xz'ed tar-file\")\n \ndef get_archive_formats():\n ''\n\n\n \n formats=[(name,registry[2])for name,registry in\n _ARCHIVE_FORMATS.items()]\n formats.sort()\n return formats\n \ndef register_archive_format(name,function,extra_args=None,description=''):\n ''\n\n\n\n\n\n\n \n if extra_args is None:\n extra_args=[]\n if not callable(function):\n raise TypeError('The %s object is not callable'%function)\n if not isinstance(extra_args,(tuple,list)):\n raise TypeError('extra_args needs to be a sequence')\n for element in extra_args:\n if not isinstance(element,(tuple,list))or len(element)!=2:\n raise TypeError('extra_args elements are : (arg_name, value)')\n \n _ARCHIVE_FORMATS[name]=(function,extra_args,description)\n \ndef unregister_archive_format(name):\n del _ARCHIVE_FORMATS[name]\n \ndef make_archive(base_name,format,root_dir=None,base_dir=None,verbose=0,\ndry_run=0,owner=None,group=None,logger=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.make_archive\",base_name,format,root_dir,base_dir)\n try:\n format_info=_ARCHIVE_FORMATS[format]\n except KeyError:\n raise ValueError(\"unknown archive format '%s'\"%format)from None\n \n kwargs={'dry_run':dry_run,'logger':logger,\n 'owner':owner,'group':group}\n \n func=format_info[0]\n for arg,val in format_info[1]:\n kwargs[arg]=val\n \n if base_dir is None:\n base_dir=os.curdir\n \n supports_root_dir=getattr(func,'supports_root_dir',False)\n save_cwd=None\n if root_dir is not None:\n stmd=os.stat(root_dir).st_mode\n if not stat.S_ISDIR(stmd):\n raise NotADirectoryError(errno.ENOTDIR,'Not a directory',root_dir)\n \n if supports_root_dir:\n \n base_name=os.fspath(base_name)\n kwargs['root_dir']=root_dir\n else:\n save_cwd=os.getcwd()\n if logger is not None:\n logger.debug(\"changing into '%s'\",root_dir)\n base_name=os.path.abspath(base_name)\n if not dry_run:\n os.chdir(root_dir)\n \n try:\n filename=func(base_name,base_dir,**kwargs)\n finally:\n if save_cwd is not None:\n if logger is not None:\n logger.debug(\"changing back to '%s'\",save_cwd)\n os.chdir(save_cwd)\n \n return filename\n \n \ndef get_unpack_formats():\n ''\n\n\n\n \n formats=[(name,info[0],info[3])for name,info in\n _UNPACK_FORMATS.items()]\n formats.sort()\n return formats\n \ndef _check_unpack_options(extensions,function,extra_args):\n ''\n \n existing_extensions={}\n for name,info in _UNPACK_FORMATS.items():\n for ext in info[0]:\n existing_extensions[ext]=name\n \n for extension in extensions:\n if extension in existing_extensions:\n msg='%s is already registered for \"%s\"'\n raise RegistryError(msg %(extension,\n existing_extensions[extension]))\n \n if not callable(function):\n raise TypeError('The registered function must be a callable')\n \n \ndef register_unpack_format(name,extensions,function,extra_args=None,\ndescription=''):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if extra_args is None:\n extra_args=[]\n _check_unpack_options(extensions,function,extra_args)\n 
_UNPACK_FORMATS[name]=extensions,function,extra_args,description\n \ndef unregister_unpack_format(name):\n ''\n del _UNPACK_FORMATS[name]\n \ndef _ensure_directory(path):\n ''\n dirname=os.path.dirname(path)\n if not os.path.isdir(dirname):\n os.makedirs(dirname)\n \ndef _unpack_zipfile(filename,extract_dir):\n ''\n \n import zipfile\n \n if not zipfile.is_zipfile(filename):\n raise ReadError(\"%s is not a zip file\"%filename)\n \n zip=zipfile.ZipFile(filename)\n try:\n for info in zip.infolist():\n name=info.filename\n \n \n if name.startswith('/')or '..'in name:\n continue\n \n targetpath=os.path.join(extract_dir,*name.split('/'))\n if not targetpath:\n continue\n \n _ensure_directory(targetpath)\n if not name.endswith('/'):\n \n with zip.open(name,'r')as source,\\\n open(targetpath,'wb')as target:\n copyfileobj(source,target)\n finally:\n zip.close()\n \ndef _unpack_tarfile(filename,extract_dir,*,filter=None):\n ''\n \n import tarfile\n try:\n tarobj=tarfile.open(filename)\n except tarfile.TarError:\n raise ReadError(\n \"%s is not a compressed or uncompressed tar file\"%filename)\n try:\n tarobj.extractall(extract_dir,filter=filter)\n finally:\n tarobj.close()\n \n \n \n \n \n \n_UNPACK_FORMATS={\n'tar':(['.tar'],_unpack_tarfile,[],\"uncompressed tar file\"),\n'zip':(['.zip'],_unpack_zipfile,[],\"ZIP file\"),\n}\n\nif _ZLIB_SUPPORTED:\n _UNPACK_FORMATS['gztar']=(['.tar.gz','.tgz'],_unpack_tarfile,[],\n \"gzip'ed tar-file\")\n \nif _BZ2_SUPPORTED:\n _UNPACK_FORMATS['bztar']=(['.tar.bz2','.tbz2'],_unpack_tarfile,[],\n \"bzip2'ed tar-file\")\n \nif _LZMA_SUPPORTED:\n _UNPACK_FORMATS['xztar']=(['.tar.xz','.txz'],_unpack_tarfile,[],\n \"xz'ed tar-file\")\n \ndef _find_unpack_format(filename):\n for name,info in _UNPACK_FORMATS.items():\n for extension in info[0]:\n if filename.endswith(extension):\n return name\n return None\n \ndef unpack_archive(filename,extract_dir=None,format=None,*,filter=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.unpack_archive\",filename,extract_dir,format)\n \n if extract_dir is None:\n extract_dir=os.getcwd()\n \n extract_dir=os.fspath(extract_dir)\n filename=os.fspath(filename)\n \n if filter is None:\n filter_kwargs={}\n else:\n filter_kwargs={'filter':filter}\n if format is not None:\n try:\n format_info=_UNPACK_FORMATS[format]\n except KeyError:\n raise ValueError(\"Unknown unpack format '{0}'\".format(format))from None\n \n func=format_info[1]\n func(filename,extract_dir,**dict(format_info[2]),**filter_kwargs)\n else:\n \n format=_find_unpack_format(filename)\n if format is None:\n raise ReadError(\"Unknown archive format '{0}'\".format(filename))\n \n func=_UNPACK_FORMATS[format][1]\n kwargs=dict(_UNPACK_FORMATS[format][2])|filter_kwargs\n func(filename,extract_dir,**kwargs)\n \n \nif hasattr(os,'statvfs'):\n\n __all__.append('disk_usage')\n _ntuple_diskusage=collections.namedtuple('usage','total used free')\n _ntuple_diskusage.total.__doc__='Total space in bytes'\n _ntuple_diskusage.used.__doc__='Used space in bytes'\n _ntuple_diskusage.free.__doc__='Free space in bytes'\n \n def disk_usage(path):\n ''\n\n\n\n \n st=os.statvfs(path)\n free=st.f_bavail *st.f_frsize\n total=st.f_blocks *st.f_frsize\n used=(st.f_blocks -st.f_bfree)*st.f_frsize\n return _ntuple_diskusage(total,used,free)\n \nelif _WINDOWS:\n\n __all__.append('disk_usage')\n _ntuple_diskusage=collections.namedtuple('usage','total used free')\n \n def disk_usage(path):\n ''\n\n\n\n \n total,free=nt._getdiskusage(path)\n used=total -free\n return 
_ntuple_diskusage(total,used,free)\n \n \ndef chown(path,user=None,group=None):\n ''\n\n\n\n \n sys.audit('shutil.chown',path,user,group)\n \n if user is None and group is None:\n raise ValueError(\"user and/or group must be set\")\n \n _user=user\n _group=group\n \n \n if user is None:\n _user=-1\n \n elif isinstance(user,str):\n _user=_get_uid(user)\n if _user is None:\n raise LookupError(\"no such user: {!r}\".format(user))\n \n if group is None:\n _group=-1\n elif not isinstance(group,int):\n _group=_get_gid(group)\n if _group is None:\n raise LookupError(\"no such group: {!r}\".format(group))\n \n os.chown(path,_user,_group)\n \ndef get_terminal_size(fallback=(80,24)):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n try:\n columns=int(os.environ['COLUMNS'])\n except(KeyError,ValueError):\n columns=0\n \n try:\n lines=int(os.environ['LINES'])\n except(KeyError,ValueError):\n lines=0\n \n \n if columns <=0 or lines <=0:\n try:\n size=os.get_terminal_size(sys.__stdout__.fileno())\n except(AttributeError,ValueError,OSError):\n \n \n size=os.terminal_size(fallback)\n if columns <=0:\n columns=size.columns or fallback[0]\n if lines <=0:\n lines=size.lines or fallback[1]\n \n return os.terminal_size((columns,lines))\n \n \n \n \n \ndef _access_check(fn,mode):\n return(os.path.exists(fn)and os.access(fn,mode)\n and not os.path.isdir(fn))\n \n \ndef _win_path_needs_curdir(cmd,mode):\n ''\n\n\n\n \n return(not(mode&os.X_OK))or _winapi.NeedCurrentDirectoryForExePath(\n os.fsdecode(cmd))\n \n \ndef which(cmd,mode=os.F_OK |os.X_OK,path=None):\n ''\n\n\n\n\n\n\n\n \n use_bytes=isinstance(cmd,bytes)\n \n \n \n \n dirname,cmd=os.path.split(cmd)\n if dirname:\n path=[dirname]\n else:\n if path is None:\n path=os.environ.get(\"PATH\",None)\n if path is None:\n try:\n path=os.confstr(\"CS_PATH\")\n except(AttributeError,ValueError):\n \n path=os.defpath\n \n \n \n \n \n if not path:\n return None\n \n if use_bytes:\n path=os.fsencode(path)\n path=path.split(os.fsencode(os.pathsep))\n else:\n path=os.fsdecode(path)\n path=path.split(os.pathsep)\n \n if sys.platform ==\"win32\"and _win_path_needs_curdir(cmd,mode):\n curdir=os.curdir\n if use_bytes:\n curdir=os.fsencode(curdir)\n path.insert(0,curdir)\n \n if sys.platform ==\"win32\":\n \n pathext_source=os.getenv(\"PATHEXT\")or _WIN_DEFAULT_PATHEXT\n pathext=[ext for ext in pathext_source.split(os.pathsep)if ext]\n \n if use_bytes:\n pathext=[os.fsencode(ext)for ext in pathext]\n \n \n files=[cmd]+[cmd+ext for ext in pathext]\n else:\n \n \n files=[cmd]\n \n seen=set()\n for dir in path:\n normdir=os.path.normcase(dir)\n if not normdir in seen:\n seen.add(normdir)\n for thefile in files:\n name=os.path.join(dir,thefile)\n if _access_check(name,mode):\n return name\n return None\n", ["_winapi", "bz2", "collections", "errno", "fnmatch", "grp", "lzma", "nt", "os", "posix", "pwd", "stat", "sys", "tarfile", "warnings", "zipfile", "zlib"]], "tempfile": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\n\"NamedTemporaryFile\",\"TemporaryFile\",\n\"SpooledTemporaryFile\",\"TemporaryDirectory\",\n\"mkstemp\",\"mkdtemp\",\n\"mktemp\",\n\"TMP_MAX\",\"gettempprefix\",\n\"tempdir\",\"gettempdir\",\n\"gettempprefixb\",\"gettempdirb\",\n]\n\n\n\n\nimport functools as _functools\nimport warnings as _warnings\nimport io as _io\nimport os as _os\nimport shutil as _shutil\nimport errno as _errno\nfrom random import Random as _Random\nimport sys as _sys\nimport types as _types\nimport weakref as _weakref\nimport 
_thread\n_allocate_lock=_thread.allocate_lock\n\n_text_openflags=_os.O_RDWR |_os.O_CREAT |_os.O_EXCL\nif hasattr(_os,'O_NOFOLLOW'):\n _text_openflags |=_os.O_NOFOLLOW\n \n_bin_openflags=_text_openflags\nif hasattr(_os,'O_BINARY'):\n _bin_openflags |=_os.O_BINARY\n \nif hasattr(_os,'TMP_MAX'):\n TMP_MAX=_os.TMP_MAX\nelse:\n TMP_MAX=10000\n \n \n \n \n \ntemplate=\"tmp\"\n\n\n\n_once_lock=_allocate_lock()\n\n\ndef _exists(fn):\n try:\n _os.lstat(fn)\n except OSError:\n return False\n else:\n return True\n \n \ndef _infer_return_type(*args):\n ''\n return_type=None\n for arg in args:\n if arg is None:\n continue\n \n if isinstance(arg,_os.PathLike):\n arg=_os.fspath(arg)\n \n if isinstance(arg,bytes):\n if return_type is str:\n raise TypeError(\"Can't mix bytes and non-bytes in \"\n \"path components.\")\n return_type=bytes\n else:\n if return_type is bytes:\n raise TypeError(\"Can't mix bytes and non-bytes in \"\n \"path components.\")\n return_type=str\n if return_type is None:\n if tempdir is None or isinstance(tempdir,str):\n return str\n else:\n \n return bytes\n return return_type\n \n \ndef _sanitize_params(prefix,suffix,dir):\n ''\n output_type=_infer_return_type(prefix,suffix,dir)\n if suffix is None:\n suffix=output_type()\n if prefix is None:\n if output_type is str:\n prefix=template\n else:\n prefix=_os.fsencode(template)\n if dir is None:\n if output_type is str:\n dir=gettempdir()\n else:\n dir=gettempdirb()\n return prefix,suffix,dir,output_type\n \n \nclass _RandomNameSequence:\n ''\n\n\n\n\n \n \n characters=\"abcdefghijklmnopqrstuvwxyz0123456789_\"\n \n @property\n def rng(self):\n cur_pid=_os.getpid()\n if cur_pid !=getattr(self,'_rng_pid',None):\n self._rng=_Random()\n self._rng_pid=cur_pid\n return self._rng\n \n def __iter__(self):\n return self\n \n def __next__(self):\n return ''.join(self.rng.choices(self.characters,k=8))\n \ndef _candidate_tempdir_list():\n ''\n \n \n dirlist=[]\n \n \n for envname in 'TMPDIR','TEMP','TMP':\n dirname=_os.getenv(envname)\n if dirname:dirlist.append(dirname)\n \n \n if _os.name =='nt':\n dirlist.extend([_os.path.expanduser(r'~\\AppData\\Local\\Temp'),\n _os.path.expandvars(r'%SYSTEMROOT%\\Temp'),\n r'c:\\temp',r'c:\\tmp',r'\\temp',r'\\tmp'])\n else:\n dirlist.extend(['/tmp','/var/tmp','/usr/tmp'])\n \n \n try:\n dirlist.append(_os.getcwd())\n except(AttributeError,OSError):\n dirlist.append(_os.curdir)\n \n return dirlist\n \ndef _get_default_tempdir():\n ''\n\n\n\n\n\n \n \n namer=_RandomNameSequence()\n dirlist=_candidate_tempdir_list()\n \n for dir in dirlist:\n if dir !=_os.curdir:\n dir=_os.path.abspath(dir)\n \n for seq in range(100):\n name=next(namer)\n filename=_os.path.join(dir,name)\n try:\n fd=_os.open(filename,_bin_openflags,0o600)\n try:\n try:\n _os.write(fd,b'blat')\n finally:\n _os.close(fd)\n finally:\n _os.unlink(filename)\n return dir\n except FileExistsError:\n pass\n except PermissionError:\n \n \n if(_os.name =='nt'and _os.path.isdir(dir)and\n _os.access(dir,_os.W_OK)):\n continue\n break\n except OSError:\n break\n raise FileNotFoundError(_errno.ENOENT,\n \"No usable temporary directory found in %s\"%\n dirlist)\n \n_name_sequence=None\n\ndef _get_candidate_names():\n ''\n \n global _name_sequence\n if _name_sequence is None:\n _once_lock.acquire()\n try:\n if _name_sequence is None:\n _name_sequence=_RandomNameSequence()\n finally:\n _once_lock.release()\n return _name_sequence\n \n \ndef _mkstemp_inner(dir,pre,suf,flags,output_type):\n ''\n \n dir=_os.path.abspath(dir)\n names=_get_candidate_names()\n if 
output_type is bytes:\n names=map(_os.fsencode,names)\n \n for seq in range(TMP_MAX):\n name=next(names)\n file=_os.path.join(dir,pre+name+suf)\n _sys.audit(\"tempfile.mkstemp\",file)\n try:\n fd=_os.open(file,flags,0o600)\n except FileExistsError:\n continue\n except PermissionError:\n \n \n if(_os.name =='nt'and _os.path.isdir(dir)and\n _os.access(dir,_os.W_OK)):\n continue\n else:\n raise\n return fd,file\n \n raise FileExistsError(_errno.EEXIST,\n \"No usable temporary file name found\")\n \n \n \n \ndef gettempprefix():\n ''\n return _os.fsdecode(template)\n \ndef gettempprefixb():\n ''\n return _os.fsencode(template)\n \ntempdir=None\n\ndef _gettempdir():\n ''\n global tempdir\n if tempdir is None:\n _once_lock.acquire()\n try:\n if tempdir is None:\n tempdir=_get_default_tempdir()\n finally:\n _once_lock.release()\n return tempdir\n \ndef gettempdir():\n ''\n return _os.fsdecode(_gettempdir())\n \ndef gettempdirb():\n ''\n return _os.fsencode(_gettempdir())\n \ndef mkstemp(suffix=None,prefix=None,dir=None,text=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n if text:\n flags=_text_openflags\n else:\n flags=_bin_openflags\n \n return _mkstemp_inner(dir,prefix,suffix,flags,output_type)\n \n \ndef mkdtemp(suffix=None,prefix=None,dir=None):\n ''\n\n\n\n\n\n\n\n\n\n \n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n names=_get_candidate_names()\n if output_type is bytes:\n names=map(_os.fsencode,names)\n \n for seq in range(TMP_MAX):\n name=next(names)\n file=_os.path.join(dir,prefix+name+suffix)\n _sys.audit(\"tempfile.mkdtemp\",file)\n try:\n _os.mkdir(file,0o700)\n except FileExistsError:\n continue\n except PermissionError:\n \n \n if(_os.name =='nt'and _os.path.isdir(dir)and\n _os.access(dir,_os.W_OK)):\n continue\n else:\n raise\n return _os.path.abspath(file)\n \n raise FileExistsError(_errno.EEXIST,\n \"No usable temporary directory name found\")\n \ndef mktemp(suffix=\"\",prefix=template,dir=None):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n if dir is None:\n dir=gettempdir()\n \n names=_get_candidate_names()\n for seq in range(TMP_MAX):\n name=next(names)\n file=_os.path.join(dir,prefix+name+suffix)\n if not _exists(file):\n return file\n \n raise FileExistsError(_errno.EEXIST,\n \"No usable temporary filename found\")\n \n \nclass _TemporaryFileCloser:\n ''\n\n \n \n cleanup_called=False\n close_called=False\n \n def __init__(self,file,name,delete=True,delete_on_close=True):\n self.file=file\n self.name=name\n self.delete=delete\n self.delete_on_close=delete_on_close\n \n def cleanup(self,windows=(_os.name =='nt'),unlink=_os.unlink):\n if not self.cleanup_called:\n self.cleanup_called=True\n try:\n if not self.close_called:\n self.close_called=True\n self.file.close()\n finally:\n \n \n if self.delete and not(windows and self.delete_on_close):\n try:\n unlink(self.name)\n except FileNotFoundError:\n pass\n \n def close(self):\n if not self.close_called:\n self.close_called=True\n try:\n self.file.close()\n finally:\n if self.delete and self.delete_on_close:\n self.cleanup()\n \n def __del__(self):\n self.cleanup()\n \n \nclass _TemporaryFileWrapper:\n ''\n\n\n\n\n \n \n def __init__(self,file,name,delete=True,delete_on_close=True):\n self.file=file\n self.name=name\n self._closer=_TemporaryFileCloser(file,name,delete,\n delete_on_close)\n \n def __getattr__(self,name):\n \n \n \n file=self.__dict__['file']\n a=getattr(file,name)\n if hasattr(a,'__call__'):\n 
func=a\n @_functools.wraps(func)\n def func_wrapper(*args,**kwargs):\n return func(*args,**kwargs)\n \n \n func_wrapper._closer=self._closer\n a=func_wrapper\n if not isinstance(a,int):\n setattr(self,name,a)\n return a\n \n \n \n def __enter__(self):\n self.file.__enter__()\n return self\n \n \n \n def __exit__(self,exc,value,tb):\n result=self.file.__exit__(exc,value,tb)\n self._closer.cleanup()\n return result\n \n def close(self):\n ''\n\n \n self._closer.close()\n \n \n def __iter__(self):\n \n \n \n \n \n for line in self.file:\n yield line\n \ndef NamedTemporaryFile(mode='w+b',buffering=-1,encoding=None,\nnewline=None,suffix=None,prefix=None,\ndir=None,delete=True,*,errors=None,\ndelete_on_close=True):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n flags=_bin_openflags\n \n \n \n if _os.name =='nt'and delete and delete_on_close:\n flags |=_os.O_TEMPORARY\n \n if \"b\"not in mode:\n encoding=_io.text_encoding(encoding)\n \n name=None\n def opener(*args):\n nonlocal name\n fd,name=_mkstemp_inner(dir,prefix,suffix,flags,output_type)\n return fd\n try:\n file=_io.open(dir,mode,buffering=buffering,\n newline=newline,encoding=encoding,errors=errors,\n opener=opener)\n try:\n raw=getattr(file,'buffer',file)\n raw=getattr(raw,'raw',raw)\n raw.name=name\n return _TemporaryFileWrapper(file,name,delete,delete_on_close)\n except:\n file.close()\n raise\n except:\n if name is not None and not(\n _os.name =='nt'and delete and delete_on_close):\n _os.unlink(name)\n raise\n \nif _os.name !='posix'or _sys.platform =='cygwin':\n\n\n TemporaryFile=NamedTemporaryFile\n \nelse:\n\n\n\n _O_TMPFILE_WORKS=hasattr(_os,'O_TMPFILE')\n \n def TemporaryFile(mode='w+b',buffering=-1,encoding=None,\n newline=None,suffix=None,prefix=None,\n dir=None,*,errors=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n global _O_TMPFILE_WORKS\n \n if \"b\"not in mode:\n encoding=_io.text_encoding(encoding)\n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n flags=_bin_openflags\n if _O_TMPFILE_WORKS:\n fd=None\n def opener(*args):\n nonlocal fd\n flags2=(flags |_os.O_TMPFILE)&~_os.O_CREAT\n fd=_os.open(dir,flags2,0o600)\n return fd\n try:\n file=_io.open(dir,mode,buffering=buffering,\n newline=newline,encoding=encoding,\n errors=errors,opener=opener)\n raw=getattr(file,'buffer',file)\n raw=getattr(raw,'raw',raw)\n raw.name=fd\n return file\n except IsADirectoryError:\n \n \n \n \n \n _O_TMPFILE_WORKS=False\n except OSError:\n \n \n \n \n \n \n \n pass\n \n \n fd=None\n def opener(*args):\n nonlocal fd\n fd,name=_mkstemp_inner(dir,prefix,suffix,flags,output_type)\n try:\n _os.unlink(name)\n except BaseException as e:\n _os.close(fd)\n raise\n return fd\n file=_io.open(dir,mode,buffering=buffering,\n newline=newline,encoding=encoding,errors=errors,\n opener=opener)\n raw=getattr(file,'buffer',file)\n raw=getattr(raw,'raw',raw)\n raw.name=fd\n return file\n \nclass SpooledTemporaryFile(_io.IOBase):\n ''\n\n\n \n _rolled=False\n \n def __init__(self,max_size=0,mode='w+b',buffering=-1,\n encoding=None,newline=None,\n suffix=None,prefix=None,dir=None,*,errors=None):\n if 'b'in mode:\n self._file=_io.BytesIO()\n else:\n encoding=_io.text_encoding(encoding)\n self._file=_io.TextIOWrapper(_io.BytesIO(),\n encoding=encoding,errors=errors,\n newline=newline)\n self._max_size=max_size\n self._rolled=False\n self._TemporaryFileArgs={'mode':mode,'buffering':buffering,\n 'suffix':suffix,'prefix':prefix,\n 'encoding':encoding,'newline':newline,\n 
'dir':dir,'errors':errors}\n \n __class_getitem__=classmethod(_types.GenericAlias)\n \n def _check(self,file):\n if self._rolled:return\n max_size=self._max_size\n if max_size and file.tell()>max_size:\n self.rollover()\n \n def rollover(self):\n if self._rolled:return\n file=self._file\n newfile=self._file=TemporaryFile(**self._TemporaryFileArgs)\n del self._TemporaryFileArgs\n \n pos=file.tell()\n if hasattr(newfile,'buffer'):\n newfile.buffer.write(file.detach().getvalue())\n else:\n newfile.write(file.getvalue())\n newfile.seek(pos,0)\n \n self._rolled=True\n \n \n \n \n \n \n \n def __enter__(self):\n if self._file.closed:\n raise ValueError(\"Cannot enter context with closed file\")\n return self\n \n def __exit__(self,exc,value,tb):\n self._file.close()\n \n \n def __iter__(self):\n return self._file.__iter__()\n \n def __del__(self):\n if not self.closed:\n _warnings.warn(\n \"Unclosed file {!r}\".format(self),\n ResourceWarning,\n stacklevel=2,\n source=self\n )\n self.close()\n \n def close(self):\n self._file.close()\n \n @property\n def closed(self):\n return self._file.closed\n \n @property\n def encoding(self):\n return self._file.encoding\n \n @property\n def errors(self):\n return self._file.errors\n \n def fileno(self):\n self.rollover()\n return self._file.fileno()\n \n def flush(self):\n self._file.flush()\n \n def isatty(self):\n return self._file.isatty()\n \n @property\n def mode(self):\n try:\n return self._file.mode\n except AttributeError:\n return self._TemporaryFileArgs['mode']\n \n @property\n def name(self):\n try:\n return self._file.name\n except AttributeError:\n return None\n \n @property\n def newlines(self):\n return self._file.newlines\n \n def readable(self):\n return self._file.readable()\n \n def read(self,*args):\n return self._file.read(*args)\n \n def read1(self,*args):\n return self._file.read1(*args)\n \n def readinto(self,b):\n return self._file.readinto(b)\n \n def readinto1(self,b):\n return self._file.readinto1(b)\n \n def readline(self,*args):\n return self._file.readline(*args)\n \n def readlines(self,*args):\n return self._file.readlines(*args)\n \n def seekable(self):\n return self._file.seekable()\n \n def seek(self,*args):\n return self._file.seek(*args)\n \n def tell(self):\n return self._file.tell()\n \n def truncate(self,size=None):\n if size is None:\n return self._file.truncate()\n else:\n if size >self._max_size:\n self.rollover()\n return self._file.truncate(size)\n \n def writable(self):\n return self._file.writable()\n \n def write(self,s):\n file=self._file\n rv=file.write(s)\n self._check(file)\n return rv\n \n def writelines(self,iterable):\n file=self._file\n rv=file.writelines(iterable)\n self._check(file)\n return rv\n \n def detach(self):\n return self._file.detach()\n \n \nclass TemporaryDirectory:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,suffix=None,prefix=None,dir=None,\n ignore_cleanup_errors=False,*,delete=True):\n self.name=mkdtemp(suffix,prefix,dir)\n self._ignore_cleanup_errors=ignore_cleanup_errors\n self._delete=delete\n self._finalizer=_weakref.finalize(\n self,self._cleanup,self.name,\n warn_message=\"Implicitly cleaning up {!r}\".format(self),\n ignore_errors=self._ignore_cleanup_errors,delete=self._delete)\n \n @classmethod\n def _rmtree(cls,name,ignore_errors=False):\n def onexc(func,path,exc):\n if isinstance(exc,PermissionError):\n def resetperms(path):\n try:\n _os.chflags(path,0)\n except AttributeError:\n pass\n _os.chmod(path,0o700)\n \n try:\n if path !=name:\n 
resetperms(_os.path.dirname(path))\n resetperms(path)\n \n try:\n _os.unlink(path)\n \n except(IsADirectoryError,PermissionError):\n cls._rmtree(path,ignore_errors=ignore_errors)\n except FileNotFoundError:\n pass\n elif isinstance(exc,FileNotFoundError):\n pass\n else:\n if not ignore_errors:\n raise\n \n _shutil.rmtree(name,onexc=onexc)\n \n @classmethod\n def _cleanup(cls,name,warn_message,ignore_errors=False,delete=True):\n if delete:\n cls._rmtree(name,ignore_errors=ignore_errors)\n _warnings.warn(warn_message,ResourceWarning)\n \n def __repr__(self):\n return \"<{} {!r}>\".format(self.__class__.__name__,self.name)\n \n def __enter__(self):\n return self.name\n \n def __exit__(self,exc,value,tb):\n if self._delete:\n self.cleanup()\n \n def cleanup(self):\n if self._finalizer.detach()or _os.path.exists(self.name):\n self._rmtree(self.name,ignore_errors=self._ignore_cleanup_errors)\n \n __class_getitem__=classmethod(_types.GenericAlias)\n", ["_thread", "errno", "functools", "io", "os", "random", "shutil", "sys", "types", "warnings", "weakref"]], "queue": [".py", "''\n\nimport threading\nimport types\nfrom collections import deque\nfrom heapq import heappush,heappop\nfrom time import monotonic as time\ntry:\n from _queue import SimpleQueue\nexcept ImportError:\n SimpleQueue=None\n \n__all__=['Empty','Full','Queue','PriorityQueue','LifoQueue','SimpleQueue']\n\n\ntry:\n from _queue import Empty\nexcept ImportError:\n class Empty(Exception):\n ''\n pass\n \nclass Full(Exception):\n ''\n pass\n \n \nclass Queue:\n ''\n\n\n \n \n def __init__(self,maxsize=0):\n self.maxsize=maxsize\n self._init(maxsize)\n \n \n \n \n \n self.mutex=threading.Lock()\n \n \n \n self.not_empty=threading.Condition(self.mutex)\n \n \n \n self.not_full=threading.Condition(self.mutex)\n \n \n \n self.all_tasks_done=threading.Condition(self.mutex)\n self.unfinished_tasks=0\n \n def task_done(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n with self.all_tasks_done:\n unfinished=self.unfinished_tasks -1\n if unfinished <=0:\n if unfinished <0:\n raise ValueError('task_done() called too many times')\n self.all_tasks_done.notify_all()\n self.unfinished_tasks=unfinished\n \n def join(self):\n ''\n\n\n\n\n\n\n \n with self.all_tasks_done:\n while self.unfinished_tasks:\n self.all_tasks_done.wait()\n \n def qsize(self):\n ''\n with self.mutex:\n return self._qsize()\n \n def empty(self):\n ''\n\n\n\n\n\n\n\n\n \n with self.mutex:\n return not self._qsize()\n \n def full(self):\n ''\n\n\n\n\n\n \n with self.mutex:\n return 0 0:\n if not block:\n if self._qsize()>=self.maxsize:\n raise Full\n elif timeout is None:\n while self._qsize()>=self.maxsize:\n self.not_full.wait()\n elif timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n else:\n endtime=time()+timeout\n while self._qsize()>=self.maxsize:\n remaining=endtime -time()\n if remaining <=0.0:\n raise Full\n self.not_full.wait(remaining)\n self._put(item)\n self.unfinished_tasks +=1\n self.not_empty.notify()\n \n def get(self,block=True,timeout=None):\n ''\n\n\n\n\n\n\n\n\n \n with self.not_empty:\n if not block:\n if not self._qsize():\n raise Empty\n elif timeout is None:\n while not self._qsize():\n self.not_empty.wait()\n elif timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n else:\n endtime=time()+timeout\n while not self._qsize():\n remaining=endtime -time()\n if remaining <=0.0:\n raise Empty\n self.not_empty.wait(remaining)\n item=self._get()\n self.not_full.notify()\n return item\n \n def put_nowait(self,item):\n 
''\n\n\n\n \n return self.put(item,block=False)\n \n def get_nowait(self):\n ''\n\n\n\n \n return self.get(block=False)\n \n \n \n \n \n \n def _init(self,maxsize):\n self.queue=deque()\n \n def _qsize(self):\n return len(self.queue)\n \n \n def _put(self,item):\n self.queue.append(item)\n \n \n def _get(self):\n return self.queue.popleft()\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \nclass PriorityQueue(Queue):\n ''\n\n\n \n \n def _init(self,maxsize):\n self.queue=[]\n \n def _qsize(self):\n return len(self.queue)\n \n def _put(self,item):\n heappush(self.queue,item)\n \n def _get(self):\n return heappop(self.queue)\n \n \nclass LifoQueue(Queue):\n ''\n \n def _init(self,maxsize):\n self.queue=[]\n \n def _qsize(self):\n return len(self.queue)\n \n def _put(self,item):\n self.queue.append(item)\n \n def _get(self):\n return self.queue.pop()\n \n \nclass _PySimpleQueue:\n ''\n\n\n \n \n \n \n \n \n def __init__(self):\n self._queue=deque()\n self._count=threading.Semaphore(0)\n \n def put(self,item,block=True,timeout=None):\n ''\n\n\n\n \n self._queue.append(item)\n self._count.release()\n \n def get(self,block=True,timeout=None):\n ''\n\n\n\n\n\n\n\n\n \n if timeout is not None and timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n if not self._count.acquire(block,timeout):\n raise Empty\n return self._queue.popleft()\n \n def put_nowait(self,item):\n ''\n\n\n\n \n return self.put(item,block=False)\n \n def get_nowait(self):\n ''\n\n\n\n \n return self.get(block=False)\n \n def empty(self):\n ''\n return len(self._queue)==0\n \n def qsize(self):\n ''\n return len(self._queue)\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \nif SimpleQueue is None:\n SimpleQueue=_PySimpleQueue\n", ["_queue", "collections", "heapq", "threading", "time", "types"]], "pkgutil": [".py", "''\n\nfrom collections import namedtuple\nfrom functools import singledispatch as simplegeneric\nimport importlib\nimport importlib.util\nimport importlib.machinery\nimport os\nimport os.path\nimport sys\nfrom types import ModuleType\nimport warnings\n\n__all__=[\n'get_importer','iter_importers','get_loader','find_loader',\n'walk_packages','iter_modules','get_data',\n'read_code','extend_path',\n'ModuleInfo',\n]\n\n\nModuleInfo=namedtuple('ModuleInfo','module_finder name ispkg')\nModuleInfo.__doc__='A namedtuple with minimal info about a module.'\n\n\ndef read_code(stream):\n\n\n import marshal\n \n magic=stream.read(4)\n if magic !=importlib.util.MAGIC_NUMBER:\n return None\n \n stream.read(12)\n return marshal.load(stream)\n \n \ndef walk_packages(path=None,prefix='',onerror=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def seen(p,m={}):\n if p in m:\n return True\n m[p]=True\n \n for info in iter_modules(path,prefix):\n yield info\n \n if info.ispkg:\n try:\n __import__(info.name)\n except ImportError:\n if onerror is not None:\n onerror(info.name)\n except Exception:\n if onerror is not None:\n onerror(info.name)\n else:\n raise\n else:\n path=getattr(sys.modules[info.name],'__path__',None)or[]\n \n \n path=[p for p in path if not seen(p)]\n \n yield from walk_packages(path,info.name+'.',onerror)\n \n \ndef iter_modules(path=None,prefix=''):\n ''\n\n\n\n\n\n\n\n \n if path is None:\n importers=iter_importers()\n elif isinstance(path,str):\n raise ValueError(\"path must be None or list of paths to look for \"\n \"modules in\")\n else:\n importers=map(get_importer,path)\n \n yielded={}\n for i in importers:\n for name,ispkg in 
iter_importer_modules(i,prefix):\n if name not in yielded:\n yielded[name]=1\n yield ModuleInfo(i,name,ispkg)\n \n \n@simplegeneric\ndef iter_importer_modules(importer,prefix=''):\n if not hasattr(importer,'iter_modules'):\n return[]\n return importer.iter_modules(prefix)\n \n \n \ndef _iter_file_finder_modules(importer,prefix=''):\n if importer.path is None or not os.path.isdir(importer.path):\n return\n \n yielded={}\n import inspect\n try:\n filenames=os.listdir(importer.path)\n except OSError:\n \n filenames=[]\n filenames.sort()\n \n for fn in filenames:\n modname=inspect.getmodulename(fn)\n if modname =='__init__'or modname in yielded:\n continue\n \n path=os.path.join(importer.path,fn)\n ispkg=False\n \n if not modname and os.path.isdir(path)and '.'not in fn:\n modname=fn\n try:\n dircontents=os.listdir(path)\n except OSError:\n \n dircontents=[]\n for fn in dircontents:\n subname=inspect.getmodulename(fn)\n if subname =='__init__':\n ispkg=True\n break\n else:\n continue\n \n if modname and '.'not in modname:\n yielded[modname]=1\n yield prefix+modname,ispkg\n \niter_importer_modules.register(\nimportlib.machinery.FileFinder,_iter_file_finder_modules)\n\n\ntry:\n import zipimport\n from zipimport import zipimporter\n \n def iter_zipimport_modules(importer,prefix=''):\n dirlist=sorted(zipimport._zip_directory_cache[importer.archive])\n _prefix=importer.prefix\n plen=len(_prefix)\n yielded={}\n import inspect\n for fn in dirlist:\n if not fn.startswith(_prefix):\n continue\n \n fn=fn[plen:].split(os.sep)\n \n if len(fn)==2 and fn[1].startswith('__init__.py'):\n if fn[0]not in yielded:\n yielded[fn[0]]=1\n yield prefix+fn[0],True\n \n if len(fn)!=1:\n continue\n \n modname=inspect.getmodulename(fn[0])\n if modname =='__init__':\n continue\n \n if modname and '.'not in modname and modname not in yielded:\n yielded[modname]=1\n yield prefix+modname,False\n \n iter_importer_modules.register(zipimporter,iter_zipimport_modules)\n \nexcept ImportError:\n pass\n \n \ndef get_importer(path_item):\n ''\n\n\n\n\n\n\n \n path_item=os.fsdecode(path_item)\n try:\n importer=sys.path_importer_cache[path_item]\n except KeyError:\n for path_hook in sys.path_hooks:\n try:\n importer=path_hook(path_item)\n sys.path_importer_cache.setdefault(path_item,importer)\n break\n except ImportError:\n pass\n else:\n importer=None\n return importer\n \n \ndef iter_importers(fullname=\"\"):\n ''\n\n\n\n\n\n\n\n\n\n \n if fullname.startswith('.'):\n msg=\"Relative module name {!r} not supported\".format(fullname)\n raise ImportError(msg)\n if '.'in fullname:\n \n pkg_name=fullname.rpartition(\".\")[0]\n pkg=importlib.import_module(pkg_name)\n path=getattr(pkg,'__path__',None)\n if path is None:\n return\n else:\n yield from sys.meta_path\n path=sys.path\n for item in path:\n yield get_importer(item)\n \n \ndef get_loader(module_or_name):\n ''\n\n\n\n\n \n warnings._deprecated(\"pkgutil.get_loader\",\n f\"{warnings._DEPRECATED_MSG}; \"\n \"use importlib.util.find_spec() instead\",\n remove=(3,14))\n if module_or_name in sys.modules:\n module_or_name=sys.modules[module_or_name]\n if module_or_name is None:\n return None\n if isinstance(module_or_name,ModuleType):\n module=module_or_name\n loader=getattr(module,'__loader__',None)\n if loader is not None:\n return loader\n if getattr(module,'__spec__',None)is None:\n return None\n fullname=module.__name__\n else:\n fullname=module_or_name\n return find_loader(fullname)\n \n \ndef find_loader(fullname):\n ''\n\n\n\n\n \n warnings._deprecated(\"pkgutil.find_loader\",\n 
f\"{warnings._DEPRECATED_MSG}; \"\n \"use importlib.util.find_spec() instead\",\n remove=(3,14))\n if fullname.startswith('.'):\n msg=\"Relative module name {!r} not supported\".format(fullname)\n raise ImportError(msg)\n try:\n spec=importlib.util.find_spec(fullname)\n except(ImportError,AttributeError,TypeError,ValueError)as ex:\n \n \n \n msg=\"Error while finding loader for {!r} ({}: {})\"\n raise ImportError(msg.format(fullname,type(ex),ex))from ex\n return spec.loader if spec is not None else None\n \n \ndef extend_path(path,name):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if not isinstance(path,list):\n \n \n return path\n \n sname_pkg=name+\".pkg\"\n \n path=path[:]\n \n parent_package,_,final_name=name.rpartition('.')\n if parent_package:\n try:\n search_path=sys.modules[parent_package].__path__\n except(KeyError,AttributeError):\n \n \n return path\n else:\n search_path=sys.path\n \n for dir in search_path:\n if not isinstance(dir,str):\n continue\n \n finder=get_importer(dir)\n if finder is not None:\n portions=[]\n if hasattr(finder,'find_spec'):\n spec=finder.find_spec(final_name)\n if spec is not None:\n portions=spec.submodule_search_locations or[]\n \n elif hasattr(finder,'find_loader'):\n _,portions=finder.find_loader(final_name)\n \n for portion in portions:\n \n \n if portion not in path:\n path.append(portion)\n \n \n \n pkgfile=os.path.join(dir,sname_pkg)\n if os.path.isfile(pkgfile):\n try:\n f=open(pkgfile)\n except OSError as msg:\n sys.stderr.write(\"Can't open %s: %s\\n\"%\n (pkgfile,msg))\n else:\n with f:\n for line in f:\n line=line.rstrip('\\n')\n if not line or line.startswith('#'):\n continue\n path.append(line)\n \n return path\n \n \ndef get_data(package,resource):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n spec=importlib.util.find_spec(package)\n if spec is None:\n return None\n loader=spec.loader\n if loader is None or not hasattr(loader,'get_data'):\n return None\n \n mod=(sys.modules.get(package)or\n importlib._bootstrap._load(spec))\n if mod is None or not hasattr(mod,'__file__'):\n return None\n \n \n \n \n parts=resource.split('/')\n parts.insert(0,os.path.dirname(mod.__file__))\n resource_name=os.path.join(*parts)\n return loader.get_data(resource_name)\n \n \n_NAME_PATTERN=None\n\ndef resolve_name(name):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global _NAME_PATTERN\n if _NAME_PATTERN is None:\n \n import re\n dotted_words=r'(?!\\d)(\\w+)(\\.(?!\\d)(\\w+))*'\n _NAME_PATTERN=re.compile(f'^(?P{dotted_words})'\n f'(?P:(?P{dotted_words})?)?$',\n re.UNICODE)\n \n m=_NAME_PATTERN.match(name)\n if not m:\n raise ValueError(f'invalid format: {name !r}')\n gd=m.groupdict()\n if gd.get('cln'):\n \n mod=importlib.import_module(gd['pkg'])\n parts=gd.get('obj')\n parts=parts.split('.')if parts else[]\n else:\n \n parts=name.split('.')\n modname=parts.pop(0)\n \n mod=importlib.import_module(modname)\n while parts:\n p=parts[0]\n s=f'{modname}.{p}'\n try:\n mod=importlib.import_module(s)\n parts.pop(0)\n modname=s\n except ImportError:\n break\n \n \n \n result=mod\n for p in parts:\n result=getattr(result,p)\n return result\n", ["collections", "functools", "importlib", "importlib.machinery", "importlib.util", "inspect", "marshal", "os", "os.path", "re", "sys", "types", "warnings", "zipimport"]], "_dummy_thread": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['error','start_new_thread','exit','get_ident','allocate_lock',\n'interrupt_main','LockType','RLock']\n\n\nTIMEOUT_MAX=2 
**31\n\n\n\n\n\n\nerror=RuntimeError\n\ndef start_new_thread(function,args,kwargs={}):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if type(args)!=type(tuple()):\n raise TypeError(\"2nd arg must be a tuple\")\n if type(kwargs)!=type(dict()):\n raise TypeError(\"3rd arg must be a dict\")\n global _main\n _main=False\n try :\n function(*args,**kwargs)\n except SystemExit:\n pass\n except :\n import traceback\n traceback.print_exc()\n _main=True\n global _interrupt\n if _interrupt:\n _interrupt=False\n raise KeyboardInterrupt\n \ndef exit():\n ''\n raise SystemExit\n \ndef get_ident():\n ''\n\n\n\n\n \n return 1\n \ndef allocate_lock():\n ''\n return LockType()\n \ndef stack_size(size=None ):\n ''\n if size is not None :\n raise error(\"setting thread stack size not supported\")\n return 0\n \ndef _set_sentinel():\n ''\n return LockType()\n \nclass LockType(object):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self):\n self.locked_status=False\n \n def acquire(self,waitflag=None ,timeout=-1):\n ''\n\n\n\n\n\n\n\n\n \n if waitflag is None or waitflag:\n self.locked_status=True\n return True\n else :\n if not self.locked_status:\n self.locked_status=True\n return True\n else :\n if timeout >0:\n import time\n time.sleep(timeout)\n return False\n \n __enter__=acquire\n \n def __exit__(self,typ,val,tb):\n self.release()\n \n def release(self):\n ''\n \n \n if not self.locked_status:\n raise error\n self.locked_status=False\n return True\n \n def locked(self):\n return self.locked_status\n \n def __repr__(self):\n return \"<%s %s.%s object at %s>\"%(\n \"locked\"if self.locked_status else \"unlocked\",\n self.__class__.__module__,\n self.__class__.__qualname__,\n hex(id(self))\n )\n \n \nclass RLock(LockType):\n ''\n\n\n\n\n\n \n def __init__(self):\n super().__init__()\n self._levels=0\n \n def acquire(self,waitflag=None ,timeout=-1):\n ''\n \n locked=super().acquire(waitflag,timeout)\n if locked:\n self._levels +=1\n return locked\n \n def release(self):\n ''\n \n if self._levels ==0:\n raise error\n if self._levels ==1:\n super().release()\n self._levels -=1\n \n \n_interrupt=False\n\n_main=True\n\ndef interrupt_main():\n ''\n \n if _main:\n raise KeyboardInterrupt\n else :\n global _interrupt\n _interrupt=True\n", ["time", "traceback"]], "_struct": [".py", "\n\n\n\n\n\n\n\n\n\n\n\"\"\"Functions to convert between Python values and C structs.\nPython strings are used to hold the data representing the C struct\nand also as format strings to describe the layout of data in the C struct.\n\nThe optional first format char indicates byte order, size and alignment:\n @: native order, size & alignment (default)\n =: native order, std. size & alignment\n <: little-endian, std. size & alignment\n >: big-endian, std. 
size & alignment\n !: same as >\n\nThe remaining chars indicate types of args and must match exactly;\nthese can be preceded by a decimal repeat count:\n x: pad byte (no data);\n c:char;\n b:signed byte;\n B:unsigned byte;\n h:short;\n H:unsigned short;\n i:int;\n I:unsigned int;\n l:long;\n L:unsigned long;\n f:float;\n d:double.\nSpecial cases (preceding decimal count indicates length):\n s:string (array of char); p: pascal string (with count byte).\nSpecial case (only available in native format):\n P:an integer type that is wide enough to hold a pointer.\nSpecial case (not in native mode unless 'long long' in platform C):\n q:long long;\n Q:unsigned long long\nWhitespace between formats is ignored.\n\nThe variable struct.error is an exception raised on errors.\"\"\"\n\nimport math\nimport re\nimport sys\n\n\nclass StructError(Exception):\n pass\n \n \nerror=StructError\n\ndef _normalize(fmt):\n ''\n \n if re.search(r\"\\d\\s+\",fmt):\n raise StructError(\"bad char in struct format\")\n return fmt.replace(\" \",\"\")\n \ndef unpack_int(data,index,size,le):\n bytes=[b for b in data[index:index+size]]\n if le =='little':\n bytes.reverse()\n number=0\n for b in bytes:\n number=number <<8 |b\n return int(number)\n \ndef unpack_signed_int(data,index,size,le):\n number=unpack_int(data,index,size,le)\n max=2 **(size *8)\n if number >2 **(size *8 -1)-1:\n number=int(-1 *(max -number))\n return number\n \nINFINITY=1e200 *1e200\nNAN=INFINITY /INFINITY\n\nBIG_ENDIAN=0\nLITTLE_ENDIAN=1\n\ndef unpack_char(data,index,size,le):\n return data[index:index+size]\n \ndef pack_int(number,size,le):\n x=number\n res=[]\n for i in range(size):\n res.append(x&0xff)\n x >>=8\n if le =='big':\n res.reverse()\n return bytes(res)\n \ndef pack_signed_int(number,size,le):\n if not isinstance(number,int):\n raise StructError(\"argument for i,I,l,L,q,Q,h,H must be integer\")\n if number >2 **(8 *size -1)-1 or number <-1 *2 **(8 *size -1):\n raise OverflowError(\"Number:%i too large to convert\"%number)\n return pack_int(number,size,le)\n \ndef pack_unsigned_int(number,size,le):\n if not isinstance(number,int):\n raise StructError(\"argument for i,I,l,L,q,Q,h,H must be integer\")\n if number <0:\n raise TypeError(\"can't convert negative long to unsigned\")\n if number >2 **(8 *size)-1:\n raise OverflowError(\"Number:%i too large to convert\"%number)\n return pack_int(number,size,le)\n \ndef pack_char(char,size,le):\n return bytes(char)\n \ndef isinf(x):\n return x !=0.0 and x /2 ==x\n \ndef isnan(v):\n return v !=v *1.0 or(v ==1.0 and v ==2.0)\n \ndef pack_float(x,size,le):\n unsigned=float_pack(x,size)\n result=[]\n for i in range(size):\n result.append((unsigned >>(i *8))&0xFF)\n if le ==\"big\":\n result.reverse()\n return bytes(result)\n \ndef unpack_float(data,index,size,le):\n binary=[data[i]for i in range(index,index+size)]\n if le ==\"big\":\n binary.reverse()\n unsigned=0\n for i in range(size):\n unsigned |=binary[i]<<(i *8)\n return float_unpack(unsigned,size,le)\n \ndef round_to_nearest(x):\n ''\n\n\n\n\n\n\n\n\n \n int_part=int(x)\n frac_part=x -int_part\n if frac_part >0.5 or frac_part ==0.5 and int_part&1 ==1:\n int_part +=1\n return int_part\n \ndef float_unpack(Q,size,order=LITTLE_ENDIAN):\n ''\n \n \n if size ==8:\n MIN_EXP=-1021\n MAX_EXP=1024\n MANT_DIG=53\n BITS=64\n elif size ==4:\n MIN_EXP=-125\n MAX_EXP=128\n MANT_DIG=24\n BITS=32\n else:\n raise ValueError(\"invalid size value\")\n \n if Q >>BITS:\n raise ValueError(\"input out of range\")\n \n \n sign=Q >>BITS -1\n exp=(Q&((1 <>MANT_DIG -1\n 
mant=Q&((1 <0:\n \n mant=round_to_nearest(m *(1 <=0:\n mant=round_to_nearest(m *(1 <=MAX_EXP -MIN_EXP+2:\n raise OverflowError(\"float too large to pack in this format\")\n \n \n assert 0 <=mant <1 <':(default,'big'),\n'!':(default,'big'),\n'=':(default,sys.byteorder),\n'@':(default,sys.byteorder)\n}\n\ndef _getmode(fmt):\n try:\n formatdef,endianness=formatmode[fmt[0]]\n alignment=fmt[0]not in formatmode or fmt[0]=='@'\n index=1\n except(IndexError,KeyError):\n formatdef,endianness=formatmode['@']\n alignment=True\n index=0\n return formatdef,endianness,index,alignment\n \ndef _getnum(fmt,i):\n num=None\n cur=fmt[i]\n while('0'<=cur)and(cur <='9'):\n if num ==None:\n num=int(cur)\n else:\n num=10 *num+int(cur)\n i +=1\n cur=fmt[i]\n return num,i\n \ndef calcsize(fmt):\n ''\n\n \n if isinstance(fmt,bytes):\n fmt=fmt.decode(\"ascii\")\n \n fmt=_normalize(fmt)\n \n formatdef,endianness,i,alignment=_getmode(fmt)\n num=0\n result=0\n while i 0:\n result +=[bytes([len(args[0])])+args[0][:num -1]+\n b'\\0'*padding]\n else:\n if num <255:\n result +=[bytes([num -1])+args[0][:num -1]]\n else:\n result +=[bytes([255])+args[0][:num -1]]\n args.pop(0)\n else:\n raise StructError(\"arg for string format not a string\")\n \n else:\n if len(args)=num:\n n=num -1\n result.append(data[j+1:j+n+1])\n j +=num\n else:\n \n if j >0 and alignment:\n padding=format['size']-j %format['size']\n j +=padding\n for n in range(num):\n result +=[format['unpack'](data,j,format['size'],\n endianness)]\n j +=format['size']\n \n return tuple(result)\n \ndef pack_into(fmt,buf,offset,*args):\n data=pack(fmt,*args)\n buf[offset:offset+len(data)]=data\n \ndef unpack_from(fmt,buf,offset=0):\n size=calcsize(fmt)\n data=buf[offset:offset+size]\n if len(data)!=size:\n raise error(\"unpack_from requires a buffer of at least %d bytes\"\n %(size,))\n return unpack(fmt,data)\n \ndef _clearcache():\n ''\n \n \nclass Struct:\n\n def __init__(self,fmt):\n self.format=fmt\n \n def pack(self,*args):\n return pack(self.format,*args)\n \n def pack_into(self,*args):\n return pack_into(self.format,*args)\n \n def unpack(self,*args):\n return unpack(self.format,*args)\n \n def unpack_from(self,*args):\n return unpack_from(self.format,*args)\n \nif __name__ =='__main__':\n t=pack('Bf',1,2)\n print(t,len(t))\n print(unpack('Bf',t))\n print(calcsize('Bf'))\n \n", ["math", "re", "sys"]], "time": [".py", "from browser import self as window\nimport _locale\nimport javascript\n\n\ndate=javascript.Date.new\nnow=javascript.Date.now\n\n\n\n\n\n\n\n_STRUCT_TM_ITEMS=9\n\n\n\n\n\ndef _get_day_of_year(arg):\n ''\n\n\n\n\n\n\n\n\n\n \n ml=[31,28,31,30,31,30,31,31,30,31,30,31]\n if arg[0]%4 ==0:\n ml[1]+=1\n i=1\n yday=0\n while i mm >13:\n raise ValueError(\"month out of range\")\n \n dd=t[2]\n if dd ==0:dd=1\n if -1 >dd >32:\n raise ValueError(\"day of month out of range\")\n \n hh=t[3]\n if -1 >hh >24:\n raise ValueError(\"hour out of range\")\n \n minu=t[4]\n if -1 >minu >60:\n raise ValueError(\"minute out of range\")\n \n ss=t[5]\n if -1 >ss >62:\n raise ValueError(\"seconds out of range\")\n \n wd=t[6]%7\n if wd <-2:\n raise ValueError(\"day of week out of range\")\n \n dy=t[7]\n if dy ==0:dy=1\n if -1 >dy >367:\n raise ValueError(\"day of year out of range\")\n \n return t[0],mm,dd,hh,minu,ss,wd,dy,t[-1]\n \n \ndef _is_dst(secs=None):\n ''\n d=date()\n if secs is not None:\n d=date(secs *1000)\n \n \n \n jan=date(d.getFullYear(),0,1)\n jul=date(d.getFullYear(),6,1)\n dst=int(d.getTimezoneOffset()=0 else 6\n tmp=struct_time([d.getUTCFullYear(),\n 
d.getUTCMonth()+1,d.getUTCDate(),\n d.getUTCHours(),d.getUTCMinutes(),d.getUTCSeconds(),\n wday,0,0])\n tmp.args[7]=_get_day_of_year(tmp.args)\n return tmp\n \ndef localtime(secs=None):\n d=date()\n if secs is not None:\n d=date(secs *1000)\n dst=_is_dst(secs)\n wday=d.getDay()-1 if d.getDay()-1 >=0 else 6\n tmp=struct_time([d.getFullYear(),\n d.getMonth()+1,d.getDate(),\n d.getHours(),d.getMinutes(),d.getSeconds(),\n wday,0,dst])\n tmp.args[7]=_get_day_of_year(tmp.args)\n return tmp\n \ndef mktime(t):\n if isinstance(t,struct_time):\n d1=date(t.tm_year,t.tm_mon -1,t.tm_mday,\n t.tm_hour,t.tm_min,t.tm_sec,0).getTime()\n elif isinstance(t,tuple):\n d1=date(t[0],t[1]-1,t[2],t[3],t[4],t[5],0).getTime()\n else:\n raise ValueError(\"Tuple or struct_time argument required\")\n d2=date(0).getTime()\n return(d1 -d2)/1000.\n \ndef monotonic():\n return now()/1000.\n \ndef perf_counter():\n return window.performance.now()/1000.\n \ndef process_time():\n return now()/1000.\n \ndef time():\n return float(date().getTime()/1000)\n \ndef sleep(secs):\n ''\n\n \n \n float(secs)\n raise NotImplementedError(\"Blocking functions like time.sleep() are not \"\n \"supported in the browser. Use functions in module browser.timer \"\n \"instead.\")\n \ndef strftime(_format,t=None):\n def ns(t,nb):\n \n res=str(t)\n while len(res)>4)&0x3)),\n chr(((B -0x20)&0xf)<<4 |(((C -0x20)>>2)&0xf)),\n chr(((C -0x20)&0x3)<<6 |((D -0x20)&0x3f))\n ])for A,B,C,D in quadruplets_gen(s[1:].rstrip())]\n except ValueError:\n raise Error('Illegal char')\n result=''.join(result)\n trailingdata=result[length:]\n if trailingdata.strip('\\x00'):\n raise Error('Trailing garbage')\n result=result[:length]\n if len(result)>2)&0x3F],\n table_b2a_base64[((A <<4)|((B >>4)&0xF))&0x3F],\n table_b2a_base64[((B <<2)|((C >>6)&0x3))&0x3F],\n table_b2a_base64[(C)&0x3F]])\n for A,B,C in a]\n \n final=s[length -final_length:]\n if final_length ==0:\n snippet=''\n elif final_length ==1:\n a=final[0]\n snippet=table_b2a_base64[(a >>2)&0x3F]+\\\n table_b2a_base64[(a <<4)&0x3F]+'=='\n else :\n a=final[0]\n b=final[1]\n snippet=table_b2a_base64[(a >>2)&0x3F]+\\\n table_b2a_base64[((a <<4)|(b >>4)&0xF)&0x3F]+\\\n table_b2a_base64[(b <<2)&0x3F]+'='\n \n result=''.join(result)+snippet\n if newline:\n result +='\\n'\n return bytes(result,__BRYTHON__.charset)\n \ndef a2b_qp(s,header=False ):\n inp=0\n odata=[]\n while inp =len(s):\n break\n \n if (s[inp]=='\\n')or (s[inp]=='\\r'):\n if s[inp]!='\\n':\n while inp 0 and data[lf -1]=='\\r'\n \n inp=0\n linelen=0\n odata=[]\n while inp '~'or\n c =='='or\n (header and c =='_')or\n (c =='.'and linelen ==0 and (inp+1 ==len(data)or\n data[inp+1]=='\\n'or\n data[inp+1]=='\\r'))or\n (not istext and (c =='\\r'or c =='\\n'))or\n ((c =='\\t'or c ==' ')and (inp+1 ==len(data)))or\n (c <=' 'and c !='\\r'and c !='\\n'and\n (quotetabs or (not quotetabs and (c !='\\t'and c !=' '))))):\n linelen +=3\n if linelen >=MAXLINESIZE:\n odata.append('=')\n if crlf:odata.append('\\r')\n odata.append('\\n')\n linelen=3\n odata.append('='+two_hex_digits(ord(c)))\n inp +=1\n else :\n if (istext and\n (c =='\\n'or (inp+1 0 and\n (odata[-1]==' 'or odata[-1]=='\\t')):\n ch=ord(odata[-1])\n odata[-1]='='\n odata.append(two_hex_digits(ch))\n \n if crlf:odata.append('\\r')\n odata.append('\\n')\n if c =='\\r':\n inp +=2\n else :\n inp +=1\n else :\n if (inp+1 =MAXLINESIZE):\n odata.append('=')\n if crlf:odata.append('\\r')\n odata.append('\\n')\n linelen=0\n \n linelen +=1\n if header and c ==' ':\n c='_'\n odata.append(c)\n inp +=1\n return 
''.join(odata)\n \nhex_numbers='0123456789ABCDEF'\ndef hex(n):\n if n ==0:\n return '0'\n \n if n <0:\n n=-n\n sign='-'\n else :\n sign=''\n arr=[]\n \n def hex_gen(n):\n ''\n while n:\n yield n %0x10\n n=n /0x10\n \n for nibble in hex_gen(n):\n arr=[hex_numbers[nibble]]+arr\n return sign+''.join(arr)\n \ndef two_hex_digits(n):\n return hex_numbers[n /0x10]+hex_numbers[n %0x10]\n \n \ndef strhex_to_int(s):\n i=0\n for c in s:\n i=i *0x10+hex_numbers.index(c)\n return i\n \nhqx_encoding='!\"#$%&\\'()*+,-012345689@ABCDEFGHIJKLMNPQRSTUVXYZ[`abcdefhijklmpqr'\n\nDONE=0x7f\nSKIP=0x7e\nFAIL=0x7d\n\ntable_a2b_hqx=[\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,SKIP,FAIL,FAIL,SKIP,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,0x00,0x01,0x02,0x03,0x04,0x05,0x06,\n\n0x07,0x08,0x09,0x0A,0x0B,0x0C,FAIL,FAIL,\n\n0x0D,0x0E,0x0F,0x10,0x11,0x12,0x13,FAIL,\n\n0x14,0x15,DONE,FAIL,FAIL,FAIL,FAIL,FAIL,\n\n0x16,0x17,0x18,0x19,0x1A,0x1B,0x1C,0x1D,\n\n0x1E,0x1F,0x20,0x21,0x22,0x23,0x24,FAIL,\n\n0x25,0x26,0x27,0x28,0x29,0x2A,0x2B,FAIL,\n\n0x2C,0x2D,0x2E,0x2F,FAIL,FAIL,FAIL,FAIL,\n\n0x30,0x31,0x32,0x33,0x34,0x35,0x36,FAIL,\n\n0x37,0x38,0x39,0x3A,0x3B,0x3C,FAIL,FAIL,\n\n0x3D,0x3E,0x3F,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n]\n\ndef a2b_hqx(s):\n result=[]\n \n def quadruples_gen(s):\n t=[]\n for c in s:\n res=table_a2b_hqx[ord(c)]\n if res ==SKIP:\n continue\n elif res ==FAIL:\n raise Error('Illegal character')\n elif res ==DONE:\n yield t\n raise Done\n else :\n t.append(res)\n if len(t)==4:\n yield t\n t=[]\n yield t\n \n done=0\n try :\n for snippet in quadruples_gen(s):\n length=len(snippet)\n if length ==4:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n result.append(chr(((snippet[1]&0x0f)<<4)|(snippet[2]>>2)))\n result.append(chr(((snippet[2]&0x03)<<6)|(snippet[3])))\n elif length ==3:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n result.append(chr(((snippet[1]&0x0f)<<4)|(snippet[2]>>2)))\n elif length ==2:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n except Done:\n done=1\n except Error:\n raise\n return (''.join(result),done)\n \n \n \ndef b2a_hqx(s):\n result=[]\n \n def triples_gen(s):\n while s:\n try :\n yield ord(s[0]),ord(s[1]),ord(s[2])\n except IndexError:\n yield tuple([ord(c)for c in s])\n s=s[3:]\n \n for snippet in triples_gen(s):\n length=len(snippet)\n if length ==3:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)|((snippet[1]&0xf0)>>4)])\n result.append(hqx_encoding[\n (snippet[1]&0x0f)<<2 |((snippet[2]&0xc0)>>6)])\n result.append(hqx_encoding[snippet[2]&0x3f])\n elif length ==2:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)|((snippet[1]&0xf0)>>4)])\n 
result.append(hqx_encoding[\n (snippet[1]&0x0f)<<2])\n elif length ==1:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)])\n return ''.join(result)\n \ncrctab_hqx=[\n0x0000,0x1021,0x2042,0x3063,0x4084,0x50a5,0x60c6,0x70e7,\n0x8108,0x9129,0xa14a,0xb16b,0xc18c,0xd1ad,0xe1ce,0xf1ef,\n0x1231,0x0210,0x3273,0x2252,0x52b5,0x4294,0x72f7,0x62d6,\n0x9339,0x8318,0xb37b,0xa35a,0xd3bd,0xc39c,0xf3ff,0xe3de,\n0x2462,0x3443,0x0420,0x1401,0x64e6,0x74c7,0x44a4,0x5485,\n0xa56a,0xb54b,0x8528,0x9509,0xe5ee,0xf5cf,0xc5ac,0xd58d,\n0x3653,0x2672,0x1611,0x0630,0x76d7,0x66f6,0x5695,0x46b4,\n0xb75b,0xa77a,0x9719,0x8738,0xf7df,0xe7fe,0xd79d,0xc7bc,\n0x48c4,0x58e5,0x6886,0x78a7,0x0840,0x1861,0x2802,0x3823,\n0xc9cc,0xd9ed,0xe98e,0xf9af,0x8948,0x9969,0xa90a,0xb92b,\n0x5af5,0x4ad4,0x7ab7,0x6a96,0x1a71,0x0a50,0x3a33,0x2a12,\n0xdbfd,0xcbdc,0xfbbf,0xeb9e,0x9b79,0x8b58,0xbb3b,0xab1a,\n0x6ca6,0x7c87,0x4ce4,0x5cc5,0x2c22,0x3c03,0x0c60,0x1c41,\n0xedae,0xfd8f,0xcdec,0xddcd,0xad2a,0xbd0b,0x8d68,0x9d49,\n0x7e97,0x6eb6,0x5ed5,0x4ef4,0x3e13,0x2e32,0x1e51,0x0e70,\n0xff9f,0xefbe,0xdfdd,0xcffc,0xbf1b,0xaf3a,0x9f59,0x8f78,\n0x9188,0x81a9,0xb1ca,0xa1eb,0xd10c,0xc12d,0xf14e,0xe16f,\n0x1080,0x00a1,0x30c2,0x20e3,0x5004,0x4025,0x7046,0x6067,\n0x83b9,0x9398,0xa3fb,0xb3da,0xc33d,0xd31c,0xe37f,0xf35e,\n0x02b1,0x1290,0x22f3,0x32d2,0x4235,0x5214,0x6277,0x7256,\n0xb5ea,0xa5cb,0x95a8,0x8589,0xf56e,0xe54f,0xd52c,0xc50d,\n0x34e2,0x24c3,0x14a0,0x0481,0x7466,0x6447,0x5424,0x4405,\n0xa7db,0xb7fa,0x8799,0x97b8,0xe75f,0xf77e,0xc71d,0xd73c,\n0x26d3,0x36f2,0x0691,0x16b0,0x6657,0x7676,0x4615,0x5634,\n0xd94c,0xc96d,0xf90e,0xe92f,0x99c8,0x89e9,0xb98a,0xa9ab,\n0x5844,0x4865,0x7806,0x6827,0x18c0,0x08e1,0x3882,0x28a3,\n0xcb7d,0xdb5c,0xeb3f,0xfb1e,0x8bf9,0x9bd8,0xabbb,0xbb9a,\n0x4a75,0x5a54,0x6a37,0x7a16,0x0af1,0x1ad0,0x2ab3,0x3a92,\n0xfd2e,0xed0f,0xdd6c,0xcd4d,0xbdaa,0xad8b,0x9de8,0x8dc9,\n0x7c26,0x6c07,0x5c64,0x4c45,0x3ca2,0x2c83,0x1ce0,0x0cc1,\n0xef1f,0xff3e,0xcf5d,0xdf7c,0xaf9b,0xbfba,0x8fd9,0x9ff8,\n0x6e17,0x7e36,0x4e55,0x5e74,0x2e93,0x3eb2,0x0ed1,0x1ef0,\n]\n\ndef crc_hqx(s,crc):\n for c in s:\n crc=((crc <<8)&0xff00)^crctab_hqx[((crc >>8)&0xff)^ord(c)]\n \n return crc\n \ndef rlecode_hqx(s):\n ''\n\n\n\n \n if not s:\n return ''\n result=[]\n prev=s[0]\n count=1\n \n \n \n \n if s[-1]=='!':\n s=s[1:]+'?'\n else :\n s=s[1:]+'!'\n \n for c in s:\n if c ==prev and count <255:\n count +=1\n else :\n if count ==1:\n if prev !='\\x90':\n result.append(prev)\n else :\n result.extend(['\\x90','\\x00'])\n elif count <4:\n if prev !='\\x90':\n result.extend([prev]*count)\n else :\n result.extend(['\\x90','\\x00']*count)\n else :\n if prev !='\\x90':\n result.extend([prev,'\\x90',chr(count)])\n else :\n result.extend(['\\x90','\\x00','\\x90',chr(count)])\n count=1\n prev=c\n \n return ''.join(result)\n \ndef rledecode_hqx(s):\n s=s.split('\\x90')\n result=[s[0]]\n prev=s[0]\n for snippet in s[1:]:\n count=ord(snippet[0])\n if count >0:\n result.append(prev[-1]*(count -1))\n prev=snippet\n else :\n result.append('\\x90')\n prev='\\x90'\n result.append(snippet[1:])\n \n return ''.join(result)\n 
\ncrc_32_tab=[\n0x00000000,0x77073096,0xee0e612c,0x990951ba,0x076dc419,\n0x706af48f,0xe963a535,0x9e6495a3,0x0edb8832,0x79dcb8a4,\n0xe0d5e91e,0x97d2d988,0x09b64c2b,0x7eb17cbd,0xe7b82d07,\n0x90bf1d91,0x1db71064,0x6ab020f2,0xf3b97148,0x84be41de,\n0x1adad47d,0x6ddde4eb,0xf4d4b551,0x83d385c7,0x136c9856,\n0x646ba8c0,0xfd62f97a,0x8a65c9ec,0x14015c4f,0x63066cd9,\n0xfa0f3d63,0x8d080df5,0x3b6e20c8,0x4c69105e,0xd56041e4,\n0xa2677172,0x3c03e4d1,0x4b04d447,0xd20d85fd,0xa50ab56b,\n0x35b5a8fa,0x42b2986c,0xdbbbc9d6,0xacbcf940,0x32d86ce3,\n0x45df5c75,0xdcd60dcf,0xabd13d59,0x26d930ac,0x51de003a,\n0xc8d75180,0xbfd06116,0x21b4f4b5,0x56b3c423,0xcfba9599,\n0xb8bda50f,0x2802b89e,0x5f058808,0xc60cd9b2,0xb10be924,\n0x2f6f7c87,0x58684c11,0xc1611dab,0xb6662d3d,0x76dc4190,\n0x01db7106,0x98d220bc,0xefd5102a,0x71b18589,0x06b6b51f,\n0x9fbfe4a5,0xe8b8d433,0x7807c9a2,0x0f00f934,0x9609a88e,\n0xe10e9818,0x7f6a0dbb,0x086d3d2d,0x91646c97,0xe6635c01,\n0x6b6b51f4,0x1c6c6162,0x856530d8,0xf262004e,0x6c0695ed,\n0x1b01a57b,0x8208f4c1,0xf50fc457,0x65b0d9c6,0x12b7e950,\n0x8bbeb8ea,0xfcb9887c,0x62dd1ddf,0x15da2d49,0x8cd37cf3,\n0xfbd44c65,0x4db26158,0x3ab551ce,0xa3bc0074,0xd4bb30e2,\n0x4adfa541,0x3dd895d7,0xa4d1c46d,0xd3d6f4fb,0x4369e96a,\n0x346ed9fc,0xad678846,0xda60b8d0,0x44042d73,0x33031de5,\n0xaa0a4c5f,0xdd0d7cc9,0x5005713c,0x270241aa,0xbe0b1010,\n0xc90c2086,0x5768b525,0x206f85b3,0xb966d409,0xce61e49f,\n0x5edef90e,0x29d9c998,0xb0d09822,0xc7d7a8b4,0x59b33d17,\n0x2eb40d81,0xb7bd5c3b,0xc0ba6cad,0xedb88320,0x9abfb3b6,\n0x03b6e20c,0x74b1d29a,0xead54739,0x9dd277af,0x04db2615,\n0x73dc1683,0xe3630b12,0x94643b84,0x0d6d6a3e,0x7a6a5aa8,\n0xe40ecf0b,0x9309ff9d,0x0a00ae27,0x7d079eb1,0xf00f9344,\n0x8708a3d2,0x1e01f268,0x6906c2fe,0xf762575d,0x806567cb,\n0x196c3671,0x6e6b06e7,0xfed41b76,0x89d32be0,0x10da7a5a,\n0x67dd4acc,0xf9b9df6f,0x8ebeeff9,0x17b7be43,0x60b08ed5,\n0xd6d6a3e8,0xa1d1937e,0x38d8c2c4,0x4fdff252,0xd1bb67f1,\n0xa6bc5767,0x3fb506dd,0x48b2364b,0xd80d2bda,0xaf0a1b4c,\n0x36034af6,0x41047a60,0xdf60efc3,0xa867df55,0x316e8eef,\n0x4669be79,0xcb61b38c,0xbc66831a,0x256fd2a0,0x5268e236,\n0xcc0c7795,0xbb0b4703,0x220216b9,0x5505262f,0xc5ba3bbe,\n0xb2bd0b28,0x2bb45a92,0x5cb36a04,0xc2d7ffa7,0xb5d0cf31,\n0x2cd99e8b,0x5bdeae1d,0x9b64c2b0,0xec63f226,0x756aa39c,\n0x026d930a,0x9c0906a9,0xeb0e363f,0x72076785,0x05005713,\n0x95bf4a82,0xe2b87a14,0x7bb12bae,0x0cb61b38,0x92d28e9b,\n0xe5d5be0d,0x7cdcefb7,0x0bdbdf21,0x86d3d2d4,0xf1d4e242,\n0x68ddb3f8,0x1fda836e,0x81be16cd,0xf6b9265b,0x6fb077e1,\n0x18b74777,0x88085ae6,0xff0f6a70,0x66063bca,0x11010b5c,\n0x8f659eff,0xf862ae69,0x616bffd3,0x166ccf45,0xa00ae278,\n0xd70dd2ee,0x4e048354,0x3903b3c2,0xa7672661,0xd06016f7,\n0x4969474d,0x3e6e77db,0xaed16a4a,0xd9d65adc,0x40df0b66,\n0x37d83bf0,0xa9bcae53,0xdebb9ec5,0x47b2cf7f,0x30b5ffe9,\n0xbdbdf21c,0xcabac28a,0x53b39330,0x24b4a3a6,0xbad03605,\n0xcdd70693,0x54de5729,0x23d967bf,0xb3667a2e,0xc4614ab8,\n0x5d681b02,0x2a6f2b94,0xb40bbe37,0xc30c8ea1,0x5a05df1b,\n0x2d02ef8d\n]\n\ndef crc32(s,crc=0):\n result=0\n crc=~int(crc)&0xffffffff\n \n for c in s:\n crc=crc_32_tab[(crc ^int(ord(c)))&0xff]^(crc >>8)\n \n \n \n result=crc ^0xffffffff\n \n if result >2 **31:\n result=((result+2 **31)%2 **32)-2 **31\n \n return result\n", ["_base64", "_binascii"]], "gzip": [".py", "''\n\n\n\n\n\n\nimport struct,sys,time,os\nimport zlib\nimport builtins\nimport io\nimport 
_compression\n\n__all__=[\"BadGzipFile\",\"GzipFile\",\"open\",\"compress\",\"decompress\"]\n\nFTEXT,FHCRC,FEXTRA,FNAME,FCOMMENT=1,2,4,8,16\n\nREAD,WRITE=1,2\n\n_COMPRESS_LEVEL_FAST=1\n_COMPRESS_LEVEL_TRADEOFF=6\n_COMPRESS_LEVEL_BEST=9\n\n\ndef open(filename,mode=\"rb\",compresslevel=_COMPRESS_LEVEL_BEST,\nencoding=None ,errors=None ,newline=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if \"t\"in mode:\n if \"b\"in mode:\n raise ValueError(\"Invalid mode: %r\"%(mode,))\n else :\n if encoding is not None :\n raise ValueError(\"Argument 'encoding' not supported in binary mode\")\n if errors is not None :\n raise ValueError(\"Argument 'errors' not supported in binary mode\")\n if newline is not None :\n raise ValueError(\"Argument 'newline' not supported in binary mode\")\n \n gz_mode=mode.replace(\"t\",\"\")\n if isinstance(filename,(str,bytes,os.PathLike)):\n binary_file=GzipFile(filename,gz_mode,compresslevel)\n elif hasattr(filename,\"read\")or hasattr(filename,\"write\"):\n binary_file=GzipFile(None ,gz_mode,compresslevel,filename)\n else :\n raise TypeError(\"filename must be a str or bytes object, or a file\")\n \n if \"t\"in mode:\n encoding=io.text_encoding(encoding)\n return io.TextIOWrapper(binary_file,encoding,errors,newline)\n else :\n return binary_file\n \ndef write32u(output,value):\n\n\n output.write(struct.pack(\"'\n \n def _init_write(self,filename):\n self.name=filename\n self.crc=zlib.crc32(b\"\")\n self.size=0\n self.writebuf=[]\n self.bufsize=0\n self.offset=0\n \n def _write_gzip_header(self,compresslevel):\n self.fileobj.write(b'\\037\\213')\n self.fileobj.write(b'\\010')\n try :\n \n \n fname=os.path.basename(self.name)\n if not isinstance(fname,bytes):\n fname=fname.encode('latin-1')\n if fname.endswith(b'.gz'):\n fname=fname[:-3]\n except UnicodeEncodeError:\n fname=b''\n flags=0\n if fname:\n flags=FNAME\n self.fileobj.write(chr(flags).encode('latin-1'))\n mtime=self._write_mtime\n if mtime is None :\n mtime=time.time()\n write32u(self.fileobj,int(mtime))\n if compresslevel ==_COMPRESS_LEVEL_BEST:\n xfl=b'\\002'\n elif compresslevel ==_COMPRESS_LEVEL_FAST:\n xfl=b'\\004'\n else :\n xfl=b'\\000'\n self.fileobj.write(xfl)\n self.fileobj.write(b'\\377')\n if fname:\n self.fileobj.write(fname+b'\\000')\n \n def write(self,data):\n self._check_not_closed()\n if self.mode !=WRITE:\n import errno\n raise OSError(errno.EBADF,\"write() on read-only GzipFile object\")\n \n if self.fileobj is None :\n raise ValueError(\"write() on closed GzipFile object\")\n \n if isinstance(data,(bytes,bytearray)):\n length=len(data)\n else :\n \n data=memoryview(data)\n length=data.nbytes\n \n if length >0:\n self.fileobj.write(self.compress.compress(data))\n self.size +=length\n self.crc=zlib.crc32(data,self.crc)\n self.offset +=length\n \n return length\n \n def read(self,size=-1):\n self._check_not_closed()\n if self.mode !=READ:\n import errno\n raise OSError(errno.EBADF,\"read() on write-only GzipFile object\")\n return self._buffer.read(size)\n \n def read1(self,size=-1):\n ''\n\n \n self._check_not_closed()\n if self.mode !=READ:\n import errno\n raise OSError(errno.EBADF,\"read1() on write-only GzipFile object\")\n \n if size <0:\n size=io.DEFAULT_BUFFER_SIZE\n return self._buffer.read1(size)\n \n def peek(self,n):\n self._check_not_closed()\n if self.mode !=READ:\n import errno\n raise OSError(errno.EBADF,\"peek() on write-only GzipFile object\")\n return self._buffer.peek(n)\n \n @property\n def closed(self):\n return self.fileobj is None\n \n def close(self):\n 
fileobj=self.fileobj\n if fileobj is None :\n return\n self.fileobj=None\n try :\n if self.mode ==WRITE:\n fileobj.write(self.compress.flush())\n write32u(fileobj,self.crc)\n \n write32u(fileobj,self.size&0xffffffff)\n elif self.mode ==READ:\n self._buffer.close()\n finally :\n myfileobj=self.myfileobj\n if myfileobj:\n self.myfileobj=None\n myfileobj.close()\n \n def flush(self,zlib_mode=zlib.Z_SYNC_FLUSH):\n self._check_not_closed()\n if self.mode ==WRITE:\n \n self.fileobj.write(self.compress.flush(zlib_mode))\n self.fileobj.flush()\n \n def fileno(self):\n ''\n\n\n\n \n return self.fileobj.fileno()\n \n def rewind(self):\n ''\n \n if self.mode !=READ:\n raise OSError(\"Can't rewind in write mode\")\n self._buffer.seek(0)\n \n def readable(self):\n return self.mode ==READ\n \n def writable(self):\n return self.mode ==WRITE\n \n def seekable(self):\n return True\n \n def seek(self,offset,whence=io.SEEK_SET):\n if self.mode ==WRITE:\n if whence !=io.SEEK_SET:\n if whence ==io.SEEK_CUR:\n offset=self.offset+offset\n else :\n raise ValueError('Seek from end not supported')\n if offset 2:\n raise AddressValueError(f\"Only one '/' permitted in {address!r}\")\n return addr\n \n \ndef _find_address_range(addresses):\n ''\n\n\n\n\n\n\n\n \n it=iter(addresses)\n first=last=next(it)\n for ip in it:\n if ip._ip !=last._ip+1:\n yield first,last\n first=ip\n last=ip\n yield first,last\n \n \ndef _count_righthand_zero_bits(number,bits):\n ''\n\n\n\n\n\n\n\n\n \n if number ==0:\n return bits\n return min(bits,(~number&(number -1)).bit_length())\n \n \ndef summarize_address_range(first,last):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if (not (isinstance(first,_BaseAddress)and\n isinstance(last,_BaseAddress))):\n raise TypeError('first and last must be IP addresses, not networks')\n if first.version !=last.version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n first,last))\n if first >last:\n raise ValueError('last IP address must be greater than first')\n \n if first.version ==4:\n ip=IPv4Network\n elif first.version ==6:\n ip=IPv6Network\n else :\n raise ValueError('unknown IP version')\n \n ip_bits=first._max_prefixlen\n first_int=first._ip\n last_int=last._ip\n while first_int <=last_int:\n nbits=min(_count_righthand_zero_bits(first_int,ip_bits),\n (last_int -first_int+1).bit_length()-1)\n net=ip((first_int,ip_bits -nbits))\n yield net\n first_int +=1 <=net.broadcast_address:\n continue\n yield net\n last=net\n \n \ndef collapse_addresses(addresses):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n addrs=[]\n ips=[]\n nets=[]\n \n \n for ip in addresses:\n if isinstance(ip,_BaseAddress):\n if ips and ips[-1]._version !=ip._version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n ip,ips[-1]))\n ips.append(ip)\n elif ip._prefixlen ==ip._max_prefixlen:\n if ips and ips[-1]._version !=ip._version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n ip,ips[-1]))\n try :\n ips.append(ip.ip)\n except AttributeError:\n ips.append(ip.network_address)\n else :\n if nets and nets[-1]._version !=ip._version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n ip,nets[-1]))\n nets.append(ip)\n \n \n ips=sorted(set(ips))\n \n \n if ips:\n for first,last in _find_address_range(ips):\n addrs.extend(summarize_address_range(first,last))\n \n return _collapse_addresses_internal(addrs+nets)\n \n \ndef get_mixed_type_key(obj):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(obj,_BaseNetwork):\n return obj._get_networks_key()\n elif 
isinstance(obj,_BaseAddress):\n return obj._get_address_key()\n return NotImplemented\n \n \nclass _IPAddressBase:\n\n ''\n \n __slots__=()\n \n @property\n def exploded(self):\n ''\n return self._explode_shorthand_ip_string()\n \n @property\n def compressed(self):\n ''\n return str(self)\n \n @property\n def reverse_pointer(self):\n ''\n\n\n\n\n\n \n return self._reverse_pointer()\n \n @property\n def version(self):\n msg='%200s has no version specified'%(type(self),)\n raise NotImplementedError(msg)\n \n def _check_int_address(self,address):\n if address <0:\n msg=\"%d (< 0) is not permitted as an IPv%d address\"\n raise AddressValueError(msg %(address,self._version))\n if address >self._ALL_ONES:\n msg=\"%d (>= 2**%d) is not permitted as an IPv%d address\"\n raise AddressValueError(msg %(address,self._max_prefixlen,\n self._version))\n \n def _check_packed_address(self,address,expected_len):\n address_len=len(address)\n if address_len !=expected_len:\n msg=\"%r (len %d != %d) is not permitted as an IPv%d address\"\n raise AddressValueError(msg %(address,address_len,\n expected_len,self._version))\n \n @classmethod\n def _ip_int_from_prefix(cls,prefixlen):\n ''\n\n\n\n\n\n\n\n \n return cls._ALL_ONES ^(cls._ALL_ONES >>prefixlen)\n \n @classmethod\n def _prefix_from_ip_int(cls,ip_int):\n ''\n\n\n\n\n\n\n\n\n\n \n trailing_zeroes=_count_righthand_zero_bits(ip_int,\n cls._max_prefixlen)\n prefixlen=cls._max_prefixlen -trailing_zeroes\n leading_ones=ip_int >>trailing_zeroes\n all_ones=(1 <1:\n return address\n return address[0],cls._max_prefixlen\n \n def __reduce__(self):\n return self.__class__,(str(self),)\n \n \n_address_fmt_re=None\n\n@functools.total_ordering\nclass _BaseAddress(_IPAddressBase):\n\n ''\n\n\n\n \n \n __slots__=()\n \n def __int__(self):\n return self._ip\n \n def __eq__(self,other):\n try :\n return (self._ip ==other._ip\n and self._version ==other._version)\n except AttributeError:\n return NotImplemented\n \n def __lt__(self,other):\n if not isinstance(other,_BaseAddress):\n return NotImplemented\n if self._version !=other._version:\n raise TypeError('%s and %s are not of the same version'%(\n self,other))\n if self._ip !=other._ip:\n return self._ip =0:\n if network+n >broadcast:\n raise IndexError('address out of range')\n return self._address_class(network+n)\n else :\n n +=1\n if broadcast+n other.network_address:\n return 1\n \n if self.netmask other.netmask:\n return 1\n return 0\n \n def _get_networks_key(self):\n ''\n\n\n\n\n\n \n return (self._version,self.network_address,self.netmask)\n \n def subnets(self,prefixlen_diff=1,new_prefix=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self._prefixlen ==self._max_prefixlen:\n yield self\n return\n \n if new_prefix is not None :\n if new_prefix 0')\n new_prefixlen=self._prefixlen+prefixlen_diff\n \n if new_prefixlen >self._max_prefixlen:\n raise ValueError(\n 'prefix length diff %d is invalid for netblock %s'%(\n new_prefixlen,self))\n \n start=int(self.network_address)\n end=int(self.broadcast_address)+1\n step=(int(self.hostmask)+1)>>prefixlen_diff\n for new_addr in range(start,end,step):\n current=self.__class__((new_addr,new_prefixlen))\n yield current\n \n def supernet(self,prefixlen_diff=1,new_prefix=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self._prefixlen ==0:\n return self\n \n if new_prefix is not None :\n if new_prefix >self._prefixlen:\n raise ValueError('new prefix must be shorter')\n if prefixlen_diff !=1:\n raise ValueError('cannot set prefixlen_diff and 
new_prefix')\n prefixlen_diff=self._prefixlen -new_prefix\n \n new_prefixlen=self.prefixlen -prefixlen_diff\n if new_prefixlen <0:\n raise ValueError(\n 'current prefixlen is %d, cannot have a prefixlen_diff of %d'%\n (self.prefixlen,prefixlen_diff))\n return self.__class__((\n int(self.network_address)&(int(self.netmask)<=a.broadcast_address)\n except AttributeError:\n raise TypeError(f\"Unable to test subnet containment \"\n f\"between {a} and {b}\")\n \n def subnet_of(self,other):\n ''\n return self._is_subnet_of(self,other)\n \n def supernet_of(self,other):\n ''\n return self._is_subnet_of(other,self)\n \n @property\n def is_reserved(self):\n ''\n\n\n\n\n\n \n return (self.network_address.is_reserved and\n self.broadcast_address.is_reserved)\n \n @property\n def is_link_local(self):\n ''\n\n\n\n\n \n return (self.network_address.is_link_local and\n self.broadcast_address.is_link_local)\n \n @property\n def is_private(self):\n ''\n\n\n\n\n\n \n return (self.network_address.is_private and\n self.broadcast_address.is_private)\n \n @property\n def is_global(self):\n ''\n\n\n\n\n\n \n return not self.is_private\n \n @property\n def is_unspecified(self):\n ''\n\n\n\n\n\n \n return (self.network_address.is_unspecified and\n self.broadcast_address.is_unspecified)\n \n @property\n def is_loopback(self):\n ''\n\n\n\n\n\n \n return (self.network_address.is_loopback and\n self.broadcast_address.is_loopback)\n \nclass _BaseV4:\n\n ''\n\n\n\n\n \n \n __slots__=()\n _version=4\n \n _ALL_ONES=(2 **IPV4LENGTH)-1\n \n _max_prefixlen=IPV4LENGTH\n \n \n _netmask_cache={}\n \n def _explode_shorthand_ip_string(self):\n return str(self)\n \n @classmethod\n def _make_netmask(cls,arg):\n ''\n\n\n\n\n\n \n if arg not in cls._netmask_cache:\n if isinstance(arg,int):\n prefixlen=arg\n if not (0 <=prefixlen <=cls._max_prefixlen):\n cls._report_invalid_netmask(prefixlen)\n else :\n try :\n \n prefixlen=cls._prefix_from_prefix_string(arg)\n except NetmaskValueError:\n \n \n prefixlen=cls._prefix_from_ip_string(arg)\n netmask=IPv4Address(cls._ip_int_from_prefix(prefixlen))\n cls._netmask_cache[arg]=netmask,prefixlen\n return cls._netmask_cache[arg]\n \n @classmethod\n def _ip_int_from_string(cls,ip_str):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not ip_str:\n raise AddressValueError('Address cannot be empty')\n \n octets=ip_str.split('.')\n if len(octets)!=4:\n raise AddressValueError(\"Expected 4 octets in %r\"%ip_str)\n \n try :\n return int.from_bytes(map(cls._parse_octet,octets),'big')\n except ValueError as exc:\n raise AddressValueError(\"%s in %r\"%(exc,ip_str))from None\n \n @classmethod\n def _parse_octet(cls,octet_str):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not octet_str:\n raise ValueError(\"Empty octet not permitted\")\n \n if not (octet_str.isascii()and octet_str.isdigit()):\n msg=\"Only decimal digits permitted in %r\"\n raise ValueError(msg %octet_str)\n \n \n if len(octet_str)>3:\n msg=\"At most 3 characters permitted in %r\"\n raise ValueError(msg %octet_str)\n \n \n if octet_str !='0'and octet_str[0]=='0':\n msg=\"Leading zeros are not permitted in %r\"\n raise ValueError(msg %octet_str)\n \n octet_int=int(octet_str,10)\n if octet_int >255:\n raise ValueError(\"Octet %d (> 255) not permitted\"%octet_int)\n return octet_int\n \n @classmethod\n def _string_from_ip_int(cls,ip_int):\n ''\n\n\n\n\n\n\n\n \n return '.'.join(map(str,ip_int.to_bytes(4,'big')))\n \n def _reverse_pointer(self):\n ''\n\n\n\n \n reverse_octets=str(self).split('.')[::-1]\n return '.'.join(reverse_octets)+'.in-addr.arpa'\n \n @property\n def 
max_prefixlen(self):\n return self._max_prefixlen\n \n @property\n def version(self):\n return self._version\n \n \nclass IPv4Address(_BaseV4,_BaseAddress):\n\n ''\n \n __slots__=('_ip','__weakref__')\n \n def __init__(self,address):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if isinstance(address,int):\n self._check_int_address(address)\n self._ip=address\n return\n \n \n if isinstance(address,bytes):\n self._check_packed_address(address,4)\n self._ip=int.from_bytes(address)\n return\n \n \n \n addr_str=str(address)\n if '/'in addr_str:\n raise AddressValueError(f\"Unexpected '/' in {address!r}\")\n self._ip=self._ip_int_from_string(addr_str)\n \n @property\n def packed(self):\n ''\n return v4_int_to_packed(self._ip)\n \n @property\n def is_reserved(self):\n ''\n\n\n\n\n\n \n return self in self._constants._reserved_network\n \n @property\n @functools.lru_cache()\n def is_private(self):\n ''\n\n\n\n\n\n \n return any(self in net for net in self._constants._private_networks)\n \n @property\n @functools.lru_cache()\n def is_global(self):\n return self not in self._constants._public_network and not self.is_private\n \n @property\n def is_multicast(self):\n ''\n\n\n\n\n\n \n return self in self._constants._multicast_network\n \n @property\n def is_unspecified(self):\n ''\n\n\n\n\n\n \n return self ==self._constants._unspecified_address\n \n @property\n def is_loopback(self):\n ''\n\n\n\n\n \n return self in self._constants._loopback_network\n \n @property\n def is_link_local(self):\n ''\n\n\n\n\n \n return self in self._constants._linklocal_network\n \n \nclass IPv4Interface(IPv4Address):\n\n def __init__(self,address):\n addr,mask=self._split_addr_prefix(address)\n \n IPv4Address.__init__(self,addr)\n self.network=IPv4Network((addr,mask),strict=False )\n self.netmask=self.network.netmask\n self._prefixlen=self.network._prefixlen\n \n @functools.cached_property\n def hostmask(self):\n return self.network.hostmask\n \n def __str__(self):\n return '%s/%d'%(self._string_from_ip_int(self._ip),\n self._prefixlen)\n \n def __eq__(self,other):\n address_equal=IPv4Address.__eq__(self,other)\n if address_equal is NotImplemented or not address_equal:\n return address_equal\n try :\n return self.network ==other.network\n except AttributeError:\n \n \n \n return False\n \n def __lt__(self,other):\n address_less=IPv4Address.__lt__(self,other)\n if address_less is NotImplemented:\n return NotImplemented\n try :\n return (self.network >16)&0xFFFF))\n parts.append('%x'%(ipv4_int&0xFFFF))\n \n \n \n \n _max_parts=cls._HEXTET_COUNT+1\n if len(parts)>_max_parts:\n msg=\"At most %d colons permitted in %r\"%(_max_parts -1,ip_str)\n raise AddressValueError(msg)\n \n \n \n skip_index=None\n for i in range(1,len(parts)-1):\n if not parts[i]:\n if skip_index is not None :\n \n msg=\"At most one '::' permitted in %r\"%ip_str\n raise AddressValueError(msg)\n skip_index=i\n \n \n \n if skip_index is not None :\n \n parts_hi=skip_index\n parts_lo=len(parts)-skip_index -1\n if not parts[0]:\n parts_hi -=1\n if parts_hi:\n msg=\"Leading ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n if not parts[-1]:\n parts_lo -=1\n if parts_lo:\n msg=\"Trailing ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n parts_skipped=cls._HEXTET_COUNT -(parts_hi+parts_lo)\n if parts_skipped <1:\n msg=\"Expected at most %d other parts with '::' in %r\"\n raise AddressValueError(msg %(cls._HEXTET_COUNT -1,ip_str))\n else :\n \n \n \n if len(parts)!=cls._HEXTET_COUNT:\n msg=\"Exactly 
%d parts expected without '::' in %r\"\n raise AddressValueError(msg %(cls._HEXTET_COUNT,ip_str))\n if not parts[0]:\n msg=\"Leading ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n if not parts[-1]:\n msg=\"Trailing ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n parts_hi=len(parts)\n parts_lo=0\n parts_skipped=0\n \n try :\n \n ip_int=0\n for i in range(parts_hi):\n ip_int <<=16\n ip_int |=cls._parse_hextet(parts[i])\n ip_int <<=16 *parts_skipped\n for i in range(-parts_lo,0):\n ip_int <<=16\n ip_int |=cls._parse_hextet(parts[i])\n return ip_int\n except ValueError as exc:\n raise AddressValueError(\"%s in %r\"%(exc,ip_str))from None\n \n @classmethod\n def _parse_hextet(cls,hextet_str):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n if not cls._HEX_DIGITS.issuperset(hextet_str):\n raise ValueError(\"Only hex digits permitted in %r\"%hextet_str)\n \n \n if len(hextet_str)>4:\n msg=\"At most 4 characters permitted in %r\"\n raise ValueError(msg %hextet_str)\n \n return int(hextet_str,16)\n \n @classmethod\n def _compress_hextets(cls,hextets):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n best_doublecolon_start=-1\n best_doublecolon_len=0\n doublecolon_start=-1\n doublecolon_len=0\n for index,hextet in enumerate(hextets):\n if hextet =='0':\n doublecolon_len +=1\n if doublecolon_start ==-1:\n \n doublecolon_start=index\n if doublecolon_len >best_doublecolon_len:\n \n best_doublecolon_len=doublecolon_len\n best_doublecolon_start=doublecolon_start\n else :\n doublecolon_len=0\n doublecolon_start=-1\n \n if best_doublecolon_len >1:\n best_doublecolon_end=(best_doublecolon_start+\n best_doublecolon_len)\n \n if best_doublecolon_end ==len(hextets):\n hextets +=['']\n hextets[best_doublecolon_start:best_doublecolon_end]=['']\n \n if best_doublecolon_start ==0:\n hextets=['']+hextets\n \n return hextets\n \n @classmethod\n def _string_from_ip_int(cls,ip_int=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if ip_int is None :\n ip_int=int(cls._ip)\n \n if ip_int >cls._ALL_ONES:\n raise ValueError('IPv6 address is too large')\n \n hex_str='%032x'%ip_int\n hextets=['%x'%int(hex_str[x:x+4],16)for x in range(0,32,4)]\n \n hextets=cls._compress_hextets(hextets)\n return ':'.join(hextets)\n \n def _explode_shorthand_ip_string(self):\n ''\n\n\n\n\n\n\n\n \n if isinstance(self,IPv6Network):\n ip_str=str(self.network_address)\n elif isinstance(self,IPv6Interface):\n ip_str=str(self.ip)\n else :\n ip_str=str(self)\n \n ip_int=self._ip_int_from_string(ip_str)\n hex_str='%032x'%ip_int\n parts=[hex_str[x:x+4]for x in range(0,32,4)]\n if isinstance(self,(_BaseNetwork,IPv6Interface)):\n return '%s/%d'%(':'.join(parts),self._prefixlen)\n return ':'.join(parts)\n \n def _reverse_pointer(self):\n ''\n\n\n\n \n reverse_chars=self.exploded[::-1].replace(':','')\n return '.'.join(reverse_chars)+'.ip6.arpa'\n \n @staticmethod\n def _split_scope_id(ip_str):\n ''\n\n\n\n\n\n\n\n\n\n \n addr,sep,scope_id=ip_str.partition('%')\n if not sep:\n scope_id=None\n elif not scope_id or '%'in scope_id:\n raise AddressValueError('Invalid IPv6 address: \"%r\"'%ip_str)\n return addr,scope_id\n \n @property\n def max_prefixlen(self):\n return self._max_prefixlen\n \n @property\n def version(self):\n return self._version\n \n \nclass IPv6Address(_BaseV6,_BaseAddress):\n\n ''\n \n __slots__=('_ip','_scope_id','__weakref__')\n \n def __init__(self,address):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if isinstance(address,int):\n self._check_int_address(address)\n self._ip=address\n 
self._scope_id=None\n return\n \n \n if isinstance(address,bytes):\n self._check_packed_address(address,16)\n self._ip=int.from_bytes(address,'big')\n self._scope_id=None\n return\n \n \n \n addr_str=str(address)\n if '/'in addr_str:\n raise AddressValueError(f\"Unexpected '/' in {address!r}\")\n addr_str,self._scope_id=self._split_scope_id(addr_str)\n \n self._ip=self._ip_int_from_string(addr_str)\n \n def __str__(self):\n ip_str=super().__str__()\n return ip_str+'%'+self._scope_id if self._scope_id else ip_str\n \n def __hash__(self):\n return hash((self._ip,self._scope_id))\n \n def __eq__(self,other):\n address_equal=super().__eq__(other)\n if address_equal is NotImplemented:\n return NotImplemented\n if not address_equal:\n return False\n return self._scope_id ==getattr(other,'_scope_id',None )\n \n @property\n def scope_id(self):\n ''\n\n\n\n\n\n\n \n return self._scope_id\n \n @property\n def packed(self):\n ''\n return v6_int_to_packed(self._ip)\n \n @property\n def is_multicast(self):\n ''\n\n\n\n\n\n \n return self in self._constants._multicast_network\n \n @property\n def is_reserved(self):\n ''\n\n\n\n\n\n \n return any(self in x for x in self._constants._reserved_networks)\n \n @property\n def is_link_local(self):\n ''\n\n\n\n\n \n return self in self._constants._linklocal_network\n \n @property\n def is_site_local(self):\n ''\n\n\n\n\n\n\n\n\n \n return self in self._constants._sitelocal_network\n \n @property\n @functools.lru_cache()\n def is_private(self):\n ''\n\n\n\n\n\n\n \n ipv4_mapped=self.ipv4_mapped\n if ipv4_mapped is not None :\n return ipv4_mapped.is_private\n return any(self in net for net in self._constants._private_networks)\n \n @property\n def is_global(self):\n ''\n\n\n\n\n\n \n return not self.is_private\n \n @property\n def is_unspecified(self):\n ''\n\n\n\n\n\n \n return self._ip ==0\n \n @property\n def is_loopback(self):\n ''\n\n\n\n\n\n \n return self._ip ==1\n \n @property\n def ipv4_mapped(self):\n ''\n\n\n\n\n\n \n if (self._ip >>32)!=0xFFFF:\n return None\n return IPv4Address(self._ip&0xFFFFFFFF)\n \n @property\n def teredo(self):\n ''\n\n\n\n\n\n\n \n if (self._ip >>96)!=0x20010000:\n return None\n return (IPv4Address((self._ip >>64)&0xFFFFFFFF),\n IPv4Address(~self._ip&0xFFFFFFFF))\n \n @property\n def sixtofour(self):\n ''\n\n\n\n\n\n \n if (self._ip >>112)!=0x2002:\n return None\n return IPv4Address((self._ip >>80)&0xFFFFFFFF)\n \n \nclass IPv6Interface(IPv6Address):\n\n def __init__(self,address):\n addr,mask=self._split_addr_prefix(address)\n \n IPv6Address.__init__(self,addr)\n self.network=IPv6Network((addr,mask),strict=False )\n self.netmask=self.network.netmask\n self._prefixlen=self.network._prefixlen\n \n @functools.cached_property\n def hostmask(self):\n return self.network.hostmask\n \n def __str__(self):\n return '%s/%d'%(super().__str__(),\n self._prefixlen)\n \n def __eq__(self,other):\n address_equal=IPv6Address.__eq__(self,other)\n if address_equal is NotImplemented or not address_equal:\n return address_equal\n try :\n return self.network ==other.network\n except AttributeError:\n \n \n \n return False\n \n def __lt__(self,other):\n address_less=IPv6Address.__lt__(self,other)\n if address_less is NotImplemented:\n return address_less\n try :\n return (self.network =len(fmt):\n \n \n \n continue\n field_name=fmt[i]\n is_metadata=field_name.startswith(':')\n if i >=n_defaults and not is_metadata:\n \n \n h=field_name+\": \"\n if token and token[:len(h)].lower()!=h:\n raise NNTPDataError(\"OVER/XOVER response doesn't include \"\n 
\"names of additional headers\")\n token=token[len(h):]if token else None\n fields[fmt[i]]=token\n overview.append((article_number,fields))\n return overview\n \ndef _parse_datetime(date_str,time_str=None):\n ''\n\n\n \n if time_str is None:\n time_str=date_str[-6:]\n date_str=date_str[:-6]\n hours=int(time_str[:2])\n minutes=int(time_str[2:4])\n seconds=int(time_str[4:])\n year=int(date_str[:-4])\n month=int(date_str[-4:-2])\n day=int(date_str[-2:])\n \n \n if year <70:\n year +=2000\n elif year <100:\n year +=1900\n return datetime.datetime(year,month,day,hours,minutes,seconds)\n \ndef _unparse_datetime(dt,legacy=False):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not isinstance(dt,datetime.datetime):\n time_str=\"000000\"\n else:\n time_str=\"{0.hour:02d}{0.minute:02d}{0.second:02d}\".format(dt)\n y=dt.year\n if legacy:\n y=y %100\n date_str=\"{0:02d}{1.month:02d}{1.day:02d}\".format(y,dt)\n else:\n date_str=\"{0:04d}{1.month:02d}{1.day:02d}\".format(y,dt)\n return date_str,time_str\n \n \nif _have_ssl:\n\n def _encrypt_on(sock,context,hostname):\n ''\n\n\n\n\n \n \n if context is None:\n context=ssl._create_stdlib_context()\n return context.wrap_socket(sock,server_hostname=hostname)\n \n \n \nclass NNTP:\n\n\n\n\n\n\n\n\n\n\n\n\n encoding='utf-8'\n errors='surrogateescape'\n \n def __init__(self,host,port=NNTP_PORT,user=None,password=None,\n readermode=None,usenetrc=False,\n timeout=_GLOBAL_DEFAULT_TIMEOUT):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.host=host\n self.port=port\n self.sock=self._create_socket(timeout)\n self.file=None\n try:\n self.file=self.sock.makefile(\"rwb\")\n self._base_init(readermode)\n if user or usenetrc:\n self.login(user,password,usenetrc)\n except:\n if self.file:\n self.file.close()\n self.sock.close()\n raise\n \n def _base_init(self,readermode):\n ''\n\n \n self.debugging=0\n self.welcome=self._getresp()\n \n \n self._caps=None\n self.getcapabilities()\n \n \n \n \n \n \n \n \n self.readermode_afterauth=False\n if readermode and 'READER'not in self._caps:\n self._setreadermode()\n if not self.readermode_afterauth:\n \n self._caps=None\n self.getcapabilities()\n \n \n \n \n self.tls_on=False\n \n \n self.authenticated=False\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n is_connected=lambda:hasattr(self,\"file\")\n if is_connected():\n try:\n self.quit()\n except(OSError,EOFError):\n pass\n finally:\n if is_connected():\n self._close()\n \n def _create_socket(self,timeout):\n if timeout is not None and not timeout:\n raise ValueError('Non-blocking socket (timeout=0) is not supported')\n sys.audit(\"nntplib.connect\",self,self.host,self.port)\n return socket.create_connection((self.host,self.port),timeout)\n \n def getwelcome(self):\n ''\n\n\n \n \n if self.debugging:print('*welcome*',repr(self.welcome))\n return self.welcome\n \n def getcapabilities(self):\n ''\n\n \n if self._caps is None:\n self.nntp_version=1\n self.nntp_implementation=None\n try:\n resp,caps=self.capabilities()\n except(NNTPPermanentError,NNTPTemporaryError):\n \n self._caps={}\n else:\n self._caps=caps\n if 'VERSION'in caps:\n \n \n self.nntp_version=max(map(int,caps['VERSION']))\n if 'IMPLEMENTATION'in caps:\n self.nntp_implementation=' '.join(caps['IMPLEMENTATION'])\n return self._caps\n \n def set_debuglevel(self,level):\n ''\n\n\n \n \n self.debugging=level\n debug=set_debuglevel\n \n def _putline(self,line):\n ''\n \n sys.audit(\"nntplib.putline\",self,line)\n line=line+_CRLF\n if self.debugging >1:print('*put*',repr(line))\n self.file.write(line)\n 
self.file.flush()\n \n def _putcmd(self,line):\n ''\n \n if self.debugging:print('*cmd*',repr(line))\n line=line.encode(self.encoding,self.errors)\n self._putline(line)\n \n def _getline(self,strip_crlf=True):\n ''\n\n \n line=self.file.readline(_MAXLINE+1)\n if len(line)>_MAXLINE:\n raise NNTPDataError('line too long')\n if self.debugging >1:\n print('*get*',repr(line))\n if not line:raise EOFError\n if strip_crlf:\n if line[-2:]==_CRLF:\n line=line[:-2]\n elif line[-1:]in _CRLF:\n line=line[:-1]\n return line\n \n def _getresp(self):\n ''\n\n \n resp=self._getline()\n if self.debugging:print('*resp*',repr(resp))\n resp=resp.decode(self.encoding,self.errors)\n c=resp[:1]\n if c =='4':\n raise NNTPTemporaryError(resp)\n if c =='5':\n raise NNTPPermanentError(resp)\n if c not in '123':\n raise NNTPProtocolError(resp)\n return resp\n \n def _getlongresp(self,file=None):\n ''\n\n\n\n\n\n \n \n openedFile=None\n try:\n \n if isinstance(file,(str,bytes)):\n openedFile=file=open(file,\"wb\")\n \n resp=self._getresp()\n if resp[:3]not in _LONGRESP:\n raise NNTPReplyError(resp)\n \n lines=[]\n if file is not None:\n \n terminators=(b'.'+_CRLF,b'.\\n')\n while 1:\n line=self._getline(False)\n if line in terminators:\n break\n if line.startswith(b'..'):\n line=line[1:]\n file.write(line)\n else:\n terminator=b'.'\n while 1:\n line=self._getline()\n if line ==terminator:\n break\n if line.startswith(b'..'):\n line=line[1:]\n lines.append(line)\n finally:\n \n if openedFile:\n openedFile.close()\n \n return resp,lines\n \n def _shortcmd(self,line):\n ''\n \n self._putcmd(line)\n return self._getresp()\n \n def _longcmd(self,line,file=None):\n ''\n \n self._putcmd(line)\n return self._getlongresp(file)\n \n def _longcmdstring(self,line,file=None):\n ''\n\n\n \n self._putcmd(line)\n resp,list=self._getlongresp(file)\n return resp,[line.decode(self.encoding,self.errors)\n for line in list]\n \n def _getoverviewfmt(self):\n ''\n \n try:\n return self._cachedoverviewfmt\n except AttributeError:\n pass\n try:\n resp,lines=self._longcmdstring(\"LIST OVERVIEW.FMT\")\n except NNTPPermanentError:\n \n fmt=_DEFAULT_OVERVIEW_FMT[:]\n else:\n fmt=_parse_overview_fmt(lines)\n self._cachedoverviewfmt=fmt\n return fmt\n \n def _grouplist(self,lines):\n \n return[GroupInfo(*line.split())for line in lines]\n \n def capabilities(self):\n ''\n\n\n\n\n \n caps={}\n resp,lines=self._longcmdstring(\"CAPABILITIES\")\n for line in lines:\n name,*tokens=line.split()\n caps[name]=tokens\n return resp,caps\n \n def newgroups(self,date,*,file=None):\n ''\n\n\n\n\n \n if not isinstance(date,(datetime.date,datetime.date)):\n raise TypeError(\n \"the date parameter must be a date or datetime object, \"\n \"not '{:40}'\".format(date.__class__.__name__))\n date_str,time_str=_unparse_datetime(date,self.nntp_version <2)\n cmd='NEWGROUPS {0} {1}'.format(date_str,time_str)\n resp,lines=self._longcmdstring(cmd,file)\n return resp,self._grouplist(lines)\n \n def newnews(self,group,date,*,file=None):\n ''\n\n\n\n\n\n \n if not isinstance(date,(datetime.date,datetime.date)):\n raise TypeError(\n \"the date parameter must be a date or datetime object, \"\n \"not '{:40}'\".format(date.__class__.__name__))\n date_str,time_str=_unparse_datetime(date,self.nntp_version <2)\n cmd='NEWNEWS {0} {1} {2}'.format(group,date_str,time_str)\n return self._longcmdstring(cmd,file)\n \n def list(self,group_pattern=None,*,file=None):\n ''\n\n\n\n\n\n \n if group_pattern is not None:\n command='LIST ACTIVE '+group_pattern\n else:\n command='LIST'\n 
resp,lines=self._longcmdstring(command,file)\n return resp,self._grouplist(lines)\n \n def _getdescriptions(self,group_pattern,return_all):\n line_pat=re.compile('^(?P[^ \\t]+)[ \\t]+(.*)$')\n \n resp,lines=self._longcmdstring('LIST NEWSGROUPS '+group_pattern)\n if not resp.startswith('215'):\n \n \n \n resp,lines=self._longcmdstring('XGTITLE '+group_pattern)\n groups={}\n for raw_line in lines:\n match=line_pat.search(raw_line.strip())\n if match:\n name,desc=match.group(1,2)\n if not return_all:\n return desc\n groups[name]=desc\n if return_all:\n return resp,groups\n else:\n \n return ''\n \n def description(self,group):\n ''\n\n\n\n\n\n\n\n\n \n return self._getdescriptions(group,False)\n \n def descriptions(self,group_pattern):\n ''\n return self._getdescriptions(group_pattern,True)\n \n def group(self,name):\n ''\n\n\n\n\n\n\n\n \n resp=self._shortcmd('GROUP '+name)\n if not resp.startswith('211'):\n raise NNTPReplyError(resp)\n words=resp.split()\n count=first=last=0\n n=len(words)\n if n >1:\n count=words[1]\n if n >2:\n first=words[2]\n if n >3:\n last=words[3]\n if n >4:\n name=words[4].lower()\n return resp,int(count),int(first),int(last),name\n \n def help(self,*,file=None):\n ''\n\n\n\n\n\n \n return self._longcmdstring('HELP',file)\n \n def _statparse(self,resp):\n ''\n \n if not resp.startswith('22'):\n raise NNTPReplyError(resp)\n words=resp.split()\n art_num=int(words[1])\n message_id=words[2]\n return resp,art_num,message_id\n \n def _statcmd(self,line):\n ''\n resp=self._shortcmd(line)\n return self._statparse(resp)\n \n def stat(self,message_spec=None):\n ''\n\n\n\n\n\n\n \n if message_spec:\n return self._statcmd('STAT {0}'.format(message_spec))\n else:\n return self._statcmd('STAT')\n \n def next(self):\n ''\n return self._statcmd('NEXT')\n \n def last(self):\n ''\n return self._statcmd('LAST')\n \n def _artcmd(self,line,file=None):\n ''\n resp,lines=self._longcmd(line,file)\n resp,art_num,message_id=self._statparse(resp)\n return resp,ArticleInfo(art_num,message_id,lines)\n \n def head(self,message_spec=None,*,file=None):\n ''\n\n\n\n\n\n \n if message_spec is not None:\n cmd='HEAD {0}'.format(message_spec)\n else:\n cmd='HEAD'\n return self._artcmd(cmd,file)\n \n def body(self,message_spec=None,*,file=None):\n ''\n\n\n\n\n\n \n if message_spec is not None:\n cmd='BODY {0}'.format(message_spec)\n else:\n cmd='BODY'\n return self._artcmd(cmd,file)\n \n def article(self,message_spec=None,*,file=None):\n ''\n\n\n\n\n\n \n if message_spec is not None:\n cmd='ARTICLE {0}'.format(message_spec)\n else:\n cmd='ARTICLE'\n return self._artcmd(cmd,file)\n \n def slave(self):\n ''\n\n \n return self._shortcmd('SLAVE')\n \n def xhdr(self,hdr,str,*,file=None):\n ''\n\n\n\n\n\n\n \n pat=re.compile('^([0-9]+) ?(.*)\\n?')\n resp,lines=self._longcmdstring('XHDR {0} {1}'.format(hdr,str),file)\n def remove_number(line):\n m=pat.match(line)\n return m.group(1,2)if m else line\n return resp,[remove_number(line)for line in lines]\n \n def xover(self,start,end,*,file=None):\n ''\n\n\n\n\n\n\n \n resp,lines=self._longcmdstring('XOVER {0}-{1}'.format(start,end),\n file)\n fmt=self._getoverviewfmt()\n return resp,_parse_overview(lines,fmt)\n \n def over(self,message_spec,*,file=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n cmd='OVER'if 'OVER'in self._caps else 'XOVER'\n if isinstance(message_spec,(tuple,list)):\n start,end=message_spec\n cmd +=' {0}-{1}'.format(start,end or '')\n elif message_spec is not None:\n cmd=cmd+' '+message_spec\n resp,lines=self._longcmdstring(cmd,file)\n 
fmt=self._getoverviewfmt()\n return resp,_parse_overview(lines,fmt)\n \n def date(self):\n ''\n\n\n\n \n resp=self._shortcmd(\"DATE\")\n if not resp.startswith('111'):\n raise NNTPReplyError(resp)\n elem=resp.split()\n if len(elem)!=2:\n raise NNTPDataError(resp)\n date=elem[1]\n if len(date)!=14:\n raise NNTPDataError(resp)\n return resp,_parse_datetime(date,None)\n \n def _post(self,command,f):\n resp=self._shortcmd(command)\n \n if not resp.startswith('3'):\n raise NNTPReplyError(resp)\n if isinstance(f,(bytes,bytearray)):\n f=f.splitlines()\n \n \n \n \n for line in f:\n if not line.endswith(_CRLF):\n line=line.rstrip(b\"\\r\\n\")+_CRLF\n if line.startswith(b'.'):\n line=b'.'+line\n self.file.write(line)\n self.file.write(b\".\\r\\n\")\n self.file.flush()\n return self._getresp()\n \n def post(self,data):\n ''\n\n\n \n return self._post('POST',data)\n \n def ihave(self,message_id,data):\n ''\n\n\n\n\n \n return self._post('IHAVE {0}'.format(message_id),data)\n \n def _close(self):\n try:\n if self.file:\n self.file.close()\n del self.file\n finally:\n self.sock.close()\n \n def quit(self):\n ''\n \n try:\n resp=self._shortcmd('QUIT')\n finally:\n self._close()\n return resp\n \n def login(self,user=None,password=None,usenetrc=True):\n if self.authenticated:\n raise ValueError(\"Already logged in.\")\n if not user and not usenetrc:\n raise ValueError(\n \"At least one of `user` and `usenetrc` must be specified\")\n \n \n \n try:\n if usenetrc and not user:\n import netrc\n credentials=netrc.netrc()\n auth=credentials.authenticators(self.host)\n if auth:\n user=auth[0]\n password=auth[2]\n except OSError:\n pass\n \n if not user:\n return\n resp=self._shortcmd('authinfo user '+user)\n if resp.startswith('381'):\n if not password:\n raise NNTPReplyError(resp)\n else:\n resp=self._shortcmd('authinfo pass '+password)\n if not resp.startswith('281'):\n raise NNTPPermanentError(resp)\n \n self._caps=None\n self.getcapabilities()\n \n \n if self.readermode_afterauth and 'READER'not in self._caps:\n self._setreadermode()\n \n self._caps=None\n self.getcapabilities()\n \n def _setreadermode(self):\n try:\n self.welcome=self._shortcmd('mode reader')\n except NNTPPermanentError:\n \n pass\n except NNTPTemporaryError as e:\n if e.response.startswith('480'):\n \n self.readermode_afterauth=True\n else:\n raise\n \n if _have_ssl:\n def starttls(self,context=None):\n ''\n\n \n \n \n if self.tls_on:\n raise ValueError(\"TLS is already enabled.\")\n if self.authenticated:\n raise ValueError(\"TLS cannot be started after authentication.\")\n resp=self._shortcmd('STARTTLS')\n if resp.startswith('382'):\n self.file.close()\n self.sock=_encrypt_on(self.sock,context,self.host)\n self.file=self.sock.makefile(\"rwb\")\n self.tls_on=True\n \n \n self._caps=None\n self.getcapabilities()\n else:\n raise NNTPError(\"TLS failed to start.\")\n \n \nif _have_ssl:\n class NNTP_SSL(NNTP):\n \n def __init__(self,host,port=NNTP_SSL_PORT,\n user=None,password=None,ssl_context=None,\n readermode=None,usenetrc=False,\n timeout=_GLOBAL_DEFAULT_TIMEOUT):\n ''\n\n \n self.ssl_context=ssl_context\n super().__init__(host,port,user,password,readermode,\n usenetrc,timeout)\n \n def _create_socket(self,timeout):\n sock=super()._create_socket(timeout)\n try:\n sock=_encrypt_on(sock,self.ssl_context,self.host)\n except:\n sock.close()\n raise\n else:\n return sock\n \n __all__.append(\"NNTP_SSL\")\n \n \n \nif __name__ =='__main__':\n import argparse\n \n parser=argparse.ArgumentParser(description=\"\"\"\\\n nntplib built-in demo - 
display the latest articles in a newsgroup\"\"\")\n parser.add_argument('-g','--group',default='gmane.comp.python.general',\n help='group to fetch messages from (default: %(default)s)')\n parser.add_argument('-s','--server',default='news.gmane.io',\n help='NNTP server hostname (default: %(default)s)')\n parser.add_argument('-p','--port',default=-1,type=int,\n help='NNTP port number (default: %s / %s)'%(NNTP_PORT,NNTP_SSL_PORT))\n parser.add_argument('-n','--nb-articles',default=10,type=int,\n help='number of articles to fetch (default: %(default)s)')\n parser.add_argument('-S','--ssl',action='store_true',default=False,\n help='use NNTP over SSL')\n args=parser.parse_args()\n \n port=args.port\n if not args.ssl:\n if port ==-1:\n port=NNTP_PORT\n s=NNTP(host=args.server,port=port)\n else:\n if port ==-1:\n port=NNTP_SSL_PORT\n s=NNTP_SSL(host=args.server,port=port)\n \n caps=s.getcapabilities()\n if 'STARTTLS'in caps:\n s.starttls()\n resp,count,first,last,name=s.group(args.group)\n print('Group',name,'has',count,'articles, range',first,'to',last)\n \n def cut(s,lim):\n if len(s)>lim:\n s=s[:lim -4]+\"...\"\n return s\n \n first=str(int(last)-args.nb_articles+1)\n resp,overviews=s.xover(first,last)\n for artnum,over in overviews:\n author=decode_header(over['from']).split('<',1)[0]\n subject=decode_header(over['subject'])\n lines=int(over[':lines'])\n print(\"{:7} {:20} {:42} ({})\".format(\n artnum,cut(author,20),cut(subject,42),lines)\n )\n \n s.quit()\n", ["argparse", "collections", "datetime", "email.header", "netrc", "re", "socket", "ssl", "sys", "warnings"]], "_compat_pickle": [".py", "\n\n\n\n\n\n\nIMPORT_MAPPING={\n'__builtin__':'builtins',\n'copy_reg':'copyreg',\n'Queue':'queue',\n'SocketServer':'socketserver',\n'ConfigParser':'configparser',\n'repr':'reprlib',\n'tkFileDialog':'tkinter.filedialog',\n'tkSimpleDialog':'tkinter.simpledialog',\n'tkColorChooser':'tkinter.colorchooser',\n'tkCommonDialog':'tkinter.commondialog',\n'Dialog':'tkinter.dialog',\n'Tkdnd':'tkinter.dnd',\n'tkFont':'tkinter.font',\n'tkMessageBox':'tkinter.messagebox',\n'ScrolledText':'tkinter.scrolledtext',\n'Tkconstants':'tkinter.constants',\n'Tix':'tkinter.tix',\n'ttk':'tkinter.ttk',\n'Tkinter':'tkinter',\n'markupbase':'_markupbase',\n'_winreg':'winreg',\n'thread':'_thread',\n'dummy_thread':'_dummy_thread',\n'dbhash':'dbm.bsd',\n'dumbdbm':'dbm.dumb',\n'dbm':'dbm.ndbm',\n'gdbm':'dbm.gnu',\n'xmlrpclib':'xmlrpc.client',\n'SimpleXMLRPCServer':'xmlrpc.server',\n'httplib':'http.client',\n'htmlentitydefs':'html.entities',\n'HTMLParser':'html.parser',\n'Cookie':'http.cookies',\n'cookielib':'http.cookiejar',\n'BaseHTTPServer':'http.server',\n'test.test_support':'test.support',\n'commands':'subprocess',\n'urlparse':'urllib.parse',\n'robotparser':'urllib.robotparser',\n'urllib2':'urllib.request',\n'anydbm':'dbm',\n'_abcoll':'collections.abc',\n}\n\n\n\n\n\nNAME_MAPPING={\n('__builtin__','xrange'):('builtins','range'),\n('__builtin__','reduce'):('functools','reduce'),\n('__builtin__','intern'):('sys','intern'),\n('__builtin__','unichr'):('builtins','chr'),\n('__builtin__','unicode'):('builtins','str'),\n('__builtin__','long'):('builtins','int'),\n('itertools','izip'):('builtins','zip'),\n('itertools','imap'):('builtins','map'),\n('itertools','ifilter'):('builtins','filter'),\n('itertools','ifilterfalse'):('itertools','filterfalse'),\n('itertools','izip_longest'):('itertools','zip_longest'),\n('UserDict','IterableUserDict'):('collections','UserDict'),\n('UserList','UserList'):('collections','UserList'),\n('UserString','UserSt
ring'):('collections','UserString'),\n('whichdb','whichdb'):('dbm','whichdb'),\n('_socket','fromfd'):('socket','fromfd'),\n('_multiprocessing','Connection'):('multiprocessing.connection','Connection'),\n('multiprocessing.process','Process'):('multiprocessing.context','Process'),\n('multiprocessing.forking','Popen'):('multiprocessing.popen_fork','Popen'),\n('urllib','ContentTooShortError'):('urllib.error','ContentTooShortError'),\n('urllib','getproxies'):('urllib.request','getproxies'),\n('urllib','pathname2url'):('urllib.request','pathname2url'),\n('urllib','quote_plus'):('urllib.parse','quote_plus'),\n('urllib','quote'):('urllib.parse','quote'),\n('urllib','unquote_plus'):('urllib.parse','unquote_plus'),\n('urllib','unquote'):('urllib.parse','unquote'),\n('urllib','url2pathname'):('urllib.request','url2pathname'),\n('urllib','urlcleanup'):('urllib.request','urlcleanup'),\n('urllib','urlencode'):('urllib.parse','urlencode'),\n('urllib','urlopen'):('urllib.request','urlopen'),\n('urllib','urlretrieve'):('urllib.request','urlretrieve'),\n('urllib2','HTTPError'):('urllib.error','HTTPError'),\n('urllib2','URLError'):('urllib.error','URLError'),\n}\n\nPYTHON2_EXCEPTIONS=(\n\"ArithmeticError\",\n\"AssertionError\",\n\"AttributeError\",\n\"BaseException\",\n\"BufferError\",\n\"BytesWarning\",\n\"DeprecationWarning\",\n\"EOFError\",\n\"EnvironmentError\",\n\"Exception\",\n\"FloatingPointError\",\n\"FutureWarning\",\n\"GeneratorExit\",\n\"IOError\",\n\"ImportError\",\n\"ImportWarning\",\n\"IndentationError\",\n\"IndexError\",\n\"KeyError\",\n\"KeyboardInterrupt\",\n\"LookupError\",\n\"MemoryError\",\n\"NameError\",\n\"NotImplementedError\",\n\"OSError\",\n\"OverflowError\",\n\"PendingDeprecationWarning\",\n\"ReferenceError\",\n\"RuntimeError\",\n\"RuntimeWarning\",\n\n\"StopIteration\",\n\"SyntaxError\",\n\"SyntaxWarning\",\n\"SystemError\",\n\"SystemExit\",\n\"TabError\",\n\"TypeError\",\n\"UnboundLocalError\",\n\"UnicodeDecodeError\",\n\"UnicodeEncodeError\",\n\"UnicodeError\",\n\"UnicodeTranslateError\",\n\"UnicodeWarning\",\n\"UserWarning\",\n\"ValueError\",\n\"Warning\",\n\"ZeroDivisionError\",\n)\n\ntry:\n WindowsError\nexcept NameError:\n pass\nelse:\n PYTHON2_EXCEPTIONS +=(\"WindowsError\",)\n \nfor excname in PYTHON2_EXCEPTIONS:\n NAME_MAPPING[(\"exceptions\",excname)]=(\"builtins\",excname)\n \nMULTIPROCESSING_EXCEPTIONS=(\n'AuthenticationError',\n'BufferTooShort',\n'ProcessError',\n'TimeoutError',\n)\n\nfor excname in MULTIPROCESSING_EXCEPTIONS:\n NAME_MAPPING[(\"multiprocessing\",excname)]=(\"multiprocessing.context\",excname)\n \n \nREVERSE_IMPORT_MAPPING=dict((v,k)for(k,v)in IMPORT_MAPPING.items())\nassert len(REVERSE_IMPORT_MAPPING)==len(IMPORT_MAPPING)\nREVERSE_NAME_MAPPING=dict((v,k)for(k,v)in NAME_MAPPING.items())\nassert 
len(REVERSE_NAME_MAPPING)==len(NAME_MAPPING)\n\n\n\nIMPORT_MAPPING.update({\n'cPickle':'pickle',\n'_elementtree':'xml.etree.ElementTree',\n'FileDialog':'tkinter.filedialog',\n'SimpleDialog':'tkinter.simpledialog',\n'DocXMLRPCServer':'xmlrpc.server',\n'SimpleHTTPServer':'http.server',\n'CGIHTTPServer':'http.server',\n\n'UserDict':'collections',\n'UserList':'collections',\n'UserString':'collections',\n'whichdb':'dbm',\n'StringIO':'io',\n'cStringIO':'io',\n})\n\nREVERSE_IMPORT_MAPPING.update({\n'_bz2':'bz2',\n'_dbm':'dbm',\n'_functools':'functools',\n'_gdbm':'gdbm',\n'_pickle':'pickle',\n})\n\nNAME_MAPPING.update({\n('__builtin__','basestring'):('builtins','str'),\n('exceptions','StandardError'):('builtins','Exception'),\n('UserDict','UserDict'):('collections','UserDict'),\n('socket','_socketobject'):('socket','SocketType'),\n})\n\nREVERSE_NAME_MAPPING.update({\n('_functools','reduce'):('__builtin__','reduce'),\n('tkinter.filedialog','FileDialog'):('FileDialog','FileDialog'),\n('tkinter.filedialog','LoadFileDialog'):('FileDialog','LoadFileDialog'),\n('tkinter.filedialog','SaveFileDialog'):('FileDialog','SaveFileDialog'),\n('tkinter.simpledialog','SimpleDialog'):('SimpleDialog','SimpleDialog'),\n('xmlrpc.server','ServerHTMLDoc'):('DocXMLRPCServer','ServerHTMLDoc'),\n('xmlrpc.server','XMLRPCDocGenerator'):\n('DocXMLRPCServer','XMLRPCDocGenerator'),\n('xmlrpc.server','DocXMLRPCRequestHandler'):\n('DocXMLRPCServer','DocXMLRPCRequestHandler'),\n('xmlrpc.server','DocXMLRPCServer'):\n('DocXMLRPCServer','DocXMLRPCServer'),\n('xmlrpc.server','DocCGIXMLRPCRequestHandler'):\n('DocXMLRPCServer','DocCGIXMLRPCRequestHandler'),\n('http.server','SimpleHTTPRequestHandler'):\n('SimpleHTTPServer','SimpleHTTPRequestHandler'),\n('http.server','CGIHTTPRequestHandler'):\n('CGIHTTPServer','CGIHTTPRequestHandler'),\n('_socket','socket'):('socket','_socketobject'),\n})\n\nPYTHON3_OSERROR_EXCEPTIONS=(\n'BrokenPipeError',\n'ChildProcessError',\n'ConnectionAbortedError',\n'ConnectionError',\n'ConnectionRefusedError',\n'ConnectionResetError',\n'FileExistsError',\n'FileNotFoundError',\n'InterruptedError',\n'IsADirectoryError',\n'NotADirectoryError',\n'PermissionError',\n'ProcessLookupError',\n'TimeoutError',\n)\n\nfor excname in PYTHON3_OSERROR_EXCEPTIONS:\n REVERSE_NAME_MAPPING[('builtins',excname)]=('exceptions','OSError')\n \nPYTHON3_IMPORTERROR_EXCEPTIONS=(\n'ModuleNotFoundError',\n)\n\nfor excname in PYTHON3_IMPORTERROR_EXCEPTIONS:\n REVERSE_NAME_MAPPING[('builtins',excname)]=('exceptions','ImportError')\ndel excname\n", []], "formatter": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport sys\nimport warnings\nwarnings.warn('the formatter module is deprecated',DeprecationWarning,\nstacklevel=2)\n\n\nAS_IS=None\n\n\nclass NullFormatter:\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,writer=None ):\n if writer is None :\n writer=NullWriter()\n self.writer=writer\n def end_paragraph(self,blankline):pass\n def add_line_break(self):pass\n def add_hor_rule(self,*args,**kw):pass\n def add_label_data(self,format,counter,blankline=None ):pass\n def add_flowing_data(self,data):pass\n def add_literal_data(self,data):pass\n def flush_softspace(self):pass\n def push_alignment(self,align):pass\n def pop_alignment(self):pass\n def push_font(self,x):pass\n def pop_font(self):pass\n def push_margin(self,margin):pass\n def pop_margin(self):pass\n def set_spacing(self,spacing):pass\n def push_style(self,*styles):pass\n def pop_style(self,n=1):pass\n def assert_line_data(self,flag=1):pass\n \n \nclass AbstractFormatter:\n 
''\n\n\n\n\n\n \n \n \n \n \n \n \n def __init__(self,writer):\n self.writer=writer\n self.align=None\n self.align_stack=[]\n self.font_stack=[]\n self.margin_stack=[]\n self.spacing=None\n self.style_stack=[]\n self.nospace=1\n self.softspace=0\n self.para_end=1\n self.parskip=0\n self.hard_break=1\n self.have_label=0\n \n def end_paragraph(self,blankline):\n if not self.hard_break:\n self.writer.send_line_break()\n self.have_label=0\n if self.parskip 0:\n label=label+self.format_letter(c,counter)\n elif c in 'iI':\n if counter >0:\n label=label+self.format_roman(c,counter)\n else :\n label=label+c\n return label\n \n def format_letter(self,case,counter):\n label=''\n while counter >0:\n counter,x=divmod(counter -1,26)\n \n \n \n s=chr(ord(case)+x)\n label=s+label\n return label\n \n def format_roman(self,case,counter):\n ones=['i','x','c','m']\n fives=['v','l','d']\n label,index='',0\n \n while counter >0:\n counter,x=divmod(counter,10)\n if x ==9:\n label=ones[index]+ones[index+1]+label\n elif x ==4:\n label=ones[index]+fives[index]+label\n else :\n if x >=5:\n s=fives[index]\n x=x -5\n else :\n s=''\n s=s+ones[index]*x\n label=s+label\n index=index+1\n if case =='I':\n return label.upper()\n return label\n \n def add_flowing_data(self,data):\n if not data:return\n prespace=data[:1].isspace()\n postspace=data[-1:].isspace()\n data=\" \".join(data.split())\n if self.nospace and not data:\n return\n elif prespace or self.softspace:\n if not data:\n if not self.nospace:\n self.softspace=1\n self.parskip=0\n return\n if not self.nospace:\n data=' '+data\n self.hard_break=self.nospace=self.para_end=\\\n self.parskip=self.have_label=0\n self.softspace=postspace\n self.writer.send_flowing_data(data)\n \n def add_literal_data(self,data):\n if not data:return\n if self.softspace:\n self.writer.send_flowing_data(\" \")\n self.hard_break=data[-1:]=='\\n'\n self.nospace=self.para_end=self.softspace=\\\n self.parskip=self.have_label=0\n self.writer.send_literal_data(data)\n \n def flush_softspace(self):\n if self.softspace:\n self.hard_break=self.para_end=self.parskip=\\\n self.have_label=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n \n def push_alignment(self,align):\n if align and align !=self.align:\n self.writer.new_alignment(align)\n self.align=align\n self.align_stack.append(align)\n else :\n self.align_stack.append(self.align)\n \n def pop_alignment(self):\n if self.align_stack:\n del self.align_stack[-1]\n if self.align_stack:\n self.align=align=self.align_stack[-1]\n self.writer.new_alignment(align)\n else :\n self.align=None\n self.writer.new_alignment(None )\n \n def push_font(self,font):\n size,i,b,tt=font\n if self.softspace:\n self.hard_break=self.para_end=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n if self.font_stack:\n csize,ci,cb,ctt=self.font_stack[-1]\n if size is AS_IS:size=csize\n if i is AS_IS:i=ci\n if b is AS_IS:b=cb\n if tt is AS_IS:tt=ctt\n font=(size,i,b,tt)\n self.font_stack.append(font)\n self.writer.new_font(font)\n \n def pop_font(self):\n if self.font_stack:\n del self.font_stack[-1]\n if self.font_stack:\n font=self.font_stack[-1]\n else :\n font=None\n self.writer.new_font(font)\n \n def push_margin(self,margin):\n self.margin_stack.append(margin)\n fstack=[m for m in self.margin_stack if m]\n if not margin and fstack:\n margin=fstack[-1]\n self.writer.new_margin(margin,len(fstack))\n \n def pop_margin(self):\n if self.margin_stack:\n del self.margin_stack[-1]\n fstack=[m for m in self.margin_stack if m]\n if 
fstack:\n margin=fstack[-1]\n else :\n margin=None\n self.writer.new_margin(margin,len(fstack))\n \n def set_spacing(self,spacing):\n self.spacing=spacing\n self.writer.new_spacing(spacing)\n \n def push_style(self,*styles):\n if self.softspace:\n self.hard_break=self.para_end=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n for style in styles:\n self.style_stack.append(style)\n self.writer.new_styles(tuple(self.style_stack))\n \n def pop_style(self,n=1):\n del self.style_stack[-n:]\n self.writer.new_styles(tuple(self.style_stack))\n \n def assert_line_data(self,flag=1):\n self.nospace=self.hard_break=not flag\n self.para_end=self.parskip=self.have_label=0\n \n \nclass NullWriter:\n ''\n\n\n\n\n\n \n def __init__(self):pass\n def flush(self):pass\n def new_alignment(self,align):pass\n def new_font(self,font):pass\n def new_margin(self,margin,level):pass\n def new_spacing(self,spacing):pass\n def new_styles(self,styles):pass\n def send_paragraph(self,blankline):pass\n def send_line_break(self):pass\n def send_hor_rule(self,*args,**kw):pass\n def send_label_data(self,data):pass\n def send_flowing_data(self,data):pass\n def send_literal_data(self,data):pass\n \n \nclass AbstractWriter(NullWriter):\n ''\n\n\n\n\n \n \n def new_alignment(self,align):\n print(\"new_alignment(%r)\"%(align,))\n \n def new_font(self,font):\n print(\"new_font(%r)\"%(font,))\n \n def new_margin(self,margin,level):\n print(\"new_margin(%r, %d)\"%(margin,level))\n \n def new_spacing(self,spacing):\n print(\"new_spacing(%r)\"%(spacing,))\n \n def new_styles(self,styles):\n print(\"new_styles(%r)\"%(styles,))\n \n def send_paragraph(self,blankline):\n print(\"send_paragraph(%r)\"%(blankline,))\n \n def send_line_break(self):\n print(\"send_line_break()\")\n \n def send_hor_rule(self,*args,**kw):\n print(\"send_hor_rule()\")\n \n def send_label_data(self,data):\n print(\"send_label_data(%r)\"%(data,))\n \n def send_flowing_data(self,data):\n print(\"send_flowing_data(%r)\"%(data,))\n \n def send_literal_data(self,data):\n print(\"send_literal_data(%r)\"%(data,))\n \n \nclass DumbWriter(NullWriter):\n ''\n\n\n\n\n\n \n \n def __init__(self,file=None ,maxcol=72):\n self.file=file or sys.stdout\n self.maxcol=maxcol\n NullWriter.__init__(self)\n self.reset()\n \n def reset(self):\n self.col=0\n self.atbreak=0\n \n def send_paragraph(self,blankline):\n self.file.write('\\n'*blankline)\n self.col=0\n self.atbreak=0\n \n def send_line_break(self):\n self.file.write('\\n')\n self.col=0\n self.atbreak=0\n \n def send_hor_rule(self,*args,**kw):\n self.file.write('\\n')\n self.file.write('-'*self.maxcol)\n self.file.write('\\n')\n self.col=0\n self.atbreak=0\n \n def send_literal_data(self,data):\n self.file.write(data)\n i=data.rfind('\\n')\n if i >=0:\n self.col=0\n data=data[i+1:]\n data=data.expandtabs()\n self.col=self.col+len(data)\n self.atbreak=0\n \n def send_flowing_data(self,data):\n if not data:return\n atbreak=self.atbreak or data[0].isspace()\n col=self.col\n maxcol=self.maxcol\n write=self.file.write\n for word in data.split():\n if atbreak:\n if col+len(word)>=maxcol:\n write('\\n')\n col=0\n else :\n write(' ')\n col=col+1\n write(word)\n col=col+len(word)\n atbreak=1\n self.col=col\n self.atbreak=data[-1].isspace()\n \n \ndef test(file=None ):\n w=DumbWriter()\n f=AbstractFormatter(w)\n if file is not None :\n fp=open(file)\n elif sys.argv[1:]:\n fp=open(sys.argv[1])\n else :\n fp=sys.stdin\n try :\n for line in fp:\n if line =='\\n':\n f.end_paragraph(1)\n else :\n 
f.add_flowing_data(line)\n finally :\n if fp is not sys.stdin:\n fp.close()\n f.end_paragraph(0)\n \n \nif __name__ =='__main__':\n test()\n", ["sys", "warnings"]], "bdb": [".py", "''\n\nimport fnmatch\nimport sys\nimport os\nfrom inspect import CO_GENERATOR,CO_COROUTINE,CO_ASYNC_GENERATOR\n\n__all__=[\"BdbQuit\",\"Bdb\",\"Breakpoint\"]\n\nGENERATOR_AND_COROUTINE_FLAGS=CO_GENERATOR |CO_COROUTINE |CO_ASYNC_GENERATOR\n\n\nclass BdbQuit(Exception):\n ''\n \n \nclass Bdb:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,skip=None):\n self.skip=set(skip)if skip else None\n self.breaks={}\n self.fncache={}\n self.frame_returning=None\n \n self._load_breaks()\n \n def canonic(self,filename):\n ''\n\n\n\n\n\n \n if filename ==\"<\"+filename[1:-1]+\">\":\n return filename\n canonic=self.fncache.get(filename)\n if not canonic:\n canonic=os.path.abspath(filename)\n canonic=os.path.normcase(canonic)\n self.fncache[filename]=canonic\n return canonic\n \n def reset(self):\n ''\n import linecache\n linecache.checkcache()\n self.botframe=None\n self._set_stopinfo(None,None)\n \n def trace_dispatch(self,frame,event,arg):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.quitting:\n return\n if event =='line':\n return self.dispatch_line(frame)\n if event =='call':\n return self.dispatch_call(frame,arg)\n if event =='return':\n return self.dispatch_return(frame,arg)\n if event =='exception':\n return self.dispatch_exception(frame,arg)\n if event =='c_call':\n return self.trace_dispatch\n if event =='c_exception':\n return self.trace_dispatch\n if event =='c_return':\n return self.trace_dispatch\n print('bdb.Bdb.dispatch: unknown debugging event:',repr(event))\n return self.trace_dispatch\n \n def dispatch_line(self,frame):\n ''\n\n\n\n\n \n if self.stop_here(frame)or self.break_here(frame):\n self.user_line(frame)\n if self.quitting:raise BdbQuit\n return self.trace_dispatch\n \n def dispatch_call(self,frame,arg):\n ''\n\n\n\n\n \n \n if self.botframe is None:\n \n self.botframe=frame.f_back\n return self.trace_dispatch\n if not(self.stop_here(frame)or self.break_anywhere(frame)):\n \n return\n \n if self.stopframe and frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS:\n return self.trace_dispatch\n self.user_call(frame,arg)\n if self.quitting:raise BdbQuit\n return self.trace_dispatch\n \n def dispatch_return(self,frame,arg):\n ''\n\n\n\n\n \n if self.stop_here(frame)or frame ==self.returnframe:\n \n if self.stopframe and frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS:\n return self.trace_dispatch\n try:\n self.frame_returning=frame\n self.user_return(frame,arg)\n finally:\n self.frame_returning=None\n if self.quitting:raise BdbQuit\n \n if self.stopframe is frame and self.stoplineno !=-1:\n self._set_stopinfo(None,None)\n return self.trace_dispatch\n \n def dispatch_exception(self,frame,arg):\n ''\n\n\n\n\n \n if self.stop_here(frame):\n \n \n \n if not(frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS\n and arg[0]is StopIteration and arg[2]is None):\n self.user_exception(frame,arg)\n if self.quitting:raise BdbQuit\n \n \n \n \n elif(self.stopframe and frame is not self.stopframe\n and self.stopframe.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS\n and arg[0]in(StopIteration,GeneratorExit)):\n self.user_exception(frame,arg)\n if self.quitting:raise BdbQuit\n \n return self.trace_dispatch\n \n \n \n \n \n def is_skipped_module(self,module_name):\n ''\n if module_name is None:\n return False\n for pattern in self.skip:\n if fnmatch.fnmatch(module_name,pattern):\n return True\n return 
False\n \n def stop_here(self,frame):\n ''\n \n \n if self.skip and\\\n self.is_skipped_module(frame.f_globals.get('__name__')):\n return False\n if frame is self.stopframe:\n if self.stoplineno ==-1:\n return False\n return frame.f_lineno >=self.stoplineno\n if not self.stopframe:\n return True\n return False\n \n def break_here(self,frame):\n ''\n\n\n\n \n filename=self.canonic(frame.f_code.co_filename)\n if filename not in self.breaks:\n return False\n lineno=frame.f_lineno\n if lineno not in self.breaks[filename]:\n \n \n lineno=frame.f_code.co_firstlineno\n if lineno not in self.breaks[filename]:\n return False\n \n \n (bp,flag)=effective(filename,lineno,frame)\n if bp:\n self.currentbp=bp.number\n if(flag and bp.temporary):\n self.do_clear(str(bp.number))\n return True\n else:\n return False\n \n def do_clear(self,arg):\n ''\n\n\n \n raise NotImplementedError(\"subclass of bdb must implement do_clear()\")\n \n def break_anywhere(self,frame):\n ''\n \n return self.canonic(frame.f_code.co_filename)in self.breaks\n \n \n \n \n def user_call(self,frame,argument_list):\n ''\n pass\n \n def user_line(self,frame):\n ''\n pass\n \n def user_return(self,frame,return_value):\n ''\n pass\n \n def user_exception(self,frame,exc_info):\n ''\n pass\n \n def _set_stopinfo(self,stopframe,returnframe,stoplineno=0):\n ''\n\n\n\n\n \n self.stopframe=stopframe\n self.returnframe=returnframe\n self.quitting=False\n \n \n self.stoplineno=stoplineno\n \n \n \n \n def set_until(self,frame,lineno=None):\n ''\n \n \n if lineno is None:\n lineno=frame.f_lineno+1\n self._set_stopinfo(frame,frame,lineno)\n \n def set_step(self):\n ''\n \n \n \n \n if self.frame_returning:\n caller_frame=self.frame_returning.f_back\n if caller_frame and not caller_frame.f_trace:\n caller_frame.f_trace=self.trace_dispatch\n self._set_stopinfo(None,None)\n \n def set_next(self,frame):\n ''\n self._set_stopinfo(frame,None)\n \n def set_return(self,frame):\n ''\n if frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS:\n self._set_stopinfo(frame,None,-1)\n else:\n self._set_stopinfo(frame.f_back,frame)\n \n def set_trace(self,frame=None):\n ''\n\n\n \n if frame is None:\n frame=sys._getframe().f_back\n self.reset()\n while frame:\n frame.f_trace=self.trace_dispatch\n self.botframe=frame\n frame=frame.f_back\n self.set_step()\n sys.settrace(self.trace_dispatch)\n \n def set_continue(self):\n ''\n\n\n \n \n self._set_stopinfo(self.botframe,None,-1)\n if not self.breaks:\n \n sys.settrace(None)\n frame=sys._getframe().f_back\n while frame and frame is not self.botframe:\n del frame.f_trace\n frame=frame.f_back\n \n def set_quit(self):\n ''\n\n\n \n self.stopframe=self.botframe\n self.returnframe=None\n self.quitting=True\n sys.settrace(None)\n \n \n \n \n \n \n \n \n def _add_to_breaks(self,filename,lineno):\n ''\n bp_linenos=self.breaks.setdefault(filename,[])\n if lineno not in bp_linenos:\n bp_linenos.append(lineno)\n \n def set_break(self,filename,lineno,temporary=False,cond=None,\n funcname=None):\n ''\n\n\n\n \n filename=self.canonic(filename)\n import linecache\n line=linecache.getline(filename,lineno)\n if not line:\n return 'Line %s:%d does not exist'%(filename,lineno)\n self._add_to_breaks(filename,lineno)\n bp=Breakpoint(filename,lineno,temporary,cond,funcname)\n return None\n \n def _load_breaks(self):\n ''\n\n\n\n\n\n \n for(filename,lineno)in Breakpoint.bplist.keys():\n self._add_to_breaks(filename,lineno)\n \n def _prune_breaks(self,filename,lineno):\n ''\n\n\n\n\n\n \n if(filename,lineno)not in Breakpoint.bplist:\n 
self.breaks[filename].remove(lineno)\n if not self.breaks[filename]:\n del self.breaks[filename]\n \n def clear_break(self,filename,lineno):\n ''\n\n\n \n filename=self.canonic(filename)\n if filename not in self.breaks:\n return 'There are no breakpoints in %s'%filename\n if lineno not in self.breaks[filename]:\n return 'There is no breakpoint at %s:%d'%(filename,lineno)\n \n \n for bp in Breakpoint.bplist[filename,lineno][:]:\n bp.deleteMe()\n self._prune_breaks(filename,lineno)\n return None\n \n def clear_bpbynumber(self,arg):\n ''\n\n\n \n try:\n bp=self.get_bpbynumber(arg)\n except ValueError as err:\n return str(err)\n bp.deleteMe()\n self._prune_breaks(bp.file,bp.line)\n return None\n \n def clear_all_file_breaks(self,filename):\n ''\n\n\n \n filename=self.canonic(filename)\n if filename not in self.breaks:\n return 'There are no breakpoints in %s'%filename\n for line in self.breaks[filename]:\n blist=Breakpoint.bplist[filename,line]\n for bp in blist:\n bp.deleteMe()\n del self.breaks[filename]\n return None\n \n def clear_all_breaks(self):\n ''\n\n\n \n if not self.breaks:\n return 'There are no breakpoints'\n for bp in Breakpoint.bpbynumber:\n if bp:\n bp.deleteMe()\n self.breaks={}\n return None\n \n def get_bpbynumber(self,arg):\n ''\n\n\n\n \n if not arg:\n raise ValueError('Breakpoint number expected')\n try:\n number=int(arg)\n except ValueError:\n raise ValueError('Non-numeric breakpoint number %s'%arg)from None\n try:\n bp=Breakpoint.bpbynumber[number]\n except IndexError:\n raise ValueError('Breakpoint number %d out of range'%number)from None\n if bp is None:\n raise ValueError('Breakpoint %d already deleted'%number)\n return bp\n \n def get_break(self,filename,lineno):\n ''\n filename=self.canonic(filename)\n return filename in self.breaks and\\\n lineno in self.breaks[filename]\n \n def get_breaks(self,filename,lineno):\n ''\n\n\n \n filename=self.canonic(filename)\n return filename in self.breaks and\\\n lineno in self.breaks[filename]and\\\n Breakpoint.bplist[filename,lineno]or[]\n \n def get_file_breaks(self,filename):\n ''\n\n\n \n filename=self.canonic(filename)\n if filename in self.breaks:\n return self.breaks[filename]\n else:\n return[]\n \n def get_all_breaks(self):\n ''\n return self.breaks\n \n \n \n \n def get_stack(self,f,t):\n ''\n\n\n\n \n stack=[]\n if t and t.tb_frame is f:\n t=t.tb_next\n while f is not None:\n stack.append((f,f.f_lineno))\n if f is self.botframe:\n break\n f=f.f_back\n stack.reverse()\n i=max(0,len(stack)-1)\n while t is not None:\n stack.append((t.tb_frame,t.tb_lineno))\n t=t.tb_next\n if f is None:\n i=max(0,len(stack)-1)\n return stack,i\n \n def format_stack_entry(self,frame_lineno,lprefix=': '):\n ''\n\n\n\n\n\n\n \n import linecache,reprlib\n frame,lineno=frame_lineno\n filename=self.canonic(frame.f_code.co_filename)\n s='%s(%r)'%(filename,lineno)\n if frame.f_code.co_name:\n s +=frame.f_code.co_name\n else:\n s +=\"\"\n s +='()'\n if '__return__'in frame.f_locals:\n rv=frame.f_locals['__return__']\n s +='->'\n s +=reprlib.repr(rv)\n if lineno is not None:\n line=linecache.getline(filename,lineno,frame.f_globals)\n if line:\n s +=lprefix+line.strip()\n else:\n s +=f'{lprefix}Warning: lineno is None'\n return s\n \n \n \n \n \n def run(self,cmd,globals=None,locals=None):\n ''\n\n\n \n if globals is None:\n import __main__\n globals=__main__.__dict__\n if locals is None:\n locals=globals\n self.reset()\n if isinstance(cmd,str):\n cmd=compile(cmd,\"\",\"exec\")\n sys.settrace(self.trace_dispatch)\n try:\n 
exec(cmd,globals,locals)\n except BdbQuit:\n pass\n finally:\n self.quitting=True\n sys.settrace(None)\n \n def runeval(self,expr,globals=None,locals=None):\n ''\n\n\n \n if globals is None:\n import __main__\n globals=__main__.__dict__\n if locals is None:\n locals=globals\n self.reset()\n sys.settrace(self.trace_dispatch)\n try:\n return eval(expr,globals,locals)\n except BdbQuit:\n pass\n finally:\n self.quitting=True\n sys.settrace(None)\n \n def runctx(self,cmd,globals,locals):\n ''\n \n self.run(cmd,globals,locals)\n \n \n \n def runcall(self,func,/,*args,**kwds):\n ''\n\n\n \n self.reset()\n sys.settrace(self.trace_dispatch)\n res=None\n try:\n res=func(*args,**kwds)\n except BdbQuit:\n pass\n finally:\n self.quitting=True\n sys.settrace(None)\n return res\n \n \ndef set_trace():\n ''\n Bdb().set_trace()\n \n \nclass Breakpoint:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n next=1\n bplist={}\n bpbynumber=[None]\n \n \n \n def __init__(self,file,line,temporary=False,cond=None,funcname=None):\n self.funcname=funcname\n \n self.func_first_executable_line=None\n self.file=file\n self.line=line\n self.temporary=temporary\n self.cond=cond\n self.enabled=True\n self.ignore=0\n self.hits=0\n self.number=Breakpoint.next\n Breakpoint.next +=1\n \n self.bpbynumber.append(self)\n if(file,line)in self.bplist:\n self.bplist[file,line].append(self)\n else:\n self.bplist[file,line]=[self]\n \n @staticmethod\n def clearBreakpoints():\n Breakpoint.next=1\n Breakpoint.bplist={}\n Breakpoint.bpbynumber=[None]\n \n def deleteMe(self):\n ''\n\n\n\n \n \n index=(self.file,self.line)\n self.bpbynumber[self.number]=None\n self.bplist[index].remove(self)\n if not self.bplist[index]:\n \n del self.bplist[index]\n \n def enable(self):\n ''\n self.enabled=True\n \n def disable(self):\n ''\n self.enabled=False\n \n def bpprint(self,out=None):\n ''\n\n\n\n \n if out is None:\n out=sys.stdout\n print(self.bpformat(),file=out)\n \n def bpformat(self):\n ''\n\n\n\n\n\n \n if self.temporary:\n disp='del '\n else:\n disp='keep '\n if self.enabled:\n disp=disp+'yes '\n else:\n disp=disp+'no '\n ret='%-4dbreakpoint %s at %s:%d'%(self.number,disp,\n self.file,self.line)\n if self.cond:\n ret +='\\n\\tstop only if %s'%(self.cond,)\n if self.ignore:\n ret +='\\n\\tignore next %d hits'%(self.ignore,)\n if self.hits:\n if self.hits >1:\n ss='s'\n else:\n ss=''\n ret +='\\n\\tbreakpoint already hit %d time%s'%(self.hits,ss)\n return ret\n \n def __str__(self):\n ''\n return 'breakpoint %s at %s:%s'%(self.number,self.file,self.line)\n \n \n \n \ndef checkfuncname(b,frame):\n ''\n\n\n\n\n\n \n if not b.funcname:\n \n if b.line !=frame.f_lineno:\n \n \n return False\n return True\n \n \n if frame.f_code.co_name !=b.funcname:\n \n return False\n \n \n if not b.func_first_executable_line:\n \n b.func_first_executable_line=frame.f_lineno\n \n if b.func_first_executable_line !=frame.f_lineno:\n \n return False\n return True\n \n \ndef effective(file,line,frame):\n ''\n\n\n\n\n\n\n\n\n\n\n \n possibles=Breakpoint.bplist[file,line]\n for b in possibles:\n if not b.enabled:\n continue\n if not checkfuncname(b,frame):\n continue\n \n b.hits +=1\n if not b.cond:\n \n if b.ignore >0:\n b.ignore -=1\n continue\n else:\n \n return(b,True)\n else:\n \n \n \n try:\n val=eval(b.cond,frame.f_globals,frame.f_locals)\n if val:\n if b.ignore >0:\n b.ignore -=1\n \n else:\n return(b,True)\n \n \n except:\n \n \n \n return(b,False)\n return(None,None)\n \n \n \n \nclass Tdb(Bdb):\n def user_call(self,frame,args):\n name=frame.f_code.co_name\n 
if not name:name='???'\n print('+++ call',name,args)\n def user_line(self,frame):\n import linecache\n name=frame.f_code.co_name\n if not name:name='???'\n fn=self.canonic(frame.f_code.co_filename)\n line=linecache.getline(fn,frame.f_lineno,frame.f_globals)\n print('+++',fn,frame.f_lineno,name,':',line.strip())\n def user_return(self,frame,retval):\n print('+++ return',retval)\n def user_exception(self,frame,exc_stuff):\n print('+++ exception',exc_stuff)\n self.set_continue()\n \ndef foo(n):\n print('foo(',n,')')\n x=bar(n *10)\n print('bar returned',x)\n \ndef bar(a):\n print('bar(',a,')')\n return a /2\n \ndef test():\n t=Tdb()\n t.run('import bdb; bdb.foo(10)')\n", ["__main__", "fnmatch", "inspect", "linecache", "os", "reprlib", "sys"]], "cmd": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport string,sys\n\n__all__=[\"Cmd\"]\n\nPROMPT='(Cmd) '\nIDENTCHARS=string.ascii_letters+string.digits+'_'\n\nclass Cmd:\n ''\n\n\n\n\n\n\n\n\n\n \n prompt=PROMPT\n identchars=IDENTCHARS\n ruler='='\n lastcmd=''\n intro=None\n doc_leader=\"\"\n doc_header=\"Documented commands (type help ):\"\n misc_header=\"Miscellaneous help topics:\"\n undoc_header=\"Undocumented commands:\"\n nohelp=\"*** No help on %s\"\n use_rawinput=1\n \n def __init__(self,completekey='tab',stdin=None,stdout=None):\n ''\n\n\n\n\n\n\n\n\n \n if stdin is not None:\n self.stdin=stdin\n else:\n self.stdin=sys.stdin\n if stdout is not None:\n self.stdout=stdout\n else:\n self.stdout=sys.stdout\n self.cmdqueue=[]\n self.completekey=completekey\n \n def cmdloop(self,intro=None):\n ''\n\n\n\n \n \n self.preloop()\n if self.use_rawinput and self.completekey:\n try:\n import readline\n self.old_completer=readline.get_completer()\n readline.set_completer(self.complete)\n readline.parse_and_bind(self.completekey+\": complete\")\n except ImportError:\n pass\n try:\n if intro is not None:\n self.intro=intro\n if self.intro:\n self.stdout.write(str(self.intro)+\"\\n\")\n stop=None\n while not stop:\n if self.cmdqueue:\n line=self.cmdqueue.pop(0)\n else:\n if self.use_rawinput:\n try:\n line=input(self.prompt)\n except EOFError:\n line='EOF'\n else:\n self.stdout.write(self.prompt)\n self.stdout.flush()\n line=self.stdin.readline()\n if not len(line):\n line='EOF'\n else:\n line=line.rstrip('\\r\\n')\n line=self.precmd(line)\n stop=self.onecmd(line)\n stop=self.postcmd(stop,line)\n self.postloop()\n finally:\n if self.use_rawinput and self.completekey:\n try:\n import readline\n readline.set_completer(self.old_completer)\n except ImportError:\n pass\n \n \n def precmd(self,line):\n ''\n\n\n \n return line\n \n def postcmd(self,stop,line):\n ''\n return stop\n \n def preloop(self):\n ''\n pass\n \n def postloop(self):\n ''\n\n\n \n pass\n \n def parseline(self,line):\n ''\n\n\n \n line=line.strip()\n if not line:\n return None,None,line\n elif line[0]=='?':\n line='help '+line[1:]\n elif line[0]=='!':\n if hasattr(self,'do_shell'):\n line='shell '+line[1:]\n else:\n return None,None,line\n i,n=0,len(line)\n while i 0:\n cmd,args,foo=self.parseline(line)\n if cmd =='':\n compfunc=self.completedefault\n else:\n try:\n compfunc=getattr(self,'complete_'+cmd)\n except AttributeError:\n compfunc=self.completedefault\n else:\n compfunc=self.completenames\n self.completion_matches=compfunc(text,line,begidx,endidx)\n try:\n return self.completion_matches[state]\n except IndexError:\n return None\n \n def get_names(self):\n \n \n return dir(self.__class__)\n \n def complete_help(self,*args):\n 
commands=set(self.completenames(*args))\n topics=set(a[5:]for a in self.get_names()\n if a.startswith('help_'+args[0]))\n return list(commands |topics)\n \n def do_help(self,arg):\n ''\n if arg:\n \n try:\n func=getattr(self,'help_'+arg)\n except AttributeError:\n try:\n doc=getattr(self,'do_'+arg).__doc__\n if doc:\n self.stdout.write(\"%s\\n\"%str(doc))\n return\n except AttributeError:\n pass\n self.stdout.write(\"%s\\n\"%str(self.nohelp %(arg,)))\n return\n func()\n else:\n names=self.get_names()\n cmds_doc=[]\n cmds_undoc=[]\n topics=set()\n for name in names:\n if name[:5]=='help_':\n topics.add(name[5:])\n names.sort()\n \n prevname=''\n for name in names:\n if name[:3]=='do_':\n if name ==prevname:\n continue\n prevname=name\n cmd=name[3:]\n if cmd in topics:\n cmds_doc.append(cmd)\n topics.remove(cmd)\n elif getattr(self,name).__doc__:\n cmds_doc.append(cmd)\n else:\n cmds_undoc.append(cmd)\n self.stdout.write(\"%s\\n\"%str(self.doc_leader))\n self.print_topics(self.doc_header,cmds_doc,15,80)\n self.print_topics(self.misc_header,sorted(topics),15,80)\n self.print_topics(self.undoc_header,cmds_undoc,15,80)\n \n def print_topics(self,header,cmds,cmdlen,maxcol):\n if cmds:\n self.stdout.write(\"%s\\n\"%str(header))\n if self.ruler:\n self.stdout.write(\"%s\\n\"%str(self.ruler *len(header)))\n self.columnize(cmds,maxcol -1)\n self.stdout.write(\"\\n\")\n \n def columnize(self,list,displaywidth=80):\n ''\n\n\n\n \n if not list:\n self.stdout.write(\"\\n\")\n return\n \n nonstrings=[i for i in range(len(list))\n if not isinstance(list[i],str)]\n if nonstrings:\n raise TypeError(\"list[i] not a string for i in %s\"\n %\", \".join(map(str,nonstrings)))\n size=len(list)\n if size ==1:\n self.stdout.write('%s\\n'%str(list[0]))\n return\n \n for nrows in range(1,len(list)):\n ncols=(size+nrows -1)//nrows\n colwidths=[]\n totwidth=-2\n for col in range(ncols):\n colwidth=0\n for row in range(nrows):\n i=row+nrows *col\n if i >=size:\n break\n x=list[i]\n colwidth=max(colwidth,len(x))\n colwidths.append(colwidth)\n totwidth +=colwidth+2\n if totwidth >displaywidth:\n break\n if totwidth <=displaywidth:\n break\n else:\n nrows=len(list)\n ncols=1\n colwidths=[0]\n for row in range(nrows):\n texts=[]\n for col in range(ncols):\n i=row+nrows *col\n if i >=size:\n x=\"\"\n else:\n x=list[i]\n texts.append(x)\n while texts and not texts[-1]:\n del texts[-1]\n for col in range(len(texts)):\n texts[col]=texts[col].ljust(colwidths[col])\n self.stdout.write(\"%s\\n\"%str(\" \".join(texts)))\n", ["readline", "string", "sys"]], "_socket": [".py", 
"''\n\n\n\n\nAF_APPLETALK=16\n\nAF_DECnet=12\n\nAF_INET=2\n\nAF_INET6=23\n\nAF_IPX=6\n\nAF_IRDA=26\n\nAF_SNA=11\n\nAF_UNSPEC=0\n\nAI_ADDRCONFIG=1024\n\nAI_ALL=256\n\nAI_CANONNAME=2\n\nAI_NUMERICHOST=4\n\nAI_NUMERICSERV=8\n\nAI_PASSIVE=1\n\nAI_V4MAPPED=2048\n\nCAPI=''\n\nEAI_AGAIN=11002\n\nEAI_BADFLAGS=10022\n\nEAI_FAIL=11003\n\nEAI_FAMILY=10047\n\nEAI_MEMORY=8\n\nEAI_NODATA=11001\n\nEAI_NONAME=11001\n\nEAI_SERVICE=10109\n\nEAI_SOCKTYPE=10044\n\nINADDR_ALLHOSTS_GROUP=-536870911\n\nINADDR_ANY=0\n\nINADDR_BROADCAST=-1\n\nINADDR_LOOPBACK=2130706433\n\nINADDR_MAX_LOCAL_GROUP=-536870657\n\nINADDR_NONE=-1\n\nINADDR_UNSPEC_GROUP=-536870912\n\nIPPORT_RESERVED=1024\n\nIPPORT_USERRESERVED=5000\n\nIPPROTO_ICMP=1\n\nIPPROTO_IP=0\n\nIPPROTO_RAW=255\n\nIPPROTO_TCP=6\n\nIPPROTO_UDP=17\n\nIPV6_CHECKSUM=26\n\nIPV6_DONTFRAG=14\n\nIPV6_HOPLIMIT=21\n\nIPV6_HOPOPTS=1\n\nIPV6_JOIN_GROUP=12\n\nIPV6_LEAVE_GROUP=13\n\nIPV6_MULTICAST_HOPS=10\n\nIPV6_MULTICAST_IF=9\n\nIPV6_MULTICAST_LOOP=11\n\nIPV6_PKTINFO=19\n\nIPV6_RECVRTHDR=38\n\nIPV6_RECVTCLASS=40\n\nIPV6_RTHDR=32\n\nIPV6_TCLASS=39\n\nIPV6_UNICAST_HOPS=4\n\nIPV6_V6ONLY=27\n\nIP_ADD_MEMBERSHIP=12\n\nIP_DROP_MEMBERSHIP=13\n\nIP_HDRINCL=2\n\nIP_MULTICAST_IF=9\n\nIP_MULTICAST_LOOP=11\n\nIP_MULTICAST_TTL=10\n\nIP_OPTIONS=1\n\nIP_RECVDSTADDR=25\n\nIP_TOS=3\n\nIP_TTL=4\n\nMSG_BCAST=1024\n\nMSG_CTRUNC=512\n\nMSG_DONTROUTE=4\n\nMSG_MCAST=2048\n\nMSG_OOB=1\n\nMSG_PEEK=2\n\nMSG_TRUNC=256\n\nNI_DGRAM=16\n\nNI_MAXHOST=1025\n\nNI_MAXSERV=32\n\nNI_NAMEREQD=4\n\nNI_NOFQDN=1\n\nNI_NUMERICHOST=2\n\nNI_NUMERICSERV=8\n\nRCVALL_MAX=3\n\nRCVALL_OFF=0\n\nRCVALL_ON=1\n\nRCVALL_SOCKETLEVELONLY=2\n\nSHUT_RD=0\n\nSHUT_RDWR=2\n\nSHUT_WR=1\n\nSIO_KEEPALIVE_VALS=2550136836\n\nSIO_RCVALL=2550136833\n\nSOCK_DGRAM=2\n\nSOCK_RAW=3\n\nSOCK_RDM=4\n\nSOCK_SEQPACKET=5\n\nSOCK_STREAM=1\n\nSOL_IP=0\n\nSOL_SOCKET=65535\n\nSOL_TCP=6\n\nSOL_UDP=17\n\nSOMAXCONN=2147483647\n\nSO_ACCEPTCONN=2\n\nSO_BROADCAST=32\n\nSO_DEBUG=1\n\nSO_DONTROUTE=16\n\nSO_ERROR=4103\n\nSO_EXCLUSIVEADDRUSE=-5\n\nSO_KEEPALIVE=8\n\nSO_LINGER=128\n\nSO_OOBINLINE=256\n\nSO_RCVBUF=4098\n\nSO_RCVLOWAT=4100\n\nSO_RCVTIMEO=4102\n\nSO_REUSEADDR=4\n\nSO_SNDBUF=4097\n\nSO_SNDLOWAT=4099\n\nSO_SNDTIMEO=4101\n\nSO_TYPE=4104\n\nSO_USELOOPBACK=64\n\nclass SocketType:\n pass\n \nTCP_MAXSEG=4\n\nTCP_NODELAY=1\n\n__loader__='<_frozen_importlib.ExtensionFileLoader object at 0x00CA2D90>'\n\ndef dup(*args,**kw):\n ''\n\n \n pass\n \nclass error:\n pass\n \nclass gaierror:\n pass\n \ndef getaddrinfo(*args,**kw):\n ''\n\n \n pass\n \ndef getdefaulttimeout(*args,**kw):\n ''\n\n\n \n pass\n \ndef gethostbyaddr(*args,**kw):\n ''\n\n \n pass\n \ndef gethostbyname(*args,**kw):\n ''\n \n pass\n \ndef gethostbyname_ex(*args,**kw):\n ''\n\n \n pass\n \ndef gethostname(*args,**kw):\n ''\n \n import browser\n return browser.window.navigator.userAgent\n \ndef getnameinfo(*args,**kw):\n ''\n \n pass\n \ndef getprotobyname(*args,**kw):\n ''\n \n pass\n \ndef getservbyname(*args,**kw):\n ''\n\n\n \n pass\n \ndef getservbyport(*args,**kw):\n ''\n\n\n \n pass\n \nhas_ipv6=True\n\nclass herror:\n pass\n \ndef htonl(*args,**kw):\n ''\n \n pass\n \ndef htons(*args,**kw):\n ''\n \n pass\n \ndef inet_aton(*args,**kw):\n ''\n\n \n pass\n \ndef inet_ntoa(*args,**kw):\n ''\n \n pass\n \ndef ntohl(*args,**kw):\n ''\n \n pass\n \ndef ntohs(*args,**kw):\n ''\n \n pass\n \ndef setdefaulttimeout(*args,**kw):\n ''\n\n\n \n pass\n \nclass socket:\n def __init__(self,*args,**kw):\n pass\n def bind(self,*args,**kw):\n pass\n def close(self):\n pass\n \nclass timeout:\n pass\n", 
["browser"]], "_codecs_jp": [".py", "from encoding_cp932 import encoding_table,decoding_table\n\n\n\nclass Codec:\n\n def encode(self,input,errors='strict'):\n b=[]\n for pos,car in enumerate(input):\n cp=ord(car)\n try :\n code=encoding_table[cp]\n high=((code >>8)&0xff)\n low=code&0xff\n if high:\n b.append(high)\n b.append(low)\n except IndexError:\n raise UnicodeEncodeError(pos)\n return [bytes(b),len(input)]\n \n def decode(self,input,errors='strict'):\n i=0\n string=''\n while i 1:\n print(\"checking %r ...\"%file)\n \n try:\n process_tokens(tokenize.generate_tokens(f.readline))\n \n except tokenize.TokenError as msg:\n errprint(\"%r: Token Error: %s\"%(file,msg))\n return\n \n except SyntaxError as msg:\n errprint(\"%r: Token Error: %s\"%(file,msg))\n return\n \n except IndentationError as msg:\n errprint(\"%r: Indentation Error: %s\"%(file,msg))\n return\n \n except NannyNag as nag:\n badline=nag.get_lineno()\n line=nag.get_line()\n if verbose:\n print(\"%r: *** Line %d: trouble in tab city! ***\"%(file,badline))\n print(\"offending line: %r\"%(line,))\n print(nag.get_msg())\n else:\n if ' 'in file:file='\"'+file+'\"'\n if filename_only:print(file)\n else:print(file,badline,repr(line))\n return\n \n finally:\n f.close()\n \n if verbose:\n print(\"%r: Clean bill of health.\"%(file,))\n \nclass Whitespace:\n\n S,T=' \\t'\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def __init__(self,ws):\n self.raw=ws\n S,T=Whitespace.S,Whitespace.T\n count=[]\n b=n=nt=0\n for ch in self.raw:\n if ch ==S:\n n=n+1\n b=b+1\n elif ch ==T:\n n=n+1\n nt=nt+1\n if b >=len(count):\n count=count+[0]*(b -len(count)+1)\n count[b]=count[b]+1\n b=0\n else:\n break\n self.n=n\n self.nt=nt\n self.norm=tuple(count),b\n self.is_simple=len(count)<=1\n \n \n \n def longest_run_of_spaces(self):\n count,trailing=self.norm\n return max(len(count)-1,trailing)\n \n def indent_level(self,tabsize):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n count,trailing=self.norm\n il=0\n for i in range(tabsize,len(count)):\n il=il+i //tabsize *count[i]\n return trailing+tabsize *(il+self.nt)\n \n \n \n def equal(self,other):\n return self.norm ==other.norm\n \n \n \n \n \n def not_equal_witness(self,other):\n n=max(self.longest_run_of_spaces(),\n other.longest_run_of_spaces())+1\n a=[]\n for ts in range(1,n+1):\n if self.indent_level(ts)!=other.indent_level(ts):\n a.append((ts,\n self.indent_level(ts),\n other.indent_level(ts)))\n return a\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def less(self,other):\n if self.n >=other.n:\n return False\n if self.is_simple and other.is_simple:\n return self.nt <=other.nt\n n=max(self.longest_run_of_spaces(),\n other.longest_run_of_spaces())+1\n \n for ts in range(2,n+1):\n if self.indent_level(ts)>=other.indent_level(ts):\n return False\n return True\n \n \n \n \n \n def not_less_witness(self,other):\n n=max(self.longest_run_of_spaces(),\n other.longest_run_of_spaces())+1\n a=[]\n for ts in range(1,n+1):\n if self.indent_level(ts)>=other.indent_level(ts):\n a.append((ts,\n self.indent_level(ts),\n other.indent_level(ts)))\n return a\n \ndef format_witnesses(w):\n firsts=(str(tup[0])for tup in w)\n prefix=\"at tab size\"\n if len(w)>1:\n prefix=prefix+\"s\"\n return prefix+\" \"+', '.join(firsts)\n \ndef process_tokens(tokens):\n try:\n _process_tokens(tokens)\n except TabError as e:\n raise NannyNag(e.lineno,e.msg,e.text)\n \ndef _process_tokens(tokens):\n INDENT=tokenize.INDENT\n DEDENT=tokenize.DEDENT\n NEWLINE=tokenize.NEWLINE\n JUNK=tokenize.COMMENT,tokenize.NL\n 
indents=[Whitespace(\"\")]\n check_equal=0\n \n for(type,token,start,end,line)in tokens:\n if type ==NEWLINE:\n \n \n \n \n \n check_equal=1\n \n elif type ==INDENT:\n check_equal=0\n thisguy=Whitespace(token)\n if not indents[-1].less(thisguy):\n witness=indents[-1].not_less_witness(thisguy)\n msg=\"indent not greater e.g. \"+format_witnesses(witness)\n raise NannyNag(start[0],msg,line)\n indents.append(thisguy)\n \n elif type ==DEDENT:\n \n \n \n \n \n \n \n \n \n check_equal=1\n \n del indents[-1]\n \n elif check_equal and type not in JUNK:\n \n \n \n \n \n \n check_equal=0\n thisguy=Whitespace(line)\n if not indents[-1].equal(thisguy):\n witness=indents[-1].not_equal_witness(thisguy)\n msg=\"indent not equal e.g. \"+format_witnesses(witness)\n raise NannyNag(start[0],msg,line)\n \n \nif __name__ =='__main__':\n main()\n", ["getopt", "os", "sys", "tokenize"]], "_py_abc": [".py", "from _weakrefset import WeakSet\n\n\ndef get_cache_token():\n ''\n\n\n\n\n \n return ABCMeta._abc_invalidation_counter\n \n \nclass ABCMeta(type):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n _abc_invalidation_counter=0\n \n def __new__(mcls,name,bases,namespace,/,**kwargs):\n cls=super().__new__(mcls,name,bases,namespace,**kwargs)\n \n abstracts={name\n for name,value in namespace.items()\n if getattr(value,\"__isabstractmethod__\",False )}\n for base in bases:\n for name in getattr(base,\"__abstractmethods__\",set()):\n value=getattr(cls,name,None )\n if getattr(value,\"__isabstractmethod__\",False ):\n abstracts.add(name)\n cls.__abstractmethods__=frozenset(abstracts)\n \n cls._abc_registry=WeakSet()\n cls._abc_cache=WeakSet()\n cls._abc_negative_cache=WeakSet()\n cls._abc_negative_cache_version=ABCMeta._abc_invalidation_counter\n return cls\n \n def register(cls,subclass):\n ''\n\n\n \n if not isinstance(subclass,type):\n raise TypeError(\"Can only register classes\")\n if issubclass(subclass,cls):\n return subclass\n \n \n if issubclass(cls,subclass):\n \n raise RuntimeError(\"Refusing to create an inheritance cycle\")\n cls._abc_registry.add(subclass)\n ABCMeta._abc_invalidation_counter +=1\n return subclass\n \n def _dump_registry(cls,file=None ):\n ''\n print(f\"Class: {cls.__module__}.{cls.__qualname__}\",file=file)\n print(f\"Inv. 
counter: {get_cache_token()}\",file=file)\n for name in cls.__dict__:\n if name.startswith(\"_abc_\"):\n value=getattr(cls,name)\n if isinstance(value,WeakSet):\n value=set(value)\n print(f\"{name}: {value!r}\",file=file)\n \n def _abc_registry_clear(cls):\n ''\n cls._abc_registry.clear()\n \n def _abc_caches_clear(cls):\n ''\n cls._abc_cache.clear()\n cls._abc_negative_cache.clear()\n \n def __instancecheck__(cls,instance):\n ''\n \n subclass=instance.__class__\n if subclass in cls._abc_cache:\n return True\n subtype=type(instance)\n if subtype is subclass:\n if (cls._abc_negative_cache_version ==\n ABCMeta._abc_invalidation_counter and\n subclass in cls._abc_negative_cache):\n return False\n \n return cls.__subclasscheck__(subclass)\n return any(cls.__subclasscheck__(c)for c in (subclass,subtype))\n \n def __subclasscheck__(cls,subclass):\n ''\n if not isinstance(subclass,type):\n raise TypeError('issubclass() arg 1 must be a class')\n \n if subclass in cls._abc_cache:\n return True\n \n if cls._abc_negative_cache_version '\n \n \n \n \n \n \n \n_zip_searchorder=(\n(path_sep+'__init__.pyc',True,True),\n(path_sep+'__init__.py',False,True),\n('.pyc',True,False),\n('.py',False,False),\n)\n\n\n\ndef _get_module_path(self,fullname):\n return self.prefix+fullname.rpartition('.')[2]\n \n \ndef _is_dir(self,path):\n\n\n\n dirpath=path+path_sep\n \n return dirpath in self._files\n \n \ndef _get_module_info(self,fullname):\n path=_get_module_path(self,fullname)\n for suffix,isbytecode,ispackage in _zip_searchorder:\n fullpath=path+suffix\n if fullpath in self._files:\n return ispackage\n return None\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef _read_directory(archive):\n try:\n fp=_io.open_code(archive)\n except OSError:\n raise ZipImportError(f\"can't open Zip file: {archive !r}\",path=archive)\n \n with fp:\n \n \n \n start_offset=fp.tell()\n try:\n try:\n fp.seek(-END_CENTRAL_DIR_SIZE,2)\n header_position=fp.tell()\n buffer=fp.read(END_CENTRAL_DIR_SIZE)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n if len(buffer)!=END_CENTRAL_DIR_SIZE:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n if buffer[:4]!=STRING_END_ARCHIVE:\n \n \n try:\n fp.seek(0,2)\n file_size=fp.tell()\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",\n path=archive)\n max_comment_start=max(file_size -MAX_COMMENT_LEN -\n END_CENTRAL_DIR_SIZE,0)\n try:\n fp.seek(max_comment_start)\n data=fp.read()\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",\n path=archive)\n pos=data.rfind(STRING_END_ARCHIVE)\n if pos <0:\n raise ZipImportError(f'not a Zip file: {archive !r}',\n path=archive)\n buffer=data[pos:pos+END_CENTRAL_DIR_SIZE]\n if len(buffer)!=END_CENTRAL_DIR_SIZE:\n raise ZipImportError(f\"corrupt Zip file: {archive !r}\",\n path=archive)\n header_position=file_size -len(data)+pos\n \n header_size=_unpack_uint32(buffer[12:16])\n header_offset=_unpack_uint32(buffer[16:20])\n if header_position header_offset:\n raise ZipImportError(f'bad local header offset: {archive !r}',path=archive)\n file_offset +=arc_offset\n \n try:\n name=fp.read(name_size)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n if len(name)!=name_size:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n \n \n \n try:\n if len(fp.read(header_size -name_size))!=header_size -name_size:\n raise ZipImportError(f\"can't read Zip 
file: {archive !r}\",path=archive)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n \n if flags&0x800:\n \n name=name.decode()\n else:\n \n try:\n name=name.decode('ascii')\n except UnicodeDecodeError:\n name=name.decode('latin1').translate(cp437_table)\n \n name=name.replace('/',path_sep)\n path=_bootstrap_external._path_join(archive,name)\n t=(path,compress,data_size,file_size,file_offset,time,date,crc)\n files[name]=t\n count +=1\n finally:\n fp.seek(start_offset)\n _bootstrap._verbose_message('zipimport: found {} names in {!r}',count,archive)\n return files\n \n \n \n \n \n \n \ncp437_table=(\n\n'\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f'\n'\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f'\n' !\"#$%&\\'()*+,-./'\n'0123456789:;<=>?'\n'@ABCDEFGHIJKLMNO'\n'PQRSTUVWXYZ[\\\\]^_'\n'`abcdefghijklmno'\n'pqrstuvwxyz{|}~\\x7f'\n\n'\\xc7\\xfc\\xe9\\xe2\\xe4\\xe0\\xe5\\xe7'\n'\\xea\\xeb\\xe8\\xef\\xee\\xec\\xc4\\xc5'\n'\\xc9\\xe6\\xc6\\xf4\\xf6\\xf2\\xfb\\xf9'\n'\\xff\\xd6\\xdc\\xa2\\xa3\\xa5\\u20a7\\u0192'\n'\\xe1\\xed\\xf3\\xfa\\xf1\\xd1\\xaa\\xba'\n'\\xbf\\u2310\\xac\\xbd\\xbc\\xa1\\xab\\xbb'\n'\\u2591\\u2592\\u2593\\u2502\\u2524\\u2561\\u2562\\u2556'\n'\\u2555\\u2563\\u2551\\u2557\\u255d\\u255c\\u255b\\u2510'\n'\\u2514\\u2534\\u252c\\u251c\\u2500\\u253c\\u255e\\u255f'\n'\\u255a\\u2554\\u2569\\u2566\\u2560\\u2550\\u256c\\u2567'\n'\\u2568\\u2564\\u2565\\u2559\\u2558\\u2552\\u2553\\u256b'\n'\\u256a\\u2518\\u250c\\u2588\\u2584\\u258c\\u2590\\u2580'\n'\\u03b1\\xdf\\u0393\\u03c0\\u03a3\\u03c3\\xb5\\u03c4'\n'\\u03a6\\u0398\\u03a9\\u03b4\\u221e\\u03c6\\u03b5\\u2229'\n'\\u2261\\xb1\\u2265\\u2264\\u2320\\u2321\\xf7\\u2248'\n'\\xb0\\u2219\\xb7\\u221a\\u207f\\xb2\\u25a0\\xa0'\n)\n\n_importing_zlib=False\n\n\n\n\ndef _get_decompress_func():\n global _importing_zlib\n if _importing_zlib:\n \n \n _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')\n raise ZipImportError(\"can't decompress data; zlib not available\")\n \n _importing_zlib=True\n try:\n from zlib import decompress\n except Exception:\n _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')\n raise ZipImportError(\"can't decompress data; zlib not available\")\n finally:\n _importing_zlib=False\n \n _bootstrap._verbose_message('zipimport: zlib available')\n return decompress\n \n \ndef _get_data(archive,toc_entry):\n datapath,compress,data_size,file_size,file_offset,time,date,crc=toc_entry\n if data_size <0:\n raise ZipImportError('negative data size')\n \n with _io.open_code(archive)as fp:\n \n try:\n fp.seek(file_offset)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n buffer=fp.read(30)\n if len(buffer)!=30:\n raise EOFError('EOF read where not expected')\n \n if buffer[:4]!=b'PK\\x03\\x04':\n \n raise ZipImportError(f'bad local file header: {archive !r}',path=archive)\n \n name_size=_unpack_uint16(buffer[26:28])\n extra_size=_unpack_uint16(buffer[28:30])\n header_size=30+name_size+extra_size\n file_offset +=header_size\n try:\n fp.seek(file_offset)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n raw_data=fp.read(data_size)\n if len(raw_data)!=data_size:\n raise OSError(\"zipimport: can't read data\")\n \n if compress ==0:\n \n return raw_data\n \n \n try:\n decompress=_get_decompress_func()\n except Exception:\n raise ZipImportError(\"can't decompress data; zlib not available\")\n return decompress(raw_data,-15)\n \n \n \n \n 
\ndef _eq_mtime(t1,t2):\n\n return abs(t1 -t2)<=1\n \n \n \n \n \ndef _unmarshal_code(self,pathname,fullpath,fullname,data):\n exc_details={\n 'name':fullname,\n 'path':fullpath,\n }\n \n flags=_bootstrap_external._classify_pyc(data,fullname,exc_details)\n \n hash_based=flags&0b1 !=0\n if hash_based:\n check_source=flags&0b10 !=0\n if(_imp.check_hash_based_pycs !='never'and\n (check_source or _imp.check_hash_based_pycs =='always')):\n source_bytes=_get_pyc_source(self,fullpath)\n if source_bytes is not None:\n source_hash=_imp.source_hash(\n _bootstrap_external._RAW_MAGIC_NUMBER,\n source_bytes,\n )\n \n _bootstrap_external._validate_hash_pyc(\n data,source_hash,fullname,exc_details)\n else:\n source_mtime,source_size=\\\n _get_mtime_and_size_of_source(self,fullpath)\n \n if source_mtime:\n \n \n if(not _eq_mtime(_unpack_uint32(data[8:12]),source_mtime)or\n _unpack_uint32(data[12:16])!=source_size):\n _bootstrap._verbose_message(\n f'bytecode is stale for {fullname !r}')\n return None\n \n code=marshal.loads(data[16:])\n if not isinstance(code,_code_type):\n raise TypeError(f'compiled module {pathname !r} is not a code object')\n return code\n \n_code_type=type(_unmarshal_code.__code__)\n\n\n\n\ndef _normalize_line_endings(source):\n source=source.replace(b'\\r\\n',b'\\n')\n source=source.replace(b'\\r',b'\\n')\n return source\n \n \n \ndef _compile_source(pathname,source):\n source=_normalize_line_endings(source)\n return compile(source,pathname,'exec',dont_inherit=True)\n \n \n \ndef _parse_dostime(d,t):\n return time.mktime((\n (d >>9)+1980,\n (d >>5)&0xF,\n d&0x1F,\n t >>11,\n (t >>5)&0x3F,\n (t&0x1F)*2,\n -1,-1,-1))\n \n \n \n \ndef _get_mtime_and_size_of_source(self,path):\n try:\n \n assert path[-1:]in('c','o')\n path=path[:-1]\n toc_entry=self._files[path]\n \n \n time=toc_entry[5]\n date=toc_entry[6]\n uncompressed_size=toc_entry[3]\n return _parse_dostime(date,time),uncompressed_size\n except(KeyError,IndexError,TypeError):\n return 0,0\n \n \n \n \n \ndef _get_pyc_source(self,path):\n\n assert path[-1:]in('c','o')\n path=path[:-1]\n \n try:\n toc_entry=self._files[path]\n except KeyError:\n return None\n else:\n return _get_data(self.archive,toc_entry)\n \n \n \n \ndef _get_module_code(self,fullname):\n path=_get_module_path(self,fullname)\n import_error=None\n for suffix,isbytecode,ispackage in _zip_searchorder:\n fullpath=path+suffix\n _bootstrap._verbose_message('trying {}{}{}',self.archive,path_sep,fullpath,verbosity=2)\n try:\n toc_entry=self._files[fullpath]\n except KeyError:\n pass\n else:\n modpath=toc_entry[0]\n data=_get_data(self.archive,toc_entry)\n code=None\n if isbytecode:\n try:\n code=_unmarshal_code(self,modpath,fullpath,fullname,data)\n except ImportError as exc:\n import_error=exc\n else:\n code=_compile_source(modpath,data)\n if code is None:\n \n \n continue\n modpath=toc_entry[0]\n return code,ispackage,modpath\n else:\n if import_error:\n msg=f\"module load failed: {import_error}\"\n raise ZipImportError(msg,name=fullname)from import_error\n else:\n raise ZipImportError(f\"can't find module {fullname !r}\",name=fullname)\n", ["_frozen_importlib", "_frozen_importlib_external", "_imp", "_io", "_warnings", "importlib.readers", "marshal", "sys", "time", "zlib"]], "token": [".py", 
"''\n\n\n__all__=['tok_name','ISTERMINAL','ISNONTERMINAL','ISEOF']\n\nENDMARKER=0\nNAME=1\nNUMBER=2\nSTRING=3\nNEWLINE=4\nINDENT=5\nDEDENT=6\nLPAR=7\nRPAR=8\nLSQB=9\nRSQB=10\nCOLON=11\nCOMMA=12\nSEMI=13\nPLUS=14\nMINUS=15\nSTAR=16\nSLASH=17\nVBAR=18\nAMPER=19\nLESS=20\nGREATER=21\nEQUAL=22\nDOT=23\nPERCENT=24\nLBRACE=25\nRBRACE=26\nEQEQUAL=27\nNOTEQUAL=28\nLESSEQUAL=29\nGREATEREQUAL=30\nTILDE=31\nCIRCUMFLEX=32\nLEFTSHIFT=33\nRIGHTSHIFT=34\nDOUBLESTAR=35\nPLUSEQUAL=36\nMINEQUAL=37\nSTAREQUAL=38\nSLASHEQUAL=39\nPERCENTEQUAL=40\nAMPEREQUAL=41\nVBAREQUAL=42\nCIRCUMFLEXEQUAL=43\nLEFTSHIFTEQUAL=44\nRIGHTSHIFTEQUAL=45\nDOUBLESTAREQUAL=46\nDOUBLESLASH=47\nDOUBLESLASHEQUAL=48\nAT=49\nATEQUAL=50\nRARROW=51\nELLIPSIS=52\nCOLONEQUAL=53\nEXCLAMATION=54\nOP=55\nAWAIT=56\nASYNC=57\nTYPE_IGNORE=58\nTYPE_COMMENT=59\nSOFT_KEYWORD=60\nFSTRING_START=61\nFSTRING_MIDDLE=62\nFSTRING_END=63\nCOMMENT=64\nNL=65\n\nERRORTOKEN=66\nENCODING=67\nN_TOKENS=68\n\nNT_OFFSET=256\n\ntok_name={value:name\nfor name,value in globals().items()\nif isinstance(value,int)and not name.startswith('_')}\n__all__.extend(tok_name.values())\n\nEXACT_TOKEN_TYPES={\n'!':EXCLAMATION,\n'!=':NOTEQUAL,\n'%':PERCENT,\n'%=':PERCENTEQUAL,\n'&':AMPER,\n'&=':AMPEREQUAL,\n'(':LPAR,\n')':RPAR,\n'*':STAR,\n'**':DOUBLESTAR,\n'**=':DOUBLESTAREQUAL,\n'*=':STAREQUAL,\n'+':PLUS,\n'+=':PLUSEQUAL,\n',':COMMA,\n'-':MINUS,\n'-=':MINEQUAL,\n'->':RARROW,\n'.':DOT,\n'...':ELLIPSIS,\n'/':SLASH,\n'//':DOUBLESLASH,\n'//=':DOUBLESLASHEQUAL,\n'/=':SLASHEQUAL,\n':':COLON,\n':=':COLONEQUAL,\n';':SEMI,\n'<':LESS,\n'<<':LEFTSHIFT,\n'<<=':LEFTSHIFTEQUAL,\n'<=':LESSEQUAL,\n'=':EQUAL,\n'==':EQEQUAL,\n'>':GREATER,\n'>=':GREATEREQUAL,\n'>>':RIGHTSHIFT,\n'>>=':RIGHTSHIFTEQUAL,\n'@':AT,\n'@=':ATEQUAL,\n'[':LSQB,\n']':RSQB,\n'^':CIRCUMFLEX,\n'^=':CIRCUMFLEXEQUAL,\n'{':LBRACE,\n'|':VBAR,\n'|=':VBAREQUAL,\n'}':RBRACE,\n'~':TILDE,\n}\n\ndef ISTERMINAL(x):\n return x =NT_OFFSET\n \ndef ISEOF(x):\n return x ==ENDMARKER\n", []], "textwrap": [".py", "''\n\n\n\n\n\n\nimport re\n\n__all__=['TextWrapper','wrap','fill','dedent','indent','shorten']\n\n\n\n\n_whitespace='\\t\\n\\x0b\\x0c\\r '\n\nclass TextWrapper:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n unicode_whitespace_trans=dict.fromkeys(map(ord,_whitespace),ord(' '))\n \n \n \n \n \n \n \n word_punct=r'[\\w!\"\\'&.,?]'\n letter=r'[^\\d\\W]'\n whitespace=r'[%s]'%re.escape(_whitespace)\n nowhitespace='[^'+whitespace[1:]\n wordsep_re=re.compile(r'''\n ( # any whitespace\n %(ws)s+\n | # em-dash between words\n (?<=%(wp)s) -{2,} (?=\\w)\n | # word, possibly hyphenated\n %(nws)s+? (?:\n # hyphenated word\n -(?: (?<=%(lt)s{2}-) | (?<=%(lt)s-%(lt)s-))\n (?= %(lt)s -? 
%(lt)s)\n | # end of word\n (?=%(ws)s|\\Z)\n | # em-dash\n (?<=%(wp)s) (?=-{2,}\\w)\n )\n )'''%{'wp':word_punct,'lt':letter,\n 'ws':whitespace,'nws':nowhitespace},\n re.VERBOSE)\n del word_punct,letter,nowhitespace\n \n \n \n \n \n wordsep_simple_re=re.compile(r'(%s+)'%whitespace)\n del whitespace\n \n \n \n sentence_end_re=re.compile(r'[a-z]'\n r'[\\.\\!\\?]'\n r'[\\\"\\']?'\n r'\\Z')\n \n def __init__(self,\n width=70,\n initial_indent=\"\",\n subsequent_indent=\"\",\n expand_tabs=True,\n replace_whitespace=True,\n fix_sentence_endings=False,\n break_long_words=True,\n drop_whitespace=True,\n break_on_hyphens=True,\n tabsize=8,\n *,\n max_lines=None,\n placeholder=' [...]'):\n self.width=width\n self.initial_indent=initial_indent\n self.subsequent_indent=subsequent_indent\n self.expand_tabs=expand_tabs\n self.replace_whitespace=replace_whitespace\n self.fix_sentence_endings=fix_sentence_endings\n self.break_long_words=break_long_words\n self.drop_whitespace=drop_whitespace\n self.break_on_hyphens=break_on_hyphens\n self.tabsize=tabsize\n self.max_lines=max_lines\n self.placeholder=placeholder\n \n \n \n \n \n def _munge_whitespace(self,text):\n ''\n\n\n\n\n \n if self.expand_tabs:\n text=text.expandtabs(self.tabsize)\n if self.replace_whitespace:\n text=text.translate(self.unicode_whitespace_trans)\n return text\n \n \n def _split(self,text):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.break_on_hyphens is True:\n chunks=self.wordsep_re.split(text)\n else:\n chunks=self.wordsep_simple_re.split(text)\n chunks=[c for c in chunks if c]\n return chunks\n \n def _fix_sentence_endings(self,chunks):\n ''\n\n\n\n\n\n\n \n i=0\n patsearch=self.sentence_end_re.search\n while i space_left:\n \n \n hyphen=chunk.rfind('-',0,space_left)\n if hyphen >0 and any(c !='-'for c in chunk[:hyphen]):\n end=hyphen+1\n cur_line.append(chunk[:end])\n reversed_chunks[-1]=chunk[end:]\n \n \n \n \n elif not cur_line:\n cur_line.append(reversed_chunks.pop())\n \n \n \n \n \n \n \n def _wrap_chunks(self,chunks):\n ''\n\n\n\n\n\n\n\n\n\n\n \n lines=[]\n if self.width <=0:\n raise ValueError(\"invalid width %r (must be > 0)\"%self.width)\n if self.max_lines is not None:\n if self.max_lines >1:\n indent=self.subsequent_indent\n else:\n indent=self.initial_indent\n if len(indent)+len(self.placeholder.lstrip())>self.width:\n raise ValueError(\"placeholder too large for max width\")\n \n \n \n chunks.reverse()\n \n while chunks:\n \n \n \n cur_line=[]\n cur_len=0\n \n \n if lines:\n indent=self.subsequent_indent\n else:\n indent=self.initial_indent\n \n \n width=self.width -len(indent)\n \n \n \n if self.drop_whitespace and chunks[-1].strip()==''and lines:\n del chunks[-1]\n \n while chunks:\n l=len(chunks[-1])\n \n \n if cur_len+l <=width:\n cur_line.append(chunks.pop())\n cur_len +=l\n \n \n else:\n break\n \n \n \n if chunks and len(chunks[-1])>width:\n self._handle_long_word(chunks,cur_line,cur_len,width)\n cur_len=sum(map(len,cur_line))\n \n \n if self.drop_whitespace and cur_line and cur_line[-1].strip()=='':\n cur_len -=len(cur_line[-1])\n del cur_line[-1]\n \n if cur_line:\n if(self.max_lines is None or\n len(lines)+1 >30]+\n b32tab2[(c >>20)&0x3ff]+\n b32tab2[(c >>10)&0x3ff]+\n b32tab2[c&0x3ff]\n )\n \n if leftover ==1:\n encoded[-6:]=b'======'\n elif leftover ==2:\n encoded[-4:]=b'===='\n elif leftover ==3:\n encoded[-3:]=b'==='\n elif leftover ==4:\n encoded[-1:]=b'='\n return bytes(encoded)\n \ndef _b32decode(alphabet,s,casefold=False,map01=None):\n global _b32rev\n \n \n if alphabet not in _b32rev:\n 
_b32rev[alphabet]={v:k for k,v in enumerate(alphabet)}\n s=_bytes_from_decode_data(s)\n if len(s)%8:\n raise binascii.Error('Incorrect padding')\n \n \n \n if map01 is not None:\n map01=_bytes_from_decode_data(map01)\n assert len(map01)==1,repr(map01)\n s=s.translate(bytes.maketrans(b'01',b'O'+map01))\n if casefold:\n s=s.upper()\n \n \n \n l=len(s)\n s=s.rstrip(b'=')\n padchars=l -len(s)\n \n decoded=bytearray()\n b32rev=_b32rev[alphabet]\n for i in range(0,len(s),8):\n quanta=s[i:i+8]\n acc=0\n try:\n for c in quanta:\n acc=(acc <<5)+b32rev[c]\n except KeyError:\n raise binascii.Error('Non-base32 digit found')from None\n decoded +=acc.to_bytes(5)\n \n if l %8 or padchars not in{0,1,3,4,6}:\n raise binascii.Error('Incorrect padding')\n if padchars and decoded:\n acc <<=5 *padchars\n last=acc.to_bytes(5)\n leftover=(43 -5 *padchars)//8\n decoded[-5:]=last[:leftover]\n return bytes(decoded)\n \n \ndef b32encode(s):\n return _b32encode(_b32alphabet,s)\nb32encode.__doc__=_B32_ENCODE_DOCSTRING.format(encoding='base32')\n\ndef b32decode(s,casefold=False,map01=None):\n return _b32decode(_b32alphabet,s,casefold,map01)\nb32decode.__doc__=_B32_DECODE_DOCSTRING.format(encoding='base32',\nextra_args=_B32_DECODE_MAP01_DOCSTRING)\n\ndef b32hexencode(s):\n return _b32encode(_b32hexalphabet,s)\nb32hexencode.__doc__=_B32_ENCODE_DOCSTRING.format(encoding='base32hex')\n\ndef b32hexdecode(s,casefold=False):\n\n return _b32decode(_b32hexalphabet,s,casefold)\nb32hexdecode.__doc__=_B32_DECODE_DOCSTRING.format(encoding='base32hex',\nextra_args='')\n\n\n\n\n\ndef b16encode(s):\n ''\n \n return binascii.hexlify(s).upper()\n \n \ndef b16decode(s,casefold=False):\n ''\n\n\n\n\n\n\n\n \n s=_bytes_from_decode_data(s)\n if casefold:\n s=s.upper()\n if re.search(b'[^0-9A-F]',s):\n raise binascii.Error('Non-base16 digit found')\n return binascii.unhexlify(s)\n \n \n \n \n \n_a85chars=None\n_a85chars2=None\n_A85START=b\"<~\"\n_A85END=b\"~>\"\n\ndef _85encode(b,chars,chars2,pad=False,foldnuls=False,foldspaces=False):\n\n if not isinstance(b,bytes_types):\n b=memoryview(b).tobytes()\n \n padding=(-len(b))%4\n if padding:\n b=b+b'\\0'*padding\n words=struct.Struct('!%dI'%(len(b)//4)).unpack(b)\n \n chunks=[b'z'if foldnuls and not word else\n b'y'if foldspaces and word ==0x20202020 else\n (chars2[word //614125]+\n chars2[word //85 %7225]+\n chars[word %85])\n for word in words]\n \n if padding and not pad:\n if chunks[-1]==b'z':\n chunks[-1]=chars[0]*5\n chunks[-1]=chunks[-1][:-padding]\n \n return b''.join(chunks)\n \ndef a85encode(b,*,foldspaces=False,wrapcol=0,pad=False,adobe=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global _a85chars,_a85chars2\n \n \n if _a85chars2 is None:\n _a85chars=[bytes((i,))for i in range(33,118)]\n _a85chars2=[(a+b)for a in _a85chars for b in _a85chars]\n \n result=_85encode(b,_a85chars,_a85chars2,pad,True,foldspaces)\n \n if adobe:\n result=_A85START+result\n if wrapcol:\n wrapcol=max(2 if adobe else 1,wrapcol)\n chunks=[result[i:i+wrapcol]\n for i in range(0,len(result),wrapcol)]\n if adobe:\n if len(chunks[-1])+2 >wrapcol:\n chunks.append(b'')\n result=b'\\n'.join(chunks)\n if adobe:\n result +=_A85END\n \n return result\n \ndef a85decode(b,*,foldspaces=False,adobe=False,ignorechars=b' \\t\\n\\r\\v'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n b=_bytes_from_decode_data(b)\n if adobe:\n if not b.endswith(_A85END):\n raise ValueError(\n \"Ascii85 encoded byte sequences must end \"\n \"with {!r}\".format(_A85END)\n )\n if b.startswith(_A85START):\n b=b[2:-2]\n else:\n b=b[:-2]\n \n \n \n \n 
packI=struct.Struct('!I').pack\n decoded=[]\n decoded_append=decoded.append\n curr=[]\n curr_append=curr.append\n curr_clear=curr.clear\n for x in b+b'u'*4:\n if b'!'[0]<=x <=b'u'[0]:\n curr_append(x)\n if len(curr)==5:\n acc=0\n for x in curr:\n acc=85 *acc+(x -33)\n try:\n decoded_append(packI(acc))\n except struct.error:\n raise ValueError('Ascii85 overflow')from None\n curr_clear()\n elif x ==b'z'[0]:\n if curr:\n raise ValueError('z inside Ascii85 5-tuple')\n decoded_append(b'\\0\\0\\0\\0')\n elif foldspaces and x ==b'y'[0]:\n if curr:\n raise ValueError('y inside Ascii85 5-tuple')\n decoded_append(b'\\x20\\x20\\x20\\x20')\n elif x in ignorechars:\n \n continue\n else:\n raise ValueError('Non-Ascii85 digit found: %c'%x)\n \n result=b''.join(decoded)\n padding=4 -len(curr)\n if padding:\n \n result=result[:-padding]\n return result\n \n \n \n_b85alphabet=(b\"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\nb\"abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~\")\n_b85chars=None\n_b85chars2=None\n_b85dec=None\n\ndef b85encode(b,pad=False):\n ''\n\n\n\n \n global _b85chars,_b85chars2\n \n \n if _b85chars2 is None:\n _b85chars=[bytes((i,))for i in _b85alphabet]\n _b85chars2=[(a+b)for a in _b85chars for b in _b85chars]\n return _85encode(b,_b85chars,_b85chars2,pad)\n \ndef b85decode(b):\n ''\n\n\n \n global _b85dec\n \n \n if _b85dec is None:\n _b85dec=[None]*256\n for i,c in enumerate(_b85alphabet):\n _b85dec[c]=i\n \n b=_bytes_from_decode_data(b)\n padding=(-len(b))%5\n b=b+b'~'*padding\n out=[]\n packI=struct.Struct('!I').pack\n for i in range(0,len(b),5):\n chunk=b[i:i+5]\n acc=0\n try:\n for c in chunk:\n acc=acc *85+_b85dec[c]\n except TypeError:\n for j,c in enumerate(chunk):\n if _b85dec[c]is None:\n raise ValueError('bad base85 character at position %d'\n %(i+j))from None\n raise\n try:\n out.append(packI(acc))\n except struct.error:\n raise ValueError('base85 overflow in hunk starting at byte %d'\n %i)from None\n \n result=b''.join(out)\n if padding:\n result=result[:-padding]\n return result\n \n \n \n \n \nMAXLINESIZE=76\nMAXBINSIZE=(MAXLINESIZE //4)*3\n\ndef encode(input,output):\n ''\n while s :=input.read(MAXBINSIZE):\n while len(s)')\n_markedsectionclose=re.compile(r']\\s*]\\s*>')\n\n\n\n\n_msmarkedsectionclose=re.compile(r']\\s*>')\n\ndel re\n\n\nclass ParserBase:\n ''\n \n \n def __init__(self):\n if self.__class__ is ParserBase:\n raise RuntimeError(\n \"_markupbase.ParserBase must be subclassed\")\n \n def reset(self):\n self.lineno=1\n self.offset=0\n \n def getpos(self):\n ''\n return self.lineno,self.offset\n \n \n \n \n \n def updatepos(self,i,j):\n if i >=j:\n return j\n rawdata=self.rawdata\n nlines=rawdata.count(\"\\n\",i,j)\n if nlines:\n self.lineno=self.lineno+nlines\n pos=rawdata.rindex(\"\\n\",i,j)\n self.offset=j -(pos+1)\n else :\n self.offset=self.offset+j -i\n return j\n \n _decl_otherchars=''\n \n \n def parse_declaration(self,i):\n \n \n \n \n \n \n \n \n \n \n rawdata=self.rawdata\n j=i+2\n assert rawdata[i:j]==\"\":\n \n return j+1\n if rawdata[j:j+1]in (\"-\",\"\"):\n \n \n return -1\n \n n=len(rawdata)\n if rawdata[j:j+2]=='--':\n \n return self.parse_comment(i)\n elif rawdata[j]=='[':\n \n \n \n \n return self.parse_marked_section(i)\n else :\n decltype,j=self._scan_name(j,i)\n if j <0:\n return j\n if decltype ==\"doctype\":\n self._decl_otherchars=''\n while j \":\n \n data=rawdata[i+2:j]\n if decltype ==\"doctype\":\n self.handle_decl(data)\n else :\n \n \n \n \n self.unknown_decl(data)\n return j+1\n if c in \"\\\"'\":\n 
m=_declstringlit_match(rawdata,j)\n if not m:\n return -1\n j=m.end()\n elif c in \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ\":\n name,j=self._scan_name(j,i)\n elif c in self._decl_otherchars:\n j=j+1\n elif c ==\"[\":\n \n if decltype ==\"doctype\":\n j=self._parse_doctype_subset(j+1,i)\n elif decltype in {\"attlist\",\"linktype\",\"link\",\"element\"}:\n \n \n \n \n raise AssertionError(\"unsupported '[' char in %s declaration\"%decltype)\n else :\n raise AssertionError(\"unexpected '[' char in declaration\")\n else :\n raise AssertionError(\"unexpected %r char in declaration\"%rawdata[j])\n if j <0:\n return j\n return -1\n \n \n \n def parse_marked_section(self,i,report=1):\n rawdata=self.rawdata\n assert rawdata[i:i+3]=='n:\n \n return -1\n if rawdata[j:j+4]==\" unknown values %r [%s]'\n %(cls.__name__,value,unknown,bin(unknown))\n )\n \n if cls._member_type_ is object:\n \n pseudo_member=object.__new__(cls)\n else:\n pseudo_member=cls._member_type_.__new__(cls,value)\n if not hasattr(pseudo_member,'_value_'):\n pseudo_member._value_=value\n if member_value or aliases:\n members=[]\n combined_value=0\n for m in cls._iter_member_(member_value):\n members.append(m)\n combined_value |=m._value_\n if aliases:\n value=member_value |aliases\n for n,pm in cls._member_map_.items():\n if pm not in members and pm._value_ and pm._value_&value ==pm._value_:\n members.append(pm)\n combined_value |=pm._value_\n unknown=value ^combined_value\n pseudo_member._name_='|'.join([m._name_ for m in members])\n if not combined_value:\n pseudo_member._name_=None\n elif unknown and cls._boundary_ is STRICT:\n raise ValueError('%r: no members with value %r'%(cls,unknown))\n elif unknown:\n pseudo_member._name_ +='|%s'%cls._numeric_repr_(unknown)\n else:\n pseudo_member._name_=None\n \n \n \n pseudo_member=cls._value2member_map_.setdefault(value,pseudo_member)\n if neg_value is not None:\n cls._value2member_map_[neg_value]=pseudo_member\n return pseudo_member\n \n def __contains__(self,other):\n ''\n\n \n if not isinstance(other,self.__class__):\n raise TypeError(\n \"unsupported operand type(s) for 'in': %r and %r\"%(\n type(other).__qualname__,self.__class__.__qualname__))\n return other._value_&self._value_ ==other._value_\n \n def __iter__(self):\n ''\n\n \n yield from self._iter_member_(self._value_)\n \n def __len__(self):\n return self._value_.bit_count()\n \n def __repr__(self):\n cls_name=self.__class__.__name__\n v_repr=self.__class__._value_repr_ or repr\n if self._name_ is None:\n return \"<%s: %s>\"%(cls_name,v_repr(self._value_))\n else:\n return \"<%s.%s: %s>\"%(cls_name,self._name_,v_repr(self._value_))\n \n def __str__(self):\n cls_name=self.__class__.__name__\n if self._name_ is None:\n return '%s(%r)'%(cls_name,self._value_)\n else:\n return \"%s.%s\"%(cls_name,self._name_)\n \n def __bool__(self):\n return bool(self._value_)\n \n def __or__(self,other):\n if isinstance(other,self.__class__):\n other=other._value_\n elif self._member_type_ is not object and isinstance(other,self._member_type_):\n other=other\n else:\n return NotImplemented\n value=self._value_\n return self.__class__(value |other)\n \n def __and__(self,other):\n if isinstance(other,self.__class__):\n other=other._value_\n elif self._member_type_ is not object and isinstance(other,self._member_type_):\n other=other\n else:\n return NotImplemented\n value=self._value_\n return self.__class__(value&other)\n \n def __xor__(self,other):\n if isinstance(other,self.__class__):\n other=other._value_\n elif 
self._member_type_ is not object and isinstance(other,self._member_type_):\n other=other\n else:\n return NotImplemented\n value=self._value_\n return self.__class__(value ^other)\n \n def __invert__(self):\n if self._inverted_ is None:\n if self._boundary_ in(EJECT,KEEP):\n self._inverted_=self.__class__(~self._value_)\n else:\n self._inverted_=self.__class__(self._singles_mask_&~self._value_)\n return self._inverted_\n \n __rand__=__and__\n __ror__=__or__\n __rxor__=__xor__\n \n \nclass IntFlag(int,ReprEnum,Flag,boundary=KEEP):\n ''\n\n \n \n \ndef _high_bit(value):\n ''\n\n \n return value.bit_length()-1\n \ndef unique(enumeration):\n ''\n\n \n duplicates=[]\n for name,member in enumeration.__members__.items():\n if name !=member.name:\n duplicates.append((name,member.name))\n if duplicates:\n alias_details=', '.join(\n [\"%s -> %s\"%(alias,name)for(alias,name)in duplicates])\n raise ValueError('duplicate values found in %r: %s'%\n (enumeration,alias_details))\n return enumeration\n \ndef _dataclass_repr(self):\n dcf=self.__dataclass_fields__\n return ', '.join(\n '%s=%r'%(k,getattr(self,k))\n for k in dcf.keys()\n if dcf[k].repr\n )\n \ndef global_enum_repr(self):\n ''\n\n\n\n \n module=self.__class__.__module__.split('.')[-1]\n return '%s.%s'%(module,self._name_)\n \ndef global_flag_repr(self):\n ''\n\n\n\n \n module=self.__class__.__module__.split('.')[-1]\n cls_name=self.__class__.__name__\n if self._name_ is None:\n return \"%s.%s(%r)\"%(module,cls_name,self._value_)\n if _is_single_bit(self):\n return '%s.%s'%(module,self._name_)\n if self._boundary_ is not FlagBoundary.KEEP:\n return '|'.join(['%s.%s'%(module,name)for name in self.name.split('|')])\n else:\n name=[]\n for n in self._name_.split('|'):\n if n[0].isdigit():\n name.append(n)\n else:\n name.append('%s.%s'%(module,n))\n return '|'.join(name)\n \ndef global_str(self):\n ''\n\n \n if self._name_ is None:\n cls_name=self.__class__.__name__\n return \"%s(%r)\"%(cls_name,self._value_)\n else:\n return self._name_\n \ndef global_enum(cls,update_str=False):\n ''\n\n\n\n \n if issubclass(cls,Flag):\n cls.__repr__=global_flag_repr\n else:\n cls.__repr__=global_enum_repr\n if not issubclass(cls,ReprEnum)or update_str:\n cls.__str__=global_str\n sys.modules[cls.__module__].__dict__.update(cls.__members__)\n return cls\n \ndef _simple_enum(etype=Enum,*,boundary=None,use_args=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def convert_class(cls):\n nonlocal use_args\n cls_name=cls.__name__\n if use_args is None:\n use_args=etype._use_args_\n __new__=cls.__dict__.get('__new__')\n if __new__ is not None:\n new_member=__new__.__func__\n else:\n new_member=etype._member_type_.__new__\n attrs={}\n body={}\n if __new__ is not None:\n body['__new_member__']=new_member\n body['_new_member_']=new_member\n body['_use_args_']=use_args\n body['_generate_next_value_']=gnv=etype._generate_next_value_\n body['_member_names_']=member_names=[]\n body['_member_map_']=member_map={}\n body['_value2member_map_']=value2member_map={}\n body['_unhashable_values_']=[]\n body['_member_type_']=member_type=etype._member_type_\n body['_value_repr_']=etype._value_repr_\n if issubclass(etype,Flag):\n body['_boundary_']=boundary or etype._boundary_\n body['_flag_mask_']=None\n body['_all_bits_']=None\n body['_singles_mask_']=None\n body['_inverted_']=None\n body['__or__']=Flag.__or__\n body['__xor__']=Flag.__xor__\n body['__and__']=Flag.__and__\n body['__ror__']=Flag.__ror__\n body['__rxor__']=Flag.__rxor__\n body['__rand__']=Flag.__rand__\n 
body['__invert__']=Flag.__invert__\n for name,obj in cls.__dict__.items():\n if name in('__dict__','__weakref__'):\n continue\n if _is_dunder(name)or _is_private(cls_name,name)or _is_sunder(name)or _is_descriptor(obj):\n body[name]=obj\n else:\n attrs[name]=obj\n if cls.__dict__.get('__doc__')is None:\n body['__doc__']='An enumeration.'\n \n \n \n \n \n enum_class=type(cls_name,(etype,),body,boundary=boundary,_simple=True)\n for name in('__repr__','__str__','__format__','__reduce_ex__'):\n if name not in body:\n \n enum_method=getattr(etype,name)\n found_method=getattr(enum_class,name)\n object_method=getattr(object,name)\n data_type_method=getattr(member_type,name)\n if found_method in(data_type_method,object_method):\n setattr(enum_class,name,enum_method)\n gnv_last_values=[]\n if issubclass(enum_class,Flag):\n \n single_bits=multi_bits=0\n for name,value in attrs.items():\n if isinstance(value,auto)and auto.value is _auto_null:\n value=gnv(name,1,len(member_names),gnv_last_values)\n if value in value2member_map:\n \n member=value2member_map[value]\n redirect=property()\n redirect.member=member\n redirect.__set_name__(enum_class,name)\n setattr(enum_class,name,redirect)\n member_map[name]=member\n else:\n \n if use_args:\n if not isinstance(value,tuple):\n value=(value,)\n member=new_member(enum_class,*value)\n value=value[0]\n else:\n member=new_member(enum_class)\n if __new__ is None:\n member._value_=value\n member._name_=name\n member.__objclass__=enum_class\n member.__init__(value)\n redirect=property()\n redirect.member=member\n redirect.__set_name__(enum_class,name)\n setattr(enum_class,name,redirect)\n member_map[name]=member\n member._sort_order_=len(member_names)\n value2member_map[value]=member\n if _is_single_bit(value):\n \n member_names.append(name)\n single_bits |=value\n else:\n multi_bits |=value\n gnv_last_values.append(value)\n enum_class._flag_mask_=single_bits |multi_bits\n enum_class._singles_mask_=single_bits\n enum_class._all_bits_=2 **((single_bits |multi_bits).bit_length())-1\n \n member_list=[m._value_ for m in enum_class]\n if member_list !=sorted(member_list):\n enum_class._iter_member_=enum_class._iter_member_by_def_\n else:\n \n for name,value in attrs.items():\n if isinstance(value,auto):\n if value.value is _auto_null:\n value.value=gnv(name,1,len(member_names),gnv_last_values)\n value=value.value\n if value in value2member_map:\n \n member=value2member_map[value]\n redirect=property()\n redirect.member=member\n redirect.__set_name__(enum_class,name)\n setattr(enum_class,name,redirect)\n member_map[name]=member\n else:\n \n if use_args:\n if not isinstance(value,tuple):\n value=(value,)\n member=new_member(enum_class,*value)\n value=value[0]\n else:\n member=new_member(enum_class)\n if __new__ is None:\n member._value_=value\n member._name_=name\n member.__objclass__=enum_class\n member.__init__(value)\n member._sort_order_=len(member_names)\n redirect=property()\n redirect.member=member\n redirect.__set_name__(enum_class,name)\n setattr(enum_class,name,redirect)\n member_map[name]=member\n value2member_map[value]=member\n member_names.append(name)\n gnv_last_values.append(value)\n if '__new__'in body:\n enum_class.__new_member__=enum_class.__new__\n enum_class.__new__=Enum.__new__\n return enum_class\n return convert_class\n \n@_simple_enum(StrEnum)\nclass EnumCheck:\n ''\n\n \n CONTINUOUS=\"no skipped integer values\"\n NAMED_FLAGS=\"multi-flag aliases may not contain unnamed flags\"\n UNIQUE=\"one name per 
value\"\nCONTINUOUS,NAMED_FLAGS,UNIQUE=EnumCheck\n\n\nclass verify:\n ''\n\n \n def __init__(self,*checks):\n self.checks=checks\n def __call__(self,enumeration):\n checks=self.checks\n cls_name=enumeration.__name__\n if Flag is not None and issubclass(enumeration,Flag):\n enum_type='flag'\n elif issubclass(enumeration,Enum):\n enum_type='enum'\n else:\n raise TypeError(\"the 'verify' decorator only works with Enum and Flag\")\n for check in checks:\n if check is UNIQUE:\n \n duplicates=[]\n for name,member in enumeration.__members__.items():\n if name !=member.name:\n duplicates.append((name,member.name))\n if duplicates:\n alias_details=', '.join(\n [\"%s -> %s\"%(alias,name)for(alias,name)in duplicates])\n raise ValueError('aliases found in %r: %s'%\n (enumeration,alias_details))\n elif check is CONTINUOUS:\n values=set(e.value for e in enumeration)\n if len(values)<2:\n continue\n low,high=min(values),max(values)\n missing=[]\n if enum_type =='flag':\n \n for i in range(_high_bit(low)+1,_high_bit(high)):\n if 2 **i not in values:\n missing.append(2 **i)\n elif enum_type =='enum':\n \n for i in range(low+1,high):\n if i not in values:\n missing.append(i)\n else:\n raise Exception('verify: unknown type %r'%enum_type)\n if missing:\n raise ValueError(('invalid %s %r: missing values %s'%(\n enum_type,cls_name,', '.join((str(m)for m in missing)))\n )[:256])\n \n elif check is NAMED_FLAGS:\n \n member_names=enumeration._member_names_\n member_values=[m.value for m in enumeration]\n missing_names=[]\n missing_value=0\n for name,alias in enumeration._member_map_.items():\n if name in member_names:\n \n continue\n if alias.value <0:\n \n continue\n values=list(_iter_bits_lsb(alias.value))\n missed=[v for v in values if v not in member_values]\n if missed:\n missing_names.append(name)\n missing_value |=reduce(_or_,missed)\n if missing_names:\n if len(missing_names)==1:\n alias='alias %s is missing'%missing_names[0]\n else:\n alias='aliases %s and %s are missing'%(\n ', '.join(missing_names[:-1]),missing_names[-1]\n )\n if _is_single_bit(missing_value):\n value='value 0x%x'%missing_value\n else:\n value='combined values of 0x%x'%missing_value\n raise ValueError(\n 'invalid Flag %r: %s %s [use enum.show_flag_values(value) for details]'\n %(cls_name,alias,value)\n )\n return enumeration\n \ndef _test_simple_enum(checked_enum,simple_enum):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n failed=[]\n if checked_enum.__dict__ !=simple_enum.__dict__:\n checked_dict=checked_enum.__dict__\n checked_keys=list(checked_dict.keys())\n simple_dict=simple_enum.__dict__\n simple_keys=list(simple_dict.keys())\n member_names=set(\n list(checked_enum._member_map_.keys())\n +list(simple_enum._member_map_.keys())\n )\n for key in set(checked_keys+simple_keys):\n if key in('__module__','_member_map_','_value2member_map_','__doc__'):\n \n continue\n elif key in member_names:\n \n continue\n elif key not in simple_keys:\n failed.append(\"missing key: %r\"%(key,))\n elif key not in checked_keys:\n failed.append(\"extra key: %r\"%(key,))\n else:\n checked_value=checked_dict[key]\n simple_value=simple_dict[key]\n if callable(checked_value)or isinstance(checked_value,bltns.property):\n continue\n if key =='__doc__':\n \n compressed_checked_value=checked_value.replace(' ','').replace('\\t','')\n compressed_simple_value=simple_value.replace(' ','').replace('\\t','')\n if compressed_checked_value !=compressed_simple_value:\n failed.append(\"%r:\\n %s\\n %s\"%(\n key,\n \"checked -> %r\"%(checked_value,),\n \"simple -> 
%r\"%(simple_value,),\n ))\n elif checked_value !=simple_value:\n failed.append(\"%r:\\n %s\\n %s\"%(\n key,\n \"checked -> %r\"%(checked_value,),\n \"simple -> %r\"%(simple_value,),\n ))\n failed.sort()\n for name in member_names:\n failed_member=[]\n if name not in simple_keys:\n failed.append('missing member from simple enum: %r'%name)\n elif name not in checked_keys:\n failed.append('extra member in simple enum: %r'%name)\n else:\n checked_member_dict=checked_enum[name].__dict__\n checked_member_keys=list(checked_member_dict.keys())\n simple_member_dict=simple_enum[name].__dict__\n simple_member_keys=list(simple_member_dict.keys())\n for key in set(checked_member_keys+simple_member_keys):\n if key in('__module__','__objclass__','_inverted_'):\n \n continue\n elif key not in simple_member_keys:\n failed_member.append(\"missing key %r not in the simple enum member %r\"%(key,name))\n elif key not in checked_member_keys:\n failed_member.append(\"extra key %r in simple enum member %r\"%(key,name))\n else:\n checked_value=checked_member_dict[key]\n simple_value=simple_member_dict[key]\n if checked_value !=simple_value:\n failed_member.append(\"%r:\\n %s\\n %s\"%(\n key,\n \"checked member -> %r\"%(checked_value,),\n \"simple member -> %r\"%(simple_value,),\n ))\n if failed_member:\n failed.append('%r member mismatch:\\n %s'%(\n name,'\\n '.join(failed_member),\n ))\n for method in(\n '__str__','__repr__','__reduce_ex__','__format__',\n '__getnewargs_ex__','__getnewargs__','__reduce_ex__','__reduce__'\n ):\n if method in simple_keys and method in checked_keys:\n \n continue\n elif method not in simple_keys and method not in checked_keys:\n \n checked_method=getattr(checked_enum,method,None)\n simple_method=getattr(simple_enum,method,None)\n if hasattr(checked_method,'__func__'):\n checked_method=checked_method.__func__\n simple_method=simple_method.__func__\n if checked_method !=simple_method:\n failed.append(\"%r: %-30s %s\"%(\n method,\n \"checked -> %r\"%(checked_method,),\n \"simple -> %r\"%(simple_method,),\n ))\n else:\n \n \n pass\n if failed:\n raise TypeError('enum mismatch:\\n %s'%'\\n '.join(failed))\n \ndef _old_convert_(etype,name,module,filter,source=None,*,boundary=None):\n ''\n\n \n \n \n \n \n \n module_globals=sys.modules[module].__dict__\n if source:\n source=source.__dict__\n else:\n source=module_globals\n \n \n \n members=[\n (name,value)\n for name,value in source.items()\n if filter(name)]\n try:\n \n members.sort(key=lambda t:(t[1],t[0]))\n except TypeError:\n \n members.sort(key=lambda t:t[0])\n cls=etype(name,members,module=module,boundary=boundary or KEEP)\n return cls\n \n_stdlib_enums=IntEnum,StrEnum,IntFlag\n", ["builtins", "functools", "operator", "sys", "types", "warnings"]], "timeit": [".py", "#! /usr/bin/env python3\n\n\"\"\"Tool for measuring execution time of small code snippets.\n\nThis module avoids a number of common traps for measuring execution\ntimes. 
See also Tim Peters' introduction to the Algorithms chapter in\nthe Python Cookbook, published by O'Reilly.\n\nLibrary usage: see the Timer class.\n\nCommand line usage:\n python timeit.py [-n N] [-r N] [-s S] [-p] [-h] [--] [statement]\n\nOptions:\n -n/--number N: how many times to execute 'statement' (default: see below)\n -r/--repeat N: how many times to repeat the timer (default 5)\n -s/--setup S: statement to be executed once initially (default 'pass').\n Execution time of this setup statement is NOT timed.\n -p/--process: use time.process_time() (default is time.perf_counter())\n -v/--verbose: print raw timing results; repeat for more digits precision\n -u/--unit: set the output time unit (nsec, usec, msec, or sec)\n -h/--help: print this usage message and exit\n --: separate options from statement, use when statement starts with -\n statement: statement to be timed (default 'pass')\n\nA multi-line statement may be given by specifying each line as a\nseparate argument; indented lines are possible by enclosing an\nargument in quotes and using leading spaces. Multiple -s options are\ntreated similarly.\n\nIf -n is not given, a suitable number of loops is calculated by trying\nincreasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the\ntotal time is at least 0.2 seconds.\n\nNote: there is a certain baseline overhead associated with executing a\npass statement. It differs between versions. The code here doesn't try\nto hide it, but you should be aware of it. The baseline overhead can be\nmeasured by invoking the program without arguments.\n\nClasses:\n\n Timer\n\nFunctions:\n\n timeit(string, string) -> float\n repeat(string, string) -> list\n default_timer() -> float\n\n\"\"\"\n\nimport gc\nimport itertools\nimport sys\nimport time\n\n__all__=[\"Timer\",\"timeit\",\"repeat\",\"default_timer\"]\n\ndummy_src_name=\"\"\ndefault_number=1000000\ndefault_repeat=5\ndefault_timer=time.perf_counter\n\n_globals=globals\n\n\n\n\ntemplate=\"\"\"\ndef inner(_it, _timer{init}):\n {setup}\n _t0 = _timer()\n for _i in _it:\n {stmt}\n pass\n _t1 = _timer()\n return _t1 - _t0\n\"\"\"\n\n\ndef reindent(src,indent):\n ''\n return src.replace(\"\\n\",\"\\n\"+\" \"*indent)\n \n \nclass Timer:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,stmt=\"pass\",setup=\"pass\",timer=default_timer,\n globals=None):\n ''\n self.timer=timer\n local_ns={}\n global_ns=_globals()if globals is None else globals\n init=''\n if isinstance(setup,str):\n \n compile(setup,dummy_src_name,\"exec\")\n stmtprefix=setup+'\\n'\n setup=reindent(setup,4)\n elif callable(setup):\n local_ns['_setup']=setup\n init +=', _setup=_setup'\n stmtprefix=''\n setup='_setup()'\n else:\n raise ValueError(\"setup is neither a string nor callable\")\n if isinstance(stmt,str):\n \n compile(stmtprefix+stmt,dummy_src_name,\"exec\")\n stmt=reindent(stmt,8)\n elif callable(stmt):\n local_ns['_stmt']=stmt\n init +=', _stmt=_stmt'\n stmt='_stmt()'\n else:\n raise ValueError(\"stmt is neither a string nor callable\")\n src=template.format(stmt=stmt,setup=setup,init=init)\n self.src=src\n code=compile(src,dummy_src_name,\"exec\")\n exec(code,global_ns,local_ns)\n self.inner=local_ns[\"inner\"]\n \n def print_exc(self,file=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n import linecache,traceback\n if self.src is not None:\n linecache.cache[dummy_src_name]=(len(self.src),\n None,\n self.src.split(\"\\n\"),\n dummy_src_name)\n \n \n traceback.print_exc(file=file)\n \n def timeit(self,number=default_number):\n ''\n\n\n\n\n\n\n\n \n 
it=itertools.repeat(None,number)\n gcold=gc.isenabled()\n gc.disable()\n try:\n timing=self.inner(it,self.timer)\n finally:\n if gcold:\n gc.enable()\n return timing\n \n def repeat(self,repeat=default_repeat,number=default_number):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n r=[]\n for i in range(repeat):\n t=self.timeit(number)\n r.append(t)\n return r\n \n def autorange(self,callback=None):\n ''\n\n\n\n\n\n\n\n \n i=1\n while True:\n for j in 1,2,5:\n number=i *j\n time_taken=self.timeit(number)\n if callback:\n callback(number,time_taken)\n if time_taken >=0.2:\n return(number,time_taken)\n i *=10\n \n \ndef timeit(stmt=\"pass\",setup=\"pass\",timer=default_timer,\nnumber=default_number,globals=None):\n ''\n return Timer(stmt,setup,timer,globals).timeit(number)\n \n \ndef repeat(stmt=\"pass\",setup=\"pass\",timer=default_timer,\nrepeat=default_repeat,number=default_number,globals=None):\n ''\n return Timer(stmt,setup,timer,globals).repeat(repeat,number)\n \n \ndef main(args=None,*,_wrap_timer=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if args is None:\n args=sys.argv[1:]\n import getopt\n try:\n opts,args=getopt.getopt(args,\"n:u:s:r:pvh\",\n [\"number=\",\"setup=\",\"repeat=\",\n \"process\",\"verbose\",\"unit=\",\"help\"])\n except getopt.error as err:\n print(err)\n print(\"use -h/--help for command line help\")\n return 2\n \n timer=default_timer\n stmt=\"\\n\".join(args)or \"pass\"\n number=0\n setup=[]\n repeat=default_repeat\n verbose=0\n time_unit=None\n units={\"nsec\":1e-9,\"usec\":1e-6,\"msec\":1e-3,\"sec\":1.0}\n precision=3\n for o,a in opts:\n if o in(\"-n\",\"--number\"):\n number=int(a)\n if o in(\"-s\",\"--setup\"):\n setup.append(a)\n if o in(\"-u\",\"--unit\"):\n if a in units:\n time_unit=a\n else:\n print(\"Unrecognized unit. Please select nsec, usec, msec, or sec.\",\n file=sys.stderr)\n return 2\n if o in(\"-r\",\"--repeat\"):\n repeat=int(a)\n if repeat <=0:\n repeat=1\n if o in(\"-p\",\"--process\"):\n timer=time.process_time\n if o in(\"-v\",\"--verbose\"):\n if verbose:\n precision +=1\n verbose +=1\n if o in(\"-h\",\"--help\"):\n print(__doc__,end=' ')\n return 0\n setup=\"\\n\".join(setup)or \"pass\"\n \n \n \n \n import os\n sys.path.insert(0,os.curdir)\n if _wrap_timer is not None:\n timer=_wrap_timer(timer)\n \n t=Timer(stmt,setup,timer)\n if number ==0:\n \n callback=None\n if verbose:\n def callback(number,time_taken):\n msg=\"{num} loop{s} -> {secs:.{prec}g} secs\"\n plural=(number !=1)\n print(msg.format(num=number,s='s'if plural else '',\n secs=time_taken,prec=precision))\n try:\n number,_=t.autorange(callback)\n except:\n t.print_exc()\n return 1\n \n if verbose:\n print()\n \n try:\n raw_timings=t.repeat(repeat,number)\n except:\n t.print_exc()\n return 1\n \n def format_time(dt):\n unit=time_unit\n \n if unit is not None:\n scale=units[unit]\n else:\n scales=[(scale,unit)for unit,scale in units.items()]\n scales.sort(reverse=True)\n for scale,unit in scales:\n if dt >=scale:\n break\n \n return \"%.*g %s\"%(precision,dt /scale,unit)\n \n if verbose:\n print(\"raw times: %s\"%\", \".join(map(format_time,raw_timings)))\n print()\n timings=[dt /number for dt in raw_timings]\n \n best=min(timings)\n print(\"%d loop%s, best of %d: %s per loop\"\n %(number,'s'if number !=1 else '',\n repeat,format_time(best)))\n \n best=min(timings)\n worst=max(timings)\n if worst >=best *4:\n import warnings\n warnings.warn_explicit(\"The test results are likely unreliable. 
\"\n \"The worst time (%s) was more than four times \"\n \"slower than the best time (%s).\"\n %(format_time(worst),format_time(best)),\n UserWarning,'',0)\n return None\n \n \nif __name__ ==\"__main__\":\n sys.exit(main())\n", ["gc", "getopt", "itertools", "linecache", "os", "sys", "time", "traceback", "warnings"]], "_signal": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nCTRL_BREAK_EVENT=1\n\nCTRL_C_EVENT=0\n\nNSIG=23\n\nSIGABRT=22\n\nSIGBREAK=21\n\nSIGFPE=8\n\nSIGILL=4\n\nSIGINT=2\n\nSIGSEGV=11\n\nSIGTERM=15\n\nSIG_DFL=0\n\nSIG_IGN=1\n\ndef default_int_handler(*args,**kw):\n ''\n \n pass\n \ndef getsignal(*args,**kw):\n ''\n\n\n\n\n \n pass\n \ndef raise_signal(*args,**kw):\n ''\n pass\n \ndef set_wakeup_fd(*args,**kw):\n ''\n\n\n\n\n \n pass\n \ndef signal(*args,**kw):\n ''\n\n\n\n\n\n \n pass\n \ndef strsignal(*args,**kw):\n ''\n\n \n pass\n \ndef valid_signals(*args,**kw):\n ''\n\n \n pass\n", []], "hmac": [".py", "''\n\n\n\n\nimport warnings as _warnings\ntry :\n import _hashlib as _hashopenssl\nexcept ImportError:\n _hashopenssl=None\n _functype=None\n from _operator import _compare_digest as compare_digest\nelse :\n compare_digest=_hashopenssl.compare_digest\n _functype=type(_hashopenssl.openssl_sha256)\n \nimport hashlib as _hashlib\n\ntrans_5C=bytes((x ^0x5C)for x in range(256))\ntrans_36=bytes((x ^0x36)for x in range(256))\n\n\n\ndigest_size=None\n\n\nclass HMAC:\n ''\n\n\n \n blocksize=64\n \n __slots__=(\n \"_hmac\",\"_inner\",\"_outer\",\"block_size\",\"digest_size\"\n )\n \n def __init__(self,key,msg=None ,digestmod=''):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n if not isinstance(key,(bytes,bytearray)):\n raise TypeError(\"key: expected bytes or bytearray, but got %r\"%type(key).__name__)\n \n if not digestmod:\n raise TypeError(\"Missing required parameter 'digestmod'.\")\n \n if _hashopenssl and isinstance(digestmod,(str,_functype)):\n try :\n self._init_hmac(key,msg,digestmod)\n except _hashopenssl.UnsupportedDigestmodError:\n self._init_old(key,msg,digestmod)\n else :\n self._init_old(key,msg,digestmod)\n \n def _init_hmac(self,key,msg,digestmod):\n self._hmac=_hashopenssl.hmac_new(key,msg,digestmod=digestmod)\n self.digest_size=self._hmac.digest_size\n self.block_size=self._hmac.block_size\n \n def _init_old(self,key,msg,digestmod):\n if callable(digestmod):\n digest_cons=digestmod\n elif isinstance(digestmod,str):\n digest_cons=lambda d=b'':_hashlib.new(digestmod,d)\n else :\n digest_cons=lambda d=b'':digestmod.new(d)\n \n self._hmac=None\n self._outer=digest_cons()\n self._inner=digest_cons()\n self.digest_size=self._inner.digest_size\n \n if hasattr(self._inner,'block_size'):\n blocksize=self._inner.block_size\n if blocksize <16:\n _warnings.warn('block_size of %d seems too small; using our '\n 'default of %d.'%(blocksize,self.blocksize),\n RuntimeWarning,2)\n blocksize=self.blocksize\n else :\n _warnings.warn('No block_size attribute on given digest object; '\n 'Assuming %d.'%(self.blocksize),\n RuntimeWarning,2)\n blocksize=self.blocksize\n \n if len(key)>blocksize:\n key=digest_cons(key).digest()\n \n \n \n self.block_size=blocksize\n \n key=key.ljust(blocksize,b'\\0')\n self._outer.update(key.translate(trans_5C))\n self._inner.update(key.translate(trans_36))\n if msg is not None :\n self.update(msg)\n \n @property\n def name(self):\n if self._hmac:\n return self._hmac.name\n else :\n return f\"hmac-{self._inner.name}\"\n \n def update(self,msg):\n ''\n inst=self._hmac or self._inner\n inst.update(msg)\n \n def copy(self):\n ''\n\n\n 
\n \n other=self.__class__.__new__(self.__class__)\n other.digest_size=self.digest_size\n if self._hmac:\n other._hmac=self._hmac.copy()\n other._inner=other._outer=None\n else :\n other._hmac=None\n other._inner=self._inner.copy()\n other._outer=self._outer.copy()\n return other\n \n def _current(self):\n ''\n\n\n \n if self._hmac:\n return self._hmac\n else :\n h=self._outer.copy()\n h.update(self._inner.digest())\n return h\n \n def digest(self):\n ''\n\n\n\n\n \n h=self._current()\n return h.digest()\n \n def hexdigest(self):\n ''\n \n h=self._current()\n return h.hexdigest()\n \ndef new(key,msg=None ,digestmod=''):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return HMAC(key,msg,digestmod)\n \n \ndef digest(key,msg,digest):\n ''\n\n\n\n\n\n\n \n if _hashopenssl is not None and isinstance(digest,(str,_functype)):\n try :\n return _hashopenssl.hmac_digest(key,msg,digest)\n except _hashopenssl.UnsupportedDigestmodError:\n pass\n \n if callable(digest):\n digest_cons=digest\n elif isinstance(digest,str):\n digest_cons=lambda d=b'':_hashlib.new(digest,d)\n else :\n digest_cons=lambda d=b'':digest.new(d)\n \n inner=digest_cons()\n outer=digest_cons()\n blocksize=getattr(inner,'block_size',64)\n if len(key)>blocksize:\n key=digest_cons(key).digest()\n key=key+b'\\x00'*(blocksize -len(key))\n inner.update(key.translate(trans_36))\n outer.update(key.translate(trans_5C))\n inner.update(msg)\n outer.update(inner.digest())\n return outer.digest()\n", ["_hashlib", "_operator", "hashlib", "warnings"]], "tarfile": [".py", "#!/usr/bin/env python3\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n''\n\n\nversion=\"0.9.0\"\n__author__=\"Lars Gust\\u00e4bel (lars@gustaebel.de)\"\n__credits__=\"Gustavo Niemeyer, Niels Gust\\u00e4bel, Richard Townsend.\"\n\n\n\n\nfrom builtins import open as bltn_open\nimport sys\nimport os\nimport io\nimport shutil\nimport stat\nimport time\nimport struct\nimport copy\nimport re\nimport warnings\n\ntry:\n import pwd\nexcept ImportError:\n pwd=None\ntry:\n import grp\nexcept ImportError:\n grp=None\n \n \n \n \nsymlink_exception=(AttributeError,NotImplementedError,OSError)\n\n\n__all__=[\"TarFile\",\"TarInfo\",\"is_tarfile\",\"TarError\",\"ReadError\",\n\"CompressionError\",\"StreamError\",\"ExtractError\",\"HeaderError\",\n\"ENCODING\",\"USTAR_FORMAT\",\"GNU_FORMAT\",\"PAX_FORMAT\",\n\"DEFAULT_FORMAT\",\"open\",\"fully_trusted_filter\",\"data_filter\",\n\"tar_filter\",\"FilterError\",\"AbsoluteLinkError\",\n\"OutsideDestinationError\",\"SpecialFileError\",\"AbsolutePathError\",\n\"LinkOutsideDestinationError\"]\n\n\n\n\n\nNUL=b\"\\0\"\nBLOCKSIZE=512\nRECORDSIZE=BLOCKSIZE *20\nGNU_MAGIC=b\"ustar 
\\0\"\nPOSIX_MAGIC=b\"ustar\\x0000\"\n\nLENGTH_NAME=100\nLENGTH_LINK=100\nLENGTH_PREFIX=155\n\nREGTYPE=b\"0\"\nAREGTYPE=b\"\\0\"\nLNKTYPE=b\"1\"\nSYMTYPE=b\"2\"\nCHRTYPE=b\"3\"\nBLKTYPE=b\"4\"\nDIRTYPE=b\"5\"\nFIFOTYPE=b\"6\"\nCONTTYPE=b\"7\"\n\nGNUTYPE_LONGNAME=b\"L\"\nGNUTYPE_LONGLINK=b\"K\"\nGNUTYPE_SPARSE=b\"S\"\n\nXHDTYPE=b\"x\"\nXGLTYPE=b\"g\"\nSOLARIS_XHDTYPE=b\"X\"\n\nUSTAR_FORMAT=0\nGNU_FORMAT=1\nPAX_FORMAT=2\nDEFAULT_FORMAT=PAX_FORMAT\n\n\n\n\n\nSUPPORTED_TYPES=(REGTYPE,AREGTYPE,LNKTYPE,\nSYMTYPE,DIRTYPE,FIFOTYPE,\nCONTTYPE,CHRTYPE,BLKTYPE,\nGNUTYPE_LONGNAME,GNUTYPE_LONGLINK,\nGNUTYPE_SPARSE)\n\n\nREGULAR_TYPES=(REGTYPE,AREGTYPE,\nCONTTYPE,GNUTYPE_SPARSE)\n\n\nGNU_TYPES=(GNUTYPE_LONGNAME,GNUTYPE_LONGLINK,\nGNUTYPE_SPARSE)\n\n\nPAX_FIELDS=(\"path\",\"linkpath\",\"size\",\"mtime\",\n\"uid\",\"gid\",\"uname\",\"gname\")\n\n\nPAX_NAME_FIELDS={\"path\",\"linkpath\",\"uname\",\"gname\"}\n\n\n\nPAX_NUMBER_FIELDS={\n\"atime\":float,\n\"ctime\":float,\n\"mtime\":float,\n\"uid\":int,\n\"gid\":int,\n\"size\":int\n}\n\n\n\n\nif os.name ==\"nt\":\n ENCODING=\"utf-8\"\nelse:\n ENCODING=sys.getfilesystemencoding()\n \n \n \n \n \ndef stn(s,length,encoding,errors):\n ''\n \n if s is None:\n raise ValueError(\"metadata cannot contain None\")\n s=s.encode(encoding,errors)\n return s[:length]+(length -len(s))*NUL\n \ndef nts(s,encoding,errors):\n ''\n \n p=s.find(b\"\\0\")\n if p !=-1:\n s=s[:p]\n return s.decode(encoding,errors)\n \ndef nti(s):\n ''\n \n \n \n if s[0]in(0o200,0o377):\n n=0\n for i in range(len(s)-1):\n n <<=8\n n +=s[i+1]\n if s[0]==0o377:\n n=-(256 **(len(s)-1)-n)\n else:\n try:\n s=nts(s,\"ascii\",\"strict\")\n n=int(s.strip()or \"0\",8)\n except ValueError:\n raise InvalidHeaderError(\"invalid header\")\n return n\n \ndef itn(n,digits=8,format=DEFAULT_FORMAT):\n ''\n \n \n \n \n \n \n \n \n \n original_n=n\n n=int(n)\n if 0 <=n <8 **(digits -1):\n s=bytes(\"%0*o\"%(digits -1,n),\"ascii\")+NUL\n elif format ==GNU_FORMAT and -256 **(digits -1)<=n <256 **(digits -1):\n if n >=0:\n s=bytearray([0o200])\n else:\n s=bytearray([0o377])\n n=256 **digits+n\n \n for i in range(digits -1):\n s.insert(1,n&0o377)\n n >>=8\n else:\n raise ValueError(\"overflow in number field\")\n \n return s\n \ndef calc_chksums(buf):\n ''\n\n\n\n\n\n\n \n unsigned_chksum=256+sum(struct.unpack_from(\"148B8x356B\",buf))\n signed_chksum=256+sum(struct.unpack_from(\"148b8x356b\",buf))\n return unsigned_chksum,signed_chksum\n \ndef copyfileobj(src,dst,length=None,exception=OSError,bufsize=None):\n ''\n\n \n bufsize=bufsize or 16 *1024\n if length ==0:\n return\n if length is None:\n shutil.copyfileobj(src,dst,bufsize)\n return\n \n blocks,remainder=divmod(length,bufsize)\n for b in range(blocks):\n buf=src.read(bufsize)\n if len(buf)self.bufsize:\n self.fileobj.write(self.buf[:self.bufsize])\n self.buf=self.buf[self.bufsize:]\n \n def close(self):\n ''\n\n \n if self.closed:\n return\n \n self.closed=True\n try:\n if self.mode ==\"w\"and self.comptype !=\"tar\":\n self.buf +=self.cmp.flush()\n \n if self.mode ==\"w\"and self.buf:\n self.fileobj.write(self.buf)\n self.buf=b\"\"\n if self.comptype ==\"gz\":\n self.fileobj.write(struct.pack(\"=0:\n blocks,remainder=divmod(pos -self.pos,self.bufsize)\n for i in range(blocks):\n self.read(self.bufsize)\n self.read(remainder)\n else:\n raise StreamError(\"seeking backwards is not allowed\")\n return self.pos\n \n def read(self,size):\n ''\n assert size is not None\n buf=self._read(size)\n self.pos +=len(buf)\n return buf\n \n def _read(self,size):\n ''\n \n if 
self.comptype ==\"tar\":\n return self.__read(size)\n \n c=len(self.dbuf)\n t=[self.dbuf]\n while c lastpos:\n self.map.append((False,lastpos,offset,None))\n self.map.append((True,offset,offset+size,realpos))\n realpos +=size\n lastpos=offset+size\n if lastpos 0:\n while True:\n data,start,stop,offset=self.map[self.map_index]\n if start <=self.position \"%(self.__class__.__name__,self.name,id(self))\n \n def replace(self,*,\n name=_KEEP,mtime=_KEEP,mode=_KEEP,linkname=_KEEP,\n uid=_KEEP,gid=_KEEP,uname=_KEEP,gname=_KEEP,\n deep=True,_KEEP=_KEEP):\n ''\n \n if deep:\n result=copy.deepcopy(self)\n else:\n result=copy.copy(self)\n if name is not _KEEP:\n result.name=name\n if mtime is not _KEEP:\n result.mtime=mtime\n if mode is not _KEEP:\n result.mode=mode\n if linkname is not _KEEP:\n result.linkname=linkname\n if uid is not _KEEP:\n result.uid=uid\n if gid is not _KEEP:\n result.gid=gid\n if uname is not _KEEP:\n result.uname=uname\n if gname is not _KEEP:\n result.gname=gname\n return result\n \n def get_info(self):\n ''\n \n if self.mode is None:\n mode=None\n else:\n mode=self.mode&0o7777\n info={\n \"name\":self.name,\n \"mode\":mode,\n \"uid\":self.uid,\n \"gid\":self.gid,\n \"size\":self.size,\n \"mtime\":self.mtime,\n \"chksum\":self.chksum,\n \"type\":self.type,\n \"linkname\":self.linkname,\n \"uname\":self.uname,\n \"gname\":self.gname,\n \"devmajor\":self.devmajor,\n \"devminor\":self.devminor\n }\n \n if info[\"type\"]==DIRTYPE and not info[\"name\"].endswith(\"/\"):\n info[\"name\"]+=\"/\"\n \n return info\n \n def tobuf(self,format=DEFAULT_FORMAT,encoding=ENCODING,errors=\"surrogateescape\"):\n ''\n \n info=self.get_info()\n for name,value in info.items():\n if value is None:\n raise ValueError(\"%s may not be None\"%name)\n \n if format ==USTAR_FORMAT:\n return self.create_ustar_header(info,encoding,errors)\n elif format ==GNU_FORMAT:\n return self.create_gnu_header(info,encoding,errors)\n elif format ==PAX_FORMAT:\n return self.create_pax_header(info,encoding)\n else:\n raise ValueError(\"invalid format\")\n \n def create_ustar_header(self,info,encoding,errors):\n ''\n \n info[\"magic\"]=POSIX_MAGIC\n \n if len(info[\"linkname\"].encode(encoding,errors))>LENGTH_LINK:\n raise ValueError(\"linkname is too long\")\n \n if len(info[\"name\"].encode(encoding,errors))>LENGTH_NAME:\n info[\"prefix\"],info[\"name\"]=self._posix_split_name(info[\"name\"],encoding,errors)\n \n return self._create_header(info,USTAR_FORMAT,encoding,errors)\n \n def create_gnu_header(self,info,encoding,errors):\n ''\n \n info[\"magic\"]=GNU_MAGIC\n \n buf=b\"\"\n if len(info[\"linkname\"].encode(encoding,errors))>LENGTH_LINK:\n buf +=self._create_gnu_long_header(info[\"linkname\"],GNUTYPE_LONGLINK,encoding,errors)\n \n if len(info[\"name\"].encode(encoding,errors))>LENGTH_NAME:\n buf +=self._create_gnu_long_header(info[\"name\"],GNUTYPE_LONGNAME,encoding,errors)\n \n return buf+self._create_header(info,GNU_FORMAT,encoding,errors)\n \n def create_pax_header(self,info,encoding):\n ''\n\n\n \n info[\"magic\"]=POSIX_MAGIC\n pax_headers=self.pax_headers.copy()\n \n \n \n for name,hname,length in(\n (\"name\",\"path\",LENGTH_NAME),(\"linkname\",\"linkpath\",LENGTH_LINK),\n (\"uname\",\"uname\",32),(\"gname\",\"gname\",32)):\n \n if hname in pax_headers:\n \n continue\n \n \n try:\n info[name].encode(\"ascii\",\"strict\")\n except UnicodeEncodeError:\n pax_headers[hname]=info[name]\n continue\n \n if len(info[name])>length:\n pax_headers[hname]=info[name]\n \n \n \n for name,digits 
in((\"uid\",8),(\"gid\",8),(\"size\",12),(\"mtime\",12)):\n needs_pax=False\n \n val=info[name]\n val_is_float=isinstance(val,float)\n val_int=round(val)if val_is_float else val\n if not 0 <=val_int <8 **(digits -1):\n \n info[name]=0\n needs_pax=True\n elif val_is_float:\n \n \n info[name]=val_int\n needs_pax=True\n \n \n if needs_pax and name not in pax_headers:\n pax_headers[name]=str(val)\n \n \n if pax_headers:\n buf=self._create_pax_generic_header(pax_headers,XHDTYPE,encoding)\n else:\n buf=b\"\"\n \n return buf+self._create_header(info,USTAR_FORMAT,\"ascii\",\"replace\")\n \n @classmethod\n def create_pax_global_header(cls,pax_headers):\n ''\n \n return cls._create_pax_generic_header(pax_headers,XGLTYPE,\"utf-8\")\n \n def _posix_split_name(self,name,encoding,errors):\n ''\n\n \n components=name.split(\"/\")\n for i in range(1,len(components)):\n prefix=\"/\".join(components[:i])\n name=\"/\".join(components[i:])\n if len(prefix.encode(encoding,errors))<=LENGTH_PREFIX and\\\n len(name.encode(encoding,errors))<=LENGTH_NAME:\n break\n else:\n raise ValueError(\"name is too long\")\n \n return prefix,name\n \n @staticmethod\n def _create_header(info,format,encoding,errors):\n ''\n\n \n has_device_fields=info.get(\"type\")in(CHRTYPE,BLKTYPE)\n if has_device_fields:\n devmajor=itn(info.get(\"devmajor\",0),8,format)\n devminor=itn(info.get(\"devminor\",0),8,format)\n else:\n devmajor=stn(\"\",8,encoding,errors)\n devminor=stn(\"\",8,encoding,errors)\n \n \n \n filetype=info.get(\"type\",REGTYPE)\n if filetype is None:\n raise ValueError(\"TarInfo.type must not be None\")\n \n parts=[\n stn(info.get(\"name\",\"\"),100,encoding,errors),\n itn(info.get(\"mode\",0)&0o7777,8,format),\n itn(info.get(\"uid\",0),8,format),\n itn(info.get(\"gid\",0),8,format),\n itn(info.get(\"size\",0),12,format),\n itn(info.get(\"mtime\",0),12,format),\n b\" \",\n filetype,\n stn(info.get(\"linkname\",\"\"),100,encoding,errors),\n info.get(\"magic\",POSIX_MAGIC),\n stn(info.get(\"uname\",\"\"),32,encoding,errors),\n stn(info.get(\"gname\",\"\"),32,encoding,errors),\n devmajor,\n devminor,\n stn(info.get(\"prefix\",\"\"),155,encoding,errors)\n ]\n \n buf=struct.pack(\"%ds\"%BLOCKSIZE,b\"\".join(parts))\n chksum=calc_chksums(buf[-BLOCKSIZE:])[0]\n buf=buf[:-364]+bytes(\"%06o\\0\"%chksum,\"ascii\")+buf[-357:]\n return buf\n \n @staticmethod\n def _create_payload(payload):\n ''\n\n \n blocks,remainder=divmod(len(payload),BLOCKSIZE)\n if remainder >0:\n payload +=(BLOCKSIZE -remainder)*NUL\n return payload\n \n @classmethod\n def _create_gnu_long_header(cls,name,type,encoding,errors):\n ''\n\n \n name=name.encode(encoding,errors)+NUL\n \n info={}\n info[\"name\"]=\"././@LongLink\"\n info[\"type\"]=type\n info[\"size\"]=len(name)\n info[\"magic\"]=GNU_MAGIC\n \n \n return cls._create_header(info,USTAR_FORMAT,encoding,errors)+\\\n cls._create_payload(name)\n \n @classmethod\n def _create_pax_generic_header(cls,pax_headers,type,encoding):\n ''\n\n\n \n \n \n binary=False\n for keyword,value in pax_headers.items():\n try:\n value.encode(\"utf-8\",\"strict\")\n except UnicodeEncodeError:\n binary=True\n break\n \n records=b\"\"\n if binary:\n \n records +=b\"21 hdrcharset=BINARY\\n\"\n \n for keyword,value in pax_headers.items():\n keyword=keyword.encode(\"utf-8\")\n if binary:\n \n \n value=value.encode(encoding,\"surrogateescape\")\n else:\n value=value.encode(\"utf-8\")\n \n l=len(keyword)+len(value)+3\n n=p=0\n while True:\n n=l+len(str(p))\n if n ==p:\n break\n p=n\n records +=bytes(str(p),\"ascii\")+b\" 
\"+keyword+b\"=\"+value+b\"\\n\"\n \n \n \n info={}\n info[\"name\"]=\"././@PaxHeader\"\n info[\"type\"]=type\n info[\"size\"]=len(records)\n info[\"magic\"]=POSIX_MAGIC\n \n \n return cls._create_header(info,USTAR_FORMAT,\"ascii\",\"replace\")+\\\n cls._create_payload(records)\n \n @classmethod\n def frombuf(cls,buf,encoding,errors):\n ''\n \n if len(buf)==0:\n raise EmptyHeaderError(\"empty header\")\n if len(buf)!=BLOCKSIZE:\n raise TruncatedHeaderError(\"truncated header\")\n if buf.count(NUL)==BLOCKSIZE:\n raise EOFHeaderError(\"end of file header\")\n \n chksum=nti(buf[148:156])\n if chksum not in calc_chksums(buf):\n raise InvalidHeaderError(\"bad checksum\")\n \n obj=cls()\n obj.name=nts(buf[0:100],encoding,errors)\n obj.mode=nti(buf[100:108])\n obj.uid=nti(buf[108:116])\n obj.gid=nti(buf[116:124])\n obj.size=nti(buf[124:136])\n obj.mtime=nti(buf[136:148])\n obj.chksum=chksum\n obj.type=buf[156:157]\n obj.linkname=nts(buf[157:257],encoding,errors)\n obj.uname=nts(buf[265:297],encoding,errors)\n obj.gname=nts(buf[297:329],encoding,errors)\n obj.devmajor=nti(buf[329:337])\n obj.devminor=nti(buf[337:345])\n prefix=nts(buf[345:500],encoding,errors)\n \n \n \n if obj.type ==AREGTYPE and obj.name.endswith(\"/\"):\n obj.type=DIRTYPE\n \n \n \n \n if obj.type ==GNUTYPE_SPARSE:\n pos=386\n structs=[]\n for i in range(4):\n try:\n offset=nti(buf[pos:pos+12])\n numbytes=nti(buf[pos+12:pos+24])\n except ValueError:\n break\n structs.append((offset,numbytes))\n pos +=24\n isextended=bool(buf[482])\n origsize=nti(buf[483:495])\n obj._sparse_structs=(structs,isextended,origsize)\n \n \n if obj.isdir():\n obj.name=obj.name.rstrip(\"/\")\n \n \n if prefix and obj.type not in GNU_TYPES:\n obj.name=prefix+\"/\"+obj.name\n return obj\n \n @classmethod\n def fromtarfile(cls,tarfile):\n ''\n\n \n buf=tarfile.fileobj.read(BLOCKSIZE)\n obj=cls.frombuf(buf,tarfile.encoding,tarfile.errors)\n obj.offset=tarfile.fileobj.tell()-BLOCKSIZE\n return obj._proc_member(tarfile)\n \n \n \n \n \n \n \n \n \n \n \n \n def _proc_member(self,tarfile):\n ''\n\n \n if self.type in(GNUTYPE_LONGNAME,GNUTYPE_LONGLINK):\n return self._proc_gnulong(tarfile)\n elif self.type ==GNUTYPE_SPARSE:\n return self._proc_sparse(tarfile)\n elif self.type in(XHDTYPE,XGLTYPE,SOLARIS_XHDTYPE):\n return self._proc_pax(tarfile)\n else:\n return self._proc_builtin(tarfile)\n \n def _proc_builtin(self,tarfile):\n ''\n\n \n self.offset_data=tarfile.fileobj.tell()\n offset=self.offset_data\n if self.isreg()or self.type not in SUPPORTED_TYPES:\n \n offset +=self._block(self.size)\n tarfile.offset=offset\n \n \n \n self._apply_pax_info(tarfile.pax_headers,tarfile.encoding,tarfile.errors)\n \n \n \n if self.isdir():\n self.name=self.name.rstrip(\"/\")\n \n return self\n \n def _proc_gnulong(self,tarfile):\n ''\n\n \n buf=tarfile.fileobj.read(self._block(self.size))\n \n \n try:\n next=self.fromtarfile(tarfile)\n except HeaderError as e:\n raise SubsequentHeaderError(str(e))from None\n \n \n \n next.offset=self.offset\n if self.type ==GNUTYPE_LONGNAME:\n next.name=nts(buf,tarfile.encoding,tarfile.errors)\n elif self.type ==GNUTYPE_LONGLINK:\n next.linkname=nts(buf,tarfile.encoding,tarfile.errors)\n \n \n \n if next.isdir():\n next.name=next.name.removesuffix(\"/\")\n \n return next\n \n def _proc_sparse(self,tarfile):\n ''\n \n \n structs,isextended,origsize=self._sparse_structs\n del self._sparse_structs\n \n \n while isextended:\n buf=tarfile.fileobj.read(BLOCKSIZE)\n pos=0\n for i in range(21):\n try:\n offset=nti(buf[pos:pos+12])\n 
numbytes=nti(buf[pos+12:pos+24])\n except ValueError:\n break\n if offset and numbytes:\n structs.append((offset,numbytes))\n pos +=24\n isextended=bool(buf[504])\n self.sparse=structs\n \n self.offset_data=tarfile.fileobj.tell()\n tarfile.offset=self.offset_data+self._block(self.size)\n self.size=origsize\n return self\n \n def _proc_pax(self,tarfile):\n ''\n\n \n \n buf=tarfile.fileobj.read(self._block(self.size))\n \n \n \n \n if self.type ==XGLTYPE:\n pax_headers=tarfile.pax_headers\n else:\n pax_headers=tarfile.pax_headers.copy()\n \n \n \n \n \n \n match=re.search(br\"\\d+ hdrcharset=([^\\n]+)\\n\",buf)\n if match is not None:\n pax_headers[\"hdrcharset\"]=match.group(1).decode(\"utf-8\")\n \n \n \n \n hdrcharset=pax_headers.get(\"hdrcharset\")\n if hdrcharset ==\"BINARY\":\n encoding=tarfile.encoding\n else:\n encoding=\"utf-8\"\n \n \n \n \n \n regex=re.compile(br\"(\\d+) ([^=]+)=\")\n pos=0\n while match :=regex.match(buf,pos):\n length,keyword=match.groups()\n length=int(length)\n if length ==0:\n raise InvalidHeaderError(\"invalid header\")\n value=buf[match.end(2)+1:match.start(1)+length -1]\n \n \n \n \n \n \n \n \n keyword=self._decode_pax_field(keyword,\"utf-8\",\"utf-8\",\n tarfile.errors)\n if keyword in PAX_NAME_FIELDS:\n value=self._decode_pax_field(value,encoding,tarfile.encoding,\n tarfile.errors)\n else:\n value=self._decode_pax_field(value,\"utf-8\",\"utf-8\",\n tarfile.errors)\n \n pax_headers[keyword]=value\n pos +=length\n \n \n try:\n next=self.fromtarfile(tarfile)\n except HeaderError as e:\n raise SubsequentHeaderError(str(e))from None\n \n \n if \"GNU.sparse.map\"in pax_headers:\n \n self._proc_gnusparse_01(next,pax_headers)\n \n elif \"GNU.sparse.size\"in pax_headers:\n \n self._proc_gnusparse_00(next,pax_headers,buf)\n \n elif pax_headers.get(\"GNU.sparse.major\")==\"1\"and pax_headers.get(\"GNU.sparse.minor\")==\"0\":\n \n self._proc_gnusparse_10(next,pax_headers,tarfile)\n \n if self.type in(XHDTYPE,SOLARIS_XHDTYPE):\n \n next._apply_pax_info(pax_headers,tarfile.encoding,tarfile.errors)\n next.offset=self.offset\n \n if \"size\"in pax_headers:\n \n \n \n offset=next.offset_data\n if next.isreg()or next.type not in SUPPORTED_TYPES:\n offset +=next._block(next.size)\n tarfile.offset=offset\n \n return next\n \n def _proc_gnusparse_00(self,next,pax_headers,buf):\n ''\n \n offsets=[]\n for match in re.finditer(br\"\\d+ GNU.sparse.offset=(\\d+)\\n\",buf):\n offsets.append(int(match.group(1)))\n numbytes=[]\n for match in re.finditer(br\"\\d+ GNU.sparse.numbytes=(\\d+)\\n\",buf):\n numbytes.append(int(match.group(1)))\n next.sparse=list(zip(offsets,numbytes))\n \n def _proc_gnusparse_01(self,next,pax_headers):\n ''\n \n sparse=[int(x)for x in pax_headers[\"GNU.sparse.map\"].split(\",\")]\n next.sparse=list(zip(sparse[::2],sparse[1::2]))\n \n def _proc_gnusparse_10(self,next,pax_headers,tarfile):\n ''\n \n fields=None\n sparse=[]\n buf=tarfile.fileobj.read(BLOCKSIZE)\n fields,buf=buf.split(b\"\\n\",1)\n fields=int(fields)\n while len(sparse)0:\n self.fileobj.write(NUL *(RECORDSIZE -remainder))\n finally:\n if not self._extfileobj:\n self.fileobj.close()\n \n def getmember(self,name):\n ''\n\n\n\n \n tarinfo=self._getmember(name.rstrip('/'))\n if tarinfo is None:\n raise KeyError(\"filename %r not found\"%name)\n return tarinfo\n \n def getmembers(self):\n ''\n\n \n self._check()\n if not self._loaded:\n self._load()\n \n return self.members\n \n def getnames(self):\n ''\n\n \n return[tarinfo.name for tarinfo in self.getmembers()]\n \n def 
gettarinfo(self,name=None,arcname=None,fileobj=None):\n ''\n\n\n\n\n\n\n \n self._check(\"awx\")\n \n \n \n if fileobj is not None:\n name=fileobj.name\n \n \n \n \n if arcname is None:\n arcname=name\n drv,arcname=os.path.splitdrive(arcname)\n arcname=arcname.replace(os.sep,\"/\")\n arcname=arcname.lstrip(\"/\")\n \n \n \n tarinfo=self.tarinfo()\n tarinfo.tarfile=self\n \n \n if fileobj is None:\n if not self.dereference:\n statres=os.lstat(name)\n else:\n statres=os.stat(name)\n else:\n statres=os.fstat(fileobj.fileno())\n linkname=\"\"\n \n stmd=statres.st_mode\n if stat.S_ISREG(stmd):\n inode=(statres.st_ino,statres.st_dev)\n if not self.dereference and statres.st_nlink >1 and\\\n inode in self.inodes and arcname !=self.inodes[inode]:\n \n \n type=LNKTYPE\n linkname=self.inodes[inode]\n else:\n \n \n type=REGTYPE\n if inode[0]:\n self.inodes[inode]=arcname\n elif stat.S_ISDIR(stmd):\n type=DIRTYPE\n elif stat.S_ISFIFO(stmd):\n type=FIFOTYPE\n elif stat.S_ISLNK(stmd):\n type=SYMTYPE\n linkname=os.readlink(name)\n elif stat.S_ISCHR(stmd):\n type=CHRTYPE\n elif stat.S_ISBLK(stmd):\n type=BLKTYPE\n else:\n return None\n \n \n \n tarinfo.name=arcname\n tarinfo.mode=stmd\n tarinfo.uid=statres.st_uid\n tarinfo.gid=statres.st_gid\n if type ==REGTYPE:\n tarinfo.size=statres.st_size\n else:\n tarinfo.size=0\n tarinfo.mtime=statres.st_mtime\n tarinfo.type=type\n tarinfo.linkname=linkname\n if pwd:\n try:\n tarinfo.uname=pwd.getpwuid(tarinfo.uid)[0]\n except KeyError:\n pass\n if grp:\n try:\n tarinfo.gname=grp.getgrgid(tarinfo.gid)[0]\n except KeyError:\n pass\n \n if type in(CHRTYPE,BLKTYPE):\n if hasattr(os,\"major\")and hasattr(os,\"minor\"):\n tarinfo.devmajor=os.major(statres.st_rdev)\n tarinfo.devminor=os.minor(statres.st_rdev)\n return tarinfo\n \n def list(self,verbose=True,*,members=None):\n ''\n\n\n\n \n self._check()\n \n if members is None:\n members=self\n for tarinfo in members:\n if verbose:\n if tarinfo.mode is None:\n _safe_print(\"??????????\")\n else:\n _safe_print(stat.filemode(tarinfo.mode))\n _safe_print(\"%s/%s\"%(tarinfo.uname or tarinfo.uid,\n tarinfo.gname or tarinfo.gid))\n if tarinfo.ischr()or tarinfo.isblk():\n _safe_print(\"%10s\"%\n (\"%d,%d\"%(tarinfo.devmajor,tarinfo.devminor)))\n else:\n _safe_print(\"%10d\"%tarinfo.size)\n if tarinfo.mtime is None:\n _safe_print(\"????-??-?? 
??:??:??\")\n else:\n _safe_print(\"%d-%02d-%02d %02d:%02d:%02d\"\\\n %time.localtime(tarinfo.mtime)[:6])\n \n _safe_print(tarinfo.name+(\"/\"if tarinfo.isdir()else \"\"))\n \n if verbose:\n if tarinfo.issym():\n _safe_print(\"-> \"+tarinfo.linkname)\n if tarinfo.islnk():\n _safe_print(\"link to \"+tarinfo.linkname)\n print()\n \n def add(self,name,arcname=None,recursive=True,*,filter=None):\n ''\n\n\n\n\n\n\n\n \n self._check(\"awx\")\n \n if arcname is None:\n arcname=name\n \n \n if self.name is not None and os.path.abspath(name)==self.name:\n self._dbg(2,\"tarfile: Skipped %r\"%name)\n return\n \n self._dbg(1,name)\n \n \n tarinfo=self.gettarinfo(name,arcname)\n \n if tarinfo is None:\n self._dbg(1,\"tarfile: Unsupported type %r\"%name)\n return\n \n \n if filter is not None:\n tarinfo=filter(tarinfo)\n if tarinfo is None:\n self._dbg(2,\"tarfile: Excluded %r\"%name)\n return\n \n \n if tarinfo.isreg():\n with bltn_open(name,\"rb\")as f:\n self.addfile(tarinfo,f)\n \n elif tarinfo.isdir():\n self.addfile(tarinfo)\n if recursive:\n for f in sorted(os.listdir(name)):\n self.add(os.path.join(name,f),os.path.join(arcname,f),\n recursive,filter=filter)\n \n else:\n self.addfile(tarinfo)\n \n def addfile(self,tarinfo,fileobj=None):\n ''\n\n\n\n \n self._check(\"awx\")\n \n tarinfo=copy.copy(tarinfo)\n \n buf=tarinfo.tobuf(self.format,self.encoding,self.errors)\n self.fileobj.write(buf)\n self.offset +=len(buf)\n bufsize=self.copybufsize\n \n if fileobj is not None:\n copyfileobj(fileobj,self.fileobj,tarinfo.size,bufsize=bufsize)\n blocks,remainder=divmod(tarinfo.size,BLOCKSIZE)\n if remainder >0:\n self.fileobj.write(NUL *(BLOCKSIZE -remainder))\n blocks +=1\n self.offset +=blocks *BLOCKSIZE\n \n self.members.append(tarinfo)\n \n def _get_filter_function(self,filter):\n if filter is None:\n filter=self.extraction_filter\n if filter is None:\n warnings.warn(\n 'Python 3.14 will, by default, filter extracted tar '\n +'archives and reject files or modify their metadata. '\n +'Use the filter argument to control this behavior.',\n DeprecationWarning)\n return fully_trusted_filter\n if isinstance(filter,str):\n raise TypeError(\n 'String names are not supported for '\n +'TarFile.extraction_filter. 
Use a function such as '\n +'tarfile.data_filter directly.')\n return filter\n if callable(filter):\n return filter\n try:\n return _NAMED_FILTERS[filter]\n except KeyError:\n raise ValueError(f\"filter {filter !r} not found\")from None\n \n def extractall(self,path=\".\",members=None,*,numeric_owner=False,\n filter=None):\n ''\n\n\n\n\n\n\n\n\n\n\n \n directories=[]\n \n filter_function=self._get_filter_function(filter)\n if members is None:\n members=self\n \n for member in members:\n tarinfo=self._get_extract_tarinfo(member,filter_function,path)\n if tarinfo is None:\n continue\n if tarinfo.isdir():\n \n \n \n directories.append(tarinfo)\n self._extract_one(tarinfo,path,set_attrs=not tarinfo.isdir(),\n numeric_owner=numeric_owner)\n \n \n directories.sort(key=lambda a:a.name,reverse=True)\n \n \n for tarinfo in directories:\n dirpath=os.path.join(path,tarinfo.name)\n try:\n self.chown(tarinfo,dirpath,numeric_owner=numeric_owner)\n self.utime(tarinfo,dirpath)\n self.chmod(tarinfo,dirpath)\n except ExtractError as e:\n self._handle_nonfatal_error(e)\n \n def extract(self,member,path=\"\",set_attrs=True,*,numeric_owner=False,\n filter=None):\n ''\n\n\n\n\n\n\n\n\n\n\n \n filter_function=self._get_filter_function(filter)\n tarinfo=self._get_extract_tarinfo(member,filter_function,path)\n if tarinfo is not None:\n self._extract_one(tarinfo,path,set_attrs,numeric_owner)\n \n def _get_extract_tarinfo(self,member,filter_function,path):\n ''\n if isinstance(member,str):\n tarinfo=self.getmember(member)\n else:\n tarinfo=member\n \n unfiltered=tarinfo\n try:\n tarinfo=filter_function(tarinfo,path)\n except(OSError,FilterError)as e:\n self._handle_fatal_error(e)\n except ExtractError as e:\n self._handle_nonfatal_error(e)\n if tarinfo is None:\n self._dbg(2,\"tarfile: Excluded %r\"%unfiltered.name)\n return None\n \n if tarinfo.islnk():\n tarinfo=copy.copy(tarinfo)\n tarinfo._link_target=os.path.join(path,tarinfo.linkname)\n return tarinfo\n \n def _extract_one(self,tarinfo,path,set_attrs,numeric_owner):\n ''\n self._check(\"r\")\n \n try:\n self._extract_member(tarinfo,os.path.join(path,tarinfo.name),\n set_attrs=set_attrs,\n numeric_owner=numeric_owner)\n except OSError as e:\n self._handle_fatal_error(e)\n except ExtractError as e:\n self._handle_nonfatal_error(e)\n \n def _handle_nonfatal_error(self,e):\n ''\n if self.errorlevel >1:\n raise\n else:\n self._dbg(1,\"tarfile: %s\"%e)\n \n def _handle_fatal_error(self,e):\n ''\n if self.errorlevel >0:\n raise\n elif isinstance(e,OSError):\n if e.filename is None:\n self._dbg(1,\"tarfile: %s\"%e.strerror)\n else:\n self._dbg(1,\"tarfile: %s %r\"%(e.strerror,e.filename))\n else:\n self._dbg(1,\"tarfile: %s %s\"%(type(e).__name__,e))\n \n def extractfile(self,member):\n ''\n\n\n\n\n \n self._check(\"r\")\n \n if isinstance(member,str):\n tarinfo=self.getmember(member)\n else:\n tarinfo=member\n \n if tarinfo.isreg()or tarinfo.type not in SUPPORTED_TYPES:\n \n return self.fileobject(self,tarinfo)\n \n elif tarinfo.islnk()or tarinfo.issym():\n if isinstance(self.fileobj,_Stream):\n \n \n \n raise StreamError(\"cannot extract (sym)link as file object\")\n else:\n \n return self.extractfile(self._find_link_target(tarinfo))\n else:\n \n \n return None\n \n def _extract_member(self,tarinfo,targetpath,set_attrs=True,\n numeric_owner=False):\n ''\n\n \n \n \n \n targetpath=targetpath.rstrip(\"/\")\n targetpath=targetpath.replace(\"/\",os.sep)\n \n \n upperdirs=os.path.dirname(targetpath)\n if upperdirs and not os.path.exists(upperdirs):\n \n \n 
os.makedirs(upperdirs)\n \n if tarinfo.islnk()or tarinfo.issym():\n self._dbg(1,\"%s -> %s\"%(tarinfo.name,tarinfo.linkname))\n else:\n self._dbg(1,tarinfo.name)\n \n if tarinfo.isreg():\n self.makefile(tarinfo,targetpath)\n elif tarinfo.isdir():\n self.makedir(tarinfo,targetpath)\n elif tarinfo.isfifo():\n self.makefifo(tarinfo,targetpath)\n elif tarinfo.ischr()or tarinfo.isblk():\n self.makedev(tarinfo,targetpath)\n elif tarinfo.islnk()or tarinfo.issym():\n self.makelink(tarinfo,targetpath)\n elif tarinfo.type not in SUPPORTED_TYPES:\n self.makeunknown(tarinfo,targetpath)\n else:\n self.makefile(tarinfo,targetpath)\n \n if set_attrs:\n self.chown(tarinfo,targetpath,numeric_owner)\n if not tarinfo.issym():\n self.chmod(tarinfo,targetpath)\n self.utime(tarinfo,targetpath)\n \n \n \n \n \n \n def makedir(self,tarinfo,targetpath):\n ''\n \n try:\n if tarinfo.mode is None:\n \n os.mkdir(targetpath)\n else:\n \n \n os.mkdir(targetpath,0o700)\n except FileExistsError:\n pass\n \n def makefile(self,tarinfo,targetpath):\n ''\n \n source=self.fileobj\n source.seek(tarinfo.offset_data)\n bufsize=self.copybufsize\n with bltn_open(targetpath,\"wb\")as target:\n if tarinfo.sparse is not None:\n for offset,size in tarinfo.sparse:\n target.seek(offset)\n copyfileobj(source,target,size,ReadError,bufsize)\n target.seek(tarinfo.size)\n target.truncate()\n else:\n copyfileobj(source,target,tarinfo.size,ReadError,bufsize)\n \n def makeunknown(self,tarinfo,targetpath):\n ''\n\n \n self.makefile(tarinfo,targetpath)\n self._dbg(1,\"tarfile: Unknown file type %r, \"\\\n \"extracted as regular file.\"%tarinfo.type)\n \n def makefifo(self,tarinfo,targetpath):\n ''\n \n if hasattr(os,\"mkfifo\"):\n os.mkfifo(targetpath)\n else:\n raise ExtractError(\"fifo not supported by system\")\n \n def makedev(self,tarinfo,targetpath):\n ''\n \n if not hasattr(os,\"mknod\")or not hasattr(os,\"makedev\"):\n raise ExtractError(\"special devices not supported by system\")\n \n mode=tarinfo.mode\n if mode is None:\n \n mode=0o600\n if tarinfo.isblk():\n mode |=stat.S_IFBLK\n else:\n mode |=stat.S_IFCHR\n \n os.mknod(targetpath,mode,\n os.makedev(tarinfo.devmajor,tarinfo.devminor))\n \n def makelink(self,tarinfo,targetpath):\n ''\n\n\n \n try:\n \n if tarinfo.issym():\n if os.path.lexists(targetpath):\n \n os.unlink(targetpath)\n os.symlink(tarinfo.linkname,targetpath)\n else:\n if os.path.exists(tarinfo._link_target):\n os.link(tarinfo._link_target,targetpath)\n else:\n self._extract_member(self._find_link_target(tarinfo),\n targetpath)\n except symlink_exception:\n try:\n self._extract_member(self._find_link_target(tarinfo),\n targetpath)\n except KeyError:\n raise ExtractError(\"unable to resolve link inside archive\")from None\n \n def chown(self,tarinfo,targetpath,numeric_owner):\n ''\n\n\n\n \n if hasattr(os,\"geteuid\")and os.geteuid()==0:\n \n g=tarinfo.gid\n u=tarinfo.uid\n if not numeric_owner:\n try:\n if grp and tarinfo.gname:\n g=grp.getgrnam(tarinfo.gname)[2]\n except KeyError:\n pass\n try:\n if pwd and tarinfo.uname:\n u=pwd.getpwnam(tarinfo.uname)[2]\n except KeyError:\n pass\n if g is None:\n g=-1\n if u is None:\n u=-1\n try:\n if tarinfo.issym()and hasattr(os,\"lchown\"):\n os.lchown(targetpath,u,g)\n else:\n os.chown(targetpath,u,g)\n except OSError as e:\n raise ExtractError(\"could not change owner\")from e\n \n def chmod(self,tarinfo,targetpath):\n ''\n \n if tarinfo.mode is None:\n return\n try:\n os.chmod(targetpath,tarinfo.mode)\n except OSError as e:\n raise ExtractError(\"could not change mode\")from 
e\n \n def utime(self,tarinfo,targetpath):\n ''\n \n mtime=tarinfo.mtime\n if mtime is None:\n return\n if not hasattr(os,'utime'):\n return\n try:\n os.utime(targetpath,(mtime,mtime))\n except OSError as e:\n raise ExtractError(\"could not change modification time\")from e\n \n \n def next(self):\n ''\n\n\n \n self._check(\"ra\")\n if self.firstmember is not None:\n m=self.firstmember\n self.firstmember=None\n return m\n \n \n if self.offset !=self.fileobj.tell():\n if self.offset ==0:\n return None\n self.fileobj.seek(self.offset -1)\n if not self.fileobj.read(1):\n raise ReadError(\"unexpected end of data\")\n \n \n tarinfo=None\n while True:\n try:\n tarinfo=self.tarinfo.fromtarfile(self)\n except EOFHeaderError as e:\n if self.ignore_zeros:\n self._dbg(2,\"0x%X: %s\"%(self.offset,e))\n self.offset +=BLOCKSIZE\n continue\n except InvalidHeaderError as e:\n if self.ignore_zeros:\n self._dbg(2,\"0x%X: %s\"%(self.offset,e))\n self.offset +=BLOCKSIZE\n continue\n elif self.offset ==0:\n raise ReadError(str(e))from None\n except EmptyHeaderError:\n if self.offset ==0:\n raise ReadError(\"empty file\")from None\n except TruncatedHeaderError as e:\n if self.offset ==0:\n raise ReadError(str(e))from None\n except SubsequentHeaderError as e:\n raise ReadError(str(e))from None\n except Exception as e:\n try:\n import zlib\n if isinstance(e,zlib.error):\n raise ReadError(f'zlib error: {e}')from None\n else:\n raise e\n except ImportError:\n raise e\n break\n \n if tarinfo is not None:\n self.members.append(tarinfo)\n else:\n self._loaded=True\n \n return tarinfo\n \n \n \n \n def _getmember(self,name,tarinfo=None,normalize=False):\n ''\n\n \n \n members=self.getmembers()\n \n \n skipping=False\n if tarinfo is not None:\n try:\n index=members.index(tarinfo)\n except ValueError:\n \n \n skipping=True\n else:\n \n members=members[:index]\n \n if normalize:\n name=os.path.normpath(name)\n \n for member in reversed(members):\n if skipping:\n if tarinfo.offset ==member.offset:\n skipping=False\n continue\n if normalize:\n member_name=os.path.normpath(member.name)\n else:\n member_name=member.name\n \n if name ==member_name:\n return member\n \n if skipping:\n \n raise ValueError(tarinfo)\n \n def _load(self):\n ''\n\n \n while self.next()is not None:\n pass\n self._loaded=True\n \n def _check(self,mode=None):\n ''\n\n \n if self.closed:\n raise OSError(\"%s is closed\"%self.__class__.__name__)\n if mode is not None and self.mode not in mode:\n raise OSError(\"bad operation for mode %r\"%self.mode)\n \n def _find_link_target(self,tarinfo):\n ''\n\n \n if tarinfo.issym():\n \n linkname=\"/\".join(filter(None,(os.path.dirname(tarinfo.name),tarinfo.linkname)))\n limit=None\n else:\n \n \n linkname=tarinfo.linkname\n limit=tarinfo\n \n member=self._getmember(linkname,tarinfo=limit,normalize=True)\n if member is None:\n raise KeyError(\"linkname %r not found\"%linkname)\n return member\n \n def __iter__(self):\n ''\n \n if self._loaded:\n yield from self.members\n return\n \n \n \n index=0\n \n \n \n if self.firstmember is not None:\n tarinfo=self.next()\n index +=1\n yield tarinfo\n \n while True:\n if index ',''),\n help='Extract tarfile into target dir')\n group.add_argument('-c','--create',nargs='+',\n metavar=('',''),\n help='Create tarfile from sources')\n group.add_argument('-t','--test',metavar='',\n help='Test if a tarfile is valid')\n \n args=parser.parse_args()\n \n if args.filter and args.extract is None:\n parser.exit(1,'--filter is only valid for extraction\\n')\n \n if args.test is not None:\n 
src=args.test\n if is_tarfile(src):\n with open(src,'r')as tar:\n tar.getmembers()\n print(tar.getmembers(),file=sys.stderr)\n if args.verbose:\n print('{!r} is a tar archive.'.format(src))\n else:\n parser.exit(1,'{!r} is not a tar archive.\\n'.format(src))\n \n elif args.list is not None:\n src=args.list\n if is_tarfile(src):\n with TarFile.open(src,'r:*')as tf:\n tf.list(verbose=args.verbose)\n else:\n parser.exit(1,'{!r} is not a tar archive.\\n'.format(src))\n \n elif args.extract is not None:\n if len(args.extract)==1:\n src=args.extract[0]\n curdir=os.curdir\n elif len(args.extract)==2:\n src,curdir=args.extract\n else:\n parser.exit(1,parser.format_help())\n \n if is_tarfile(src):\n with TarFile.open(src,'r:*')as tf:\n tf.extractall(path=curdir,filter=args.filter)\n if args.verbose:\n if curdir =='.':\n msg='{!r} file is extracted.'.format(src)\n else:\n msg=('{!r} file is extracted '\n 'into {!r} directory.').format(src,curdir)\n print(msg)\n else:\n parser.exit(1,'{!r} is not a tar archive.\\n'.format(src))\n \n elif args.create is not None:\n tar_name=args.create.pop(0)\n _,ext=os.path.splitext(tar_name)\n compressions={\n \n '.gz':'gz',\n '.tgz':'gz',\n \n '.xz':'xz',\n '.txz':'xz',\n \n '.bz2':'bz2',\n '.tbz':'bz2',\n '.tbz2':'bz2',\n '.tb2':'bz2',\n }\n tar_mode='w:'+compressions[ext]if ext in compressions else 'w'\n tar_files=args.create\n \n with TarFile.open(tar_name,tar_mode)as tf:\n for file_name in tar_files:\n tf.add(file_name)\n \n if args.verbose:\n print('{!r} file created.'.format(tar_name))\n \nif __name__ =='__main__':\n main()\n", ["argparse", "builtins", "bz2", "copy", "grp", "gzip", "io", "lzma", "os", "pwd", "re", "shutil", "stat", "struct", "sys", "time", "warnings", "zlib"]], "stringprep": [".py", "\n''\n\n\n\n\n\nfrom unicodedata import ucd_3_2_0 as unicodedata\n\nassert unicodedata.unidata_version =='3.2.0'\n\ndef in_table_a1(code):\n if unicodedata.category(code)!='Cn':return False\n c=ord(code)\n if 0xFDD0 <=c <0xFDF0:return False\n return (c&0xFFFF)not in (0xFFFE,0xFFFF)\n \n \nb1_set=set([173,847,6150,6155,6156,6157,8203,8204,8205,8288,65279]+list(range(65024,65040)))\ndef in_table_b1(code):\n return ord(code)in b1_set\n \n \nb3_exceptions={\n0xb5:'\\u03bc',0xdf:'ss',0x130:'i\\u0307',0x149:'\\u02bcn',\n0x17f:'s',0x1f0:'j\\u030c',0x345:'\\u03b9',0x37a:' 
\\u03b9',\n0x390:'\\u03b9\\u0308\\u0301',0x3b0:'\\u03c5\\u0308\\u0301',0x3c2:'\\u03c3',0x3d0:'\\u03b2',\n0x3d1:'\\u03b8',0x3d2:'\\u03c5',0x3d3:'\\u03cd',0x3d4:'\\u03cb',\n0x3d5:'\\u03c6',0x3d6:'\\u03c0',0x3f0:'\\u03ba',0x3f1:'\\u03c1',\n0x3f2:'\\u03c3',0x3f5:'\\u03b5',0x587:'\\u0565\\u0582',0x1e96:'h\\u0331',\n0x1e97:'t\\u0308',0x1e98:'w\\u030a',0x1e99:'y\\u030a',0x1e9a:'a\\u02be',\n0x1e9b:'\\u1e61',0x1f50:'\\u03c5\\u0313',0x1f52:'\\u03c5\\u0313\\u0300',0x1f54:'\\u03c5\\u0313\\u0301',\n0x1f56:'\\u03c5\\u0313\\u0342',0x1f80:'\\u1f00\\u03b9',0x1f81:'\\u1f01\\u03b9',0x1f82:'\\u1f02\\u03b9',\n0x1f83:'\\u1f03\\u03b9',0x1f84:'\\u1f04\\u03b9',0x1f85:'\\u1f05\\u03b9',0x1f86:'\\u1f06\\u03b9',\n0x1f87:'\\u1f07\\u03b9',0x1f88:'\\u1f00\\u03b9',0x1f89:'\\u1f01\\u03b9',0x1f8a:'\\u1f02\\u03b9',\n0x1f8b:'\\u1f03\\u03b9',0x1f8c:'\\u1f04\\u03b9',0x1f8d:'\\u1f05\\u03b9',0x1f8e:'\\u1f06\\u03b9',\n0x1f8f:'\\u1f07\\u03b9',0x1f90:'\\u1f20\\u03b9',0x1f91:'\\u1f21\\u03b9',0x1f92:'\\u1f22\\u03b9',\n0x1f93:'\\u1f23\\u03b9',0x1f94:'\\u1f24\\u03b9',0x1f95:'\\u1f25\\u03b9',0x1f96:'\\u1f26\\u03b9',\n0x1f97:'\\u1f27\\u03b9',0x1f98:'\\u1f20\\u03b9',0x1f99:'\\u1f21\\u03b9',0x1f9a:'\\u1f22\\u03b9',\n0x1f9b:'\\u1f23\\u03b9',0x1f9c:'\\u1f24\\u03b9',0x1f9d:'\\u1f25\\u03b9',0x1f9e:'\\u1f26\\u03b9',\n0x1f9f:'\\u1f27\\u03b9',0x1fa0:'\\u1f60\\u03b9',0x1fa1:'\\u1f61\\u03b9',0x1fa2:'\\u1f62\\u03b9',\n0x1fa3:'\\u1f63\\u03b9',0x1fa4:'\\u1f64\\u03b9',0x1fa5:'\\u1f65\\u03b9',0x1fa6:'\\u1f66\\u03b9',\n0x1fa7:'\\u1f67\\u03b9',0x1fa8:'\\u1f60\\u03b9',0x1fa9:'\\u1f61\\u03b9',0x1faa:'\\u1f62\\u03b9',\n0x1fab:'\\u1f63\\u03b9',0x1fac:'\\u1f64\\u03b9',0x1fad:'\\u1f65\\u03b9',0x1fae:'\\u1f66\\u03b9',\n0x1faf:'\\u1f67\\u03b9',0x1fb2:'\\u1f70\\u03b9',0x1fb3:'\\u03b1\\u03b9',0x1fb4:'\\u03ac\\u03b9',\n0x1fb6:'\\u03b1\\u0342',0x1fb7:'\\u03b1\\u0342\\u03b9',0x1fbc:'\\u03b1\\u03b9',0x1fbe:'\\u03b9',\n0x1fc2:'\\u1f74\\u03b9',0x1fc3:'\\u03b7\\u03b9',0x1fc4:'\\u03ae\\u03b9',0x1fc6:'\\u03b7\\u0342',\n0x1fc7:'\\u03b7\\u0342\\u03b9',0x1fcc:'\\u03b7\\u03b9',0x1fd2:'\\u03b9\\u0308\\u0300',0x1fd3:'\\u03b9\\u0308\\u0301',\n0x1fd6:'\\u03b9\\u0342',0x1fd7:'\\u03b9\\u0308\\u0342',0x1fe2:'\\u03c5\\u0308\\u0300',0x1fe3:'\\u03c5\\u0308\\u0301',\n0x1fe4:'\\u03c1\\u0313',0x1fe6:'\\u03c5\\u0342',0x1fe7:'\\u03c5\\u0308\\u0342',0x1ff2:'\\u1f7c\\u03b9',\n0x1ff3:'\\u03c9\\u03b9',0x1ff4:'\\u03ce\\u03b9',0x1ff6:'\\u03c9\\u0342',0x1ff7:'\\u03c9\\u0342\\u03b9',\n0x1ffc:'\\u03c9\\u03b9',0x20a8:'rs',0x2102:'c',0x2103:'\\xb0c',\n0x2107:'\\u025b',0x2109:'\\xb0f',0x210b:'h',0x210c:'h',\n0x210d:'h',0x2110:'i',0x2111:'i',0x2112:'l',\n0x2115:'n',0x2116:'no',0x2119:'p',0x211a:'q',\n0x211b:'r',0x211c:'r',0x211d:'r',0x2120:'sm',\n0x2121:'tel',0x2122:'tm',0x2124:'z',0x2128:'z',\n0x212c:'b',0x212d:'c',0x2130:'e',0x2131:'f',\n0x2133:'m',0x213e:'\\u03b3',0x213f:'\\u03c0',0x2145:'d',\n0x3371:'hpa',0x3373:'au',0x3375:'ov',0x3380:'pa',\n0x3381:'na',0x3382:'\\u03bca',0x3383:'ma',0x3384:'ka',\n0x3385:'kb',0x3386:'mb',0x3387:'gb',0x338a:'pf',\n0x338b:'nf',0x338c:'\\u03bcf',0x3390:'hz',0x3391:'khz',\n0x3392:'mhz',0x3393:'ghz',0x3394:'thz',0x33a9:'pa',\n0x33aa:'kpa',0x33ab:'mpa',0x33ac:'gpa',0x33b4:'pv',\n0x33b5:'nv',0x33b6:'\\u03bcv',0x33b7:'mv',0x33b8:'kv',\n0x33b9:'mv',0x33ba:'pw',0x33bb:'nw',0x33bc:'\\u03bcw',\n0x33bd:'mw',0x33be:'kw',0x33bf:'mw',0x33c0:'k\\u03c9',\n0x33c1:'m\\u03c9',0x33c3:'bq',0x33c6:'c\\u2215kg',0x33c7:'co.',\n0x33c8:'db',0x33c9:'gy',0x33cb:'hp',0x33cd:'kk',\n0x33ce:'km',0x33d7:'ph',0x33d9:'ppm',0x33da:'pr',\n0x33dc:'sv',0x33dd:'wb',0xfb00:'ff',0xfb01:'fi',\n0xfb02:'fl',0xfb03:'
ffi',0xfb04:'ffl',0xfb05:'st',\n0xfb06:'st',0xfb13:'\\u0574\\u0576',0xfb14:'\\u0574\\u0565',0xfb15:'\\u0574\\u056b',\n0xfb16:'\\u057e\\u0576',0xfb17:'\\u0574\\u056d',0x1d400:'a',0x1d401:'b',\n0x1d402:'c',0x1d403:'d',0x1d404:'e',0x1d405:'f',\n0x1d406:'g',0x1d407:'h',0x1d408:'i',0x1d409:'j',\n0x1d40a:'k',0x1d40b:'l',0x1d40c:'m',0x1d40d:'n',\n0x1d40e:'o',0x1d40f:'p',0x1d410:'q',0x1d411:'r',\n0x1d412:'s',0x1d413:'t',0x1d414:'u',0x1d415:'v',\n0x1d416:'w',0x1d417:'x',0x1d418:'y',0x1d419:'z',\n0x1d434:'a',0x1d435:'b',0x1d436:'c',0x1d437:'d',\n0x1d438:'e',0x1d439:'f',0x1d43a:'g',0x1d43b:'h',\n0x1d43c:'i',0x1d43d:'j',0x1d43e:'k',0x1d43f:'l',\n0x1d440:'m',0x1d441:'n',0x1d442:'o',0x1d443:'p',\n0x1d444:'q',0x1d445:'r',0x1d446:'s',0x1d447:'t',\n0x1d448:'u',0x1d449:'v',0x1d44a:'w',0x1d44b:'x',\n0x1d44c:'y',0x1d44d:'z',0x1d468:'a',0x1d469:'b',\n0x1d46a:'c',0x1d46b:'d',0x1d46c:'e',0x1d46d:'f',\n0x1d46e:'g',0x1d46f:'h',0x1d470:'i',0x1d471:'j',\n0x1d472:'k',0x1d473:'l',0x1d474:'m',0x1d475:'n',\n0x1d476:'o',0x1d477:'p',0x1d478:'q',0x1d479:'r',\n0x1d47a:'s',0x1d47b:'t',0x1d47c:'u',0x1d47d:'v',\n0x1d47e:'w',0x1d47f:'x',0x1d480:'y',0x1d481:'z',\n0x1d49c:'a',0x1d49e:'c',0x1d49f:'d',0x1d4a2:'g',\n0x1d4a5:'j',0x1d4a6:'k',0x1d4a9:'n',0x1d4aa:'o',\n0x1d4ab:'p',0x1d4ac:'q',0x1d4ae:'s',0x1d4af:'t',\n0x1d4b0:'u',0x1d4b1:'v',0x1d4b2:'w',0x1d4b3:'x',\n0x1d4b4:'y',0x1d4b5:'z',0x1d4d0:'a',0x1d4d1:'b',\n0x1d4d2:'c',0x1d4d3:'d',0x1d4d4:'e',0x1d4d5:'f',\n0x1d4d6:'g',0x1d4d7:'h',0x1d4d8:'i',0x1d4d9:'j',\n0x1d4da:'k',0x1d4db:'l',0x1d4dc:'m',0x1d4dd:'n',\n0x1d4de:'o',0x1d4df:'p',0x1d4e0:'q',0x1d4e1:'r',\n0x1d4e2:'s',0x1d4e3:'t',0x1d4e4:'u',0x1d4e5:'v',\n0x1d4e6:'w',0x1d4e7:'x',0x1d4e8:'y',0x1d4e9:'z',\n0x1d504:'a',0x1d505:'b',0x1d507:'d',0x1d508:'e',\n0x1d509:'f',0x1d50a:'g',0x1d50d:'j',0x1d50e:'k',\n0x1d50f:'l',0x1d510:'m',0x1d511:'n',0x1d512:'o',\n0x1d513:'p',0x1d514:'q',0x1d516:'s',0x1d517:'t',\n0x1d518:'u',0x1d519:'v',0x1d51a:'w',0x1d51b:'x',\n0x1d51c:'y',0x1d538:'a',0x1d539:'b',0x1d53b:'d',\n0x1d53c:'e',0x1d53d:'f',0x1d53e:'g',0x1d540:'i',\n0x1d541:'j',0x1d542:'k',0x1d543:'l',0x1d544:'m',\n0x1d546:'o',0x1d54a:'s',0x1d54b:'t',0x1d54c:'u',\n0x1d54d:'v',0x1d54e:'w',0x1d54f:'x',0x1d550:'y',\n0x1d56c:'a',0x1d56d:'b',0x1d56e:'c',0x1d56f:'d',\n0x1d570:'e',0x1d571:'f',0x1d572:'g',0x1d573:'h',\n0x1d574:'i',0x1d575:'j',0x1d576:'k',0x1d577:'l',\n0x1d578:'m',0x1d579:'n',0x1d57a:'o',0x1d57b:'p',\n0x1d57c:'q',0x1d57d:'r',0x1d57e:'s',0x1d57f:'t',\n0x1d580:'u',0x1d581:'v',0x1d582:'w',0x1d583:'x',\n0x1d584:'y',0x1d585:'z',0x1d5a0:'a',0x1d5a1:'b',\n0x1d5a2:'c',0x1d5a3:'d',0x1d5a4:'e',0x1d5a5:'f',\n0x1d5a6:'g',0x1d5a7:'h',0x1d5a8:'i',0x1d5a9:'j',\n0x1d5aa:'k',0x1d5ab:'l',0x1d5ac:'m',0x1d5ad:'n',\n0x1d5ae:'o',0x1d5af:'p',0x1d5b0:'q',0x1d5b1:'r',\n0x1d5b2:'s',0x1d5b3:'t',0x1d5b4:'u',0x1d5b5:'v',\n0x1d5b6:'w',0x1d5b7:'x',0x1d5b8:'y',0x1d5b9:'z',\n0x1d5d4:'a',0x1d5d5:'b',0x1d5d6:'c',0x1d5d7:'d',\n0x1d5d8:'e',0x1d5d9:'f',0x1d5da:'g',0x1d5db:'h',\n0x1d5dc:'i',0x1d5dd:'j',0x1d5de:'k',0x1d5df:'l',\n0x1d5e0:'m',0x1d5e1:'n',0x1d5e2:'o',0x1d5e3:'p',\n0x1d5e4:'q',0x1d5e5:'r',0x1d5e6:'s',0x1d5e7:'t',\n0x1d5e8:'u',0x1d5e9:'v',0x1d5ea:'w',0x1d5eb:'x',\n0x1d5ec:'y',0x1d5ed:'z',0x1d608:'a',0x1d609:'b',\n0x1d60a:'c',0x1d60b:'d',0x1d60c:'e',0x1d60d:'f',\n0x1d60e:'g',0x1d60f:'h',0x1d610:'i',0x1d611:'j',\n0x1d612:'k',0x1d613:'l',0x1d614:'m',0x1d615:'n',\n0x1d616:'o',0x1d617:'p',0x1d618:'q',0x1d619:'r',\n0x1d61a:'s',0x1d61b:'t',0x1d61c:'u',0x1d61d:'v',\n0x1d61e:'w',0x1d61f:'x',0x1d620:'y',0x1d621:'z',\n0x1d63c:'a',0x1d63d:'b',0x1d63e:'c',0x1d63f:'d',\n0x1d640:'e',0
x1d641:'f',0x1d642:'g',0x1d643:'h',\n0x1d644:'i',0x1d645:'j',0x1d646:'k',0x1d647:'l',\n0x1d648:'m',0x1d649:'n',0x1d64a:'o',0x1d64b:'p',\n0x1d64c:'q',0x1d64d:'r',0x1d64e:'s',0x1d64f:'t',\n0x1d650:'u',0x1d651:'v',0x1d652:'w',0x1d653:'x',\n0x1d654:'y',0x1d655:'z',0x1d670:'a',0x1d671:'b',\n0x1d672:'c',0x1d673:'d',0x1d674:'e',0x1d675:'f',\n0x1d676:'g',0x1d677:'h',0x1d678:'i',0x1d679:'j',\n0x1d67a:'k',0x1d67b:'l',0x1d67c:'m',0x1d67d:'n',\n0x1d67e:'o',0x1d67f:'p',0x1d680:'q',0x1d681:'r',\n0x1d682:'s',0x1d683:'t',0x1d684:'u',0x1d685:'v',\n0x1d686:'w',0x1d687:'x',0x1d688:'y',0x1d689:'z',\n0x1d6a8:'\\u03b1',0x1d6a9:'\\u03b2',0x1d6aa:'\\u03b3',0x1d6ab:'\\u03b4',\n0x1d6ac:'\\u03b5',0x1d6ad:'\\u03b6',0x1d6ae:'\\u03b7',0x1d6af:'\\u03b8',\n0x1d6b0:'\\u03b9',0x1d6b1:'\\u03ba',0x1d6b2:'\\u03bb',0x1d6b3:'\\u03bc',\n0x1d6b4:'\\u03bd',0x1d6b5:'\\u03be',0x1d6b6:'\\u03bf',0x1d6b7:'\\u03c0',\n0x1d6b8:'\\u03c1',0x1d6b9:'\\u03b8',0x1d6ba:'\\u03c3',0x1d6bb:'\\u03c4',\n0x1d6bc:'\\u03c5',0x1d6bd:'\\u03c6',0x1d6be:'\\u03c7',0x1d6bf:'\\u03c8',\n0x1d6c0:'\\u03c9',0x1d6d3:'\\u03c3',0x1d6e2:'\\u03b1',0x1d6e3:'\\u03b2',\n0x1d6e4:'\\u03b3',0x1d6e5:'\\u03b4',0x1d6e6:'\\u03b5',0x1d6e7:'\\u03b6',\n0x1d6e8:'\\u03b7',0x1d6e9:'\\u03b8',0x1d6ea:'\\u03b9',0x1d6eb:'\\u03ba',\n0x1d6ec:'\\u03bb',0x1d6ed:'\\u03bc',0x1d6ee:'\\u03bd',0x1d6ef:'\\u03be',\n0x1d6f0:'\\u03bf',0x1d6f1:'\\u03c0',0x1d6f2:'\\u03c1',0x1d6f3:'\\u03b8',\n0x1d6f4:'\\u03c3',0x1d6f5:'\\u03c4',0x1d6f6:'\\u03c5',0x1d6f7:'\\u03c6',\n0x1d6f8:'\\u03c7',0x1d6f9:'\\u03c8',0x1d6fa:'\\u03c9',0x1d70d:'\\u03c3',\n0x1d71c:'\\u03b1',0x1d71d:'\\u03b2',0x1d71e:'\\u03b3',0x1d71f:'\\u03b4',\n0x1d720:'\\u03b5',0x1d721:'\\u03b6',0x1d722:'\\u03b7',0x1d723:'\\u03b8',\n0x1d724:'\\u03b9',0x1d725:'\\u03ba',0x1d726:'\\u03bb',0x1d727:'\\u03bc',\n0x1d728:'\\u03bd',0x1d729:'\\u03be',0x1d72a:'\\u03bf',0x1d72b:'\\u03c0',\n0x1d72c:'\\u03c1',0x1d72d:'\\u03b8',0x1d72e:'\\u03c3',0x1d72f:'\\u03c4',\n0x1d730:'\\u03c5',0x1d731:'\\u03c6',0x1d732:'\\u03c7',0x1d733:'\\u03c8',\n0x1d734:'\\u03c9',0x1d747:'\\u03c3',0x1d756:'\\u03b1',0x1d757:'\\u03b2',\n0x1d758:'\\u03b3',0x1d759:'\\u03b4',0x1d75a:'\\u03b5',0x1d75b:'\\u03b6',\n0x1d75c:'\\u03b7',0x1d75d:'\\u03b8',0x1d75e:'\\u03b9',0x1d75f:'\\u03ba',\n0x1d760:'\\u03bb',0x1d761:'\\u03bc',0x1d762:'\\u03bd',0x1d763:'\\u03be',\n0x1d764:'\\u03bf',0x1d765:'\\u03c0',0x1d766:'\\u03c1',0x1d767:'\\u03b8',\n0x1d768:'\\u03c3',0x1d769:'\\u03c4',0x1d76a:'\\u03c5',0x1d76b:'\\u03c6',\n0x1d76c:'\\u03c7',0x1d76d:'\\u03c8',0x1d76e:'\\u03c9',0x1d781:'\\u03c3',\n0x1d790:'\\u03b1',0x1d791:'\\u03b2',0x1d792:'\\u03b3',0x1d793:'\\u03b4',\n0x1d794:'\\u03b5',0x1d795:'\\u03b6',0x1d796:'\\u03b7',0x1d797:'\\u03b8',\n0x1d798:'\\u03b9',0x1d799:'\\u03ba',0x1d79a:'\\u03bb',0x1d79b:'\\u03bc',\n0x1d79c:'\\u03bd',0x1d79d:'\\u03be',0x1d79e:'\\u03bf',0x1d79f:'\\u03c0',\n0x1d7a0:'\\u03c1',0x1d7a1:'\\u03b8',0x1d7a2:'\\u03c3',0x1d7a3:'\\u03c4',\n0x1d7a4:'\\u03c5',0x1d7a5:'\\u03c6',0x1d7a6:'\\u03c7',0x1d7a7:'\\u03c8',\n0x1d7a8:'\\u03c9',0x1d7bb:'\\u03c3',}\n\ndef map_table_b3(code):\n r=b3_exceptions.get(ord(code))\n if r is not None :return r\n return code.lower()\n \n \ndef map_table_b2(a):\n al=map_table_b3(a)\n b=unicodedata.normalize(\"NFKC\",al)\n bl=\"\".join([map_table_b3(ch)for ch in b])\n c=unicodedata.normalize(\"NFKC\",bl)\n if b !=c:\n return c\n else :\n return al\n \n \ndef in_table_c11(code):\n return code ==\" \"\n \n \ndef in_table_c12(code):\n return unicodedata.category(code)==\"Zs\"and code !=\" \"\n \ndef in_table_c11_c12(code):\n return unicodedata.category(code)==\"Zs\"\n \n \ndef 
in_table_c21(code):\n return ord(code)<128 and unicodedata.category(code)==\"Cc\"\n \nc22_specials=set([1757,1807,6158,8204,8205,8232,8233,65279]+list(range(8288,8292))+list(range(8298,8304))+list(range(65529,65533))+list(range(119155,119163)))\ndef in_table_c22(code):\n c=ord(code)\n if c <128:return False\n if unicodedata.category(code)==\"Cc\":return True\n return c in c22_specials\n \ndef in_table_c21_c22(code):\n return unicodedata.category(code)==\"Cc\"or\\\n ord(code)in c22_specials\n \n \ndef in_table_c3(code):\n return unicodedata.category(code)==\"Co\"\n \n \ndef in_table_c4(code):\n c=ord(code)\n if c <0xFDD0:return False\n if c <0xFDF0:return True\n return (ord(code)&0xFFFF)in (0xFFFE,0xFFFF)\n \n \ndef in_table_c5(code):\n return unicodedata.category(code)==\"Cs\"\n \n \nc6_set=set(range(65529,65534))\ndef in_table_c6(code):\n return ord(code)in c6_set\n \n \nc7_set=set(range(12272,12284))\ndef in_table_c7(code):\n return ord(code)in c7_set\n \n \nc8_set=set([832,833,8206,8207]+list(range(8234,8239))+list(range(8298,8304)))\ndef in_table_c8(code):\n return ord(code)in c8_set\n \n \nc9_set=set([917505]+list(range(917536,917632)))\ndef in_table_c9(code):\n return ord(code)in c9_set\n \n \ndef in_table_d1(code):\n return unicodedata.bidirectional(code)in (\"R\",\"AL\")\n \n \ndef in_table_d2(code):\n return unicodedata.bidirectional(code)==\"L\"\n", ["unicodedata"]], "typing": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nfrom abc import abstractmethod,ABCMeta\nimport collections\nfrom collections import defaultdict\nimport collections.abc\nimport copyreg\nimport contextlib\nimport functools\nimport operator\nimport re as stdlib_re\nimport sys\nimport types\nimport warnings\nfrom types import WrapperDescriptorType,MethodWrapperType,MethodDescriptorType,GenericAlias\n\nfrom _typing import(\n_idfunc,\nTypeVar,\nParamSpec,\nTypeVarTuple,\nParamSpecArgs,\nParamSpecKwargs,\nTypeAliasType,\nGeneric,\n)\n\n\n__all__=[\n\n'Annotated',\n'Any',\n'Callable',\n'ClassVar',\n'Concatenate',\n'Final',\n'ForwardRef',\n'Generic',\n'Literal',\n'Optional',\n'ParamSpec',\n'Protocol',\n'Tuple',\n'Type',\n'TypeVar',\n'TypeVarTuple',\n'Union',\n\n\n'AbstractSet',\n'ByteString',\n'Container',\n'ContextManager',\n'Hashable',\n'ItemsView',\n'Iterable',\n'Iterator',\n'KeysView',\n'Mapping',\n'MappingView',\n'MutableMapping',\n'MutableSequence',\n'MutableSet',\n'Sequence',\n'Sized',\n'ValuesView',\n'Awaitable',\n'AsyncIterator',\n'AsyncIterable',\n'Coroutine',\n'Collection',\n'AsyncGenerator',\n'AsyncContextManager',\n\n\n'Reversible',\n'SupportsAbs',\n'SupportsBytes',\n'SupportsComplex',\n'SupportsFloat',\n'SupportsIndex',\n'SupportsInt',\n'SupportsRound',\n\n\n'ChainMap',\n'Counter',\n'Deque',\n'Dict',\n'DefaultDict',\n'List',\n'OrderedDict',\n'Set',\n'FrozenSet',\n'NamedTuple',\n'TypedDict',\n'Generator',\n\n\n'BinaryIO',\n'IO',\n'Match',\n'Pattern',\n'TextIO',\n\n\n'AnyStr',\n'assert_type',\n'assert_never',\n'cast',\n'clear_overloads',\n'dataclass_transform',\n'final',\n'get_args',\n'get_origin',\n'get_overloads',\n'get_type_hints',\n'is_typeddict',\n'LiteralString',\n'Never',\n'NewType',\n'no_type_check',\n'no_type_check_decorator',\n'NoReturn',\n'NotRequired',\n'overload',\n'override',\n'ParamSpecArgs',\n'ParamSpecKwargs',\n'Required',\n'reveal_type',\n'runtime_checkable',\n'Self',\n'Text',\n'TYPE_CHECKING',\n'TypeAlias',\n'TypeGuard',\n'TypeAliasType',\n'Unpack',\n]\n\n\n\n\n\n\ndef _type_convert(arg,module=None,*,allow_special_forms=False):\n ''\n if arg is None:\n return type(None)\n if 
isinstance(arg,str):\n return ForwardRef(arg,module=module,is_class=allow_special_forms)\n return arg\n \n \ndef _type_check(arg,msg,is_argument=True,module=None,*,allow_special_forms=False):\n ''\n\n\n\n\n\n\n\n\n\n \n invalid_generic_forms=(Generic,Protocol)\n if not allow_special_forms:\n invalid_generic_forms +=(ClassVar,)\n if is_argument:\n invalid_generic_forms +=(Final,)\n \n arg=_type_convert(arg,module=module,allow_special_forms=allow_special_forms)\n if(isinstance(arg,_GenericAlias)and\n arg.__origin__ in invalid_generic_forms):\n raise TypeError(f\"{arg} is not valid as type argument\")\n if arg in(Any,LiteralString,NoReturn,Never,Self,TypeAlias):\n return arg\n if allow_special_forms and arg in(ClassVar,Final):\n return arg\n if isinstance(arg,_SpecialForm)or arg in(Generic,Protocol):\n raise TypeError(f\"Plain {arg} is not valid as type argument\")\n if type(arg)is tuple:\n raise TypeError(f\"{msg} Got {arg !r:.100}.\")\n return arg\n \n \ndef _is_param_expr(arg):\n return arg is ...or isinstance(arg,\n (tuple,list,ParamSpec,_ConcatenateGenericAlias))\n \n \ndef _should_unflatten_callable_args(typ,args):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n return(\n typ.__origin__ is collections.abc.Callable\n and not(len(args)==2 and _is_param_expr(args[0]))\n )\n \n \ndef _type_repr(obj):\n ''\n\n\n\n\n\n \n \n \n \n if isinstance(obj,type):\n if obj.__module__ =='builtins':\n return obj.__qualname__\n return f'{obj.__module__}.{obj.__qualname__}'\n if obj is ...:\n return '...'\n if isinstance(obj,types.FunctionType):\n return obj.__name__\n if isinstance(obj,tuple):\n \n return '['+', '.join(_type_repr(t)for t in obj)+']'\n return repr(obj)\n \n \ndef _collect_parameters(args):\n ''\n\n\n\n\n\n \n parameters=[]\n for t in args:\n if isinstance(t,type):\n \n pass\n elif isinstance(t,tuple):\n \n \n for x in t:\n for collected in _collect_parameters([x]):\n if collected not in parameters:\n parameters.append(collected)\n elif hasattr(t,'__typing_subst__'):\n if t not in parameters:\n parameters.append(t)\n else:\n for x in getattr(t,'__parameters__',()):\n if x not in parameters:\n parameters.append(x)\n return tuple(parameters)\n \n \ndef _check_generic(cls,parameters,elen):\n ''\n\n\n \n if not elen:\n raise TypeError(f\"{cls} is not a generic class\")\n alen=len(parameters)\n if alen !=elen:\n raise TypeError(f\"Too {'many'if alen >elen else 'few'} arguments for {cls};\"\n f\" actual {alen}, expected {elen}\")\n \ndef _unpack_args(args):\n newargs=[]\n for arg in args:\n subargs=getattr(arg,'__typing_unpacked_tuple_args__',None)\n if subargs is not None and not(subargs and subargs[-1]is ...):\n newargs.extend(subargs)\n else:\n newargs.append(arg)\n return newargs\n \ndef _deduplicate(params):\n\n all_params=set(params)\n if len(all_params)','eval')\n except SyntaxError:\n raise SyntaxError(f\"Forward reference must be an expression -- got {arg !r}\")\n \n self.__forward_arg__=arg\n self.__forward_code__=code\n self.__forward_evaluated__=False\n self.__forward_value__=None\n self.__forward_is_argument__=is_argument\n self.__forward_is_class__=is_class\n self.__forward_module__=module\n \n def _evaluate(self,globalns,localns,recursive_guard):\n if self.__forward_arg__ in recursive_guard:\n return self\n if not self.__forward_evaluated__ or localns is not globalns:\n if globalns is None and localns is None:\n globalns=localns={}\n elif globalns is None:\n globalns=localns\n elif localns is None:\n localns=globalns\n if self.__forward_module__ is not None:\n globalns=getattr(\n 
sys.modules.get(self.__forward_module__,None),'__dict__',globalns\n )\n type_=_type_check(\n eval(self.__forward_code__,globalns,localns),\n \"Forward references must evaluate to types.\",\n is_argument=self.__forward_is_argument__,\n allow_special_forms=self.__forward_is_class__,\n )\n self.__forward_value__=_eval_type(\n type_,globalns,localns,recursive_guard |{self.__forward_arg__}\n )\n self.__forward_evaluated__=True\n return self.__forward_value__\n \n def __eq__(self,other):\n if not isinstance(other,ForwardRef):\n return NotImplemented\n if self.__forward_evaluated__ and other.__forward_evaluated__:\n return(self.__forward_arg__ ==other.__forward_arg__ and\n self.__forward_value__ ==other.__forward_value__)\n return(self.__forward_arg__ ==other.__forward_arg__ and\n self.__forward_module__ ==other.__forward_module__)\n \n def __hash__(self):\n return hash((self.__forward_arg__,self.__forward_module__))\n \n def __or__(self,other):\n return Union[self,other]\n \n def __ror__(self,other):\n return Union[other,self]\n \n def __repr__(self):\n if self.__forward_module__ is None:\n module_repr=''\n else:\n module_repr=f', module={self.__forward_module__ !r}'\n return f'ForwardRef({self.__forward_arg__ !r}{module_repr})'\n \n \ndef _is_unpacked_typevartuple(x:Any)->bool:\n return((not isinstance(x,type))and\n getattr(x,'__typing_is_unpacked_typevartuple__',False))\n \n \ndef _is_typevar_like(x:Any)->bool:\n return isinstance(x,(TypeVar,ParamSpec))or _is_unpacked_typevartuple(x)\n \n \nclass _PickleUsingNameMixin:\n ''\n \n def __reduce__(self):\n return self.__name__\n \n \ndef _typevar_subst(self,arg):\n msg=\"Parameters to generic types must be types.\"\n arg=_type_check(arg,msg,is_argument=True)\n if((isinstance(arg,_GenericAlias)and arg.__origin__ is Unpack)or\n (isinstance(arg,GenericAlias)and getattr(arg,'__unpacked__',False))):\n raise TypeError(f\"{arg} is not valid as type argument\")\n return arg\n \n \ndef _typevartuple_prepare_subst(self,alias,args):\n params=alias.__parameters__\n typevartuple_index=params.index(self)\n for param in params[typevartuple_index+1:]:\n if isinstance(param,TypeVarTuple):\n raise TypeError(f\"More than one TypeVarTuple parameter in {alias}\")\n \n alen=len(args)\n plen=len(params)\n left=typevartuple_index\n right=plen -typevartuple_index -1\n var_tuple_index=None\n fillarg=None\n for k,arg in enumerate(args):\n if not isinstance(arg,type):\n subargs=getattr(arg,'__typing_unpacked_tuple_args__',None)\n if subargs and len(subargs)==2 and subargs[-1]is ...:\n if var_tuple_index is not None:\n raise TypeError(\"More than one unpacked arbitrary-length tuple argument\")\n var_tuple_index=k\n fillarg=subargs[0]\n if var_tuple_index is not None:\n left=min(left,var_tuple_index)\n right=min(right,alen -var_tuple_index -1)\n elif left+right >alen:\n raise TypeError(f\"Too few arguments for {alias};\"\n f\" actual {alen}, expected at least {plen -1}\")\n \n return(\n *args[:left],\n *([fillarg]*(typevartuple_index -left)),\n tuple(args[left:alen -right]),\n *([fillarg]*(plen -right -left -typevartuple_index -1)),\n *args[alen -right:],\n )\n \n \ndef _paramspec_subst(self,arg):\n if isinstance(arg,(list,tuple)):\n arg=tuple(_type_check(a,\"Expected a type.\")for a in arg)\n elif not _is_param_expr(arg):\n raise TypeError(f\"Expected a list of types, an ellipsis, \"\n f\"ParamSpec, or Concatenate. 
Got {arg}\")\n return arg\n \n \ndef _paramspec_prepare_subst(self,alias,args):\n params=alias.__parameters__\n i=params.index(self)\n if i >=len(args):\n raise TypeError(f\"Too few arguments for {alias}\")\n \n if len(params)==1 and not _is_param_expr(args[0]):\n assert i ==0\n args=(args,)\n \n elif isinstance(args[i],list):\n args=(*args[:i],tuple(args[i]),*args[i+1:])\n return args\n \n \n@_tp_cache\ndef _generic_class_getitem(cls,params):\n ''\n\n\n\n\n\n\n\n \n if not isinstance(params,tuple):\n params=(params,)\n \n params=tuple(_type_convert(p)for p in params)\n is_generic_or_protocol=cls in(Generic,Protocol)\n \n if is_generic_or_protocol:\n \n if not params:\n raise TypeError(\n f\"Parameter list to {cls.__qualname__}[...] cannot be empty\"\n )\n if not all(_is_typevar_like(p)for p in params):\n raise TypeError(\n f\"Parameters to {cls.__name__}[...] must all be type variables \"\n f\"or parameter specification variables.\")\n if len(set(params))!=len(params):\n raise TypeError(\n f\"Parameters to {cls.__name__}[...] must all be unique\")\n else:\n \n for param in cls.__parameters__:\n prepare=getattr(param,'__typing_prepare_subst__',None)\n if prepare is not None:\n params=prepare(cls,params)\n _check_generic(cls,params,len(cls.__parameters__))\n \n new_args=[]\n for param,new_arg in zip(cls.__parameters__,params):\n if isinstance(param,TypeVarTuple):\n new_args.extend(new_arg)\n else:\n new_args.append(new_arg)\n params=tuple(new_args)\n \n return _GenericAlias(cls,params)\n \n \ndef _generic_init_subclass(cls,*args,**kwargs):\n super(Generic,cls).__init_subclass__(*args,**kwargs)\n tvars=[]\n if '__orig_bases__'in cls.__dict__:\n error=Generic in cls.__orig_bases__\n else:\n error=(Generic in cls.__bases__ and\n cls.__name__ !='Protocol'and\n type(cls)!=_TypedDictMeta)\n if error:\n raise TypeError(\"Cannot inherit from plain Generic\")\n if '__orig_bases__'in cls.__dict__:\n tvars=_collect_parameters(cls.__orig_bases__)\n \n \n \n \n \n gvars=None\n for base in cls.__orig_bases__:\n if(isinstance(base,_GenericAlias)and\n base.__origin__ is Generic):\n if gvars is not None:\n raise TypeError(\n \"Cannot inherit from Generic[...] 
multiple times.\")\n gvars=base.__parameters__\n if gvars is not None:\n tvarset=set(tvars)\n gvarset=set(gvars)\n if not tvarset <=gvarset:\n s_vars=', '.join(str(t)for t in tvars if t not in gvarset)\n s_args=', '.join(str(g)for g in gvars)\n raise TypeError(f\"Some type variables ({s_vars}) are\"\n f\" not listed in Generic[{s_args}]\")\n tvars=gvars\n cls.__parameters__=tuple(tvars)\n \n \ndef _is_dunder(attr):\n return attr.startswith('__')and attr.endswith('__')\n \nclass _BaseGenericAlias(_Final,_root=True):\n ''\n\n\n\n\n\n\n \n \n def __init__(self,origin,*,inst=True,name=None):\n self._inst=inst\n self._name=name\n self.__origin__=origin\n self.__slots__=None\n \n def __call__(self,*args,**kwargs):\n if not self._inst:\n raise TypeError(f\"Type {self._name} cannot be instantiated; \"\n f\"use {self.__origin__.__name__}() instead\")\n result=self.__origin__(*args,**kwargs)\n try:\n result.__orig_class__=self\n except AttributeError:\n pass\n return result\n \n def __mro_entries__(self,bases):\n res=[]\n if self.__origin__ not in bases:\n res.append(self.__origin__)\n i=bases.index(self)\n for b in bases[i+1:]:\n if isinstance(b,_BaseGenericAlias)or issubclass(b,Generic):\n break\n else:\n res.append(Generic)\n return tuple(res)\n \n def __getattr__(self,attr):\n if attr in{'__name__','__qualname__'}:\n return self._name or self.__origin__.__name__\n \n \n \n if '__origin__'in self.__dict__ and not _is_dunder(attr):\n return getattr(self.__origin__,attr)\n raise AttributeError(attr)\n \n def __setattr__(self,attr,val):\n if _is_dunder(attr)or attr in{'_name','_inst','_nparams'}:\n super().__setattr__(attr,val)\n else:\n setattr(self.__origin__,attr,val)\n \n def __instancecheck__(self,obj):\n return self.__subclasscheck__(type(obj))\n \n def __subclasscheck__(self,cls):\n raise TypeError(\"Subscripted generics cannot be used with\"\n \" class and instance checks\")\n \n def __dir__(self):\n return list(set(super().__dir__()\n +[attr for attr in dir(self.__origin__)if not _is_dunder(attr)]))\n \n \n \n \n \n \n \n \n \n \n \n \n \nclass _GenericAlias(_BaseGenericAlias,_root=True):\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n def __init__(self,origin,args,*,inst=True,name=None):\n super().__init__(origin,inst=inst,name=name)\n if not isinstance(args,tuple):\n args=(args,)\n self.__args__=tuple(...if a is _TypingEllipsis else\n a for a in args)\n self.__parameters__=_collect_parameters(args)\n if not name:\n self.__module__=origin.__module__\n \n def __eq__(self,other):\n if not isinstance(other,_GenericAlias):\n return NotImplemented\n return(self.__origin__ ==other.__origin__\n and self.__args__ ==other.__args__)\n \n def __hash__(self):\n return hash((self.__origin__,self.__args__))\n \n def __or__(self,right):\n return Union[self,right]\n \n def __ror__(self,left):\n return Union[left,self]\n \n @_tp_cache\n def __getitem__(self,args):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.__origin__ in(Generic,Protocol):\n \n raise TypeError(f\"Cannot subscript already-subscripted {self}\")\n if not self.__parameters__:\n raise TypeError(f\"{self} is not a generic class\")\n \n \n if not isinstance(args,tuple):\n args=(args,)\n args=tuple(_type_convert(p)for p in args)\n args=_unpack_args(args)\n new_args=self._determine_new_args(args)\n r=self.copy_with(new_args)\n return r\n \n def _determine_new_args(self,args):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n params=self.__parameters__\n \n for param in params:\n prepare=getattr(param,'__typing_prepare_subst__',None)\n if 
prepare is not None:\n args=prepare(self,args)\n alen=len(args)\n plen=len(params)\n if alen !=plen:\n raise TypeError(f\"Too {'many'if alen >plen else 'few'} arguments for {self};\"\n f\" actual {alen}, expected {plen}\")\n new_arg_by_param=dict(zip(params,args))\n return tuple(self._make_substitution(self.__args__,new_arg_by_param))\n \n def _make_substitution(self,args,new_arg_by_param):\n ''\n new_args=[]\n for old_arg in args:\n if isinstance(old_arg,type):\n new_args.append(old_arg)\n continue\n \n substfunc=getattr(old_arg,'__typing_subst__',None)\n if substfunc:\n new_arg=substfunc(new_arg_by_param[old_arg])\n else:\n subparams=getattr(old_arg,'__parameters__',())\n if not subparams:\n new_arg=old_arg\n else:\n subargs=[]\n for x in subparams:\n if isinstance(x,TypeVarTuple):\n subargs.extend(new_arg_by_param[x])\n else:\n subargs.append(new_arg_by_param[x])\n new_arg=old_arg[tuple(subargs)]\n \n if self.__origin__ ==collections.abc.Callable and isinstance(new_arg,tuple):\n \n \n \n \n \n \n \n \n \n \n new_args.extend(new_arg)\n elif _is_unpacked_typevartuple(old_arg):\n \n \n \n \n \n \n \n \n \n new_args.extend(new_arg)\n elif isinstance(old_arg,tuple):\n \n \n \n \n \n \n \n new_args.append(\n tuple(self._make_substitution(old_arg,new_arg_by_param)),\n )\n else:\n new_args.append(new_arg)\n return new_args\n \n def copy_with(self,args):\n return self.__class__(self.__origin__,args,name=self._name,inst=self._inst)\n \n def __repr__(self):\n if self._name:\n name='typing.'+self._name\n else:\n name=_type_repr(self.__origin__)\n if self.__args__:\n args=\", \".join([_type_repr(a)for a in self.__args__])\n else:\n \n args=\"()\"\n return f'{name}[{args}]'\n \n def __reduce__(self):\n if self._name:\n origin=globals()[self._name]\n else:\n origin=self.__origin__\n args=tuple(self.__args__)\n if len(args)==1 and not isinstance(args[0],tuple):\n args,=args\n return operator.getitem,(origin,args)\n \n def __mro_entries__(self,bases):\n if isinstance(self.__origin__,_SpecialForm):\n raise TypeError(f\"Cannot subclass {self !r}\")\n \n if self._name:\n return super().__mro_entries__(bases)\n if self.__origin__ is Generic:\n if Protocol in bases:\n return()\n i=bases.index(self)\n for b in bases[i+1:]:\n if isinstance(b,_BaseGenericAlias)and b is not self:\n return()\n return(self.__origin__,)\n \n def __iter__(self):\n yield Unpack[self]\n \n \n \n \n \n \nclass _SpecialGenericAlias(_NotIterable,_BaseGenericAlias,_root=True):\n def __init__(self,origin,nparams,*,inst=True,name=None):\n if name is None:\n name=origin.__name__\n super().__init__(origin,inst=inst,name=name)\n self._nparams=nparams\n if origin.__module__ =='builtins':\n self.__doc__=f'A generic version of {origin.__qualname__}.'\n else:\n self.__doc__=f'A generic version of {origin.__module__}.{origin.__qualname__}.'\n \n @_tp_cache\n def __getitem__(self,params):\n if not isinstance(params,tuple):\n params=(params,)\n msg=\"Parameters to generic types must be types.\"\n params=tuple(_type_check(p,msg)for p in params)\n _check_generic(self,params,self._nparams)\n return self.copy_with(params)\n \n def copy_with(self,params):\n return _GenericAlias(self.__origin__,params,\n name=self._name,inst=self._inst)\n \n def __repr__(self):\n return 'typing.'+self._name\n \n def __subclasscheck__(self,cls):\n if isinstance(cls,_SpecialGenericAlias):\n return issubclass(cls.__origin__,self.__origin__)\n if not isinstance(cls,_GenericAlias):\n return issubclass(cls,self.__origin__)\n return super().__subclasscheck__(cls)\n \n def 
__reduce__(self):\n return self._name\n \n def __or__(self,right):\n return Union[self,right]\n \n def __ror__(self,left):\n return Union[left,self]\n \n \nclass _DeprecatedGenericAlias(_SpecialGenericAlias,_root=True):\n def __init__(\n self,origin,nparams,*,removal_version,inst=True,name=None\n ):\n super().__init__(origin,nparams,inst=inst,name=name)\n self._removal_version=removal_version\n \n def __instancecheck__(self,inst):\n import warnings\n warnings._deprecated(\n f\"{self.__module__}.{self._name}\",remove=self._removal_version\n )\n return super().__instancecheck__(inst)\n \n \nclass _CallableGenericAlias(_NotIterable,_GenericAlias,_root=True):\n def __repr__(self):\n assert self._name =='Callable'\n args=self.__args__\n if len(args)==2 and _is_param_expr(args[0]):\n return super().__repr__()\n return(f'typing.Callable'\n f'[[{\", \".join([_type_repr(a)for a in args[:-1]])}], '\n f'{_type_repr(args[-1])}]')\n \n def __reduce__(self):\n args=self.__args__\n if not(len(args)==2 and _is_param_expr(args[0])):\n args=list(args[:-1]),args[-1]\n return operator.getitem,(Callable,args)\n \n \nclass _CallableType(_SpecialGenericAlias,_root=True):\n def copy_with(self,params):\n return _CallableGenericAlias(self.__origin__,params,\n name=self._name,inst=self._inst)\n \n def __getitem__(self,params):\n if not isinstance(params,tuple)or len(params)!=2:\n raise TypeError(\"Callable must be used as \"\n \"Callable[[arg, ...], result].\")\n args,result=params\n \n \n \n if isinstance(args,list):\n params=(tuple(args),result)\n else:\n params=(args,result)\n return self.__getitem_inner__(params)\n \n @_tp_cache\n def __getitem_inner__(self,params):\n args,result=params\n msg=\"Callable[args, result]: result must be a type.\"\n result=_type_check(result,msg)\n if args is Ellipsis:\n return self.copy_with((_TypingEllipsis,result))\n if not isinstance(args,tuple):\n args=(args,)\n args=tuple(_type_convert(arg)for arg in args)\n params=args+(result,)\n return self.copy_with(params)\n \n \nclass _TupleType(_SpecialGenericAlias,_root=True):\n @_tp_cache\n def __getitem__(self,params):\n if not isinstance(params,tuple):\n params=(params,)\n if len(params)>=2 and params[-1]is ...:\n msg=\"Tuple[t, ...]: t must be a type.\"\n params=tuple(_type_check(p,msg)for p in params[:-1])\n return self.copy_with((*params,_TypingEllipsis))\n msg=\"Tuple[t0, t1, ...]: each t must be a type.\"\n params=tuple(_type_check(p,msg)for p in params)\n return self.copy_with(params)\n \n \nclass _UnionGenericAlias(_NotIterable,_GenericAlias,_root=True):\n def copy_with(self,params):\n return Union[params]\n \n def __eq__(self,other):\n if not isinstance(other,(_UnionGenericAlias,types.UnionType)):\n return NotImplemented\n return set(self.__args__)==set(other.__args__)\n \n def __hash__(self):\n return hash(frozenset(self.__args__))\n \n def __repr__(self):\n args=self.__args__\n if len(args)==2:\n if args[0]is type(None):\n return f'typing.Optional[{_type_repr(args[1])}]'\n elif args[1]is type(None):\n return f'typing.Optional[{_type_repr(args[0])}]'\n return super().__repr__()\n \n def __instancecheck__(self,obj):\n return self.__subclasscheck__(type(obj))\n \n def __subclasscheck__(self,cls):\n for arg in self.__args__:\n if issubclass(cls,arg):\n return True\n \n def __reduce__(self):\n func,(origin,args)=super().__reduce__()\n return func,(Union,args)\n \n \ndef _value_and_type_iter(parameters):\n return((p,type(p))for p in parameters)\n \n \nclass _LiteralGenericAlias(_GenericAlias,_root=True):\n def 
__eq__(self,other):\n if not isinstance(other,_LiteralGenericAlias):\n return NotImplemented\n \n return set(_value_and_type_iter(self.__args__))==set(_value_and_type_iter(other.__args__))\n \n def __hash__(self):\n return hash(frozenset(_value_and_type_iter(self.__args__)))\n \n \nclass _ConcatenateGenericAlias(_GenericAlias,_root=True):\n def copy_with(self,params):\n if isinstance(params[-1],(list,tuple)):\n return(*params[:-1],*params[-1])\n if isinstance(params[-1],_ConcatenateGenericAlias):\n params=(*params[:-1],*params[-1].__args__)\n return super().copy_with(params)\n \n \n@_SpecialForm\ndef Unpack(self,parameters):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n item=_type_check(parameters,f'{self} accepts only single type.')\n return _UnpackGenericAlias(origin=self,args=(item,))\n \n \nclass _UnpackGenericAlias(_GenericAlias,_root=True):\n def __repr__(self):\n \n \n return f'typing.Unpack[{_type_repr(self.__args__[0])}]'\n \n def __getitem__(self,args):\n if self.__typing_is_unpacked_typevartuple__:\n return args\n return super().__getitem__(args)\n \n @property\n def __typing_unpacked_tuple_args__(self):\n assert self.__origin__ is Unpack\n assert len(self.__args__)==1\n arg,=self.__args__\n if isinstance(arg,_GenericAlias):\n assert arg.__origin__ is tuple\n return arg.__args__\n return None\n \n @property\n def __typing_is_unpacked_typevartuple__(self):\n assert self.__origin__ is Unpack\n assert len(self.__args__)==1\n return isinstance(self.__args__[0],TypeVarTuple)\n \n \nclass _TypingEllipsis:\n ''\n \n \n_TYPING_INTERNALS=frozenset({\n'__parameters__','__orig_bases__','__orig_class__',\n'_is_protocol','_is_runtime_protocol','__protocol_attrs__',\n'__callable_proto_members_only__','__type_params__',\n})\n\n_SPECIAL_NAMES=frozenset({\n'__abstractmethods__','__annotations__','__dict__','__doc__',\n'__init__','__module__','__new__','__slots__',\n'__subclasshook__','__weakref__','__class_getitem__'\n})\n\n\nEXCLUDED_ATTRIBUTES=_TYPING_INTERNALS |_SPECIAL_NAMES |{'_MutableMapping__marker'}\n\n\ndef _get_protocol_attrs(cls):\n ''\n\n\n\n \n attrs=set()\n for base in cls.__mro__[:-1]:\n if base.__name__ in{'Protocol','Generic'}:\n continue\n annotations=getattr(base,'__annotations__',{})\n for attr in(*base.__dict__,*annotations):\n if not attr.startswith('_abc_')and attr not in EXCLUDED_ATTRIBUTES:\n attrs.add(attr)\n return attrs\n \n \ndef _no_init_or_replace_init(self,*args,**kwargs):\n cls=type(self)\n \n if cls._is_protocol:\n raise TypeError('Protocols cannot be instantiated')\n \n \n \n if cls.__init__ is not _no_init_or_replace_init:\n return\n \n \n \n \n \n \n \n for base in cls.__mro__:\n init=base.__dict__.get('__init__',_no_init_or_replace_init)\n if init is not _no_init_or_replace_init:\n cls.__init__=init\n break\n else:\n \n cls.__init__=object.__init__\n \n cls.__init__(self,*args,**kwargs)\n \n \ndef _caller(depth=1,default='__main__'):\n try:\n return sys._getframemodulename(depth+1)or default\n except AttributeError:\n pass\n try:\n return sys._getframe(depth+1).f_globals.get('__name__',default)\n except(AttributeError,ValueError):\n pass\n return None\n \ndef _allow_reckless_class_checks(depth=2):\n ''\n\n\n\n \n return _caller(depth)in{'abc','functools',None}\n \n 
\n_PROTO_ALLOWLIST={\n'collections.abc':[\n'Callable','Awaitable','Iterable','Iterator','AsyncIterable',\n'Hashable','Sized','Container','Collection','Reversible','Buffer',\n],\n'contextlib':['AbstractContextManager','AbstractAsyncContextManager'],\n}\n\n\n@functools.cache\ndef _lazy_load_getattr_static():\n\n\n from inspect import getattr_static\n return getattr_static\n \n \n_cleanups.append(_lazy_load_getattr_static.cache_clear)\n\ndef _pickle_psargs(psargs):\n return ParamSpecArgs,(psargs.__origin__,)\n \ncopyreg.pickle(ParamSpecArgs,_pickle_psargs)\n\ndef _pickle_pskwargs(pskwargs):\n return ParamSpecKwargs,(pskwargs.__origin__,)\n \ncopyreg.pickle(ParamSpecKwargs,_pickle_pskwargs)\n\ndel _pickle_psargs,_pickle_pskwargs\n\n\nclass _ProtocolMeta(ABCMeta):\n\n\n def __new__(mcls,name,bases,namespace,/,**kwargs):\n if name ==\"Protocol\"and bases ==(Generic,):\n pass\n elif Protocol in bases:\n for base in bases:\n if not(\n base in{object,Generic}\n or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__,[])\n or(\n issubclass(base,Generic)\n and getattr(base,\"_is_protocol\",False)\n )\n ):\n raise TypeError(\n f\"Protocols can only inherit from other protocols, \"\n f\"got {base !r}\"\n )\n return super().__new__(mcls,name,bases,namespace,**kwargs)\n \n def __init__(cls,*args,**kwargs):\n super().__init__(*args,**kwargs)\n if getattr(cls,\"_is_protocol\",False):\n cls.__protocol_attrs__=_get_protocol_attrs(cls)\n \n \n cls.__callable_proto_members_only__=all(\n callable(getattr(cls,attr,None))for attr in cls.__protocol_attrs__\n )\n \n def __subclasscheck__(cls,other):\n if cls is Protocol:\n return type.__subclasscheck__(cls,other)\n if(\n getattr(cls,'_is_protocol',False)\n and not _allow_reckless_class_checks()\n ):\n if not isinstance(other,type):\n \n raise TypeError('issubclass() arg 1 must be a class')\n if(\n not cls.__callable_proto_members_only__\n and cls.__dict__.get(\"__subclasshook__\")is _proto_hook\n ):\n raise TypeError(\n \"Protocols with non-method members don't support issubclass()\"\n )\n if not getattr(cls,'_is_runtime_protocol',False):\n raise TypeError(\n \"Instance and class checks can only be used with \"\n \"@runtime_checkable protocols\"\n )\n return super().__subclasscheck__(other)\n \n def __instancecheck__(cls,instance):\n \n \n if cls is Protocol:\n return type.__instancecheck__(cls,instance)\n if not getattr(cls,\"_is_protocol\",False):\n \n return super().__instancecheck__(instance)\n \n if(\n not getattr(cls,'_is_runtime_protocol',False)and\n not _allow_reckless_class_checks()\n ):\n raise TypeError(\"Instance and class checks can only be used with\"\n \" @runtime_checkable protocols\")\n \n if super().__instancecheck__(instance):\n return True\n \n getattr_static=_lazy_load_getattr_static()\n for attr in cls.__protocol_attrs__:\n try:\n val=getattr_static(instance,attr)\n except AttributeError:\n break\n if val is None and callable(getattr(cls,attr,None)):\n break\n else:\n return True\n \n return False\n \n \n@classmethod\ndef _proto_hook(cls,other):\n if not cls.__dict__.get('_is_protocol',False):\n return NotImplemented\n \n for attr in cls.__protocol_attrs__:\n for base in other.__mro__:\n \n if attr in base.__dict__:\n if base.__dict__[attr]is None:\n return NotImplemented\n break\n \n \n annotations=getattr(base,'__annotations__',{})\n if(isinstance(annotations,collections.abc.Mapping)and\n attr in annotations and\n issubclass(other,Generic)and getattr(other,'_is_protocol',False)):\n break\n else:\n return NotImplemented\n return True\n \n 
\nclass Protocol(Generic,metaclass=_ProtocolMeta):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=()\n _is_protocol=True\n _is_runtime_protocol=False\n \n def __init_subclass__(cls,*args,**kwargs):\n super().__init_subclass__(*args,**kwargs)\n \n \n if not cls.__dict__.get('_is_protocol',False):\n cls._is_protocol=any(b is Protocol for b in cls.__bases__)\n \n \n if '__subclasshook__'not in cls.__dict__:\n cls.__subclasshook__=_proto_hook\n \n \n if cls._is_protocol and cls.__init__ is Protocol.__init__:\n cls.__init__=_no_init_or_replace_init\n \n \nclass _AnnotatedAlias(_NotIterable,_GenericAlias,_root=True):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,origin,metadata):\n if isinstance(origin,_AnnotatedAlias):\n metadata=origin.__metadata__+metadata\n origin=origin.__origin__\n super().__init__(origin,origin,name='Annotated')\n self.__metadata__=metadata\n \n def copy_with(self,params):\n assert len(params)==1\n new_type=params[0]\n return _AnnotatedAlias(new_type,self.__metadata__)\n \n def __repr__(self):\n return \"typing.Annotated[{}, {}]\".format(\n _type_repr(self.__origin__),\n \", \".join(repr(a)for a in self.__metadata__)\n )\n \n def __reduce__(self):\n return operator.getitem,(\n Annotated,(self.__origin__,)+self.__metadata__\n )\n \n def __eq__(self,other):\n if not isinstance(other,_AnnotatedAlias):\n return NotImplemented\n return(self.__origin__ ==other.__origin__\n and self.__metadata__ ==other.__metadata__)\n \n def __hash__(self):\n return hash((self.__origin__,self.__metadata__))\n \n def __getattr__(self,attr):\n if attr in{'__name__','__qualname__'}:\n return 'Annotated'\n return super().__getattr__(attr)\n \n def __mro_entries__(self,bases):\n return(self.__origin__,)\n \n \nclass Annotated:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n def __new__(cls,*args,**kwargs):\n raise TypeError(\"Type Annotated cannot be instantiated.\")\n \n @_tp_cache\n def __class_getitem__(cls,params):\n if not isinstance(params,tuple)or len(params)<2:\n raise TypeError(\"Annotated[...] should be used \"\n \"with at least two arguments (a type and an \"\n \"annotation).\")\n if _is_unpacked_typevartuple(params[0]):\n raise TypeError(\"Annotated[...] 
should not be used with an \"\n \"unpacked TypeVarTuple\")\n msg=\"Annotated[t, ...]: t must be a type.\"\n origin=_type_check(params[0],msg,allow_special_forms=True)\n metadata=tuple(params[1:])\n return _AnnotatedAlias(origin,metadata)\n \n def __init_subclass__(cls,*args,**kwargs):\n raise TypeError(\n \"Cannot subclass {}.Annotated\".format(cls.__module__)\n )\n \n \ndef runtime_checkable(cls):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not issubclass(cls,Generic)or not getattr(cls,'_is_protocol',False):\n raise TypeError('@runtime_checkable can be only applied to protocol classes,'\n ' got %r'%cls)\n cls._is_runtime_protocol=True\n return cls\n \n \ndef cast(typ,val):\n ''\n\n\n\n\n\n \n return val\n \n \ndef assert_type(val,typ,/):\n ''\n\n\n\n\n\n\n\n\n\n\n \n return val\n \n \n_allowed_types=(types.FunctionType,types.BuiltinFunctionType,\ntypes.MethodType,types.ModuleType,\nWrapperDescriptorType,MethodWrapperType,MethodDescriptorType)\n\n\ndef get_type_hints(obj,globalns=None,localns=None,include_extras=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if getattr(obj,'__no_type_check__',None):\n return{}\n \n if isinstance(obj,type):\n hints={}\n for base in reversed(obj.__mro__):\n if globalns is None:\n base_globals=getattr(sys.modules.get(base.__module__,None),'__dict__',{})\n else:\n base_globals=globalns\n ann=base.__dict__.get('__annotations__',{})\n if isinstance(ann,types.GetSetDescriptorType):\n ann={}\n base_locals=dict(vars(base))if localns is None else localns\n if localns is None and globalns is None:\n \n \n \n \n \n \n base_globals,base_locals=base_locals,base_globals\n for name,value in ann.items():\n if value is None:\n value=type(None)\n if isinstance(value,str):\n value=ForwardRef(value,is_argument=False,is_class=True)\n value=_eval_type(value,base_globals,base_locals)\n hints[name]=value\n return hints if include_extras else{k:_strip_annotations(t)for k,t in hints.items()}\n \n if globalns is None:\n if isinstance(obj,types.ModuleType):\n globalns=obj.__dict__\n else:\n nsobj=obj\n \n while hasattr(nsobj,'__wrapped__'):\n nsobj=nsobj.__wrapped__\n globalns=getattr(nsobj,'__globals__',{})\n if localns is None:\n localns=globalns\n elif localns is None:\n localns=globalns\n hints=getattr(obj,'__annotations__',None)\n if hints is None:\n \n if isinstance(obj,_allowed_types):\n return{}\n else:\n raise TypeError('{!r} is not a module, class, method, '\n 'or function.'.format(obj))\n hints=dict(hints)\n for name,value in hints.items():\n if value is None:\n value=type(None)\n if isinstance(value,str):\n \n \n value=ForwardRef(\n value,\n is_argument=not isinstance(obj,types.ModuleType),\n is_class=False,\n )\n hints[name]=_eval_type(value,globalns,localns)\n return hints if include_extras else{k:_strip_annotations(t)for k,t in hints.items()}\n \n \ndef _strip_annotations(t):\n ''\n if isinstance(t,_AnnotatedAlias):\n return _strip_annotations(t.__origin__)\n if hasattr(t,\"__origin__\")and t.__origin__ in(Required,NotRequired):\n return _strip_annotations(t.__args__[0])\n if isinstance(t,_GenericAlias):\n stripped_args=tuple(_strip_annotations(a)for a in t.__args__)\n if stripped_args ==t.__args__:\n return t\n return t.copy_with(stripped_args)\n if isinstance(t,GenericAlias):\n stripped_args=tuple(_strip_annotations(a)for a in t.__args__)\n if stripped_args ==t.__args__:\n return t\n return GenericAlias(t.__origin__,stripped_args)\n if isinstance(t,types.UnionType):\n stripped_args=tuple(_strip_annotations(a)for a in t.__args__)\n if 
stripped_args ==t.__args__:\n return t\n return functools.reduce(operator.or_,stripped_args)\n \n return t\n \n \ndef get_origin(tp):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(tp,_AnnotatedAlias):\n return Annotated\n if isinstance(tp,(_BaseGenericAlias,GenericAlias,\n ParamSpecArgs,ParamSpecKwargs)):\n return tp.__origin__\n if tp is Generic:\n return Generic\n if isinstance(tp,types.UnionType):\n return types.UnionType\n return None\n \n \ndef get_args(tp):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(tp,_AnnotatedAlias):\n return(tp.__origin__,)+tp.__metadata__\n if isinstance(tp,(_GenericAlias,GenericAlias)):\n res=tp.__args__\n if _should_unflatten_callable_args(tp,res):\n res=(list(res[:-1]),res[-1])\n return res\n if isinstance(tp,types.UnionType):\n return tp.__args__\n return()\n \n \ndef is_typeddict(tp):\n ''\n\n\n\n\n\n\n\n\n\n \n return isinstance(tp,_TypedDictMeta)\n \n \n_ASSERT_NEVER_REPR_MAX_LENGTH=100\n\n\ndef assert_never(arg:Never,/)->Never:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n value=repr(arg)\n if len(value)>_ASSERT_NEVER_REPR_MAX_LENGTH:\n value=value[:_ASSERT_NEVER_REPR_MAX_LENGTH]+'...'\n raise AssertionError(f\"Expected code to be unreachable, but got: {value}\")\n \n \ndef no_type_check(arg):\n ''\n\n\n\n\n\n\n \n if isinstance(arg,type):\n for key in dir(arg):\n obj=getattr(arg,key)\n if(\n not hasattr(obj,'__qualname__')\n or obj.__qualname__ !=f'{arg.__qualname__}.{obj.__name__}'\n or getattr(obj,'__module__',None)!=arg.__module__\n ):\n \n \n \n continue\n \n if isinstance(obj,types.FunctionType):\n obj.__no_type_check__=True\n if isinstance(obj,types.MethodType):\n obj.__func__.__no_type_check__=True\n \n if isinstance(obj,type):\n no_type_check(obj)\n try:\n arg.__no_type_check__=True\n except TypeError:\n pass\n return arg\n \n \ndef no_type_check_decorator(decorator):\n ''\n\n\n\n \n @functools.wraps(decorator)\n def wrapped_decorator(*args,**kwds):\n func=decorator(*args,**kwds)\n func=no_type_check(func)\n return func\n \n return wrapped_decorator\n \n \ndef _overload_dummy(*args,**kwds):\n ''\n raise NotImplementedError(\n \"You should not call an overloaded function. 
\"\n \"A series of @overload-decorated functions \"\n \"outside a stub module should always be followed \"\n \"by an implementation that is not @overload-ed.\")\n \n \n \n_overload_registry=defaultdict(functools.partial(defaultdict,dict))\n\n\ndef overload(func):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n f=getattr(func,\"__func__\",func)\n try:\n _overload_registry[f.__module__][f.__qualname__][f.__code__.co_firstlineno]=func\n except AttributeError:\n \n pass\n return _overload_dummy\n \n \ndef get_overloads(func):\n ''\n \n f=getattr(func,\"__func__\",func)\n if f.__module__ not in _overload_registry:\n return[]\n mod_dict=_overload_registry[f.__module__]\n if f.__qualname__ not in mod_dict:\n return[]\n return list(mod_dict[f.__qualname__].values())\n \n \ndef clear_overloads():\n ''\n _overload_registry.clear()\n \n \ndef final(f):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try:\n f.__final__=True\n except(AttributeError,TypeError):\n \n \n \n pass\n return f\n \n \n \n \n \nT=TypeVar('T')\nKT=TypeVar('KT')\nVT=TypeVar('VT')\nT_co=TypeVar('T_co',covariant=True)\nV_co=TypeVar('V_co',covariant=True)\nVT_co=TypeVar('VT_co',covariant=True)\nT_contra=TypeVar('T_contra',contravariant=True)\n\nCT_co=TypeVar('CT_co',covariant=True,bound=type)\n\n\n\n\nAnyStr=TypeVar('AnyStr',bytes,str)\n\n\n\n_alias=_SpecialGenericAlias\n\nHashable=_alias(collections.abc.Hashable,0)\nAwaitable=_alias(collections.abc.Awaitable,1)\nCoroutine=_alias(collections.abc.Coroutine,3)\nAsyncIterable=_alias(collections.abc.AsyncIterable,1)\nAsyncIterator=_alias(collections.abc.AsyncIterator,1)\nIterable=_alias(collections.abc.Iterable,1)\nIterator=_alias(collections.abc.Iterator,1)\nReversible=_alias(collections.abc.Reversible,1)\nSized=_alias(collections.abc.Sized,0)\nContainer=_alias(collections.abc.Container,1)\nCollection=_alias(collections.abc.Collection,1)\nCallable=_CallableType(collections.abc.Callable,2)\nCallable.__doc__=\\\n\"\"\"Deprecated alias to collections.abc.Callable.\n\n Callable[[int], str] signifies a function that takes a single\n parameter of type int and returns a str.\n\n The subscription syntax must always be used with exactly two\n values: the argument list and the return type.\n The argument list must be a list of types, a ParamSpec,\n Concatenate or ellipsis. The return type must be a single type.\n\n There is no syntax to indicate optional or keyword arguments;\n such function types are rarely used as callback types.\n \"\"\"\nAbstractSet=_alias(collections.abc.Set,1,name='AbstractSet')\nMutableSet=_alias(collections.abc.MutableSet,1)\n\nMapping=_alias(collections.abc.Mapping,2)\nMutableMapping=_alias(collections.abc.MutableMapping,2)\nSequence=_alias(collections.abc.Sequence,1)\nMutableSequence=_alias(collections.abc.MutableSequence,1)\nByteString=_DeprecatedGenericAlias(\ncollections.abc.ByteString,0,removal_version=(3,14)\n)\n\nTuple=_TupleType(tuple,-1,inst=False,name='Tuple')\nTuple.__doc__=\\\n\"\"\"Deprecated alias to builtins.tuple.\n\n Tuple[X, Y] is the cross-product type of X and Y.\n\n Example: Tuple[T1, T2] is a tuple of two elements corresponding\n to type variables T1 and T2. 
Tuple[int, float, str] is a tuple\n of an int, a float and a string.\n\n To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].\n \"\"\"\nList=_alias(list,1,inst=False,name='List')\nDeque=_alias(collections.deque,1,name='Deque')\nSet=_alias(set,1,inst=False,name='Set')\nFrozenSet=_alias(frozenset,1,inst=False,name='FrozenSet')\nMappingView=_alias(collections.abc.MappingView,1)\nKeysView=_alias(collections.abc.KeysView,1)\nItemsView=_alias(collections.abc.ItemsView,2)\nValuesView=_alias(collections.abc.ValuesView,1)\nContextManager=_alias(contextlib.AbstractContextManager,1,name='ContextManager')\nAsyncContextManager=_alias(contextlib.AbstractAsyncContextManager,1,name='AsyncContextManager')\nDict=_alias(dict,2,inst=False,name='Dict')\nDefaultDict=_alias(collections.defaultdict,2,name='DefaultDict')\nOrderedDict=_alias(collections.OrderedDict,2)\nCounter=_alias(collections.Counter,1)\nChainMap=_alias(collections.ChainMap,2)\nGenerator=_alias(collections.abc.Generator,3)\nAsyncGenerator=_alias(collections.abc.AsyncGenerator,2)\nType=_alias(type,1,inst=False,name='Type')\nType.__doc__=\\\n\"\"\"Deprecated alias to builtins.type.\n\n builtins.type or typing.Type can be used to annotate class objects.\n For example, suppose we have the following classes::\n\n class User: ... # Abstract base for User classes\n class BasicUser(User): ...\n class ProUser(User): ...\n class TeamUser(User): ...\n\n And a function that takes a class argument that's a subclass of\n User and returns an instance of the corresponding class::\n\n def new_user[U](user_class: Type[U]) -> U:\n user = user_class()\n # (Here we could write the user object to a database)\n return user\n\n joe = new_user(BasicUser)\n\n At this point the type checker knows that joe has type BasicUser.\n \"\"\"\n\n\n@runtime_checkable\nclass SupportsInt(Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __int__(self)->int:\n pass\n \n \n@runtime_checkable\nclass SupportsFloat(Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __float__(self)->float:\n pass\n \n \n@runtime_checkable\nclass SupportsComplex(Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __complex__(self)->complex:\n pass\n \n \n@runtime_checkable\nclass SupportsBytes(Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __bytes__(self)->bytes:\n pass\n \n \n@runtime_checkable\nclass SupportsIndex(Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __index__(self)->int:\n pass\n \n \n@runtime_checkable\nclass SupportsAbs[T](Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __abs__(self)->T:\n pass\n \n \n@runtime_checkable\nclass SupportsRound[T](Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __round__(self,ndigits:int=0)->T:\n pass\n \n \ndef _make_nmtuple(name,types,module,defaults=()):\n fields=[n for n,t in types]\n types={n:_type_check(t,f\"field {n} annotation must be a type\")\n for n,t in types}\n nm_tpl=collections.namedtuple(name,fields,\n defaults=defaults,module=module)\n nm_tpl.__annotations__=nm_tpl.__new__.__annotations__=types\n return nm_tpl\n \n \n \n_prohibited=frozenset({'__new__','__init__','__slots__','__getnewargs__',\n'_fields','_field_defaults',\n'_make','_replace','_asdict','_source'})\n\n_special=frozenset({'__module__','__name__','__annotations__'})\n\n\nclass NamedTupleMeta(type):\n def __new__(cls,typename,bases,ns):\n assert _NamedTuple in bases\n for base in bases:\n if base is not _NamedTuple and base is not Generic:\n raise TypeError(\n 'can only inherit 
from a NamedTuple type and Generic')\n bases=tuple(tuple if base is _NamedTuple else base for base in bases)\n types=ns.get('__annotations__',{})\n default_names=[]\n for field_name in types:\n if field_name in ns:\n default_names.append(field_name)\n elif default_names:\n raise TypeError(f\"Non-default namedtuple field {field_name} \"\n f\"cannot follow default field\"\n f\"{'s'if len(default_names)>1 else ''} \"\n f\"{', '.join(default_names)}\")\n nm_tpl=_make_nmtuple(typename,types.items(),\n defaults=[ns[n]for n in default_names],\n module=ns['__module__'])\n nm_tpl.__bases__=bases\n if Generic in bases:\n class_getitem=_generic_class_getitem\n nm_tpl.__class_getitem__=classmethod(class_getitem)\n \n for key in ns:\n if key in _prohibited:\n raise AttributeError(\"Cannot overwrite NamedTuple attribute \"+key)\n elif key not in _special and key not in nm_tpl._fields:\n setattr(nm_tpl,key,ns[key])\n if Generic in bases:\n nm_tpl.__init_subclass__()\n return nm_tpl\n \n \ndef NamedTuple(typename,fields=None,/,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if fields is None:\n fields=kwargs.items()\n elif kwargs:\n raise TypeError(\"Either list of fields or keywords\"\n \" can be provided to NamedTuple, not both\")\n nt=_make_nmtuple(typename,fields,module=_caller())\n nt.__orig_bases__=(NamedTuple,)\n return nt\n \n_NamedTuple=type.__new__(NamedTupleMeta,'NamedTuple',(),{})\n\ndef _namedtuple_mro_entries(bases):\n assert NamedTuple in bases\n return(_NamedTuple,)\n \nNamedTuple.__mro_entries__=_namedtuple_mro_entries\n\n\nclass _TypedDictMeta(type):\n def __new__(cls,name,bases,ns,total=True):\n ''\n\n\n\n\n\n \n for base in bases:\n if type(base)is not _TypedDictMeta and base is not Generic:\n raise TypeError('cannot inherit from both a TypedDict type '\n 'and a non-TypedDict base class')\n \n if any(issubclass(b,Generic)for b in bases):\n generic_base=(Generic,)\n else:\n generic_base=()\n \n tp_dict=type.__new__(_TypedDictMeta,name,(*generic_base,dict),ns)\n \n if not hasattr(tp_dict,'__orig_bases__'):\n tp_dict.__orig_bases__=bases\n \n annotations={}\n own_annotations=ns.get('__annotations__',{})\n msg=\"TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type\"\n own_annotations={\n n:_type_check(tp,msg,module=tp_dict.__module__)\n for n,tp in own_annotations.items()\n }\n required_keys=set()\n optional_keys=set()\n \n for base in bases:\n annotations.update(base.__dict__.get('__annotations__',{}))\n required_keys.update(base.__dict__.get('__required_keys__',()))\n optional_keys.update(base.__dict__.get('__optional_keys__',()))\n \n annotations.update(own_annotations)\n for annotation_key,annotation_type in own_annotations.items():\n annotation_origin=get_origin(annotation_type)\n if annotation_origin is Annotated:\n annotation_args=get_args(annotation_type)\n if annotation_args:\n annotation_type=annotation_args[0]\n annotation_origin=get_origin(annotation_type)\n \n if annotation_origin is Required:\n required_keys.add(annotation_key)\n elif annotation_origin is NotRequired:\n optional_keys.add(annotation_key)\n elif total:\n required_keys.add(annotation_key)\n else:\n optional_keys.add(annotation_key)\n \n tp_dict.__annotations__=annotations\n tp_dict.__required_keys__=frozenset(required_keys)\n tp_dict.__optional_keys__=frozenset(optional_keys)\n if not hasattr(tp_dict,'__total__'):\n tp_dict.__total__=total\n return tp_dict\n \n __call__=dict\n \n def __subclasscheck__(cls,other):\n \n raise TypeError('TypedDict does not support instance and class checks')\n \n 
__instancecheck__=__subclasscheck__\n \n \ndef TypedDict(typename,fields=None,/,*,total=True,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if fields is None:\n fields=kwargs\n elif kwargs:\n raise TypeError(\"TypedDict takes either a dict or keyword arguments,\"\n \" but not both\")\n if kwargs:\n warnings.warn(\n \"The kwargs-based syntax for TypedDict definitions is deprecated \"\n \"in Python 3.11, will be removed in Python 3.13, and may not be \"\n \"understood by third-party type checkers.\",\n DeprecationWarning,\n stacklevel=2,\n )\n \n ns={'__annotations__':dict(fields)}\n module=_caller()\n if module is not None:\n \n ns['__module__']=module\n \n td=_TypedDictMeta(typename,(),ns,total=total)\n td.__orig_bases__=(TypedDict,)\n return td\n \n_TypedDict=type.__new__(_TypedDictMeta,'TypedDict',(),{})\nTypedDict.__mro_entries__=lambda bases:(_TypedDict,)\n\n\n@_SpecialForm\ndef Required(self,parameters):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n item=_type_check(parameters,f'{self._name} accepts only a single type.')\n return _GenericAlias(self,(item,))\n \n \n@_SpecialForm\ndef NotRequired(self,parameters):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n item=_type_check(parameters,f'{self._name} accepts only a single type.')\n return _GenericAlias(self,(item,))\n \n \nclass NewType:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __call__=_idfunc\n \n def __init__(self,name,tp):\n self.__qualname__=name\n if '.'in name:\n name=name.rpartition('.')[-1]\n self.__name__=name\n self.__supertype__=tp\n def_mod=_caller()\n if def_mod !='typing':\n self.__module__=def_mod\n \n def __mro_entries__(self,bases):\n \n \n superclass_name=self.__name__\n \n class Dummy:\n def __init_subclass__(cls):\n subclass_name=cls.__name__\n raise TypeError(\n f\"Cannot subclass an instance of NewType. 
Perhaps you were looking for: \"\n f\"`{subclass_name} = NewType({subclass_name !r}, {superclass_name})`\"\n )\n \n return(Dummy,)\n \n def __repr__(self):\n return f'{self.__module__}.{self.__qualname__}'\n \n def __reduce__(self):\n return self.__qualname__\n \n def __or__(self,other):\n return Union[self,other]\n \n def __ror__(self,other):\n return Union[other,self]\n \n \n \nText=str\n\n\n\nTYPE_CHECKING=False\n\n\nclass IO(Generic[AnyStr]):\n ''\n\n\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n @property\n @abstractmethod\n def mode(self)->str:\n pass\n \n @property\n @abstractmethod\n def name(self)->str:\n pass\n \n @abstractmethod\n def close(self)->None:\n pass\n \n @property\n @abstractmethod\n def closed(self)->bool:\n pass\n \n @abstractmethod\n def fileno(self)->int:\n pass\n \n @abstractmethod\n def flush(self)->None:\n pass\n \n @abstractmethod\n def isatty(self)->bool:\n pass\n \n @abstractmethod\n def read(self,n:int=-1)->AnyStr:\n pass\n \n @abstractmethod\n def readable(self)->bool:\n pass\n \n @abstractmethod\n def readline(self,limit:int=-1)->AnyStr:\n pass\n \n @abstractmethod\n def readlines(self,hint:int=-1)->List[AnyStr]:\n pass\n \n @abstractmethod\n def seek(self,offset:int,whence:int=0)->int:\n pass\n \n @abstractmethod\n def seekable(self)->bool:\n pass\n \n @abstractmethod\n def tell(self)->int:\n pass\n \n @abstractmethod\n def truncate(self,size:int=None)->int:\n pass\n \n @abstractmethod\n def writable(self)->bool:\n pass\n \n @abstractmethod\n def write(self,s:AnyStr)->int:\n pass\n \n @abstractmethod\n def writelines(self,lines:List[AnyStr])->None:\n pass\n \n @abstractmethod\n def __enter__(self)->'IO[AnyStr]':\n pass\n \n @abstractmethod\n def __exit__(self,type,value,traceback)->None:\n pass\n \n \nclass BinaryIO(IO[bytes]):\n ''\n \n __slots__=()\n \n @abstractmethod\n def write(self,s:Union[bytes,bytearray])->int:\n pass\n \n @abstractmethod\n def __enter__(self)->'BinaryIO':\n pass\n \n \nclass TextIO(IO[str]):\n ''\n \n __slots__=()\n \n @property\n @abstractmethod\n def buffer(self)->BinaryIO:\n pass\n \n @property\n @abstractmethod\n def encoding(self)->str:\n pass\n \n @property\n @abstractmethod\n def errors(self)->Optional[str]:\n pass\n \n @property\n @abstractmethod\n def line_buffering(self)->bool:\n pass\n \n @property\n @abstractmethod\n def newlines(self)->Any:\n pass\n \n @abstractmethod\n def __enter__(self)->'TextIO':\n pass\n \n \nclass _DeprecatedType(type):\n def __getattribute__(cls,name):\n if name not in(\"__dict__\",\"__module__\")and name in cls.__dict__:\n warnings.warn(\n f\"{cls.__name__} is deprecated, import directly \"\n f\"from typing instead. 
{cls.__name__} will be removed \"\n \"in Python 3.12.\",\n DeprecationWarning,\n stacklevel=2,\n )\n return super().__getattribute__(name)\n \n \nclass io(metaclass=_DeprecatedType):\n ''\n \n __all__=['IO','TextIO','BinaryIO']\n IO=IO\n TextIO=TextIO\n BinaryIO=BinaryIO\n \n \nio.__name__=__name__+'.io'\nsys.modules[io.__name__]=io\n\nPattern=_alias(stdlib_re.Pattern,1)\nMatch=_alias(stdlib_re.Match,1)\n\nclass re(metaclass=_DeprecatedType):\n ''\n \n __all__=['Pattern','Match']\n Pattern=Pattern\n Match=Match\n \n \nre.__name__=__name__+'.re'\nsys.modules[re.__name__]=re\n\n\ndef reveal_type[T](obj:T,/)->T:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n print(f\"Runtime type is {type(obj).__name__ !r}\",file=sys.stderr)\n return obj\n \n \nclass _IdentityCallable(Protocol):\n def __call__[T](self,arg:T,/)->T:\n ...\n \n \ndef dataclass_transform(\n*,\neq_default:bool=True,\norder_default:bool=False,\nkw_only_default:bool=False,\nfrozen_default:bool=False,\nfield_specifiers:tuple[type[Any]|Callable[...,Any],...]=(),\n**kwargs:Any,\n)->_IdentityCallable:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def decorator(cls_or_fn):\n cls_or_fn.__dataclass_transform__={\n \"eq_default\":eq_default,\n \"order_default\":order_default,\n \"kw_only_default\":kw_only_default,\n \"frozen_default\":frozen_default,\n \"field_specifiers\":field_specifiers,\n \"kwargs\":kwargs,\n }\n return cls_or_fn\n return decorator\n \n \ntype _Func=Callable[...,Any]\n\n\ndef override[F:_Func](method:F,/)->F:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try:\n method.__override__=True\n except(AttributeError,TypeError):\n \n \n \n pass\n return method\n", ["_typing", "abc", "collections", "collections.abc", "contextlib", "copyreg", "functools", "inspect", "operator", "re", "sys", "types", "warnings"]], "socket": [".py", "\n\n\n\"\"\"\\\nThis module provides socket operations and some related functions.\nOn Unix, it supports IP (Internet Protocol) and Unix domain sockets.\nOn other systems, it only supports IP. Functions specific for a\nsocket are available as methods of the socket object.\n\nFunctions:\n\nsocket() -- create a new socket object\nsocketpair() -- create a pair of new socket objects [*]\nfromfd() -- create a socket object from an open file descriptor [*]\nsend_fds() -- Send file descriptor to the socket.\nrecv_fds() -- Receive file descriptors from the socket.\nfromshare() -- create a socket object from data received from socket.share() [*]\ngethostname() -- return the current hostname\ngethostbyname() -- map a hostname to its IP number\ngethostbyaddr() -- map an IP number or hostname to DNS info\ngetservbyname() -- map a service name and a protocol name to a port number\ngetprotobyname() -- map a protocol name (e.g. 
'tcp') to a number\nntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order\nhtons(), htonl() -- convert 16, 32 bit int from host to network byte order\ninet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format\ninet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89)\nsocket.getdefaulttimeout() -- get the default timeout value\nsocket.setdefaulttimeout() -- set the default timeout value\ncreate_connection() -- connects to an address, with an optional timeout and\n optional source address.\n\n [*] not available on all platforms!\n\nSpecial objects:\n\nSocketType -- type object for socket objects\nerror -- exception raised for I/O errors\nhas_ipv6 -- boolean value indicating if IPv6 is supported\n\nIntEnum constants:\n\nAF_INET, AF_UNIX -- socket domains (first argument to socket() call)\nSOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument)\n\nInteger constants:\n\nMany other constants may be defined; these may be used in calls to\nthe setsockopt() and getsockopt() methods.\n\"\"\"\n\nimport _socket\nfrom _socket import *\n\nimport os,sys,io,selectors\nfrom enum import IntEnum,IntFlag\n\ntry:\n import errno\nexcept ImportError:\n errno=None\nEBADF=getattr(errno,'EBADF',9)\nEAGAIN=getattr(errno,'EAGAIN',11)\nEWOULDBLOCK=getattr(errno,'EWOULDBLOCK',11)\n\n__all__=[\"fromfd\",\"getfqdn\",\"create_connection\",\"create_server\",\n\"has_dualstack_ipv6\",\"AddressFamily\",\"SocketKind\"]\n__all__.extend(os._get_exports_list(_socket))\n\n\n\n\n\n\n\nIntEnum._convert_(\n'AddressFamily',\n__name__,\nlambda C:C.isupper()and C.startswith('AF_'))\n\nIntEnum._convert_(\n'SocketKind',\n__name__,\nlambda C:C.isupper()and C.startswith('SOCK_'))\n\nIntFlag._convert_(\n'MsgFlag',\n__name__,\nlambda C:C.isupper()and C.startswith('MSG_'))\n\nIntFlag._convert_(\n'AddressInfo',\n__name__,\nlambda C:C.isupper()and C.startswith('AI_'))\n\n_LOCALHOST='127.0.0.1'\n_LOCALHOST_V6='::1'\n\n\ndef _intenum_converter(value,enum_klass):\n ''\n\n\n \n try:\n return enum_klass(value)\n except ValueError:\n return value\n \n \n \nif sys.platform.lower().startswith(\"win\"):\n errorTab={}\n errorTab[6]=\"Specified event object handle is invalid.\"\n errorTab[8]=\"Insufficient memory available.\"\n errorTab[87]=\"One or more parameters are invalid.\"\n errorTab[995]=\"Overlapped operation aborted.\"\n errorTab[996]=\"Overlapped I/O event object not in signaled state.\"\n errorTab[997]=\"Overlapped operation will complete later.\"\n errorTab[10004]=\"The operation was interrupted.\"\n errorTab[10009]=\"A bad file handle was passed.\"\n errorTab[10013]=\"Permission denied.\"\n errorTab[10014]=\"A fault occurred on the network??\"\n errorTab[10022]=\"An invalid operation was attempted.\"\n errorTab[10024]=\"Too many open files.\"\n errorTab[10035]=\"The socket operation would block.\"\n errorTab[10036]=\"A blocking operation is already in progress.\"\n errorTab[10037]=\"Operation already in progress.\"\n errorTab[10038]=\"Socket operation on nonsocket.\"\n errorTab[10039]=\"Destination address required.\"\n errorTab[10040]=\"Message too long.\"\n errorTab[10041]=\"Protocol wrong type for socket.\"\n errorTab[10042]=\"Bad protocol option.\"\n errorTab[10043]=\"Protocol not supported.\"\n errorTab[10044]=\"Socket type not supported.\"\n errorTab[10045]=\"Operation not supported.\"\n errorTab[10046]=\"Protocol family not supported.\"\n errorTab[10047]=\"Address family not supported by protocol family.\"\n errorTab[10048]=\"The network address is in use.\"\n 
errorTab[10049]=\"Cannot assign requested address.\"\n errorTab[10050]=\"Network is down.\"\n errorTab[10051]=\"Network is unreachable.\"\n errorTab[10052]=\"Network dropped connection on reset.\"\n errorTab[10053]=\"Software caused connection abort.\"\n errorTab[10054]=\"The connection has been reset.\"\n errorTab[10055]=\"No buffer space available.\"\n errorTab[10056]=\"Socket is already connected.\"\n errorTab[10057]=\"Socket is not connected.\"\n errorTab[10058]=\"The network has been shut down.\"\n errorTab[10059]=\"Too many references.\"\n errorTab[10060]=\"The operation timed out.\"\n errorTab[10061]=\"Connection refused.\"\n errorTab[10062]=\"Cannot translate name.\"\n errorTab[10063]=\"The name is too long.\"\n errorTab[10064]=\"The host is down.\"\n errorTab[10065]=\"The host is unreachable.\"\n errorTab[10066]=\"Directory not empty.\"\n errorTab[10067]=\"Too many processes.\"\n errorTab[10068]=\"User quota exceeded.\"\n errorTab[10069]=\"Disk quota exceeded.\"\n errorTab[10070]=\"Stale file handle reference.\"\n errorTab[10071]=\"Item is remote.\"\n errorTab[10091]=\"Network subsystem is unavailable.\"\n errorTab[10092]=\"Winsock.dll version out of range.\"\n errorTab[10093]=\"Successful WSAStartup not yet performed.\"\n errorTab[10101]=\"Graceful shutdown in progress.\"\n errorTab[10102]=\"No more results from WSALookupServiceNext.\"\n errorTab[10103]=\"Call has been canceled.\"\n errorTab[10104]=\"Procedure call table is invalid.\"\n errorTab[10105]=\"Service provider is invalid.\"\n errorTab[10106]=\"Service provider failed to initialize.\"\n errorTab[10107]=\"System call failure.\"\n errorTab[10108]=\"Service not found.\"\n errorTab[10109]=\"Class type not found.\"\n errorTab[10110]=\"No more results from WSALookupServiceNext.\"\n errorTab[10111]=\"Call was canceled.\"\n errorTab[10112]=\"Database query was refused.\"\n errorTab[11001]=\"Host not found.\"\n errorTab[11002]=\"Nonauthoritative host not found.\"\n errorTab[11003]=\"This is a nonrecoverable error.\"\n errorTab[11004]=\"Valid name, no data record requested type.\"\n errorTab[11005]=\"QoS receivers.\"\n errorTab[11006]=\"QoS senders.\"\n errorTab[11007]=\"No QoS senders.\"\n errorTab[11008]=\"QoS no receivers.\"\n errorTab[11009]=\"QoS request confirmed.\"\n errorTab[11010]=\"QoS admission error.\"\n errorTab[11011]=\"QoS policy failure.\"\n errorTab[11012]=\"QoS bad style.\"\n errorTab[11013]=\"QoS bad object.\"\n errorTab[11014]=\"QoS traffic control error.\"\n errorTab[11015]=\"QoS generic error.\"\n errorTab[11016]=\"QoS service type error.\"\n errorTab[11017]=\"QoS flowspec error.\"\n errorTab[11018]=\"Invalid QoS provider buffer.\"\n errorTab[11019]=\"Invalid QoS filter style.\"\n errorTab[11020]=\"Invalid QoS filter style.\"\n errorTab[11021]=\"Incorrect QoS filter count.\"\n errorTab[11022]=\"Invalid QoS object length.\"\n errorTab[11023]=\"Incorrect QoS flow count.\"\n errorTab[11024]=\"Unrecognized QoS object.\"\n errorTab[11025]=\"Invalid QoS policy object.\"\n errorTab[11026]=\"Invalid QoS flow descriptor.\"\n errorTab[11027]=\"Invalid QoS provider-specific flowspec.\"\n errorTab[11028]=\"Invalid QoS provider-specific filterspec.\"\n errorTab[11029]=\"Invalid QoS shape discard mode object.\"\n errorTab[11030]=\"Invalid QoS shaping rate object.\"\n errorTab[11031]=\"Reserved policy QoS element type.\"\n __all__.append(\"errorTab\")\n \n \nclass _GiveupOnSendfile(Exception):pass\n\n\nclass socket(_socket.socket):\n\n ''\n \n __slots__=[\"__weakref__\",\"_io_refs\",\"_closed\"]\n \n def 
__init__(self,family=-1,type=-1,proto=-1,fileno=None):\n \n \n \n \n if fileno is None:\n if family ==-1:\n family=AF_INET\n if type ==-1:\n type=SOCK_STREAM\n if proto ==-1:\n proto=0\n _socket.socket.__init__(self,family,type,proto,fileno)\n self._io_refs=0\n self._closed=False\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n if not self._closed:\n self.close()\n \n def __repr__(self):\n ''\n\n \n closed=getattr(self,'_closed',False)\n s=\"<%s.%s%s fd=%i, family=%s, type=%s, proto=%i\"\\\n %(self.__class__.__module__,\n self.__class__.__qualname__,\n \" [closed]\"if closed else \"\",\n self.fileno(),\n self.family,\n self.type,\n self.proto)\n if not closed:\n try:\n laddr=self.getsockname()\n if laddr:\n s +=\", laddr=%s\"%str(laddr)\n except error:\n pass\n try:\n raddr=self.getpeername()\n if raddr:\n s +=\", raddr=%s\"%str(raddr)\n except error:\n pass\n s +='>'\n return s\n \n def __getstate__(self):\n raise TypeError(f\"cannot pickle {self.__class__.__name__ !r} object\")\n \n def dup(self):\n ''\n\n\n\n \n fd=dup(self.fileno())\n sock=self.__class__(self.family,self.type,self.proto,fileno=fd)\n sock.settimeout(self.gettimeout())\n return sock\n \n def accept(self):\n ''\n\n\n\n\n \n fd,addr=self._accept()\n sock=socket(self.family,self.type,self.proto,fileno=fd)\n \n \n \n if getdefaulttimeout()is None and self.gettimeout():\n sock.setblocking(True)\n return sock,addr\n \n def makefile(self,mode=\"r\",buffering=None,*,\n encoding=None,errors=None,newline=None):\n ''\n\n\n\n \n \n if not set(mode)<={\"r\",\"w\",\"b\"}:\n raise ValueError(\"invalid mode %r (only r, w, b allowed)\"%(mode,))\n writing=\"w\"in mode\n reading=\"r\"in mode or not writing\n assert reading or writing\n binary=\"b\"in mode\n rawmode=\"\"\n if reading:\n rawmode +=\"r\"\n if writing:\n rawmode +=\"w\"\n raw=SocketIO(self,rawmode)\n self._io_refs +=1\n if buffering is None:\n buffering=-1\n if buffering <0:\n buffering=io.DEFAULT_BUFFER_SIZE\n if buffering ==0:\n if not binary:\n raise ValueError(\"unbuffered streams must be binary\")\n return raw\n if reading and writing:\n buffer=io.BufferedRWPair(raw,raw,buffering)\n elif reading:\n buffer=io.BufferedReader(raw,buffering)\n else:\n assert writing\n buffer=io.BufferedWriter(raw,buffering)\n if binary:\n return buffer\n encoding=io.text_encoding(encoding)\n text=io.TextIOWrapper(buffer,encoding,errors,newline)\n text.mode=mode\n return text\n \n if hasattr(os,'sendfile'):\n \n def _sendfile_use_sendfile(self,file,offset=0,count=None):\n self._check_sendfile_params(file,offset,count)\n sockno=self.fileno()\n try:\n fileno=file.fileno()\n except(AttributeError,io.UnsupportedOperation)as err:\n raise _GiveupOnSendfile(err)\n try:\n fsize=os.fstat(fileno).st_size\n except OSError as err:\n raise _GiveupOnSendfile(err)\n if not fsize:\n return 0\n \n blocksize=min(count or fsize,2 **30)\n timeout=self.gettimeout()\n if timeout ==0:\n raise ValueError(\"non-blocking sockets are not supported\")\n \n \n \n if hasattr(selectors,'PollSelector'):\n selector=selectors.PollSelector()\n else:\n selector=selectors.SelectSelector()\n selector.register(sockno,selectors.EVENT_WRITE)\n \n total_sent=0\n \n selector_select=selector.select\n os_sendfile=os.sendfile\n try:\n while True:\n if timeout and not selector_select(timeout):\n raise TimeoutError('timed out')\n if count:\n blocksize=count -total_sent\n if blocksize <=0:\n break\n try:\n sent=os_sendfile(sockno,fileno,offset,blocksize)\n except BlockingIOError:\n if not timeout:\n \n \n 
selector_select()\n continue\n except OSError as err:\n if total_sent ==0:\n \n \n \n \n raise _GiveupOnSendfile(err)\n raise err from None\n else:\n if sent ==0:\n break\n offset +=sent\n total_sent +=sent\n return total_sent\n finally:\n if total_sent >0 and hasattr(file,'seek'):\n file.seek(offset)\n else:\n def _sendfile_use_sendfile(self,file,offset=0,count=None):\n raise _GiveupOnSendfile(\n \"os.sendfile() not available on this platform\")\n \n def _sendfile_use_send(self,file,offset=0,count=None):\n self._check_sendfile_params(file,offset,count)\n if self.gettimeout()==0:\n raise ValueError(\"non-blocking sockets are not supported\")\n if offset:\n file.seek(offset)\n blocksize=min(count,8192)if count else 8192\n total_sent=0\n \n file_read=file.read\n sock_send=self.send\n try:\n while True:\n if count:\n blocksize=min(count -total_sent,blocksize)\n if blocksize <=0:\n break\n data=memoryview(file_read(blocksize))\n if not data:\n break\n while True:\n try:\n sent=sock_send(data)\n except BlockingIOError:\n continue\n else:\n total_sent +=sent\n if sent 0 and hasattr(file,'seek'):\n file.seek(offset+total_sent)\n \n def _check_sendfile_params(self,file,offset,count):\n if 'b'not in getattr(file,'mode','b'):\n raise ValueError(\"file should be opened in binary mode\")\n if not self.type&SOCK_STREAM:\n raise ValueError(\"only SOCK_STREAM type sockets are supported\")\n if count is not None:\n if not isinstance(count,int):\n raise TypeError(\n \"count must be a positive integer (got {!r})\".format(count))\n if count <=0:\n raise ValueError(\n \"count must be a positive integer (got {!r})\".format(count))\n \n def sendfile(self,file,offset=0,count=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try:\n return self._sendfile_use_sendfile(file,offset,count)\n except _GiveupOnSendfile:\n return self._sendfile_use_send(file,offset,count)\n \n def _decref_socketios(self):\n if self._io_refs >0:\n self._io_refs -=1\n if self._closed:\n self.close()\n \n def _real_close(self,_ss=_socket.socket):\n \n _ss.close(self)\n \n def close(self):\n \n self._closed=True\n if self._io_refs <=0:\n self._real_close()\n \n def detach(self):\n ''\n\n\n\n\n \n self._closed=True\n return super().detach()\n \n @property\n def family(self):\n ''\n \n return _intenum_converter(super().family,AddressFamily)\n \n @property\n def type(self):\n ''\n \n return _intenum_converter(super().type,SocketKind)\n \n if os.name =='nt':\n def get_inheritable(self):\n return os.get_handle_inheritable(self.fileno())\n def set_inheritable(self,inheritable):\n os.set_handle_inheritable(self.fileno(),inheritable)\n else:\n def get_inheritable(self):\n return os.get_inheritable(self.fileno())\n def set_inheritable(self,inheritable):\n os.set_inheritable(self.fileno(),inheritable)\n get_inheritable.__doc__=\"Get the inheritable flag of the socket\"\n set_inheritable.__doc__=\"Set the inheritable flag of the socket\"\n \ndef fromfd(fd,family,type,proto=0):\n ''\n\n\n\n \n nfd=dup(fd)\n return socket(family,type,proto,nfd)\n \nif hasattr(_socket.socket,\"sendmsg\"):\n import array\n \n def send_fds(sock,buffers,fds,flags=0,address=None):\n ''\n\n\n \n return sock.sendmsg(buffers,[(_socket.SOL_SOCKET,\n _socket.SCM_RIGHTS,array.array(\"i\",fds))])\n __all__.append(\"send_fds\")\n \nif hasattr(_socket.socket,\"recvmsg\"):\n import array\n \n def recv_fds(sock,bufsize,maxfds,flags=0):\n ''\n\n\n\n\n \n \n fds=array.array(\"i\")\n msg,ancdata,flags,addr=sock.recvmsg(bufsize,\n _socket.CMSG_LEN(maxfds *fds.itemsize))\n for 
cmsg_level,cmsg_type,cmsg_data in ancdata:\n if(cmsg_level ==_socket.SOL_SOCKET and cmsg_type ==_socket.SCM_RIGHTS):\n fds.frombytes(cmsg_data[:\n len(cmsg_data)-(len(cmsg_data)%fds.itemsize)])\n \n return msg,list(fds),flags,addr\n __all__.append(\"recv_fds\")\n \nif hasattr(_socket.socket,\"share\"):\n def fromshare(info):\n ''\n\n\n\n \n return socket(0,0,0,info)\n __all__.append(\"fromshare\")\n \nif hasattr(_socket,\"socketpair\"):\n\n def socketpair(family=None,type=SOCK_STREAM,proto=0):\n ''\n\n\n\n\n\n \n if family is None:\n try:\n family=AF_UNIX\n except NameError:\n family=AF_INET\n a,b=_socket.socketpair(family,type,proto)\n a=socket(family,type,proto,a.detach())\n b=socket(family,type,proto,b.detach())\n return a,b\n \nelse:\n\n\n def socketpair(family=AF_INET,type=SOCK_STREAM,proto=0):\n if family ==AF_INET:\n host=_LOCALHOST\n elif family ==AF_INET6:\n host=_LOCALHOST_V6\n else:\n raise ValueError(\"Only AF_INET and AF_INET6 socket address families \"\n \"are supported\")\n if type !=SOCK_STREAM:\n raise ValueError(\"Only SOCK_STREAM socket type is supported\")\n if proto !=0:\n raise ValueError(\"Only protocol zero is supported\")\n \n \n \n lsock=socket(family,type,proto)\n try:\n lsock.bind((host,0))\n lsock.listen()\n \n addr,port=lsock.getsockname()[:2]\n csock=socket(family,type,proto)\n try:\n csock.setblocking(False)\n try:\n csock.connect((addr,port))\n except(BlockingIOError,InterruptedError):\n pass\n csock.setblocking(True)\n ssock,_=lsock.accept()\n except:\n csock.close()\n raise\n finally:\n lsock.close()\n return(ssock,csock)\n __all__.append(\"socketpair\")\n \nsocketpair.__doc__=\"\"\"socketpair([family[, type[, proto]]]) -> (socket object, socket object)\nCreate a pair of socket objects from the sockets returned by the platform\nsocketpair() function.\nThe arguments are the same as for socket() except the default family is AF_UNIX\nif defined on the platform; otherwise, the default is AF_INET.\n\"\"\"\n\n_blocking_errnos={EAGAIN,EWOULDBLOCK}\n\nclass SocketIO(io.RawIOBase):\n\n ''\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n def __init__(self,sock,mode):\n if mode not in(\"r\",\"w\",\"rw\",\"rb\",\"wb\",\"rwb\"):\n raise ValueError(\"invalid mode: %r\"%mode)\n io.RawIOBase.__init__(self)\n self._sock=sock\n if \"b\"not in mode:\n mode +=\"b\"\n self._mode=mode\n self._reading=\"r\"in mode\n self._writing=\"w\"in mode\n self._timeout_occurred=False\n \n def readinto(self,b):\n ''\n\n\n\n\n\n \n self._checkClosed()\n self._checkReadable()\n if self._timeout_occurred:\n raise OSError(\"cannot read from timed out object\")\n while True:\n try:\n return self._sock.recv_into(b)\n except timeout:\n self._timeout_occurred=True\n raise\n except error as e:\n if e.errno in _blocking_errnos:\n return None\n raise\n \n def write(self,b):\n ''\n\n\n\n \n self._checkClosed()\n self._checkWritable()\n try:\n return self._sock.send(b)\n except error as e:\n \n if e.errno in _blocking_errnos:\n return None\n raise\n \n def readable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return self._reading\n \n def writable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return self._writing\n \n def seekable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return super().seekable()\n \n def fileno(self):\n ''\n \n self._checkClosed()\n return self._sock.fileno()\n \n @property\n def name(self):\n if not self.closed:\n return self.fileno()\n else:\n return 
-1\n \n @property\n def mode(self):\n return self._mode\n \n def close(self):\n ''\n\n \n if self.closed:\n return\n io.RawIOBase.close(self)\n self._sock._decref_socketios()\n self._sock=None\n \n \ndef getfqdn(name=''):\n ''\n\n\n\n\n\n\n\n \n name=name.strip()\n if not name or name =='0.0.0.0':\n name=gethostname()\n try:\n hostname,aliases,ipaddrs=gethostbyaddr(name)\n except error:\n pass\n else:\n aliases.insert(0,hostname)\n for name in aliases:\n if '.'in name:\n break\n else:\n name=hostname\n return name\n \n \n_GLOBAL_DEFAULT_TIMEOUT=object()\n\ndef create_connection(address,timeout=_GLOBAL_DEFAULT_TIMEOUT,\nsource_address=None,*,all_errors=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n host,port=address\n exceptions=[]\n for res in getaddrinfo(host,port,0,SOCK_STREAM):\n af,socktype,proto,canonname,sa=res\n sock=None\n try:\n sock=socket(af,socktype,proto)\n if timeout is not _GLOBAL_DEFAULT_TIMEOUT:\n sock.settimeout(timeout)\n if source_address:\n sock.bind(source_address)\n sock.connect(sa)\n \n exceptions.clear()\n return sock\n \n except error as exc:\n if not all_errors:\n exceptions.clear()\n exceptions.append(exc)\n if sock is not None:\n sock.close()\n \n if len(exceptions):\n try:\n if not all_errors:\n raise exceptions[0]\n raise ExceptionGroup(\"create_connection failed\",exceptions)\n finally:\n \n exceptions.clear()\n else:\n raise error(\"getaddrinfo returns an empty list\")\n \n \ndef has_dualstack_ipv6():\n ''\n\n \n if not has_ipv6\\\n or not hasattr(_socket,'IPPROTO_IPV6')\\\n or not hasattr(_socket,'IPV6_V6ONLY'):\n return False\n try:\n with socket(AF_INET6,SOCK_STREAM)as sock:\n sock.setsockopt(IPPROTO_IPV6,IPV6_V6ONLY,0)\n return True\n except error:\n return False\n \n \ndef create_server(address,*,family=AF_INET,backlog=None,reuse_port=False,\ndualstack_ipv6=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if reuse_port and not hasattr(_socket,\"SO_REUSEPORT\"):\n raise ValueError(\"SO_REUSEPORT not supported on this platform\")\n if dualstack_ipv6:\n if not has_dualstack_ipv6():\n raise ValueError(\"dualstack_ipv6 not supported on this platform\")\n if family !=AF_INET6:\n raise ValueError(\"dualstack_ipv6 requires AF_INET6 family\")\n sock=socket(family,SOCK_STREAM)\n try:\n \n \n \n \n \n \n \n \n \n if os.name not in('nt','cygwin')and\\\n hasattr(_socket,'SO_REUSEADDR'):\n try:\n sock.setsockopt(SOL_SOCKET,SO_REUSEADDR,1)\n except error:\n \n \n pass\n if reuse_port:\n sock.setsockopt(SOL_SOCKET,SO_REUSEPORT,1)\n if has_ipv6 and family ==AF_INET6:\n if dualstack_ipv6:\n sock.setsockopt(IPPROTO_IPV6,IPV6_V6ONLY,0)\n elif hasattr(_socket,\"IPV6_V6ONLY\")and\\\n hasattr(_socket,\"IPPROTO_IPV6\"):\n sock.setsockopt(IPPROTO_IPV6,IPV6_V6ONLY,1)\n try:\n sock.bind(address)\n except error as err:\n msg='%s (while attempting to bind on address %r)'%\\\n (err.strerror,address)\n raise error(err.errno,msg)from None\n if backlog is None:\n sock.listen()\n else:\n sock.listen(backlog)\n return sock\n except error:\n sock.close()\n raise\n \n \ndef getaddrinfo(host,port,family=0,type=0,proto=0,flags=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n addrlist=[]\n for res in _socket.getaddrinfo(host,port,family,type,proto,flags):\n af,socktype,proto,canonname,sa=res\n addrlist.append((_intenum_converter(af,AddressFamily),\n _intenum_converter(socktype,SocketKind),\n proto,canonname,sa))\n return addrlist\n", ["_socket", "array", "enum", "errno", "io", "os", "selectors", "sys"]], "datetime": [".py", "try:\n from _datetime import *\n from _datetime import __doc__\nexcept 
ImportError:\n from _pydatetime import *\n from _pydatetime import __doc__\n \n__all__=(\"date\",\"datetime\",\"time\",\"timedelta\",\"timezone\",\"tzinfo\",\n\"MINYEAR\",\"MAXYEAR\",\"UTC\")\n", ["_datetime", "_pydatetime"]], "_thread": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['error','start_new_thread','exit','get_ident','allocate_lock',\n'interrupt_main','LockType']\n\n\nTIMEOUT_MAX=2 **31\n\n\n\n\n\n\nerror=RuntimeError\n\ndef daemon_threads_allowed():\n return False\n \ndef _set_sentinel(*args,**kw):\n return LockType()\n \ndef start_new_thread(function,args,kwargs={}):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if type(args)!=type(tuple()):\n raise TypeError(\"2nd arg must be a tuple\")\n if type(kwargs)!=type(dict()):\n raise TypeError(\"3rd arg must be a dict\")\n global _main\n _main=False\n try:\n function(*args,**kwargs)\n except SystemExit:\n pass\n except:\n import traceback\n traceback.print_exc()\n _main=True\n global _interrupt\n if _interrupt:\n _interrupt=False\n raise KeyboardInterrupt\n \ndef exit():\n ''\n raise SystemExit\n \ndef get_ident():\n ''\n\n\n\n\n \n return -1\n \ndef allocate_lock():\n ''\n return LockType()\n \ndef stack_size(size=None):\n ''\n if size is not None:\n raise error(\"setting thread stack size not supported\")\n return 0\n \nclass LockType(object):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self):\n self.locked_status=False\n \n def acquire(self,waitflag=None,timeout=-1):\n ''\n\n\n\n\n\n\n\n\n \n if waitflag is None or waitflag:\n self.locked_status=True\n return True\n else:\n if not self.locked_status:\n self.locked_status=True\n return True\n else:\n if timeout >0:\n import time\n time.sleep(timeout)\n return False\n \n __enter__=acquire\n \n def __exit__(self,typ,val,tb):\n self.release()\n \n def release(self):\n ''\n \n \n \n \n self.locked_status=False\n return True\n \n def locked(self):\n return self.locked_status\n \n \n_interrupt=False\n\n_main=True\n\ndef interrupt_main():\n ''\n \n if _main:\n raise KeyboardInterrupt\n else:\n global _interrupt\n _interrupt=True\n \n \nclass _local:\n pass\n \nRLock=LockType\n", ["time", "traceback"]], "sysconfig": [".py", "''\n\nimport os\nimport sys\nimport threading\nfrom os.path import 
realpath\n\n__all__=[\n'get_config_h_filename',\n'get_config_var',\n'get_config_vars',\n'get_makefile_filename',\n'get_path',\n'get_path_names',\n'get_paths',\n'get_platform',\n'get_python_version',\n'get_scheme_names',\n'parse_config_h',\n]\n\n\n_ALWAYS_STR={\n'MACOSX_DEPLOYMENT_TARGET',\n}\n\n_INSTALL_SCHEMES={\n'posix_prefix':{\n'stdlib':'{installed_base}/{platlibdir}/python{py_version_short}',\n'platstdlib':'{platbase}/{platlibdir}/python{py_version_short}',\n'purelib':'{base}/lib/python{py_version_short}/site-packages',\n'platlib':'{platbase}/{platlibdir}/python{py_version_short}/site-packages',\n'include':\n'{installed_base}/include/python{py_version_short}{abiflags}',\n'platinclude':\n'{installed_platbase}/include/python{py_version_short}{abiflags}',\n'scripts':'{base}/bin',\n'data':'{base}',\n},\n'posix_home':{\n'stdlib':'{installed_base}/lib/python',\n'platstdlib':'{base}/lib/python',\n'purelib':'{base}/lib/python',\n'platlib':'{base}/lib/python',\n'include':'{installed_base}/include/python',\n'platinclude':'{installed_base}/include/python',\n'scripts':'{base}/bin',\n'data':'{base}',\n},\n'nt':{\n'stdlib':'{installed_base}/Lib',\n'platstdlib':'{base}/Lib',\n'purelib':'{base}/Lib/site-packages',\n'platlib':'{base}/Lib/site-packages',\n'include':'{installed_base}/Include',\n'platinclude':'{installed_base}/Include',\n'scripts':'{base}/Scripts',\n'data':'{base}',\n},\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n'posix_venv':{\n'stdlib':'{installed_base}/{platlibdir}/python{py_version_short}',\n'platstdlib':'{platbase}/{platlibdir}/python{py_version_short}',\n'purelib':'{base}/lib/python{py_version_short}/site-packages',\n'platlib':'{platbase}/{platlibdir}/python{py_version_short}/site-packages',\n'include':\n'{installed_base}/include/python{py_version_short}{abiflags}',\n'platinclude':\n'{installed_platbase}/include/python{py_version_short}{abiflags}',\n'scripts':'{base}/bin',\n'data':'{base}',\n},\n'nt_venv':{\n'stdlib':'{installed_base}/Lib',\n'platstdlib':'{base}/Lib',\n'purelib':'{base}/Lib/site-packages',\n'platlib':'{base}/Lib/site-packages',\n'include':'{installed_base}/Include',\n'platinclude':'{installed_base}/Include',\n'scripts':'{base}/Scripts',\n'data':'{base}',\n},\n}\n\n\nif os.name =='nt':\n _INSTALL_SCHEMES['venv']=_INSTALL_SCHEMES['nt_venv']\nelse:\n _INSTALL_SCHEMES['venv']=_INSTALL_SCHEMES['posix_venv']\n \n \n \n \ndef _getuserbase():\n env_base=os.environ.get(\"PYTHONUSERBASE\",None)\n if env_base:\n return env_base\n \n \n if sys.platform in{\"emscripten\",\"vxworks\",\"wasi\"}:\n return None\n \n def joinuser(*args):\n return os.path.expanduser(os.path.join(*args))\n \n if os.name ==\"nt\":\n base=os.environ.get(\"APPDATA\")or \"~\"\n return joinuser(base,\"Python\")\n \n if sys.platform ==\"darwin\"and sys._framework:\n return joinuser(\"~\",\"Library\",sys._framework,\n f\"{sys.version_info[0]}.{sys.version_info[1]}\")\n \n return joinuser(\"~\",\".local\")\n \n_HAS_USER_BASE=(_getuserbase()is not None)\n\nif _HAS_USER_BASE:\n _INSTALL_SCHEMES |={\n \n 'nt_user':{\n 'stdlib':'{userbase}/Python{py_version_nodot_plat}',\n 'platstdlib':'{userbase}/Python{py_version_nodot_plat}',\n 'purelib':'{userbase}/Python{py_version_nodot_plat}/site-packages',\n 'platlib':'{userbase}/Python{py_version_nodot_plat}/site-packages',\n 'include':'{userbase}/Python{py_version_nodot_plat}/Include',\n 'scripts':'{userbase}/Python{py_version_nodot_plat}/Scripts',\n 'data':'{userbase}',\n },\n 'posix_user':{\n 'stdlib':'{userbase}/{platlibdir}/python{py_version_short}',\n 
'platstdlib':'{userbase}/{platlibdir}/python{py_version_short}',\n 'purelib':'{userbase}/lib/python{py_version_short}/site-packages',\n 'platlib':'{userbase}/lib/python{py_version_short}/site-packages',\n 'include':'{userbase}/include/python{py_version_short}',\n 'scripts':'{userbase}/bin',\n 'data':'{userbase}',\n },\n 'osx_framework_user':{\n 'stdlib':'{userbase}/lib/python',\n 'platstdlib':'{userbase}/lib/python',\n 'purelib':'{userbase}/lib/python/site-packages',\n 'platlib':'{userbase}/lib/python/site-packages',\n 'include':'{userbase}/include/python{py_version_short}',\n 'scripts':'{userbase}/bin',\n 'data':'{userbase}',\n },\n }\n \n_SCHEME_KEYS=('stdlib','platstdlib','purelib','platlib','include',\n'scripts','data')\n\n_PY_VERSION=sys.version.split()[0]\n_PY_VERSION_SHORT=f'{sys.version_info[0]}.{sys.version_info[1]}'\n_PY_VERSION_SHORT_NO_DOT=f'{sys.version_info[0]}{sys.version_info[1]}'\n_PREFIX=os.path.normpath(sys.prefix)\n_BASE_PREFIX=os.path.normpath(sys.base_prefix)\n_EXEC_PREFIX=os.path.normpath(sys.exec_prefix)\n_BASE_EXEC_PREFIX=os.path.normpath(sys.base_exec_prefix)\n\n_CONFIG_VARS_LOCK=threading.RLock()\n_CONFIG_VARS=None\n\n_CONFIG_VARS_INITIALIZED=False\n_USER_BASE=None\n\n\n\n_variable_rx=r\"([a-zA-Z][a-zA-Z0-9_]+)\\s*=\\s*(.*)\"\n_findvar1_rx=r\"\\$\\(([A-Za-z][A-Za-z0-9_]*)\\)\"\n_findvar2_rx=r\"\\${([A-Za-z][A-Za-z0-9_]*)}\"\n\n\ndef _safe_realpath(path):\n try:\n return realpath(path)\n except OSError:\n return path\n \nif sys.executable:\n _PROJECT_BASE=os.path.dirname(_safe_realpath(sys.executable))\nelse:\n\n\n _PROJECT_BASE=_safe_realpath(os.getcwd())\n \n \n \n \n_sys_home=getattr(sys,'_home',None)\nif _sys_home:\n _PROJECT_BASE=_sys_home\n \nif os.name =='nt':\n\n\n\n\n\n if _safe_realpath(_PROJECT_BASE).startswith(\n _safe_realpath(f'{_BASE_PREFIX}\\\\PCbuild')):\n _PROJECT_BASE=_BASE_PREFIX\n \n \nif \"_PYTHON_PROJECT_BASE\"in os.environ:\n _PROJECT_BASE=_safe_realpath(os.environ[\"_PYTHON_PROJECT_BASE\"])\n \ndef is_python_build(check_home=None):\n if check_home is not None:\n import warnings\n warnings.warn(\"check_home argument is deprecated and ignored.\",\n DeprecationWarning,stacklevel=2)\n for fn in(\"Setup\",\"Setup.local\"):\n if os.path.isfile(os.path.join(_PROJECT_BASE,\"Modules\",fn)):\n return True\n return False\n \n_PYTHON_BUILD=is_python_build()\n\nif _PYTHON_BUILD:\n for scheme in('posix_prefix','posix_home'):\n \n \n \n \n scheme=_INSTALL_SCHEMES[scheme]\n scheme['headers']=scheme['include']\n scheme['include']='{srcdir}/Include'\n scheme['platinclude']='{projectbase}/.'\n del scheme\n \n \ndef _subst_vars(s,local_vars):\n try:\n return s.format(**local_vars)\n except KeyError as var:\n try:\n return s.format(**os.environ)\n except KeyError:\n raise AttributeError(f'{var}')from None\n \ndef _extend_dict(target_dict,other_dict):\n target_keys=target_dict.keys()\n for key,value in other_dict.items():\n if key in target_keys:\n continue\n target_dict[key]=value\n \n \ndef _expand_vars(scheme,vars):\n res={}\n if vars is None:\n vars={}\n _extend_dict(vars,get_config_vars())\n if os.name =='nt':\n \n \n \n vars=vars |{'platlibdir':'lib'}\n \n for key,value in _INSTALL_SCHEMES[scheme].items():\n if os.name in('posix','nt'):\n value=os.path.expanduser(value)\n res[key]=os.path.normpath(_subst_vars(value,vars))\n return res\n \n \ndef _get_preferred_schemes():\n if os.name =='nt':\n return{\n 'prefix':'nt',\n 'home':'posix_home',\n 'user':'nt_user',\n }\n if sys.platform =='darwin'and sys._framework:\n return{\n 'prefix':'posix_prefix',\n 
'home':'posix_home',\n 'user':'osx_framework_user',\n }\n return{\n 'prefix':'posix_prefix',\n 'home':'posix_home',\n 'user':'posix_user',\n }\n \n \ndef get_preferred_scheme(key):\n if key =='prefix'and sys.prefix !=sys.base_prefix:\n return 'venv'\n scheme=_get_preferred_schemes()[key]\n if scheme not in _INSTALL_SCHEMES:\n raise ValueError(\n f\"{key !r} returned {scheme !r}, which is not a valid scheme \"\n f\"on this platform\"\n )\n return scheme\n \n \ndef get_default_scheme():\n return get_preferred_scheme('prefix')\n \n \ndef _parse_makefile(filename,vars=None,keep_unresolved=True):\n ''\n\n\n\n\n \n import re\n \n if vars is None:\n vars={}\n done={}\n notdone={}\n \n with open(filename,encoding=sys.getfilesystemencoding(),\n errors=\"surrogateescape\")as f:\n lines=f.readlines()\n \n for line in lines:\n if line.startswith('#')or line.strip()=='':\n continue\n m=re.match(_variable_rx,line)\n if m:\n n,v=m.group(1,2)\n v=v.strip()\n \n tmpv=v.replace('$$','')\n \n if \"$\"in tmpv:\n notdone[n]=v\n else:\n try:\n if n in _ALWAYS_STR:\n raise ValueError\n \n v=int(v)\n except ValueError:\n \n done[n]=v.replace('$$','$')\n else:\n done[n]=v\n \n \n variables=list(notdone.keys())\n \n \n \n \n \n renamed_variables=('CFLAGS','LDFLAGS','CPPFLAGS')\n \n while len(variables)>0:\n for name in tuple(variables):\n value=notdone[name]\n m1=re.search(_findvar1_rx,value)\n m2=re.search(_findvar2_rx,value)\n if m1 and m2:\n m=m1 if m1.start()=\"5\":\n osname=\"solaris\"\n release=f\"{int(release[0])-3}.{release[2:]}\"\n \n \n \n bitness={2147483647:\"32bit\",9223372036854775807:\"64bit\"}\n machine +=f\".{bitness[sys.maxsize]}\"\n \n elif osname[:3]==\"aix\":\n from _aix_support import aix_platform\n return aix_platform()\n elif osname[:6]==\"cygwin\":\n osname=\"cygwin\"\n import re\n rel_re=re.compile(r'[\\d.]+')\n m=rel_re.match(release)\n if m:\n release=m.group()\n elif osname[:6]==\"darwin\":\n import _osx_support\n osname,release,machine=_osx_support.get_platform_osx(\n get_config_vars(),\n osname,release,machine)\n \n return f\"{osname}-{release}-{machine}\"\n \n \ndef get_python_version():\n return _PY_VERSION_SHORT\n \n \ndef expand_makefile_vars(s,vars):\n ''\n\n\n\n\n\n \n import re\n \n \n \n \n \n \n \n while True:\n m=re.search(_findvar1_rx,s)or re.search(_findvar2_rx,s)\n if m:\n (beg,end)=m.span()\n s=s[0:beg]+vars.get(m.group(1))+s[end:]\n else:\n break\n return s\n \n \ndef _print_dict(title,data):\n for index,(key,value)in enumerate(sorted(data.items())):\n if index ==0:\n print(f'{title}: ')\n print(f'\\t{key} = \"{value}\"')\n \n \ndef _main():\n ''\n if '--generate-posix-vars'in sys.argv:\n _generate_posix_vars()\n return\n print(f'Platform: \"{get_platform()}\"')\n print(f'Python version: \"{get_python_version()}\"')\n print(f'Current installation scheme: \"{get_default_scheme()}\"')\n print()\n _print_dict('Paths',get_paths())\n print()\n _print_dict('Variables',get_config_vars())\n \n \nif __name__ =='__main__':\n _main()\n", ["_aix_support", "_imp", "_osx_support", "os", "os.path", "pprint", "re", "sys", "threading", "types", "warnings"]], "pathlib": [".py", "''\n\n\n\n\n\n\nimport fnmatch\nimport functools\nimport io\nimport ntpath\nimport os\nimport posixpath\nimport re\nimport sys\nimport warnings\nfrom _collections_abc import Sequence\nfrom errno import ENOENT,ENOTDIR,EBADF,ELOOP\nfrom stat import S_ISDIR,S_ISLNK,S_ISREG,S_ISSOCK,S_ISBLK,S_ISCHR,S_ISFIFO\nfrom urllib.parse import quote_from_bytes as 
urlquote_from_bytes\n\n\n__all__=[\n\"PurePath\",\"PurePosixPath\",\"PureWindowsPath\",\n\"Path\",\"PosixPath\",\"WindowsPath\",\n]\n\n\n\n\n\n\n\n_WIN_RESERVED_NAMES=frozenset(\n{'CON','PRN','AUX','NUL','CONIN$','CONOUT$'}|\n{f'COM{c}'for c in '123456789\\xb9\\xb2\\xb3'}|\n{f'LPT{c}'for c in '123456789\\xb9\\xb2\\xb3'}\n)\n\n_WINERROR_NOT_READY=21\n_WINERROR_INVALID_NAME=123\n_WINERROR_CANT_RESOLVE_FILENAME=1921\n\n\n_IGNORED_ERRNOS=(ENOENT,ENOTDIR,EBADF,ELOOP)\n\n_IGNORED_WINERRORS=(\n_WINERROR_NOT_READY,\n_WINERROR_INVALID_NAME,\n_WINERROR_CANT_RESOLVE_FILENAME)\n\ndef _ignore_error(exception):\n return(getattr(exception,'errno',None)in _IGNORED_ERRNOS or\n getattr(exception,'winerror',None)in _IGNORED_WINERRORS)\n \n \n@functools.cache\ndef _is_case_sensitive(flavour):\n return flavour.normcase('Aa')=='Aa'\n \n \n \n \n \n \n \n \n \n \n \n \n \n_FNMATCH_PREFIX,_FNMATCH_SUFFIX=fnmatch.translate('_').split('_')\n_FNMATCH_SLICE=slice(len(_FNMATCH_PREFIX),-len(_FNMATCH_SUFFIX))\n_SWAP_SEP_AND_NEWLINE={\n'/':str.maketrans({'/':'\\n','\\n':'/'}),\n'\\\\':str.maketrans({'\\\\':'\\n','\\n':'\\\\'}),\n}\n\n\n@functools.lru_cache()\ndef _make_selector(pattern_parts,flavour,case_sensitive):\n pat=pattern_parts[0]\n if not pat:\n return _TerminatingSelector()\n if pat =='**':\n child_parts_idx=1\n while child_parts_idx =len(self)or idx <-len(self):\n raise IndexError(idx)\n if idx <0:\n idx +=len(self)\n return self._path._from_parsed_parts(self._drv,self._root,\n self._tail[:-idx -1])\n \n def __repr__(self):\n return \"<{}.parents>\".format(type(self._path).__name__)\n \n \nclass PurePath(object):\n ''\n\n\n\n\n\n\n \n \n __slots__=(\n \n \n '_raw_paths',\n \n \n \n \n \n \n \n \n '_drv','_root','_tail_cached',\n \n \n \n \n '_str',\n \n \n \n \n \n '_str_normcase_cached',\n \n \n \n \n \n '_parts_normcase_cached',\n \n \n \n '_lines_cached',\n \n \n \n '_hash',\n )\n _flavour=os.path\n \n def __new__(cls,*args,**kwargs):\n ''\n\n\n\n \n if cls is PurePath:\n cls=PureWindowsPath if os.name =='nt'else PurePosixPath\n return object.__new__(cls)\n \n def __reduce__(self):\n \n \n return(self.__class__,self.parts)\n \n def __init__(self,*args):\n paths=[]\n for arg in args:\n if isinstance(arg,PurePath):\n if arg._flavour is ntpath and self._flavour is posixpath:\n \n paths.extend(path.replace('\\\\','/')for path in arg._raw_paths)\n else:\n paths.extend(arg._raw_paths)\n else:\n try:\n path=os.fspath(arg)\n except TypeError:\n path=arg\n if not isinstance(path,str):\n raise TypeError(\n \"argument should be a str or an os.PathLike \"\n \"object where __fspath__ returns a str, \"\n f\"not {type(path).__name__ !r}\")\n paths.append(path)\n self._raw_paths=paths\n \n def with_segments(self,*pathsegments):\n ''\n\n\n \n return type(self)(*pathsegments)\n \n @classmethod\n def _parse_path(cls,path):\n if not path:\n return '','',[]\n sep=cls._flavour.sep\n altsep=cls._flavour.altsep\n if altsep:\n path=path.replace(altsep,sep)\n drv,root,rel=cls._flavour.splitroot(path)\n if not root and drv.startswith(sep)and not drv.endswith(sep):\n drv_parts=drv.split(sep)\n if len(drv_parts)==4 and drv_parts[2]not in '?.':\n \n root=sep\n elif len(drv_parts)==6:\n \n root=sep\n parsed=[sys.intern(str(x))for x in rel.split(sep)if x and x !='.']\n return drv,root,parsed\n \n def _load_parts(self):\n paths=self._raw_paths\n if len(paths)==0:\n path=''\n elif len(paths)==1:\n path=paths[0]\n else:\n path=self._flavour.join(*paths)\n drv,root,tail=self._parse_path(path)\n self._drv=drv\n self._root=root\n 
self._tail_cached=tail\n \n def _from_parsed_parts(self,drv,root,tail):\n path_str=self._format_parsed_parts(drv,root,tail)\n path=self.with_segments(path_str)\n path._str=path_str or '.'\n path._drv=drv\n path._root=root\n path._tail_cached=tail\n return path\n \n @classmethod\n def _format_parsed_parts(cls,drv,root,tail):\n if drv or root:\n return drv+root+cls._flavour.sep.join(tail)\n elif tail and cls._flavour.splitdrive(tail[0])[0]:\n tail=['.']+tail\n return cls._flavour.sep.join(tail)\n \n def __str__(self):\n ''\n \n try:\n return self._str\n except AttributeError:\n self._str=self._format_parsed_parts(self.drive,self.root,\n self._tail)or '.'\n return self._str\n \n def __fspath__(self):\n return str(self)\n \n def as_posix(self):\n ''\n \n f=self._flavour\n return str(self).replace(f.sep,'/')\n \n def __bytes__(self):\n ''\n \n return os.fsencode(self)\n \n def __repr__(self):\n return \"{}({!r})\".format(self.__class__.__name__,self.as_posix())\n \n def as_uri(self):\n ''\n if not self.is_absolute():\n raise ValueError(\"relative path can't be expressed as a file URI\")\n \n drive=self.drive\n if len(drive)==2 and drive[1]==':':\n \n prefix='file:///'+drive\n path=self.as_posix()[2:]\n elif drive:\n \n prefix='file:'\n path=self.as_posix()\n else:\n \n prefix='file://'\n path=str(self)\n return prefix+urlquote_from_bytes(os.fsencode(path))\n \n @property\n def _str_normcase(self):\n \n try:\n return self._str_normcase_cached\n except AttributeError:\n if _is_case_sensitive(self._flavour):\n self._str_normcase_cached=str(self)\n else:\n self._str_normcase_cached=str(self).lower()\n return self._str_normcase_cached\n \n @property\n def _parts_normcase(self):\n \n try:\n return self._parts_normcase_cached\n except AttributeError:\n self._parts_normcase_cached=self._str_normcase.split(self._flavour.sep)\n return self._parts_normcase_cached\n \n @property\n def _lines(self):\n \n try:\n return self._lines_cached\n except AttributeError:\n path_str=str(self)\n if path_str =='.':\n self._lines_cached=''\n else:\n trans=_SWAP_SEP_AND_NEWLINE[self._flavour.sep]\n self._lines_cached=path_str.translate(trans)\n return self._lines_cached\n \n def __eq__(self,other):\n if not isinstance(other,PurePath):\n return NotImplemented\n return self._str_normcase ==other._str_normcase and self._flavour is other._flavour\n \n def __hash__(self):\n try:\n return self._hash\n except AttributeError:\n self._hash=hash(self._str_normcase)\n return self._hash\n \n def __lt__(self,other):\n if not isinstance(other,PurePath)or self._flavour is not other._flavour:\n return NotImplemented\n return self._parts_normcase other._parts_normcase\n \n def __ge__(self,other):\n if not isinstance(other,PurePath)or self._flavour is not other._flavour:\n return NotImplemented\n return self._parts_normcase >=other._parts_normcase\n \n @property\n def drive(self):\n ''\n try:\n return self._drv\n except AttributeError:\n self._load_parts()\n return self._drv\n \n @property\n def root(self):\n ''\n try:\n return self._root\n except AttributeError:\n self._load_parts()\n return self._root\n \n @property\n def _tail(self):\n try:\n return self._tail_cached\n except AttributeError:\n self._load_parts()\n return self._tail_cached\n \n @property\n def anchor(self):\n ''\n anchor=self.drive+self.root\n return anchor\n \n @property\n def name(self):\n ''\n tail=self._tail\n if not tail:\n return ''\n return tail[-1]\n \n @property\n def suffix(self):\n ''\n\n\n\n \n name=self.name\n i=name.rfind('.')\n if 0 >> from decimal import 
*\n>>> setcontext(ExtendedContext)\n>>> Decimal(0)\nDecimal('0')\n>>> Decimal('1')\nDecimal('1')\n>>> Decimal('-.0123')\nDecimal('-0.0123')\n>>> Decimal(123456)\nDecimal('123456')\n>>> Decimal('123.45e12345678')\nDecimal('1.2345E+12345680')\n>>> Decimal('1.33') + Decimal('1.27')\nDecimal('2.60')\n>>> Decimal('12.34') + Decimal('3.87') - Decimal('18.41')\nDecimal('-2.20')\n>>> dig = Decimal(1)\n>>> print(dig / Decimal(3))\n0.333333333\n>>> getcontext().prec = 18\n>>> print(dig / Decimal(3))\n0.333333333333333333\n>>> print(dig.sqrt())\n1\n>>> print(Decimal(3).sqrt())\n1.73205080756887729\n>>> print(Decimal(3) ** 123)\n4.85192780976896427E+58\n>>> inf = Decimal(1) / Decimal(0)\n>>> print(inf)\nInfinity\n>>> neginf = Decimal(-1) / Decimal(0)\n>>> print(neginf)\n-Infinity\n>>> print(neginf + inf)\nNaN\n>>> print(neginf * inf)\n-Infinity\n>>> print(dig / 0)\nInfinity\n>>> getcontext().traps[DivisionByZero] = 1\n>>> print(dig / 0)\nTraceback (most recent call last):\n ...\n ...\n ...\ndecimal.DivisionByZero: x / 0\n>>> c = Context()\n>>> c.traps[InvalidOperation] = 0\n>>> print(c.flags[InvalidOperation])\n0\n>>> c.divide(Decimal(0), Decimal(0))\nDecimal('NaN')\n>>> c.traps[InvalidOperation] = 1\n>>> print(c.flags[InvalidOperation])\n1\n>>> c.flags[InvalidOperation] = 0\n>>> print(c.flags[InvalidOperation])\n0\n>>> print(c.divide(Decimal(0), Decimal(0)))\nTraceback (most recent call last):\n ...\n ...\n ...\ndecimal.InvalidOperation: 0 / 0\n>>> print(c.flags[InvalidOperation])\n1\n>>> c.flags[InvalidOperation] = 0\n>>> c.traps[InvalidOperation] = 0\n>>> print(c.divide(Decimal(0), Decimal(0)))\nNaN\n>>> print(c.flags[InvalidOperation])\n1\n>>>\n\"\"\"\n\n__all__=[\n\n'Decimal','Context',\n\n\n'DecimalTuple',\n\n\n'DefaultContext','BasicContext','ExtendedContext',\n\n\n'DecimalException','Clamped','InvalidOperation','DivisionByZero',\n'Inexact','Rounded','Subnormal','Overflow','Underflow',\n'FloatOperation',\n\n\n'DivisionImpossible','InvalidContext','ConversionSyntax','DivisionUndefined',\n\n\n'ROUND_DOWN','ROUND_HALF_UP','ROUND_HALF_EVEN','ROUND_CEILING',\n'ROUND_FLOOR','ROUND_UP','ROUND_HALF_DOWN','ROUND_05UP',\n\n\n'setcontext','getcontext','localcontext',\n\n\n'MAX_PREC','MAX_EMAX','MIN_EMIN','MIN_ETINY',\n\n\n'HAVE_THREADS',\n\n\n'HAVE_CONTEXTVAR'\n]\n\n__xname__=__name__\n__name__='decimal'\n__version__='1.70'\n\n__libmpdec_version__=\"2.4.2\"\n\nimport math as _math\nimport numbers as _numbers\nimport sys\n\ntry:\n from collections import namedtuple as _namedtuple\n DecimalTuple=_namedtuple('DecimalTuple','sign digits exponent',module='decimal')\nexcept ImportError:\n DecimalTuple=lambda *args:args\n \n \nROUND_DOWN='ROUND_DOWN'\nROUND_HALF_UP='ROUND_HALF_UP'\nROUND_HALF_EVEN='ROUND_HALF_EVEN'\nROUND_CEILING='ROUND_CEILING'\nROUND_FLOOR='ROUND_FLOOR'\nROUND_UP='ROUND_UP'\nROUND_HALF_DOWN='ROUND_HALF_DOWN'\nROUND_05UP='ROUND_05UP'\n\n\nHAVE_THREADS=True\nHAVE_CONTEXTVAR=True\nif sys.maxsize ==2 **63 -1:\n MAX_PREC=999999999999999999\n MAX_EMAX=999999999999999999\n MIN_EMIN=-999999999999999999\nelse:\n MAX_PREC=425000000\n MAX_EMAX=425000000\n MIN_EMIN=-425000000\n \nMIN_ETINY=MIN_EMIN -(MAX_PREC -1)\n\n\n\nclass DecimalException(ArithmeticError):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def handle(self,context,*args):\n pass\n \n \nclass Clamped(DecimalException):\n ''\n\n\n\n\n\n\n\n\n \n \nclass InvalidOperation(DecimalException):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def handle(self,context,*args):\n if args:\n ans=_dec_from_triple(args[0]._sign,args[0]._int,'n',True)\n 
return ans._fix_nan(context)\n return _NaN\n \nclass ConversionSyntax(InvalidOperation):\n ''\n\n\n\n\n \n def handle(self,context,*args):\n return _NaN\n \nclass DivisionByZero(DecimalException,ZeroDivisionError):\n ''\n\n\n\n\n\n\n\n\n\n \n \n def handle(self,context,sign,*args):\n return _SignedInfinity[sign]\n \nclass DivisionImpossible(InvalidOperation):\n ''\n\n\n\n\n \n \n def handle(self,context,*args):\n return _NaN\n \nclass DivisionUndefined(InvalidOperation,ZeroDivisionError):\n ''\n\n\n\n\n \n \n def handle(self,context,*args):\n return _NaN\n \nclass Inexact(DecimalException):\n ''\n\n\n\n\n\n\n\n\n \n \nclass InvalidContext(InvalidOperation):\n ''\n\n\n\n\n\n\n\n \n \n def handle(self,context,*args):\n return _NaN\n \nclass Rounded(DecimalException):\n ''\n\n\n\n\n\n\n\n\n \n \nclass Subnormal(DecimalException):\n ''\n\n\n\n\n\n\n\n \n \nclass Overflow(Inexact,Rounded):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def handle(self,context,sign,*args):\n if context.rounding in(ROUND_HALF_UP,ROUND_HALF_EVEN,\n ROUND_HALF_DOWN,ROUND_UP):\n return _SignedInfinity[sign]\n if sign ==0:\n if context.rounding ==ROUND_CEILING:\n return _SignedInfinity[sign]\n return _dec_from_triple(sign,'9'*context.prec,\n context.Emax -context.prec+1)\n if sign ==1:\n if context.rounding ==ROUND_FLOOR:\n return _SignedInfinity[sign]\n return _dec_from_triple(sign,'9'*context.prec,\n context.Emax -context.prec+1)\n \n \nclass Underflow(Inexact,Rounded,Subnormal):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \nclass FloatOperation(DecimalException,TypeError):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n_signals=[Clamped,DivisionByZero,Inexact,Overflow,Rounded,\nUnderflow,InvalidOperation,Subnormal,FloatOperation]\n\n\n_condition_map={ConversionSyntax:InvalidOperation,\nDivisionImpossible:InvalidOperation,\nDivisionUndefined:InvalidOperation,\nInvalidContext:InvalidOperation}\n\n\n_rounding_modes=(ROUND_DOWN,ROUND_HALF_UP,ROUND_HALF_EVEN,ROUND_CEILING,\nROUND_FLOOR,ROUND_UP,ROUND_HALF_DOWN,ROUND_05UP)\n\n\n\n\n\n\nimport contextvars\n\n_current_context_var=contextvars.ContextVar('decimal_context')\n\n_context_attributes=frozenset(\n['prec','Emin','Emax','capitals','clamp','rounding','flags','traps']\n)\n\ndef getcontext():\n ''\n\n\n\n\n \n try:\n return _current_context_var.get()\n except LookupError:\n context=Context()\n _current_context_var.set(context)\n return context\n \ndef setcontext(context):\n ''\n if context in(DefaultContext,BasicContext,ExtendedContext):\n context=context.copy()\n context.clear_flags()\n _current_context_var.set(context)\n \ndel contextvars\n\ndef localcontext(ctx=None,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if ctx is None:\n ctx=getcontext()\n ctx_manager=_ContextManager(ctx)\n for key,value in kwargs.items():\n if key not in _context_attributes:\n raise TypeError(f\"'{key}' is an invalid keyword argument for this function\")\n setattr(ctx_manager.new_context,key,value)\n return ctx_manager\n \n \n \n \n \n \n \n \nclass Decimal(object):\n ''\n \n __slots__=('_exp','_int','_sign','_is_special')\n \n \n \n \n \n def __new__(cls,value=\"0\",context=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n self=object.__new__(cls)\n \n \n \n if isinstance(value,str):\n m=_parser(value.strip().replace(\"_\",\"\"))\n if m is None:\n if context is None:\n context=getcontext()\n return context._raise_error(ConversionSyntax,\n \"Invalid literal for Decimal: %r\"%value)\n \n if m.group('sign')==\"-\":\n self._sign=1\n else:\n self._sign=0\n 
intpart=m.group('int')\n if intpart is not None:\n \n fracpart=m.group('frac')or ''\n exp=int(m.group('exp')or '0')\n self._int=str(int(intpart+fracpart))\n self._exp=exp -len(fracpart)\n self._is_special=False\n else:\n diag=m.group('diag')\n if diag is not None:\n \n self._int=str(int(diag or '0')).lstrip('0')\n if m.group('signal'):\n self._exp='N'\n else:\n self._exp='n'\n else:\n \n self._int='0'\n self._exp='F'\n self._is_special=True\n return self\n \n \n if isinstance(value,int):\n if value >=0:\n self._sign=0\n else:\n self._sign=1\n self._exp=0\n self._int=str(abs(value))\n self._is_special=False\n return self\n \n \n if isinstance(value,Decimal):\n self._exp=value._exp\n self._sign=value._sign\n self._int=value._int\n self._is_special=value._is_special\n return self\n \n \n if isinstance(value,_WorkRep):\n self._sign=value.sign\n self._int=str(value.int)\n self._exp=int(value.exp)\n self._is_special=False\n return self\n \n \n if isinstance(value,(list,tuple)):\n if len(value)!=3:\n raise ValueError('Invalid tuple size in creation of Decimal '\n 'from list or tuple. The list or tuple '\n 'should have exactly three elements.')\n \n if not(isinstance(value[0],int)and value[0]in(0,1)):\n raise ValueError(\"Invalid sign. The first value in the tuple \"\n \"should be an integer; either 0 for a \"\n \"positive number or 1 for a negative number.\")\n self._sign=value[0]\n if value[2]=='F':\n \n self._int='0'\n self._exp=value[2]\n self._is_special=True\n else:\n \n digits=[]\n for digit in value[1]:\n if isinstance(digit,int)and 0 <=digit <=9:\n \n if digits or digit !=0:\n digits.append(digit)\n else:\n raise ValueError(\"The second value in the tuple must \"\n \"be composed of integers in the range \"\n \"0 through 9.\")\n if value[2]in('n','N'):\n \n self._int=''.join(map(str,digits))\n self._exp=value[2]\n self._is_special=True\n elif isinstance(value[2],int):\n \n self._int=''.join(map(str,digits or[0]))\n self._exp=value[2]\n self._is_special=False\n else:\n raise ValueError(\"The third value in the tuple must \"\n \"be an integer, or one of the \"\n \"strings 'F', 'n', 'N'.\")\n return self\n \n if isinstance(value,float):\n if context is None:\n context=getcontext()\n context._raise_error(FloatOperation,\n \"strict semantics for mixing floats and Decimals are \"\n \"enabled\")\n value=Decimal.from_float(value)\n self._exp=value._exp\n self._sign=value._sign\n self._int=value._int\n self._is_special=value._is_special\n return self\n \n raise TypeError(\"Cannot convert %r to Decimal\"%value)\n \n @classmethod\n def from_float(cls,f):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(f,int):\n sign=0 if f >=0 else 1\n k=0\n coeff=str(abs(f))\n elif isinstance(f,float):\n if _math.isinf(f)or _math.isnan(f):\n return cls(repr(f))\n if _math.copysign(1.0,f)==1.0:\n sign=0\n else:\n sign=1\n n,d=abs(f).as_integer_ratio()\n k=d.bit_length()-1\n coeff=str(n *5 **k)\n else:\n raise TypeError(\"argument must be int or float.\")\n \n result=_dec_from_triple(sign,coeff,-k)\n if cls is Decimal:\n return result\n else:\n return cls(result)\n \n def _isnan(self):\n ''\n\n\n\n\n \n if self._is_special:\n exp=self._exp\n if exp =='n':\n return 1\n elif exp =='N':\n return 2\n return 0\n \n def _isinfinity(self):\n ''\n\n\n\n\n \n if self._exp =='F':\n if self._sign:\n return -1\n return 1\n return 0\n \n def _check_nans(self,other=None,context=None):\n ''\n\n\n\n\n\n\n \n \n self_is_nan=self._isnan()\n if other is None:\n other_is_nan=False\n else:\n other_is_nan=other._isnan()\n \n if 
self_is_nan or other_is_nan:\n if context is None:\n context=getcontext()\n \n if self_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n self)\n if other_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n other)\n if self_is_nan:\n return self._fix_nan(context)\n \n return other._fix_nan(context)\n return 0\n \n def _compare_check_nans(self,other,context):\n ''\n\n\n\n\n\n\n\n\n \n if context is None:\n context=getcontext()\n \n if self._is_special or other._is_special:\n if self.is_snan():\n return context._raise_error(InvalidOperation,\n 'comparison involving sNaN',\n self)\n elif other.is_snan():\n return context._raise_error(InvalidOperation,\n 'comparison involving sNaN',\n other)\n elif self.is_qnan():\n return context._raise_error(InvalidOperation,\n 'comparison involving NaN',\n self)\n elif other.is_qnan():\n return context._raise_error(InvalidOperation,\n 'comparison involving NaN',\n other)\n return 0\n \n def __bool__(self):\n ''\n\n\n \n return self._is_special or self._int !='0'\n \n def _cmp(self,other):\n ''\n\n\n \n \n if self._is_special or other._is_special:\n self_inf=self._isinfinity()\n other_inf=other._isinfinity()\n if self_inf ==other_inf:\n return 0\n elif self_inf other_adjusted:\n return(-1)**self._sign\n else:\n return -((-1)**self._sign)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def __eq__(self,other,context=None):\n self,other=_convert_for_comparison(self,other,equality_op=True)\n if other is NotImplemented:\n return other\n if self._check_nans(other,context):\n return False\n return self._cmp(other)==0\n \n def __lt__(self,other,context=None):\n self,other=_convert_for_comparison(self,other)\n if other is NotImplemented:\n return other\n ans=self._compare_check_nans(other,context)\n if ans:\n return False\n return self._cmp(other)<0\n \n def __le__(self,other,context=None):\n self,other=_convert_for_comparison(self,other)\n if other is NotImplemented:\n return other\n ans=self._compare_check_nans(other,context)\n if ans:\n return False\n return self._cmp(other)<=0\n \n def __gt__(self,other,context=None):\n self,other=_convert_for_comparison(self,other)\n if other is NotImplemented:\n return other\n ans=self._compare_check_nans(other,context)\n if ans:\n return False\n return self._cmp(other)>0\n \n def __ge__(self,other,context=None):\n self,other=_convert_for_comparison(self,other)\n if other is NotImplemented:\n return other\n ans=self._compare_check_nans(other,context)\n if ans:\n return False\n return self._cmp(other)>=0\n \n def compare(self,other,context=None):\n ''\n\n\n\n\n\n \n other=_convert_other(other,raiseit=True)\n \n \n if(self._is_special or other and other._is_special):\n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n return Decimal(self._cmp(other))\n \n def __hash__(self):\n ''\n \n \n \n \n \n if self._is_special:\n if self.is_snan():\n raise TypeError('Cannot hash a signaling NaN value.')\n elif self.is_nan():\n return object.__hash__(self)\n else:\n if self._sign:\n return -_PyHASH_INF\n else:\n return _PyHASH_INF\n \n if self._exp >=0:\n exp_hash=pow(10,self._exp,_PyHASH_MODULUS)\n else:\n exp_hash=pow(_PyHASH_10INV,-self._exp,_PyHASH_MODULUS)\n hash_=int(self._int)*exp_hash %_PyHASH_MODULUS\n ans=hash_ if self >=0 else -hash_\n return -2 if ans ==-1 else ans\n \n def as_tuple(self):\n ''\n\n\n \n return DecimalTuple(self._sign,tuple(map(int,self._int)),self._exp)\n \n def as_integer_ratio(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if self._is_special:\n if 
self.is_nan():\n raise ValueError(\"cannot convert NaN to integer ratio\")\n else:\n raise OverflowError(\"cannot convert Infinity to integer ratio\")\n \n if not self:\n return 0,1\n \n \n \n n=int(self._int)\n if self._exp >=0:\n \n n,d=n *10 **self._exp,1\n else:\n \n d5=-self._exp\n while d5 >0 and n %5 ==0:\n n //=5\n d5 -=1\n \n \n \n d2=-self._exp\n shift2=min((n&-n).bit_length()-1,d2)\n if shift2:\n n >>=shift2\n d2 -=shift2\n \n d=5 **d5 <-6:\n \n dotplace=leftdigits\n elif not eng:\n \n dotplace=1\n elif self._int =='0':\n \n dotplace=(leftdigits+1)%3 -1\n else:\n \n dotplace=(leftdigits -1)%3+1\n \n if dotplace <=0:\n intpart='0'\n fracpart='.'+'0'*(-dotplace)+self._int\n elif dotplace >=len(self._int):\n intpart=self._int+'0'*(dotplace -len(self._int))\n fracpart=''\n else:\n intpart=self._int[:dotplace]\n fracpart='.'+self._int[dotplace:]\n if leftdigits ==dotplace:\n exp=''\n else:\n if context is None:\n context=getcontext()\n exp=['e','E'][context.capitals]+\"%+d\"%(leftdigits -dotplace)\n \n return sign+intpart+fracpart+exp\n \n def to_eng_string(self,context=None):\n ''\n\n\n\n\n \n return self.__str__(eng=True,context=context)\n \n def __neg__(self,context=None):\n ''\n\n\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if context is None:\n context=getcontext()\n \n if not self and context.rounding !=ROUND_FLOOR:\n \n \n ans=self.copy_abs()\n else:\n ans=self.copy_negate()\n \n return ans._fix(context)\n \n def __pos__(self,context=None):\n ''\n\n\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if context is None:\n context=getcontext()\n \n if not self and context.rounding !=ROUND_FLOOR:\n \n ans=self.copy_abs()\n else:\n ans=Decimal(self)\n \n return ans._fix(context)\n \n def __abs__(self,round=True,context=None):\n ''\n\n\n\n\n \n if not round:\n return self.copy_abs()\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if self._sign:\n ans=self.__neg__(context=context)\n else:\n ans=self.__pos__(context=context)\n \n return ans\n \n def __add__(self,other,context=None):\n ''\n\n\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None:\n context=getcontext()\n \n if self._is_special or other._is_special:\n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if self._isinfinity():\n \n if self._sign !=other._sign and other._isinfinity():\n return context._raise_error(InvalidOperation,'-INF + INF')\n return Decimal(self)\n if other._isinfinity():\n return Decimal(other)\n \n exp=min(self._exp,other._exp)\n negativezero=0\n if context.rounding ==ROUND_FLOOR and self._sign !=other._sign:\n \n negativezero=1\n \n if not self and not other:\n sign=min(self._sign,other._sign)\n if negativezero:\n sign=1\n ans=_dec_from_triple(sign,'0',exp)\n ans=ans._fix(context)\n return ans\n if not self:\n exp=max(exp,other._exp -context.prec -1)\n ans=other._rescale(exp,context.rounding)\n ans=ans._fix(context)\n return ans\n if not other:\n exp=max(exp,self._exp -context.prec -1)\n ans=self._rescale(exp,context.rounding)\n ans=ans._fix(context)\n return ans\n \n op1=_WorkRep(self)\n op2=_WorkRep(other)\n op1,op2=_normalize(op1,op2,context.prec)\n \n result=_WorkRep()\n if op1.sign !=op2.sign:\n \n if op1.int ==op2.int:\n ans=_dec_from_triple(negativezero,'0',exp)\n ans=ans._fix(context)\n return ans\n if op1.int =0:\n coeff,remainder=divmod(op1.int *10 **shift,op2.int)\n else:\n 
coeff,remainder=divmod(op1.int,op2.int *10 **-shift)\n if remainder:\n \n if coeff %5 ==0:\n coeff +=1\n else:\n \n ideal_exp=self._exp -other._exp\n while exp =op2.exp:\n op1.int *=10 **(op1.exp -op2.exp)\n else:\n op2.int *=10 **(op2.exp -op1.exp)\n q,r=divmod(op1.int,op2.int)\n if q <10 **context.prec:\n return(_dec_from_triple(sign,str(q),0),\n _dec_from_triple(self._sign,str(r),ideal_exp))\n \n \n ans=context._raise_error(DivisionImpossible,\n 'quotient too large in //, % or divmod')\n return ans,ans\n \n def __rtruediv__(self,other,context=None):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__truediv__(self,context=context)\n \n def __divmod__(self,other,context=None):\n ''\n\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None:\n context=getcontext()\n \n ans=self._check_nans(other,context)\n if ans:\n return(ans,ans)\n \n sign=self._sign ^other._sign\n if self._isinfinity():\n if other._isinfinity():\n ans=context._raise_error(InvalidOperation,'divmod(INF, INF)')\n return ans,ans\n else:\n return(_SignedInfinity[sign],\n context._raise_error(InvalidOperation,'INF % x'))\n \n if not other:\n if not self:\n ans=context._raise_error(DivisionUndefined,'divmod(0, 0)')\n return ans,ans\n else:\n return(context._raise_error(DivisionByZero,'x // 0',sign),\n context._raise_error(InvalidOperation,'x % 0'))\n \n quotient,remainder=self._divide(other,context)\n remainder=remainder._fix(context)\n return quotient,remainder\n \n def __rdivmod__(self,other,context=None):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__divmod__(self,context=context)\n \n def __mod__(self,other,context=None):\n ''\n\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None:\n context=getcontext()\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if self._isinfinity():\n return context._raise_error(InvalidOperation,'INF % x')\n elif not other:\n if self:\n return context._raise_error(InvalidOperation,'x % 0')\n else:\n return context._raise_error(DivisionUndefined,'0 % 0')\n \n remainder=self._divide(other,context)[1]\n remainder=remainder._fix(context)\n return remainder\n \n def __rmod__(self,other,context=None):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__mod__(self,context=context)\n \n def remainder_near(self,other,context=None):\n ''\n\n \n if context is None:\n context=getcontext()\n \n other=_convert_other(other,raiseit=True)\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n \n if self._isinfinity():\n return context._raise_error(InvalidOperation,\n 'remainder_near(infinity, x)')\n \n \n if not other:\n if self:\n return context._raise_error(InvalidOperation,\n 'remainder_near(x, 0)')\n else:\n return context._raise_error(DivisionUndefined,\n 'remainder_near(0, 0)')\n \n \n if other._isinfinity():\n ans=Decimal(self)\n return ans._fix(context)\n \n \n ideal_exponent=min(self._exp,other._exp)\n if not self:\n ans=_dec_from_triple(self._sign,'0',ideal_exponent)\n return ans._fix(context)\n \n \n expdiff=self.adjusted()-other.adjusted()\n if expdiff >=context.prec+1:\n \n return context._raise_error(DivisionImpossible)\n if expdiff <=-2:\n \n ans=self._rescale(ideal_exponent,context.rounding)\n return ans._fix(context)\n \n \n op1=_WorkRep(self)\n op2=_WorkRep(other)\n if op1.exp >=op2.exp:\n op1.int *=10 **(op1.exp -op2.exp)\n 
else:\n op2.int *=10 **(op2.exp -op1.exp)\n q,r=divmod(op1.int,op2.int)\n \n \n \n if 2 *r+(q&1)>op2.int:\n r -=op2.int\n q +=1\n \n if q >=10 **context.prec:\n return context._raise_error(DivisionImpossible)\n \n \n sign=self._sign\n if r <0:\n sign=1 -sign\n r=-r\n \n ans=_dec_from_triple(sign,str(r),ideal_exponent)\n return ans._fix(context)\n \n def __floordiv__(self,other,context=None):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None:\n context=getcontext()\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if self._isinfinity():\n if other._isinfinity():\n return context._raise_error(InvalidOperation,'INF // INF')\n else:\n return _SignedInfinity[self._sign ^other._sign]\n \n if not other:\n if self:\n return context._raise_error(DivisionByZero,'x // 0',\n self._sign ^other._sign)\n else:\n return context._raise_error(DivisionUndefined,'0 // 0')\n \n return self._divide(other,context)[0]\n \n def __rfloordiv__(self,other,context=None):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__floordiv__(self,context=context)\n \n def __float__(self):\n ''\n if self._isnan():\n if self.is_snan():\n raise ValueError(\"Cannot convert signaling NaN to float\")\n s=\"-nan\"if self._sign else \"nan\"\n else:\n s=str(self)\n return float(s)\n \n def __int__(self):\n ''\n if self._is_special:\n if self._isnan():\n raise ValueError(\"Cannot convert NaN to integer\")\n elif self._isinfinity():\n raise OverflowError(\"Cannot convert infinity to integer\")\n s=(-1)**self._sign\n if self._exp >=0:\n return s *int(self._int)*10 **self._exp\n else:\n return s *int(self._int[:self._exp]or '0')\n \n __trunc__=__int__\n \n @property\n def real(self):\n return self\n \n @property\n def imag(self):\n return Decimal(0)\n \n def conjugate(self):\n return self\n \n def __complex__(self):\n return complex(float(self))\n \n def _fix_nan(self,context):\n ''\n payload=self._int\n \n \n \n max_payload_len=context.prec -context.clamp\n if len(payload)>max_payload_len:\n payload=payload[len(payload)-max_payload_len:].lstrip('0')\n return _dec_from_triple(self._sign,payload,self._exp,True)\n return Decimal(self)\n \n def _fix(self,context):\n ''\n\n\n\n\n\n\n \n \n if self._is_special:\n if self._isnan():\n \n return self._fix_nan(context)\n else:\n \n return Decimal(self)\n \n \n \n Etiny=context.Etiny()\n Etop=context.Etop()\n if not self:\n exp_max=[context.Emax,Etop][context.clamp]\n new_exp=min(max(self._exp,Etiny),exp_max)\n if new_exp !=self._exp:\n context._raise_error(Clamped)\n return _dec_from_triple(self._sign,'0',new_exp)\n else:\n return Decimal(self)\n \n \n \n exp_min=len(self._int)+self._exp -context.prec\n if exp_min >Etop:\n \n ans=context._raise_error(Overflow,'above Emax',self._sign)\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n return ans\n \n self_is_subnormal=exp_min 0:\n coeff=str(int(coeff)+1)\n if len(coeff)>context.prec:\n coeff=coeff[:-1]\n exp_min +=1\n \n \n if exp_min >Etop:\n ans=context._raise_error(Overflow,'above Emax',self._sign)\n else:\n ans=_dec_from_triple(self._sign,coeff,exp_min)\n \n \n \n if changed and self_is_subnormal:\n context._raise_error(Underflow)\n if self_is_subnormal:\n context._raise_error(Subnormal)\n if changed:\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n if not ans:\n \n context._raise_error(Clamped)\n return ans\n \n if self_is_subnormal:\n context._raise_error(Subnormal)\n \n \n if context.clamp ==1 
and self._exp >Etop:\n context._raise_error(Clamped)\n self_padded=self._int+'0'*(self._exp -Etop)\n return _dec_from_triple(self._sign,self_padded,Etop)\n \n \n return Decimal(self)\n \n \n \n \n \n \n \n \n \n \n \n def _round_down(self,prec):\n ''\n if _all_zeros(self._int,prec):\n return 0\n else:\n return -1\n \n def _round_up(self,prec):\n ''\n return -self._round_down(prec)\n \n def _round_half_up(self,prec):\n ''\n if self._int[prec]in '56789':\n return 1\n elif _all_zeros(self._int,prec):\n return 0\n else:\n return -1\n \n def _round_half_down(self,prec):\n ''\n if _exact_half(self._int,prec):\n return -1\n else:\n return self._round_half_up(prec)\n \n def _round_half_even(self,prec):\n ''\n if _exact_half(self._int,prec)and\\\n (prec ==0 or self._int[prec -1]in '02468'):\n return -1\n else:\n return self._round_half_up(prec)\n \n def _round_ceiling(self,prec):\n ''\n if self._sign:\n return self._round_down(prec)\n else:\n return -self._round_down(prec)\n \n def _round_floor(self,prec):\n ''\n if not self._sign:\n return self._round_down(prec)\n else:\n return -self._round_down(prec)\n \n def _round_05up(self,prec):\n ''\n if prec and self._int[prec -1]not in '05':\n return self._round_down(prec)\n else:\n return -self._round_down(prec)\n \n _pick_rounding_function=dict(\n ROUND_DOWN=_round_down,\n ROUND_UP=_round_up,\n ROUND_HALF_UP=_round_half_up,\n ROUND_HALF_DOWN=_round_half_down,\n ROUND_HALF_EVEN=_round_half_even,\n ROUND_CEILING=_round_ceiling,\n ROUND_FLOOR=_round_floor,\n ROUND_05UP=_round_05up,\n )\n \n def __round__(self,n=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if n is not None:\n \n if not isinstance(n,int):\n raise TypeError('Second argument to round should be integral')\n exp=_dec_from_triple(0,'1',-n)\n return self.quantize(exp)\n \n \n if self._is_special:\n if self.is_nan():\n raise ValueError(\"cannot round a NaN\")\n else:\n raise OverflowError(\"cannot round an infinity\")\n return int(self._rescale(0,ROUND_HALF_EVEN))\n \n def __floor__(self):\n ''\n\n\n\n\n\n \n if self._is_special:\n if self.is_nan():\n raise ValueError(\"cannot round a NaN\")\n else:\n raise OverflowError(\"cannot round an infinity\")\n return int(self._rescale(0,ROUND_FLOOR))\n \n def __ceil__(self):\n ''\n\n\n\n\n\n \n if self._is_special:\n if self.is_nan():\n raise ValueError(\"cannot round a NaN\")\n else:\n raise OverflowError(\"cannot round an infinity\")\n return int(self._rescale(0,ROUND_CEILING))\n \n def fma(self,other,third,context=None):\n ''\n\n\n\n\n\n\n\n \n \n other=_convert_other(other,raiseit=True)\n third=_convert_other(third,raiseit=True)\n \n \n \n if self._is_special or other._is_special:\n if context is None:\n context=getcontext()\n if self._exp =='N':\n return context._raise_error(InvalidOperation,'sNaN',self)\n if other._exp =='N':\n return context._raise_error(InvalidOperation,'sNaN',other)\n if self._exp =='n':\n product=self\n elif other._exp =='n':\n product=other\n elif self._exp =='F':\n if not other:\n return context._raise_error(InvalidOperation,\n 'INF * 0 in fma')\n product=_SignedInfinity[self._sign ^other._sign]\n elif other._exp =='F':\n if not self:\n return context._raise_error(InvalidOperation,\n '0 * INF in fma')\n product=_SignedInfinity[self._sign ^other._sign]\n else:\n product=_dec_from_triple(self._sign ^other._sign,\n str(int(self._int)*int(other._int)),\n self._exp+other._exp)\n \n return product.__add__(third,context)\n \n def 
_power_modulo(self,other,modulo,context=None):\n ''\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n modulo=_convert_other(modulo)\n if modulo is NotImplemented:\n return modulo\n \n if context is None:\n context=getcontext()\n \n \n \n self_is_nan=self._isnan()\n other_is_nan=other._isnan()\n modulo_is_nan=modulo._isnan()\n if self_is_nan or other_is_nan or modulo_is_nan:\n if self_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n self)\n if other_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n other)\n if modulo_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n modulo)\n if self_is_nan:\n return self._fix_nan(context)\n if other_is_nan:\n return other._fix_nan(context)\n return modulo._fix_nan(context)\n \n \n if not(self._isinteger()and\n other._isinteger()and\n modulo._isinteger()):\n return context._raise_error(InvalidOperation,\n 'pow() 3rd argument not allowed '\n 'unless all arguments are integers')\n if other <0:\n return context._raise_error(InvalidOperation,\n 'pow() 2nd argument cannot be '\n 'negative when 3rd argument specified')\n if not modulo:\n return context._raise_error(InvalidOperation,\n 'pow() 3rd argument cannot be 0')\n \n \n \n if modulo.adjusted()>=context.prec:\n return context._raise_error(InvalidOperation,\n 'insufficient precision: pow() 3rd '\n 'argument must not have more than '\n 'precision digits')\n \n \n \n if not other and not self:\n return context._raise_error(InvalidOperation,\n 'at least one of pow() 1st argument '\n 'and 2nd argument must be nonzero; '\n '0**0 is not defined')\n \n \n if other._iseven():\n sign=0\n else:\n sign=self._sign\n \n \n \n modulo=abs(int(modulo))\n base=_WorkRep(self.to_integral_value())\n exponent=_WorkRep(other.to_integral_value())\n \n \n base=(base.int %modulo *pow(10,base.exp,modulo))%modulo\n for i in range(exponent.exp):\n base=pow(base,10,modulo)\n base=pow(base,exponent.int,modulo)\n \n return _dec_from_triple(sign,str(base),0)\n \n def _power_exact(self,other,p):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n x=_WorkRep(self)\n xc,xe=x.int,x.exp\n while xc %10 ==0:\n xc //=10\n xe +=1\n \n y=_WorkRep(other)\n yc,ye=y.int,y.exp\n while yc %10 ==0:\n yc //=10\n ye +=1\n \n \n \n if xc ==1:\n xe *=yc\n \n while xe %10 ==0:\n xe //=10\n ye +=1\n if ye <0:\n return None\n exponent=xe *10 **ye\n if y.sign ==1:\n exponent=-exponent\n \n if other._isinteger()and other._sign ==0:\n ideal_exponent=self._exp *int(other)\n zeros=min(exponent -ideal_exponent,p -1)\n else:\n zeros=0\n return _dec_from_triple(0,'1'+'0'*zeros,exponent -zeros)\n \n \n \n if y.sign ==1:\n last_digit=xc %10\n if last_digit in(2,4,6,8):\n \n if xc&-xc !=xc:\n return None\n \n e=_nbits(xc)-1\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n emax=p *93 //65\n if ye >=len(str(emax)):\n return None\n \n \n e=_decimal_lshift_exact(e *yc,ye)\n xe=_decimal_lshift_exact(xe *yc,ye)\n if e is None or xe is None:\n return None\n \n if e >emax:\n return None\n xc=5 **e\n \n elif last_digit ==5:\n \n \n e=_nbits(xc)*28 //65\n xc,remainder=divmod(5 **e,xc)\n if remainder:\n return None\n while xc %5 ==0:\n xc //=5\n e -=1\n \n \n \n \n emax=p *10 //3\n if ye >=len(str(emax)):\n return None\n \n e=_decimal_lshift_exact(e *yc,ye)\n xe=_decimal_lshift_exact(xe *yc,ye)\n if e is None or xe is None:\n return None\n \n if e >emax:\n 
return None\n xc=2 **e\n else:\n return None\n \n if xc >=10 **p:\n return None\n xe=-e -xe\n return _dec_from_triple(0,str(xc),xe)\n \n \n if ye >=0:\n m,n=yc *10 **ye,1\n else:\n if xe !=0 and len(str(abs(yc *xe)))<=-ye:\n return None\n xc_bits=_nbits(xc)\n if len(str(abs(yc)*xc_bits))<=-ye:\n return None\n m,n=yc,10 **(-ye)\n while m %2 ==n %2 ==0:\n m //=2\n n //=2\n while m %5 ==n %5 ==0:\n m //=5\n n //=5\n \n \n if n >1:\n \n if xc_bits <=n:\n return None\n \n xe,rem=divmod(xe,n)\n if rem !=0:\n return None\n \n \n a=1 <<-(-_nbits(xc)//n)\n while True:\n q,r=divmod(xc,a **(n -1))\n if a <=q:\n break\n else:\n a=(a *(n -1)+q)//n\n if not(a ==q and r ==0):\n return None\n xc=a\n \n \n \n \n \n \n if xc >1 and m >p *100 //_log10_lb(xc):\n return None\n xc=xc **m\n xe *=m\n if xc >10 **p:\n return None\n \n \n \n \n str_xc=str(xc)\n if other._isinteger()and other._sign ==0:\n ideal_exponent=self._exp *int(other)\n zeros=min(xe -ideal_exponent,p -len(str_xc))\n else:\n zeros=0\n return _dec_from_triple(0,str_xc+'0'*zeros,xe -zeros)\n \n def __pow__(self,other,modulo=None,context=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if modulo is not None:\n return self._power_modulo(other,modulo,context)\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None:\n context=getcontext()\n \n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n \n if not other:\n if not self:\n return context._raise_error(InvalidOperation,'0 ** 0')\n else:\n return _One\n \n \n result_sign=0\n if self._sign ==1:\n if other._isinteger():\n if not other._iseven():\n result_sign=1\n else:\n \n \n if self:\n return context._raise_error(InvalidOperation,\n 'x ** y with x negative and y not an integer')\n \n self=self.copy_negate()\n \n \n if not self:\n if other._sign ==0:\n return _dec_from_triple(result_sign,'0',0)\n else:\n return _SignedInfinity[result_sign]\n \n \n if self._isinfinity():\n if other._sign ==0:\n return _SignedInfinity[result_sign]\n else:\n return _dec_from_triple(result_sign,'0',0)\n \n \n \n \n if self ==_One:\n if other._isinteger():\n \n \n \n \n if other._sign ==1:\n multiplier=0\n elif other >context.prec:\n multiplier=context.prec\n else:\n multiplier=int(other)\n \n exp=self._exp *multiplier\n if exp <1 -context.prec:\n exp=1 -context.prec\n context._raise_error(Rounded)\n else:\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n exp=1 -context.prec\n \n return _dec_from_triple(result_sign,'1'+'0'*-exp,exp)\n \n \n self_adj=self.adjusted()\n \n \n \n if other._isinfinity():\n if(other._sign ==0)==(self_adj <0):\n return _dec_from_triple(result_sign,'0',0)\n else:\n return _SignedInfinity[result_sign]\n \n \n \n ans=None\n exact=False\n \n \n \n \n \n \n bound=self._log10_exp_bound()+other.adjusted()\n if(self_adj >=0)==(other._sign ==0):\n \n \n if bound >=len(str(context.Emax)):\n ans=_dec_from_triple(result_sign,'1',context.Emax+1)\n else:\n \n \n Etiny=context.Etiny()\n if bound >=len(str(-Etiny)):\n ans=_dec_from_triple(result_sign,'1',Etiny -1)\n \n \n if ans is None:\n ans=self._power_exact(other,context.prec+1)\n if ans is not None:\n if result_sign ==1:\n ans=_dec_from_triple(1,ans._int,ans._exp)\n exact=True\n \n \n if ans is None:\n p=context.prec\n x=_WorkRep(self)\n xc,xe=x.int,x.exp\n y=_WorkRep(other)\n yc,ye=y.int,y.exp\n if y.sign ==1:\n yc=-yc\n \n \n \n extra=3\n while True:\n coeff,exp=_dpower(xc,xe,yc,ye,p+extra)\n if coeff %(5 *10 **(len(str(coeff))-p -1)):\n break\n extra +=3\n \n 
ans=_dec_from_triple(result_sign,str(coeff),exp)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if exact and not other._isinteger():\n \n \n if len(ans._int)<=context.prec:\n expdiff=context.prec+1 -len(ans._int)\n ans=_dec_from_triple(ans._sign,ans._int+'0'*expdiff,\n ans._exp -expdiff)\n \n \n newcontext=context.copy()\n newcontext.clear_flags()\n for exception in _signals:\n newcontext.traps[exception]=0\n \n \n ans=ans._fix(newcontext)\n \n \n newcontext._raise_error(Inexact)\n if newcontext.flags[Subnormal]:\n newcontext._raise_error(Underflow)\n \n \n \n \n \n \n if newcontext.flags[Overflow]:\n context._raise_error(Overflow,'above Emax',ans._sign)\n for exception in Underflow,Subnormal,Inexact,Rounded,Clamped:\n if newcontext.flags[exception]:\n context._raise_error(exception)\n \n else:\n ans=ans._fix(context)\n \n return ans\n \n def __rpow__(self,other,context=None):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__pow__(self,context=context)\n \n def normalize(self,context=None):\n ''\n \n if context is None:\n context=getcontext()\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n dup=self._fix(context)\n if dup._isinfinity():\n return dup\n \n if not dup:\n return _dec_from_triple(dup._sign,'0',0)\n exp_max=[context.Emax,context.Etop()][context.clamp]\n end=len(dup._int)\n exp=dup._exp\n while dup._int[end -1]=='0'and exp context.Emax:\n return context._raise_error(InvalidOperation,\n 'exponent of quantize result too large for current context')\n if self_adjusted -exp._exp+1 >context.prec:\n return context._raise_error(InvalidOperation,\n 'quantize result has too many digits for current context')\n \n ans=self._rescale(exp._exp,rounding)\n if ans.adjusted()>context.Emax:\n return context._raise_error(InvalidOperation,\n 'exponent of quantize result too large for current context')\n if len(ans._int)>context.prec:\n return context._raise_error(InvalidOperation,\n 'quantize result has too many digits for current context')\n \n \n if ans and ans.adjusted()self._exp:\n if ans !=self:\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n \n \n \n ans=ans._fix(context)\n return ans\n \n def same_quantum(self,other,context=None):\n ''\n\n\n\n\n\n\n \n other=_convert_other(other,raiseit=True)\n if self._is_special or other._is_special:\n return(self.is_nan()and other.is_nan()or\n self.is_infinite()and other.is_infinite())\n return self._exp ==other._exp\n \n def _rescale(self,exp,rounding):\n ''\n\n\n\n\n\n\n\n\n \n if self._is_special:\n return Decimal(self)\n if not self:\n return _dec_from_triple(self._sign,'0',exp)\n \n if self._exp >=exp:\n \n return _dec_from_triple(self._sign,\n self._int+'0'*(self._exp -exp),exp)\n \n \n \n digits=len(self._int)+self._exp -exp\n if digits <0:\n self=_dec_from_triple(self._sign,'1',exp -1)\n digits=0\n this_function=self._pick_rounding_function[rounding]\n changed=this_function(self,digits)\n coeff=self._int[:digits]or '0'\n if changed ==1:\n coeff=str(int(coeff)+1)\n return _dec_from_triple(self._sign,coeff,exp)\n \n def _round(self,places,rounding):\n ''\n\n\n\n\n\n\n\n \n if places <=0:\n raise ValueError(\"argument should be at least 1 in _round\")\n if self._is_special or not self:\n return Decimal(self)\n ans=self._rescale(self.adjusted()+1 -places,rounding)\n \n \n \n \n if ans.adjusted()!=self.adjusted():\n ans=ans._rescale(ans.adjusted()+1 -places,rounding)\n return ans\n \n def to_integral_exact(self,rounding=None,context=None):\n 
''\n\n\n\n\n\n\n\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n return Decimal(self)\n if self._exp >=0:\n return Decimal(self)\n if not self:\n return _dec_from_triple(self._sign,'0',0)\n if context is None:\n context=getcontext()\n if rounding is None:\n rounding=context.rounding\n ans=self._rescale(0,rounding)\n if ans !=self:\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n return ans\n \n def to_integral_value(self,rounding=None,context=None):\n ''\n if context is None:\n context=getcontext()\n if rounding is None:\n rounding=context.rounding\n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n return Decimal(self)\n if self._exp >=0:\n return Decimal(self)\n else:\n return self._rescale(0,rounding)\n \n \n to_integral=to_integral_value\n \n def sqrt(self,context=None):\n ''\n if context is None:\n context=getcontext()\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if self._isinfinity()and self._sign ==0:\n return Decimal(self)\n \n if not self:\n \n ans=_dec_from_triple(self._sign,'0',self._exp //2)\n return ans._fix(context)\n \n if self._sign ==1:\n return context._raise_error(InvalidOperation,'sqrt(-x), x > 0')\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n prec=context.prec+1\n \n \n \n \n \n op=_WorkRep(self)\n e=op.exp >>1\n if op.exp&1:\n c=op.int *10\n l=(len(self._int)>>1)+1\n else:\n c=op.int\n l=len(self._int)+1 >>1\n \n \n shift=prec -l\n if shift >=0:\n c *=100 **shift\n exact=True\n else:\n c,remainder=divmod(c,100 **-shift)\n exact=not remainder\n e -=shift\n \n \n n=10 **prec\n while True:\n q=c //n\n if n <=q:\n break\n else:\n n=n+q >>1\n exact=exact and n *n ==c\n \n if exact:\n \n if shift >=0:\n \n n //=10 **shift\n else:\n n *=10 **-shift\n e +=shift\n else:\n \n if n %5 ==0:\n n +=1\n \n ans=_dec_from_triple(0,str(n),e)\n \n \n context=context._shallow_copy()\n rounding=context._set_rounding(ROUND_HALF_EVEN)\n ans=ans._fix(context)\n context.rounding=rounding\n \n return ans\n \n def max(self,other,context=None):\n ''\n\n\n\n \n other=_convert_other(other,raiseit=True)\n \n if context is None:\n context=getcontext()\n \n if self._is_special or other._is_special:\n \n \n sn=self._isnan()\n on=other._isnan()\n if sn or on:\n if on ==1 and sn ==0:\n return self._fix(context)\n if sn ==1 and on ==0:\n return other._fix(context)\n return self._check_nans(other,context)\n \n c=self._cmp(other)\n if c ==0:\n \n \n \n \n \n \n \n \n c=self.compare_total(other)\n \n if c ==-1:\n ans=other\n else:\n ans=self\n \n return ans._fix(context)\n \n def min(self,other,context=None):\n ''\n\n\n\n \n other=_convert_other(other,raiseit=True)\n \n if context is None:\n context=getcontext()\n \n if self._is_special or other._is_special:\n \n \n sn=self._isnan()\n on=other._isnan()\n if sn or on:\n if on ==1 and sn ==0:\n return self._fix(context)\n if sn ==1 and on ==0:\n return other._fix(context)\n return self._check_nans(other,context)\n \n c=self._cmp(other)\n if c ==0:\n c=self.compare_total(other)\n \n if c ==-1:\n ans=self\n else:\n ans=other\n \n return ans._fix(context)\n \n def _isinteger(self):\n ''\n if self._is_special:\n return False\n if self._exp >=0:\n return True\n rest=self._int[self._exp:]\n return rest =='0'*len(rest)\n \n def _iseven(self):\n ''\n if not self or self._exp >0:\n return True\n return self._int[-1+self._exp]in '02468'\n \n def adjusted(self):\n ''\n try:\n return 
self._exp+len(self._int)-1\n \n except TypeError:\n return 0\n \n def canonical(self):\n ''\n\n\n\n \n return self\n \n def compare_signal(self,other,context=None):\n ''\n\n\n\n \n other=_convert_other(other,raiseit=True)\n ans=self._compare_check_nans(other,context)\n if ans:\n return ans\n return self.compare(other,context=context)\n \n def compare_total(self,other,context=None):\n ''\n\n\n\n\n \n other=_convert_other(other,raiseit=True)\n \n \n if self._sign and not other._sign:\n return _NegativeOne\n if not self._sign and other._sign:\n return _One\n sign=self._sign\n \n \n self_nan=self._isnan()\n other_nan=other._isnan()\n if self_nan or other_nan:\n if self_nan ==other_nan:\n \n self_key=len(self._int),self._int\n other_key=len(other._int),other._int\n if self_key other_key:\n if sign:\n return _NegativeOne\n else:\n return _One\n return _Zero\n \n if sign:\n if self_nan ==1:\n return _NegativeOne\n if other_nan ==1:\n return _One\n if self_nan ==2:\n return _NegativeOne\n if other_nan ==2:\n return _One\n else:\n if self_nan ==1:\n return _One\n if other_nan ==1:\n return _NegativeOne\n if self_nan ==2:\n return _One\n if other_nan ==2:\n return _NegativeOne\n \n if self other:\n return _One\n \n if self._exp other._exp:\n if sign:\n return _NegativeOne\n else:\n return _One\n return _Zero\n \n \n def compare_total_mag(self,other,context=None):\n ''\n\n\n \n other=_convert_other(other,raiseit=True)\n \n s=self.copy_abs()\n o=other.copy_abs()\n return s.compare_total(o)\n \n def copy_abs(self):\n ''\n return _dec_from_triple(0,self._int,self._exp,self._is_special)\n \n def copy_negate(self):\n ''\n if self._sign:\n return _dec_from_triple(0,self._int,self._exp,self._is_special)\n else:\n return _dec_from_triple(1,self._int,self._exp,self._is_special)\n \n def copy_sign(self,other,context=None):\n ''\n other=_convert_other(other,raiseit=True)\n return _dec_from_triple(other._sign,self._int,\n self._exp,self._is_special)\n \n def exp(self,context=None):\n ''\n \n if context is None:\n context=getcontext()\n \n \n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n \n if self._isinfinity()==-1:\n return _Zero\n \n \n if not self:\n return _One\n \n \n if self._isinfinity()==1:\n return Decimal(self)\n \n \n \n \n \n p=context.prec\n adj=self.adjusted()\n \n \n \n \n \n \n if self._sign ==0 and adj >len(str((context.Emax+1)*3)):\n \n ans=_dec_from_triple(0,'1',context.Emax+1)\n elif self._sign ==1 and adj >len(str((-context.Etiny()+1)*3)):\n \n ans=_dec_from_triple(0,'1',context.Etiny()-1)\n elif self._sign ==0 and adj <-p:\n \n ans=_dec_from_triple(0,'1'+'0'*(p -1)+'1',-p)\n elif self._sign ==1 and adj <-p -1:\n \n ans=_dec_from_triple(0,'9'*(p+1),-p -1)\n \n else:\n op=_WorkRep(self)\n c,e=op.int,op.exp\n if op.sign ==1:\n c=-c\n \n \n \n \n extra=3\n while True:\n coeff,exp=_dexp(c,e,p+extra)\n if coeff %(5 *10 **(len(str(coeff))-p -1)):\n break\n extra +=3\n \n ans=_dec_from_triple(0,str(coeff),exp)\n \n \n \n context=context._shallow_copy()\n rounding=context._set_rounding(ROUND_HALF_EVEN)\n ans=ans._fix(context)\n context.rounding=rounding\n \n return ans\n \n def is_canonical(self):\n ''\n\n\n\n \n return True\n \n def is_finite(self):\n ''\n\n\n\n \n return not self._is_special\n \n def is_infinite(self):\n ''\n return self._exp =='F'\n \n def is_nan(self):\n ''\n return self._exp in('n','N')\n \n def is_normal(self,context=None):\n ''\n if self._is_special or not self:\n return False\n if context is None:\n context=getcontext()\n return context.Emin 
<=self.adjusted()\n \n def is_qnan(self):\n ''\n return self._exp =='n'\n \n def is_signed(self):\n ''\n return self._sign ==1\n \n def is_snan(self):\n ''\n return self._exp =='N'\n \n def is_subnormal(self,context=None):\n ''\n if self._is_special or not self:\n return False\n if context is None:\n context=getcontext()\n return self.adjusted()=1:\n \n return len(str(adj *23 //10))-1\n if adj <=-2:\n \n return len(str((-1 -adj)*23 //10))-1\n op=_WorkRep(self)\n c,e=op.int,op.exp\n if adj ==0:\n \n num=str(c -10 **-e)\n den=str(c)\n return len(num)-len(den)-(num =1:\n \n return len(str(adj))-1\n if adj <=-2:\n \n return len(str(-1 -adj))-1\n op=_WorkRep(self)\n c,e=op.int,op.exp\n if adj ==0:\n \n num=str(c -10 **-e)\n den=str(231 *c)\n return len(num)-len(den)-(num 0:\n opa='0'*dif+opa\n elif dif <0:\n opa=opa[-context.prec:]\n dif=context.prec -len(opb)\n if dif >0:\n opb='0'*dif+opb\n elif dif <0:\n opb=opb[-context.prec:]\n return opa,opb\n \n def logical_and(self,other,context=None):\n ''\n if context is None:\n context=getcontext()\n \n other=_convert_other(other,raiseit=True)\n \n if not self._islogical()or not other._islogical():\n return context._raise_error(InvalidOperation)\n \n \n (opa,opb)=self._fill_logical(context,self._int,other._int)\n \n \n result=\"\".join([str(int(a)&int(b))for a,b in zip(opa,opb)])\n return _dec_from_triple(0,result.lstrip('0')or '0',0)\n \n def logical_invert(self,context=None):\n ''\n if context is None:\n context=getcontext()\n return self.logical_xor(_dec_from_triple(0,'1'*context.prec,0),\n context)\n \n def logical_or(self,other,context=None):\n ''\n if context is None:\n context=getcontext()\n \n other=_convert_other(other,raiseit=True)\n \n if not self._islogical()or not other._islogical():\n return context._raise_error(InvalidOperation)\n \n \n (opa,opb)=self._fill_logical(context,self._int,other._int)\n \n \n result=\"\".join([str(int(a)|int(b))for a,b in zip(opa,opb)])\n return _dec_from_triple(0,result.lstrip('0')or '0',0)\n \n def logical_xor(self,other,context=None):\n ''\n if context is None:\n context=getcontext()\n \n other=_convert_other(other,raiseit=True)\n \n if not self._islogical()or not other._islogical():\n return context._raise_error(InvalidOperation)\n \n \n (opa,opb)=self._fill_logical(context,self._int,other._int)\n \n \n result=\"\".join([str(int(a)^int(b))for a,b in zip(opa,opb)])\n return _dec_from_triple(0,result.lstrip('0')or '0',0)\n \n def max_mag(self,other,context=None):\n ''\n other=_convert_other(other,raiseit=True)\n \n if context is None:\n context=getcontext()\n \n if self._is_special or other._is_special:\n \n \n sn=self._isnan()\n on=other._isnan()\n if sn or on:\n if on ==1 and sn ==0:\n return self._fix(context)\n if sn ==1 and on ==0:\n return other._fix(context)\n return self._check_nans(other,context)\n \n c=self.copy_abs()._cmp(other.copy_abs())\n if c ==0:\n c=self.compare_total(other)\n \n if c ==-1:\n ans=other\n else:\n ans=self\n \n return ans._fix(context)\n \n def min_mag(self,other,context=None):\n ''\n other=_convert_other(other,raiseit=True)\n \n if context is None:\n context=getcontext()\n \n if self._is_special or other._is_special:\n \n \n sn=self._isnan()\n on=other._isnan()\n if sn or on:\n if on ==1 and sn ==0:\n return self._fix(context)\n if sn ==1 and on ==0:\n return other._fix(context)\n return self._check_nans(other,context)\n \n c=self.copy_abs()._cmp(other.copy_abs())\n if c ==0:\n c=self.compare_total(other)\n \n if c ==-1:\n ans=self\n else:\n ans=other\n \n return 
ans._fix(context)\n \n def next_minus(self,context=None):\n ''\n if context is None:\n context=getcontext()\n \n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if self._isinfinity()==-1:\n return _NegativeInfinity\n if self._isinfinity()==1:\n return _dec_from_triple(0,'9'*context.prec,context.Etop())\n \n context=context.copy()\n context._set_rounding(ROUND_FLOOR)\n context._ignore_all_flags()\n new_self=self._fix(context)\n if new_self !=self:\n return new_self\n return self.__sub__(_dec_from_triple(0,'1',context.Etiny()-1),\n context)\n \n def next_plus(self,context=None):\n ''\n if context is None:\n context=getcontext()\n \n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if self._isinfinity()==1:\n return _Infinity\n if self._isinfinity()==-1:\n return _dec_from_triple(1,'9'*context.prec,context.Etop())\n \n context=context.copy()\n context._set_rounding(ROUND_CEILING)\n context._ignore_all_flags()\n new_self=self._fix(context)\n if new_self !=self:\n return new_self\n return self.__add__(_dec_from_triple(0,'1',context.Etiny()-1),\n context)\n \n def next_toward(self,other,context=None):\n ''\n\n\n\n\n\n\n \n other=_convert_other(other,raiseit=True)\n \n if context is None:\n context=getcontext()\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n comparison=self._cmp(other)\n if comparison ==0:\n return self.copy_sign(other)\n \n if comparison ==-1:\n ans=self.next_plus(context)\n else:\n ans=self.next_minus(context)\n \n \n if ans._isinfinity():\n context._raise_error(Overflow,\n 'Infinite result from next_toward',\n ans._sign)\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n elif ans.adjusted()0:\n rotdig='0'*topad+rotdig\n elif topad <0:\n rotdig=rotdig[-topad:]\n \n \n rotated=rotdig[torot:]+rotdig[:torot]\n return _dec_from_triple(self._sign,\n rotated.lstrip('0')or '0',self._exp)\n \n def scaleb(self,other,context=None):\n ''\n if context is None:\n context=getcontext()\n \n other=_convert_other(other,raiseit=True)\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if other._exp !=0:\n return context._raise_error(InvalidOperation)\n liminf=-2 *(context.Emax+context.prec)\n limsup=2 *(context.Emax+context.prec)\n if not(liminf <=int(other)<=limsup):\n return context._raise_error(InvalidOperation)\n \n if self._isinfinity():\n return Decimal(self)\n \n d=_dec_from_triple(self._sign,self._int,self._exp+int(other))\n d=d._fix(context)\n return d\n \n def shift(self,other,context=None):\n ''\n if context is None:\n context=getcontext()\n \n other=_convert_other(other,raiseit=True)\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if other._exp !=0:\n return context._raise_error(InvalidOperation)\n if not(-context.prec <=int(other)<=context.prec):\n return context._raise_error(InvalidOperation)\n \n if self._isinfinity():\n return Decimal(self)\n \n \n torot=int(other)\n rotdig=self._int\n topad=context.prec -len(rotdig)\n if topad >0:\n rotdig='0'*topad+rotdig\n elif topad <0:\n rotdig=rotdig[-topad:]\n \n \n if torot <0:\n shifted=rotdig[:torot]\n else:\n shifted=rotdig+'0'*torot\n shifted=shifted[-context.prec:]\n \n return _dec_from_triple(self._sign,\n shifted.lstrip('0')or '0',self._exp)\n \n \n def __reduce__(self):\n return(self.__class__,(str(self),))\n \n def __copy__(self):\n if type(self)is Decimal:\n return self\n return self.__class__(str(self))\n \n def __deepcopy__(self,memo):\n if type(self)is Decimal:\n return self\n return self.__class__(str(self))\n \n \n 
\n def __format__(self,specifier,context=None,_localeconv=None):\n ''\n\n\n\n\n\n\n \n \n \n \n \n \n \n if context is None:\n context=getcontext()\n \n spec=_parse_format_specifier(specifier,_localeconv=_localeconv)\n \n \n if self._is_special:\n sign=_format_sign(self._sign,spec)\n body=str(self.copy_abs())\n if spec['type']=='%':\n body +='%'\n return _format_align(sign,body,spec)\n \n \n if spec['type']is None:\n spec['type']=['g','G'][context.capitals]\n \n \n if spec['type']=='%':\n self=_dec_from_triple(self._sign,self._int,self._exp+2)\n \n \n rounding=context.rounding\n precision=spec['precision']\n if precision is not None:\n if spec['type']in 'eE':\n self=self._round(precision+1,rounding)\n elif spec['type']in 'fF%':\n self=self._rescale(-precision,rounding)\n elif spec['type']in 'gG'and len(self._int)>precision:\n self=self._round(precision,rounding)\n \n \n if not self and self._exp >0 and spec['type']in 'fF%':\n self=self._rescale(0,rounding)\n if not self and spec['no_neg_0']and self._sign:\n adjusted_sign=0\n else:\n adjusted_sign=self._sign\n \n \n leftdigits=self._exp+len(self._int)\n if spec['type']in 'eE':\n if not self and precision is not None:\n dotplace=1 -precision\n else:\n dotplace=1\n elif spec['type']in 'fF%':\n dotplace=leftdigits\n elif spec['type']in 'gG':\n if self._exp <=0 and leftdigits >-6:\n dotplace=leftdigits\n else:\n dotplace=1\n \n \n if dotplace <0:\n intpart='0'\n fracpart='0'*(-dotplace)+self._int\n elif dotplace >len(self._int):\n intpart=self._int+'0'*(dotplace -len(self._int))\n fracpart=''\n else:\n intpart=self._int[:dotplace]or '0'\n fracpart=self._int[dotplace:]\n exp=leftdigits -dotplace\n \n \n \n return _format_number(adjusted_sign,intpart,fracpart,exp,spec)\n \ndef _dec_from_triple(sign,coefficient,exponent,special=False):\n ''\n\n\n\n\n \n \n self=object.__new__(Decimal)\n self._sign=sign\n self._int=coefficient\n self._exp=exponent\n self._is_special=special\n \n return self\n \n \n \n \n_numbers.Number.register(Decimal)\n\n\n\n\nclass _ContextManager(object):\n ''\n\n\n\n \n def __init__(self,new_context):\n self.new_context=new_context.copy()\n def __enter__(self):\n self.saved_context=getcontext()\n setcontext(self.new_context)\n return self.new_context\n def __exit__(self,t,v,tb):\n setcontext(self.saved_context)\n \nclass Context(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,prec=None,rounding=None,Emin=None,Emax=None,\n capitals=None,clamp=None,flags=None,traps=None,\n _ignored_flags=None):\n \n \n try:\n dc=DefaultContext\n except NameError:\n pass\n \n self.prec=prec if prec is not None else dc.prec\n self.rounding=rounding if rounding is not None else dc.rounding\n self.Emin=Emin if Emin is not None else dc.Emin\n self.Emax=Emax if Emax is not None else dc.Emax\n self.capitals=capitals if capitals is not None else dc.capitals\n self.clamp=clamp if clamp is not None else dc.clamp\n \n if _ignored_flags is None:\n self._ignored_flags=[]\n else:\n self._ignored_flags=_ignored_flags\n \n if traps is None:\n self.traps=dc.traps.copy()\n elif not isinstance(traps,dict):\n self.traps=dict((s,int(s in traps))for s in _signals+traps)\n else:\n self.traps=traps\n \n if flags is None:\n self.flags=dict.fromkeys(_signals,0)\n elif not isinstance(flags,dict):\n self.flags=dict((s,int(s in flags))for s in _signals+flags)\n else:\n self.flags=flags\n \n def _set_integer_check(self,name,value,vmin,vmax):\n if not isinstance(value,int):\n raise TypeError(\"%s must be an integer\"%name)\n if vmin =='-inf':\n if value 
>vmax:\n raise ValueError(\"%s must be in [%s, %d]. got: %s\"%(name,vmin,vmax,value))\n elif vmax =='inf':\n if value vmax:\n raise ValueError(\"%s must be in [%d, %d]. got %s\"%(name,vmin,vmax,value))\n return object.__setattr__(self,name,value)\n \n def _set_signal_dict(self,name,d):\n if not isinstance(d,dict):\n raise TypeError(\"%s must be a signal dict\"%d)\n for key in d:\n if not key in _signals:\n raise KeyError(\"%s is not a valid signal dict\"%d)\n for key in _signals:\n if not key in d:\n raise KeyError(\"%s is not a valid signal dict\"%d)\n return object.__setattr__(self,name,d)\n \n def __setattr__(self,name,value):\n if name =='prec':\n return self._set_integer_check(name,value,1,'inf')\n elif name =='Emin':\n return self._set_integer_check(name,value,'-inf',0)\n elif name =='Emax':\n return self._set_integer_check(name,value,0,'inf')\n elif name =='capitals':\n return self._set_integer_check(name,value,0,1)\n elif name =='clamp':\n return self._set_integer_check(name,value,0,1)\n elif name =='rounding':\n if not value in _rounding_modes:\n \n \n raise TypeError(\"%s: invalid rounding mode\"%value)\n return object.__setattr__(self,name,value)\n elif name =='flags'or name =='traps':\n return self._set_signal_dict(name,value)\n elif name =='_ignored_flags':\n return object.__setattr__(self,name,value)\n else:\n raise AttributeError(\n \"'decimal.Context' object has no attribute '%s'\"%name)\n \n def __delattr__(self,name):\n raise AttributeError(\"%s cannot be deleted\"%name)\n \n \n def __reduce__(self):\n flags=[sig for sig,v in self.flags.items()if v]\n traps=[sig for sig,v in self.traps.items()if v]\n return(self.__class__,\n (self.prec,self.rounding,self.Emin,self.Emax,\n self.capitals,self.clamp,flags,traps))\n \n def __repr__(self):\n ''\n s=[]\n s.append('Context(prec=%(prec)d, rounding=%(rounding)s, '\n 'Emin=%(Emin)d, Emax=%(Emax)d, capitals=%(capitals)d, '\n 'clamp=%(clamp)d'\n %vars(self))\n names=[f.__name__ for f,v in self.flags.items()if v]\n s.append('flags=['+', '.join(names)+']')\n names=[t.__name__ for t,v in self.traps.items()if v]\n s.append('traps=['+', '.join(names)+']')\n return ', '.join(s)+')'\n \n def clear_flags(self):\n ''\n for flag in self.flags:\n self.flags[flag]=0\n \n def clear_traps(self):\n ''\n for flag in self.traps:\n self.traps[flag]=0\n \n def _shallow_copy(self):\n ''\n nc=Context(self.prec,self.rounding,self.Emin,self.Emax,\n self.capitals,self.clamp,self.flags,self.traps,\n self._ignored_flags)\n return nc\n \n def copy(self):\n ''\n nc=Context(self.prec,self.rounding,self.Emin,self.Emax,\n self.capitals,self.clamp,\n self.flags.copy(),self.traps.copy(),\n self._ignored_flags)\n return nc\n __copy__=copy\n \n def _raise_error(self,condition,explanation=None,*args):\n ''\n\n\n\n\n\n \n error=_condition_map.get(condition,condition)\n if error in self._ignored_flags:\n \n return error().handle(self,*args)\n \n self.flags[error]=1\n if not self.traps[error]:\n \n return condition().handle(self,*args)\n \n \n \n raise error(explanation)\n \n def _ignore_all_flags(self):\n ''\n return self._ignore_flags(*_signals)\n \n def _ignore_flags(self,*flags):\n ''\n \n \n self._ignored_flags=(self._ignored_flags+list(flags))\n return list(flags)\n \n def _regard_flags(self,*flags):\n ''\n if flags and isinstance(flags[0],(tuple,list)):\n flags=flags[0]\n for flag in flags:\n self._ignored_flags.remove(flag)\n \n \n __hash__=None\n \n def Etiny(self):\n ''\n return int(self.Emin -self.prec+1)\n \n def Etop(self):\n ''\n return int(self.Emax 
-self.prec+1)\n \n def _set_rounding(self,type):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n rounding=self.rounding\n self.rounding=type\n return rounding\n \n def create_decimal(self,num='0'):\n ''\n\n\n \n \n if isinstance(num,str)and(num !=num.strip()or '_'in num):\n return self._raise_error(ConversionSyntax,\n \"trailing or leading whitespace and \"\n \"underscores are not permitted.\")\n \n d=Decimal(num,context=self)\n if d._isnan()and len(d._int)>self.prec -self.clamp:\n return self._raise_error(ConversionSyntax,\n \"diagnostic info too long in NaN\")\n return d._fix(self)\n \n def create_decimal_from_float(self,f):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n d=Decimal.from_float(f)\n return d._fix(self)\n \n \n def abs(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.__abs__(context=self)\n \n def add(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__add__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def _apply(self,a):\n return str(a._fix(self))\n \n def canonical(self,a):\n ''\n\n\n\n\n\n\n \n if not isinstance(a,Decimal):\n raise TypeError(\"canonical requires a Decimal as an argument.\")\n return a.canonical()\n \n def compare(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.compare(b,context=self)\n \n def compare_signal(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.compare_signal(b,context=self)\n \n def compare_total(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.compare_total(b)\n \n def compare_total_mag(self,a,b):\n ''\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.compare_total_mag(b)\n \n def copy_abs(self,a):\n ''\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.copy_abs()\n \n def copy_decimal(self,a):\n ''\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return Decimal(a)\n \n def copy_negate(self,a):\n ''\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.copy_negate()\n \n def copy_sign(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.copy_sign(b)\n \n def divide(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__truediv__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def divide_int(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__floordiv__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def divmod(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__divmod__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def exp(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.exp(context=self)\n \n def fma(self,a,b,c):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.fma(b,c,context=self)\n \n def is_canonical(self,a):\n ''\n\n\n\n\n\n\n \n if not isinstance(a,Decimal):\n raise TypeError(\"is_canonical requires a Decimal as an argument.\")\n return a.is_canonical()\n \n def is_finite(self,a):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_finite()\n \n def is_infinite(self,a):\n ''\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_infinite()\n \n def is_nan(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_nan()\n \n def is_normal(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_normal(context=self)\n \n def is_qnan(self,a):\n ''\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_qnan()\n \n def is_signed(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_signed()\n \n def is_snan(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_snan()\n \n def is_subnormal(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_subnormal(context=self)\n \n def is_zero(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_zero()\n \n def ln(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.ln(context=self)\n \n def log10(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.log10(context=self)\n \n def logb(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.logb(context=self)\n \n def logical_and(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.logical_and(b,context=self)\n \n def logical_invert(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.logical_invert(context=self)\n \n def logical_or(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.logical_or(b,context=self)\n \n def logical_xor(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.logical_xor(b,context=self)\n \n def max(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.max(b,context=self)\n \n def max_mag(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.max_mag(b,context=self)\n \n def min(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.min(b,context=self)\n \n def min_mag(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.min_mag(b,context=self)\n \n def minus(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.__neg__(context=self)\n \n def multiply(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__mul__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def next_minus(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.next_minus(context=self)\n \n def next_plus(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.next_plus(context=self)\n \n def next_toward(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.next_toward(b,context=self)\n \n def normalize(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.normalize(context=self)\n \n 
def number_class(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.number_class(context=self)\n \n def plus(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.__pos__(context=self)\n \n def power(self,a,b,modulo=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__pow__(b,modulo,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def quantize(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.quantize(b,context=self)\n \n def radix(self):\n ''\n\n\n\n \n return Decimal(10)\n \n def remainder(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__mod__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def remainder_near(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.remainder_near(b,context=self)\n \n def rotate(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.rotate(b,context=self)\n \n def same_quantum(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.same_quantum(b)\n \n def scaleb(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.scaleb(b,context=self)\n \n def shift(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.shift(b,context=self)\n \n def sqrt(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.sqrt(context=self)\n \n def subtract(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__sub__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def to_eng_string(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.to_eng_string(context=self)\n \n def to_sci_string(self,a):\n ''\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.__str__(context=self)\n \n def to_integral_exact(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.to_integral_exact(context=self)\n \n def to_integral_value(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.to_integral_value(context=self)\n \n \n to_integral=to_integral_value\n \nclass _WorkRep(object):\n __slots__=('sign','int','exp')\n \n \n \n \n def __init__(self,value=None):\n if value is None:\n self.sign=None\n self.int=0\n self.exp=None\n elif isinstance(value,Decimal):\n self.sign=value._sign\n self.int=int(value._int)\n self.exp=value._exp\n else:\n \n self.sign=value[0]\n self.int=value[1]\n self.exp=value[2]\n \n def __repr__(self):\n return \"(%r, %r, %r)\"%(self.sign,self.int,self.exp)\n \n \n \ndef _normalize(op1,op2,prec=0):\n ''\n\n\n \n if op1.exp =0:\n return n *10 **e\n else:\n \n str_n=str(abs(n))\n 
val_n=len(str_n)-len(str_n.rstrip('0'))\n return None if val_n <-e else n //10 **-e\n \ndef _sqrt_nearest(n,a):\n ''\n\n\n\n\n \n if n <=0 or a <=0:\n raise ValueError(\"Both arguments to _sqrt_nearest should be positive.\")\n \n b=0\n while a !=b:\n b,a=a,a --n //a >>1\n return a\n \ndef _rshift_nearest(x,shift):\n ''\n\n\n \n b,q=1 <>shift\n return q+(2 *(x&(b -1))+(q&1)>b)\n \ndef _div_nearest(a,b):\n ''\n\n\n \n q,r=divmod(a,b)\n return q+(2 *r+(q&1)>b)\n \ndef _ilog(x,M,L=8):\n ''\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n y=x -M\n \n R=0\n while(R <=L and abs(y)<=M or\n R >L and abs(y)>>R -L >=M):\n y=_div_nearest((M *y)<<1,\n M+_sqrt_nearest(M *(M+_rshift_nearest(y,R)),M))\n R +=1\n \n \n T=-int(-10 *len(str(M))//(3 *L))\n yshift=_rshift_nearest(y,R)\n w=_div_nearest(M,T)\n for k in range(T -1,0,-1):\n w=_div_nearest(M,k)-_div_nearest(yshift *w,M)\n \n return _div_nearest(w *y,M)\n \ndef _dlog10(c,e,p):\n ''\n\n \n \n \n \n p +=2\n \n \n \n \n \n l=len(str(c))\n f=e+l -(e+l >=1)\n \n if p >0:\n M=10 **p\n k=e+p -f\n if k >=0:\n c *=10 **k\n else:\n c=_div_nearest(c,10 **-k)\n \n log_d=_ilog(c,M)\n log_10=_log10_digits(p)\n log_d=_div_nearest(log_d *M,log_10)\n log_tenpower=f *M\n else:\n log_d=0\n log_tenpower=_div_nearest(f,10 **-p)\n \n return _div_nearest(log_tenpower+log_d,100)\n \ndef _dlog(c,e,p):\n ''\n\n \n \n \n \n p +=2\n \n \n \n \n l=len(str(c))\n f=e+l -(e+l >=1)\n \n \n if p >0:\n k=e+p -f\n if k >=0:\n c *=10 **k\n else:\n c=_div_nearest(c,10 **-k)\n \n \n log_d=_ilog(c,10 **p)\n else:\n \n log_d=0\n \n \n if f:\n extra=len(str(abs(f)))-1\n if p+extra >=0:\n \n \n f_log_ten=_div_nearest(f *_log10_digits(p+extra),10 **extra)\n else:\n f_log_ten=0\n else:\n f_log_ten=0\n \n \n return _div_nearest(f_log_ten+log_d,100)\n \nclass _Log10Memoize(object):\n ''\n\n \n def __init__(self):\n self.digits=\"23025850929940456840179914546843642076011014886\"\n \n def getdigits(self,p):\n ''\n\n\n \n \n \n \n \n if p <0:\n raise ValueError(\"p should be nonnegative\")\n \n if p >=len(self.digits):\n \n \n extra=3\n while True:\n \n M=10 **(p+extra+2)\n digits=str(_div_nearest(_ilog(10 *M,M),100))\n if digits[-extra:]!='0'*extra:\n break\n extra +=3\n \n \n self.digits=digits.rstrip('0')[:-1]\n return int(self.digits[:p+1])\n \n_log10_digits=_Log10Memoize().getdigits\n\ndef _iexp(x,M,L=8):\n ''\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n R=_nbits((x <=0:\n cshift=c *10 **shift\n else:\n cshift=c //10 **-shift\n quot,rem=divmod(cshift,_log10_digits(q))\n \n \n rem=_div_nearest(rem,10 **extra)\n \n \n return _div_nearest(_iexp(rem,10 **p),1000),quot -p+3\n \ndef _dpower(xc,xe,yc,ye,p):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n b=len(str(abs(yc)))+ye\n \n \n lxc=_dlog(xc,xe,p+b+1)\n \n \n shift=ye -b\n if shift >=0:\n pc=lxc *yc *10 **shift\n else:\n pc=_div_nearest(lxc *yc,10 **-shift)\n \n if pc ==0:\n \n \n if((len(str(xc))+xe >=1)==(yc >0)):\n coeff,exp=10 **(p -1)+1,1 -p\n else:\n coeff,exp=10 **p -1,-p\n else:\n coeff,exp=_dexp(pc,-(p+1),p+1)\n coeff=_div_nearest(coeff,10)\n exp +=1\n \n return coeff,exp\n \ndef _log10_lb(c,correction={\n'1':100,'2':70,'3':53,'4':40,'5':31,\n'6':23,'7':16,'8':10,'9':5}):\n ''\n if c <=0:\n raise ValueError(\"The argument to _log10_lb should be nonnegative.\")\n str_c=str(c)\n return 100 *len(str_c)-correction[str_c[0]]\n \n \n \ndef _convert_other(other,raiseit=False,allow_float=False):\n ''\n\n\n\n\n\n \n if isinstance(other,Decimal):\n return other\n if isinstance(other,int):\n return Decimal(other)\n 
if allow_float and isinstance(other,float):\n return Decimal.from_float(other)\n \n if raiseit:\n raise TypeError(\"Unable to convert %s to Decimal\"%other)\n return NotImplemented\n \ndef _convert_for_comparison(self,other,equality_op=False):\n ''\n\n\n\n\n \n if isinstance(other,Decimal):\n return self,other\n \n \n \n \n \n if isinstance(other,_numbers.Rational):\n if not self._is_special:\n self=_dec_from_triple(self._sign,\n str(int(self._int)*other.denominator),\n self._exp)\n return self,Decimal(other.numerator)\n \n \n \n \n if equality_op and isinstance(other,_numbers.Complex)and other.imag ==0:\n other=other.real\n if isinstance(other,float):\n context=getcontext()\n if equality_op:\n context.flags[FloatOperation]=1\n else:\n context._raise_error(FloatOperation,\n \"strict semantics for mixing floats and Decimals are enabled\")\n return self,Decimal.from_float(other)\n return NotImplemented,NotImplemented\n \n \n \n \n \n \n \nDefaultContext=Context(\nprec=28,rounding=ROUND_HALF_EVEN,\ntraps=[DivisionByZero,Overflow,InvalidOperation],\nflags=[],\nEmax=999999,\nEmin=-999999,\ncapitals=1,\nclamp=0\n)\n\n\n\n\n\n\nBasicContext=Context(\nprec=9,rounding=ROUND_HALF_UP,\ntraps=[DivisionByZero,Overflow,InvalidOperation,Clamped,Underflow],\nflags=[],\n)\n\nExtendedContext=Context(\nprec=9,rounding=ROUND_HALF_EVEN,\ntraps=[],\nflags=[],\n)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport re\n_parser=re.compile(r\"\"\" # A numeric string consists of:\n# \\s*\n (?P[-+])? # an optional sign, followed by either...\n (\n (?=\\d|\\.\\d) # ...a number (with at least one digit)\n (?P\\d*) # having a (possibly empty) integer part\n (\\.(?P\\d*))? # followed by an optional fractional part\n (E(?P[-+]?\\d+))? # followed by an optional exponent, or...\n |\n Inf(inity)? # ...an infinity, or...\n |\n (?Ps)? 
# ...an (optionally signaling)\n NaN # NaN\n (?P\\d*) # with (possibly empty) diagnostic info.\n )\n# \\s*\n \\Z\n\"\"\",re.VERBOSE |re.IGNORECASE).match\n\n_all_zeros=re.compile('0*$').match\n_exact_half=re.compile('50*$').match\n\n\n\n\n\n\n\n\n\n\n_parse_format_specifier_regex=re.compile(r\"\"\"\\A\n(?:\n (?P.)?\n (?P[<>=^])\n)?\n(?P[-+ ])?\n(?Pz)?\n(?P\\#)?\n(?P0)?\n(?P(?!0)\\d+)?\n(?P,)?\n(?:\\.(?P0|(?!0)\\d+))?\n(?P[eEfFgGn%])?\n\\Z\n\"\"\",re.VERBOSE |re.DOTALL)\n\ndel re\n\n\n\n\ntry:\n import locale as _locale\nexcept ImportError:\n pass\n \ndef _parse_format_specifier(format_spec,_localeconv=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n m=_parse_format_specifier_regex.match(format_spec)\n if m is None:\n raise ValueError(\"Invalid format specifier: \"+format_spec)\n \n \n format_dict=m.groupdict()\n \n \n \n fill=format_dict['fill']\n align=format_dict['align']\n format_dict['zeropad']=(format_dict['zeropad']is not None)\n if format_dict['zeropad']:\n if fill is not None:\n raise ValueError(\"Fill character conflicts with '0'\"\n \" in format specifier: \"+format_spec)\n if align is not None:\n raise ValueError(\"Alignment conflicts with '0' in \"\n \"format specifier: \"+format_spec)\n format_dict['fill']=fill or ' '\n \n \n \n format_dict['align']=align or '>'\n \n \n if format_dict['sign']is None:\n format_dict['sign']='-'\n \n \n format_dict['minimumwidth']=int(format_dict['minimumwidth']or '0')\n if format_dict['precision']is not None:\n format_dict['precision']=int(format_dict['precision'])\n \n \n \n if format_dict['precision']==0:\n if format_dict['type']is None or format_dict['type']in 'gGn':\n format_dict['precision']=1\n \n \n \n if format_dict['type']=='n':\n \n format_dict['type']='g'\n if _localeconv is None:\n _localeconv=_locale.localeconv()\n if format_dict['thousands_sep']is not None:\n raise ValueError(\"Explicit thousands separator conflicts with \"\n \"'n' type in format specifier: \"+format_spec)\n format_dict['thousands_sep']=_localeconv['thousands_sep']\n format_dict['grouping']=_localeconv['grouping']\n format_dict['decimal_point']=_localeconv['decimal_point']\n else:\n if format_dict['thousands_sep']is None:\n format_dict['thousands_sep']=''\n format_dict['grouping']=[3,0]\n format_dict['decimal_point']='.'\n \n return format_dict\n \ndef _format_align(sign,body,spec):\n ''\n\n\n\n\n \n \n minimumwidth=spec['minimumwidth']\n fill=spec['fill']\n padding=fill *(minimumwidth -len(sign)-len(body))\n \n align=spec['align']\n if align =='<':\n result=sign+body+padding\n elif align =='>':\n result=padding+sign+body\n elif align =='=':\n result=sign+padding+body\n elif align =='^':\n half=len(padding)//2\n result=padding[:half]+sign+body+padding[half:]\n else:\n raise ValueError('Unrecognised alignment field')\n \n return result\n \ndef _group_lengths(grouping):\n ''\n\n\n \n \n \n \n \n \n \n \n \n from itertools import chain,repeat\n if not grouping:\n return[]\n elif grouping[-1]==0 and len(grouping)>=2:\n return chain(grouping[:-1],repeat(grouping[-2]))\n elif grouping[-1]==_locale.CHAR_MAX:\n return grouping[:-1]\n else:\n raise ValueError('unrecognised format for grouping')\n \ndef _insert_thousands_sep(digits,spec,min_width=1):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n sep=spec['thousands_sep']\n grouping=spec['grouping']\n \n groups=[]\n for l in _group_lengths(grouping):\n if l <=0:\n raise ValueError(\"group length should be positive\")\n \n l=min(max(len(digits),min_width,1),l)\n groups.append('0'*(l -len(digits))+digits[-l:])\n digits=digits[:-l]\n 
min_width -=l\n if not digits and min_width <=0:\n break\n min_width -=len(sep)\n else:\n l=max(len(digits),min_width,1)\n groups.append('0'*(l -len(digits))+digits[-l:])\n return sep.join(reversed(groups))\n \ndef _format_sign(is_negative,spec):\n ''\n \n if is_negative:\n return '-'\n elif spec['sign']in ' +':\n return spec['sign']\n else:\n return ''\n \ndef _format_number(is_negative,intpart,fracpart,exp,spec):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n sign=_format_sign(is_negative,spec)\n \n if fracpart or spec['alt']:\n fracpart=spec['decimal_point']+fracpart\n \n if exp !=0 or spec['type']in 'eE':\n echar={'E':'E','e':'e','G':'E','g':'e'}[spec['type']]\n fracpart +=\"{0}{1:+}\".format(echar,exp)\n if spec['type']=='%':\n fracpart +='%'\n \n if spec['zeropad']:\n min_width=spec['minimumwidth']-len(fracpart)-len(sign)\n else:\n min_width=0\n intpart=_insert_thousands_sep(intpart,spec,min_width)\n \n return _format_align(sign,intpart+fracpart,spec)\n \n \n \n \n \n_Infinity=Decimal('Inf')\n_NegativeInfinity=Decimal('-Inf')\n_NaN=Decimal('NaN')\n_Zero=Decimal(0)\n_One=Decimal(1)\n_NegativeOne=Decimal(-1)\n\n\n_SignedInfinity=(_Infinity,_NegativeInfinity)\n\n\n\n_PyHASH_MODULUS=sys.hash_info.modulus\n\n_PyHASH_INF=sys.hash_info.inf\n_PyHASH_NAN=sys.hash_info.nan\n\n\n_PyHASH_10INV=pow(10,_PyHASH_MODULUS -2,_PyHASH_MODULUS)\ndel sys\n", ["collections", "contextvars", "itertools", "locale", "math", "numbers", "re", "sys"]], "ntpath": [".py", "\n''\n\n\n\n\n\n\n\n\ncurdir='.'\npardir='..'\nextsep='.'\nsep='\\\\'\npathsep=';'\naltsep='/'\ndefpath='.;C:\\\\bin'\ndevnull='nul'\n\nimport os\nimport sys\nimport stat\nimport genericpath\nfrom genericpath import *\n\n\n__all__=[\"normcase\",\"isabs\",\"join\",\"splitdrive\",\"splitroot\",\"split\",\"splitext\",\n\"basename\",\"dirname\",\"commonprefix\",\"getsize\",\"getmtime\",\n\"getatime\",\"getctime\",\"islink\",\"exists\",\"lexists\",\"isdir\",\"isfile\",\n\"ismount\",\"expanduser\",\"expandvars\",\"normpath\",\"abspath\",\n\"curdir\",\"pardir\",\"sep\",\"pathsep\",\"defpath\",\"altsep\",\n\"extsep\",\"devnull\",\"realpath\",\"supports_unicode_filenames\",\"relpath\",\n\"samefile\",\"sameopenfile\",\"samestat\",\"commonpath\",\"isjunction\"]\n\ndef _get_bothseps(path):\n if isinstance(path,bytes):\n return b'\\\\/'\n else:\n return '\\\\/'\n \n \n \n \n \ntry:\n from _winapi import(\n LCMapStringEx as _LCMapStringEx,\n LOCALE_NAME_INVARIANT as _LOCALE_NAME_INVARIANT,\n LCMAP_LOWERCASE as _LCMAP_LOWERCASE)\n \n def normcase(s):\n ''\n\n\n \n s=os.fspath(s)\n if not s:\n return s\n if isinstance(s,bytes):\n encoding=sys.getfilesystemencoding()\n s=s.decode(encoding,'surrogateescape').replace('/','\\\\')\n s=_LCMapStringEx(_LOCALE_NAME_INVARIANT,\n _LCMAP_LOWERCASE,s)\n return s.encode(encoding,'surrogateescape')\n else:\n return _LCMapStringEx(_LOCALE_NAME_INVARIANT,\n _LCMAP_LOWERCASE,\n s.replace('/','\\\\'))\nexcept ImportError:\n def normcase(s):\n ''\n\n\n \n s=os.fspath(s)\n if isinstance(s,bytes):\n return os.fsencode(os.fsdecode(s).replace('/','\\\\').lower())\n return s.replace('/','\\\\').lower()\n \n \n \n \n \n \n \n \ndef isabs(s):\n ''\n s=os.fspath(s)\n if isinstance(s,bytes):\n sep=b'\\\\'\n altsep=b'/'\n colon_sep=b':\\\\'\n else:\n sep='\\\\'\n altsep='/'\n colon_sep=':\\\\'\n s=s[:3].replace(altsep,sep)\n \n \n if s.startswith(sep)or s.startswith(colon_sep,1):\n return True\n return False\n \n \n \ndef join(path,*paths):\n path=os.fspath(path)\n if isinstance(path,bytes):\n sep=b'\\\\'\n seps=b'\\\\/'\n colon=b':'\n else:\n 
sep='\\\\'\n seps='\\\\/'\n colon=':'\n try:\n if not paths:\n path[:0]+sep\n result_drive,result_root,result_path=splitroot(path)\n for p in map(os.fspath,paths):\n p_drive,p_root,p_path=splitroot(p)\n if p_root:\n \n if p_drive or not result_drive:\n result_drive=p_drive\n result_root=p_root\n result_path=p_path\n continue\n elif p_drive and p_drive !=result_drive:\n if p_drive.lower()!=result_drive.lower():\n \n result_drive=p_drive\n result_root=p_root\n result_path=p_path\n continue\n \n result_drive=p_drive\n \n if result_path and result_path[-1]not in seps:\n result_path=result_path+sep\n result_path=result_path+p_path\n \n if(result_path and not result_root and\n result_drive and result_drive[-1:]not in colon+seps):\n return result_drive+sep+result_path\n return result_drive+result_root+result_path\n except(TypeError,AttributeError,BytesWarning):\n genericpath._check_arg_types('join',path,*paths)\n raise\n \n \n \n \n \ndef splitdrive(p):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n drive,root,tail=splitroot(p)\n return drive,root+tail\n \n \ndef splitroot(p):\n ''\n\n\n\n\n\n\n\n\n \n p=os.fspath(p)\n if isinstance(p,bytes):\n sep=b'\\\\'\n altsep=b'/'\n colon=b':'\n unc_prefix=b'\\\\\\\\?\\\\UNC\\\\'\n empty=b''\n else:\n sep='\\\\'\n altsep='/'\n colon=':'\n unc_prefix='\\\\\\\\?\\\\UNC\\\\'\n empty=''\n normp=p.replace(altsep,sep)\n if normp[:1]==sep:\n if normp[1:2]==sep:\n \n \n start=8 if normp[:8].upper()==unc_prefix else 2\n index=normp.find(sep,start)\n if index ==-1:\n return p,empty,empty\n index2=normp.find(sep,index+1)\n if index2 ==-1:\n return p,empty,empty\n return p[:index2],p[index2:index2+1],p[index2+1:]\n else:\n \n return empty,p[:1],p[1:]\n elif normp[1:2]==colon:\n if normp[2:3]==sep:\n \n return p[:2],p[2:3],p[3:]\n else:\n \n return p[:2],empty,p[2:]\n else:\n \n return empty,empty,p\n \n \n \n \n \n \n \ndef split(p):\n ''\n\n\n \n p=os.fspath(p)\n seps=_get_bothseps(p)\n d,r,p=splitroot(p)\n \n i=len(p)\n while i and p[i -1]not in seps:\n i -=1\n head,tail=p[:i],p[i:]\n return d+r+head.rstrip(seps),tail\n \n \n \n \n \n \n \ndef splitext(p):\n p=os.fspath(p)\n if isinstance(p,bytes):\n return genericpath._splitext(p,b'\\\\',b'/',b'.')\n else:\n return genericpath._splitext(p,'\\\\','/','.')\nsplitext.__doc__=genericpath._splitext.__doc__\n\n\n\n\ndef basename(p):\n ''\n return split(p)[1]\n \n \n \n \ndef dirname(p):\n ''\n return split(p)[0]\n \n \n \n \nif hasattr(os.stat_result,'st_reparse_tag'):\n def isjunction(path):\n ''\n try:\n st=os.lstat(path)\n except(OSError,ValueError,AttributeError):\n return False\n return bool(st.st_reparse_tag ==stat.IO_REPARSE_TAG_MOUNT_POINT)\nelse:\n def isjunction(path):\n ''\n os.fspath(path)\n return False\n \n \n \n \ndef lexists(path):\n ''\n try:\n st=os.lstat(path)\n except(OSError,ValueError):\n return False\n return True\n \n \n \n \n \n \n \n \n \n \n \ntry:\n from nt import _getvolumepathname\nexcept ImportError:\n _getvolumepathname=None\ndef ismount(path):\n ''\n \n path=os.fspath(path)\n seps=_get_bothseps(path)\n path=abspath(path)\n drive,root,rest=splitroot(path)\n if drive and drive[0]in seps:\n return not rest\n if root and not rest:\n return True\n \n if _getvolumepathname:\n x=path.rstrip(seps)\n y=_getvolumepathname(path).rstrip(seps)\n return x.casefold()==y.casefold()\n else:\n return False\n \n \n \n \n \n \n \n \n \n \n \ndef expanduser(path):\n ''\n\n \n path=os.fspath(path)\n if isinstance(path,bytes):\n tilde=b'~'\n else:\n tilde='~'\n if not path.startswith(tilde):\n return path\n 
i,n=1,len(path)\n while i 0 and comps[i -1]!=pardir:\n del comps[i -1:i+1]\n i -=1\n elif i ==0 and root:\n del comps[i]\n else:\n i +=1\n else:\n i +=1\n \n if not prefix and not comps:\n comps.append(curdir)\n return prefix+sep.join(comps)\n \nelse:\n def normpath(path):\n ''\n path=os.fspath(path)\n if isinstance(path,bytes):\n return os.fsencode(_path_normpath(os.fsdecode(path)))or b\".\"\n return _path_normpath(path)or \".\"\n \n \ndef _abspath_fallback(path):\n ''\n\n\n\n \n \n path=os.fspath(path)\n if not isabs(path):\n if isinstance(path,bytes):\n cwd=os.getcwdb()\n else:\n cwd=os.getcwd()\n path=join(cwd,path)\n return normpath(path)\n \n \ntry:\n from nt import _getfullpathname\n \nexcept ImportError:\n abspath=_abspath_fallback\n \nelse:\n def abspath(path):\n ''\n try:\n return _getfullpathname(normpath(path))\n except(OSError,ValueError):\n return _abspath_fallback(path)\n \ntry:\n from nt import _getfinalpathname,readlink as _nt_readlink\nexcept ImportError:\n\n realpath=abspath\nelse:\n def _readlink_deep(path):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n allowed_winerror=1,2,3,5,21,32,50,67,87,4390,4392,4393\n \n seen=set()\n while normcase(path)not in seen:\n seen.add(normcase(path))\n try:\n old_path=path\n path=_nt_readlink(path)\n \n \n if not isabs(path):\n \n \n \n if not islink(old_path):\n path=old_path\n break\n path=normpath(join(dirname(old_path),path))\n except OSError as ex:\n if ex.winerror in allowed_winerror:\n break\n raise\n except ValueError:\n \n break\n return path\n \n def _getfinalpathname_nonstrict(path):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n allowed_winerror=1,2,3,5,21,32,50,53,65,67,87,123,161,1920,1921\n \n \n \n tail=path[:0]\n while path:\n try:\n path=_getfinalpathname(path)\n return join(path,tail)if tail else path\n except OSError as ex:\n if ex.winerror not in allowed_winerror:\n raise\n try:\n \n \n \n new_path=_readlink_deep(path)\n if new_path !=path:\n return join(new_path,tail)if tail else new_path\n except OSError:\n \n pass\n path,name=split(path)\n \n \n \n if path and not name:\n return path+tail\n tail=join(name,tail)if tail else name\n return tail\n \n def realpath(path,*,strict=False):\n path=normpath(path)\n if isinstance(path,bytes):\n prefix=b'\\\\\\\\?\\\\'\n unc_prefix=b'\\\\\\\\?\\\\UNC\\\\'\n new_unc_prefix=b'\\\\\\\\'\n cwd=os.getcwdb()\n \n if normcase(path)==normcase(os.fsencode(devnull)):\n return b'\\\\\\\\.\\\\NUL'\n else:\n prefix='\\\\\\\\?\\\\'\n unc_prefix='\\\\\\\\?\\\\UNC\\\\'\n new_unc_prefix='\\\\\\\\'\n cwd=os.getcwd()\n \n if normcase(path)==normcase(devnull):\n return '\\\\\\\\.\\\\NUL'\n had_prefix=path.startswith(prefix)\n if not had_prefix and not isabs(path):\n path=join(cwd,path)\n try:\n path=_getfinalpathname(path)\n initial_winerror=0\n except OSError as ex:\n if strict:\n raise\n initial_winerror=ex.winerror\n path=_getfinalpathname_nonstrict(path)\n \n \n \n if not had_prefix and path.startswith(prefix):\n \n \n if path.startswith(unc_prefix):\n spath=new_unc_prefix+path[len(unc_prefix):]\n else:\n spath=path[len(prefix):]\n \n try:\n if _getfinalpathname(spath)==path:\n path=spath\n except OSError as ex:\n \n \n if ex.winerror ==initial_winerror:\n path=spath\n return path\n \n \n \nsupports_unicode_filenames=True\n\ndef relpath(path,start=None):\n ''\n path=os.fspath(path)\n if isinstance(path,bytes):\n sep=b'\\\\'\n curdir=b'.'\n pardir=b'..'\n else:\n sep='\\\\'\n curdir='.'\n pardir='..'\n \n if start is None:\n start=curdir\n \n if not path:\n raise ValueError(\"no path 
specified\")\n \n start=os.fspath(start)\n try:\n start_abs=abspath(normpath(start))\n path_abs=abspath(normpath(path))\n start_drive,_,start_rest=splitroot(start_abs)\n path_drive,_,path_rest=splitroot(path_abs)\n if normcase(start_drive)!=normcase(path_drive):\n raise ValueError(\"path is on mount %r, start on mount %r\"%(\n path_drive,start_drive))\n \n start_list=[x for x in start_rest.split(sep)if x]\n path_list=[x for x in path_rest.split(sep)if x]\n \n i=0\n for e1,e2 in zip(start_list,path_list):\n if normcase(e1)!=normcase(e2):\n break\n i +=1\n \n rel_list=[pardir]*(len(start_list)-i)+path_list[i:]\n if not rel_list:\n return curdir\n return join(*rel_list)\n except(TypeError,ValueError,AttributeError,BytesWarning,DeprecationWarning):\n genericpath._check_arg_types('relpath',path,start)\n raise\n \n \n \n \n \n \n \n \n \n \n \n \ndef commonpath(paths):\n ''\n \n if not paths:\n raise ValueError('commonpath() arg is an empty sequence')\n \n paths=tuple(map(os.fspath,paths))\n if isinstance(paths[0],bytes):\n sep=b'\\\\'\n altsep=b'/'\n curdir=b'.'\n else:\n sep='\\\\'\n altsep='/'\n curdir='.'\n \n try:\n drivesplits=[splitroot(p.replace(altsep,sep).lower())for p in paths]\n split_paths=[p.split(sep)for d,r,p in drivesplits]\n \n if len({r for d,r,p in drivesplits})!=1:\n raise ValueError(\"Can't mix absolute and relative paths\")\n \n \n \n \n if len({d for d,r,p in drivesplits})!=1:\n raise ValueError(\"Paths don't have the same drive\")\n \n drive,root,path=splitroot(paths[0].replace(altsep,sep))\n common=path.split(sep)\n common=[c for c in common if c and c !=curdir]\n \n split_paths=[[c for c in s if c and c !=curdir]for s in split_paths]\n s1=min(split_paths)\n s2=max(split_paths)\n for i,c in enumerate(s1):\n if c !=s2[i]:\n common=common[:i]\n break\n else:\n common=common[:len(s1)]\n \n return drive+root+sep.join(common)\n except(TypeError,AttributeError):\n genericpath._check_arg_types('commonpath',*paths)\n raise\n \n \ntry:\n\n\n\n from nt import _path_isdir as isdir\n from nt import _path_isfile as isfile\n from nt import _path_islink as islink\n from nt import _path_exists as exists\nexcept ImportError:\n\n pass\n \n \ntry:\n from nt import _path_isdevdrive\nexcept ImportError:\n def isdevdrive(path):\n ''\n \n return False\nelse:\n def isdevdrive(path):\n ''\n try:\n return _path_isdevdrive(abspath(path))\n except OSError:\n return False\n", ["_winapi", "genericpath", "nt", "os", "stat", "string", "sys"]], "tokenize": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__author__='Ka-Ping Yee '\n__credits__=('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '\n'Skip Montanaro, Raymond Hettinger, Trent Nelson, '\n'Michael Foord')\nfrom builtins import open as _builtin_open\nfrom codecs import lookup,BOM_UTF8\nimport collections\nimport functools\nfrom io import TextIOWrapper\nimport itertools as _itertools\nimport re\nimport sys\nfrom token import *\nfrom token import EXACT_TOKEN_TYPES\nimport _tokenize\n\ncookie_re=re.compile(r'^[ \\t\\f]*#.*?coding[:=][ \\t]*([-\\w.]+)',re.ASCII)\nblank_re=re.compile(br'^[ \\t\\f]*(?:[#\\r\\n]|$)',re.ASCII)\n\nimport token\n__all__=token.__all__+[\"tokenize\",\"generate_tokens\",\"detect_encoding\",\n\"untokenize\",\"TokenInfo\"]\ndel token\n\nclass TokenInfo(collections.namedtuple('TokenInfo','type string start end line')):\n def __repr__(self):\n annotated_type='%d (%s)'%(self.type,tok_name[self.type])\n return('TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)'%\n self._replace(type=annotated_type))\n \n @property\n 
def exact_type(self):\n if self.type ==OP and self.string in EXACT_TOKEN_TYPES:\n return EXACT_TOKEN_TYPES[self.string]\n else:\n return self.type\n \ndef group(*choices):return '('+'|'.join(choices)+')'\ndef any(*choices):return group(*choices)+'*'\ndef maybe(*choices):return group(*choices)+'?'\n\n\n\nWhitespace=r'[ \\f\\t]*'\nComment=r'#[^\\r\\n]*'\nIgnore=Whitespace+any(r'\\\\\\r?\\n'+Whitespace)+maybe(Comment)\nName=r'\\w+'\n\nHexnumber=r'0[xX](?:_?[0-9a-fA-F])+'\nBinnumber=r'0[bB](?:_?[01])+'\nOctnumber=r'0[oO](?:_?[0-7])+'\nDecnumber=r'(?:0(?:_?0)*|[1-9](?:_?[0-9])*)'\nIntnumber=group(Hexnumber,Binnumber,Octnumber,Decnumber)\nExponent=r'[eE][-+]?[0-9](?:_?[0-9])*'\nPointfloat=group(r'[0-9](?:_?[0-9])*\\.(?:[0-9](?:_?[0-9])*)?',\nr'\\.[0-9](?:_?[0-9])*')+maybe(Exponent)\nExpfloat=r'[0-9](?:_?[0-9])*'+Exponent\nFloatnumber=group(Pointfloat,Expfloat)\nImagnumber=group(r'[0-9](?:_?[0-9])*[jJ]',Floatnumber+r'[jJ]')\nNumber=group(Imagnumber,Floatnumber,Intnumber)\n\n\ndef _all_string_prefixes():\n\n\n\n _valid_string_prefixes=['b','r','u','f','br','fr']\n \n result={''}\n for prefix in _valid_string_prefixes:\n for t in _itertools.permutations(prefix):\n \n \n for u in _itertools.product(*[(c,c.upper())for c in t]):\n result.add(''.join(u))\n return result\n \n@functools.lru_cache\ndef _compile(expr):\n return re.compile(expr,re.UNICODE)\n \n \n \nStringPrefix=group(*_all_string_prefixes())\n\n\nSingle=r\"[^'\\\\]*(?:\\\\.[^'\\\\]*)*'\"\n\nDouble=r'[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*\"'\n\nSingle3=r\"[^'\\\\]*(?:(?:\\\\.|'(?!''))[^'\\\\]*)*'''\"\n\nDouble3=r'[^\"\\\\]*(?:(?:\\\\.|\"(?!\"\"))[^\"\\\\]*)*\"\"\"'\nTriple=group(StringPrefix+\"'''\",StringPrefix+'\"\"\"')\n\nString=group(StringPrefix+r\"'[^\\n'\\\\]*(?:\\\\.[^\\n'\\\\]*)*'\",\nStringPrefix+r'\"[^\\n\"\\\\]*(?:\\\\.[^\\n\"\\\\]*)*\"')\n\n\n\n\nSpecial=group(*map(re.escape,sorted(EXACT_TOKEN_TYPES,reverse=True)))\nFunny=group(r'\\r?\\n',Special)\n\nPlainToken=group(Number,Funny,String,Name)\nToken=Ignore+PlainToken\n\n\nContStr=group(StringPrefix+r\"'[^\\n'\\\\]*(?:\\\\.[^\\n'\\\\]*)*\"+\ngroup(\"'\",r'\\\\\\r?\\n'),\nStringPrefix+r'\"[^\\n\"\\\\]*(?:\\\\.[^\\n\"\\\\]*)*'+\ngroup('\"',r'\\\\\\r?\\n'))\nPseudoExtras=group(r'\\\\\\r?\\n|\\Z',Comment,Triple)\nPseudoToken=Whitespace+group(PseudoExtras,Number,Funny,ContStr,Name)\n\n\n\n\nendpats={}\nfor _prefix in _all_string_prefixes():\n endpats[_prefix+\"'\"]=Single\n endpats[_prefix+'\"']=Double\n endpats[_prefix+\"'''\"]=Single3\n endpats[_prefix+'\"\"\"']=Double3\ndel _prefix\n\n\n\nsingle_quoted=set()\ntriple_quoted=set()\nfor t in _all_string_prefixes():\n for u in(t+'\"',t+\"'\"):\n single_quoted.add(u)\n for u in(t+'\"\"\"',t+\"'''\"):\n triple_quoted.add(u)\ndel t,u\n\ntabsize=8\n\nclass TokenError(Exception):pass\n\n\nclass StopTokenizing(Exception):pass\n\nclass Untokenizer:\n\n def __init__(self):\n self.tokens=[]\n self.prev_row=1\n self.prev_col=0\n self.encoding=None\n \n def add_whitespace(self,start):\n row,col=start\n if row =len(indent):\n self.tokens.append(indent)\n self.prev_col=len(indent)\n startline=False\n elif tok_type ==FSTRING_MIDDLE:\n if '{'in token or '}'in token:\n end_line,end_col=end\n end=(end_line,end_col+token.count('{')+token.count('}'))\n token=re.sub('{','{{',token)\n token=re.sub('}','}}',token)\n \n \n self.add_whitespace(start)\n self.tokens.append(token)\n self.prev_row,self.prev_col=end\n if tok_type in(NEWLINE,NL):\n self.prev_row +=1\n self.prev_col=0\n return \"\".join(self.tokens)\n \n def compat(self,token,iterable):\n indents=[]\n 
toks_append=self.tokens.append\n startline=token[0]in(NEWLINE,NL)\n prevstring=False\n \n for tok in _itertools.chain([token],iterable):\n toknum,tokval=tok[:2]\n if toknum ==ENCODING:\n self.encoding=tokval\n continue\n \n if toknum in(NAME,NUMBER):\n tokval +=' '\n \n \n if toknum ==STRING:\n if prevstring:\n tokval=' '+tokval\n prevstring=True\n else:\n prevstring=False\n \n if toknum ==INDENT:\n indents.append(tokval)\n continue\n elif toknum ==DEDENT:\n indents.pop()\n continue\n elif toknum in(NEWLINE,NL):\n startline=True\n elif startline and indents:\n toks_append(indents[-1])\n startline=False\n elif toknum ==FSTRING_MIDDLE:\n if '{'in tokval or '}'in tokval:\n tokval=re.sub('{','{{',tokval)\n tokval=re.sub('}','}}',tokval)\n \n toks_append(tokval)\n \n \ndef untokenize(iterable):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n ut=Untokenizer()\n out=ut.untokenize(iterable)\n if ut.encoding is not None:\n out=out.encode(ut.encoding)\n return out\n \n \ndef _get_normal_name(orig_enc):\n ''\n \n enc=orig_enc[:12].lower().replace(\"_\",\"-\")\n if enc ==\"utf-8\"or enc.startswith(\"utf-8-\"):\n return \"utf-8\"\n if enc in(\"latin-1\",\"iso-8859-1\",\"iso-latin-1\")or\\\n enc.startswith((\"latin-1-\",\"iso-8859-1-\",\"iso-latin-1-\")):\n return \"iso-8859-1\"\n return orig_enc\n \ndef detect_encoding(readline):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try:\n filename=readline.__self__.name\n except AttributeError:\n filename=None\n bom_found=False\n encoding=None\n default='utf-8'\n def read_or_stop():\n try:\n return readline()\n except StopIteration:\n return b''\n \n def find_cookie(line):\n try:\n \n \n \n line_string=line.decode('utf-8')\n except UnicodeDecodeError:\n msg=\"invalid or missing encoding declaration\"\n if filename is not None:\n msg='{} for {!r}'.format(msg,filename)\n raise SyntaxError(msg)\n \n match=cookie_re.match(line_string)\n if not match:\n return None\n encoding=_get_normal_name(match.group(1))\n try:\n codec=lookup(encoding)\n except LookupError:\n \n if filename is None:\n msg=\"unknown encoding: \"+encoding\n else:\n msg=\"unknown encoding for {!r}: {}\".format(filename,\n encoding)\n raise SyntaxError(msg)\n \n if bom_found:\n if encoding !='utf-8':\n \n if filename is None:\n msg='encoding problem: utf-8'\n else:\n msg='encoding problem for {!r}: utf-8'.format(filename)\n raise SyntaxError(msg)\n encoding +='-sig'\n return encoding\n \n first=read_or_stop()\n if first.startswith(BOM_UTF8):\n bom_found=True\n first=first[3:]\n default='utf-8-sig'\n if not first:\n return default,[]\n \n encoding=find_cookie(first)\n if encoding:\n return encoding,[first]\n if not blank_re.match(first):\n return default,[first]\n \n second=read_or_stop()\n if not second:\n return default,[first]\n \n encoding=find_cookie(second)\n if encoding:\n return encoding,[first,second]\n \n return default,[first,second]\n \n \ndef open(filename):\n ''\n\n \n buffer=_builtin_open(filename,'rb')\n try:\n encoding,lines=detect_encoding(buffer.readline)\n buffer.seek(0)\n text=TextIOWrapper(buffer,encoding,line_buffering=True)\n text.mode='r'\n return text\n except:\n buffer.close()\n raise\n \ndef tokenize(readline):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n encoding,consumed=detect_encoding(readline)\n rl_gen=_itertools.chain(consumed,iter(readline,b\"\"))\n if encoding is not None:\n if encoding ==\"utf-8-sig\":\n \n encoding=\"utf-8\"\n yield TokenInfo(ENCODING,encoding,(0,0),(0,0),'')\n yield from _generate_tokens_from_c_tokenizer(rl_gen.__next__,encoding,extra_tokens=True)\n \ndef 
generate_tokens(readline):\n ''\n\n\n\n \n return _generate_tokens_from_c_tokenizer(readline,extra_tokens=True)\n \ndef main():\n import argparse\n \n \n def perror(message):\n sys.stderr.write(message)\n sys.stderr.write('\\n')\n \n def error(message,filename=None,location=None):\n if location:\n args=(filename,)+location+(message,)\n perror(\"%s:%d:%d: error: %s\"%args)\n elif filename:\n perror(\"%s: error: %s\"%(filename,message))\n else:\n perror(\"error: %s\"%message)\n sys.exit(1)\n \n \n parser=argparse.ArgumentParser(prog='python -m tokenize')\n parser.add_argument(dest='filename',nargs='?',\n metavar='filename.py',\n help='the file to tokenize; defaults to stdin')\n parser.add_argument('-e','--exact',dest='exact',action='store_true',\n help='display token names using the exact type')\n args=parser.parse_args()\n \n try:\n \n if args.filename:\n filename=args.filename\n with _builtin_open(filename,'rb')as f:\n tokens=list(tokenize(f.readline))\n else:\n filename=\"\"\n tokens=_generate_tokens_from_c_tokenizer(\n sys.stdin.readline,extra_tokens=True)\n \n \n \n for token in tokens:\n token_type=token.type\n if args.exact:\n token_type=token.exact_type\n token_range=\"%d,%d-%d,%d:\"%(token.start+token.end)\n print(\"%-20s%-15s%-15r\"%\n (token_range,tok_name[token_type],token.string))\n except IndentationError as err:\n line,column=err.args[1][1:3]\n error(err.args[0],filename,(line,column))\n except TokenError as err:\n line,column=err.args[1]\n error(err.args[0],filename,(line,column))\n except SyntaxError as err:\n error(err,filename)\n except OSError as err:\n error(err)\n except KeyboardInterrupt:\n print(\"interrupted\\n\")\n except Exception as err:\n perror(\"unexpected error: %s\"%err)\n raise\n \ndef _transform_msg(msg):\n ''\n\n\n\n \n if \"unterminated triple-quoted string literal\"in msg:\n return \"EOF in multi-line string\"\n return msg\n \ndef _generate_tokens_from_c_tokenizer(source,encoding=None,extra_tokens=False):\n ''\n if encoding is None:\n it=_tokenize.TokenizerIter(source,extra_tokens=extra_tokens)\n else:\n it=_tokenize.TokenizerIter(source,encoding=encoding,extra_tokens=extra_tokens)\n try:\n for info in it:\n yield TokenInfo._make(info)\n except SyntaxError as e:\n if type(e)!=SyntaxError:\n raise e from None\n msg=_transform_msg(e.msg)\n raise TokenError(msg,(e.lineno,e.offset))from None\n \n \nif __name__ ==\"__main__\":\n main()\n", ["_tokenize", "argparse", "builtins", "codecs", "collections", "functools", "io", "itertools", "re", "sys", "token"]], "uuid": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport os\nimport sys\n\nfrom enum import Enum,_simple_enum\n\n\n__author__='Ka-Ping Yee '\n\n\nif sys.platform in('win32','darwin','emscripten','wasi'):\n _AIX=_LINUX=False\nelse:\n import platform\n _platform_system=platform.system()\n _AIX=_platform_system =='AIX'\n _LINUX=_platform_system =='Linux'\n \n_MAC_DELIM=b':'\n_MAC_OMITS_LEADING_ZEROES=False\nif _AIX:\n _MAC_DELIM=b'.'\n _MAC_OMITS_LEADING_ZEROES=True\n \nRESERVED_NCS,RFC_4122,RESERVED_MICROSOFT,RESERVED_FUTURE=[\n'reserved for NCS compatibility','specified in RFC 4122',\n'reserved for Microsoft compatibility','reserved for future definition']\n\nint_=int\nbytes_=bytes\n\n\n@_simple_enum(Enum)\nclass SafeUUID:\n safe=0\n unsafe=-1\n unknown=None\n \n \nclass UUID:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('int','is_safe','__weakref__')\n \n def 
__init__(self,hex=None,bytes=None,bytes_le=None,fields=None,\n int=None,version=None,\n *,is_safe=SafeUUID.unknown):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if[hex,bytes,bytes_le,fields,int].count(None)!=4:\n raise TypeError('one of the hex, bytes, bytes_le, fields, '\n 'or int arguments must be given')\n if hex is not None:\n hex=hex.replace('urn:','').replace('uuid:','')\n hex=hex.strip('{}').replace('-','')\n if len(hex)!=32:\n raise ValueError('badly formed hexadecimal UUID string')\n int=int_(hex,16)\n if bytes_le is not None:\n if len(bytes_le)!=16:\n raise ValueError('bytes_le is not a 16-char string')\n bytes=(bytes_le[4 -1::-1]+bytes_le[6 -1:4 -1:-1]+\n bytes_le[8 -1:6 -1:-1]+bytes_le[8:])\n if bytes is not None:\n if len(bytes)!=16:\n raise ValueError('bytes is not a 16-char string')\n assert isinstance(bytes,bytes_),repr(bytes)\n int=int_.from_bytes(bytes)\n if fields is not None:\n if len(fields)!=6:\n raise ValueError('fields is not a 6-tuple')\n (time_low,time_mid,time_hi_version,\n clock_seq_hi_variant,clock_seq_low,node)=fields\n if not 0 <=time_low <1 <<32:\n raise ValueError('field 1 out of range (need a 32-bit value)')\n if not 0 <=time_mid <1 <<16:\n raise ValueError('field 2 out of range (need a 16-bit value)')\n if not 0 <=time_hi_version <1 <<16:\n raise ValueError('field 3 out of range (need a 16-bit value)')\n if not 0 <=clock_seq_hi_variant <1 <<8:\n raise ValueError('field 4 out of range (need an 8-bit value)')\n if not 0 <=clock_seq_low <1 <<8:\n raise ValueError('field 5 out of range (need an 8-bit value)')\n if not 0 <=node <1 <<48:\n raise ValueError('field 6 out of range (need a 48-bit value)')\n clock_seq=(clock_seq_hi_variant <<8)|clock_seq_low\n int=((time_low <<96)|(time_mid <<80)|\n (time_hi_version <<64)|(clock_seq <<48)|node)\n if int is not None:\n if not 0 <=int <1 <<128:\n raise ValueError('int is out of range (need a 128-bit value)')\n if version is not None:\n if not 1 <=version <=5:\n raise ValueError('illegal version number')\n \n int &=~(0xc000 <<48)\n int |=0x8000 <<48\n \n int &=~(0xf000 <<64)\n int |=version <<76\n object.__setattr__(self,'int',int)\n object.__setattr__(self,'is_safe',is_safe)\n \n def __getstate__(self):\n d={'int':self.int}\n if self.is_safe !=SafeUUID.unknown:\n \n \n d['is_safe']=self.is_safe.value\n return d\n \n def __setstate__(self,state):\n object.__setattr__(self,'int',state['int'])\n \n object.__setattr__(self,'is_safe',\n SafeUUID(state['is_safe'])\n if 'is_safe'in state else SafeUUID.unknown)\n \n def __eq__(self,other):\n if isinstance(other,UUID):\n return self.int ==other.int\n return NotImplemented\n \n \n \n \n def __lt__(self,other):\n if isinstance(other,UUID):\n return self.int other.int\n return NotImplemented\n \n def __le__(self,other):\n if isinstance(other,UUID):\n return self.int <=other.int\n return NotImplemented\n \n def __ge__(self,other):\n if isinstance(other,UUID):\n return self.int >=other.int\n return NotImplemented\n \n def __hash__(self):\n return hash(self.int)\n \n def __int__(self):\n return self.int\n \n def __repr__(self):\n return '%s(%r)'%(self.__class__.__name__,str(self))\n \n def __setattr__(self,name,value):\n raise TypeError('UUID objects are immutable')\n \n def __str__(self):\n hex='%032x'%self.int\n return '%s-%s-%s-%s-%s'%(\n hex[:8],hex[8:12],hex[12:16],hex[16:20],hex[20:])\n \n @property\n def bytes(self):\n return self.int.to_bytes(16)\n \n @property\n def bytes_le(self):\n bytes=self.bytes\n return(bytes[4 -1::-1]+bytes[6 -1:4 -1:-1]+bytes[8 
-1:6 -1:-1]+\n bytes[8:])\n \n @property\n def fields(self):\n return(self.time_low,self.time_mid,self.time_hi_version,\n self.clock_seq_hi_variant,self.clock_seq_low,self.node)\n \n @property\n def time_low(self):\n return self.int >>96\n \n @property\n def time_mid(self):\n return(self.int >>80)&0xffff\n \n @property\n def time_hi_version(self):\n return(self.int >>64)&0xffff\n \n @property\n def clock_seq_hi_variant(self):\n return(self.int >>56)&0xff\n \n @property\n def clock_seq_low(self):\n return(self.int >>48)&0xff\n \n @property\n def time(self):\n return(((self.time_hi_version&0x0fff)<<48)|\n (self.time_mid <<32)|self.time_low)\n \n @property\n def clock_seq(self):\n return(((self.clock_seq_hi_variant&0x3f)<<8)|\n self.clock_seq_low)\n \n @property\n def node(self):\n return self.int&0xffffffffffff\n \n @property\n def hex(self):\n return '%032x'%self.int\n \n @property\n def urn(self):\n return 'urn:uuid:'+str(self)\n \n @property\n def variant(self):\n if not self.int&(0x8000 <<48):\n return RESERVED_NCS\n elif not self.int&(0x4000 <<48):\n return RFC_4122\n elif not self.int&(0x2000 <<48):\n return RESERVED_MICROSOFT\n else:\n return RESERVED_FUTURE\n \n @property\n def version(self):\n \n if self.variant ==RFC_4122:\n return int((self.int >>76)&0xf)\n \n \ndef _get_command_stdout(command,*args):\n import io,os,shutil,subprocess\n \n try:\n path_dirs=os.environ.get('PATH',os.defpath).split(os.pathsep)\n path_dirs.extend(['/sbin','/usr/sbin'])\n executable=shutil.which(command,path=os.pathsep.join(path_dirs))\n if executable is None:\n return None\n \n \n \n env=dict(os.environ)\n env['LC_ALL']='C'\n \n if args !=('',):\n command=(executable,*args)\n else:\n command=(executable,)\n proc=subprocess.Popen(command,\n stdout=subprocess.PIPE,\n stderr=subprocess.DEVNULL,\n env=env)\n if not proc:\n return None\n stdout,stderr=proc.communicate()\n return io.BytesIO(stdout)\n except(OSError,subprocess.SubprocessError):\n return None\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef _is_universal(mac):\n return not(mac&(1 <<41))\n \n \ndef _find_mac_near_keyword(command,args,keywords,get_word_index):\n ''\n\n\n\n\n\n\n \n stdout=_get_command_stdout(command,args)\n if stdout is None:\n return None\n \n first_local_mac=None\n for line in stdout:\n words=line.lower().rstrip().split()\n for i in range(len(words)):\n if words[i]in keywords:\n try:\n word=words[get_word_index(i)]\n mac=int(word.replace(_MAC_DELIM,b''),16)\n except(ValueError,IndexError):\n \n \n \n \n \n pass\n else:\n if _is_universal(mac):\n return mac\n first_local_mac=first_local_mac or mac\n return first_local_mac or None\n \n \ndef _parse_mac(word):\n\n\n\n\n\n\n parts=word.split(_MAC_DELIM)\n if len(parts)!=6:\n return\n if _MAC_OMITS_LEADING_ZEROES:\n \n \n \n \n if not all(1 <=len(part)<=2 for part in parts):\n return\n hexstr=b''.join(part.rjust(2,b'0')for part in parts)\n else:\n if not all(len(part)==2 for part in parts):\n return\n hexstr=b''.join(parts)\n try:\n return int(hexstr,16)\n except ValueError:\n return\n \n \ndef _find_mac_under_heading(command,args,heading):\n ''\n\n\n\n\n \n stdout=_get_command_stdout(command,args)\n if stdout is None:\n return None\n \n keywords=stdout.readline().rstrip().split()\n try:\n column_index=keywords.index(heading)\n except ValueError:\n return None\n \n first_local_mac=None\n for line in stdout:\n words=line.rstrip().split()\n try:\n word=words[column_index]\n except IndexError:\n continue\n \n mac=_parse_mac(word)\n if mac is None:\n continue\n if 
_is_universal(mac):\n return mac\n if first_local_mac is None:\n first_local_mac=mac\n \n return first_local_mac\n \n \n \n \ndef _ifconfig_getnode():\n ''\n \n keywords=(b'hwaddr',b'ether',b'address:',b'lladdr')\n for args in('','-a','-av'):\n mac=_find_mac_near_keyword('ifconfig',args,keywords,lambda i:i+1)\n if mac:\n return mac\n return None\n \ndef _ip_getnode():\n ''\n \n mac=_find_mac_near_keyword('ip','link',[b'link/ether'],lambda i:i+1)\n if mac:\n return mac\n return None\n \ndef _arp_getnode():\n ''\n import os,socket\n if not hasattr(socket,\"gethostbyname\"):\n return None\n try:\n ip_addr=socket.gethostbyname(socket.gethostname())\n except OSError:\n return None\n \n \n mac=_find_mac_near_keyword('arp','-an',[os.fsencode(ip_addr)],lambda i:-1)\n if mac:\n return mac\n \n \n mac=_find_mac_near_keyword('arp','-an',[os.fsencode(ip_addr)],lambda i:i+1)\n if mac:\n return mac\n \n \n mac=_find_mac_near_keyword('arp','-an',[os.fsencode('(%s)'%ip_addr)],\n lambda i:i+2)\n \n if mac:\n return mac\n return None\n \ndef _lanscan_getnode():\n ''\n \n return _find_mac_near_keyword('lanscan','-ai',[b'lan0'],lambda i:0)\n \ndef _netstat_getnode():\n ''\n \n return _find_mac_under_heading('netstat','-ian',b'Address')\n \ndef _ipconfig_getnode():\n ''\n \n return _windll_getnode()\n \ndef _netbios_getnode():\n ''\n \n return _windll_getnode()\n \n \n \ntry:\n import _uuid\n _generate_time_safe=getattr(_uuid,\"generate_time_safe\",None)\n _UuidCreate=getattr(_uuid,\"UuidCreate\",None)\n _has_uuid_generate_time_safe=_uuid.has_uuid_generate_time_safe\nexcept ImportError:\n _uuid=None\n _generate_time_safe=None\n _UuidCreate=None\n _has_uuid_generate_time_safe=None\n \n \ndef _load_system_functions():\n ''\n \n \ndef _unix_getnode():\n ''\n if _generate_time_safe:\n uuid_time,_=_generate_time_safe()\n return UUID(bytes=uuid_time).node\n \ndef _windll_getnode():\n ''\n if _UuidCreate:\n uuid_bytes=_UuidCreate()\n return UUID(bytes_le=uuid_bytes).node\n \ndef _random_getnode():\n ''\n \n \n \n \n \n \n \n \n \n \n import random\n return random.getrandbits(48)|(1 <<40)\n \n \n \n \n \n \n \n \nif _LINUX:\n _OS_GETTERS=[_ip_getnode,_ifconfig_getnode]\nelif sys.platform =='darwin':\n _OS_GETTERS=[_ifconfig_getnode,_arp_getnode,_netstat_getnode]\nelif sys.platform =='win32':\n\n _OS_GETTERS=[]\nelif _AIX:\n _OS_GETTERS=[_netstat_getnode]\nelse:\n _OS_GETTERS=[_ifconfig_getnode,_ip_getnode,_arp_getnode,\n _netstat_getnode,_lanscan_getnode]\nif os.name =='posix':\n _GETTERS=[_unix_getnode]+_OS_GETTERS\nelif os.name =='nt':\n _GETTERS=[_windll_getnode]+_OS_GETTERS\nelse:\n _GETTERS=_OS_GETTERS\n \n_node=None\n\ndef getnode():\n ''\n\n\n\n\n\n \n global _node\n if _node is not None:\n return _node\n \n for getter in _GETTERS+[_random_getnode]:\n try:\n _node=getter()\n except:\n continue\n if(_node is not None)and(0 <=_node <(1 <<48)):\n return _node\n assert False,'_random_getnode() returned invalid value: {}'.format(_node)\n \n \n_last_timestamp=None\n\ndef uuid1(node=None,clock_seq=None):\n ''\n\n\n \n \n \n \n if _generate_time_safe is not None and node is clock_seq is None:\n uuid_time,safely_generated=_generate_time_safe()\n try:\n is_safe=SafeUUID(safely_generated)\n except ValueError:\n is_safe=SafeUUID.unknown\n return UUID(bytes=uuid_time,is_safe=is_safe)\n \n global _last_timestamp\n import time\n nanoseconds=time.time_ns()\n \n \n timestamp=nanoseconds //100+0x01b21dd213814000\n if _last_timestamp is not None and timestamp <=_last_timestamp:\n timestamp=_last_timestamp+1\n 
_last_timestamp=timestamp\n if clock_seq is None:\n import random\n clock_seq=random.getrandbits(14)\n time_low=timestamp&0xffffffff\n time_mid=(timestamp >>32)&0xffff\n time_hi_version=(timestamp >>48)&0x0fff\n clock_seq_low=clock_seq&0xff\n clock_seq_hi_variant=(clock_seq >>8)&0x3f\n if node is None:\n node=getnode()\n return UUID(fields=(time_low,time_mid,time_hi_version,\n clock_seq_hi_variant,clock_seq_low,node),version=1)\n \ndef uuid3(namespace,name):\n ''\n if isinstance(name,str):\n name=bytes(name,\"utf-8\")\n from hashlib import md5\n digest=md5(\n namespace.bytes+name,\n usedforsecurity=False\n ).digest()\n return UUID(bytes=digest[:16],version=3)\n \ndef uuid4():\n ''\n return UUID(bytes=os.urandom(16),version=4)\n \ndef uuid5(namespace,name):\n ''\n if isinstance(name,str):\n name=bytes(name,\"utf-8\")\n from hashlib import sha1\n hash=sha1(namespace.bytes+name).digest()\n return UUID(bytes=hash[:16],version=5)\n \n \ndef main():\n ''\n uuid_funcs={\n \"uuid1\":uuid1,\n \"uuid3\":uuid3,\n \"uuid4\":uuid4,\n \"uuid5\":uuid5\n }\n uuid_namespace_funcs=(\"uuid3\",\"uuid5\")\n namespaces={\n \"@dns\":NAMESPACE_DNS,\n \"@url\":NAMESPACE_URL,\n \"@oid\":NAMESPACE_OID,\n \"@x500\":NAMESPACE_X500\n }\n \n import argparse\n parser=argparse.ArgumentParser(\n description=\"Generates a uuid using the selected uuid function.\")\n parser.add_argument(\"-u\",\"--uuid\",choices=uuid_funcs.keys(),default=\"uuid4\",\n help=\"The function to use to generate the uuid. \"\n \"By default uuid4 function is used.\")\n parser.add_argument(\"-n\",\"--namespace\",\n help=\"The namespace is a UUID, or '@ns' where 'ns' is a \"\n \"well-known predefined UUID addressed by namespace name. \"\n \"Such as @dns, @url, @oid, and @x500. \"\n \"Only required for uuid3/uuid5 functions.\")\n parser.add_argument(\"-N\",\"--name\",\n help=\"The name used as part of generating the uuid. \"\n \"Only required for uuid3/uuid5 functions.\")\n \n args=parser.parse_args()\n uuid_func=uuid_funcs[args.uuid]\n namespace=args.namespace\n name=args.name\n \n if args.uuid in uuid_namespace_funcs:\n if not namespace or not name:\n parser.error(\n \"Incorrect number of arguments. \"\n f\"{args.uuid} requires a namespace and a name. 
\"\n \"Run 'python -m uuid -h' for more information.\"\n )\n namespace=namespaces[namespace]if namespace in namespaces else UUID(namespace)\n print(uuid_func(namespace,name))\n else:\n print(uuid_func())\n \n \n \n \nNAMESPACE_DNS=UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_URL=UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_OID=UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_X500=UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8')\n\nif __name__ ==\"__main__\":\n main()\n", ["_uuid", "argparse", "enum", "hashlib", "io", "os", "platform", "random", "shutil", "socket", "subprocess", "sys", "time"]], "imp": [".py", "''\n\n\n\n\n\n\n\nfrom _imp import(lock_held,acquire_lock,release_lock,\nget_frozen_object,is_frozen_package,\ninit_frozen,is_builtin,is_frozen,\n_fix_co_filename,_frozen_module_names)\ntry:\n from _imp import create_dynamic\nexcept ImportError:\n\n create_dynamic=None\n \nfrom importlib._bootstrap import _ERR_MSG,_exec,_load,_builtin_from_name\nfrom importlib._bootstrap_external import SourcelessFileLoader\n\nfrom importlib import machinery\nfrom importlib import util\nimport importlib\nimport os\nimport sys\nimport tokenize\nimport types\nimport warnings\n\nwarnings.warn(\"the imp module is deprecated in favour of importlib and slated \"\n\"for removal in Python 3.12; \"\n\"see the module's documentation for alternative uses\",\nDeprecationWarning,stacklevel=2)\n\n\nSEARCH_ERROR=0\nPY_SOURCE=1\nPY_COMPILED=2\nC_EXTENSION=3\nPY_RESOURCE=4\nPKG_DIRECTORY=5\nC_BUILTIN=6\nPY_FROZEN=7\nPY_CODERESOURCE=8\nIMP_HOOK=9\n\n\ndef new_module(name):\n ''\n\n\n\n\n\n \n return types.ModuleType(name)\n \n \ndef get_magic():\n ''\n\n\n \n return util.MAGIC_NUMBER\n \n \ndef get_tag():\n ''\n return sys.implementation.cache_tag\n \n \ndef cache_from_source(path,debug_override=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n return util.cache_from_source(path,debug_override)\n \n \ndef source_from_cache(path):\n ''\n\n\n\n\n\n\n\n\n \n return util.source_from_cache(path)\n \n \ndef get_suffixes():\n ''\n extensions=[(s,'rb',C_EXTENSION)for s in machinery.EXTENSION_SUFFIXES]\n source=[(s,'r',PY_SOURCE)for s in machinery.SOURCE_SUFFIXES]\n bytecode=[(s,'rb',PY_COMPILED)for s in machinery.BYTECODE_SUFFIXES]\n \n return extensions+source+bytecode\n \n \nclass NullImporter:\n\n ''\n\n\n\n \n \n def __init__(self,path):\n if path =='':\n raise ImportError('empty pathname',path='')\n elif os.path.isdir(path):\n raise ImportError('existing directory',path=path)\n \n def find_module(self,fullname):\n ''\n return None\n \n \nclass _HackedGetData:\n\n ''\n \n \n def __init__(self,fullname,path,file=None):\n super().__init__(fullname,path)\n self.file=file\n \n def get_data(self,path):\n ''\n if self.file and path ==self.path:\n \n \n if not self.file.closed:\n file=self.file\n if 'b'not in file.mode:\n file.close()\n if self.file.closed:\n self.file=file=open(self.path,'rb')\n \n with file:\n return file.read()\n else:\n return super().get_data(path)\n \n \nclass _LoadSourceCompatibility(_HackedGetData,machinery.SourceFileLoader):\n\n ''\n \n \ndef load_source(name,pathname,file=None):\n loader=_LoadSourceCompatibility(name,pathname,file)\n spec=util.spec_from_file_location(name,pathname,loader=loader)\n if name in sys.modules:\n module=_exec(spec,sys.modules[name])\n else:\n module=_load(spec)\n \n \n module.__loader__=machinery.SourceFileLoader(name,pathname)\n module.__spec__.loader=module.__loader__\n return module\n \n 
\nclass _LoadCompiledCompatibility(_HackedGetData,SourcelessFileLoader):\n\n ''\n \n \ndef load_compiled(name,pathname,file=None):\n ''\n loader=_LoadCompiledCompatibility(name,pathname,file)\n spec=util.spec_from_file_location(name,pathname,loader=loader)\n if name in sys.modules:\n module=_exec(spec,sys.modules[name])\n else:\n module=_load(spec)\n \n \n module.__loader__=SourcelessFileLoader(name,pathname)\n module.__spec__.loader=module.__loader__\n return module\n \n \ndef load_package(name,path):\n ''\n if os.path.isdir(path):\n extensions=(machinery.SOURCE_SUFFIXES[:]+\n machinery.BYTECODE_SUFFIXES[:])\n for extension in extensions:\n init_path=os.path.join(path,'__init__'+extension)\n if os.path.exists(init_path):\n path=init_path\n break\n else:\n raise ValueError('{!r} is not a package'.format(path))\n spec=util.spec_from_file_location(name,path,\n submodule_search_locations=[])\n if name in sys.modules:\n return _exec(spec,sys.modules[name])\n else:\n return _load(spec)\n \n \ndef load_module(name,file,filename,details):\n ''\n\n\n\n\n\n \n suffix,mode,type_=details\n if mode and(not mode.startswith('r')or '+'in mode):\n raise ValueError('invalid file open mode {!r}'.format(mode))\n elif file is None and type_ in{PY_SOURCE,PY_COMPILED}:\n msg='file object required for import (type code {})'.format(type_)\n raise ValueError(msg)\n elif type_ ==PY_SOURCE:\n return load_source(name,filename,file)\n elif type_ ==PY_COMPILED:\n return load_compiled(name,filename,file)\n elif type_ ==C_EXTENSION and load_dynamic is not None:\n if file is None:\n with open(filename,'rb')as opened_file:\n return load_dynamic(name,filename,opened_file)\n else:\n return load_dynamic(name,filename,file)\n elif type_ ==PKG_DIRECTORY:\n return load_package(name,filename)\n elif type_ ==C_BUILTIN:\n return init_builtin(name)\n elif type_ ==PY_FROZEN:\n return init_frozen(name)\n else:\n msg=\"Don't know how to import {} (type code {})\".format(name,type_)\n raise ImportError(msg,name=name)\n \n \ndef find_module(name,path=None):\n ''\n\n\n\n\n\n\n\n\n \n if not isinstance(name,str):\n raise TypeError(\"'name' must be a str, not {}\".format(type(name)))\n elif not isinstance(path,(type(None),list)):\n \n raise RuntimeError(\"'path' must be None or a list, \"\n \"not {}\".format(type(path)))\n \n if path is None:\n if is_builtin(name):\n return None,None,('','',C_BUILTIN)\n elif is_frozen(name):\n return None,None,('','',PY_FROZEN)\n else:\n path=sys.path\n \n for entry in path:\n package_directory=os.path.join(entry,name)\n for suffix in['.py',machinery.BYTECODE_SUFFIXES[0]]:\n package_file_name='__init__'+suffix\n file_path=os.path.join(package_directory,package_file_name)\n if os.path.isfile(file_path):\n return None,package_directory,('','',PKG_DIRECTORY)\n for suffix,mode,type_ in get_suffixes():\n file_name=name+suffix\n file_path=os.path.join(entry,file_name)\n if os.path.isfile(file_path):\n break\n else:\n continue\n break\n else:\n raise ImportError(_ERR_MSG.format(name),name=name)\n \n encoding=None\n if 'b'not in mode:\n with open(file_path,'rb')as file:\n encoding=tokenize.detect_encoding(file.readline)[0]\n file=open(file_path,mode,encoding=encoding)\n return file,file_path,(suffix,mode,type_)\n \n \ndef reload(module):\n ''\n\n\n\n\n\n \n return importlib.reload(module)\n \n \ndef init_builtin(name):\n ''\n\n\n\n \n try:\n return _builtin_from_name(name)\n except ImportError:\n return None\n \n \nif create_dynamic:\n def load_dynamic(name,path,file=None):\n ''\n\n\n \n import 
importlib.machinery\n loader=importlib.machinery.ExtensionFileLoader(name,path)\n \n \n \n spec=importlib.machinery.ModuleSpec(\n name=name,loader=loader,origin=path)\n return _load(spec)\n \nelse:\n load_dynamic=None\n", ["_imp", "importlib", "importlib._bootstrap", "importlib._bootstrap_external", "importlib.machinery", "importlib.util", "os", "sys", "tokenize", "types", "warnings"]], "re": [".py", "from python_re import *\n\nimport python_re\n_compile=python_re._compile\n_reconstructor=python_re._reconstructor\n\npython_re._reconstructor.__module__='re'\n", ["python_re"]], "_sysconfigdata": [".py", "build_time_vars={'HAVE_SYS_WAIT_H':1,'HAVE_UTIL_H':0,'HAVE_SYMLINKAT':1,'HAVE_LIBSENDFILE':0,'SRCDIRS':'Parser Grammar Objects Python Modules Mac','SIZEOF_OFF_T':8,'BASECFLAGS':'-Wno-unused-result','HAVE_UTIME_H':1,'EXTRAMACHDEPPATH':'','HAVE_SYS_TIME_H':1,'CFLAGSFORSHARED':'-fPIC','HAVE_HYPOT':1,'PGSRCS':'\\\\','HAVE_LIBUTIL_H':0,'HAVE_COMPUTED_GOTOS':1,'HAVE_LUTIMES':1,'HAVE_MAKEDEV':1,'HAVE_REALPATH':1,'HAVE_LINUX_TIPC_H':1,'MULTIARCH':'i386-linux-gnu','HAVE_GETWD':1,'HAVE_GCC_ASM_FOR_X64':0,'HAVE_INET_PTON':1,'HAVE_GETHOSTBYNAME_R_6_ARG':1,'SIZEOF__BOOL':1,'HAVE_ZLIB_COPY':1,'ASDLGEN':'python3.3 ../Parser/asdl_c.py','GRAMMAR_INPUT':'../Grammar/Grammar','HOST_GNU_TYPE':'i686-pc-linux-gnu','HAVE_SCHED_RR_GET_INTERVAL':1,'HAVE_BLUETOOTH_H':0,'HAVE_MKFIFO':1,'TIMEMODULE_LIB':0,'LIBM':'-lm','PGENOBJS':'\\\\ \\\\','PYTHONFRAMEWORK':'','GETPGRP_HAVE_ARG':0,'HAVE_MMAP':1,'SHLIB_SUFFIX':'.so','SIZEOF_FLOAT':4,'HAVE_RENAMEAT':1,'HAVE_LANGINFO_H':1,'HAVE_STDLIB_H':1,'PY_CORE_CFLAGS':'-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security -I. -IInclude -I../Include -D_FORTIFY_SOURCE=2 -fPIC -DPy_BUILD_CORE','HAVE_BROKEN_PIPE_BUF':0,'HAVE_CONFSTR':1,'HAVE_SIGTIMEDWAIT':1,'HAVE_FTELLO':1,'READELF':'readelf','HAVE_SIGALTSTACK':1,'TESTTIMEOUT':3600,'PYTHONPATH':':plat-i386-linux-gnu','SIZEOF_WCHAR_T':4,'LIBOBJS':'','HAVE_SYSCONF':1,'MAKESETUP':'../Modules/makesetup','HAVE_UTIMENSAT':1,'HAVE_FCHOWNAT':1,'HAVE_WORKING_TZSET':1,'HAVE_FINITE':1,'HAVE_ASINH':1,'HAVE_SETEUID':1,'CONFIGFILES':'configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in','HAVE_SETGROUPS':1,'PARSER_OBJS':'\\\\ Parser/myreadline.o Parser/parsetok.o Parser/tokenizer.o','HAVE_MBRTOWC':1,'SIZEOF_INT':4,'HAVE_STDARG_PROTOTYPES':1,'TM_IN_SYS_TIME':0,'HAVE_SYS_TIMES_H':1,'HAVE_LCHOWN':1,'HAVE_SSIZE_T':1,'HAVE_PAUSE':1,'SYSLIBS':'-lm','POSIX_SEMAPHORES_NOT_ENABLED':0,'HAVE_DEVICE_MACROS':1,'BLDSHARED':'i686-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-Bsymbolic-functions -Wl,-z,relro -Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ','LIBSUBDIRS':'tkinter tkinter/test tkinter/test/test_tkinter \\\\','HAVE_SYS_UN_H':1,'HAVE_SYS_STAT_H':1,'VPATH':'..','INCLDIRSTOMAKE':'/usr/include /usr/include /usr/include/python3.3m /usr/include/python3.3m','HAVE_BROKEN_SEM_GETVALUE':0,'HAVE_TIMEGM':1,'PACKAGE_VERSION':0,'MAJOR_IN_SYSMACROS':0,'HAVE_ATANH':1,'HAVE_GAI_STRERROR':1,'HAVE_SYS_POLL_H':1,'SIZEOF_PTHREAD_T':4,'SIZEOF_FPOS_T':16,'HAVE_CTERMID':1,'HAVE_TMPFILE':1,'HAVE_SETUID':1,'CXX':'i686-linux-gnu-g++ 
-pthread','srcdir':'..','HAVE_UINT32_T':1,'HAVE_ADDRINFO':1,'HAVE_GETSPENT':1,'SIZEOF_DOUBLE':8,'HAVE_INT32_T':1,'LIBRARY_OBJS_OMIT_FROZEN':'\\\\','HAVE_FUTIMES':1,'CONFINCLUDEPY':'/usr/include/python3.3m','HAVE_RL_COMPLETION_APPEND_CHARACTER':1,'LIBFFI_INCLUDEDIR':'','HAVE_SETGID':1,'HAVE_UINT64_T':1,'EXEMODE':755,'UNIVERSALSDK':'','HAVE_LIBDL':1,'HAVE_GETNAMEINFO':1,'HAVE_STDINT_H':1,'COREPYTHONPATH':':plat-i386-linux-gnu','HAVE_SOCKADDR_STORAGE':1,'HAVE_WAITID':1,'EXTRAPLATDIR':'@EXTRAPLATDIR@','HAVE_ACCEPT4':1,'RUNSHARED':'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared:','EXE':'','HAVE_SIGACTION':1,'HAVE_CHOWN':1,'HAVE_GETLOGIN':1,'HAVE_TZNAME':0,'PACKAGE_NAME':0,'HAVE_GETPGID':1,'HAVE_GLIBC_MEMMOVE_BUG':0,'BUILD_GNU_TYPE':'i686-pc-linux-gnu','HAVE_LINUX_CAN_H':1,'DYNLOADFILE':'dynload_shlib.o','HAVE_PWRITE':1,'BUILDEXE':'','HAVE_OPENPTY':1,'HAVE_LOCKF':1,'HAVE_COPYSIGN':1,'HAVE_PREAD':1,'HAVE_DLOPEN':1,'HAVE_SYS_KERN_CONTROL_H':0,'PY_FORMAT_LONG_LONG':'\"ll\"','HAVE_TCSETPGRP':1,'HAVE_SETSID':1,'HAVE_STRUCT_STAT_ST_BIRTHTIME':0,'HAVE_STRING_H':1,'LDLIBRARY':'libpython3.3m.so','INSTALL_SCRIPT':'/usr/bin/install -c','HAVE_SYS_XATTR_H':1,'HAVE_CURSES_IS_TERM_RESIZED':1,'HAVE_TMPNAM_R':1,'STRICT_SYSV_CURSES':\"/* Don't use ncurses extensions */\",'WANT_SIGFPE_HANDLER':1,'HAVE_INT64_T':1,'HAVE_STAT_TV_NSEC':1,'HAVE_SYS_MKDEV_H':0,'HAVE_BROKEN_POLL':0,'HAVE_IF_NAMEINDEX':1,'HAVE_GETPWENT':1,'PSRCS':'\\\\','RANLIB':'ranlib','HAVE_WCSCOLL':1,'WITH_NEXT_FRAMEWORK':0,'ASDLGEN_FILES':'../Parser/asdl.py ../Parser/asdl_c.py','HAVE_RL_PRE_INPUT_HOOK':1,'PACKAGE_URL':0,'SHLIB_EXT':0,'HAVE_SYS_LOADAVG_H':0,'HAVE_LIBIEEE':0,'HAVE_SEM_OPEN':1,'HAVE_TERM_H':1,'IO_OBJS':'\\\\','IO_H':'Modules/_io/_iomodule.h','HAVE_STATVFS':1,'VERSION':'3.3','HAVE_GETC_UNLOCKED':1,'MACHDEPS':'plat-i386-linux-gnu @EXTRAPLATDIR@','SUBDIRSTOO':'Include Lib Misc','HAVE_SETREUID':1,'HAVE_ERFC':1,'HAVE_SETRESUID':1,'LINKFORSHARED':'-Xlinker -export-dynamic -Wl,-O1 -Wl,-Bsymbolic-functions','HAVE_SYS_TYPES_H':1,'HAVE_GETPAGESIZE':1,'HAVE_SETEGID':1,'HAVE_PTY_H':1,'HAVE_STRUCT_STAT_ST_FLAGS':0,'HAVE_WCHAR_H':1,'HAVE_FSEEKO':1,'Py_ENABLE_SHARED':1,'HAVE_SIGRELSE':1,'HAVE_PTHREAD_INIT':0,'FILEMODE':644,'HAVE_SYS_RESOURCE_H':1,'HAVE_READLINKAT':1,'PYLONG_BITS_IN_DIGIT':0,'LINKCC':'i686-linux-gnu-gcc -pthread','HAVE_SETLOCALE':1,'HAVE_CHROOT':1,'HAVE_OPENAT':1,'HAVE_FEXECVE':1,'LDCXXSHARED':'i686-linux-gnu-g++ -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions','DIST':'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in Include Lib Misc Ext-dummy','HAVE_MKNOD':1,'PY_LDFLAGS':'-Wl,-Bsymbolic-functions -Wl,-z,relro','HAVE_BROKEN_MBSTOWCS':0,'LIBRARY_OBJS':'\\\\','HAVE_LOG1P':1,'SIZEOF_VOID_P':4,'HAVE_FCHOWN':1,'PYTHONFRAMEWORKPREFIX':'','HAVE_LIBDLD':0,'HAVE_TGAMMA':1,'HAVE_ERRNO_H':1,'HAVE_IO_H':0,'OTHER_LIBTOOL_OPT':'','HAVE_POLL_H':1,'PY_CPPFLAGS':'-I. 
-IInclude -I../Include -D_FORTIFY_SOURCE=2','XMLLIBSUBDIRS':'xml xml/dom xml/etree xml/parsers xml/sax','GRAMMAR_H':'Include/graminit.h','TANH_PRESERVES_ZERO_SIGN':1,'HAVE_GETLOADAVG':1,'UNICODE_DEPS':'\\\\ \\\\','HAVE_GETCWD':1,'MANDIR':'/usr/share/man','MACHDESTLIB':'/usr/lib/python3.3','GRAMMAR_C':'Python/graminit.c','PGOBJS':'\\\\','HAVE_DEV_PTMX':1,'HAVE_UINTPTR_T':1,'HAVE_SCHED_SETAFFINITY':1,'PURIFY':'','HAVE_DECL_ISINF':1,'HAVE_RL_CALLBACK':1,'HAVE_WRITEV':1,'HAVE_GETHOSTBYNAME_R_5_ARG':0,'HAVE_SYS_AUDIOIO_H':0,'EXT_SUFFIX':'.cpython-33m.so','SIZEOF_LONG_LONG':8,'DLINCLDIR':'.','HAVE_PATHCONF':1,'HAVE_UNLINKAT':1,'MKDIR_P':'/bin/mkdir -p','HAVE_ALTZONE':0,'SCRIPTDIR':'/usr/lib','OPCODETARGETGEN_FILES':'\\\\','HAVE_GETSPNAM':1,'HAVE_SYS_TERMIO_H':0,'HAVE_ATTRIBUTE_FORMAT_PARSETUPLE':0,'HAVE_PTHREAD_H':1,'Py_DEBUG':0,'HAVE_STRUCT_STAT_ST_BLOCKS':1,'X87_DOUBLE_ROUNDING':1,'SIZEOF_TIME_T':4,'HAVE_DYNAMIC_LOADING':1,'HAVE_DIRECT_H':0,'SRC_GDB_HOOKS':'../Tools/gdb/libpython.py','HAVE_GETADDRINFO':1,'HAVE_BROKEN_NICE':0,'HAVE_DIRENT_H':1,'HAVE_WCSXFRM':1,'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK':1,'HAVE_FSTATVFS':1,'PYTHON':'python','HAVE_OSX105_SDK':0,'BINDIR':'/usr/bin','TESTPYTHON':'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared: ./python','ARFLAGS':'rc','PLATDIR':'plat-i386-linux-gnu','HAVE_ASM_TYPES_H':1,'PY3LIBRARY':'libpython3.so','HAVE_PLOCK':0,'FLOCK_NEEDS_LIBBSD':0,'WITH_TSC':0,'HAVE_LIBREADLINE':1,'MACHDEP':'linux','HAVE_SELECT':1,'LDFLAGS':'-Wl,-Bsymbolic-functions -Wl,-z,relro','HAVE_HSTRERROR':1,'SOABI':'cpython-33m','HAVE_GETTIMEOFDAY':1,'HAVE_LIBRESOLV':0,'HAVE_UNSETENV':1,'HAVE_TM_ZONE':1,'HAVE_GETPGRP':1,'HAVE_FLOCK':1,'HAVE_SYS_BSDTTY_H':0,'SUBDIRS':'','PYTHONFRAMEWORKINSTALLDIR':'','PACKAGE_BUGREPORT':0,'HAVE_CLOCK':1,'HAVE_GETPEERNAME':1,'SIZEOF_PID_T':4,'HAVE_CONIO_H':0,'HAVE_FSTATAT':1,'HAVE_NETPACKET_PACKET_H':1,'HAVE_WAIT3':1,'DESTPATH':'','HAVE_STAT_TV_NSEC2':0,'HAVE_GETRESGID':1,'HAVE_UCS4_TCL':0,'SIGNED_RIGHT_SHIFT_ZERO_FILLS':0,'HAVE_TIMES':1,'HAVE_UNAME':1,'HAVE_ERF':1,'SIZEOF_SHORT':2,'HAVE_NCURSES_H':1,'HAVE_SYS_SENDFILE_H':1,'HAVE_CTERMID_R':0,'HAVE_TMPNAM':1,'prefix':'/usr','HAVE_NICE':1,'WITH_THREAD':1,'LN':'ln','TESTRUNNER':'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared: ./python ../Tools/scripts/run_tests.py','HAVE_SIGINTERRUPT':1,'HAVE_SETPGID':1,'RETSIGTYPE':'void','HAVE_SCHED_GET_PRIORITY_MAX':1,'HAVE_SYS_SYS_DOMAIN_H':0,'HAVE_SYS_DIR_H':0,'HAVE__GETPTY':0,'HAVE_BLUETOOTH_BLUETOOTH_H':1,'HAVE_BIND_TEXTDOMAIN_CODESET':1,'HAVE_POLL':1,'PYTHON_OBJS':'\\\\','HAVE_WAITPID':1,'USE_INLINE':1,'HAVE_FUTIMENS':1,'USE_COMPUTED_GOTOS':1,'MAINCC':'i686-linux-gnu-gcc -pthread','HAVE_SOCKETPAIR':1,'HAVE_PROCESS_H':0,'HAVE_SETVBUF':1,'HAVE_FDOPENDIR':1,'CONFINCLUDEDIR':'/usr/include','BINLIBDEST':'/usr/lib/python3.3','HAVE_SYS_IOCTL_H':1,'HAVE_SYSEXITS_H':1,'LDLAST':'','HAVE_SYS_FILE_H':1,'HAVE_RL_COMPLETION_SUPPRESS_APPEND':1,'HAVE_RL_COMPLETION_MATCHES':1,'HAVE_TCGETPGRP':1,'SIZEOF_SIZE_T':4,'HAVE_EPOLL_CREATE1':1,'HAVE_SYS_SELECT_H':1,'HAVE_CLOCK_GETTIME':1,'CFLAGS':'-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ','HAVE_SNPRINTF':1,'BLDLIBRARY':'-lpython3.3m','PARSER_HEADERS':'\\\\','SO':'.so','LIBRARY':'libpython3.3m.a','HAVE_FPATHCONF':1,'HAVE_TERMIOS_H':1,'HAVE_BROKEN_PTHREAD_SIGMASK':0,'AST_H':'Include/Python-ast.h','HAVE_GCC_UINT128_T':0,'HAVE_ACOSH':1,'MODOBJS':'Modules/_threadmodule.o Modules/signalmodule.o Modules/arraymodule.o 
Modules/mathmodule.o Modules/_math.o Modules/_struct.o Modules/timemodule.o Modules/_randommodule.o Modules/atexitmodule.o Modules/_elementtree.o Modules/_pickle.o Modules/_datetimemodule.o Modules/_bisectmodule.o Modules/_heapqmodule.o Modules/unicodedata.o Modules/fcntlmodule.o Modules/spwdmodule.o Modules/grpmodule.o Modules/selectmodule.o Modules/socketmodule.o Modules/_posixsubprocess.o Modules/md5module.o Modules/sha1module.o Modules/sha256module.o Modules/sha512module.o Modules/syslogmodule.o Modules/binascii.o Modules/zlibmodule.o Modules/pyexpat.o Modules/posixmodule.o Modules/errnomodule.o Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o Modules/_weakref.o Modules/_functoolsmodule.o Modules/operator.o Modules/_collectionsmodule.o Modules/itertoolsmodule.o Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o Modules/textio.o Modules/stringio.o Modules/zipimport.o Modules/faulthandler.o Modules/symtablemodule.o Modules/xxsubtype.o','AST_C':'Python/Python-ast.c','HAVE_SYS_NDIR_H':0,'DESTDIRS':'/usr /usr/lib /usr/lib/python3.3 /usr/lib/python3.3/lib-dynload','HAVE_SIGNAL_H':1,'PACKAGE_TARNAME':0,'HAVE_GETPRIORITY':1,'INCLUDEDIR':'/usr/include','HAVE_INTTYPES_H':1,'SIGNAL_OBJS':'','HAVE_READV':1,'HAVE_SETHOSTNAME':1,'MODLIBS':'-lrt -lexpat -L/usr/lib -lz -lexpat','CC':'i686-linux-gnu-gcc -pthread','HAVE_LCHMOD':0,'SIZEOF_UINTPTR_T':4,'LIBPC':'/usr/lib/i386-linux-gnu/pkgconfig','BYTESTR_DEPS':'\\\\','HAVE_MKDIRAT':1,'LIBPL':'/usr/lib/python3.3/config-3.3m-i386-linux-gnu','HAVE_SHADOW_H':1,'HAVE_SYS_EVENT_H':0,'INSTALL':'/usr/bin/install -c','HAVE_GCC_ASM_FOR_X87':1,'HAVE_BROKEN_UNSETENV':0,'BASECPPFLAGS':'','DOUBLE_IS_BIG_ENDIAN_IEEE754':0,'HAVE_STRUCT_STAT_ST_RDEV':1,'HAVE_SEM_UNLINK':1,'BUILDPYTHON':'python','HAVE_RL_CATCH_SIGNAL':1,'HAVE_DECL_TZNAME':0,'RESSRCDIR':'Mac/Resources/framework','HAVE_PTHREAD_SIGMASK':1,'HAVE_UTIMES':1,'DISTDIRS':'Include Lib Misc Ext-dummy','HAVE_FDATASYNC':1,'HAVE_USABLE_WCHAR_T':0,'PY_FORMAT_SIZE_T':'\"z\"','HAVE_SCHED_SETSCHEDULER':1,'VA_LIST_IS_ARRAY':0,'HAVE_LINUX_NETLINK_H':1,'HAVE_SETREGID':1,'HAVE_STROPTS_H':1,'LDVERSION':'3.3m','abs_builddir':'/build/buildd/python3.3-3.3.1/build-shared','SITEPATH':'','HAVE_GETHOSTBYNAME':0,'HAVE_SIGPENDING':1,'HAVE_KQUEUE':0,'HAVE_SYNC':1,'HAVE_GETSID':1,'HAVE_ROUND':1,'HAVE_STRFTIME':1,'AST_H_DIR':'Include','HAVE_PIPE2':1,'AST_C_DIR':'Python','TESTPYTHONOPTS':'','HAVE_DEV_PTC':0,'GETTIMEOFDAY_NO_TZ':0,'HAVE_NET_IF_H':1,'HAVE_SENDFILE':1,'HAVE_SETPGRP':1,'HAVE_SEM_GETVALUE':1,'CONFIGURE_LDFLAGS':'-Wl,-Bsymbolic-functions -Wl,-z,relro','DLLLIBRARY':'','PYTHON_FOR_BUILD':'./python -E','SETPGRP_HAVE_ARG':0,'HAVE_INET_ATON':1,'INSTALL_SHARED':'/usr/bin/install -c -m 555','WITH_DOC_STRINGS':1,'OPCODETARGETS_H':'\\\\','HAVE_INITGROUPS':1,'HAVE_LINKAT':1,'BASEMODLIBS':'','SGI_ABI':'','HAVE_SCHED_SETPARAM':1,'OPT':'-DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes','HAVE_POSIX_FADVISE':1,'datarootdir':'/usr/share','HAVE_MEMRCHR':1,'HGTAG':'','HAVE_MEMMOVE':1,'HAVE_GETRESUID':1,'DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754':0,'HAVE_LSTAT':1,'AR':'ar','HAVE_WAIT4':1,'HAVE_SYS_MODEM_H':0,'INSTSONAME':'libpython3.3m.so.1.0','HAVE_SYS_STATVFS_H':1,'HAVE_LGAMMA':1,'HAVE_PROTOTYPES':1,'HAVE_SYS_UIO_H':1,'MAJOR_IN_MKDEV':0,'QUICKTESTOPTS':'-x test_subprocess test_io test_lib2to3 
\\\\','HAVE_SYS_DEVPOLL_H':0,'HAVE_CHFLAGS':0,'HAVE_FSYNC':1,'HAVE_FCHMOD':1,'INCLUDEPY':'/usr/include/python3.3m','HAVE_SEM_TIMEDWAIT':1,'LDLIBRARYDIR':'','HAVE_STRUCT_TM_TM_ZONE':1,'HAVE_CURSES_H':1,'TIME_WITH_SYS_TIME':1,'HAVE_DUP2':1,'ENABLE_IPV6':1,'WITH_VALGRIND':0,'HAVE_SETITIMER':1,'THREADOBJ':'Python/thread.o','LOCALMODLIBS':'-lrt -lexpat -L/usr/lib -lz -lexpat','HAVE_MEMORY_H':1,'HAVE_GETITIMER':1,'HAVE_C99_BOOL':1,'INSTALL_DATA':'/usr/bin/install -c -m 644','PGEN':'Parser/pgen','HAVE_GRP_H':1,'HAVE_WCSFTIME':1,'AIX_GENUINE_CPLUSPLUS':0,'HAVE_LIBINTL_H':1,'SHELL':'/bin/sh','HAVE_UNISTD_H':1,'EXTRATESTOPTS':'','HAVE_EXECV':1,'HAVE_FSEEK64':0,'MVWDELCH_IS_EXPRESSION':1,'DESTSHARED':'/usr/lib/python3.3/lib-dynload','OPCODETARGETGEN':'\\\\','LIBDEST':'/usr/lib/python3.3','CCSHARED':'-fPIC','HAVE_EXPM1':1,'HAVE_DLFCN_H':1,'exec_prefix':'/usr','HAVE_READLINK':1,'WINDOW_HAS_FLAGS':1,'HAVE_FTELL64':0,'HAVE_STRLCPY':0,'MACOSX_DEPLOYMENT_TARGET':'','HAVE_SYS_SYSCALL_H':1,'DESTLIB':'/usr/lib/python3.3','LDSHARED':'i686-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-Bsymbolic-functions -Wl,-z,relro -Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ','HGVERSION':'','PYTHON_HEADERS':'\\\\','HAVE_STRINGS_H':1,'DOUBLE_IS_LITTLE_ENDIAN_IEEE754':1,'HAVE_POSIX_FALLOCATE':1,'HAVE_DIRFD':1,'HAVE_LOG2':1,'HAVE_GETPID':1,'HAVE_ALARM':1,'MACHDEP_OBJS':'','HAVE_SPAWN_H':1,'HAVE_FORK':1,'HAVE_SETRESGID':1,'HAVE_FCHMODAT':1,'HAVE_CLOCK_GETRES':1,'MACHDEPPATH':':plat-i386-linux-gnu','STDC_HEADERS':1,'HAVE_SETPRIORITY':1,'LIBC':'','HAVE_SYS_EPOLL_H':1,'HAVE_SYS_UTSNAME_H':1,'HAVE_PUTENV':1,'HAVE_CURSES_RESIZE_TERM':1,'HAVE_FUTIMESAT':1,'WITH_DYLD':0,'INSTALL_PROGRAM':'/usr/bin/install -c','LIBS':'-lpthread -ldl -lutil','HAVE_TRUNCATE':1,'TESTOPTS':'','PROFILE_TASK':'../Tools/pybench/pybench.py -n 2 --with-gc --with-syscheck','HAVE_CURSES_RESIZETERM':1,'ABIFLAGS':'m','HAVE_GETGROUPLIST':1,'OBJECT_OBJS':'\\\\','HAVE_MKNODAT':1,'HAVE_ST_BLOCKS':1,'HAVE_STRUCT_STAT_ST_GEN':0,'SYS_SELECT_WITH_SYS_TIME':1,'SHLIBS':'-lpthread -ldl -lutil','HAVE_GETGROUPS':1,'MODULE_OBJS':'\\\\','PYTHONFRAMEWORKDIR':'no-framework','HAVE_FCNTL_H':1,'HAVE_LINK':1,'HAVE_SIGWAIT':1,'HAVE_GAMMA':1,'HAVE_SYS_LOCK_H':0,'HAVE_FORKPTY':1,'HAVE_SOCKADDR_SA_LEN':0,'HAVE_TEMPNAM':1,'HAVE_STRUCT_STAT_ST_BLKSIZE':1,'HAVE_MKFIFOAT':1,'HAVE_SIGWAITINFO':1,'HAVE_FTIME':1,'HAVE_EPOLL':1,'HAVE_SYS_SOCKET_H':1,'HAVE_LARGEFILE_SUPPORT':1,'CONFIGURE_CFLAGS':'-g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security','HAVE_PTHREAD_DESTRUCTOR':0,'CONFIGURE_CPPFLAGS':'-D_FORTIFY_SOURCE=2','HAVE_SYMLINK':1,'HAVE_LONG_LONG':1,'HAVE_IEEEFP_H':0,'LIBDIR':'/usr/lib','HAVE_PTHREAD_KILL':1,'TESTPATH':'','HAVE_STRDUP':1,'POBJS':'\\\\','NO_AS_NEEDED':'-Wl,--no-as-needed','HAVE_LONG_DOUBLE':1,'HGBRANCH':'','DISTFILES':'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in','PTHREAD_SYSTEM_SCHED_SUPPORTED':1,'HAVE_FACCESSAT':1,'AST_ASDL':'../Parser/Python.asdl','CPPFLAGS':'-I. 
-IInclude -I../Include -D_FORTIFY_SOURCE=2','HAVE_MKTIME':1,'HAVE_NDIR_H':0,'PY_CFLAGS':'-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ','LIBOBJDIR':'Python/','HAVE_LINUX_CAN_RAW_H':1,'HAVE_GETHOSTBYNAME_R_3_ARG':0,'PACKAGE_STRING':0,'GNULD':'yes','LOG1P_DROPS_ZERO_SIGN':0,'HAVE_FTRUNCATE':1,'WITH_LIBINTL':0,'HAVE_MREMAP':1,'HAVE_DECL_ISNAN':1,'HAVE_KILLPG':1,'SIZEOF_LONG':4,'HAVE_DECL_ISFINITE':1,'HAVE_IPA_PURE_CONST_BUG':0,'WITH_PYMALLOC':1,'abs_srcdir':'/build/buildd/python3.3-3.3.1/build-shared/..','HAVE_FCHDIR':1,'HAVE_BROKEN_POSIX_SEMAPHORES':0,'AC_APPLE_UNIVERSAL_BUILD':0,'PGENSRCS':'\\\\ \\\\','DIRMODE':755,'HAVE_GETHOSTBYNAME_R':1,'HAVE_LCHFLAGS':0,'HAVE_SYS_PARAM_H':1,'SIZEOF_LONG_DOUBLE':12,'CONFIG_ARGS':\"'--enable-shared' '--prefix=/usr' '--enable-ipv6' '--enable-loadable-sqlite-extensions' '--with-dbmliborder=bdb:gdbm' '--with-computed-gotos' '--with-system-expat' '--with-system-ffi' '--with-fpectl' 'CC=i686-linux-gnu-gcc' 'CFLAGS=-g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ' 'LDFLAGS=-Wl,-Bsymbolic-functions -Wl,-z,relro' 'CPPFLAGS=-D_FORTIFY_SOURCE=2'\",'HAVE_SCHED_H':1,'HAVE_KILL':1}\n\n", []], "struct": [".py", "__all__=[\n\n'calcsize','pack','pack_into','unpack','unpack_from',\n'iter_unpack',\n\n\n'Struct',\n\n\n'error'\n]\n\nfrom _struct import *\nfrom _struct import _clearcache\nfrom _struct import __doc__\n", ["_struct"]], "sre_parse": [".py", "import warnings\nwarnings.warn(f\"module {__name__ !r} is deprecated\",\nDeprecationWarning,\nstacklevel=2)\n\nfrom re import _parser as _\nglobals().update({k:v for k,v in vars(_).items()if k[:2]!='__'})\n", ["re", "warnings"]], "sys": [".py", "\nfrom _sys import *\nimport _sys\n\n_getframe=_sys._getframe\n\nclass _dataclass(tuple):\n\n def __init__(self,**kwargs):\n self.keys=list(kwargs)\n self.__dict__.update(kwargs)\n \n def __getitem__(self,key):\n if isinstance(key,int)and 0 <=key <=len(self.keys):\n return self.__dict__[self.keys[key]]\n raise KeyError(key)\n \n def __iter__(self):\n return(self.__dict__[key]for key in self.keys)\n \n def __len__(self):\n return len(self.keys)\n \n def __repr__(self):\n s=', '.join(f'{k}={self.__dict__[k]!r}'for k in self.keys)\n return f'sys.{self.__class__.__name__}({s})'\n \n \ndef make_dataclass(name,bases=None):\n bases=[_dataclass]if bases is None else[*bases,_dataclass]\n cls=type(name,bases,{})\n return cls\n \n \n__breakpointhook__=breakpointhook\n\nabiflags=0\n\ndef audit(event,*args):\n ''\n pass\n \nbrython_debug_mode=__BRYTHON__.get_option('debug')\n\nbase_exec_prefix=__BRYTHON__.brython_path\n\nbase_prefix=__BRYTHON__.brython_path\n\nbuiltin_module_names=__BRYTHON__.builtin_module_names\n\nbyteorder='little'\n\ncopyright=\"\"\"Copyright (c) 2001-2023 Python Software Foundation.\nAll Rights Reserved.\n\nCopyright (c) 2000 BeOpen.com.\nAll Rights Reserved.\n\nCopyright (c) 1995-2001 Corporation for National Research Initiatives.\nAll Rights Reserved.\n\nCopyright (c) 1991-1995 Stichting Mathematisch Centrum, Amsterdam.\nAll Rights Reserved.\"\"\"\n\ndont_write_bytecode=True\n\nexec_prefix=__BRYTHON__.brython_path\n\nargv=orig_argv=[__BRYTHON__.script_path]+list(__BRYTHON__.get_option('args'))\n\ndef displayhook(value):\n if value is not None:\n stdout.write(repr(value))\n \n__displayhook__=displayhook\n\ndef exit(i=None):\n raise SystemExit('')\n 
\nflags=make_dataclass('flags')(\ndebug=0,\ninspect=0,\ninteractive=0,\noptimize=0,\ndont_write_bytecode=0,\nno_user_site=0,\nno_site=0,\nignore_environment=0,\nverbose=0,\nbytes_warning=0,\nquiet=0,\nhash_randomization=1,\nisolated=0,\ndev_mode=False,\nutf8_mode=0,\nwarn_default_encoding=0\n)\n\ndef getfilesystemencoding(*args,**kw):\n ''\n\n \n return 'utf-8'\n \ndef getfilesystemencodeerrors():\n return \"utf-8\"\n \ndef intern(string):\n return string\n \nint_info=make_dataclass('int_info')(\nbits_per_digit=30,\nsizeof_digit=4,\ndefault_max_str_digits=__BRYTHON__.int_max_str_digits,\nstr_digits_check_threshold=__BRYTHON__.str_digits_check_threshold)\n\ndef get_int_max_str_digits():\n return __BRYTHON__.int_max_str_digits\n \ndef set_int_max_str_digits(value):\n try:\n value=int(value)\n except:\n raise ValueError(f\"'{value.__class__.__name__}' object \"\n \"cannot be interpreted as an integer\")\n if value !=0 and value =other\n \n return NotImplemented\n \n def __gt__(self,other):\n if isinstance(other,tuple):\n return(self.major,self.minor,self.micro)>other\n \n return NotImplemented\n \n def __le__(self,other):\n if isinstance(other,tuple):\n return(self.major,self.minor,self.micro)<=other\n \n return NotImplemented\n \n def __lt__(self,other):\n if isinstance(other,tuple):\n return(self.major,self.minor,self.micro)self.n:\n raise StopIteration\n if not self.zero:\n self.zero=True\n return tuple(self.pool[i]for i in self.indices)\n else :\n try :\n for i in reversed(range(self.r)):\n if self.indices[i]!=i+self.n -self.r:\n break\n self.indices[i]+=1\n for j in range(i+1,self.r):\n self.indices[j]=self.indices[j -1]+1\n return tuple(self.pool[i]for i in self.indices)\n except :\n raise StopIteration\n \nclass combinations_with_replacement:\n def __init__(self,iterable,r):\n self.pool=tuple(iterable)\n self.n=len(self.pool)\n self.r=r\n self.indices=[0]*self.r\n self.zero=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n if not self.n and self.r:\n raise StopIteration\n if not self.zero:\n self.zero=True\n return tuple(self.pool[i]for i in self.indices)\n else :\n try :\n for i in reversed(range(self.r)):\n if self.indices[i]!=self.n -1:\n break\n self.indices[i:]=[self.indices[i]+1]*(self.r -i)\n return tuple(self.pool[i]for i in self.indices)\n except :\n raise StopIteration\n \n \n \nclass compress:\n def __init__(self,data,selectors):\n self.data=iter(data)\n self.selectors=iter(selectors)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n while True :\n next_item=next(self.data)\n next_selector=next(self.selectors)\n if bool(next_selector):\n return next_item\n \n \n \n \nclass count:\n ''\n\n\n\n \n def __init__(self,start=0,step=1):\n if not isinstance(start,(int,float)):\n raise TypeError('a number is required')\n self.times=start -step\n self.step=step\n \n def __iter__(self):\n return self\n \n def __next__(self):\n self.times +=self.step\n return self.times\n \n def __repr__(self):\n return 'count(%d)'%(self.times+self.step)\n \n \n \nclass cycle:\n def __init__(self,iterable):\n self._cur_iter=iter(iterable)\n self._saved=[]\n self._must_save=True\n \n def __iter__(self):\n return self\n \n def __next__(self):\n try :\n next_elt=next(self._cur_iter)\n if self._must_save:\n self._saved.append(next_elt)\n except StopIteration:\n self._cur_iter=iter(self._saved)\n next_elt=next(self._cur_iter)\n self._must_save=False\n return next_elt\n \n \n \nclass dropwhile:\n def __init__(self,predicate,iterable):\n self._predicate=predicate\n 
self._iter=iter(iterable)\n self._dropped=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n value=next(self._iter)\n if self._dropped:\n return value\n while self._predicate(value):\n value=next(self._iter)\n self._dropped=True\n return value\n \n \n \nclass filterfalse:\n def __init__(self,predicate,iterable):\n \n self._iter=iter(iterable)\n if predicate is None :\n self._predicate=bool\n else :\n self._predicate=predicate\n \n def __iter__(self):\n return self\n def __next__(self):\n next_elt=next(self._iter)\n while True :\n if not self._predicate(next_elt):\n return next_elt\n next_elt=next(self._iter)\n \nclass groupby:\n\n\n def __init__(self,iterable,key=None ):\n if key is None :\n key=lambda x:x\n self.keyfunc=key\n self.it=iter(iterable)\n self.tgtkey=self.currkey=self.currvalue=object()\n def __iter__(self):\n return self\n def __next__(self):\n while self.currkey ==self.tgtkey:\n self.currvalue=next(self.it)\n self.currkey=self.keyfunc(self.currvalue)\n self.tgtkey=self.currkey\n return (self.currkey,self._grouper(self.tgtkey))\n def _grouper(self,tgtkey):\n while self.currkey ==tgtkey:\n yield self.currvalue\n self.currvalue=next(self.it)\n self.currkey=self.keyfunc(self.currvalue)\n \n \n \nclass islice:\n def __init__(self,iterable,*args):\n s=slice(*args)\n self.start,self.stop,self.step=s.start or 0,s.stop,s.step\n if not isinstance(self.start,int):\n raise ValueError(\"Start argument must be an integer\")\n if self.stop !=None and not isinstance(self.stop,int):\n raise ValueError(\"Stop argument must be an integer or None\")\n if self.step is None :\n self.step=1\n if self.start <0 or (self.stop !=None and self.stop <0\n )or self.step <=0:\n raise ValueError(\"indices for islice() must be positive\")\n self.it=iter(iterable)\n self.donext=None\n self.cnt=0\n \n def __iter__(self):\n return self\n \n def __next__(self):\n nextindex=self.start\n if self.stop !=None and nextindex >=self.stop:\n raise StopIteration\n while self.cnt <=nextindex:\n nextitem=next(self.it)\n self.cnt +=1\n self.start +=self.step\n return nextitem\n \nclass permutations:\n def __init__(self,iterable,r=None ):\n self.pool=tuple(iterable)\n self.n=len(self.pool)\n self.r=self.n if r is None else r\n self.indices=list(range(self.n))\n self.cycles=list(range(self.n,self.n -self.r,-1))\n self.zero=False\n self.stop=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n indices=self.indices\n if self.r >self.n:\n raise StopIteration\n if not self.zero:\n self.zero=True\n return tuple(self.pool[i]for i in indices[:self.r])\n \n i=self.r -1\n while i >=0:\n j=self.cycles[i]-1\n if j >0:\n self.cycles[i]=j\n indices[i],indices[-j]=indices[-j],indices[i]\n return tuple(self.pool[i]for i in indices[:self.r])\n self.cycles[i]=len(indices)-i\n n1=len(indices)-1\n assert n1 >=0\n num=indices[i]\n for k in range(i,n1):\n indices[k]=indices[k+1]\n indices[n1]=num\n i -=1\n raise StopIteration\n \n \ndef product(*args,repeat=1):\n\n\n pools=[tuple(pool)for pool in args]*repeat\n result=[[]]\n for pool in pools:\n result=[x+[y]for x in result for y in pool]\n for prod in result:\n yield tuple(prod)\n \n \n \n \n \n \n \n \nclass _product:\n def __init__(self,*args,**kw):\n if len(kw)>1:\n raise TypeError(\"product() takes at most 1 argument (%d given)\"%\n len(kw))\n self.repeat=kw.get('repeat',1)\n if not isinstance(self.repeat,int):\n raise TypeError(\"integer argument expected, got %s\"%\n type(self.repeat))\n self.gears=[x for x in args]*self.repeat\n 
self.num_gears=len(self.gears)\n \n self.indicies=[(0,len(self.gears[x]))\n for x in range(0,self.num_gears)]\n self.cont=True\n self.zero=False\n \n def roll_gears(self):\n \n \n \n should_carry=True\n for n in range(0,self.num_gears):\n nth_gear=self.num_gears -n -1\n if should_carry:\n count,lim=self.indicies[nth_gear]\n count +=1\n if count ==lim and nth_gear ==0:\n self.cont=False\n if count ==lim:\n should_carry=True\n count=0\n else :\n should_carry=False\n self.indicies[nth_gear]=(count,lim)\n else :\n break\n \n def __iter__(self):\n return self\n \n def __next__(self):\n if self.zero:\n raise StopIteration\n if self.repeat >0:\n if not self.cont:\n raise StopIteration\n l=[]\n for x in range(0,self.num_gears):\n index,limit=self.indicies[x]\n print('itertools 353',self.gears,x,index)\n l.append(self.gears[x][index])\n self.roll_gears()\n return tuple(l)\n elif self.repeat ==0:\n self.zero=True\n return ()\n else :\n raise ValueError(\"repeat argument cannot be negative\")\n \n \n \nclass repeat:\n def __init__(self,obj,times=None ):\n self._obj=obj\n if times is not None :\n range(times)\n if times <0:\n times=0\n self._times=times\n \n def __iter__(self):\n return self\n \n def __next__(self):\n \n if self._times is not None :\n if self._times <=0:\n raise StopIteration()\n self._times -=1\n return self._obj\n \n def __repr__(self):\n if self._times is not None :\n return 'repeat(%r, %r)'%(self._obj,self._times)\n else :\n return 'repeat(%r)'%(self._obj,)\n \n def __len__(self):\n if self._times ==-1 or self._times is None :\n raise TypeError(\"len() of uniszed object\")\n return self._times\n \n \n \nclass starmap(object):\n def __init__(self,function,iterable):\n self._func=function\n self._iter=iter(iterable)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n t=next(self._iter)\n return self._func(*t)\n \n \n \nclass takewhile(object):\n def __init__(self,predicate,iterable):\n self._predicate=predicate\n self._iter=iter(iterable)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n value=next(self._iter)\n if not self._predicate(value):\n raise StopIteration()\n return value\n \n \n \nclass TeeData(object):\n def __init__(self,iterator):\n self.data=[]\n self._iter=iterator\n \n def __getitem__(self,i):\n \n while i >=len(self.data):\n self.data.append(next(self._iter))\n return self.data[i]\n \n \nclass TeeObject(object):\n def __init__(self,iterable=None ,tee_data=None ):\n if tee_data:\n self.tee_data=tee_data\n self.pos=0\n \n elif isinstance(iterable,TeeObject):\n self.tee_data=iterable.tee_data\n self.pos=iterable.pos\n else :\n self.tee_data=TeeData(iter(iterable))\n self.pos=0\n \n def __next__(self):\n data=self.tee_data[self.pos]\n self.pos +=1\n return data\n \n def __iter__(self):\n return self\n \n \ndef tee(iterable,n=2):\n if isinstance(iterable,TeeObject):\n return tuple([iterable]+\n [TeeObject(tee_data=iterable.tee_data)for i in range(n -1)])\n tee_data=TeeData(iter(iterable))\n return tuple([TeeObject(tee_data=tee_data)for i in range(n)])\n \nclass zip_longest:\n def __init__(self,*args,fillvalue=None ):\n self.args=[iter(arg)for arg in args]\n self.fillvalue=fillvalue\n self.units=len(args)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n temp=[]\n nb=0\n for i in range(self.units):\n try :\n temp.append(next(self.args[i]))\n nb +=1\n except StopIteration:\n temp.append(self.fillvalue)\n if nb ==0:\n raise StopIteration\n return tuple(temp)\n", ["operator"]], "encodings.mac_romanian": [".py", "''\n\n\n\nimport 
codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-romanian',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\u0102'\n'\\u0218'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u03c0'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\u0103'\n'\\u0219'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\xff'\n'\\u0178'\n'\\u2044'\n'\\u20ac'\n'\\u2039'\n'\\u203a'\n'\\u021a'\n'\\u021b'\n'\\u2021'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\xca'\n'\\xc1'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\uf8ff'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\u0131'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u02d8'\n'\\u02d9'\n'\\u02da'\n'\\xb8'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.mac_farsi": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return 
codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-farsi',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xa0'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\u06ba'\n'\\xab'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\u2026'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xbb'\n'\\xf4'\n'\\xf6'\n'\\xf7'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n' '\n'!'\n'\"'\n'#'\n'$'\n'\\u066a'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n'\\u060c'\n'-'\n'.'\n'/'\n'\\u06f0'\n'\\u06f1'\n'\\u06f2'\n'\\u06f3'\n'\\u06f4'\n'\\u06f5'\n'\\u06f6'\n'\\u06f7'\n'\\u06f8'\n'\\u06f9'\n':'\n'\\u061b'\n'<'\n'='\n'>'\n'\\u061f'\n'\\u274a'\n'\\u0621'\n'\\u0622'\n'\\u0623'\n'\\u0624'\n'\\u0625'\n'\\u0626'\n'\\u0627'\n'\\u0628'\n'\\u0629'\n'\\u062a'\n'\\u062b'\n'\\u062c'\n'\\u062d'\n'\\u062e'\n'\\u062f'\n'\\u0630'\n'\\u0631'\n'\\u0632'\n'\\u0633'\n'\\u0634'\n'\\u0635'\n'\\u0636'\n'\\u0637'\n'\\u0638'\n'\\u0639'\n'\\u063a'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'\\u0640'\n'\\u0641'\n'\\u0642'\n'\\u0643'\n'\\u0644'\n'\\u0645'\n'\\u0646'\n'\\u0647'\n'\\u0648'\n'\\u0649'\n'\\u064a'\n'\\u064b'\n'\\u064c'\n'\\u064d'\n'\\u064e'\n'\\u064f'\n'\\u0650'\n'\\u0651'\n'\\u0652'\n'\\u067e'\n'\\u0679'\n'\\u0686'\n'\\u06d5'\n'\\u06a4'\n'\\u06af'\n'\\u0688'\n'\\u0691'\n'{'\n'|'\n'}'\n'\\u0698'\n'\\u06d2'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.idna": [".py", "\n\nimport stringprep,re,codecs\nfrom unicodedata import ucd_3_2_0 as unicodedata\n\n\ndots=re.compile(\"[\\u002E\\u3002\\uFF0E\\uFF61]\")\n\n\nace_prefix=b\"xn--\"\nsace_prefix=\"xn--\"\n\n\ndef nameprep(label):\n\n newlabel=[]\n for c in label:\n if stringprep.in_table_b1(c):\n \n continue\n newlabel.append(stringprep.map_table_b2(c))\n label=\"\".join(newlabel)\n \n \n label=unicodedata.normalize(\"NFKC\",label)\n \n \n for c in label:\n if stringprep.in_table_c12(c)or\\\n stringprep.in_table_c22(c)or\\\n stringprep.in_table_c3(c)or\\\n stringprep.in_table_c4(c)or\\\n stringprep.in_table_c5(c)or\\\n stringprep.in_table_c6(c)or\\\n stringprep.in_table_c7(c)or\\\n stringprep.in_table_c8(c)or\\\n stringprep.in_table_c9(c):\n raise UnicodeError(\"Invalid character %r\"%c)\n \n \n RandAL=[stringprep.in_table_d1(x)for x in label]\n 
for c in RandAL:\n if c:\n \n \n \n \n \n \n if any(stringprep.in_table_d2(x)for x in label):\n raise UnicodeError(\"Violation of BIDI requirement 2\")\n \n \n \n \n \n if not RandAL[0]or not RandAL[-1]:\n raise UnicodeError(\"Violation of BIDI requirement 3\")\n \n return label\n \ndef ToASCII(label):\n try :\n \n label=label.encode(\"ascii\")\n except UnicodeError:\n pass\n else :\n \n \n if 0 =64:\n raise UnicodeError(\"label too long\")\n return result,len(input)\n \n result=bytearray()\n labels=dots.split(input)\n if labels and not labels[-1]:\n trailing_dot=b'.'\n del labels[-1]\n else :\n trailing_dot=b''\n for label in labels:\n if result:\n \n result.extend(b'.')\n result.extend(ToASCII(label))\n return bytes(result+trailing_dot),len(input)\n \n def decode(self,input,errors='strict'):\n \n if errors !='strict':\n raise UnicodeError(\"Unsupported error handling \"+errors)\n \n if not input:\n return \"\",0\n \n \n if not isinstance(input,bytes):\n \n input=bytes(input)\n \n if ace_prefix not in input:\n \n try :\n return input.decode('ascii'),len(input)\n except UnicodeDecodeError:\n pass\n \n labels=input.split(b\".\")\n \n if labels and len(labels[-1])==0:\n trailing_dot='.'\n del labels[-1]\n else :\n trailing_dot=''\n \n result=[]\n for label in labels:\n result.append(ToUnicode(label))\n \n return \".\".join(result)+trailing_dot,len(input)\n \nclass IncrementalEncoder(codecs.BufferedIncrementalEncoder):\n def _buffer_encode(self,input,errors,final):\n if errors !='strict':\n \n raise UnicodeError(\"unsupported error handling \"+errors)\n \n if not input:\n return (b'',0)\n \n labels=dots.split(input)\n trailing_dot=b''\n if labels:\n if not labels[-1]:\n trailing_dot=b'.'\n del labels[-1]\n elif not final:\n \n del labels[-1]\n if labels:\n trailing_dot=b'.'\n \n result=bytearray()\n size=0\n for label in labels:\n if size:\n \n result.extend(b'.')\n size +=1\n result.extend(ToASCII(label))\n size +=len(label)\n \n result +=trailing_dot\n size +=len(trailing_dot)\n return (bytes(result),size)\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n def _buffer_decode(self,input,errors,final):\n if errors !='strict':\n raise UnicodeError(\"Unsupported error handling \"+errors)\n \n if not input:\n return (\"\",0)\n \n \n if isinstance(input,str):\n labels=dots.split(input)\n else :\n \n input=str(input,\"ascii\")\n labels=input.split(\".\")\n \n trailing_dot=''\n if labels:\n if not labels[-1]:\n trailing_dot='.'\n del labels[-1]\n elif not final:\n \n del labels[-1]\n if labels:\n trailing_dot='.'\n \n result=[]\n size=0\n for label in labels:\n result.append(ToUnicode(label))\n if size:\n size +=1\n size +=len(label)\n \n result=\".\".join(result)+trailing_dot\n size +=len(trailing_dot)\n return (result,size)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='idna',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs", "re", "stringprep", "unicodedata"]], "encodings.cp273": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass 
IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp273',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\xa0'\n'\\xe2'\n'{'\n'\\xe0'\n'\\xe1'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xf1'\n'\\xc4'\n'.'\n'<'\n'('\n'+'\n'!'\n'&'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xec'\n'~'\n'\\xdc'\n'$'\n'*'\n')'\n';'\n'^'\n'-'\n'/'\n'\\xc2'\n'['\n'\\xc0'\n'\\xc1'\n'\\xc3'\n'\\xc5'\n'\\xc7'\n'\\xd1'\n'\\xf6'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'`'\n':'\n'#'\n'\\xa7'\n\"'\"\n'='\n'\"'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\xf0'\n'\\xfd'\n'\\xfe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\xa4'\n'\\xb5'\n'\\xdf'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n'\\xd0'\n'\\xdd'\n'\\xde'\n'\\xae'\n'\\xa2'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'@'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xac'\n'|'\n'\\u203e'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'\\xe4'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'\\xa6'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'\\xfc'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'}'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\xd6'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'\\\\'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n']'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.punycode": [".py", "''\n\n\n\n\nimport codecs\n\n\n\ndef segregate(str):\n ''\n base=bytearray()\n extended=set()\n for c in str:\n if ord(c)<128:\n base.append(ord(c))\n else :\n extended.add(c)\n extended=sorted(extended)\n return bytes(base),extended\n \ndef selective_len(str,max):\n ''\n res=0\n for c in str:\n if ord(c)26:return 26\n return res\n \ndigits=b\"abcdefghijklmnopqrstuvwxyz0123456789\"\ndef generate_generalized_integer(N,bias):\n ''\n result=bytearray()\n j=0\n while 1:\n t=T(j,bias)\n if N 455:\n delta=delta //35\n divisions +=36\n bias=divisions+(36 *delta //(delta+38))\n return bias\n \n \ndef generate_integers(baselen,deltas):\n ''\n \n result=bytearray()\n bias=72\n for points,delta in enumerate(deltas):\n s=generate_generalized_integer(delta,bias)\n result.extend(s)\n 
bias=adapt(delta,points ==0,baselen+points+1)\n return bytes(result)\n \ndef punycode_encode(text):\n base,extended=segregate(text)\n deltas=insertion_unsort(text,extended)\n extended=generate_integers(len(base),deltas)\n if base:\n return base+b\"-\"+extended\n return extended\n \n \n \ndef decode_generalized_number(extended,extpos,bias,errors):\n ''\n result=0\n w=1\n j=0\n while 1:\n try :\n char=ord(extended[extpos])\n except IndexError:\n if errors ==\"strict\":\n raise UnicodeError(\"incomplete punicode string\")\n return extpos+1,None\n extpos +=1\n if 0x41 <=char <=0x5A:\n digit=char -0x41\n elif 0x30 <=char <=0x39:\n digit=char -22\n elif errors ==\"strict\":\n raise UnicodeError(\"Invalid extended code point '%s'\"\n %extended[extpos -1])\n else :\n return extpos,None\n t=T(j,bias)\n result +=digit *w\n if digit 0x10FFFF:\n if errors ==\"strict\":\n raise UnicodeError(\"Invalid character U+%x\"%char)\n char=ord('?')\n pos=pos %(len(base)+1)\n base=base[:pos]+chr(char)+base[pos:]\n bias=adapt(delta,(extpos ==0),len(base))\n extpos=newpos\n return base\n \ndef punycode_decode(text,errors):\n if isinstance(text,str):\n text=text.encode(\"ascii\")\n if isinstance(text,memoryview):\n text=bytes(text)\n pos=text.rfind(b\"-\")\n if pos ==-1:\n base=\"\"\n extended=str(text,\"ascii\").upper()\n else :\n base=str(text[:pos],\"ascii\",errors)\n extended=str(text[pos+1:],\"ascii\").upper()\n return insertion_sort(base,extended,errors)\n \n \n \nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n res=punycode_encode(input)\n return res,len(input)\n \n def decode(self,input,errors='strict'):\n if errors not in ('strict','replace','ignore'):\n raise UnicodeError(\"Unsupported error handling \"+errors)\n res=punycode_decode(input,errors)\n return res,len(input)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return punycode_encode(input)\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n if self.errors not in ('strict','replace','ignore'):\n raise UnicodeError(\"Unsupported error handling \"+self.errors)\n return punycode_decode(input,self.errors)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='punycode',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs"]], "encodings.raw_unicode_escape": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n\n\n encode=codecs.raw_unicode_escape_encode\n decode=codecs.raw_unicode_escape_decode\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.raw_unicode_escape_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n def _buffer_decode(self,input,errors,final):\n return codecs.raw_unicode_escape_decode(input,errors,final)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n def decode(self,input,errors='strict'):\n return codecs.raw_unicode_escape_decode(input,errors,False)\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='raw-unicode-escape',\n encode=Codec.encode,\n decode=Codec.decode,\n incrementalencoder=IncrementalEncoder,\n 
incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs"]], "encodings.utf_8": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nencode=codecs.utf_8_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_8_decode(input,errors,True )\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.utf_8_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=codecs.utf_8_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=codecs.utf_8_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=codecs.utf_8_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-8',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.cp1252": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1252',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\u0160'\n'\\u2039'\n'\\u0152'\n'\\ufffe'\n'\\u017d'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\u0161'\n'\\u203a'\n'\\u0153'\n'\\ufffe'\n'\\u017e'\n'\\u0178'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xd0'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\xde'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xf0'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\xfe'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp869": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp869',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:None ,\n0x0081:None ,\n0x0082:None ,\n0x0083:None ,\n0x0084:None ,\n0x0085:None ,\n0x0086:0x0386,\n0x0087:None ,\n0x0088:0x00b7,\n0x0089:0x00ac,\n0x008a:0x00a6,\n0x008b:0x2018,\n0x008c:0x2019,\n0x008d:0x0388,\n0x008e:0x2015,\n0x008f:0x0389,\n0x0090:0x038a,\n0x0091:0x03aa,\n0x0092:0x038c,\n0x0093:None ,\n0x0094:None 
,\n0x0095:0x038e,\n0x0096:0x03ab,\n0x0097:0x00a9,\n0x0098:0x038f,\n0x0099:0x00b2,\n0x009a:0x00b3,\n0x009b:0x03ac,\n0x009c:0x00a3,\n0x009d:0x03ad,\n0x009e:0x03ae,\n0x009f:0x03af,\n0x00a0:0x03ca,\n0x00a1:0x0390,\n0x00a2:0x03cc,\n0x00a3:0x03cd,\n0x00a4:0x0391,\n0x00a5:0x0392,\n0x00a6:0x0393,\n0x00a7:0x0394,\n0x00a8:0x0395,\n0x00a9:0x0396,\n0x00aa:0x0397,\n0x00ab:0x00bd,\n0x00ac:0x0398,\n0x00ad:0x0399,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x039a,\n0x00b6:0x039b,\n0x00b7:0x039c,\n0x00b8:0x039d,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x039e,\n0x00be:0x039f,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x03a0,\n0x00c7:0x03a1,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x03a3,\n0x00d0:0x03a4,\n0x00d1:0x03a5,\n0x00d2:0x03a6,\n0x00d3:0x03a7,\n0x00d4:0x03a8,\n0x00d5:0x03a9,\n0x00d6:0x03b1,\n0x00d7:0x03b2,\n0x00d8:0x03b3,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x03b4,\n0x00de:0x03b5,\n0x00df:0x2580,\n0x00e0:0x03b6,\n0x00e1:0x03b7,\n0x00e2:0x03b8,\n0x00e3:0x03b9,\n0x00e4:0x03ba,\n0x00e5:0x03bb,\n0x00e6:0x03bc,\n0x00e7:0x03bd,\n0x00e8:0x03be,\n0x00e9:0x03bf,\n0x00ea:0x03c0,\n0x00eb:0x03c1,\n0x00ec:0x03c3,\n0x00ed:0x03c2,\n0x00ee:0x03c4,\n0x00ef:0x0384,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:0x03c5,\n0x00f3:0x03c6,\n0x00f4:0x03c7,\n0x00f5:0x00a7,\n0x00f6:0x03c8,\n0x00f7:0x0385,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x03c9,\n0x00fb:0x03cb,\n0x00fc:0x03b0,\n0x00fd:0x03ce,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0386'\n'\\ufffe'\n'\\xb7'\n'\\xac'\n'\\xa6'\n'\\u2018'\n'\\u2019'\n'\\u0388'\n'\\u2015'\n'\\u0389'\n'\\u038a'\n'\\u03aa'\n'\\u038c'\n'\\ufffe'\n'\\ufffe'\n'\\u038e'\n'\\u03ab'\n'\\xa9'\n'\\u038f'\n'\\xb2'\n'\\xb3'\n'\\u03ac'\n'\\xa3'\n'\\u03ad'\n'\\u03ae'\n'\\u03af'\n'\\u03ca'\n'\\u0390'\n'\\u03cc'\n'\\u03cd'\n'\\u0391'\n'\\u0392'\n'\\u0393'\n'\\u0394'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\xbd'\n'\\u0398'\n'\\u0399'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u039a'\n'\\u039b'\n'\\u039c'\n'\\u039d'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u039e'\n'\\u039f'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u03a0'\n'\\u03a1'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u03a3'\n'\\u03a4'\n'\\u03a5'\n'\\u03a6'\n'\\u03a7'\n'\\u03a8'\n'\\u03a9'\n'\\u03b1'\n'\\u03b2'\n'\\u03b3'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u03b4'\n'\\u03b5'\n'\\u2580'\n'\\u03b6'\n'\\u03b7'\n'\\u03b8'\n'\\u03b9'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03be'\n'\\u03bf'\n'\\u03c0'\n'\\u03c1'\n'\\u03c3'\n'\\u03c2'\n'\\u03c4'\n'\\u0384'\n'\\xad'\n'\\xb1'\n'\\u03c5'\n'\\u03c6'\n'\\u03c7'\n'\\xa7'\n'\\u03c8'\n'\\u0385'\n'\\xb0'\n'\\xa8'\n'\\u03c9'\n'\\u03cb'\n'\\u03b0'\n'\\u03ce'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x
0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a3:0x009c,\n0x00a6:0x008a,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00a9:0x0097,\n0x00ab:0x00ae,\n0x00ac:0x0089,\n0x00ad:0x00f0,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x0099,\n0x00b3:0x009a,\n0x00b7:0x0088,\n0x00bb:0x00af,\n0x00bd:0x00ab,\n0x0384:0x00ef,\n0x0385:0x00f7,\n0x0386:0x0086,\n0x0388:0x008d,\n0x0389:0x008f,\n0x038a:0x0090,\n0x038c:0x0092,\n0x038e:0x0095,\n0x038f:0x0098,\n0x0390:0x00a1,\n0x0391:0x00a4,\n0x0392:0x00a5,\n0x0393:0x00a6,\n0x0394:0x00a7,\n0x0395:0x00a8,\n0x0396:0x00a9,\n0x0397:0x00aa,\n0x0398:0x00ac,\n0x0399:0x00ad,\n0x039a:0x00b5,\n0x039b:0x00b6,\n0x039c:0x00b7,\n0x039d:0x00b8,\n0x039e:0x00bd,\n0x039f:0x00be,\n0x03a0:0x00c6,\n0x03a1:0x00c7,\n0x03a3:0x00cf,\n0x03a4:0x00d0,\n0x03a5:0x00d1,\n0x03a6:0x00d2,\n0x03a7:0x00d3,\n0x03a8:0x00d4,\n0x03a9:0x00d5,\n0x03aa:0x0091,\n0x03ab:0x0096,\n0x03ac:0x009b,\n0x03ad:0x009d,\n0x03ae:0x009e,\n0x03af:0x009f,\n0x03b0:0x00fc,\n0x03b1:0x00d6,\n0x03b2:0x00d7,\n0x03b3:0x00d8,\n0x03b4:0x00dd,\n0x03b5:0x00de,\n0x03b6:0x00e0,\n0x03b7:0x00e1,\n0x03b8:0x00e2,\n0x03b9:0x00e3,\n0x03ba:0x00e4,\n0x03bb:0x00e5,\n0x03bc:0x00e6,\n0x03bd:0x00e7,\n0x03be:0x00e8,\n0x03bf:0x00e9,\n0x03c0:0x00ea,\n0x03c1:0x00eb,\n0x03c2:0x00ed,\n0x03c3:0x00ec,\n0x03c4:0x00ee,\n0x03c5:0x00f2,\n0x03c6:0x00f3,\n0x03c7:0x00f4,\n0x03c8:0x00f6,\n0x03c9:0x00fa,\n0x03ca:0x00a0,\n0x03cb:0x00fb,\n0x03cc:0x00a2,\n0x03cd:0x00a3,\n0x03ce:0x00fd,\n0x2015:0x008e,\n0x2018:0x008b,\n0x2019:0x008c,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.iso8859_14": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-14',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u1e02'\n'\\u1e03'\n'\\xa3'\n'\\u010a'\n'\\u010b'\n'\\u1e0a'\n'\\xa7'\n'\\u1e80'\n'\\xa9'\n'\\u1e82'\n'\\u1e0b'\n'\\u1ef2'\n'\\xad'\n'\\xae'\n'\\u0178'\n'\\u1e1e'\n'\\u1e1f'\n'\\u0120'\n'\\u0121'\n'\\u1e40'\n'\\u1e41'\n'\\xb6'\n'\\u1e56'\n'\\u1e81'\n'\\u1e57'\n'\\u1e83'\n'\\u1e60'\n'\\u1ef3'\n'\\u1e84'\n'\\u1e85'\n'\\u1e61'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u0174'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\u1e6a'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\u0176'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\u0175'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\u1e6b'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\u0177'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_2": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-2',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0104'\n'\\u02d8'\n'\\u0141'\n'\\xa4'\n'\\u013d'\n'\\u015a'\n'\\xa7'\n'\\xa8'\n'\\u0160'\n'\\u015e'\n'\\u0164'\n'\\u0179'\n'\\xad'\n'\\u017d'\n'\\u017b'\n'\\xb0'\n'\\u0105'\n'\\u02db'\n'\\u0142'\n'\\xb4'\n'\\u013e'\n'\\u015b'\n'\\u02c7'\n'\\xb8'\n'\\u0161'\n'\\u015f'\n'\\u0165'\n'\\u017a'\n'\\u02dd'\n'\\u017e'\n'\\u017c'\n'\\u0154'\n'\\xc1'\n'\\xc2'\n'\\u0102'\n'\\xc4'\n'\\u0139'\n'\\u0106'\n'\\xc7'\n'\\u010c'\n'\\xc9'\n'\\u0118'\n'\\xcb'\n'\\u011a'\n'\\xcd'\n'\\xce'\n'\\u010e'\n'\\u0110'\n'\\u0143'\n'\\u0147'\n'\\xd3'\n'\\xd4'\n'\\u0150'\n'\\xd6'\n'\\xd7'\n'\\u0158'\n'\\u016e'\n'\\xda'\n'\\u0170'\n'\\xdc'\n'\\xdd'\n'\\u0162'\n'\\xdf'\n'\\u0155'\n'\\xe1'\n'\\xe2'\n'\\u0103'\n'\\xe4'\n'\\u013a'\n'\\u0107'\n'\\xe7'\n'\\u010d'\n'\\xe9'\n'\\u0119'\n'\\xeb'\n'\\u011b'\n'\\xed'\n'\\xee'\n'\\u010f'\n'\\u0111'\n'\\u0144'\n'\\u0148'\n'\\xf3'\n'\\xf4'\n'\\u0151'\n'\\xf6'\n'\\xf7'\n'\\u0159'\n'\\u016f'\n'\\xfa'\n'\\u0171'\n'\\xfc'\n'\\xfd'\n'\\u0163'\n'\\u02d9'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.mac_arabic": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-arabic',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c4,\n0x0081:0x00a0,\n0x0082:0x00c7,\n0x0083:0x00c9,\n0x0084:0x00d1,\n0x0085:0x00d6,\n0x0086:0x00dc,\n0x0087:0x00e1,\n0x0088:0x00e0,\n0x0089:0x00e2,\n0x008a:0x00e4,\n0x008b:0x06ba,\n0x008c:0x00ab,\n0x008d:0x00e7,\n0x008e:0x00e9,\n0x008f:0x00e8,\n0x0090:0x00ea,\n0x0091:0x00eb,\n0x0092:0x00ed,\n0x0093:0x2026,\n0x0094:0x00ee,\n0x0095:0x00ef,\n0x0096:0x00f1,\n0x0097:0x00f3,\n0x0098:0x00bb,\n0x0099:0x00f4,\n0x009a:0x00f6,\n0x009b:0x00f7,\n0x009c:0x00fa,\n0x009d:0x00f9,\n0x009e:0x00fb,\n0x009f:0x00fc,\n0x00a0:0x0020,\n0x00a1:0x0021,\n0x00a2:0x0022,\n0x00a3:0x0023,\n0x00a4:0x0024,\n0x00a5:0x066a,\n0x00a6:0x0026,\n0x00a7:0x0027,\n0x00a8:0x0028,\n0x00a9:0x0029,\n0x00aa:0x002a,\n0x00ab:0x002b,\n0x00ac:0x060c,\n0x00ad:0x002d,\n0x00ae:0x002e,\n0x00af:0x002f,\n0x00b0:0x0660,\n0x00b1:0x0661,\n0x00b2:0x0662,\n0x00b3:0x0663,\n0x00b4:0x0664,\n0x00b5:0x0665,\n0x00b6:0x0666,\n0x00b7:0x0667,\n0x00b8:0x0668,\n0x00b9:0x0669,\n0x00ba:0x003a,\n0x00bb:0x061b,\n0x00bc:0x003c,\n0x00bd:0x003d,\n0x00be:0x003e,\n0x00bf:0x061f,\n0x00c0:0x274a,\n0x00c1:0x0621,\n0x00c2:0x0622,\n0x00c3:0x0623,\n0x00c4:0x0624,\n0x00c5:0x0625,\n0x00c6:0x0626,\n0x00c7:0x0627,\n0x00c8:0x0628,\n0x00c9:0x0629,\n0x00ca:0x062a,\n0x00cb:0x062b,\n0x00cc:0x062c,\n0x00cd:0x062d,\n0x00ce:0x062e,\n0x00cf:0x062f,\n0x00d0:0x0630,\n0x00d1:0x0631,\n0x00d2:0x0632,\n0x00d3:0x0633,\n0x00d4:0x0634,\n0x00d5:0x0635,\n0x00d6:0x0636,\n0x00d7:0x0637,\n0x00d8:0x0638,\n0x00d9:0x0639,\n0x00da:0x063a,\n0x00db:0x005b,\n0x00dc:0x005c,\n0x00dd:0x005d,\n0x00de:0x005e,\n0x00df:0x005f,\n0x00e0:0x0640,\n0x00e1:0x0641,\n0x00e2:0x0642,\n0x00e3:0x0643,\n0x00e4:0x0644,\n0x00e5:0x0645,\n0x00e6:0x0646,\n0x00e7:0x0647,\n0x00e8:0x0648,\n0x00e9:0x0649,\n0x00ea:0x064a,\n0x00eb:0x064b,\n0x00ec:0x064c,\n0x00ed:0x064d,\n0x00ee:0x064e,\n0x00ef:0x064f,\n0x00f0:0x0650,\n0x00f1:0x0651,\n0x00f2:0x0652,\n0x00f3:0x067e,\n0x00f4:0x0679,\n0x00f5:0x0686,\n0x00f6:0x06d5,\n0x00f7:0x06a4,\n0x00f8:0x06af,\n0x00f9:0x0688,\n0x00fa:0x0691,\n0x00fb:0x007b,\n0x00fc:0x007c,\n0x00fd:0x007d,\n0x00fe:0x0698,\n0x00ff:0x06d2,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xa0'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\u06ba'\n'\\xab'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\u2026'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xbb'\n'\\xf4'\n'\\xf6'\n'\\xf7'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'\\u066a'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n'\\u060c'\n'-'\n'.'\n'/'\n'\\u0660'\n'\\u0661'\n'\\u0662'\n'\\u0663'\n'\\u0664'\n'\\u0665'\n'\\u0666'\n'\\u0667'\n'\\u0668'\n'\\u0669'\n':'\n'\\u061b'\n'<'\n'='\n'>'\n'\\u061f'\n'\\u274a'\n'\\u0621'\n'\\u0622'\n'\\u0623'\n'\\u0624'\n'\\u0625'\n'\\u0626'\n'\\u0627'\n'\\u0628'\n'\\u0629'\n'\\u062a'\n'\\u062b'\n'\\u062c'\n'\\u062d'\n'\\u062e'\n'\\u062f'\n'\\u0630'\n'\\u0631'\n'\\u0632'\n'\\u0633'\n'\\u0634'\n'\\u0635'\n'\\u0636'\n'\\u0637'\n'\\u0638'\n'\\u0639'\n'\\u063a'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'\\u0640'\n'\\u0641'\n'\\u0642'\n'\\u0643'\n'\\u0644'\n'\\u0645'\n'\\u0646'\n'\\u0647'\n'\\u0648'\n'\\u0649'\n'\\u064a'\n'\\u064b'\n'\\u064c'\n'\\u064d'\n'\\u064e'\n'\\u064f'\n'\\u0650'\n'\\u0651'\n'\\u0652'\n'\\u067e'\n'\\u0679'\n'\\u0686'\n'\\u06d5'\n'\\u06a4'\n'\\u06af'\n'\\u0688'\n'\\u0691'\n'{'\n'|'\n'}'\n'\\u0698'\n'\\u06d2'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0020:0x00a0,\n0x0021:0x0021,\n0x0021:0x00a1,\n0x0022:0x0022,\n0x0022:0x00a2,\n0x0023:0x0023,\n0x0023:0x00a3,\n0x0024:0x0024,\n0x0024:0x00a4,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0026:0x00a6,\n0x0027:0x0027,\n0x0027:0x00a7,\n0x0028:0x0028,\n0x0028:0x00a8,\n0x0029:0x0029,\n0x0029:0x00a9,\n0x002a:0x002a,\n0x002a:0x00aa,\n0x002b:0x002b,\n0x002b:0x00ab,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002d:0x00ad,\n0x002e:0x002e,\n0x002e:0x00ae,\n0x002f:0x002f,\n0x002f:0x00af,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003a:0x00ba,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003c:0x00bc,\n0x003d:0x003d,\n0x003d:0x00bd,\n0x003e:0x003e,\n0x003e:0x00be,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005b:0x00db,\n0x005c:0x005c,\n0x005c:0x00dc,\n0x005d:0x005d,\n0x005d:0x00dd,\n0x005e:0x005e,\n0x005e:0x00de,\n0x005f:0x005f,\n0x005f:0x00df,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007b:0x00fb,\n0x007c:0x007c,\n0x007c:0x00fc,\n0x007d:0x007d,\n0x007d:0x00fd,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x0081,\n0x00ab:0x008c,\n0x00bb:0x0098,\n0x00c4:0x0080,\n0x00c7:0x0082,\n0x00c9:0x0083,\n0x00d1:0x0084,\n0x00d6:0x0085,\n0x00dc:0x0086,\n0x00e0:0x0088,\n0x
00e1:0x0087,\n0x00e2:0x0089,\n0x00e4:0x008a,\n0x00e7:0x008d,\n0x00e8:0x008f,\n0x00e9:0x008e,\n0x00ea:0x0090,\n0x00eb:0x0091,\n0x00ed:0x0092,\n0x00ee:0x0094,\n0x00ef:0x0095,\n0x00f1:0x0096,\n0x00f3:0x0097,\n0x00f4:0x0099,\n0x00f6:0x009a,\n0x00f7:0x009b,\n0x00f9:0x009d,\n0x00fa:0x009c,\n0x00fb:0x009e,\n0x00fc:0x009f,\n0x060c:0x00ac,\n0x061b:0x00bb,\n0x061f:0x00bf,\n0x0621:0x00c1,\n0x0622:0x00c2,\n0x0623:0x00c3,\n0x0624:0x00c4,\n0x0625:0x00c5,\n0x0626:0x00c6,\n0x0627:0x00c7,\n0x0628:0x00c8,\n0x0629:0x00c9,\n0x062a:0x00ca,\n0x062b:0x00cb,\n0x062c:0x00cc,\n0x062d:0x00cd,\n0x062e:0x00ce,\n0x062f:0x00cf,\n0x0630:0x00d0,\n0x0631:0x00d1,\n0x0632:0x00d2,\n0x0633:0x00d3,\n0x0634:0x00d4,\n0x0635:0x00d5,\n0x0636:0x00d6,\n0x0637:0x00d7,\n0x0638:0x00d8,\n0x0639:0x00d9,\n0x063a:0x00da,\n0x0640:0x00e0,\n0x0641:0x00e1,\n0x0642:0x00e2,\n0x0643:0x00e3,\n0x0644:0x00e4,\n0x0645:0x00e5,\n0x0646:0x00e6,\n0x0647:0x00e7,\n0x0648:0x00e8,\n0x0649:0x00e9,\n0x064a:0x00ea,\n0x064b:0x00eb,\n0x064c:0x00ec,\n0x064d:0x00ed,\n0x064e:0x00ee,\n0x064f:0x00ef,\n0x0650:0x00f0,\n0x0651:0x00f1,\n0x0652:0x00f2,\n0x0660:0x00b0,\n0x0661:0x00b1,\n0x0662:0x00b2,\n0x0663:0x00b3,\n0x0664:0x00b4,\n0x0665:0x00b5,\n0x0666:0x00b6,\n0x0667:0x00b7,\n0x0668:0x00b8,\n0x0669:0x00b9,\n0x066a:0x00a5,\n0x0679:0x00f4,\n0x067e:0x00f3,\n0x0686:0x00f5,\n0x0688:0x00f9,\n0x0691:0x00fa,\n0x0698:0x00fe,\n0x06a4:0x00f7,\n0x06af:0x00f8,\n0x06ba:0x008b,\n0x06d2:0x00ff,\n0x06d5:0x00f6,\n0x2026:0x0093,\n0x274a:0x00c0,\n}\n", ["codecs"]], "encodings.mac_croatian": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-croatian',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\u0160'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\u017d'\n'\\xd8'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\u2206'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u0161'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\u017e'\n'\\xf8'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u0106'\n'\\xab'\n'\\u010c'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u0110'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\uf8ff'\n'\\xa9'\n'\\u2044'\n'\\u20ac'\n'\\u2039'\n'\\u203a'\n'\\xc6'\n'\\xbb'\n'\\u2013'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\u0107'\n'\\xc1'\n'\\u010d'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\u0111'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\u0131'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u03c0'\n'\\xcb'\n'\\u02da'\n'\\xb8'\n'\\xca'\n'\\xe6'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.big5hkscs": [".py", "\n\n\n\n\n\nimport _codecs_hk,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_hk.getcodec('big5hkscs')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='big5hkscs',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_hk", "_multibytecodec", "codecs"]], "encodings.cp1256": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1256',\n 
encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\u067e'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\u0679'\n'\\u2039'\n'\\u0152'\n'\\u0686'\n'\\u0698'\n'\\u0688'\n'\\u06af'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u06a9'\n'\\u2122'\n'\\u0691'\n'\\u203a'\n'\\u0153'\n'\\u200c'\n'\\u200d'\n'\\u06ba'\n'\\xa0'\n'\\u060c'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\u06be'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\u061b'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\u061f'\n'\\u06c1'\n'\\u0621'\n'\\u0622'\n'\\u0623'\n'\\u0624'\n'\\u0625'\n'\\u0626'\n'\\u0627'\n'\\u0628'\n'\\u0629'\n'\\u062a'\n'\\u062b'\n'\\u062c'\n'\\u062d'\n'\\u062e'\n'\\u062f'\n'\\u0630'\n'\\u0631'\n'\\u0632'\n'\\u0633'\n'\\u0634'\n'\\u0635'\n'\\u0636'\n'\\xd7'\n'\\u0637'\n'\\u0638'\n'\\u0639'\n'\\u063a'\n'\\u0640'\n'\\u0641'\n'\\u0642'\n'\\u0643'\n'\\xe0'\n'\\u0644'\n'\\xe2'\n'\\u0645'\n'\\u0646'\n'\\u0647'\n'\\u0648'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\u0649'\n'\\u064a'\n'\\xee'\n'\\xef'\n'\\u064b'\n'\\u064c'\n'\\u064d'\n'\\u064e'\n'\\xf4'\n'\\u064f'\n'\\u0650'\n'\\xf7'\n'\\u0651'\n'\\xf9'\n'\\u0652'\n'\\xfb'\n'\\xfc'\n'\\u200e'\n'\\u200f'\n'\\u06d2'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_6": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-6',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa4'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u060c'\n'\\xad'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u061b'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u061f'\n'\\ufffe'\n'\\u0621'\n'\\u0622'\n'\\u0623'\n'\\u0624'\n'\\u0625'\n'\\u0626'\n'\\u0627'\n'\\u0628'\n'\\u0629'\n'\\u062a'\n'\\u062b'\n'\\u062c'\n'\\u062d'\n'\\u062e'\n'\\u062f'\n'\\u0630'\n'\\u0631'\n'\\u0632'\n'\\u0633'\n'\\u0634'\n'\\u0635'\n'\\u0636'\n'\\u0637'\n'\\u0638'\n'\\u0639'\n'\\u063a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0640'\n'\\u0641'\n'\\u0642'\n'\\u0643'\n'\\u0644'\n'\\u0645'\n'\\u0646'\n'\\u0647'\n'\\u0648'\n'\\u0649'\n'\\u064a'\n'\\u064b'\n'\\u064c'\n'\\u064d'\n'\\u064e'\n'\\u064f'\n'\\u0650'\n'\\u0651'\n'\\u0652'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_10": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-10',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0104'\n'\\u0112'\n'\\u0122'\n'\\u012a'\n'\\u0128'\n'\\u0136'\n'\\xa7'\n'\\u013b'\n'\\u0110'\n'\\u0160'\n'\\u0166'\n'\\u017d'\n'\\xad'\n'\\u016a'\n'\\u014a'\n'\\xb0'\n'\\u0105'\n'\\u0113'\n'\\u0123'\n'\\u012b'\n'\\u0129'\n'\\u0137'\n'\\xb7'\n'\\u013c'\n'\\u0111'\n'\\u0161'\n'\\u0167'\n'\\u017e'\n'\\u2015'\n'\\u016b'\n'\\u014b'\n'\\u0100'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\u012e'\n'\\u010c'\n'\\xc9'\n'\\u0118'\n'\\xcb'\n'\\u0116'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xd0'\n'\\u0145'\n'\\u014c'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\u0168'\n'\\xd8'\n'\\u0172'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\xde'\n'\\xdf'\n'\\u0101'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\u012f'\n'\\u010d'\n'\\xe9'\n'\\u0119'\n'\\xeb'\n'\\u0117'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xf0'\n'\\u0146'\n'\\u014d'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\u0169'\n'\\xf8'\n'\\u0173'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\xfe'\n'\\u0138'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso2022_kr": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_kr')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_kr',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "encodings.cp1140": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n 
name='cp1140',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\xa0'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe1'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xf1'\n'\\xa2'\n'.'\n'<'\n'('\n'+'\n'|'\n'&'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xec'\n'\\xdf'\n'!'\n'$'\n'*'\n')'\n';'\n'\\xac'\n'-'\n'/'\n'\\xc2'\n'\\xc4'\n'\\xc0'\n'\\xc1'\n'\\xc3'\n'\\xc5'\n'\\xc7'\n'\\xd1'\n'\\xa6'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\xf0'\n'\\xfd'\n'\\xfe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\u20ac'\n'\\xb5'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n'\\xd0'\n'\\xdd'\n'\\xde'\n'\\xae'\n'^'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'['\n']'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'\\xfc'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\\\'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'\\xd6'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n'\\xdc'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp1125": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1125',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0410,\n0x0081:0x0411,\n0x0082:0x0412,\n0x0083:0x0413,\n0x0084:0x0414,\n0x0085:0x0415,\n0x0086:0x0416,\n0x0087:0x0417,\n0x0088:0x0418,\n0x0089:0x0419,\n0x008a:0x041a,\n0x008b:0x041b,\n0x008c:0x041c,\n0x008d:0x041d,\n0x008e:0x041e,\n0x008f:0x041f,\n0x0090:0x0420,\n0x0091:0x0421,\n0x0092:0x0422,\n0x0093:0x0423,\n0x0094:0x0424,\n0x0095:0x0425,\n0x0096:0x0426,\n0x0097:0x0427,\n0x0098:0x0428,\n0x0099:0x0429,\n0x009a:0x042a,\n0x009b:0x042b,\n0x009c:0x042c,\n0x009d:0x042d,\n0x009e:0x042e,\n0x009f:0x042f,\n0x00a0:0x0430,\n0x00a1:0x0431,\n0x00a2:0x0432,\n0x00a3:0x0433,\n0x00a4:0x0434,\n0x00a5:0x0435,\n0x00a6:0x0436,\n0x00a7:0x0437,\n0x00a8:0x0438,\n0x00a9:0x0439,\n0x00aa:0x043a,\n0x00ab:0x043b,\n0x00ac:0x043c,\n0x00ad:0x043d,\n0x00ae:0x043e,\n0x00af:0x043f,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x0440,\n0x00e1:0x0441,\n0x00e2:0x0442,\n0x00e3:0x0443,\n0x00e4:0x0444,\n0x00e5:0x0445,\n0x00e6:0x0446,\n0x00e7:0x0447,\n0x00e8:0x0448,\n0x00e9:0x0449,\n0x00ea:0x044a,\n0x00eb:0x044b,\n0x00ec:0x044c,\n0x00ed:0x044d,\n0x00ee:0x044e,\n0x00ef:0x044f,\n0x00f0:0x0401,\n0x00f1:0x0451,\n0x00f2:0x0490,\n0x00f3:0x0491,\n0x00f4:0x0404,\n0x00f5:0x0454,\n0x00f6:0x0406,\n0x00f7:0x0456,\n0x00f8:0x0407,\n0x00f9:0x0457,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x2116,\n0x00fd:0x00a4,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n'\\u0401'\n'\\u0451'\n'\\u0490'\n'\\u0491'\n'\\u0404'\n'\\u0454'\n'\\u0406'\n'\\u0456'\n'\\u0407'\n'\\u0457'\n'\\xb7'\n'\\u221a'\n'\\u2116'\n'\\xa4'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0
x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a4:0x00fd,\n0x00b7:0x00fa,\n0x0401:0x00f0,\n0x0404:0x00f4,\n0x0406:0x00f6,\n0x0407:0x00f8,\n0x0410:0x0080,\n0x0411:0x0081,\n0x0412:0x0082,\n0x0413:0x0083,\n0x0414:0x0084,\n0x0415:0x0085,\n0x0416:0x0086,\n0x0417:0x0087,\n0x0418:0x0088,\n0x0419:0x0089,\n0x041a:0x008a,\n0x041b:0x008b,\n0x041c:0x008c,\n0x041d:0x008d,\n0x041e:0x008e,\n0x041f:0x008f,\n0x0420:0x0090,\n0x0421:0x0091,\n0x0422:0x0092,\n0x0423:0x0093,\n0x0424:0x0094,\n0x0425:0x0095,\n0x0426:0x0096,\n0x0427:0x0097,\n0x0428:0x0098,\n0x0429:0x0099,\n0x042a:0x009a,\n0x042b:0x009b,\n0x042c:0x009c,\n0x042d:0x009d,\n0x042e:0x009e,\n0x042f:0x009f,\n0x0430:0x00a0,\n0x0431:0x00a1,\n0x0432:0x00a2,\n0x0433:0x00a3,\n0x0434:0x00a4,\n0x0435:0x00a5,\n0x0436:0x00a6,\n0x0437:0x00a7,\n0x0438:0x00a8,\n0x0439:0x00a9,\n0x043a:0x00aa,\n0x043b:0x00ab,\n0x043c:0x00ac,\n0x043d:0x00ad,\n0x043e:0x00ae,\n0x043f:0x00af,\n0x0440:0x00e0,\n0x0441:0x00e1,\n0x0442:0x00e2,\n0x0443:0x00e3,\n0x0444:0x00e4,\n0x0445:0x00e5,\n0x0446:0x00e6,\n0x0447:0x00e7,\n0x0448:0x00e8,\n0x0449:0x00e9,\n0x044a:0x00ea,\n0x044b:0x00eb,\n0x044c:0x00ec,\n0x044d:0x00ed,\n0x044e:0x00ee,\n0x044f:0x00ef,\n0x0451:0x00f1,\n0x0454:0x00f5,\n0x0456:0x00f7,\n0x0457:0x00f9,\n0x0490:0x00f2,\n0x0491:0x00f3,\n0x2116:0x00fc,\n0x221a:0x00fb,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.iso2022_jp_1": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_jp_1')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_jp_1',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "encodings.cp1257": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n 
return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1257',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\ufffe'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\ufffe'\n'\\u2030'\n'\\ufffe'\n'\\u2039'\n'\\ufffe'\n'\\xa8'\n'\\u02c7'\n'\\xb8'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\ufffe'\n'\\u203a'\n'\\ufffe'\n'\\xaf'\n'\\u02db'\n'\\ufffe'\n'\\xa0'\n'\\ufffe'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\ufffe'\n'\\xa6'\n'\\xa7'\n'\\xd8'\n'\\xa9'\n'\\u0156'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xc6'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xf8'\n'\\xb9'\n'\\u0157'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xe6'\n'\\u0104'\n'\\u012e'\n'\\u0100'\n'\\u0106'\n'\\xc4'\n'\\xc5'\n'\\u0118'\n'\\u0112'\n'\\u010c'\n'\\xc9'\n'\\u0179'\n'\\u0116'\n'\\u0122'\n'\\u0136'\n'\\u012a'\n'\\u013b'\n'\\u0160'\n'\\u0143'\n'\\u0145'\n'\\xd3'\n'\\u014c'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\u0172'\n'\\u0141'\n'\\u015a'\n'\\u016a'\n'\\xdc'\n'\\u017b'\n'\\u017d'\n'\\xdf'\n'\\u0105'\n'\\u012f'\n'\\u0101'\n'\\u0107'\n'\\xe4'\n'\\xe5'\n'\\u0119'\n'\\u0113'\n'\\u010d'\n'\\xe9'\n'\\u017a'\n'\\u0117'\n'\\u0123'\n'\\u0137'\n'\\u012b'\n'\\u013c'\n'\\u0161'\n'\\u0144'\n'\\u0146'\n'\\xf3'\n'\\u014d'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\u0173'\n'\\u0142'\n'\\u015b'\n'\\u016b'\n'\\xfc'\n'\\u017c'\n'\\u017e'\n'\\u02d9'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp949": [".py", "\n\n\n\n\n\nimport _codecs_kr,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_kr.getcodec('cp949')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass 
StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp949',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_kr", "_multibytecodec", "codecs"]], "encodings.cp858": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp858',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x00ec,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00f2,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x00ff,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x00d7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x00ae,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x00c1,\n0x00b6:0x00c2,\n0x00b7:0x00c0,\n0x00b8:0x00a9,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x00a2,\n0x00be:0x00a5,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x00e3,\n0x00c7:0x00c3,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x00a4,\n0x00d0:0x00f0,\n0x00d1:0x00d0,\n0x00d2:0x00ca,\n0x00d3:0x00cb,\n0x00d4:0x00c8,\n0x00d5:0x20ac,\n0x00d6:0x00cd,\n0x00d7:0x00ce,\n0x00d8:0x00cf,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x00a6,\n0x00de:0x00cc,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x00d4,\n0x00e3:0x00d2,\n0x00e4:0x00f5,\n0x00e5:0x00d5,\n0x00e6:0x00b5,\n0x00e7:0x00fe,\n0x00e8:0x00de,\n0x00e9:0x00da,\n0x00ea:0x00db,\n0x00eb:0x00d9,\n0x00ec:0x00fd,\n0x00ed:0x00dd,\n0x00ee:0x00af,\n0x00ef:0x00b4,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:0x2017,\n0x00f3:0x00be,\n0x00f4:0x00b6,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x00b8,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x00b7,\n0x00fb:0x00b9,\n0x0
0fc:0x00b3,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\xec'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xfb'\n'\\xf9'\n'\\xff'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\xd7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\xae'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\xc1'\n'\\xc2'\n'\\xc0'\n'\\xa9'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\xa2'\n'\\xa5'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\xe3'\n'\\xc3'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\xf0'\n'\\xd0'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\u20ac'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\xa6'\n'\\xcc'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\xd4'\n'\\xd2'\n'\\xf5'\n'\\xd5'\n'\\xb5'\n'\\xfe'\n'\\xde'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\xfd'\n'\\xdd'\n'\\xaf'\n'\\xb4'\n'\\xad'\n'\\xb1'\n'\\u2017'\n'\\xbe'\n'\\xb6'\n'\\xa7'\n'\\xf7'\n'\\xb8'\n'\\xb0'\n'\\xa8'\n'\\xb7'\n'\\xb9'\n'\\xb3'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a
,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x00bd,\n0x00a3:0x009c,\n0x00a4:0x00cf,\n0x00a5:0x00be,\n0x00a6:0x00dd,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00a9:0x00b8,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00ae:0x00a9,\n0x00af:0x00ee,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00fc,\n0x00b4:0x00ef,\n0x00b5:0x00e6,\n0x00b6:0x00f4,\n0x00b7:0x00fa,\n0x00b8:0x00f7,\n0x00b9:0x00fb,\n0x00ba:0x00a7,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00f3,\n0x00bf:0x00a8,\n0x00c0:0x00b7,\n0x00c1:0x00b5,\n0x00c2:0x00b6,\n0x00c3:0x00c7,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c8:0x00d4,\n0x00c9:0x0090,\n0x00ca:0x00d2,\n0x00cb:0x00d3,\n0x00cc:0x00de,\n0x00cd:0x00d6,\n0x00ce:0x00d7,\n0x00cf:0x00d8,\n0x00d0:0x00d1,\n0x00d1:0x00a5,\n0x00d2:0x00e3,\n0x00d3:0x00e0,\n0x00d4:0x00e2,\n0x00d5:0x00e5,\n0x00d6:0x0099,\n0x00d7:0x009e,\n0x00d8:0x009d,\n0x00d9:0x00eb,\n0x00da:0x00e9,\n0x00db:0x00ea,\n0x00dc:0x009a,\n0x00dd:0x00ed,\n0x00de:0x00e8,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e3:0x00c6,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ec:0x008d,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f0:0x00d0,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f5:0x00e4,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00fd:0x00ec,\n0x00fe:0x00e7,\n0x00ff:0x0098,\n0x20ac:0x00d5,\n0x0192:0x009f,\n0x2017:0x00f2,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.iso8859_7": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-7',\n encode=Codec().encode,\n decode=Codec().decode,\n 
incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u2018'\n'\\u2019'\n'\\xa3'\n'\\u20ac'\n'\\u20af'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\u037a'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\ufffe'\n'\\u2015'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\u0384'\n'\\u0385'\n'\\u0386'\n'\\xb7'\n'\\u0388'\n'\\u0389'\n'\\u038a'\n'\\xbb'\n'\\u038c'\n'\\xbd'\n'\\u038e'\n'\\u038f'\n'\\u0390'\n'\\u0391'\n'\\u0392'\n'\\u0393'\n'\\u0394'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\u0398'\n'\\u0399'\n'\\u039a'\n'\\u039b'\n'\\u039c'\n'\\u039d'\n'\\u039e'\n'\\u039f'\n'\\u03a0'\n'\\u03a1'\n'\\ufffe'\n'\\u03a3'\n'\\u03a4'\n'\\u03a5'\n'\\u03a6'\n'\\u03a7'\n'\\u03a8'\n'\\u03a9'\n'\\u03aa'\n'\\u03ab'\n'\\u03ac'\n'\\u03ad'\n'\\u03ae'\n'\\u03af'\n'\\u03b0'\n'\\u03b1'\n'\\u03b2'\n'\\u03b3'\n'\\u03b4'\n'\\u03b5'\n'\\u03b6'\n'\\u03b7'\n'\\u03b8'\n'\\u03b9'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03be'\n'\\u03bf'\n'\\u03c0'\n'\\u03c1'\n'\\u03c2'\n'\\u03c3'\n'\\u03c4'\n'\\u03c5'\n'\\u03c6'\n'\\u03c7'\n'\\u03c8'\n'\\u03c9'\n'\\u03ca'\n'\\u03cb'\n'\\u03cc'\n'\\u03cd'\n'\\u03ce'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_11": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-11',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0e01'\n'\\u0e02'\n'\\u0e03'\n'\\u0e04'\n'\\u0e05'\n'\\u0e06'\n'\\u0e07'\n'\\u0e08'\n'\\u0e09'\n'\\u0e0a'\n'\\u0e0b'\n'\\u0e0c'\n'\\u0e0d'\n'\\u0e0e'\n'\\u0e0f'\n'\\u0e10'\n'\\u0e11'\n'\\u0e12'\n'\\u0e13'\n'\\u0e14'\n'\\u0e15'\n'\\u0e16'\n'\\u0e17'\n'\\u0e18'\n'\\u0e19'\n'\\u0e1a'\n'\\u0e1b'\n'\\u0e1c'\n'\\u0e1d'\n'\\u0e1e'\n'\\u0e1f'\n'\\u0e20'\n'\\u0e21'\n'\\u0e22'\n'\\u0e23'\n'\\u0e24'\n'\\u0e25'\n'\\u0e26'\n'\\u0e27'\n'\\u0e28'\n'\\u0e29'\n'\\u0e2a'\n'\\u0e2b'\n'\\u0e2c'\n'\\u0e2d'\n'\\u0e2e'\n'\\u0e2f'\n'\\u0e30'\n'\\u0e31'\n'\\u0e32'\n'\\u0e33'\n'\\u0e34'\n'\\u0e35'\n'\\u0e36'\n'\\u0e37'\n'\\u0e38'\n'\\u0e39'\n'\\u0e3a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0e3f'\n'\\u0e40'\n'\\u0e41'\n'\\u0e42'\n'\\u0e43'\n'\\u0e44'\n'\\u0e45'\n'\\u0e46'\n'\\u0e47'\n'\\u0e48'\n'\\u0e49'\n'\\u0e4a'\n'\\u0e4b'\n'\\u0e4c'\n'\\u0e4d'\n'\\u0e4e'\n'\\u0e4f'\n'\\u0e50'\n'\\u0e51'\n'\\u0e52'\n'\\u0e53'\n'\\u0e54'\n'\\u0e55'\n'\\u0e56'\n'\\u0e57'\n'\\u0e58'\n'\\u0e59'\n'\\u0e5a'\n'\\u0e5b'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.hp_roman8": [".py", "''\n\n\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='hp-roman8',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\xc0'\n'\\xc2'\n'\\xc8'\n'\\xca'\n'\\xcb'\n'\\xce'\n'\\xcf'\n'\\xb4'\n'\\u02cb'\n'\\u02c6'\n'\\xa8'\n'\\u02dc'\n'\\xd9'\n'\\xdb'\n'\\u20a4'\n'\\xaf'\n'\\xdd'\n'\\xfd'\n'\\xb0'\n'\\xc7'\n'\\xe7'\n'\\xd1'\n'\\xf1'\n'\\xa1'\n'\\xbf'\n'\\xa4'\n'\\xa3'\n'\\xa5'\n'\\xa7'\n'\\u0192'\n'\\xa2'\n'\\xe2'\n'\\xea'\n'\\xf4'\n'\\xfb'\n'\\xe1'\n'\\xe9'\n'\\xf3'\n'\\xfa'\n'\\xe0'\n'\\xe8'\n'\\xf2'\n'\\xf9'\n'\\xe4'\n'\\xeb'\n'\\xf6'\n'\\xfc'\n'\\xc5'\n'\\xee'\n'\\xd8'\n'\\xc6'\n'\\xe5'\n'\\xed'\n'\\xf8'\n'\\xe6'\n'\\xc4'\n'\\xec'\n'\\xd6'\n'\\xdc'\n'\\xc9'\n'\\xef'\n'\\xdf'\n'\\xd4'\n'\\xc1'\n'\\xc3'\n'\\xe3'\n'\\xd0'\n'\\xf0'\n'\\xcd'\n'\\xcc'\n'\\xd3'\n'\\xd2'\n'\\xd5'\n'\\xf5'\n'\\u0160'\n'\\u0161'\n'\\xda'\n'\\u0178'\n'\\xff'\n'\\xde'\n'\\xfe'\n'\\xb7'\n'\\xb5'\n'\\xb6'\n'\\xbe'\n'\\u2014'\n'\\xbc'\n'\\xbd'\n'\\xaa'\n'\\xba'\n'\\xab'\n'\\u25a0'\n'\\xbb'\n'\\xb1'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.koi8_r": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='koi8-r',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u2500'\n'\\u2502'\n'\\u250c'\n'\\u2510'\n'\\u2514'\n'\\u2518'\n'\\u251c'\n'\\u2524'\n'\\u252c'\n'\\u2534'\n'\\u253c'\n'\\u2580'\n'\\u2584'\n'\\u2588'\n'\\u258c'\n'\\u2590'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2320'\n'\\u25a0'\n'\\u2219'\n'\\u221a'\n'\\u2248'\n'\\u2264'\n'\\u2265'\n'\\xa0'\n'\\u2321'\n'\\xb0'\n'\\xb2'\n'\\xb7'\n'\\xf7'\n'\\u2550'\n'\\u2551'\n'\\u2552'\n'\\u0451'\n'\\u2553'\n'\\u2554'\n'\\u2555'\n'\\u2556'\n'\\u2557'\n'\\u2558'\n'\\u2559'\n'\\u255a'\n'\\u255b'\n'\\u255c'\n'\\u255d'\n'\\u255e'\n'\\u255f'\n'\\u2560'\n'\\u2561'\n'\\u0401'\n'\\u2562'\n'\\u2563'\n'\\u2564'\n'\\u2565'\n'\\u2566'\n'\\u2567'\n'\\u2568'\n'\\u2569'\n'\\u256a'\n'\\u256b'\n'\\u256c'\n'\\xa9'\n'\\u044e'\n'\\u0430'\n'\\u0431'\n'\\u0446'\n'\\u0434'\n'\\u0435'\n'\\u0444'\n'\\u0433'\n'\\u0445'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u044f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0436'\n'\\u0432'\n'\\u044c'\n'\\u044b'\n'\\u0437'\n'\\u0448'\n'\\u044d'\n'\\u0449'\n'\\u0447'\n'\\u044a'\n'\\u042e'\n'\\u0410'\n'\\u0411'\n'\\u0426'\n'\\u0414'\n'\\u0415'\n'\\u0424'\n'\\u0413'\n'\\u0425'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u042f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0416'\n'\\u0412'\n'\\u042c'\n'\\u042b'\n'\\u0417'\n'\\u0428'\n'\\u042d'\n'\\u0429'\n'\\u0427'\n'\\u042a'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.zlib_codec": [".py", "''\n\n\n\n\n\n\nimport codecs\nimport zlib\n\n\n\ndef zlib_encode(input,errors='strict'):\n assert errors =='strict'\n return (zlib.compress(input),len(input))\n \ndef zlib_decode(input,errors='strict'):\n assert errors =='strict'\n return (zlib.decompress(input),len(input))\n \nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return zlib_encode(input,errors)\n def decode(self,input,errors='strict'):\n return zlib_decode(input,errors)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def __init__(self,errors='strict'):\n assert errors =='strict'\n self.errors=errors\n self.compressobj=zlib.compressobj()\n \n def encode(self,input,final=False ):\n if final:\n c=self.compressobj.compress(input)\n return c+self.compressobj.flush()\n else :\n return self.compressobj.compress(input)\n \n def reset(self):\n self.compressobj=zlib.compressobj()\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def __init__(self,errors='strict'):\n assert errors =='strict'\n self.errors=errors\n self.decompressobj=zlib.decompressobj()\n \n def decode(self,input,final=False ):\n if final:\n c=self.decompressobj.decompress(input)\n return c+self.decompressobj.flush()\n else :\n return self.decompressobj.decompress(input)\n \n def reset(self):\n self.decompressobj=zlib.decompressobj()\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n charbuffertype=bytes\n \nclass StreamReader(Codec,codecs.StreamReader):\n charbuffertype=bytes\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='zlib',\n encode=zlib_encode,\n 
decode=zlib_decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n _is_text_encoding=False ,\n )\n", ["codecs", "zlib"]], "encodings.gbk": [".py", "\n\n\n\n\n\nimport _codecs_cn,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_cn.getcodec('gbk')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='gbk',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_cn", "_multibytecodec", "codecs"]], "encodings.johab": [".py", "\n\n\n\n\n\nimport _codecs_kr,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_kr.getcodec('johab')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='johab',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_kr", "_multibytecodec", "codecs"]], "encodings.cp1253": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1253',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\ufffe'\n'\\u2030'\n'\\ufffe'\n'\\u2039'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\ufffe'\n'\\u203a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa0'\n'\\u0385'\n'\\u0386'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\ufffe'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\u2015'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\u0384'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\u0388'\n'\\u0389'\n'\\u038a'\n'\\xbb'\n'\\u038c'\n'\\xbd'\n'\\u038e'\n'\\u038f'\n'\\u0390'\n'\\u0391'\n'\\u0392'\n'\\u0393'\n'\\u0394'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\u0398'\n'\\u0399'\n'\\u039a'\n'\\u039b'\n'\\u039c'\n'\\u039d'\n'\\u039e'\n'\\u039f'\n'\\u03a0'\n'\\u03a1'\n'\\ufffe'\n'\\u03a3'\n'\\u03a4'\n'\\u03a5'\n'\\u03a6'\n'\\u03a7'\n'\\u03a8'\n'\\u03a9'\n'\\u03aa'\n'\\u03ab'\n'\\u03ac'\n'\\u03ad'\n'\\u03ae'\n'\\u03af'\n'\\u03b0'\n'\\u03b1'\n'\\u03b2'\n'\\u03b3'\n'\\u03b4'\n'\\u03b5'\n'\\u03b6'\n'\\u03b7'\n'\\u03b8'\n'\\u03b9'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03be'\n'\\u03bf'\n'\\u03c0'\n'\\u03c1'\n'\\u03c2'\n'\\u03c3'\n'\\u03c4'\n'\\u03c5'\n'\\u03c6'\n'\\u03c7'\n'\\u03c8'\n'\\u03c9'\n'\\u03ca'\n'\\u03cb'\n'\\u03cc'\n'\\u03cd'\n'\\u03ce'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_15": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-15',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\u20ac'\n'\\xa5'\n'\\u0160'\n'\\xa7'\n'\\u0161'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\u017d'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\u017e'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\u0152'\n'\\u0153'\n'\\u0178'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xd0'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\xde'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xf0'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\xfe'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso2022_jp_2004": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_jp_2004')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_jp_2004',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "encodings.mac_iceland": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-iceland',\n encode=Codec().encode,\n 
decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\xdd'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\xc6'\n'\\xd8'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u03c0'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\xe6'\n'\\xf8'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\xff'\n'\\u0178'\n'\\u2044'\n'\\u20ac'\n'\\xd0'\n'\\xf0'\n'\\xde'\n'\\xfe'\n'\\xfd'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\xca'\n'\\xc1'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\uf8ff'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\u0131'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u02d8'\n'\\u02d9'\n'\\u02da'\n'\\xb8'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_3": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-3',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0126'\n'\\u02d8'\n'\\xa3'\n'\\xa4'\n'\\ufffe'\n'\\u0124'\n'\\xa7'\n'\\xa8'\n'\\u0130'\n'\\u015e'\n'\\u011e'\n'\\u0134'\n'\\xad'\n'\\ufffe'\n'\\u017b'\n'\\xb0'\n'\\u0127'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\u0125'\n'\\xb7'\n'\\xb8'\n'\\u0131'\n'\\u015f'\n'\\u011f'\n'\\u0135'\n'\\xbd'\n'\\ufffe'\n'\\u017c'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\ufffe'\n'\\xc4'\n'\\u010a'\n'\\u0108'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\ufffe'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\u0120'\n'\\xd6'\n'\\xd7'\n'\\u011c'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u016c'\n'\\u015c'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\ufffe'\n'\\xe4'\n'\\u010b'\n'\\u0109'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\ufffe'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\u0121'\n'\\xf6'\n'\\xf7'\n'\\u011d'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u016d'\n'\\u015d'\n'\\u02d9'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.mac_greek": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-greek',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xb9'\n'\\xb2'\n'\\xc9'\n'\\xb3'\n'\\xd6'\n'\\xdc'\n'\\u0385'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\u0384'\n'\\xa8'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xa3'\n'\\u2122'\n'\\xee'\n'\\xef'\n'\\u2022'\n'\\xbd'\n'\\u2030'\n'\\xf4'\n'\\xf6'\n'\\xa6'\n'\\u20ac'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\u0393'\n'\\u0394'\n'\\u0398'\n'\\u039b'\n'\\u039e'\n'\\u03a0'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u03a3'\n'\\u03aa'\n'\\xa7'\n'\\u2260'\n'\\xb0'\n'\\xb7'\n'\\u0391'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\u0392'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\u0399'\n'\\u039a'\n'\\u039c'\n'\\u03a6'\n'\\u03ab'\n'\\u03a8'\n'\\u03a9'\n'\\u03ac'\n'\\u039d'\n'\\xac'\n'\\u039f'\n'\\u03a1'\n'\\u2248'\n'\\u03a4'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\u03a5'\n'\\u03a7'\n'\\u0386'\n'\\u0388'\n'\\u0153'\n'\\u2013'\n'\\u2015'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u0389'\n'\\u038a'\n'\\u038c'\n'\\u038e'\n'\\u03ad'\n'\\u03ae'\n'\\u03af'\n'\\u03cc'\n'\\u038f'\n'\\u03cd'\n'\\u03b1'\n'\\u03b2'\n'\\u03c8'\n'\\u03b4'\n'\\u03b5'\n'\\u03c6'\n'\\u03b3'\n'\\u03b7'\n'\\u03b9'\n'\\u03be'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03bf'\n'\\u03c0'\n'\\u03ce'\n'\\u03c1'\n'\\u03c3'\n'\\u03c4'\n'\\u03b8'\n'\\u03c9'\n'\\u03c2'\n'\\u03c7'\n'\\u03c5'\n'\\u03b6'\n'\\u03ca'\n'\\u03cb'\n'\\u0390'\n'\\u03b0'\n'\\xad'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.rot_13": [".py", "#!/usr/bin/env python\n''\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return (str.translate(input,rot13_map),len(input))\n \n def decode(self,input,errors='strict'):\n return (str.translate(input,rot13_map),len(input))\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return str.translate(input,rot13_map)\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return str.translate(input,rot13_map)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='rot-13',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n _is_text_encoding=False ,\n )\n \n \n 
\nrot13_map=codecs.make_identity_dict(range(256))\nrot13_map.update({\n0x0041:0x004e,\n0x0042:0x004f,\n0x0043:0x0050,\n0x0044:0x0051,\n0x0045:0x0052,\n0x0046:0x0053,\n0x0047:0x0054,\n0x0048:0x0055,\n0x0049:0x0056,\n0x004a:0x0057,\n0x004b:0x0058,\n0x004c:0x0059,\n0x004d:0x005a,\n0x004e:0x0041,\n0x004f:0x0042,\n0x0050:0x0043,\n0x0051:0x0044,\n0x0052:0x0045,\n0x0053:0x0046,\n0x0054:0x0047,\n0x0055:0x0048,\n0x0056:0x0049,\n0x0057:0x004a,\n0x0058:0x004b,\n0x0059:0x004c,\n0x005a:0x004d,\n0x0061:0x006e,\n0x0062:0x006f,\n0x0063:0x0070,\n0x0064:0x0071,\n0x0065:0x0072,\n0x0066:0x0073,\n0x0067:0x0074,\n0x0068:0x0075,\n0x0069:0x0076,\n0x006a:0x0077,\n0x006b:0x0078,\n0x006c:0x0079,\n0x006d:0x007a,\n0x006e:0x0061,\n0x006f:0x0062,\n0x0070:0x0063,\n0x0071:0x0064,\n0x0072:0x0065,\n0x0073:0x0066,\n0x0074:0x0067,\n0x0075:0x0068,\n0x0076:0x0069,\n0x0077:0x006a,\n0x0078:0x006b,\n0x0079:0x006c,\n0x007a:0x006d,\n})\n\n\n\ndef rot13(infile,outfile):\n outfile.write(codecs.encode(infile.read(),'rot-13'))\n \nif __name__ =='__main__':\n import sys\n rot13(sys.stdin,sys.stdout)\n", ["codecs", "sys"]], "encodings.utf_16_be": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nencode=codecs.utf_16_be_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_16_be_decode(input,errors,True )\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.utf_16_be_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=codecs.utf_16_be_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=codecs.utf_16_be_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=codecs.utf_16_be_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-16-be',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.euc_kr": [".py", "\n\n\n\n\n\nimport _codecs_kr,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_kr.getcodec('euc_kr')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='euc_kr',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_kr", "_multibytecodec", "codecs"]], "encodings.mac_centeuro": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass 
StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-centeuro',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\u0100'\n'\\u0101'\n'\\xc9'\n'\\u0104'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\u0105'\n'\\u010c'\n'\\xe4'\n'\\u010d'\n'\\u0106'\n'\\u0107'\n'\\xe9'\n'\\u0179'\n'\\u017a'\n'\\u010e'\n'\\xed'\n'\\u010f'\n'\\u0112'\n'\\u0113'\n'\\u0116'\n'\\xf3'\n'\\u0117'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\u011a'\n'\\u011b'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\u0118'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\u0119'\n'\\xa8'\n'\\u2260'\n'\\u0123'\n'\\u012e'\n'\\u012f'\n'\\u012a'\n'\\u2264'\n'\\u2265'\n'\\u012b'\n'\\u0136'\n'\\u2202'\n'\\u2211'\n'\\u0142'\n'\\u013b'\n'\\u013c'\n'\\u013d'\n'\\u013e'\n'\\u0139'\n'\\u013a'\n'\\u0145'\n'\\u0146'\n'\\u0143'\n'\\xac'\n'\\u221a'\n'\\u0144'\n'\\u0147'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\u0148'\n'\\u0150'\n'\\xd5'\n'\\u0151'\n'\\u014c'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\u014d'\n'\\u0154'\n'\\u0155'\n'\\u0158'\n'\\u2039'\n'\\u203a'\n'\\u0159'\n'\\u0156'\n'\\u0157'\n'\\u0160'\n'\\u201a'\n'\\u201e'\n'\\u0161'\n'\\u015a'\n'\\u015b'\n'\\xc1'\n'\\u0164'\n'\\u0165'\n'\\xcd'\n'\\u017d'\n'\\u017e'\n'\\u016a'\n'\\xd3'\n'\\xd4'\n'\\u016b'\n'\\u016e'\n'\\xda'\n'\\u016f'\n'\\u0170'\n'\\u0171'\n'\\u0172'\n'\\u0173'\n'\\xdd'\n'\\xfd'\n'\\u0137'\n'\\u017b'\n'\\u0141'\n'\\u017c'\n'\\u0122'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.euc_jisx0213": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('euc_jisx0213')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='euc_jisx0213',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.cp863": [".py", 
"''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp863',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00c2,\n0x0085:0x00e0,\n0x0086:0x00b6,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x2017,\n0x008e:0x00c0,\n0x008f:0x00a7,\n0x0090:0x00c9,\n0x0091:0x00c8,\n0x0092:0x00ca,\n0x0093:0x00f4,\n0x0094:0x00cb,\n0x0095:0x00cf,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x00a4,\n0x0099:0x00d4,\n0x009a:0x00dc,\n0x009b:0x00a2,\n0x009c:0x00a3,\n0x009d:0x00d9,\n0x009e:0x00db,\n0x009f:0x0192,\n0x00a0:0x00a6,\n0x00a1:0x00b4,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00a8,\n0x00a5:0x00b8,\n0x00a6:0x00b3,\n0x00a7:0x00af,\n0x00a8:0x00ce,\n0x00a9:0x2310,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00be,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xc2'\n'\\xe0'\n'\\xb6'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\u2017'\n'\\xc0'\n'\\xa7'\n'\\xc9'\n'\\xc8'\n'\\xca'\n'\\xf4'\n'\\xcb'\n'\\xcf'\n'\\xfb'\n'\\xf9'\n'\\xa4'\n'\\xd4'\n'\\xdc'\n'\\xa2'\n'\\xa3'\n'\\xd9'\n'\\xdb'\n'\\u0192'\n'\\xa6'\n'\\xb4'\n'\\xf3'\n'\\xfa'\n'\\xa8'\n'\\xb8'\n'\\xb3'\n'\\xaf'\n'\\xce'\n'\\u2310'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xbe'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006
c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a2:0x009b,\n0x00a3:0x009c,\n0x00a4:0x0098,\n0x00a6:0x00a0,\n0x00a7:0x008f,\n0x00a8:0x00a4,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00af:0x00a7,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00a6,\n0x00b4:0x00a1,\n0x00b5:0x00e6,\n0x00b6:0x0086,\n0x00b7:0x00fa,\n0x00b8:0x00a5,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00ad,\n0x00c0:0x008e,\n0x00c2:0x0084,\n0x00c7:0x0080,\n0x00c8:0x0091,\n0x00c9:0x0090,\n0x00ca:0x0092,\n0x00cb:0x0094,\n0x00ce:0x00a8,\n0x00cf:0x0095,\n0x00d4:0x0099,\n0x00d9:0x009d,\n0x00db:0x009e,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e2:0x0083,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f7:0x00f6,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x0192:0x009f,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x2017:0x008d,\n0x207f:0x00fc,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2310:0x00a9,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.ascii": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n\n\n encode=codecs.ascii_encode\n decode=codecs.ascii_decode\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.ascii_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.ascii_decode(input,self.errors)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \nclass StreamConverter(StreamWriter,StreamReader):\n\n encode=codecs.ascii_decode\n decode=codecs.ascii_encode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='ascii',\n encode=Codec.encode,\n decode=Codec.decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs"]], "encodings.iso8859_8": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def 
decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-8',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\ufffe'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xd7'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xf7'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2017'\n'\\u05d0'\n'\\u05d1'\n'\\u05d2'\n'\\u05d3'\n'\\u05d4'\n'\\u05d5'\n'\\u05d6'\n'\\u05d7'\n'\\u05d8'\n'\\u05d9'\n'\\u05da'\n'\\u05db'\n'\\u05dc'\n'\\u05dd'\n'\\u05de'\n'\\u05df'\n'\\u05e0'\n'\\u05e1'\n'\\u05e2'\n'\\u05e3'\n'\\u05e4'\n'\\u05e5'\n'\\u05e6'\n'\\u05e7'\n'\\u05e8'\n'\\u05e9'\n'\\u05ea'\n'\\ufffe'\n'\\ufffe'\n'\\u200e'\n'\\u200f'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp857": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return 
codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp857',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x0131,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00f2,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x0130,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x015e,\n0x009f:0x015f,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x011e,\n0x00a7:0x011f,\n0x00a8:0x00bf,\n0x00a9:0x00ae,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x00c1,\n0x00b6:0x00c2,\n0x00b7:0x00c0,\n0x00b8:0x00a9,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x00a2,\n0x00be:0x00a5,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x00e3,\n0x00c7:0x00c3,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x00a4,\n0x00d0:0x00ba,\n0x00d1:0x00aa,\n0x00d2:0x00ca,\n0x00d3:0x00cb,\n0x00d4:0x00c8,\n0x00d5:None ,\n0x00d6:0x00cd,\n0x00d7:0x00ce,\n0x00d8:0x00cf,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x00a6,\n0x00de:0x00cc,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x00d4,\n0x00e3:0x00d2,\n0x00e4:0x00f5,\n0x00e5:0x00d5,\n0x00e6:0x00b5,\n0x00e7:None ,\n0x00e8:0x00d7,\n0x00e9:0x00da,\n0x00ea:0x00db,\n0x00eb:0x00d9,\n0x00ed:0x00ff,\n0x00ee:0x00af,\n0x00ef:0x00b4,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:None ,\n0x00f3:0x00be,\n0x00f4:0x00b6,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x00b8,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x00b7,\n0x00fb:0x00b9,\n0x00fc:0x00b3,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\u0131'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xfb'\n'\\xf9'\n'\\u0130'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\u015e'\n'\\u015f'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\u011e'\n'\\u011f'\n'\\xbf'\n'\\xae'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\xc1'\n'\\xc2'\n'\\xc0'\n'\\xa9'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\xa2'\n'\\xa5'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\xe3'\n'\\xc3'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\xba'\n'\\xaa'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\ufffe'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\xa6'\n'\\xcc'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\xd4'\n'\\xd2'\n'\\xf5'\n'\\xd5'\n'\\xb5'\n'\\ufffe'\n'\\xd7'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\xec'\n'\\xff'\n'\\xaf'\n'\\xb4'\n'\\xad'\n'\\xb1'\n'\\ufffe'\n'\\xbe'\n'\\xb6'\n'\\xa7'\n'\\xf7'\n'\\xb8'\n'\\xb0'\n'\\xa8'\n'\\xb7'\n'\\xb9'\n'\\xb3'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071
:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x00bd,\n0x00a3:0x009c,\n0x00a4:0x00cf,\n0x00a5:0x00be,\n0x00a6:0x00dd,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00a9:0x00b8,\n0x00aa:0x00d1,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00ae:0x00a9,\n0x00af:0x00ee,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00fc,\n0x00b4:0x00ef,\n0x00b5:0x00e6,\n0x00b6:0x00f4,\n0x00b7:0x00fa,\n0x00b8:0x00f7,\n0x00b9:0x00fb,\n0x00ba:0x00d0,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00f3,\n0x00bf:0x00a8,\n0x00c0:0x00b7,\n0x00c1:0x00b5,\n0x00c2:0x00b6,\n0x00c3:0x00c7,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c8:0x00d4,\n0x00c9:0x0090,\n0x00ca:0x00d2,\n0x00cb:0x00d3,\n0x00cc:0x00de,\n0x00cd:0x00d6,\n0x00ce:0x00d7,\n0x00cf:0x00d8,\n0x00d1:0x00a5,\n0x00d2:0x00e3,\n0x00d3:0x00e0,\n0x00d4:0x00e2,\n0x00d5:0x00e5,\n0x00d6:0x0099,\n0x00d7:0x00e8,\n0x00d8:0x009d,\n0x00d9:0x00eb,\n0x00da:0x00e9,\n0x00db:0x00ea,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e3:0x00c6,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ec:0x00ec,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f5:0x00e4,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00ff:0x00ed,\n0x011e:0x00a6,\n0x011f:0x00a7,\n0x0130:0x0098,\n0x0131:0x008d,\n0x015e:0x009e,\n0x015f:0x009f,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.utf_32_be": [".py", "''\n\n\nimport codecs\n\n\n\nencode=codecs.utf_32_be_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_32_be_decode(input,errors,True )\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.utf_32_be_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=codecs.utf_32_be_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=codecs.utf_32_be_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=codecs.utf_32_be_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-32-be',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.cp1258": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return 
codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1258',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\ufffe'\n'\\u2039'\n'\\u0152'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\ufffe'\n'\\u203a'\n'\\u0153'\n'\\ufffe'\n'\\ufffe'\n'\\u0178'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\u0102'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\u0300'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u0110'\n'\\xd1'\n'\\u0309'\n'\\xd3'\n'\\xd4'\n'\\u01a0'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u01af'\n'\\u0303'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\u0103'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\u0301'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\u0111'\n'\\xf1'\n'\\u0323'\n'\\xf3'\n'\\xf4'\n'\\u01a1'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u01b0'\n'\\u20ab'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.oem": [".py", "''\n\n\n\n\nfrom codecs import oem_encode,oem_decode\n\nimport codecs\n\n\n\nencode=oem_encode\n\ndef decode(input,errors='strict'):\n return oem_decode(input,errors,True )\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return oem_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=oem_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=oem_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=oem_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='oem',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], 
"encodings.mac_latin2": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-latin2',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\u0100'\n'\\u0101'\n'\\xc9'\n'\\u0104'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\u0105'\n'\\u010c'\n'\\xe4'\n'\\u010d'\n'\\u0106'\n'\\u0107'\n'\\xe9'\n'\\u0179'\n'\\u017a'\n'\\u010e'\n'\\xed'\n'\\u010f'\n'\\u0112'\n'\\u0113'\n'\\u0116'\n'\\xf3'\n'\\u0117'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\u011a'\n'\\u011b'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\u0118'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\u0119'\n'\\xa8'\n'\\u2260'\n'\\u0123'\n'\\u012e'\n'\\u012f'\n'\\u012a'\n'\\u2264'\n'\\u2265'\n'\\u012b'\n'\\u0136'\n'\\u2202'\n'\\u2211'\n'\\u0142'\n'\\u013b'\n'\\u013c'\n'\\u013d'\n'\\u013e'\n'\\u0139'\n'\\u013a'\n'\\u0145'\n'\\u0146'\n'\\u0143'\n'\\xac'\n'\\u221a'\n'\\u0144'\n'\\u0147'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\u0148'\n'\\u0150'\n'\\xd5'\n'\\u0151'\n'\\u014c'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\u014d'\n'\\u0154'\n'\\u0155'\n'\\u0158'\n'\\u2039'\n'\\u203a'\n'\\u0159'\n'\\u0156'\n'\\u0157'\n'\\u0160'\n'\\u201a'\n'\\u201e'\n'\\u0161'\n'\\u015a'\n'\\u015b'\n'\\xc1'\n'\\u0164'\n'\\u0165'\n'\\xcd'\n'\\u017d'\n'\\u017e'\n'\\u016a'\n'\\xd3'\n'\\xd4'\n'\\u016b'\n'\\u016e'\n'\\xda'\n'\\u016f'\n'\\u0170'\n'\\u0171'\n'\\u0172'\n'\\u0173'\n'\\xdd'\n'\\xfd'\n'\\u0137'\n'\\u017b'\n'\\u0141'\n'\\u017c'\n'\\u0122'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp775": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass 
IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp775',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0106,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x0101,\n0x0084:0x00e4,\n0x0085:0x0123,\n0x0086:0x00e5,\n0x0087:0x0107,\n0x0088:0x0142,\n0x0089:0x0113,\n0x008a:0x0156,\n0x008b:0x0157,\n0x008c:0x012b,\n0x008d:0x0179,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x014d,\n0x0094:0x00f6,\n0x0095:0x0122,\n0x0096:0x00a2,\n0x0097:0x015a,\n0x0098:0x015b,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x00d7,\n0x009f:0x00a4,\n0x00a0:0x0100,\n0x00a1:0x012a,\n0x00a2:0x00f3,\n0x00a3:0x017b,\n0x00a4:0x017c,\n0x00a5:0x017a,\n0x00a6:0x201d,\n0x00a7:0x00a6,\n0x00a8:0x00a9,\n0x00a9:0x00ae,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x0141,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x0104,\n0x00b6:0x010c,\n0x00b7:0x0118,\n0x00b8:0x0116,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x012e,\n0x00be:0x0160,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x0172,\n0x00c7:0x016a,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x017d,\n0x00d0:0x0105,\n0x00d1:0x010d,\n0x00d2:0x0119,\n0x00d3:0x0117,\n0x00d4:0x012f,\n0x00d5:0x0161,\n0x00d6:0x0173,\n0x00d7:0x016b,\n0x00d8:0x017e,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x014c,\n0x00e3:0x0143,\n0x00e4:0x00f5,\n0x00e5:0x00d5,\n0x00e6:0x00b5,\n0x00e7:0x0144,\n0x00e8:0x0136,\n0x00e9:0x0137,\n0x00ea:0x013b,\n0x00eb:0x013c,\n0x00ec:0x0146,\n0x00ed:0x0112,\n0x00ee:0x0145,\n0x00ef:0x2019,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:0x201c,\n0x00f3:0x00be,\n0x00f4:0x00b6,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x201e,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x00b9,\n0x00fc:0x00b3,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0106'\n'\\xfc'\n'\\xe9'\n'\\u0101'\n'\\xe4'\n'\\u0123'\n'\\xe5'\n'\\u0107'\n'\\u0142'\n'\\u0113'\n'\\u0156'\n'\\u0157'\n'\\u012b'\n'\\u0179'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\u014d'\n'\\xf6'\n'\\u0122'\n'\\xa2'\n'\\u015a'\n'\\u015b'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\xd7'\n'\\xa4'\n'\\u0100'\n'\\u012a'\n'\\xf3'\n'\\u017b'\n'\\u017c'\n'\\u017a'\n'\\u201d'\n'\\xa6'\n'\\xa9'\n'\\xae'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\u0141'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u0104'\n'\\u010c'\n'\\u0118'\n'\\u0116'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u012e'\n'\\u0160'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u0172'\n'\\u016a'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u017d'\n'\\u0105'\n'\\u010d'\n'\\u0119'\n'\\u0117'\n'\\u012f'\n'\\u0161'\n'\\u0173'\n'\\u016b'\n'\\u017e'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\u014c'\n'\\u0143'\n'\\xf5'\n'\\xd5'\n'\\xb5'\n'\\u0144'\n'\\u0136'\n'\\u0137'\n'\\u013b'\n'\\u013c'\n'\\u0146'\n'\\u0112'\n'\\u0145'\n'\\u2019'\n'\\xad'\n'\\xb1'\n'\\u201c'\n'\\xbe'\n'\\xb6'\n'\\xa7'\n'\\xf7'\n'\\u201e'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\xb9'\n'\\xb3'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x0
06b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a2:0x0096,\n0x00a3:0x009c,\n0x00a4:0x009f,\n0x00a6:0x00a7,\n0x00a7:0x00f5,\n0x00a9:0x00a8,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00ae:0x00a9,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00fc,\n0x00b5:0x00e6,\n0x00b6:0x00f4,\n0x00b7:0x00fa,\n0x00b9:0x00fb,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00f3,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c9:0x0090,\n0x00d3:0x00e0,\n0x00d5:0x00e5,\n0x00d6:0x0099,\n0x00d7:0x009e,\n0x00d8:0x009d,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e9:0x0082,\n0x00f3:0x00a2,\n0x00f5:0x00e4,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00fc:0x0081,\n0x0100:0x00a0,\n0x0101:0x0083,\n0x0104:0x00b5,\n0x0105:0x00d0,\n0x0106:0x0080,\n0x0107:0x0087,\n0x010c:0x00b6,\n0x010d:0x00d1,\n0x0112:0x00ed,\n0x0113:0x0089,\n0x0116:0x00b8,\n0x0117:0x00d3,\n0x0118:0x00b7,\n0x0119:0x00d2,\n0x0122:0x0095,\n0x0123:0x0085,\n0x012a:0x00a1,\n0x012b:0x008c,\n0x012e:0x00bd,\n0x012f:0x00d4,\n0x0136:0x00e8,\n0x0137:0x00e9,\n0x013b:0x00ea,\n0x013c:0x00eb,\n0x0141:0x00ad,\n0x0142:0x0088,\n0x0143:0x00e3,\n0x0144:0x00e7,\n0x0145:0x00ee,\n0x0146:0x00ec,\n0x014c:0x00e2,\n0x014d:0x0093,\n0x0156:0x008a,\n0x0157:0x008b,\n0x015a:0x0097,\n0x015b:0x0098,\n0x0160:0x00be,\n0x0161:0x00d5,\n0x016a:0x00c7,\n0x016b:0x00d7,\n0x0172:0x00c6,\n0x0173:0x00d6,\n0x0179:0x008d,\n0x017a:0x00a5,\n0x017b:0x00a3,\n0x017c:0x00a4,\n0x017d:0x00cf,\n0x017e:0x00d8,\n0x2019:0x00ef,\n0x201c:0x00f2,\n0x201d:0x00a6,\n0x201e:0x00f7,\n0x2219:0x00f9,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.mac_roman": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-roman',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\xc6'\n'\\xd8'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u03c0'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\xe6'\n'\\xf8'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\xff'\n'\\u0178'\n'\\u2044'\n'\\u20ac'\n'\\u2039'\n'\\u203a'\n'\\ufb01'\n'\\ufb02'\n'\\u2021'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\xca'\n'\\xc1'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\uf8ff'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\u0131'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u02d8'\n'\\u02d9'\n'\\u02da'\n'\\xb8'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport codecs\nimport sys\nfrom. 
import aliases\n\n_cache={}\n_unknown='--unknown--'\n_import_tail=['*']\n_aliases=aliases.aliases\n\nclass CodecRegistryError(LookupError,SystemError):\n pass\n \ndef normalize_encoding(encoding):\n\n ''\n\n\n\n\n\n\n\n\n \n if isinstance(encoding,bytes):\n encoding=str(encoding,\"ascii\")\n \n chars=[]\n punct=False\n for c in encoding:\n if c.isalnum()or c =='.':\n if punct and chars:\n chars.append('_')\n if c.isascii():\n chars.append(c)\n punct=False\n else:\n punct=True\n return ''.join(chars)\n \ndef search_function(encoding):\n\n\n entry=_cache.get(encoding,_unknown)\n if entry is not _unknown:\n return entry\n \n \n \n \n \n \n \n \n norm_encoding=normalize_encoding(encoding)\n aliased_encoding=_aliases.get(norm_encoding)or\\\n _aliases.get(norm_encoding.replace('.','_'))\n if aliased_encoding is not None:\n modnames=[aliased_encoding,\n norm_encoding]\n else:\n modnames=[norm_encoding]\n for modname in modnames:\n if not modname or '.'in modname:\n continue\n try:\n \n \n mod=__import__('encodings.'+modname,fromlist=_import_tail,\n level=0)\n except ImportError:\n \n \n pass\n else:\n break\n else:\n mod=None\n \n try:\n getregentry=mod.getregentry\n except AttributeError:\n \n mod=None\n \n if mod is None:\n \n _cache[encoding]=None\n return None\n \n \n entry=getregentry()\n if not isinstance(entry,codecs.CodecInfo):\n if not 4 <=len(entry)<=7:\n raise CodecRegistryError('module \"%s\" (%s) failed to register'\n %(mod.__name__,mod.__file__))\n if not callable(entry[0])or not callable(entry[1])or\\\n (entry[2]is not None and not callable(entry[2]))or\\\n (entry[3]is not None and not callable(entry[3]))or\\\n (len(entry)>4 and entry[4]is not None and not callable(entry[4]))or\\\n (len(entry)>5 and entry[5]is not None and not callable(entry[5])):\n raise CodecRegistryError('incompatible codecs in module \"%s\" (%s)'\n %(mod.__name__,mod.__file__))\n if len(entry)<7 or entry[6]is None:\n entry +=(None,)*(6 -len(entry))+(mod.__name__.split(\".\",1)[1],)\n entry=codecs.CodecInfo(*entry)\n \n \n _cache[encoding]=entry\n \n \n \n try:\n codecaliases=mod.getaliases()\n except AttributeError:\n pass\n else:\n for alias in codecaliases:\n if alias not in _aliases:\n _aliases[alias]=modname\n \n \n return entry\n \n \ncodecs.register(search_function)\n\nif sys.platform =='win32':\n\n\n\n\n def _alias_mbcs(encoding):\n try:\n import _winapi\n ansi_code_page=\"cp%s\"%_winapi.GetACP()\n if encoding ==ansi_code_page:\n import encodings.mbcs\n return encodings.mbcs.getregentry()\n except ImportError:\n \n pass\n \n codecs.register(_alias_mbcs)\n", ["_winapi", "codecs", "encodings", "encodings.aliases", "encodings.mbcs", "sys"], 1], "encodings.cp852": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp852',\n encode=Codec().encode,\n decode=Codec().decode,\n 
incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x016f,\n0x0086:0x0107,\n0x0087:0x00e7,\n0x0088:0x0142,\n0x0089:0x00eb,\n0x008a:0x0150,\n0x008b:0x0151,\n0x008c:0x00ee,\n0x008d:0x0179,\n0x008e:0x00c4,\n0x008f:0x0106,\n0x0090:0x00c9,\n0x0091:0x0139,\n0x0092:0x013a,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x013d,\n0x0096:0x013e,\n0x0097:0x015a,\n0x0098:0x015b,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x0164,\n0x009c:0x0165,\n0x009d:0x0141,\n0x009e:0x00d7,\n0x009f:0x010d,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x0104,\n0x00a5:0x0105,\n0x00a6:0x017d,\n0x00a7:0x017e,\n0x00a8:0x0118,\n0x00a9:0x0119,\n0x00aa:0x00ac,\n0x00ab:0x017a,\n0x00ac:0x010c,\n0x00ad:0x015f,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x00c1,\n0x00b6:0x00c2,\n0x00b7:0x011a,\n0x00b8:0x015e,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x017b,\n0x00be:0x017c,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x0102,\n0x00c7:0x0103,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x00a4,\n0x00d0:0x0111,\n0x00d1:0x0110,\n0x00d2:0x010e,\n0x00d3:0x00cb,\n0x00d4:0x010f,\n0x00d5:0x0147,\n0x00d6:0x00cd,\n0x00d7:0x00ce,\n0x00d8:0x011b,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x0162,\n0x00de:0x016e,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x00d4,\n0x00e3:0x0143,\n0x00e4:0x0144,\n0x00e5:0x0148,\n0x00e6:0x0160,\n0x00e7:0x0161,\n0x00e8:0x0154,\n0x00e9:0x00da,\n0x00ea:0x0155,\n0x00eb:0x0170,\n0x00ec:0x00fd,\n0x00ed:0x00dd,\n0x00ee:0x0163,\n0x00ef:0x00b4,\n0x00f0:0x00ad,\n0x00f1:0x02dd,\n0x00f2:0x02db,\n0x00f3:0x02c7,\n0x00f4:0x02d8,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x00b8,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x02d9,\n0x00fb:0x0171,\n0x00fc:0x0158,\n0x00fd:0x0159,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\u016f'\n'\\u0107'\n'\\xe7'\n'\\u0142'\n'\\xeb'\n'\\u0150'\n'\\u0151'\n'\\xee'\n'\\u0179'\n'\\xc4'\n'\\u0106'\n'\\xc9'\n'\\u0139'\n'\\u013a'\n'\\xf4'\n'\\xf6'\n'\\u013d'\n'\\u013e'\n'\\u015a'\n'\\u015b'\n'\\xd6'\n'\\xdc'\n'\\u0164'\n'\\u0165'\n'\\u0141'\n'\\xd7'\n'\\u010d'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\u0104'\n'\\u0105'\n'\\u017d'\n'\\u017e'\n'\\u0118'\n'\\u0119'\n'\\xac'\n'\\u017a'\n'\\u010c'\n'\\u015f'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\xc1'\n'\\xc2'\n'\\u011a'\n'\\u015e'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u017b'\n'\\u017c'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u0102'\n'\\u0103'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\u0111'\n'\\u0110'\n'\\u010e'\n'\\xcb'\n'\\u010f'\n'\\u0147'\n'\\xcd'\n'\\xce'\n'\\u011b'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u0162'\n'\\u016e'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\xd4'\n'\\u0143'\n'\\u0144'\n'\\u0148'\n'\\u0160'\n'\\u0161'\n'\\u0154'\n'\\xda'\n'\\u0155'\n'\\u0170'\n'\\xfd'\n'\\xdd'\n'\\u0163'\n'\\xb4'\n'\\xad'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n'\\u02d8'\n'\\xa7'\n'\\xf7'\n'\\xb8'\n'\\xb0'\n'\\xa8'\n'\\u02d9'\n'\\u0171'\n'\\u0158'\n'\\u0159'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b
:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a4:0x00cf,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00b0:0x00f8,\n0x00b4:0x00ef,\n0x00b8:0x00f7,\n0x00bb:0x00af,\n0x00c1:0x00b5,\n0x00c2:0x00b6,\n0x00c4:0x008e,\n0x00c7:0x0080,\n0x00c9:0x0090,\n0x00cb:0x00d3,\n0x00cd:0x00d6,\n0x00ce:0x00d7,\n0x00d3:0x00e0,\n0x00d4:0x00e2,\n0x00d6:0x0099,\n0x00d7:0x009e,\n0x00da:0x00e9,\n0x00dc:0x009a,\n0x00dd:0x00ed,\n0x00df:0x00e1,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e4:0x0084,\n0x00e7:0x0087,\n0x00e9:0x0082,\n0x00eb:0x0089,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00fa:0x00a3,\n0x00fc:0x0081,\n0x00fd:0x00ec,\n0x0102:0x00c6,\n0x0103:0x00c7,\n0x0104:0x00a4,\n0x0105:0x00a5,\n0x0106:0x008f,\n0x0107:0x0086,\n0x010c:0x00ac,\n0x010d:0x009f,\n0x010e:0x00d2,\n0x010f:0x00d4,\n0x0110:0x00d1,\n0x0111:0x00d0,\n0x0118:0x00a8,\n0x0119:0x00a9,\n0x011a:0x00b7,\n0x011b:0x00d8,\n0x0139:0x0091,\n0x013a:0x0092,\n0x013d:0x0095,\n0x013e:0x0096,\n0x0141:0x009d,\n0x0142:0x0088,\n0x0143:0x00e3,\n0x0144:0x00e4,\n0x0147:0x00d5,\n0x0148:0x00e5,\n0x0150:0x008a,\n0x0151:0x008b,\n0x0154:0x00e8,\n0x0155:0x00ea,\n0x0158:0x00fc,\n0x0159:0x00fd,\n0x015a:0x0097,\n0x015b:0x0098,\n0x015e:0x00b8,\n0x015f:0x00ad,\n0x0160:0x00e6,\n0x0161:0x00e7,\n0x0162:0x00dd,\n0x0163:0x00ee,\n0x0164:0x009b,\n0x0165:0x009c,\n0x016e:0x00de,\n0x016f:0x0085,\n0x0170:0x00eb,\n0x0171:0x00fb,\n0x0179:0x008d,\n0x017a:0x00ab,\n0x017b:0x00bd,\n0x017c:0x00be,\n0x017d:0x00a6,\n0x017e:0x00a7,\n0x02c7:0x00f3,\n0x02d8:0x00f4,\n0x02d9:0x00fa,\n0x02db:0x00f2,\n0x02dd:0x00f1,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.shift_jisx0213": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('shift_jisx0213')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='shift_jisx0213',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.cp866": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def 
decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp866',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0410,\n0x0081:0x0411,\n0x0082:0x0412,\n0x0083:0x0413,\n0x0084:0x0414,\n0x0085:0x0415,\n0x0086:0x0416,\n0x0087:0x0417,\n0x0088:0x0418,\n0x0089:0x0419,\n0x008a:0x041a,\n0x008b:0x041b,\n0x008c:0x041c,\n0x008d:0x041d,\n0x008e:0x041e,\n0x008f:0x041f,\n0x0090:0x0420,\n0x0091:0x0421,\n0x0092:0x0422,\n0x0093:0x0423,\n0x0094:0x0424,\n0x0095:0x0425,\n0x0096:0x0426,\n0x0097:0x0427,\n0x0098:0x0428,\n0x0099:0x0429,\n0x009a:0x042a,\n0x009b:0x042b,\n0x009c:0x042c,\n0x009d:0x042d,\n0x009e:0x042e,\n0x009f:0x042f,\n0x00a0:0x0430,\n0x00a1:0x0431,\n0x00a2:0x0432,\n0x00a3:0x0433,\n0x00a4:0x0434,\n0x00a5:0x0435,\n0x00a6:0x0436,\n0x00a7:0x0437,\n0x00a8:0x0438,\n0x00a9:0x0439,\n0x00aa:0x043a,\n0x00ab:0x043b,\n0x00ac:0x043c,\n0x00ad:0x043d,\n0x00ae:0x043e,\n0x00af:0x043f,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x0440,\n0x00e1:0x0441,\n0x00e2:0x0442,\n0x00e3:0x0443,\n0x00e4:0x0444,\n0x00e5:0x0445,\n0x00e6:0x0446,\n0x00e7:0x0447,\n0x00e8:0x0448,\n0x00e9:0x0449,\n0x00ea:0x044a,\n0x00eb:0x044b,\n0x00ec:0x044c,\n0x00ed:0x044d,\n0x00ee:0x044e,\n0x00ef:0x044f,\n0x00f0:0x0401,\n0x00f1:0x0451,\n0x00f2:0x0404,\n0x00f3:0x0454,\n0x00f4:0x0407,\n0x00f5:0x0457,\n0x00f6:0x040e,\n0x00f7:0x045e,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x2116,\n0x00fd:0x00a4,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n'\\u0401'\n'\\u0451'\n'\\u0404'\n'\\u0454'\n'\\u0407'\n'\\u0457'\n'\\u040e'\n'\\u045e'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u2116'\n'\\xa4'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0
066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a4:0x00fd,\n0x00b0:0x00f8,\n0x00b7:0x00fa,\n0x0401:0x00f0,\n0x0404:0x00f2,\n0x0407:0x00f4,\n0x040e:0x00f6,\n0x0410:0x0080,\n0x0411:0x0081,\n0x0412:0x0082,\n0x0413:0x0083,\n0x0414:0x0084,\n0x0415:0x0085,\n0x0416:0x0086,\n0x0417:0x0087,\n0x0418:0x0088,\n0x0419:0x0089,\n0x041a:0x008a,\n0x041b:0x008b,\n0x041c:0x008c,\n0x041d:0x008d,\n0x041e:0x008e,\n0x041f:0x008f,\n0x0420:0x0090,\n0x0421:0x0091,\n0x0422:0x0092,\n0x0423:0x0093,\n0x0424:0x0094,\n0x0425:0x0095,\n0x0426:0x0096,\n0x0427:0x0097,\n0x0428:0x0098,\n0x0429:0x0099,\n0x042a:0x009a,\n0x042b:0x009b,\n0x042c:0x009c,\n0x042d:0x009d,\n0x042e:0x009e,\n0x042f:0x009f,\n0x0430:0x00a0,\n0x0431:0x00a1,\n0x0432:0x00a2,\n0x0433:0x00a3,\n0x0434:0x00a4,\n0x0435:0x00a5,\n0x0436:0x00a6,\n0x0437:0x00a7,\n0x0438:0x00a8,\n0x0439:0x00a9,\n0x043a:0x00aa,\n0x043b:0x00ab,\n0x043c:0x00ac,\n0x043d:0x00ad,\n0x043e:0x00ae,\n0x043f:0x00af,\n0x0440:0x00e0,\n0x0441:0x00e1,\n0x0442:0x00e2,\n0x0443:0x00e3,\n0x0444:0x00e4,\n0x0445:0x00e5,\n0x0446:0x00e6,\n0x0447:0x00e7,\n0x0448:0x00e8,\n0x0449:0x00e9,\n0x044a:0x00ea,\n0x044b:0x00eb,\n0x044c:0x00ec,\n0x044d:0x00ed,\n0x044e:0x00ee,\n0x044f:0x00ef,\n0x0451:0x00f1,\n0x0454:0x00f3,\n0x0457:0x00f5,\n0x045e:0x00f7,\n0x2116:0x00fc,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.utf_7": [".py", "''\n\n\n\nimport codecs\n\n\n\nencode=codecs.utf_7_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_7_decode(input,errors,True )\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.utf_7_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=codecs.utf_7_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=codecs.utf_7_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=codecs.utf_7_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-7',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.base64_codec": [".py", "''\n\n\n\n\n\n\nimport codecs\nimport base64\n\n\n\ndef base64_encode(input,errors='strict'):\n assert errors =='strict'\n return (base64.encodebytes(input),len(input))\n \ndef 
base64_decode(input,errors='strict'):\n assert errors =='strict'\n return (base64.decodebytes(input),len(input))\n \nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return base64_encode(input,errors)\n def decode(self,input,errors='strict'):\n return base64_decode(input,errors)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n assert self.errors =='strict'\n return base64.encodebytes(input)\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n assert self.errors =='strict'\n return base64.decodebytes(input)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n charbuffertype=bytes\n \nclass StreamReader(Codec,codecs.StreamReader):\n charbuffertype=bytes\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='base64',\n encode=base64_encode,\n decode=base64_decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n _is_text_encoding=False ,\n )\n", ["base64", "codecs"]], "encodings.cp932": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('cp932')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp932',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.cp720": [".py", "''\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp720',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\xe9'\n'\\xe2'\n'\\x84'\n'\\xe0'\n'\\x86'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\u0651'\n'\\u0652'\n'\\xf4'\n'\\xa4'\n'\\u0640'\n'\\xfb'\n'\\xf9'\n'\\u0621'\n'\\u0622'\n'\\u0623'\n'\\u0624'\n'\\xa3'\n'\\u0625'\n'\\u0626'\n'\\u0627'\n'\\u0628'\n'\\u0629'\n'\\u062a'\n'\\u062b'\n'\\u062c'\n'\\u062d'\n'\\u062e'\n'\\u062f'\n'\\u0630'\n'\\u0631'\n'\\u0632'\n'\\u0633'\n'\\u0634'\n'\\u0635'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u0636'\n'\\u0637'\n'\\u0638'\n'\\u0639'\n'\\u063a'\n'\\u0641'\n'\\xb5'\n'\\u0642'\n'\\u0643'\n'\\u0644'\n'\\u0645'\n'\\u0646'\n'\\u0647'\n'\\u0648'\n'\\u0649'\n'\\u064a'\n'\\u2261'\n'\\u064b'\n'\\u064c'\n'\\u064d'\n'\\u064e'\n'\\u064f'\n'\\u0650'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp862": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp862',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x05d0,\n0x0081:0x05d1,\n0x0082:0x05d2,\n0x0083:0x05d3,\n0x0084:0x05d4,\n0x0085:0x05d5,\n0x0086:0x05d6,\n0x0087:0x05d7,\n0x0088:0x05d8,\n0x0089:0x05d9,\n0x008a:0x05da,\n0x008b:0x05db,\n0x008c:0x05dc,\n0x008d:0x05dd,\n0x008e:0x05de,\n0x008f:0x05df,\n0x0090:0x05e0,\n0x0091:0x05e1,\n0x0092:0x05e2,\n0x0093:0x05e3,\n0x0094:0x05e4,\n0x0095:0x05e5,\n0x0096:0x05e6,\n0x0097:0x05e7,\n0x0098:0x05e8,\n0x0099:0x05e9,\n0x009a:0x05ea,\n0x009b:0x00a2,\n0x009c:0x00a3,\n0x009d:0x00a5,\n0x009e:0x20a7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x2310,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u05d0'\n'\\u05d1'\n'\\u05d2'\n'\\u05d3'\n'\\u05d4'\n'\\u05d5'\n'\\u05d6'\n'\\u05d7'\n'\\u05d8'\n'\\u05d9'\n'\\u05da'\n'\\u05db'\n'\\u05dc'\n'\\u05dd'\n'\\u05de'\n'\\u05df'\n'\\u05e0'\n'\\u05e1'\n'\\u05e2'\n'\\u05e3'\n'\\u05e4'\n'\\u05e5'\n'\\u05e6'\n'\\u05e7'\n'\\u05e8'\n'\\u05e9'\n'\\u05ea'\n'\\xa2'\n'\\xa3'\n'\\xa5'\n'\\u20a7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\u2310'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069
:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x009b,\n0x00a3:0x009c,\n0x00a5:0x009d,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b5:0x00e6,\n0x00b7:0x00fa,\n0x00ba:0x00a7,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00bf:0x00a8,\n0x00d1:0x00a5,\n0x00df:0x00e1,\n0x00e1:0x00a0,\n0x00ed:0x00a1,\n0x00f1:0x00a4,\n0x00f3:0x00a2,\n0x00f7:0x00f6,\n0x00fa:0x00a3,\n0x0192:0x009f,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x05d0:0x0080,\n0x05d1:0x0081,\n0x05d2:0x0082,\n0x05d3:0x0083,\n0x05d4:0x0084,\n0x05d5:0x0085,\n0x05d6:0x0086,\n0x05d7:0x0087,\n0x05d8:0x0088,\n0x05d9:0x0089,\n0x05da:0x008a,\n0x05db:0x008b,\n0x05dc:0x008c,\n0x05dd:0x008d,\n0x05de:0x008e,\n0x05df:0x008f,\n0x05e0:0x0090,\n0x05e1:0x0091,\n0x05e2:0x0092,\n0x05e3:0x0093,\n0x05e4:0x0094,\n0x05e5:0x0095,\n0x05e6:0x0096,\n0x05e7:0x0097,\n0x05e8:0x0098,\n0x05e9:0x0099,\n0x05ea:0x009a,\n0x207f:0x00fc,\n0x20a7:0x009e,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2310:0x00a9,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.cp437": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp437',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x00ec,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00f2,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x00ff,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00a2,\n0x009c:0x00a3,\n0x009d:0x00a5,\n0x009e:0x20a7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x2310,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\xec'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xfb'\n'\\xf9'\n'\\xff'\n'\\xd6'\n'\\xdc'\n'\\xa2'\n'\\xa3'\n'\\xa5'\n'\\u20a7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\u2310'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006
c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x009b,\n0x00a3:0x009c,\n0x00a5:0x009d,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b5:0x00e6,\n0x00b7:0x00fa,\n0x00ba:0x00a7,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00bf:0x00a8,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c9:0x0090,\n0x00d1:0x00a5,\n0x00d6:0x0099,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ec:0x008d,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00ff:0x0098,\n0x0192:0x009f,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x207f:0x00fc,\n0x20a7:0x009e,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2310:0x00a9,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.palmos": [".py", "''\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='palmos',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\x81'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\u0160'\n'\\u2039'\n'\\u0152'\n'\\u2666'\n'\\u2663'\n'\\u2665'\n'\\u2660'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\u0161'\n'\\x9b'\n'\\u0153'\n'\\x9d'\n'\\x9e'\n'\\u0178'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xd0'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\xde'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xf0'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\xfe'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_9": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-9',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u011e'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u0130'\n'\\u015e'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\u011f'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u0131'\n'\\u015f'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp856": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp856',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u05d0'\n'\\u05d1'\n'\\u05d2'\n'\\u05d3'\n'\\u05d4'\n'\\u05d5'\n'\\u05d6'\n'\\u05d7'\n'\\u05d8'\n'\\u05d9'\n'\\u05da'\n'\\u05db'\n'\\u05dc'\n'\\u05dd'\n'\\u05de'\n'\\u05df'\n'\\u05e0'\n'\\u05e1'\n'\\u05e2'\n'\\u05e3'\n'\\u05e4'\n'\\u05e5'\n'\\u05e6'\n'\\u05e7'\n'\\u05e8'\n'\\u05e9'\n'\\u05ea'\n'\\ufffe'\n'\\xa3'\n'\\ufffe'\n'\\xd7'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xae'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\ufffe'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa9'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\xa2'\n'\\xa5'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\ufffe'\n'\\ufffe'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\xa6'\n'\\ufffe'\n'\\u2580'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xb5'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xaf'\n'\\xb4'\n'\\xad'\n'\\xb1'\n'\\u2017'\n'\\xbe'\n'\\xb6'\n'\\xa7'\n'\\xf7'\n'\\xb8'\n'\\xb0'\n'\\xa8'\n'\\xb7'\n'\\xb9'\n'\\xb3'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.aliases": [".py", 
"''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\naliases={\n\n\n\n\n'646':'ascii',\n'ansi_x3.4_1968':'ascii',\n'ansi_x3_4_1968':'ascii',\n'ansi_x3.4_1986':'ascii',\n'cp367':'ascii',\n'csascii':'ascii',\n'ibm367':'ascii',\n'iso646_us':'ascii',\n'iso_646.irv_1991':'ascii',\n'iso_ir_6':'ascii',\n'us':'ascii',\n'us_ascii':'ascii',\n\n\n'base64':'base64_codec',\n'base_64':'base64_codec',\n\n\n'big5_tw':'big5',\n'csbig5':'big5',\n\n\n'big5_hkscs':'big5hkscs',\n'hkscs':'big5hkscs',\n\n\n'bz2':'bz2_codec',\n\n\n'037':'cp037',\n'csibm037':'cp037',\n'ebcdic_cp_ca':'cp037',\n'ebcdic_cp_nl':'cp037',\n'ebcdic_cp_us':'cp037',\n'ebcdic_cp_wt':'cp037',\n'ibm037':'cp037',\n'ibm039':'cp037',\n\n\n'1026':'cp1026',\n'csibm1026':'cp1026',\n'ibm1026':'cp1026',\n\n\n'1125':'cp1125',\n'ibm1125':'cp1125',\n'cp866u':'cp1125',\n'ruscii':'cp1125',\n\n\n'1140':'cp1140',\n'ibm1140':'cp1140',\n\n\n'1250':'cp1250',\n'windows_1250':'cp1250',\n\n\n'1251':'cp1251',\n'windows_1251':'cp1251',\n\n\n'1252':'cp1252',\n'windows_1252':'cp1252',\n\n\n'1253':'cp1253',\n'windows_1253':'cp1253',\n\n\n'1254':'cp1254',\n'windows_1254':'cp1254',\n\n\n'1255':'cp1255',\n'windows_1255':'cp1255',\n\n\n'1256':'cp1256',\n'windows_1256':'cp1256',\n\n\n'1257':'cp1257',\n'windows_1257':'cp1257',\n\n\n'1258':'cp1258',\n'windows_1258':'cp1258',\n\n\n'273':'cp273',\n'ibm273':'cp273',\n'csibm273':'cp273',\n\n\n'424':'cp424',\n'csibm424':'cp424',\n'ebcdic_cp_he':'cp424',\n'ibm424':'cp424',\n\n\n'437':'cp437',\n'cspc8codepage437':'cp437',\n'ibm437':'cp437',\n\n\n'500':'cp500',\n'csibm500':'cp500',\n'ebcdic_cp_be':'cp500',\n'ebcdic_cp_ch':'cp500',\n'ibm500':'cp500',\n\n\n'775':'cp775',\n'cspc775baltic':'cp775',\n'ibm775':'cp775',\n\n\n'850':'cp850',\n'cspc850multilingual':'cp850',\n'ibm850':'cp850',\n\n\n'852':'cp852',\n'cspcp852':'cp852',\n'ibm852':'cp852',\n\n\n'855':'cp855',\n'csibm855':'cp855',\n'ibm855':'cp855',\n\n\n'857':'cp857',\n'csibm857':'cp857',\n'ibm857':'cp857',\n\n\n'858':'cp858',\n'csibm858':'cp858',\n'ibm858':'cp858',\n\n\n'860':'cp860',\n'csibm860':'cp860',\n'ibm860':'cp860',\n\n\n'861':'cp861',\n'cp_is':'cp861',\n'csibm861':'cp861',\n'ibm861':'cp861',\n\n\n'862':'cp862',\n'cspc862latinhebrew':'cp862',\n'ibm862':'cp862',\n\n\n'863':'cp863',\n'csibm863':'cp863',\n'ibm863':'cp863',\n\n\n'864':'cp864',\n'csibm864':'cp864',\n'ibm864':'cp864',\n\n\n'865':'cp865',\n'csibm865':'cp865',\n'ibm865':'cp865',\n\n\n'866':'cp866',\n'csibm866':'cp866',\n'ibm866':'cp866',\n\n\n'869':'cp869',\n'cp_gr':'cp869',\n'csibm869':'cp869',\n'ibm869':'cp869',\n\n\n'932':'cp932',\n'ms932':'cp932',\n'mskanji':'cp932',\n'ms_kanji':'cp932',\n\n\n'949':'cp949',\n'ms949':'cp949',\n'uhc':'cp949',\n\n\n'950':'cp950',\n'ms950':'cp950',\n\n\n'jisx0213':'euc_jis_2004',\n'eucjis2004':'euc_jis_2004',\n'euc_jis2004':'euc_jis_2004',\n\n\n'eucjisx0213':'euc_jisx0213',\n\n\n'eucjp':'euc_jp',\n'ujis':'euc_jp',\n'u_jis':'euc_jp',\n\n\n'euckr':'euc_kr',\n'korean':'euc_kr',\n'ksc5601':'euc_kr',\n'ks_c_5601':'euc_kr',\n'ks_c_5601_1987':'euc_kr',\n'ksx1001':'euc_kr',\n'ks_x_1001':'euc_kr',\n\n\n'gb18030_2000':'gb18030',\n\n\n'chinese':'gb2312',\n'csiso58gb231280':'gb2312',\n'euc_cn':'gb2312',\n'euccn':'gb2312',\n'eucgb2312_cn':'gb2312',\n'gb2312_1980':'gb2312',\n'gb2312_80':'gb2312',\n'iso_ir_58':'gb2312',\n\n\n'936':'gbk',\n'cp936':'gbk',\n'ms936':'gbk',\n\n\n'hex':'hex_codec',\n\n\n'roman8':'hp_roman8',\n'r8':'hp_roman8',\n'csHPRoman8':'hp_roman8',\n'cp1051':'hp_roman8',\n'ibm1051':'hp_roman8',\n\n\n'hzgb':'hz',\n'hz_gb':'hz',\n'hz_gb_2312':'hz',\n\n\n'csiso2022jp':'iso2022_jp',\n'iso2022jp':'is
o2022_jp',\n'iso_2022_jp':'iso2022_jp',\n\n\n'iso2022jp_1':'iso2022_jp_1',\n'iso_2022_jp_1':'iso2022_jp_1',\n\n\n'iso2022jp_2':'iso2022_jp_2',\n'iso_2022_jp_2':'iso2022_jp_2',\n\n\n'iso_2022_jp_2004':'iso2022_jp_2004',\n'iso2022jp_2004':'iso2022_jp_2004',\n\n\n'iso2022jp_3':'iso2022_jp_3',\n'iso_2022_jp_3':'iso2022_jp_3',\n\n\n'iso2022jp_ext':'iso2022_jp_ext',\n'iso_2022_jp_ext':'iso2022_jp_ext',\n\n\n'csiso2022kr':'iso2022_kr',\n'iso2022kr':'iso2022_kr',\n'iso_2022_kr':'iso2022_kr',\n\n\n'csisolatin6':'iso8859_10',\n'iso_8859_10':'iso8859_10',\n'iso_8859_10_1992':'iso8859_10',\n'iso_ir_157':'iso8859_10',\n'l6':'iso8859_10',\n'latin6':'iso8859_10',\n\n\n'thai':'iso8859_11',\n'iso_8859_11':'iso8859_11',\n'iso_8859_11_2001':'iso8859_11',\n\n\n'iso_8859_13':'iso8859_13',\n'l7':'iso8859_13',\n'latin7':'iso8859_13',\n\n\n'iso_8859_14':'iso8859_14',\n'iso_8859_14_1998':'iso8859_14',\n'iso_celtic':'iso8859_14',\n'iso_ir_199':'iso8859_14',\n'l8':'iso8859_14',\n'latin8':'iso8859_14',\n\n\n'iso_8859_15':'iso8859_15',\n'l9':'iso8859_15',\n'latin9':'iso8859_15',\n\n\n'iso_8859_16':'iso8859_16',\n'iso_8859_16_2001':'iso8859_16',\n'iso_ir_226':'iso8859_16',\n'l10':'iso8859_16',\n'latin10':'iso8859_16',\n\n\n'csisolatin2':'iso8859_2',\n'iso_8859_2':'iso8859_2',\n'iso_8859_2_1987':'iso8859_2',\n'iso_ir_101':'iso8859_2',\n'l2':'iso8859_2',\n'latin2':'iso8859_2',\n\n\n'csisolatin3':'iso8859_3',\n'iso_8859_3':'iso8859_3',\n'iso_8859_3_1988':'iso8859_3',\n'iso_ir_109':'iso8859_3',\n'l3':'iso8859_3',\n'latin3':'iso8859_3',\n\n\n'csisolatin4':'iso8859_4',\n'iso_8859_4':'iso8859_4',\n'iso_8859_4_1988':'iso8859_4',\n'iso_ir_110':'iso8859_4',\n'l4':'iso8859_4',\n'latin4':'iso8859_4',\n\n\n'csisolatincyrillic':'iso8859_5',\n'cyrillic':'iso8859_5',\n'iso_8859_5':'iso8859_5',\n'iso_8859_5_1988':'iso8859_5',\n'iso_ir_144':'iso8859_5',\n\n\n'arabic':'iso8859_6',\n'asmo_708':'iso8859_6',\n'csisolatinarabic':'iso8859_6',\n'ecma_114':'iso8859_6',\n'iso_8859_6':'iso8859_6',\n'iso_8859_6_1987':'iso8859_6',\n'iso_ir_127':'iso8859_6',\n\n\n'csisolatingreek':'iso8859_7',\n'ecma_118':'iso8859_7',\n'elot_928':'iso8859_7',\n'greek':'iso8859_7',\n'greek8':'iso8859_7',\n'iso_8859_7':'iso8859_7',\n'iso_8859_7_1987':'iso8859_7',\n'iso_ir_126':'iso8859_7',\n\n\n'csisolatinhebrew':'iso8859_8',\n'hebrew':'iso8859_8',\n'iso_8859_8':'iso8859_8',\n'iso_8859_8_1988':'iso8859_8',\n'iso_ir_138':'iso8859_8',\n\n\n'csisolatin5':'iso8859_9',\n'iso_8859_9':'iso8859_9',\n'iso_8859_9_1989':'iso8859_9',\n'iso_ir_148':'iso8859_9',\n'l5':'iso8859_9',\n'latin5':'iso8859_9',\n\n\n'cp1361':'johab',\n'ms1361':'johab',\n\n\n'cskoi8r':'koi8_r',\n\n\n'kz_1048':'kz1048',\n'rk1048':'kz1048',\n'strk1048_2002':'kz1048',\n\n\n\n\n\n\n\n\n'8859':'latin_1',\n'cp819':'latin_1',\n'csisolatin1':'latin_1',\n'ibm819':'latin_1',\n'iso8859':'latin_1',\n'iso8859_1':'latin_1',\n'iso_8859_1':'latin_1',\n'iso_8859_1_1987':'latin_1',\n'iso_ir_100':'latin_1',\n'l1':'latin_1',\n'latin':'latin_1',\n'latin1':'latin_1',\n\n\n'maccyrillic':'mac_cyrillic',\n\n\n'macgreek':'mac_greek',\n\n\n'maciceland':'mac_iceland',\n\n\n'maccentraleurope':'mac_latin2',\n'mac_centeuro':'mac_latin2',\n'maclatin2':'mac_latin2',\n\n\n'macintosh':'mac_roman',\n'macroman':'mac_roman',\n\n\n'macturkish':'mac_turkish',\n\n\n'ansi':'mbcs',\n'dbcs':'mbcs',\n\n\n'csptcp154':'ptcp154',\n'pt154':'ptcp154',\n'cp154':'ptcp154',\n'cyrillic_asian':'ptcp154',\n\n\n'quopri':'quopri_codec',\n'quoted_printable':'quopri_codec',\n'quotedprintable':'quopri_codec',\n\n\n'rot13':'rot_13',\n\n\n'csshiftjis':'shift_jis',\n'shi
ftjis':'shift_jis',\n'sjis':'shift_jis',\n's_jis':'shift_jis',\n\n\n'shiftjis2004':'shift_jis_2004',\n'sjis_2004':'shift_jis_2004',\n's_jis_2004':'shift_jis_2004',\n\n\n'shiftjisx0213':'shift_jisx0213',\n'sjisx0213':'shift_jisx0213',\n's_jisx0213':'shift_jisx0213',\n\n\n'tis620':'tis_620',\n'tis_620_0':'tis_620',\n'tis_620_2529_0':'tis_620',\n'tis_620_2529_1':'tis_620',\n'iso_ir_166':'tis_620',\n\n\n'u16':'utf_16',\n'utf16':'utf_16',\n\n\n'unicodebigunmarked':'utf_16_be',\n'utf_16be':'utf_16_be',\n\n\n'unicodelittleunmarked':'utf_16_le',\n'utf_16le':'utf_16_le',\n\n\n'u32':'utf_32',\n'utf32':'utf_32',\n\n\n'utf_32be':'utf_32_be',\n\n\n'utf_32le':'utf_32_le',\n\n\n'u7':'utf_7',\n'utf7':'utf_7',\n'unicode_1_1_utf_7':'utf_7',\n\n\n'u8':'utf_8',\n'utf':'utf_8',\n'utf8':'utf_8',\n'utf8_ucs2':'utf_8',\n'utf8_ucs4':'utf_8',\n'cp65001':'utf_8',\n\n\n'uu':'uu_codec',\n\n\n'zip':'zlib_codec',\n'zlib':'zlib_codec',\n\n\n'x_mac_japanese':'shift_jis',\n'x_mac_korean':'euc_kr',\n'x_mac_simp_chinese':'gb2312',\n'x_mac_trad_chinese':'big5',\n}\n", []], "encodings.latin_1": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n\n\n encode=codecs.latin_1_encode\n decode=codecs.latin_1_decode\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.latin_1_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.latin_1_decode(input,self.errors)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \nclass StreamConverter(StreamWriter,StreamReader):\n\n encode=codecs.latin_1_decode\n decode=codecs.latin_1_encode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-1',\n encode=Codec.encode,\n decode=Codec.decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.cp875": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp875',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\u0391'\n'\\u0392'\n'\\u0393'\n'\\u0394'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\u0398'\n'\\u0399'\n'['\n'.'\n'<'\n'('\n'+'\n'!'\n'&'\n'\\u039a'\n'\\u039b'\n'\\u039c'\n'\\u039d'\n'\\u039e'\n'\\u039f'\n'\\u03a0'\n'\\u03a1'\n'\\u03a3'\n']'\n'$'\n'*'\n')'\n';'\n'^'\n'-'\n'/'\n'\\u03a4'\n'\\u03a5'\n'\\u03a6'\n'\\u03a7'\n'\\u03a8'\n'\\u03a9'\n'\\u03aa'\n'\\u03ab'\n'|'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xa8'\n'\\u0386'\n'\\u0388'\n'\\u0389'\n'\\xa0'\n'\\u038a'\n'\\u038c'\n'\\u038e'\n'\\u038f'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\u0385'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\u03b1'\n'\\u03b2'\n'\\u03b3'\n'\\u03b4'\n'\\u03b5'\n'\\u03b6'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\u03b7'\n'\\u03b8'\n'\\u03b9'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\xb4'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\u03bd'\n'\\u03be'\n'\\u03bf'\n'\\u03c0'\n'\\u03c1'\n'\\u03c3'\n'\\xa3'\n'\\u03ac'\n'\\u03ad'\n'\\u03ae'\n'\\u03ca'\n'\\u03af'\n'\\u03cc'\n'\\u03cd'\n'\\u03cb'\n'\\u03ce'\n'\\u03c2'\n'\\u03c4'\n'\\u03c5'\n'\\u03c6'\n'\\u03c7'\n'\\u03c8'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\u03c9'\n'\\u0390'\n'\\u03b0'\n'\\u2018'\n'\\u2015'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb1'\n'\\xbd'\n'\\x1a'\n'\\u0387'\n'\\u2019'\n'\\xa6'\n'\\\\'\n'\\x1a'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xa7'\n'\\x1a'\n'\\x1a'\n'\\xab'\n'\\xac'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xa9'\n'\\x1a'\n'\\x1a'\n'\\xbb'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp950": [".py", "\n\n\n\n\n\nimport _codecs_tw,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_tw.getcodec('cp950')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp950',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_tw", "_multibytecodec", "codecs"]], "encodings.unicode_escape": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n\n\n encode=codecs.unicode_escape_encode\n decode=codecs.unicode_escape_decode\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.unicode_escape_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n def _buffer_decode(self,input,errors,final):\n return 
codecs.unicode_escape_decode(input,errors,final)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n def decode(self,input,errors='strict'):\n return codecs.unicode_escape_decode(input,errors,False)\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='unicode-escape',\n encode=Codec.encode,\n decode=Codec.decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs"]], "encodings.cp737": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp737',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0391,\n0x0081:0x0392,\n0x0082:0x0393,\n0x0083:0x0394,\n0x0084:0x0395,\n0x0085:0x0396,\n0x0086:0x0397,\n0x0087:0x0398,\n0x0088:0x0399,\n0x0089:0x039a,\n0x008a:0x039b,\n0x008b:0x039c,\n0x008c:0x039d,\n0x008d:0x039e,\n0x008e:0x039f,\n0x008f:0x03a0,\n0x0090:0x03a1,\n0x0091:0x03a3,\n0x0092:0x03a4,\n0x0093:0x03a5,\n0x0094:0x03a6,\n0x0095:0x03a7,\n0x0096:0x03a8,\n0x0097:0x03a9,\n0x0098:0x03b1,\n0x0099:0x03b2,\n0x009a:0x03b3,\n0x009b:0x03b4,\n0x009c:0x03b5,\n0x009d:0x03b6,\n0x009e:0x03b7,\n0x009f:0x03b8,\n0x00a0:0x03b9,\n0x00a1:0x03ba,\n0x00a2:0x03bb,\n0x00a3:0x03bc,\n0x00a4:0x03bd,\n0x00a5:0x03be,\n0x00a6:0x03bf,\n0x00a7:0x03c0,\n0x00a8:0x03c1,\n0x00a9:0x03c3,\n0x00aa:0x03c2,\n0x00ab:0x03c4,\n0x00ac:0x03c5,\n0x00ad:0x03c6,\n0x00ae:0x03c7,\n0x00af:0x03c8,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03c9,\n0x00e1:0x03ac,\n0x00e2:0x03ad,\n0x00e3:0x03ae,\n0x00e4:0x03ca,\n0x00e5:0x03af,\n0x00e6:0x03cc,\n0x00e7:0x03cd,\n0x00e8:0x03cb,\n0x00e9:0x03ce,\n0x00ea:0x0386,\n0x00eb:0x0388,\n0x00ec:0x0389,\n0x00ed:0x038a,\n0x00ee:0x038c,\n0x00ef:0x038e,\n0x00f0:0x038f,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x03aa,\n0x00f5:0x03ab,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x0
0f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0391'\n'\\u0392'\n'\\u0393'\n'\\u0394'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\u0398'\n'\\u0399'\n'\\u039a'\n'\\u039b'\n'\\u039c'\n'\\u039d'\n'\\u039e'\n'\\u039f'\n'\\u03a0'\n'\\u03a1'\n'\\u03a3'\n'\\u03a4'\n'\\u03a5'\n'\\u03a6'\n'\\u03a7'\n'\\u03a8'\n'\\u03a9'\n'\\u03b1'\n'\\u03b2'\n'\\u03b3'\n'\\u03b4'\n'\\u03b5'\n'\\u03b6'\n'\\u03b7'\n'\\u03b8'\n'\\u03b9'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03be'\n'\\u03bf'\n'\\u03c0'\n'\\u03c1'\n'\\u03c3'\n'\\u03c2'\n'\\u03c4'\n'\\u03c5'\n'\\u03c6'\n'\\u03c7'\n'\\u03c8'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03c9'\n'\\u03ac'\n'\\u03ad'\n'\\u03ae'\n'\\u03ca'\n'\\u03af'\n'\\u03cc'\n'\\u03cd'\n'\\u03cb'\n'\\u03ce'\n'\\u0386'\n'\\u0388'\n'\\u0389'\n'\\u038a'\n'\\u038c'\n'\\u038e'\n'\\u038f'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u03aa'\n'\\u03ab'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x
004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b7:0x00fa,\n0x00f7:0x00f6,\n0x0386:0x00ea,\n0x0388:0x00eb,\n0x0389:0x00ec,\n0x038a:0x00ed,\n0x038c:0x00ee,\n0x038e:0x00ef,\n0x038f:0x00f0,\n0x0391:0x0080,\n0x0392:0x0081,\n0x0393:0x0082,\n0x0394:0x0083,\n0x0395:0x0084,\n0x0396:0x0085,\n0x0397:0x0086,\n0x0398:0x0087,\n0x0399:0x0088,\n0x039a:0x0089,\n0x039b:0x008a,\n0x039c:0x008b,\n0x039d:0x008c,\n0x039e:0x008d,\n0x039f:0x008e,\n0x03a0:0x008f,\n0x03a1:0x0090,\n0x03a3:0x0091,\n0x03a4:0x0092,\n0x03a5:0x0093,\n0x03a6:0x0094,\n0x03a7:0x0095,\n0x03a8:0x0096,\n0x03a9:0x0097,\n0x03aa:0x00f4,\n0x03ab:0x00f5,\n0x03ac:0x00e1,\n0x03ad:0x00e2,\n0x03ae:0x00e3,\n0x03af:0x00e5,\n0x03b1:0x0098,\n0x03b2:0x0099,\n0x03b3:0x009a,\n0x03b4:0x009b,\n0x03b5:0x009c,\n0x03b6:0x009d,\n0x03b7:0x009e,\n0x03b8:0x009f,\n0x03b9:0x00a0,\n0x03ba:0x00a1,\n0x03bb:0x00a2,\n0x03bc:0x00a3,\n0x03bd:0x00a4,\n0x03be:0x00a5,\n0x03bf:0x00a6,\n0x03c0:0x00a7,\n0x03c1:0x00a8,\n0x03c2:0x00aa,\n0x03c3:0x00a9,\n0x03c4:0x00ab,\n0x03c5:0x00ac,\n0x03c6:0x00ad,\n0x03c7:0x00ae,\n0x03c8:0x00af,\n0x03c9:0x00e0,\n0x03ca:0x00e4,\n0x03cb:0x00e8,\n0x03cc:0x00e6,\n0x03cd:0x00e7,\n0x03ce:0x00e9,\n0x207f:0x00fc,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x2248:0x00f7,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.cp865": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass 
StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp865',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x00ec,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00f2,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x00ff,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x20a7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x2310,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00a4,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\xec'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xfb'\n'\\xf9'\n'\\xff'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\u20a7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\u2310'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xa4'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006
c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a3:0x009c,\n0x00a4:0x00af,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b5:0x00e6,\n0x00b7:0x00fa,\n0x00ba:0x00a7,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00bf:0x00a8,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c9:0x0090,\n0x00d1:0x00a5,\n0x00d6:0x0099,\n0x00d8:0x009d,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ec:0x008d,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00ff:0x0098,\n0x0192:0x009f,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x207f:0x00fc,\n0x20a7:0x009e,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2310:0x00a9,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.ptcp154": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='ptcp154',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0496'\n'\\u0492'\n'\\u04ee'\n'\\u0493'\n'\\u201e'\n'\\u2026'\n'\\u04b6'\n'\\u04ae'\n'\\u04b2'\n'\\u04af'\n'\\u04a0'\n'\\u04e2'\n'\\u04a2'\n'\\u049a'\n'\\u04ba'\n'\\u04b8'\n'\\u0497'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u04b3'\n'\\u04b7'\n'\\u04a1'\n'\\u04e3'\n'\\u04a3'\n'\\u049b'\n'\\u04bb'\n'\\u04b9'\n'\\xa0'\n'\\u040e'\n'\\u045e'\n'\\u0408'\n'\\u04e8'\n'\\u0498'\n'\\u04b0'\n'\\xa7'\n'\\u0401'\n'\\xa9'\n'\\u04d8'\n'\\xab'\n'\\xac'\n'\\u04ef'\n'\\xae'\n'\\u049c'\n'\\xb0'\n'\\u04b1'\n'\\u0406'\n'\\u0456'\n'\\u0499'\n'\\u04e9'\n'\\xb6'\n'\\xb7'\n'\\u0451'\n'\\u2116'\n'\\u04d9'\n'\\xbb'\n'\\u0458'\n'\\u04aa'\n'\\u04ab'\n'\\u049d'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.big5": [".py", "\n\n\n\n\n\nimport _codecs_tw,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_tw.getcodec('big5')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='big5',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_tw", "_multibytecodec", "codecs"]], "encodings.cp424": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False 
):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp424',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\u05d0'\n'\\u05d1'\n'\\u05d2'\n'\\u05d3'\n'\\u05d4'\n'\\u05d5'\n'\\u05d6'\n'\\u05d7'\n'\\u05d8'\n'\\xa2'\n'.'\n'<'\n'('\n'+'\n'|'\n'&'\n'\\u05d9'\n'\\u05da'\n'\\u05db'\n'\\u05dc'\n'\\u05dd'\n'\\u05de'\n'\\u05df'\n'\\u05e0'\n'\\u05e1'\n'!'\n'$'\n'*'\n')'\n';'\n'\\xac'\n'-'\n'/'\n'\\u05e2'\n'\\u05e3'\n'\\u05e4'\n'\\u05e5'\n'\\u05e6'\n'\\u05e7'\n'\\u05e8'\n'\\u05e9'\n'\\xa6'\n','\n'%'\n'_'\n'>'\n'?'\n'\\ufffe'\n'\\u05ea'\n'\\ufffe'\n'\\ufffe'\n'\\xa0'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2017'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\ufffe'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xb8'\n'\\ufffe'\n'\\xa4'\n'\\xb5'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xae'\n'^'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'['\n']'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\\\'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp861": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return 
codecs.CodecInfo(\n name='cp861',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00d0,\n0x008c:0x00f0,\n0x008d:0x00de,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00fe,\n0x0096:0x00fb,\n0x0097:0x00dd,\n0x0098:0x00fd,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x20a7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00c1,\n0x00a5:0x00cd,\n0x00a6:0x00d3,\n0x00a7:0x00da,\n0x00a8:0x00bf,\n0x00a9:0x2310,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xd0'\n'\\xf0'\n'\\xde'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xfe'\n'\\xfb'\n'\\xdd'\n'\\xfd'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\u20a7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xc1'\n'\\xcd'\n'\\xd3'\n'\\xda'\n'\\xbf'\n'\\u2310'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006
c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a3:0x009c,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b5:0x00e6,\n0x00b7:0x00fa,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00bf:0x00a8,\n0x00c1:0x00a4,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c9:0x0090,\n0x00cd:0x00a5,\n0x00d0:0x008b,\n0x00d3:0x00a6,\n0x00d6:0x0099,\n0x00d8:0x009d,\n0x00da:0x00a7,\n0x00dc:0x009a,\n0x00dd:0x0097,\n0x00de:0x008d,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ed:0x00a1,\n0x00f0:0x008c,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00fd:0x0098,\n0x00fe:0x0095,\n0x0192:0x009f,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x207f:0x00fc,\n0x20a7:0x009e,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2310:0x00a9,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.euc_jp": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('euc_jp')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='euc_jp',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.cp855": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return 
codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp855',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0452,\n0x0081:0x0402,\n0x0082:0x0453,\n0x0083:0x0403,\n0x0084:0x0451,\n0x0085:0x0401,\n0x0086:0x0454,\n0x0087:0x0404,\n0x0088:0x0455,\n0x0089:0x0405,\n0x008a:0x0456,\n0x008b:0x0406,\n0x008c:0x0457,\n0x008d:0x0407,\n0x008e:0x0458,\n0x008f:0x0408,\n0x0090:0x0459,\n0x0091:0x0409,\n0x0092:0x045a,\n0x0093:0x040a,\n0x0094:0x045b,\n0x0095:0x040b,\n0x0096:0x045c,\n0x0097:0x040c,\n0x0098:0x045e,\n0x0099:0x040e,\n0x009a:0x045f,\n0x009b:0x040f,\n0x009c:0x044e,\n0x009d:0x042e,\n0x009e:0x044a,\n0x009f:0x042a,\n0x00a0:0x0430,\n0x00a1:0x0410,\n0x00a2:0x0431,\n0x00a3:0x0411,\n0x00a4:0x0446,\n0x00a5:0x0426,\n0x00a6:0x0434,\n0x00a7:0x0414,\n0x00a8:0x0435,\n0x00a9:0x0415,\n0x00aa:0x0444,\n0x00ab:0x0424,\n0x00ac:0x0433,\n0x00ad:0x0413,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x0445,\n0x00b6:0x0425,\n0x00b7:0x0438,\n0x00b8:0x0418,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x0439,\n0x00be:0x0419,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x043a,\n0x00c7:0x041a,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x00a4,\n0x00d0:0x043b,\n0x00d1:0x041b,\n0x00d2:0x043c,\n0x00d3:0x041c,\n0x00d4:0x043d,\n0x00d5:0x041d,\n0x00d6:0x043e,\n0x00d7:0x041e,\n0x00d8:0x043f,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x041f,\n0x00de:0x044f,\n0x00df:0x2580,\n0x00e0:0x042f,\n0x00e1:0x0440,\n0x00e2:0x0420,\n0x00e3:0x0441,\n0x00e4:0x0421,\n0x00e5:0x0442,\n0x00e6:0x0422,\n0x00e7:0x0443,\n0x00e8:0x0423,\n0x00e9:0x0436,\n0x00ea:0x0416,\n0x00eb:0x0432,\n0x00ec:0x0412,\n0x00ed:0x044c,\n0x00ee:0x042c,\n0x00ef:0x2116,\n0x00f0:0x00ad,\n0x00f1:0x044b,\n0x00f2:0x042b,\n0x00f3:0x0437,\n0x00f4:0x0417,\n0x00f5:0x0448,\n0x00f6:0x0428,\n0x00f7:0x044d,\n0x00f8:0x042d,\n0x00f9:0x0449,\n0x00fa:0x0429,\n0x00fb:0x0447,\n0x00fc:0x0427,\n0x00fd:0x00a7,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0452'\n'\\u0402'\n'\\u0453'\n'\\u0403'\n'\\u0451'\n'\\u0401'\n'\\u0454'\n'\\u0404'\n'\\u0455'\n'\\u0405'\n'\\u0456'\n'\\u0406'\n'\\u0457'\n'\\u0407'\n'\\u0458'\n'\\u0408'\n'\\u0459'\n'\\u0409'\n'\\u045a'\n'\\u040a'\n'\\u045b'\n'\\u040b'\n'\\u045c'\n'\\u040c'\n'\\u045e'\n'\\u040e'\n'\\u045f'\n'\\u040f'\n'\\u044e'\n'\\u042e'\n'\\u044a'\n'\\u042a'\n'\\u0430'\n'\\u0410'\n'\\u0431'\n'\\u0411'\n'\\u0446'\n'\\u0426'\n'\\u0434'\n'\\u0414'\n'\\u0435'\n'\\u0415'\n'\\u0444'\n'\\u0424'\n'\\u0433'\n'\\u0413'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u0445'\n'\\u0425'\n'\\u0438'\n'\\u0418'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u0439'\n'\\u0419'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u043a'\n'\\u041a'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\u043b'\n'\\u041b'\n'\\u043c'\n'\\u041c'\n'\\u043d'\n'\\u041d'\n'\\u043e'\n'\\u041e'\n'\\u043f'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u041f'\n'\\u044f'\n'\\u2580'\n'\\u042f'\n'\\u0440'\n'\\u0420'\n'\\u0441'\n'\\u0421'\n'\\u0442'\n'\\u0422'\n'\\u0443'\n'\\u0423'\n'\\u0436'\n'\\u0416'\n'\\u0432'\n'\\u0412'\n'\\u044c'\n'\\u042c'\n'\\u2116'\n'\\xad'\n'\\u044b'\n'\\u042b'\n'\\u0437'\n'\\u0417'\n'\\u0448'\n'\\u0428'\n'\\u044d'\n'\\u042d'\n'\\u0449'\n'\\u0429'\n'\\u0447'\n'\\u0427'\n'\\xa7'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,
\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a4:0x00cf,\n0x00a7:0x00fd,\n0x00ab:0x00ae,\n0x00ad:0x00f0,\n0x00bb:0x00af,\n0x0401:0x0085,\n0x0402:0x0081,\n0x0403:0x0083,\n0x0404:0x0087,\n0x0405:0x0089,\n0x0406:0x008b,\n0x0407:0x008d,\n0x0408:0x008f,\n0x0409:0x0091,\n0x040a:0x0093,\n0x040b:0x0095,\n0x040c:0x0097,\n0x040e:0x0099,\n0x040f:0x009b,\n0x0410:0x00a1,\n0x0411:0x00a3,\n0x0412:0x00ec,\n0x0413:0x00ad,\n0x0414:0x00a7,\n0x0415:0x00a9,\n0x0416:0x00ea,\n0x0417:0x00f4,\n0x0418:0x00b8,\n0x0419:0x00be,\n0x041a:0x00c7,\n0x041b:0x00d1,\n0x041c:0x00d3,\n0x041d:0x00d5,\n0x041e:0x00d7,\n0x041f:0x00dd,\n0x0420:0x00e2,\n0x0421:0x00e4,\n0x0422:0x00e6,\n0x0423:0x00e8,\n0x0424:0x00ab,\n0x0425:0x00b6,\n0x0426:0x00a5,\n0x0427:0x00fc,\n0x0428:0x00f6,\n0x0429:0x00fa,\n0x042a:0x009f,\n0x042b:0x00f2,\n0x042c:0x00ee,\n0x042d:0x00f8,\n0x042e:0x009d,\n0x042f:0x00e0,\n0x0430:0x00a0,\n0x0431:0x00a2,\n0x0432:0x00eb,\n0x0433:0x00ac,\n0x0434:0x00a6,\n0x0435:0x00a8,\n0x0436:0x00e9,\n0x0437:0x00f3,\n0x0438:0x00b7,\n0x0439:0x00bd,\n0x043a:0x00c6,\n0x043b:0x00d0,\n0x043c:0x00d2,\n0x043d:0x00d4,\n0x043e:0x00d6,\n0x043f:0x00d8,\n0x0440:0x00e1,\n0x0441:0x00e3,\n0x0442:0x00e5,\n0x0443:0x00e7,\n0x0444:0x00aa,\n0x0445:0x00b5,\n0x0446:0x00a4,\n0x0447:0x00fb,\n0x0448:0x00f5,\n0x0449:0x00f9,\n0x044a:0x009e,\n0x044b:0x00f1,\n0x044c:0x00ed,\n0x044d:0x00f7,\n0x044e:0x009c,\n0x044f:0x00de,\n0x0451:0x0084,\n0x0452:0x0080,\n0x0453:0x0082,\n0x0454:0x0086,\n0x0455:0x0088,\n0x0456:0x008a,\n0x0457:0x008c,\n0x0458:0x008e,\n0x0459:0x0090,\n0x045a:0x0092,\n0x045b:0x0094,\n0x045c:0x0096,\n0x045e:0x0098,\n0x045f:0x009a,\n0x2116:0x00ef,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.shift_jis": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('shift_jis')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='shift_jis',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.utf_32_le": [".py", "''\n\n\nimport codecs\n\n\n\nencode=codecs.utf_32_le_encode\n\ndef decode(input,errors='strict'):\n return 
codecs.utf_32_le_decode(input,errors,True )\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.utf_32_le_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=codecs.utf_32_le_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=codecs.utf_32_le_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=codecs.utf_32_le_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-32-le',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.cp500": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp500',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' 
'\n'\\xa0'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe1'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xf1'\n'['\n'.'\n'<'\n'('\n'+'\n'!'\n'&'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xec'\n'\\xdf'\n']'\n'$'\n'*'\n')'\n';'\n'^'\n'-'\n'/'\n'\\xc2'\n'\\xc4'\n'\\xc0'\n'\\xc1'\n'\\xc3'\n'\\xc5'\n'\\xc7'\n'\\xd1'\n'\\xa6'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\xf0'\n'\\xfd'\n'\\xfe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\xa4'\n'\\xb5'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n'\\xd0'\n'\\xdd'\n'\\xde'\n'\\xae'\n'\\xa2'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xac'\n'|'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'\\xfc'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\\\'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'\\xd6'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n'\\xdc'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.undefined": [".py", "''\n\n\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n raise UnicodeError(\"undefined encoding\")\n \n def decode(self,input,errors='strict'):\n raise UnicodeError(\"undefined encoding\")\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n raise UnicodeError(\"undefined encoding\")\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n raise UnicodeError(\"undefined encoding\")\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='undefined',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs"]], "encodings.cp860": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp860',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e3,\n0x0085:0x00e0,\n0x0086:0x00c1,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00ca,\n0x008a:0x00e8,\n0x008b:0x00cd,\n0x008c:0x00d4,\n0x008d:0x00ec,\n0x008e:0x00c3,\n0x008f:0x00c2,\n0x0090:0x00c9,\n0x0091:0x00c0,\n0x0092:0x00c8,\n0x0093:0x00f4,\n0x0094:0x00f5,\n0x0095:0x00f2,\n0x0096:0x00da,\n0x0097:0x00f9,\n0x0098:0x00cc,\n0x0099:0x00d5,\n0x009a:0x00dc,\n0x009b:0x00a2,\n0x009c:0x00a3,\n0x009d:0x00d9,\n0x009e:0x20a7,\n0x009f:0x00d3,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x00d2,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe3'\n'\\xe0'\n'\\xc1'\n'\\xe7'\n'\\xea'\n'\\xca'\n'\\xe8'\n'\\xcd'\n'\\xd4'\n'\\xec'\n'\\xc3'\n'\\xc2'\n'\\xc9'\n'\\xc0'\n'\\xc8'\n'\\xf4'\n'\\xf5'\n'\\xf2'\n'\\xda'\n'\\xf9'\n'\\xcc'\n'\\xd5'\n'\\xdc'\n'\\xa2'\n'\\xa3'\n'\\xd9'\n'\\u20a7'\n'\\xd3'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\xd2'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n
0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x009b,\n0x00a3:0x009c,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b5:0x00e6,\n0x00b7:0x00fa,\n0x00ba:0x00a7,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00bf:0x00a8,\n0x00c0:0x0091,\n0x00c1:0x0086,\n0x00c2:0x008f,\n0x00c3:0x008e,\n0x00c7:0x0080,\n0x00c8:0x0092,\n0x00c9:0x0090,\n0x00ca:0x0089,\n0x00cc:0x0098,\n0x00cd:0x008b,\n0x00d1:0x00a5,\n0x00d2:0x00a9,\n0x00d3:0x009f,\n0x00d4:0x008c,\n0x00d5:0x0099,\n0x00d9:0x009d,\n0x00da:0x0096,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e3:0x0084,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00ec:0x008d,\n0x00ed:0x00a1,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f5:0x0094,\n0x00f7:0x00f6,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fc:0x0081,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x207f:0x00fc,\n0x20a7:0x009e,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.uu_codec": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\nimport binascii\nfrom io import BytesIO\n\n\n\ndef uu_encode(input,errors='strict',filename='',mode=0o666):\n assert errors =='strict'\n infile=BytesIO(input)\n outfile=BytesIO()\n read=infile.read\n write=outfile.write\n \n \n filename=filename.replace('\\n','\\\\n')\n filename=filename.replace('\\r','\\\\r')\n \n \n write(('begin %o %s\\n'%(mode&0o777,filename)).encode('ascii'))\n chunk=read(45)\n while chunk:\n write(binascii.b2a_uu(chunk))\n chunk=read(45)\n write(b' \\nend\\n')\n \n return (outfile.getvalue(),len(input))\n \ndef uu_decode(input,errors='strict'):\n assert errors =='strict'\n infile=BytesIO(input)\n outfile=BytesIO()\n readline=infile.readline\n write=outfile.write\n \n \n while 1:\n s=readline()\n if not s:\n raise ValueError('Missing \"begin\" line in input data')\n if s[:5]==b'begin':\n break\n \n \n while True :\n s=readline()\n if not s or s ==b'end\\n':\n break\n try :\n data=binascii.a2b_uu(s)\n except binascii.Error as v:\n \n nbytes=(((s[0]-32)&63)*4+5)//3\n data=binascii.a2b_uu(s[:nbytes])\n \n write(data)\n if not s:\n raise ValueError('Truncated input data')\n \n 
return (outfile.getvalue(),len(input))\n \nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return uu_encode(input,errors)\n \n def decode(self,input,errors='strict'):\n return uu_decode(input,errors)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return uu_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return uu_decode(input,self.errors)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n charbuffertype=bytes\n \nclass StreamReader(Codec,codecs.StreamReader):\n charbuffertype=bytes\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='uu',\n encode=uu_encode,\n decode=uu_decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n _is_text_encoding=False ,\n )\n", ["binascii", "codecs", "io"]], "encodings.utf_16_le": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nencode=codecs.utf_16_le_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_16_le_decode(input,errors,True )\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.utf_16_le_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=codecs.utf_16_le_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=codecs.utf_16_le_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=codecs.utf_16_le_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-16-le',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.gb18030": [".py", "\n\n\n\n\n\nimport _codecs_cn,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_cn.getcodec('gb18030')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='gb18030',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_cn", "_multibytecodec", "codecs"]], "encodings.cp874": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n 
name='cp874',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2026'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa0'\n'\\u0e01'\n'\\u0e02'\n'\\u0e03'\n'\\u0e04'\n'\\u0e05'\n'\\u0e06'\n'\\u0e07'\n'\\u0e08'\n'\\u0e09'\n'\\u0e0a'\n'\\u0e0b'\n'\\u0e0c'\n'\\u0e0d'\n'\\u0e0e'\n'\\u0e0f'\n'\\u0e10'\n'\\u0e11'\n'\\u0e12'\n'\\u0e13'\n'\\u0e14'\n'\\u0e15'\n'\\u0e16'\n'\\u0e17'\n'\\u0e18'\n'\\u0e19'\n'\\u0e1a'\n'\\u0e1b'\n'\\u0e1c'\n'\\u0e1d'\n'\\u0e1e'\n'\\u0e1f'\n'\\u0e20'\n'\\u0e21'\n'\\u0e22'\n'\\u0e23'\n'\\u0e24'\n'\\u0e25'\n'\\u0e26'\n'\\u0e27'\n'\\u0e28'\n'\\u0e29'\n'\\u0e2a'\n'\\u0e2b'\n'\\u0e2c'\n'\\u0e2d'\n'\\u0e2e'\n'\\u0e2f'\n'\\u0e30'\n'\\u0e31'\n'\\u0e32'\n'\\u0e33'\n'\\u0e34'\n'\\u0e35'\n'\\u0e36'\n'\\u0e37'\n'\\u0e38'\n'\\u0e39'\n'\\u0e3a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0e3f'\n'\\u0e40'\n'\\u0e41'\n'\\u0e42'\n'\\u0e43'\n'\\u0e44'\n'\\u0e45'\n'\\u0e46'\n'\\u0e47'\n'\\u0e48'\n'\\u0e49'\n'\\u0e4a'\n'\\u0e4b'\n'\\u0e4c'\n'\\u0e4d'\n'\\u0e4e'\n'\\u0e4f'\n'\\u0e50'\n'\\u0e51'\n'\\u0e52'\n'\\u0e53'\n'\\u0e54'\n'\\u0e55'\n'\\u0e56'\n'\\u0e57'\n'\\u0e58'\n'\\u0e59'\n'\\u0e5a'\n'\\u0e5b'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp850": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp850',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x00ec,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00f2,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x00ff,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x00d7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x00ae,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x00c1,\n0x00b6:0x00c2,\n0x00b7:0x00c0,\n0x00b8:0x00a9,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x00a2,\n0x00be:0x00a5,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x00e3,\n0x00c7:0x00c3,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x00a4,\n0x00d0:0x00f0,\n0x00d1:0x00d0,\n0x00d2:0x00ca,\n0x00d3:0x00cb,\n0x00d4:0x00c8,\n0x00d5:0x0131,\n0x00d6:0x00cd,\n0x00d7:0x00ce,\n0x00d8:0x00cf,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x00a6,\n0x00de:0x00cc,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x00d4,\n0x00e3:0x00d2,\n0x00e4:0x00f5,\n0x00e5:0x00d5,\n0x00e6:0x00b5,\n0x00e7:0x00fe,\n0x00e8:0x00de,\n0x00e9:0x00da,\n0x00ea:0x00db,\n0x00eb:0x00d9,\n0x00ec:0x00fd,\n0x00ed:0x00dd,\n0x00ee:0x00af,\n0x00ef:0x00b4,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:0x2017,\n0x00f3:0x00be,\n0x00f4:0x00b6,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x00b8,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x00b7,\n0x00fb:0x00b9,\n0x00fc:0x00b3,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\xec'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xfb'\n'\\xf9'\n'\\xff'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\xd7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\xae'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\xc1'\n'\\xc2'\n'\\xc0'\n'\\xa9'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\xa2'\n'\\xa5'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\xe3'\n'\\xc3'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\xf0'\n'\\xd0'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\u0131'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\xa6'\n'\\xcc'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\xd4'\n'\\xd2'\n'\\xf5'\n'\\xd5'\n'\\xb5'\n'\\xfe'\n'\\xde'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\xfd'\n'\\xdd'\n'\\xaf'\n'\\xb4'\n'\\xad'\n'\\xb1'\n'\\u2017'\n'\\xbe'\n'\\xb6'\n'\\xa7'\n'\\xf7'\n'\\xb8'\n'\\xb0'\n'\\xa8'\n'\\xb7'\n'\\xb9'\n'\\xb3'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x
0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x00bd,\n0x00a3:0x009c,\n0x00a4:0x00cf,\n0x00a5:0x00be,\n0x00a6:0x00dd,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00a9:0x00b8,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00ae:0x00a9,\n0x00af:0x00ee,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00fc,\n0x00b4:0x00ef,\n0x00b5:0x00e6,\n0x00b6:0x00f4,\n0x00b7:0x00fa,\n0x00b8:0x00f7,\n0x00b9:0x00fb,\n0x00ba:0x00a7,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00f3,\n0x00bf:0x00a8,\n0x00c0:0x00b7,\n0x00c1:0x00b5,\n0x00c2:0x00b6,\n0x00c3:0x00c7,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c8:0x00d4,\n0x00c9:0x0090,\n0x00ca:0x00d2,\n0x00cb:0x00d3,\n0x00cc:0x00de,\n0x00cd:0x00d6,\n0x00ce:0x00d7,\n0x00cf:0x00d8,\n0x00d0:0x00d1,\n0x00d1:0x00a5,\n0x00d2:0x00e3,\n0x00d3:0x00e0,\n0x00d4:0x00e2,\n0x00d5:0x00e5,\n0x00d6:0x0099,\n0x00d7:0x009e,\n0x00d8:0x009d,\n0x00d9:0x00eb,\n0x00da:0x00e9,\n0x00db:0x00ea,\n0x00dc:0x009a,\n0x00dd:0x00ed,\n0x00de:0x00e8,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e3:0x00c6,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ec:0x008d,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f0:0x00d0,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f5:0x00e4,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00fd:0x00ec,\n0x00fe:0x00e7,\n0x00ff:0x0098,\n0x0131:0x00d5,\n0x0192:0x009f,\n0x2017:0x00f2,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.cp864": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp864',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0025:0x066a,\n0x0080:0x00b0,\n0x0081:0x00b7,\n0x0082:0x2219,\n0x0083:0x221a,\n0x0084:0x2592,\n0x0085:0x2500,\n0x0086:0x2502,\n0x0087:0x253c,\n0x0088:0x2524,\n0x0089:0x252c,\n0x008a:0x251c,\n0x008b:0x2534,\n0x008c:0x2510,\n0x008d:0x250c,\n0x008e:0x2514,\n0x008f:0x2518,\n0x0090:0x03b2,\n0x0091:0x221e,\n0x0092:0x03c6,\n0x0093:0x00b1,\n0x0094:0x00bd,\n0x0095:0x00bc,\n0x0096:0x2248,\n0x0097:0x00ab,\n0x0098:0x00bb,\n0x0099:0xfef7,\n0x009a:0xfef8,\n0x009b:None ,\n0x009c:None ,\n0x009d:0xfefb,\n0x009e:0xfefc,\n0x009f:None ,\n0x00a1:0x00ad,\n0x00a2:0xfe82,\n0x00a5:0xfe84,\n0x00a6:None ,\n0x00a7:None ,\n0x00a8:0xfe8e,\n0x00a9:0xfe8f,\n0x00aa:0xfe95,\n0x00ab:0xfe99,\n0x00ac:0x060c,\n0x00ad:0xfe9d,\n0x00ae:0xfea1,\n0x00af:0xfea5,\n0x00b0:0x0660,\n0x00b1:0x0661,\n0x00b2:0x0662,\n0x00b3:0x0663,\n0x00b4:0x0664,\n0x00b5:0x0665,\n0x00b6:0x0666,\n0x00b7:0x0667,\n0x00b8:0x0668,\n0x00b9:0x0669,\n0x00ba:0xfed1,\n0x00bb:0x061b,\n0x00bc:0xfeb1,\n0x00bd:0xfeb5,\n0x00be:0xfeb9,\n0x00bf:0x061f,\n0x00c0:0x00a2,\n0x00c1:0xfe80,\n0x00c2:0xfe81,\n0x00c3:0xfe83,\n0x00c4:0xfe85,\n0x00c5:0xfeca,\n0x00c6:0xfe8b,\n0x00c7:0xfe8d,\n0x00c8:0xfe91,\n0x00c9:0xfe93,\n0x00ca:0xfe97,\n0x00cb:0xfe9b,\n0x00cc:0xfe9f,\n0x00cd:0xfea3,\n0x00ce:0xfea7,\n0x00cf:0xfea9,\n0x00d0:0xfeab,\n0x00d1:0xfead,\n0x00d2:0xfeaf,\n0x00d3:0xfeb3,\n0x00d4:0xfeb7,\n0x00d5:0xfebb,\n0x00d6:0xfebf,\n0x00d7:0xfec1,\n0x00d8:0xfec5,\n0x00d9:0xfecb,\n0x00da:0xfecf,\n0x00db:0x00a6,\n0x00dc:0x00ac,\n0x00dd:0x00f7,\n0x00de:0x00d7,\n0x00df:0xfec9,\n0x00e0:0x0640,\n0x00e1:0xfed3,\n0x00e2:0xfed7,\n0x00e3:0xfedb,\n0x00e4:0xfedf,\n0x00e5:0xfee3,\n0x00e6:0xfee7,\n0x00e7:0xfeeb,\n0x00e8:0xfeed,\n0x00e9:0xfeef,\n0x00ea:0xfef3,\n0x00eb:0xfebd,\n0x00ec:0xfecc,\n0x00ed:0xfece,\n0x00ee:0xfecd,\n0x00ef:0xfee1,\n0x00f0:0xfe7d,\n0x00f1:0x0651,\n0x00f2:0xfee5,\n0x00f3:0xfee9,\n0x00f4:0xfeec,\n0x00f5:0xfef0,\n0x00f6:0xfef2,\n0x00f7:0xfed0,\n0x00f8:0xfed5,\n0x00f9:0xfef5,\n0x00fa:0xfef6,\n0x00fb:0xfedd,\n0x00fc:0xfed9,\n0x00fd:0xfef1,\n0x00fe:0x25a0,\n0x00ff:None ,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'\\u066a'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xb0'\n'\\xb7'\n'\\u2219'\n'\\u221a'\n'\\u2592'\n'\\u2500'\n'\\u2502'\n'\\u253c'\n'\\u2524'\n'\\u252c'\n'\\u251c'\n'\\u2534'\n'\\u2510'\n'\\u250c'\n'\\u2514'\n'\\u2518'\n'\\u03b2'\n'\\u221e'\n'\\u03c6'\n'\\xb1'\n'\\xbd'\n'\\xbc'\n'\\u2248'\n'\\xab'\n'\\xbb'\n'\\ufef7'\n'\\ufef8'\n'\\ufffe'\n'\\ufffe'\n'\\ufefb'\n'\\ufefc'\n'\\ufffe'\n'\\xa0'\n'\\xad'\n'\\ufe82'\n'\\xa3'\n'\\xa4'\n'\\ufe84'\n'\\ufffe'\n'\\ufffe'\n'\\ufe8e'\n'\\ufe8f'\n'\\ufe95'\n'\\ufe99'\n'\\u060c'\n'\\ufe9d'\n'\\ufea1'\n'\\ufea5'\n'\\u0660'\n'\\u0661'\n'\\u0662'\n'\\u0663'\n'\\u0664'\n'\\u0665'\n'\\u0666'\n'\\u0667'\n'\\u0668'\n'\\u0669'\n'\\ufed1'\n'\\u061b'\n'\\ufeb1'\n'\\ufeb5'\n'\\ufeb9'\n'\\u061f'\n'\\xa2'\n'\\ufe80'\n'\\ufe81'\n'\\ufe83'\n'\\ufe85'\n'\\ufeca'\n'\\ufe8b'\n'\\ufe8d'\n'\\ufe91'\n'\\ufe93'\n'\\ufe97'\n'\\ufe9b'\n'\\ufe9f'\n'\\ufea3'\n'\\ufea7'\n'\\ufea9'\n'\\ufeab'\n'\\ufead'\n'\\ufeaf'\n'\\ufeb3'\n'\\ufeb7'\n'\\ufebb'\n'\\ufebf'\n'\\ufec1'\n'\\ufec5'\n'\\ufecb'\n'\\ufecf'\n'\\xa6'\n'\\xac'\n'\\xf7'\n'\\xd7'\n'\\ufec9'\n'\\u0640'\n'\\ufed3'\n'\\ufed7'\n'\\ufedb'\n'\\ufedf'\n'\\ufee3'\n'\\ufee7'\n'\\ufeeb'\n'\\ufeed'\n'\\ufeef'\n'\\ufef3'\n'\\ufebd'\n'\\ufecc'\n'\\ufece'\n'\\ufecd'\n'\\ufee1'\n'\\ufe7d'\n'\\u0651'\n'\\ufee5'\n'\\ufee9'\n'\\ufeec'\n'\\ufef0'\n'\\ufef2'\n'\\ufed0'\n'\\ufed5'\n'\\ufef5'\n'\\ufef6'\n'\\ufedd'\n'\\ufed9'\n'\\ufef1'\n'\\u25a0'\n'\\ufffe'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x006
8,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00a0,\n0x00a2:0x00c0,\n0x00a3:0x00a3,\n0x00a4:0x00a4,\n0x00a6:0x00db,\n0x00ab:0x0097,\n0x00ac:0x00dc,\n0x00ad:0x00a1,\n0x00b0:0x0080,\n0x00b1:0x0093,\n0x00b7:0x0081,\n0x00bb:0x0098,\n0x00bc:0x0095,\n0x00bd:0x0094,\n0x00d7:0x00de,\n0x00f7:0x00dd,\n0x03b2:0x0090,\n0x03c6:0x0092,\n0x060c:0x00ac,\n0x061b:0x00bb,\n0x061f:0x00bf,\n0x0640:0x00e0,\n0x0651:0x00f1,\n0x0660:0x00b0,\n0x0661:0x00b1,\n0x0662:0x00b2,\n0x0663:0x00b3,\n0x0664:0x00b4,\n0x0665:0x00b5,\n0x0666:0x00b6,\n0x0667:0x00b7,\n0x0668:0x00b8,\n0x0669:0x00b9,\n0x066a:0x0025,\n0x2219:0x0082,\n0x221a:0x0083,\n0x221e:0x0091,\n0x2248:0x0096,\n0x2500:0x0085,\n0x2502:0x0086,\n0x250c:0x008d,\n0x2510:0x008c,\n0x2514:0x008e,\n0x2518:0x008f,\n0x251c:0x008a,\n0x2524:0x0088,\n0x252c:0x0089,\n0x2534:0x008b,\n0x253c:0x0087,\n0x2592:0x0084,\n0x25a0:0x00fe,\n0xfe7d:0x00f0,\n0xfe80:0x00c1,\n0xfe81:0x00c2,\n0xfe82:0x00a2,\n0xfe83:0x00c3,\n0xfe84:0x00a5,\n0xfe85:0x00c4,\n0xfe8b:0x00c6,\n0xfe8d:0x00c7,\n0xfe8e:0x00a8,\n0xfe8f:0x00a9,\n0xfe91:0x00c8,\n0xfe93:0x00c9,\n0xfe95:0x00aa,\n0xfe97:0x00ca,\n0xfe99:0x00ab,\n0xfe9b:0x00cb,\n0xfe9d:0x00ad,\n0xfe9f:0x00cc,\n0xfea1:0x00ae,\n0xfea3:0x00cd,\n0xfea5:0x00af,\n0xfea7:0x00ce,\n0xfea9:0x00cf,\n0xfeab:0x00d0,\n0xfead:0x00d1,\n0xfeaf:0x00d2,\n0xfeb1:0x00bc,\n0xfeb3:0x00d3,\n0xfeb5:0x00bd,\n0xfeb7:0x00d4,\n0xfeb9:0x00be,\n0xfebb:0x00d5,\n0xfebd:0x00eb,\n0xfebf:0x00d6,\n0xfec1:0x00d7,\n0xfec5:0x00d8,\n0xfec9:0x00df,\n0xfeca:0x00c5,\n0xfecb:0x00d9,\n0xfecc:0x00ec,\n0xfecd:0x00ee,\n0xfece:0x00ed,\n0xfecf:0x00da,\n0xfed0:0x00f7,\n0xfed1:0x00ba,\n0xfed3:0x00e1,\n0xfed5:0x00f8,\n0xfed7:0x00e2,\n0xfed9:0x00fc,\n0xfedb:0x00e3,\n0xfedd:0x00fb,\n0xfedf:0x00e4,\n0xfee1:0x00ef,\n0xfee3:0x00e5,\n0xfee5:0x00f2,\n0xfee7:0x00e6,\n0xfee9:0x00f3,\n0xfeeb:0x00e7,\n0xfeec:0x00f4,\n0xfeed:0x00e8,\n0xfeef:0x00e9,\n0xfef0:0x00f5,\n0xfef1:0x00fd,\n0xfef2:0x00f6,\n0xfef3:0x00ea,\n0xfef5:0x00f9,\n0xfef6:0x00fa,\n0xfef7:0x0099,\n0xfef8:0x009a,\n0xfefb:0x009d,\n0xfefc:0x009e,\n}\n", ["codecs"]], "encodings.utf_32": [".py", "''\n\n\nimport codecs,sys\n\n\n\nencode=codecs.utf_32_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_32_decode(input,errors,True )\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def __init__(self,errors='strict'):\n codecs.IncrementalEncoder.__init__(self,errors)\n self.encoder=None\n \n def encode(self,input,final=False ):\n if self.encoder is None :\n result=codecs.utf_32_encode(input,self.errors)[0]\n if sys.byteorder =='little':\n self.encoder=codecs.utf_32_le_encode\n else :\n self.encoder=codecs.utf_32_be_encode\n return result\n return self.encoder(input,self.errors)[0]\n \n def reset(self):\n codecs.IncrementalEncoder.reset(self)\n self.encoder=None\n \n def getstate(self):\n \n \n \n \n return (2 if self.encoder is None else 0)\n \n def setstate(self,state):\n if state:\n self.encoder=None\n else :\n if sys.byteorder =='little':\n self.encoder=codecs.utf_32_le_encode\n else :\n self.encoder=codecs.utf_32_be_encode\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n def __init__(self,errors='strict'):\n codecs.BufferedIncrementalDecoder.__init__(self,errors)\n self.decoder=None\n \n def 
_buffer_decode(self,input,errors,final):\n if self.decoder is None :\n (output,consumed,byteorder)=\\\n codecs.utf_32_ex_decode(input,errors,0,final)\n if byteorder ==-1:\n self.decoder=codecs.utf_32_le_decode\n elif byteorder ==1:\n self.decoder=codecs.utf_32_be_decode\n elif consumed >=4:\n raise UnicodeError(\"UTF-32 stream does not start with BOM\")\n return (output,consumed)\n return self.decoder(input,self.errors,final)\n \n def reset(self):\n codecs.BufferedIncrementalDecoder.reset(self)\n self.decoder=None\n \n def getstate(self):\n \n \n state=codecs.BufferedIncrementalDecoder.getstate(self)[0]\n \n \n \n \n if self.decoder is None :\n return (state,2)\n addstate=int((sys.byteorder ==\"big\")!=\n (self.decoder is codecs.utf_32_be_decode))\n return (state,addstate)\n \n def setstate(self,state):\n \n codecs.BufferedIncrementalDecoder.setstate(self,state)\n state=state[1]\n if state ==0:\n self.decoder=(codecs.utf_32_be_decode\n if sys.byteorder ==\"big\"\n else codecs.utf_32_le_decode)\n elif state ==1:\n self.decoder=(codecs.utf_32_le_decode\n if sys.byteorder ==\"big\"\n else codecs.utf_32_be_decode)\n else :\n self.decoder=None\n \nclass StreamWriter(codecs.StreamWriter):\n def __init__(self,stream,errors='strict'):\n self.encoder=None\n codecs.StreamWriter.__init__(self,stream,errors)\n \n def reset(self):\n codecs.StreamWriter.reset(self)\n self.encoder=None\n \n def encode(self,input,errors='strict'):\n if self.encoder is None :\n result=codecs.utf_32_encode(input,errors)\n if sys.byteorder =='little':\n self.encoder=codecs.utf_32_le_encode\n else :\n self.encoder=codecs.utf_32_be_encode\n return result\n else :\n return self.encoder(input,errors)\n \nclass StreamReader(codecs.StreamReader):\n\n def reset(self):\n codecs.StreamReader.reset(self)\n try :\n del self.decode\n except AttributeError:\n pass\n \n def decode(self,input,errors='strict'):\n (object,consumed,byteorder)=\\\n codecs.utf_32_ex_decode(input,errors,0,False )\n if byteorder ==-1:\n self.decode=codecs.utf_32_le_decode\n elif byteorder ==1:\n self.decode=codecs.utf_32_be_decode\n elif consumed >=4:\n raise UnicodeError(\"UTF-32 stream does not start with BOM\")\n return (object,consumed)\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-32',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs", "sys"]], "encodings.koi8_u": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='koi8-u',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u2500'\n'\\u2502'\n'\\u250c'\n'\\u2510'\n'\\u2514'\n'\\u2518'\n'\\u251c'\n'\\u2524'\n'\\u252c'\n'\\u2534'\n'\\u253c'\n'\\u2580'\n'\\u2584'\n'\\u2588'\n'\\u258c'\n'\\u2590'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2320'\n'\\u25a0'\n'\\u2219'\n'\\u221a'\n'\\u2248'\n'\\u2264'\n'\\u2265'\n'\\xa0'\n'\\u2321'\n'\\xb0'\n'\\xb2'\n'\\xb7'\n'\\xf7'\n'\\u2550'\n'\\u2551'\n'\\u2552'\n'\\u0451'\n'\\u0454'\n'\\u2554'\n'\\u0456'\n'\\u0457'\n'\\u2557'\n'\\u2558'\n'\\u2559'\n'\\u255a'\n'\\u255b'\n'\\u0491'\n'\\u255d'\n'\\u255e'\n'\\u255f'\n'\\u2560'\n'\\u2561'\n'\\u0401'\n'\\u0404'\n'\\u2563'\n'\\u0406'\n'\\u0407'\n'\\u2566'\n'\\u2567'\n'\\u2568'\n'\\u2569'\n'\\u256a'\n'\\u0490'\n'\\u256c'\n'\\xa9'\n'\\u044e'\n'\\u0430'\n'\\u0431'\n'\\u0446'\n'\\u0434'\n'\\u0435'\n'\\u0444'\n'\\u0433'\n'\\u0445'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u044f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0436'\n'\\u0432'\n'\\u044c'\n'\\u044b'\n'\\u0437'\n'\\u0448'\n'\\u044d'\n'\\u0449'\n'\\u0447'\n'\\u044a'\n'\\u042e'\n'\\u0410'\n'\\u0411'\n'\\u0426'\n'\\u0414'\n'\\u0415'\n'\\u0424'\n'\\u0413'\n'\\u0425'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u042f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0416'\n'\\u0412'\n'\\u042c'\n'\\u042b'\n'\\u0417'\n'\\u0428'\n'\\u042d'\n'\\u0429'\n'\\u0427'\n'\\u042a'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp1254": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1254',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\u0160'\n'\\u2039'\n'\\u0152'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\u0161'\n'\\u203a'\n'\\u0153'\n'\\ufffe'\n'\\ufffe'\n'\\u0178'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u011e'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u0130'\n'\\u015e'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\u011f'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u0131'\n'\\u015f'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso2022_jp_2": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_jp_2')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_jp_2',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "encodings.utf_16": [".py", "''\n\n\n\n\n\n\n\nimport codecs,sys\n\n\n\nencode=codecs.utf_16_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_16_decode(input,errors,True )\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def __init__(self,errors='strict'):\n codecs.IncrementalEncoder.__init__(self,errors)\n self.encoder=None\n \n def encode(self,input,final=False ):\n if self.encoder is None :\n result=codecs.utf_16_encode(input,self.errors)[0]\n if sys.byteorder =='little':\n 
self.encoder=codecs.utf_16_le_encode\n else :\n self.encoder=codecs.utf_16_be_encode\n return result\n return self.encoder(input,self.errors)[0]\n \n def reset(self):\n codecs.IncrementalEncoder.reset(self)\n self.encoder=None\n \n def getstate(self):\n \n \n \n \n return (2 if self.encoder is None else 0)\n \n def setstate(self,state):\n if state:\n self.encoder=None\n else :\n if sys.byteorder =='little':\n self.encoder=codecs.utf_16_le_encode\n else :\n self.encoder=codecs.utf_16_be_encode\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n def __init__(self,errors='strict'):\n codecs.BufferedIncrementalDecoder.__init__(self,errors)\n self.decoder=None\n \n def _buffer_decode(self,input,errors,final):\n if self.decoder is None :\n (output,consumed,byteorder)=\\\n codecs.utf_16_ex_decode(input,errors,0,final)\n if byteorder ==-1:\n self.decoder=codecs.utf_16_le_decode\n elif byteorder ==1:\n self.decoder=codecs.utf_16_be_decode\n elif consumed >=2:\n raise UnicodeError(\"UTF-16 stream does not start with BOM\")\n return (output,consumed)\n return self.decoder(input,self.errors,final)\n \n def reset(self):\n codecs.BufferedIncrementalDecoder.reset(self)\n self.decoder=None\n \n def getstate(self):\n \n \n state=codecs.BufferedIncrementalDecoder.getstate(self)[0]\n \n \n \n \n if self.decoder is None :\n return (state,2)\n addstate=int((sys.byteorder ==\"big\")!=\n (self.decoder is codecs.utf_16_be_decode))\n return (state,addstate)\n \n def setstate(self,state):\n \n codecs.BufferedIncrementalDecoder.setstate(self,state)\n state=state[1]\n if state ==0:\n self.decoder=(codecs.utf_16_be_decode\n if sys.byteorder ==\"big\"\n else codecs.utf_16_le_decode)\n elif state ==1:\n self.decoder=(codecs.utf_16_le_decode\n if sys.byteorder ==\"big\"\n else codecs.utf_16_be_decode)\n else :\n self.decoder=None\n \nclass StreamWriter(codecs.StreamWriter):\n def __init__(self,stream,errors='strict'):\n codecs.StreamWriter.__init__(self,stream,errors)\n self.encoder=None\n \n def reset(self):\n codecs.StreamWriter.reset(self)\n self.encoder=None\n \n def encode(self,input,errors='strict'):\n if self.encoder is None :\n result=codecs.utf_16_encode(input,errors)\n if sys.byteorder =='little':\n self.encoder=codecs.utf_16_le_encode\n else :\n self.encoder=codecs.utf_16_be_encode\n return result\n else :\n return self.encoder(input,errors)\n \nclass StreamReader(codecs.StreamReader):\n\n def reset(self):\n codecs.StreamReader.reset(self)\n try :\n del self.decode\n except AttributeError:\n pass\n \n def decode(self,input,errors='strict'):\n (object,consumed,byteorder)=\\\n codecs.utf_16_ex_decode(input,errors,0,False )\n if byteorder ==-1:\n self.decode=codecs.utf_16_le_decode\n elif byteorder ==1:\n self.decode=codecs.utf_16_be_decode\n elif consumed >=2:\n raise UnicodeError(\"UTF-16 stream does not start with BOM\")\n return (object,consumed)\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-16',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs", "sys"]], "encodings.iso8859_4": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def 
encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-4',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0104'\n'\\u0138'\n'\\u0156'\n'\\xa4'\n'\\u0128'\n'\\u013b'\n'\\xa7'\n'\\xa8'\n'\\u0160'\n'\\u0112'\n'\\u0122'\n'\\u0166'\n'\\xad'\n'\\u017d'\n'\\xaf'\n'\\xb0'\n'\\u0105'\n'\\u02db'\n'\\u0157'\n'\\xb4'\n'\\u0129'\n'\\u013c'\n'\\u02c7'\n'\\xb8'\n'\\u0161'\n'\\u0113'\n'\\u0123'\n'\\u0167'\n'\\u014a'\n'\\u017e'\n'\\u014b'\n'\\u0100'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\u012e'\n'\\u010c'\n'\\xc9'\n'\\u0118'\n'\\xcb'\n'\\u0116'\n'\\xcd'\n'\\xce'\n'\\u012a'\n'\\u0110'\n'\\u0145'\n'\\u014c'\n'\\u0136'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\u0172'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u0168'\n'\\u016a'\n'\\xdf'\n'\\u0101'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\u012f'\n'\\u010d'\n'\\xe9'\n'\\u0119'\n'\\xeb'\n'\\u0117'\n'\\xed'\n'\\xee'\n'\\u012b'\n'\\u0111'\n'\\u0146'\n'\\u014d'\n'\\u0137'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\u0173'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u0169'\n'\\u016b'\n'\\u02d9'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.euc_jis_2004": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('euc_jis_2004')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='euc_jis_2004',\n encode=Codec().encode,\n decode=Codec().decode,\n 
incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.mbcs": [".py", "''\n\n\n\n\n\n\n\n\n\n\nfrom codecs import mbcs_encode,mbcs_decode\n\nimport codecs\n\n\n\nencode=mbcs_encode\n\ndef decode(input,errors='strict'):\n return mbcs_decode(input,errors,True )\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return mbcs_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=mbcs_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=mbcs_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=mbcs_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mbcs',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.cp1250": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1250',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\ufffe'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\ufffe'\n'\\u2030'\n'\\u0160'\n'\\u2039'\n'\\u015a'\n'\\u0164'\n'\\u017d'\n'\\u0179'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\u0161'\n'\\u203a'\n'\\u015b'\n'\\u0165'\n'\\u017e'\n'\\u017a'\n'\\xa0'\n'\\u02c7'\n'\\u02d8'\n'\\u0141'\n'\\xa4'\n'\\u0104'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\u015e'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\u017b'\n'\\xb0'\n'\\xb1'\n'\\u02db'\n'\\u0142'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\u0105'\n'\\u015f'\n'\\xbb'\n'\\u013d'\n'\\u02dd'\n'\\u013e'\n'\\u017c'\n'\\u0154'\n'\\xc1'\n'\\xc2'\n'\\u0102'\n'\\xc4'\n'\\u0139'\n'\\u0106'\n'\\xc7'\n'\\u010c'\n'\\xc9'\n'\\u0118'\n'\\xcb'\n'\\u011a'\n'\\xcd'\n'\\xce'\n'\\u010e'\n'\\u0110'\n'\\u0143'\n'\\u0147'\n'\\xd3'\n'\\xd4'\n'\\u0150'\n'\\xd6'\n'\\xd7'\n'\\u0158'\n'\\u016e'\n'\\xda'\n'\\u0170'\n'\\xdc'\n'\\xdd'\n'\\u0162'\n'\\xdf'\n'\\u0155'\n'\\xe1'\n'\\xe2'\n'\\u0103'\n'\\xe4'\n'\\u013a'\n'\\u0107'\n'\\xe7'\n'\\u010d'\n'\\xe9'\n'\\u0119'\n'\\xeb'\n'\\u011b'\n'\\xed'\n'\\xee'\n'\\u010f'\n'\\u0111'\n'\\u0144'\n'\\u0148'\n'\\xf3'\n'\\xf4'\n'\\u0151'\n'\\xf6'\n'\\xf7'\n'\\u0159'\n'\\u016f'\n'\\xfa'\n'\\u0171'\n'\\xfc'\n'\\xfd'\n'\\u0163'\n'\\u02d9'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.gb2312": [".py", "\n\n\n\n\n\nimport _codecs_cn,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_cn.getcodec('gb2312')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='gb2312',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_cn", "_multibytecodec", "codecs"]], "encodings.iso8859_16": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef 
getregentry():\n return codecs.CodecInfo(\n name='iso8859-16',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0104'\n'\\u0105'\n'\\u0141'\n'\\u20ac'\n'\\u201e'\n'\\u0160'\n'\\xa7'\n'\\u0161'\n'\\xa9'\n'\\u0218'\n'\\xab'\n'\\u0179'\n'\\xad'\n'\\u017a'\n'\\u017b'\n'\\xb0'\n'\\xb1'\n'\\u010c'\n'\\u0142'\n'\\u017d'\n'\\u201d'\n'\\xb6'\n'\\xb7'\n'\\u017e'\n'\\u010d'\n'\\u0219'\n'\\xbb'\n'\\u0152'\n'\\u0153'\n'\\u0178'\n'\\u017c'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\u0102'\n'\\xc4'\n'\\u0106'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u0110'\n'\\u0143'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\u0150'\n'\\xd6'\n'\\u015a'\n'\\u0170'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u0118'\n'\\u021a'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\u0103'\n'\\xe4'\n'\\u0107'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\u0111'\n'\\u0144'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\u0151'\n'\\xf6'\n'\\u015b'\n'\\u0171'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u0119'\n'\\u021b'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.mac_cyrillic": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-cyrillic',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u2020'\n'\\xb0'\n'\\u0490'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\u0406'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\u0402'\n'\\u0452'\n'\\u2260'\n'\\u0403'\n'\\u0453'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\u0456'\n'\\xb5'\n'\\u0491'\n'\\u0408'\n'\\u0404'\n'\\u0454'\n'\\u0407'\n'\\u0457'\n'\\u0409'\n'\\u0459'\n'\\u040a'\n'\\u045a'\n'\\u0458'\n'\\u0405'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\u040b'\n'\\u045b'\n'\\u040c'\n'\\u045c'\n'\\u0455'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u201e'\n'\\u040e'\n'\\u045e'\n'\\u040f'\n'\\u045f'\n'\\u2116'\n'\\u0401'\n'\\u0451'\n'\\u044f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u20ac'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.hex_codec": [".py", "''\n\n\n\n\n\n\nimport codecs\nimport binascii\n\n\n\ndef hex_encode(input,errors='strict'):\n assert errors =='strict'\n return (binascii.b2a_hex(input),len(input))\n \ndef hex_decode(input,errors='strict'):\n assert errors =='strict'\n return (binascii.a2b_hex(input),len(input))\n \nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return hex_encode(input,errors)\n def decode(self,input,errors='strict'):\n return hex_decode(input,errors)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n assert self.errors =='strict'\n return binascii.b2a_hex(input)\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n assert self.errors =='strict'\n return binascii.a2b_hex(input)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n charbuffertype=bytes\n \nclass StreamReader(Codec,codecs.StreamReader):\n charbuffertype=bytes\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='hex',\n encode=hex_encode,\n decode=hex_decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n _is_text_encoding=False ,\n )\n", ["binascii", "codecs"]], "encodings.tis_620": 
[".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='tis-620',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\ufffe'\n'\\u0e01'\n'\\u0e02'\n'\\u0e03'\n'\\u0e04'\n'\\u0e05'\n'\\u0e06'\n'\\u0e07'\n'\\u0e08'\n'\\u0e09'\n'\\u0e0a'\n'\\u0e0b'\n'\\u0e0c'\n'\\u0e0d'\n'\\u0e0e'\n'\\u0e0f'\n'\\u0e10'\n'\\u0e11'\n'\\u0e12'\n'\\u0e13'\n'\\u0e14'\n'\\u0e15'\n'\\u0e16'\n'\\u0e17'\n'\\u0e18'\n'\\u0e19'\n'\\u0e1a'\n'\\u0e1b'\n'\\u0e1c'\n'\\u0e1d'\n'\\u0e1e'\n'\\u0e1f'\n'\\u0e20'\n'\\u0e21'\n'\\u0e22'\n'\\u0e23'\n'\\u0e24'\n'\\u0e25'\n'\\u0e26'\n'\\u0e27'\n'\\u0e28'\n'\\u0e29'\n'\\u0e2a'\n'\\u0e2b'\n'\\u0e2c'\n'\\u0e2d'\n'\\u0e2e'\n'\\u0e2f'\n'\\u0e30'\n'\\u0e31'\n'\\u0e32'\n'\\u0e33'\n'\\u0e34'\n'\\u0e35'\n'\\u0e36'\n'\\u0e37'\n'\\u0e38'\n'\\u0e39'\n'\\u0e3a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0e3f'\n'\\u0e40'\n'\\u0e41'\n'\\u0e42'\n'\\u0e43'\n'\\u0e44'\n'\\u0e45'\n'\\u0e46'\n'\\u0e47'\n'\\u0e48'\n'\\u0e49'\n'\\u0e4a'\n'\\u0e4b'\n'\\u0e4c'\n'\\u0e4d'\n'\\u0e4e'\n'\\u0e4f'\n'\\u0e50'\n'\\u0e51'\n'\\u0e52'\n'\\u0e53'\n'\\u0e54'\n'\\u0e55'\n'\\u0e56'\n'\\u0e57'\n'\\u0e58'\n'\\u0e59'\n'\\u0e5a'\n'\\u0e5b'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp037": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass 
IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp037',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\xa0'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe1'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xf1'\n'\\xa2'\n'.'\n'<'\n'('\n'+'\n'|'\n'&'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xec'\n'\\xdf'\n'!'\n'$'\n'*'\n')'\n';'\n'\\xac'\n'-'\n'/'\n'\\xc2'\n'\\xc4'\n'\\xc0'\n'\\xc1'\n'\\xc3'\n'\\xc5'\n'\\xc7'\n'\\xd1'\n'\\xa6'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\xf0'\n'\\xfd'\n'\\xfe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\xa4'\n'\\xb5'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n'\\xd0'\n'\\xdd'\n'\\xde'\n'\\xae'\n'^'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'['\n']'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'\\xfc'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\\\'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'\\xd6'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n'\\xdc'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp1006": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1006',\n 
encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u06f0'\n'\\u06f1'\n'\\u06f2'\n'\\u06f3'\n'\\u06f4'\n'\\u06f5'\n'\\u06f6'\n'\\u06f7'\n'\\u06f8'\n'\\u06f9'\n'\\u060c'\n'\\u061b'\n'\\xad'\n'\\u061f'\n'\\ufe81'\n'\\ufe8d'\n'\\ufe8e'\n'\\ufe8e'\n'\\ufe8f'\n'\\ufe91'\n'\\ufb56'\n'\\ufb58'\n'\\ufe93'\n'\\ufe95'\n'\\ufe97'\n'\\ufb66'\n'\\ufb68'\n'\\ufe99'\n'\\ufe9b'\n'\\ufe9d'\n'\\ufe9f'\n'\\ufb7a'\n'\\ufb7c'\n'\\ufea1'\n'\\ufea3'\n'\\ufea5'\n'\\ufea7'\n'\\ufea9'\n'\\ufb84'\n'\\ufeab'\n'\\ufead'\n'\\ufb8c'\n'\\ufeaf'\n'\\ufb8a'\n'\\ufeb1'\n'\\ufeb3'\n'\\ufeb5'\n'\\ufeb7'\n'\\ufeb9'\n'\\ufebb'\n'\\ufebd'\n'\\ufebf'\n'\\ufec1'\n'\\ufec5'\n'\\ufec9'\n'\\ufeca'\n'\\ufecb'\n'\\ufecc'\n'\\ufecd'\n'\\ufece'\n'\\ufecf'\n'\\ufed0'\n'\\ufed1'\n'\\ufed3'\n'\\ufed5'\n'\\ufed7'\n'\\ufed9'\n'\\ufedb'\n'\\ufb92'\n'\\ufb94'\n'\\ufedd'\n'\\ufedf'\n'\\ufee0'\n'\\ufee1'\n'\\ufee3'\n'\\ufb9e'\n'\\ufee5'\n'\\ufee7'\n'\\ufe85'\n'\\ufeed'\n'\\ufba6'\n'\\ufba8'\n'\\ufba9'\n'\\ufbaa'\n'\\ufe80'\n'\\ufe89'\n'\\ufe8a'\n'\\ufe8b'\n'\\ufef1'\n'\\ufef2'\n'\\ufef3'\n'\\ufbb0'\n'\\ufbae'\n'\\ufe7c'\n'\\ufe7d'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp1251": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1251',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0402'\n'\\u0403'\n'\\u201a'\n'\\u0453'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u20ac'\n'\\u2030'\n'\\u0409'\n'\\u2039'\n'\\u040a'\n'\\u040c'\n'\\u040b'\n'\\u040f'\n'\\u0452'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\u0459'\n'\\u203a'\n'\\u045a'\n'\\u045c'\n'\\u045b'\n'\\u045f'\n'\\xa0'\n'\\u040e'\n'\\u045e'\n'\\u0408'\n'\\xa4'\n'\\u0490'\n'\\xa6'\n'\\xa7'\n'\\u0401'\n'\\xa9'\n'\\u0404'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\u0407'\n'\\xb0'\n'\\xb1'\n'\\u0406'\n'\\u0456'\n'\\u0491'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\u0451'\n'\\u2116'\n'\\u0454'\n'\\xbb'\n'\\u0458'\n'\\u0405'\n'\\u0455'\n'\\u0457'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.mac_turkish": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-turkish',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\xc6'\n'\\xd8'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u03c0'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\xe6'\n'\\xf8'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\xff'\n'\\u0178'\n'\\u011e'\n'\\u011f'\n'\\u0130'\n'\\u0131'\n'\\u015e'\n'\\u015f'\n'\\u2021'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\xca'\n'\\xc1'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\uf8ff'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\uf8a0'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u02d8'\n'\\u02d9'\n'\\u02da'\n'\\xb8'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso2022_jp_ext": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_jp_ext')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_jp_ext',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "encodings.iso8859_1": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass 
IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-1',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xd0'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\xde'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xf0'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\xfe'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.hz": [".py", "\n\n\n\n\n\nimport _codecs_cn,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_cn.getcodec('hz')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='hz',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_cn", "_multibytecodec", "codecs"]], "encodings.bz2_codec": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\nimport 
bz2\n\n\n\ndef bz2_encode(input,errors='strict'):\n assert errors =='strict'\n return (bz2.compress(input),len(input))\n \ndef bz2_decode(input,errors='strict'):\n assert errors =='strict'\n return (bz2.decompress(input),len(input))\n \nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return bz2_encode(input,errors)\n def decode(self,input,errors='strict'):\n return bz2_decode(input,errors)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def __init__(self,errors='strict'):\n assert errors =='strict'\n self.errors=errors\n self.compressobj=bz2.BZ2Compressor()\n \n def encode(self,input,final=False ):\n if final:\n c=self.compressobj.compress(input)\n return c+self.compressobj.flush()\n else :\n return self.compressobj.compress(input)\n \n def reset(self):\n self.compressobj=bz2.BZ2Compressor()\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def __init__(self,errors='strict'):\n assert errors =='strict'\n self.errors=errors\n self.decompressobj=bz2.BZ2Decompressor()\n \n def decode(self,input,final=False ):\n try :\n return self.decompressobj.decompress(input)\n except EOFError:\n return ''\n \n def reset(self):\n self.decompressobj=bz2.BZ2Decompressor()\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n charbuffertype=bytes\n \nclass StreamReader(Codec,codecs.StreamReader):\n charbuffertype=bytes\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name=\"bz2\",\n encode=bz2_encode,\n decode=bz2_decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n _is_text_encoding=False ,\n )\n", ["bz2", "codecs"]], "encodings.quopri_codec": [".py", "''\n\n\n\n\nimport codecs\nimport quopri\nfrom io import BytesIO\n\ndef quopri_encode(input,errors='strict'):\n assert errors =='strict'\n f=BytesIO(input)\n g=BytesIO()\n quopri.encode(f,g,quotetabs=True )\n return (g.getvalue(),len(input))\n \ndef quopri_decode(input,errors='strict'):\n assert errors =='strict'\n f=BytesIO(input)\n g=BytesIO()\n quopri.decode(f,g)\n return (g.getvalue(),len(input))\n \nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return quopri_encode(input,errors)\n def decode(self,input,errors='strict'):\n return quopri_decode(input,errors)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return quopri_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return quopri_decode(input,self.errors)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n charbuffertype=bytes\n \nclass StreamReader(Codec,codecs.StreamReader):\n charbuffertype=bytes\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='quopri',\n encode=quopri_encode,\n decode=quopri_decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n _is_text_encoding=False ,\n )\n", ["codecs", "io", "quopri"]], "encodings.kz1048": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass 
IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='kz1048',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0402'\n'\\u0403'\n'\\u201a'\n'\\u0453'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u20ac'\n'\\u2030'\n'\\u0409'\n'\\u2039'\n'\\u040a'\n'\\u049a'\n'\\u04ba'\n'\\u040f'\n'\\u0452'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\u0459'\n'\\u203a'\n'\\u045a'\n'\\u049b'\n'\\u04bb'\n'\\u045f'\n'\\xa0'\n'\\u04b0'\n'\\u04b1'\n'\\u04d8'\n'\\xa4'\n'\\u04e8'\n'\\xa6'\n'\\xa7'\n'\\u0401'\n'\\xa9'\n'\\u0492'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\u04ae'\n'\\xb0'\n'\\xb1'\n'\\u0406'\n'\\u0456'\n'\\u04e9'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\u0451'\n'\\u2116'\n'\\u0493'\n'\\xbb'\n'\\u04d9'\n'\\u04a2'\n'\\u04a3'\n'\\u04af'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.utf_8_sig": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\ndef encode(input,errors='strict'):\n return (codecs.BOM_UTF8+codecs.utf_8_encode(input,errors)[0],\n len(input))\n \ndef decode(input,errors='strict'):\n prefix=0\n if input[:3]==codecs.BOM_UTF8:\n input=input[3:]\n prefix=3\n (output,consumed)=codecs.utf_8_decode(input,errors,True )\n return (output,consumed+prefix)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def __init__(self,errors='strict'):\n codecs.IncrementalEncoder.__init__(self,errors)\n self.first=1\n \n def encode(self,input,final=False ):\n if self.first:\n self.first=0\n return codecs.BOM_UTF8+\\\n codecs.utf_8_encode(input,self.errors)[0]\n else :\n return codecs.utf_8_encode(input,self.errors)[0]\n 
\n def reset(self):\n codecs.IncrementalEncoder.reset(self)\n self.first=1\n \n def getstate(self):\n return self.first\n \n def setstate(self,state):\n self.first=state\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n def __init__(self,errors='strict'):\n codecs.BufferedIncrementalDecoder.__init__(self,errors)\n self.first=1\n \n def _buffer_decode(self,input,errors,final):\n if self.first:\n if len(input)<3:\n if codecs.BOM_UTF8.startswith(input):\n \n \n return (\"\",0)\n else :\n self.first=0\n else :\n self.first=0\n if input[:3]==codecs.BOM_UTF8:\n (output,consumed)=\\\n codecs.utf_8_decode(input[3:],errors,final)\n return (output,consumed+3)\n return codecs.utf_8_decode(input,errors,final)\n \n def reset(self):\n codecs.BufferedIncrementalDecoder.reset(self)\n self.first=1\n \n def getstate(self):\n state=codecs.BufferedIncrementalDecoder.getstate(self)\n \n return (state[0],self.first)\n \n def setstate(self,state):\n \n codecs.BufferedIncrementalDecoder.setstate(self,state)\n self.first=state[1]\n \nclass StreamWriter(codecs.StreamWriter):\n def reset(self):\n codecs.StreamWriter.reset(self)\n try :\n del self.encode\n except AttributeError:\n pass\n \n def encode(self,input,errors='strict'):\n self.encode=codecs.utf_8_encode\n return encode(input,errors)\n \nclass StreamReader(codecs.StreamReader):\n def reset(self):\n codecs.StreamReader.reset(self)\n try :\n del self.decode\n except AttributeError:\n pass\n \n def decode(self,input,errors='strict'):\n if len(input)<3:\n if codecs.BOM_UTF8.startswith(input):\n \n \n return (\"\",0)\n elif input[:3]==codecs.BOM_UTF8:\n self.decode=codecs.utf_8_decode\n (output,consumed)=codecs.utf_8_decode(input[3:],errors)\n return (output,consumed+3)\n \n self.decode=codecs.utf_8_decode\n return codecs.utf_8_decode(input,errors)\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-8-sig',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.koi8_t": [".py", "''\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='koi8-t',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u049b'\n'\\u0493'\n'\\u201a'\n'\\u0492'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\ufffe'\n'\\u2030'\n'\\u04b3'\n'\\u2039'\n'\\u04b2'\n'\\u04b7'\n'\\u04b6'\n'\\ufffe'\n'\\u049a'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\ufffe'\n'\\u203a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u04ef'\n'\\u04ee'\n'\\u0451'\n'\\xa4'\n'\\u04e3'\n'\\xa6'\n'\\xa7'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\ufffe'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\u0401'\n'\\ufffe'\n'\\u04e2'\n'\\xb6'\n'\\xb7'\n'\\ufffe'\n'\\u2116'\n'\\ufffe'\n'\\xbb'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa9'\n'\\u044e'\n'\\u0430'\n'\\u0431'\n'\\u0446'\n'\\u0434'\n'\\u0435'\n'\\u0444'\n'\\u0433'\n'\\u0445'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u044f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0436'\n'\\u0432'\n'\\u044c'\n'\\u044b'\n'\\u0437'\n'\\u0448'\n'\\u044d'\n'\\u0449'\n'\\u0447'\n'\\u044a'\n'\\u042e'\n'\\u0410'\n'\\u0411'\n'\\u0426'\n'\\u0414'\n'\\u0415'\n'\\u0424'\n'\\u0413'\n'\\u0425'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u042f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0416'\n'\\u0412'\n'\\u042c'\n'\\u042b'\n'\\u0417'\n'\\u0428'\n'\\u042d'\n'\\u0429'\n'\\u0427'\n'\\u042a'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp1255": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1255',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\ufffe'\n'\\u2039'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\ufffe'\n'\\u203a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\u20aa'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xd7'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xf7'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\u05b0'\n'\\u05b1'\n'\\u05b2'\n'\\u05b3'\n'\\u05b4'\n'\\u05b5'\n'\\u05b6'\n'\\u05b7'\n'\\u05b8'\n'\\u05b9'\n'\\ufffe'\n'\\u05bb'\n'\\u05bc'\n'\\u05bd'\n'\\u05be'\n'\\u05bf'\n'\\u05c0'\n'\\u05c1'\n'\\u05c2'\n'\\u05c3'\n'\\u05f0'\n'\\u05f1'\n'\\u05f2'\n'\\u05f3'\n'\\u05f4'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u05d0'\n'\\u05d1'\n'\\u05d2'\n'\\u05d3'\n'\\u05d4'\n'\\u05d5'\n'\\u05d6'\n'\\u05d7'\n'\\u05d8'\n'\\u05d9'\n'\\u05da'\n'\\u05db'\n'\\u05dc'\n'\\u05dd'\n'\\u05de'\n'\\u05df'\n'\\u05e0'\n'\\u05e1'\n'\\u05e2'\n'\\u05e3'\n'\\u05e4'\n'\\u05e5'\n'\\u05e6'\n'\\u05e7'\n'\\u05e8'\n'\\u05e9'\n'\\u05ea'\n'\\ufffe'\n'\\ufffe'\n'\\u200e'\n'\\u200f'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso2022_jp_3": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_jp_3')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_jp_3',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "encodings.shift_jis_2004": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('shift_jis_2004')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='shift_jis_2004',\n 
encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.cp1026": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1026',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\xa0'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe1'\n'\\xe3'\n'\\xe5'\n'{'\n'\\xf1'\n'\\xc7'\n'.'\n'<'\n'('\n'+'\n'!'\n'&'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xec'\n'\\xdf'\n'\\u011e'\n'\\u0130'\n'*'\n')'\n';'\n'^'\n'-'\n'/'\n'\\xc2'\n'\\xc4'\n'\\xc0'\n'\\xc1'\n'\\xc3'\n'\\xc5'\n'['\n'\\xd1'\n'\\u015f'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\u0131'\n':'\n'\\xd6'\n'\\u015e'\n\"'\"\n'='\n'\\xdc'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'}'\n'`'\n'\\xa6'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\xa4'\n'\\xb5'\n'\\xf6'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n']'\n'$'\n'@'\n'\\xae'\n'\\xa2'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xac'\n'|'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'\\xe7'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'~'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'\\u011f'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'\\\\'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\xfc'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'#'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n'\"'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.charmap": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n\n\n encode=codecs.charmap_encode\n decode=codecs.charmap_decode\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def 
__init__(self,errors='strict',mapping=None ):\n codecs.IncrementalEncoder.__init__(self,errors)\n self.mapping=mapping\n \n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,self.mapping)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def __init__(self,errors='strict',mapping=None ):\n codecs.IncrementalDecoder.__init__(self,errors)\n self.mapping=mapping\n \n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,self.mapping)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n\n def __init__(self,stream,errors='strict',mapping=None ):\n codecs.StreamWriter.__init__(self,stream,errors)\n self.mapping=mapping\n \n def encode(self,input,errors='strict'):\n return Codec.encode(input,errors,self.mapping)\n \nclass StreamReader(Codec,codecs.StreamReader):\n\n def __init__(self,stream,errors='strict',mapping=None ):\n codecs.StreamReader.__init__(self,stream,errors)\n self.mapping=mapping\n \n def decode(self,input,errors='strict'):\n return Codec.decode(input,errors,self.mapping)\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='charmap',\n encode=Codec.encode,\n decode=Codec.decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs"]], "encodings.iso8859_5": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-5',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0401'\n'\\u0402'\n'\\u0403'\n'\\u0404'\n'\\u0405'\n'\\u0406'\n'\\u0407'\n'\\u0408'\n'\\u0409'\n'\\u040a'\n'\\u040b'\n'\\u040c'\n'\\xad'\n'\\u040e'\n'\\u040f'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n'\\u2116'\n'\\u0451'\n'\\u0452'\n'\\u0453'\n'\\u0454'\n'\\u0455'\n'\\u0456'\n'\\u0457'\n'\\u0458'\n'\\u0459'\n'\\u045a'\n'\\u045b'\n'\\u045c'\n'\\xa7'\n'\\u045e'\n'\\u045f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_13": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-13',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u201d'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\u201e'\n'\\xa6'\n'\\xa7'\n'\\xd8'\n'\\xa9'\n'\\u0156'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xc6'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\u201c'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xf8'\n'\\xb9'\n'\\u0157'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xe6'\n'\\u0104'\n'\\u012e'\n'\\u0100'\n'\\u0106'\n'\\xc4'\n'\\xc5'\n'\\u0118'\n'\\u0112'\n'\\u010c'\n'\\xc9'\n'\\u0179'\n'\\u0116'\n'\\u0122'\n'\\u0136'\n'\\u012a'\n'\\u013b'\n'\\u0160'\n'\\u0143'\n'\\u0145'\n'\\xd3'\n'\\u014c'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\u0172'\n'\\u0141'\n'\\u015a'\n'\\u016a'\n'\\xdc'\n'\\u017b'\n'\\u017d'\n'\\xdf'\n'\\u0105'\n'\\u012f'\n'\\u0101'\n'\\u0107'\n'\\xe4'\n'\\xe5'\n'\\u0119'\n'\\u0113'\n'\\u010d'\n'\\xe9'\n'\\u017a'\n'\\u0117'\n'\\u0123'\n'\\u0137'\n'\\u012b'\n'\\u013c'\n'\\u0161'\n'\\u0144'\n'\\u0146'\n'\\xf3'\n'\\u014d'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\u0173'\n'\\u0142'\n'\\u015b'\n'\\u016b'\n'\\xfc'\n'\\u017c'\n'\\u017e'\n'\\u2019'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso2022_jp": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_jp')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_jp',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "unittest.signals": [".py", "import signal\nimport weakref\n\nfrom functools import wraps\n\n__unittest=True\n\n\nclass _InterruptHandler(object):\n def __init__(self,default_handler):\n self.called=False\n self.original_handler=default_handler\n if isinstance(default_handler,int):\n if default_handler ==signal.SIG_DFL:\n \n default_handler=signal.default_int_handler\n elif default_handler ==signal.SIG_IGN:\n \n \n def default_handler(unused_signum,unused_frame):\n pass\n else:\n raise TypeError(\"expected SIGINT signal handler to be \"\n \"signal.SIG_IGN, signal.SIG_DFL, or a \"\n \"callable object\")\n self.default_handler=default_handler\n \n def __call__(self,signum,frame):\n installed_handler=signal.getsignal(signal.SIGINT)\n if installed_handler is not self:\n \n \n 
self.default_handler(signum,frame)\n \n if self.called:\n self.default_handler(signum,frame)\n self.called=True\n for result in _results.keys():\n result.stop()\n \n_results=weakref.WeakKeyDictionary()\ndef registerResult(result):\n _results[result]=1\n \ndef removeResult(result):\n return bool(_results.pop(result,None))\n \n_interrupt_handler=None\ndef installHandler():\n global _interrupt_handler\n if _interrupt_handler is None:\n default_handler=signal.getsignal(signal.SIGINT)\n _interrupt_handler=_InterruptHandler(default_handler)\n signal.signal(signal.SIGINT,_interrupt_handler)\n \n \ndef removeHandler(method=None):\n if method is not None:\n @wraps(method)\n def inner(*args,**kwargs):\n initial=signal.getsignal(signal.SIGINT)\n removeHandler()\n try:\n return method(*args,**kwargs)\n finally:\n signal.signal(signal.SIGINT,initial)\n return inner\n \n global _interrupt_handler\n if _interrupt_handler is not None:\n signal.signal(signal.SIGINT,_interrupt_handler.original_handler)\n", ["functools", "signal", "weakref"]], "unittest.runner": [".py", "''\n\nimport sys\nimport time\nimport warnings\n\nfrom. import result\nfrom.case import _SubTest\nfrom.signals import registerResult\n\n__unittest=True\n\n\nclass _WritelnDecorator(object):\n ''\n def __init__(self,stream):\n self.stream=stream\n \n def __getattr__(self,attr):\n if attr in('stream','__getstate__'):\n raise AttributeError(attr)\n return getattr(self.stream,attr)\n \n def writeln(self,arg=None):\n if arg:\n self.write(arg)\n self.write('\\n')\n \n \nclass TextTestResult(result.TestResult):\n ''\n\n\n \n separator1='='*70\n separator2='-'*70\n \n def __init__(self,stream,descriptions,verbosity,*,durations=None):\n ''\n \n super(TextTestResult,self).__init__(stream,descriptions,verbosity)\n self.stream=stream\n self.showAll=verbosity >1\n self.dots=verbosity ==1\n self.descriptions=descriptions\n self._newline=True\n self.durations=durations\n \n def getDescription(self,test):\n doc_first_line=test.shortDescription()\n if self.descriptions and doc_first_line:\n return '\\n'.join((str(test),doc_first_line))\n else:\n return str(test)\n \n def startTest(self,test):\n super(TextTestResult,self).startTest(test)\n if self.showAll:\n self.stream.write(self.getDescription(test))\n self.stream.write(\" ... \")\n self.stream.flush()\n self._newline=False\n \n def _write_status(self,test,status):\n is_subtest=isinstance(test,_SubTest)\n if is_subtest or self._newline:\n if not self._newline:\n self.stream.writeln()\n if is_subtest:\n self.stream.write(\" \")\n self.stream.write(self.getDescription(test))\n self.stream.write(\" ... 
\")\n self.stream.writeln(status)\n self.stream.flush()\n self._newline=True\n \n def addSubTest(self,test,subtest,err):\n if err is not None:\n if self.showAll:\n if issubclass(err[0],subtest.failureException):\n self._write_status(subtest,\"FAIL\")\n else:\n self._write_status(subtest,\"ERROR\")\n elif self.dots:\n if issubclass(err[0],subtest.failureException):\n self.stream.write('F')\n else:\n self.stream.write('E')\n self.stream.flush()\n super(TextTestResult,self).addSubTest(test,subtest,err)\n \n def addSuccess(self,test):\n super(TextTestResult,self).addSuccess(test)\n if self.showAll:\n self._write_status(test,\"ok\")\n elif self.dots:\n self.stream.write('.')\n self.stream.flush()\n \n def addError(self,test,err):\n super(TextTestResult,self).addError(test,err)\n if self.showAll:\n self._write_status(test,\"ERROR\")\n elif self.dots:\n self.stream.write('E')\n self.stream.flush()\n \n def addFailure(self,test,err):\n super(TextTestResult,self).addFailure(test,err)\n if self.showAll:\n self._write_status(test,\"FAIL\")\n elif self.dots:\n self.stream.write('F')\n self.stream.flush()\n \n def addSkip(self,test,reason):\n super(TextTestResult,self).addSkip(test,reason)\n if self.showAll:\n self._write_status(test,\"skipped {0!r}\".format(reason))\n elif self.dots:\n self.stream.write(\"s\")\n self.stream.flush()\n \n def addExpectedFailure(self,test,err):\n super(TextTestResult,self).addExpectedFailure(test,err)\n if self.showAll:\n self.stream.writeln(\"expected failure\")\n self.stream.flush()\n elif self.dots:\n self.stream.write(\"x\")\n self.stream.flush()\n \n def addUnexpectedSuccess(self,test):\n super(TextTestResult,self).addUnexpectedSuccess(test)\n if self.showAll:\n self.stream.writeln(\"unexpected success\")\n self.stream.flush()\n elif self.dots:\n self.stream.write(\"u\")\n self.stream.flush()\n \n def printErrors(self):\n if self.dots or self.showAll:\n self.stream.writeln()\n self.stream.flush()\n self.printErrorList('ERROR',self.errors)\n self.printErrorList('FAIL',self.failures)\n unexpectedSuccesses=getattr(self,'unexpectedSuccesses',())\n if unexpectedSuccesses:\n self.stream.writeln(self.separator1)\n for test in unexpectedSuccesses:\n self.stream.writeln(f\"UNEXPECTED SUCCESS: {self.getDescription(test)}\")\n self.stream.flush()\n \n def printErrorList(self,flavour,errors):\n for test,err in errors:\n self.stream.writeln(self.separator1)\n self.stream.writeln(\"%s: %s\"%(flavour,self.getDescription(test)))\n self.stream.writeln(self.separator2)\n self.stream.writeln(\"%s\"%err)\n self.stream.flush()\n \n \nclass TextTestRunner(object):\n ''\n\n\n\n \n resultclass=TextTestResult\n \n def __init__(self,stream=None,descriptions=True,verbosity=1,\n failfast=False,buffer=False,resultclass=None,warnings=None,\n *,tb_locals=False,durations=None):\n ''\n\n\n\n \n if stream is None:\n stream=sys.stderr\n self.stream=_WritelnDecorator(stream)\n self.descriptions=descriptions\n self.verbosity=verbosity\n self.failfast=failfast\n self.buffer=buffer\n self.tb_locals=tb_locals\n self.durations=durations\n self.warnings=warnings\n if resultclass is not None:\n self.resultclass=resultclass\n \n def _makeResult(self):\n try:\n return self.resultclass(self.stream,self.descriptions,\n self.verbosity,durations=self.durations)\n except TypeError:\n \n return self.resultclass(self.stream,self.descriptions,\n self.verbosity)\n \n def _printDurations(self,result):\n if not result.collectedDurations:\n return\n ls=sorted(result.collectedDurations,key=lambda x:x[1],\n reverse=True)\n 
if self.durations >0:\n ls=ls[:self.durations]\n self.stream.writeln(\"Slowest test durations\")\n if hasattr(result,'separator2'):\n self.stream.writeln(result.separator2)\n hidden=False\n for test,elapsed in ls:\n if self.verbosity <2 and elapsed <0.001:\n hidden=True\n continue\n self.stream.writeln(\"%-10s %s\"%(\"%.3fs\"%elapsed,test))\n if hidden:\n self.stream.writeln(\"\\n(durations < 0.001s were hidden; \"\n \"use -v to show these durations)\")\n else:\n self.stream.writeln(\"\")\n \n def run(self,test):\n ''\n result=self._makeResult()\n registerResult(result)\n result.failfast=self.failfast\n result.buffer=self.buffer\n result.tb_locals=self.tb_locals\n with warnings.catch_warnings():\n if self.warnings:\n \n warnings.simplefilter(self.warnings)\n startTime=time.perf_counter()\n startTestRun=getattr(result,'startTestRun',None)\n if startTestRun is not None:\n startTestRun()\n try:\n test(result)\n finally:\n stopTestRun=getattr(result,'stopTestRun',None)\n if stopTestRun is not None:\n stopTestRun()\n stopTime=time.perf_counter()\n timeTaken=stopTime -startTime\n result.printErrors()\n if self.durations is not None:\n self._printDurations(result)\n \n if hasattr(result,'separator2'):\n self.stream.writeln(result.separator2)\n \n run=result.testsRun\n self.stream.writeln(\"Ran %d test%s in %.3fs\"%\n (run,run !=1 and \"s\"or \"\",timeTaken))\n self.stream.writeln()\n \n expectedFails=unexpectedSuccesses=skipped=0\n try:\n results=map(len,(result.expectedFailures,\n result.unexpectedSuccesses,\n result.skipped))\n except AttributeError:\n pass\n else:\n expectedFails,unexpectedSuccesses,skipped=results\n \n infos=[]\n if not result.wasSuccessful():\n self.stream.write(\"FAILED\")\n failed,errored=len(result.failures),len(result.errors)\n if failed:\n infos.append(\"failures=%d\"%failed)\n if errored:\n infos.append(\"errors=%d\"%errored)\n elif run ==0:\n self.stream.write(\"NO TESTS RAN\")\n else:\n self.stream.write(\"OK\")\n if skipped:\n infos.append(\"skipped=%d\"%skipped)\n if expectedFails:\n infos.append(\"expected failures=%d\"%expectedFails)\n if unexpectedSuccesses:\n infos.append(\"unexpected successes=%d\"%unexpectedSuccesses)\n if infos:\n self.stream.writeln(\" (%s)\"%(\", \".join(infos),))\n else:\n self.stream.write(\"\\n\")\n self.stream.flush()\n return result\n", ["sys", "time", "unittest", "unittest.case", "unittest.result", "unittest.signals", "warnings"]], "unittest.suite": [".py", "''\n\nimport sys\n\nfrom. import case\nfrom. 
import util\n\n__unittest=True\n\n\ndef _call_if_exists(parent,attr):\n func=getattr(parent,attr,lambda:None)\n func()\n \n \nclass BaseTestSuite(object):\n ''\n \n _cleanup=True\n \n def __init__(self,tests=()):\n self._tests=[]\n self._removed_tests=0\n self.addTests(tests)\n \n def __repr__(self):\n return \"<%s tests=%s>\"%(util.strclass(self.__class__),list(self))\n \n def __eq__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n return list(self)==list(other)\n \n def __iter__(self):\n return iter(self._tests)\n \n def countTestCases(self):\n cases=self._removed_tests\n for test in self:\n if test:\n cases +=test.countTestCases()\n return cases\n \n def addTest(self,test):\n \n if not callable(test):\n raise TypeError(\"{} is not callable\".format(repr(test)))\n if isinstance(test,type)and issubclass(test,\n (case.TestCase,TestSuite)):\n raise TypeError(\"TestCases and TestSuites must be instantiated \"\n \"before passing them to addTest()\")\n self._tests.append(test)\n \n def addTests(self,tests):\n if isinstance(tests,str):\n raise TypeError(\"tests must be an iterable of tests, not a string\")\n for test in tests:\n self.addTest(test)\n \n def run(self,result):\n for index,test in enumerate(self):\n if result.shouldStop:\n break\n test(result)\n if self._cleanup:\n self._removeTestAtIndex(index)\n return result\n \n def _removeTestAtIndex(self,index):\n ''\n try:\n test=self._tests[index]\n except TypeError:\n \n pass\n else:\n \n \n if hasattr(test,'countTestCases'):\n self._removed_tests +=test.countTestCases()\n self._tests[index]=None\n \n def __call__(self,*args,**kwds):\n return self.run(*args,**kwds)\n \n def debug(self):\n ''\n for test in self:\n test.debug()\n \n \nclass TestSuite(BaseTestSuite):\n ''\n\n\n\n\n\n\n \n \n def run(self,result,debug=False):\n topLevel=False\n if getattr(result,'_testRunEntered',False)is False:\n result._testRunEntered=topLevel=True\n \n for index,test in enumerate(self):\n if result.shouldStop:\n break\n \n if _isnotsuite(test):\n self._tearDownPreviousClass(test,result)\n self._handleModuleFixture(test,result)\n self._handleClassSetUp(test,result)\n result._previousTestClass=test.__class__\n \n if(getattr(test.__class__,'_classSetupFailed',False)or\n getattr(result,'_moduleSetUpFailed',False)):\n continue\n \n if not debug:\n test(result)\n else:\n test.debug()\n \n if self._cleanup:\n self._removeTestAtIndex(index)\n \n if topLevel:\n self._tearDownPreviousClass(None,result)\n self._handleModuleTearDown(result)\n result._testRunEntered=False\n return result\n \n def debug(self):\n ''\n debug=_DebugResult()\n self.run(debug,True)\n \n \n \n def _handleClassSetUp(self,test,result):\n previousClass=getattr(result,'_previousTestClass',None)\n currentClass=test.__class__\n if currentClass ==previousClass:\n return\n if result._moduleSetUpFailed:\n return\n if getattr(currentClass,\"__unittest_skip__\",False):\n return\n \n failed=False\n try:\n currentClass._classSetupFailed=False\n except TypeError:\n \n \n pass\n \n setUpClass=getattr(currentClass,'setUpClass',None)\n doClassCleanups=getattr(currentClass,'doClassCleanups',None)\n if setUpClass is not None:\n _call_if_exists(result,'_setupStdout')\n try:\n try:\n setUpClass()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n failed=True\n try:\n currentClass._classSetupFailed=True\n except TypeError:\n pass\n className=util.strclass(currentClass)\n self._createClassOrModuleLevelException(result,e,\n 'setUpClass',\n className)\n if failed 
and doClassCleanups is not None:\n doClassCleanups()\n for exc_info in currentClass.tearDown_exceptions:\n self._createClassOrModuleLevelException(\n result,exc_info[1],'setUpClass',className,\n info=exc_info)\n finally:\n _call_if_exists(result,'_restoreStdout')\n \n def _get_previous_module(self,result):\n previousModule=None\n previousClass=getattr(result,'_previousTestClass',None)\n if previousClass is not None:\n previousModule=previousClass.__module__\n return previousModule\n \n \n def _handleModuleFixture(self,test,result):\n previousModule=self._get_previous_module(result)\n currentModule=test.__class__.__module__\n if currentModule ==previousModule:\n return\n \n self._handleModuleTearDown(result)\n \n \n result._moduleSetUpFailed=False\n try:\n module=sys.modules[currentModule]\n except KeyError:\n return\n setUpModule=getattr(module,'setUpModule',None)\n if setUpModule is not None:\n _call_if_exists(result,'_setupStdout')\n try:\n try:\n setUpModule()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n result._moduleSetUpFailed=True\n self._createClassOrModuleLevelException(result,e,\n 'setUpModule',\n currentModule)\n if result._moduleSetUpFailed:\n try:\n case.doModuleCleanups()\n except Exception as e:\n self._createClassOrModuleLevelException(result,e,\n 'setUpModule',\n currentModule)\n finally:\n _call_if_exists(result,'_restoreStdout')\n \n def _createClassOrModuleLevelException(self,result,exc,method_name,\n parent,info=None):\n errorName=f'{method_name} ({parent})'\n self._addClassOrModuleLevelException(result,exc,errorName,info)\n \n def _addClassOrModuleLevelException(self,result,exception,errorName,\n info=None):\n error=_ErrorHolder(errorName)\n addSkip=getattr(result,'addSkip',None)\n if addSkip is not None and isinstance(exception,case.SkipTest):\n addSkip(error,str(exception))\n else:\n if not info:\n result.addError(error,sys.exc_info())\n else:\n result.addError(error,info)\n \n def _handleModuleTearDown(self,result):\n previousModule=self._get_previous_module(result)\n if previousModule is None:\n return\n if result._moduleSetUpFailed:\n return\n \n try:\n module=sys.modules[previousModule]\n except KeyError:\n return\n \n _call_if_exists(result,'_setupStdout')\n try:\n tearDownModule=getattr(module,'tearDownModule',None)\n if tearDownModule is not None:\n try:\n tearDownModule()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n self._createClassOrModuleLevelException(result,e,\n 'tearDownModule',\n previousModule)\n try:\n case.doModuleCleanups()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n self._createClassOrModuleLevelException(result,e,\n 'tearDownModule',\n previousModule)\n finally:\n _call_if_exists(result,'_restoreStdout')\n \n def _tearDownPreviousClass(self,test,result):\n previousClass=getattr(result,'_previousTestClass',None)\n currentClass=test.__class__\n if currentClass ==previousClass or previousClass is None:\n return\n if getattr(previousClass,'_classSetupFailed',False):\n return\n if getattr(result,'_moduleSetUpFailed',False):\n return\n if getattr(previousClass,\"__unittest_skip__\",False):\n return\n \n tearDownClass=getattr(previousClass,'tearDownClass',None)\n doClassCleanups=getattr(previousClass,'doClassCleanups',None)\n if tearDownClass is None and doClassCleanups is None:\n return\n \n _call_if_exists(result,'_setupStdout')\n try:\n if tearDownClass is not None:\n try:\n tearDownClass()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n 
className=util.strclass(previousClass)\n self._createClassOrModuleLevelException(result,e,\n 'tearDownClass',\n className)\n if doClassCleanups is not None:\n doClassCleanups()\n for exc_info in previousClass.tearDown_exceptions:\n if isinstance(result,_DebugResult):\n raise exc_info[1]\n className=util.strclass(previousClass)\n self._createClassOrModuleLevelException(result,exc_info[1],\n 'tearDownClass',\n className,\n info=exc_info)\n finally:\n _call_if_exists(result,'_restoreStdout')\n \n \nclass _ErrorHolder(object):\n ''\n\n\n\n \n \n \n \n \n failureException=None\n \n def __init__(self,description):\n self.description=description\n \n def id(self):\n return self.description\n \n def shortDescription(self):\n return None\n \n def __repr__(self):\n return \"\"%(self.description,)\n \n def __str__(self):\n return self.id()\n \n def run(self,result):\n \n \n pass\n \n def __call__(self,result):\n return self.run(result)\n \n def countTestCases(self):\n return 0\n \ndef _isnotsuite(test):\n ''\n try:\n iter(test)\n except TypeError:\n return True\n return False\n \n \nclass _DebugResult(object):\n ''\n _previousTestClass=None\n _moduleSetUpFailed=False\n shouldStop=False\n", ["sys", "unittest", "unittest.case", "unittest.util"]], "unittest.util": [".py", "''\n\nfrom collections import namedtuple,Counter\nfrom os.path import commonprefix\n\n__unittest=True\n\n_MAX_LENGTH=80\n_PLACEHOLDER_LEN=12\n_MIN_BEGIN_LEN=5\n_MIN_END_LEN=5\n_MIN_COMMON_LEN=5\n_MIN_DIFF_LEN=_MAX_LENGTH -\\\n(_MIN_BEGIN_LEN+_PLACEHOLDER_LEN+_MIN_COMMON_LEN+\n_PLACEHOLDER_LEN+_MIN_END_LEN)\nassert _MIN_DIFF_LEN >=0\n\ndef _shorten(s,prefixlen,suffixlen):\n skip=len(s)-prefixlen -suffixlen\n if skip >_PLACEHOLDER_LEN:\n s='%s[%d chars]%s'%(s[:prefixlen],skip,s[len(s)-suffixlen:])\n return s\n \ndef _common_shorten_repr(*args):\n args=tuple(map(safe_repr,args))\n maxlen=max(map(len,args))\n if maxlen <=_MAX_LENGTH:\n return args\n \n prefix=commonprefix(args)\n prefixlen=len(prefix)\n \n common_len=_MAX_LENGTH -\\\n (maxlen -prefixlen+_MIN_BEGIN_LEN+_PLACEHOLDER_LEN)\n if common_len >_MIN_COMMON_LEN:\n assert _MIN_BEGIN_LEN+_PLACEHOLDER_LEN+_MIN_COMMON_LEN+\\\n (maxlen -prefixlen)<_MAX_LENGTH\n prefix=_shorten(prefix,_MIN_BEGIN_LEN,common_len)\n return tuple(prefix+s[prefixlen:]for s in args)\n \n prefix=_shorten(prefix,_MIN_BEGIN_LEN,_MIN_COMMON_LEN)\n return tuple(prefix+_shorten(s[prefixlen:],_MIN_DIFF_LEN,_MIN_END_LEN)\n for s in args)\n \ndef safe_repr(obj,short=False):\n try:\n result=repr(obj)\n except Exception:\n result=object.__repr__(obj)\n if not short or len(result)<_MAX_LENGTH:\n return result\n return result[:_MAX_LENGTH]+' [truncated]...'\n \ndef strclass(cls):\n return \"%s.%s\"%(cls.__module__,cls.__qualname__)\n \ndef sorted_list_difference(expected,actual):\n ''\n\n\n\n\n\n \n i=j=0\n missing=[]\n unexpected=[]\n while True:\n try:\n e=expected[i]\n a=actual[j]\n if e a:\n unexpected.append(a)\n j +=1\n while actual[j]==a:\n j +=1\n else:\n i +=1\n try:\n while expected[i]==e:\n i +=1\n finally:\n j +=1\n while actual[j]==a:\n j +=1\n except IndexError:\n missing.extend(expected[i:])\n unexpected.extend(actual[j:])\n break\n return missing,unexpected\n \n \ndef unorderable_list_difference(expected,actual):\n ''\n\n\n\n \n missing=[]\n while expected:\n item=expected.pop()\n try:\n actual.remove(item)\n except ValueError:\n missing.append(item)\n \n \n return missing,actual\n \ndef three_way_cmp(x,y):\n ''\n return(x >y)-(x \"%\n (util.strclass(self.__class__),self.testsRun,len(self.errors),\n 
len(self.failures)))\n", ["functools", "io", "sys", "traceback", "unittest", "unittest.util"]], "unittest.loader": [".py", "''\n\nimport os\nimport re\nimport sys\nimport traceback\nimport types\nimport functools\n\nfrom fnmatch import fnmatch,fnmatchcase\n\nfrom. import case,suite,util\n\n__unittest=True\n\n\n\n\nVALID_MODULE_NAME=re.compile(r'[_a-z]\\w*\\.py$',re.IGNORECASE)\n\n\nclass _FailedTest(case.TestCase):\n _testMethodName=None\n \n def __init__(self,method_name,exception):\n self._exception=exception\n super(_FailedTest,self).__init__(method_name)\n \n def __getattr__(self,name):\n if name !=self._testMethodName:\n return super(_FailedTest,self).__getattr__(name)\n def testFailure():\n raise self._exception\n return testFailure\n \n \ndef _make_failed_import_test(name,suiteClass):\n message='Failed to import test module: %s\\n%s'%(\n name,traceback.format_exc())\n return _make_failed_test(name,ImportError(message),suiteClass,message)\n \ndef _make_failed_load_tests(name,exception,suiteClass):\n message='Failed to call load_tests:\\n%s'%(traceback.format_exc(),)\n return _make_failed_test(\n name,exception,suiteClass,message)\n \ndef _make_failed_test(methodname,exception,suiteClass,message):\n test=_FailedTest(methodname,exception)\n return suiteClass((test,)),message\n \ndef _make_skipped_test(methodname,exception,suiteClass):\n @case.skip(str(exception))\n def testSkipped(self):\n pass\n attrs={methodname:testSkipped}\n TestClass=type(\"ModuleSkipped\",(case.TestCase,),attrs)\n return suiteClass((TestClass(methodname),))\n \ndef _splitext(path):\n return os.path.splitext(path)[0]\n \n \nclass TestLoader(object):\n ''\n\n\n \n testMethodPrefix='test'\n sortTestMethodsUsing=staticmethod(util.three_way_cmp)\n testNamePatterns=None\n suiteClass=suite.TestSuite\n _top_level_dir=None\n \n def __init__(self):\n super(TestLoader,self).__init__()\n self.errors=[]\n \n \n self._loading_packages=set()\n \n def loadTestsFromTestCase(self,testCaseClass):\n ''\n if issubclass(testCaseClass,suite.TestSuite):\n raise TypeError(\"Test cases should not be derived from \"\n \"TestSuite. 
Maybe you meant to derive from \"\n \"TestCase?\")\n testCaseNames=self.getTestCaseNames(testCaseClass)\n if not testCaseNames and hasattr(testCaseClass,'runTest'):\n testCaseNames=['runTest']\n loaded_suite=self.suiteClass(map(testCaseClass,testCaseNames))\n return loaded_suite\n \n def loadTestsFromModule(self,module,*,pattern=None):\n ''\n tests=[]\n for name in dir(module):\n obj=getattr(module,name)\n if isinstance(obj,type)and issubclass(obj,case.TestCase):\n tests.append(self.loadTestsFromTestCase(obj))\n \n load_tests=getattr(module,'load_tests',None)\n tests=self.suiteClass(tests)\n if load_tests is not None:\n try:\n return load_tests(self,tests,pattern)\n except Exception as e:\n error_case,error_message=_make_failed_load_tests(\n module.__name__,e,self.suiteClass)\n self.errors.append(error_message)\n return error_case\n return tests\n \n def loadTestsFromName(self,name,module=None):\n ''\n\n\n\n\n\n\n \n parts=name.split('.')\n error_case,error_message=None,None\n if module is None:\n parts_copy=parts[:]\n while parts_copy:\n try:\n module_name='.'.join(parts_copy)\n module=__import__(module_name)\n break\n except ImportError:\n next_attribute=parts_copy.pop()\n \n error_case,error_message=_make_failed_import_test(\n next_attribute,self.suiteClass)\n if not parts_copy:\n \n self.errors.append(error_message)\n return error_case\n parts=parts[1:]\n obj=module\n for part in parts:\n try:\n parent,obj=obj,getattr(obj,part)\n except AttributeError as e:\n \n if(getattr(obj,'__path__',None)is not None\n and error_case is not None):\n \n \n \n \n \n self.errors.append(error_message)\n return error_case\n else:\n \n error_case,error_message=_make_failed_test(\n part,e,self.suiteClass,\n 'Failed to access attribute:\\n%s'%(\n traceback.format_exc(),))\n self.errors.append(error_message)\n return error_case\n \n if isinstance(obj,types.ModuleType):\n return self.loadTestsFromModule(obj)\n elif isinstance(obj,type)and issubclass(obj,case.TestCase):\n return self.loadTestsFromTestCase(obj)\n elif(isinstance(obj,types.FunctionType)and\n isinstance(parent,type)and\n issubclass(parent,case.TestCase)):\n name=parts[-1]\n inst=parent(name)\n \n if not isinstance(getattr(inst,name),types.FunctionType):\n return self.suiteClass([inst])\n elif isinstance(obj,suite.TestSuite):\n return obj\n if callable(obj):\n test=obj()\n if isinstance(test,suite.TestSuite):\n return test\n elif isinstance(test,case.TestCase):\n return self.suiteClass([test])\n else:\n raise TypeError(\"calling %s returned %s, not a test\"%\n (obj,test))\n else:\n raise TypeError(\"don't know how to make test from: %s\"%obj)\n \n def loadTestsFromNames(self,names,module=None):\n ''\n\n \n suites=[self.loadTestsFromName(name,module)for name in names]\n return self.suiteClass(suites)\n \n def getTestCaseNames(self,testCaseClass):\n ''\n \n def shouldIncludeMethod(attrname):\n if not attrname.startswith(self.testMethodPrefix):\n return False\n testFunc=getattr(testCaseClass,attrname)\n if not callable(testFunc):\n return False\n fullName=f'%s.%s.%s'%(\n testCaseClass.__module__,testCaseClass.__qualname__,attrname\n )\n return self.testNamePatterns is None or\\\n any(fnmatchcase(fullName,pattern)for pattern in self.testNamePatterns)\n testFnNames=list(filter(shouldIncludeMethod,dir(testCaseClass)))\n if self.sortTestMethodsUsing:\n testFnNames.sort(key=functools.cmp_to_key(self.sortTestMethodsUsing))\n return testFnNames\n \n def discover(self,start_dir,pattern='test*.py',top_level_dir=None):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n set_implicit_top=False\n if top_level_dir is None and self._top_level_dir is not None:\n \n top_level_dir=self._top_level_dir\n elif top_level_dir is None:\n set_implicit_top=True\n top_level_dir=start_dir\n \n top_level_dir=os.path.abspath(top_level_dir)\n \n if not top_level_dir in sys.path:\n \n \n \n \n sys.path.insert(0,top_level_dir)\n self._top_level_dir=top_level_dir\n \n is_not_importable=False\n if os.path.isdir(os.path.abspath(start_dir)):\n start_dir=os.path.abspath(start_dir)\n if start_dir !=top_level_dir:\n is_not_importable=not os.path.isfile(os.path.join(start_dir,'__init__.py'))\n else:\n \n try:\n __import__(start_dir)\n except ImportError:\n is_not_importable=True\n else:\n the_module=sys.modules[start_dir]\n top_part=start_dir.split('.')[0]\n try:\n start_dir=os.path.abspath(\n os.path.dirname((the_module.__file__)))\n except AttributeError:\n if the_module.__name__ in sys.builtin_module_names:\n \n raise TypeError('Can not use builtin modules '\n 'as dotted module names')from None\n else:\n raise TypeError(\n f\"don't know how to discover from {the_module !r}\"\n )from None\n \n if set_implicit_top:\n self._top_level_dir=self._get_directory_containing_module(top_part)\n sys.path.remove(top_level_dir)\n \n if is_not_importable:\n raise ImportError('Start directory is not importable: %r'%start_dir)\n \n tests=list(self._find_tests(start_dir,pattern))\n return self.suiteClass(tests)\n \n def _get_directory_containing_module(self,module_name):\n module=sys.modules[module_name]\n full_path=os.path.abspath(module.__file__)\n \n if os.path.basename(full_path).lower().startswith('__init__.py'):\n return os.path.dirname(os.path.dirname(full_path))\n else:\n \n \n \n return os.path.dirname(full_path)\n \n def _get_name_from_path(self,path):\n if path ==self._top_level_dir:\n return '.'\n path=_splitext(os.path.normpath(path))\n \n _relpath=os.path.relpath(path,self._top_level_dir)\n assert not os.path.isabs(_relpath),\"Path must be within the project\"\n assert not _relpath.startswith('..'),\"Path must be within the project\"\n \n name=_relpath.replace(os.path.sep,'.')\n return name\n \n def _get_module_from_name(self,name):\n __import__(name)\n return sys.modules[name]\n \n def _match_path(self,path,full_path,pattern):\n \n return fnmatch(path,pattern)\n \n def _find_tests(self,start_dir,pattern):\n ''\n \n name=self._get_name_from_path(start_dir)\n \n \n if name !='.'and name not in self._loading_packages:\n \n \n tests,should_recurse=self._find_test_path(start_dir,pattern)\n if tests is not None:\n yield tests\n if not should_recurse:\n \n \n return\n \n paths=sorted(os.listdir(start_dir))\n for path in paths:\n full_path=os.path.join(start_dir,path)\n tests,should_recurse=self._find_test_path(full_path,pattern)\n if tests is not None:\n yield tests\n if should_recurse:\n \n name=self._get_name_from_path(full_path)\n self._loading_packages.add(name)\n try:\n yield from self._find_tests(full_path,pattern)\n finally:\n self._loading_packages.discard(name)\n \n def _find_test_path(self,full_path,pattern):\n ''\n\n\n\n\n\n \n basename=os.path.basename(full_path)\n if os.path.isfile(full_path):\n if not VALID_MODULE_NAME.match(basename):\n \n return None,False\n if not self._match_path(basename,full_path,pattern):\n return None,False\n \n name=self._get_name_from_path(full_path)\n try:\n module=self._get_module_from_name(name)\n except case.SkipTest as e:\n return _make_skipped_test(name,e,self.suiteClass),False\n 
except:\n error_case,error_message=\\\n _make_failed_import_test(name,self.suiteClass)\n self.errors.append(error_message)\n return error_case,False\n else:\n mod_file=os.path.abspath(\n getattr(module,'__file__',full_path))\n realpath=_splitext(\n os.path.realpath(mod_file))\n fullpath_noext=_splitext(\n os.path.realpath(full_path))\n if realpath.lower()!=fullpath_noext.lower():\n module_dir=os.path.dirname(realpath)\n mod_name=_splitext(\n os.path.basename(full_path))\n expected_dir=os.path.dirname(full_path)\n msg=(\"%r module incorrectly imported from %r. Expected \"\n \"%r. Is this module globally installed?\")\n raise ImportError(\n msg %(mod_name,module_dir,expected_dir))\n return self.loadTestsFromModule(module,pattern=pattern),False\n elif os.path.isdir(full_path):\n if not os.path.isfile(os.path.join(full_path,'__init__.py')):\n return None,False\n \n load_tests=None\n tests=None\n name=self._get_name_from_path(full_path)\n try:\n package=self._get_module_from_name(name)\n except case.SkipTest as e:\n return _make_skipped_test(name,e,self.suiteClass),False\n except:\n error_case,error_message=\\\n _make_failed_import_test(name,self.suiteClass)\n self.errors.append(error_message)\n return error_case,False\n else:\n load_tests=getattr(package,'load_tests',None)\n \n self._loading_packages.add(name)\n try:\n tests=self.loadTestsFromModule(package,pattern=pattern)\n if load_tests is not None:\n \n return tests,False\n return tests,True\n finally:\n self._loading_packages.discard(name)\n else:\n return None,False\n \n \ndefaultTestLoader=TestLoader()\n\n\n\n\n\ndef _makeLoader(prefix,sortUsing,suiteClass=None,testNamePatterns=None):\n loader=TestLoader()\n loader.sortTestMethodsUsing=sortUsing\n loader.testMethodPrefix=prefix\n loader.testNamePatterns=testNamePatterns\n if suiteClass:\n loader.suiteClass=suiteClass\n return loader\n \ndef getTestCaseNames(testCaseClass,prefix,sortUsing=util.three_way_cmp,testNamePatterns=None):\n import warnings\n warnings.warn(\n \"unittest.getTestCaseNames() is deprecated and will be removed in Python 3.13. \"\n \"Please use unittest.TestLoader.getTestCaseNames() instead.\",\n DeprecationWarning,stacklevel=2\n )\n return _makeLoader(prefix,sortUsing,testNamePatterns=testNamePatterns).getTestCaseNames(testCaseClass)\n \ndef makeSuite(testCaseClass,prefix='test',sortUsing=util.three_way_cmp,\nsuiteClass=suite.TestSuite):\n import warnings\n warnings.warn(\n \"unittest.makeSuite() is deprecated and will be removed in Python 3.13. \"\n \"Please use unittest.TestLoader.loadTestsFromTestCase() instead.\",\n DeprecationWarning,stacklevel=2\n )\n return _makeLoader(prefix,sortUsing,suiteClass).loadTestsFromTestCase(\n testCaseClass)\n \ndef findTestCases(module,prefix='test',sortUsing=util.three_way_cmp,\nsuiteClass=suite.TestSuite):\n import warnings\n warnings.warn(\n \"unittest.findTestCases() is deprecated and will be removed in Python 3.13. \"\n \"Please use unittest.TestLoader.loadTestsFromModule() instead.\",\n DeprecationWarning,stacklevel=2\n )\n return _makeLoader(prefix,sortUsing,suiteClass).loadTestsFromModule(\\\n module)\n", ["fnmatch", "functools", "os", "re", "sys", "traceback", "types", "unittest", "unittest.case", "unittest.suite", "unittest.util", "warnings"]], "unittest.case": [".py", "''\n\nimport sys\nimport functools\nimport difflib\nimport pprint\nimport re\nimport warnings\nimport collections\nimport contextlib\nimport traceback\nimport time\nimport types\n\nfrom. 
import result\nfrom.util import(strclass,safe_repr,_count_diff_all_purpose,\n_count_diff_hashable,_common_shorten_repr)\n\n__unittest=True\n\n_subtest_msg_sentinel=object()\n\nDIFF_OMITTED=('\\nDiff is %s characters long. '\n'Set self.maxDiff to None to see it.')\n\nclass SkipTest(Exception):\n ''\n\n\n\n\n \n \nclass _ShouldStop(Exception):\n ''\n\n \n \nclass _UnexpectedSuccess(Exception):\n ''\n\n \n \n \nclass _Outcome(object):\n def __init__(self,result=None):\n self.expecting_failure=False\n self.result=result\n self.result_supports_subtests=hasattr(result,\"addSubTest\")\n self.success=True\n self.expectedFailure=None\n \n @contextlib.contextmanager\n def testPartExecutor(self,test_case,subTest=False):\n old_success=self.success\n self.success=True\n try:\n yield\n except KeyboardInterrupt:\n raise\n except SkipTest as e:\n self.success=False\n _addSkip(self.result,test_case,str(e))\n except _ShouldStop:\n pass\n except:\n exc_info=sys.exc_info()\n if self.expecting_failure:\n self.expectedFailure=exc_info\n else:\n self.success=False\n if subTest:\n self.result.addSubTest(test_case.test_case,test_case,exc_info)\n else:\n _addError(self.result,test_case,exc_info)\n \n \n exc_info=None\n else:\n if subTest and self.success:\n self.result.addSubTest(test_case.test_case,test_case,None)\n finally:\n self.success=self.success and old_success\n \n \ndef _addSkip(result,test_case,reason):\n addSkip=getattr(result,'addSkip',None)\n if addSkip is not None:\n addSkip(test_case,reason)\n else:\n warnings.warn(\"TestResult has no addSkip method, skips not reported\",\n RuntimeWarning,2)\n result.addSuccess(test_case)\n \ndef _addError(result,test,exc_info):\n if result is not None and exc_info is not None:\n if issubclass(exc_info[0],test.failureException):\n result.addFailure(test,exc_info)\n else:\n result.addError(test,exc_info)\n \ndef _id(obj):\n return obj\n \n \ndef _enter_context(cm,addcleanup):\n\n\n cls=type(cm)\n try:\n enter=cls.__enter__\n exit=cls.__exit__\n except AttributeError:\n raise TypeError(f\"'{cls.__module__}.{cls.__qualname__}' object does \"\n f\"not support the context manager protocol\")from None\n result=enter(cm)\n addcleanup(exit,cm,None,None,None)\n return result\n \n \n_module_cleanups=[]\ndef addModuleCleanup(function,/,*args,**kwargs):\n ''\n \n _module_cleanups.append((function,args,kwargs))\n \ndef enterModuleContext(cm):\n ''\n return _enter_context(cm,addModuleCleanup)\n \n \ndef doModuleCleanups():\n ''\n \n exceptions=[]\n while _module_cleanups:\n function,args,kwargs=_module_cleanups.pop()\n try:\n function(*args,**kwargs)\n except Exception as exc:\n exceptions.append(exc)\n if exceptions:\n \n \n raise exceptions[0]\n \n \ndef skip(reason):\n ''\n\n \n def decorator(test_item):\n if not isinstance(test_item,type):\n @functools.wraps(test_item)\n def skip_wrapper(*args,**kwargs):\n raise SkipTest(reason)\n test_item=skip_wrapper\n \n test_item.__unittest_skip__=True\n test_item.__unittest_skip_why__=reason\n return test_item\n if isinstance(reason,types.FunctionType):\n test_item=reason\n reason=''\n return decorator(test_item)\n return decorator\n \ndef skipIf(condition,reason):\n ''\n\n \n if condition:\n return skip(reason)\n return _id\n \ndef skipUnless(condition,reason):\n ''\n\n \n if not condition:\n return skip(reason)\n return _id\n \ndef expectedFailure(test_item):\n test_item.__unittest_expecting_failure__=True\n return test_item\n \ndef _is_subtype(expected,basetype):\n if isinstance(expected,tuple):\n return 
all(_is_subtype(e,basetype)for e in expected)\n return isinstance(expected,type)and issubclass(expected,basetype)\n \nclass _BaseTestCaseContext:\n\n def __init__(self,test_case):\n self.test_case=test_case\n \n def _raiseFailure(self,standardMsg):\n msg=self.test_case._formatMessage(self.msg,standardMsg)\n raise self.test_case.failureException(msg)\n \nclass _AssertRaisesBaseContext(_BaseTestCaseContext):\n\n def __init__(self,expected,test_case,expected_regex=None):\n _BaseTestCaseContext.__init__(self,test_case)\n self.expected=expected\n self.test_case=test_case\n if expected_regex is not None:\n expected_regex=re.compile(expected_regex)\n self.expected_regex=expected_regex\n self.obj_name=None\n self.msg=None\n \n def handle(self,name,args,kwargs):\n ''\n\n\n\n\n \n try:\n if not _is_subtype(self.expected,self._base_type):\n raise TypeError('%s() arg 1 must be %s'%\n (name,self._base_type_str))\n if not args:\n self.msg=kwargs.pop('msg',None)\n if kwargs:\n raise TypeError('%r is an invalid keyword argument for '\n 'this function'%(next(iter(kwargs)),))\n return self\n \n callable_obj,*args=args\n try:\n self.obj_name=callable_obj.__name__\n except AttributeError:\n self.obj_name=str(callable_obj)\n with self:\n callable_obj(*args,**kwargs)\n finally:\n \n self=None\n \n \nclass _AssertRaisesContext(_AssertRaisesBaseContext):\n ''\n \n _base_type=BaseException\n _base_type_str='an exception type or tuple of exception types'\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,tb):\n if exc_type is None:\n try:\n exc_name=self.expected.__name__\n except AttributeError:\n exc_name=str(self.expected)\n if self.obj_name:\n self._raiseFailure(\"{} not raised by {}\".format(exc_name,\n self.obj_name))\n else:\n self._raiseFailure(\"{} not raised\".format(exc_name))\n else:\n traceback.clear_frames(tb)\n if not issubclass(exc_type,self.expected):\n \n return False\n \n self.exception=exc_value.with_traceback(None)\n if self.expected_regex is None:\n return True\n \n expected_regex=self.expected_regex\n if not expected_regex.search(str(exc_value)):\n self._raiseFailure('\"{}\" does not match \"{}\"'.format(\n expected_regex.pattern,str(exc_value)))\n return True\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \nclass _AssertWarnsContext(_AssertRaisesBaseContext):\n ''\n \n _base_type=Warning\n _base_type_str='a warning type or tuple of warning types'\n \n def __enter__(self):\n \n \n for v in list(sys.modules.values()):\n if getattr(v,'__warningregistry__',None):\n v.__warningregistry__={}\n self.warnings_manager=warnings.catch_warnings(record=True)\n self.warnings=self.warnings_manager.__enter__()\n warnings.simplefilter(\"always\",self.expected)\n return self\n \n def __exit__(self,exc_type,exc_value,tb):\n self.warnings_manager.__exit__(exc_type,exc_value,tb)\n if exc_type is not None:\n \n return\n try:\n exc_name=self.expected.__name__\n except AttributeError:\n exc_name=str(self.expected)\n first_matching=None\n for m in self.warnings:\n w=m.message\n if not isinstance(w,self.expected):\n continue\n if first_matching is None:\n first_matching=w\n if(self.expected_regex is not None and\n not self.expected_regex.search(str(w))):\n continue\n \n self.warning=w\n self.filename=m.filename\n self.lineno=m.lineno\n return\n \n if first_matching is not None:\n self._raiseFailure('\"{}\" does not match \"{}\"'.format(\n self.expected_regex.pattern,str(first_matching)))\n if self.obj_name:\n self._raiseFailure(\"{} not triggered by {}\".format(exc_name,\n 
self.obj_name))\n else:\n self._raiseFailure(\"{} not triggered\".format(exc_name))\n \n \nclass _OrderedChainMap(collections.ChainMap):\n def __iter__(self):\n seen=set()\n for mapping in self.maps:\n for k in mapping:\n if k not in seen:\n seen.add(k)\n yield k\n \n \nclass TestCase(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n failureException=AssertionError\n \n longMessage=True\n \n maxDiff=80 *8\n \n \n \n _diffThreshold=2 **16\n \n def __init_subclass__(cls,*args,**kwargs):\n \n cls._classSetupFailed=False\n cls._class_cleanups=[]\n super().__init_subclass__(*args,**kwargs)\n \n def __init__(self,methodName='runTest'):\n ''\n\n\n \n self._testMethodName=methodName\n self._outcome=None\n self._testMethodDoc='No test'\n try:\n testMethod=getattr(self,methodName)\n except AttributeError:\n if methodName !='runTest':\n \n \n raise ValueError(\"no such test method in %s: %s\"%\n (self.__class__,methodName))\n else:\n self._testMethodDoc=testMethod.__doc__\n self._cleanups=[]\n self._subtest=None\n \n \n \n \n self._type_equality_funcs={}\n self.addTypeEqualityFunc(dict,'assertDictEqual')\n self.addTypeEqualityFunc(list,'assertListEqual')\n self.addTypeEqualityFunc(tuple,'assertTupleEqual')\n self.addTypeEqualityFunc(set,'assertSetEqual')\n self.addTypeEqualityFunc(frozenset,'assertSetEqual')\n self.addTypeEqualityFunc(str,'assertMultiLineEqual')\n \n def addTypeEqualityFunc(self,typeobj,function):\n ''\n\n\n\n\n\n\n\n\n\n\n \n self._type_equality_funcs[typeobj]=function\n \n def addCleanup(self,function,/,*args,**kwargs):\n ''\n\n\n\n \n self._cleanups.append((function,args,kwargs))\n \n def enterContext(self,cm):\n ''\n\n\n\n \n return _enter_context(cm,self.addCleanup)\n \n @classmethod\n def addClassCleanup(cls,function,/,*args,**kwargs):\n ''\n \n cls._class_cleanups.append((function,args,kwargs))\n \n @classmethod\n def enterClassContext(cls,cm):\n ''\n return _enter_context(cm,cls.addClassCleanup)\n \n def setUp(self):\n ''\n pass\n \n def tearDown(self):\n ''\n pass\n \n @classmethod\n def setUpClass(cls):\n ''\n \n @classmethod\n def tearDownClass(cls):\n ''\n \n def countTestCases(self):\n return 1\n \n def defaultTestResult(self):\n return result.TestResult()\n \n def shortDescription(self):\n ''\n\n\n\n\n \n doc=self._testMethodDoc\n return doc.strip().split(\"\\n\")[0].strip()if doc else None\n \n \n def id(self):\n return \"%s.%s\"%(strclass(self.__class__),self._testMethodName)\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self._testMethodName ==other._testMethodName\n \n def __hash__(self):\n return hash((type(self),self._testMethodName))\n \n def __str__(self):\n return \"%s (%s.%s)\"%(self._testMethodName,strclass(self.__class__),self._testMethodName)\n \n def __repr__(self):\n return \"<%s testMethod=%s>\"%\\\n (strclass(self.__class__),self._testMethodName)\n \n @contextlib.contextmanager\n def subTest(self,msg=_subtest_msg_sentinel,**params):\n ''\n\n\n\n\n \n if self._outcome is None or not self._outcome.result_supports_subtests:\n yield\n return\n parent=self._subtest\n if parent is None:\n params_map=_OrderedChainMap(params)\n else:\n params_map=parent.params.new_child(params)\n self._subtest=_SubTest(self,msg,params_map)\n try:\n with self._outcome.testPartExecutor(self._subtest,subTest=True):\n yield\n if not self._outcome.success:\n result=self._outcome.result\n if result is not None and result.failfast:\n raise _ShouldStop\n elif self._outcome.expectedFailure:\n \n \n raise 
_ShouldStop\n finally:\n self._subtest=parent\n \n def _addExpectedFailure(self,result,exc_info):\n try:\n addExpectedFailure=result.addExpectedFailure\n except AttributeError:\n warnings.warn(\"TestResult has no addExpectedFailure method, reporting as passes\",\n RuntimeWarning)\n result.addSuccess(self)\n else:\n addExpectedFailure(self,exc_info)\n \n def _addUnexpectedSuccess(self,result):\n try:\n addUnexpectedSuccess=result.addUnexpectedSuccess\n except AttributeError:\n warnings.warn(\"TestResult has no addUnexpectedSuccess method, reporting as failure\",\n RuntimeWarning)\n \n \n try:\n raise _UnexpectedSuccess from None\n except _UnexpectedSuccess:\n result.addFailure(self,sys.exc_info())\n else:\n addUnexpectedSuccess(self)\n \n def _addDuration(self,result,elapsed):\n try:\n addDuration=result.addDuration\n except AttributeError:\n warnings.warn(\"TestResult has no addDuration method\",\n RuntimeWarning)\n else:\n addDuration(self,elapsed)\n \n def _callSetUp(self):\n self.setUp()\n \n def _callTestMethod(self,method):\n if method()is not None:\n warnings.warn(f'It is deprecated to return a value that is not None from a '\n f'test case ({method})',DeprecationWarning,stacklevel=3)\n \n def _callTearDown(self):\n self.tearDown()\n \n def _callCleanup(self,function,/,*args,**kwargs):\n function(*args,**kwargs)\n \n def run(self,result=None):\n if result is None:\n result=self.defaultTestResult()\n startTestRun=getattr(result,'startTestRun',None)\n stopTestRun=getattr(result,'stopTestRun',None)\n if startTestRun is not None:\n startTestRun()\n else:\n stopTestRun=None\n \n result.startTest(self)\n try:\n testMethod=getattr(self,self._testMethodName)\n if(getattr(self.__class__,\"__unittest_skip__\",False)or\n getattr(testMethod,\"__unittest_skip__\",False)):\n \n skip_why=(getattr(self.__class__,'__unittest_skip_why__','')\n or getattr(testMethod,'__unittest_skip_why__',''))\n _addSkip(result,self,skip_why)\n return result\n \n expecting_failure=(\n getattr(self,\"__unittest_expecting_failure__\",False)or\n getattr(testMethod,\"__unittest_expecting_failure__\",False)\n )\n outcome=_Outcome(result)\n start_time=time.perf_counter()\n try:\n self._outcome=outcome\n \n with outcome.testPartExecutor(self):\n self._callSetUp()\n if outcome.success:\n outcome.expecting_failure=expecting_failure\n with outcome.testPartExecutor(self):\n self._callTestMethod(testMethod)\n outcome.expecting_failure=False\n with outcome.testPartExecutor(self):\n self._callTearDown()\n self.doCleanups()\n self._addDuration(result,(time.perf_counter()-start_time))\n \n if outcome.success:\n if expecting_failure:\n if outcome.expectedFailure:\n self._addExpectedFailure(result,outcome.expectedFailure)\n else:\n self._addUnexpectedSuccess(result)\n else:\n result.addSuccess(self)\n return result\n finally:\n \n \n outcome.expectedFailure=None\n outcome=None\n \n \n self._outcome=None\n \n finally:\n result.stopTest(self)\n if stopTestRun is not None:\n stopTestRun()\n \n def doCleanups(self):\n ''\n \n outcome=self._outcome or _Outcome()\n while self._cleanups:\n function,args,kwargs=self._cleanups.pop()\n with outcome.testPartExecutor(self):\n self._callCleanup(function,*args,**kwargs)\n \n \n \n return outcome.success\n \n @classmethod\n def doClassCleanups(cls):\n ''\n \n cls.tearDown_exceptions=[]\n while cls._class_cleanups:\n function,args,kwargs=cls._class_cleanups.pop()\n try:\n function(*args,**kwargs)\n except Exception:\n cls.tearDown_exceptions.append(sys.exc_info())\n \n def 
__call__(self,*args,**kwds):\n return self.run(*args,**kwds)\n \n def debug(self):\n ''\n testMethod=getattr(self,self._testMethodName)\n if(getattr(self.__class__,\"__unittest_skip__\",False)or\n getattr(testMethod,\"__unittest_skip__\",False)):\n \n skip_why=(getattr(self.__class__,'__unittest_skip_why__','')\n or getattr(testMethod,'__unittest_skip_why__',''))\n raise SkipTest(skip_why)\n \n self._callSetUp()\n self._callTestMethod(testMethod)\n self._callTearDown()\n while self._cleanups:\n function,args,kwargs=self._cleanups.pop()\n self._callCleanup(function,*args,**kwargs)\n \n def skipTest(self,reason):\n ''\n raise SkipTest(reason)\n \n def fail(self,msg=None):\n ''\n raise self.failureException(msg)\n \n def assertFalse(self,expr,msg=None):\n ''\n if expr:\n msg=self._formatMessage(msg,\"%s is not false\"%safe_repr(expr))\n raise self.failureException(msg)\n \n def assertTrue(self,expr,msg=None):\n ''\n if not expr:\n msg=self._formatMessage(msg,\"%s is not true\"%safe_repr(expr))\n raise self.failureException(msg)\n \n def _formatMessage(self,msg,standardMsg):\n ''\n\n\n\n\n\n\n\n \n if not self.longMessage:\n return msg or standardMsg\n if msg is None:\n return standardMsg\n try:\n \n \n return '%s : %s'%(standardMsg,msg)\n except UnicodeDecodeError:\n return '%s : %s'%(safe_repr(standardMsg),safe_repr(msg))\n \n def assertRaises(self,expected_exception,*args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n context=_AssertRaisesContext(expected_exception,self)\n try:\n return context.handle('assertRaises',args,kwargs)\n finally:\n \n context=None\n \n def assertWarns(self,expected_warning,*args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n context=_AssertWarnsContext(expected_warning,self)\n return context.handle('assertWarns',args,kwargs)\n \n def assertLogs(self,logger=None,level=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n from._log import _AssertLogsContext\n return _AssertLogsContext(self,logger,level,no_logs=False)\n \n def assertNoLogs(self,logger=None,level=None):\n ''\n\n\n\n \n from._log import _AssertLogsContext\n return _AssertLogsContext(self,logger,level,no_logs=True)\n \n def _getAssertEqualityFunc(self,first,second):\n ''\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n if type(first)is type(second):\n asserter=self._type_equality_funcs.get(type(first))\n if asserter is not None:\n if isinstance(asserter,str):\n asserter=getattr(self,asserter)\n return asserter\n \n return self._baseAssertEqual\n \n def _baseAssertEqual(self,first,second,msg=None):\n ''\n if not first ==second:\n standardMsg='%s != %s'%_common_shorten_repr(first,second)\n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertEqual(self,first,second,msg=None):\n ''\n\n \n assertion_func=self._getAssertEqualityFunc(first,second)\n assertion_func(first,second,msg=msg)\n \n def assertNotEqual(self,first,second,msg=None):\n ''\n\n \n if not first !=second:\n msg=self._formatMessage(msg,'%s == %s'%(safe_repr(first),\n safe_repr(second)))\n raise self.failureException(msg)\n \n def assertAlmostEqual(self,first,second,places=None,msg=None,\n delta=None):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if first ==second:\n \n return\n if delta is not None and places is not None:\n raise TypeError(\"specify delta or places not both\")\n \n diff=abs(first -second)\n if delta is not None:\n if diff <=delta:\n return\n \n standardMsg='%s != %s within %s delta (%s difference)'%(\n safe_repr(first),\n safe_repr(second),\n safe_repr(delta),\n 
safe_repr(diff))\n else:\n if places is None:\n places=7\n \n if round(diff,places)==0:\n return\n \n standardMsg='%s != %s within %r places (%s difference)'%(\n safe_repr(first),\n safe_repr(second),\n places,\n safe_repr(diff))\n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertNotAlmostEqual(self,first,second,places=None,msg=None,\n delta=None):\n ''\n\n\n\n\n\n\n\n\n \n if delta is not None and places is not None:\n raise TypeError(\"specify delta or places not both\")\n diff=abs(first -second)\n if delta is not None:\n if not(first ==second)and diff >delta:\n return\n standardMsg='%s == %s within %s delta (%s difference)'%(\n safe_repr(first),\n safe_repr(second),\n safe_repr(delta),\n safe_repr(diff))\n else:\n if places is None:\n places=7\n if not(first ==second)and round(diff,places)!=0:\n return\n standardMsg='%s == %s within %r places'%(safe_repr(first),\n safe_repr(second),\n places)\n \n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertSequenceEqual(self,seq1,seq2,msg=None,seq_type=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if seq_type is not None:\n seq_type_name=seq_type.__name__\n if not isinstance(seq1,seq_type):\n raise self.failureException('First sequence is not a %s: %s'\n %(seq_type_name,safe_repr(seq1)))\n if not isinstance(seq2,seq_type):\n raise self.failureException('Second sequence is not a %s: %s'\n %(seq_type_name,safe_repr(seq2)))\n else:\n seq_type_name=\"sequence\"\n \n differing=None\n try:\n len1=len(seq1)\n except(TypeError,NotImplementedError):\n differing='First %s has no length. Non-sequence?'%(\n seq_type_name)\n \n if differing is None:\n try:\n len2=len(seq2)\n except(TypeError,NotImplementedError):\n differing='Second %s has no length. 
Non-sequence?'%(\n seq_type_name)\n \n if differing is None:\n if seq1 ==seq2:\n return\n \n differing='%ss differ: %s != %s\\n'%(\n (seq_type_name.capitalize(),)+\n _common_shorten_repr(seq1,seq2))\n \n for i in range(min(len1,len2)):\n try:\n item1=seq1[i]\n except(TypeError,IndexError,NotImplementedError):\n differing +=('\\nUnable to index element %d of first %s\\n'%\n (i,seq_type_name))\n break\n \n try:\n item2=seq2[i]\n except(TypeError,IndexError,NotImplementedError):\n differing +=('\\nUnable to index element %d of second %s\\n'%\n (i,seq_type_name))\n break\n \n if item1 !=item2:\n differing +=('\\nFirst differing element %d:\\n%s\\n%s\\n'%\n ((i,)+_common_shorten_repr(item1,item2)))\n break\n else:\n if(len1 ==len2 and seq_type is None and\n type(seq1)!=type(seq2)):\n \n return\n \n if len1 >len2:\n differing +=('\\nFirst %s contains %d additional '\n 'elements.\\n'%(seq_type_name,len1 -len2))\n try:\n differing +=('First extra element %d:\\n%s\\n'%\n (len2,safe_repr(seq1[len2])))\n except(TypeError,IndexError,NotImplementedError):\n differing +=('Unable to index element %d '\n 'of first %s\\n'%(len2,seq_type_name))\n elif len1 self._diffThreshold or\n len(second)>self._diffThreshold):\n self._baseAssertEqual(first,second,msg)\n \n \n \n \n \n first_presplit=first\n second_presplit=second\n if first and second:\n if first[-1]!='\\n'or second[-1]!='\\n':\n first_presplit +='\\n'\n second_presplit +='\\n'\n elif second and second[-1]!='\\n':\n second_presplit +='\\n'\n elif first and first[-1]!='\\n':\n first_presplit +='\\n'\n \n firstlines=first_presplit.splitlines(keepends=True)\n secondlines=second_presplit.splitlines(keepends=True)\n \n \n standardMsg='%s != %s'%_common_shorten_repr(first,second)\n diff='\\n'+''.join(difflib.ndiff(firstlines,secondlines))\n standardMsg=self._truncateMessage(standardMsg,diff)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertLess(self,a,b,msg=None):\n ''\n if not a b:\n standardMsg='%s not greater than %s'%(safe_repr(a),safe_repr(b))\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertGreaterEqual(self,a,b,msg=None):\n ''\n if not a >=b:\n standardMsg='%s not greater than or equal to %s'%(safe_repr(a),safe_repr(b))\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertIsNone(self,obj,msg=None):\n ''\n if obj is not None:\n standardMsg='%s is not None'%(safe_repr(obj),)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertIsNotNone(self,obj,msg=None):\n ''\n if obj is None:\n standardMsg='unexpectedly None'\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertIsInstance(self,obj,cls,msg=None):\n ''\n \n if not isinstance(obj,cls):\n standardMsg='%s is not an instance of %r'%(safe_repr(obj),cls)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertNotIsInstance(self,obj,cls,msg=None):\n ''\n if isinstance(obj,cls):\n standardMsg='%s is an instance of %r'%(safe_repr(obj),cls)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertRaisesRegex(self,expected_exception,expected_regex,\n *args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n \n context=_AssertRaisesContext(expected_exception,self,expected_regex)\n return context.handle('assertRaisesRegex',args,kwargs)\n \n def assertWarnsRegex(self,expected_warning,expected_regex,\n *args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n context=_AssertWarnsContext(expected_warning,self,expected_regex)\n return context.handle('assertWarnsRegex',args,kwargs)\n \n def assertRegex(self,text,expected_regex,msg=None):\n ''\n if 
isinstance(expected_regex,(str,bytes)):\n assert expected_regex,\"expected_regex must not be empty.\"\n expected_regex=re.compile(expected_regex)\n if not expected_regex.search(text):\n standardMsg=\"Regex didn't match: %r not found in %r\"%(\n expected_regex.pattern,text)\n \n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertNotRegex(self,text,unexpected_regex,msg=None):\n ''\n if isinstance(unexpected_regex,(str,bytes)):\n unexpected_regex=re.compile(unexpected_regex)\n match=unexpected_regex.search(text)\n if match:\n standardMsg='Regex matched: %r matches %r in %r'%(\n text[match.start():match.end()],\n unexpected_regex.pattern,\n text)\n \n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n \n \nclass FunctionTestCase(TestCase):\n ''\n\n\n\n\n\n \n \n def __init__(self,testFunc,setUp=None,tearDown=None,description=None):\n super(FunctionTestCase,self).__init__()\n self._setUpFunc=setUp\n self._tearDownFunc=tearDown\n self._testFunc=testFunc\n self._description=description\n \n def setUp(self):\n if self._setUpFunc is not None:\n self._setUpFunc()\n \n def tearDown(self):\n if self._tearDownFunc is not None:\n self._tearDownFunc()\n \n def runTest(self):\n self._testFunc()\n \n def id(self):\n return self._testFunc.__name__\n \n def __eq__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n \n return self._setUpFunc ==other._setUpFunc and\\\n self._tearDownFunc ==other._tearDownFunc and\\\n self._testFunc ==other._testFunc and\\\n self._description ==other._description\n \n def __hash__(self):\n return hash((type(self),self._setUpFunc,self._tearDownFunc,\n self._testFunc,self._description))\n \n def __str__(self):\n return \"%s (%s)\"%(strclass(self.__class__),\n self._testFunc.__name__)\n \n def __repr__(self):\n return \"<%s tec=%s>\"%(strclass(self.__class__),\n self._testFunc)\n \n def shortDescription(self):\n if self._description is not None:\n return self._description\n doc=self._testFunc.__doc__\n return doc and doc.split(\"\\n\")[0].strip()or None\n \n \nclass _SubTest(TestCase):\n\n def __init__(self,test_case,message,params):\n super().__init__()\n self._message=message\n self.test_case=test_case\n self.params=params\n self.failureException=test_case.failureException\n \n def runTest(self):\n raise NotImplementedError(\"subtests cannot be run directly\")\n \n def _subDescription(self):\n parts=[]\n if self._message is not _subtest_msg_sentinel:\n parts.append(\"[{}]\".format(self._message))\n if self.params:\n params_desc=', '.join(\n \"{}={!r}\".format(k,v)\n for(k,v)in self.params.items())\n parts.append(\"({})\".format(params_desc))\n return \" \".join(parts)or '()'\n \n def id(self):\n return \"{} {}\".format(self.test_case.id(),self._subDescription())\n \n def shortDescription(self):\n ''\n\n \n return self.test_case.shortDescription()\n \n def __str__(self):\n return \"{} {}\".format(self.test_case,self._subDescription())\n", ["collections", "contextlib", "difflib", "functools", "pprint", "re", "sys", "time", "traceback", "types", "unittest", "unittest._log", "unittest.result", "unittest.util", "warnings"]], "unittest.main": [".py", "''\n\nimport sys\nimport argparse\nimport os\nimport warnings\n\nfrom. 
import loader,runner\nfrom.signals import installHandler\n\n__unittest=True\n_NO_TESTS_EXITCODE=5\n\nMAIN_EXAMPLES=\"\"\"\\\nExamples:\n %(prog)s test_module - run tests from test_module\n %(prog)s module.TestClass - run tests from module.TestClass\n %(prog)s module.Class.test_method - run specified test method\n %(prog)s path/to/test_file.py - run tests from test_file.py\n\"\"\"\n\nMODULE_EXAMPLES=\"\"\"\\\nExamples:\n %(prog)s - run default set of tests\n %(prog)s MyTestSuite - run suite 'MyTestSuite'\n %(prog)s MyTestCase.testSomething - run MyTestCase.testSomething\n %(prog)s MyTestCase - run all 'test*' test methods\n in MyTestCase\n\"\"\"\n\ndef _convert_name(name):\n\n\n\n\n if os.path.isfile(name)and name.lower().endswith('.py'):\n if os.path.isabs(name):\n rel_path=os.path.relpath(name,os.getcwd())\n if os.path.isabs(rel_path)or rel_path.startswith(os.pardir):\n return name\n name=rel_path\n \n \n return os.path.normpath(name)[:-3].replace('\\\\','.').replace('/','.')\n return name\n \ndef _convert_names(names):\n return[_convert_name(name)for name in names]\n \n \ndef _convert_select_pattern(pattern):\n if not '*'in pattern:\n pattern='*%s*'%pattern\n return pattern\n \n \nclass TestProgram(object):\n ''\n\n \n \n module=None\n verbosity=1\n failfast=catchbreak=buffer=progName=warnings=testNamePatterns=None\n _discovery_parser=None\n \n def __init__(self,module='__main__',defaultTest=None,argv=None,\n testRunner=None,testLoader=loader.defaultTestLoader,\n exit=True,verbosity=1,failfast=None,catchbreak=None,\n buffer=None,warnings=None,*,tb_locals=False,\n durations=None):\n if isinstance(module,str):\n self.module=__import__(module)\n for part in module.split('.')[1:]:\n self.module=getattr(self.module,part)\n else:\n self.module=module\n if argv is None:\n argv=sys.argv\n \n self.exit=exit\n self.failfast=failfast\n self.catchbreak=catchbreak\n self.verbosity=verbosity\n self.buffer=buffer\n self.tb_locals=tb_locals\n self.durations=durations\n if warnings is None and not sys.warnoptions:\n \n \n \n self.warnings='default'\n else:\n \n \n \n \n \n self.warnings=warnings\n self.defaultTest=defaultTest\n self.testRunner=testRunner\n self.testLoader=testLoader\n self.progName=os.path.basename(argv[0])\n self.parseArgs(argv)\n self.runTests()\n \n def usageExit(self,msg=None):\n warnings.warn(\"TestProgram.usageExit() is deprecated and will be\"\n \" removed in Python 3.13\",DeprecationWarning)\n if msg:\n print(msg)\n if self._discovery_parser is None:\n self._initArgParsers()\n self._print_help()\n sys.exit(2)\n \n def _print_help(self,*args,**kwargs):\n if self.module is None:\n print(self._main_parser.format_help())\n print(MAIN_EXAMPLES %{'prog':self.progName})\n self._discovery_parser.print_help()\n else:\n print(self._main_parser.format_help())\n print(MODULE_EXAMPLES %{'prog':self.progName})\n \n def parseArgs(self,argv):\n self._initArgParsers()\n if self.module is None:\n if len(argv)>1 and argv[1].lower()=='discover':\n self._do_discovery(argv[2:])\n return\n self._main_parser.parse_args(argv[1:],self)\n if not self.tests:\n \n \n self._do_discovery([])\n return\n else:\n self._main_parser.parse_args(argv[1:],self)\n \n if self.tests:\n self.testNames=_convert_names(self.tests)\n if __name__ =='__main__':\n \n self.module=None\n elif self.defaultTest is None:\n \n self.testNames=None\n elif isinstance(self.defaultTest,str):\n self.testNames=(self.defaultTest,)\n else:\n self.testNames=list(self.defaultTest)\n self.createTests()\n \n def 
createTests(self,from_discovery=False,Loader=None):\n if self.testNamePatterns:\n self.testLoader.testNamePatterns=self.testNamePatterns\n if from_discovery:\n loader=self.testLoader if Loader is None else Loader()\n self.test=loader.discover(self.start,self.pattern,self.top)\n elif self.testNames is None:\n self.test=self.testLoader.loadTestsFromModule(self.module)\n else:\n self.test=self.testLoader.loadTestsFromNames(self.testNames,\n self.module)\n \n def _initArgParsers(self):\n parent_parser=self._getParentArgParser()\n self._main_parser=self._getMainArgParser(parent_parser)\n self._discovery_parser=self._getDiscoveryArgParser(parent_parser)\n \n def _getParentArgParser(self):\n parser=argparse.ArgumentParser(add_help=False)\n \n parser.add_argument('-v','--verbose',dest='verbosity',\n action='store_const',const=2,\n help='Verbose output')\n parser.add_argument('-q','--quiet',dest='verbosity',\n action='store_const',const=0,\n help='Quiet output')\n parser.add_argument('--locals',dest='tb_locals',\n action='store_true',\n help='Show local variables in tracebacks')\n parser.add_argument('--durations',dest='durations',type=int,\n default=None,metavar=\"N\",\n help='Show the N slowest test cases (N=0 for all)')\n if self.failfast is None:\n parser.add_argument('-f','--failfast',dest='failfast',\n action='store_true',\n help='Stop on first fail or error')\n self.failfast=False\n if self.catchbreak is None:\n parser.add_argument('-c','--catch',dest='catchbreak',\n action='store_true',\n help='Catch Ctrl-C and display results so far')\n self.catchbreak=False\n if self.buffer is None:\n parser.add_argument('-b','--buffer',dest='buffer',\n action='store_true',\n help='Buffer stdout and stderr during tests')\n self.buffer=False\n if self.testNamePatterns is None:\n parser.add_argument('-k',dest='testNamePatterns',\n action='append',type=_convert_select_pattern,\n help='Only run tests which match the given substring')\n self.testNamePatterns=[]\n \n return parser\n \n def _getMainArgParser(self,parent):\n parser=argparse.ArgumentParser(parents=[parent])\n parser.prog=self.progName\n parser.print_help=self._print_help\n \n parser.add_argument('tests',nargs='*',\n help='a list of any number of test modules, '\n 'classes and test methods.')\n \n return parser\n \n def _getDiscoveryArgParser(self,parent):\n parser=argparse.ArgumentParser(parents=[parent])\n parser.prog='%s discover'%self.progName\n parser.epilog=('For test discovery all test modules must be '\n 'importable from the top level directory of the '\n 'project.')\n \n parser.add_argument('-s','--start-directory',dest='start',\n help=\"Directory to start discovery ('.' 
default)\")\n parser.add_argument('-p','--pattern',dest='pattern',\n help=\"Pattern to match tests ('test*.py' default)\")\n parser.add_argument('-t','--top-level-directory',dest='top',\n help='Top level directory of project (defaults to '\n 'start directory)')\n for arg in('start','pattern','top'):\n parser.add_argument(arg,nargs='?',\n default=argparse.SUPPRESS,\n help=argparse.SUPPRESS)\n \n return parser\n \n def _do_discovery(self,argv,Loader=None):\n self.start='.'\n self.pattern='test*.py'\n self.top=None\n if argv is not None:\n \n if self._discovery_parser is None:\n \n self._initArgParsers()\n self._discovery_parser.parse_args(argv,self)\n \n self.createTests(from_discovery=True,Loader=Loader)\n \n def runTests(self):\n if self.catchbreak:\n installHandler()\n if self.testRunner is None:\n self.testRunner=runner.TextTestRunner\n if isinstance(self.testRunner,type):\n try:\n try:\n testRunner=self.testRunner(verbosity=self.verbosity,\n failfast=self.failfast,\n buffer=self.buffer,\n warnings=self.warnings,\n tb_locals=self.tb_locals,\n durations=self.durations)\n except TypeError:\n \n testRunner=self.testRunner(verbosity=self.verbosity,\n failfast=self.failfast,\n buffer=self.buffer,\n warnings=self.warnings)\n except TypeError:\n \n testRunner=self.testRunner()\n else:\n \n testRunner=self.testRunner\n self.result=testRunner.run(self.test)\n if self.exit:\n if self.result.testsRun ==0:\n sys.exit(_NO_TESTS_EXITCODE)\n elif self.result.wasSuccessful():\n sys.exit(0)\n else:\n sys.exit(1)\n \n \nmain=TestProgram\n", ["argparse", "os", "sys", "unittest", "unittest.loader", "unittest.runner", "unittest.signals", "warnings"]], "unittest.async_case": [".py", "import asyncio\nimport contextvars\nimport inspect\nimport warnings\n\nfrom.case import TestCase\n\n\nclass IsolatedAsyncioTestCase(TestCase):\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n def __init__(self,methodName='runTest'):\n super().__init__(methodName)\n self._asyncioRunner=None\n self._asyncioTestContext=contextvars.copy_context()\n \n async def asyncSetUp(self):\n pass\n \n async def asyncTearDown(self):\n pass\n \n def addAsyncCleanup(self,func,/,*args,**kwargs):\n \n \n \n \n \n \n \n \n \n \n \n \n self.addCleanup(*(func,*args),**kwargs)\n \n async def enterAsyncContext(self,cm):\n ''\n\n\n\n \n \n \n cls=type(cm)\n try:\n enter=cls.__aenter__\n exit=cls.__aexit__\n except AttributeError:\n raise TypeError(f\"'{cls.__module__}.{cls.__qualname__}' object does \"\n f\"not support the asynchronous context manager protocol\"\n )from None\n result=await enter(cm)\n self.addAsyncCleanup(exit,cm,None,None,None)\n return result\n \n def _callSetUp(self):\n \n \n \n self._asyncioRunner.get_loop()\n self._asyncioTestContext.run(self.setUp)\n self._callAsync(self.asyncSetUp)\n \n def _callTestMethod(self,method):\n if self._callMaybeAsync(method)is not None:\n warnings.warn(f'It is deprecated to return a value that is not None from a '\n f'test case ({method})',DeprecationWarning,stacklevel=4)\n \n def _callTearDown(self):\n self._callAsync(self.asyncTearDown)\n self._asyncioTestContext.run(self.tearDown)\n \n def _callCleanup(self,function,*args,**kwargs):\n self._callMaybeAsync(function,*args,**kwargs)\n \n def _callAsync(self,func,/,*args,**kwargs):\n assert self._asyncioRunner is not None,'asyncio runner is not initialized'\n assert inspect.iscoroutinefunction(func),f'{func !r} is not an async function'\n return self._asyncioRunner.run(\n func(*args,**kwargs),\n context=self._asyncioTestContext\n )\n \n def 
_callMaybeAsync(self,func,/,*args,**kwargs):\n assert self._asyncioRunner is not None,'asyncio runner is not initialized'\n if inspect.iscoroutinefunction(func):\n return self._asyncioRunner.run(\n func(*args,**kwargs),\n context=self._asyncioTestContext,\n )\n else:\n return self._asyncioTestContext.run(func,*args,**kwargs)\n \n def _setupAsyncioRunner(self):\n assert self._asyncioRunner is None,'asyncio runner is already initialized'\n runner=asyncio.Runner(debug=True)\n self._asyncioRunner=runner\n \n def _tearDownAsyncioRunner(self):\n runner=self._asyncioRunner\n runner.close()\n \n def run(self,result=None):\n self._setupAsyncioRunner()\n try:\n return super().run(result)\n finally:\n self._tearDownAsyncioRunner()\n \n def debug(self):\n self._setupAsyncioRunner()\n super().debug()\n self._tearDownAsyncioRunner()\n \n def __del__(self):\n if self._asyncioRunner is not None:\n self._tearDownAsyncioRunner()\n", ["asyncio", "contextvars", "inspect", "unittest.case", "warnings"]], "unittest.__main__": [".py", "''\n\nimport sys\nif sys.argv[0].endswith(\"__main__.py\"):\n import os.path\n \n \n \n \n executable=os.path.basename(sys.executable)\n sys.argv[0]=executable+\" -m unittest\"\n del os\n \n__unittest=True\n\nfrom.main import main\n\nmain(module=None)\n", ["os.path", "sys", "unittest.main"]], "unittest._log": [".py", "import logging\nimport collections\n\nfrom.case import _BaseTestCaseContext\n\n\n_LoggingWatcher=collections.namedtuple(\"_LoggingWatcher\",\n[\"records\",\"output\"])\n\nclass _CapturingHandler(logging.Handler):\n ''\n\n \n \n def __init__(self):\n logging.Handler.__init__(self)\n self.watcher=_LoggingWatcher([],[])\n \n def flush(self):\n pass\n \n def emit(self,record):\n self.watcher.records.append(record)\n msg=self.format(record)\n self.watcher.output.append(msg)\n \n \nclass _AssertLogsContext(_BaseTestCaseContext):\n ''\n \n LOGGING_FORMAT=\"%(levelname)s:%(name)s:%(message)s\"\n \n def __init__(self,test_case,logger_name,level,no_logs):\n _BaseTestCaseContext.__init__(self,test_case)\n self.logger_name=logger_name\n if level:\n self.level=logging._nameToLevel.get(level,level)\n else:\n self.level=logging.INFO\n self.msg=None\n self.no_logs=no_logs\n \n def __enter__(self):\n if isinstance(self.logger_name,logging.Logger):\n logger=self.logger=self.logger_name\n else:\n logger=self.logger=logging.getLogger(self.logger_name)\n formatter=logging.Formatter(self.LOGGING_FORMAT)\n handler=_CapturingHandler()\n handler.setLevel(self.level)\n handler.setFormatter(formatter)\n self.watcher=handler.watcher\n self.old_handlers=logger.handlers[:]\n self.old_level=logger.level\n self.old_propagate=logger.propagate\n logger.handlers=[handler]\n logger.setLevel(self.level)\n logger.propagate=False\n if self.no_logs:\n return\n return handler.watcher\n \n def __exit__(self,exc_type,exc_value,tb):\n self.logger.handlers=self.old_handlers\n self.logger.propagate=self.old_propagate\n self.logger.setLevel(self.old_level)\n \n if exc_type is not None:\n \n return False\n \n if self.no_logs:\n \n if len(self.watcher.records)>0:\n self._raiseFailure(\n \"Unexpected logs found: {!r}\".format(\n self.watcher.output\n )\n )\n \n else:\n \n if len(self.watcher.records)==0:\n self._raiseFailure(\n \"no logs of level {} or higher triggered on {}\"\n .format(logging.getLevelName(self.level),self.logger.name))\n", ["collections", "logging", "unittest.case"]], "unittest.mock": [".py", 
"\n\n\n\n\n\n__all__=(\n'Mock',\n'MagicMock',\n'patch',\n'sentinel',\n'DEFAULT',\n'ANY',\n'call',\n'create_autospec',\n'AsyncMock',\n'FILTER_DIR',\n'NonCallableMock',\n'NonCallableMagicMock',\n'mock_open',\n'PropertyMock',\n'seal',\n)\n\n\nimport asyncio\nimport contextlib\nimport io\nimport inspect\nimport pprint\nimport sys\nimport builtins\nimport pkgutil\nfrom asyncio import iscoroutinefunction\nfrom types import CodeType,ModuleType,MethodType\nfrom unittest.util import safe_repr\nfrom functools import wraps,partial\nfrom threading import RLock\n\n\nclass InvalidSpecError(Exception):\n ''\n \n \n_builtins={name for name in dir(builtins)if not name.startswith('_')}\n\nFILTER_DIR=True\n\n\n\n_safe_super=super\n\ndef _is_async_obj(obj):\n if _is_instance_mock(obj)and not isinstance(obj,AsyncMock):\n return False\n if hasattr(obj,'__func__'):\n obj=getattr(obj,'__func__')\n return iscoroutinefunction(obj)or inspect.isawaitable(obj)\n \n \ndef _is_async_func(func):\n if getattr(func,'__code__',None):\n return iscoroutinefunction(func)\n else:\n return False\n \n \ndef _is_instance_mock(obj):\n\n\n return issubclass(type(obj),NonCallableMock)\n \n \ndef _is_exception(obj):\n return(\n isinstance(obj,BaseException)or\n isinstance(obj,type)and issubclass(obj,BaseException)\n )\n \n \ndef _extract_mock(obj):\n\n\n if isinstance(obj,FunctionTypes)and hasattr(obj,'mock'):\n return obj.mock\n else:\n return obj\n \n \ndef _get_signature_object(func,as_instance,eat_self):\n ''\n\n\n\n \n if isinstance(func,type)and not as_instance:\n \n func=func.__init__\n \n eat_self=True\n elif isinstance(func,(classmethod,staticmethod)):\n if isinstance(func,classmethod):\n \n eat_self=True\n \n func=func.__func__\n elif not isinstance(func,FunctionTypes):\n \n \n try:\n func=func.__call__\n except AttributeError:\n return None\n if eat_self:\n sig_func=partial(func,None)\n else:\n sig_func=func\n try:\n return func,inspect.signature(sig_func)\n except ValueError:\n \n return None\n \n \ndef _check_signature(func,mock,skipfirst,instance=False):\n sig=_get_signature_object(func,instance,skipfirst)\n if sig is None:\n return\n func,sig=sig\n def checksig(self,/,*args,**kwargs):\n sig.bind(*args,**kwargs)\n _copy_func_details(func,checksig)\n type(mock)._mock_check_sig=checksig\n type(mock).__signature__=sig\n \n \ndef _copy_func_details(func,funcopy):\n\n\n for attribute in(\n '__name__','__doc__','__text_signature__',\n '__module__','__defaults__','__kwdefaults__',\n ):\n try:\n setattr(funcopy,attribute,getattr(func,attribute))\n except AttributeError:\n pass\n \n \ndef _callable(obj):\n if isinstance(obj,type):\n return True\n if isinstance(obj,(staticmethod,classmethod,MethodType)):\n return _callable(obj.__func__)\n if getattr(obj,'__call__',None)is not None:\n return True\n return False\n \n \ndef _is_list(obj):\n\n\n return type(obj)in(list,tuple)\n \n \ndef _instance_callable(obj):\n ''\n \n if not isinstance(obj,type):\n \n return getattr(obj,'__call__',None)is not None\n \n \n \n for base in(obj,)+obj.__mro__:\n if base.__dict__.get('__call__')is not None:\n return True\n return False\n \n \ndef _set_signature(mock,original,instance=False):\n\n\n\n\n skipfirst=isinstance(original,type)\n result=_get_signature_object(original,instance,skipfirst)\n if result is None:\n return mock\n func,sig=result\n def checksig(*args,**kwargs):\n sig.bind(*args,**kwargs)\n _copy_func_details(func,checksig)\n \n name=original.__name__\n if not name.isidentifier():\n name='funcopy'\n 
context={'_checksig_':checksig,'mock':mock}\n src=\"\"\"def %s(*args, **kwargs):\n _checksig_(*args, **kwargs)\n return mock(*args, **kwargs)\"\"\"%name\n exec(src,context)\n funcopy=context[name]\n _setup_func(funcopy,mock,sig)\n return funcopy\n \n \ndef _setup_func(funcopy,mock,sig):\n funcopy.mock=mock\n \n def assert_called_with(*args,**kwargs):\n return mock.assert_called_with(*args,**kwargs)\n def assert_called(*args,**kwargs):\n return mock.assert_called(*args,**kwargs)\n def assert_not_called(*args,**kwargs):\n return mock.assert_not_called(*args,**kwargs)\n def assert_called_once(*args,**kwargs):\n return mock.assert_called_once(*args,**kwargs)\n def assert_called_once_with(*args,**kwargs):\n return mock.assert_called_once_with(*args,**kwargs)\n def assert_has_calls(*args,**kwargs):\n return mock.assert_has_calls(*args,**kwargs)\n def assert_any_call(*args,**kwargs):\n return mock.assert_any_call(*args,**kwargs)\n def reset_mock():\n funcopy.method_calls=_CallList()\n funcopy.mock_calls=_CallList()\n mock.reset_mock()\n ret=funcopy.return_value\n if _is_instance_mock(ret)and not ret is mock:\n ret.reset_mock()\n \n funcopy.called=False\n funcopy.call_count=0\n funcopy.call_args=None\n funcopy.call_args_list=_CallList()\n funcopy.method_calls=_CallList()\n funcopy.mock_calls=_CallList()\n \n funcopy.return_value=mock.return_value\n funcopy.side_effect=mock.side_effect\n funcopy._mock_children=mock._mock_children\n \n funcopy.assert_called_with=assert_called_with\n funcopy.assert_called_once_with=assert_called_once_with\n funcopy.assert_has_calls=assert_has_calls\n funcopy.assert_any_call=assert_any_call\n funcopy.reset_mock=reset_mock\n funcopy.assert_called=assert_called\n funcopy.assert_not_called=assert_not_called\n funcopy.assert_called_once=assert_called_once\n funcopy.__signature__=sig\n \n mock._mock_delegate=funcopy\n \n \ndef _setup_async_mock(mock):\n mock._is_coroutine=asyncio.coroutines._is_coroutine\n mock.await_count=0\n mock.await_args=None\n mock.await_args_list=_CallList()\n \n \n \n \n def wrapper(attr,/,*args,**kwargs):\n return getattr(mock.mock,attr)(*args,**kwargs)\n \n for attribute in('assert_awaited',\n 'assert_awaited_once',\n 'assert_awaited_with',\n 'assert_awaited_once_with',\n 'assert_any_await',\n 'assert_has_awaits',\n 'assert_not_awaited'):\n \n \n \n \n \n setattr(mock,attribute,partial(wrapper,attribute))\n \n \ndef _is_magic(name):\n return '__%s__'%name[2:-2]==name\n \n \nclass _SentinelObject(object):\n ''\n def __init__(self,name):\n self.name=name\n \n def __repr__(self):\n return 'sentinel.%s'%self.name\n \n def __reduce__(self):\n return 'sentinel.%s'%self.name\n \n \nclass _Sentinel(object):\n ''\n def __init__(self):\n self._sentinels={}\n \n def __getattr__(self,name):\n if name =='__bases__':\n \n raise AttributeError\n return self._sentinels.setdefault(name,_SentinelObject(name))\n \n def __reduce__(self):\n return 'sentinel'\n \n \nsentinel=_Sentinel()\n\nDEFAULT=sentinel.DEFAULT\n_missing=sentinel.MISSING\n_deleted=sentinel.DELETED\n\n\n_allowed_names={\n'return_value','_mock_return_value','side_effect',\n'_mock_side_effect','_mock_parent','_mock_new_parent',\n'_mock_name','_mock_new_name'\n}\n\n\ndef _delegating_property(name):\n _allowed_names.add(name)\n _the_name='_mock_'+name\n def _get(self,name=name,_the_name=_the_name):\n sig=self._mock_delegate\n if sig is None:\n return getattr(self,_the_name)\n return getattr(sig,name)\n def _set(self,value,name=name,_the_name=_the_name):\n sig=self._mock_delegate\n if sig is None:\n 
self.__dict__[_the_name]=value\n else:\n setattr(sig,name,value)\n \n return property(_get,_set)\n \n \n \nclass _CallList(list):\n\n def __contains__(self,value):\n if not isinstance(value,list):\n return list.__contains__(self,value)\n len_value=len(value)\n len_self=len(self)\n if len_value >len_self:\n return False\n \n for i in range(0,len_self -len_value+1):\n sub_list=self[i:i+len_value]\n if sub_list ==value:\n return True\n return False\n \n def __repr__(self):\n return pprint.pformat(list(self))\n \n \ndef _check_and_set_parent(parent,value,name,new_name):\n value=_extract_mock(value)\n \n if not _is_instance_mock(value):\n return False\n if((value._mock_name or value._mock_new_name)or\n (value._mock_parent is not None)or\n (value._mock_new_parent is not None)):\n return False\n \n _parent=parent\n while _parent is not None:\n \n \n if _parent is value:\n return False\n _parent=_parent._mock_new_parent\n \n if new_name:\n value._mock_new_parent=parent\n value._mock_new_name=new_name\n if name:\n value._mock_parent=parent\n value._mock_name=name\n return True\n \n \nclass _MockIter(object):\n def __init__(self,obj):\n self.obj=iter(obj)\n def __next__(self):\n return next(self.obj)\n \nclass Base(object):\n _mock_return_value=DEFAULT\n _mock_side_effect=None\n def __init__(self,/,*args,**kwargs):\n pass\n \n \n \nclass NonCallableMock(Base):\n ''\n \n \n \n \n \n \n \n _lock=RLock()\n \n def __new__(\n cls,spec=None,wraps=None,name=None,spec_set=None,\n parent=None,_spec_state=None,_new_name='',_new_parent=None,\n _spec_as_instance=False,_eat_self=None,unsafe=False,**kwargs\n ):\n \n \n \n bases=(cls,)\n if not issubclass(cls,AsyncMockMixin):\n \n spec_arg=spec_set or spec\n if spec_arg is not None and _is_async_obj(spec_arg):\n bases=(AsyncMockMixin,cls)\n new=type(cls.__name__,bases,{'__doc__':cls.__doc__})\n instance=_safe_super(NonCallableMock,cls).__new__(new)\n return instance\n \n \n def __init__(\n self,spec=None,wraps=None,name=None,spec_set=None,\n parent=None,_spec_state=None,_new_name='',_new_parent=None,\n _spec_as_instance=False,_eat_self=None,unsafe=False,**kwargs\n ):\n if _new_parent is None:\n _new_parent=parent\n \n __dict__=self.__dict__\n __dict__['_mock_parent']=parent\n __dict__['_mock_name']=name\n __dict__['_mock_new_name']=_new_name\n __dict__['_mock_new_parent']=_new_parent\n __dict__['_mock_sealed']=False\n \n if spec_set is not None:\n spec=spec_set\n spec_set=True\n if _eat_self is None:\n _eat_self=parent is not None\n \n self._mock_add_spec(spec,spec_set,_spec_as_instance,_eat_self)\n \n __dict__['_mock_children']={}\n __dict__['_mock_wraps']=wraps\n __dict__['_mock_delegate']=None\n \n __dict__['_mock_called']=False\n __dict__['_mock_call_args']=None\n __dict__['_mock_call_count']=0\n __dict__['_mock_call_args_list']=_CallList()\n __dict__['_mock_mock_calls']=_CallList()\n \n __dict__['method_calls']=_CallList()\n __dict__['_mock_unsafe']=unsafe\n \n if kwargs:\n self.configure_mock(**kwargs)\n \n _safe_super(NonCallableMock,self).__init__(\n spec,wraps,name,spec_set,parent,\n _spec_state\n )\n \n \n def attach_mock(self,mock,attribute):\n ''\n\n\n \n inner_mock=_extract_mock(mock)\n \n inner_mock._mock_parent=None\n inner_mock._mock_new_parent=None\n inner_mock._mock_name=''\n inner_mock._mock_new_name=None\n \n setattr(self,attribute,mock)\n \n \n def mock_add_spec(self,spec,spec_set=False):\n ''\n\n\n\n \n self._mock_add_spec(spec,spec_set)\n \n \n def _mock_add_spec(self,spec,spec_set,_spec_as_instance=False,\n _eat_self=False):\n if 
_is_instance_mock(spec):\n raise InvalidSpecError(f'Cannot spec a Mock object. [object={spec !r}]')\n \n _spec_class=None\n _spec_signature=None\n _spec_asyncs=[]\n \n if spec is not None and not _is_list(spec):\n if isinstance(spec,type):\n _spec_class=spec\n else:\n _spec_class=type(spec)\n res=_get_signature_object(spec,\n _spec_as_instance,_eat_self)\n _spec_signature=res and res[1]\n \n spec_list=dir(spec)\n \n for attr in spec_list:\n if iscoroutinefunction(getattr(spec,attr,None)):\n _spec_asyncs.append(attr)\n \n spec=spec_list\n \n __dict__=self.__dict__\n __dict__['_spec_class']=_spec_class\n __dict__['_spec_set']=spec_set\n __dict__['_spec_signature']=_spec_signature\n __dict__['_mock_methods']=spec\n __dict__['_spec_asyncs']=_spec_asyncs\n \n def __get_return_value(self):\n ret=self._mock_return_value\n if self._mock_delegate is not None:\n ret=self._mock_delegate.return_value\n \n if ret is DEFAULT:\n ret=self._get_child_mock(\n _new_parent=self,_new_name='()'\n )\n self.return_value=ret\n return ret\n \n \n def __set_return_value(self,value):\n if self._mock_delegate is not None:\n self._mock_delegate.return_value=value\n else:\n self._mock_return_value=value\n _check_and_set_parent(self,value,None,'()')\n \n __return_value_doc=\"The value to be returned when the mock is called.\"\n return_value=property(__get_return_value,__set_return_value,\n __return_value_doc)\n \n \n @property\n def __class__(self):\n if self._spec_class is None:\n return type(self)\n return self._spec_class\n \n called=_delegating_property('called')\n call_count=_delegating_property('call_count')\n call_args=_delegating_property('call_args')\n call_args_list=_delegating_property('call_args_list')\n mock_calls=_delegating_property('mock_calls')\n \n \n def __get_side_effect(self):\n delegated=self._mock_delegate\n if delegated is None:\n return self._mock_side_effect\n sf=delegated.side_effect\n if(sf is not None and not callable(sf)\n and not isinstance(sf,_MockIter)and not _is_exception(sf)):\n sf=_MockIter(sf)\n delegated.side_effect=sf\n return sf\n \n def __set_side_effect(self,value):\n value=_try_iter(value)\n delegated=self._mock_delegate\n if delegated is None:\n self._mock_side_effect=value\n else:\n delegated.side_effect=value\n \n side_effect=property(__get_side_effect,__set_side_effect)\n \n \n def reset_mock(self,visited=None,*,return_value=False,side_effect=False):\n ''\n if visited is None:\n visited=[]\n if id(self)in visited:\n return\n visited.append(id(self))\n \n self.called=False\n self.call_args=None\n self.call_count=0\n self.mock_calls=_CallList()\n self.call_args_list=_CallList()\n self.method_calls=_CallList()\n \n if return_value:\n self._mock_return_value=DEFAULT\n if side_effect:\n self._mock_side_effect=None\n \n for child in self._mock_children.values():\n if isinstance(child,_SpecState)or child is _deleted:\n continue\n child.reset_mock(visited,return_value=return_value,side_effect=side_effect)\n \n ret=self._mock_return_value\n if _is_instance_mock(ret)and ret is not self:\n ret.reset_mock(visited)\n \n \n def configure_mock(self,/,**kwargs):\n ''\n\n\n\n\n\n\n \n for arg,val in sorted(kwargs.items(),\n \n \n \n key=lambda entry:entry[0].count('.')):\n args=arg.split('.')\n final=args.pop()\n obj=self\n for entry in args:\n obj=getattr(obj,entry)\n setattr(obj,final,val)\n \n \n def __getattr__(self,name):\n if name in{'_mock_methods','_mock_unsafe'}:\n raise AttributeError(name)\n elif self._mock_methods is not None:\n if name not in self._mock_methods or name in 
_all_magics:\n raise AttributeError(\"Mock object has no attribute %r\"%name)\n elif _is_magic(name):\n raise AttributeError(name)\n if not self._mock_unsafe and(not self._mock_methods or name not in self._mock_methods):\n if name.startswith(('assert','assret','asert','aseert','assrt'))or name in _ATTRIB_DENY_LIST:\n raise AttributeError(\n f\"{name !r} is not a valid assertion. Use a spec \"\n f\"for the mock if {name !r} is meant to be an attribute.\")\n \n with NonCallableMock._lock:\n result=self._mock_children.get(name)\n if result is _deleted:\n raise AttributeError(name)\n elif result is None:\n wraps=None\n if self._mock_wraps is not None:\n \n \n wraps=getattr(self._mock_wraps,name)\n \n result=self._get_child_mock(\n parent=self,name=name,wraps=wraps,_new_name=name,\n _new_parent=self\n )\n self._mock_children[name]=result\n \n elif isinstance(result,_SpecState):\n try:\n result=create_autospec(\n result.spec,result.spec_set,result.instance,\n result.parent,result.name\n )\n except InvalidSpecError:\n target_name=self.__dict__['_mock_name']or self\n raise InvalidSpecError(\n f'Cannot autospec attr {name !r} from target '\n f'{target_name !r} as it has already been mocked out. '\n f'[target={self !r}, attr={result.spec !r}]')\n self._mock_children[name]=result\n \n return result\n \n \n def _extract_mock_name(self):\n _name_list=[self._mock_new_name]\n _parent=self._mock_new_parent\n last=self\n \n dot='.'\n if _name_list ==['()']:\n dot=''\n \n while _parent is not None:\n last=_parent\n \n _name_list.append(_parent._mock_new_name+dot)\n dot='.'\n if _parent._mock_new_name =='()':\n dot=''\n \n _parent=_parent._mock_new_parent\n \n _name_list=list(reversed(_name_list))\n _first=last._mock_name or 'mock'\n if len(_name_list)>1:\n if _name_list[1]not in('()','().'):\n _first +='.'\n _name_list[0]=_first\n return ''.join(_name_list)\n \n def __repr__(self):\n name=self._extract_mock_name()\n \n name_string=''\n if name not in('mock','mock.'):\n name_string=' name=%r'%name\n \n spec_string=''\n if self._spec_class is not None:\n spec_string=' spec=%r'\n if self._spec_set:\n spec_string=' spec_set=%r'\n spec_string=spec_string %self._spec_class.__name__\n return \"<%s%s%s id='%s'>\"%(\n type(self).__name__,\n name_string,\n spec_string,\n id(self)\n )\n \n \n def __dir__(self):\n ''\n if not FILTER_DIR:\n return object.__dir__(self)\n \n extras=self._mock_methods or[]\n from_type=dir(type(self))\n from_dict=list(self.__dict__)\n from_child_mocks=[\n m_name for m_name,m_value in self._mock_children.items()\n if m_value is not _deleted]\n \n from_type=[e for e in from_type if not e.startswith('_')]\n from_dict=[e for e in from_dict if not e.startswith('_')or\n _is_magic(e)]\n return sorted(set(extras+from_type+from_dict+from_child_mocks))\n \n \n def __setattr__(self,name,value):\n if name in _allowed_names:\n \n return object.__setattr__(self,name,value)\n elif(self._spec_set and self._mock_methods is not None and\n name not in self._mock_methods and\n name not in self.__dict__):\n raise AttributeError(\"Mock object has no attribute '%s'\"%name)\n elif name in _unsupported_magics:\n msg='Attempting to set unsupported magic method %r.'%name\n raise AttributeError(msg)\n elif name in _all_magics:\n if self._mock_methods is not None and name not in self._mock_methods:\n raise AttributeError(\"Mock object has no attribute '%s'\"%name)\n \n if not _is_instance_mock(value):\n setattr(type(self),name,_get_method(name,value))\n original=value\n value=lambda 
*args,**kw:original(self,*args,**kw)\n else:\n \n \n _check_and_set_parent(self,value,None,name)\n setattr(type(self),name,value)\n self._mock_children[name]=value\n elif name =='__class__':\n self._spec_class=value\n return\n else:\n if _check_and_set_parent(self,value,name,name):\n self._mock_children[name]=value\n \n if self._mock_sealed and not hasattr(self,name):\n mock_name=f'{self._extract_mock_name()}.{name}'\n raise AttributeError(f'Cannot set {mock_name}')\n \n return object.__setattr__(self,name,value)\n \n \n def __delattr__(self,name):\n if name in _all_magics and name in type(self).__dict__:\n delattr(type(self),name)\n if name not in self.__dict__:\n \n \n return\n \n obj=self._mock_children.get(name,_missing)\n if name in self.__dict__:\n _safe_super(NonCallableMock,self).__delattr__(name)\n elif obj is _deleted:\n raise AttributeError(name)\n if obj is not _missing:\n del self._mock_children[name]\n self._mock_children[name]=_deleted\n \n \n def _format_mock_call_signature(self,args,kwargs):\n name=self._mock_name or 'mock'\n return _format_call_signature(name,args,kwargs)\n \n \n def _format_mock_failure_message(self,args,kwargs,action='call'):\n message='expected %s not found.\\nExpected: %s\\nActual: %s'\n expected_string=self._format_mock_call_signature(args,kwargs)\n call_args=self.call_args\n actual_string=self._format_mock_call_signature(*call_args)\n return message %(action,expected_string,actual_string)\n \n \n def _get_call_signature_from_name(self,name):\n ''\n\n\n\n\n\n\n\n\n \n if not name:\n return self._spec_signature\n \n sig=None\n names=name.replace('()','').split('.')\n children=self._mock_children\n \n for name in names:\n child=children.get(name)\n if child is None or isinstance(child,_SpecState):\n break\n else:\n \n \n \n child=_extract_mock(child)\n children=child._mock_children\n sig=child._spec_signature\n \n return sig\n \n \n def _call_matcher(self,_call):\n ''\n\n\n\n\n \n \n if isinstance(_call,tuple)and len(_call)>2:\n sig=self._get_call_signature_from_name(_call[0])\n else:\n sig=self._spec_signature\n \n if sig is not None:\n if len(_call)==2:\n name=''\n args,kwargs=_call\n else:\n name,args,kwargs=_call\n try:\n bound_call=sig.bind(*args,**kwargs)\n return call(name,bound_call.args,bound_call.kwargs)\n except TypeError as e:\n return e.with_traceback(None)\n else:\n return _call\n \n def assert_not_called(self):\n ''\n \n if self.call_count !=0:\n msg=(\"Expected '%s' to not have been called. Called %s times.%s\"\n %(self._mock_name or 'mock',\n self.call_count,\n self._calls_repr()))\n raise AssertionError(msg)\n \n def assert_called(self):\n ''\n \n if self.call_count ==0:\n msg=(\"Expected '%s' to have been called.\"%\n (self._mock_name or 'mock'))\n raise AssertionError(msg)\n \n def assert_called_once(self):\n ''\n \n if not self.call_count ==1:\n msg=(\"Expected '%s' to have been called once. 
Called %s times.%s\"\n %(self._mock_name or 'mock',\n self.call_count,\n self._calls_repr()))\n raise AssertionError(msg)\n \n def assert_called_with(self,/,*args,**kwargs):\n ''\n\n\n \n if self.call_args is None:\n expected=self._format_mock_call_signature(args,kwargs)\n actual='not called.'\n error_message=('expected call not found.\\nExpected: %s\\nActual: %s'\n %(expected,actual))\n raise AssertionError(error_message)\n \n def _error_message():\n msg=self._format_mock_failure_message(args,kwargs)\n return msg\n expected=self._call_matcher(_Call((args,kwargs),two=True))\n actual=self._call_matcher(self.call_args)\n if actual !=expected:\n cause=expected if isinstance(expected,Exception)else None\n raise AssertionError(_error_message())from cause\n \n \n def assert_called_once_with(self,/,*args,**kwargs):\n ''\n \n if not self.call_count ==1:\n msg=(\"Expected '%s' to be called once. Called %s times.%s\"\n %(self._mock_name or 'mock',\n self.call_count,\n self._calls_repr()))\n raise AssertionError(msg)\n return self.assert_called_with(*args,**kwargs)\n \n \n def assert_has_calls(self,calls,any_order=False):\n ''\n\n\n\n\n\n\n\n \n expected=[self._call_matcher(c)for c in calls]\n cause=next((e for e in expected if isinstance(e,Exception)),None)\n all_calls=_CallList(self._call_matcher(c)for c in self.mock_calls)\n if not any_order:\n if expected not in all_calls:\n if cause is None:\n problem='Calls not found.'\n else:\n problem=('Error processing expected calls.\\n'\n 'Errors: {}').format(\n [e if isinstance(e,Exception)else None\n for e in expected])\n raise AssertionError(\n f'{problem}\\n'\n f'Expected: {_CallList(calls)}'\n f'{self._calls_repr(prefix=\"Actual\").rstrip(\".\")}'\n )from cause\n return\n \n all_calls=list(all_calls)\n \n not_found=[]\n for kall in expected:\n try:\n all_calls.remove(kall)\n except ValueError:\n not_found.append(kall)\n if not_found:\n raise AssertionError(\n '%r does not contain all of %r in its call list, '\n 'found %r instead'%(self._mock_name or 'mock',\n tuple(not_found),all_calls)\n )from cause\n \n \n def assert_any_call(self,/,*args,**kwargs):\n ''\n\n\n\n \n expected=self._call_matcher(_Call((args,kwargs),two=True))\n cause=expected if isinstance(expected,Exception)else None\n actual=[self._call_matcher(c)for c in self.call_args_list]\n if cause or expected not in _AnyComparer(actual):\n expected_string=self._format_mock_call_signature(args,kwargs)\n raise AssertionError(\n '%s call not found'%expected_string\n )from cause\n \n \n def _get_child_mock(self,/,**kw):\n ''\n\n\n\n\n\n \n if self._mock_sealed:\n attribute=f\".{kw['name']}\"if \"name\"in kw else \"()\"\n mock_name=self._extract_mock_name()+attribute\n raise AttributeError(mock_name)\n \n _new_name=kw.get(\"_new_name\")\n if _new_name in self.__dict__['_spec_asyncs']:\n return AsyncMock(**kw)\n \n _type=type(self)\n if issubclass(_type,MagicMock)and _new_name in _async_method_magics:\n \n klass=AsyncMock\n elif issubclass(_type,AsyncMockMixin):\n if(_new_name in _all_sync_magics or\n self._mock_methods and _new_name in self._mock_methods):\n \n klass=MagicMock\n else:\n klass=AsyncMock\n elif not issubclass(_type,CallableMixin):\n if issubclass(_type,NonCallableMagicMock):\n klass=MagicMock\n elif issubclass(_type,NonCallableMock):\n klass=Mock\n else:\n klass=_type.__mro__[1]\n return klass(**kw)\n \n \n def _calls_repr(self,prefix=\"Calls\"):\n ''\n\n\n\n\n\n \n if not self.mock_calls:\n return \"\"\n return f\"\\n{prefix}: {safe_repr(self.mock_calls)}.\"\n \n \n 
\n_ATTRIB_DENY_LIST=frozenset({\nname.removeprefix(\"assert_\")\nfor name in dir(NonCallableMock)\nif name.startswith(\"assert_\")\n})\n\n\nclass _AnyComparer(list):\n ''\n\n\n \n def __contains__(self,item):\n for _call in self:\n assert len(item)==len(_call)\n if all([\n expected ==actual\n for expected,actual in zip(item,_call)\n ]):\n return True\n return False\n \n \ndef _try_iter(obj):\n if obj is None:\n return obj\n if _is_exception(obj):\n return obj\n if _callable(obj):\n return obj\n try:\n return iter(obj)\n except TypeError:\n \n \n return obj\n \n \nclass CallableMixin(Base):\n\n def __init__(self,spec=None,side_effect=None,return_value=DEFAULT,\n wraps=None,name=None,spec_set=None,parent=None,\n _spec_state=None,_new_name='',_new_parent=None,**kwargs):\n self.__dict__['_mock_return_value']=return_value\n _safe_super(CallableMixin,self).__init__(\n spec,wraps,name,spec_set,parent,\n _spec_state,_new_name,_new_parent,**kwargs\n )\n \n self.side_effect=side_effect\n \n \n def _mock_check_sig(self,/,*args,**kwargs):\n \n pass\n \n \n def __call__(self,/,*args,**kwargs):\n \n \n self._mock_check_sig(*args,**kwargs)\n self._increment_mock_call(*args,**kwargs)\n return self._mock_call(*args,**kwargs)\n \n \n def _mock_call(self,/,*args,**kwargs):\n return self._execute_mock_call(*args,**kwargs)\n \n def _increment_mock_call(self,/,*args,**kwargs):\n self.called=True\n self.call_count +=1\n \n \n \n \n _call=_Call((args,kwargs),two=True)\n self.call_args=_call\n self.call_args_list.append(_call)\n \n \n do_method_calls=self._mock_parent is not None\n method_call_name=self._mock_name\n \n \n mock_call_name=self._mock_new_name\n is_a_call=mock_call_name =='()'\n self.mock_calls.append(_Call(('',args,kwargs)))\n \n \n _new_parent=self._mock_new_parent\n while _new_parent is not None:\n \n \n if do_method_calls:\n _new_parent.method_calls.append(_Call((method_call_name,args,kwargs)))\n do_method_calls=_new_parent._mock_parent is not None\n if do_method_calls:\n method_call_name=_new_parent._mock_name+'.'+method_call_name\n \n \n this_mock_call=_Call((mock_call_name,args,kwargs))\n _new_parent.mock_calls.append(this_mock_call)\n \n if _new_parent._mock_new_name:\n if is_a_call:\n dot=''\n else:\n dot='.'\n is_a_call=_new_parent._mock_new_name =='()'\n mock_call_name=_new_parent._mock_new_name+dot+mock_call_name\n \n \n _new_parent=_new_parent._mock_new_parent\n \n def _execute_mock_call(self,/,*args,**kwargs):\n \n \n \n effect=self.side_effect\n if effect is not None:\n if _is_exception(effect):\n raise effect\n elif not _callable(effect):\n result=next(effect)\n if _is_exception(result):\n raise result\n else:\n result=effect(*args,**kwargs)\n \n if result is not DEFAULT:\n return result\n \n if self._mock_return_value is not DEFAULT:\n return self.return_value\n \n if self._mock_wraps is not None:\n return self._mock_wraps(*args,**kwargs)\n \n return self.return_value\n \n \n \nclass Mock(CallableMixin,NonCallableMock):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \ndef _check_spec_arg_typos(kwargs_to_check):\n typos=(\"autospect\",\"auto_spec\",\"set_spec\")\n for typo in typos:\n if typo in kwargs_to_check:\n raise RuntimeError(\n f\"{typo !r} might be a typo; use unsafe=True if this is intended\"\n )\n \n \nclass _patch(object):\n\n attribute_name=None\n _active_patches=[]\n \n def __init__(\n self,getter,attribute,new,spec,create,\n spec_set,autospec,new_callable,kwargs,*,unsafe=False\n ):\n if 
new_callable is not None:\n if new is not DEFAULT:\n raise ValueError(\n \"Cannot use 'new' and 'new_callable' together\"\n )\n if autospec is not None:\n raise ValueError(\n \"Cannot use 'autospec' and 'new_callable' together\"\n )\n if not unsafe:\n _check_spec_arg_typos(kwargs)\n if _is_instance_mock(spec):\n raise InvalidSpecError(\n f'Cannot spec attr {attribute !r} as the spec '\n f'has already been mocked out. [spec={spec !r}]')\n if _is_instance_mock(spec_set):\n raise InvalidSpecError(\n f'Cannot spec attr {attribute !r} as the spec_set '\n f'target has already been mocked out. [spec_set={spec_set !r}]')\n \n self.getter=getter\n self.attribute=attribute\n self.new=new\n self.new_callable=new_callable\n self.spec=spec\n self.create=create\n self.has_local=False\n self.spec_set=spec_set\n self.autospec=autospec\n self.kwargs=kwargs\n self.additional_patchers=[]\n \n \n def copy(self):\n patcher=_patch(\n self.getter,self.attribute,self.new,self.spec,\n self.create,self.spec_set,\n self.autospec,self.new_callable,self.kwargs\n )\n patcher.attribute_name=self.attribute_name\n patcher.additional_patchers=[\n p.copy()for p in self.additional_patchers\n ]\n return patcher\n \n \n def __call__(self,func):\n if isinstance(func,type):\n return self.decorate_class(func)\n if inspect.iscoroutinefunction(func):\n return self.decorate_async_callable(func)\n return self.decorate_callable(func)\n \n \n def decorate_class(self,klass):\n for attr in dir(klass):\n if not attr.startswith(patch.TEST_PREFIX):\n continue\n \n attr_value=getattr(klass,attr)\n if not hasattr(attr_value,\"__call__\"):\n continue\n \n patcher=self.copy()\n setattr(klass,attr,patcher(attr_value))\n return klass\n \n \n @contextlib.contextmanager\n def decoration_helper(self,patched,args,keywargs):\n extra_args=[]\n with contextlib.ExitStack()as exit_stack:\n for patching in patched.patchings:\n arg=exit_stack.enter_context(patching)\n if patching.attribute_name is not None:\n keywargs.update(arg)\n elif patching.new is DEFAULT:\n extra_args.append(arg)\n \n args +=tuple(extra_args)\n yield(args,keywargs)\n \n \n def decorate_callable(self,func):\n \n if hasattr(func,'patchings'):\n func.patchings.append(self)\n return func\n \n @wraps(func)\n def patched(*args,**keywargs):\n with self.decoration_helper(patched,\n args,\n keywargs)as(newargs,newkeywargs):\n return func(*newargs,**newkeywargs)\n \n patched.patchings=[self]\n return patched\n \n \n def decorate_async_callable(self,func):\n \n if hasattr(func,'patchings'):\n func.patchings.append(self)\n return func\n \n @wraps(func)\n async def patched(*args,**keywargs):\n with self.decoration_helper(patched,\n args,\n keywargs)as(newargs,newkeywargs):\n return await func(*newargs,**newkeywargs)\n \n patched.patchings=[self]\n return patched\n \n \n def get_original(self):\n target=self.getter()\n name=self.attribute\n \n original=DEFAULT\n local=False\n \n try:\n original=target.__dict__[name]\n except(AttributeError,KeyError):\n original=getattr(target,name,DEFAULT)\n else:\n local=True\n \n if name in _builtins and isinstance(target,ModuleType):\n self.create=True\n \n if not self.create and original is DEFAULT:\n raise AttributeError(\n \"%s does not have the attribute %r\"%(target,name)\n )\n return original,local\n \n \n def __enter__(self):\n ''\n new,spec,spec_set=self.new,self.spec,self.spec_set\n autospec,kwargs=self.autospec,self.kwargs\n new_callable=self.new_callable\n self.target=self.getter()\n \n \n if spec is False:\n spec=None\n if spec_set is False:\n 
spec_set=None\n if autospec is False:\n autospec=None\n \n if spec is not None and autospec is not None:\n raise TypeError(\"Can't specify spec and autospec\")\n if((spec is not None or autospec is not None)and\n spec_set not in(True,None)):\n raise TypeError(\"Can't provide explicit spec_set *and* spec or autospec\")\n \n original,local=self.get_original()\n \n if new is DEFAULT and autospec is None:\n inherit=False\n if spec is True:\n \n spec=original\n if spec_set is True:\n spec_set=original\n spec=None\n elif spec is not None:\n if spec_set is True:\n spec_set=spec\n spec=None\n elif spec_set is True:\n spec_set=original\n \n if spec is not None or spec_set is not None:\n if original is DEFAULT:\n raise TypeError(\"Can't use 'spec' with create=True\")\n if isinstance(original,type):\n \n inherit=True\n if spec is None and _is_async_obj(original):\n Klass=AsyncMock\n else:\n Klass=MagicMock\n _kwargs={}\n if new_callable is not None:\n Klass=new_callable\n elif spec is not None or spec_set is not None:\n this_spec=spec\n if spec_set is not None:\n this_spec=spec_set\n if _is_list(this_spec):\n not_callable='__call__'not in this_spec\n else:\n not_callable=not callable(this_spec)\n if _is_async_obj(this_spec):\n Klass=AsyncMock\n elif not_callable:\n Klass=NonCallableMagicMock\n \n if spec is not None:\n _kwargs['spec']=spec\n if spec_set is not None:\n _kwargs['spec_set']=spec_set\n \n \n if(isinstance(Klass,type)and\n issubclass(Klass,NonCallableMock)and self.attribute):\n _kwargs['name']=self.attribute\n \n _kwargs.update(kwargs)\n new=Klass(**_kwargs)\n \n if inherit and _is_instance_mock(new):\n \n \n this_spec=spec\n if spec_set is not None:\n this_spec=spec_set\n if(not _is_list(this_spec)and not\n _instance_callable(this_spec)):\n Klass=NonCallableMagicMock\n \n _kwargs.pop('name')\n new.return_value=Klass(_new_parent=new,_new_name='()',\n **_kwargs)\n elif autospec is not None:\n \n \n \n if new is not DEFAULT:\n raise TypeError(\n \"autospec creates the mock for you. Can't specify \"\n \"autospec and new.\"\n )\n if original is DEFAULT:\n raise TypeError(\"Can't use 'autospec' with create=True\")\n spec_set=bool(spec_set)\n if autospec is True:\n autospec=original\n \n if _is_instance_mock(self.target):\n raise InvalidSpecError(\n f'Cannot autospec attr {self.attribute !r} as the patch '\n f'target has already been mocked out. '\n f'[target={self.target !r}, attr={autospec !r}]')\n if _is_instance_mock(autospec):\n target_name=getattr(self.target,'__name__',self.target)\n raise InvalidSpecError(\n f'Cannot autospec attr {self.attribute !r} from target '\n f'{target_name !r} as it has already been mocked out. 
'\n f'[target={self.target !r}, attr={autospec !r}]')\n \n new=create_autospec(autospec,spec_set=spec_set,\n _name=self.attribute,**kwargs)\n elif kwargs:\n \n \n raise TypeError(\"Can't pass kwargs to a mock we aren't creating\")\n \n new_attr=new\n \n self.temp_original=original\n self.is_local=local\n self._exit_stack=contextlib.ExitStack()\n try:\n setattr(self.target,self.attribute,new_attr)\n if self.attribute_name is not None:\n extra_args={}\n if self.new is DEFAULT:\n extra_args[self.attribute_name]=new\n for patching in self.additional_patchers:\n arg=self._exit_stack.enter_context(patching)\n if patching.new is DEFAULT:\n extra_args.update(arg)\n return extra_args\n \n return new\n except:\n if not self.__exit__(*sys.exc_info()):\n raise\n \n def __exit__(self,*exc_info):\n ''\n if self.is_local and self.temp_original is not DEFAULT:\n setattr(self.target,self.attribute,self.temp_original)\n else:\n delattr(self.target,self.attribute)\n if not self.create and(not hasattr(self.target,self.attribute)or\n self.attribute in('__doc__','__module__',\n '__defaults__','__annotations__',\n '__kwdefaults__')):\n \n setattr(self.target,self.attribute,self.temp_original)\n \n del self.temp_original\n del self.is_local\n del self.target\n exit_stack=self._exit_stack\n del self._exit_stack\n return exit_stack.__exit__(*exc_info)\n \n \n def start(self):\n ''\n result=self.__enter__()\n self._active_patches.append(self)\n return result\n \n \n def stop(self):\n ''\n try:\n self._active_patches.remove(self)\n except ValueError:\n \n return None\n \n return self.__exit__(None,None,None)\n \n \n \ndef _get_target(target):\n try:\n target,attribute=target.rsplit('.',1)\n except(TypeError,ValueError,AttributeError):\n raise TypeError(\n f\"Need a valid target to patch. 
You supplied: {target !r}\")\n return partial(pkgutil.resolve_name,target),attribute\n \n \ndef _patch_object(\ntarget,attribute,new=DEFAULT,spec=None,\ncreate=False,spec_set=None,autospec=None,\nnew_callable=None,*,unsafe=False,**kwargs\n):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if type(target)is str:\n raise TypeError(\n f\"{target !r} must be the actual object to be patched, not a str\"\n )\n getter=lambda:target\n return _patch(\n getter,attribute,new,spec,create,\n spec_set,autospec,new_callable,kwargs,unsafe=unsafe\n )\n \n \ndef _patch_multiple(target,spec=None,create=False,spec_set=None,\nautospec=None,new_callable=None,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if type(target)is str:\n getter=partial(pkgutil.resolve_name,target)\n else:\n getter=lambda:target\n \n if not kwargs:\n raise ValueError(\n 'Must supply at least one keyword argument with patch.multiple'\n )\n \n items=list(kwargs.items())\n attribute,new=items[0]\n patcher=_patch(\n getter,attribute,new,spec,create,spec_set,\n autospec,new_callable,{}\n )\n patcher.attribute_name=attribute\n for attribute,new in items[1:]:\n this_patcher=_patch(\n getter,attribute,new,spec,create,spec_set,\n autospec,new_callable,{}\n )\n this_patcher.attribute_name=attribute\n patcher.additional_patchers.append(this_patcher)\n return patcher\n \n \ndef patch(\ntarget,new=DEFAULT,spec=None,create=False,\nspec_set=None,autospec=None,new_callable=None,*,unsafe=False,**kwargs\n):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n getter,attribute=_get_target(target)\n return _patch(\n getter,attribute,new,spec,create,\n spec_set,autospec,new_callable,kwargs,unsafe=unsafe\n )\n \n \nclass _patch_dict(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,in_dict,values=(),clear=False,**kwargs):\n self.in_dict=in_dict\n \n self.values=dict(values)\n self.values.update(kwargs)\n self.clear=clear\n self._original=None\n \n \n def __call__(self,f):\n if isinstance(f,type):\n return self.decorate_class(f)\n if inspect.iscoroutinefunction(f):\n return self.decorate_async_callable(f)\n return self.decorate_callable(f)\n \n \n def decorate_callable(self,f):\n @wraps(f)\n def _inner(*args,**kw):\n self._patch_dict()\n try:\n return f(*args,**kw)\n finally:\n self._unpatch_dict()\n \n return _inner\n \n \n def decorate_async_callable(self,f):\n @wraps(f)\n async def _inner(*args,**kw):\n self._patch_dict()\n try:\n return await f(*args,**kw)\n finally:\n self._unpatch_dict()\n \n return _inner\n \n \n def decorate_class(self,klass):\n for attr in dir(klass):\n attr_value=getattr(klass,attr)\n if(attr.startswith(patch.TEST_PREFIX)and\n hasattr(attr_value,\"__call__\")):\n decorator=_patch_dict(self.in_dict,self.values,self.clear)\n decorated=decorator(attr_value)\n setattr(klass,attr,decorated)\n return klass\n \n \n def __enter__(self):\n ''\n self._patch_dict()\n return self.in_dict\n \n \n def _patch_dict(self):\n values=self.values\n if isinstance(self.in_dict,str):\n self.in_dict=pkgutil.resolve_name(self.in_dict)\n in_dict=self.in_dict\n clear=self.clear\n \n try:\n original=in_dict.copy()\n except AttributeError:\n \n \n original={}\n for key in in_dict:\n original[key]=in_dict[key]\n self._original=original\n \n if clear:\n _clear_dict(in_dict)\n \n try:\n in_dict.update(values)\n except AttributeError:\n \n for key in values:\n in_dict[key]=values[key]\n \n \n def _unpatch_dict(self):\n 
in_dict=self.in_dict\n original=self._original\n \n _clear_dict(in_dict)\n \n try:\n in_dict.update(original)\n except AttributeError:\n for key in original:\n in_dict[key]=original[key]\n \n \n def __exit__(self,*args):\n ''\n if self._original is not None:\n self._unpatch_dict()\n return False\n \n \n def start(self):\n ''\n result=self.__enter__()\n _patch._active_patches.append(self)\n return result\n \n \n def stop(self):\n ''\n try:\n _patch._active_patches.remove(self)\n except ValueError:\n \n return None\n \n return self.__exit__(None,None,None)\n \n \ndef _clear_dict(in_dict):\n try:\n in_dict.clear()\n except AttributeError:\n keys=list(in_dict)\n for key in keys:\n del in_dict[key]\n \n \ndef _patch_stopall():\n ''\n for patch in reversed(_patch._active_patches):\n patch.stop()\n \n \npatch.object=_patch_object\npatch.dict=_patch_dict\npatch.multiple=_patch_multiple\npatch.stopall=_patch_stopall\npatch.TEST_PREFIX='test'\n\nmagic_methods=(\n\"lt le gt ge eq ne \"\n\"getitem setitem delitem \"\n\"len contains iter \"\n\"hash str sizeof \"\n\"enter exit \"\n\n\n\"divmod rdivmod neg pos abs invert \"\n\"complex int float index \"\n\"round trunc floor ceil \"\n\"bool next \"\n\"fspath \"\n\"aiter \"\n)\n\nnumerics=(\n\"add sub mul matmul truediv floordiv mod lshift rshift and xor or pow\"\n)\ninplace=' '.join('i%s'%n for n in numerics.split())\nright=' '.join('r%s'%n for n in numerics.split())\n\n\n\n\n\n_non_defaults={\n'__get__','__set__','__delete__','__reversed__','__missing__',\n'__reduce__','__reduce_ex__','__getinitargs__','__getnewargs__',\n'__getstate__','__setstate__','__getformat__',\n'__repr__','__dir__','__subclasses__','__format__',\n'__getnewargs_ex__',\n}\n\n\ndef _get_method(name,func):\n ''\n def method(self,/,*args,**kw):\n return func(self,*args,**kw)\n method.__name__=name\n return method\n \n \n_magics={\n'__%s__'%method for method in\n' '.join([magic_methods,numerics,inplace,right]).split()\n}\n\n\n_async_method_magics={\"__aenter__\",\"__aexit__\",\"__anext__\"}\n\n_sync_async_magics={\"__aiter__\"}\n_async_magics=_async_method_magics |_sync_async_magics\n\n_all_sync_magics=_magics |_non_defaults\n_all_magics=_all_sync_magics |_async_magics\n\n_unsupported_magics={\n'__getattr__','__setattr__',\n'__init__','__new__','__prepare__',\n'__instancecheck__','__subclasscheck__',\n'__del__'\n}\n\n_calculate_return_value={\n'__hash__':lambda self:object.__hash__(self),\n'__str__':lambda self:object.__str__(self),\n'__sizeof__':lambda self:object.__sizeof__(self),\n'__fspath__':lambda self:f\"{type(self).__name__}/{self._extract_mock_name()}/{id(self)}\",\n}\n\n_return_values={\n'__lt__':NotImplemented,\n'__gt__':NotImplemented,\n'__le__':NotImplemented,\n'__ge__':NotImplemented,\n'__int__':1,\n'__contains__':False,\n'__len__':0,\n'__exit__':False,\n'__complex__':1j,\n'__float__':1.0,\n'__bool__':True,\n'__index__':1,\n'__aexit__':False,\n}\n\n\ndef _get_eq(self):\n def __eq__(other):\n ret_val=self.__eq__._mock_return_value\n if ret_val is not DEFAULT:\n return ret_val\n if self is other:\n return True\n return NotImplemented\n return __eq__\n \ndef _get_ne(self):\n def __ne__(other):\n if self.__ne__._mock_return_value is not DEFAULT:\n return DEFAULT\n if self is other:\n return False\n return NotImplemented\n return __ne__\n \ndef _get_iter(self):\n def __iter__():\n ret_val=self.__iter__._mock_return_value\n if ret_val is DEFAULT:\n return iter([])\n \n \n return iter(ret_val)\n return __iter__\n \ndef _get_async_iter(self):\n def __aiter__():\n 
ret_val=self.__aiter__._mock_return_value\n if ret_val is DEFAULT:\n return _AsyncIterator(iter([]))\n return _AsyncIterator(iter(ret_val))\n return __aiter__\n \n_side_effect_methods={\n'__eq__':_get_eq,\n'__ne__':_get_ne,\n'__iter__':_get_iter,\n'__aiter__':_get_async_iter\n}\n\n\n\ndef _set_return_value(mock,method,name):\n fixed=_return_values.get(name,DEFAULT)\n if fixed is not DEFAULT:\n method.return_value=fixed\n return\n \n return_calculator=_calculate_return_value.get(name)\n if return_calculator is not None:\n return_value=return_calculator(mock)\n method.return_value=return_value\n return\n \n side_effector=_side_effect_methods.get(name)\n if side_effector is not None:\n method.side_effect=side_effector(mock)\n \n \n \nclass MagicMixin(Base):\n def __init__(self,/,*args,**kw):\n self._mock_set_magics()\n _safe_super(MagicMixin,self).__init__(*args,**kw)\n self._mock_set_magics()\n \n \n def _mock_set_magics(self):\n orig_magics=_magics |_async_method_magics\n these_magics=orig_magics\n \n if getattr(self,\"_mock_methods\",None)is not None:\n these_magics=orig_magics.intersection(self._mock_methods)\n \n remove_magics=set()\n remove_magics=orig_magics -these_magics\n \n for entry in remove_magics:\n if entry in type(self).__dict__:\n \n delattr(self,entry)\n \n \n these_magics=these_magics -set(type(self).__dict__)\n \n _type=type(self)\n for entry in these_magics:\n setattr(_type,entry,MagicProxy(entry,self))\n \n \n \nclass NonCallableMagicMock(MagicMixin,NonCallableMock):\n ''\n def mock_add_spec(self,spec,spec_set=False):\n ''\n\n\n\n \n self._mock_add_spec(spec,spec_set)\n self._mock_set_magics()\n \n \nclass AsyncMagicMixin(MagicMixin):\n pass\n \n \nclass MagicMock(MagicMixin,Mock):\n ''\n\n\n\n\n\n\n\n\n \n def mock_add_spec(self,spec,spec_set=False):\n ''\n\n\n\n \n self._mock_add_spec(spec,spec_set)\n self._mock_set_magics()\n \n \n \nclass MagicProxy(Base):\n def __init__(self,name,parent):\n self.name=name\n self.parent=parent\n \n def create_mock(self):\n entry=self.name\n parent=self.parent\n m=parent._get_child_mock(name=entry,_new_name=entry,\n _new_parent=parent)\n setattr(parent,entry,m)\n _set_return_value(parent,m,entry)\n return m\n \n def __get__(self,obj,_type=None):\n return self.create_mock()\n \n \n_CODE_ATTRS=dir(CodeType)\n_CODE_SIG=inspect.signature(partial(CodeType.__init__,None))\n\n\nclass AsyncMockMixin(Base):\n await_count=_delegating_property('await_count')\n await_args=_delegating_property('await_args')\n await_args_list=_delegating_property('await_args_list')\n \n def __init__(self,/,*args,**kwargs):\n super().__init__(*args,**kwargs)\n \n \n \n \n \n \n self.__dict__['_is_coroutine']=asyncio.coroutines._is_coroutine\n self.__dict__['_mock_await_count']=0\n self.__dict__['_mock_await_args']=None\n self.__dict__['_mock_await_args_list']=_CallList()\n code_mock=NonCallableMock(spec_set=_CODE_ATTRS)\n code_mock.__dict__[\"_spec_class\"]=CodeType\n code_mock.__dict__[\"_spec_signature\"]=_CODE_SIG\n code_mock.co_flags=(\n inspect.CO_COROUTINE\n +inspect.CO_VARARGS\n +inspect.CO_VARKEYWORDS\n )\n code_mock.co_argcount=0\n code_mock.co_varnames=('args','kwargs')\n code_mock.co_posonlyargcount=0\n code_mock.co_kwonlyargcount=0\n self.__dict__['__code__']=code_mock\n self.__dict__['__name__']='AsyncMock'\n self.__dict__['__defaults__']=tuple()\n self.__dict__['__kwdefaults__']={}\n self.__dict__['__annotations__']=None\n \n async def _execute_mock_call(self,/,*args,**kwargs):\n \n \n \n _call=_Call((args,kwargs),two=True)\n self.await_count +=1\n 
self.await_args=_call\n self.await_args_list.append(_call)\n \n effect=self.side_effect\n if effect is not None:\n if _is_exception(effect):\n raise effect\n elif not _callable(effect):\n try:\n result=next(effect)\n except StopIteration:\n \n \n raise StopAsyncIteration\n if _is_exception(result):\n raise result\n elif iscoroutinefunction(effect):\n result=await effect(*args,**kwargs)\n else:\n result=effect(*args,**kwargs)\n \n if result is not DEFAULT:\n return result\n \n if self._mock_return_value is not DEFAULT:\n return self.return_value\n \n if self._mock_wraps is not None:\n if iscoroutinefunction(self._mock_wraps):\n return await self._mock_wraps(*args,**kwargs)\n return self._mock_wraps(*args,**kwargs)\n \n return self.return_value\n \n def assert_awaited(self):\n ''\n\n \n if self.await_count ==0:\n msg=f\"Expected {self._mock_name or 'mock'} to have been awaited.\"\n raise AssertionError(msg)\n \n def assert_awaited_once(self):\n ''\n\n \n if not self.await_count ==1:\n msg=(f\"Expected {self._mock_name or 'mock'} to have been awaited once.\"\n f\" Awaited {self.await_count} times.\")\n raise AssertionError(msg)\n \n def assert_awaited_with(self,/,*args,**kwargs):\n ''\n\n \n if self.await_args is None:\n expected=self._format_mock_call_signature(args,kwargs)\n raise AssertionError(f'Expected await: {expected}\\nNot awaited')\n \n def _error_message():\n msg=self._format_mock_failure_message(args,kwargs,action='await')\n return msg\n \n expected=self._call_matcher(_Call((args,kwargs),two=True))\n actual=self._call_matcher(self.await_args)\n if actual !=expected:\n cause=expected if isinstance(expected,Exception)else None\n raise AssertionError(_error_message())from cause\n \n def assert_awaited_once_with(self,/,*args,**kwargs):\n ''\n\n\n \n if not self.await_count ==1:\n msg=(f\"Expected {self._mock_name or 'mock'} to have been awaited once.\"\n f\" Awaited {self.await_count} times.\")\n raise AssertionError(msg)\n return self.assert_awaited_with(*args,**kwargs)\n \n def assert_any_await(self,/,*args,**kwargs):\n ''\n\n \n expected=self._call_matcher(_Call((args,kwargs),two=True))\n cause=expected if isinstance(expected,Exception)else None\n actual=[self._call_matcher(c)for c in self.await_args_list]\n if cause or expected not in _AnyComparer(actual):\n expected_string=self._format_mock_call_signature(args,kwargs)\n raise AssertionError(\n '%s await not found'%expected_string\n )from cause\n \n def assert_has_awaits(self,calls,any_order=False):\n ''\n\n\n\n\n\n\n\n\n\n \n expected=[self._call_matcher(c)for c in calls]\n cause=next((e for e in expected if isinstance(e,Exception)),None)\n all_awaits=_CallList(self._call_matcher(c)for c in self.await_args_list)\n if not any_order:\n if expected not in all_awaits:\n if cause is None:\n problem='Awaits not found.'\n else:\n problem=('Error processing expected awaits.\\n'\n 'Errors: {}').format(\n [e if isinstance(e,Exception)else None\n for e in expected])\n raise AssertionError(\n f'{problem}\\n'\n f'Expected: {_CallList(calls)}\\n'\n f'Actual: {self.await_args_list}'\n )from cause\n return\n \n all_awaits=list(all_awaits)\n \n not_found=[]\n for kall in expected:\n try:\n all_awaits.remove(kall)\n except ValueError:\n not_found.append(kall)\n if not_found:\n raise AssertionError(\n '%r not all found in await list'%(tuple(not_found),)\n )from cause\n \n def assert_not_awaited(self):\n ''\n\n \n if self.await_count !=0:\n msg=(f\"Expected {self._mock_name or 'mock'} to not have been awaited.\"\n f\" Awaited {self.await_count} 
times.\")\n raise AssertionError(msg)\n \n def reset_mock(self,/,*args,**kwargs):\n ''\n\n \n super().reset_mock(*args,**kwargs)\n self.await_count=0\n self.await_args=None\n self.await_args_list=_CallList()\n \n \nclass AsyncMock(AsyncMockMixin,AsyncMagicMixin,Mock):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \nclass _ANY(object):\n ''\n \n def __eq__(self,other):\n return True\n \n def __ne__(self,other):\n return False\n \n def __repr__(self):\n return ''\n \nANY=_ANY()\n\n\n\ndef _format_call_signature(name,args,kwargs):\n message='%s(%%s)'%name\n formatted_args=''\n args_string=', '.join([repr(arg)for arg in args])\n kwargs_string=', '.join([\n '%s=%r'%(key,value)for key,value in kwargs.items()\n ])\n if args_string:\n formatted_args=args_string\n if kwargs_string:\n if formatted_args:\n formatted_args +=', '\n formatted_args +=kwargs_string\n \n return message %formatted_args\n \n \n \nclass _Call(tuple):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __new__(cls,value=(),name='',parent=None,two=False,\n from_kall=True):\n args=()\n kwargs={}\n _len=len(value)\n if _len ==3:\n name,args,kwargs=value\n elif _len ==2:\n first,second=value\n if isinstance(first,str):\n name=first\n if isinstance(second,tuple):\n args=second\n else:\n kwargs=second\n else:\n args,kwargs=first,second\n elif _len ==1:\n value,=value\n if isinstance(value,str):\n name=value\n elif isinstance(value,tuple):\n args=value\n else:\n kwargs=value\n \n if two:\n return tuple.__new__(cls,(args,kwargs))\n \n return tuple.__new__(cls,(name,args,kwargs))\n \n \n def __init__(self,value=(),name=None,parent=None,two=False,\n from_kall=True):\n self._mock_name=name\n self._mock_parent=parent\n self._mock_from_kall=from_kall\n \n \n def __eq__(self,other):\n try:\n len_other=len(other)\n except TypeError:\n return NotImplemented\n \n self_name=''\n if len(self)==2:\n self_args,self_kwargs=self\n else:\n self_name,self_args,self_kwargs=self\n \n if(getattr(self,'_mock_parent',None)and getattr(other,'_mock_parent',None)\n and self._mock_parent !=other._mock_parent):\n return False\n \n other_name=''\n if len_other ==0:\n other_args,other_kwargs=(),{}\n elif len_other ==3:\n other_name,other_args,other_kwargs=other\n elif len_other ==1:\n value,=other\n if isinstance(value,tuple):\n other_args=value\n other_kwargs={}\n elif isinstance(value,str):\n other_name=value\n other_args,other_kwargs=(),{}\n else:\n other_args=()\n other_kwargs=value\n elif len_other ==2:\n \n first,second=other\n if isinstance(first,str):\n other_name=first\n if isinstance(second,tuple):\n other_args,other_kwargs=second,{}\n else:\n other_args,other_kwargs=(),second\n else:\n other_args,other_kwargs=first,second\n else:\n return False\n \n if self_name and other_name !=self_name:\n return False\n \n \n return(other_args,other_kwargs)==(self_args,self_kwargs)\n \n \n __ne__=object.__ne__\n \n \n def __call__(self,/,*args,**kwargs):\n if self._mock_name is None:\n return _Call(('',args,kwargs),name='()')\n \n name=self._mock_name+'()'\n return _Call((self._mock_name,args,kwargs),name=name,parent=self)\n \n \n def __getattr__(self,attr):\n if self._mock_name is None:\n return _Call(name=attr,from_kall=False)\n name='%s.%s'%(self._mock_name,attr)\n return _Call(name=name,parent=self,from_kall=False)\n \n \n def __getattribute__(self,attr):\n if attr in tuple.__dict__:\n raise AttributeError\n return tuple.__getattribute__(self,attr)\n \n \n def _get_call_arguments(self):\n if len(self)==2:\n args,kwargs=self\n 
else:\n name,args,kwargs=self\n \n return args,kwargs\n \n @property\n def args(self):\n return self._get_call_arguments()[0]\n \n @property\n def kwargs(self):\n return self._get_call_arguments()[1]\n \n def __repr__(self):\n if not self._mock_from_kall:\n name=self._mock_name or 'call'\n if name.startswith('()'):\n name='call%s'%name\n return name\n \n if len(self)==2:\n name='call'\n args,kwargs=self\n else:\n name,args,kwargs=self\n if not name:\n name='call'\n elif not name.startswith('()'):\n name='call.%s'%name\n else:\n name='call%s'%name\n return _format_call_signature(name,args,kwargs)\n \n \n def call_list(self):\n ''\n\n \n vals=[]\n thing=self\n while thing is not None:\n if thing._mock_from_kall:\n vals.append(thing)\n thing=thing._mock_parent\n return _CallList(reversed(vals))\n \n \ncall=_Call(from_kall=False)\n\n\ndef create_autospec(spec,spec_set=False,instance=False,_parent=None,\n_name=None,*,unsafe=False,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _is_list(spec):\n \n \n spec=type(spec)\n \n is_type=isinstance(spec,type)\n if _is_instance_mock(spec):\n raise InvalidSpecError(f'Cannot autospec a Mock object. '\n f'[object={spec !r}]')\n is_async_func=_is_async_func(spec)\n _kwargs={'spec':spec}\n if spec_set:\n _kwargs={'spec_set':spec}\n elif spec is None:\n \n _kwargs={}\n if _kwargs and instance:\n _kwargs['_spec_as_instance']=True\n if not unsafe:\n _check_spec_arg_typos(kwargs)\n \n _kwargs.update(kwargs)\n \n Klass=MagicMock\n if inspect.isdatadescriptor(spec):\n \n \n _kwargs={}\n elif is_async_func:\n if instance:\n raise RuntimeError(\"Instance can not be True when create_autospec \"\n \"is mocking an async function\")\n Klass=AsyncMock\n elif not _callable(spec):\n Klass=NonCallableMagicMock\n elif is_type and instance and not _instance_callable(spec):\n Klass=NonCallableMagicMock\n \n _name=_kwargs.pop('name',_name)\n \n _new_name=_name\n if _parent is None:\n \n _new_name=''\n \n mock=Klass(parent=_parent,_new_parent=_parent,_new_name=_new_name,\n name=_name,**_kwargs)\n \n if isinstance(spec,FunctionTypes):\n \n \n mock=_set_signature(mock,spec)\n if is_async_func:\n _setup_async_mock(mock)\n else:\n _check_signature(spec,mock,is_type,instance)\n \n if _parent is not None and not instance:\n _parent._mock_children[_name]=mock\n \n if is_type and not instance and 'return_value'not in kwargs:\n mock.return_value=create_autospec(spec,spec_set,instance=True,\n _name='()',_parent=mock)\n \n for entry in dir(spec):\n if _is_magic(entry):\n \n continue\n \n \n \n \n \n \n \n \n \n \n try:\n original=getattr(spec,entry)\n except AttributeError:\n continue\n \n kwargs={'spec':original}\n if spec_set:\n kwargs={'spec_set':original}\n \n if not isinstance(original,FunctionTypes):\n new=_SpecState(original,spec_set,mock,entry,instance)\n mock._mock_children[entry]=new\n else:\n parent=mock\n if isinstance(spec,FunctionTypes):\n parent=mock.mock\n \n skipfirst=_must_skip(spec,entry,is_type)\n kwargs['_eat_self']=skipfirst\n if iscoroutinefunction(original):\n child_klass=AsyncMock\n else:\n child_klass=MagicMock\n new=child_klass(parent=parent,name=entry,_new_name=entry,\n _new_parent=parent,\n **kwargs)\n mock._mock_children[entry]=new\n new.return_value=child_klass()\n _check_signature(original,new,skipfirst=skipfirst)\n \n \n \n \n \n if isinstance(new,FunctionTypes):\n setattr(mock,entry,new)\n \n return mock\n \n \ndef _must_skip(spec,entry,is_type):\n ''\n\n\n \n if not isinstance(spec,type):\n if entry in getattr(spec,'__dict__',{}):\n \n return 
False\n spec=spec.__class__\n \n for klass in spec.__mro__:\n result=klass.__dict__.get(entry,DEFAULT)\n if result is DEFAULT:\n continue\n if isinstance(result,(staticmethod,classmethod)):\n return False\n elif isinstance(result,FunctionTypes):\n \n \n return is_type\n else:\n return False\n \n \n return is_type\n \n \nclass _SpecState(object):\n\n def __init__(self,spec,spec_set=False,parent=None,\n name=None,ids=None,instance=False):\n self.spec=spec\n self.ids=ids\n self.spec_set=spec_set\n self.parent=parent\n self.instance=instance\n self.name=name\n \n \nFunctionTypes=(\n\ntype(create_autospec),\n\ntype(ANY.__eq__),\n)\n\n\nfile_spec=None\nopen_spec=None\n\n\ndef _to_stream(read_data):\n if isinstance(read_data,bytes):\n return io.BytesIO(read_data)\n else:\n return io.StringIO(read_data)\n \n \ndef mock_open(mock=None,read_data=''):\n ''\n\n\n\n\n\n\n\n\n\n \n _read_data=_to_stream(read_data)\n _state=[_read_data,None]\n \n def _readlines_side_effect(*args,**kwargs):\n if handle.readlines.return_value is not None:\n return handle.readlines.return_value\n return _state[0].readlines(*args,**kwargs)\n \n def _read_side_effect(*args,**kwargs):\n if handle.read.return_value is not None:\n return handle.read.return_value\n return _state[0].read(*args,**kwargs)\n \n def _readline_side_effect(*args,**kwargs):\n yield from _iter_side_effect()\n while True:\n yield _state[0].readline(*args,**kwargs)\n \n def _iter_side_effect():\n if handle.readline.return_value is not None:\n while True:\n yield handle.readline.return_value\n for line in _state[0]:\n yield line\n \n def _next_side_effect():\n if handle.readline.return_value is not None:\n return handle.readline.return_value\n return next(_state[0])\n \n global file_spec\n if file_spec is None:\n import _io\n file_spec=list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))\n \n global open_spec\n if open_spec is None:\n import _io\n open_spec=list(set(dir(_io.open)))\n if mock is None:\n mock=MagicMock(name='open',spec=open_spec)\n \n handle=MagicMock(spec=file_spec)\n handle.__enter__.return_value=handle\n \n handle.write.return_value=None\n handle.read.return_value=None\n handle.readline.return_value=None\n handle.readlines.return_value=None\n \n handle.read.side_effect=_read_side_effect\n _state[1]=_readline_side_effect()\n handle.readline.side_effect=_state[1]\n handle.readlines.side_effect=_readlines_side_effect\n handle.__iter__.side_effect=_iter_side_effect\n handle.__next__.side_effect=_next_side_effect\n \n def reset_data(*args,**kwargs):\n _state[0]=_to_stream(read_data)\n if handle.readline.side_effect ==_state[1]:\n \n _state[1]=_readline_side_effect()\n handle.readline.side_effect=_state[1]\n return DEFAULT\n \n mock.side_effect=reset_data\n mock.return_value=handle\n return mock\n \n \nclass PropertyMock(Mock):\n ''\n\n\n\n\n\n\n \n def _get_child_mock(self,/,**kwargs):\n return MagicMock(**kwargs)\n \n def __get__(self,obj,obj_type=None):\n return self()\n def __set__(self,obj,val):\n self(val)\n \n \ndef seal(mock):\n ''\n\n\n\n\n\n\n\n \n mock._mock_sealed=True\n for attr in dir(mock):\n try:\n m=getattr(mock,attr)\n except AttributeError:\n continue\n if not isinstance(m,NonCallableMock):\n continue\n if isinstance(m._mock_children.get(attr),_SpecState):\n continue\n if m._mock_new_parent is mock:\n seal(m)\n \n \nclass _AsyncIterator:\n ''\n\n \n def __init__(self,iterator):\n self.iterator=iterator\n code_mock=NonCallableMock(spec_set=CodeType)\n code_mock.co_flags=inspect.CO_ITERABLE_COROUTINE\n 
self.__dict__['__code__']=code_mock\n \n async def __anext__(self):\n try:\n return next(self.iterator)\n except StopIteration:\n pass\n raise StopAsyncIteration\n", ["_io", "asyncio", "builtins", "contextlib", "functools", "inspect", "io", "pkgutil", "pprint", "sys", "threading", "types", "unittest.util"]], "multiprocessing.util": [".py", "\n\n\n\n\n\n\n\n\nimport sys\nimport functools\nimport os\nimport itertools\nimport weakref\nimport atexit\nimport threading\n\nfrom subprocess import _args_from_interpreter_flags\n\nfrom multiprocessing.process import current_process,active_children\n\n__all__=[\n'sub_debug','debug','info','sub_warning','get_logger',\n'log_to_stderr','get_temp_dir','register_after_fork',\n'is_exiting','Finalize','ForkAwareThreadLock','ForkAwareLocal',\n'SUBDEBUG','SUBWARNING',\n]\n\n\n\n\n\nNOTSET=0\nSUBDEBUG=5\nDEBUG=10\nINFO=20\nSUBWARNING=25\n\nLOGGER_NAME='multiprocessing'\nDEFAULT_LOGGING_FORMAT='[%(levelname)s/%(processName)s] %(message)s'\n\n_logger=None\n_log_to_stderr=False\n\ndef sub_debug(msg,*args):\n if _logger:\n _logger.log(SUBDEBUG,msg,*args)\n \ndef debug(msg,*args):\n if _logger:\n _logger.log(DEBUG,msg,*args)\n \ndef info(msg,*args):\n if _logger:\n _logger.log(INFO,msg,*args)\n \ndef sub_warning(msg,*args):\n if _logger:\n _logger.log(SUBWARNING,msg,*args)\n \ndef get_logger():\n ''\n\n \n global _logger\n import logging\n \n logging._acquireLock()\n try :\n if not _logger:\n \n _logger=logging.getLogger(LOGGER_NAME)\n _logger.propagate=0\n logging.addLevelName(SUBDEBUG,'SUBDEBUG')\n logging.addLevelName(SUBWARNING,'SUBWARNING')\n \n \n if hasattr(atexit,'unregister'):\n atexit.unregister(_exit_function)\n atexit.register(_exit_function)\n else :\n atexit._exithandlers.remove((_exit_function,(),{}))\n atexit._exithandlers.append((_exit_function,(),{}))\n \n finally :\n logging._releaseLock()\n \n return _logger\n \ndef log_to_stderr(level=None ):\n ''\n\n \n global _log_to_stderr\n import logging\n \n logger=get_logger()\n formatter=logging.Formatter(DEFAULT_LOGGING_FORMAT)\n handler=logging.StreamHandler()\n handler.setFormatter(formatter)\n logger.addHandler(handler)\n \n if level:\n logger.setLevel(level)\n _log_to_stderr=True\n return _logger\n \n \n \n \n \ndef get_temp_dir():\n\n if current_process()._tempdir is None :\n import shutil,tempfile\n tempdir=tempfile.mkdtemp(prefix='pymp-')\n info('created temp directory %s',tempdir)\n Finalize(None ,shutil.rmtree,args=[tempdir],exitpriority=-100)\n current_process()._tempdir=tempdir\n return current_process()._tempdir\n \n \n \n \n \n_afterfork_registry=weakref.WeakValueDictionary()\n_afterfork_counter=itertools.count()\n\ndef _run_after_forkers():\n items=list(_afterfork_registry.items())\n items.sort()\n for (index,ident,func),obj in items:\n try :\n func(obj)\n except Exception as e:\n info('after forker raised exception %s',e)\n \ndef register_after_fork(obj,func):\n _afterfork_registry[(next(_afterfork_counter),id(obj),func)]=obj\n \n \n \n \n \n_finalizer_registry={}\n_finalizer_counter=itertools.count()\n\n\nclass Finalize(object):\n ''\n\n \n def __init__(self,obj,callback,args=(),kwargs=None ,exitpriority=None ):\n assert exitpriority is None or type(exitpriority)is int\n \n if obj is not None :\n self._weakref=weakref.ref(obj,self)\n else :\n assert exitpriority is not None\n \n self._callback=callback\n self._args=args\n self._kwargs=kwargs or {}\n self._key=(exitpriority,next(_finalizer_counter))\n self._pid=os.getpid()\n \n _finalizer_registry[self._key]=self\n \n def 
__call__(self,wr=None ,\n \n \n _finalizer_registry=_finalizer_registry,\n sub_debug=sub_debug,getpid=os.getpid):\n ''\n\n \n try :\n del _finalizer_registry[self._key]\n except KeyError:\n sub_debug('finalizer no longer registered')\n else :\n if self._pid !=getpid():\n sub_debug('finalizer ignored because different process')\n res=None\n else :\n sub_debug('finalizer calling %s with args %s and kwargs %s',\n self._callback,self._args,self._kwargs)\n res=self._callback(*self._args,**self._kwargs)\n self._weakref=self._callback=self._args=\\\n self._kwargs=self._key=None\n return res\n \n def cancel(self):\n ''\n\n \n try :\n del _finalizer_registry[self._key]\n except KeyError:\n pass\n else :\n self._weakref=self._callback=self._args=\\\n self._kwargs=self._key=None\n \n def still_active(self):\n ''\n\n \n return self._key in _finalizer_registry\n \n def __repr__(self):\n try :\n obj=self._weakref()\n except (AttributeError,TypeError):\n obj=None\n \n if obj is None :\n return ''\n \n x=''\n \n \ndef _run_finalizers(minpriority=None ):\n ''\n\n\n\n\n \n if _finalizer_registry is None :\n \n \n \n return\n \n if minpriority is None :\n f=lambda p:p[0][0]is not None\n else :\n f=lambda p:p[0][0]is not None and p[0][0]>=minpriority\n \n items=[x for x in list(_finalizer_registry.items())if f(x)]\n items.sort(reverse=True )\n \n for key,finalizer in items:\n sub_debug('calling %s',finalizer)\n try :\n finalizer()\n except Exception:\n import traceback\n traceback.print_exc()\n \n if minpriority is None :\n _finalizer_registry.clear()\n \n \n \n \n \ndef is_exiting():\n ''\n\n \n return _exiting or _exiting is None\n \n_exiting=False\n\ndef _exit_function(info=info,debug=debug,_run_finalizers=_run_finalizers,\nactive_children=active_children,\ncurrent_process=current_process):\n\n\n\n\n global _exiting\n \n if not _exiting:\n _exiting=True\n \n info('process shutting down')\n debug('running all \"atexit\" finalizers with priority >= 0')\n _run_finalizers(0)\n \n if current_process()is not None :\n \n \n \n \n \n \n \n \n \n \n \n \n \n for p in active_children():\n if p._daemonic:\n info('calling terminate() for daemon %s',p.name)\n p._popen.terminate()\n \n for p in active_children():\n info('calling join() for process %s',p.name)\n p.join()\n \n debug('running the remaining \"atexit\" finalizers')\n _run_finalizers()\n \natexit.register(_exit_function)\n\n\n\n\n\nclass ForkAwareThreadLock(object):\n def __init__(self):\n self._reset()\n register_after_fork(self,ForkAwareThreadLock._reset)\n \n def _reset(self):\n self._lock=threading.Lock()\n self.acquire=self._lock.acquire\n self.release=self._lock.release\n \nclass ForkAwareLocal(threading.local):\n def __init__(self):\n register_after_fork(self,lambda obj:obj.__dict__.clear())\n def __reduce__(self):\n return type(self),()\n", ["atexit", "functools", "itertools", "logging", "multiprocessing.process", "os", "shutil", "subprocess", "sys", "tempfile", "threading", "traceback", "weakref"]], "multiprocessing": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__='0.70a1'\n\n__all__=[\n'Process','current_process','active_children','freeze_support',\n'Manager','Pipe','cpu_count','log_to_stderr','get_logger',\n'allow_connection_pickling','BufferTooShort','TimeoutError',\n'Lock','RLock','Semaphore','BoundedSemaphore','Condition',\n'Event','Barrier','Queue','SimpleQueue','JoinableQueue','Pool',\n'Value','Array','RawValue','RawArray','SUBDEBUG','SUBWARNING',\n]\n\n__author__='R. 
Oudkerk (r.m.oudkerk@gmail.com)'\n\n\n\n\n\nimport os\nimport sys\n\nfrom multiprocessing.process import Process,current_process,active_children\nfrom multiprocessing.util import SUBDEBUG,SUBWARNING\n\n\n\n\n\nclass ProcessError(Exception):\n pass\n \nclass BufferTooShort(ProcessError):\n pass\n \nclass TimeoutError(ProcessError):\n pass\n \nclass AuthenticationError(ProcessError):\n pass\n \nimport _multiprocessing\n\n\n\n\n\ndef Manager():\n ''\n\n\n\n\n \n from multiprocessing.managers import SyncManager\n m=SyncManager()\n m.start()\n return m\n \n \n \n \n \n \n \n \n \ndef cpu_count():\n ''\n\n \n if sys.platform =='win32':\n try :\n num=int(os.environ['NUMBER_OF_PROCESSORS'])\n except (ValueError,KeyError):\n num=0\n elif 'bsd'in sys.platform or sys.platform =='darwin':\n comm='/sbin/sysctl -n hw.ncpu'\n if sys.platform =='darwin':\n comm='/usr'+comm\n try :\n with os.popen(comm)as p:\n num=int(p.read())\n except ValueError:\n num=0\n else :\n try :\n num=os.sysconf('SC_NPROCESSORS_ONLN')\n except (ValueError,OSError,AttributeError):\n num=0\n \n if num >=1:\n return num\n else :\n raise NotImplementedError('cannot determine number of cpus')\n \ndef freeze_support():\n ''\n\n\n \n if sys.platform =='win32'and getattr(sys,'frozen',False ):\n from multiprocessing.forking import freeze_support\n freeze_support()\n \ndef get_logger():\n ''\n\n \n from multiprocessing.util import get_logger\n return get_logger()\n \ndef log_to_stderr(level=None ):\n ''\n\n \n from multiprocessing.util import log_to_stderr\n return log_to_stderr(level)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef Lock():\n ''\n\n \n from multiprocessing.synchronize import Lock\n return Lock()\n \ndef RLock():\n ''\n\n \n from multiprocessing.synchronize import RLock\n return RLock()\n \ndef Condition(lock=None ):\n ''\n\n \n from multiprocessing.synchronize import Condition\n return Condition(lock)\n \ndef Semaphore(value=1):\n ''\n\n \n from multiprocessing.synchronize import Semaphore\n return Semaphore(value)\n \ndef BoundedSemaphore(value=1):\n ''\n\n \n from multiprocessing.synchronize import BoundedSemaphore\n return BoundedSemaphore(value)\n \ndef Event():\n ''\n\n \n from multiprocessing.synchronize import Event\n return Event()\n \ndef Barrier(parties,action=None ,timeout=None ):\n ''\n\n \n from multiprocessing.synchronize import Barrier\n return Barrier(parties,action,timeout)\n \ndef Queue(maxsize=0):\n ''\n\n \n from multiprocessing.queues import Queue\n return Queue(maxsize)\n \ndef JoinableQueue(maxsize=0):\n ''\n\n \n from multiprocessing.queues import JoinableQueue\n return JoinableQueue(maxsize)\n \ndef SimpleQueue():\n ''\n\n \n from multiprocessing.queues import SimpleQueue\n return SimpleQueue()\n \ndef Pool(processes=None ,initializer=None ,initargs=(),maxtasksperchild=None ):\n ''\n\n \n from multiprocessing.pool import Pool\n return Pool(processes,initializer,initargs,maxtasksperchild)\n \ndef RawValue(typecode_or_type,*args):\n ''\n\n \n from multiprocessing.sharedctypes import RawValue\n return RawValue(typecode_or_type,*args)\n \ndef RawArray(typecode_or_type,size_or_initializer):\n ''\n\n \n from multiprocessing.sharedctypes import RawArray\n return RawArray(typecode_or_type,size_or_initializer)\n \ndef Value(typecode_or_type,*args,lock=True ):\n ''\n\n \n from multiprocessing.sharedctypes import Value\n return Value(typecode_or_type,*args,lock=lock)\n \ndef Array(typecode_or_type,size_or_initializer,*,lock=True ):\n ''\n\n \n from multiprocessing.sharedctypes import Array\n return 
Array(typecode_or_type,size_or_initializer,lock=lock)\n \n \n \n \n \nif sys.platform =='win32':\n\n def set_executable(executable):\n ''\n\n\n\n \n from multiprocessing.forking import set_executable\n set_executable(executable)\n \n __all__ +=['set_executable']\n", ["_multiprocessing", "multiprocessing.forking", "multiprocessing.managers", "multiprocessing.pool", "multiprocessing.process", "multiprocessing.queues", "multiprocessing.sharedctypes", "multiprocessing.synchronize", "multiprocessing.util", "os", "sys"], 1], "multiprocessing.connection": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['Client','Listener','Pipe']\n\nfrom queue import Queue\n\n\nfamilies=[None ]\n\n\nclass Listener(object):\n\n def __init__(self,address=None ,family=None ,backlog=1):\n self._backlog_queue=Queue(backlog)\n \n def accept(self):\n return Connection(*self._backlog_queue.get())\n \n def close(self):\n self._backlog_queue=None\n \n address=property(lambda self:self._backlog_queue)\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n \n \ndef Client(address):\n _in,_out=Queue(),Queue()\n address.put((_out,_in))\n return Connection(_in,_out)\n \n \ndef Pipe(duplex=True ):\n a,b=Queue(),Queue()\n return Connection(a,b),Connection(b,a)\n \n \nclass Connection(object):\n\n def __init__(self,_in,_out):\n self._out=_out\n self._in=_in\n self.send=self.send_bytes=_out.put\n self.recv=self.recv_bytes=_in.get\n \n def poll(self,timeout=0.0):\n if self._in.qsize()>0:\n return True\n if timeout <=0.0:\n return False\n self._in.not_empty.acquire()\n self._in.not_empty.wait(timeout)\n self._in.not_empty.release()\n return self._in.qsize()>0\n \n def close(self):\n pass\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n", ["queue"]], "multiprocessing.process": [".py", "\n\n\n\n\n\n\n\n\n__all__=['Process','current_process','active_children']\n\n\n\n\n\nimport os\nimport sys\nimport signal\nimport itertools\nfrom _weakrefset import WeakSet\n\n\nfrom _multiprocessing import Process\n\n\n\n\ntry :\n ORIGINAL_DIR=os.path.abspath(os.getcwd())\nexcept OSError:\n ORIGINAL_DIR=None\n \n \n \n \n \ndef current_process():\n ''\n\n \n return _current_process\n \ndef active_children():\n ''\n\n \n _cleanup()\n return list(_current_process._children)\n \n \n \n \n \ndef _cleanup():\n\n for p in list(_current_process._children):\n if p._popen.poll()is not None :\n _current_process._children.discard(p)\n \n \n \n \n \n \n \n \n \n \n \n \nclass AuthenticationString(bytes):\n def __reduce__(self):\n from .forking import Popen\n if not Popen.thread_is_spawning():\n raise TypeError(\n 'Pickling an AuthenticationString object is '\n 'disallowed for security reasons'\n )\n return AuthenticationString,(bytes(self),)\n \n \n \n \n \nclass _MainProcess(Process):\n\n def __init__(self):\n self._identity=()\n self._daemonic=False\n self._name='MainProcess'\n self._parent_pid=None\n self._popen=None\n self._counter=itertools.count(1)\n self._children=set()\n self._authkey=AuthenticationString(os.urandom(32))\n self._tempdir=None\n \n_current_process=_MainProcess()\ndel _MainProcess\n\n\n\n\n\n_exitcode_to_name={}\n\nfor name,signum in list(signal.__dict__.items()):\n if name[:3]=='SIG'and '_'not in name:\n _exitcode_to_name[-signum]=name\n \n \n_dangling=WeakSet()\n", ["_multiprocessing", "_weakrefset", "itertools", "multiprocessing.forking", "os", "signal", "sys"]], "multiprocessing.pool": 
[".py", "\n\n\n\n\n\n\n\n\n__all__=['Pool']\n\n\n\n\n\nimport threading\nimport queue\nimport itertools\nimport collections\nimport time\n\nfrom multiprocessing import Process,cpu_count,TimeoutError\nfrom multiprocessing.util import Finalize,debug\n\n\n\n\n\nRUN=0\nCLOSE=1\nTERMINATE=2\n\n\n\n\n\njob_counter=itertools.count()\n\ndef mapstar(args):\n return list(map(*args))\n \ndef starmapstar(args):\n return list(itertools.starmap(args[0],args[1]))\n \n \n \n \n \nclass MaybeEncodingError(Exception):\n ''\n \n \n def __init__(self,exc,value):\n self.exc=repr(exc)\n self.value=repr(value)\n super(MaybeEncodingError,self).__init__(self.exc,self.value)\n \n def __str__(self):\n return \"Error sending result: '%s'. Reason: '%s'\"%(self.value,\n self.exc)\n \n def __repr__(self):\n return \"\"%str(self)\n \n \ndef worker(inqueue,outqueue,initializer=None ,initargs=(),maxtasks=None ):\n assert maxtasks is None or (type(maxtasks)==int and maxtasks >0)\n put=outqueue.put\n get=inqueue.get\n if hasattr(inqueue,'_writer'):\n inqueue._writer.close()\n outqueue._reader.close()\n \n if initializer is not None :\n initializer(*initargs)\n \n completed=0\n while maxtasks is None or (maxtasks and completed 1\n task_batches=Pool._get_tasks(func,iterable,chunksize)\n result=IMapIterator(self._cache)\n self._taskqueue.put((((result._job,i,mapstar,(x,),{})\n for i,x in enumerate(task_batches)),result._set_length))\n return (item for chunk in result for item in chunk)\n \n def imap_unordered(self,func,iterable,chunksize=1):\n ''\n\n \n if self._state !=RUN:\n raise ValueError(\"Pool not running\")\n if chunksize ==1:\n result=IMapUnorderedIterator(self._cache)\n self._taskqueue.put((((result._job,i,func,(x,),{})\n for i,x in enumerate(iterable)),result._set_length))\n return result\n else :\n assert chunksize >1\n task_batches=Pool._get_tasks(func,iterable,chunksize)\n result=IMapUnorderedIterator(self._cache)\n self._taskqueue.put((((result._job,i,mapstar,(x,),{})\n for i,x in enumerate(task_batches)),result._set_length))\n return (item for chunk in result for item in chunk)\n \n def apply_async(self,func,args=(),kwds={},callback=None ,\n error_callback=None ):\n ''\n\n \n if self._state !=RUN:\n raise ValueError(\"Pool not running\")\n result=ApplyResult(self._cache,callback,error_callback)\n self._taskqueue.put(([(result._job,None ,func,args,kwds)],None ))\n return result\n \n def map_async(self,func,iterable,chunksize=None ,callback=None ,\n error_callback=None ):\n ''\n\n \n return self._map_async(func,iterable,mapstar,chunksize,callback,\n error_callback)\n \n def _map_async(self,func,iterable,mapper,chunksize=None ,callback=None ,\n error_callback=None ):\n ''\n\n \n if self._state !=RUN:\n raise ValueError(\"Pool not running\")\n if not hasattr(iterable,'__len__'):\n iterable=list(iterable)\n \n if chunksize is None :\n chunksize,extra=divmod(len(iterable),len(self._pool)*4)\n if extra:\n chunksize +=1\n if len(iterable)==0:\n chunksize=0\n \n task_batches=Pool._get_tasks(func,iterable,chunksize)\n result=MapResult(self._cache,chunksize,len(iterable),callback,\n error_callback=error_callback)\n self._taskqueue.put((((result._job,i,mapper,(x,),{})\n for i,x in enumerate(task_batches)),None ))\n return result\n \n @staticmethod\n def _handle_workers(pool):\n thread=threading.current_thread()\n \n \n \n while thread._state ==RUN or (pool._cache and thread._state !=TERMINATE):\n pool._maintain_pool()\n time.sleep(0.1)\n \n pool._taskqueue.put(None )\n debug('worker handler exiting')\n \n @staticmethod\n 
def _handle_tasks(taskqueue,put,outqueue,pool):\n thread=threading.current_thread()\n \n for taskseq,set_length in iter(taskqueue.get,None ):\n i=-1\n for i,task in enumerate(taskseq):\n if thread._state:\n debug('task handler found thread._state != RUN')\n break\n try :\n put(task)\n except IOError:\n debug('could not put task on queue')\n break\n else :\n if set_length:\n debug('doing set_length()')\n set_length(i+1)\n continue\n break\n else :\n debug('task handler got sentinel')\n \n \n try :\n \n debug('task handler sending sentinel to result handler')\n outqueue.put(None )\n \n \n debug('task handler sending sentinel to workers')\n for p in pool:\n put(None )\n except IOError:\n debug('task handler got IOError when sending sentinels')\n \n debug('task handler exiting')\n \n @staticmethod\n def _handle_results(outqueue,get,cache):\n thread=threading.current_thread()\n \n while 1:\n try :\n task=get()\n except (IOError,EOFError):\n debug('result handler got EOFError/IOError -- exiting')\n return\n \n if thread._state:\n assert thread._state ==TERMINATE\n debug('result handler found thread._state=TERMINATE')\n break\n \n if task is None :\n debug('result handler got sentinel')\n break\n \n job,i,obj=task\n try :\n cache[job]._set(i,obj)\n except KeyError:\n pass\n \n while cache and thread._state !=TERMINATE:\n try :\n task=get()\n except (IOError,EOFError):\n debug('result handler got EOFError/IOError -- exiting')\n return\n \n if task is None :\n debug('result handler ignoring extra sentinel')\n continue\n job,i,obj=task\n try :\n cache[job]._set(i,obj)\n except KeyError:\n pass\n \n if hasattr(outqueue,'_reader'):\n debug('ensuring that outqueue is not full')\n \n \n \n try :\n for i in range(10):\n if not outqueue._reader.poll():\n break\n get()\n except (IOError,EOFError):\n pass\n \n debug('result handler exiting: len(cache)=%s, thread._state=%s',\n len(cache),thread._state)\n \n @staticmethod\n def _get_tasks(func,it,size):\n it=iter(it)\n while 1:\n x=tuple(itertools.islice(it,size))\n if not x:\n return\n yield (func,x)\n \n def __reduce__(self):\n raise NotImplementedError(\n 'pool objects cannot be passed between processes or pickled'\n )\n \n def close(self):\n debug('closing pool')\n if self._state ==RUN:\n self._state=CLOSE\n self._worker_handler._state=CLOSE\n \n def terminate(self):\n debug('terminating pool')\n self._state=TERMINATE\n self._worker_handler._state=TERMINATE\n self._terminate()\n \n def join(self):\n debug('joining pool')\n assert self._state in (CLOSE,TERMINATE)\n self._worker_handler.join()\n self._task_handler.join()\n self._result_handler.join()\n for p in self._pool:\n p.join()\n \n @staticmethod\n def _help_stuff_finish(inqueue,task_handler,size):\n \n debug('removing tasks from inqueue until task handler finished')\n inqueue._rlock.acquire()\n while task_handler.is_alive()and inqueue._reader.poll():\n inqueue._reader.recv()\n time.sleep(0)\n \n @classmethod\n def _terminate_pool(cls,taskqueue,inqueue,outqueue,pool,\n worker_handler,task_handler,result_handler,cache):\n \n debug('finalizing pool')\n \n worker_handler._state=TERMINATE\n task_handler._state=TERMINATE\n \n debug('helping task handler/workers to finish')\n cls._help_stuff_finish(inqueue,task_handler,len(pool))\n \n assert result_handler.is_alive()or len(cache)==0\n \n result_handler._state=TERMINATE\n outqueue.put(None )\n \n \n \n debug('joining worker handler')\n if threading.current_thread()is not worker_handler:\n worker_handler.join()\n \n \n if pool and 
hasattr(pool[0],'terminate'):\n debug('terminating workers')\n for p in pool:\n if p.exitcode is None :\n p.terminate()\n \n debug('joining task handler')\n if threading.current_thread()is not task_handler:\n task_handler.join()\n \n debug('joining result handler')\n if threading.current_thread()is not result_handler:\n result_handler.join()\n \n if pool and hasattr(pool[0],'terminate'):\n debug('joining pool workers')\n for p in pool:\n if p.is_alive():\n \n debug('cleaning up worker %d'%p.pid)\n p.join()\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_val,exc_tb):\n self.terminate()\n \n \n \n \n \nclass ApplyResult(object):\n\n def __init__(self,cache,callback,error_callback):\n self._event=threading.Event()\n self._job=next(job_counter)\n self._cache=cache\n self._callback=callback\n self._error_callback=error_callback\n cache[self._job]=self\n \n def ready(self):\n return self._event.is_set()\n \n def successful(self):\n assert self.ready()\n return self._success\n \n def wait(self,timeout=None ):\n self._event.wait(timeout)\n \n def get(self,timeout=None ):\n self.wait(timeout)\n if not self.ready():\n raise TimeoutError\n if self._success:\n return self._value\n else :\n raise self._value\n \n def _set(self,i,obj):\n self._success,self._value=obj\n if self._callback and self._success:\n self._callback(self._value)\n if self._error_callback and not self._success:\n self._error_callback(self._value)\n self._event.set()\n del self._cache[self._job]\n \nAsyncResult=ApplyResult\n\n\n\n\n\nclass MapResult(ApplyResult):\n\n def __init__(self,cache,chunksize,length,callback,error_callback):\n ApplyResult.__init__(self,cache,callback,\n error_callback=error_callback)\n self._success=True\n self._value=[None ]*length\n self._chunksize=chunksize\n if chunksize <=0:\n self._number_left=0\n self._event.set()\n del cache[self._job]\n else :\n self._number_left=length //chunksize+bool(length %chunksize)\n \n def _set(self,i,success_result):\n success,result=success_result\n if success:\n self._value[i *self._chunksize:(i+1)*self._chunksize]=result\n self._number_left -=1\n if self._number_left ==0:\n if self._callback:\n self._callback(self._value)\n del self._cache[self._job]\n self._event.set()\n else :\n self._success=False\n self._value=result\n if self._error_callback:\n self._error_callback(self._value)\n del self._cache[self._job]\n self._event.set()\n \n \n \n \n \nclass IMapIterator(object):\n\n def __init__(self,cache):\n self._cond=threading.Condition(threading.Lock())\n self._job=next(job_counter)\n self._cache=cache\n self._items=collections.deque()\n self._index=0\n self._length=None\n self._unsorted={}\n cache[self._job]=self\n \n def __iter__(self):\n return self\n \n def next(self,timeout=None ):\n self._cond.acquire()\n try :\n try :\n item=self._items.popleft()\n except IndexError:\n if self._index ==self._length:\n raise StopIteration\n self._cond.wait(timeout)\n try :\n item=self._items.popleft()\n except IndexError:\n if self._index ==self._length:\n raise StopIteration\n raise TimeoutError\n finally :\n self._cond.release()\n \n success,value=item\n if success:\n return value\n raise value\n \n __next__=next\n \n def _set(self,i,obj):\n self._cond.acquire()\n try :\n if self._index ==i:\n self._items.append(obj)\n self._index +=1\n while self._index in self._unsorted:\n obj=self._unsorted.pop(self._index)\n self._items.append(obj)\n self._index +=1\n self._cond.notify()\n else :\n self._unsorted[i]=obj\n \n if self._index ==self._length:\n del 
self._cache[self._job]\n finally :\n self._cond.release()\n \n def _set_length(self,length):\n self._cond.acquire()\n try :\n self._length=length\n if self._index ==self._length:\n self._cond.notify()\n del self._cache[self._job]\n finally :\n self._cond.release()\n \n \n \n \n \nclass IMapUnorderedIterator(IMapIterator):\n\n def _set(self,i,obj):\n self._cond.acquire()\n try :\n self._items.append(obj)\n self._index +=1\n self._cond.notify()\n if self._index ==self._length:\n del self._cache[self._job]\n finally :\n self._cond.release()\n \n \n \n \n \nclass ThreadPool(Pool):\n\n from .dummy import Process\n \n def __init__(self,processes=None ,initializer=None ,initargs=()):\n Pool.__init__(self,processes,initializer,initargs)\n \n def _setup_queues(self):\n self._inqueue=queue.Queue()\n self._outqueue=queue.Queue()\n self._quick_put=self._inqueue.put\n self._quick_get=self._outqueue.get\n \n @staticmethod\n def _help_stuff_finish(inqueue,task_handler,size):\n \n inqueue.not_empty.acquire()\n try :\n inqueue.queue.clear()\n inqueue.queue.extend([None ]*size)\n inqueue.not_empty.notify_all()\n finally :\n inqueue.not_empty.release()\n", ["collections", "itertools", "multiprocessing", "multiprocessing.Process", "multiprocessing.dummy", "multiprocessing.queues", "multiprocessing.util", "queue", "threading", "time"]], "multiprocessing.dummy": [".py", "\n\n\n\n\n\n\n\n\n__all__=[\n'Process','current_process','active_children','freeze_support',\n'Lock','RLock','Semaphore','BoundedSemaphore','Condition',\n'Event','Barrier','Queue','Manager','Pipe','Pool','JoinableQueue'\n]\n\n\n\n\n\nimport threading\nimport sys\nimport weakref\nimport array\n\nfrom.connection import Pipe\nfrom threading import Lock,RLock,Semaphore,BoundedSemaphore\nfrom threading import Event,Condition,Barrier\nfrom queue import Queue\n\n\n\n\n\nclass DummyProcess(threading.Thread):\n\n def __init__(self,group=None,target=None,name=None,args=(),kwargs={}):\n threading.Thread.__init__(self,group,target,name,args,kwargs)\n self._pid=None\n self._children=weakref.WeakKeyDictionary()\n self._start_called=False\n self._parent=current_process()\n \n def start(self):\n if self._parent is not current_process():\n raise RuntimeError(\n \"Parent is {0!r} but current_process is {1!r}\".format(\n self._parent,current_process()))\n self._start_called=True\n if hasattr(self._parent,'_children'):\n self._parent._children[self]=None\n threading.Thread.start(self)\n \n @property\n def exitcode(self):\n if self._start_called and not self.is_alive():\n return 0\n else:\n return None\n \n \n \n \n \nProcess=DummyProcess\ncurrent_process=threading.current_thread\ncurrent_process()._children=weakref.WeakKeyDictionary()\n\ndef active_children():\n children=current_process()._children\n for p in list(children):\n if not p.is_alive():\n children.pop(p,None)\n return list(children)\n \ndef freeze_support():\n pass\n \n \n \n \n \nclass Namespace(object):\n def __init__(self,/,**kwds):\n self.__dict__.update(kwds)\n def __repr__(self):\n items=list(self.__dict__.items())\n temp=[]\n for name,value in items:\n if not name.startswith('_'):\n temp.append('%s=%r'%(name,value))\n temp.sort()\n return '%s(%s)'%(self.__class__.__name__,', '.join(temp))\n \ndict=dict\nlist=list\n\ndef Array(typecode,sequence,lock=True):\n return array.array(typecode,sequence)\n \nclass Value(object):\n def __init__(self,typecode,value,lock=True):\n self._typecode=typecode\n self._value=value\n \n @property\n def value(self):\n return self._value\n \n @value.setter\n def 
value(self,value):\n self._value=value\n \n def __repr__(self):\n return '<%s(%r, %r)>'%(type(self).__name__,self._typecode,self._value)\n \ndef Manager():\n return sys.modules[__name__]\n \ndef shutdown():\n pass\n \ndef Pool(processes=None,initializer=None,initargs=()):\n from..pool import ThreadPool\n return ThreadPool(processes,initializer,initargs)\n \nJoinableQueue=Queue\n", ["array", "multiprocessing.dummy.connection", "multiprocessing.pool", "queue", "sys", "threading", "weakref"], 1], "multiprocessing.dummy.connection": [".py", "\n\n\n\n\n\n\n\n\n__all__=['Client','Listener','Pipe']\n\nfrom queue import Queue\n\n\nfamilies=[None]\n\n\nclass Listener(object):\n\n def __init__(self,address=None,family=None,backlog=1):\n self._backlog_queue=Queue(backlog)\n \n def accept(self):\n return Connection(*self._backlog_queue.get())\n \n def close(self):\n self._backlog_queue=None\n \n @property\n def address(self):\n return self._backlog_queue\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n \n \ndef Client(address):\n _in,_out=Queue(),Queue()\n address.put((_out,_in))\n return Connection(_in,_out)\n \n \ndef Pipe(duplex=True):\n a,b=Queue(),Queue()\n return Connection(a,b),Connection(b,a)\n \n \nclass Connection(object):\n\n def __init__(self,_in,_out):\n self._out=_out\n self._in=_in\n self.send=self.send_bytes=_out.put\n self.recv=self.recv_bytes=_in.get\n \n def poll(self,timeout=0.0):\n if self._in.qsize()>0:\n return True\n if timeout <=0.0:\n return False\n with self._in.not_empty:\n self._in.not_empty.wait(timeout)\n return self._in.qsize()>0\n \n def close(self):\n pass\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n", ["queue"]], "urllib.error": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\nimport io\nimport urllib.response\n\n__all__=['URLError','HTTPError','ContentTooShortError']\n\n\nclass URLError(OSError):\n\n\n\n\n\n def __init__(self,reason,filename=None):\n self.args=reason,\n self.reason=reason\n if filename is not None:\n self.filename=filename\n \n def __str__(self):\n return ''%self.reason\n \n \nclass HTTPError(URLError,urllib.response.addinfourl):\n ''\n __super_init=urllib.response.addinfourl.__init__\n \n def __init__(self,url,code,msg,hdrs,fp):\n self.code=code\n self.msg=msg\n self.hdrs=hdrs\n self.fp=fp\n self.filename=url\n if fp is None:\n fp=io.BytesIO()\n self.__super_init(fp,hdrs,url,code)\n \n def __str__(self):\n return 'HTTP Error %s: %s'%(self.code,self.msg)\n \n def __repr__(self):\n return ''%(self.code,self.msg)\n \n \n \n @property\n def reason(self):\n return self.msg\n \n @property\n def headers(self):\n return self.hdrs\n \n @headers.setter\n def headers(self,headers):\n self.hdrs=headers\n \n \nclass ContentTooShortError(URLError):\n ''\n def __init__(self,message,content):\n URLError.__init__(self,message)\n self.content=content\n", ["io", "urllib.response"]], "urllib.request": [".py", "from browser import ajax\nfrom . 
import error\n\nclass FileIO:\n\n def __init__(self,data):\n self._data=data\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n pass\n \n def read(self):\n return self._data\n \ndef urlopen(url,data=None ,timeout=None ):\n global result\n result=None\n \n def on_complete(req):\n global result\n if req.status ==200:\n result=req\n \n _ajax=ajax.ajax()\n _ajax.bind('complete',on_complete)\n if timeout is not None :\n _ajax.set_timeout(timeout)\n \n if data is None :\n _ajax.open('GET',url,False )\n _ajax.send()\n else :\n _ajax.open('POST',url,False )\n _ajax.send(data)\n \n if result is not None :\n if isinstance(result.text,str):\n return FileIO(result.text)\n \n return FileIO(result.text())\n raise error.HTTPError('file not found')\n", ["browser", "browser.ajax", "urllib", "urllib.error"]], "urllib": [".py", "", [], 1], "urllib.response": [".py", "''\n\n\n\n\n\n\n\nimport tempfile\n\n__all__=['addbase','addclosehook','addinfo','addinfourl']\n\n\nclass addbase(tempfile._TemporaryFileWrapper):\n ''\n \n \n \n def __init__(self,fp):\n super(addbase,self).__init__(fp,'',delete=False)\n \n self.fp=fp\n \n def __repr__(self):\n return '<%s at %r whose fp = %r>'%(self.__class__.__name__,\n id(self),self.file)\n \n def __enter__(self):\n if self.fp.closed:\n raise ValueError(\"I/O operation on closed file\")\n return self\n \n def __exit__(self,type,value,traceback):\n self.close()\n \n \nclass addclosehook(addbase):\n ''\n \n def __init__(self,fp,closehook,*hookargs):\n super(addclosehook,self).__init__(fp)\n self.closehook=closehook\n self.hookargs=hookargs\n \n def close(self):\n try:\n closehook=self.closehook\n hookargs=self.hookargs\n if closehook:\n self.closehook=None\n self.hookargs=None\n closehook(*hookargs)\n finally:\n super(addclosehook,self).close()\n \n \nclass addinfo(addbase):\n ''\n \n def __init__(self,fp,headers):\n super(addinfo,self).__init__(fp)\n self.headers=headers\n \n def info(self):\n return self.headers\n \n \nclass addinfourl(addinfo):\n ''\n \n def __init__(self,fp,headers,url,code=None):\n super(addinfourl,self).__init__(fp,headers)\n self.url=url\n self.code=code\n \n @property\n def status(self):\n return self.code\n \n def getcode(self):\n return self.code\n \n def geturl(self):\n return self.url\n", ["tempfile"]], "urllib.parse": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nfrom collections import namedtuple\nimport functools\nimport math\nimport re\nimport types\nimport warnings\nimport 
ipaddress\n\n__all__=[\"urlparse\",\"urlunparse\",\"urljoin\",\"urldefrag\",\n\"urlsplit\",\"urlunsplit\",\"urlencode\",\"parse_qs\",\n\"parse_qsl\",\"quote\",\"quote_plus\",\"quote_from_bytes\",\n\"unquote\",\"unquote_plus\",\"unquote_to_bytes\",\n\"DefragResult\",\"ParseResult\",\"SplitResult\",\n\"DefragResultBytes\",\"ParseResultBytes\",\"SplitResultBytes\"]\n\n\n\n\n\nuses_relative=['','ftp','http','gopher','nntp','imap',\n'wais','file','https','shttp','mms',\n'prospero','rtsp','rtsps','rtspu','sftp',\n'svn','svn+ssh','ws','wss']\n\nuses_netloc=['','ftp','http','gopher','nntp','telnet',\n'imap','wais','file','mms','https','shttp',\n'snews','prospero','rtsp','rtsps','rtspu','rsync',\n'svn','svn+ssh','sftp','nfs','git','git+ssh',\n'ws','wss','itms-services']\n\nuses_params=['','ftp','hdl','prospero','http','imap',\n'https','shttp','rtsp','rtsps','rtspu','sip',\n'sips','mms','sftp','tel']\n\n\n\n\nnon_hierarchical=['gopher','hdl','mailto','news',\n'telnet','wais','imap','snews','sip','sips']\n\nuses_query=['','http','wais','imap','https','shttp','mms',\n'gopher','rtsp','rtsps','rtspu','sip','sips']\n\nuses_fragment=['','ftp','hdl','http','gopher','news',\n'nntp','wais','https','shttp','snews',\n'file','prospero']\n\n\nscheme_chars=('abcdefghijklmnopqrstuvwxyz'\n'ABCDEFGHIJKLMNOPQRSTUVWXYZ'\n'0123456789'\n'+-.')\n\n\n\n_WHATWG_C0_CONTROL_OR_SPACE='\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f '\n\n\n_UNSAFE_URL_BYTES_TO_REMOVE=['\\t','\\r','\\n']\n\ndef clear_cache():\n ''\n urlsplit.cache_clear()\n _byte_quoter_factory.cache_clear()\n \n \n \n \n \n \n \n_implicit_encoding='ascii'\n_implicit_errors='strict'\n\ndef _noop(obj):\n return obj\n \ndef _encode_result(obj,encoding=_implicit_encoding,\nerrors=_implicit_errors):\n return obj.encode(encoding,errors)\n \ndef _decode_args(args,encoding=_implicit_encoding,\nerrors=_implicit_errors):\n return tuple(x.decode(encoding,errors)if x else ''for x in args)\n \ndef _coerce_args(*args):\n\n\n\n\n\n str_input=isinstance(args[0],str)\n for arg in args[1:]:\n \n \n if arg and isinstance(arg,str)!=str_input:\n raise TypeError(\"Cannot mix str and non-str arguments\")\n if str_input:\n return args+(_noop,)\n return _decode_args(args)+(_encode_result,)\n \n \nclass _ResultMixinStr(object):\n ''\n __slots__=()\n \n def encode(self,encoding='ascii',errors='strict'):\n return self._encoded_counterpart(*(x.encode(encoding,errors)for x in self))\n \n \nclass _ResultMixinBytes(object):\n ''\n __slots__=()\n \n def decode(self,encoding='ascii',errors='strict'):\n return self._decoded_counterpart(*(x.decode(encoding,errors)for x in self))\n \n \nclass _NetlocResultMixinBase(object):\n ''\n __slots__=()\n \n @property\n def username(self):\n return self._userinfo[0]\n \n @property\n def password(self):\n return self._userinfo[1]\n \n @property\n def hostname(self):\n hostname=self._hostinfo[0]\n if not hostname:\n return None\n \n \n separator='%'if isinstance(hostname,str)else b'%'\n hostname,percent,zone=hostname.partition(separator)\n return hostname.lower()+percent+zone\n \n @property\n def port(self):\n port=self._hostinfo[1]\n if port is not None:\n if port.isdigit()and port.isascii():\n port=int(port)\n else:\n raise ValueError(f\"Port could not be cast to integer value as {port !r}\")\n if not(0 <=port <=65535):\n raise ValueError(\"Port out of range 0-65535\")\n return port\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \nclass 
_NetlocResultMixinStr(_NetlocResultMixinBase,_ResultMixinStr):\n __slots__=()\n \n @property\n def _userinfo(self):\n netloc=self.netloc\n userinfo,have_info,hostinfo=netloc.rpartition('@')\n if have_info:\n username,have_password,password=userinfo.partition(':')\n if not have_password:\n password=None\n else:\n username=password=None\n return username,password\n \n @property\n def _hostinfo(self):\n netloc=self.netloc\n _,_,hostinfo=netloc.rpartition('@')\n _,have_open_br,bracketed=hostinfo.partition('[')\n if have_open_br:\n hostname,_,port=bracketed.partition(']')\n _,_,port=port.partition(':')\n else:\n hostname,_,port=hostinfo.partition(':')\n if not port:\n port=None\n return hostname,port\n \n \nclass _NetlocResultMixinBytes(_NetlocResultMixinBase,_ResultMixinBytes):\n __slots__=()\n \n @property\n def _userinfo(self):\n netloc=self.netloc\n userinfo,have_info,hostinfo=netloc.rpartition(b'@')\n if have_info:\n username,have_password,password=userinfo.partition(b':')\n if not have_password:\n password=None\n else:\n username=password=None\n return username,password\n \n @property\n def _hostinfo(self):\n netloc=self.netloc\n _,_,hostinfo=netloc.rpartition(b'@')\n _,have_open_br,bracketed=hostinfo.partition(b'[')\n if have_open_br:\n hostname,_,port=bracketed.partition(b']')\n _,_,port=port.partition(b':')\n else:\n hostname,_,port=hostinfo.partition(b':')\n if not port:\n port=None\n return hostname,port\n \n \n_DefragResultBase=namedtuple('DefragResult','url fragment')\n_SplitResultBase=namedtuple(\n'SplitResult','scheme netloc path query fragment')\n_ParseResultBase=namedtuple(\n'ParseResult','scheme netloc path params query fragment')\n\n_DefragResultBase.__doc__=\"\"\"\nDefragResult(url, fragment)\n\nA 2-tuple that contains the url without fragment identifier and the fragment\nidentifier as a separate argument.\n\"\"\"\n\n_DefragResultBase.url.__doc__=\"\"\"The URL with no fragment identifier.\"\"\"\n\n_DefragResultBase.fragment.__doc__=\"\"\"\nFragment identifier separated from URL, that allows indirect identification of a\nsecondary resource by reference to a primary resource and additional identifying\ninformation.\n\"\"\"\n\n_SplitResultBase.__doc__=\"\"\"\nSplitResult(scheme, netloc, path, query, fragment)\n\nA 5-tuple that contains the different components of a URL. 
Similar to\nParseResult, but does not split params.\n\"\"\"\n\n_SplitResultBase.scheme.__doc__=\"\"\"Specifies URL scheme for the request.\"\"\"\n\n_SplitResultBase.netloc.__doc__=\"\"\"\nNetwork location where the request is made to.\n\"\"\"\n\n_SplitResultBase.path.__doc__=\"\"\"\nThe hierarchical path, such as the path to a file to download.\n\"\"\"\n\n_SplitResultBase.query.__doc__=\"\"\"\nThe query component, that contains non-hierarchical data, that along with data\nin path component, identifies a resource in the scope of URI's scheme and\nnetwork location.\n\"\"\"\n\n_SplitResultBase.fragment.__doc__=\"\"\"\nFragment identifier, that allows indirect identification of a secondary resource\nby reference to a primary resource and additional identifying information.\n\"\"\"\n\n_ParseResultBase.__doc__=\"\"\"\nParseResult(scheme, netloc, path, params, query, fragment)\n\nA 6-tuple that contains components of a parsed URL.\n\"\"\"\n\n_ParseResultBase.scheme.__doc__=_SplitResultBase.scheme.__doc__\n_ParseResultBase.netloc.__doc__=_SplitResultBase.netloc.__doc__\n_ParseResultBase.path.__doc__=_SplitResultBase.path.__doc__\n_ParseResultBase.params.__doc__=\"\"\"\nParameters for last path element used to dereference the URI in order to provide\naccess to perform some operation on the resource.\n\"\"\"\n\n_ParseResultBase.query.__doc__=_SplitResultBase.query.__doc__\n_ParseResultBase.fragment.__doc__=_SplitResultBase.fragment.__doc__\n\n\n\n\n\nResultBase=_NetlocResultMixinStr\n\n\nclass DefragResult(_DefragResultBase,_ResultMixinStr):\n __slots__=()\n def geturl(self):\n if self.fragment:\n return self.url+'#'+self.fragment\n else:\n return self.url\n \nclass SplitResult(_SplitResultBase,_NetlocResultMixinStr):\n __slots__=()\n def geturl(self):\n return urlunsplit(self)\n \nclass ParseResult(_ParseResultBase,_NetlocResultMixinStr):\n __slots__=()\n def geturl(self):\n return urlunparse(self)\n \n \nclass DefragResultBytes(_DefragResultBase,_ResultMixinBytes):\n __slots__=()\n def geturl(self):\n if self.fragment:\n return self.url+b'#'+self.fragment\n else:\n return self.url\n \nclass SplitResultBytes(_SplitResultBase,_NetlocResultMixinBytes):\n __slots__=()\n def geturl(self):\n return urlunsplit(self)\n \nclass ParseResultBytes(_ParseResultBase,_NetlocResultMixinBytes):\n __slots__=()\n def geturl(self):\n return urlunparse(self)\n \n \ndef _fix_result_transcoding():\n _result_pairs=(\n (DefragResult,DefragResultBytes),\n (SplitResult,SplitResultBytes),\n (ParseResult,ParseResultBytes),\n )\n for _decoded,_encoded in _result_pairs:\n _decoded._encoded_counterpart=_encoded\n _encoded._decoded_counterpart=_decoded\n \n_fix_result_transcoding()\ndel _fix_result_transcoding\n\ndef urlparse(url,scheme='',allow_fragments=True):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n url,scheme,_coerce_result=_coerce_args(url,scheme)\n splitresult=urlsplit(url,scheme,allow_fragments)\n scheme,netloc,url,query,fragment=splitresult\n if scheme in uses_params and ';'in url:\n url,params=_splitparams(url)\n else:\n params=''\n result=ParseResult(scheme,netloc,url,params,query,fragment)\n return _coerce_result(result)\n \ndef _splitparams(url):\n if '/'in url:\n i=url.find(';',url.rfind('/'))\n if i <0:\n return url,''\n else:\n i=url.find(';')\n return url[:i],url[i+1:]\n \ndef _splitnetloc(url,start=0):\n delim=len(url)\n for c in '/?#':\n wdelim=url.find(c,start)\n if wdelim >=0:\n delim=min(delim,wdelim)\n return url[start:delim],url[delim:]\n \ndef _checknetloc(netloc):\n if not netloc or netloc.isascii():\n 
return\n \n \n import unicodedata\n n=netloc.replace('@','')\n n=n.replace(':','')\n n=n.replace('#','')\n n=n.replace('?','')\n netloc2=unicodedata.normalize('NFKC',n)\n if n ==netloc2:\n return\n for c in '/?#@:':\n if c in netloc2:\n raise ValueError(\"netloc '\"+netloc+\"' contains invalid \"+\n \"characters under NFKC normalization\")\n \n \n \ndef _check_bracketed_host(hostname):\n if hostname.startswith('v'):\n if not re.match(r\"\\Av[a-fA-F0-9]+\\..+\\Z\",hostname):\n raise ValueError(f\"IPvFuture address is invalid\")\n else:\n ip=ipaddress.ip_address(hostname)\n if isinstance(ip,ipaddress.IPv4Address):\n raise ValueError(f\"An IPv4 address cannot be in brackets\")\n \n \n \n@functools.lru_cache(typed=True)\ndef urlsplit(url,scheme='',allow_fragments=True):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n url,scheme,_coerce_result=_coerce_args(url,scheme)\n \n \n url=url.lstrip(_WHATWG_C0_CONTROL_OR_SPACE)\n scheme=scheme.strip(_WHATWG_C0_CONTROL_OR_SPACE)\n \n for b in _UNSAFE_URL_BYTES_TO_REMOVE:\n url=url.replace(b,\"\")\n scheme=scheme.replace(b,\"\")\n \n allow_fragments=bool(allow_fragments)\n netloc=query=fragment=''\n i=url.find(':')\n if i >0 and url[0].isascii()and url[0].isalpha():\n for c in url[:i]:\n if c not in scheme_chars:\n break\n else:\n scheme,url=url[:i].lower(),url[i+1:]\n if url[:2]=='//':\n netloc,url=_splitnetloc(url,2)\n if(('['in netloc and ']'not in netloc)or\n (']'in netloc and '['not in netloc)):\n raise ValueError(\"Invalid IPv6 URL\")\n if '['in netloc and ']'in netloc:\n bracketed_host=netloc.partition('[')[2].partition(']')[0]\n _check_bracketed_host(bracketed_host)\n if allow_fragments and '#'in url:\n url,fragment=url.split('#',1)\n if '?'in url:\n url,query=url.split('?',1)\n _checknetloc(netloc)\n v=SplitResult(scheme,netloc,url,query,fragment)\n return _coerce_result(v)\n \ndef urlunparse(components):\n ''\n\n\n \n scheme,netloc,url,params,query,fragment,_coerce_result=(\n _coerce_args(*components))\n if params:\n url=\"%s;%s\"%(url,params)\n return _coerce_result(urlunsplit((scheme,netloc,url,query,fragment)))\n \ndef urlunsplit(components):\n ''\n\n\n\n \n scheme,netloc,url,query,fragment,_coerce_result=(\n _coerce_args(*components))\n if netloc or(scheme and scheme in uses_netloc and url[:2]!='//'):\n if url and url[:1]!='/':url='/'+url\n url='//'+(netloc or '')+url\n if scheme:\n url=scheme+':'+url\n if query:\n url=url+'?'+query\n if fragment:\n url=url+'#'+fragment\n return _coerce_result(url)\n \ndef urljoin(base,url,allow_fragments=True):\n ''\n \n if not base:\n return url\n if not url:\n return base\n \n base,url,_coerce_result=_coerce_args(base,url)\n bscheme,bnetloc,bpath,bparams,bquery,bfragment=\\\n urlparse(base,'',allow_fragments)\n scheme,netloc,path,params,query,fragment=\\\n urlparse(url,bscheme,allow_fragments)\n \n if scheme !=bscheme or scheme not in uses_relative:\n return _coerce_result(url)\n if scheme in uses_netloc:\n if netloc:\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n netloc=bnetloc\n \n if not path and not params:\n path=bpath\n params=bparams\n if not query:\n query=bquery\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n \n base_parts=bpath.split('/')\n if base_parts[-1]!='':\n \n \n del base_parts[-1]\n \n \n if path[:1]=='/':\n segments=path.split('/')\n else:\n segments=base_parts+path.split('/')\n \n \n segments[1:-1]=filter(None,segments[1:-1])\n \n resolved_path=[]\n \n for seg in segments:\n if seg =='..':\n try:\n 
resolved_path.pop()\n except IndexError:\n \n \n pass\n elif seg =='.':\n continue\n else:\n resolved_path.append(seg)\n \n if segments[-1]in('.','..'):\n \n \n resolved_path.append('')\n \n return _coerce_result(urlunparse((scheme,netloc,'/'.join(\n resolved_path)or '/',params,query,fragment)))\n \n \ndef urldefrag(url):\n ''\n\n\n\n\n \n url,_coerce_result=_coerce_args(url)\n if '#'in url:\n s,n,p,a,q,frag=urlparse(url)\n defrag=urlunparse((s,n,p,a,q,''))\n else:\n frag=''\n defrag=url\n return _coerce_result(DefragResult(defrag,frag))\n \n_hexdig='0123456789ABCDEFabcdef'\n_hextobyte=None\n\ndef unquote_to_bytes(string):\n ''\n return bytes(_unquote_impl(string))\n \ndef _unquote_impl(string:bytes |bytearray |str)->bytes |bytearray:\n\n\n if not string:\n \n string.split\n return b''\n if isinstance(string,str):\n string=string.encode('utf-8')\n bits=string.split(b'%')\n if len(bits)==1:\n return string\n res=bytearray(bits[0])\n append=res.extend\n \n \n global _hextobyte\n if _hextobyte is None:\n _hextobyte={(a+b).encode():bytes.fromhex(a+b)\n for a in _hexdig for b in _hexdig}\n for item in bits[1:]:\n try:\n append(_hextobyte[item[:2]])\n append(item[2:])\n except KeyError:\n append(b'%')\n append(item)\n return res\n \n_asciire=re.compile('([\\x00-\\x7f]+)')\n\ndef _generate_unquoted_parts(string,encoding,errors):\n previous_match_end=0\n for ascii_match in _asciire.finditer(string):\n start,end=ascii_match.span()\n yield string[previous_match_end:start]\n \n yield _unquote_impl(ascii_match[1]).decode(encoding,errors)\n previous_match_end=end\n yield string[previous_match_end:]\n \ndef unquote(string,encoding='utf-8',errors='replace'):\n ''\n\n\n\n\n\n\n\n \n if isinstance(string,bytes):\n return _unquote_impl(string).decode(encoding,errors)\n if '%'not in string:\n \n string.split\n return string\n if encoding is None:\n encoding='utf-8'\n if errors is None:\n errors='replace'\n return ''.join(_generate_unquoted_parts(string,encoding,errors))\n \n \ndef parse_qs(qs,keep_blank_values=False,strict_parsing=False,\nencoding='utf-8',errors='replace',max_num_fields=None,separator='&'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n parsed_result={}\n pairs=parse_qsl(qs,keep_blank_values,strict_parsing,\n encoding=encoding,errors=errors,\n max_num_fields=max_num_fields,separator=separator)\n for name,value in pairs:\n if name in parsed_result:\n parsed_result[name].append(value)\n else:\n parsed_result[name]=[value]\n return parsed_result\n \n \ndef parse_qsl(qs,keep_blank_values=False,strict_parsing=False,\nencoding='utf-8',errors='replace',max_num_fields=None,separator='&'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n qs,_coerce_result=_coerce_args(qs)\n separator,_=_coerce_args(separator)\n \n if not separator or(not isinstance(separator,(str,bytes))):\n raise ValueError(\"Separator must be of type string or bytes.\")\n \n \n \n \n if max_num_fields is not None:\n num_fields=1+qs.count(separator)if qs else 0\n if max_num_fields \"\n \n def __missing__(self,b):\n \n res=chr(b)if b in self.safe else '%{:02X}'.format(b)\n self[b]=res\n return res\n \ndef quote(string,safe='/',encoding=None,errors=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(string,str):\n if not string:\n return string\n if encoding is None:\n encoding='utf-8'\n if errors is None:\n errors='strict'\n string=string.encode(encoding,errors)\n else:\n if encoding is not None:\n raise TypeError(\"quote() doesn't support 'encoding' for 
bytes\")\n if errors is not None:\n raise TypeError(\"quote() doesn't support 'errors' for bytes\")\n return quote_from_bytes(string,safe)\n \ndef quote_plus(string,safe='',encoding=None,errors=None):\n ''\n\n\n \n \n \n if((isinstance(string,str)and ' 'not in string)or\n (isinstance(string,bytes)and b' 'not in string)):\n return quote(string,safe,encoding,errors)\n if isinstance(safe,str):\n space=' '\n else:\n space=b' '\n string=quote(string,safe+space,encoding,errors)\n return string.replace(' ','+')\n \n \n@functools.lru_cache\ndef _byte_quoter_factory(safe):\n return _Quoter(safe).__getitem__\n \ndef quote_from_bytes(bs,safe='/'):\n ''\n\n\n \n if not isinstance(bs,(bytes,bytearray)):\n raise TypeError(\"quote_from_bytes() expected bytes\")\n if not bs:\n return ''\n if isinstance(safe,str):\n \n safe=safe.encode('ascii','ignore')\n else:\n \n safe=bytes([c for c in safe if c <128])\n if not bs.rstrip(_ALWAYS_SAFE_BYTES+safe):\n return bs.decode()\n quoter=_byte_quoter_factory(safe)\n if(bs_len :=len(bs))<200_000:\n return ''.join(map(quoter,bs))\n else:\n \n chunk_size=math.isqrt(bs_len)\n chunks=[''.join(map(quoter,bs[i:i+chunk_size]))\n for i in range(0,bs_len,chunk_size)]\n return ''.join(chunks)\n \ndef urlencode(query,doseq=False,safe='',encoding=None,errors=None,\nquote_via=quote_plus):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if hasattr(query,\"items\"):\n query=query.items()\n else:\n \n \n try:\n \n \n if len(query)and not isinstance(query[0],tuple):\n raise TypeError\n \n \n \n \n except TypeError as err:\n raise TypeError(\"not a valid non-string sequence \"\n \"or mapping object\")from err\n \n l=[]\n if not doseq:\n for k,v in query:\n if isinstance(k,bytes):\n k=quote_via(k,safe)\n else:\n k=quote_via(str(k),safe,encoding,errors)\n \n if isinstance(v,bytes):\n v=quote_via(v,safe)\n else:\n v=quote_via(str(v),safe,encoding,errors)\n l.append(k+'='+v)\n else:\n for k,v in query:\n if isinstance(k,bytes):\n k=quote_via(k,safe)\n else:\n k=quote_via(str(k),safe,encoding,errors)\n \n if isinstance(v,bytes):\n v=quote_via(v,safe)\n l.append(k+'='+v)\n elif isinstance(v,str):\n v=quote_via(v,safe,encoding,errors)\n l.append(k+'='+v)\n else:\n try:\n \n x=len(v)\n except TypeError:\n \n v=quote_via(str(v),safe,encoding,errors)\n l.append(k+'='+v)\n else:\n \n for elt in v:\n if isinstance(elt,bytes):\n elt=quote_via(elt,safe)\n else:\n elt=quote_via(str(elt),safe,encoding,errors)\n l.append(k+'='+elt)\n return '&'.join(l)\n \n \ndef to_bytes(url):\n warnings.warn(\"urllib.parse.to_bytes() is deprecated as of 3.8\",\n DeprecationWarning,stacklevel=2)\n return _to_bytes(url)\n \n \ndef _to_bytes(url):\n ''\n \n \n \n if isinstance(url,str):\n try:\n url=url.encode(\"ASCII\").decode()\n except UnicodeError:\n raise UnicodeError(\"URL \"+repr(url)+\n \" contains non-ASCII characters\")\n return url\n \n \ndef unwrap(url):\n ''\n\n\n \n url=str(url).strip()\n if url[:1]=='<'and url[-1:]=='>':\n url=url[1:-1].strip()\n if url[:4]=='URL:':\n url=url[4:].strip()\n return url\n \n \ndef splittype(url):\n warnings.warn(\"urllib.parse.splittype() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splittype(url)\n \n \n_typeprog=None\ndef _splittype(url):\n ''\n global _typeprog\n if _typeprog is None:\n _typeprog=re.compile('([^/:]+):(.*)',re.DOTALL)\n \n match=_typeprog.match(url)\n if match:\n scheme,data=match.groups()\n return scheme.lower(),data\n return None,url\n \n \ndef splithost(url):\n 
warnings.warn(\"urllib.parse.splithost() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splithost(url)\n \n \n_hostprog=None\ndef _splithost(url):\n ''\n global _hostprog\n if _hostprog is None:\n _hostprog=re.compile('//([^/#?]*)(.*)',re.DOTALL)\n \n match=_hostprog.match(url)\n if match:\n host_port,path=match.groups()\n if path and path[0]!='/':\n path='/'+path\n return host_port,path\n return None,url\n \n \ndef splituser(host):\n warnings.warn(\"urllib.parse.splituser() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splituser(host)\n \n \ndef _splituser(host):\n ''\n user,delim,host=host.rpartition('@')\n return(user if delim else None),host\n \n \ndef splitpasswd(user):\n warnings.warn(\"urllib.parse.splitpasswd() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splitpasswd(user)\n \n \ndef _splitpasswd(user):\n ''\n user,delim,passwd=user.partition(':')\n return user,(passwd if delim else None)\n \n \ndef splitport(host):\n warnings.warn(\"urllib.parse.splitport() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splitport(host)\n \n \n \n_portprog=None\ndef _splitport(host):\n ''\n global _portprog\n if _portprog is None:\n _portprog=re.compile('(.*):([0-9]*)',re.DOTALL)\n \n match=_portprog.fullmatch(host)\n if match:\n host,port=match.groups()\n if port:\n return host,port\n return host,None\n \n \ndef splitnport(host,defport=-1):\n warnings.warn(\"urllib.parse.splitnport() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splitnport(host,defport)\n \n \ndef _splitnport(host,defport=-1):\n ''\n\n\n \n host,delim,port=host.rpartition(':')\n if not delim:\n host=port\n elif port:\n if port.isdigit()and port.isascii():\n nport=int(port)\n else:\n nport=None\n return host,nport\n return host,defport\n \n \ndef splitquery(url):\n warnings.warn(\"urllib.parse.splitquery() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splitquery(url)\n \n \ndef _splitquery(url):\n ''\n path,delim,query=url.rpartition('?')\n if delim:\n return path,query\n return url,None\n \n \ndef splittag(url):\n warnings.warn(\"urllib.parse.splittag() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splittag(url)\n \n \ndef _splittag(url):\n ''\n path,delim,tag=url.rpartition('#')\n if delim:\n return path,tag\n return url,None\n \n \ndef splitattr(url):\n warnings.warn(\"urllib.parse.splitattr() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splitattr(url)\n \n \ndef _splitattr(url):\n ''\n \n words=url.split(';')\n return words[0],words[1:]\n \n \ndef splitvalue(attr):\n warnings.warn(\"urllib.parse.splitvalue() is deprecated as of 3.8, \"\n \"use urllib.parse.parse_qsl() instead\",\n DeprecationWarning,stacklevel=2)\n return _splitvalue(attr)\n \n \ndef _splitvalue(attr):\n ''\n attr,delim,value=attr.partition('=')\n return attr,(value if delim else None)\n", ["collections", "functools", "ipaddress", "math", "re", "types", "unicodedata", "warnings"]], "html": [".py", "''\n\n\n\nimport re as _re\nfrom html.entities import html5 as _html5\n\n\n__all__=['escape','unescape']\n\n\ndef 
escape(s,quote=True):\n ''\n\n\n\n\n \n s=s.replace(\"&\",\"&\")\n s=s.replace(\"<\",\"<\")\n s=s.replace(\">\",\">\")\n if quote:\n s=s.replace('\"',\""\")\n s=s.replace('\\'',\"'\")\n return s\n \n \n \n \n_invalid_charrefs={\n0x00:'\\ufffd',\n0x0d:'\\r',\n0x80:'\\u20ac',\n0x81:'\\x81',\n0x82:'\\u201a',\n0x83:'\\u0192',\n0x84:'\\u201e',\n0x85:'\\u2026',\n0x86:'\\u2020',\n0x87:'\\u2021',\n0x88:'\\u02c6',\n0x89:'\\u2030',\n0x8a:'\\u0160',\n0x8b:'\\u2039',\n0x8c:'\\u0152',\n0x8d:'\\x8d',\n0x8e:'\\u017d',\n0x8f:'\\x8f',\n0x90:'\\x90',\n0x91:'\\u2018',\n0x92:'\\u2019',\n0x93:'\\u201c',\n0x94:'\\u201d',\n0x95:'\\u2022',\n0x96:'\\u2013',\n0x97:'\\u2014',\n0x98:'\\u02dc',\n0x99:'\\u2122',\n0x9a:'\\u0161',\n0x9b:'\\u203a',\n0x9c:'\\u0153',\n0x9d:'\\x9d',\n0x9e:'\\u017e',\n0x9f:'\\u0178',\n}\n\n_invalid_codepoints={\n\n0x1,0x2,0x3,0x4,0x5,0x6,0x7,0x8,\n\n0xe,0xf,0x10,0x11,0x12,0x13,0x14,0x15,0x16,0x17,0x18,0x19,\n0x1a,0x1b,0x1c,0x1d,0x1e,0x1f,\n\n0x7f,0x80,0x81,0x82,0x83,0x84,0x85,0x86,0x87,0x88,0x89,0x8a,\n0x8b,0x8c,0x8d,0x8e,0x8f,0x90,0x91,0x92,0x93,0x94,0x95,0x96,\n0x97,0x98,0x99,0x9a,0x9b,0x9c,0x9d,0x9e,0x9f,\n\n0xfdd0,0xfdd1,0xfdd2,0xfdd3,0xfdd4,0xfdd5,0xfdd6,0xfdd7,0xfdd8,\n0xfdd9,0xfdda,0xfddb,0xfddc,0xfddd,0xfdde,0xfddf,0xfde0,0xfde1,\n0xfde2,0xfde3,0xfde4,0xfde5,0xfde6,0xfde7,0xfde8,0xfde9,0xfdea,\n0xfdeb,0xfdec,0xfded,0xfdee,0xfdef,\n\n0xb,0xfffe,0xffff,0x1fffe,0x1ffff,0x2fffe,0x2ffff,0x3fffe,0x3ffff,\n0x4fffe,0x4ffff,0x5fffe,0x5ffff,0x6fffe,0x6ffff,0x7fffe,0x7ffff,\n0x8fffe,0x8ffff,0x9fffe,0x9ffff,0xafffe,0xaffff,0xbfffe,0xbffff,\n0xcfffe,0xcffff,0xdfffe,0xdffff,0xefffe,0xeffff,0xffffe,0xfffff,\n0x10fffe,0x10ffff\n}\n\n\ndef _replace_charref(s):\n s=s.group(1)\n if s[0]=='#':\n \n if s[1]in 'xX':\n num=int(s[2:].rstrip(';'),16)\n else:\n num=int(s[1:].rstrip(';'))\n if num in _invalid_charrefs:\n return _invalid_charrefs[num]\n if 0xD800 <=num <=0xDFFF or num >0x10FFFF:\n return '\\uFFFD'\n if num in _invalid_codepoints:\n return ''\n return chr(num)\n else:\n \n if s in _html5:\n return _html5[s]\n \n for x in range(len(s)-1,1,-1):\n if s[:x]in _html5:\n return _html5[s[:x]]+s[x:]\n else:\n return '&'+s\n \n \n_charref=_re.compile(r'&(#[0-9]+;?'\nr'|#[xX][0-9a-fA-F]+;?'\nr'|[^\\t\\n\\f <&#;]{1,32};?)')\n\ndef unescape(s):\n ''\n\n\n\n\n\n \n if '&'not in s:\n return s\n return _charref.sub(_replace_charref,s)\n", ["html.entities", "re"], 1], "html.parser": [".py", "''\n\n\n\n\n\n\n\n\n\nimport re\nimport _markupbase\n\nfrom html import unescape\n\n\n__all__=['HTMLParser']\n\n\n\ninteresting_normal=re.compile('[&<]')\nincomplete=re.compile('&[a-zA-Z#]')\n\nentityref=re.compile('&([a-zA-Z][-.a-zA-Z0-9]*)[^a-zA-Z0-9]')\ncharref=re.compile('&#(?:[0-9]+|[xX][0-9a-fA-F]+)[^0-9a-fA-F]')\n\nstarttagopen=re.compile('<[a-zA-Z]')\npiclose=re.compile('>')\ncommentclose=re.compile(r'--\\s*>')\n\n\n\n\n\n\ntagfind_tolerant=re.compile(r'([a-zA-Z][^\\t\\n\\r\\f />\\x00]*)(?:\\s|/(?!>))*')\nattrfind_tolerant=re.compile(\nr'((?<=[\\'\"\\s/])[^\\s/>][^\\s/=>]*)(\\s*=+\\s*'\nr'(\\'[^\\']*\\'|\"[^\"]*\"|(?![\\'\"])[^>\\s]*))?(?:\\s|/(?!>))*')\nlocatestarttagend_tolerant=re.compile(r\"\"\"\n <[a-zA-Z][^\\t\\n\\r\\f />\\x00]* # tag name\n (?:[\\s/]* # optional whitespace before attribute name\n (?:(?<=['\"\\s/])[^\\s/>][^\\s/=>]* # attribute name\n (?:\\s*=+\\s* # value indicator\n (?:'[^']*' # LITA-enclosed value\n |\"[^\"]*\" # LIT-enclosed value\n |(?!['\"])[^>\\s]* # bare value\n )\n \\s* # possibly followed by a space\n )?(?:\\s|/(?!>))*\n )*\n )?\n \\s* # trailing 
whitespace\n\"\"\",re.VERBOSE)\nendendtag=re.compile('>')\n\n\nendtagfind=re.compile(r'')\n\n\n\nclass HTMLParser(_markupbase.ParserBase):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n CDATA_CONTENT_ELEMENTS=(\"script\",\"style\")\n \n def __init__(self,*,convert_charrefs=True):\n ''\n\n\n\n \n super().__init__()\n self.convert_charrefs=convert_charrefs\n self.reset()\n \n def reset(self):\n ''\n self.rawdata=''\n self.lasttag='???'\n self.interesting=interesting_normal\n self.cdata_elem=None\n super().reset()\n \n def feed(self,data):\n ''\n\n\n\n \n self.rawdata=self.rawdata+data\n self.goahead(0)\n \n def close(self):\n ''\n self.goahead(1)\n \n __starttag_text=None\n \n def get_starttag_text(self):\n ''\n return self.__starttag_text\n \n def set_cdata_mode(self,elem):\n self.cdata_elem=elem.lower()\n self.interesting=re.compile(r''%self.cdata_elem,re.I)\n \n def clear_cdata_mode(self):\n self.interesting=interesting_normal\n self.cdata_elem=None\n \n \n \n \n def goahead(self,end):\n rawdata=self.rawdata\n i=0\n n=len(rawdata)\n while i =0 and\n not re.compile(r'[\\s;]').search(rawdata,amppos)):\n break\n j=n\n else:\n match=self.interesting.search(rawdata,i)\n if match:\n j=match.start()\n else:\n if self.cdata_elem:\n break\n j=n\n if i ',i+1)\n if k <0:\n k=rawdata.find('<',i+1)\n if k <0:\n k=i+1\n else:\n k +=1\n if self.convert_charrefs and not self.cdata_elem:\n self.handle_data(unescape(rawdata[i:k]))\n else:\n self.handle_data(rawdata[i:k])\n i=self.updatepos(i,k)\n elif startswith(\"&#\",i):\n match=charref.match(rawdata,i)\n if match:\n name=match.group()[2:-1]\n self.handle_charref(name)\n k=match.end()\n if not startswith(';',k -1):\n k=k -1\n i=self.updatepos(i,k)\n continue\n else:\n if \";\"in rawdata[i:]:\n self.handle_data(rawdata[i:i+2])\n i=self.updatepos(i,i+2)\n break\n elif startswith('&',i):\n match=entityref.match(rawdata,i)\n if match:\n name=match.group(1)\n self.handle_entityref(name)\n k=match.end()\n if not startswith(';',k -1):\n k=k -1\n i=self.updatepos(i,k)\n continue\n match=incomplete.match(rawdata,i)\n if match:\n \n if end and match.group()==rawdata[i:]:\n k=match.end()\n if k <=i:\n k=n\n i=self.updatepos(i,i+1)\n \n break\n elif(i+1)',i+9)\n if gtpos ==-1:\n return -1\n self.handle_decl(rawdata[i+2:gtpos])\n return gtpos+1\n else:\n return self.parse_bogus_comment(i)\n \n \n \n def parse_bogus_comment(self,i,report=1):\n rawdata=self.rawdata\n assert rawdata[i:i+2]in('',i+2)\n if pos ==-1:\n return -1\n if report:\n self.handle_comment(rawdata[i+2:pos])\n return pos+1\n \n \n def parse_pi(self,i):\n rawdata=self.rawdata\n assert rawdata[i:i+2]=='\",\"/>\"):\n self.handle_data(rawdata[i:endpos])\n return endpos\n if end.endswith('/>'):\n \n self.handle_startendtag(tag,attrs)\n else:\n self.handle_starttag(tag,attrs)\n if tag in self.CDATA_CONTENT_ELEMENTS:\n self.set_cdata_mode(tag)\n return endpos\n \n \n \n def check_for_whole_start_tag(self,i):\n rawdata=self.rawdata\n m=locatestarttagend_tolerant.match(rawdata,i)\n if m:\n j=m.end()\n next=rawdata[j:j+1]\n if next ==\">\":\n return j+1\n if next ==\"/\":\n if rawdata.startswith(\"/>\",j):\n return j+2\n if rawdata.startswith(\"/\",j):\n \n return -1\n \n if j >i:\n return j\n else:\n return i+1\n if next ==\"\":\n \n return -1\n if next in(\"abcdefghijklmnopqrstuvwxyz=/\"\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\"):\n \n \n return -1\n if j >i:\n return j\n else:\n return i+1\n raise AssertionError(\"we should not get here!\")\n \n \n def parse_endtag(self,i):\n rawdata=self.rawdata\n assert 
rawdata[i:i+2]==\"':\n return i+3\n else:\n return self.parse_bogus_comment(i)\n tagname=namematch.group(1).lower()\n \n \n \n \n gtpos=rawdata.find('>',namematch.end())\n self.handle_endtag(tagname)\n return gtpos+1\n \n elem=match.group(1).lower()\n if self.cdata_elem is not None:\n if elem !=self.cdata_elem:\n self.handle_data(rawdata[i:gtpos])\n return gtpos\n \n self.handle_endtag(elem)\n self.clear_cdata_mode()\n return gtpos\n \n \n def handle_startendtag(self,tag,attrs):\n self.handle_starttag(tag,attrs)\n self.handle_endtag(tag)\n \n \n def handle_starttag(self,tag,attrs):\n pass\n \n \n def handle_endtag(self,tag):\n pass\n \n \n def handle_charref(self,name):\n pass\n \n \n def handle_entityref(self,name):\n pass\n \n \n def handle_data(self,data):\n pass\n \n \n def handle_comment(self,data):\n pass\n \n \n def handle_decl(self,decl):\n pass\n \n \n def handle_pi(self,data):\n pass\n \n def unknown_decl(self,data):\n pass\n", ["_markupbase", "html", "re"]], "html.entities": [".py", "''\n\n__all__=['html5','name2codepoint','codepoint2name','entitydefs']\n\n\n\nname2codepoint={\n'AElig':0x00c6,\n'Aacute':0x00c1,\n'Acirc':0x00c2,\n'Agrave':0x00c0,\n'Alpha':0x0391,\n'Aring':0x00c5,\n'Atilde':0x00c3,\n'Auml':0x00c4,\n'Beta':0x0392,\n'Ccedil':0x00c7,\n'Chi':0x03a7,\n'Dagger':0x2021,\n'Delta':0x0394,\n'ETH':0x00d0,\n'Eacute':0x00c9,\n'Ecirc':0x00ca,\n'Egrave':0x00c8,\n'Epsilon':0x0395,\n'Eta':0x0397,\n'Euml':0x00cb,\n'Gamma':0x0393,\n'Iacute':0x00cd,\n'Icirc':0x00ce,\n'Igrave':0x00cc,\n'Iota':0x0399,\n'Iuml':0x00cf,\n'Kappa':0x039a,\n'Lambda':0x039b,\n'Mu':0x039c,\n'Ntilde':0x00d1,\n'Nu':0x039d,\n'OElig':0x0152,\n'Oacute':0x00d3,\n'Ocirc':0x00d4,\n'Ograve':0x00d2,\n'Omega':0x03a9,\n'Omicron':0x039f,\n'Oslash':0x00d8,\n'Otilde':0x00d5,\n'Ouml':0x00d6,\n'Phi':0x03a6,\n'Pi':0x03a0,\n'Prime':0x2033,\n'Psi':0x03a8,\n'Rho':0x03a1,\n'Scaron':0x0160,\n'Sigma':0x03a3,\n'THORN':0x00de,\n'Tau':0x03a4,\n'Theta':0x0398,\n'Uacute':0x00da,\n'Ucirc':0x00db,\n'Ugrave':0x00d9,\n'Upsilon':0x03a5,\n'Uuml':0x00dc,\n'Xi':0x039e,\n'Yacute':0x00dd,\n'Yuml':0x0178,\n'Zeta':0x0396,\n'aacute':0x00e1,\n'acirc':0x00e2,\n'acute':0x00b4,\n'aelig':0x00e6,\n'agrave':0x00e0,\n'alefsym':0x2135,\n'alpha':0x03b1,\n'amp':0x0026,\n'and':0x2227,\n'ang':0x2220,\n'aring':0x00e5,\n'asymp':0x2248,\n'atilde':0x00e3,\n'auml':0x00e4,\n'bdquo':0x201e,\n'beta':0x03b2,\n'brvbar':0x00a6,\n'bull':0x2022,\n'cap':0x2229,\n'ccedil':0x00e7,\n'cedil':0x00b8,\n'cent':0x00a2,\n'chi':0x03c7,\n'circ':0x02c6,\n'clubs':0x2663,\n'cong':0x2245,\n'copy':0x00a9,\n'crarr':0x21b5,\n'cup':0x222a,\n'curren':0x00a4,\n'dArr':0x21d3,\n'dagger':0x2020,\n'darr':0x2193,\n'deg':0x00b0,\n'delta':0x03b4,\n'diams':0x2666,\n'divide':0x00f7,\n'eacute':0x00e9,\n'ecirc':0x00ea,\n'egrave':0x00e8,\n'empty':0x2205,\n'emsp':0x2003,\n'ensp':0x2002,\n'epsilon':0x03b5,\n'equiv':0x2261,\n'eta':0x03b7,\n'eth':0x00f0,\n'euml':0x00eb,\n'euro':0x20ac,\n'exist':0x2203,\n'fnof':0x0192,\n'forall':0x2200,\n'frac12':0x00bd,\n'frac14':0x00bc,\n'frac34':0x00be,\n'frasl':0x2044,\n'gamma':0x03b3,\n'ge':0x2265,\n'gt':0x003e,\n'hArr':0x21d4,\n'harr':0x2194,\n'hearts':0x2665,\n'hellip':0x2026,\n'iacute':0x00ed,\n'icirc':0x00ee,\n'iexcl':0x00a1,\n'igrave':0x00ec,\n'image':0x2111,\n'infin':0x221e,\n'int':0x222b,\n'iota':0x03b9,\n'iquest':0x00bf,\n'isin':0x2208,\n'iuml':0x00ef,\n'kappa':0x03ba,\n'lArr':0x21d0,\n'lambda':0x03bb,\n'lang':0x2329,\n'laquo':0x00ab,\n'larr':0x2190,\n'lceil':0x2308,\n'ldquo':0x201c,\n'le':0x2264,\n'lfloor':0x230a,\n'lowast':0x2217,\n'loz':0x25ca,\n'lrm':0x200e,\n'lsaqu
o':0x2039,\n'lsquo':0x2018,\n'lt':0x003c,\n'macr':0x00af,\n'mdash':0x2014,\n'micro':0x00b5,\n'middot':0x00b7,\n'minus':0x2212,\n'mu':0x03bc,\n'nabla':0x2207,\n'nbsp':0x00a0,\n'ndash':0x2013,\n'ne':0x2260,\n'ni':0x220b,\n'not':0x00ac,\n'notin':0x2209,\n'nsub':0x2284,\n'ntilde':0x00f1,\n'nu':0x03bd,\n'oacute':0x00f3,\n'ocirc':0x00f4,\n'oelig':0x0153,\n'ograve':0x00f2,\n'oline':0x203e,\n'omega':0x03c9,\n'omicron':0x03bf,\n'oplus':0x2295,\n'or':0x2228,\n'ordf':0x00aa,\n'ordm':0x00ba,\n'oslash':0x00f8,\n'otilde':0x00f5,\n'otimes':0x2297,\n'ouml':0x00f6,\n'para':0x00b6,\n'part':0x2202,\n'permil':0x2030,\n'perp':0x22a5,\n'phi':0x03c6,\n'pi':0x03c0,\n'piv':0x03d6,\n'plusmn':0x00b1,\n'pound':0x00a3,\n'prime':0x2032,\n'prod':0x220f,\n'prop':0x221d,\n'psi':0x03c8,\n'quot':0x0022,\n'rArr':0x21d2,\n'radic':0x221a,\n'rang':0x232a,\n'raquo':0x00bb,\n'rarr':0x2192,\n'rceil':0x2309,\n'rdquo':0x201d,\n'real':0x211c,\n'reg':0x00ae,\n'rfloor':0x230b,\n'rho':0x03c1,\n'rlm':0x200f,\n'rsaquo':0x203a,\n'rsquo':0x2019,\n'sbquo':0x201a,\n'scaron':0x0161,\n'sdot':0x22c5,\n'sect':0x00a7,\n'shy':0x00ad,\n'sigma':0x03c3,\n'sigmaf':0x03c2,\n'sim':0x223c,\n'spades':0x2660,\n'sub':0x2282,\n'sube':0x2286,\n'sum':0x2211,\n'sup':0x2283,\n'sup1':0x00b9,\n'sup2':0x00b2,\n'sup3':0x00b3,\n'supe':0x2287,\n'szlig':0x00df,\n'tau':0x03c4,\n'there4':0x2234,\n'theta':0x03b8,\n'thetasym':0x03d1,\n'thinsp':0x2009,\n'thorn':0x00fe,\n'tilde':0x02dc,\n'times':0x00d7,\n'trade':0x2122,\n'uArr':0x21d1,\n'uacute':0x00fa,\n'uarr':0x2191,\n'ucirc':0x00fb,\n'ugrave':0x00f9,\n'uml':0x00a8,\n'upsih':0x03d2,\n'upsilon':0x03c5,\n'uuml':0x00fc,\n'weierp':0x2118,\n'xi':0x03be,\n'yacute':0x00fd,\n'yen':0x00a5,\n'yuml':0x00ff,\n'zeta':0x03b6,\n'zwj':0x200d,\n'zwnj':0x200c,\n}\n\n\n\n\n\n\n\nhtml5={\n'Aacute':'\\xc1',\n'aacute':'\\xe1',\n'Aacute;':'\\xc1',\n'aacute;':'\\xe1',\n'Abreve;':'\\u0102',\n'abreve;':'\\u0103',\n'ac;':'\\u223e',\n'acd;':'\\u223f',\n'acE;':'\\u223e\\u0333',\n'Acirc':'\\xc2',\n'acirc':'\\xe2',\n'Acirc;':'\\xc2',\n'acirc;':'\\xe2',\n'acute':'\\xb4',\n'acute;':'\\xb4',\n'Acy;':'\\u0410',\n'acy;':'\\u0430',\n'AElig':'\\xc6',\n'aelig':'\\xe6',\n'AElig;':'\\xc6',\n'aelig;':'\\xe6',\n'af;':'\\u2061',\n'Afr;':'\\U0001d504',\n'afr;':'\\U0001d51e',\n'Agrave':'\\xc0',\n'agrave':'\\xe0',\n'Agrave;':'\\xc0',\n'agrave;':'\\xe0',\n'alefsym;':'\\u2135',\n'aleph;':'\\u2135',\n'Alpha;':'\\u0391',\n'alpha;':'\\u03b1',\n'Amacr;':'\\u0100',\n'amacr;':'\\u0101',\n'amalg;':'\\u2a3f',\n'AMP':'&',\n'amp':'&',\n'AMP;':'&',\n'amp;':'&',\n'And;':'\\u2a53',\n'and;':'\\u2227',\n'andand;':'\\u2a55',\n'andd;':'\\u2a5c',\n'andslope;':'\\u2a58',\n'andv;':'\\u2a5a',\n'ang;':'\\u2220',\n'ange;':'\\u29a4',\n'angle;':'\\u2220',\n'angmsd;':'\\u2221',\n'angmsdaa;':'\\u29a8',\n'angmsdab;':'\\u29a9',\n'angmsdac;':'\\u29aa',\n'angmsdad;':'\\u29ab',\n'angmsdae;':'\\u29ac',\n'angmsdaf;':'\\u29ad',\n'angmsdag;':'\\u29ae',\n'angmsdah;':'\\u29af',\n'angrt;':'\\u221f',\n'angrtvb;':'\\u22be',\n'angrtvbd;':'\\u299d',\n'angsph;':'\\u2222',\n'angst;':'\\xc5',\n'angzarr;':'\\u237c',\n'Aogon;':'\\u0104',\n'aogon;':'\\u0105',\n'Aopf;':'\\U0001d538',\n'aopf;':'\\U0001d552',\n'ap;':'\\u2248',\n'apacir;':'\\u2a6f',\n'apE;':'\\u2a70',\n'ape;':'\\u224a',\n'apid;':'\\u224b',\n'apos;':\"'\",\n'ApplyFunction;':'\\u2061',\n'approx;':'\\u2248',\n'approxeq;':'\\u224a',\n'Aring':'\\xc5',\n'aring':'\\xe5',\n'Aring;':'\\xc5',\n'aring;':'\\xe5',\n'Ascr;':'\\U0001d49c',\n'ascr;':'\\U0001d4b6',\n'Assign;':'\\u2254',\n'ast;':'*',\n'asymp;':'\\u2248',\n'asympeq;':'\\u224d',\n'Atilde':'\\xc3',\n'atilde':'\\
xe3',\n'Atilde;':'\\xc3',\n'atilde;':'\\xe3',\n'Auml':'\\xc4',\n'auml':'\\xe4',\n'Auml;':'\\xc4',\n'auml;':'\\xe4',\n'awconint;':'\\u2233',\n'awint;':'\\u2a11',\n'backcong;':'\\u224c',\n'backepsilon;':'\\u03f6',\n'backprime;':'\\u2035',\n'backsim;':'\\u223d',\n'backsimeq;':'\\u22cd',\n'Backslash;':'\\u2216',\n'Barv;':'\\u2ae7',\n'barvee;':'\\u22bd',\n'Barwed;':'\\u2306',\n'barwed;':'\\u2305',\n'barwedge;':'\\u2305',\n'bbrk;':'\\u23b5',\n'bbrktbrk;':'\\u23b6',\n'bcong;':'\\u224c',\n'Bcy;':'\\u0411',\n'bcy;':'\\u0431',\n'bdquo;':'\\u201e',\n'becaus;':'\\u2235',\n'Because;':'\\u2235',\n'because;':'\\u2235',\n'bemptyv;':'\\u29b0',\n'bepsi;':'\\u03f6',\n'bernou;':'\\u212c',\n'Bernoullis;':'\\u212c',\n'Beta;':'\\u0392',\n'beta;':'\\u03b2',\n'beth;':'\\u2136',\n'between;':'\\u226c',\n'Bfr;':'\\U0001d505',\n'bfr;':'\\U0001d51f',\n'bigcap;':'\\u22c2',\n'bigcirc;':'\\u25ef',\n'bigcup;':'\\u22c3',\n'bigodot;':'\\u2a00',\n'bigoplus;':'\\u2a01',\n'bigotimes;':'\\u2a02',\n'bigsqcup;':'\\u2a06',\n'bigstar;':'\\u2605',\n'bigtriangledown;':'\\u25bd',\n'bigtriangleup;':'\\u25b3',\n'biguplus;':'\\u2a04',\n'bigvee;':'\\u22c1',\n'bigwedge;':'\\u22c0',\n'bkarow;':'\\u290d',\n'blacklozenge;':'\\u29eb',\n'blacksquare;':'\\u25aa',\n'blacktriangle;':'\\u25b4',\n'blacktriangledown;':'\\u25be',\n'blacktriangleleft;':'\\u25c2',\n'blacktriangleright;':'\\u25b8',\n'blank;':'\\u2423',\n'blk12;':'\\u2592',\n'blk14;':'\\u2591',\n'blk34;':'\\u2593',\n'block;':'\\u2588',\n'bne;':'=\\u20e5',\n'bnequiv;':'\\u2261\\u20e5',\n'bNot;':'\\u2aed',\n'bnot;':'\\u2310',\n'Bopf;':'\\U0001d539',\n'bopf;':'\\U0001d553',\n'bot;':'\\u22a5',\n'bottom;':'\\u22a5',\n'bowtie;':'\\u22c8',\n'boxbox;':'\\u29c9',\n'boxDL;':'\\u2557',\n'boxDl;':'\\u2556',\n'boxdL;':'\\u2555',\n'boxdl;':'\\u2510',\n'boxDR;':'\\u2554',\n'boxDr;':'\\u2553',\n'boxdR;':'\\u2552',\n'boxdr;':'\\u250c',\n'boxH;':'\\u2550',\n'boxh;':'\\u2500',\n'boxHD;':'\\u2566',\n'boxHd;':'\\u2564',\n'boxhD;':'\\u2565',\n'boxhd;':'\\u252c',\n'boxHU;':'\\u2569',\n'boxHu;':'\\u2567',\n'boxhU;':'\\u2568',\n'boxhu;':'\\u2534',\n'boxminus;':'\\u229f',\n'boxplus;':'\\u229e',\n'boxtimes;':'\\u22a0',\n'boxUL;':'\\u255d',\n'boxUl;':'\\u255c',\n'boxuL;':'\\u255b',\n'boxul;':'\\u2518',\n'boxUR;':'\\u255a',\n'boxUr;':'\\u2559',\n'boxuR;':'\\u2558',\n'boxur;':'\\u2514',\n'boxV;':'\\u2551',\n'boxv;':'\\u2502',\n'boxVH;':'\\u256c',\n'boxVh;':'\\u256b',\n'boxvH;':'\\u256a',\n'boxvh;':'\\u253c',\n'boxVL;':'\\u2563',\n'boxVl;':'\\u2562',\n'boxvL;':'\\u2561',\n'boxvl;':'\\u2524',\n'boxVR;':'\\u2560',\n'boxVr;':'\\u255f',\n'boxvR;':'\\u255e',\n'boxvr;':'\\u251c',\n'bprime;':'\\u2035',\n'Breve;':'\\u02d8',\n'breve;':'\\u02d8',\n'brvbar':'\\xa6',\n'brvbar;':'\\xa6',\n'Bscr;':'\\u212c',\n'bscr;':'\\U0001d4b7',\n'bsemi;':'\\u204f',\n'bsim;':'\\u223d',\n'bsime;':'\\u22cd',\n'bsol;':'\\\\',\n'bsolb;':'\\u29c5',\n'bsolhsub;':'\\u27c8',\n'bull;':'\\u2022',\n'bullet;':'\\u2022',\n'bump;':'\\u224e',\n'bumpE;':'\\u2aae',\n'bumpe;':'\\u224f',\n'Bumpeq;':'\\u224e',\n'bumpeq;':'\\u224f',\n'Cacute;':'\\u0106',\n'cacute;':'\\u0107',\n'Cap;':'\\u22d2',\n'cap;':'\\u2229',\n'capand;':'\\u2a44',\n'capbrcup;':'\\u2a49',\n'capcap;':'\\u2a4b',\n'capcup;':'\\u2a47',\n'capdot;':'\\u2a40',\n'CapitalDifferentialD;':'\\u2145',\n'caps;':'\\u2229\\ufe00',\n'caret;':'\\u2041',\n'caron;':'\\u02c7',\n'Cayleys;':'\\u212d',\n'ccaps;':'\\u2a4d',\n'Ccaron;':'\\u010c',\n'ccaron;':'\\u010d',\n'Ccedil':'\\xc7',\n'ccedil':'\\xe7',\n'Ccedil;':'\\xc7',\n'ccedil;':'\\xe7',\n'Ccirc;':'\\u0108',\n'ccirc;':'\\u0109',\n'Cconint;':'\\u2230',\n'ccups;':'\\u2a
4c',\n'ccupssm;':'\\u2a50',\n'Cdot;':'\\u010a',\n'cdot;':'\\u010b',\n'cedil':'\\xb8',\n'cedil;':'\\xb8',\n'Cedilla;':'\\xb8',\n'cemptyv;':'\\u29b2',\n'cent':'\\xa2',\n'cent;':'\\xa2',\n'CenterDot;':'\\xb7',\n'centerdot;':'\\xb7',\n'Cfr;':'\\u212d',\n'cfr;':'\\U0001d520',\n'CHcy;':'\\u0427',\n'chcy;':'\\u0447',\n'check;':'\\u2713',\n'checkmark;':'\\u2713',\n'Chi;':'\\u03a7',\n'chi;':'\\u03c7',\n'cir;':'\\u25cb',\n'circ;':'\\u02c6',\n'circeq;':'\\u2257',\n'circlearrowleft;':'\\u21ba',\n'circlearrowright;':'\\u21bb',\n'circledast;':'\\u229b',\n'circledcirc;':'\\u229a',\n'circleddash;':'\\u229d',\n'CircleDot;':'\\u2299',\n'circledR;':'\\xae',\n'circledS;':'\\u24c8',\n'CircleMinus;':'\\u2296',\n'CirclePlus;':'\\u2295',\n'CircleTimes;':'\\u2297',\n'cirE;':'\\u29c3',\n'cire;':'\\u2257',\n'cirfnint;':'\\u2a10',\n'cirmid;':'\\u2aef',\n'cirscir;':'\\u29c2',\n'ClockwiseContourIntegral;':'\\u2232',\n'CloseCurlyDoubleQuote;':'\\u201d',\n'CloseCurlyQuote;':'\\u2019',\n'clubs;':'\\u2663',\n'clubsuit;':'\\u2663',\n'Colon;':'\\u2237',\n'colon;':':',\n'Colone;':'\\u2a74',\n'colone;':'\\u2254',\n'coloneq;':'\\u2254',\n'comma;':',',\n'commat;':'@',\n'comp;':'\\u2201',\n'compfn;':'\\u2218',\n'complement;':'\\u2201',\n'complexes;':'\\u2102',\n'cong;':'\\u2245',\n'congdot;':'\\u2a6d',\n'Congruent;':'\\u2261',\n'Conint;':'\\u222f',\n'conint;':'\\u222e',\n'ContourIntegral;':'\\u222e',\n'Copf;':'\\u2102',\n'copf;':'\\U0001d554',\n'coprod;':'\\u2210',\n'Coproduct;':'\\u2210',\n'COPY':'\\xa9',\n'copy':'\\xa9',\n'COPY;':'\\xa9',\n'copy;':'\\xa9',\n'copysr;':'\\u2117',\n'CounterClockwiseContourIntegral;':'\\u2233',\n'crarr;':'\\u21b5',\n'Cross;':'\\u2a2f',\n'cross;':'\\u2717',\n'Cscr;':'\\U0001d49e',\n'cscr;':'\\U0001d4b8',\n'csub;':'\\u2acf',\n'csube;':'\\u2ad1',\n'csup;':'\\u2ad0',\n'csupe;':'\\u2ad2',\n'ctdot;':'\\u22ef',\n'cudarrl;':'\\u2938',\n'cudarrr;':'\\u2935',\n'cuepr;':'\\u22de',\n'cuesc;':'\\u22df',\n'cularr;':'\\u21b6',\n'cularrp;':'\\u293d',\n'Cup;':'\\u22d3',\n'cup;':'\\u222a',\n'cupbrcap;':'\\u2a48',\n'CupCap;':'\\u224d',\n'cupcap;':'\\u2a46',\n'cupcup;':'\\u2a4a',\n'cupdot;':'\\u228d',\n'cupor;':'\\u2a45',\n'cups;':'\\u222a\\ufe00',\n'curarr;':'\\u21b7',\n'curarrm;':'\\u293c',\n'curlyeqprec;':'\\u22de',\n'curlyeqsucc;':'\\u22df',\n'curlyvee;':'\\u22ce',\n'curlywedge;':'\\u22cf',\n'curren':'\\xa4',\n'curren;':'\\xa4',\n'curvearrowleft;':'\\u21b6',\n'curvearrowright;':'\\u21b7',\n'cuvee;':'\\u22ce',\n'cuwed;':'\\u22cf',\n'cwconint;':'\\u2232',\n'cwint;':'\\u2231',\n'cylcty;':'\\u232d',\n'Dagger;':'\\u2021',\n'dagger;':'\\u2020',\n'daleth;':'\\u2138',\n'Darr;':'\\u21a1',\n'dArr;':'\\u21d3',\n'darr;':'\\u2193',\n'dash;':'\\u2010',\n'Dashv;':'\\u2ae4',\n'dashv;':'\\u22a3',\n'dbkarow;':'\\u290f',\n'dblac;':'\\u02dd',\n'Dcaron;':'\\u010e',\n'dcaron;':'\\u010f',\n'Dcy;':'\\u0414',\n'dcy;':'\\u0434',\n'DD;':'\\u2145',\n'dd;':'\\u2146',\n'ddagger;':'\\u2021',\n'ddarr;':'\\u21ca',\n'DDotrahd;':'\\u2911',\n'ddotseq;':'\\u2a77',\n'deg':'\\xb0',\n'deg;':'\\xb0',\n'Del;':'\\u2207',\n'Delta;':'\\u0394',\n'delta;':'\\u03b4',\n'demptyv;':'\\u29b1',\n'dfisht;':'\\u297f',\n'Dfr;':'\\U0001d507',\n'dfr;':'\\U0001d521',\n'dHar;':'\\u2965',\n'dharl;':'\\u21c3',\n'dharr;':'\\u21c2',\n'DiacriticalAcute;':'\\xb4',\n'DiacriticalDot;':'\\u02d9',\n'DiacriticalDoubleAcute;':'\\u02dd',\n'DiacriticalGrave;':'`',\n'DiacriticalTilde;':'\\u02dc',\n'diam;':'\\u22c4',\n'Diamond;':'\\u22c4',\n'diamond;':'\\u22c4',\n'diamondsuit;':'\\u2666',\n'diams;':'\\u2666',\n'die;':'\\xa8',\n'DifferentialD;':'\\u2146',\n'digamma;':'\\u03dd',\n'disin;':'\
\u22f2',\n'div;':'\\xf7',\n'divide':'\\xf7',\n'divide;':'\\xf7',\n'divideontimes;':'\\u22c7',\n'divonx;':'\\u22c7',\n'DJcy;':'\\u0402',\n'djcy;':'\\u0452',\n'dlcorn;':'\\u231e',\n'dlcrop;':'\\u230d',\n'dollar;':'$',\n'Dopf;':'\\U0001d53b',\n'dopf;':'\\U0001d555',\n'Dot;':'\\xa8',\n'dot;':'\\u02d9',\n'DotDot;':'\\u20dc',\n'doteq;':'\\u2250',\n'doteqdot;':'\\u2251',\n'DotEqual;':'\\u2250',\n'dotminus;':'\\u2238',\n'dotplus;':'\\u2214',\n'dotsquare;':'\\u22a1',\n'doublebarwedge;':'\\u2306',\n'DoubleContourIntegral;':'\\u222f',\n'DoubleDot;':'\\xa8',\n'DoubleDownArrow;':'\\u21d3',\n'DoubleLeftArrow;':'\\u21d0',\n'DoubleLeftRightArrow;':'\\u21d4',\n'DoubleLeftTee;':'\\u2ae4',\n'DoubleLongLeftArrow;':'\\u27f8',\n'DoubleLongLeftRightArrow;':'\\u27fa',\n'DoubleLongRightArrow;':'\\u27f9',\n'DoubleRightArrow;':'\\u21d2',\n'DoubleRightTee;':'\\u22a8',\n'DoubleUpArrow;':'\\u21d1',\n'DoubleUpDownArrow;':'\\u21d5',\n'DoubleVerticalBar;':'\\u2225',\n'DownArrow;':'\\u2193',\n'Downarrow;':'\\u21d3',\n'downarrow;':'\\u2193',\n'DownArrowBar;':'\\u2913',\n'DownArrowUpArrow;':'\\u21f5',\n'DownBreve;':'\\u0311',\n'downdownarrows;':'\\u21ca',\n'downharpoonleft;':'\\u21c3',\n'downharpoonright;':'\\u21c2',\n'DownLeftRightVector;':'\\u2950',\n'DownLeftTeeVector;':'\\u295e',\n'DownLeftVector;':'\\u21bd',\n'DownLeftVectorBar;':'\\u2956',\n'DownRightTeeVector;':'\\u295f',\n'DownRightVector;':'\\u21c1',\n'DownRightVectorBar;':'\\u2957',\n'DownTee;':'\\u22a4',\n'DownTeeArrow;':'\\u21a7',\n'drbkarow;':'\\u2910',\n'drcorn;':'\\u231f',\n'drcrop;':'\\u230c',\n'Dscr;':'\\U0001d49f',\n'dscr;':'\\U0001d4b9',\n'DScy;':'\\u0405',\n'dscy;':'\\u0455',\n'dsol;':'\\u29f6',\n'Dstrok;':'\\u0110',\n'dstrok;':'\\u0111',\n'dtdot;':'\\u22f1',\n'dtri;':'\\u25bf',\n'dtrif;':'\\u25be',\n'duarr;':'\\u21f5',\n'duhar;':'\\u296f',\n'dwangle;':'\\u29a6',\n'DZcy;':'\\u040f',\n'dzcy;':'\\u045f',\n'dzigrarr;':'\\u27ff',\n'Eacute':'\\xc9',\n'eacute':'\\xe9',\n'Eacute;':'\\xc9',\n'eacute;':'\\xe9',\n'easter;':'\\u2a6e',\n'Ecaron;':'\\u011a',\n'ecaron;':'\\u011b',\n'ecir;':'\\u2256',\n'Ecirc':'\\xca',\n'ecirc':'\\xea',\n'Ecirc;':'\\xca',\n'ecirc;':'\\xea',\n'ecolon;':'\\u2255',\n'Ecy;':'\\u042d',\n'ecy;':'\\u044d',\n'eDDot;':'\\u2a77',\n'Edot;':'\\u0116',\n'eDot;':'\\u2251',\n'edot;':'\\u0117',\n'ee;':'\\u2147',\n'efDot;':'\\u2252',\n'Efr;':'\\U0001d508',\n'efr;':'\\U0001d522',\n'eg;':'\\u2a9a',\n'Egrave':'\\xc8',\n'egrave':'\\xe8',\n'Egrave;':'\\xc8',\n'egrave;':'\\xe8',\n'egs;':'\\u2a96',\n'egsdot;':'\\u2a98',\n'el;':'\\u2a99',\n'Element;':'\\u2208',\n'elinters;':'\\u23e7',\n'ell;':'\\u2113',\n'els;':'\\u2a95',\n'elsdot;':'\\u2a97',\n'Emacr;':'\\u0112',\n'emacr;':'\\u0113',\n'empty;':'\\u2205',\n'emptyset;':'\\u2205',\n'EmptySmallSquare;':'\\u25fb',\n'emptyv;':'\\u2205',\n'EmptyVerySmallSquare;':'\\u25ab',\n'emsp13;':'\\u2004',\n'emsp14;':'\\u2005',\n'emsp;':'\\u2003',\n'ENG;':'\\u014a',\n'eng;':'\\u014b',\n'ensp;':'\\u2002',\n'Eogon;':'\\u0118',\n'eogon;':'\\u0119',\n'Eopf;':'\\U0001d53c',\n'eopf;':'\\U0001d556',\n'epar;':'\\u22d5',\n'eparsl;':'\\u29e3',\n'eplus;':'\\u2a71',\n'epsi;':'\\u03b5',\n'Epsilon;':'\\u0395',\n'epsilon;':'\\u03b5',\n'epsiv;':'\\u03f5',\n'eqcirc;':'\\u2256',\n'eqcolon;':'\\u2255',\n'eqsim;':'\\u2242',\n'eqslantgtr;':'\\u2a96',\n'eqslantless;':'\\u2a95',\n'Equal;':'\\u2a75',\n'equals;':'=',\n'EqualTilde;':'\\u2242',\n'equest;':'\\u225f',\n'Equilibrium;':'\\u21cc',\n'equiv;':'\\u2261',\n'equivDD;':'\\u2a78',\n'eqvparsl;':'\\u29e5',\n'erarr;':'\\u2971',\n'erDot;':'\\u2253',\n'Escr;':'\\u2130',\n'escr;':'\\u212f',\n'esdot;':'\\u22
50',\n'Esim;':'\\u2a73',\n'esim;':'\\u2242',\n'Eta;':'\\u0397',\n'eta;':'\\u03b7',\n'ETH':'\\xd0',\n'eth':'\\xf0',\n'ETH;':'\\xd0',\n'eth;':'\\xf0',\n'Euml':'\\xcb',\n'euml':'\\xeb',\n'Euml;':'\\xcb',\n'euml;':'\\xeb',\n'euro;':'\\u20ac',\n'excl;':'!',\n'exist;':'\\u2203',\n'Exists;':'\\u2203',\n'expectation;':'\\u2130',\n'ExponentialE;':'\\u2147',\n'exponentiale;':'\\u2147',\n'fallingdotseq;':'\\u2252',\n'Fcy;':'\\u0424',\n'fcy;':'\\u0444',\n'female;':'\\u2640',\n'ffilig;':'\\ufb03',\n'fflig;':'\\ufb00',\n'ffllig;':'\\ufb04',\n'Ffr;':'\\U0001d509',\n'ffr;':'\\U0001d523',\n'filig;':'\\ufb01',\n'FilledSmallSquare;':'\\u25fc',\n'FilledVerySmallSquare;':'\\u25aa',\n'fjlig;':'fj',\n'flat;':'\\u266d',\n'fllig;':'\\ufb02',\n'fltns;':'\\u25b1',\n'fnof;':'\\u0192',\n'Fopf;':'\\U0001d53d',\n'fopf;':'\\U0001d557',\n'ForAll;':'\\u2200',\n'forall;':'\\u2200',\n'fork;':'\\u22d4',\n'forkv;':'\\u2ad9',\n'Fouriertrf;':'\\u2131',\n'fpartint;':'\\u2a0d',\n'frac12':'\\xbd',\n'frac12;':'\\xbd',\n'frac13;':'\\u2153',\n'frac14':'\\xbc',\n'frac14;':'\\xbc',\n'frac15;':'\\u2155',\n'frac16;':'\\u2159',\n'frac18;':'\\u215b',\n'frac23;':'\\u2154',\n'frac25;':'\\u2156',\n'frac34':'\\xbe',\n'frac34;':'\\xbe',\n'frac35;':'\\u2157',\n'frac38;':'\\u215c',\n'frac45;':'\\u2158',\n'frac56;':'\\u215a',\n'frac58;':'\\u215d',\n'frac78;':'\\u215e',\n'frasl;':'\\u2044',\n'frown;':'\\u2322',\n'Fscr;':'\\u2131',\n'fscr;':'\\U0001d4bb',\n'gacute;':'\\u01f5',\n'Gamma;':'\\u0393',\n'gamma;':'\\u03b3',\n'Gammad;':'\\u03dc',\n'gammad;':'\\u03dd',\n'gap;':'\\u2a86',\n'Gbreve;':'\\u011e',\n'gbreve;':'\\u011f',\n'Gcedil;':'\\u0122',\n'Gcirc;':'\\u011c',\n'gcirc;':'\\u011d',\n'Gcy;':'\\u0413',\n'gcy;':'\\u0433',\n'Gdot;':'\\u0120',\n'gdot;':'\\u0121',\n'gE;':'\\u2267',\n'ge;':'\\u2265',\n'gEl;':'\\u2a8c',\n'gel;':'\\u22db',\n'geq;':'\\u2265',\n'geqq;':'\\u2267',\n'geqslant;':'\\u2a7e',\n'ges;':'\\u2a7e',\n'gescc;':'\\u2aa9',\n'gesdot;':'\\u2a80',\n'gesdoto;':'\\u2a82',\n'gesdotol;':'\\u2a84',\n'gesl;':'\\u22db\\ufe00',\n'gesles;':'\\u2a94',\n'Gfr;':'\\U0001d50a',\n'gfr;':'\\U0001d524',\n'Gg;':'\\u22d9',\n'gg;':'\\u226b',\n'ggg;':'\\u22d9',\n'gimel;':'\\u2137',\n'GJcy;':'\\u0403',\n'gjcy;':'\\u0453',\n'gl;':'\\u2277',\n'gla;':'\\u2aa5',\n'glE;':'\\u2a92',\n'glj;':'\\u2aa4',\n'gnap;':'\\u2a8a',\n'gnapprox;':'\\u2a8a',\n'gnE;':'\\u2269',\n'gne;':'\\u2a88',\n'gneq;':'\\u2a88',\n'gneqq;':'\\u2269',\n'gnsim;':'\\u22e7',\n'Gopf;':'\\U0001d53e',\n'gopf;':'\\U0001d558',\n'grave;':'`',\n'GreaterEqual;':'\\u2265',\n'GreaterEqualLess;':'\\u22db',\n'GreaterFullEqual;':'\\u2267',\n'GreaterGreater;':'\\u2aa2',\n'GreaterLess;':'\\u2277',\n'GreaterSlantEqual;':'\\u2a7e',\n'GreaterTilde;':'\\u2273',\n'Gscr;':'\\U0001d4a2',\n'gscr;':'\\u210a',\n'gsim;':'\\u2273',\n'gsime;':'\\u2a8e',\n'gsiml;':'\\u2a90',\n'GT':'>',\n'gt':'>',\n'GT;':'>',\n'Gt;':'\\u226b',\n'gt;':'>',\n'gtcc;':'\\u2aa7',\n'gtcir;':'\\u2a7a',\n'gtdot;':'\\u22d7',\n'gtlPar;':'\\u2995',\n'gtquest;':'\\u2a7c',\n'gtrapprox;':'\\u2a86',\n'gtrarr;':'\\u2978',\n'gtrdot;':'\\u22d7',\n'gtreqless;':'\\u22db',\n'gtreqqless;':'\\u2a8c',\n'gtrless;':'\\u2277',\n'gtrsim;':'\\u2273',\n'gvertneqq;':'\\u2269\\ufe00',\n'gvnE;':'\\u2269\\ufe00',\n'Hacek;':'\\u02c7',\n'hairsp;':'\\u200a',\n'half;':'\\xbd',\n'hamilt;':'\\u210b',\n'HARDcy;':'\\u042a',\n'hardcy;':'\\u044a',\n'hArr;':'\\u21d4',\n'harr;':'\\u2194',\n'harrcir;':'\\u2948',\n'harrw;':'\\u21ad',\n'Hat;':'^',\n'hbar;':'\\u210f',\n'Hcirc;':'\\u0124',\n'hcirc;':'\\u0125',\n'hearts;':'\\u2665',\n'heartsuit;':'\\u2665',\n'hellip;':'\\u2026',\n'hercon;':'\\u22b9
',\n'Hfr;':'\\u210c',\n'hfr;':'\\U0001d525',\n'HilbertSpace;':'\\u210b',\n'hksearow;':'\\u2925',\n'hkswarow;':'\\u2926',\n'hoarr;':'\\u21ff',\n'homtht;':'\\u223b',\n'hookleftarrow;':'\\u21a9',\n'hookrightarrow;':'\\u21aa',\n'Hopf;':'\\u210d',\n'hopf;':'\\U0001d559',\n'horbar;':'\\u2015',\n'HorizontalLine;':'\\u2500',\n'Hscr;':'\\u210b',\n'hscr;':'\\U0001d4bd',\n'hslash;':'\\u210f',\n'Hstrok;':'\\u0126',\n'hstrok;':'\\u0127',\n'HumpDownHump;':'\\u224e',\n'HumpEqual;':'\\u224f',\n'hybull;':'\\u2043',\n'hyphen;':'\\u2010',\n'Iacute':'\\xcd',\n'iacute':'\\xed',\n'Iacute;':'\\xcd',\n'iacute;':'\\xed',\n'ic;':'\\u2063',\n'Icirc':'\\xce',\n'icirc':'\\xee',\n'Icirc;':'\\xce',\n'icirc;':'\\xee',\n'Icy;':'\\u0418',\n'icy;':'\\u0438',\n'Idot;':'\\u0130',\n'IEcy;':'\\u0415',\n'iecy;':'\\u0435',\n'iexcl':'\\xa1',\n'iexcl;':'\\xa1',\n'iff;':'\\u21d4',\n'Ifr;':'\\u2111',\n'ifr;':'\\U0001d526',\n'Igrave':'\\xcc',\n'igrave':'\\xec',\n'Igrave;':'\\xcc',\n'igrave;':'\\xec',\n'ii;':'\\u2148',\n'iiiint;':'\\u2a0c',\n'iiint;':'\\u222d',\n'iinfin;':'\\u29dc',\n'iiota;':'\\u2129',\n'IJlig;':'\\u0132',\n'ijlig;':'\\u0133',\n'Im;':'\\u2111',\n'Imacr;':'\\u012a',\n'imacr;':'\\u012b',\n'image;':'\\u2111',\n'ImaginaryI;':'\\u2148',\n'imagline;':'\\u2110',\n'imagpart;':'\\u2111',\n'imath;':'\\u0131',\n'imof;':'\\u22b7',\n'imped;':'\\u01b5',\n'Implies;':'\\u21d2',\n'in;':'\\u2208',\n'incare;':'\\u2105',\n'infin;':'\\u221e',\n'infintie;':'\\u29dd',\n'inodot;':'\\u0131',\n'Int;':'\\u222c',\n'int;':'\\u222b',\n'intcal;':'\\u22ba',\n'integers;':'\\u2124',\n'Integral;':'\\u222b',\n'intercal;':'\\u22ba',\n'Intersection;':'\\u22c2',\n'intlarhk;':'\\u2a17',\n'intprod;':'\\u2a3c',\n'InvisibleComma;':'\\u2063',\n'InvisibleTimes;':'\\u2062',\n'IOcy;':'\\u0401',\n'iocy;':'\\u0451',\n'Iogon;':'\\u012e',\n'iogon;':'\\u012f',\n'Iopf;':'\\U0001d540',\n'iopf;':'\\U0001d55a',\n'Iota;':'\\u0399',\n'iota;':'\\u03b9',\n'iprod;':'\\u2a3c',\n'iquest':'\\xbf',\n'iquest;':'\\xbf',\n'Iscr;':'\\u2110',\n'iscr;':'\\U0001d4be',\n'isin;':'\\u2208',\n'isindot;':'\\u22f5',\n'isinE;':'\\u22f9',\n'isins;':'\\u22f4',\n'isinsv;':'\\u22f3',\n'isinv;':'\\u2208',\n'it;':'\\u2062',\n'Itilde;':'\\u0128',\n'itilde;':'\\u0129',\n'Iukcy;':'\\u0406',\n'iukcy;':'\\u0456',\n'Iuml':'\\xcf',\n'iuml':'\\xef',\n'Iuml;':'\\xcf',\n'iuml;':'\\xef',\n'Jcirc;':'\\u0134',\n'jcirc;':'\\u0135',\n'Jcy;':'\\u0419',\n'jcy;':'\\u0439',\n'Jfr;':'\\U0001d50d',\n'jfr;':'\\U0001d527',\n'jmath;':'\\u0237',\n'Jopf;':'\\U0001d541',\n'jopf;':'\\U0001d55b',\n'Jscr;':'\\U0001d4a5',\n'jscr;':'\\U0001d4bf',\n'Jsercy;':'\\u0408',\n'jsercy;':'\\u0458',\n'Jukcy;':'\\u0404',\n'jukcy;':'\\u0454',\n'Kappa;':'\\u039a',\n'kappa;':'\\u03ba',\n'kappav;':'\\u03f0',\n'Kcedil;':'\\u0136',\n'kcedil;':'\\u0137',\n'Kcy;':'\\u041a',\n'kcy;':'\\u043a',\n'Kfr;':'\\U0001d50e',\n'kfr;':'\\U0001d528',\n'kgreen;':'\\u0138',\n'KHcy;':'\\u0425',\n'khcy;':'\\u0445',\n'KJcy;':'\\u040c',\n'kjcy;':'\\u045c',\n'Kopf;':'\\U0001d542',\n'kopf;':'\\U0001d55c',\n'Kscr;':'\\U0001d4a6',\n'kscr;':'\\U0001d4c0',\n'lAarr;':'\\u21da',\n'Lacute;':'\\u0139',\n'lacute;':'\\u013a',\n'laemptyv;':'\\u29b4',\n'lagran;':'\\u2112',\n'Lambda;':'\\u039b',\n'lambda;':'\\u03bb',\n'Lang;':'\\u27ea',\n'lang;':'\\u27e8',\n'langd;':'\\u2991',\n'langle;':'\\u27e8',\n'lap;':'\\u2a85',\n'Laplacetrf;':'\\u2112',\n'laquo':'\\xab',\n'laquo;':'\\xab',\n'Larr;':'\\u219e',\n'lArr;':'\\u21d0',\n'larr;':'\\u2190',\n'larrb;':'\\u21e4',\n'larrbfs;':'\\u291f',\n'larrfs;':'\\u291d',\n'larrhk;':'\\u21a9',\n'larrlp;':'\\u21ab',\n'larrpl;':'\\u2939',\n'larrsim;':'\\u29
73',\n'larrtl;':'\\u21a2',\n'lat;':'\\u2aab',\n'lAtail;':'\\u291b',\n'latail;':'\\u2919',\n'late;':'\\u2aad',\n'lates;':'\\u2aad\\ufe00',\n'lBarr;':'\\u290e',\n'lbarr;':'\\u290c',\n'lbbrk;':'\\u2772',\n'lbrace;':'{',\n'lbrack;':'[',\n'lbrke;':'\\u298b',\n'lbrksld;':'\\u298f',\n'lbrkslu;':'\\u298d',\n'Lcaron;':'\\u013d',\n'lcaron;':'\\u013e',\n'Lcedil;':'\\u013b',\n'lcedil;':'\\u013c',\n'lceil;':'\\u2308',\n'lcub;':'{',\n'Lcy;':'\\u041b',\n'lcy;':'\\u043b',\n'ldca;':'\\u2936',\n'ldquo;':'\\u201c',\n'ldquor;':'\\u201e',\n'ldrdhar;':'\\u2967',\n'ldrushar;':'\\u294b',\n'ldsh;':'\\u21b2',\n'lE;':'\\u2266',\n'le;':'\\u2264',\n'LeftAngleBracket;':'\\u27e8',\n'LeftArrow;':'\\u2190',\n'Leftarrow;':'\\u21d0',\n'leftarrow;':'\\u2190',\n'LeftArrowBar;':'\\u21e4',\n'LeftArrowRightArrow;':'\\u21c6',\n'leftarrowtail;':'\\u21a2',\n'LeftCeiling;':'\\u2308',\n'LeftDoubleBracket;':'\\u27e6',\n'LeftDownTeeVector;':'\\u2961',\n'LeftDownVector;':'\\u21c3',\n'LeftDownVectorBar;':'\\u2959',\n'LeftFloor;':'\\u230a',\n'leftharpoondown;':'\\u21bd',\n'leftharpoonup;':'\\u21bc',\n'leftleftarrows;':'\\u21c7',\n'LeftRightArrow;':'\\u2194',\n'Leftrightarrow;':'\\u21d4',\n'leftrightarrow;':'\\u2194',\n'leftrightarrows;':'\\u21c6',\n'leftrightharpoons;':'\\u21cb',\n'leftrightsquigarrow;':'\\u21ad',\n'LeftRightVector;':'\\u294e',\n'LeftTee;':'\\u22a3',\n'LeftTeeArrow;':'\\u21a4',\n'LeftTeeVector;':'\\u295a',\n'leftthreetimes;':'\\u22cb',\n'LeftTriangle;':'\\u22b2',\n'LeftTriangleBar;':'\\u29cf',\n'LeftTriangleEqual;':'\\u22b4',\n'LeftUpDownVector;':'\\u2951',\n'LeftUpTeeVector;':'\\u2960',\n'LeftUpVector;':'\\u21bf',\n'LeftUpVectorBar;':'\\u2958',\n'LeftVector;':'\\u21bc',\n'LeftVectorBar;':'\\u2952',\n'lEg;':'\\u2a8b',\n'leg;':'\\u22da',\n'leq;':'\\u2264',\n'leqq;':'\\u2266',\n'leqslant;':'\\u2a7d',\n'les;':'\\u2a7d',\n'lescc;':'\\u2aa8',\n'lesdot;':'\\u2a7f',\n'lesdoto;':'\\u2a81',\n'lesdotor;':'\\u2a83',\n'lesg;':'\\u22da\\ufe00',\n'lesges;':'\\u2a93',\n'lessapprox;':'\\u2a85',\n'lessdot;':'\\u22d6',\n'lesseqgtr;':'\\u22da',\n'lesseqqgtr;':'\\u2a8b',\n'LessEqualGreater;':'\\u22da',\n'LessFullEqual;':'\\u2266',\n'LessGreater;':'\\u2276',\n'lessgtr;':'\\u2276',\n'LessLess;':'\\u2aa1',\n'lesssim;':'\\u2272',\n'LessSlantEqual;':'\\u2a7d',\n'LessTilde;':'\\u2272',\n'lfisht;':'\\u297c',\n'lfloor;':'\\u230a',\n'Lfr;':'\\U0001d50f',\n'lfr;':'\\U0001d529',\n'lg;':'\\u2276',\n'lgE;':'\\u2a91',\n'lHar;':'\\u2962',\n'lhard;':'\\u21bd',\n'lharu;':'\\u21bc',\n'lharul;':'\\u296a',\n'lhblk;':'\\u2584',\n'LJcy;':'\\u0409',\n'ljcy;':'\\u0459',\n'Ll;':'\\u22d8',\n'll;':'\\u226a',\n'llarr;':'\\u21c7',\n'llcorner;':'\\u231e',\n'Lleftarrow;':'\\u21da',\n'llhard;':'\\u296b',\n'lltri;':'\\u25fa',\n'Lmidot;':'\\u013f',\n'lmidot;':'\\u0140',\n'lmoust;':'\\u23b0',\n'lmoustache;':'\\u23b0',\n'lnap;':'\\u2a89',\n'lnapprox;':'\\u2a89',\n'lnE;':'\\u2268',\n'lne;':'\\u2a87',\n'lneq;':'\\u2a87',\n'lneqq;':'\\u2268',\n'lnsim;':'\\u22e6',\n'loang;':'\\u27ec',\n'loarr;':'\\u21fd',\n'lobrk;':'\\u27e6',\n'LongLeftArrow;':'\\u27f5',\n'Longleftarrow;':'\\u27f8',\n'longleftarrow;':'\\u27f5',\n'LongLeftRightArrow;':'\\u27f7',\n'Longleftrightarrow;':'\\u27fa',\n'longleftrightarrow;':'\\u27f7',\n'longmapsto;':'\\u27fc',\n'LongRightArrow;':'\\u27f6',\n'Longrightarrow;':'\\u27f9',\n'longrightarrow;':'\\u27f6',\n'looparrowleft;':'\\u21ab',\n'looparrowright;':'\\u21ac',\n'lopar;':'\\u2985',\n'Lopf;':'\\U0001d543',\n'lopf;':'\\U0001d55d',\n'loplus;':'\\u2a2d',\n'lotimes;':'\\u2a34',\n'lowast;':'\\u2217',\n'lowbar;':'_',\n'LowerLeftArrow;':'\\u2199',\n'LowerRightArrow;':
'\\u2198',\n'loz;':'\\u25ca',\n'lozenge;':'\\u25ca',\n'lozf;':'\\u29eb',\n'lpar;':'(',\n'lparlt;':'\\u2993',\n'lrarr;':'\\u21c6',\n'lrcorner;':'\\u231f',\n'lrhar;':'\\u21cb',\n'lrhard;':'\\u296d',\n'lrm;':'\\u200e',\n'lrtri;':'\\u22bf',\n'lsaquo;':'\\u2039',\n'Lscr;':'\\u2112',\n'lscr;':'\\U0001d4c1',\n'Lsh;':'\\u21b0',\n'lsh;':'\\u21b0',\n'lsim;':'\\u2272',\n'lsime;':'\\u2a8d',\n'lsimg;':'\\u2a8f',\n'lsqb;':'[',\n'lsquo;':'\\u2018',\n'lsquor;':'\\u201a',\n'Lstrok;':'\\u0141',\n'lstrok;':'\\u0142',\n'LT':'<',\n'lt':'<',\n'LT;':'<',\n'Lt;':'\\u226a',\n'lt;':'<',\n'ltcc;':'\\u2aa6',\n'ltcir;':'\\u2a79',\n'ltdot;':'\\u22d6',\n'lthree;':'\\u22cb',\n'ltimes;':'\\u22c9',\n'ltlarr;':'\\u2976',\n'ltquest;':'\\u2a7b',\n'ltri;':'\\u25c3',\n'ltrie;':'\\u22b4',\n'ltrif;':'\\u25c2',\n'ltrPar;':'\\u2996',\n'lurdshar;':'\\u294a',\n'luruhar;':'\\u2966',\n'lvertneqq;':'\\u2268\\ufe00',\n'lvnE;':'\\u2268\\ufe00',\n'macr':'\\xaf',\n'macr;':'\\xaf',\n'male;':'\\u2642',\n'malt;':'\\u2720',\n'maltese;':'\\u2720',\n'Map;':'\\u2905',\n'map;':'\\u21a6',\n'mapsto;':'\\u21a6',\n'mapstodown;':'\\u21a7',\n'mapstoleft;':'\\u21a4',\n'mapstoup;':'\\u21a5',\n'marker;':'\\u25ae',\n'mcomma;':'\\u2a29',\n'Mcy;':'\\u041c',\n'mcy;':'\\u043c',\n'mdash;':'\\u2014',\n'mDDot;':'\\u223a',\n'measuredangle;':'\\u2221',\n'MediumSpace;':'\\u205f',\n'Mellintrf;':'\\u2133',\n'Mfr;':'\\U0001d510',\n'mfr;':'\\U0001d52a',\n'mho;':'\\u2127',\n'micro':'\\xb5',\n'micro;':'\\xb5',\n'mid;':'\\u2223',\n'midast;':'*',\n'midcir;':'\\u2af0',\n'middot':'\\xb7',\n'middot;':'\\xb7',\n'minus;':'\\u2212',\n'minusb;':'\\u229f',\n'minusd;':'\\u2238',\n'minusdu;':'\\u2a2a',\n'MinusPlus;':'\\u2213',\n'mlcp;':'\\u2adb',\n'mldr;':'\\u2026',\n'mnplus;':'\\u2213',\n'models;':'\\u22a7',\n'Mopf;':'\\U0001d544',\n'mopf;':'\\U0001d55e',\n'mp;':'\\u2213',\n'Mscr;':'\\u2133',\n'mscr;':'\\U0001d4c2',\n'mstpos;':'\\u223e',\n'Mu;':'\\u039c',\n'mu;':'\\u03bc',\n'multimap;':'\\u22b8',\n'mumap;':'\\u22b8',\n'nabla;':'\\u2207',\n'Nacute;':'\\u0143',\n'nacute;':'\\u0144',\n'nang;':'\\u2220\\u20d2',\n'nap;':'\\u2249',\n'napE;':'\\u2a70\\u0338',\n'napid;':'\\u224b\\u0338',\n'napos;':'\\u0149',\n'napprox;':'\\u2249',\n'natur;':'\\u266e',\n'natural;':'\\u266e',\n'naturals;':'\\u2115',\n'nbsp':'\\xa0',\n'nbsp;':'\\xa0',\n'nbump;':'\\u224e\\u0338',\n'nbumpe;':'\\u224f\\u0338',\n'ncap;':'\\u2a43',\n'Ncaron;':'\\u0147',\n'ncaron;':'\\u0148',\n'Ncedil;':'\\u0145',\n'ncedil;':'\\u0146',\n'ncong;':'\\u2247',\n'ncongdot;':'\\u2a6d\\u0338',\n'ncup;':'\\u2a42',\n'Ncy;':'\\u041d',\n'ncy;':'\\u043d',\n'ndash;':'\\u2013',\n'ne;':'\\u2260',\n'nearhk;':'\\u2924',\n'neArr;':'\\u21d7',\n'nearr;':'\\u2197',\n'nearrow;':'\\u2197',\n'nedot;':'\\u2250\\u0338',\n'NegativeMediumSpace;':'\\u200b',\n'NegativeThickSpace;':'\\u200b',\n'NegativeThinSpace;':'\\u200b',\n'NegativeVeryThinSpace;':'\\u200b',\n'nequiv;':'\\u2262',\n'nesear;':'\\u2928',\n'nesim;':'\\u2242\\u0338',\n'NestedGreaterGreater;':'\\u226b',\n'NestedLessLess;':'\\u226a',\n'NewLine;':'\\n',\n'nexist;':'\\u2204',\n'nexists;':'\\u2204',\n'Nfr;':'\\U0001d511',\n'nfr;':'\\U0001d52b',\n'ngE;':'\\u2267\\u0338',\n'nge;':'\\u2271',\n'ngeq;':'\\u2271',\n'ngeqq;':'\\u2267\\u0338',\n'ngeqslant;':'\\u2a7e\\u0338',\n'nges;':'\\u2a7e\\u0338',\n'nGg;':'\\u22d9\\u0338',\n'ngsim;':'\\u2275',\n'nGt;':'\\u226b\\u20d2',\n'ngt;':'\\u226f',\n'ngtr;':'\\u226f',\n'nGtv;':'\\u226b\\u0338',\n'nhArr;':'\\u21ce',\n'nharr;':'\\u21ae',\n'nhpar;':'\\u2af2',\n'ni;':'\\u220b',\n'nis;':'\\u22fc',\n'nisd;':'\\u22fa',\n'niv;':'\\u220b',\n'NJcy;':'\\u040a',\n'njcy;':'\\u045a',\
n'nlArr;':'\\u21cd',\n'nlarr;':'\\u219a',\n'nldr;':'\\u2025',\n'nlE;':'\\u2266\\u0338',\n'nle;':'\\u2270',\n'nLeftarrow;':'\\u21cd',\n'nleftarrow;':'\\u219a',\n'nLeftrightarrow;':'\\u21ce',\n'nleftrightarrow;':'\\u21ae',\n'nleq;':'\\u2270',\n'nleqq;':'\\u2266\\u0338',\n'nleqslant;':'\\u2a7d\\u0338',\n'nles;':'\\u2a7d\\u0338',\n'nless;':'\\u226e',\n'nLl;':'\\u22d8\\u0338',\n'nlsim;':'\\u2274',\n'nLt;':'\\u226a\\u20d2',\n'nlt;':'\\u226e',\n'nltri;':'\\u22ea',\n'nltrie;':'\\u22ec',\n'nLtv;':'\\u226a\\u0338',\n'nmid;':'\\u2224',\n'NoBreak;':'\\u2060',\n'NonBreakingSpace;':'\\xa0',\n'Nopf;':'\\u2115',\n'nopf;':'\\U0001d55f',\n'not':'\\xac',\n'Not;':'\\u2aec',\n'not;':'\\xac',\n'NotCongruent;':'\\u2262',\n'NotCupCap;':'\\u226d',\n'NotDoubleVerticalBar;':'\\u2226',\n'NotElement;':'\\u2209',\n'NotEqual;':'\\u2260',\n'NotEqualTilde;':'\\u2242\\u0338',\n'NotExists;':'\\u2204',\n'NotGreater;':'\\u226f',\n'NotGreaterEqual;':'\\u2271',\n'NotGreaterFullEqual;':'\\u2267\\u0338',\n'NotGreaterGreater;':'\\u226b\\u0338',\n'NotGreaterLess;':'\\u2279',\n'NotGreaterSlantEqual;':'\\u2a7e\\u0338',\n'NotGreaterTilde;':'\\u2275',\n'NotHumpDownHump;':'\\u224e\\u0338',\n'NotHumpEqual;':'\\u224f\\u0338',\n'notin;':'\\u2209',\n'notindot;':'\\u22f5\\u0338',\n'notinE;':'\\u22f9\\u0338',\n'notinva;':'\\u2209',\n'notinvb;':'\\u22f7',\n'notinvc;':'\\u22f6',\n'NotLeftTriangle;':'\\u22ea',\n'NotLeftTriangleBar;':'\\u29cf\\u0338',\n'NotLeftTriangleEqual;':'\\u22ec',\n'NotLess;':'\\u226e',\n'NotLessEqual;':'\\u2270',\n'NotLessGreater;':'\\u2278',\n'NotLessLess;':'\\u226a\\u0338',\n'NotLessSlantEqual;':'\\u2a7d\\u0338',\n'NotLessTilde;':'\\u2274',\n'NotNestedGreaterGreater;':'\\u2aa2\\u0338',\n'NotNestedLessLess;':'\\u2aa1\\u0338',\n'notni;':'\\u220c',\n'notniva;':'\\u220c',\n'notnivb;':'\\u22fe',\n'notnivc;':'\\u22fd',\n'NotPrecedes;':'\\u2280',\n'NotPrecedesEqual;':'\\u2aaf\\u0338',\n'NotPrecedesSlantEqual;':'\\u22e0',\n'NotReverseElement;':'\\u220c',\n'NotRightTriangle;':'\\u22eb',\n'NotRightTriangleBar;':'\\u29d0\\u0338',\n'NotRightTriangleEqual;':'\\u22ed',\n'NotSquareSubset;':'\\u228f\\u0338',\n'NotSquareSubsetEqual;':'\\u22e2',\n'NotSquareSuperset;':'\\u2290\\u0338',\n'NotSquareSupersetEqual;':'\\u22e3',\n'NotSubset;':'\\u2282\\u20d2',\n'NotSubsetEqual;':'\\u2288',\n'NotSucceeds;':'\\u2281',\n'NotSucceedsEqual;':'\\u2ab0\\u0338',\n'NotSucceedsSlantEqual;':'\\u22e1',\n'NotSucceedsTilde;':'\\u227f\\u0338',\n'NotSuperset;':'\\u2283\\u20d2',\n'NotSupersetEqual;':'\\u2289',\n'NotTilde;':'\\u2241',\n'NotTildeEqual;':'\\u2244',\n'NotTildeFullEqual;':'\\u2247',\n'NotTildeTilde;':'\\u2249',\n'NotVerticalBar;':'\\u2224',\n'npar;':'\\u2226',\n'nparallel;':'\\u2226',\n'nparsl;':'\\u2afd\\u20e5',\n'npart;':'\\u2202\\u0338',\n'npolint;':'\\u2a14',\n'npr;':'\\u2280',\n'nprcue;':'\\u22e0',\n'npre;':'\\u2aaf\\u0338',\n'nprec;':'\\u2280',\n'npreceq;':'\\u2aaf\\u0338',\n'nrArr;':'\\u21cf',\n'nrarr;':'\\u219b',\n'nrarrc;':'\\u2933\\u0338',\n'nrarrw;':'\\u219d\\u0338',\n'nRightarrow;':'\\u21cf',\n'nrightarrow;':'\\u219b',\n'nrtri;':'\\u22eb',\n'nrtrie;':'\\u22ed',\n'nsc;':'\\u2281',\n'nsccue;':'\\u22e1',\n'nsce;':'\\u2ab0\\u0338',\n'Nscr;':'\\U0001d4a9',\n'nscr;':'\\U0001d4c3',\n'nshortmid;':'\\u2224',\n'nshortparallel;':'\\u2226',\n'nsim;':'\\u2241',\n'nsime;':'\\u2244',\n'nsimeq;':'\\u2244',\n'nsmid;':'\\u2224',\n'nspar;':'\\u2226',\n'nsqsube;':'\\u22e2',\n'nsqsupe;':'\\u22e3',\n'nsub;':'\\u2284',\n'nsubE;':'\\u2ac5\\u0338',\n'nsube;':'\\u2288',\n'nsubset;':'\\u2282\\u20d2',\n'nsubseteq;':'\\u2288',\n'nsubseteqq;':'\\u2ac5\\u0338',\n'nsucc;
':'\\u2281',\n'nsucceq;':'\\u2ab0\\u0338',\n'nsup;':'\\u2285',\n'nsupE;':'\\u2ac6\\u0338',\n'nsupe;':'\\u2289',\n'nsupset;':'\\u2283\\u20d2',\n'nsupseteq;':'\\u2289',\n'nsupseteqq;':'\\u2ac6\\u0338',\n'ntgl;':'\\u2279',\n'Ntilde':'\\xd1',\n'ntilde':'\\xf1',\n'Ntilde;':'\\xd1',\n'ntilde;':'\\xf1',\n'ntlg;':'\\u2278',\n'ntriangleleft;':'\\u22ea',\n'ntrianglelefteq;':'\\u22ec',\n'ntriangleright;':'\\u22eb',\n'ntrianglerighteq;':'\\u22ed',\n'Nu;':'\\u039d',\n'nu;':'\\u03bd',\n'num;':'#',\n'numero;':'\\u2116',\n'numsp;':'\\u2007',\n'nvap;':'\\u224d\\u20d2',\n'nVDash;':'\\u22af',\n'nVdash;':'\\u22ae',\n'nvDash;':'\\u22ad',\n'nvdash;':'\\u22ac',\n'nvge;':'\\u2265\\u20d2',\n'nvgt;':'>\\u20d2',\n'nvHarr;':'\\u2904',\n'nvinfin;':'\\u29de',\n'nvlArr;':'\\u2902',\n'nvle;':'\\u2264\\u20d2',\n'nvlt;':'<\\u20d2',\n'nvltrie;':'\\u22b4\\u20d2',\n'nvrArr;':'\\u2903',\n'nvrtrie;':'\\u22b5\\u20d2',\n'nvsim;':'\\u223c\\u20d2',\n'nwarhk;':'\\u2923',\n'nwArr;':'\\u21d6',\n'nwarr;':'\\u2196',\n'nwarrow;':'\\u2196',\n'nwnear;':'\\u2927',\n'Oacute':'\\xd3',\n'oacute':'\\xf3',\n'Oacute;':'\\xd3',\n'oacute;':'\\xf3',\n'oast;':'\\u229b',\n'ocir;':'\\u229a',\n'Ocirc':'\\xd4',\n'ocirc':'\\xf4',\n'Ocirc;':'\\xd4',\n'ocirc;':'\\xf4',\n'Ocy;':'\\u041e',\n'ocy;':'\\u043e',\n'odash;':'\\u229d',\n'Odblac;':'\\u0150',\n'odblac;':'\\u0151',\n'odiv;':'\\u2a38',\n'odot;':'\\u2299',\n'odsold;':'\\u29bc',\n'OElig;':'\\u0152',\n'oelig;':'\\u0153',\n'ofcir;':'\\u29bf',\n'Ofr;':'\\U0001d512',\n'ofr;':'\\U0001d52c',\n'ogon;':'\\u02db',\n'Ograve':'\\xd2',\n'ograve':'\\xf2',\n'Ograve;':'\\xd2',\n'ograve;':'\\xf2',\n'ogt;':'\\u29c1',\n'ohbar;':'\\u29b5',\n'ohm;':'\\u03a9',\n'oint;':'\\u222e',\n'olarr;':'\\u21ba',\n'olcir;':'\\u29be',\n'olcross;':'\\u29bb',\n'oline;':'\\u203e',\n'olt;':'\\u29c0',\n'Omacr;':'\\u014c',\n'omacr;':'\\u014d',\n'Omega;':'\\u03a9',\n'omega;':'\\u03c9',\n'Omicron;':'\\u039f',\n'omicron;':'\\u03bf',\n'omid;':'\\u29b6',\n'ominus;':'\\u2296',\n'Oopf;':'\\U0001d546',\n'oopf;':'\\U0001d560',\n'opar;':'\\u29b7',\n'OpenCurlyDoubleQuote;':'\\u201c',\n'OpenCurlyQuote;':'\\u2018',\n'operp;':'\\u29b9',\n'oplus;':'\\u2295',\n'Or;':'\\u2a54',\n'or;':'\\u2228',\n'orarr;':'\\u21bb',\n'ord;':'\\u2a5d',\n'order;':'\\u2134',\n'orderof;':'\\u2134',\n'ordf':'\\xaa',\n'ordf;':'\\xaa',\n'ordm':'\\xba',\n'ordm;':'\\xba',\n'origof;':'\\u22b6',\n'oror;':'\\u2a56',\n'orslope;':'\\u2a57',\n'orv;':'\\u2a5b',\n'oS;':'\\u24c8',\n'Oscr;':'\\U0001d4aa',\n'oscr;':'\\u2134',\n'Oslash':'\\xd8',\n'oslash':'\\xf8',\n'Oslash;':'\\xd8',\n'oslash;':'\\xf8',\n'osol;':'\\u2298',\n'Otilde':'\\xd5',\n'otilde':'\\xf5',\n'Otilde;':'\\xd5',\n'otilde;':'\\xf5',\n'Otimes;':'\\u2a37',\n'otimes;':'\\u2297',\n'otimesas;':'\\u2a36',\n'Ouml':'\\xd6',\n'ouml':'\\xf6',\n'Ouml;':'\\xd6',\n'ouml;':'\\xf6',\n'ovbar;':'\\u233d',\n'OverBar;':'\\u203e',\n'OverBrace;':'\\u23de',\n'OverBracket;':'\\u23b4',\n'OverParenthesis;':'\\u23dc',\n'par;':'\\u2225',\n'para':'\\xb6',\n'para;':'\\xb6',\n'parallel;':'\\u2225',\n'parsim;':'\\u2af3',\n'parsl;':'\\u2afd',\n'part;':'\\u2202',\n'PartialD;':'\\u2202',\n'Pcy;':'\\u041f',\n'pcy;':'\\u043f',\n'percnt;':'%',\n'period;':'.',\n'permil;':'\\u2030',\n'perp;':'\\u22a5',\n'pertenk;':'\\u2031',\n'Pfr;':'\\U0001d513',\n'pfr;':'\\U0001d52d',\n'Phi;':'\\u03a6',\n'phi;':'\\u03c6',\n'phiv;':'\\u03d5',\n'phmmat;':'\\u2133',\n'phone;':'\\u260e',\n'Pi;':'\\u03a0',\n'pi;':'\\u03c0',\n'pitchfork;':'\\u22d4',\n'piv;':'\\u03d6',\n'planck;':'\\u210f',\n'planckh;':'\\u210e',\n'plankv;':'\\u210f',\n'plus;':'+',\n'plusacir;':'\\u2a23',\n'plusb;':'\\u229e',\
n'pluscir;':'\\u2a22',\n'plusdo;':'\\u2214',\n'plusdu;':'\\u2a25',\n'pluse;':'\\u2a72',\n'PlusMinus;':'\\xb1',\n'plusmn':'\\xb1',\n'plusmn;':'\\xb1',\n'plussim;':'\\u2a26',\n'plustwo;':'\\u2a27',\n'pm;':'\\xb1',\n'Poincareplane;':'\\u210c',\n'pointint;':'\\u2a15',\n'Popf;':'\\u2119',\n'popf;':'\\U0001d561',\n'pound':'\\xa3',\n'pound;':'\\xa3',\n'Pr;':'\\u2abb',\n'pr;':'\\u227a',\n'prap;':'\\u2ab7',\n'prcue;':'\\u227c',\n'prE;':'\\u2ab3',\n'pre;':'\\u2aaf',\n'prec;':'\\u227a',\n'precapprox;':'\\u2ab7',\n'preccurlyeq;':'\\u227c',\n'Precedes;':'\\u227a',\n'PrecedesEqual;':'\\u2aaf',\n'PrecedesSlantEqual;':'\\u227c',\n'PrecedesTilde;':'\\u227e',\n'preceq;':'\\u2aaf',\n'precnapprox;':'\\u2ab9',\n'precneqq;':'\\u2ab5',\n'precnsim;':'\\u22e8',\n'precsim;':'\\u227e',\n'Prime;':'\\u2033',\n'prime;':'\\u2032',\n'primes;':'\\u2119',\n'prnap;':'\\u2ab9',\n'prnE;':'\\u2ab5',\n'prnsim;':'\\u22e8',\n'prod;':'\\u220f',\n'Product;':'\\u220f',\n'profalar;':'\\u232e',\n'profline;':'\\u2312',\n'profsurf;':'\\u2313',\n'prop;':'\\u221d',\n'Proportion;':'\\u2237',\n'Proportional;':'\\u221d',\n'propto;':'\\u221d',\n'prsim;':'\\u227e',\n'prurel;':'\\u22b0',\n'Pscr;':'\\U0001d4ab',\n'pscr;':'\\U0001d4c5',\n'Psi;':'\\u03a8',\n'psi;':'\\u03c8',\n'puncsp;':'\\u2008',\n'Qfr;':'\\U0001d514',\n'qfr;':'\\U0001d52e',\n'qint;':'\\u2a0c',\n'Qopf;':'\\u211a',\n'qopf;':'\\U0001d562',\n'qprime;':'\\u2057',\n'Qscr;':'\\U0001d4ac',\n'qscr;':'\\U0001d4c6',\n'quaternions;':'\\u210d',\n'quatint;':'\\u2a16',\n'quest;':'?',\n'questeq;':'\\u225f',\n'QUOT':'\"',\n'quot':'\"',\n'QUOT;':'\"',\n'quot;':'\"',\n'rAarr;':'\\u21db',\n'race;':'\\u223d\\u0331',\n'Racute;':'\\u0154',\n'racute;':'\\u0155',\n'radic;':'\\u221a',\n'raemptyv;':'\\u29b3',\n'Rang;':'\\u27eb',\n'rang;':'\\u27e9',\n'rangd;':'\\u2992',\n'range;':'\\u29a5',\n'rangle;':'\\u27e9',\n'raquo':'\\xbb',\n'raquo;':'\\xbb',\n'Rarr;':'\\u21a0',\n'rArr;':'\\u21d2',\n'rarr;':'\\u2192',\n'rarrap;':'\\u2975',\n'rarrb;':'\\u21e5',\n'rarrbfs;':'\\u2920',\n'rarrc;':'\\u2933',\n'rarrfs;':'\\u291e',\n'rarrhk;':'\\u21aa',\n'rarrlp;':'\\u21ac',\n'rarrpl;':'\\u2945',\n'rarrsim;':'\\u2974',\n'Rarrtl;':'\\u2916',\n'rarrtl;':'\\u21a3',\n'rarrw;':'\\u219d',\n'rAtail;':'\\u291c',\n'ratail;':'\\u291a',\n'ratio;':'\\u2236',\n'rationals;':'\\u211a',\n'RBarr;':'\\u2910',\n'rBarr;':'\\u290f',\n'rbarr;':'\\u290d',\n'rbbrk;':'\\u2773',\n'rbrace;':'}',\n'rbrack;':']',\n'rbrke;':'\\u298c',\n'rbrksld;':'\\u298e',\n'rbrkslu;':'\\u2990',\n'Rcaron;':'\\u0158',\n'rcaron;':'\\u0159',\n'Rcedil;':'\\u0156',\n'rcedil;':'\\u0157',\n'rceil;':'\\u2309',\n'rcub;':'}',\n'Rcy;':'\\u0420',\n'rcy;':'\\u0440',\n'rdca;':'\\u2937',\n'rdldhar;':'\\u2969',\n'rdquo;':'\\u201d',\n'rdquor;':'\\u201d',\n'rdsh;':'\\u21b3',\n'Re;':'\\u211c',\n'real;':'\\u211c',\n'realine;':'\\u211b',\n'realpart;':'\\u211c',\n'reals;':'\\u211d',\n'rect;':'\\u25ad',\n'REG':'\\xae',\n'reg':'\\xae',\n'REG;':'\\xae',\n'reg;':'\\xae',\n'ReverseElement;':'\\u220b',\n'ReverseEquilibrium;':'\\u21cb',\n'ReverseUpEquilibrium;':'\\u296f',\n'rfisht;':'\\u297d',\n'rfloor;':'\\u230b',\n'Rfr;':'\\u211c',\n'rfr;':'\\U0001d52f',\n'rHar;':'\\u2964',\n'rhard;':'\\u21c1',\n'rharu;':'\\u21c0',\n'rharul;':'\\u296c',\n'Rho;':'\\u03a1',\n'rho;':'\\u03c1',\n'rhov;':'\\u03f1',\n'RightAngleBracket;':'\\u27e9',\n'RightArrow;':'\\u2192',\n'Rightarrow;':'\\u21d2',\n'rightarrow;':'\\u2192',\n'RightArrowBar;':'\\u21e5',\n'RightArrowLeftArrow;':'\\u21c4',\n'rightarrowtail;':'\\u21a3',\n'RightCeiling;':'\\u2309',\n'RightDoubleBracket;':'\\u27e7',\n'RightDownTeeVector;':'\\u295d',\n'RightD
ownVector;':'\\u21c2',\n'RightDownVectorBar;':'\\u2955',\n'RightFloor;':'\\u230b',\n'rightharpoondown;':'\\u21c1',\n'rightharpoonup;':'\\u21c0',\n'rightleftarrows;':'\\u21c4',\n'rightleftharpoons;':'\\u21cc',\n'rightrightarrows;':'\\u21c9',\n'rightsquigarrow;':'\\u219d',\n'RightTee;':'\\u22a2',\n'RightTeeArrow;':'\\u21a6',\n'RightTeeVector;':'\\u295b',\n'rightthreetimes;':'\\u22cc',\n'RightTriangle;':'\\u22b3',\n'RightTriangleBar;':'\\u29d0',\n'RightTriangleEqual;':'\\u22b5',\n'RightUpDownVector;':'\\u294f',\n'RightUpTeeVector;':'\\u295c',\n'RightUpVector;':'\\u21be',\n'RightUpVectorBar;':'\\u2954',\n'RightVector;':'\\u21c0',\n'RightVectorBar;':'\\u2953',\n'ring;':'\\u02da',\n'risingdotseq;':'\\u2253',\n'rlarr;':'\\u21c4',\n'rlhar;':'\\u21cc',\n'rlm;':'\\u200f',\n'rmoust;':'\\u23b1',\n'rmoustache;':'\\u23b1',\n'rnmid;':'\\u2aee',\n'roang;':'\\u27ed',\n'roarr;':'\\u21fe',\n'robrk;':'\\u27e7',\n'ropar;':'\\u2986',\n'Ropf;':'\\u211d',\n'ropf;':'\\U0001d563',\n'roplus;':'\\u2a2e',\n'rotimes;':'\\u2a35',\n'RoundImplies;':'\\u2970',\n'rpar;':')',\n'rpargt;':'\\u2994',\n'rppolint;':'\\u2a12',\n'rrarr;':'\\u21c9',\n'Rrightarrow;':'\\u21db',\n'rsaquo;':'\\u203a',\n'Rscr;':'\\u211b',\n'rscr;':'\\U0001d4c7',\n'Rsh;':'\\u21b1',\n'rsh;':'\\u21b1',\n'rsqb;':']',\n'rsquo;':'\\u2019',\n'rsquor;':'\\u2019',\n'rthree;':'\\u22cc',\n'rtimes;':'\\u22ca',\n'rtri;':'\\u25b9',\n'rtrie;':'\\u22b5',\n'rtrif;':'\\u25b8',\n'rtriltri;':'\\u29ce',\n'RuleDelayed;':'\\u29f4',\n'ruluhar;':'\\u2968',\n'rx;':'\\u211e',\n'Sacute;':'\\u015a',\n'sacute;':'\\u015b',\n'sbquo;':'\\u201a',\n'Sc;':'\\u2abc',\n'sc;':'\\u227b',\n'scap;':'\\u2ab8',\n'Scaron;':'\\u0160',\n'scaron;':'\\u0161',\n'sccue;':'\\u227d',\n'scE;':'\\u2ab4',\n'sce;':'\\u2ab0',\n'Scedil;':'\\u015e',\n'scedil;':'\\u015f',\n'Scirc;':'\\u015c',\n'scirc;':'\\u015d',\n'scnap;':'\\u2aba',\n'scnE;':'\\u2ab6',\n'scnsim;':'\\u22e9',\n'scpolint;':'\\u2a13',\n'scsim;':'\\u227f',\n'Scy;':'\\u0421',\n'scy;':'\\u0441',\n'sdot;':'\\u22c5',\n'sdotb;':'\\u22a1',\n'sdote;':'\\u2a66',\n'searhk;':'\\u2925',\n'seArr;':'\\u21d8',\n'searr;':'\\u2198',\n'searrow;':'\\u2198',\n'sect':'\\xa7',\n'sect;':'\\xa7',\n'semi;':';',\n'seswar;':'\\u2929',\n'setminus;':'\\u2216',\n'setmn;':'\\u2216',\n'sext;':'\\u2736',\n'Sfr;':'\\U0001d516',\n'sfr;':'\\U0001d530',\n'sfrown;':'\\u2322',\n'sharp;':'\\u266f',\n'SHCHcy;':'\\u0429',\n'shchcy;':'\\u0449',\n'SHcy;':'\\u0428',\n'shcy;':'\\u0448',\n'ShortDownArrow;':'\\u2193',\n'ShortLeftArrow;':'\\u2190',\n'shortmid;':'\\u2223',\n'shortparallel;':'\\u2225',\n'ShortRightArrow;':'\\u2192',\n'ShortUpArrow;':'\\u2191',\n'shy':'\\xad',\n'shy;':'\\xad',\n'Sigma;':'\\u03a3',\n'sigma;':'\\u03c3',\n'sigmaf;':'\\u03c2',\n'sigmav;':'\\u03c2',\n'sim;':'\\u223c',\n'simdot;':'\\u2a6a',\n'sime;':'\\u2243',\n'simeq;':'\\u2243',\n'simg;':'\\u2a9e',\n'simgE;':'\\u2aa0',\n'siml;':'\\u2a9d',\n'simlE;':'\\u2a9f',\n'simne;':'\\u2246',\n'simplus;':'\\u2a24',\n'simrarr;':'\\u2972',\n'slarr;':'\\u2190',\n'SmallCircle;':'\\u2218',\n'smallsetminus;':'\\u2216',\n'smashp;':'\\u2a33',\n'smeparsl;':'\\u29e4',\n'smid;':'\\u2223',\n'smile;':'\\u2323',\n'smt;':'\\u2aaa',\n'smte;':'\\u2aac',\n'smtes;':'\\u2aac\\ufe00',\n'SOFTcy;':'\\u042c',\n'softcy;':'\\u044c',\n'sol;':'/',\n'solb;':'\\u29c4',\n'solbar;':'\\u233f',\n'Sopf;':'\\U0001d54a',\n'sopf;':'\\U0001d564',\n'spades;':'\\u2660',\n'spadesuit;':'\\u2660',\n'spar;':'\\u2225',\n'sqcap;':'\\u2293',\n'sqcaps;':'\\u2293\\ufe00',\n'sqcup;':'\\u2294',\n'sqcups;':'\\u2294\\ufe00',\n'Sqrt;':'\\u221a',\n'sqsub;':'\\u228f',\n'sqsube;':'\\u2291',\n
'sqsubset;':'\\u228f',\n'sqsubseteq;':'\\u2291',\n'sqsup;':'\\u2290',\n'sqsupe;':'\\u2292',\n'sqsupset;':'\\u2290',\n'sqsupseteq;':'\\u2292',\n'squ;':'\\u25a1',\n'Square;':'\\u25a1',\n'square;':'\\u25a1',\n'SquareIntersection;':'\\u2293',\n'SquareSubset;':'\\u228f',\n'SquareSubsetEqual;':'\\u2291',\n'SquareSuperset;':'\\u2290',\n'SquareSupersetEqual;':'\\u2292',\n'SquareUnion;':'\\u2294',\n'squarf;':'\\u25aa',\n'squf;':'\\u25aa',\n'srarr;':'\\u2192',\n'Sscr;':'\\U0001d4ae',\n'sscr;':'\\U0001d4c8',\n'ssetmn;':'\\u2216',\n'ssmile;':'\\u2323',\n'sstarf;':'\\u22c6',\n'Star;':'\\u22c6',\n'star;':'\\u2606',\n'starf;':'\\u2605',\n'straightepsilon;':'\\u03f5',\n'straightphi;':'\\u03d5',\n'strns;':'\\xaf',\n'Sub;':'\\u22d0',\n'sub;':'\\u2282',\n'subdot;':'\\u2abd',\n'subE;':'\\u2ac5',\n'sube;':'\\u2286',\n'subedot;':'\\u2ac3',\n'submult;':'\\u2ac1',\n'subnE;':'\\u2acb',\n'subne;':'\\u228a',\n'subplus;':'\\u2abf',\n'subrarr;':'\\u2979',\n'Subset;':'\\u22d0',\n'subset;':'\\u2282',\n'subseteq;':'\\u2286',\n'subseteqq;':'\\u2ac5',\n'SubsetEqual;':'\\u2286',\n'subsetneq;':'\\u228a',\n'subsetneqq;':'\\u2acb',\n'subsim;':'\\u2ac7',\n'subsub;':'\\u2ad5',\n'subsup;':'\\u2ad3',\n'succ;':'\\u227b',\n'succapprox;':'\\u2ab8',\n'succcurlyeq;':'\\u227d',\n'Succeeds;':'\\u227b',\n'SucceedsEqual;':'\\u2ab0',\n'SucceedsSlantEqual;':'\\u227d',\n'SucceedsTilde;':'\\u227f',\n'succeq;':'\\u2ab0',\n'succnapprox;':'\\u2aba',\n'succneqq;':'\\u2ab6',\n'succnsim;':'\\u22e9',\n'succsim;':'\\u227f',\n'SuchThat;':'\\u220b',\n'Sum;':'\\u2211',\n'sum;':'\\u2211',\n'sung;':'\\u266a',\n'sup1':'\\xb9',\n'sup1;':'\\xb9',\n'sup2':'\\xb2',\n'sup2;':'\\xb2',\n'sup3':'\\xb3',\n'sup3;':'\\xb3',\n'Sup;':'\\u22d1',\n'sup;':'\\u2283',\n'supdot;':'\\u2abe',\n'supdsub;':'\\u2ad8',\n'supE;':'\\u2ac6',\n'supe;':'\\u2287',\n'supedot;':'\\u2ac4',\n'Superset;':'\\u2283',\n'SupersetEqual;':'\\u2287',\n'suphsol;':'\\u27c9',\n'suphsub;':'\\u2ad7',\n'suplarr;':'\\u297b',\n'supmult;':'\\u2ac2',\n'supnE;':'\\u2acc',\n'supne;':'\\u228b',\n'supplus;':'\\u2ac0',\n'Supset;':'\\u22d1',\n'supset;':'\\u2283',\n'supseteq;':'\\u2287',\n'supseteqq;':'\\u2ac6',\n'supsetneq;':'\\u228b',\n'supsetneqq;':'\\u2acc',\n'supsim;':'\\u2ac8',\n'supsub;':'\\u2ad4',\n'supsup;':'\\u2ad6',\n'swarhk;':'\\u2926',\n'swArr;':'\\u21d9',\n'swarr;':'\\u2199',\n'swarrow;':'\\u2199',\n'swnwar;':'\\u292a',\n'szlig':'\\xdf',\n'szlig;':'\\xdf',\n'Tab;':'\\t',\n'target;':'\\u2316',\n'Tau;':'\\u03a4',\n'tau;':'\\u03c4',\n'tbrk;':'\\u23b4',\n'Tcaron;':'\\u0164',\n'tcaron;':'\\u0165',\n'Tcedil;':'\\u0162',\n'tcedil;':'\\u0163',\n'Tcy;':'\\u0422',\n'tcy;':'\\u0442',\n'tdot;':'\\u20db',\n'telrec;':'\\u2315',\n'Tfr;':'\\U0001d517',\n'tfr;':'\\U0001d531',\n'there4;':'\\u2234',\n'Therefore;':'\\u2234',\n'therefore;':'\\u2234',\n'Theta;':'\\u0398',\n'theta;':'\\u03b8',\n'thetasym;':'\\u03d1',\n'thetav;':'\\u03d1',\n'thickapprox;':'\\u2248',\n'thicksim;':'\\u223c',\n'ThickSpace;':'\\u205f\\u200a',\n'thinsp;':'\\u2009',\n'ThinSpace;':'\\u2009',\n'thkap;':'\\u2248',\n'thksim;':'\\u223c',\n'THORN':'\\xde',\n'thorn':'\\xfe',\n'THORN;':'\\xde',\n'thorn;':'\\xfe',\n'Tilde;':'\\u223c',\n'tilde;':'\\u02dc',\n'TildeEqual;':'\\u2243',\n'TildeFullEqual;':'\\u2245',\n'TildeTilde;':'\\u2248',\n'times':'\\xd7',\n'times;':'\\xd7',\n'timesb;':'\\u22a0',\n'timesbar;':'\\u2a31',\n'timesd;':'\\u2a30',\n'tint;':'\\u222d',\n'toea;':'\\u2928',\n'top;':'\\u22a4',\n'topbot;':'\\u2336',\n'topcir;':'\\u2af1',\n'Topf;':'\\U0001d54b',\n'topf;':'\\U0001d565',\n'topfork;':'\\u2ada',\n'tosa;':'\\u2929',\n'tprime;':'\\u2034',\n'TRADE
;':'\\u2122',\n'trade;':'\\u2122',\n'triangle;':'\\u25b5',\n'triangledown;':'\\u25bf',\n'triangleleft;':'\\u25c3',\n'trianglelefteq;':'\\u22b4',\n'triangleq;':'\\u225c',\n'triangleright;':'\\u25b9',\n'trianglerighteq;':'\\u22b5',\n'tridot;':'\\u25ec',\n'trie;':'\\u225c',\n'triminus;':'\\u2a3a',\n'TripleDot;':'\\u20db',\n'triplus;':'\\u2a39',\n'trisb;':'\\u29cd',\n'tritime;':'\\u2a3b',\n'trpezium;':'\\u23e2',\n'Tscr;':'\\U0001d4af',\n'tscr;':'\\U0001d4c9',\n'TScy;':'\\u0426',\n'tscy;':'\\u0446',\n'TSHcy;':'\\u040b',\n'tshcy;':'\\u045b',\n'Tstrok;':'\\u0166',\n'tstrok;':'\\u0167',\n'twixt;':'\\u226c',\n'twoheadleftarrow;':'\\u219e',\n'twoheadrightarrow;':'\\u21a0',\n'Uacute':'\\xda',\n'uacute':'\\xfa',\n'Uacute;':'\\xda',\n'uacute;':'\\xfa',\n'Uarr;':'\\u219f',\n'uArr;':'\\u21d1',\n'uarr;':'\\u2191',\n'Uarrocir;':'\\u2949',\n'Ubrcy;':'\\u040e',\n'ubrcy;':'\\u045e',\n'Ubreve;':'\\u016c',\n'ubreve;':'\\u016d',\n'Ucirc':'\\xdb',\n'ucirc':'\\xfb',\n'Ucirc;':'\\xdb',\n'ucirc;':'\\xfb',\n'Ucy;':'\\u0423',\n'ucy;':'\\u0443',\n'udarr;':'\\u21c5',\n'Udblac;':'\\u0170',\n'udblac;':'\\u0171',\n'udhar;':'\\u296e',\n'ufisht;':'\\u297e',\n'Ufr;':'\\U0001d518',\n'ufr;':'\\U0001d532',\n'Ugrave':'\\xd9',\n'ugrave':'\\xf9',\n'Ugrave;':'\\xd9',\n'ugrave;':'\\xf9',\n'uHar;':'\\u2963',\n'uharl;':'\\u21bf',\n'uharr;':'\\u21be',\n'uhblk;':'\\u2580',\n'ulcorn;':'\\u231c',\n'ulcorner;':'\\u231c',\n'ulcrop;':'\\u230f',\n'ultri;':'\\u25f8',\n'Umacr;':'\\u016a',\n'umacr;':'\\u016b',\n'uml':'\\xa8',\n'uml;':'\\xa8',\n'UnderBar;':'_',\n'UnderBrace;':'\\u23df',\n'UnderBracket;':'\\u23b5',\n'UnderParenthesis;':'\\u23dd',\n'Union;':'\\u22c3',\n'UnionPlus;':'\\u228e',\n'Uogon;':'\\u0172',\n'uogon;':'\\u0173',\n'Uopf;':'\\U0001d54c',\n'uopf;':'\\U0001d566',\n'UpArrow;':'\\u2191',\n'Uparrow;':'\\u21d1',\n'uparrow;':'\\u2191',\n'UpArrowBar;':'\\u2912',\n'UpArrowDownArrow;':'\\u21c5',\n'UpDownArrow;':'\\u2195',\n'Updownarrow;':'\\u21d5',\n'updownarrow;':'\\u2195',\n'UpEquilibrium;':'\\u296e',\n'upharpoonleft;':'\\u21bf',\n'upharpoonright;':'\\u21be',\n'uplus;':'\\u228e',\n'UpperLeftArrow;':'\\u2196',\n'UpperRightArrow;':'\\u2197',\n'Upsi;':'\\u03d2',\n'upsi;':'\\u03c5',\n'upsih;':'\\u03d2',\n'Upsilon;':'\\u03a5',\n'upsilon;':'\\u03c5',\n'UpTee;':'\\u22a5',\n'UpTeeArrow;':'\\u21a5',\n'upuparrows;':'\\u21c8',\n'urcorn;':'\\u231d',\n'urcorner;':'\\u231d',\n'urcrop;':'\\u230e',\n'Uring;':'\\u016e',\n'uring;':'\\u016f',\n'urtri;':'\\u25f9',\n'Uscr;':'\\U0001d4b0',\n'uscr;':'\\U0001d4ca',\n'utdot;':'\\u22f0',\n'Utilde;':'\\u0168',\n'utilde;':'\\u0169',\n'utri;':'\\u25b5',\n'utrif;':'\\u25b4',\n'uuarr;':'\\u21c8',\n'Uuml':'\\xdc',\n'uuml':'\\xfc',\n'Uuml;':'\\xdc',\n'uuml;':'\\xfc',\n'uwangle;':'\\u29a7',\n'vangrt;':'\\u299c',\n'varepsilon;':'\\u03f5',\n'varkappa;':'\\u03f0',\n'varnothing;':'\\u2205',\n'varphi;':'\\u03d5',\n'varpi;':'\\u03d6',\n'varpropto;':'\\u221d',\n'vArr;':'\\u21d5',\n'varr;':'\\u2195',\n'varrho;':'\\u03f1',\n'varsigma;':'\\u03c2',\n'varsubsetneq;':'\\u228a\\ufe00',\n'varsubsetneqq;':'\\u2acb\\ufe00',\n'varsupsetneq;':'\\u228b\\ufe00',\n'varsupsetneqq;':'\\u2acc\\ufe00',\n'vartheta;':'\\u03d1',\n'vartriangleleft;':'\\u22b2',\n'vartriangleright;':'\\u22b3',\n'Vbar;':'\\u2aeb',\n'vBar;':'\\u2ae8',\n'vBarv;':'\\u2ae9',\n'Vcy;':'\\u0412',\n'vcy;':'\\u0432',\n'VDash;':'\\u22ab',\n'Vdash;':'\\u22a9',\n'vDash;':'\\u22a8',\n'vdash;':'\\u22a2',\n'Vdashl;':'\\u2ae6',\n'Vee;':'\\u22c1',\n'vee;':'\\u2228',\n'veebar;':'\\u22bb',\n'veeeq;':'\\u225a',\n'vellip;':'\\u22ee',\n'Verbar;':'\\u2016',\n'verbar;':'|',\n'Vert;':'\\u2016',\
n'vert;':'|',\n'VerticalBar;':'\\u2223',\n'VerticalLine;':'|',\n'VerticalSeparator;':'\\u2758',\n'VerticalTilde;':'\\u2240',\n'VeryThinSpace;':'\\u200a',\n'Vfr;':'\\U0001d519',\n'vfr;':'\\U0001d533',\n'vltri;':'\\u22b2',\n'vnsub;':'\\u2282\\u20d2',\n'vnsup;':'\\u2283\\u20d2',\n'Vopf;':'\\U0001d54d',\n'vopf;':'\\U0001d567',\n'vprop;':'\\u221d',\n'vrtri;':'\\u22b3',\n'Vscr;':'\\U0001d4b1',\n'vscr;':'\\U0001d4cb',\n'vsubnE;':'\\u2acb\\ufe00',\n'vsubne;':'\\u228a\\ufe00',\n'vsupnE;':'\\u2acc\\ufe00',\n'vsupne;':'\\u228b\\ufe00',\n'Vvdash;':'\\u22aa',\n'vzigzag;':'\\u299a',\n'Wcirc;':'\\u0174',\n'wcirc;':'\\u0175',\n'wedbar;':'\\u2a5f',\n'Wedge;':'\\u22c0',\n'wedge;':'\\u2227',\n'wedgeq;':'\\u2259',\n'weierp;':'\\u2118',\n'Wfr;':'\\U0001d51a',\n'wfr;':'\\U0001d534',\n'Wopf;':'\\U0001d54e',\n'wopf;':'\\U0001d568',\n'wp;':'\\u2118',\n'wr;':'\\u2240',\n'wreath;':'\\u2240',\n'Wscr;':'\\U0001d4b2',\n'wscr;':'\\U0001d4cc',\n'xcap;':'\\u22c2',\n'xcirc;':'\\u25ef',\n'xcup;':'\\u22c3',\n'xdtri;':'\\u25bd',\n'Xfr;':'\\U0001d51b',\n'xfr;':'\\U0001d535',\n'xhArr;':'\\u27fa',\n'xharr;':'\\u27f7',\n'Xi;':'\\u039e',\n'xi;':'\\u03be',\n'xlArr;':'\\u27f8',\n'xlarr;':'\\u27f5',\n'xmap;':'\\u27fc',\n'xnis;':'\\u22fb',\n'xodot;':'\\u2a00',\n'Xopf;':'\\U0001d54f',\n'xopf;':'\\U0001d569',\n'xoplus;':'\\u2a01',\n'xotime;':'\\u2a02',\n'xrArr;':'\\u27f9',\n'xrarr;':'\\u27f6',\n'Xscr;':'\\U0001d4b3',\n'xscr;':'\\U0001d4cd',\n'xsqcup;':'\\u2a06',\n'xuplus;':'\\u2a04',\n'xutri;':'\\u25b3',\n'xvee;':'\\u22c1',\n'xwedge;':'\\u22c0',\n'Yacute':'\\xdd',\n'yacute':'\\xfd',\n'Yacute;':'\\xdd',\n'yacute;':'\\xfd',\n'YAcy;':'\\u042f',\n'yacy;':'\\u044f',\n'Ycirc;':'\\u0176',\n'ycirc;':'\\u0177',\n'Ycy;':'\\u042b',\n'ycy;':'\\u044b',\n'yen':'\\xa5',\n'yen;':'\\xa5',\n'Yfr;':'\\U0001d51c',\n'yfr;':'\\U0001d536',\n'YIcy;':'\\u0407',\n'yicy;':'\\u0457',\n'Yopf;':'\\U0001d550',\n'yopf;':'\\U0001d56a',\n'Yscr;':'\\U0001d4b4',\n'yscr;':'\\U0001d4ce',\n'YUcy;':'\\u042e',\n'yucy;':'\\u044e',\n'yuml':'\\xff',\n'Yuml;':'\\u0178',\n'yuml;':'\\xff',\n'Zacute;':'\\u0179',\n'zacute;':'\\u017a',\n'Zcaron;':'\\u017d',\n'zcaron;':'\\u017e',\n'Zcy;':'\\u0417',\n'zcy;':'\\u0437',\n'Zdot;':'\\u017b',\n'zdot;':'\\u017c',\n'zeetrf;':'\\u2128',\n'ZeroWidthSpace;':'\\u200b',\n'Zeta;':'\\u0396',\n'zeta;':'\\u03b6',\n'Zfr;':'\\u2128',\n'zfr;':'\\U0001d537',\n'ZHcy;':'\\u0416',\n'zhcy;':'\\u0436',\n'zigrarr;':'\\u21dd',\n'Zopf;':'\\u2124',\n'zopf;':'\\U0001d56b',\n'Zscr;':'\\U0001d4b5',\n'zscr;':'\\U0001d4cf',\n'zwj;':'\\u200d',\n'zwnj;':'\\u200c',\n}\n\n\ncodepoint2name={}\n\n\n\nentitydefs={}\n\nfor(name,codepoint)in name2codepoint.items():\n codepoint2name[codepoint]=name\n entitydefs[name]=chr(codepoint)\n \ndel name,codepoint\n", []], "browser": [".py", "", [], 1], "browser.object_storage": [".py", "import json\n\nclass _UnProvided():\n pass\n \n \nclass ObjectStorage():\n\n def __init__(self,storage):\n self.storage=storage\n \n def __delitem__(self,key):\n del self.storage[json.dumps(key)]\n \n def __getitem__(self,key):\n return json.loads(self.storage[json.dumps(key)])\n \n def __setitem__(self,key,value):\n self.storage[json.dumps(key)]=json.dumps(value)\n \n def __contains__(self,key):\n return json.dumps(key)in self.storage\n \n def get(self,key,default=None):\n if json.dumps(key)in self.storage:\n return self.storage[json.dumps(key)]\n return default\n \n def pop(self,key,default=_UnProvided()):\n if type(default)is _UnProvided or json.dumps(key)in self.storage:\n return json.loads(self.storage.pop(json.dumps(key)))\n return default\n \n def 
__iter__(self):\n keys=self.keys()\n return keys.__iter__()\n \n def keys(self):\n return[json.loads(key)for key in self.storage.keys()]\n \n def values(self):\n return[json.loads(val)for val in self.storage.values()]\n \n def items(self):\n return list(zip(self.keys(),self.values()))\n \n def clear(self):\n self.storage.clear()\n \n def __len__(self):\n return len(self.storage)\n", ["json"]], "browser.worker": [".py", "from _webworker import *\n", ["_webworker"]], "browser.session_storage": [".py", "\nimport sys\nfrom browser import window\nfrom .local_storage import LocalStorage\n\nhas_session_storage=hasattr(window,'sessionStorage')\n\nclass SessionStorage(LocalStorage):\n\n storage_type=\"session_storage\"\n \n def __init__(self):\n if not has_session_storage:\n raise EnvironmentError(\"SessionStorage not available\")\n self.store=window.sessionStorage\n \nif has_session_storage:\n storage=SessionStorage()\n", ["browser", "browser.local_storage", "sys"]], "browser.ui": [".py", "from . import html,window,console,document\n\n\nclass UIError(Exception):\n pass\n \n \nclass Border:\n\n def __init__(self,width=1,style='solid',color='#000',radius=None ):\n self.width=width\n self.style=style\n self.color=color\n self.radius=radius\n \n \nclass Font:\n\n def __init__(self,family='Arial',size=None ,weight='normal',\n style='normal'):\n self.family=family\n self.size=size\n self.weight=weight\n self.style=style\n \n \nclass _Directions:\n\n def __init__(self,*args,**kw):\n if len(args)==0:\n values=[0]*4\n elif len(args)==1:\n values=[args[0]]*4\n elif len(args)==2:\n values=[args[0],args[1]]*2\n elif len(args)==3:\n values=args+[0]\n elif len(args)==4:\n values=args\n else :\n raise ValueError('Padding expects at most 4 arguments, got '+\n f'{len(args)}')\n self.top,self.right,self.bottom,self.left=values\n if (x :=kw.get('x'))is not None :\n self.left=self.right=x\n if (y :=kw.get('y'))is not None :\n self.top=self.bottom=y\n if (top :=kw.get('top'))is not None :\n self.top=top\n if (right :=kw.get('right'))is not None :\n self.right=right\n if (bottom :=kw.get('bottom'))is not None :\n self.bottom=bottom\n if (left :=kw.get('left'))is not None :\n self.left=left\n \n \nclass _Coords:\n\n def __init__(self,left,top,width,height):\n self.left=left\n self.top=top\n self.width=width\n self.height=height\n \n \nclass Padding(_Directions):\n pass\n \n \nclass Mouse:\n\n def __str__(self):\n return f''\n \nmouse=Mouse()\n\nclass Rows:\n\n def __init__(self,widget):\n self.widget=widget\n self._rows=[]\n if hasattr(widget,'_table'):\n console.log('_table',widget._table)\n for row in self._widget.rows:\n cells=[]\n for cell in row.cells:\n cells.append(cell.firstChild)\n self._rows.append(cells)\n return self._rows\n \n \nclass Widget:\n\n def __init_subclass__(cls):\n cls.__str__=Widget.__str__\n \n def __str__(self):\n return f''\n \n def add(self,widget,row='same',column=None ,**kw):\n widget.master=self\n widget.config(**widget._options)\n widget.grid(row=row,column=column,**kw)\n widget.kw=kw\n \n def add_row(self,widgets,row='next',column_start=0,**kw):\n ''\n for i,widget in enumerate(widgets):\n if i ==0:\n self.add(widget,row=row,column=column_start,**kw)\n else :\n self.add(widget,**kw)\n \n def add_from_table(self,table,**kw):\n ''\n \n for line in table:\n self.add(Label(line[0]),row='next')\n for cell in line[1:]:\n if isinstance(cell,str):\n self.add(Label(cell),align='left',**kw)\n else :\n self.add(Label(cell),align='right',**kw)\n \n def apply_default_style(self):\n if 
hasattr(self,'default_style'):\n for key,value in self.default_style.items():\n self.style[key]=value\n \n def config(self,**kw):\n element=self\n \n if (value :=kw.get('value')):\n if not isinstance(self,(Label,Entry)):\n raise TypeError(\"invalid keyword 'value' for \"+\n self.__class__.__name__)\n element._value=value\n element.text=value\n \n for attr in ['type','name','checked']:\n if (value :=kw.get(attr))is not None :\n setattr(element,attr,value)\n \n if (title :=kw.get('title'))and isinstance(self,Box):\n element.title_bar.text=title\n \n for attr in ['width','height','top','left']:\n if (value :=kw.get(attr)):\n \n match value:\n case str():\n setattr(element.style,attr,value)\n case int()|float():\n setattr(element.style,attr,f'{round(value)}px')\n case _:\n raise ValueError(f\"{attr} should be str or number, \"+\n f\"not '{value.__class__.__name__}'\")\n \n if (cursor :=kw.get('cursor')):\n element.style.cursor=cursor\n \n if (command :=kw.get('command')):\n element.bind('click',\n lambda ev,command=command:command(ev.target))\n element.style.cursor='default'\n \n if (font :=kw.get('font')):\n element.style.fontFamily=font.family\n element.style.fontWeight=font.weight\n element.style.fontStyle=font.style\n if font.size:\n if isinstance(font.size,str):\n element.style.fontSize=font.size\n else :\n element.style.fontSize=f'{font.size}px'\n \n if (background :=kw.get('background')):\n element.style.backgroundColor=background\n if (color :=kw.get('color')):\n element.style.color=color\n \n if (border :=kw.get('border')):\n if isinstance(border,str):\n element.style.borderWidth=border\n element.style.borderStyle='solid'\n elif isinstance(border,int):\n element.style.borderWidth=f'{border}px'\n element.style.borderStyle='solid'\n elif isinstance(border,Border):\n element.style.borderStyle=border.style\n element.style.borderWidth=f'{border.width}px'\n element.style.borderColor=border.color\n element.style.borderRadius=f'{border.radius}px'\n else :\n raise TypeError('invalid type for border: '+\n border.__class__.__name__)\n \n if (padding :=kw.get('padding')):\n if isinstance(padding,str):\n element.style.padding=padding\n elif isinstance(padding,int):\n element.style.padding=f'{padding}px'\n elif isinstance(padding,Padding):\n for key in ['top','right','bottom','left']:\n value=getattr(padding,key)\n attr='padding'+key.capitalize()\n if isinstance(value,str):\n setattr(element.style,attr,value)\n else :\n setattr(element.style,attr,f'{value}px')\n else :\n raise TypeError('invalid type for padding: '+\n padding.__class__.__name__)\n \n if (menu :=kw.get('menu'))is not None :\n if isinstance(self,Box):\n menu._build()\n self.insertBefore(menu.element,\n self.title_bar.nextSibling)\n self.menu=menu\n \n if (callbacks :=kw.get('callbacks'))is not None :\n for event,func in callbacks.items():\n element.bind(event,self._wrap_callback(func))\n \n self._config=getattr(self,'_config',{})\n self._config |=kw\n \n def _wrap_callback(self,func):\n def f(event):\n res=func(event)\n if res is False :\n event.stopPropagation()\n event.preventDefault()\n return res\n return f\n \n def coords(self):\n if not hasattr(self,'master'):\n raise TypeError(\"attribute 'coords' not set until widget is added\")\n parent=self.parentNode\n return _Coords(parent.offsetLeft,parent.offsetTop,parent.offsetWidth,\n parent.offsetHeight)\n \n def grid(self,column=None ,columnspan=1,row=None ,rowspan=1,align='',\n **options):\n master=self.master\n if not hasattr(master,'_table'):\n master._table=html.TABLE(\n 
cellpadding=0,\n cellspacing=0,\n style='width:100%;')\n master <=master._table\n if row =='same':\n row=0\n \n master.table=_Wrapper(master._table)\n \n if not hasattr(master,'cells'):\n master.cells=set()\n \n valid=[None ,'same','next']\n if not isinstance(row,int)and row not in valid:\n raise ValueError(f'invalid value for row: {row!r}')\n if not isinstance(column,int)and column not in valid:\n raise ValueError(f'invalid value for column: {column!r}')\n \n \n \n \n nb_rows=len(master._table.rows)\n if row is None or row =='next':\n \n row=nb_rows\n if column is None :\n column=0\n elif row =='same':\n row=max(0,nb_rows -1)\n \n if column is None :\n column='next'\n \n for i in range(row -nb_rows+1):\n master._table <=html.TR()\n \n tr=master._table.rows[row]\n \n nb_cols=len(tr.cells)\n if column =='next':\n column=nb_cols\n elif column =='same':\n column=nb_cols -1\n \n \n cols_from_span=[c for (r,c)in master.cells\n if r ==row and c 1:\n td.attrs['colspan']=columnspan\n if rowspan >1:\n td.attrs['rowspan']=rowspan\n \n aligns=align.split()\n if 'left'in aligns:\n td.style.textAlign='left'\n if 'right'in aligns:\n td.style.textAlign='right'\n if 'center'in aligns:\n td.style.textAlign='center'\n if 'top'in aligns:\n td.style.verticalAlign='top'\n if 'bottom'in aligns:\n td.style.verticalAlign='bottom'\n if 'middle'in aligns:\n td.style.verticalAlign='middle'\n \n has_child=len(td.childNodes)>0\n if has_child:\n if hasattr(td.firstChild,'is_inner'):\n inner=td.firstChild\n else :\n inner=html.DIV(style=\"position:relative\")\n inner.is_inner=True\n inner <=td.firstChild\n td <=inner\n self.style.position=\"absolute\"\n self.style.top='0px'\n inner <=self\n else :\n td <=self\n \n self.row=row\n self.column=column\n self.cell=_Wrapper(td)\n \n self.cell.config(**options)\n \n self.row=_Wrapper(tr)\n \n return self\n \n @property\n def rows(self):\n return Rows(self)\n \n def sort_by_row(self,*columns,has_title=False ):\n ''\n \n rows=list(self._table.rows)\n if has_title:\n head=rows[0]\n rows=rows[1:]\n \n def first_values(row,rank):\n values=[]\n for i in range(rank):\n col_num,_=columns[i]\n values.append(row.cells[col_num].firstChild._value)\n return values\n \n for i,(col_num,ascending)in enumerate(columns):\n if i ==0:\n rows.sort(key=lambda row:row.cells[col_num].firstChild._value,\n reverse=not ascending)\n else :\n new_rows=[]\n j=0\n while True :\n same_start=[row for row in rows if\n first_values(row,i)==first_values(rows[j],i)]\n same_start.sort(key=lambda r:r.cells[col_num].firstChild._value,\n reverse=not ascending)\n new_rows +=same_start\n j +=len(same_start)\n if j ==len(rows):\n rows=new_rows\n break\n \n if has_title:\n rows.insert(0,head)\n self._table <=rows\n \n \n \nborderColor='#008'\nbackgroundColor='#fff'\ncolor='#000'\n\n\nclass Frame(html.DIV,Widget):\n\n def __init__(self,*args,**options):\n self._options=options\n \n \nclass Bar(Frame):\n\n def __init__(self,**options):\n super().__init__(**options)\n self <=Label(\" \")\n \n \nclass Box(html.DIV,Widget):\n\n default_config={\n 'width':'inherit',\n 'background':backgroundColor,\n 'color':color,\n 'cursor':'default',\n 'menu':None ,\n 'font':Font(family='sans-serif',size=12)\n }\n \n def __init__(self,container=document,title=\"\",titlebar=False ,**options):\n html.DIV.__init__(self,\n style=\"position:absolute;box-sizing:border-box\")\n \n container <=self\n self._options=self.default_config |options\n self.config(**self._options)\n \n if titlebar:\n self.title_bar=TitleBar(title)\n 
self.add(self.title_bar)\n \n panel=Frame()\n self.add(panel,row=\"next\",align=\"left\")\n self.panel=panel\n \n self.title_bar.close_button.bind(\"click\",self.close)\n \n self.title_bar.bind(\"mousedown\",self._grab_widget)\n self.title_bar.bind(\"touchstart\",self._grab_widget)\n self.title_bar.bind(\"mouseup\",self._stop_moving)\n self.title_bar.bind(\"touchend\",self._stop_moving)\n self.bind(\"leave\",self._stop_moving)\n self.is_moving=False\n \n elif title:\n raise UIError('cannot set title if titlebar is not set')\n \n def add(self,widget,**kw):\n if hasattr(self,'panel'):\n self.panel.add(widget,**kw)\n else :\n Widget.add(self,widget,**kw)\n \n def add_menu(self,menu):\n ''\n if not hasattr(self,\"_table\"):\n self.add(menu)\n else :\n self.insertBefore(menu,self._table)\n menu._toplevel=True\n \n def close(self,*args):\n self.remove()\n \n def keys(self):\n return [\n 'left','top','width','height'\n 'background','color',\n 'cursor',\n 'menu',\n 'border',\n 'font',\n 'padding']\n \n def _grab_widget(self,event):\n self._remove_menus()\n document.bind(\"mousemove\",self._move_widget)\n document.bind(\"touchmove\",self._move_widget)\n self.is_moving=True\n self.initial=[self.left -event.x,self.top -event.y]\n \n event.preventDefault()\n \n def _move_widget(self,event):\n if not self.is_moving:\n return\n \n \n self.left=self.initial[0]+event.x\n self.top=self.initial[1]+event.y\n \n def _stop_moving(self,event):\n self.is_moving=False\n document.unbind(\"mousemove\")\n document.unbind(\"touchmove\")\n \n def title(self,text):\n self.title_bar.text=text\n \n def _remove_menus(self):\n menu=self._options['menu']\n if menu and menu.open_submenu:\n menu.open_on_mouseenter=False\n menu.open_submenu.element.remove()\n \n \nclass _Wrapper:\n\n def __init__(self,element):\n self.element=element\n \n def config(self,**options):\n Widget.config(self.element,**options)\n \n \nclass Checkbuttons(Frame):\n\n COUNTER=0\n \n def __init__(self,**options):\n Frame.__init__(self,**options)\n self.name=f'checkbutton{self.COUNTER}'\n self.COUNTER +=1\n \n def add_option(self,label,value=None ,checked=False ):\n self.add(Entry(type=\"checkbox\",name=self.name,\n value=value if value is not None else label,\n checked=checked))\n self.add(Label(label))\n \n \nclass Button(html.BUTTON,Widget):\n\n def __init__(self,*args,**options):\n super().__init__(*args)\n self._options=options\n \n \nclass Entry(html.INPUT,Widget):\n\n def __init__(self,*args,**options):\n self._options=options\n super().__init__(*args)\n \n \nclass Image(html.IMG,Widget):\n\n def __init__(self,src,**options):\n super().__init__(src=src)\n self._options=options\n \n \nclass Label(html.DIV,Widget):\n\n default_style={\n 'whiteSpace':'pre',\n 'padding':'0.3em'\n }\n \n def __init__(self,value,*args,**options):\n self._options=options\n self._value=value\n super().__init__(value,*args)\n if not value:\n self.style.minHeight='1em'\n self.apply_default_style()\n \n \nclass Link(html.A,Widget):\n\n def __init__(self,text,href,**options):\n super().__init__(text,href=href)\n self._options=options\n \n \nclass Listbox(Frame):\n\n def __init__(self,**options):\n self.size=options.pop('size',None )\n self.multiple=options.pop('multiple',False )\n if self.size is not None and not isinstance(self.size,int):\n raise ValueError('size must be an integer')\n Frame.__init__(self,**options)\n self._selected=[]\n \n def add_option(self,name):\n option=Label(name,\n callbacks=dict(mouseenter=self.enter_option,\n mouseleave=self.leave_option,\n 
click=self.select_option))\n self.add(option,row='next')\n if self.size is not None and option.row ==self.size -1:\n self.style.height=f'{self.offsetHeight}px'\n self.style.overflowY=\"scroll\"\n \n def enter_option(self,widget):\n if widget not in self._selected:\n widget.config(background='lightblue')\n \n def leave_option(self,widget):\n if widget not in self._selected:\n widget.config(background='inherit')\n \n def select_option(self,widget):\n if self.multiple:\n if widget in self._selected:\n self.unselect(widget)\n self.enter_option(widget)\n else :\n self.select(widget)\n else :\n if self._selected:\n self.unselect(self._selected[0])\n self.select(widget)\n \n def select(self,widget):\n widget.config(background='blue',color='white')\n self._selected.append(widget)\n \n def unselect(self,widget):\n widget.config(background='inherit',color='inherit')\n self._selected.remove(widget)\n \n \nclass Menu(Frame):\n\n default_config={\n 'background':'#eee'\n }\n \n toplevel_options={\n 'background':'inherit',\n 'color':'inherit',\n 'highlight-background':'LightBlue',\n 'highlight-color':'inherit'\n }\n \n submenu_options={\n 'background':'inherit',\n 'color':'inherit',\n 'highlight-background':'blue',\n 'highlight-color':'white'\n }\n \n def __init__(self,master,label=None ,**options):\n self.master=master\n self._toplevel_options=(self.toplevel_options |\n options.pop('toplevel_options',{}))\n self._submenu_options=(self.submenu_options |\n options.pop('submenu_options',{}))\n \n self._toplevel=not isinstance(master,Menu)\n self._options=self.default_config |options\n Frame.__init__(self,**self._options)\n \n if not self._toplevel:\n if label is None :\n raise ValueError('missing submenu label')\n master.add_submenu(label,self)\n elif not hasattr(master,\"_table\"):\n master.add(self)\n else :\n master.insertBefore(self,master._table)\n \n \n def add_option(self,label,command=None ):\n callbacks=dict(mouseenter=self.enter,\n mouseleave=self.leave)\n if command:\n callbacks['click']=command\n name=Label(label,padding=5,callbacks=callbacks)\n self.add(name,row=\"next\")\n \n def add_submenu(self,label,submenu=None ):\n menu_options={\n 'callbacks':dict(click=self.submenu,\n mouseenter=self.enter,\n mouseleave=self.leave),\n 'padding':5\n }\n frame=Frame(**menu_options)\n frame.submenu=submenu\n \n frame.add(Label(label))\n if not self._toplevel:\n frame.add(Label('▶',padding=Padding(left=\"1em\")))\n if self._toplevel:\n self.add(frame)\n else :\n self.add(frame,row=\"next\")\n \n def enter(self,widget):\n if self._toplevel:\n options=self._toplevel_options\n else :\n options=self._submenu_options\n widget.config(background=options['highlight-background'],\n color=options['highlight-color'])\n if hasattr(widget.master,'open_submenu'):\n self.submenu(widget)\n \n def leave(self,widget):\n if self._toplevel:\n options=self._toplevel_options\n else :\n options=self._submenu_options\n widget.config(background=options['background'],\n color=options['color'])\n \n def submenu(self,widget):\n master=widget.master\n if hasattr(master,'open_submenu'):\n master.open_submenu.remove()\n if not hasattr(widget,\"submenu\"):\n return\n coords=widget.coords()\n if self._toplevel:\n top=coords.top+coords.height\n left=coords.left\n else :\n top=coords.top+widget.closest('TABLE').offsetTop\n left=coords.left+master.master.clientWidth\n \n box=Box(container=widget,titlebar=None ,\n top=f'{top}px',left=f'{left}px')\n box.add(widget.submenu)\n master.open_submenu=box\n \nclass Radiobuttons(Frame):\n\n 
COUNTER=0\n \n def __init__(self,**options):\n Frame.__init__(self,**options)\n self.name=\"radiobutton{self.COUNTER}\"\n self.COUNTER +=1\n \n def add_option(self,label,value=None ,checked=False ):\n self.add(Entry(type=\"radio\",\n name=self.name,\n value=value if value is not None else label,\n checked=checked))\n self.add(Label(label))\n \n \nclass Slider(Frame):\n\n default_config={\n 'background':\"#bbb\"\n }\n \n def __init__(self,ratio=0,width=300,height=20,**options):\n background=options.pop('background',self.default_config['background'])\n Frame.__init__(self,width=width,height=height,**options)\n self.style.display='flex'\n self.style.alignItems='center'\n self.bar=html.DIV(style=\"width:100%;height:25%;border-radius:3px;\")\n self.bar.style.backgroundColor=background\n self <=self.bar\n self.slider=html.DIV(style=\"position:absolute;\"+\n \"cursor:grab;\")\n self.slider.style.backgroundColor=background\n self <=self.slider\n self.slider.bind('mousedown',self.grab_slider)\n self.ratio=ratio\n self.moving=False\n \n def grid(self,**kw):\n Widget.grid(self,**kw)\n ray=round(self.offsetWidth *0.03)\n self.min_x=-ray\n self.max_x=round(self.width -self.slider.width -ray)\n self.interval=self.width -self.slider.width\n self.slider.left=self.min_x+round(self.interval *self.ratio)\n self.slider.style.height=self.slider.style.width=f'{2 * ray}px'\n self.slider.style.borderRadius=\"50%\"\n print(self.slider.style.width)\n \n def grab_slider(self,event):\n self.x0=self.slider.left\n self.mouse0=event.clientX\n document.bind('mousemove',self.move_slider)\n document.bind('mouseup',self.release_slider)\n self.moving=True\n event.preventDefault()\n \n def move_slider(self,event):\n event.preventDefault()\n if self.moving:\n dx=event.clientX -self.mouse0\n x=self.x0+dx\n if x self.max_x:\n x=self.max_x\n self.slider.left=x\n self.ratio=(x -self.min_x)/self.interval\n evt=window.CustomEvent.new('move')\n evt.clientX=event.clientX\n evt.clientY=event.clientY\n self.dispatchEvent(evt)\n return False\n \n def release_slider(self,event):\n self.moving=False\n document.unbind('mousemove',self.move_slider)\n document.unbind('mouseup',self.release_slider)\n \n \nclass Text(html.DIV,Widget):\n\n default_style={\n 'borderWidth':'1px',\n 'borderStyle':'solid',\n 'borderColor':'#999',\n 'boxSizing':'border-box'\n }\n \n def __init__(self,*args,**options):\n self.apply_default_style()\n self._options=options\n super().__init__(*args)\n self.attrs['contenteditable']=True\n \n \nclass TitleBar(html.DIV,Widget):\n\n default_config={\n 'background':'#f0f0f0',\n 'cursor':'default'\n }\n \n def __init__(self,title='',*args,**options):\n self._options=self.default_config |options\n super().__init__('',*args)\n \n self.add(Label(title))\n self.close_button=Button(\"╳\",\n padding=Padding(bottom=10),\n background=\"inherit\",\n border=Border(width=0))\n \n self.add(self.close_button,align=\"right top\")\n \n self.config(**self._options)\n \n", ["browser"]], "browser.timer": [".py", "from browser import self as window\n\n\nclear_interval=window.clearInterval\n\nclear_timeout=window.clearTimeout\n\ndef set_interval(func,interval,*args):\n return window.setInterval(func,interval,*args)\n \ndef set_timeout(func,interval,*args):\n return int(window.setTimeout(func,interval,*args))\n \ndef request_animation_frame(func):\n if func.__code__.co_argcount ==0:\n raise TypeError(f'function {func.__code__.co_name}() '+\n 'should take a single argument')\n return int(window.requestAnimationFrame(func))\n \ndef 
cancel_animation_frame(int_id):\n window.cancelAnimationFrame(int_id)\n \ndef set_loop_timeout(x):\n\n assert isinstance(x,int)\n __BRYTHON__.loop_timeout=x\n", ["browser"]], "browser.idbcache": [".py", "\n\nfrom datetime import datetime\n\nfrom browser.widgets import dialog\n\nfrom browser import bind,window,document\nfrom browser.html import *\n\nidb_name=\"brython-cache\"\nidb_cx=window.indexedDB.open(idb_name)\n\ninfos={\"nb_modules\":0,\"size\":0}\n\n@bind(idb_cx,\"success\")\ndef open_success(evt):\n db=evt.target.result\n if not db.objectStoreNames.contains(\"modules\"):\n dialog.InfoDialog('indexedDB cache','db has no store \"modules\"')\n return\n \n table=TABLE(border=1)\n table <=TR(TH(col)for col in\n ['Name','Package','Size','Brython timestamp',\n 'Stdlib timestamp'])\n tx=db.transaction(\"modules\",\"readwrite\")\n store=tx.objectStore(\"modules\")\n outdated=[]\n \n openCursor=store.openCursor()\n \n @bind(openCursor,\"error\")\n def cursor_error(evt):\n print(\"open cursor error\",evt)\n \n @bind(openCursor,\"success\")\n def cursor_success(evt):\n infos['nb_modules']+=1\n cursor=evt.target.result\n if cursor:\n record=cursor.value\n timestamp=datetime.fromtimestamp(record.timestamp /1000)\n source_ts=datetime.fromtimestamp(record.source_ts /1000)\n table <=TR(TD(record.name)+\n TD(bool(record.is_package))+\n TD(len(record.content),align=\"right\")+\n TD(timestamp.strftime('%Y-%m-%d %H:%M'))+\n TD(source_ts.strftime('%Y-%m-%d %H:%M'))\n )\n infos['size']+=len(record.content)\n getattr(cursor,\"continue\")()\n else :\n panel=dialog.Dialog('indexedDB cache',top=0,left=0).panel\n panel <=H1(\"Brython indexedDB cache\")\n size='{:,d}'.format(infos['size'])\n panel <=H3(f\"{infos['nb_modules']} modules, size {size} bytes\")\n panel <=table\n", ["browser", "browser.html", "browser.widgets", "browser.widgets.dialog", "datetime"]], "browser.local_storage": [".py", "\nimport sys\nfrom browser import window,console\nimport javascript\n\nhas_local_storage=hasattr(window,'localStorage')\n\nclass _UnProvided():\n pass\n \nclass LocalStorage():\n storage_type=\"local_storage\"\n \n def __init__(self):\n if not has_local_storage:\n raise EnvironmentError(\"LocalStorage not available\")\n self.store=window.localStorage\n \n def __delitem__(self,key):\n if not isinstance(key,str):\n raise TypeError(\"key must be string\")\n if key not in self:\n raise KeyError(key)\n self.store.removeItem(key)\n \n def __getitem__(self,key):\n if not isinstance(key,str):\n raise TypeError(\"key must be string\")\n res=self.store.getItem(key)\n if res is not javascript.NULL:\n return res\n raise KeyError(key)\n \n def __setitem__(self,key,value):\n if not isinstance(key,str):\n raise TypeError(\"key must be string\")\n if not isinstance(value,str):\n raise TypeError(\"value must be string\")\n self.store.setItem(key,value)\n \n \n def __contains__(self,key):\n if not isinstance(key,str):\n raise TypeError(\"key must be string\")\n res=self.store.getItem(key)\n if res is javascript.NULL:\n return False\n return True\n \n def __iter__(self):\n keys=self.keys()\n return keys.__iter__()\n \n def get(self,key,default=None):\n if not isinstance(key,str):\n raise TypeError(\"key must be string\")\n return self.store.getItem(key)or default\n \n def pop(self,key,default=_UnProvided()):\n if not isinstance(key,str):\n raise TypeError(\"key must be string\")\n if type(default)is _UnProvided:\n ret=self.get(key)\n del self[key]\n return ret\n else:\n if key in self:\n ret=self.get(key)\n del self[key]\n return ret\n 
else:\n return default\n \n \n \n def keys(self):\n return[self.store.key(i)for i in range(self.store.length)]\n \n def values(self):\n return[self.__getitem__(k)for k in self.keys()]\n \n def items(self):\n return list(zip(self.keys(),self.values()))\n \n def clear(self):\n self.store.clear()\n \n def __len__(self):\n return self.store.length\n \nif has_local_storage:\n storage=LocalStorage()\n", ["browser", "javascript", "sys"]], "browser.indexed_db": [".py", "class EventListener:\n def __init__(self,events=[]):\n self._events=events\n \n def append(self,event):\n self._events.append(event)\n \n def fire(self,e):\n for _event in self._events:\n _event(e)\n \nclass IndexedDB:\n def __init__(self):\n if not __BRYTHON__.has_indexedDB:\n raise NotImplementedError(\"Your browser doesn't support indexedDB\")\n return\n \n self._indexedDB=__BRYTHON__.indexedDB()\n self._db=None\n self._version=None\n \n def _onsuccess(self,event):\n self._db=event.target.result\n \n def open(self,name,onsuccess,version=1.0,onerror=None ,\n onupgradeneeded=None ):\n self._version=version\n _result=self._indexedDB.open(name,version)\n _success=EventListener([self._onsuccess,onsuccess])\n _result.onsuccess=_success.fire\n _result.onupgradeneeded=onupgradeneeded\n \n \n def onerror(e):\n print(\"onerror: %s:%s\"%(e.type,e.target.result))\n \n def onblocked(e):\n print(\"blocked: %s:%s\"%(e.type,e.result))\n \n _result.onerror=onerror\n _result.onblocked=onblocked\n \n def transaction(self,entities,mode='read'):\n return Transaction(self._db.transaction(entities,mode))\n \nclass Transaction:\n\n def __init__(self,transaction):\n self._transaction=transaction\n \n def objectStore(self,name):\n return ObjectStore(self._transaction.objectStore(name))\n \nclass ObjectStore:\n\n def __init__(self,objectStore):\n self._objectStore=objectStore\n self._data=[]\n \n def clear(self,onsuccess=None ,onerror=None ):\n _result=self._objectStore.clear()\n \n if onsuccess is not None :\n _result.onsuccess=onsuccess\n \n if onerror is not None :\n _result.onerror=onerror\n \n def _helper(self,func,object,onsuccess=None ,onerror=None ):\n _result=func(object)\n \n if onsuccess is not None :\n _result.onsuccess=onsuccess\n \n if onerror is not None :\n _result.onerror=onerror\n \n def put(self,obj,key=None ,onsuccess=None ,onerror=None ):\n _r=self._objectStore.put(obj,key)\n _r.onsuccess=onsuccess\n _r.onerror=onerror\n \n def add(self,obj,key,onsuccess=None ,onerror=None ):\n _r=self._objectStore.add(obj,key)\n _r.onsuccess=onsuccess\n _r.onerror=onerror\n \n \n def delete(self,index,onsuccess=None ,onerror=None ):\n self._helper(self._objectStore.delete,index,onsuccess,onerror)\n \n def query(self,*args):\n self._data=[]\n def onsuccess(event):\n cursor=event.target.result\n if cursor is not None :\n self._data.append(cursor.value)\n getattr(cursor,\"continue\")()\n \n self._objectStore.openCursor(args).onsuccess=onsuccess\n \n def fetchall(self):\n yield self._data\n \n def get(self,key,onsuccess=None ,onerror=None ):\n self._helper(self._objectStore.get,key,onsuccess,onerror)\n", []], "browser.webcomponent": [".py", "from _webcomponent import *\n", ["_webcomponent"]], "browser.svg": [".py", "from _svg import *\n", ["_svg"]], "browser.markdown": [".py", "\n\nimport re\n\nimport random\n\nletters='abcdefghijklmnopqrstuvwxyz'\nletters +=letters.upper()+'0123456789'\n\nclass URL:\n\n def __init__(self,src):\n elts=src.split(maxsplit=1)\n self.href=elts[0]\n self.alt=''\n if len(elts)==2:\n alt=elts[1]\n if alt[0]=='\"'and 
alt[-1]=='\"':\n self.alt=alt[1:-1]\n elif alt[0]==\"'\"and alt[-1]==\"'\":\n self.alt=alt[1:-1]\n elif alt[0]==\"(\"and alt[-1]==\")\":\n self.alt=alt[1:-1]\n \n \nclass CodeBlock:\n\n def __init__(self,line):\n self.lines=[line]\n if line.startswith(\"```\"):\n if len(line)>3:\n self.info=line[3:]\n else :\n self.info=\"block\"\n elif line.startswith(\"`\")and len(line)>1:\n self.info=line[1:]\n elif line.startswith(\">>>\"):\n self.info=\"python-console\"\n else :\n self.info=None\n \n def to_html(self):\n if self.lines[0].startswith(\"`\"):\n self.lines.pop(0)\n res=escape('\\n'.join(self.lines))\n res=unmark(res)\n _class=self.info or \"marked\"\n res='

<pre class=\"%s\">%s</pre>
\\n'%(_class,res)\n return res,[]\n \n \nclass Marked:\n\n def __init__(self,line=''):\n self.line=line\n self.children=[]\n \n def to_html(self):\n return apply_markdown(self.line)\n \n \nclass Script:\n\n def __init__(self,src):\n self.src=src\n \n def to_html(self):\n return self.src,[]\n \n \n \nrefs={}\nref_pattern=r\"^\\[(.*)\\]:\\s+(.*)\"\n\ndef mark(src):\n\n global refs\n refs={}\n \n \n \n \n \n \n \n \n src=src.replace('\\r\\n','\\n')\n \n \n src=re.sub(r'(.*?)\\n=+\\n','\\n# \\\\1\\n',src)\n src=re.sub(r'(.*?)\\n-+\\n','\\n## \\\\1\\n',src)\n \n lines=src.split('\\n')+['']\n \n i=bq=0\n ul=ol=0\n \n while i '):\n nb=1\n while nb ':\n nb +=1\n lines[i]=lines[i][nb:]\n if nb >bq:\n lines.insert(i,'
<blockquote>'*(nb -bq))\n i +=1\n bq=nb\n elif nb <bq:\n lines.insert(i,'</blockquote>'*(bq -nb))\n i +=1\n bq=nb\n elif bq >0:\n lines.insert(i,'</blockquote>
'*bq)\n i +=1\n bq=0\n \n \n if (lines[i].strip()and lines[i].lstrip()[0]in '-+*'\n and len(lines[i].lstrip())>1\n and lines[i].lstrip()[1]==' '\n and (i ==0 or ul or not lines[i -1].strip())):\n \n nb=1+len(lines[i])-len(lines[i].lstrip())\n lines[i]='
  • '+lines[i][nb:]\n if nb >ul:\n lines.insert(i,'
      '*(nb -ul))\n i +=1\n elif nb '*(ul -nb))\n i +=1\n ul=nb\n elif ul and not lines[i].strip():\n if (i 1 and nline[1]==' ':\n pass\n else :\n lines.insert(i,'
    '*ul)\n i +=1\n ul=0\n \n \n mo=re.search(r'^(\\d+\\.)',lines[i])\n if mo:\n if not ol:\n lines.insert(i,'
      ')\n i +=1\n lines[i]='
    1. '+lines[i][len(mo.groups()[0]):]\n ol=1\n elif (ol and not lines[i].strip()and i ')\n i +=1\n ol=0\n \n i +=1\n \n if ul:\n lines.append(''*ul)\n if ol:\n lines.append('
    '*ol)\n if bq:\n lines.append(''*bq)\n \n sections=[]\n scripts=[]\n section=Marked()\n \n i=0\n while i '):\n sections.append(Script('\\n'.join(lines[i:j+1])))\n for k in range(i,j+1):\n lines[k]=''\n section=Marked()\n break\n j +=1\n i=j\n continue\n \n \n elif line.startswith('#'):\n level=1\n line=lines[i]\n while level ','>')\n czone=czone.replace('_','_')\n czone=czone.replace('*','*')\n return czone\n \ndef s_escape(mo):\n\n czone=mo.string[mo.start():mo.end()]\n return escape(czone)\n \ndef unmark(code_zone):\n\n code_zone=code_zone.replace('_','_')\n return code_zone\n \ndef s_unmark(mo):\n\n code_zone=mo.string[mo.start():mo.end()]\n code_zone=code_zone.replace('_','_')\n return code_zone\n \ndef apply_markdown(src):\n\n scripts=[]\n key=None\n \n i=0\n while i 0 and src[i -1]=='!'\n start_a=i+1\n nb=1\n while True :\n end_a=src.find(']',i)\n if end_a ==-1:\n break\n nb +=src[i+1:end_a].count('[')-1\n i=end_a+1\n if nb ==0:\n break\n if end_a >-1 and src[start_a:end_a].find('\\n')==-1:\n link=src[start_a:end_a]\n rest=src[end_a+1:].lstrip()\n if rest and rest[0]=='(':\n j=0\n while True :\n end_href=rest.find(')',j)\n if end_href ==-1:\n break\n if rest[end_href -1]=='\\\\':\n j=end_href+1\n else :\n break\n if end_href >-1 and rest[:end_href].find('\\n')==-1:\n if img_link:\n tag=('\"'+link+'\"')\n src=src[:start_a -2]+tag+rest[end_href+1:]\n else :\n tag=('
    '+link\n +'')\n src=src[:start_a -1]+tag+rest[end_href+1:]\n i=start_a+len(tag)\n elif rest and rest[0]=='[':\n j=0\n while True :\n end_key=rest.find(']',j)\n if end_key ==-1:\n break\n if rest[end_key -1]=='\\\\':\n j=end_key+1\n else :\n break\n if end_key >-1 and rest[:end_key].find('\\n')==-1:\n if not key:\n key=link\n if key.lower()not in refs:\n raise KeyError('unknown reference %s'%key)\n url=refs[key.lower()]\n tag=''+link+''\n src=src[:start_a -1]+tag+rest[end_key+1:]\n i=start_a+len(tag)\n \n i +=1\n \n \n \n \n \n \n \n \n \n rstr=' '+''.join(random.choice(letters)for i in range(16))+' '\n \n i=0\n state=None\n start=-1\n data=''\n tags=[]\n while i 'and state is None :\n tags.append(src[i:j+1])\n src=src[:i]+rstr+src[j+1:]\n i +=len(rstr)\n break\n elif state =='\"'or state ==\"'\":\n data +=src[j]\n elif src[j]=='\\n':\n \n \n src=src[:i]+'<'+src[i+1:]\n j=i+4\n break\n j +=1\n elif src[i]=='`'and i >0:\n if src[i -1]!='\\\\':\n \n j=i+1\n while j \\1'%(tag,tag),src)\n \n \n src=re.sub(r'\\*(.+?)\\*',r'<%s>\\1'%('EM','EM'),src)\n \n \n \n src=re.sub(r'\\b_(.*?)_\\b',r'\\1',src,\n flags=re.M)\n \n \n code_pattern=r'\\`(.*?)\\`'\n src=re.sub(code_pattern,r'\\1',src)\n \n \n while True :\n pos=src.rfind(rstr)\n if pos ==-1:\n break\n repl=tags.pop()\n src=src[:pos]+repl+src[pos+len(rstr):]\n \n src='

<p>'+src+'</p>

    '\n \n return src,scripts\n", ["random", "re"]], "browser.template": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport tb as traceback\nfrom browser import document,html\n\n\n\nvoid_elements=[\"AREA\",\"BASE\",\"BR\",\"COL\",\"EMBED\",\"HR\",\"IMG\",\"INPUT\",\n\"LINK\",\"META\",\"PARAM\",\"SOURCE\",\"TRACK\",\"WBR\"]\n\ndef copy(obj):\n if isinstance(obj,dict):\n res={}\n for key,value in obj.items():\n res[key]=copy(value)\n return res\n elif isinstance(obj,(list,tuple)):\n return obj[:]\n elif isinstance(obj,set):\n return{x for x in obj}\n else:\n return obj\n \n \nclass ElementData:\n ''\n \n \n def __init__(self,**kw):\n ''\n\n\n \n self.__keys__=set()\n for key,value in kw.items():\n object.__setattr__(self,key,value)\n self.__keys__.add(key)\n \n def __setattr__(self,attr,value):\n ''\n\n\n \n object.__setattr__(self,attr,value)\n if attr !=\"__keys__\":\n self.__keys__.add(attr)\n \n def to_dict(self):\n ''\n return{k:getattr(self,k)for k in self.__keys__}\n \n def clone(self):\n ''\n\n\n \n return copy(self.to_dict())\n \n \nclass TemplateError(Exception):\n pass\n \n \nclass Template:\n\n def __init__(self,element,callbacks=[]):\n if isinstance(element,str):\n element=document[element]\n self.element=element\n self.line_mapping={}\n self.line_num=1\n self.indent=0\n self.python=\"\"\n self.parse(element)\n self.callbacks=callbacks\n \n def add(self,content,elt):\n self.python +=content\n self.line_mapping[self.line_num]=elt\n if content.endswith(\"\\n\"):\n self.line_num +=1\n \n def add_indent(self,content,elt):\n self.add(\" \"*self.indent+content,elt)\n \n def write(self,content):\n self.html +=str(content)+\"\\n\"\n \n def parse(self,elt):\n ''\n\n \n \n \n is_block=False\n \n if elt.nodeType ==3:\n \n if elt.text.strip():\n text=elt.text.replace('\"',\""\")\n text=text.replace(\"\\n\",\"\\\\n\")\n text='\"'+text+'\"'\n \n nb_braces=elt.text.count(\"{\")\n if nb_braces:\n nb_double_braces=elt.text.count(\"{{\")\n if nb_double_braces !=nb_braces:\n lines=[line for line in elt.text.split(\"\\n\")\n if line.strip()]\n text='f\"\"\"'+\" \".join(lines)+'\"\"\"'\n self.add_indent(\"__write__(\"+text+\")\\n\",elt)\n \n elif hasattr(elt,\"tagName\"):\n start_tag=\"__write__('<\"+elt.tagName\n block=None\n \n \n static_attrs=[]\n dynamic_attrs=[]\n for item in elt.attributes:\n if item.name ==\"b-code\":\n \n block=item.value.rstrip(\":\")+\":\"\n elif item.name ==\"b-include\":\n \n elt.html=open(item.value).read()\n else:\n value=item.value.replace(\"\\n\",\"\")\n if \"{\"in value:\n dynamic_attrs.append(\"'\"+item.name+\"', f'\"+\n value+\"'\")\n else:\n static_attrs.append(item.name+'=\"'+value+'\"')\n \n if block:\n self.add_indent(block+\"\\n\",elt)\n self.indent +=1\n is_block=True\n \n self.add_indent(start_tag,elt)\n \n if static_attrs or dynamic_attrs:\n self.add(\" \",elt)\n \n for attr in static_attrs:\n self.add_indent(attr+\" \",elt)\n \n if dynamic_attrs:\n self.add(\"')\\n\",elt)\n for attr in dynamic_attrs:\n self.add_indent(\"__render_attr__(\"+attr+\")\\n\",elt)\n self.add_indent(\"__write__('>')\\n\",elt)\n else:\n self.add_indent(\">')\\n\",elt)\n \n for child in elt.childNodes:\n self.parse(child)\n \n if hasattr(elt,\"tagName\")and elt.tagName not in void_elements:\n self.add_indent(\"__write__('')\\n\",elt)\n \n if is_block:\n self.indent -=1\n \n def on(self,element,event,callback):\n def func(evt):\n cache=self.data.clone()\n callback(evt,self)\n new_data=self.data.to_dict()\n 
if new_data !=cache:\n self.render(**new_data)\n element.bind(event,func)\n \n def render_attr(self,name,value):\n ''\n\n\n\n\n\n \n if value ==\"False\":\n return\n elif value ==\"True\":\n self.html +=\" \"+name\n else:\n self.html +=\" \"+name+'=\"'+str(value)+'\"'\n \n def render(self,**ns):\n ''\n\n \n \n self.data=ElementData(**ns)\n \n \n ns.update({\"__write__\":self.write,\n \"__render_attr__\":self.render_attr})\n \n self.html=\"\"\n \n \n try:\n exec(self.python,ns)\n except Exception as exc:\n msg=traceback.format_exc()\n if isinstance(exc,SyntaxError):\n line_no=exc.args[2]\n else:\n tb=exc.__traceback__\n while tb is not None:\n print('template 265, tb',tb,tb.tb_frame,tb.tb_lineno)\n line_no=tb.tb_lineno\n tb=tb.tb_next\n elt=self.line_mapping[line_no]\n print(\"Error rendering the element:\",elt.nodeType)\n if elt.nodeType ==3:\n print(elt.textContent)\n else:\n try:\n print(elt.outerHTML)\n except AttributeError:\n print('no outerHTML for',elt)\n print(elt.html)\n print(f\"{exc.__class__.__name__}: {exc}\")\n return\n \n \n \n \n \n \n \n if self.element.nodeType !=9:\n rank=self.element.index()\n parent=self.element.parent\n self.element.outerHTML=self.html\n self.element=parent.childNodes[rank]\n \n else:\n \n self.element.html=self.html\n \n \n self.element.unbind()\n callbacks={}\n for callback in self.callbacks:\n callbacks[callback.__name__]=callback\n \n \n \n for element in self.element.select(\"*[b-on]\"):\n bindings=element.getAttribute(\"b-on\")\n bindings=bindings.split(\";\")\n for binding in bindings:\n parts=binding.split(\":\")\n if not len(parts)==2:\n raise TemplateError(f\"wrong binding: {binding}\")\n event,func_name=[x.strip()for x in parts]\n if not func_name in callbacks:\n print(element.outerHTML)\n raise TemplateError(f\"unknown callback: {func_name}\")\n self.on(element,event,callbacks[func_name])\n", ["browser", "tb"]], "browser.ajax": [".py", "from _ajax import *\n", ["_ajax"]], "browser.highlight": [".py", "import re\n\nfrom browser import html\n\nletters='abcdefghijklmnopqrstuvwxyz'\nletters +=letters.upper()+'_'\ndigits='0123456789'\n\nbuiltin_funcs=\"\"\"abs|dict|help|min|setattr|\nall|dir|hex|next|slice|\nany|divmod|id|object|sorted|\nascii|enumerate|input|oct|staticmethod|\nbin|eval|int|open|str|\nbool|exec|isinstance|ord|sum|\nbytearray|filter|issubclass|pow|super|\nbytes|float|iter|print|tuple|\ncallable|format|len|property|type|\nchr|frozenset|list|range|vars|\nclassmethod|getattr|locals|repr|zip|\ncompile|globals|map|reversed|__import__|\ncomplex|hasattr|max|round|\ndelattr|hash|memoryview|set|\n\"\"\"\n\nkeywords=[\n'False',\n'None',\n'True',\n'and',\n'as',\n'assert',\n'async',\n'await',\n'break',\n'class',\n'continue',\n'def',\n'del',\n'elif',\n'else',\n'except',\n'finally',\n'for',\n'from',\n'global',\n'if',\n'import',\n'in',\n'is',\n'lambda',\n'nonlocal',\n'not',\n'or',\n'pass',\n'raise',\n'return',\n'try',\n'while',\n'with',\n'yield',\n]\nkw_pattern='^('+'|'.join(keywords)+')$'\nbf_pattern='^('+builtin_funcs.replace(\"\\n\",\"\")+')$'\n\ndef escape(txt):\n txt=txt.replace('<','<')\n txt=txt.replace('>','>')\n return txt\n \ndef highlight(txt):\n res=html.PRE()\n i=0\n name=''\n while i if the item has a submenu\n*/\n.brython-menu-submenu-item {\n font-family: var(--brython-menu-font-family);\n padding: 0.3em 0.3em 0.3em 1em;\n cursor: default;\n}\n\n/* end of browser.widgets.menu classes */\n\n\"\"\"\n\n\nclass Menu:\n\n def __init__(self,container=document.body,parent=None ,default_css=True ):\n ''\n\n \n 
self.container=container\n self.parent=parent\n \n if default_css:\n \n for stylesheet in document.styleSheets:\n if stylesheet.ownerNode.id ==\"brython-menu\":\n break\n else :\n document <=html.STYLE(style_sheet,id=\"brython-menu\")\n \n self.default_css=default_css\n \n if parent:\n parent.submenu=html.TABLE(Class=\"brython-menu-submenu\")\n parent.submenu.style.position=\"absolute\"\n parent.submenu.style.display=\"none\"\n self.container <=parent.submenu\n \n parent.bind(\"click\",self.unfold)\n \n if not hasattr(self.container,\"bind_document\"):\n \n document.bind(\"click\",self.hide_menus)\n self.container.bind_document=True\n \n def add_item(self,label,callback=None ,menu=False ):\n if self.parent is None :\n \n item=html.SPAN(label,Class=\"brython-menu-navbar-item\")\n self.container <=item\n item.bind(\"click\",self.hide_menus)\n else :\n \n item=html.TR(Class=\"brython-menu-submenu-row\")\n self.parent.submenu <=item\n item <=html.TD(label,Class=\"brython-menu-submenu-item\")\n item <=html.TD(\">\"if menu else \" \",\n Class=\"brython-menu-submenu-item\",\n paddingLeft=\"2em\")\n \n if callback is not None :\n item.bind(\"click\",callback)\n \n return item\n \n def add_link(self,label,href):\n ''\n if self.parent is None :\n \n item=html.A(label,Class=\"brython-menu-navbar-link\",href=href)\n self.container <=item\n else :\n \n item=html.TR(Class=\"brython-menu-submenu-row\")\n self.parent.submenu <=item\n item <=html.TD(html.A(label,Class=\"brython-menu-submenu-link\",\n href=href))\n \n return item\n \n def add_menu(self,label):\n ''\n \n item=self.add_item(label,menu=True )\n \n if self.parent is None :\n \n span=html.SPAN(Class=\"brython-menu-submenu\")\n span.style.position=\"absolute\"\n \n return Menu(self.container,item,default_css=self.default_css)\n \n def hide_menus(self,*args):\n ''\n for css in [\".brython-menu-navbar-item-selected\",\n \".brython-menu-submenu-row-selected\"]:\n for item in document.select(css):\n item.classList.remove(css[1:])\n for div in document.select(\".brython-menu-submenu\"):\n if div.style.display !=\"none\":\n div.style.display=\"none\"\n \n def hide_submenus(self,table):\n ''\n for row in table.select(\"TR\"):\n if hasattr(row,\"submenu\"):\n row.submenu.style.display=\"none\"\n self.hide_submenus(row.submenu)\n \n def unfold(self,ev):\n ''\n target=ev.target\n if target.nodeName ==\"SPAN\":\n \n selected=document.select(\".brython-menu-navbar-item-selected\")\n \n if selected:\n self.hide_menus()\n \n for item in selected:\n item.classList.remove(\"brython-menu-navbar-item-selected\")\n \n submenu=target.submenu\n \n target.classList.add(\"brython-menu-navbar-item-selected\")\n submenu.style.left=f\"{target.abs_left}px\"\n submenu.style.top=f\"{target.abs_top + target.offsetHeight}px\"\n \n \n \n if not selected:\n for item in document.select(\".brython-menu-navbar-item\"):\n item.bind(\"mouseenter\",self.unfold)\n \n \n submenu.style.display=\"block\"\n \n else :\n target=target.closest(\"TR\")\n \n \n table=target.closest(\"TABLE\")\n self.hide_submenus(table)\n \n \n selected=table.select(\".brython-menu-submenu-row-selected\")\n for row in selected:\n row.classList.remove(\"brython-menu-submenu-row-selected\")\n \n \n target.classList.add(\"brython-menu-submenu-row-selected\")\n \n if hasattr(target,\"submenu\"):\n \n target.submenu.style.top=f\"{target.abs_top}px\"\n target.submenu.style.left=\\\n f\"{target.abs_left + target.offsetWidth}px\"\n target.submenu.style.display=\"block\"\n \n if not selected:\n \n \n for row in 
table.select(\"TR\"):\n row.bind(\"mouseenter\",self.unfold)\n \n \n \n \n ev.stopPropagation()\n", ["browser"]], "json": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__='2.0.9'\n__all__=[\n'dump','dumps','load','loads',\n'JSONDecoder','JSONDecodeError','JSONEncoder',\n]\n\n__author__='Bob Ippolito '\n\n\n\n\n\nclass codecs:\n\n BOM_UTF8=b'\\xef\\xbb\\xbf'\n BOM_LE=BOM_UTF16_LE=b'\\xff\\xfe'\n BOM_BE=BOM_UTF16_BE=b'\\xfe\\xff'\n BOM_UTF32_LE=b'\\xff\\xfe\\x00\\x00'\n BOM_UTF32_BE=b'\\x00\\x00\\xfe\\xff'\n \n \nimport _json\nfrom.encoder import JSONEncoder\n\nJSONDecoder=_json.JSONDecoder\n\nclass decoder:\n JSONDecoder=_json.JSONDecoder\n \nclass JSONDecodeError(ValueError):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,msg,doc,pos):\n lineno=doc.count('\\n',0,pos)+1\n colno=pos -doc.rfind('\\n',0,pos)\n errmsg='%s: line %d column %d (char %d)'%(msg,lineno,colno,pos)\n ValueError.__init__(self,errmsg)\n self.msg=msg\n self.doc=doc\n self.pos=pos\n self.lineno=lineno\n self.colno=colno\n \n def __reduce__(self):\n return self.__class__,(self.msg,self.doc,self.pos)\n \ndef dump(obj,fp,**kw):\n fp.write(dumps(obj,**kw))\n \ndef dumps(obj,*,skipkeys=False,ensure_ascii=True,check_circular=True,\nallow_nan=True,cls=None,indent=None,separators=None,\ndefault=None,sort_keys=False,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if cls is None:\n return _json.dumps(obj,1,\n skipkeys=skipkeys,ensure_ascii=ensure_ascii,\n check_circular=check_circular,allow_nan=allow_nan,indent=indent,\n separators=separators,default=default,sort_keys=sort_keys,\n **kw)\n return cls(\n skipkeys=skipkeys,ensure_ascii=ensure_ascii,\n check_circular=check_circular,allow_nan=allow_nan,indent=indent,\n separators=separators,default=default,sort_keys=sort_keys,\n **kw).encode(obj)\n \ndef detect_encoding(b):\n bstartswith=b.startswith\n if bstartswith((codecs.BOM_UTF32_BE,codecs.BOM_UTF32_LE)):\n return 'utf-32'\n if bstartswith((codecs.BOM_UTF16_BE,codecs.BOM_UTF16_LE)):\n return 'utf-16'\n if bstartswith(codecs.BOM_UTF8):\n return 'utf-8-sig'\n \n if len(b)>=4:\n if not b[0]:\n \n \n return 'utf-16-be'if b[1]else 'utf-32-be'\n if not b[1]:\n \n \n \n return 'utf-16-le'if b[2]or b[3]else 'utf-32-le'\n elif len(b)==2:\n if not b[0]:\n \n return 'utf-16-be'\n if not b[1]:\n \n return 'utf-16-le'\n \n return 'utf-8'\n \n \ndef load(fp,*,cls=None,object_hook=None,parse_float=None,\nparse_int=None,parse_constant=None,object_pairs_hook=None,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return loads(fp.read(),\n cls=cls,object_hook=object_hook,\n parse_float=parse_float,parse_int=parse_int,\n parse_constant=parse_constant,object_pairs_hook=object_pairs_hook,**kw)\n \n \ndef loads(s,*,cls=None,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(s,str):\n if s.startswith('\\ufeff'):\n raise JSONDecodeError(\"Unexpected UTF-8 BOM (decode using utf-8-sig)\",\n s,0)\n else:\n if not isinstance(s,(bytes,bytearray)):\n raise TypeError(f'the JSON object must be str, bytes or bytearray, '\n f'not {s.__class__.__name__}')\n s=s.decode(detect_encoding(s),'surrogatepass')\n \n \n if \"encoding\"in kw:\n import warnings\n warnings.warn(\n \"'encoding' is ignored and deprecated. 
It will be removed in Python 3.9\",\n DeprecationWarning,\n stacklevel=2\n )\n del kw['encoding']\n \n if cls is None:\n \n \n return _json.loads(s,**kw)\n if object_hook is not None:\n kw['object_hook']=object_hook\n if object_pairs_hook is not None:\n kw['object_pairs_hook']=object_pairs_hook\n if parse_float is not None:\n kw['parse_float']=parse_float\n if parse_int is not None:\n kw['parse_int']=parse_int\n if parse_constant is not None:\n kw['parse_constant']=parse_constant\n return cls(**kw).decode(s)\n", ["_json", "json.encoder", "warnings"], 1], "json.encoder": [".py", "''\n\nimport re\n\ntry:\n from _json import encode_basestring_ascii as c_encode_basestring_ascii\nexcept ImportError:\n c_encode_basestring_ascii=None\ntry:\n from _json import encode_basestring as c_encode_basestring\nexcept ImportError:\n c_encode_basestring=None\ntry:\n from _json import make_encoder as c_make_encoder\nexcept ImportError:\n c_make_encoder=None\n \nESCAPE=re.compile(r'[\\x00-\\x1f\\\\\"\\b\\f\\n\\r\\t]')\nESCAPE_ASCII=re.compile(r'([\\\\\"]|[^\\ -~])')\nHAS_UTF8=re.compile(b'[\\x80-\\xff]')\nESCAPE_DCT={\n'\\\\':'\\\\\\\\',\n'\"':'\\\\\"',\n'\\b':'\\\\b',\n'\\f':'\\\\f',\n'\\n':'\\\\n',\n'\\r':'\\\\r',\n'\\t':'\\\\t',\n}\nfor i in range(0x20):\n ESCAPE_DCT.setdefault(chr(i),'\\\\u{0:04x}'.format(i))\n \ndel i\n\nINFINITY=float('inf')\n\ndef py_encode_basestring(s):\n ''\n\n \n def replace(match):\n return ESCAPE_DCT[match.group(0)]\n return '\"'+ESCAPE.sub(replace,s)+'\"'\n \n \nencode_basestring=(c_encode_basestring or py_encode_basestring)\n\n\ndef py_encode_basestring_ascii(s):\n ''\n\n \n def replace(match):\n s=match.group(0)\n try:\n return ESCAPE_DCT[s]\n except KeyError:\n n=ord(s)\n if n <0x10000:\n return '\\\\u{0:04x}'.format(n)\n \n else:\n \n n -=0x10000\n s1=0xd800 |((n >>10)&0x3ff)\n s2=0xdc00 |(n&0x3ff)\n return '\\\\u{0:04x}\\\\u{1:04x}'.format(s1,s2)\n return '\"'+ESCAPE_ASCII.sub(replace,s)+'\"'\n \n \nencode_basestring_ascii=(\nc_encode_basestring_ascii or py_encode_basestring_ascii)\n\nclass JSONEncoder(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n item_separator=', '\n key_separator=': '\n def __init__(self,*,skipkeys=False,ensure_ascii=True,\n check_circular=True,allow_nan=True,sort_keys=False,\n indent=None,separators=None,default=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n self.skipkeys=skipkeys\n self.ensure_ascii=ensure_ascii\n self.check_circular=check_circular\n self.allow_nan=allow_nan\n self.sort_keys=sort_keys\n self.indent=indent\n if separators is not None:\n self.item_separator,self.key_separator=separators\n elif indent is not None:\n self.item_separator=','\n if default is not None:\n self.default=default\n \n def default(self,o):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise TypeError(f'Object of type {o.__class__.__name__} '\n f'is not JSON serializable')\n \n def encode(self,o):\n ''\n\n\n\n\n\n \n \n if isinstance(o,str):\n if self.ensure_ascii:\n return encode_basestring_ascii(o)\n else:\n return encode_basestring(o)\n \n \n \n chunks=self.iterencode(o,_one_shot=True)\n if not isinstance(chunks,(list,tuple)):\n chunks=list(chunks)\n return ''.join(chunks)\n \n def iterencode(self,o,_one_shot=False):\n ''\n\n\n\n\n\n\n\n \n if self.check_circular:\n markers={}\n else:\n markers=None\n if self.ensure_ascii:\n _encoder=encode_basestring_ascii\n else:\n _encoder=encode_basestring\n \n def floatstr(o,allow_nan=self.allow_nan,\n _repr=float.__repr__,_inf=INFINITY,_neginf=-INFINITY):\n 
\n \n \n \n if o !=o:\n text='NaN'\n elif o ==_inf:\n text='Infinity'\n elif o ==_neginf:\n text='-Infinity'\n else:\n return _repr(o)\n \n if not allow_nan:\n raise ValueError(\n \"Out of range float values are not JSON compliant: \"+\n repr(o))\n \n return text\n \n \n if(_one_shot and c_make_encoder is not None\n and self.indent is None):\n _iterencode=c_make_encoder(\n markers,self.default,_encoder,self.indent,\n self.key_separator,self.item_separator,self.sort_keys,\n self.skipkeys,self.allow_nan)\n else:\n _iterencode=_make_iterencode(\n markers,self.default,_encoder,self.indent,floatstr,\n self.key_separator,self.item_separator,self.sort_keys,\n self.skipkeys,_one_shot)\n return _iterencode(o,0)\n \ndef _make_iterencode(markers,_default,_encoder,_indent,_floatstr,\n_key_separator,_item_separator,_sort_keys,_skipkeys,_one_shot,\n\nValueError=ValueError,\ndict=dict,\nfloat=float,\nid=id,\nint=int,\nisinstance=isinstance,\nlist=list,\nstr=str,\ntuple=tuple,\n_intstr=int.__repr__,\n):\n\n if _indent is not None and not isinstance(_indent,str):\n _indent=' '*_indent\n \n def _iterencode_list(lst,_current_indent_level):\n if not lst:\n yield '[]'\n return\n if markers is not None:\n markerid=id(lst)\n if markerid in markers:\n raise ValueError(\"Circular reference detected\")\n markers[markerid]=lst\n buf='['\n if _indent is not None:\n _current_indent_level +=1\n newline_indent='\\n'+_indent *_current_indent_level\n separator=_item_separator+newline_indent\n buf +=newline_indent\n else:\n newline_indent=None\n separator=_item_separator\n first=True\n for value in lst:\n if first:\n first=False\n else:\n buf=separator\n if isinstance(value,str):\n yield buf+_encoder(value)\n elif value is None:\n yield buf+'null'\n elif value is True:\n yield buf+'true'\n elif value is False:\n yield buf+'false'\n elif isinstance(value,int):\n \n \n \n yield buf+_intstr(value)\n elif isinstance(value,float):\n \n yield buf+_floatstr(value)\n else:\n yield buf\n if isinstance(value,(list,tuple)):\n chunks=_iterencode_list(value,_current_indent_level)\n elif isinstance(value,dict):\n chunks=_iterencode_dict(value,_current_indent_level)\n else:\n chunks=_iterencode(value,_current_indent_level)\n yield from chunks\n if newline_indent is not None:\n _current_indent_level -=1\n yield '\\n'+_indent *_current_indent_level\n yield ']'\n if markers is not None:\n del markers[markerid]\n \n def _iterencode_dict(dct,_current_indent_level):\n if not dct:\n yield '{}'\n return\n if markers is not None:\n markerid=id(dct)\n if markerid in markers:\n raise ValueError(\"Circular reference detected\")\n markers[markerid]=dct\n yield '{'\n if _indent is not None:\n _current_indent_level +=1\n newline_indent='\\n'+_indent *_current_indent_level\n item_separator=_item_separator+newline_indent\n yield newline_indent\n else:\n newline_indent=None\n item_separator=_item_separator\n first=True\n if _sort_keys:\n items=sorted(dct.items())\n else:\n items=dct.items()\n for key,value in items:\n if isinstance(key,str):\n pass\n \n \n elif isinstance(key,float):\n \n key=_floatstr(key)\n elif key is True:\n key='true'\n elif key is False:\n key='false'\n elif key is None:\n key='null'\n elif isinstance(key,int):\n \n key=_intstr(key)\n elif _skipkeys:\n continue\n else:\n raise TypeError(f'keys must be str, int, float, bool or None, '\n f'not {key.__class__.__name__}')\n if first:\n first=False\n else:\n yield item_separator\n yield _encoder(key)\n yield _key_separator\n if isinstance(value,str):\n yield _encoder(value)\n elif value 
is None:\n yield 'null'\n elif value is True:\n yield 'true'\n elif value is False:\n yield 'false'\n elif isinstance(value,int):\n \n yield _intstr(value)\n elif isinstance(value,float):\n \n yield _floatstr(value)\n else:\n if isinstance(value,(list,tuple)):\n chunks=_iterencode_list(value,_current_indent_level)\n elif isinstance(value,dict):\n chunks=_iterencode_dict(value,_current_indent_level)\n else:\n chunks=_iterencode(value,_current_indent_level)\n yield from chunks\n if newline_indent is not None:\n _current_indent_level -=1\n yield '\\n'+_indent *_current_indent_level\n yield '}'\n if markers is not None:\n del markers[markerid]\n \n def _iterencode(o,_current_indent_level):\n if isinstance(o,str):\n yield _encoder(o)\n elif o is None:\n yield 'null'\n elif o is True:\n yield 'true'\n elif o is False:\n yield 'false'\n elif isinstance(o,int):\n \n yield _intstr(o)\n elif isinstance(o,float):\n \n yield _floatstr(o)\n elif isinstance(o,(list,tuple)):\n yield from _iterencode_list(o,_current_indent_level)\n elif isinstance(o,dict):\n yield from _iterencode_dict(o,_current_indent_level)\n else:\n if markers is not None:\n markerid=id(o)\n if markerid in markers:\n raise ValueError(\"Circular reference detected\")\n markers[markerid]=o\n o=_default(o)\n yield from _iterencode(o,_current_indent_level)\n if markers is not None:\n del markers[markerid]\n return _iterencode\n", ["_json", "re"]], "http.cookies": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nr\"\"\"\nHere's a sample session to show how to use this module.\nAt the moment, this is the only documentation.\n\nThe Basics\n----------\n\nImporting is easy...\n\n >>> from http import cookies\n\nMost of the time you start by creating a cookie.\n\n >>> C = cookies.SimpleCookie()\n\nOnce you've created your Cookie, you can add values just as if it were\na dictionary.\n\n >>> C = cookies.SimpleCookie()\n >>> C[\"fig\"] = \"newton\"\n >>> C[\"sugar\"] = \"wafer\"\n >>> C.output()\n 'Set-Cookie: fig=newton\\r\\nSet-Cookie: sugar=wafer'\n\nNotice that the printable representation of a Cookie is the\nappropriate format for a Set-Cookie: header. This is the\ndefault behavior. You can change the header and printed\nattributes by using the .output() function\n\n >>> C = cookies.SimpleCookie()\n >>> C[\"rocky\"] = \"road\"\n >>> C[\"rocky\"][\"path\"] = \"/cookie\"\n >>> print(C.output(header=\"Cookie:\"))\n Cookie: rocky=road; Path=/cookie\n >>> print(C.output(attrs=[], header=\"Cookie:\"))\n Cookie: rocky=road\n\nThe load() method of a Cookie extracts cookies from a string. In a\nCGI script, you would use this method to extract the cookies from the\nHTTP_COOKIE environment variable.\n\n >>> C = cookies.SimpleCookie()\n >>> C.load(\"chips=ahoy; vienna=finger\")\n >>> C.output()\n 'Set-Cookie: chips=ahoy\\r\\nSet-Cookie: vienna=finger'\n\nThe load() method is darn-tootin smart about identifying cookies\nwithin a string. Escaped quotation marks, nested semicolons, and other\nsuch trickeries do not confuse it.\n\n >>> C = cookies.SimpleCookie()\n >>> C.load('keebler=\"E=everybody; L=\\\\\"Loves\\\\\"; fudge=\\\\012;\";')\n >>> print(C)\n Set-Cookie: keebler=\"E=everybody; L=\\\"Loves\\\"; fudge=\\012;\"\n\nEach element of the Cookie also supports all of the RFC 2109\nCookie attributes. 
Here's an example which sets the Path\nattribute.\n\n >>> C = cookies.SimpleCookie()\n >>> C[\"oreo\"] = \"doublestuff\"\n >>> C[\"oreo\"][\"path\"] = \"/\"\n >>> print(C)\n Set-Cookie: oreo=doublestuff; Path=/\n\nEach dictionary element has a 'value' attribute, which gives you\nback the value associated with the key.\n\n >>> C = cookies.SimpleCookie()\n >>> C[\"twix\"] = \"none for you\"\n >>> C[\"twix\"].value\n 'none for you'\n\nThe SimpleCookie expects that all values should be standard strings.\nJust to be sure, SimpleCookie invokes the str() builtin to convert\nthe value to a string, when the values are set dictionary-style.\n\n >>> C = cookies.SimpleCookie()\n >>> C[\"number\"] = 7\n >>> C[\"string\"] = \"seven\"\n >>> C[\"number\"].value\n '7'\n >>> C[\"string\"].value\n 'seven'\n >>> C.output()\n 'Set-Cookie: number=7\\r\\nSet-Cookie: string=seven'\n\nFinis.\n\"\"\"\n\n\n\n\nimport re\nimport string\nimport types\n\n__all__=[\"CookieError\",\"BaseCookie\",\"SimpleCookie\"]\n\n_nulljoin=''.join\n_semispacejoin='; '.join\n_spacejoin=' '.join\n\n\n\n\nclass CookieError(Exception):\n pass\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n_LegalChars=string.ascii_letters+string.digits+\"!#$%&'*+-.^_`|~:\"\n_UnescapedChars=_LegalChars+' ()/<=>?@[]{}'\n\n_Translator={n:'\\\\%03o'%n\nfor n in set(range(256))-set(map(ord,_UnescapedChars))}\n_Translator.update({\nord('\"'):'\\\\\"',\nord('\\\\'):'\\\\\\\\',\n})\n\n_is_legal_key=re.compile('[%s]+'%re.escape(_LegalChars)).fullmatch\n\ndef _quote(str):\n ''\n\n\n\n\n \n if str is None or _is_legal_key(str):\n return str\n else:\n return '\"'+str.translate(_Translator)+'\"'\n \n \n_OctalPatt=re.compile(r\"\\\\[0-3][0-7][0-7]\")\n_QuotePatt=re.compile(r\"[\\\\].\")\n\ndef _unquote(str):\n\n\n if str is None or len(str)<2:\n return str\n if str[0]!='\"'or str[-1]!='\"':\n return str\n \n \n \n \n \n str=str[1:-1]\n \n \n \n \n \n i=0\n n=len(str)\n res=[]\n while 0 <=i '%(self.__class__.__name__,self.OutputString())\n \n def js_output(self,attrs=None):\n \n return \"\"\"\n \n \"\"\"%(self.OutputString(attrs).replace('\"',r'\\\"'))\n \n def OutputString(self,attrs=None):\n \n \n result=[]\n append=result.append\n \n \n append(\"%s=%s\"%(self.key,self.coded_value))\n \n \n if attrs is None:\n attrs=self._reserved\n items=sorted(self.items())\n for key,value in items:\n if value ==\"\":\n continue\n if key not in attrs:\n continue\n if key ==\"expires\"and isinstance(value,int):\n append(\"%s=%s\"%(self._reserved[key],_getdate(value)))\n elif key ==\"max-age\"and isinstance(value,int):\n append(\"%s=%d\"%(self._reserved[key],value))\n elif key ==\"comment\"and isinstance(value,str):\n append(\"%s=%s\"%(self._reserved[key],_quote(value)))\n elif key in self._flags:\n if value:\n append(str(self._reserved[key]))\n else:\n append(\"%s=%s\"%(self._reserved[key],value))\n \n \n return _semispacejoin(result)\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \n \n \n \n \n \n \n \n \n \n_LegalKeyChars=r\"\\w\\d!#%&'~_`><@,:/\\$\\*\\+\\-\\.\\^\\|\\)\\(\\?\\}\\{\\=\"\n_LegalValueChars=_LegalKeyChars+r'\\[\\]'\n_CookiePattern=re.compile(r\"\"\"\n \\s* # Optional whitespace at start of cookie\n (?P # Start of group 'key'\n [\"\"\"+_LegalKeyChars+r\"\"\"]+? 
# Any word of at least one letter\n ) # End of group 'key'\n ( # Optional group: there may not be a value.\n \\s*=\\s* # Equal Sign\n (?P # Start of group 'val'\n \"(?:[^\\\\\"]|\\\\.)*\" # Any doublequoted string\n | # or\n \\w{3},\\s[\\w\\d\\s-]{9,11}\\s[\\d:]{8}\\sGMT # Special case for \"expires\" attr\n | # or\n [\"\"\"+_LegalValueChars+r\"\"\"]* # Any word or empty string\n ) # End of group 'val'\n )? # End of optional value group\n \\s* # Any number of spaces.\n (\\s+|;|$) # Ending either at space, semicolon, or EOS.\n \"\"\",re.ASCII |re.VERBOSE)\n\n\n\n\n\nclass BaseCookie(dict):\n ''\n \n def value_decode(self,val):\n ''\n\n\n\n\n \n return val,val\n \n def value_encode(self,val):\n ''\n\n\n\n \n strval=str(val)\n return strval,strval\n \n def __init__(self,input=None):\n if input:\n self.load(input)\n \n def __set(self,key,real_value,coded_value):\n ''\n M=self.get(key,Morsel())\n M.set(key,real_value,coded_value)\n dict.__setitem__(self,key,M)\n \n def __setitem__(self,key,value):\n ''\n if isinstance(value,Morsel):\n \n dict.__setitem__(self,key,value)\n else:\n rval,cval=self.value_encode(value)\n self.__set(key,rval,cval)\n \n def output(self,attrs=None,header=\"Set-Cookie:\",sep=\"\\015\\012\"):\n ''\n result=[]\n items=sorted(self.items())\n for key,value in items:\n result.append(value.output(attrs,header))\n return sep.join(result)\n \n __str__=output\n \n def __repr__(self):\n l=[]\n items=sorted(self.items())\n for key,value in items:\n l.append('%s=%s'%(key,repr(value.value)))\n return '<%s: %s>'%(self.__class__.__name__,_spacejoin(l))\n \n def js_output(self,attrs=None):\n ''\n result=[]\n items=sorted(self.items())\n for key,value in items:\n result.append(value.js_output(attrs))\n return _nulljoin(result)\n \n def load(self,rawdata):\n ''\n\n\n\n \n if isinstance(rawdata,str):\n self.__parse_string(rawdata)\n else:\n \n for key,value in rawdata.items():\n self[key]=value\n return\n \n def __parse_string(self,str,patt=_CookiePattern):\n i=0\n n=len(str)\n parsed_items=[]\n morsel_seen=False\n \n TYPE_ATTRIBUTE=1\n TYPE_KEYVALUE=2\n \n \n \n \n while 0 <=i _MAXLINE:\n raise LineTooLong(\"header line\")\n headers.append(line)\n if len(headers)>_MAXHEADERS:\n raise HTTPException(\"got more than %d headers\"%_MAXHEADERS)\n if line in(b'\\r\\n',b'\\n',b''):\n break\n return headers\n \ndef _parse_header_lines(header_lines,_class=HTTPMessage):\n ''\n\n\n\n\n\n\n\n\n \n hstring=b''.join(header_lines).decode('iso-8859-1')\n return email.parser.Parser(_class=_class).parsestr(hstring)\n \ndef parse_headers(fp,_class=HTTPMessage):\n ''\n \n headers=_read_headers(fp)\n return _parse_header_lines(headers,_class)\n \n \nclass HTTPResponse(io.BufferedIOBase):\n\n\n\n\n\n\n\n\n def __init__(self,sock,debuglevel=0,method=None,url=None):\n \n \n \n \n \n \n \n self.fp=sock.makefile(\"rb\")\n self.debuglevel=debuglevel\n self._method=method\n \n \n \n \n \n \n \n self.headers=self.msg=None\n \n \n self.version=_UNKNOWN\n self.status=_UNKNOWN\n self.reason=_UNKNOWN\n \n self.chunked=_UNKNOWN\n self.chunk_left=_UNKNOWN\n self.length=_UNKNOWN\n self.will_close=_UNKNOWN\n \n def _read_status(self):\n line=str(self.fp.readline(_MAXLINE+1),\"iso-8859-1\")\n if len(line)>_MAXLINE:\n raise LineTooLong(\"status line\")\n if self.debuglevel >0:\n print(\"reply:\",repr(line))\n if not line:\n \n \n raise RemoteDisconnected(\"Remote end closed connection without\"\n \" response\")\n try:\n version,status,reason=line.split(None,2)\n except ValueError:\n try:\n version,status=line.split(None,1)\n 
reason=\"\"\n except ValueError:\n \n version=\"\"\n if not version.startswith(\"HTTP/\"):\n self._close_conn()\n raise BadStatusLine(line)\n \n \n try:\n status=int(status)\n if status <100 or status >999:\n raise BadStatusLine(line)\n except ValueError:\n raise BadStatusLine(line)\n return version,status,reason\n \n def begin(self):\n if self.headers is not None:\n \n return\n \n \n while True:\n version,status,reason=self._read_status()\n if status !=CONTINUE:\n break\n \n skipped_headers=_read_headers(self.fp)\n if self.debuglevel >0:\n print(\"headers:\",skipped_headers)\n del skipped_headers\n \n self.code=self.status=status\n self.reason=reason.strip()\n if version in(\"HTTP/1.0\",\"HTTP/0.9\"):\n \n self.version=10\n elif version.startswith(\"HTTP/1.\"):\n self.version=11\n else:\n raise UnknownProtocol(version)\n \n self.headers=self.msg=parse_headers(self.fp)\n \n if self.debuglevel >0:\n for hdr,val in self.headers.items():\n print(\"header:\",hdr+\":\",val)\n \n \n tr_enc=self.headers.get(\"transfer-encoding\")\n if tr_enc and tr_enc.lower()==\"chunked\":\n self.chunked=True\n self.chunk_left=None\n else:\n self.chunked=False\n \n \n self.will_close=self._check_close()\n \n \n \n self.length=None\n length=self.headers.get(\"content-length\")\n if length and not self.chunked:\n try:\n self.length=int(length)\n except ValueError:\n self.length=None\n else:\n if self.length <0:\n self.length=None\n else:\n self.length=None\n \n \n if(status ==NO_CONTENT or status ==NOT_MODIFIED or\n 100 <=status <200 or\n self._method ==\"HEAD\"):\n self.length=0\n \n \n \n \n if(not self.will_close and\n not self.chunked and\n self.length is None):\n self.will_close=True\n \n def _check_close(self):\n conn=self.headers.get(\"connection\")\n if self.version ==11:\n \n \n if conn and \"close\"in conn.lower():\n return True\n return False\n \n \n \n \n \n if self.headers.get(\"keep-alive\"):\n return False\n \n \n \n if conn and \"keep-alive\"in conn.lower():\n return False\n \n \n pconn=self.headers.get(\"proxy-connection\")\n if pconn and \"keep-alive\"in pconn.lower():\n return False\n \n \n return True\n \n def _close_conn(self):\n fp=self.fp\n self.fp=None\n fp.close()\n \n def close(self):\n try:\n super().close()\n finally:\n if self.fp:\n self._close_conn()\n \n \n \n \n \n \n def flush(self):\n super().flush()\n if self.fp:\n self.fp.flush()\n \n def readable(self):\n ''\n return True\n \n \n \n def isclosed(self):\n ''\n \n \n \n \n \n \n return self.fp is None\n \n def read(self,amt=None):\n ''\n if self.fp is None:\n return b\"\"\n \n if self._method ==\"HEAD\":\n self._close_conn()\n return b\"\"\n \n if self.chunked:\n return self._read_chunked(amt)\n \n if amt is not None:\n if self.length is not None and amt >self.length:\n \n amt=self.length\n s=self.fp.read(amt)\n if not s and amt:\n \n \n self._close_conn()\n elif self.length is not None:\n self.length -=len(s)\n if not self.length:\n self._close_conn()\n return s\n else:\n \n if self.length is None:\n s=self.fp.read()\n else:\n try:\n s=self._safe_read(self.length)\n except IncompleteRead:\n self._close_conn()\n raise\n self.length=0\n self._close_conn()\n return s\n \n def readinto(self,b):\n ''\n\n \n \n if self.fp is None:\n return 0\n \n if self._method ==\"HEAD\":\n self._close_conn()\n return 0\n \n if self.chunked:\n return self._readinto_chunked(b)\n \n if self.length is not None:\n if len(b)>self.length:\n \n b=memoryview(b)[0:self.length]\n \n \n \n \n n=self.fp.readinto(b)\n if not n and b:\n \n \n self._close_conn()\n 
elif self.length is not None:\n self.length -=n\n if not self.length:\n self._close_conn()\n return n\n \n def _read_next_chunk_size(self):\n \n line=self.fp.readline(_MAXLINE+1)\n if len(line)>_MAXLINE:\n raise LineTooLong(\"chunk size\")\n i=line.find(b\";\")\n if i >=0:\n line=line[:i]\n try:\n return int(line,16)\n except ValueError:\n \n \n self._close_conn()\n raise\n \n def _read_and_discard_trailer(self):\n \n \n while True:\n line=self.fp.readline(_MAXLINE+1)\n if len(line)>_MAXLINE:\n raise LineTooLong(\"trailer line\")\n if not line:\n \n \n break\n if line in(b'\\r\\n',b'\\n',b''):\n break\n \n def _get_chunk_left(self):\n \n \n \n \n \n chunk_left=self.chunk_left\n if not chunk_left:\n if chunk_left is not None:\n \n self._safe_read(2)\n try:\n chunk_left=self._read_next_chunk_size()\n except ValueError:\n raise IncompleteRead(b'')\n if chunk_left ==0:\n \n self._read_and_discard_trailer()\n \n self._close_conn()\n chunk_left=None\n self.chunk_left=chunk_left\n return chunk_left\n \n def _read_chunked(self,amt=None):\n assert self.chunked !=_UNKNOWN\n value=[]\n try:\n while(chunk_left :=self._get_chunk_left())is not None:\n if amt is not None and amt <=chunk_left:\n value.append(self._safe_read(amt))\n self.chunk_left=chunk_left -amt\n break\n \n value.append(self._safe_read(chunk_left))\n if amt is not None:\n amt -=chunk_left\n self.chunk_left=0\n return b''.join(value)\n except IncompleteRead as exc:\n raise IncompleteRead(b''.join(value))from exc\n \n def _readinto_chunked(self,b):\n assert self.chunked !=_UNKNOWN\n total_bytes=0\n mvb=memoryview(b)\n try:\n while True:\n chunk_left=self._get_chunk_left()\n if chunk_left is None:\n return total_bytes\n \n if len(mvb)<=chunk_left:\n n=self._safe_readinto(mvb)\n self.chunk_left=chunk_left -n\n return total_bytes+n\n \n temp_mvb=mvb[:chunk_left]\n n=self._safe_readinto(temp_mvb)\n mvb=mvb[n:]\n total_bytes +=n\n self.chunk_left=0\n \n except IncompleteRead:\n raise IncompleteRead(bytes(b[0:total_bytes]))\n \n def _safe_read(self,amt):\n ''\n\n\n\n\n \n data=self.fp.read(amt)\n if len(data)self.length):\n n=self.length\n result=self.fp.read1(n)\n if not result and n:\n self._close_conn()\n elif self.length is not None:\n self.length -=len(result)\n return result\n \n def peek(self,n=-1):\n \n \n if self.fp is None or self._method ==\"HEAD\":\n return b\"\"\n if self.chunked:\n return self._peek_chunked(n)\n return self.fp.peek(n)\n \n def readline(self,limit=-1):\n if self.fp is None or self._method ==\"HEAD\":\n return b\"\"\n if self.chunked:\n \n return super().readline(limit)\n if self.length is not None and(limit <0 or limit >self.length):\n limit=self.length\n result=self.fp.readline(limit)\n if not result and limit:\n self._close_conn()\n elif self.length is not None:\n self.length -=len(result)\n return result\n \n def _read1_chunked(self,n):\n \n \n chunk_left=self._get_chunk_left()\n if chunk_left is None or n ==0:\n return b''\n if not(0 <=n <=chunk_left):\n n=chunk_left\n read=self.fp.read1(n)\n self.chunk_left -=len(read)\n if not read:\n raise IncompleteRead(b\"\")\n return read\n \n def _peek_chunked(self,n):\n \n \n try:\n chunk_left=self._get_chunk_left()\n except IncompleteRead:\n return b''\n if chunk_left is None:\n return b''\n \n \n return self.fp.peek(chunk_left)[:chunk_left]\n \n def fileno(self):\n return self.fp.fileno()\n \n def getheader(self,name,default=None):\n ''\n\n\n\n\n\n\n\n\n\n \n if self.headers is None:\n raise ResponseNotReady()\n headers=self.headers.get_all(name)or default\n if 
isinstance(headers,str)or not hasattr(headers,'__iter__'):\n return headers\n else:\n return ', '.join(headers)\n \n def getheaders(self):\n ''\n if self.headers is None:\n raise ResponseNotReady()\n return list(self.headers.items())\n \n \n \n def __iter__(self):\n return self\n \n \n \n def info(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return self.headers\n \n def geturl(self):\n ''\n\n\n\n\n\n\n\n \n return self.url\n \n def getcode(self):\n ''\n\n\n \n return self.status\n \n \ndef _create_https_context(http_version):\n\n\n context=ssl._create_default_https_context()\n \n if http_version ==11:\n context.set_alpn_protocols(['http/1.1'])\n \n if context.post_handshake_auth is not None:\n context.post_handshake_auth=True\n return context\n \n \nclass HTTPConnection:\n\n _http_vsn=11\n _http_vsn_str='HTTP/1.1'\n \n response_class=HTTPResponse\n default_port=HTTP_PORT\n auto_open=1\n debuglevel=0\n \n @staticmethod\n def _is_textIO(stream):\n ''\n \n return isinstance(stream,io.TextIOBase)\n \n @staticmethod\n def _get_content_length(body,method):\n ''\n\n\n\n\n \n if body is None:\n \n \n if method.upper()in _METHODS_EXPECTING_BODY:\n return 0\n else:\n return None\n \n if hasattr(body,'read'):\n \n return None\n \n try:\n \n mv=memoryview(body)\n return mv.nbytes\n except TypeError:\n pass\n \n if isinstance(body,str):\n return len(body)\n \n return None\n \n def __init__(self,host,port=None,timeout=socket._GLOBAL_DEFAULT_TIMEOUT,\n source_address=None,blocksize=8192):\n self.timeout=timeout\n self.source_address=source_address\n self.blocksize=blocksize\n self.sock=None\n self._buffer=[]\n self.__response=None\n self.__state=_CS_IDLE\n self._method=None\n self._tunnel_host=None\n self._tunnel_port=None\n self._tunnel_headers={}\n self._raw_proxy_headers=None\n \n (self.host,self.port)=self._get_hostport(host,port)\n \n self._validate_host(self.host)\n \n \n \n self._create_connection=socket.create_connection\n \n def set_tunnel(self,host,port=None,headers=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if self.sock:\n raise RuntimeError(\"Can't set up tunnel for established connection\")\n \n self._tunnel_host,self._tunnel_port=self._get_hostport(host,port)\n if headers:\n self._tunnel_headers=headers.copy()\n else:\n self._tunnel_headers.clear()\n \n if not any(header.lower()==\"host\"for header in self._tunnel_headers):\n encoded_host=self._tunnel_host.encode(\"idna\").decode(\"ascii\")\n self._tunnel_headers[\"Host\"]=\"%s:%d\"%(\n encoded_host,self._tunnel_port)\n \n def _get_hostport(self,host,port):\n if port is None:\n i=host.rfind(':')\n j=host.rfind(']')\n if i >j:\n try:\n port=int(host[i+1:])\n except ValueError:\n if host[i+1:]==\"\":\n port=self.default_port\n else:\n raise InvalidURL(\"nonnumeric port: '%s'\"%host[i+1:])\n host=host[:i]\n else:\n port=self.default_port\n if host and host[0]=='['and host[-1]==']':\n host=host[1:-1]\n \n return(host,port)\n \n def set_debuglevel(self,level):\n self.debuglevel=level\n \n def _tunnel(self):\n connect=b\"CONNECT %s:%d %s\\r\\n\"%(\n self._tunnel_host.encode(\"idna\"),self._tunnel_port,\n self._http_vsn_str.encode(\"ascii\"))\n headers=[connect]\n for header,value in self._tunnel_headers.items():\n headers.append(f\"{header}: {value}\\r\\n\".encode(\"latin-1\"))\n headers.append(b\"\\r\\n\")\n \n \n \n self.send(b\"\".join(headers))\n del headers\n \n response=self.response_class(self.sock,method=self._method)\n try:\n (version,code,message)=response._read_status()\n \n 
self._raw_proxy_headers=_read_headers(response.fp)\n \n if self.debuglevel >0:\n for header in self._raw_proxy_headers:\n print('header:',header.decode())\n \n if code !=http.HTTPStatus.OK:\n self.close()\n raise OSError(f\"Tunnel connection failed: {code} {message.strip()}\")\n \n finally:\n response.close()\n \n def get_proxy_response_headers(self):\n ''\n\n\n\n\n\n \n return(\n _parse_header_lines(self._raw_proxy_headers)\n if self._raw_proxy_headers is not None\n else None\n )\n \n def connect(self):\n ''\n sys.audit(\"http.client.connect\",self,self.host,self.port)\n self.sock=self._create_connection(\n (self.host,self.port),self.timeout,self.source_address)\n \n try:\n self.sock.setsockopt(socket.IPPROTO_TCP,socket.TCP_NODELAY,1)\n except OSError as e:\n if e.errno !=errno.ENOPROTOOPT:\n raise\n \n if self._tunnel_host:\n self._tunnel()\n \n def close(self):\n ''\n self.__state=_CS_IDLE\n try:\n sock=self.sock\n if sock:\n self.sock=None\n sock.close()\n finally:\n response=self.__response\n if response:\n self.__response=None\n response.close()\n \n def send(self,data):\n ''\n\n\n \n \n if self.sock is None:\n if self.auto_open:\n self.connect()\n else:\n raise NotConnected()\n \n if self.debuglevel >0:\n print(\"send:\",repr(data))\n if hasattr(data,\"read\"):\n if self.debuglevel >0:\n print(\"sending a readable\")\n encode=self._is_textIO(data)\n if encode and self.debuglevel >0:\n print(\"encoding file using iso-8859-1\")\n while datablock :=data.read(self.blocksize):\n if encode:\n datablock=datablock.encode(\"iso-8859-1\")\n sys.audit(\"http.client.send\",self,datablock)\n self.sock.sendall(datablock)\n return\n sys.audit(\"http.client.send\",self,data)\n try:\n self.sock.sendall(data)\n except TypeError:\n if isinstance(data,collections.abc.Iterable):\n for d in data:\n self.sock.sendall(d)\n else:\n raise TypeError(\"data should be a bytes-like object \"\n \"or an iterable, got %r\"%type(data))\n \n def _output(self,s):\n ''\n\n\n \n self._buffer.append(s)\n \n def _read_readable(self,readable):\n if self.debuglevel >0:\n print(\"reading a readable\")\n encode=self._is_textIO(readable)\n if encode and self.debuglevel >0:\n print(\"encoding file using iso-8859-1\")\n while datablock :=readable.read(self.blocksize):\n if encode:\n datablock=datablock.encode(\"iso-8859-1\")\n yield datablock\n \n def _send_output(self,message_body=None,encode_chunked=False):\n ''\n\n\n\n \n self._buffer.extend((b\"\",b\"\"))\n msg=b\"\\r\\n\".join(self._buffer)\n del self._buffer[:]\n self.send(msg)\n \n if message_body is not None:\n \n \n if hasattr(message_body,'read'):\n \n \n \n chunks=self._read_readable(message_body)\n else:\n try:\n \n \n \n \n memoryview(message_body)\n except TypeError:\n try:\n chunks=iter(message_body)\n except TypeError:\n raise TypeError(\"message_body should be a bytes-like \"\n \"object or an iterable, got %r\"\n %type(message_body))\n else:\n \n \n chunks=(message_body,)\n \n for chunk in chunks:\n if not chunk:\n if self.debuglevel >0:\n print('Zero length chunk ignored')\n continue\n \n if encode_chunked and self._http_vsn ==11:\n \n chunk=f'{len(chunk):X}\\r\\n'.encode('ascii')+chunk\\\n +b'\\r\\n'\n self.send(chunk)\n \n if encode_chunked and self._http_vsn ==11:\n \n self.send(b'0\\r\\n\\r\\n')\n \n def putrequest(self,method,url,skip_host=False,\n skip_accept_encoding=False):\n ''\n\n\n\n\n\n\n \n \n \n if self.__response and self.__response.isclosed():\n self.__response=None\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.__state 
==_CS_IDLE:\n self.__state=_CS_REQ_STARTED\n else:\n raise CannotSendRequest(self.__state)\n \n self._validate_method(method)\n \n \n self._method=method\n \n url=url or '/'\n self._validate_path(url)\n \n request='%s %s %s'%(method,url,self._http_vsn_str)\n \n self._output(self._encode_request(request))\n \n if self._http_vsn ==11:\n \n \n if not skip_host:\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n netloc=''\n if url.startswith('http'):\n nil,netloc,nil,nil,nil=urlsplit(url)\n \n if netloc:\n try:\n netloc_enc=netloc.encode(\"ascii\")\n except UnicodeEncodeError:\n netloc_enc=netloc.encode(\"idna\")\n self.putheader('Host',netloc_enc)\n else:\n if self._tunnel_host:\n host=self._tunnel_host\n port=self._tunnel_port\n else:\n host=self.host\n port=self.port\n \n try:\n host_enc=host.encode(\"ascii\")\n except UnicodeEncodeError:\n host_enc=host.encode(\"idna\")\n \n \n \n \n if host.find(':')>=0:\n host_enc=b'['+host_enc+b']'\n \n if port ==self.default_port:\n self.putheader('Host',host_enc)\n else:\n host_enc=host_enc.decode(\"ascii\")\n self.putheader('Host',\"%s:%s\"%(host_enc,port))\n \n \n \n \n \n \n \n \n \n if not skip_accept_encoding:\n self.putheader('Accept-Encoding','identity')\n \n \n \n \n \n \n \n \n \n else:\n \n pass\n \n def _encode_request(self,request):\n \n return request.encode('ascii')\n \n def _validate_method(self,method):\n ''\n \n match=_contains_disallowed_method_pchar_re.search(method)\n if match:\n raise ValueError(\n f\"method can't contain control characters. {method !r} \"\n f\"(found at least {match.group()!r})\")\n \n def _validate_path(self,url):\n ''\n \n match=_contains_disallowed_url_pchar_re.search(url)\n if match:\n raise InvalidURL(f\"URL can't contain control characters. {url !r} \"\n f\"(found at least {match.group()!r})\")\n \n def _validate_host(self,host):\n ''\n \n match=_contains_disallowed_url_pchar_re.search(host)\n if match:\n raise InvalidURL(f\"URL can't contain control characters. 
{host !r} \"\n f\"(found at least {match.group()!r})\")\n \n def putheader(self,header,*values):\n ''\n\n\n \n if self.__state !=_CS_REQ_STARTED:\n raise CannotSendHeader()\n \n if hasattr(header,'encode'):\n header=header.encode('ascii')\n \n if not _is_legal_header_name(header):\n raise ValueError('Invalid header name %r'%(header,))\n \n values=list(values)\n for i,one_value in enumerate(values):\n if hasattr(one_value,'encode'):\n values[i]=one_value.encode('latin-1')\n elif isinstance(one_value,int):\n values[i]=str(one_value).encode('ascii')\n \n if _is_illegal_header_value(values[i]):\n raise ValueError('Invalid header value %r'%(values[i],))\n \n value=b'\\r\\n\\t'.join(values)\n header=header+b': '+value\n self._output(header)\n \n def endheaders(self,message_body=None,*,encode_chunked=False):\n ''\n\n\n\n\n \n if self.__state ==_CS_REQ_STARTED:\n self.__state=_CS_REQ_SENT\n else:\n raise CannotSendHeader()\n self._send_output(message_body,encode_chunked=encode_chunked)\n \n def request(self,method,url,body=None,headers={},*,\n encode_chunked=False):\n ''\n self._send_request(method,url,body,headers,encode_chunked)\n \n def _send_request(self,method,url,body,headers,encode_chunked):\n \n header_names=frozenset(k.lower()for k in headers)\n skips={}\n if 'host'in header_names:\n skips['skip_host']=1\n if 'accept-encoding'in header_names:\n skips['skip_accept_encoding']=1\n \n self.putrequest(method,url,**skips)\n \n \n \n \n \n \n \n \n if 'content-length'not in header_names:\n \n \n \n if 'transfer-encoding'not in header_names:\n \n \n encode_chunked=False\n content_length=self._get_content_length(body,method)\n if content_length is None:\n if body is not None:\n if self.debuglevel >0:\n print('Unable to determine size of %r'%body)\n encode_chunked=True\n self.putheader('Transfer-Encoding','chunked')\n else:\n self.putheader('Content-Length',str(content_length))\n else:\n encode_chunked=False\n \n for hdr,value in headers.items():\n self.putheader(hdr,value)\n if isinstance(body,str):\n \n \n body=_encode(body,'body')\n self.endheaders(body,encode_chunked=encode_chunked)\n \n def getresponse(self):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n if self.__response and self.__response.isclosed():\n self.__response=None\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.__state !=_CS_REQ_SENT or self.__response:\n raise ResponseNotReady(self.__state)\n \n if self.debuglevel >0:\n response=self.response_class(self.sock,self.debuglevel,\n method=self._method)\n else:\n response=self.response_class(self.sock,method=self._method)\n \n try:\n try:\n response.begin()\n except ConnectionError:\n self.close()\n raise\n assert response.will_close !=_UNKNOWN\n self.__state=_CS_IDLE\n \n if response.will_close:\n \n self.close()\n else:\n \n self.__response=response\n \n return response\n except:\n response.close()\n raise\n \ntry:\n import ssl\nexcept ImportError:\n pass\nelse:\n class HTTPSConnection(HTTPConnection):\n ''\n \n default_port=HTTPS_PORT\n \n def __init__(self,host,port=None,\n *,timeout=socket._GLOBAL_DEFAULT_TIMEOUT,\n source_address=None,context=None,blocksize=8192):\n super(HTTPSConnection,self).__init__(host,port,timeout,\n source_address,\n blocksize=blocksize)\n if context is None:\n context=_create_https_context(self._http_vsn)\n self._context=context\n \n def connect(self):\n ''\n \n super().connect()\n \n if self._tunnel_host:\n server_hostname=self._tunnel_host\n else:\n server_hostname=self.host\n \n self.sock=self._context.wrap_socket(self.sock,\n 
server_hostname=server_hostname)\n \n __all__.append(\"HTTPSConnection\")\n \nclass HTTPException(Exception):\n\n\n pass\n \nclass NotConnected(HTTPException):\n pass\n \nclass InvalidURL(HTTPException):\n pass\n \nclass UnknownProtocol(HTTPException):\n def __init__(self,version):\n self.args=version,\n self.version=version\n \nclass UnknownTransferEncoding(HTTPException):\n pass\n \nclass UnimplementedFileMode(HTTPException):\n pass\n \nclass IncompleteRead(HTTPException):\n def __init__(self,partial,expected=None):\n self.args=partial,\n self.partial=partial\n self.expected=expected\n def __repr__(self):\n if self.expected is not None:\n e=', %i more expected'%self.expected\n else:\n e=''\n return '%s(%i bytes read%s)'%(self.__class__.__name__,\n len(self.partial),e)\n __str__=object.__str__\n \nclass ImproperConnectionState(HTTPException):\n pass\n \nclass CannotSendRequest(ImproperConnectionState):\n pass\n \nclass CannotSendHeader(ImproperConnectionState):\n pass\n \nclass ResponseNotReady(ImproperConnectionState):\n pass\n \nclass BadStatusLine(HTTPException):\n def __init__(self,line):\n if not line:\n line=repr(line)\n self.args=line,\n self.line=line\n \nclass LineTooLong(HTTPException):\n def __init__(self,line_type):\n HTTPException.__init__(self,\"got more than %d bytes when reading %s\"\n %(_MAXLINE,line_type))\n \nclass RemoteDisconnected(ConnectionResetError,BadStatusLine):\n def __init__(self,*pos,**kw):\n BadStatusLine.__init__(self,\"\")\n ConnectionResetError.__init__(self,*pos,**kw)\n \n \nerror=HTTPException\n", ["collections.abc", "email.message", "email.parser", "errno", "http", "io", "re", "socket", "ssl", "sys", "urllib.parse"]], "http": [".py", "from enum import StrEnum,IntEnum,_simple_enum\n\n__all__=['HTTPStatus','HTTPMethod']\n\n\n@_simple_enum(IntEnum)\nclass HTTPStatus:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __new__(cls,value,phrase,description=''):\n obj=int.__new__(cls,value)\n obj._value_=value\n \n obj.phrase=phrase\n obj.description=description\n return obj\n \n @property\n def is_informational(self):\n return 100 <=self <=199\n \n @property\n def is_success(self):\n return 200 <=self <=299\n \n @property\n def is_redirection(self):\n return 300 <=self <=399\n \n @property\n def is_client_error(self):\n return 400 <=self <=499\n \n @property\n def is_server_error(self):\n return 500 <=self <=599\n \n \n CONTINUE=100,'Continue','Request received, please continue'\n SWITCHING_PROTOCOLS=(101,'Switching Protocols',\n 'Switching to new protocol; obey Upgrade header')\n PROCESSING=102,'Processing'\n EARLY_HINTS=103,'Early Hints'\n \n \n OK=200,'OK','Request fulfilled, document follows'\n CREATED=201,'Created','Document created, URL follows'\n ACCEPTED=(202,'Accepted',\n 'Request accepted, processing continues off-line')\n NON_AUTHORITATIVE_INFORMATION=(203,\n 'Non-Authoritative Information','Request fulfilled from cache')\n NO_CONTENT=204,'No Content','Request fulfilled, nothing follows'\n RESET_CONTENT=205,'Reset Content','Clear input form for further input'\n PARTIAL_CONTENT=206,'Partial Content','Partial content follows'\n MULTI_STATUS=207,'Multi-Status'\n ALREADY_REPORTED=208,'Already Reported'\n IM_USED=226,'IM Used'\n \n \n MULTIPLE_CHOICES=(300,'Multiple Choices',\n 'Object has several resources -- see URI list')\n MOVED_PERMANENTLY=(301,'Moved Permanently',\n 'Object moved permanently -- see URI list')\n FOUND=302,'Found','Object moved temporarily -- see URI list'\n SEE_OTHER=303,'See Other','Object moved -- see Method and URL list'\n 
NOT_MODIFIED=(304,'Not Modified',\n 'Document has not changed since given time')\n USE_PROXY=(305,'Use Proxy',\n 'You must use proxy specified in Location to access this resource')\n TEMPORARY_REDIRECT=(307,'Temporary Redirect',\n 'Object moved temporarily -- see URI list')\n PERMANENT_REDIRECT=(308,'Permanent Redirect',\n 'Object moved permanently -- see URI list')\n \n \n BAD_REQUEST=(400,'Bad Request',\n 'Bad request syntax or unsupported method')\n UNAUTHORIZED=(401,'Unauthorized',\n 'No permission -- see authorization schemes')\n PAYMENT_REQUIRED=(402,'Payment Required',\n 'No payment -- see charging schemes')\n FORBIDDEN=(403,'Forbidden',\n 'Request forbidden -- authorization will not help')\n NOT_FOUND=(404,'Not Found',\n 'Nothing matches the given URI')\n METHOD_NOT_ALLOWED=(405,'Method Not Allowed',\n 'Specified method is invalid for this resource')\n NOT_ACCEPTABLE=(406,'Not Acceptable',\n 'URI not available in preferred format')\n PROXY_AUTHENTICATION_REQUIRED=(407,\n 'Proxy Authentication Required',\n 'You must authenticate with this proxy before proceeding')\n REQUEST_TIMEOUT=(408,'Request Timeout',\n 'Request timed out; try again later')\n CONFLICT=409,'Conflict','Request conflict'\n GONE=(410,'Gone',\n 'URI no longer exists and has been permanently removed')\n LENGTH_REQUIRED=(411,'Length Required',\n 'Client must specify Content-Length')\n PRECONDITION_FAILED=(412,'Precondition Failed',\n 'Precondition in headers is false')\n REQUEST_ENTITY_TOO_LARGE=(413,'Request Entity Too Large',\n 'Entity is too large')\n REQUEST_URI_TOO_LONG=(414,'Request-URI Too Long',\n 'URI is too long')\n UNSUPPORTED_MEDIA_TYPE=(415,'Unsupported Media Type',\n 'Entity body in unsupported format')\n REQUESTED_RANGE_NOT_SATISFIABLE=(416,\n 'Requested Range Not Satisfiable',\n 'Cannot satisfy request range')\n EXPECTATION_FAILED=(417,'Expectation Failed',\n 'Expect condition could not be satisfied')\n IM_A_TEAPOT=(418,'I\\'m a Teapot',\n 'Server refuses to brew coffee because it is a teapot.')\n MISDIRECTED_REQUEST=(421,'Misdirected Request',\n 'Server is not able to produce a response')\n UNPROCESSABLE_ENTITY=422,'Unprocessable Entity'\n LOCKED=423,'Locked'\n FAILED_DEPENDENCY=424,'Failed Dependency'\n TOO_EARLY=425,'Too Early'\n UPGRADE_REQUIRED=426,'Upgrade Required'\n PRECONDITION_REQUIRED=(428,'Precondition Required',\n 'The origin server requires the request to be conditional')\n TOO_MANY_REQUESTS=(429,'Too Many Requests',\n 'The user has sent too many requests in '\n 'a given amount of time (\"rate limiting\")')\n REQUEST_HEADER_FIELDS_TOO_LARGE=(431,\n 'Request Header Fields Too Large',\n 'The server is unwilling to process the request because its header '\n 'fields are too large')\n UNAVAILABLE_FOR_LEGAL_REASONS=(451,\n 'Unavailable For Legal Reasons',\n 'The server is denying access to the '\n 'resource as a consequence of a legal demand')\n \n \n INTERNAL_SERVER_ERROR=(500,'Internal Server Error',\n 'Server got itself in trouble')\n NOT_IMPLEMENTED=(501,'Not Implemented',\n 'Server does not support this operation')\n BAD_GATEWAY=(502,'Bad Gateway',\n 'Invalid responses from another server/proxy')\n SERVICE_UNAVAILABLE=(503,'Service Unavailable',\n 'The server cannot process the request due to a high load')\n GATEWAY_TIMEOUT=(504,'Gateway Timeout',\n 'The gateway server did not receive a timely response')\n HTTP_VERSION_NOT_SUPPORTED=(505,'HTTP Version Not Supported',\n 'Cannot fulfill request')\n VARIANT_ALSO_NEGOTIATES=506,'Variant Also Negotiates'\n INSUFFICIENT_STORAGE=507,'Insufficient 
Storage'\n LOOP_DETECTED=508,'Loop Detected'\n NOT_EXTENDED=510,'Not Extended'\n NETWORK_AUTHENTICATION_REQUIRED=(511,\n 'Network Authentication Required',\n 'The client needs to authenticate to gain network access')\n \n \n@_simple_enum(StrEnum)\nclass HTTPMethod:\n ''\n\n\n\n\n\n \n def __new__(cls,value,description):\n obj=str.__new__(cls,value)\n obj._value_=value\n obj.description=description\n return obj\n \n def __repr__(self):\n return \"<%s.%s>\"%(self.__class__.__name__,self._name_)\n \n CONNECT='CONNECT','Establish a connection to the server.'\n DELETE='DELETE','Remove the target.'\n GET='GET','Retrieve the target.'\n HEAD='HEAD','Same as GET, but only retrieve the status line and header section.'\n OPTIONS='OPTIONS','Describe the communication options for the target.'\n PATCH='PATCH','Apply partial modifications to a target.'\n POST='POST','Perform target-specific processing with the request payload.'\n PUT='PUT','Replace the target with the request payload.'\n TRACE='TRACE','Perform a message loop-back test along the path to the target.'\n", ["enum"], 1], "concurrent": [".py", "", [], 1], "concurrent.futures._base": [".py", "\n\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nimport collections\nimport logging\nimport threading\nimport time\nimport types\n\nFIRST_COMPLETED='FIRST_COMPLETED'\nFIRST_EXCEPTION='FIRST_EXCEPTION'\nALL_COMPLETED='ALL_COMPLETED'\n_AS_COMPLETED='_AS_COMPLETED'\n\n\nPENDING='PENDING'\nRUNNING='RUNNING'\n\nCANCELLED='CANCELLED'\n\nCANCELLED_AND_NOTIFIED='CANCELLED_AND_NOTIFIED'\nFINISHED='FINISHED'\n\n_FUTURE_STATES=[\nPENDING,\nRUNNING,\nCANCELLED,\nCANCELLED_AND_NOTIFIED,\nFINISHED\n]\n\n_STATE_TO_DESCRIPTION_MAP={\nPENDING:\"pending\",\nRUNNING:\"running\",\nCANCELLED:\"cancelled\",\nCANCELLED_AND_NOTIFIED:\"cancelled\",\nFINISHED:\"finished\"\n}\n\n\nLOGGER=logging.getLogger(\"concurrent.futures\")\n\nclass Error(Exception):\n ''\n pass\n \nclass CancelledError(Error):\n ''\n pass\n \nTimeoutError=TimeoutError\n\nclass InvalidStateError(Error):\n ''\n pass\n \nclass _Waiter(object):\n ''\n def __init__(self):\n self.event=threading.Event()\n self.finished_futures=[]\n \n def add_result(self,future):\n self.finished_futures.append(future)\n \n def add_exception(self,future):\n self.finished_futures.append(future)\n \n def add_cancelled(self,future):\n self.finished_futures.append(future)\n \nclass _AsCompletedWaiter(_Waiter):\n ''\n \n def __init__(self):\n super(_AsCompletedWaiter,self).__init__()\n self.lock=threading.Lock()\n \n def add_result(self,future):\n with self.lock:\n super(_AsCompletedWaiter,self).add_result(future)\n self.event.set()\n \n def add_exception(self,future):\n with self.lock:\n super(_AsCompletedWaiter,self).add_exception(future)\n self.event.set()\n \n def add_cancelled(self,future):\n with self.lock:\n super(_AsCompletedWaiter,self).add_cancelled(future)\n self.event.set()\n \nclass _FirstCompletedWaiter(_Waiter):\n ''\n \n def add_result(self,future):\n super().add_result(future)\n self.event.set()\n \n def add_exception(self,future):\n super().add_exception(future)\n self.event.set()\n \n def add_cancelled(self,future):\n super().add_cancelled(future)\n self.event.set()\n \nclass _AllCompletedWaiter(_Waiter):\n ''\n \n def __init__(self,num_pending_calls,stop_on_exception):\n self.num_pending_calls=num_pending_calls\n self.stop_on_exception=stop_on_exception\n self.lock=threading.Lock()\n super().__init__()\n \n def _decrement_pending_calls(self):\n with self.lock:\n self.num_pending_calls -=1\n if not 
self.num_pending_calls:\n self.event.set()\n \n def add_result(self,future):\n super().add_result(future)\n self._decrement_pending_calls()\n \n def add_exception(self,future):\n super().add_exception(future)\n if self.stop_on_exception:\n self.event.set()\n else:\n self._decrement_pending_calls()\n \n def add_cancelled(self,future):\n super().add_cancelled(future)\n self._decrement_pending_calls()\n \nclass _AcquireFutures(object):\n ''\n \n def __init__(self,futures):\n self.futures=sorted(futures,key=id)\n \n def __enter__(self):\n for future in self.futures:\n future._condition.acquire()\n \n def __exit__(self,*args):\n for future in self.futures:\n future._condition.release()\n \ndef _create_and_install_waiters(fs,return_when):\n if return_when ==_AS_COMPLETED:\n waiter=_AsCompletedWaiter()\n elif return_when ==FIRST_COMPLETED:\n waiter=_FirstCompletedWaiter()\n else:\n pending_count=sum(\n f._state not in[CANCELLED_AND_NOTIFIED,FINISHED]for f in fs)\n \n if return_when ==FIRST_EXCEPTION:\n waiter=_AllCompletedWaiter(pending_count,stop_on_exception=True)\n elif return_when ==ALL_COMPLETED:\n waiter=_AllCompletedWaiter(pending_count,stop_on_exception=False)\n else:\n raise ValueError(\"Invalid return condition: %r\"%return_when)\n \n for f in fs:\n f._waiters.append(waiter)\n \n return waiter\n \n \ndef _yield_finished_futures(fs,waiter,ref_collect):\n ''\n\n\n\n\n\n\n\n\n \n while fs:\n f=fs[-1]\n for futures_set in ref_collect:\n futures_set.remove(f)\n with f._condition:\n f._waiters.remove(waiter)\n del f\n \n yield fs.pop()\n \n \ndef as_completed(fs,timeout=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if timeout is not None:\n end_time=timeout+time.monotonic()\n \n fs=set(fs)\n total_futures=len(fs)\n with _AcquireFutures(fs):\n finished=set(\n f for f in fs\n if f._state in[CANCELLED_AND_NOTIFIED,FINISHED])\n pending=fs -finished\n waiter=_create_and_install_waiters(fs,_AS_COMPLETED)\n finished=list(finished)\n try:\n yield from _yield_finished_futures(finished,waiter,\n ref_collect=(fs,))\n \n while pending:\n if timeout is None:\n wait_timeout=None\n else:\n wait_timeout=end_time -time.monotonic()\n if wait_timeout <0:\n raise TimeoutError(\n '%d (of %d) futures unfinished'%(\n len(pending),total_futures))\n \n waiter.event.wait(wait_timeout)\n \n with waiter.lock:\n finished=waiter.finished_futures\n waiter.finished_futures=[]\n waiter.event.clear()\n \n \n finished.reverse()\n yield from _yield_finished_futures(finished,waiter,\n ref_collect=(fs,pending))\n \n finally:\n \n for f in fs:\n with f._condition:\n f._waiters.remove(waiter)\n \nDoneAndNotDoneFutures=collections.namedtuple(\n'DoneAndNotDoneFutures','done not_done')\ndef wait(fs,timeout=None,return_when=ALL_COMPLETED):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n fs=set(fs)\n with _AcquireFutures(fs):\n done={f for f in fs\n if f._state in[CANCELLED_AND_NOTIFIED,FINISHED]}\n not_done=fs -done\n if(return_when ==FIRST_COMPLETED)and done:\n return DoneAndNotDoneFutures(done,not_done)\n elif(return_when ==FIRST_EXCEPTION)and done:\n if any(f for f in done\n if not f.cancelled()and f.exception()is not None):\n return DoneAndNotDoneFutures(done,not_done)\n \n if len(done)==len(fs):\n return DoneAndNotDoneFutures(done,not_done)\n \n waiter=_create_and_install_waiters(fs,return_when)\n \n waiter.event.wait(timeout)\n for f in fs:\n with f._condition:\n f._waiters.remove(waiter)\n \n done.update(waiter.finished_futures)\n return DoneAndNotDoneFutures(done,fs -done)\n \n \ndef 
_result_or_cancel(fut,timeout=None):\n try:\n try:\n return fut.result(timeout)\n finally:\n fut.cancel()\n finally:\n \n del fut\n \n \nclass Future(object):\n ''\n \n def __init__(self):\n ''\n self._condition=threading.Condition()\n self._state=PENDING\n self._result=None\n self._exception=None\n self._waiters=[]\n self._done_callbacks=[]\n \n def _invoke_callbacks(self):\n for callback in self._done_callbacks:\n try:\n callback(self)\n except Exception:\n LOGGER.exception('exception calling callback for %r',self)\n \n def __repr__(self):\n with self._condition:\n if self._state ==FINISHED:\n if self._exception:\n return '<%s at %#x state=%s raised %s>'%(\n self.__class__.__name__,\n id(self),\n _STATE_TO_DESCRIPTION_MAP[self._state],\n self._exception.__class__.__name__)\n else:\n return '<%s at %#x state=%s returned %s>'%(\n self.__class__.__name__,\n id(self),\n _STATE_TO_DESCRIPTION_MAP[self._state],\n self._result.__class__.__name__)\n return '<%s at %#x state=%s>'%(\n self.__class__.__name__,\n id(self),\n _STATE_TO_DESCRIPTION_MAP[self._state])\n \n def cancel(self):\n ''\n\n\n\n \n with self._condition:\n if self._state in[RUNNING,FINISHED]:\n return False\n \n if self._state in[CANCELLED,CANCELLED_AND_NOTIFIED]:\n return True\n \n self._state=CANCELLED\n self._condition.notify_all()\n \n self._invoke_callbacks()\n return True\n \n def cancelled(self):\n ''\n with self._condition:\n return self._state in[CANCELLED,CANCELLED_AND_NOTIFIED]\n \n def running(self):\n ''\n with self._condition:\n return self._state ==RUNNING\n \n def done(self):\n ''\n with self._condition:\n return self._state in[CANCELLED,CANCELLED_AND_NOTIFIED,FINISHED]\n \n def __get_result(self):\n if self._exception:\n try:\n raise self._exception\n finally:\n \n self=None\n else:\n return self._result\n \n def add_done_callback(self,fn):\n ''\n\n\n\n\n\n\n\n\n \n with self._condition:\n if self._state not in[CANCELLED,CANCELLED_AND_NOTIFIED,FINISHED]:\n self._done_callbacks.append(fn)\n return\n try:\n fn(self)\n except Exception:\n LOGGER.exception('exception calling callback for %r',self)\n \n def result(self,timeout=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try:\n with self._condition:\n if self._state in[CANCELLED,CANCELLED_AND_NOTIFIED]:\n raise CancelledError()\n elif self._state ==FINISHED:\n return self.__get_result()\n \n self._condition.wait(timeout)\n \n if self._state in[CANCELLED,CANCELLED_AND_NOTIFIED]:\n raise CancelledError()\n elif self._state ==FINISHED:\n return self.__get_result()\n else:\n raise TimeoutError()\n finally:\n \n self=None\n \n def exception(self,timeout=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n with self._condition:\n if self._state in[CANCELLED,CANCELLED_AND_NOTIFIED]:\n raise CancelledError()\n elif self._state ==FINISHED:\n return self._exception\n \n self._condition.wait(timeout)\n \n if self._state in[CANCELLED,CANCELLED_AND_NOTIFIED]:\n raise CancelledError()\n elif self._state ==FINISHED:\n return self._exception\n else:\n raise TimeoutError()\n \n \n def set_running_or_notify_cancel(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n with self._condition:\n if self._state ==CANCELLED:\n self._state=CANCELLED_AND_NOTIFIED\n for waiter in self._waiters:\n waiter.add_cancelled(self)\n \n \n return False\n elif self._state ==PENDING:\n self._state=RUNNING\n return True\n else:\n LOGGER.critical('Future %s in unexpected state: %s',\n id(self),\n self._state)\n raise RuntimeError('Future in unexpected state')\n \n def set_result(self,result):\n ''\n\n\n \n with 
self._condition:\n if self._state in{CANCELLED,CANCELLED_AND_NOTIFIED,FINISHED}:\n raise InvalidStateError('{}: {!r}'.format(self._state,self))\n self._result=result\n self._state=FINISHED\n for waiter in self._waiters:\n waiter.add_result(self)\n self._condition.notify_all()\n self._invoke_callbacks()\n \n def set_exception(self,exception):\n ''\n\n\n \n with self._condition:\n if self._state in{CANCELLED,CANCELLED_AND_NOTIFIED,FINISHED}:\n raise InvalidStateError('{}: {!r}'.format(self._state,self))\n self._exception=exception\n self._state=FINISHED\n for waiter in self._waiters:\n waiter.add_exception(self)\n self._condition.notify_all()\n self._invoke_callbacks()\n \n __class_getitem__=classmethod(types.GenericAlias)\n \nclass Executor(object):\n ''\n \n def submit(self,fn,/,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n raise NotImplementedError()\n \n def map(self,fn,*iterables,timeout=None,chunksize=1):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if timeout is not None:\n end_time=timeout+time.monotonic()\n \n fs=[self.submit(fn,*args)for args in zip(*iterables)]\n \n \n \n def result_iterator():\n try:\n \n fs.reverse()\n while fs:\n \n if timeout is None:\n yield _result_or_cancel(fs.pop())\n else:\n yield _result_or_cancel(fs.pop(),end_time -time.monotonic())\n finally:\n for future in fs:\n future.cancel()\n return result_iterator()\n \n def shutdown(self,wait=True,*,cancel_futures=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n pass\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_val,exc_tb):\n self.shutdown(wait=True)\n return False\n \n \nclass BrokenExecutor(RuntimeError):\n ''\n\n \n", ["collections", "logging", "threading", "time", "types"]], "concurrent.futures.thread": [".py", "\n\n\n\"\"\"Implements ThreadPoolExecutor.\"\"\"\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nfrom concurrent.futures import _base\nimport itertools\nimport queue\nimport threading\nimport types\nimport weakref\nimport os\n\n\n_threads_queues=weakref.WeakKeyDictionary()\n_shutdown=False\n\n\n_global_shutdown_lock=threading.Lock()\n\ndef _python_exit():\n global _shutdown\n with _global_shutdown_lock:\n _shutdown=True\n items=list(_threads_queues.items())\n for t,q in items:\n q.put(None)\n for t,q in items:\n t.join()\n \n \n \n \n \nthreading._register_atexit(_python_exit)\n\n\nif hasattr(os,'register_at_fork'):\n os.register_at_fork(before=_global_shutdown_lock.acquire,\n after_in_child=_global_shutdown_lock._at_fork_reinit,\n after_in_parent=_global_shutdown_lock.release)\n \n \nclass _WorkItem:\n def __init__(self,future,fn,args,kwargs):\n self.future=future\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \n def run(self):\n if not self.future.set_running_or_notify_cancel():\n return\n \n try:\n result=self.fn(*self.args,**self.kwargs)\n except BaseException as exc:\n self.future.set_exception(exc)\n \n self=None\n else:\n self.future.set_result(result)\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \ndef _worker(executor_reference,work_queue,initializer,initargs):\n if initializer is not None:\n try:\n initializer(*initargs)\n except BaseException:\n _base.LOGGER.critical('Exception in initializer:',exc_info=True)\n executor=executor_reference()\n if executor is not None:\n executor._initializer_failed()\n return\n try:\n while True:\n try:\n work_item=work_queue.get_nowait()\n except queue.Empty:\n \n executor=executor_reference()\n if executor is not None:\n executor._idle_semaphore.release()\n del executor\n work_item=work_queue.get(block=True)\n 
\n if work_item is not None:\n work_item.run()\n \n del work_item\n continue\n \n executor=executor_reference()\n \n \n \n \n if _shutdown or executor is None or executor._shutdown:\n \n \n if executor is not None:\n executor._shutdown=True\n \n work_queue.put(None)\n return\n del executor\n except BaseException:\n _base.LOGGER.critical('Exception in worker',exc_info=True)\n \n \nclass BrokenThreadPool(_base.BrokenExecutor):\n ''\n\n \n \n \nclass ThreadPoolExecutor(_base.Executor):\n\n\n _counter=itertools.count().__next__\n \n def __init__(self,max_workers=None,thread_name_prefix='',\n initializer=None,initargs=()):\n ''\n\n\n\n\n\n\n\n \n if max_workers is None:\n \n \n \n \n \n \n \n max_workers=min(32,(os.cpu_count()or 1)+4)\n if max_workers <=0:\n raise ValueError(\"max_workers must be greater than 0\")\n \n if initializer is not None and not callable(initializer):\n raise TypeError(\"initializer must be a callable\")\n \n self._max_workers=max_workers\n self._work_queue=queue.SimpleQueue()\n self._idle_semaphore=threading.Semaphore(0)\n self._threads=set()\n self._broken=False\n self._shutdown=False\n self._shutdown_lock=threading.Lock()\n self._thread_name_prefix=(thread_name_prefix or\n (\"ThreadPoolExecutor-%d\"%self._counter()))\n self._initializer=initializer\n self._initargs=initargs\n \n def submit(self,fn,/,*args,**kwargs):\n with self._shutdown_lock,_global_shutdown_lock:\n if self._broken:\n raise BrokenThreadPool(self._broken)\n \n if self._shutdown:\n raise RuntimeError('cannot schedule new futures after shutdown')\n if _shutdown:\n raise RuntimeError('cannot schedule new futures after '\n 'interpreter shutdown')\n \n f=_base.Future()\n w=_WorkItem(f,fn,args,kwargs)\n \n self._work_queue.put(w)\n self._adjust_thread_count()\n return f\n submit.__doc__=_base.Executor.submit.__doc__\n \n def _adjust_thread_count(self):\n \n if self._idle_semaphore.acquire(timeout=0):\n return\n \n \n \n def weakref_cb(_,q=self._work_queue):\n q.put(None)\n \n num_threads=len(self._threads)\n if num_threads | Work Ids | => | | => | Call Q | => | |\n| | +----------+ | | +-----------+ | |\n| | | ... | | | | ... | | |\n| | | 6 | | | | 5, call() | | |\n| | | 7 | | | | ... | | |\n| Process | | ... | | Local | +-----------+ | Process |\n| Pool | +----------+ | Worker | | #1..n |\n| Executor | | Thread | | |\n| | +----------- + | | +-----------+ | |\n| | <=> | Work Items | <=> | | <= | Result Q | <= | |\n| | +------------+ | | +-----------+ | |\n| | | 6: call() | | | | ... | | |\n| | | future | | | | 4, result | | |\n| | | ... | | | | 3, except | | |\n+----------+ +------------+ +--------+ +-----------+ +---------+\n\nExecutor.submit() called:\n- creates a uniquely numbered _WorkItem and adds it to the \"Work Items\" dict\n- adds the id of the _WorkItem to the \"Work Ids\" queue\n\nLocal worker thread:\n- reads work ids from the \"Work Ids\" queue and looks up the corresponding\n WorkItem from the \"Work Items\" dict: if the work item has been cancelled then\n it is simply removed from the dict, otherwise it is repackaged as a\n _CallItem and put in the \"Call Q\". New _CallItems are put in the \"Call Q\"\n until \"Call Q\" is full. 
NOTE: the size of the \"Call Q\" is kept small because\n calls placed in the \"Call Q\" can no longer be cancelled with Future.cancel().\n- reads _ResultItems from \"Result Q\", updates the future stored in the\n \"Work Items\" dict and deletes the dict entry\n\nProcess #1..n:\n- reads _CallItems from \"Call Q\", executes the calls, and puts the resulting\n _ResultItems in \"Result Q\"\n\"\"\"\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nimport atexit\nimport os\nfrom concurrent.futures import _base\nimport queue\nfrom queue import Full\nimport multiprocessing\nfrom multiprocessing import SimpleQueue\nimport threading\nimport weakref\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_threads_queues=weakref.WeakKeyDictionary()\n_shutdown=False\n\ndef _python_exit():\n global _shutdown\n _shutdown=True\n items=list(_threads_queues.items())\n for t,q in items:\n q.put(None )\n for t,q in items:\n t.join()\n \n \n \n \n \nEXTRA_QUEUED_CALLS=1\n\nclass _WorkItem(object):\n def __init__(self,future,fn,args,kwargs):\n self.future=future\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \nclass _ResultItem(object):\n def __init__(self,work_id,exception=None ,result=None ):\n self.work_id=work_id\n self.exception=exception\n self.result=result\n \nclass _CallItem(object):\n def __init__(self,work_id,fn,args,kwargs):\n self.work_id=work_id\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \ndef _web_worker(call_queue,result_queue):\n ''\n\n\n\n\n\n\n\n\n\n\n \n while True :\n call_item=call_queue.get(block=True )\n if call_item is None :\n \n result_queue.put(os.getpid())\n return\n try :\n r=call_item.fn(*call_item.args,**call_item.kwargs)\n except BaseException as e:\n result_queue.put(_ResultItem(call_item.work_id,\n exception=e))\n else :\n result_queue.put(_ResultItem(call_item.work_id,\n result=r))\n \ndef _add_call_item_to_queue(pending_work_items,\nwork_ids,\ncall_queue):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n while True :\n if call_queue.full():\n return\n try :\n work_id=work_ids.get(block=False )\n except queue.Empty:\n return\n else :\n work_item=pending_work_items[work_id]\n \n if work_item.future.set_running_or_notify_cancel():\n call_queue.put(_CallItem(work_id,\n work_item.fn,\n work_item.args,\n work_item.kwargs),\n block=True )\n else :\n del pending_work_items[work_id]\n continue\n \ndef _queue_management_worker(executor_reference,\nprocesses,\npending_work_items,\nwork_ids_queue,\ncall_queue,\nresult_queue):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n executor=None\n \n def shutting_down():\n return _shutdown or executor is None or executor._shutdown_thread\n \n def shutdown_worker():\n \n nb_children_alive=sum(p.is_alive()for p in processes.values())\n for i in range(0,nb_children_alive):\n call_queue.put_nowait(None )\n \n call_queue.close()\n \n \n for p in processes.values():\n p.join()\n \n reader=result_queue._reader\n \n while True :\n _add_call_item_to_queue(pending_work_items,\n work_ids_queue,\n call_queue)\n \n sentinels=[p.sentinel for p in processes.values()]\n assert sentinels\n \n ready=[reader]+sentinels\n if reader in ready:\n result_item=reader.recv()\n else :\n \n executor=executor_reference()\n if executor is not None :\n executor._broken=True\n executor._shutdown_thread=True\n executor=None\n \n for work_id,work_item in pending_work_items.items():\n work_item.future.set_exception(\n BrokenProcessPool(\n \"A process in the process pool was \"\n \"terminated abruptly while the future was \"\n \"running or pending.\"\n ))\n \n del work_item\n pending_work_items.clear()\n 
\n \n for p in processes.values():\n p.terminate()\n shutdown_worker()\n return\n if isinstance(result_item,int):\n \n \n assert shutting_down()\n p=processes.pop(result_item)\n p.join()\n if not processes:\n shutdown_worker()\n return\n elif result_item is not None :\n work_item=pending_work_items.pop(result_item.work_id,None )\n \n if work_item is not None :\n if result_item.exception:\n work_item.future.set_exception(result_item.exception)\n else :\n work_item.future.set_result(result_item.result)\n \n del work_item\n \n executor=executor_reference()\n \n \n \n \n if shutting_down():\n try :\n \n \n if not pending_work_items:\n shutdown_worker()\n return\n except Full:\n \n \n pass\n executor=None\n \n_system_limits_checked=False\n_system_limited=None\ndef _check_system_limits():\n global _system_limits_checked,_system_limited\n if _system_limits_checked:\n if _system_limited:\n raise NotImplementedError(_system_limited)\n _system_limits_checked=True\n try :\n nsems_max=os.sysconf(\"SC_SEM_NSEMS_MAX\")\n except (AttributeError,ValueError):\n \n return\n if nsems_max ==-1:\n \n \n return\n if nsems_max >=256:\n \n \n return\n _system_limited=\"system provides too few semaphores (%d available, 256 necessary)\"%nsems_max\n raise NotImplementedError(_system_limited)\n \n \nclass BrokenProcessPool(RuntimeError):\n ''\n\n\n \n \n \nclass WebWorkerExecutor(_base.Executor):\n def __init__(self,max_workers=None ):\n ''\n\n\n\n\n\n \n _check_system_limits()\n \n if max_workers is None :\n self._max_workers=os.cpu_count()or 1\n else :\n self._max_workers=max_workers\n \n \n \n \n self._call_queue=multiprocessing.Queue(self._max_workers+\n EXTRA_QUEUED_CALLS)\n \n \n \n self._call_queue._ignore_epipe=True\n self._result_queue=SimpleQueue()\n self._work_ids=queue.Queue()\n self._queue_management_thread=None\n \n self._webworkers={}\n \n \n self._shutdown_thread=False\n self._shutdown_lock=threading.Lock()\n self._broken=False\n self._queue_count=0\n self._pending_work_items={}\n \n def _start_queue_management_thread(self):\n \n \n def weakref_cb(_,q=self._result_queue):\n q.put(None )\n if self._queue_management_thread is None :\n \n self._adjust_process_count()\n self._queue_management_thread=threading.Thread(\n target=_queue_management_worker,\n args=(weakref.ref(self,weakref_cb),\n self._webworkers,\n self._pending_work_items,\n self._work_ids,\n self._call_queue,\n self._result_queue))\n self._queue_management_thread.daemon=True\n self._queue_management_thread.start()\n _threads_queues[self._queue_management_thread]=self._result_queue\n \n def _adjust_process_count(self):\n for _ in range(len(self._webworkers),self._max_workers):\n p=multiprocessing.Process(\n target=_web_worker,\n args=(self._call_queue,\n self._result_queue))\n p.start()\n self._webworkers[p.pid]=p\n \n def submit(self,fn,*args,**kwargs):\n with self._shutdown_lock:\n if self._broken:\n raise BrokenProcessPool('A child process terminated '\n 'abruptly, the process pool is not usable anymore')\n if self._shutdown_thread:\n raise RuntimeError('cannot schedule new futures after shutdown')\n \n f=_base.Future()\n w=_WorkItem(f,fn,args,kwargs)\n \n self._pending_work_items[self._queue_count]=w\n self._work_ids.put(self._queue_count)\n self._queue_count +=1\n \n self._result_queue.put(None )\n \n self._start_queue_management_thread()\n return f\n submit.__doc__=_base.Executor.submit.__doc__\n \n def shutdown(self,wait=True ):\n with self._shutdown_lock:\n self._shutdown_thread=True\n if self._queue_management_thread:\n \n 
self._result_queue.put(None )\n if wait:\n self._queue_management_thread.join()\n \n \n self._queue_management_thread=None\n self._call_queue=None\n self._result_queue=None\n self._webworkers=None\n shutdown.__doc__=_base.Executor.shutdown.__doc__\n \natexit.register(_python_exit)\n", ["atexit", "concurrent.futures", "concurrent.futures._base", "multiprocessing", "os", "queue", "threading", "weakref"]], "concurrent.futures": [".py", "\n\n\n\"\"\"Execute computations asynchronously using threads or processes.\"\"\"\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nfrom concurrent.futures._base import(FIRST_COMPLETED,\nFIRST_EXCEPTION,\nALL_COMPLETED,\nCancelledError,\nTimeoutError,\nInvalidStateError,\nBrokenExecutor,\nFuture,\nExecutor,\nwait,\nas_completed)\n\n__all__=(\n'FIRST_COMPLETED',\n'FIRST_EXCEPTION',\n'ALL_COMPLETED',\n'CancelledError',\n'TimeoutError',\n'BrokenExecutor',\n'Future',\n'Executor',\n'wait',\n'as_completed',\n'ProcessPoolExecutor',\n'ThreadPoolExecutor',\n)\n\n\ndef __dir__():\n return __all__+('__author__','__doc__')\n \n \ndef __getattr__(name):\n global ProcessPoolExecutor,ThreadPoolExecutor\n \n if name =='ProcessPoolExecutor':\n from.process import ProcessPoolExecutor as pe\n ProcessPoolExecutor=pe\n return pe\n \n if name =='ThreadPoolExecutor':\n from.thread import ThreadPoolExecutor as te\n ThreadPoolExecutor=te\n return te\n \n raise AttributeError(f\"module {__name__ !r} has no attribute {name !r}\")\n", ["concurrent.futures._base", "concurrent.futures.process", "concurrent.futures.thread"], 1], "concurrent.futures.process": [".py", "\n\n\n\"\"\"Implements ProcessPoolExecutor.\n\nThe following diagram and text describe the data-flow through the system:\n\n|======================= In-process =====================|== Out-of-process ==|\n\n+----------+ +----------+ +--------+ +-----------+ +---------+\n| | => | Work Ids | | | | Call Q | | Process |\n| | +----------+ | | +-----------+ | Pool |\n| | | ... | | | | ... | +---------+\n| | | 6 | => | | => | 5, call() | => | |\n| | | 7 | | | | ... | | |\n| Process | | ... | | Local | +-----------+ | Process |\n| Pool | +----------+ | Worker | | #1..n |\n| Executor | | Thread | | |\n| | +----------- + | | +-----------+ | |\n| | <=> | Work Items | <=> | | <= | Result Q | <= | |\n| | +------------+ | | +-----------+ | |\n| | | 6: call() | | | | ... | | |\n| | | future | | | | 4, result | | |\n| | | ... | | | | 3, except | | |\n+----------+ +------------+ +--------+ +-----------+ +---------+\n\nExecutor.submit() called:\n- creates a uniquely numbered _WorkItem and adds it to the \"Work Items\" dict\n- adds the id of the _WorkItem to the \"Work Ids\" queue\n\nLocal worker thread:\n- reads work ids from the \"Work Ids\" queue and looks up the corresponding\n WorkItem from the \"Work Items\" dict: if the work item has been cancelled then\n it is simply removed from the dict, otherwise it is repackaged as a\n _CallItem and put in the \"Call Q\". New _CallItems are put in the \"Call Q\"\n until \"Call Q\" is full. 
NOTE: the size of the \"Call Q\" is kept small because\n calls placed in the \"Call Q\" can no longer be cancelled with Future.cancel().\n- reads _ResultItems from \"Result Q\", updates the future stored in the\n \"Work Items\" dict and deletes the dict entry\n\nProcess #1..n:\n- reads _CallItems from \"Call Q\", executes the calls, and puts the resulting\n _ResultItems in \"Result Q\"\n\"\"\"\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nimport os\nfrom concurrent.futures import _base\nimport queue\nimport multiprocessing as mp\n\n\nimport multiprocessing.connection\nfrom multiprocessing.queues import Queue\nimport threading\nimport weakref\nfrom functools import partial\nimport itertools\nimport sys\nfrom traceback import format_exception\n\n\n_threads_wakeups=weakref.WeakKeyDictionary()\n_global_shutdown=False\n\n\nclass _ThreadWakeup:\n def __init__(self):\n self._closed=False\n self._reader,self._writer=mp.Pipe(duplex=False)\n \n def close(self):\n if not self._closed:\n self._closed=True\n self._writer.close()\n self._reader.close()\n \n def wakeup(self):\n if not self._closed:\n self._writer.send_bytes(b\"\")\n \n def clear(self):\n if not self._closed:\n while self._reader.poll():\n self._reader.recv_bytes()\n \n \ndef _python_exit():\n global _global_shutdown\n _global_shutdown=True\n items=list(_threads_wakeups.items())\n for _,thread_wakeup in items:\n \n thread_wakeup.wakeup()\n for t,_ in items:\n t.join()\n \n \n \n \n \nthreading._register_atexit(_python_exit)\n\n\n\n\n\nEXTRA_QUEUED_CALLS=1\n\n\n\n\n\n\n_MAX_WINDOWS_WORKERS=63 -2\n\n\n\nclass _RemoteTraceback(Exception):\n def __init__(self,tb):\n self.tb=tb\n def __str__(self):\n return self.tb\n \nclass _ExceptionWithTraceback:\n def __init__(self,exc,tb):\n tb=''.join(format_exception(type(exc),exc,tb))\n self.exc=exc\n \n \n self.exc.__traceback__=None\n self.tb='\\n\"\"\"\\n%s\"\"\"'%tb\n def __reduce__(self):\n return _rebuild_exc,(self.exc,self.tb)\n \ndef _rebuild_exc(exc,tb):\n exc.__cause__=_RemoteTraceback(tb)\n return exc\n \nclass _WorkItem(object):\n def __init__(self,future,fn,args,kwargs):\n self.future=future\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \nclass _ResultItem(object):\n def __init__(self,work_id,exception=None,result=None,exit_pid=None):\n self.work_id=work_id\n self.exception=exception\n self.result=result\n self.exit_pid=exit_pid\n \nclass _CallItem(object):\n def __init__(self,work_id,fn,args,kwargs):\n self.work_id=work_id\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \n \nclass _SafeQueue(Queue):\n ''\n def __init__(self,max_size=0,*,ctx,pending_work_items,shutdown_lock,\n thread_wakeup):\n self.pending_work_items=pending_work_items\n self.shutdown_lock=shutdown_lock\n self.thread_wakeup=thread_wakeup\n super().__init__(max_size,ctx=ctx)\n \n def _on_queue_feeder_error(self,e,obj):\n if isinstance(obj,_CallItem):\n tb=format_exception(type(e),e,e.__traceback__)\n e.__cause__=_RemoteTraceback('\\n\"\"\"\\n{}\"\"\"'.format(''.join(tb)))\n work_item=self.pending_work_items.pop(obj.work_id,None)\n with self.shutdown_lock:\n self.thread_wakeup.wakeup()\n \n \n \n if work_item is not None:\n work_item.future.set_exception(e)\n else:\n super()._on_queue_feeder_error(e,obj)\n \n \ndef _get_chunks(*iterables,chunksize):\n ''\n it=zip(*iterables)\n while True:\n chunk=tuple(itertools.islice(it,chunksize))\n if not chunk:\n return\n yield chunk\n \n \ndef _process_chunk(fn,chunk):\n ''\n\n\n\n\n\n\n \n return[fn(*args)for args in chunk]\n \n \ndef 
_sendback_result(result_queue,work_id,result=None,exception=None,\nexit_pid=None):\n ''\n try:\n result_queue.put(_ResultItem(work_id,result=result,\n exception=exception,exit_pid=exit_pid))\n except BaseException as e:\n exc=_ExceptionWithTraceback(e,e.__traceback__)\n result_queue.put(_ResultItem(work_id,exception=exc,\n exit_pid=exit_pid))\n \n \ndef _process_worker(call_queue,result_queue,initializer,initargs,max_tasks=None):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if initializer is not None:\n try:\n initializer(*initargs)\n except BaseException:\n _base.LOGGER.critical('Exception in initializer:',exc_info=True)\n \n \n return\n num_tasks=0\n exit_pid=None\n while True:\n call_item=call_queue.get(block=True)\n if call_item is None:\n \n result_queue.put(os.getpid())\n return\n \n if max_tasks is not None:\n num_tasks +=1\n if num_tasks >=max_tasks:\n exit_pid=os.getpid()\n \n try:\n r=call_item.fn(*call_item.args,**call_item.kwargs)\n except BaseException as e:\n exc=_ExceptionWithTraceback(e,e.__traceback__)\n _sendback_result(result_queue,call_item.work_id,exception=exc,\n exit_pid=exit_pid)\n else:\n _sendback_result(result_queue,call_item.work_id,result=r,\n exit_pid=exit_pid)\n del r\n \n \n \n del call_item\n \n if exit_pid is not None:\n return\n \n \nclass _ExecutorManagerThread(threading.Thread):\n ''\n\n\n\n\n\n\n\n\n \n \n def __init__(self,executor):\n \n \n \n \n self.thread_wakeup=executor._executor_manager_thread_wakeup\n self.shutdown_lock=executor._shutdown_lock\n \n \n \n \n \n \n \n def weakref_cb(_,\n thread_wakeup=self.thread_wakeup,\n shutdown_lock=self.shutdown_lock):\n mp.util.debug('Executor collected: triggering callback for'\n ' QueueManager wakeup')\n with shutdown_lock:\n thread_wakeup.wakeup()\n \n self.executor_reference=weakref.ref(executor,weakref_cb)\n \n \n self.processes=executor._processes\n \n \n \n self.call_queue=executor._call_queue\n \n \n self.result_queue=executor._result_queue\n \n \n self.work_ids_queue=executor._work_ids\n \n \n \n self.max_tasks_per_child=executor._max_tasks_per_child\n \n \n \n self.pending_work_items=executor._pending_work_items\n \n super().__init__()\n \n def run(self):\n \n \n while True:\n self.add_call_item_to_queue()\n \n result_item,is_broken,cause=self.wait_result_broken_or_wakeup()\n \n if is_broken:\n self.terminate_broken(cause)\n return\n if result_item is not None:\n self.process_result_item(result_item)\n \n process_exited=result_item.exit_pid is not None\n if process_exited:\n p=self.processes.pop(result_item.exit_pid)\n p.join()\n \n \n \n del result_item\n \n if executor :=self.executor_reference():\n if process_exited:\n with self.shutdown_lock:\n executor._adjust_process_count()\n else:\n executor._idle_worker_semaphore.release()\n del executor\n \n if self.is_shutting_down():\n self.flag_executor_shutting_down()\n \n \n \n \n self.add_call_item_to_queue()\n \n \n \n if not self.pending_work_items:\n self.join_executor_internals()\n return\n \n def add_call_item_to_queue(self):\n \n \n while True:\n if self.call_queue.full():\n return\n try:\n work_id=self.work_ids_queue.get(block=False)\n except queue.Empty:\n return\n else:\n work_item=self.pending_work_items[work_id]\n \n if work_item.future.set_running_or_notify_cancel():\n self.call_queue.put(_CallItem(work_id,\n work_item.fn,\n work_item.args,\n work_item.kwargs),\n block=True)\n else:\n del self.pending_work_items[work_id]\n continue\n \n def wait_result_broken_or_wakeup(self):\n \n \n \n \n \n result_reader=self.result_queue._reader\n assert not 
self.thread_wakeup._closed\n wakeup_reader=self.thread_wakeup._reader\n readers=[result_reader,wakeup_reader]\n worker_sentinels=[p.sentinel for p in list(self.processes.values())]\n ready=mp.connection.wait(readers+worker_sentinels)\n \n cause=None\n is_broken=True\n result_item=None\n if result_reader in ready:\n try:\n result_item=result_reader.recv()\n is_broken=False\n except BaseException as e:\n cause=format_exception(type(e),e,e.__traceback__)\n \n elif wakeup_reader in ready:\n is_broken=False\n \n with self.shutdown_lock:\n self.thread_wakeup.clear()\n \n return result_item,is_broken,cause\n \n def process_result_item(self,result_item):\n \n \n \n if isinstance(result_item,int):\n \n \n assert self.is_shutting_down()\n p=self.processes.pop(result_item)\n p.join()\n if not self.processes:\n self.join_executor_internals()\n return\n else:\n \n work_item=self.pending_work_items.pop(result_item.work_id,None)\n \n if work_item is not None:\n if result_item.exception:\n work_item.future.set_exception(result_item.exception)\n else:\n work_item.future.set_result(result_item.result)\n \n def is_shutting_down(self):\n \n executor=self.executor_reference()\n \n \n \n \n return(_global_shutdown or executor is None\n or executor._shutdown_thread)\n \n def terminate_broken(self,cause):\n \n \n \n \n \n executor=self.executor_reference()\n if executor is not None:\n executor._broken=('A child process terminated '\n 'abruptly, the process pool is not '\n 'usable anymore')\n executor._shutdown_thread=True\n executor=None\n \n \n \n bpe=BrokenProcessPool(\"A process in the process pool was \"\n \"terminated abruptly while the future was \"\n \"running or pending.\")\n if cause is not None:\n bpe.__cause__=_RemoteTraceback(\n f\"\\n'''\\n{''.join(cause)}'''\")\n \n \n for work_id,work_item in self.pending_work_items.items():\n work_item.future.set_exception(bpe)\n \n del work_item\n self.pending_work_items.clear()\n \n \n \n for p in self.processes.values():\n p.terminate()\n \n \n self.join_executor_internals()\n \n def flag_executor_shutting_down(self):\n \n \n executor=self.executor_reference()\n if executor is not None:\n executor._shutdown_thread=True\n \n if executor._cancel_pending_futures:\n \n \n new_pending_work_items={}\n for work_id,work_item in self.pending_work_items.items():\n if not work_item.future.cancel():\n new_pending_work_items[work_id]=work_item\n self.pending_work_items=new_pending_work_items\n \n \n while True:\n try:\n self.work_ids_queue.get_nowait()\n except queue.Empty:\n break\n \n \n executor._cancel_pending_futures=False\n \n def shutdown_workers(self):\n n_children_to_stop=self.get_n_children_alive()\n n_sentinels_sent=0\n \n \n while(n_sentinels_sent 0):\n for i in range(n_children_to_stop -n_sentinels_sent):\n try:\n self.call_queue.put_nowait(None)\n n_sentinels_sent +=1\n except queue.Full:\n break\n \n def join_executor_internals(self):\n self.shutdown_workers()\n \n self.call_queue.close()\n self.call_queue.join_thread()\n with self.shutdown_lock:\n self.thread_wakeup.close()\n \n \n for p in self.processes.values():\n p.join()\n \n def get_n_children_alive(self):\n \n return sum(p.is_alive()for p in self.processes.values())\n \n \n_system_limits_checked=False\n_system_limited=None\n\n\ndef _check_system_limits():\n global _system_limits_checked,_system_limited\n if _system_limits_checked:\n if _system_limited:\n raise NotImplementedError(_system_limited)\n _system_limits_checked=True\n try:\n import multiprocessing.synchronize\n except ImportError:\n 
_system_limited=(\n \"This Python build lacks multiprocessing.synchronize, usually due \"\n \"to named semaphores being unavailable on this platform.\"\n )\n raise NotImplementedError(_system_limited)\n try:\n nsems_max=os.sysconf(\"SC_SEM_NSEMS_MAX\")\n except(AttributeError,ValueError):\n \n return\n if nsems_max ==-1:\n \n \n return\n if nsems_max >=256:\n \n \n return\n _system_limited=(\"system provides too few semaphores (%d\"\n \" available, 256 necessary)\"%nsems_max)\n raise NotImplementedError(_system_limited)\n \n \ndef _chain_from_iterable_of_lists(iterable):\n ''\n\n\n\n \n for element in iterable:\n element.reverse()\n while element:\n yield element.pop()\n \n \nclass BrokenProcessPool(_base.BrokenExecutor):\n ''\n\n\n \n \n \nclass ProcessPoolExecutor(_base.Executor):\n def __init__(self,max_workers=None,mp_context=None,\n initializer=None,initargs=(),*,max_tasks_per_child=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n _check_system_limits()\n \n if max_workers is None:\n self._max_workers=os.cpu_count()or 1\n if sys.platform =='win32':\n self._max_workers=min(_MAX_WINDOWS_WORKERS,\n self._max_workers)\n else:\n if max_workers <=0:\n raise ValueError(\"max_workers must be greater than 0\")\n elif(sys.platform =='win32'and\n max_workers >_MAX_WINDOWS_WORKERS):\n raise ValueError(\n f\"max_workers must be <= {_MAX_WINDOWS_WORKERS}\")\n \n self._max_workers=max_workers\n \n if mp_context is None:\n if max_tasks_per_child is not None:\n mp_context=mp.get_context(\"spawn\")\n else:\n mp_context=mp.get_context()\n self._mp_context=mp_context\n \n \n self._safe_to_dynamically_spawn_children=(\n self._mp_context.get_start_method(allow_none=False)!=\"fork\")\n \n if initializer is not None and not callable(initializer):\n raise TypeError(\"initializer must be a callable\")\n self._initializer=initializer\n self._initargs=initargs\n \n if max_tasks_per_child is not None:\n if not isinstance(max_tasks_per_child,int):\n raise TypeError(\"max_tasks_per_child must be an integer\")\n elif max_tasks_per_child <=0:\n raise ValueError(\"max_tasks_per_child must be >= 1\")\n if self._mp_context.get_start_method(allow_none=False)==\"fork\":\n \n raise ValueError(\"max_tasks_per_child is incompatible with\"\n \" the 'fork' multiprocessing start method;\"\n \" supply a different mp_context.\")\n self._max_tasks_per_child=max_tasks_per_child\n \n \n self._executor_manager_thread=None\n \n \n self._processes={}\n \n \n self._shutdown_thread=False\n self._shutdown_lock=threading.Lock()\n self._idle_worker_semaphore=threading.Semaphore(0)\n self._broken=False\n self._queue_count=0\n self._pending_work_items={}\n self._cancel_pending_futures=False\n \n \n \n \n \n \n \n \n \n self._executor_manager_thread_wakeup=_ThreadWakeup()\n \n \n \n \n \n queue_size=self._max_workers+EXTRA_QUEUED_CALLS\n self._call_queue=_SafeQueue(\n max_size=queue_size,ctx=self._mp_context,\n pending_work_items=self._pending_work_items,\n shutdown_lock=self._shutdown_lock,\n thread_wakeup=self._executor_manager_thread_wakeup)\n \n \n \n self._call_queue._ignore_epipe=True\n self._result_queue=mp_context.SimpleQueue()\n self._work_ids=queue.Queue()\n \n def _start_executor_manager_thread(self):\n if self._executor_manager_thread is None:\n \n if not self._safe_to_dynamically_spawn_children:\n self._launch_processes()\n self._executor_manager_thread=_ExecutorManagerThread(self)\n self._executor_manager_thread.start()\n _threads_wakeups[self._executor_manager_thread]=\\\n self._executor_manager_thread_wakeup\n \n def 
_adjust_process_count(self):\n \n if self._idle_worker_semaphore.acquire(blocking=False):\n return\n \n process_count=len(self._processes)\n if process_count = 1.\")\n \n results=super().map(partial(_process_chunk,fn),\n _get_chunks(*iterables,chunksize=chunksize),\n timeout=timeout)\n return _chain_from_iterable_of_lists(results)\n \n def shutdown(self,wait=True,*,cancel_futures=False):\n with self._shutdown_lock:\n self._cancel_pending_futures=cancel_futures\n self._shutdown_thread=True\n if self._executor_manager_thread_wakeup is not None:\n \n self._executor_manager_thread_wakeup.wakeup()\n \n if self._executor_manager_thread is not None and wait:\n self._executor_manager_thread.join()\n \n \n self._executor_manager_thread=None\n self._call_queue=None\n if self._result_queue is not None and wait:\n self._result_queue.close()\n self._result_queue=None\n self._processes=None\n self._executor_manager_thread_wakeup=None\n \n shutdown.__doc__=_base.Executor.shutdown.__doc__\n", ["concurrent.futures", "concurrent.futures._base", "functools", "itertools", "multiprocessing", "multiprocessing.connection", "multiprocessing.queues", "multiprocessing.synchronize", "os", "queue", "sys", "threading", "traceback", "weakref"]], "importlib.readers": [".py", "''\n\n\n\n\n\n\nfrom.resources.readers import(\nFileReader,ZipReader,MultiplexedPath,NamespaceReader,\n)\n\n__all__=['FileReader','ZipReader','MultiplexedPath','NamespaceReader']\n", ["importlib.resources.readers"]], "importlib._abc": [".py", "''\nfrom. import _bootstrap\nimport abc\n\n\nclass Loader(metaclass=abc.ABCMeta):\n\n ''\n \n def create_module(self,spec):\n ''\n\n\n\n\n \n \n return None\n \n \n \n \n def load_module(self,fullname):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not hasattr(self,'exec_module'):\n raise ImportError\n \n return _bootstrap._load_module_shim(self,fullname)\n", ["abc", "importlib", "importlib._bootstrap"]], "importlib.util": [".py", "''\nfrom ._abc import Loader\nfrom ._bootstrap import module_from_spec\nfrom ._bootstrap import _resolve_name\nfrom ._bootstrap import spec_from_loader\nfrom ._bootstrap import _find_spec\nfrom ._bootstrap_external import MAGIC_NUMBER\nfrom ._bootstrap_external import _RAW_MAGIC_NUMBER\nfrom ._bootstrap_external import cache_from_source\nfrom ._bootstrap_external import decode_source\nfrom ._bootstrap_external import source_from_cache\nfrom ._bootstrap_external import spec_from_file_location\n\nfrom contextlib import contextmanager\nimport _imp\nimport functools\nimport sys\nimport types\nimport warnings\n\n\ndef source_hash(source_bytes):\n ''\n return _imp.source_hash(_RAW_MAGIC_NUMBER,source_bytes)\n \n \ndef resolve_name(name,package):\n ''\n if not name.startswith('.'):\n return name\n elif not package:\n raise ImportError(f'no package specified for {repr(name)} '\n '(required for relative module names)')\n level=0\n for character in name:\n if character !='.':\n break\n level +=1\n return _resolve_name(name[level:],package,level)\n \n \ndef _find_spec_from_path(name,path=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if name not in sys.modules:\n return _find_spec(name,path)\n else :\n module=sys.modules[name]\n if module is None :\n return None\n try :\n spec=module.__spec__\n except AttributeError:\n raise ValueError('{}.__spec__ is not set'.format(name))from None\n else :\n if spec is None :\n raise ValueError('{}.__spec__ is None'.format(name))\n return spec\n \n \ndef find_spec(name,package=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n fullname=resolve_name(name,package)if 
name.startswith('.')else name\n if fullname not in sys.modules:\n parent_name=fullname.rpartition('.')[0]\n if parent_name:\n parent=__import__(parent_name,fromlist=['__path__'])\n try :\n parent_path=parent.__path__\n except AttributeError as e:\n raise ModuleNotFoundError(\n f\"__path__ attribute not found on {parent_name!r} \"\n f\"while trying to find {fullname!r}\",name=fullname)from e\n else :\n parent_path=None\n return _find_spec(fullname,parent_path)\n else :\n module=sys.modules[fullname]\n if module is None :\n return None\n try :\n spec=module.__spec__\n except AttributeError:\n raise ValueError('{}.__spec__ is not set'.format(name))from None\n else :\n if spec is None :\n raise ValueError('{}.__spec__ is None'.format(name))\n return spec\n \n \n@contextmanager\ndef _module_to_load(name):\n is_reload=name in sys.modules\n \n module=sys.modules.get(name)\n if not is_reload:\n \n \n \n module=type(sys)(name)\n \n \n module.__initializing__=True\n sys.modules[name]=module\n try :\n yield module\n except Exception:\n if not is_reload:\n try :\n del sys.modules[name]\n except KeyError:\n pass\n finally :\n module.__initializing__=False\n \n \ndef set_package(fxn):\n ''\n\n\n\n \n @functools.wraps(fxn)\n def set_package_wrapper(*args,**kwargs):\n warnings.warn('The import system now takes care of this automatically; '\n 'this decorator is slated for removal in Python 3.12',\n DeprecationWarning,stacklevel=2)\n module=fxn(*args,**kwargs)\n if getattr(module,'__package__',None )is None :\n module.__package__=module.__name__\n if not hasattr(module,'__path__'):\n module.__package__=module.__package__.rpartition('.')[0]\n return module\n return set_package_wrapper\n \n \ndef set_loader(fxn):\n ''\n\n\n\n \n @functools.wraps(fxn)\n def set_loader_wrapper(self,*args,**kwargs):\n warnings.warn('The import system now takes care of this automatically; '\n 'this decorator is slated for removal in Python 3.12',\n DeprecationWarning,stacklevel=2)\n module=fxn(self,*args,**kwargs)\n if getattr(module,'__loader__',None )is None :\n module.__loader__=self\n return module\n return set_loader_wrapper\n \n \ndef module_for_loader(fxn):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n warnings.warn('The import system now takes care of this automatically; '\n 'this decorator is slated for removal in Python 3.12',\n DeprecationWarning,stacklevel=2)\n @functools.wraps(fxn)\n def module_for_loader_wrapper(self,fullname,*args,**kwargs):\n with _module_to_load(fullname)as module:\n module.__loader__=self\n try :\n is_package=self.is_package(fullname)\n except (ImportError,AttributeError):\n pass\n else :\n if is_package:\n module.__package__=fullname\n else :\n module.__package__=fullname.rpartition('.')[0]\n \n return fxn(self,module,*args,**kwargs)\n \n return module_for_loader_wrapper\n \n \nclass _LazyModule(types.ModuleType):\n\n ''\n \n def __getattribute__(self,attr):\n ''\n \n \n \n self.__class__=types.ModuleType\n \n \n original_name=self.__spec__.name\n \n \n attrs_then=self.__spec__.loader_state['__dict__']\n attrs_now=self.__dict__\n attrs_updated={}\n for key,value in attrs_now.items():\n \n \n if key not in attrs_then:\n attrs_updated[key]=value\n elif id(attrs_now[key])!=id(attrs_then[key]):\n attrs_updated[key]=value\n self.__spec__.loader.exec_module(self)\n \n \n if original_name in sys.modules:\n if id(self)!=id(sys.modules[original_name]):\n raise ValueError(f\"module object for {original_name!r} \"\n \"substituted in sys.modules during a lazy \"\n \"load\")\n \n \n 
self.__dict__.update(attrs_updated)\n return getattr(self,attr)\n \n def __delattr__(self,attr):\n ''\n \n \n self.__getattribute__(attr)\n delattr(self,attr)\n \n \nclass LazyLoader(Loader):\n\n ''\n \n @staticmethod\n def __check_eager_loader(loader):\n if not hasattr(loader,'exec_module'):\n raise TypeError('loader must define exec_module()')\n \n @classmethod\n def factory(cls,loader):\n ''\n cls.__check_eager_loader(loader)\n return lambda *args,**kwargs:cls(loader(*args,**kwargs))\n \n def __init__(self,loader):\n self.__check_eager_loader(loader)\n self.loader=loader\n \n def create_module(self,spec):\n return self.loader.create_module(spec)\n \n def exec_module(self,module):\n ''\n module.__spec__.loader=self.loader\n module.__loader__=self.loader\n \n \n \n \n loader_state={}\n loader_state['__dict__']=module.__dict__.copy()\n loader_state['__class__']=module.__class__\n module.__spec__.loader_state=loader_state\n module.__class__=_LazyModule\n", ["_imp", "contextlib", "functools", "importlib._abc", "importlib._bootstrap", "importlib._bootstrap_external", "sys", "types", "warnings"]], "importlib._bootstrap": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_bootstrap_external=None\n_thread=None\nimport _weakref\n\ndef _wrap(new,old):\n ''\n for replace in['__module__','__name__','__qualname__','__doc__']:\n if hasattr(old,replace):\n setattr(new,replace,getattr(old,replace))\n new.__dict__.update(old.__dict__)\n \n \ndef _new_module(name):\n return type(sys)(name)\n \n \n \n \n \n \n_module_locks={}\n\n_blocking_on={}\n\n\nclass _DeadlockError(RuntimeError):\n pass\n \n \nclass _ModuleLock:\n ''\n\n\n \n \n def __init__(self,name):\n self.lock=_thread.allocate_lock()\n self.wakeup=_thread.allocate_lock()\n self.name=name\n self.owner=None\n self.count=0\n self.waiters=0\n \n def has_deadlock(self):\n \n me=_thread.get_ident()\n tid=self.owner\n while True:\n lock=_blocking_on.get(tid)\n if lock is None:\n return False\n tid=lock.owner\n if tid ==me:\n return True\n \n def acquire(self):\n ''\n\n\n\n \n tid=_thread.get_ident()\n _blocking_on[tid]=self\n try:\n while True:\n with self.lock:\n if self.count ==0 or self.owner ==tid:\n self.owner=tid\n self.count +=1\n return True\n if self.has_deadlock():\n raise _DeadlockError('deadlock detected by %r'%self)\n if self.wakeup.acquire(False):\n self.waiters +=1\n \n self.wakeup.acquire()\n self.wakeup.release()\n finally:\n del _blocking_on[tid]\n \n def release(self):\n tid=_thread.get_ident()\n with self.lock:\n if self.owner !=tid:\n raise RuntimeError('cannot release un-acquired lock')\n assert self.count >0\n self.count -=1\n if self.count ==0:\n self.owner=None\n if self.waiters:\n self.waiters -=1\n self.wakeup.release()\n \n def __repr__(self):\n return '_ModuleLock({!r}) at {}'.format(self.name,id(self))\n \n \nclass _DummyModuleLock:\n ''\n \n \n def __init__(self,name):\n self.name=name\n self.count=0\n \n def acquire(self):\n self.count +=1\n return True\n \n def release(self):\n if self.count ==0:\n raise RuntimeError('cannot release un-acquired lock')\n self.count -=1\n \n def __repr__(self):\n return '_DummyModuleLock({!r}) at {}'.format(self.name,id(self))\n \n \nclass _ModuleLockManager:\n\n def __init__(self,name):\n self._name=name\n self._lock=None\n \n def __enter__(self):\n self._lock=_get_module_lock(self._name)\n self._lock.acquire()\n \n def __exit__(self,*args,**kwargs):\n self._lock.release()\n \n \n \n \ndef _get_module_lock(name):\n ''\n\n\n \n \n _imp.acquire_lock()\n try:\n try:\n 
lock=_module_locks[name]()\n except KeyError:\n lock=None\n \n if lock is None:\n if _thread is None:\n lock=_DummyModuleLock(name)\n else:\n lock=_ModuleLock(name)\n \n def cb(ref,name=name):\n _imp.acquire_lock()\n try:\n \n \n \n if _module_locks.get(name)is ref:\n del _module_locks[name]\n finally:\n _imp.release_lock()\n \n _module_locks[name]=_weakref.ref(lock,cb)\n finally:\n _imp.release_lock()\n \n return lock\n \n \ndef _lock_unlock_module(name):\n ''\n\n\n\n \n lock=_get_module_lock(name)\n try:\n lock.acquire()\n except _DeadlockError:\n \n \n pass\n else:\n lock.release()\n \n \ndef _call_with_frames_removed(f,*args,**kwds):\n ''\n\n\n\n\n\n \n return f(*args,**kwds)\n \n \ndef _verbose_message(message,*args,verbosity=1):\n ''\n if sys.flags.verbose >=verbosity:\n if not message.startswith(('#','import ')):\n message='# '+message\n print(message.format(*args),file=sys.stderr)\n \n \ndef _requires_builtin(fxn):\n ''\n def _requires_builtin_wrapper(self,fullname):\n if fullname not in sys.builtin_module_names:\n raise ImportError('{!r} is not a built-in module'.format(fullname),\n name=fullname)\n return fxn(self,fullname)\n _wrap(_requires_builtin_wrapper,fxn)\n return _requires_builtin_wrapper\n \n \ndef _requires_frozen(fxn):\n ''\n def _requires_frozen_wrapper(self,fullname):\n if not _imp.is_frozen(fullname):\n raise ImportError('{!r} is not a frozen module'.format(fullname),\n name=fullname)\n return fxn(self,fullname)\n _wrap(_requires_frozen_wrapper,fxn)\n return _requires_frozen_wrapper\n \n \n \ndef _load_module_shim(self,fullname):\n ''\n\n\n\n \n spec=spec_from_loader(fullname,self)\n if fullname in sys.modules:\n module=sys.modules[fullname]\n _exec(spec,module)\n return sys.modules[fullname]\n else:\n return _load(spec)\n \n \n \ndef _module_repr(module):\n\n loader=getattr(module,'__loader__',None)\n if hasattr(loader,'module_repr'):\n \n \n \n try:\n return loader.module_repr(module)\n except Exception:\n pass\n try:\n spec=module.__spec__\n except AttributeError:\n pass\n else:\n if spec is not None:\n return _module_repr_from_spec(spec)\n \n \n \n try:\n name=module.__name__\n except AttributeError:\n name='?'\n try:\n filename=module.__file__\n except AttributeError:\n if loader is None:\n return ''.format(name)\n else:\n return ''.format(name,loader)\n else:\n return ''.format(name,filename)\n \n \nclass _installed_safely:\n\n def __init__(self,module):\n self._module=module\n self._spec=module.__spec__\n \n def __enter__(self):\n \n \n \n self._spec._initializing=True\n sys.modules[self._spec.name]=self._module\n \n def __exit__(self,*args):\n try:\n spec=self._spec\n if any(arg is not None for arg in args):\n try:\n del sys.modules[spec.name]\n except KeyError:\n pass\n else:\n _verbose_message('import {!r} # {!r}',spec.name,spec.loader)\n finally:\n self._spec._initializing=False\n \n \nclass ModuleSpec:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,name,loader,*,origin=None,loader_state=None,\n is_package=None):\n self.name=name\n self.loader=loader\n self.origin=origin\n self.loader_state=loader_state\n self.submodule_search_locations=[]if is_package else None\n \n \n self._set_fileattr=False\n self._cached=None\n \n def __repr__(self):\n args=['name={!r}'.format(self.name),\n 'loader={!r}'.format(self.loader)]\n if self.origin is not None:\n args.append('origin={!r}'.format(self.origin))\n if self.submodule_search_locations is not None:\n args.append('submodule_search_locations={}'\n 
.format(self.submodule_search_locations))\n return '{}({})'.format(self.__class__.__name__,', '.join(args))\n \n def __eq__(self,other):\n smsl=self.submodule_search_locations\n try:\n return(self.name ==other.name and\n self.loader ==other.loader and\n self.origin ==other.origin and\n smsl ==other.submodule_search_locations and\n self.cached ==other.cached and\n self.has_location ==other.has_location)\n except AttributeError:\n return False\n \n @property\n def cached(self):\n if self._cached is None:\n if self.origin is not None and self._set_fileattr:\n if _bootstrap_external is None:\n raise NotImplementedError\n self._cached=_bootstrap_external._get_cached(self.origin)\n return self._cached\n \n @cached.setter\n def cached(self,cached):\n self._cached=cached\n \n @property\n def parent(self):\n ''\n if self.submodule_search_locations is None:\n return self.name.rpartition('.')[0]\n else:\n return self.name\n \n @property\n def has_location(self):\n return self._set_fileattr\n \n @has_location.setter\n def has_location(self,value):\n self._set_fileattr=bool(value)\n \n \ndef spec_from_loader(name,loader,*,origin=None,is_package=None):\n ''\n if hasattr(loader,'get_filename'):\n if _bootstrap_external is None:\n raise NotImplementedError\n spec_from_file_location=_bootstrap_external.spec_from_file_location\n \n if is_package is None:\n return spec_from_file_location(name,loader=loader)\n search=[]if is_package else None\n return spec_from_file_location(name,loader=loader,\n submodule_search_locations=search)\n \n if is_package is None:\n if hasattr(loader,'is_package'):\n try:\n is_package=loader.is_package(name)\n except ImportError:\n is_package=None\n else:\n \n is_package=False\n \n return ModuleSpec(name,loader,origin=origin,is_package=is_package)\n \n \ndef _spec_from_module(module,loader=None,origin=None):\n\n try:\n spec=module.__spec__\n except AttributeError:\n pass\n else:\n if spec is not None:\n return spec\n \n name=module.__name__\n if loader is None:\n try:\n loader=module.__loader__\n except AttributeError:\n \n pass\n try:\n location=module.__file__\n except AttributeError:\n location=None\n if origin is None:\n if location is None:\n try:\n origin=loader._ORIGIN\n except AttributeError:\n origin=None\n else:\n origin=location\n try:\n cached=module.__cached__\n except AttributeError:\n cached=None\n try:\n submodule_search_locations=list(module.__path__)\n except AttributeError:\n submodule_search_locations=None\n \n spec=ModuleSpec(name,loader,origin=origin)\n spec._set_fileattr=False if location is None else True\n spec.cached=cached\n spec.submodule_search_locations=submodule_search_locations\n return spec\n \n \ndef _init_module_attrs(spec,module,*,override=False):\n\n\n\n if(override or getattr(module,'__name__',None)is None):\n try:\n module.__name__=spec.name\n except AttributeError:\n pass\n \n if override or getattr(module,'__loader__',None)is None:\n loader=spec.loader\n if loader is None:\n \n if spec.submodule_search_locations is not None:\n if _bootstrap_external is None:\n raise NotImplementedError\n _NamespaceLoader=_bootstrap_external._NamespaceLoader\n \n loader=_NamespaceLoader.__new__(_NamespaceLoader)\n loader._path=spec.submodule_search_locations\n spec.loader=loader\n \n \n \n \n \n \n \n \n \n \n module.__file__=None\n try:\n module.__loader__=loader\n except AttributeError:\n pass\n \n if override or getattr(module,'__package__',None)is None:\n try:\n module.__package__=spec.parent\n except AttributeError:\n pass\n \n try:\n 
module.__spec__=spec\n except AttributeError:\n pass\n \n if override or getattr(module,'__path__',None)is None:\n if spec.submodule_search_locations is not None:\n try:\n module.__path__=spec.submodule_search_locations\n except AttributeError:\n pass\n \n if spec.has_location:\n if override or getattr(module,'__file__',None)is None:\n try:\n module.__file__=spec.origin\n except AttributeError:\n pass\n \n if override or getattr(module,'__cached__',None)is None:\n if spec.cached is not None:\n try:\n module.__cached__=spec.cached\n except AttributeError:\n pass\n return module\n \n \ndef module_from_spec(spec):\n ''\n \n module=None\n if hasattr(spec.loader,'create_module'):\n \n \n module=spec.loader.create_module(spec)\n elif hasattr(spec.loader,'exec_module'):\n raise ImportError('loaders that define exec_module() '\n 'must also define create_module()')\n if module is None:\n module=_new_module(spec.name)\n _init_module_attrs(spec,module)\n return module\n \n \ndef _module_repr_from_spec(spec):\n ''\n \n name='?'if spec.name is None else spec.name\n if spec.origin is None:\n if spec.loader is None:\n return ''.format(name)\n else:\n return ''.format(name,spec.loader)\n else:\n if spec.has_location:\n return ''.format(name,spec.origin)\n else:\n return ''.format(spec.name,spec.origin)\n \n \n \ndef _exec(spec,module):\n ''\n name=spec.name\n with _ModuleLockManager(name):\n if sys.modules.get(name)is not module:\n msg='module {!r} not in sys.modules'.format(name)\n raise ImportError(msg,name=name)\n if spec.loader is None:\n if spec.submodule_search_locations is None:\n raise ImportError('missing loader',name=spec.name)\n \n _init_module_attrs(spec,module,override=True)\n return module\n _init_module_attrs(spec,module,override=True)\n if not hasattr(spec.loader,'exec_module'):\n \n \n \n spec.loader.load_module(name)\n else:\n spec.loader.exec_module(module)\n return sys.modules[name]\n \n \ndef _load_backward_compatible(spec):\n\n\n\n spec.loader.load_module(spec.name)\n \n module=sys.modules[spec.name]\n if getattr(module,'__loader__',None)is None:\n try:\n module.__loader__=spec.loader\n except AttributeError:\n pass\n if getattr(module,'__package__',None)is None:\n try:\n \n \n \n module.__package__=module.__name__\n if not hasattr(module,'__path__'):\n module.__package__=spec.name.rpartition('.')[0]\n except AttributeError:\n pass\n if getattr(module,'__spec__',None)is None:\n try:\n module.__spec__=spec\n except AttributeError:\n pass\n return module\n \ndef _load_unlocked(spec):\n\n if spec.loader is not None:\n \n if not hasattr(spec.loader,'exec_module'):\n return _load_backward_compatible(spec)\n \n module=module_from_spec(spec)\n with _installed_safely(module):\n if spec.loader is None:\n if spec.submodule_search_locations is None:\n raise ImportError('missing loader',name=spec.name)\n \n else:\n spec.loader.exec_module(module)\n \n \n \n \n return sys.modules[spec.name]\n \n \n \ndef _load(spec):\n ''\n\n\n\n\n\n\n \n with _ModuleLockManager(spec.name):\n return _load_unlocked(spec)\n \n \n \n \nclass BuiltinImporter:\n\n ''\n\n\n\n\n \n \n @staticmethod\n def module_repr(module):\n ''\n\n\n\n \n return ''.format(module.__name__)\n \n @classmethod\n def find_spec(cls,fullname,path=None,target=None):\n if path is not None:\n return None\n if _imp.is_builtin(fullname):\n return spec_from_loader(fullname,cls,origin='built-in')\n else:\n return None\n \n @classmethod\n def find_module(cls,fullname,path=None):\n ''\n\n\n\n\n\n \n spec=cls.find_spec(fullname,path)\n return 
spec.loader if spec is not None else None\n \n @classmethod\n def create_module(self,spec):\n ''\n if spec.name not in sys.builtin_module_names:\n raise ImportError('{!r} is not a built-in module'.format(spec.name),\n name=spec.name)\n return _call_with_frames_removed(_imp.create_builtin,spec)\n \n @classmethod\n def exec_module(self,module):\n ''\n _call_with_frames_removed(_imp.exec_builtin,module)\n \n @classmethod\n @_requires_builtin\n def get_code(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_builtin\n def get_source(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_builtin\n def is_package(cls,fullname):\n ''\n return False\n \n load_module=classmethod(_load_module_shim)\n \n \nclass FrozenImporter:\n\n ''\n\n\n\n\n \n \n @staticmethod\n def module_repr(m):\n ''\n\n\n\n \n return ''.format(m.__name__)\n \n @classmethod\n def find_spec(cls,fullname,path=None,target=None):\n if _imp.is_frozen(fullname):\n return spec_from_loader(fullname,cls,origin='frozen')\n else:\n return None\n \n @classmethod\n def find_module(cls,fullname,path=None):\n ''\n\n\n\n \n return cls if _imp.is_frozen(fullname)else None\n \n @classmethod\n def create_module(cls,spec):\n ''\n \n @staticmethod\n def exec_module(module):\n name=module.__spec__.name\n if not _imp.is_frozen(name):\n raise ImportError('{!r} is not a frozen module'.format(name),\n name=name)\n code=_call_with_frames_removed(_imp.get_frozen_object,name)\n exec(code,module.__dict__)\n \n @classmethod\n def load_module(cls,fullname):\n ''\n\n\n\n \n return _load_module_shim(cls,fullname)\n \n @classmethod\n @_requires_frozen\n def get_code(cls,fullname):\n ''\n return _imp.get_frozen_object(fullname)\n \n @classmethod\n @_requires_frozen\n def get_source(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_frozen\n def is_package(cls,fullname):\n ''\n return _imp.is_frozen_package(fullname)\n \n \n \n \nclass _ImportLockContext:\n\n ''\n \n def __enter__(self):\n ''\n _imp.acquire_lock()\n \n def __exit__(self,exc_type,exc_value,exc_traceback):\n ''\n _imp.release_lock()\n \n \ndef _resolve_name(name,package,level):\n ''\n bits=package.rsplit('.',level -1)\n if len(bits)= 0')\n if level >0:\n if not isinstance(package,str):\n raise TypeError('__package__ not set to a string')\n elif not package:\n raise ImportError('attempted relative import with no known parent '\n 'package')\n if not name and level ==0:\n raise ValueError('Empty module name')\n \n \n_ERR_MSG_PREFIX='No module named '\n_ERR_MSG=_ERR_MSG_PREFIX+'{!r}'\n\ndef _find_and_load_unlocked(name,import_):\n path=None\n parent=name.rpartition('.')[0]\n if parent:\n if parent not in sys.modules:\n _call_with_frames_removed(import_,parent)\n \n if name in sys.modules:\n return sys.modules[name]\n parent_module=sys.modules[parent]\n try:\n path=parent_module.__path__\n except AttributeError:\n msg=(_ERR_MSG+'; {!r} is not a package').format(name,parent)\n raise ModuleNotFoundError(msg,name=name)from None\n spec=_find_spec(name,path)\n if spec is None:\n raise ModuleNotFoundError(_ERR_MSG.format(name),name=name)\n else:\n module=_load_unlocked(spec)\n if parent:\n \n parent_module=sys.modules[parent]\n setattr(parent_module,name.rpartition('.')[2],module)\n return module\n \n \n_NEEDS_LOADING=object()\n\n\ndef _find_and_load(name,import_):\n ''\n with _ModuleLockManager(name):\n module=sys.modules.get(name,_NEEDS_LOADING)\n if module is _NEEDS_LOADING:\n return _find_and_load_unlocked(name,import_)\n \n if module is None:\n message=('import of {} 
halted; '\n 'None in sys.modules'.format(name))\n raise ModuleNotFoundError(message,name=name)\n \n _lock_unlock_module(name)\n return module\n \n \ndef _gcd_import(name,package=None,level=0):\n ''\n\n\n\n\n\n\n \n _sanity_check(name,package,level)\n if level >0:\n name=_resolve_name(name,package,level)\n return _find_and_load(name,_gcd_import)\n \n \ndef _handle_fromlist(module,fromlist,import_,*,recursive=False):\n ''\n\n\n\n\n\n \n \n \n if hasattr(module,'__path__'):\n for x in fromlist:\n if not isinstance(x,str):\n if recursive:\n where=module.__name__+'.__all__'\n else:\n where=\"``from list''\"\n raise TypeError(f\"Item in {where} must be str, \"\n f\"not {type(x).__name__}\")\n elif x =='*':\n if not recursive and hasattr(module,'__all__'):\n _handle_fromlist(module,module.__all__,import_,\n recursive=True)\n elif not hasattr(module,x):\n from_name='{}.{}'.format(module.__name__,x)\n try:\n _call_with_frames_removed(import_,from_name)\n except ModuleNotFoundError as exc:\n \n \n \n if(exc.name ==from_name and\n sys.modules.get(from_name,_NEEDS_LOADING)is not None):\n continue\n raise\n return module\n \n \ndef _calc___package__(globals):\n ''\n\n\n\n\n \n package=globals.get('__package__')\n spec=globals.get('__spec__')\n if package is not None:\n if spec is not None and package !=spec.parent:\n _warnings.warn(\"__package__ != __spec__.parent \"\n f\"({package !r} != {spec.parent !r})\",\n ImportWarning,stacklevel=3)\n return package\n elif spec is not None:\n return spec.parent\n else:\n _warnings.warn(\"can't resolve package from __spec__ or __package__, \"\n \"falling back on __name__ and __path__\",\n ImportWarning,stacklevel=3)\n package=globals['__name__']\n if '__path__'not in globals:\n package=package.rpartition('.')[0]\n return package\n \n \ndef __import__(name,globals=None,locals=None,fromlist=(),level=0):\n ''\n\n\n\n\n\n\n\n\n \n if level ==0:\n module=_gcd_import(name)\n else:\n globals_=globals if globals is not None else{}\n package=_calc___package__(globals_)\n module=_gcd_import(name,package,level)\n if not fromlist:\n \n \n if level ==0:\n return _gcd_import(name.partition('.')[0])\n elif not name:\n return module\n else:\n \n \n cut_off=len(name)-len(name.partition('.')[0])\n \n \n return sys.modules[module.__name__[:len(module.__name__)-cut_off]]\n else:\n return _handle_fromlist(module,fromlist,_gcd_import)\n \n \ndef _builtin_from_name(name):\n spec=BuiltinImporter.find_spec(name)\n if spec is None:\n raise ImportError('no built-in module named '+name)\n return _load_unlocked(spec)\n \n \ndef _setup(sys_module,_imp_module):\n ''\n\n\n\n\n\n \n global _imp,sys\n _imp=_imp_module\n sys=sys_module\n \n \n module_type=type(sys)\n for name,module in sys.modules.items():\n if isinstance(module,module_type):\n if name in sys.builtin_module_names:\n loader=BuiltinImporter\n elif _imp.is_frozen(name):\n loader=FrozenImporter\n else:\n continue\n spec=_spec_from_module(module,loader)\n _init_module_attrs(spec,module)\n \n \n self_module=sys.modules[__name__]\n \n \n for builtin_name in('_warnings',):\n if builtin_name not in sys.modules:\n builtin_module=_builtin_from_name(builtin_name)\n else:\n builtin_module=sys.modules[builtin_name]\n setattr(self_module,builtin_name,builtin_module)\n \n \ndef _install(sys_module,_imp_module):\n ''\n _setup(sys_module,_imp_module)\n \n sys.meta_path.append(BuiltinImporter)\n sys.meta_path.append(FrozenImporter)\n \n \ndef _install_external_importers():\n ''\n global _bootstrap_external\n import _frozen_importlib_external\n 
_bootstrap_external=_frozen_importlib_external\n _frozen_importlib_external._install(sys.modules[__name__])\n", ["_frozen_importlib_external", "_weakref"]], "importlib": [".py", "''\n__all__=['__import__','import_module','invalidate_caches','reload']\n\n\n\n\n\n\n\n\n\nimport _imp\nimport sys\n\ntry:\n import _frozen_importlib as _bootstrap\nexcept ImportError:\n from. import _bootstrap\n _bootstrap._setup(sys,_imp)\nelse:\n\n\n _bootstrap.__name__='importlib._bootstrap'\n _bootstrap.__package__='importlib'\n try:\n _bootstrap.__file__=__file__.replace('__init__.py','_bootstrap.py')\n except NameError:\n \n \n pass\n sys.modules['importlib._bootstrap']=_bootstrap\n \ntry:\n import _frozen_importlib_external as _bootstrap_external\nexcept ImportError:\n from. import _bootstrap_external\n _bootstrap_external._set_bootstrap_module(_bootstrap)\n _bootstrap._bootstrap_external=_bootstrap_external\nelse:\n _bootstrap_external.__name__='importlib._bootstrap_external'\n _bootstrap_external.__package__='importlib'\n try:\n _bootstrap_external.__file__=__file__.replace('__init__.py','_bootstrap_external.py')\n except NameError:\n \n \n pass\n sys.modules['importlib._bootstrap_external']=_bootstrap_external\n \n \n_pack_uint32=_bootstrap_external._pack_uint32\n_unpack_uint32=_bootstrap_external._unpack_uint32\n\n\n\n\nimport warnings\n\n\n\n\nfrom._bootstrap import __import__\n\n\ndef invalidate_caches():\n ''\n \n for finder in sys.meta_path:\n if hasattr(finder,'invalidate_caches'):\n finder.invalidate_caches()\n \n \ndef import_module(name,package=None):\n ''\n\n\n\n\n\n \n level=0\n if name.startswith('.'):\n if not package:\n raise TypeError(\"the 'package' argument is required to perform a \"\n f\"relative import for {name !r}\")\n for character in name:\n if character !='.':\n break\n level +=1\n return _bootstrap._gcd_import(name[level:],package,level)\n \n \n_RELOADING={}\n\n\ndef reload(module):\n ''\n\n\n\n \n try:\n name=module.__spec__.name\n except AttributeError:\n try:\n name=module.__name__\n except AttributeError:\n raise TypeError(\"reload() argument must be a module\")\n \n if sys.modules.get(name)is not module:\n raise ImportError(f\"module {name} not in sys.modules\",name=name)\n if name in _RELOADING:\n return _RELOADING[name]\n _RELOADING[name]=module\n try:\n parent_name=name.rpartition('.')[0]\n if parent_name:\n try:\n parent=sys.modules[parent_name]\n except KeyError:\n raise ImportError(f\"parent {parent_name !r} not in sys.modules\",\n name=parent_name)from None\n else:\n pkgpath=parent.__path__\n else:\n pkgpath=None\n target=module\n spec=module.__spec__=_bootstrap._find_spec(name,pkgpath,target)\n if spec is None:\n raise ModuleNotFoundError(f\"spec not found for the module {name !r}\",name=name)\n _bootstrap._exec(spec,module)\n \n return sys.modules[name]\n finally:\n try:\n del _RELOADING[name]\n except KeyError:\n pass\n", ["_frozen_importlib", "_frozen_importlib_external", "_imp", "importlib", "importlib._bootstrap", "importlib._bootstrap_external", "sys", "warnings"], 1], "importlib._bootstrap_external": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_bootstrap=None\n\n\nimport _imp\nimport _io\nimport sys\nimport _warnings\nimport marshal\n\n\n_MS_WINDOWS=(sys.platform =='win32')\nif _MS_WINDOWS:\n import nt as _os\n import winreg\nelse:\n import posix as _os\n \n \nif _MS_WINDOWS:\n path_separators=['\\\\','/']\nelse:\n path_separators=['/']\n \nassert all(len(sep)==1 for sep in 
path_separators)\npath_sep=path_separators[0]\npath_sep_tuple=tuple(path_separators)\npath_separators=''.join(path_separators)\n_pathseps_with_colon={f':{s}'for s in path_separators}\n\n\n\n_CASE_INSENSITIVE_PLATFORMS_STR_KEY='win',\n_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY='cygwin','darwin'\n_CASE_INSENSITIVE_PLATFORMS=(_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY\n+_CASE_INSENSITIVE_PLATFORMS_STR_KEY)\n\n\ndef _make_relax_case():\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS_STR_KEY):\n key='PYTHONCASEOK'\n else:\n key=b'PYTHONCASEOK'\n \n def _relax_case():\n ''\n return not sys.flags.ignore_environment and key in _os.environ\n else:\n def _relax_case():\n ''\n return False\n return _relax_case\n \n_relax_case=_make_relax_case()\n\n\ndef _pack_uint32(x):\n ''\n return(int(x)&0xFFFFFFFF).to_bytes(4,'little')\n \n \ndef _unpack_uint32(data):\n ''\n assert len(data)==4\n return int.from_bytes(data,'little')\n \ndef _unpack_uint16(data):\n ''\n assert len(data)==2\n return int.from_bytes(data,'little')\n \n \nif _MS_WINDOWS:\n def _path_join(*path_parts):\n ''\n if not path_parts:\n return \"\"\n if len(path_parts)==1:\n return path_parts[0]\n root=\"\"\n path=[]\n for new_root,tail in map(_os._path_splitroot,path_parts):\n if new_root.startswith(path_sep_tuple)or new_root.endswith(path_sep_tuple):\n root=new_root.rstrip(path_separators)or root\n path=[path_sep+tail]\n elif new_root.endswith(':'):\n if root.casefold()!=new_root.casefold():\n \n \n root=new_root\n path=[tail]\n else:\n path.append(tail)\n else:\n root=new_root or root\n path.append(tail)\n path=[p.rstrip(path_separators)for p in path if p]\n if len(path)==1 and not path[0]:\n \n return root+path_sep\n return root+path_sep.join(path)\n \nelse:\n def _path_join(*path_parts):\n ''\n return path_sep.join([part.rstrip(path_separators)\n for part in path_parts if part])\n \n \ndef _path_split(path):\n ''\n i=max(path.rfind(p)for p in path_separators)\n if i <0:\n return '',path\n return path[:i],path[i+1:]\n \n \ndef _path_stat(path):\n ''\n\n\n\n\n \n return _os.stat(path)\n \n \ndef _path_is_mode_type(path,mode):\n ''\n try:\n stat_info=_path_stat(path)\n except OSError:\n return False\n return(stat_info.st_mode&0o170000)==mode\n \n \ndef _path_isfile(path):\n ''\n return _path_is_mode_type(path,0o100000)\n \n \ndef _path_isdir(path):\n ''\n if not path:\n path=_os.getcwd()\n return _path_is_mode_type(path,0o040000)\n \n \nif _MS_WINDOWS:\n def _path_isabs(path):\n ''\n if not path:\n return False\n root=_os._path_splitroot(path)[0].replace('/','\\\\')\n return len(root)>1 and(root.startswith('\\\\\\\\')or root.endswith('\\\\'))\n \nelse:\n def _path_isabs(path):\n ''\n return path.startswith(path_separators)\n \n \ndef _path_abspath(path):\n ''\n if not _path_isabs(path):\n for sep in path_separators:\n path=path.removeprefix(f\".{sep}\")\n return _path_join(_os.getcwd(),path)\n else:\n return path\n \n \ndef _write_atomic(path,data,mode=0o666):\n ''\n\n \n \n path_tmp=f'{path}.{id(path)}'\n fd=_os.open(path_tmp,\n _os.O_EXCL |_os.O_CREAT |_os.O_WRONLY,mode&0o666)\n try:\n \n \n with _io.FileIO(fd,'wb')as file:\n file.write(data)\n _os.replace(path_tmp,path)\n except OSError:\n try:\n _os.unlink(path_tmp)\n except OSError:\n pass\n raise\n \n 
\n_code_type=type(_write_atomic.__code__)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nMAGIC_NUMBER=(3531).to_bytes(2,'little')+b'\\r\\n'\n\n_RAW_MAGIC_NUMBER=int.from_bytes(MAGIC_NUMBER,'little')\n\n_PYCACHE='__pycache__'\n_OPT='opt-'\n\nSOURCE_SUFFIXES=['.py']\nif _MS_WINDOWS:\n SOURCE_SUFFIXES.append('.pyw')\n \nEXTENSION_SUFFIXES=_imp.extension_suffixes()\n\nBYTECODE_SUFFIXES=['.pyc']\n\nDEBUG_BYTECODE_SUFFIXES=OPTIMIZED_BYTECODE_SUFFIXES=BYTECODE_SUFFIXES\n\ndef cache_from_source(path,debug_override=None,*,optimization=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if debug_override is not None:\n _warnings.warn('the debug_override parameter is deprecated; use '\n \"'optimization' instead\",DeprecationWarning)\n if optimization is not None:\n message='debug_override or optimization must be set to None'\n raise TypeError(message)\n optimization=''if debug_override else 1\n path=_os.fspath(path)\n head,tail=_path_split(path)\n base,sep,rest=tail.rpartition('.')\n tag=sys.implementation.cache_tag\n if tag is None:\n raise NotImplementedError('sys.implementation.cache_tag is None')\n almost_filename=''.join([(base if base else rest),sep,tag])\n if optimization is None:\n if sys.flags.optimize ==0:\n optimization=''\n else:\n optimization=sys.flags.optimize\n optimization=str(optimization)\n if optimization !='':\n if not optimization.isalnum():\n raise ValueError(f'{optimization !r} is not alphanumeric')\n almost_filename=f'{almost_filename}.{_OPT}{optimization}'\n filename=almost_filename+BYTECODE_SUFFIXES[0]\n if sys.pycache_prefix is not None:\n \n \n \n \n \n \n \n \n head=_path_abspath(head)\n \n \n \n \n if head[1]==':'and head[0]not in path_separators:\n head=head[2:]\n \n \n \n return _path_join(\n sys.pycache_prefix,\n head.lstrip(path_separators),\n filename,\n )\n return _path_join(head,_PYCACHE,filename)\n \n \ndef source_from_cache(path):\n ''\n\n\n\n\n\n\n \n if sys.implementation.cache_tag is None:\n raise NotImplementedError('sys.implementation.cache_tag is None')\n path=_os.fspath(path)\n head,pycache_filename=_path_split(path)\n found_in_pycache_prefix=False\n if sys.pycache_prefix is not None:\n stripped_path=sys.pycache_prefix.rstrip(path_separators)\n if head.startswith(stripped_path+path_sep):\n head=head[len(stripped_path):]\n found_in_pycache_prefix=True\n if not found_in_pycache_prefix:\n head,pycache=_path_split(head)\n if pycache !=_PYCACHE:\n raise ValueError(f'{_PYCACHE} not bottom-level directory in '\n f'{path !r}')\n dot_count=pycache_filename.count('.')\n if dot_count not in{2,3}:\n raise ValueError(f'expected only 2 or 3 dots in {pycache_filename !r}')\n elif dot_count ==3:\n optimization=pycache_filename.rsplit('.',2)[-2]\n if not optimization.startswith(_OPT):\n raise ValueError(\"optimization portion of filename does not start \"\n f\"with {_OPT !r}\")\n opt_level=optimization[len(_OPT):]\n if not opt_level.isalnum():\n raise ValueError(f\"optimization level {optimization !r} is not an \"\n \"alphanumeric value\")\n base_filename=pycache_filename.partition('.')[0]\n return 
_path_join(head,base_filename+SOURCE_SUFFIXES[0])\n \n \ndef _get_sourcefile(bytecode_path):\n ''\n\n\n\n\n \n if len(bytecode_path)==0:\n return None\n rest,_,extension=bytecode_path.rpartition('.')\n if not rest or extension.lower()[-3:-1]!='py':\n return bytecode_path\n try:\n source_path=source_from_cache(bytecode_path)\n except(NotImplementedError,ValueError):\n source_path=bytecode_path[:-1]\n return source_path if _path_isfile(source_path)else bytecode_path\n \n \ndef _get_cached(filename):\n if filename.endswith(tuple(SOURCE_SUFFIXES)):\n try:\n return cache_from_source(filename)\n except NotImplementedError:\n pass\n elif filename.endswith(tuple(BYTECODE_SUFFIXES)):\n return filename\n else:\n return None\n \n \ndef _calc_mode(path):\n ''\n try:\n mode=_path_stat(path).st_mode\n except OSError:\n mode=0o666\n \n \n mode |=0o200\n return mode\n \n \ndef _check_name(method):\n ''\n\n\n\n\n\n \n def _check_name_wrapper(self,name=None,*args,**kwargs):\n if name is None:\n name=self.name\n elif self.name !=name:\n raise ImportError('loader for %s cannot handle %s'%\n (self.name,name),name=name)\n return method(self,name,*args,**kwargs)\n \n \n \n if _bootstrap is not None:\n _wrap=_bootstrap._wrap\n else:\n def _wrap(new,old):\n for replace in['__module__','__name__','__qualname__','__doc__']:\n if hasattr(old,replace):\n setattr(new,replace,getattr(old,replace))\n new.__dict__.update(old.__dict__)\n \n _wrap(_check_name_wrapper,method)\n return _check_name_wrapper\n \n \ndef _classify_pyc(data,name,exc_details):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n magic=data[:4]\n if magic !=MAGIC_NUMBER:\n message=f'bad magic number in {name !r}: {magic !r}'\n _bootstrap._verbose_message('{}',message)\n raise ImportError(message,**exc_details)\n if len(data)<16:\n message=f'reached EOF while reading pyc header of {name !r}'\n _bootstrap._verbose_message('{}',message)\n raise EOFError(message)\n flags=_unpack_uint32(data[4:8])\n \n if flags&~0b11:\n message=f'invalid flags {flags !r} in {name !r}'\n raise ImportError(message,**exc_details)\n return flags\n \n \ndef _validate_timestamp_pyc(data,source_mtime,source_size,name,\nexc_details):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _unpack_uint32(data[8:12])!=(source_mtime&0xFFFFFFFF):\n message=f'bytecode is stale for {name !r}'\n _bootstrap._verbose_message('{}',message)\n raise ImportError(message,**exc_details)\n if(source_size is not None and\n _unpack_uint32(data[12:16])!=(source_size&0xFFFFFFFF)):\n raise ImportError(f'bytecode is stale for {name !r}',**exc_details)\n \n \ndef _validate_hash_pyc(data,source_hash,name,exc_details):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if data[8:16]!=source_hash:\n raise ImportError(\n f'hash in bytecode doesn\\'t match hash of source {name !r}',\n **exc_details,\n )\n \n \ndef _compile_bytecode(data,name=None,bytecode_path=None,source_path=None):\n ''\n code=marshal.loads(data)\n if isinstance(code,_code_type):\n _bootstrap._verbose_message('code object from {!r}',bytecode_path)\n if source_path is not None:\n _imp._fix_co_filename(code,source_path)\n return code\n else:\n raise ImportError(f'Non-code object in {bytecode_path !r}',\n name=name,path=bytecode_path)\n \n \ndef _code_to_timestamp_pyc(code,mtime=0,source_size=0):\n ''\n data=bytearray(MAGIC_NUMBER)\n data.extend(_pack_uint32(0))\n data.extend(_pack_uint32(mtime))\n data.extend(_pack_uint32(source_size))\n data.extend(marshal.dumps(code))\n return data\n \n \ndef _code_to_hash_pyc(code,source_hash,checked=True):\n ''\n data=bytearray(MAGIC_NUMBER)\n 
flags=0b1 |checked <<1\n data.extend(_pack_uint32(flags))\n assert len(source_hash)==8\n data.extend(source_hash)\n data.extend(marshal.dumps(code))\n return data\n \n \ndef decode_source(source_bytes):\n ''\n\n\n \n import tokenize\n source_bytes_readline=_io.BytesIO(source_bytes).readline\n encoding=tokenize.detect_encoding(source_bytes_readline)\n newline_decoder=_io.IncrementalNewlineDecoder(None,True)\n return newline_decoder.decode(source_bytes.decode(encoding[0]))\n \n \n \n \n_POPULATE=object()\n\n\ndef spec_from_file_location(name,location=None,*,loader=None,\nsubmodule_search_locations=_POPULATE):\n ''\n\n\n\n\n\n\n\n\n \n if location is None:\n \n \n \n location=''\n if hasattr(loader,'get_filename'):\n \n try:\n location=loader.get_filename(name)\n except ImportError:\n pass\n else:\n location=_os.fspath(location)\n try:\n location=_path_abspath(location)\n except OSError:\n pass\n \n \n \n \n \n \n \n spec=_bootstrap.ModuleSpec(name,loader,origin=location)\n spec._set_fileattr=True\n \n \n if loader is None:\n for loader_class,suffixes in _get_supported_file_loaders():\n if location.endswith(tuple(suffixes)):\n loader=loader_class(name,location)\n spec.loader=loader\n break\n else:\n return None\n \n \n if submodule_search_locations is _POPULATE:\n \n if hasattr(loader,'is_package'):\n try:\n is_package=loader.is_package(name)\n except ImportError:\n pass\n else:\n if is_package:\n spec.submodule_search_locations=[]\n else:\n spec.submodule_search_locations=submodule_search_locations\n if spec.submodule_search_locations ==[]:\n if location:\n dirname=_path_split(location)[0]\n spec.submodule_search_locations.append(dirname)\n \n return spec\n \n \ndef _bless_my_loader(module_globals):\n ''\n\n\n \n \n \n \n \n \n \n \n if not isinstance(module_globals,dict):\n return None\n \n missing=object()\n loader=module_globals.get('__loader__',None)\n spec=module_globals.get('__spec__',missing)\n \n if loader is None:\n if spec is missing:\n \n \n return None\n elif spec is None:\n raise ValueError('Module globals is missing a __spec__.loader')\n \n spec_loader=getattr(spec,'loader',missing)\n \n if spec_loader in(missing,None):\n if loader is None:\n exc=AttributeError if spec_loader is missing else ValueError\n raise exc('Module globals is missing a __spec__.loader')\n _warnings.warn(\n 'Module globals is missing a __spec__.loader',\n DeprecationWarning)\n spec_loader=loader\n \n assert spec_loader is not None\n if loader is not None and loader !=spec_loader:\n _warnings.warn(\n 'Module globals; __loader__ != __spec__.loader',\n DeprecationWarning)\n return loader\n \n return spec_loader\n \n \n \n \nclass WindowsRegistryFinder:\n\n ''\n \n REGISTRY_KEY=(\n 'Software\\\\Python\\\\PythonCore\\\\{sys_version}'\n '\\\\Modules\\\\{fullname}')\n REGISTRY_KEY_DEBUG=(\n 'Software\\\\Python\\\\PythonCore\\\\{sys_version}'\n '\\\\Modules\\\\{fullname}\\\\Debug')\n DEBUG_BUILD=(_MS_WINDOWS and '_d.pyd'in EXTENSION_SUFFIXES)\n \n @staticmethod\n def _open_registry(key):\n try:\n return winreg.OpenKey(winreg.HKEY_CURRENT_USER,key)\n except OSError:\n return winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,key)\n \n @classmethod\n def _search_registry(cls,fullname):\n if cls.DEBUG_BUILD:\n registry_key=cls.REGISTRY_KEY_DEBUG\n else:\n registry_key=cls.REGISTRY_KEY\n key=registry_key.format(fullname=fullname,\n sys_version='%d.%d'%sys.version_info[:2])\n try:\n with cls._open_registry(key)as hkey:\n filepath=winreg.QueryValue(hkey,'')\n except OSError:\n return None\n return filepath\n \n @classmethod\n 
def find_spec(cls,fullname,path=None,target=None):\n filepath=cls._search_registry(fullname)\n if filepath is None:\n return None\n try:\n _path_stat(filepath)\n except OSError:\n return None\n for loader,suffixes in _get_supported_file_loaders():\n if filepath.endswith(tuple(suffixes)):\n spec=_bootstrap.spec_from_loader(fullname,\n loader(fullname,filepath),\n origin=filepath)\n return spec\n \n \nclass _LoaderBasics:\n\n ''\n \n \n def is_package(self,fullname):\n ''\n \n filename=_path_split(self.get_filename(fullname))[1]\n filename_base=filename.rsplit('.',1)[0]\n tail_name=fullname.rpartition('.')[2]\n return filename_base =='__init__'and tail_name !='__init__'\n \n def create_module(self,spec):\n ''\n \n def exec_module(self,module):\n ''\n code=self.get_code(module.__name__)\n if code is None:\n raise ImportError(f'cannot load module {module.__name__ !r} when '\n 'get_code() returns None')\n _bootstrap._call_with_frames_removed(exec,code,module.__dict__)\n \n def load_module(self,fullname):\n ''\n \n return _bootstrap._load_module_shim(self,fullname)\n \n \nclass SourceLoader(_LoaderBasics):\n\n def path_mtime(self,path):\n ''\n\n\n\n \n raise OSError\n \n def path_stats(self,path):\n ''\n\n\n\n\n\n\n\n\n\n \n return{'mtime':self.path_mtime(path)}\n \n def _cache_bytecode(self,source_path,cache_path,data):\n ''\n\n\n\n\n \n \n return self.set_data(cache_path,data)\n \n def set_data(self,path,data):\n ''\n\n\n \n \n \n def get_source(self,fullname):\n ''\n path=self.get_filename(fullname)\n try:\n source_bytes=self.get_data(path)\n except OSError as exc:\n raise ImportError('source not available through get_data()',\n name=fullname)from exc\n return decode_source(source_bytes)\n \n def source_to_code(self,data,path,*,_optimize=-1):\n ''\n\n\n \n return _bootstrap._call_with_frames_removed(compile,data,path,'exec',\n dont_inherit=True,optimize=_optimize)\n \n def get_code(self,fullname):\n ''\n\n\n\n\n \n source_path=self.get_filename(fullname)\n source_mtime=None\n source_bytes=None\n source_hash=None\n hash_based=False\n check_source=True\n try:\n bytecode_path=cache_from_source(source_path)\n except NotImplementedError:\n bytecode_path=None\n else:\n try:\n st=self.path_stats(source_path)\n except OSError:\n pass\n else:\n source_mtime=int(st['mtime'])\n try:\n data=self.get_data(bytecode_path)\n except OSError:\n pass\n else:\n exc_details={\n 'name':fullname,\n 'path':bytecode_path,\n }\n try:\n flags=_classify_pyc(data,fullname,exc_details)\n bytes_data=memoryview(data)[16:]\n hash_based=flags&0b1 !=0\n if hash_based:\n check_source=flags&0b10 !=0\n if(_imp.check_hash_based_pycs !='never'and\n (check_source or\n _imp.check_hash_based_pycs =='always')):\n source_bytes=self.get_data(source_path)\n source_hash=_imp.source_hash(\n _RAW_MAGIC_NUMBER,\n source_bytes,\n )\n _validate_hash_pyc(data,source_hash,fullname,\n exc_details)\n else:\n _validate_timestamp_pyc(\n data,\n source_mtime,\n st['size'],\n fullname,\n exc_details,\n )\n except(ImportError,EOFError):\n pass\n else:\n _bootstrap._verbose_message('{} matches {}',bytecode_path,\n source_path)\n return _compile_bytecode(bytes_data,name=fullname,\n bytecode_path=bytecode_path,\n source_path=source_path)\n if source_bytes is None:\n source_bytes=self.get_data(source_path)\n code_object=self.source_to_code(source_bytes,source_path)\n _bootstrap._verbose_message('code object from {}',source_path)\n if(not sys.dont_write_bytecode and bytecode_path is not None and\n source_mtime is not None):\n if hash_based:\n if source_hash 
is None:\n source_hash=_imp.source_hash(_RAW_MAGIC_NUMBER,\n source_bytes)\n data=_code_to_hash_pyc(code_object,source_hash,check_source)\n else:\n data=_code_to_timestamp_pyc(code_object,source_mtime,\n len(source_bytes))\n try:\n self._cache_bytecode(source_path,bytecode_path,data)\n except NotImplementedError:\n pass\n return code_object\n \n \nclass FileLoader:\n\n ''\n \n \n def __init__(self,fullname,path):\n ''\n \n self.name=fullname\n self.path=path\n \n def __eq__(self,other):\n return(self.__class__ ==other.__class__ and\n self.__dict__ ==other.__dict__)\n \n def __hash__(self):\n return hash(self.name)^hash(self.path)\n \n @_check_name\n def load_module(self,fullname):\n ''\n\n\n\n \n \n \n \n return super(FileLoader,self).load_module(fullname)\n \n @_check_name\n def get_filename(self,fullname):\n ''\n return self.path\n \n def get_data(self,path):\n ''\n if isinstance(self,(SourceLoader,ExtensionFileLoader)):\n with _io.open_code(str(path))as file:\n return file.read()\n else:\n with _io.FileIO(path,'r')as file:\n return file.read()\n \n @_check_name\n def get_resource_reader(self,module):\n from importlib.readers import FileReader\n return FileReader(self)\n \n \nclass SourceFileLoader(FileLoader,SourceLoader):\n\n ''\n \n def path_stats(self,path):\n ''\n st=_path_stat(path)\n return{'mtime':st.st_mtime,'size':st.st_size}\n \n def _cache_bytecode(self,source_path,bytecode_path,data):\n \n mode=_calc_mode(source_path)\n return self.set_data(bytecode_path,data,_mode=mode)\n \n def set_data(self,path,data,*,_mode=0o666):\n ''\n parent,filename=_path_split(path)\n path_parts=[]\n \n while parent and not _path_isdir(parent):\n parent,part=_path_split(parent)\n path_parts.append(part)\n \n for part in reversed(path_parts):\n parent=_path_join(parent,part)\n try:\n _os.mkdir(parent)\n except FileExistsError:\n \n continue\n except OSError as exc:\n \n \n _bootstrap._verbose_message('could not create {!r}: {!r}',\n parent,exc)\n return\n try:\n _write_atomic(path,data,_mode)\n _bootstrap._verbose_message('created {!r}',path)\n except OSError as exc:\n \n _bootstrap._verbose_message('could not create {!r}: {!r}',path,\n exc)\n \n \nclass SourcelessFileLoader(FileLoader,_LoaderBasics):\n\n ''\n \n def get_code(self,fullname):\n path=self.get_filename(fullname)\n data=self.get_data(path)\n \n \n exc_details={\n 'name':fullname,\n 'path':path,\n }\n _classify_pyc(data,fullname,exc_details)\n return _compile_bytecode(\n memoryview(data)[16:],\n name=fullname,\n bytecode_path=path,\n )\n \n def get_source(self,fullname):\n ''\n return None\n \n \nclass ExtensionFileLoader(FileLoader,_LoaderBasics):\n\n ''\n\n\n\n \n \n def __init__(self,name,path):\n self.name=name\n self.path=path\n \n def __eq__(self,other):\n return(self.__class__ ==other.__class__ and\n self.__dict__ ==other.__dict__)\n \n def __hash__(self):\n return hash(self.name)^hash(self.path)\n \n def create_module(self,spec):\n ''\n module=_bootstrap._call_with_frames_removed(\n _imp.create_dynamic,spec)\n _bootstrap._verbose_message('extension module {!r} loaded from {!r}',\n spec.name,self.path)\n return module\n \n def exec_module(self,module):\n ''\n _bootstrap._call_with_frames_removed(_imp.exec_dynamic,module)\n _bootstrap._verbose_message('extension module {!r} executed from {!r}',\n self.name,self.path)\n \n def is_package(self,fullname):\n ''\n file_name=_path_split(self.path)[1]\n return any(file_name =='__init__'+suffix\n for suffix in EXTENSION_SUFFIXES)\n \n def get_code(self,fullname):\n ''\n return None\n \n 
def get_source(self,fullname):\n ''\n return None\n \n @_check_name\n def get_filename(self,fullname):\n ''\n return self.path\n \n \nclass _NamespacePath:\n ''\n\n\n\n \n \n \n \n _epoch=0\n \n def __init__(self,name,path,path_finder):\n self._name=name\n self._path=path\n self._last_parent_path=tuple(self._get_parent_path())\n self._last_epoch=self._epoch\n self._path_finder=path_finder\n \n def _find_parent_path_names(self):\n ''\n parent,dot,me=self._name.rpartition('.')\n if dot =='':\n \n return 'sys','path'\n \n \n return parent,'__path__'\n \n def _get_parent_path(self):\n parent_module_name,path_attr_name=self._find_parent_path_names()\n return getattr(sys.modules[parent_module_name],path_attr_name)\n \n def _recalculate(self):\n \n parent_path=tuple(self._get_parent_path())\n if parent_path !=self._last_parent_path or self._epoch !=self._last_epoch:\n spec=self._path_finder(self._name,parent_path)\n \n \n if spec is not None and spec.loader is None:\n if spec.submodule_search_locations:\n self._path=spec.submodule_search_locations\n self._last_parent_path=parent_path\n self._last_epoch=self._epoch\n return self._path\n \n def __iter__(self):\n return iter(self._recalculate())\n \n def __getitem__(self,index):\n return self._recalculate()[index]\n \n def __setitem__(self,index,path):\n self._path[index]=path\n \n def __len__(self):\n return len(self._recalculate())\n \n def __repr__(self):\n return f'_NamespacePath({self._path !r})'\n \n def __contains__(self,item):\n return item in self._recalculate()\n \n def append(self,item):\n self._path.append(item)\n \n \n \n \n \nclass NamespaceLoader:\n def __init__(self,name,path,path_finder):\n self._path=_NamespacePath(name,path,path_finder)\n \n def is_package(self,fullname):\n return True\n \n def get_source(self,fullname):\n return ''\n \n def get_code(self,fullname):\n return compile('','','exec',dont_inherit=True)\n \n def create_module(self,spec):\n ''\n \n def exec_module(self,module):\n pass\n \n def load_module(self,fullname):\n ''\n\n\n\n \n \n _bootstrap._verbose_message('namespace module loaded with path {!r}',\n self._path)\n \n return _bootstrap._load_module_shim(self,fullname)\n \n def get_resource_reader(self,module):\n from importlib.readers import NamespaceReader\n return NamespaceReader(self._path)\n \n \n \n_NamespaceLoader=NamespaceLoader\n\n\n\n\nclass PathFinder:\n\n ''\n \n @staticmethod\n def invalidate_caches():\n ''\n \n for name,finder in list(sys.path_importer_cache.items()):\n \n \n if finder is None or not _path_isabs(name):\n del sys.path_importer_cache[name]\n elif hasattr(finder,'invalidate_caches'):\n finder.invalidate_caches()\n \n \n _NamespacePath._epoch +=1\n \n @staticmethod\n def _path_hooks(path):\n ''\n if sys.path_hooks is not None and not sys.path_hooks:\n _warnings.warn('sys.path_hooks is empty',ImportWarning)\n for hook in sys.path_hooks:\n try:\n return hook(path)\n except ImportError:\n continue\n else:\n return None\n \n @classmethod\n def _path_importer_cache(cls,path):\n ''\n\n\n\n\n \n if path =='':\n try:\n path=_os.getcwd()\n except FileNotFoundError:\n \n \n return None\n try:\n finder=sys.path_importer_cache[path]\n except KeyError:\n finder=cls._path_hooks(path)\n sys.path_importer_cache[path]=finder\n return finder\n \n @classmethod\n def _get_spec(cls,fullname,path,target=None):\n ''\n \n \n namespace_path=[]\n for entry in path:\n if not isinstance(entry,str):\n continue\n finder=cls._path_importer_cache(entry)\n if finder is not None:\n 
spec=finder.find_spec(fullname,target)\n if spec is None:\n continue\n if spec.loader is not None:\n return spec\n portions=spec.submodule_search_locations\n if portions is None:\n raise ImportError('spec missing loader')\n \n \n \n \n namespace_path.extend(portions)\n else:\n spec=_bootstrap.ModuleSpec(fullname,None)\n spec.submodule_search_locations=namespace_path\n return spec\n \n @classmethod\n def find_spec(cls,fullname,path=None,target=None):\n ''\n\n\n \n if path is None:\n path=sys.path\n spec=cls._get_spec(fullname,path,target)\n if spec is None:\n return None\n elif spec.loader is None:\n namespace_path=spec.submodule_search_locations\n if namespace_path:\n \n \n spec.origin=None\n spec.submodule_search_locations=_NamespacePath(fullname,namespace_path,cls._get_spec)\n return spec\n else:\n return None\n else:\n return spec\n \n @staticmethod\n def find_distributions(*args,**kwargs):\n ''\n\n\n\n\n\n\n \n from importlib.metadata import MetadataPathFinder\n return MetadataPathFinder.find_distributions(*args,**kwargs)\n \n \nclass FileFinder:\n\n ''\n\n\n\n\n \n \n def __init__(self,path,*loader_details):\n ''\n\n \n loaders=[]\n for loader,suffixes in loader_details:\n loaders.extend((suffix,loader)for suffix in suffixes)\n self._loaders=loaders\n \n if not path or path =='.':\n self.path=_os.getcwd()\n else:\n self.path=_path_abspath(path)\n self._path_mtime=-1\n self._path_cache=set()\n self._relaxed_path_cache=set()\n \n def invalidate_caches(self):\n ''\n self._path_mtime=-1\n \n def _get_spec(self,loader_class,fullname,path,smsl,target):\n loader=loader_class(fullname,path)\n return spec_from_file_location(fullname,path,loader=loader,\n submodule_search_locations=smsl)\n \n def find_spec(self,fullname,target=None):\n ''\n\n\n \n is_namespace=False\n tail_module=fullname.rpartition('.')[2]\n try:\n mtime=_path_stat(self.path or _os.getcwd()).st_mtime\n except OSError:\n mtime=-1\n if mtime !=self._path_mtime:\n self._fill_cache()\n self._path_mtime=mtime\n \n if _relax_case():\n cache=self._relaxed_path_cache\n cache_module=tail_module.lower()\n else:\n cache=self._path_cache\n cache_module=tail_module\n \n if cache_module in cache:\n base_path=_path_join(self.path,tail_module)\n for suffix,loader_class in self._loaders:\n init_filename='__init__'+suffix\n full_path=_path_join(base_path,init_filename)\n if _path_isfile(full_path):\n return self._get_spec(loader_class,fullname,full_path,[base_path],target)\n else:\n \n \n is_namespace=_path_isdir(base_path)\n \n for suffix,loader_class in self._loaders:\n try:\n full_path=_path_join(self.path,tail_module+suffix)\n except ValueError:\n return None\n _bootstrap._verbose_message('trying {}',full_path,verbosity=2)\n if cache_module+suffix in cache:\n if _path_isfile(full_path):\n return self._get_spec(loader_class,fullname,full_path,\n None,target)\n if is_namespace:\n _bootstrap._verbose_message('possible namespace for {}',base_path)\n spec=_bootstrap.ModuleSpec(fullname,None)\n spec.submodule_search_locations=[base_path]\n return spec\n return None\n \n def _fill_cache(self):\n ''\n path=self.path\n try:\n contents=_os.listdir(path or _os.getcwd())\n except(FileNotFoundError,PermissionError,NotADirectoryError):\n \n \n contents=[]\n \n \n if not sys.platform.startswith('win'):\n self._path_cache=set(contents)\n else:\n \n \n \n \n \n lower_suffix_contents=set()\n for item in contents:\n name,dot,suffix=item.partition('.')\n if dot:\n new_name=f'{name}.{suffix.lower()}'\n else:\n new_name=name\n 
lower_suffix_contents.add(new_name)\n self._path_cache=lower_suffix_contents\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):\n self._relaxed_path_cache={fn.lower()for fn in contents}\n \n @classmethod\n def path_hook(cls,*loader_details):\n ''\n\n\n\n\n\n\n \n def path_hook_for_FileFinder(path):\n ''\n if not _path_isdir(path):\n raise ImportError('only directories are supported',path=path)\n return cls(path,*loader_details)\n \n return path_hook_for_FileFinder\n \n def __repr__(self):\n return f'FileFinder({self.path !r})'\n \n \n \n \ndef _fix_up_module(ns,name,pathname,cpathname=None):\n\n loader=ns.get('__loader__')\n spec=ns.get('__spec__')\n if not loader:\n if spec:\n loader=spec.loader\n elif pathname ==cpathname:\n loader=SourcelessFileLoader(name,pathname)\n else:\n loader=SourceFileLoader(name,pathname)\n if not spec:\n spec=spec_from_file_location(name,pathname,loader=loader)\n if cpathname:\n spec.cached=_path_abspath(cpathname)\n try:\n ns['__spec__']=spec\n ns['__loader__']=loader\n ns['__file__']=pathname\n ns['__cached__']=cpathname\n except Exception:\n \n pass\n \n \ndef _get_supported_file_loaders():\n ''\n\n\n \n extensions=ExtensionFileLoader,_imp.extension_suffixes()\n source=SourceFileLoader,SOURCE_SUFFIXES\n bytecode=SourcelessFileLoader,BYTECODE_SUFFIXES\n return[extensions,source,bytecode]\n \n \ndef _set_bootstrap_module(_bootstrap_module):\n global _bootstrap\n _bootstrap=_bootstrap_module\n \n \ndef _install(_bootstrap_module):\n ''\n _set_bootstrap_module(_bootstrap_module)\n supported_loaders=_get_supported_file_loaders()\n sys.path_hooks.extend([FileFinder.path_hook(*supported_loaders)])\n sys.meta_path.append(PathFinder)\n", ["_imp", "_io", "_warnings", "importlib.metadata", "importlib.readers", "marshal", "nt", "posix", "sys", "tokenize", "winreg"]], "importlib.machinery": [".py", "''\n\nfrom._bootstrap import ModuleSpec\nfrom._bootstrap import BuiltinImporter\nfrom._bootstrap import FrozenImporter\nfrom._bootstrap_external import(SOURCE_SUFFIXES,DEBUG_BYTECODE_SUFFIXES,\nOPTIMIZED_BYTECODE_SUFFIXES,BYTECODE_SUFFIXES,\nEXTENSION_SUFFIXES)\nfrom._bootstrap_external import WindowsRegistryFinder\nfrom._bootstrap_external import PathFinder\nfrom._bootstrap_external import FileFinder\nfrom._bootstrap_external import SourceFileLoader\nfrom._bootstrap_external import SourcelessFileLoader\nfrom._bootstrap_external import ExtensionFileLoader\nfrom._bootstrap_external import NamespaceLoader\n\n\ndef all_suffixes():\n ''\n return SOURCE_SUFFIXES+BYTECODE_SUFFIXES+EXTENSION_SUFFIXES\n", ["importlib._bootstrap", "importlib._bootstrap_external"]], "importlib.simple": [".py", "''\n\n\n\n\n\n\nfrom.resources.simple import(\nSimpleReader,ResourceHandle,ResourceContainer,TraversableReader,\n)\n\n__all__=[\n'SimpleReader','ResourceHandle','ResourceContainer','TraversableReader',\n]\n", ["importlib.resources.simple"]], "importlib.abc": [".py", "''\nfrom. import _bootstrap_external\nfrom. 
import machinery\ntry:\n import _frozen_importlib\nexcept ImportError as exc:\n if exc.name !='_frozen_importlib':\n raise\n _frozen_importlib=None\ntry:\n import _frozen_importlib_external\nexcept ImportError:\n _frozen_importlib_external=_bootstrap_external\nfrom._abc import Loader\nimport abc\nimport warnings\n\nfrom.resources import abc as _resources_abc\n\n\n__all__=[\n'Loader','MetaPathFinder','PathEntryFinder',\n'ResourceLoader','InspectLoader','ExecutionLoader',\n'FileLoader','SourceLoader',\n]\n\n\ndef __getattr__(name):\n ''\n\n\n \n if name in _resources_abc.__all__:\n obj=getattr(_resources_abc,name)\n warnings._deprecated(f\"{__name__}.{name}\",remove=(3,14))\n globals()[name]=obj\n return obj\n raise AttributeError(f'module {__name__ !r} has no attribute {name !r}')\n \n \ndef _register(abstract_cls,*classes):\n for cls in classes:\n abstract_cls.register(cls)\n if _frozen_importlib is not None:\n try:\n frozen_cls=getattr(_frozen_importlib,cls.__name__)\n except AttributeError:\n frozen_cls=getattr(_frozen_importlib_external,cls.__name__)\n abstract_cls.register(frozen_cls)\n \n \nclass MetaPathFinder(metaclass=abc.ABCMeta):\n\n ''\n \n \n \n \n def invalidate_caches(self):\n ''\n\n \n \n_register(MetaPathFinder,machinery.BuiltinImporter,machinery.FrozenImporter,\nmachinery.PathFinder,machinery.WindowsRegistryFinder)\n\n\nclass PathEntryFinder(metaclass=abc.ABCMeta):\n\n ''\n \n def invalidate_caches(self):\n ''\n\n \n \n_register(PathEntryFinder,machinery.FileFinder)\n\n\nclass ResourceLoader(Loader):\n\n ''\n\n\n\n\n \n \n @abc.abstractmethod\n def get_data(self,path):\n ''\n \n raise OSError\n \n \nclass InspectLoader(Loader):\n\n ''\n\n\n\n\n \n \n def is_package(self,fullname):\n ''\n\n\n\n \n raise ImportError\n \n def get_code(self,fullname):\n ''\n\n\n\n\n\n \n source=self.get_source(fullname)\n if source is None:\n return None\n return self.source_to_code(source)\n \n @abc.abstractmethod\n def get_source(self,fullname):\n ''\n\n\n\n \n raise ImportError\n \n @staticmethod\n def source_to_code(data,path=''):\n ''\n\n\n \n return compile(data,path,'exec',dont_inherit=True)\n \n exec_module=_bootstrap_external._LoaderBasics.exec_module\n load_module=_bootstrap_external._LoaderBasics.load_module\n \n_register(InspectLoader,machinery.BuiltinImporter,machinery.FrozenImporter,machinery.NamespaceLoader)\n\n\nclass ExecutionLoader(InspectLoader):\n\n ''\n\n\n\n\n \n \n @abc.abstractmethod\n def get_filename(self,fullname):\n ''\n\n\n\n \n raise ImportError\n \n def get_code(self,fullname):\n ''\n\n\n\n \n source=self.get_source(fullname)\n if source is None:\n return None\n try:\n path=self.get_filename(fullname)\n except ImportError:\n return self.source_to_code(source)\n else:\n return self.source_to_code(source,path)\n \n_register(ExecutionLoader,machinery.ExtensionFileLoader)\n\n\nclass FileLoader(_bootstrap_external.FileLoader,ResourceLoader,ExecutionLoader):\n\n ''\n \n \n_register(FileLoader,machinery.SourceFileLoader,\nmachinery.SourcelessFileLoader)\n\n\nclass SourceLoader(_bootstrap_external.SourceLoader,ResourceLoader,ExecutionLoader):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def path_mtime(self,path):\n ''\n if self.path_stats.__func__ is SourceLoader.path_stats:\n raise OSError\n return int(self.path_stats(path)['mtime'])\n \n def path_stats(self,path):\n ''\n\n\n\n\n \n if self.path_mtime.__func__ is SourceLoader.path_mtime:\n raise OSError\n return{'mtime':self.path_mtime(path)}\n \n def set_data(self,path,data):\n ''\n\n\n\n\n\n\n \n 
\n_register(SourceLoader,machinery.SourceFileLoader)\n", ["_frozen_importlib", "_frozen_importlib_external", "abc", "importlib", "importlib._abc", "importlib._bootstrap_external", "importlib.machinery", "importlib.resources", "importlib.resources.abc", "warnings"]], "importlib.resources.readers": [".py", "import collections\nimport itertools\nimport pathlib\nimport operator\nimport zipfile\n\nfrom. import abc\n\nfrom._itertools import only\n\n\ndef remove_duplicates(items):\n return iter(collections.OrderedDict.fromkeys(items))\n \n \nclass FileReader(abc.TraversableResources):\n def __init__(self,loader):\n self.path=pathlib.Path(loader.path).parent\n \n def resource_path(self,resource):\n ''\n\n\n\n \n return str(self.path.joinpath(resource))\n \n def files(self):\n return self.path\n \n \nclass ZipReader(abc.TraversableResources):\n def __init__(self,loader,module):\n _,_,name=module.rpartition('.')\n self.prefix=loader.prefix.replace('\\\\','/')+name+'/'\n self.archive=loader.archive\n \n def open_resource(self,resource):\n try:\n return super().open_resource(resource)\n except KeyError as exc:\n raise FileNotFoundError(exc.args[0])\n \n def is_resource(self,path):\n ''\n\n\n \n target=self.files().joinpath(path)\n return target.is_file()and target.exists()\n \n def files(self):\n return zipfile.Path(self.archive,self.prefix)\n \n \nclass MultiplexedPath(abc.Traversable):\n ''\n\n\n\n\n \n \n def __init__(self,*paths):\n self._paths=list(map(pathlib.Path,remove_duplicates(paths)))\n if not self._paths:\n message='MultiplexedPath must contain at least one path'\n raise FileNotFoundError(message)\n if not all(path.is_dir()for path in self._paths):\n raise NotADirectoryError('MultiplexedPath only supports directories')\n \n def iterdir(self):\n children=(child for path in self._paths for child in path.iterdir())\n by_name=operator.attrgetter('name')\n groups=itertools.groupby(sorted(children,key=by_name),key=by_name)\n return map(self._follow,(locs for name,locs in groups))\n \n def read_bytes(self):\n raise FileNotFoundError(f'{self} is not a file')\n \n def read_text(self,*args,**kwargs):\n raise FileNotFoundError(f'{self} is not a file')\n \n def is_dir(self):\n return True\n \n def is_file(self):\n return False\n \n def joinpath(self,*descendants):\n try:\n return super().joinpath(*descendants)\n except abc.TraversalError:\n \n \n return self._paths[0].joinpath(*descendants)\n \n @classmethod\n def _follow(cls,children):\n ''\n\n\n\n\n\n \n subdirs,one_dir,one_file=itertools.tee(children,3)\n \n try:\n return only(one_dir)\n except ValueError:\n try:\n return cls(*subdirs)\n except NotADirectoryError:\n return next(one_file)\n \n def open(self,*args,**kwargs):\n raise FileNotFoundError(f'{self} is not a file')\n \n @property\n def name(self):\n return self._paths[0].name\n \n def __repr__(self):\n paths=', '.join(f\"'{path}'\"for path in self._paths)\n return f'MultiplexedPath({paths})'\n \n \nclass NamespaceReader(abc.TraversableResources):\n def __init__(self,namespace_path):\n if 'NamespacePath'not in str(namespace_path):\n raise ValueError('Invalid path')\n self.path=MultiplexedPath(*list(namespace_path))\n \n def resource_path(self,resource):\n ''\n\n\n\n \n return str(self.path.joinpath(resource))\n \n def files(self):\n return self.path\n", ["collections", "importlib.resources", "importlib.resources._itertools", "importlib.resources.abc", "itertools", "operator", "pathlib", "zipfile"]], "importlib.resources._common": [".py", "import os\nimport pathlib\nimport tempfile\nimport 
functools\nimport contextlib\nimport types\nimport importlib\nimport inspect\nimport warnings\nimport itertools\n\nfrom typing import Union,Optional,cast\nfrom.abc import ResourceReader,Traversable\n\nfrom._adapters import wrap_spec\n\nPackage=Union[types.ModuleType,str]\nAnchor=Package\n\n\ndef package_to_anchor(func):\n ''\n\n\n\n\n\n\n\n \n undefined=object()\n \n @functools.wraps(func)\n def wrapper(anchor=undefined,package=undefined):\n if package is not undefined:\n if anchor is not undefined:\n return func(anchor,package)\n warnings.warn(\n \"First parameter to files is renamed to 'anchor'\",\n DeprecationWarning,\n stacklevel=2,\n )\n return func(package)\n elif anchor is undefined:\n return func()\n return func(anchor)\n \n return wrapper\n \n \n@package_to_anchor\ndef files(anchor:Optional[Anchor]=None)->Traversable:\n ''\n\n \n return from_package(resolve(anchor))\n \n \ndef get_resource_reader(package:types.ModuleType)->Optional[ResourceReader]:\n ''\n\n \n \n \n \n \n \n spec=package.__spec__\n reader=getattr(spec.loader,'get_resource_reader',None)\n if reader is None:\n return None\n return reader(spec.name)\n \n \n@functools.singledispatch\ndef resolve(cand:Optional[Anchor])->types.ModuleType:\n return cast(types.ModuleType,cand)\n \n \n@resolve.register\ndef _(cand:str)->types.ModuleType:\n return importlib.import_module(cand)\n \n \n@resolve.register\ndef _(cand:None)->types.ModuleType:\n return resolve(_infer_caller().f_globals['__name__'])\n \n \ndef _infer_caller():\n ''\n\n \n \n def is_this_file(frame_info):\n return frame_info.filename ==__file__\n \n def is_wrapper(frame_info):\n return frame_info.function =='wrapper'\n \n not_this_file=itertools.filterfalse(is_this_file,inspect.stack())\n \n callers=itertools.filterfalse(is_wrapper,not_this_file)\n return next(callers).frame\n \n \ndef from_package(package:types.ModuleType):\n ''\n\n\n \n spec=wrap_spec(package)\n reader=spec.loader.get_resource_reader(spec.name)\n return reader.files()\n \n \n@contextlib.contextmanager\ndef _tempfile(\nreader,\nsuffix='',\n\n\n*,\n_os_remove=os.remove,\n):\n\n\n\n fd,raw_path=tempfile.mkstemp(suffix=suffix)\n try:\n try:\n os.write(fd,reader())\n finally:\n os.close(fd)\n del reader\n yield pathlib.Path(raw_path)\n finally:\n try:\n _os_remove(raw_path)\n except FileNotFoundError:\n pass\n \n \ndef _temp_file(path):\n return _tempfile(path.read_bytes,suffix=path.name)\n \n \ndef _is_present_dir(path:Traversable)->bool:\n ''\n\n\n\n\n\n \n with contextlib.suppress(FileNotFoundError):\n return path.is_dir()\n return False\n \n \n@functools.singledispatch\ndef as_file(path):\n ''\n\n\n \n return _temp_dir(path)if _is_present_dir(path)else _temp_file(path)\n \n \n@as_file.register(pathlib.Path)\n@contextlib.contextmanager\ndef _(path):\n ''\n\n \n yield path\n \n \n@contextlib.contextmanager\ndef _temp_path(dir:tempfile.TemporaryDirectory):\n ''\n\n \n with dir as result:\n yield pathlib.Path(result)\n \n \n@contextlib.contextmanager\ndef _temp_dir(path):\n ''\n\n\n \n assert path.is_dir()\n with _temp_path(tempfile.TemporaryDirectory())as temp_dir:\n yield _write_contents(temp_dir,path)\n \n \ndef _write_contents(target,source):\n child=target.joinpath(source.name)\n if source.is_dir():\n child.mkdir()\n for item in source.iterdir():\n _write_contents(child,item)\n else:\n child.write_bytes(source.read_bytes())\n return child\n", ["contextlib", "functools", "importlib", "importlib.resources._adapters", "importlib.resources.abc", "inspect", "itertools", "os", "pathlib", "tempfile", 
"types", "typing", "warnings"]], "importlib.resources": [".py", "''\n\nfrom._common import(\nas_file,\nfiles,\nPackage,\n)\n\nfrom._legacy import(\ncontents,\nopen_binary,\nread_binary,\nopen_text,\nread_text,\nis_resource,\npath,\nResource,\n)\n\nfrom.abc import ResourceReader\n\n\n__all__=[\n'Package',\n'Resource',\n'ResourceReader',\n'as_file',\n'contents',\n'files',\n'is_resource',\n'open_binary',\n'open_text',\n'path',\n'read_binary',\n'read_text',\n]\n", ["importlib.resources._common", "importlib.resources._legacy", "importlib.resources.abc"], 1], "importlib.resources._itertools": [".py", "\ndef only(iterable,default=None,too_long=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n it=iter(iterable)\n first_value=next(it,default)\n \n try:\n second_value=next(it)\n except StopIteration:\n pass\n else:\n msg=(\n 'Expected exactly one item in iterable, but got {!r}, {!r}, '\n 'and perhaps more.'.format(first_value,second_value)\n )\n raise too_long or ValueError(msg)\n \n return first_value\n", []], "importlib.resources._adapters": [".py", "from contextlib import suppress\nfrom io import TextIOWrapper\n\nfrom. import abc\n\n\nclass SpecLoaderAdapter:\n ''\n\n \n \n def __init__(self,spec,adapter=lambda spec:spec.loader):\n self.spec=spec\n self.loader=adapter(spec)\n \n def __getattr__(self,name):\n return getattr(self.spec,name)\n \n \nclass TraversableResourcesLoader:\n ''\n\n \n \n def __init__(self,spec):\n self.spec=spec\n \n def get_resource_reader(self,name):\n return CompatibilityFiles(self.spec)._native()\n \n \ndef _io_wrapper(file,mode='r',*args,**kwargs):\n if mode =='r':\n return TextIOWrapper(file,*args,**kwargs)\n elif mode =='rb':\n return file\n raise ValueError(f\"Invalid mode value '{mode}', only 'r' and 'rb' are supported\")\n \n \nclass CompatibilityFiles:\n ''\n\n\n \n \n class SpecPath(abc.Traversable):\n ''\n\n\n \n \n def __init__(self,spec,reader):\n self._spec=spec\n self._reader=reader\n \n def iterdir(self):\n if not self._reader:\n return iter(())\n return iter(\n CompatibilityFiles.ChildPath(self._reader,path)\n for path in self._reader.contents()\n )\n \n def is_file(self):\n return False\n \n is_dir=is_file\n \n def joinpath(self,other):\n if not self._reader:\n return CompatibilityFiles.OrphanPath(other)\n return CompatibilityFiles.ChildPath(self._reader,other)\n \n @property\n def name(self):\n return self._spec.name\n \n def open(self,mode='r',*args,**kwargs):\n return _io_wrapper(self._reader.open_resource(None),mode,*args,**kwargs)\n \n class ChildPath(abc.Traversable):\n ''\n\n\n \n \n def __init__(self,reader,name):\n self._reader=reader\n self._name=name\n \n def iterdir(self):\n return iter(())\n \n def is_file(self):\n return self._reader.is_resource(self.name)\n \n def is_dir(self):\n return not self.is_file()\n \n def joinpath(self,other):\n return CompatibilityFiles.OrphanPath(self.name,other)\n \n @property\n def name(self):\n return self._name\n \n def open(self,mode='r',*args,**kwargs):\n return _io_wrapper(\n self._reader.open_resource(self.name),mode,*args,**kwargs\n )\n \n class OrphanPath(abc.Traversable):\n ''\n\n\n \n \n def __init__(self,*path_parts):\n if len(path_parts)<1:\n raise ValueError('Need at least one path part to construct a path')\n self._path=path_parts\n \n def iterdir(self):\n return iter(())\n \n def is_file(self):\n return False\n \n is_dir=is_file\n \n def joinpath(self,other):\n return CompatibilityFiles.OrphanPath(*self._path,other)\n \n @property\n def name(self):\n return self._path[-1]\n \n def 
open(self,mode='r',*args,**kwargs):\n raise FileNotFoundError(\"Can't open orphan path\")\n \n def __init__(self,spec):\n self.spec=spec\n \n @property\n def _reader(self):\n with suppress(AttributeError):\n return self.spec.loader.get_resource_reader(self.spec.name)\n \n def _native(self):\n ''\n\n \n reader=self._reader\n return reader if hasattr(reader,'files')else self\n \n def __getattr__(self,attr):\n return getattr(self._reader,attr)\n \n def files(self):\n return CompatibilityFiles.SpecPath(self.spec,self._reader)\n \n \ndef wrap_spec(package):\n ''\n\n\n \n return SpecLoaderAdapter(package.__spec__,TraversableResourcesLoader)\n", ["contextlib", "importlib.resources", "importlib.resources.abc", "io"]], "importlib.resources._legacy": [".py", "import functools\nimport os\nimport pathlib\nimport types\nimport warnings\n\nfrom typing import Union,Iterable,ContextManager,BinaryIO,TextIO,Any\n\nfrom. import _common\n\nPackage=Union[types.ModuleType,str]\nResource=str\n\n\ndef deprecated(func):\n @functools.wraps(func)\n def wrapper(*args,**kwargs):\n warnings.warn(\n f\"{func.__name__} is deprecated. Use files() instead. \"\n \"Refer to https://importlib-resources.readthedocs.io\"\n \"/en/latest/using.html#migrating-from-legacy for migration advice.\",\n DeprecationWarning,\n stacklevel=2,\n )\n return func(*args,**kwargs)\n \n return wrapper\n \n \ndef normalize_path(path:Any)->str:\n ''\n\n\n \n str_path=str(path)\n parent,file_name=os.path.split(str_path)\n if parent:\n raise ValueError(f'{path !r} must be only a file name')\n return file_name\n \n \n@deprecated\ndef open_binary(package:Package,resource:Resource)->BinaryIO:\n ''\n return(_common.files(package)/normalize_path(resource)).open('rb')\n \n \n@deprecated\ndef read_binary(package:Package,resource:Resource)->bytes:\n ''\n return(_common.files(package)/normalize_path(resource)).read_bytes()\n \n \n@deprecated\ndef open_text(\npackage:Package,\nresource:Resource,\nencoding:str='utf-8',\nerrors:str='strict',\n)->TextIO:\n ''\n return(_common.files(package)/normalize_path(resource)).open(\n 'r',encoding=encoding,errors=errors\n )\n \n \n@deprecated\ndef read_text(\npackage:Package,\nresource:Resource,\nencoding:str='utf-8',\nerrors:str='strict',\n)->str:\n ''\n\n\n\n \n with open_text(package,resource,encoding,errors)as fp:\n return fp.read()\n \n \n@deprecated\ndef contents(package:Package)->Iterable[str]:\n ''\n\n\n\n\n \n return[path.name for path in _common.files(package).iterdir()]\n \n \n@deprecated\ndef is_resource(package:Package,name:str)->bool:\n ''\n\n\n \n resource=normalize_path(name)\n return any(\n traversable.name ==resource and traversable.is_file()\n for traversable in _common.files(package).iterdir()\n )\n \n \n@deprecated\ndef path(\npackage:Package,\nresource:Resource,\n)->ContextManager[pathlib.Path]:\n ''\n\n\n\n\n\n\n \n return _common.as_file(_common.files(package)/normalize_path(resource))\n", ["functools", "importlib.resources", "importlib.resources._common", "os", "pathlib", "types", "typing", "warnings"]], "importlib.resources.simple": [".py", "''\n\n\n\nimport abc\nimport io\nimport itertools\nfrom typing import BinaryIO,List\n\nfrom.abc import Traversable,TraversableResources\n\n\nclass SimpleReader(abc.ABC):\n ''\n\n\n \n \n @property\n @abc.abstractmethod\n def package(self)->str:\n ''\n\n \n \n @abc.abstractmethod\n def children(self)->List['SimpleReader']:\n ''\n\n\n \n \n @abc.abstractmethod\n def resources(self)->List[str]:\n ''\n\n \n \n @abc.abstractmethod\n def 
open_binary(self,resource:str)->BinaryIO:\n ''\n\n \n \n @property\n def name(self):\n return self.package.split('.')[-1]\n \n \nclass ResourceContainer(Traversable):\n ''\n\n \n \n def __init__(self,reader:SimpleReader):\n self.reader=reader\n \n def is_dir(self):\n return True\n \n def is_file(self):\n return False\n \n def iterdir(self):\n files=(ResourceHandle(self,name)for name in self.reader.resources)\n dirs=map(ResourceContainer,self.reader.children())\n return itertools.chain(files,dirs)\n \n def open(self,*args,**kwargs):\n raise IsADirectoryError()\n \n \nclass ResourceHandle(Traversable):\n ''\n\n \n \n def __init__(self,parent:ResourceContainer,name:str):\n self.parent=parent\n self.name=name\n \n def is_file(self):\n return True\n \n def is_dir(self):\n return False\n \n def open(self,mode='r',*args,**kwargs):\n stream=self.parent.reader.open_binary(self.name)\n if 'b'not in mode:\n stream=io.TextIOWrapper(*args,**kwargs)\n return stream\n \n def joinpath(self,name):\n raise RuntimeError(\"Cannot traverse into a resource\")\n \n \nclass TraversableReader(TraversableResources,SimpleReader):\n ''\n\n\n\n \n \n def files(self):\n return ResourceContainer(self)\n", ["abc", "importlib.resources.abc", "io", "itertools", "typing"]], "importlib.resources.abc": [".py", "import abc\nimport io\nimport itertools\nimport os\nimport pathlib\nfrom typing import Any,BinaryIO,Iterable,Iterator,NoReturn,Text,Optional\nfrom typing import runtime_checkable,Protocol\nfrom typing import Union\n\n\nStrPath=Union[str,os.PathLike[str]]\n\n__all__=[\"ResourceReader\",\"Traversable\",\"TraversableResources\"]\n\n\nclass ResourceReader(metaclass=abc.ABCMeta):\n ''\n \n @abc.abstractmethod\n def open_resource(self,resource:Text)->BinaryIO:\n ''\n\n\n\n \n \n \n \n raise FileNotFoundError\n \n @abc.abstractmethod\n def resource_path(self,resource:Text)->Text:\n ''\n\n\n\n\n \n \n \n \n raise FileNotFoundError\n \n @abc.abstractmethod\n def is_resource(self,path:Text)->bool:\n ''\n\n\n \n raise FileNotFoundError\n \n @abc.abstractmethod\n def contents(self)->Iterable[str]:\n ''\n raise FileNotFoundError\n \n \nclass TraversalError(Exception):\n pass\n \n \n@runtime_checkable\nclass Traversable(Protocol):\n ''\n\n\n\n\n\n \n \n @abc.abstractmethod\n def iterdir(self)->Iterator[\"Traversable\"]:\n ''\n\n \n \n def read_bytes(self)->bytes:\n ''\n\n \n with self.open('rb')as strm:\n return strm.read()\n \n def read_text(self,encoding:Optional[str]=None)->str:\n ''\n\n \n with self.open(encoding=encoding)as strm:\n return strm.read()\n \n @abc.abstractmethod\n def is_dir(self)->bool:\n ''\n\n \n \n @abc.abstractmethod\n def is_file(self)->bool:\n ''\n\n \n \n def joinpath(self,*descendants:StrPath)->\"Traversable\":\n ''\n\n\n\n\n\n \n if not descendants:\n return self\n names=itertools.chain.from_iterable(\n path.parts for path in map(pathlib.PurePosixPath,descendants)\n )\n target=next(names)\n matches=(\n traversable for traversable in self.iterdir()if traversable.name ==target\n )\n try:\n match=next(matches)\n except StopIteration:\n raise TraversalError(\n \"Target not found during traversal.\",target,list(names)\n )\n return match.joinpath(*names)\n \n def __truediv__(self,child:StrPath)->\"Traversable\":\n ''\n\n \n return self.joinpath(child)\n \n @abc.abstractmethod\n def open(self,mode='r',*args,**kwargs):\n ''\n\n\n\n\n\n \n \n @property\n @abc.abstractmethod\n def name(self)->str:\n ''\n\n \n \n \nclass TraversableResources(ResourceReader):\n ''\n\n\n \n \n @abc.abstractmethod\n def 
files(self)->\"Traversable\":\n ''\n \n def open_resource(self,resource:StrPath)->io.BufferedReader:\n return self.files().joinpath(resource).open('rb')\n \n def resource_path(self,resource:Any)->NoReturn:\n raise FileNotFoundError(resource)\n \n def is_resource(self,path:StrPath)->bool:\n return self.files().joinpath(path).is_file()\n \n def contents(self)->Iterator[str]:\n return(item.name for item in self.files().iterdir())\n", ["abc", "io", "itertools", "os", "pathlib", "typing"]], "importlib.metadata._meta": [".py", "from typing import Protocol\nfrom typing import Any,Dict,Iterator,List,Optional,TypeVar,Union,overload\n\n\n_T=TypeVar(\"_T\")\n\n\nclass PackageMetadata(Protocol):\n def __len__(self)->int:\n ...\n \n def __contains__(self,item:str)->bool:\n ...\n \n def __getitem__(self,key:str)->str:\n ...\n \n def __iter__(self)->Iterator[str]:\n ...\n \n @overload\n def get(self,name:str,failobj:None=None)->Optional[str]:\n ...\n \n @overload\n def get(self,name:str,failobj:_T)->Union[str,_T]:\n ...\n \n \n @overload\n def get_all(self,name:str,failobj:None=None)->Optional[List[Any]]:\n ...\n \n @overload\n def get_all(self,name:str,failobj:_T)->Union[List[Any],_T]:\n ''\n\n \n \n @property\n def json(self)->Dict[str,Union[str,List[str]]]:\n ''\n\n \n \n \nclass SimplePath(Protocol[_T]):\n ''\n\n \n \n def joinpath(self)->_T:\n ...\n \n def __truediv__(self,other:Union[str,_T])->_T:\n ...\n \n @property\n def parent(self)->_T:\n ...\n \n def read_text(self)->str:\n ...\n", ["typing"]], "importlib.metadata._text": [".py", "import re\n\nfrom._functools import method_cache\n\n\n\nclass FoldedCase(str):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __lt__(self,other):\n return self.lower()other.lower()\n \n def __eq__(self,other):\n return self.lower()==other.lower()\n \n def __ne__(self,other):\n return self.lower()!=other.lower()\n \n def __hash__(self):\n return hash(self.lower())\n \n def __contains__(self,other):\n return super().lower().__contains__(other.lower())\n \n def in_(self,other):\n ''\n return self in FoldedCase(other)\n \n \n @method_cache\n def lower(self):\n return super().lower()\n \n def index(self,sub):\n return self.lower().index(sub.lower())\n \n def split(self,splitter=' ',maxsplit=0):\n pattern=re.compile(re.escape(splitter),re.I)\n return pattern.split(self,maxsplit)\n", ["importlib.metadata._functools", "re"]], "importlib.metadata": [".py", "import os\nimport re\nimport abc\nimport csv\nimport sys\nimport email\nimport pathlib\nimport zipfile\nimport operator\nimport textwrap\nimport warnings\nimport functools\nimport itertools\nimport posixpath\nimport contextlib\nimport collections\nimport inspect\n\nfrom. 
import _adapters,_meta\nfrom._collections import FreezableDefaultDict,Pair\nfrom._functools import method_cache,pass_none\nfrom._itertools import always_iterable,unique_everseen\nfrom._meta import PackageMetadata,SimplePath\n\nfrom contextlib import suppress\nfrom importlib import import_module\nfrom importlib.abc import MetaPathFinder\nfrom itertools import starmap\nfrom typing import List,Mapping,Optional,cast\n\n\n__all__=[\n'Distribution',\n'DistributionFinder',\n'PackageMetadata',\n'PackageNotFoundError',\n'distribution',\n'distributions',\n'entry_points',\n'files',\n'metadata',\n'packages_distributions',\n'requires',\n'version',\n]\n\n\nclass PackageNotFoundError(ModuleNotFoundError):\n ''\n \n def __str__(self):\n return f\"No package metadata was found for {self.name}\"\n \n @property\n def name(self):\n (name,)=self.args\n return name\n \n \nclass Sectioned:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n _sample=textwrap.dedent(\n \"\"\"\n [sec1]\n # comments ignored\n a = 1\n b = 2\n\n [sec2]\n a = 2\n \"\"\"\n ).lstrip()\n \n @classmethod\n def section_pairs(cls,text):\n return(\n section._replace(value=Pair.parse(section.value))\n for section in cls.read(text,filter_=cls.valid)\n if section.name is not None\n )\n \n @staticmethod\n def read(text,filter_=None):\n lines=filter(filter_,map(str.strip,text.splitlines()))\n name=None\n for value in lines:\n section_match=value.startswith('[')and value.endswith(']')\n if section_match:\n name=value.strip('[]')\n continue\n yield Pair(name,value)\n \n @staticmethod\n def valid(line):\n return line and not line.startswith('#')\n \n \nclass DeprecatedTuple:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n _warn=functools.partial(\n warnings.warn,\n \"EntryPoint tuple interface is deprecated. Access members by name.\",\n DeprecationWarning,\n stacklevel=2,\n )\n \n def __getitem__(self,item):\n self._warn()\n return self._key()[item]\n \n \nclass EntryPoint(DeprecatedTuple):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n pattern=re.compile(\n r'(?P[\\w.]+)\\s*'\n r'(:\\s*(?P[\\w.]+)\\s*)?'\n r'((?P\\[.*\\])\\s*)?$'\n )\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n name:str\n value:str\n group:str\n \n dist:Optional['Distribution']=None\n \n def __init__(self,name,value,group):\n vars(self).update(name=name,value=value,group=group)\n \n def load(self):\n ''\n\n\n \n match=self.pattern.match(self.value)\n module=import_module(match.group('module'))\n attrs=filter(None,(match.group('attr')or '').split('.'))\n return functools.reduce(getattr,attrs,module)\n \n @property\n def module(self):\n match=self.pattern.match(self.value)\n return match.group('module')\n \n @property\n def attr(self):\n match=self.pattern.match(self.value)\n return match.group('attr')\n \n @property\n def extras(self):\n match=self.pattern.match(self.value)\n return re.findall(r'\\w+',match.group('extras')or '')\n \n def _for(self,dist):\n vars(self).update(dist=dist)\n return self\n \n def matches(self,**params):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n attrs=(getattr(self,param)for param in params)\n return all(map(operator.eq,params.values(),attrs))\n \n def _key(self):\n return self.name,self.value,self.group\n \n def __lt__(self,other):\n return self._key()'\n \n \nclass DeprecatedNonAbstract:\n def __new__(cls,*args,**kwargs):\n all_names={\n name for subclass in inspect.getmro(cls)for name in vars(subclass)\n }\n abstract={\n name\n for name in all_names\n if getattr(getattr(cls,name),'__isabstractmethod__',False)\n }\n if abstract:\n warnings.warn(\n f\"Unimplemented 
abstract methods {abstract}\",\n DeprecationWarning,\n stacklevel=2,\n )\n return super().__new__(cls)\n \n \nclass Distribution(DeprecatedNonAbstract):\n ''\n \n @abc.abstractmethod\n def read_text(self,filename)->Optional[str]:\n ''\n\n\n\n \n \n @abc.abstractmethod\n def locate_file(self,path):\n ''\n\n\n \n \n @classmethod\n def from_name(cls,name:str):\n ''\n\n\n\n\n\n\n\n \n if not name:\n raise ValueError(\"A distribution name is required.\")\n try:\n return next(cls.discover(name=name))\n except StopIteration:\n raise PackageNotFoundError(name)\n \n @classmethod\n def discover(cls,**kwargs):\n ''\n\n\n\n\n\n\n \n context=kwargs.pop('context',None)\n if context and kwargs:\n raise ValueError(\"cannot accept context and kwargs\")\n context=context or DistributionFinder.Context(**kwargs)\n return itertools.chain.from_iterable(\n resolver(context)for resolver in cls._discover_resolvers()\n )\n \n @staticmethod\n def at(path):\n ''\n\n\n\n \n return PathDistribution(pathlib.Path(path))\n \n @staticmethod\n def _discover_resolvers():\n ''\n declared=(\n getattr(finder,'find_distributions',None)for finder in sys.meta_path\n )\n return filter(None,declared)\n \n @property\n def metadata(self)->_meta.PackageMetadata:\n ''\n\n\n\n \n opt_text=(\n self.read_text('METADATA')\n or self.read_text('PKG-INFO')\n \n \n \n or self.read_text('')\n )\n text=cast(str,opt_text)\n return _adapters.Message(email.message_from_string(text))\n \n @property\n def name(self):\n ''\n return self.metadata['Name']\n \n @property\n def _normalized_name(self):\n ''\n return Prepared.normalize(self.name)\n \n @property\n def version(self):\n ''\n return self.metadata['Version']\n \n @property\n def entry_points(self):\n return EntryPoints._from_text_for(self.read_text('entry_points.txt'),self)\n \n @property\n def files(self):\n ''\n\n\n\n\n\n\n\n \n \n def make_file(name,hash=None,size_str=None):\n result=PackagePath(name)\n result.hash=FileHash(hash)if hash else None\n result.size=int(size_str)if size_str else None\n result.dist=self\n return result\n \n @pass_none\n def make_files(lines):\n return starmap(make_file,csv.reader(lines))\n \n @pass_none\n def skip_missing_files(package_paths):\n return list(filter(lambda path:path.locate().exists(),package_paths))\n \n return skip_missing_files(\n make_files(\n self._read_files_distinfo()\n or self._read_files_egginfo_installed()\n or self._read_files_egginfo_sources()\n )\n )\n \n def _read_files_distinfo(self):\n ''\n\n \n text=self.read_text('RECORD')\n return text and text.splitlines()\n \n def _read_files_egginfo_installed(self):\n ''\n\n\n\n\n\n\n\n\n \n text=self.read_text('installed-files.txt')\n \n \n \n subdir=getattr(self,'_path',None)\n if not text or not subdir:\n return\n \n paths=(\n (subdir /name)\n .resolve()\n .relative_to(self.locate_file('').resolve())\n .as_posix()\n for name in text.splitlines()\n )\n return map('\"{}\"'.format,paths)\n \n def _read_files_egginfo_sources(self):\n ''\n\n\n\n\n\n\n\n\n\n \n text=self.read_text('SOURCES.txt')\n return text and map('\"{}\"'.format,text.splitlines())\n \n @property\n def requires(self):\n ''\n reqs=self._read_dist_info_reqs()or self._read_egg_info_reqs()\n return reqs and list(reqs)\n \n def _read_dist_info_reqs(self):\n return self.metadata.get_all('Requires-Dist')\n \n def _read_egg_info_reqs(self):\n source=self.read_text('requires.txt')\n return pass_none(self._deps_from_requires_text)(source)\n \n @classmethod\n def _deps_from_requires_text(cls,source):\n return 
cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source))\n \n @staticmethod\n def _convert_egg_info_reqs_to_simple_reqs(sections):\n ''\n\n\n\n\n\n\n\n \n \n def make_condition(name):\n return name and f'extra == \"{name}\"'\n \n def quoted_marker(section):\n section=section or ''\n extra,sep,markers=section.partition(':')\n if extra and markers:\n markers=f'({markers})'\n conditions=list(filter(None,[markers,make_condition(extra)]))\n return '; '+' and '.join(conditions)if conditions else ''\n \n def url_req_space(req):\n ''\n\n\n \n \n return ' '*('@'in req)\n \n for section in sections:\n space=url_req_space(section.value)\n yield section.value+space+quoted_marker(section.name)\n \n \nclass DistributionFinder(MetaPathFinder):\n ''\n\n \n \n class Context:\n ''\n\n\n\n\n\n\n\n\n \n \n name=None\n ''\n\n\n \n \n def __init__(self,**kwargs):\n vars(self).update(kwargs)\n \n @property\n def path(self):\n ''\n\n\n\n\n\n \n return vars(self).get('path',sys.path)\n \n @abc.abstractmethod\n def find_distributions(self,context=Context()):\n ''\n\n\n\n\n\n \n \n \nclass FastPath:\n ''\n\n\n\n\n\n \n \n @functools.lru_cache()\n def __new__(cls,root):\n return super().__new__(cls)\n \n def __init__(self,root):\n self.root=root\n \n def joinpath(self,child):\n return pathlib.Path(self.root,child)\n \n def children(self):\n with suppress(Exception):\n return os.listdir(self.root or '.')\n with suppress(Exception):\n return self.zip_children()\n return[]\n \n def zip_children(self):\n zip_path=zipfile.Path(self.root)\n names=zip_path.root.namelist()\n self.joinpath=zip_path.joinpath\n \n return dict.fromkeys(child.split(posixpath.sep,1)[0]for child in names)\n \n def search(self,name):\n return self.lookup(self.mtime).search(name)\n \n @property\n def mtime(self):\n with suppress(OSError):\n return os.stat(self.root).st_mtime\n self.lookup.cache_clear()\n \n @method_cache\n def lookup(self,mtime):\n return Lookup(self)\n \n \nclass Lookup:\n def __init__(self,path:FastPath):\n base=os.path.basename(path.root).lower()\n base_is_egg=base.endswith(\".egg\")\n self.infos=FreezableDefaultDict(list)\n self.eggs=FreezableDefaultDict(list)\n \n for child in path.children():\n low=child.lower()\n if low.endswith((\".dist-info\",\".egg-info\")):\n \n name=low.rpartition(\".\")[0].partition(\"-\")[0]\n normalized=Prepared.normalize(name)\n self.infos[normalized].append(path.joinpath(child))\n elif base_is_egg and low ==\"egg-info\":\n name=base.rpartition(\".\")[0].partition(\"-\")[0]\n legacy_normalized=Prepared.legacy_normalize(name)\n self.eggs[legacy_normalized].append(path.joinpath(child))\n \n self.infos.freeze()\n self.eggs.freeze()\n \n def search(self,prepared):\n infos=(\n self.infos[prepared.normalized]\n if prepared\n else itertools.chain.from_iterable(self.infos.values())\n )\n eggs=(\n self.eggs[prepared.legacy_normalized]\n if prepared\n else itertools.chain.from_iterable(self.eggs.values())\n )\n return itertools.chain(infos,eggs)\n \n \nclass Prepared:\n ''\n\n \n \n normalized=None\n legacy_normalized=None\n \n def __init__(self,name):\n self.name=name\n if name is None:\n return\n self.normalized=self.normalize(name)\n self.legacy_normalized=self.legacy_normalize(name)\n \n @staticmethod\n def normalize(name):\n ''\n\n \n return re.sub(r\"[-_.]+\",\"-\",name).lower().replace('-','_')\n \n @staticmethod\n def legacy_normalize(name):\n ''\n\n\n \n return name.lower().replace('-','_')\n \n def __bool__(self):\n return bool(self.name)\n \n \nclass MetadataPathFinder(DistributionFinder):\n 
@classmethod\n def find_distributions(cls,context=DistributionFinder.Context()):\n ''\n\n\n\n\n\n\n \n found=cls._search_paths(context.name,context.path)\n return map(PathDistribution,found)\n \n @classmethod\n def _search_paths(cls,name,paths):\n ''\n prepared=Prepared(name)\n return itertools.chain.from_iterable(\n path.search(prepared)for path in map(FastPath,paths)\n )\n \n def invalidate_caches(cls):\n FastPath.__new__.cache_clear()\n \n \nclass PathDistribution(Distribution):\n def __init__(self,path:SimplePath):\n ''\n\n\n \n self._path=path\n \n def read_text(self,filename):\n with suppress(\n FileNotFoundError,\n IsADirectoryError,\n KeyError,\n NotADirectoryError,\n PermissionError,\n ):\n return self._path.joinpath(filename).read_text(encoding='utf-8')\n \n read_text.__doc__=Distribution.read_text.__doc__\n \n def locate_file(self,path):\n return self._path.parent /path\n \n @property\n def _normalized_name(self):\n ''\n\n\n \n stem=os.path.basename(str(self._path))\n return(\n pass_none(Prepared.normalize)(self._name_from_stem(stem))\n or super()._normalized_name\n )\n \n @staticmethod\n def _name_from_stem(stem):\n ''\n\n\n\n\n\n\n\n \n filename,ext=os.path.splitext(stem)\n if ext not in('.dist-info','.egg-info'):\n return\n name,sep,rest=filename.partition('-')\n return name\n \n \ndef distribution(distribution_name):\n ''\n\n\n\n \n return Distribution.from_name(distribution_name)\n \n \ndef distributions(**kwargs):\n ''\n\n\n \n return Distribution.discover(**kwargs)\n \n \ndef metadata(distribution_name)->_meta.PackageMetadata:\n ''\n\n\n\n \n return Distribution.from_name(distribution_name).metadata\n \n \ndef version(distribution_name):\n ''\n\n\n\n\n \n return distribution(distribution_name).version\n \n \n_unique=functools.partial(\nunique_everseen,\nkey=operator.attrgetter('_normalized_name'),\n)\n''\n\n\n\n\ndef entry_points(**params)->EntryPoints:\n ''\n\n\n\n\n\n\n \n eps=itertools.chain.from_iterable(\n dist.entry_points for dist in _unique(distributions())\n )\n return EntryPoints(eps).select(**params)\n \n \ndef files(distribution_name):\n ''\n\n\n\n \n return distribution(distribution_name).files\n \n \ndef requires(distribution_name):\n ''\n\n\n\n\n \n return distribution(distribution_name).requires\n \n \ndef packages_distributions()->Mapping[str,List[str]]:\n ''\n\n\n\n\n\n\n\n \n pkg_to_dist=collections.defaultdict(list)\n for dist in distributions():\n for pkg in _top_level_declared(dist)or _top_level_inferred(dist):\n pkg_to_dist[pkg].append(dist.metadata['Name'])\n return dict(pkg_to_dist)\n \n \ndef _top_level_declared(dist):\n return(dist.read_text('top_level.txt')or '').split()\n \n \ndef _top_level_inferred(dist):\n opt_names={\n f.parts[0]if len(f.parts)>1 else inspect.getmodulename(f)\n for f in always_iterable(dist.files)\n }\n \n @pass_none\n def importable_name(name):\n return '.'not in name\n \n return filter(importable_name,opt_names)\n", ["abc", "collections", "contextlib", "csv", "email", "functools", "importlib", "importlib.abc", "importlib.metadata", "importlib.metadata._adapters", "importlib.metadata._collections", "importlib.metadata._functools", "importlib.metadata._itertools", "importlib.metadata._meta", "inspect", "itertools", "operator", "os", "pathlib", "posixpath", "re", "sys", "textwrap", "typing", "warnings", "zipfile"], 1], "importlib.metadata._functools": [".py", "import types\nimport functools\n\n\n\ndef method_cache(method,cache_wrapper=None):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n cache_wrapper=cache_wrapper or functools.lru_cache()\n \n def wrapper(self,*args,**kwargs):\n \n bound_method=types.MethodType(method,self)\n cached_method=cache_wrapper(bound_method)\n setattr(self,method.__name__,cached_method)\n return cached_method(*args,**kwargs)\n \n \n wrapper.cache_clear=lambda:None\n \n return wrapper\n \n \n \ndef pass_none(func):\n ''\n\n\n\n\n\n\n \n \n @functools.wraps(func)\n def wrapper(param,*args,**kwargs):\n if param is not None:\n return func(param,*args,**kwargs)\n \n return wrapper\n", ["functools", "types"]], "importlib.metadata._collections": [".py", "import collections\n\n\n\nclass FreezableDefaultDict(collections.defaultdict):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __missing__(self,key):\n return getattr(self,'_frozen',super().__missing__)(key)\n \n def freeze(self):\n self._frozen=lambda key:self.default_factory()\n \n \nclass Pair(collections.namedtuple('Pair','name value')):\n @classmethod\n def parse(cls,text):\n return cls(*map(str.strip,text.split(\"=\",1)))\n", ["collections"]], "importlib.metadata._itertools": [".py", "from itertools import filterfalse\n\n\ndef unique_everseen(iterable,key=None):\n ''\n \n \n seen=set()\n seen_add=seen.add\n if key is None:\n for element in filterfalse(seen.__contains__,iterable):\n seen_add(element)\n yield element\n else:\n for element in iterable:\n k=key(element)\n if k not in seen:\n seen_add(k)\n yield element\n \n \n \ndef always_iterable(obj,base_type=(str,bytes)):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if obj is None:\n return iter(())\n \n if(base_type is not None)and isinstance(obj,base_type):\n return iter((obj,))\n \n try:\n return iter(obj)\n except TypeError:\n return iter((obj,))\n", ["itertools"]], "importlib.metadata._adapters": [".py", "import functools\nimport warnings\nimport re\nimport textwrap\nimport email.message\n\nfrom._text import FoldedCase\n\n\n\n_warn=functools.partial(\nwarnings.warn,\n\"Implicit None on return values is deprecated and will raise KeyErrors.\",\nDeprecationWarning,\nstacklevel=2,\n)\n\n\nclass Message(email.message.Message):\n multiple_use_keys=set(\n map(\n FoldedCase,\n [\n 'Classifier',\n 'Obsoletes-Dist',\n 'Platform',\n 'Project-URL',\n 'Provides-Dist',\n 'Provides-Extra',\n 'Requires-Dist',\n 'Requires-External',\n 'Supported-Platform',\n 'Dynamic',\n ],\n )\n )\n ''\n\n \n \n def __new__(cls,orig:email.message.Message):\n res=super().__new__(cls)\n vars(res).update(vars(orig))\n return res\n \n def __init__(self,*args,**kwargs):\n self._headers=self._repair_headers()\n \n \n def __iter__(self):\n return super().__iter__()\n \n def __getitem__(self,item):\n ''\n\n\n \n res=super().__getitem__(item)\n if res is None:\n _warn()\n return res\n \n def _repair_headers(self):\n def redent(value):\n ''\n if not value or '\\n'not in value:\n return value\n return textwrap.dedent(' '*8+value)\n \n headers=[(key,redent(value))for key,value in vars(self)['_headers']]\n if self._payload:\n headers.append(('Description',self.get_payload()))\n return headers\n \n @property\n def json(self):\n ''\n\n\n \n \n def transform(key):\n value=self.get_all(key)if key in self.multiple_use_keys else self[key]\n if key =='Keywords':\n value=re.split(r'\\s+',value)\n tk=key.lower().replace('-','_')\n return tk,value\n \n return dict(map(transform,map(FoldedCase,self)))\n", ["email.message", 
"functools", "importlib.metadata._text", "re", "textwrap", "warnings"]], "pydoc_data": [".py", "", [], 1], "pydoc_data.topics": [".py", "\n\ntopics={'assert':'The \"assert\" statement\\n'\n'**********************\\n'\n'\\n'\n'Assert statements are a convenient way to insert debugging '\n'assertions\\n'\n'into a program:\\n'\n'\\n'\n' assert_stmt ::= \"assert\" expression [\",\" expression]\\n'\n'\\n'\n'The simple form, \"assert expression\", is equivalent to\\n'\n'\\n'\n' if __debug__:\\n'\n' if not expression: raise AssertionError\\n'\n'\\n'\n'The extended form, \"assert expression1, expression2\", is '\n'equivalent to\\n'\n'\\n'\n' if __debug__:\\n'\n' if not expression1: raise AssertionError(expression2)\\n'\n'\\n'\n'These equivalences assume that \"__debug__\" and \"AssertionError\" '\n'refer\\n'\n'to the built-in variables with those names. In the current\\n'\n'implementation, the built-in variable \"__debug__\" is \"True\" under\\n'\n'normal circumstances, \"False\" when optimization is requested '\n'(command\\n'\n'line option \"-O\"). The current code generator emits no code for '\n'an\\n'\n'assert statement when optimization is requested at compile time. '\n'Note\\n'\n'that it is unnecessary to include the source code for the '\n'expression\\n'\n'that failed in the error message; it will be displayed as part of '\n'the\\n'\n'stack trace.\\n'\n'\\n'\n'Assignments to \"__debug__\" are illegal. The value for the '\n'built-in\\n'\n'variable is determined when the interpreter starts.\\n',\n'assignment':'Assignment statements\\n'\n'*********************\\n'\n'\\n'\n'Assignment statements are used to (re)bind names to values and '\n'to\\n'\n'modify attributes or items of mutable objects:\\n'\n'\\n'\n' assignment_stmt ::= (target_list \"=\")+ (starred_expression '\n'| yield_expression)\\n'\n' target_list ::= target (\",\" target)* [\",\"]\\n'\n' target ::= identifier\\n'\n' | \"(\" [target_list] \")\"\\n'\n' | \"[\" [target_list] \"]\"\\n'\n' | attributeref\\n'\n' | subscription\\n'\n' | slicing\\n'\n' | \"*\" target\\n'\n'\\n'\n'(See section Primaries for the syntax definitions for '\n'*attributeref*,\\n'\n'*subscription*, and *slicing*.)\\n'\n'\\n'\n'An assignment statement evaluates the expression list '\n'(remember that\\n'\n'this can be a single expression or a comma-separated list, the '\n'latter\\n'\n'yielding a tuple) and assigns the single resulting object to '\n'each of\\n'\n'the target lists, from left to right.\\n'\n'\\n'\n'Assignment is defined recursively depending on the form of the '\n'target\\n'\n'(list). When a target is part of a mutable object (an '\n'attribute\\n'\n'reference, subscription or slicing), the mutable object must\\n'\n'ultimately perform the assignment and decide about its '\n'validity, and\\n'\n'may raise an exception if the assignment is unacceptable. 
The '\n'rules\\n'\n'observed by various types and the exceptions raised are given '\n'with the\\n'\n'definition of the object types (see section The standard type\\n'\n'hierarchy).\\n'\n'\\n'\n'Assignment of an object to a target list, optionally enclosed '\n'in\\n'\n'parentheses or square brackets, is recursively defined as '\n'follows.\\n'\n'\\n'\n'* If the target list is a single target with no trailing '\n'comma,\\n'\n' optionally in parentheses, the object is assigned to that '\n'target.\\n'\n'\\n'\n'* Else: The object must be an iterable with the same number of '\n'items\\n'\n' as there are targets in the target list, and the items are '\n'assigned,\\n'\n' from left to right, to the corresponding targets.\\n'\n'\\n'\n' * If the target list contains one target prefixed with an '\n'asterisk,\\n'\n' called a \u201cstarred\u201d target: The object must be an iterable '\n'with at\\n'\n' least as many items as there are targets in the target '\n'list, minus\\n'\n' one. The first items of the iterable are assigned, from '\n'left to\\n'\n' right, to the targets before the starred target. The '\n'final items\\n'\n' of the iterable are assigned to the targets after the '\n'starred\\n'\n' target. A list of the remaining items in the iterable is '\n'then\\n'\n' assigned to the starred target (the list can be empty).\\n'\n'\\n'\n' * Else: The object must be an iterable with the same number '\n'of items\\n'\n' as there are targets in the target list, and the items '\n'are\\n'\n' assigned, from left to right, to the corresponding '\n'targets.\\n'\n'\\n'\n'Assignment of an object to a single target is recursively '\n'defined as\\n'\n'follows.\\n'\n'\\n'\n'* If the target is an identifier (name):\\n'\n'\\n'\n' * If the name does not occur in a \"global\" or \"nonlocal\" '\n'statement\\n'\n' in the current code block: the name is bound to the object '\n'in the\\n'\n' current local namespace.\\n'\n'\\n'\n' * Otherwise: the name is bound to the object in the global '\n'namespace\\n'\n' or the outer namespace determined by \"nonlocal\", '\n'respectively.\\n'\n'\\n'\n' The name is rebound if it was already bound. This may cause '\n'the\\n'\n' reference count for the object previously bound to the name '\n'to reach\\n'\n' zero, causing the object to be deallocated and its '\n'destructor (if it\\n'\n' has one) to be called.\\n'\n'\\n'\n'* If the target is an attribute reference: The primary '\n'expression in\\n'\n' the reference is evaluated. It should yield an object with\\n'\n' assignable attributes; if this is not the case, \"TypeError\" '\n'is\\n'\n' raised. That object is then asked to assign the assigned '\n'object to\\n'\n' the given attribute; if it cannot perform the assignment, it '\n'raises\\n'\n' an exception (usually but not necessarily '\n'\"AttributeError\").\\n'\n'\\n'\n' Note: If the object is a class instance and the attribute '\n'reference\\n'\n' occurs on both sides of the assignment operator, the '\n'right-hand side\\n'\n' expression, \"a.x\" can access either an instance attribute or '\n'(if no\\n'\n' instance attribute exists) a class attribute. The left-hand '\n'side\\n'\n' target \"a.x\" is always set as an instance attribute, '\n'creating it if\\n'\n' necessary. 
Thus, the two occurrences of \"a.x\" do not '\n'necessarily\\n'\n' refer to the same attribute: if the right-hand side '\n'expression\\n'\n' refers to a class attribute, the left-hand side creates a '\n'new\\n'\n' instance attribute as the target of the assignment:\\n'\n'\\n'\n' class Cls:\\n'\n' x = 3 # class variable\\n'\n' inst = Cls()\\n'\n' inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x '\n'as 3\\n'\n'\\n'\n' This description does not necessarily apply to descriptor\\n'\n' attributes, such as properties created with \"property()\".\\n'\n'\\n'\n'* If the target is a subscription: The primary expression in '\n'the\\n'\n' reference is evaluated. It should yield either a mutable '\n'sequence\\n'\n' object (such as a list) or a mapping object (such as a '\n'dictionary).\\n'\n' Next, the subscript expression is evaluated.\\n'\n'\\n'\n' If the primary is a mutable sequence object (such as a '\n'list), the\\n'\n' subscript must yield an integer. If it is negative, the '\n'sequence\u2019s\\n'\n' length is added to it. The resulting value must be a '\n'nonnegative\\n'\n' integer less than the sequence\u2019s length, and the sequence is '\n'asked\\n'\n' to assign the assigned object to its item with that index. '\n'If the\\n'\n' index is out of range, \"IndexError\" is raised (assignment to '\n'a\\n'\n' subscripted sequence cannot add new items to a list).\\n'\n'\\n'\n' If the primary is a mapping object (such as a dictionary), '\n'the\\n'\n' subscript must have a type compatible with the mapping\u2019s key '\n'type,\\n'\n' and the mapping is then asked to create a key/datum pair '\n'which maps\\n'\n' the subscript to the assigned object. This can either '\n'replace an\\n'\n' existing key/value pair with the same key value, or insert a '\n'new\\n'\n' key/value pair (if no key with the same value existed).\\n'\n'\\n'\n' For user-defined objects, the \"__setitem__()\" method is '\n'called with\\n'\n' appropriate arguments.\\n'\n'\\n'\n'* If the target is a slicing: The primary expression in the '\n'reference\\n'\n' is evaluated. It should yield a mutable sequence object '\n'(such as a\\n'\n' list). The assigned object should be a sequence object of '\n'the same\\n'\n' type. Next, the lower and upper bound expressions are '\n'evaluated,\\n'\n' insofar they are present; defaults are zero and the '\n'sequence\u2019s\\n'\n' length. The bounds should evaluate to integers. If either '\n'bound is\\n'\n' negative, the sequence\u2019s length is added to it. The '\n'resulting\\n'\n' bounds are clipped to lie between zero and the sequence\u2019s '\n'length,\\n'\n' inclusive. Finally, the sequence object is asked to replace '\n'the\\n'\n' slice with the items of the assigned sequence. 
The length '\n'of the\\n'\n' slice may be different from the length of the assigned '\n'sequence,\\n'\n' thus changing the length of the target sequence, if the '\n'target\\n'\n' sequence allows it.\\n'\n'\\n'\n'**CPython implementation detail:** In the current '\n'implementation, the\\n'\n'syntax for targets is taken to be the same as for expressions, '\n'and\\n'\n'invalid syntax is rejected during the code generation phase, '\n'causing\\n'\n'less detailed error messages.\\n'\n'\\n'\n'Although the definition of assignment implies that overlaps '\n'between\\n'\n'the left-hand side and the right-hand side are \u2018simultaneous\u2019 '\n'(for\\n'\n'example \"a, b = b, a\" swaps two variables), overlaps *within* '\n'the\\n'\n'collection of assigned-to variables occur left-to-right, '\n'sometimes\\n'\n'resulting in confusion. For instance, the following program '\n'prints\\n'\n'\"[0, 2]\":\\n'\n'\\n'\n' x = [0, 1]\\n'\n' i = 0\\n'\n' i, x[i] = 1, 2 # i is updated, then x[i] is '\n'updated\\n'\n' print(x)\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 3132** - Extended Iterable Unpacking\\n'\n' The specification for the \"*target\" feature.\\n'\n'\\n'\n'\\n'\n'Augmented assignment statements\\n'\n'===============================\\n'\n'\\n'\n'Augmented assignment is the combination, in a single '\n'statement, of a\\n'\n'binary operation and an assignment statement:\\n'\n'\\n'\n' augmented_assignment_stmt ::= augtarget augop '\n'(expression_list | yield_expression)\\n'\n' augtarget ::= identifier | attributeref | '\n'subscription | slicing\\n'\n' augop ::= \"+=\" | \"-=\" | \"*=\" | \"@=\" | '\n'\"/=\" | \"//=\" | \"%=\" | \"**=\"\\n'\n' | \">>=\" | \"<<=\" | \"&=\" | \"^=\" | \"|=\"\\n'\n'\\n'\n'(See section Primaries for the syntax definitions of the last '\n'three\\n'\n'symbols.)\\n'\n'\\n'\n'An augmented assignment evaluates the target (which, unlike '\n'normal\\n'\n'assignment statements, cannot be an unpacking) and the '\n'expression\\n'\n'list, performs the binary operation specific to the type of '\n'assignment\\n'\n'on the two operands, and assigns the result to the original '\n'target.\\n'\n'The target is only evaluated once.\\n'\n'\\n'\n'An augmented assignment expression like \"x += 1\" can be '\n'rewritten as\\n'\n'\"x = x + 1\" to achieve a similar, but not exactly equal '\n'effect. In the\\n'\n'augmented version, \"x\" is only evaluated once. Also, when '\n'possible,\\n'\n'the actual operation is performed *in-place*, meaning that '\n'rather than\\n'\n'creating a new object and assigning that to the target, the '\n'old object\\n'\n'is modified instead.\\n'\n'\\n'\n'Unlike normal assignments, augmented assignments evaluate the '\n'left-\\n'\n'hand side *before* evaluating the right-hand side. For '\n'example, \"a[i]\\n'\n'+= f(x)\" first looks-up \"a[i]\", then it evaluates \"f(x)\" and '\n'performs\\n'\n'the addition, and lastly, it writes the result back to '\n'\"a[i]\".\\n'\n'\\n'\n'With the exception of assigning to tuples and multiple targets '\n'in a\\n'\n'single statement, the assignment done by augmented assignment\\n'\n'statements is handled the same way as normal assignments. 
'\n'Similarly,\\n'\n'with the exception of the possible *in-place* behavior, the '\n'binary\\n'\n'operation performed by augmented assignment is the same as the '\n'normal\\n'\n'binary operations.\\n'\n'\\n'\n'For targets which are attribute references, the same caveat '\n'about\\n'\n'class and instance attributes applies as for regular '\n'assignments.\\n'\n'\\n'\n'\\n'\n'Annotated assignment statements\\n'\n'===============================\\n'\n'\\n'\n'*Annotation* assignment is the combination, in a single '\n'statement, of\\n'\n'a variable or attribute annotation and an optional assignment\\n'\n'statement:\\n'\n'\\n'\n' annotated_assignment_stmt ::= augtarget \":\" expression\\n'\n' [\"=\" (starred_expression | '\n'yield_expression)]\\n'\n'\\n'\n'The difference from normal Assignment statements is that only '\n'single\\n'\n'target is allowed.\\n'\n'\\n'\n'For simple names as assignment targets, if in class or module '\n'scope,\\n'\n'the annotations are evaluated and stored in a special class or '\n'module\\n'\n'attribute \"__annotations__\" that is a dictionary mapping from '\n'variable\\n'\n'names (mangled if private) to evaluated annotations. This '\n'attribute is\\n'\n'writable and is automatically created at the start of class or '\n'module\\n'\n'body execution, if annotations are found statically.\\n'\n'\\n'\n'For expressions as assignment targets, the annotations are '\n'evaluated\\n'\n'if in class or module scope, but not stored.\\n'\n'\\n'\n'If a name is annotated in a function scope, then this name is '\n'local\\n'\n'for that scope. Annotations are never evaluated and stored in '\n'function\\n'\n'scopes.\\n'\n'\\n'\n'If the right hand side is present, an annotated assignment '\n'performs\\n'\n'the actual assignment before evaluating annotations (where\\n'\n'applicable). If the right hand side is not present for an '\n'expression\\n'\n'target, then the interpreter evaluates the target except for '\n'the last\\n'\n'\"__setitem__()\" or \"__setattr__()\" call.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 526** - Syntax for Variable Annotations\\n'\n' The proposal that added syntax for annotating the types '\n'of\\n'\n' variables (including class variables and instance '\n'variables),\\n'\n' instead of expressing them through comments.\\n'\n'\\n'\n' **PEP 484** - Type hints\\n'\n' The proposal that added the \"typing\" module to provide a '\n'standard\\n'\n' syntax for type annotations that can be used in static '\n'analysis\\n'\n' tools and IDEs.\\n'\n'\\n'\n'Changed in version 3.8: Now annotated assignments allow same\\n'\n'expressions in the right hand side as the regular '\n'assignments.\\n'\n'Previously, some expressions (like un-parenthesized tuple '\n'expressions)\\n'\n'caused a syntax error.\\n',\n'async':'Coroutines\\n'\n'**********\\n'\n'\\n'\n'New in version 3.5.\\n'\n'\\n'\n'\\n'\n'Coroutine function definition\\n'\n'=============================\\n'\n'\\n'\n' async_funcdef ::= [decorators] \"async\" \"def\" funcname \"(\" '\n'[parameter_list] \")\"\\n'\n' [\"->\" expression] \":\" suite\\n'\n'\\n'\n'Execution of Python coroutines can be suspended and resumed at '\n'many\\n'\n'points (see *coroutine*). 
\"await\" expressions, \"async for\" and '\n'\"async\\n'\n'with\" can only be used in the body of a coroutine function.\\n'\n'\\n'\n'Functions defined with \"async def\" syntax are always coroutine\\n'\n'functions, even if they do not contain \"await\" or \"async\" '\n'keywords.\\n'\n'\\n'\n'It is a \"SyntaxError\" to use a \"yield from\" expression inside the '\n'body\\n'\n'of a coroutine function.\\n'\n'\\n'\n'An example of a coroutine function:\\n'\n'\\n'\n' async def func(param1, param2):\\n'\n' do_stuff()\\n'\n' await some_coroutine()\\n'\n'\\n'\n'Changed in version 3.7: \"await\" and \"async\" are now keywords;\\n'\n'previously they were only treated as such inside the body of a\\n'\n'coroutine function.\\n'\n'\\n'\n'\\n'\n'The \"async for\" statement\\n'\n'=========================\\n'\n'\\n'\n' async_for_stmt ::= \"async\" for_stmt\\n'\n'\\n'\n'An *asynchronous iterable* provides an \"__aiter__\" method that\\n'\n'directly returns an *asynchronous iterator*, which can call\\n'\n'asynchronous code in its \"__anext__\" method.\\n'\n'\\n'\n'The \"async for\" statement allows convenient iteration over\\n'\n'asynchronous iterables.\\n'\n'\\n'\n'The following code:\\n'\n'\\n'\n' async for TARGET in ITER:\\n'\n' SUITE\\n'\n' else:\\n'\n' SUITE2\\n'\n'\\n'\n'Is semantically equivalent to:\\n'\n'\\n'\n' iter = (ITER)\\n'\n' iter = type(iter).__aiter__(iter)\\n'\n' running = True\\n'\n'\\n'\n' while running:\\n'\n' try:\\n'\n' TARGET = await type(iter).__anext__(iter)\\n'\n' except StopAsyncIteration:\\n'\n' running = False\\n'\n' else:\\n'\n' SUITE\\n'\n' else:\\n'\n' SUITE2\\n'\n'\\n'\n'See also \"__aiter__()\" and \"__anext__()\" for details.\\n'\n'\\n'\n'It is a \"SyntaxError\" to use an \"async for\" statement outside the '\n'body\\n'\n'of a coroutine function.\\n'\n'\\n'\n'\\n'\n'The \"async with\" statement\\n'\n'==========================\\n'\n'\\n'\n' async_with_stmt ::= \"async\" with_stmt\\n'\n'\\n'\n'An *asynchronous context manager* is a *context manager* that is '\n'able\\n'\n'to suspend execution in its *enter* and *exit* methods.\\n'\n'\\n'\n'The following code:\\n'\n'\\n'\n' async with EXPRESSION as TARGET:\\n'\n' SUITE\\n'\n'\\n'\n'is semantically equivalent to:\\n'\n'\\n'\n' manager = (EXPRESSION)\\n'\n' aenter = type(manager).__aenter__\\n'\n' aexit = type(manager).__aexit__\\n'\n' value = await aenter(manager)\\n'\n' hit_except = False\\n'\n'\\n'\n' try:\\n'\n' TARGET = value\\n'\n' SUITE\\n'\n' except:\\n'\n' hit_except = True\\n'\n' if not await aexit(manager, *sys.exc_info()):\\n'\n' raise\\n'\n' finally:\\n'\n' if not hit_except:\\n'\n' await aexit(manager, None, None, None)\\n'\n'\\n'\n'See also \"__aenter__()\" and \"__aexit__()\" for details.\\n'\n'\\n'\n'It is a \"SyntaxError\" to use an \"async with\" statement outside the\\n'\n'body of a coroutine function.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 492** - Coroutines with async and await syntax\\n'\n' The proposal that made coroutines a proper standalone concept '\n'in\\n'\n' Python, and added supporting syntax.\\n'\n'\\n'\n'-[ Footnotes ]-\\n'\n'\\n'\n'[1] The exception is propagated to the invocation stack unless '\n'there\\n'\n' is a \"finally\" clause which happens to raise another '\n'exception.\\n'\n' That new exception causes the old one to be lost.\\n'\n'\\n'\n'[2] In pattern matching, a sequence is defined as one of the\\n'\n' following:\\n'\n'\\n'\n' * a class that inherits from \"collections.abc.Sequence\"\\n'\n'\\n'\n' * a Python class that has been registered as\\n'\n' 
\"collections.abc.Sequence\"\\n'\n'\\n'\n' * a builtin class that has its (CPython) '\n'\"Py_TPFLAGS_SEQUENCE\"\\n'\n' bit set\\n'\n'\\n'\n' * a class that inherits from any of the above\\n'\n'\\n'\n' The following standard library classes are sequences:\\n'\n'\\n'\n' * \"array.array\"\\n'\n'\\n'\n' * \"collections.deque\"\\n'\n'\\n'\n' * \"list\"\\n'\n'\\n'\n' * \"memoryview\"\\n'\n'\\n'\n' * \"range\"\\n'\n'\\n'\n' * \"tuple\"\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' Subject values of type \"str\", \"bytes\", and \"bytearray\" do '\n'not\\n'\n' match sequence patterns.\\n'\n'\\n'\n'[3] In pattern matching, a mapping is defined as one of the '\n'following:\\n'\n'\\n'\n' * a class that inherits from \"collections.abc.Mapping\"\\n'\n'\\n'\n' * a Python class that has been registered as\\n'\n' \"collections.abc.Mapping\"\\n'\n'\\n'\n' * a builtin class that has its (CPython) '\n'\"Py_TPFLAGS_MAPPING\"\\n'\n' bit set\\n'\n'\\n'\n' * a class that inherits from any of the above\\n'\n'\\n'\n' The standard library classes \"dict\" and '\n'\"types.MappingProxyType\"\\n'\n' are mappings.\\n'\n'\\n'\n'[4] A string literal appearing as the first statement in the '\n'function\\n'\n' body is transformed into the function\u2019s \"__doc__\" attribute '\n'and\\n'\n' therefore the function\u2019s *docstring*.\\n'\n'\\n'\n'[5] A string literal appearing as the first statement in the class\\n'\n' body is transformed into the namespace\u2019s \"__doc__\" item and\\n'\n' therefore the class\u2019s *docstring*.\\n',\n'atom-identifiers':'Identifiers (Names)\\n'\n'*******************\\n'\n'\\n'\n'An identifier occurring as an atom is a name. See '\n'section Identifiers\\n'\n'and keywords for lexical definition and section Naming '\n'and binding for\\n'\n'documentation of naming and binding.\\n'\n'\\n'\n'When the name is bound to an object, evaluation of the '\n'atom yields\\n'\n'that object. When a name is not bound, an attempt to '\n'evaluate it\\n'\n'raises a \"NameError\" exception.\\n'\n'\\n'\n'**Private name mangling:** When an identifier that '\n'textually occurs in\\n'\n'a class definition begins with two or more underscore '\n'characters and\\n'\n'does not end in two or more underscores, it is '\n'considered a *private\\n'\n'name* of that class. Private names are transformed to a '\n'longer form\\n'\n'before code is generated for them. The transformation '\n'inserts the\\n'\n'class name, with leading underscores removed and a '\n'single underscore\\n'\n'inserted, in front of the name. For example, the '\n'identifier \"__spam\"\\n'\n'occurring in a class named \"Ham\" will be transformed to '\n'\"_Ham__spam\".\\n'\n'This transformation is independent of the syntactical '\n'context in which\\n'\n'the identifier is used. If the transformed name is '\n'extremely long\\n'\n'(longer than 255 characters), implementation defined '\n'truncation may\\n'\n'happen. If the class name consists only of underscores, '\n'no\\n'\n'transformation is done.\\n',\n'atom-literals':'Literals\\n'\n'********\\n'\n'\\n'\n'Python supports string and bytes literals and various '\n'numeric\\n'\n'literals:\\n'\n'\\n'\n' literal ::= stringliteral | bytesliteral\\n'\n' | integer | floatnumber | imagnumber\\n'\n'\\n'\n'Evaluation of a literal yields an object of the given type '\n'(string,\\n'\n'bytes, integer, floating point number, complex number) with '\n'the given\\n'\n'value. The value may be approximated in the case of '\n'floating point\\n'\n'and imaginary (complex) literals. 
See section Literals for '\n'details.\\n'\n'\\n'\n'All literals correspond to immutable data types, and hence '\n'the\\n'\n'object\u2019s identity is less important than its value. '\n'Multiple\\n'\n'evaluations of literals with the same value (either the '\n'same\\n'\n'occurrence in the program text or a different occurrence) '\n'may obtain\\n'\n'the same object or a different object with the same '\n'value.\\n',\n'attribute-access':'Customizing attribute access\\n'\n'****************************\\n'\n'\\n'\n'The following methods can be defined to customize the '\n'meaning of\\n'\n'attribute access (use of, assignment to, or deletion of '\n'\"x.name\") for\\n'\n'class instances.\\n'\n'\\n'\n'object.__getattr__(self, name)\\n'\n'\\n'\n' Called when the default attribute access fails with '\n'an\\n'\n' \"AttributeError\" (either \"__getattribute__()\" raises '\n'an\\n'\n' \"AttributeError\" because *name* is not an instance '\n'attribute or an\\n'\n' attribute in the class tree for \"self\"; or '\n'\"__get__()\" of a *name*\\n'\n' property raises \"AttributeError\"). This method '\n'should either\\n'\n' return the (computed) attribute value or raise an '\n'\"AttributeError\"\\n'\n' exception.\\n'\n'\\n'\n' Note that if the attribute is found through the '\n'normal mechanism,\\n'\n' \"__getattr__()\" is not called. (This is an '\n'intentional asymmetry\\n'\n' between \"__getattr__()\" and \"__setattr__()\".) This is '\n'done both for\\n'\n' efficiency reasons and because otherwise '\n'\"__getattr__()\" would have\\n'\n' no way to access other attributes of the instance. '\n'Note that at\\n'\n' least for instance variables, you can fake total '\n'control by not\\n'\n' inserting any values in the instance attribute '\n'dictionary (but\\n'\n' instead inserting them in another object). See the\\n'\n' \"__getattribute__()\" method below for a way to '\n'actually get total\\n'\n' control over attribute access.\\n'\n'\\n'\n'object.__getattribute__(self, name)\\n'\n'\\n'\n' Called unconditionally to implement attribute '\n'accesses for\\n'\n' instances of the class. If the class also defines '\n'\"__getattr__()\",\\n'\n' the latter will not be called unless '\n'\"__getattribute__()\" either\\n'\n' calls it explicitly or raises an \"AttributeError\". '\n'This method\\n'\n' should return the (computed) attribute value or raise '\n'an\\n'\n' \"AttributeError\" exception. In order to avoid '\n'infinite recursion in\\n'\n' this method, its implementation should always call '\n'the base class\\n'\n' method with the same name to access any attributes it '\n'needs, for\\n'\n' example, \"object.__getattribute__(self, name)\".\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' This method may still be bypassed when looking up '\n'special methods\\n'\n' as the result of implicit invocation via language '\n'syntax or\\n'\n' built-in functions. See Special method lookup.\\n'\n'\\n'\n' For certain sensitive attribute accesses, raises an '\n'auditing event\\n'\n' \"object.__getattr__\" with arguments \"obj\" and '\n'\"name\".\\n'\n'\\n'\n'object.__setattr__(self, name, value)\\n'\n'\\n'\n' Called when an attribute assignment is attempted. '\n'This is called\\n'\n' instead of the normal mechanism (i.e. store the value '\n'in the\\n'\n' instance dictionary). 
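A minimal sketch of the "__getattr__()" fallback described above; Config and defaults are illustrative names, not part of the reference text:

    class Config:
        defaults = {"timeout": 30}

        def __getattr__(self, name):
            # Only called after normal attribute lookup has already failed.
            try:
                return self.defaults[name]
            except KeyError:
                raise AttributeError(name) from None

    c = Config()
    c.retries = 5
    print(c.retries)    # 5  -- found normally, __getattr__ is not called
    print(c.timeout)    # 30 -- normal lookup fails, __getattr__ supplies the value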
*name* is the attribute name, '\n'*value* is the\\n'\n' value to be assigned to it.\\n'\n'\\n'\n' If \"__setattr__()\" wants to assign to an instance '\n'attribute, it\\n'\n' should call the base class method with the same name, '\n'for example,\\n'\n' \"object.__setattr__(self, name, value)\".\\n'\n'\\n'\n' For certain sensitive attribute assignments, raises '\n'an auditing\\n'\n' event \"object.__setattr__\" with arguments \"obj\", '\n'\"name\", \"value\".\\n'\n'\\n'\n'object.__delattr__(self, name)\\n'\n'\\n'\n' Like \"__setattr__()\" but for attribute deletion '\n'instead of\\n'\n' assignment. This should only be implemented if \"del '\n'obj.name\" is\\n'\n' meaningful for the object.\\n'\n'\\n'\n' For certain sensitive attribute deletions, raises an '\n'auditing event\\n'\n' \"object.__delattr__\" with arguments \"obj\" and '\n'\"name\".\\n'\n'\\n'\n'object.__dir__(self)\\n'\n'\\n'\n' Called when \"dir()\" is called on the object. A '\n'sequence must be\\n'\n' returned. \"dir()\" converts the returned sequence to a '\n'list and\\n'\n' sorts it.\\n'\n'\\n'\n'\\n'\n'Customizing module attribute access\\n'\n'===================================\\n'\n'\\n'\n'Special names \"__getattr__\" and \"__dir__\" can be also '\n'used to\\n'\n'customize access to module attributes. The \"__getattr__\" '\n'function at\\n'\n'the module level should accept one argument which is the '\n'name of an\\n'\n'attribute and return the computed value or raise an '\n'\"AttributeError\".\\n'\n'If an attribute is not found on a module object through '\n'the normal\\n'\n'lookup, i.e. \"object.__getattribute__()\", then '\n'\"__getattr__\" is\\n'\n'searched in the module \"__dict__\" before raising an '\n'\"AttributeError\".\\n'\n'If found, it is called with the attribute name and the '\n'result is\\n'\n'returned.\\n'\n'\\n'\n'The \"__dir__\" function should accept no arguments, and '\n'return a\\n'\n'sequence of strings that represents the names accessible '\n'on module. If\\n'\n'present, this function overrides the standard \"dir()\" '\n'search on a\\n'\n'module.\\n'\n'\\n'\n'For a more fine grained customization of the module '\n'behavior (setting\\n'\n'attributes, properties, etc.), one can set the '\n'\"__class__\" attribute\\n'\n'of a module object to a subclass of \"types.ModuleType\". 
'\n'For example:\\n'\n'\\n'\n' import sys\\n'\n' from types import ModuleType\\n'\n'\\n'\n' class VerboseModule(ModuleType):\\n'\n' def __repr__(self):\\n'\n\" return f'Verbose {self.__name__}'\\n\"\n'\\n'\n' def __setattr__(self, attr, value):\\n'\n\" print(f'Setting {attr}...')\\n\"\n' super().__setattr__(attr, value)\\n'\n'\\n'\n' sys.modules[__name__].__class__ = VerboseModule\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' Defining module \"__getattr__\" and setting module '\n'\"__class__\" only\\n'\n' affect lookups made using the attribute access syntax '\n'\u2013 directly\\n'\n' accessing the module globals (whether by code within '\n'the module, or\\n'\n' via a reference to the module\u2019s globals dictionary) is '\n'unaffected.\\n'\n'\\n'\n'Changed in version 3.5: \"__class__\" module attribute is '\n'now writable.\\n'\n'\\n'\n'New in version 3.7: \"__getattr__\" and \"__dir__\" module '\n'attributes.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 562** - Module __getattr__ and __dir__\\n'\n' Describes the \"__getattr__\" and \"__dir__\" functions '\n'on modules.\\n'\n'\\n'\n'\\n'\n'Implementing Descriptors\\n'\n'========================\\n'\n'\\n'\n'The following methods only apply when an instance of the '\n'class\\n'\n'containing the method (a so-called *descriptor* class) '\n'appears in an\\n'\n'*owner* class (the descriptor must be in either the '\n'owner\u2019s class\\n'\n'dictionary or in the class dictionary for one of its '\n'parents). In the\\n'\n'examples below, \u201cthe attribute\u201d refers to the attribute '\n'whose name is\\n'\n'the key of the property in the owner class\u2019 \"__dict__\".\\n'\n'\\n'\n'object.__get__(self, instance, owner=None)\\n'\n'\\n'\n' Called to get the attribute of the owner class (class '\n'attribute\\n'\n' access) or of an instance of that class (instance '\n'attribute\\n'\n' access). The optional *owner* argument is the owner '\n'class, while\\n'\n' *instance* is the instance that the attribute was '\n'accessed through,\\n'\n' or \"None\" when the attribute is accessed through the '\n'*owner*.\\n'\n'\\n'\n' This method should return the computed attribute '\n'value or raise an\\n'\n' \"AttributeError\" exception.\\n'\n'\\n'\n' **PEP 252** specifies that \"__get__()\" is callable '\n'with one or two\\n'\n' arguments. Python\u2019s own built-in descriptors support '\n'this\\n'\n' specification; however, it is likely that some '\n'third-party tools\\n'\n' have descriptors that require both arguments. '\n'Python\u2019s own\\n'\n' \"__getattribute__()\" implementation always passes in '\n'both arguments\\n'\n' whether they are required or not.\\n'\n'\\n'\n'object.__set__(self, instance, value)\\n'\n'\\n'\n' Called to set the attribute on an instance *instance* '\n'of the owner\\n'\n' class to a new value, *value*.\\n'\n'\\n'\n' Note, adding \"__set__()\" or \"__delete__()\" changes '\n'the kind of\\n'\n' descriptor to a \u201cdata descriptor\u201d. See Invoking '\n'Descriptors for\\n'\n' more details.\\n'\n'\\n'\n'object.__delete__(self, instance)\\n'\n'\\n'\n' Called to delete the attribute on an instance '\n'*instance* of the\\n'\n' owner class.\\n'\n'\\n'\n'object.__set_name__(self, owner, name)\\n'\n'\\n'\n' Called at the time the owning class *owner* is '\n'created. 
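A minimal sketch of a data descriptor using "__set_name__()", "__get__()" and "__set__()" as described above; Positive and Order are illustrative names:

    class Positive:
        def __set_name__(self, owner, name):
            self.name = name                     # called when the owner class is created

        def __get__(self, instance, owner=None):
            if instance is None:
                return self                      # accessed on the class itself
            return instance.__dict__[self.name]

        def __set__(self, instance, value):
            if value <= 0:
                raise ValueError(f"{self.name} must be positive")
            instance.__dict__[self.name] = value

    class Order:
        quantity = Positive()

    o = Order()
    o.quantity = 3
    print(o.quantity)     # 3
    # o.quantity = -1     # would raise ValueError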
The\\n'\n' descriptor has been assigned to *name*.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' \"__set_name__()\" is only called implicitly as part '\n'of the \"type\"\\n'\n' constructor, so it will need to be called '\n'explicitly with the\\n'\n' appropriate parameters when a descriptor is added '\n'to a class\\n'\n' after initial creation:\\n'\n'\\n'\n' class A:\\n'\n' pass\\n'\n' descr = custom_descriptor()\\n'\n' A.attr = descr\\n'\n\" descr.__set_name__(A, 'attr')\\n\"\n'\\n'\n' See Creating the class object for more details.\\n'\n'\\n'\n' New in version 3.6.\\n'\n'\\n'\n'The attribute \"__objclass__\" is interpreted by the '\n'\"inspect\" module as\\n'\n'specifying the class where this object was defined '\n'(setting this\\n'\n'appropriately can assist in runtime introspection of '\n'dynamic class\\n'\n'attributes). For callables, it may indicate that an '\n'instance of the\\n'\n'given type (or a subclass) is expected or required as '\n'the first\\n'\n'positional argument (for example, CPython sets this '\n'attribute for\\n'\n'unbound methods that are implemented in C).\\n'\n'\\n'\n'\\n'\n'Invoking Descriptors\\n'\n'====================\\n'\n'\\n'\n'In general, a descriptor is an object attribute with '\n'\u201cbinding\\n'\n'behavior\u201d, one whose attribute access has been '\n'overridden by methods\\n'\n'in the descriptor protocol: \"__get__()\", \"__set__()\", '\n'and\\n'\n'\"__delete__()\". If any of those methods are defined for '\n'an object, it\\n'\n'is said to be a descriptor.\\n'\n'\\n'\n'The default behavior for attribute access is to get, '\n'set, or delete\\n'\n'the attribute from an object\u2019s dictionary. For instance, '\n'\"a.x\" has a\\n'\n'lookup chain starting with \"a.__dict__[\\'x\\']\", then\\n'\n'\"type(a).__dict__[\\'x\\']\", and continuing through the '\n'base classes of\\n'\n'\"type(a)\" excluding metaclasses.\\n'\n'\\n'\n'However, if the looked-up value is an object defining '\n'one of the\\n'\n'descriptor methods, then Python may override the default '\n'behavior and\\n'\n'invoke the descriptor method instead. Where this occurs '\n'in the\\n'\n'precedence chain depends on which descriptor methods '\n'were defined and\\n'\n'how they were called.\\n'\n'\\n'\n'The starting point for descriptor invocation is a '\n'binding, \"a.x\". How\\n'\n'the arguments are assembled depends on \"a\":\\n'\n'\\n'\n'Direct Call\\n'\n' The simplest and least common call is when user code '\n'directly\\n'\n' invokes a descriptor method: \"x.__get__(a)\".\\n'\n'\\n'\n'Instance Binding\\n'\n' If binding to an object instance, \"a.x\" is '\n'transformed into the\\n'\n' call: \"type(a).__dict__[\\'x\\'].__get__(a, type(a))\".\\n'\n'\\n'\n'Class Binding\\n'\n' If binding to a class, \"A.x\" is transformed into the '\n'call:\\n'\n' \"A.__dict__[\\'x\\'].__get__(None, A)\".\\n'\n'\\n'\n'Super Binding\\n'\n' If \"a\" is an instance of \"super\", then the binding '\n'\"super(B,\\n'\n' obj).m()\" searches \"obj.__class__.__mro__\" for the '\n'base class \"A\"\\n'\n' immediately preceding \"B\" and then invokes the '\n'descriptor with the\\n'\n' call: \"A.__dict__[\\'m\\'].__get__(obj, '\n'obj.__class__)\".\\n'\n'\\n'\n'For instance bindings, the precedence of descriptor '\n'invocation depends\\n'\n'on which descriptor methods are defined. A descriptor '\n'can define any\\n'\n'combination of \"__get__()\", \"__set__()\" and '\n'\"__delete__()\". 
If it\\n'\n'does not define \"__get__()\", then accessing the '\n'attribute will return\\n'\n'the descriptor object itself unless there is a value in '\n'the object\u2019s\\n'\n'instance dictionary. If the descriptor defines '\n'\"__set__()\" and/or\\n'\n'\"__delete__()\", it is a data descriptor; if it defines '\n'neither, it is\\n'\n'a non-data descriptor. Normally, data descriptors '\n'define both\\n'\n'\"__get__()\" and \"__set__()\", while non-data descriptors '\n'have just the\\n'\n'\"__get__()\" method. Data descriptors with \"__get__()\" '\n'and \"__set__()\"\\n'\n'(and/or \"__delete__()\") defined always override a '\n'redefinition in an\\n'\n'instance dictionary. In contrast, non-data descriptors '\n'can be\\n'\n'overridden by instances.\\n'\n'\\n'\n'Python methods (including \"staticmethod()\" and '\n'\"classmethod()\") are\\n'\n'implemented as non-data descriptors. Accordingly, '\n'instances can\\n'\n'redefine and override methods. This allows individual '\n'instances to\\n'\n'acquire behaviors that differ from other instances of '\n'the same class.\\n'\n'\\n'\n'The \"property()\" function is implemented as a data '\n'descriptor.\\n'\n'Accordingly, instances cannot override the behavior of a '\n'property.\\n'\n'\\n'\n'\\n'\n'__slots__\\n'\n'=========\\n'\n'\\n'\n'*__slots__* allow us to explicitly declare data members '\n'(like\\n'\n'properties) and deny the creation of *__dict__* and '\n'*__weakref__*\\n'\n'(unless explicitly declared in *__slots__* or available '\n'in a parent.)\\n'\n'\\n'\n'The space saved over using *__dict__* can be '\n'significant. Attribute\\n'\n'lookup speed can be significantly improved as well.\\n'\n'\\n'\n'object.__slots__\\n'\n'\\n'\n' This class variable can be assigned a string, '\n'iterable, or sequence\\n'\n' of strings with variable names used by instances. '\n'*__slots__*\\n'\n' reserves space for the declared variables and '\n'prevents the\\n'\n' automatic creation of *__dict__* and *__weakref__* '\n'for each\\n'\n' instance.\\n'\n'\\n'\n'\\n'\n'Notes on using *__slots__*\\n'\n'--------------------------\\n'\n'\\n'\n'* When inheriting from a class without *__slots__*, the '\n'*__dict__* and\\n'\n' *__weakref__* attribute of the instances will always '\n'be accessible.\\n'\n'\\n'\n'* Without a *__dict__* variable, instances cannot be '\n'assigned new\\n'\n' variables not listed in the *__slots__* definition. '\n'Attempts to\\n'\n' assign to an unlisted variable name raises '\n'\"AttributeError\". If\\n'\n' dynamic assignment of new variables is desired, then '\n'add\\n'\n' \"\\'__dict__\\'\" to the sequence of strings in the '\n'*__slots__*\\n'\n' declaration.\\n'\n'\\n'\n'* Without a *__weakref__* variable for each instance, '\n'classes defining\\n'\n' *__slots__* do not support weak references to its '\n'instances. If weak\\n'\n' reference support is needed, then add '\n'\"\\'__weakref__\\'\" to the\\n'\n' sequence of strings in the *__slots__* declaration.\\n'\n'\\n'\n'* *__slots__* are implemented at the class level by '\n'creating\\n'\n' descriptors (Implementing Descriptors) for each '\n'variable name. As a\\n'\n' result, class attributes cannot be used to set default '\n'values for\\n'\n' instance variables defined by *__slots__*; otherwise, '\n'the class\\n'\n' attribute would overwrite the descriptor assignment.\\n'\n'\\n'\n'* The action of a *__slots__* declaration is not limited '\n'to the class\\n'\n' where it is defined. *__slots__* declared in parents '\n'are available\\n'\n' in child classes. 
However, child subclasses will get a '\n'*__dict__*\\n'\n' and *__weakref__* unless they also define *__slots__* '\n'(which should\\n'\n' only contain names of any *additional* slots).\\n'\n'\\n'\n'* If a class defines a slot also defined in a base '\n'class, the instance\\n'\n' variable defined by the base class slot is '\n'inaccessible (except by\\n'\n' retrieving its descriptor directly from the base '\n'class). This\\n'\n' renders the meaning of the program undefined. In the '\n'future, a\\n'\n' check may be added to prevent this.\\n'\n'\\n'\n'* Nonempty *__slots__* does not work for classes derived '\n'from\\n'\n' \u201cvariable-length\u201d built-in types such as \"int\", '\n'\"bytes\" and \"tuple\".\\n'\n'\\n'\n'* Any non-string iterable may be assigned to '\n'*__slots__*. Mappings may\\n'\n' also be used; however, in the future, special meaning '\n'may be\\n'\n' assigned to the values corresponding to each key.\\n'\n'\\n'\n'* *__class__* assignment works only if both classes have '\n'the same\\n'\n' *__slots__*.\\n'\n'\\n'\n'* Multiple inheritance with multiple slotted parent '\n'classes can be\\n'\n' used, but only one parent is allowed to have '\n'attributes created by\\n'\n' slots (the other bases must have empty slot layouts) - '\n'violations\\n'\n' raise \"TypeError\".\\n'\n'\\n'\n'* If an iterator is used for *__slots__* then a '\n'descriptor is created\\n'\n' for each of the iterator\u2019s values. However, the '\n'*__slots__*\\n'\n' attribute will be an empty iterator.\\n',\n'attribute-references':'Attribute references\\n'\n'********************\\n'\n'\\n'\n'An attribute reference is a primary followed by a '\n'period and a name:\\n'\n'\\n'\n' attributeref ::= primary \".\" identifier\\n'\n'\\n'\n'The primary must evaluate to an object of a type '\n'that supports\\n'\n'attribute references, which most objects do. This '\n'object is then\\n'\n'asked to produce the attribute whose name is the '\n'identifier. This\\n'\n'production can be customized by overriding the '\n'\"__getattr__()\" method.\\n'\n'If this attribute is not available, the exception '\n'\"AttributeError\" is\\n'\n'raised. Otherwise, the type and value of the object '\n'produced is\\n'\n'determined by the object. Multiple evaluations of '\n'the same attribute\\n'\n'reference may yield different objects.\\n',\n'augassign':'Augmented assignment statements\\n'\n'*******************************\\n'\n'\\n'\n'Augmented assignment is the combination, in a single statement, '\n'of a\\n'\n'binary operation and an assignment statement:\\n'\n'\\n'\n' augmented_assignment_stmt ::= augtarget augop '\n'(expression_list | yield_expression)\\n'\n' augtarget ::= identifier | attributeref | '\n'subscription | slicing\\n'\n' augop ::= \"+=\" | \"-=\" | \"*=\" | \"@=\" | '\n'\"/=\" | \"//=\" | \"%=\" | \"**=\"\\n'\n' | \">>=\" | \"<<=\" | \"&=\" | \"^=\" | \"|=\"\\n'\n'\\n'\n'(See section Primaries for the syntax definitions of the last '\n'three\\n'\n'symbols.)\\n'\n'\\n'\n'An augmented assignment evaluates the target (which, unlike '\n'normal\\n'\n'assignment statements, cannot be an unpacking) and the '\n'expression\\n'\n'list, performs the binary operation specific to the type of '\n'assignment\\n'\n'on the two operands, and assigns the result to the original '\n'target.\\n'\n'The target is only evaluated once.\\n'\n'\\n'\n'An augmented assignment expression like \"x += 1\" can be '\n'rewritten as\\n'\n'\"x = x + 1\" to achieve a similar, but not exactly equal effect. 
'\n'In the\\n'\n'augmented version, \"x\" is only evaluated once. Also, when '\n'possible,\\n'\n'the actual operation is performed *in-place*, meaning that '\n'rather than\\n'\n'creating a new object and assigning that to the target, the old '\n'object\\n'\n'is modified instead.\\n'\n'\\n'\n'Unlike normal assignments, augmented assignments evaluate the '\n'left-\\n'\n'hand side *before* evaluating the right-hand side. For '\n'example, \"a[i]\\n'\n'+= f(x)\" first looks-up \"a[i]\", then it evaluates \"f(x)\" and '\n'performs\\n'\n'the addition, and lastly, it writes the result back to \"a[i]\".\\n'\n'\\n'\n'With the exception of assigning to tuples and multiple targets '\n'in a\\n'\n'single statement, the assignment done by augmented assignment\\n'\n'statements is handled the same way as normal assignments. '\n'Similarly,\\n'\n'with the exception of the possible *in-place* behavior, the '\n'binary\\n'\n'operation performed by augmented assignment is the same as the '\n'normal\\n'\n'binary operations.\\n'\n'\\n'\n'For targets which are attribute references, the same caveat '\n'about\\n'\n'class and instance attributes applies as for regular '\n'assignments.\\n',\n'await':'Await expression\\n'\n'****************\\n'\n'\\n'\n'Suspend the execution of *coroutine* on an *awaitable* object. Can\\n'\n'only be used inside a *coroutine function*.\\n'\n'\\n'\n' await_expr ::= \"await\" primary\\n'\n'\\n'\n'New in version 3.5.\\n',\n'binary':'Binary arithmetic operations\\n'\n'****************************\\n'\n'\\n'\n'The binary arithmetic operations have the conventional priority\\n'\n'levels. Note that some of these operations also apply to certain '\n'non-\\n'\n'numeric types. Apart from the power operator, there are only two\\n'\n'levels, one for multiplicative operators and one for additive\\n'\n'operators:\\n'\n'\\n'\n' m_expr ::= u_expr | m_expr \"*\" u_expr | m_expr \"@\" m_expr |\\n'\n' m_expr \"//\" u_expr | m_expr \"/\" u_expr |\\n'\n' m_expr \"%\" u_expr\\n'\n' a_expr ::= m_expr | a_expr \"+\" m_expr | a_expr \"-\" m_expr\\n'\n'\\n'\n'The \"*\" (multiplication) operator yields the product of its '\n'arguments.\\n'\n'The arguments must either both be numbers, or one argument must be '\n'an\\n'\n'integer and the other must be a sequence. In the former case, the\\n'\n'numbers are converted to a common type and then multiplied '\n'together.\\n'\n'In the latter case, sequence repetition is performed; a negative\\n'\n'repetition factor yields an empty sequence.\\n'\n'\\n'\n'This operation can be customized using the special \"__mul__()\" '\n'and\\n'\n'\"__rmul__()\" methods.\\n'\n'\\n'\n'The \"@\" (at) operator is intended to be used for matrix\\n'\n'multiplication. No builtin Python types implement this operator.\\n'\n'\\n'\n'New in version 3.5.\\n'\n'\\n'\n'The \"/\" (division) and \"//\" (floor division) operators yield the\\n'\n'quotient of their arguments. The numeric arguments are first\\n'\n'converted to a common type. Division of integers yields a float, '\n'while\\n'\n'floor division of integers results in an integer; the result is '\n'that\\n'\n'of mathematical division with the \u2018floor\u2019 function applied to the\\n'\n'result. Division by zero raises the \"ZeroDivisionError\" '\n'exception.\\n'\n'\\n'\n'This operation can be customized using the special \"__div__()\" '\n'and\\n'\n'\"__floordiv__()\" methods.\\n'\n'\\n'\n'The \"%\" (modulo) operator yields the remainder from the division '\n'of\\n'\n'the first argument by the second. 
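A few concrete values illustrating floor division ("//") and the modulo operator ("%") discussed here:

    print(7 // 3, 7 % 3)      # 2 1
    print(-7 // 3, -7 % 3)    # -3 2  -- the result of % takes the sign of the divisor
    print(7 // -3, 7 % -3)    # -3 -2
    x, y = -7, 3
    print(x == (x // y) * y + (x % y))   # True: the identity relating // and %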
The numeric arguments are '\n'first\\n'\n'converted to a common type. A zero right argument raises the\\n'\n'\"ZeroDivisionError\" exception. The arguments may be floating '\n'point\\n'\n'numbers, e.g., \"3.14%0.7\" equals \"0.34\" (since \"3.14\" equals '\n'\"4*0.7 +\\n'\n'0.34\".) The modulo operator always yields a result with the same '\n'sign\\n'\n'as its second operand (or zero); the absolute value of the result '\n'is\\n'\n'strictly smaller than the absolute value of the second operand '\n'[1].\\n'\n'\\n'\n'The floor division and modulo operators are connected by the '\n'following\\n'\n'identity: \"x == (x//y)*y + (x%y)\". Floor division and modulo are '\n'also\\n'\n'connected with the built-in function \"divmod()\": \"divmod(x, y) ==\\n'\n'(x//y, x%y)\". [2].\\n'\n'\\n'\n'In addition to performing the modulo operation on numbers, the '\n'\"%\"\\n'\n'operator is also overloaded by string objects to perform '\n'old-style\\n'\n'string formatting (also known as interpolation). The syntax for\\n'\n'string formatting is described in the Python Library Reference,\\n'\n'section printf-style String Formatting.\\n'\n'\\n'\n'The *modulo* operation can be customized using the special '\n'\"__mod__()\"\\n'\n'method.\\n'\n'\\n'\n'The floor division operator, the modulo operator, and the '\n'\"divmod()\"\\n'\n'function are not defined for complex numbers. Instead, convert to '\n'a\\n'\n'floating point number using the \"abs()\" function if appropriate.\\n'\n'\\n'\n'The \"+\" (addition) operator yields the sum of its arguments. The\\n'\n'arguments must either both be numbers or both be sequences of the '\n'same\\n'\n'type. In the former case, the numbers are converted to a common '\n'type\\n'\n'and then added together. In the latter case, the sequences are\\n'\n'concatenated.\\n'\n'\\n'\n'This operation can be customized using the special \"__add__()\" '\n'and\\n'\n'\"__radd__()\" methods.\\n'\n'\\n'\n'The \"-\" (subtraction) operator yields the difference of its '\n'arguments.\\n'\n'The numeric arguments are first converted to a common type.\\n'\n'\\n'\n'This operation can be customized using the special \"__sub__()\" '\n'method.\\n',\n'bitwise':'Binary bitwise operations\\n'\n'*************************\\n'\n'\\n'\n'Each of the three bitwise operations has a different priority '\n'level:\\n'\n'\\n'\n' and_expr ::= shift_expr | and_expr \"&\" shift_expr\\n'\n' xor_expr ::= and_expr | xor_expr \"^\" and_expr\\n'\n' or_expr ::= xor_expr | or_expr \"|\" xor_expr\\n'\n'\\n'\n'The \"&\" operator yields the bitwise AND of its arguments, which '\n'must\\n'\n'be integers or one of them must be a custom object overriding\\n'\n'\"__and__()\" or \"__rand__()\" special methods.\\n'\n'\\n'\n'The \"^\" operator yields the bitwise XOR (exclusive OR) of its\\n'\n'arguments, which must be integers or one of them must be a '\n'custom\\n'\n'object overriding \"__xor__()\" or \"__rxor__()\" special methods.\\n'\n'\\n'\n'The \"|\" operator yields the bitwise (inclusive) OR of its '\n'arguments,\\n'\n'which must be integers or one of them must be a custom object\\n'\n'overriding \"__or__()\" or \"__ror__()\" special methods.\\n',\n'bltin-code-objects':'Code Objects\\n'\n'************\\n'\n'\\n'\n'Code objects are used by the implementation to '\n'represent \u201cpseudo-\\n'\n'compiled\u201d executable Python code such as a function '\n'body. They differ\\n'\n'from function objects because they don\u2019t contain a '\n'reference to their\\n'\n'global execution environment. 
Code objects are '\n'returned by the built-\\n'\n'in \"compile()\" function and can be extracted from '\n'function objects\\n'\n'through their \"__code__\" attribute. See also the '\n'\"code\" module.\\n'\n'\\n'\n'Accessing \"__code__\" raises an auditing event '\n'\"object.__getattr__\"\\n'\n'with arguments \"obj\" and \"\"__code__\"\".\\n'\n'\\n'\n'A code object can be executed or evaluated by passing '\n'it (instead of a\\n'\n'source string) to the \"exec()\" or \"eval()\" built-in '\n'functions.\\n'\n'\\n'\n'See The standard type hierarchy for more '\n'information.\\n',\n'bltin-ellipsis-object':'The Ellipsis Object\\n'\n'*******************\\n'\n'\\n'\n'This object is commonly used by slicing (see '\n'Slicings). It supports\\n'\n'no special operations. There is exactly one '\n'ellipsis object, named\\n'\n'\"Ellipsis\" (a built-in name). \"type(Ellipsis)()\" '\n'produces the\\n'\n'\"Ellipsis\" singleton.\\n'\n'\\n'\n'It is written as \"Ellipsis\" or \"...\".\\n',\n'bltin-null-object':'The Null Object\\n'\n'***************\\n'\n'\\n'\n'This object is returned by functions that don\u2019t '\n'explicitly return a\\n'\n'value. It supports no special operations. There is '\n'exactly one null\\n'\n'object, named \"None\" (a built-in name). \"type(None)()\" '\n'produces the\\n'\n'same singleton.\\n'\n'\\n'\n'It is written as \"None\".\\n',\n'bltin-type-objects':'Type Objects\\n'\n'************\\n'\n'\\n'\n'Type objects represent the various object types. An '\n'object\u2019s type is\\n'\n'accessed by the built-in function \"type()\". There are '\n'no special\\n'\n'operations on types. The standard module \"types\" '\n'defines names for\\n'\n'all standard built-in types.\\n'\n'\\n'\n'Types are written like this: \"\".\\n',\n'booleans':'Boolean operations\\n'\n'******************\\n'\n'\\n'\n' or_test ::= and_test | or_test \"or\" and_test\\n'\n' and_test ::= not_test | and_test \"and\" not_test\\n'\n' not_test ::= comparison | \"not\" not_test\\n'\n'\\n'\n'In the context of Boolean operations, and also when expressions '\n'are\\n'\n'used by control flow statements, the following values are '\n'interpreted\\n'\n'as false: \"False\", \"None\", numeric zero of all types, and empty\\n'\n'strings and containers (including strings, tuples, lists,\\n'\n'dictionaries, sets and frozensets). All other values are '\n'interpreted\\n'\n'as true. User-defined objects can customize their truth value '\n'by\\n'\n'providing a \"__bool__()\" method.\\n'\n'\\n'\n'The operator \"not\" yields \"True\" if its argument is false, '\n'\"False\"\\n'\n'otherwise.\\n'\n'\\n'\n'The expression \"x and y\" first evaluates *x*; if *x* is false, '\n'its\\n'\n'value is returned; otherwise, *y* is evaluated and the resulting '\n'value\\n'\n'is returned.\\n'\n'\\n'\n'The expression \"x or y\" first evaluates *x*; if *x* is true, its '\n'value\\n'\n'is returned; otherwise, *y* is evaluated and the resulting value '\n'is\\n'\n'returned.\\n'\n'\\n'\n'Note that neither \"and\" nor \"or\" restrict the value and type '\n'they\\n'\n'return to \"False\" and \"True\", but rather return the last '\n'evaluated\\n'\n'argument. This is sometimes useful, e.g., if \"s\" is a string '\n'that\\n'\n'should be replaced by a default value if it is empty, the '\n'expression\\n'\n'\"s or \\'foo\\'\" yields the desired value. 
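A short illustration of "and"/"or" returning the last evaluated operand rather than a strict boolean, as described above:

    s = ""
    print(s or "foo")      # foo -- s is false, so the right operand is evaluated and returned
    print("abc" or 42)     # abc -- the left operand is true, so 42 is never evaluated
    print("abc" and 42)    # 42  -- the left operand is true, so the right operand is returned
    print(0 and 1 / 0)     # 0   -- the right operand is never evaluated, so no ZeroDivisionError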
Because \"not\" has to '\n'create a\\n'\n'new value, it returns a boolean value regardless of the type of '\n'its\\n'\n'argument (for example, \"not \\'foo\\'\" produces \"False\" rather '\n'than \"\\'\\'\".)\\n',\n'break':'The \"break\" statement\\n'\n'*********************\\n'\n'\\n'\n' break_stmt ::= \"break\"\\n'\n'\\n'\n'\"break\" may only occur syntactically nested in a \"for\" or \"while\"\\n'\n'loop, but not nested in a function or class definition within that\\n'\n'loop.\\n'\n'\\n'\n'It terminates the nearest enclosing loop, skipping the optional '\n'\"else\"\\n'\n'clause if the loop has one.\\n'\n'\\n'\n'If a \"for\" loop is terminated by \"break\", the loop control target\\n'\n'keeps its current value.\\n'\n'\\n'\n'When \"break\" passes control out of a \"try\" statement with a '\n'\"finally\"\\n'\n'clause, that \"finally\" clause is executed before really leaving '\n'the\\n'\n'loop.\\n',\n'callable-types':'Emulating callable objects\\n'\n'**************************\\n'\n'\\n'\n'object.__call__(self[, args...])\\n'\n'\\n'\n' Called when the instance is \u201ccalled\u201d as a function; if '\n'this method\\n'\n' is defined, \"x(arg1, arg2, ...)\" roughly translates to\\n'\n' \"type(x).__call__(x, arg1, ...)\".\\n',\n'calls':'Calls\\n'\n'*****\\n'\n'\\n'\n'A call calls a callable object (e.g., a *function*) with a '\n'possibly\\n'\n'empty series of *arguments*:\\n'\n'\\n'\n' call ::= primary \"(\" [argument_list [\",\"] | '\n'comprehension] \")\"\\n'\n' argument_list ::= positional_arguments [\",\" '\n'starred_and_keywords]\\n'\n' [\",\" keywords_arguments]\\n'\n' | starred_and_keywords [\",\" '\n'keywords_arguments]\\n'\n' | keywords_arguments\\n'\n' positional_arguments ::= positional_item (\",\" positional_item)*\\n'\n' positional_item ::= assignment_expression | \"*\" expression\\n'\n' starred_and_keywords ::= (\"*\" expression | keyword_item)\\n'\n' (\",\" \"*\" expression | \",\" '\n'keyword_item)*\\n'\n' keywords_arguments ::= (keyword_item | \"**\" expression)\\n'\n' (\",\" keyword_item | \",\" \"**\" '\n'expression)*\\n'\n' keyword_item ::= identifier \"=\" expression\\n'\n'\\n'\n'An optional trailing comma may be present after the positional and\\n'\n'keyword arguments but does not affect the semantics.\\n'\n'\\n'\n'The primary must evaluate to a callable object (user-defined\\n'\n'functions, built-in functions, methods of built-in objects, class\\n'\n'objects, methods of class instances, and all objects having a\\n'\n'\"__call__()\" method are callable). All argument expressions are\\n'\n'evaluated before the call is attempted. Please refer to section\\n'\n'Function definitions for the syntax of formal *parameter* lists.\\n'\n'\\n'\n'If keyword arguments are present, they are first converted to\\n'\n'positional arguments, as follows. First, a list of unfilled slots '\n'is\\n'\n'created for the formal parameters. If there are N positional\\n'\n'arguments, they are placed in the first N slots. Next, for each\\n'\n'keyword argument, the identifier is used to determine the\\n'\n'corresponding slot (if the identifier is the same as the first '\n'formal\\n'\n'parameter name, the first slot is used, and so on). If the slot '\n'is\\n'\n'already filled, a \"TypeError\" exception is raised. Otherwise, the\\n'\n'value of the argument is placed in the slot, filling it (even if '\n'the\\n'\n'expression is \"None\", it fills the slot). 
When all arguments have\\n'\n'been processed, the slots that are still unfilled are filled with '\n'the\\n'\n'corresponding default value from the function definition. '\n'(Default\\n'\n'values are calculated, once, when the function is defined; thus, a\\n'\n'mutable object such as a list or dictionary used as default value '\n'will\\n'\n'be shared by all calls that don\u2019t specify an argument value for '\n'the\\n'\n'corresponding slot; this should usually be avoided.) If there are '\n'any\\n'\n'unfilled slots for which no default value is specified, a '\n'\"TypeError\"\\n'\n'exception is raised. Otherwise, the list of filled slots is used '\n'as\\n'\n'the argument list for the call.\\n'\n'\\n'\n'**CPython implementation detail:** An implementation may provide\\n'\n'built-in functions whose positional parameters do not have names, '\n'even\\n'\n'if they are \u2018named\u2019 for the purpose of documentation, and which\\n'\n'therefore cannot be supplied by keyword. In CPython, this is the '\n'case\\n'\n'for functions implemented in C that use \"PyArg_ParseTuple()\" to '\n'parse\\n'\n'their arguments.\\n'\n'\\n'\n'If there are more positional arguments than there are formal '\n'parameter\\n'\n'slots, a \"TypeError\" exception is raised, unless a formal '\n'parameter\\n'\n'using the syntax \"*identifier\" is present; in this case, that '\n'formal\\n'\n'parameter receives a tuple containing the excess positional '\n'arguments\\n'\n'(or an empty tuple if there were no excess positional arguments).\\n'\n'\\n'\n'If any keyword argument does not correspond to a formal parameter\\n'\n'name, a \"TypeError\" exception is raised, unless a formal parameter\\n'\n'using the syntax \"**identifier\" is present; in this case, that '\n'formal\\n'\n'parameter receives a dictionary containing the excess keyword\\n'\n'arguments (using the keywords as keys and the argument values as\\n'\n'corresponding values), or a (new) empty dictionary if there were '\n'no\\n'\n'excess keyword arguments.\\n'\n'\\n'\n'If the syntax \"*expression\" appears in the function call, '\n'\"expression\"\\n'\n'must evaluate to an *iterable*. Elements from these iterables are\\n'\n'treated as if they were additional positional arguments. For the '\n'call\\n'\n'\"f(x1, x2, *y, x3, x4)\", if *y* evaluates to a sequence *y1*, \u2026, '\n'*yM*,\\n'\n'this is equivalent to a call with M+4 positional arguments *x1*, '\n'*x2*,\\n'\n'*y1*, \u2026, *yM*, *x3*, *x4*.\\n'\n'\\n'\n'A consequence of this is that although the \"*expression\" syntax '\n'may\\n'\n'appear *after* explicit keyword arguments, it is processed '\n'*before*\\n'\n'the keyword arguments (and any \"**expression\" arguments \u2013 see '\n'below).\\n'\n'So:\\n'\n'\\n'\n' >>> def f(a, b):\\n'\n' ... print(a, b)\\n'\n' ...\\n'\n' >>> f(b=1, *(2,))\\n'\n' 2 1\\n'\n' >>> f(a=1, *(2,))\\n'\n' Traceback (most recent call last):\\n'\n' File \"\", line 1, in \\n'\n\" TypeError: f() got multiple values for keyword argument 'a'\\n\"\n' >>> f(1, *(2,))\\n'\n' 1 2\\n'\n'\\n'\n'It is unusual for both keyword arguments and the \"*expression\" '\n'syntax\\n'\n'to be used in the same call, so in practice this confusion does '\n'not\\n'\n'arise.\\n'\n'\\n'\n'If the syntax \"**expression\" appears in the function call,\\n'\n'\"expression\" must evaluate to a *mapping*, the contents of which '\n'are\\n'\n'treated as additional keyword arguments. 
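A minimal sketch of iterable ("*") and mapping ("**") unpacking in a call, as described above; f is an illustrative function:

    def f(a, b, c):
        return (a, b, c)

    print(f(*[1, 2], **{"c": 3}))    # (1, 2, 3)
    print(f(1, *(2,), c=3))          # (1, 2, 3) -- * unpacking mixes with keyword arguments
    # f(1, b=2, **{"b": 3})          # TypeError: got multiple values for keyword argument 'b'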
If a keyword is already\\n'\n'present (as an explicit keyword argument, or from another '\n'unpacking),\\n'\n'a \"TypeError\" exception is raised.\\n'\n'\\n'\n'Formal parameters using the syntax \"*identifier\" or \"**identifier\"\\n'\n'cannot be used as positional argument slots or as keyword argument\\n'\n'names.\\n'\n'\\n'\n'Changed in version 3.5: Function calls accept any number of \"*\" '\n'and\\n'\n'\"**\" unpackings, positional arguments may follow iterable '\n'unpackings\\n'\n'(\"*\"), and keyword arguments may follow dictionary unpackings '\n'(\"**\").\\n'\n'Originally proposed by **PEP 448**.\\n'\n'\\n'\n'A call always returns some value, possibly \"None\", unless it raises '\n'an\\n'\n'exception. How this value is computed depends on the type of the\\n'\n'callable object.\\n'\n'\\n'\n'If it is\u2014\\n'\n'\\n'\n'a user-defined function:\\n'\n' The code block for the function is executed, passing it the\\n'\n' argument list. The first thing the code block will do is bind '\n'the\\n'\n' formal parameters to the arguments; this is described in '\n'section\\n'\n' Function definitions. When the code block executes a \"return\"\\n'\n' statement, this specifies the return value of the function '\n'call.\\n'\n'\\n'\n'a built-in function or method:\\n'\n' The result is up to the interpreter; see Built-in Functions for '\n'the\\n'\n' descriptions of built-in functions and methods.\\n'\n'\\n'\n'a class object:\\n'\n' A new instance of that class is returned.\\n'\n'\\n'\n'a class instance method:\\n'\n' The corresponding user-defined function is called, with an '\n'argument\\n'\n' list that is one longer than the argument list of the call: the\\n'\n' instance becomes the first argument.\\n'\n'\\n'\n'a class instance:\\n'\n' The class must define a \"__call__()\" method; the effect is then '\n'the\\n'\n' same as if that method was called.\\n',\n'class':'Class definitions\\n'\n'*****************\\n'\n'\\n'\n'A class definition defines a class object (see section The '\n'standard\\n'\n'type hierarchy):\\n'\n'\\n'\n' classdef ::= [decorators] \"class\" classname [inheritance] \":\" '\n'suite\\n'\n' inheritance ::= \"(\" [argument_list] \")\"\\n'\n' classname ::= identifier\\n'\n'\\n'\n'A class definition is an executable statement. The inheritance '\n'list\\n'\n'usually gives a list of base classes (see Metaclasses for more\\n'\n'advanced uses), so each item in the list should evaluate to a '\n'class\\n'\n'object which allows subclassing. Classes without an inheritance '\n'list\\n'\n'inherit, by default, from the base class \"object\"; hence,\\n'\n'\\n'\n' class Foo:\\n'\n' pass\\n'\n'\\n'\n'is equivalent to\\n'\n'\\n'\n' class Foo(object):\\n'\n' pass\\n'\n'\\n'\n'The class\u2019s suite is then executed in a new execution frame (see\\n'\n'Naming and binding), using a newly created local namespace and the\\n'\n'original global namespace. (Usually, the suite contains mostly\\n'\n'function definitions.) When the class\u2019s suite finishes execution, '\n'its\\n'\n'execution frame is discarded but its local namespace is saved. [5] '\n'A\\n'\n'class object is then created using the inheritance list for the '\n'base\\n'\n'classes and the saved local namespace for the attribute '\n'dictionary.\\n'\n'The class name is bound to this class object in the original local\\n'\n'namespace.\\n'\n'\\n'\n'The order in which attributes are defined in the class body is\\n'\n'preserved in the new class\u2019s \"__dict__\". 
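A small check of the definition-order guarantee mentioned above, ignoring the implicit dunder entries the class machinery adds; Shape is an illustrative name:

    class Shape:
        sides = 0
        name = "generic"

        def area(self):
            return 0

    print([k for k in Shape.__dict__ if not k.startswith("__")])
    # ['sides', 'name', 'area'] -- same order as in the class body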
Note that this is '\n'reliable\\n'\n'only right after the class is created and only for classes that '\n'were\\n'\n'defined using the definition syntax.\\n'\n'\\n'\n'Class creation can be customized heavily using metaclasses.\\n'\n'\\n'\n'Classes can also be decorated: just like when decorating '\n'functions,\\n'\n'\\n'\n' @f1(arg)\\n'\n' @f2\\n'\n' class Foo: pass\\n'\n'\\n'\n'is roughly equivalent to\\n'\n'\\n'\n' class Foo: pass\\n'\n' Foo = f1(arg)(f2(Foo))\\n'\n'\\n'\n'The evaluation rules for the decorator expressions are the same as '\n'for\\n'\n'function decorators. The result is then bound to the class name.\\n'\n'\\n'\n'Changed in version 3.9: Classes may be decorated with any valid\\n'\n'\"assignment_expression\". Previously, the grammar was much more\\n'\n'restrictive; see **PEP 614** for details.\\n'\n'\\n'\n'**Programmer\u2019s note:** Variables defined in the class definition '\n'are\\n'\n'class attributes; they are shared by instances. Instance '\n'attributes\\n'\n'can be set in a method with \"self.name = value\". Both class and\\n'\n'instance attributes are accessible through the notation '\n'\u201c\"self.name\"\u201d,\\n'\n'and an instance attribute hides a class attribute with the same '\n'name\\n'\n'when accessed in this way. Class attributes can be used as '\n'defaults\\n'\n'for instance attributes, but using mutable values there can lead '\n'to\\n'\n'unexpected results. Descriptors can be used to create instance\\n'\n'variables with different implementation details.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 3115** - Metaclasses in Python 3000\\n'\n' The proposal that changed the declaration of metaclasses to '\n'the\\n'\n' current syntax, and the semantics for how classes with\\n'\n' metaclasses are constructed.\\n'\n'\\n'\n' **PEP 3129** - Class Decorators\\n'\n' The proposal that added class decorators. Function and '\n'method\\n'\n' decorators were introduced in **PEP 318**.\\n',\n'comparisons':'Comparisons\\n'\n'***********\\n'\n'\\n'\n'Unlike C, all comparison operations in Python have the same '\n'priority,\\n'\n'which is lower than that of any arithmetic, shifting or '\n'bitwise\\n'\n'operation. Also unlike C, expressions like \"a < b < c\" have '\n'the\\n'\n'interpretation that is conventional in mathematics:\\n'\n'\\n'\n' comparison ::= or_expr (comp_operator or_expr)*\\n'\n' comp_operator ::= \"<\" | \">\" | \"==\" | \">=\" | \"<=\" | \"!=\"\\n'\n' | \"is\" [\"not\"] | [\"not\"] \"in\"\\n'\n'\\n'\n'Comparisons yield boolean values: \"True\" or \"False\". Custom '\n'*rich\\n'\n'comparison methods* may return non-boolean values. In this '\n'case Python\\n'\n'will call \"bool()\" on such value in boolean contexts.\\n'\n'\\n'\n'Comparisons can be chained arbitrarily, e.g., \"x < y <= z\" '\n'is\\n'\n'equivalent to \"x < y and y <= z\", except that \"y\" is '\n'evaluated only\\n'\n'once (but in both cases \"z\" is not evaluated at all when \"x < '\n'y\" is\\n'\n'found to be false).\\n'\n'\\n'\n'Formally, if *a*, *b*, *c*, \u2026, *y*, *z* are expressions and '\n'*op1*,\\n'\n'*op2*, \u2026, *opN* are comparison operators, then \"a op1 b op2 c '\n'... y\\n'\n'opN z\" is equivalent to \"a op1 b and b op2 c and ... 
y opN '\n'z\", except\\n'\n'that each expression is evaluated at most once.\\n'\n'\\n'\n'Note that \"a op1 b op2 c\" doesn\u2019t imply any kind of '\n'comparison between\\n'\n'*a* and *c*, so that, e.g., \"x < y > z\" is perfectly legal '\n'(though\\n'\n'perhaps not pretty).\\n'\n'\\n'\n'\\n'\n'Value comparisons\\n'\n'=================\\n'\n'\\n'\n'The operators \"<\", \">\", \"==\", \">=\", \"<=\", and \"!=\" compare '\n'the values\\n'\n'of two objects. The objects do not need to have the same '\n'type.\\n'\n'\\n'\n'Chapter Objects, values and types states that objects have a '\n'value (in\\n'\n'addition to type and identity). The value of an object is a '\n'rather\\n'\n'abstract notion in Python: For example, there is no canonical '\n'access\\n'\n'method for an object\u2019s value. Also, there is no requirement '\n'that the\\n'\n'value of an object should be constructed in a particular way, '\n'e.g.\\n'\n'comprised of all its data attributes. Comparison operators '\n'implement a\\n'\n'particular notion of what the value of an object is. One can '\n'think of\\n'\n'them as defining the value of an object indirectly, by means '\n'of their\\n'\n'comparison implementation.\\n'\n'\\n'\n'Because all types are (direct or indirect) subtypes of '\n'\"object\", they\\n'\n'inherit the default comparison behavior from \"object\". Types '\n'can\\n'\n'customize their comparison behavior by implementing *rich '\n'comparison\\n'\n'methods* like \"__lt__()\", described in Basic customization.\\n'\n'\\n'\n'The default behavior for equality comparison (\"==\" and \"!=\") '\n'is based\\n'\n'on the identity of the objects. Hence, equality comparison '\n'of\\n'\n'instances with the same identity results in equality, and '\n'equality\\n'\n'comparison of instances with different identities results in\\n'\n'inequality. A motivation for this default behavior is the '\n'desire that\\n'\n'all objects should be reflexive (i.e. \"x is y\" implies \"x == '\n'y\").\\n'\n'\\n'\n'A default order comparison (\"<\", \">\", \"<=\", and \">=\") is not '\n'provided;\\n'\n'an attempt raises \"TypeError\". A motivation for this default '\n'behavior\\n'\n'is the lack of a similar invariant as for equality.\\n'\n'\\n'\n'The behavior of the default equality comparison, that '\n'instances with\\n'\n'different identities are always unequal, may be in contrast '\n'to what\\n'\n'types will need that have a sensible definition of object '\n'value and\\n'\n'value-based equality. Such types will need to customize '\n'their\\n'\n'comparison behavior, and in fact, a number of built-in types '\n'have done\\n'\n'that.\\n'\n'\\n'\n'The following list describes the comparison behavior of the '\n'most\\n'\n'important built-in types.\\n'\n'\\n'\n'* Numbers of built-in numeric types (Numeric Types \u2014 int, '\n'float,\\n'\n' complex) and of the standard library types '\n'\"fractions.Fraction\" and\\n'\n' \"decimal.Decimal\" can be compared within and across their '\n'types,\\n'\n' with the restriction that complex numbers do not support '\n'order\\n'\n' comparison. Within the limits of the types involved, they '\n'compare\\n'\n' mathematically (algorithmically) correct without loss of '\n'precision.\\n'\n'\\n'\n' The not-a-number values \"float(\\'NaN\\')\" and '\n'\"decimal.Decimal(\\'NaN\\')\"\\n'\n' are special. Any ordered comparison of a number to a '\n'not-a-number\\n'\n' value is false. A counter-intuitive implication is that '\n'not-a-number\\n'\n' values are not equal to themselves. 
For example, if \"x =\\n'\n' float(\\'NaN\\')\", \"3 < x\", \"x < 3\" and \"x == x\" are all '\n'false, while \"x\\n'\n' != x\" is true. This behavior is compliant with IEEE 754.\\n'\n'\\n'\n'* \"None\" and \"NotImplemented\" are singletons. **PEP 8** '\n'advises that\\n'\n' comparisons for singletons should always be done with \"is\" '\n'or \"is\\n'\n' not\", never the equality operators.\\n'\n'\\n'\n'* Binary sequences (instances of \"bytes\" or \"bytearray\") can '\n'be\\n'\n' compared within and across their types. They compare\\n'\n' lexicographically using the numeric values of their '\n'elements.\\n'\n'\\n'\n'* Strings (instances of \"str\") compare lexicographically '\n'using the\\n'\n' numerical Unicode code points (the result of the built-in '\n'function\\n'\n' \"ord()\") of their characters. [3]\\n'\n'\\n'\n' Strings and binary sequences cannot be directly compared.\\n'\n'\\n'\n'* Sequences (instances of \"tuple\", \"list\", or \"range\") can be '\n'compared\\n'\n' only within each of their types, with the restriction that '\n'ranges do\\n'\n' not support order comparison. Equality comparison across '\n'these\\n'\n' types results in inequality, and ordering comparison across '\n'these\\n'\n' types raises \"TypeError\".\\n'\n'\\n'\n' Sequences compare lexicographically using comparison of\\n'\n' corresponding elements. The built-in containers typically '\n'assume\\n'\n' identical objects are equal to themselves. That lets them '\n'bypass\\n'\n' equality tests for identical objects to improve performance '\n'and to\\n'\n' maintain their internal invariants.\\n'\n'\\n'\n' Lexicographical comparison between built-in collections '\n'works as\\n'\n' follows:\\n'\n'\\n'\n' * For two collections to compare equal, they must be of the '\n'same\\n'\n' type, have the same length, and each pair of '\n'corresponding\\n'\n' elements must compare equal (for example, \"[1,2] == '\n'(1,2)\" is\\n'\n' false because the type is not the same).\\n'\n'\\n'\n' * Collections that support order comparison are ordered the '\n'same as\\n'\n' their first unequal elements (for example, \"[1,2,x] <= '\n'[1,2,y]\"\\n'\n' has the same value as \"x <= y\"). If a corresponding '\n'element does\\n'\n' not exist, the shorter collection is ordered first (for '\n'example,\\n'\n' \"[1,2] < [1,2,3]\" is true).\\n'\n'\\n'\n'* Mappings (instances of \"dict\") compare equal if and only if '\n'they\\n'\n' have equal *(key, value)* pairs. Equality comparison of the '\n'keys and\\n'\n' values enforces reflexivity.\\n'\n'\\n'\n' Order comparisons (\"<\", \">\", \"<=\", and \">=\") raise '\n'\"TypeError\".\\n'\n'\\n'\n'* Sets (instances of \"set\" or \"frozenset\") can be compared '\n'within and\\n'\n' across their types.\\n'\n'\\n'\n' They define order comparison operators to mean subset and '\n'superset\\n'\n' tests. Those relations do not define total orderings (for '\n'example,\\n'\n' the two sets \"{1,2}\" and \"{2,3}\" are not equal, nor subsets '\n'of one\\n'\n' another, nor supersets of one another). 
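A concrete illustration of the subset/superset ordering of sets described above:

    a, b = {1, 2}, {2, 3}
    print(a == b, a < b, a > b)    # False False False -- neither is a subset of the other
    print({1, 2} <= {1, 2, 3})     # True -- subset test
    print({1, 2, 3} >= {3})        # True -- superset test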
Accordingly, sets '\n'are not\\n'\n' appropriate arguments for functions which depend on total '\n'ordering\\n'\n' (for example, \"min()\", \"max()\", and \"sorted()\" produce '\n'undefined\\n'\n' results given a list of sets as inputs).\\n'\n'\\n'\n' Comparison of sets enforces reflexivity of its elements.\\n'\n'\\n'\n'* Most other built-in types have no comparison methods '\n'implemented, so\\n'\n' they inherit the default comparison behavior.\\n'\n'\\n'\n'User-defined classes that customize their comparison behavior '\n'should\\n'\n'follow some consistency rules, if possible:\\n'\n'\\n'\n'* Equality comparison should be reflexive. In other words, '\n'identical\\n'\n' objects should compare equal:\\n'\n'\\n'\n' \"x is y\" implies \"x == y\"\\n'\n'\\n'\n'* Comparison should be symmetric. In other words, the '\n'following\\n'\n' expressions should have the same result:\\n'\n'\\n'\n' \"x == y\" and \"y == x\"\\n'\n'\\n'\n' \"x != y\" and \"y != x\"\\n'\n'\\n'\n' \"x < y\" and \"y > x\"\\n'\n'\\n'\n' \"x <= y\" and \"y >= x\"\\n'\n'\\n'\n'* Comparison should be transitive. The following '\n'(non-exhaustive)\\n'\n' examples illustrate that:\\n'\n'\\n'\n' \"x > y and y > z\" implies \"x > z\"\\n'\n'\\n'\n' \"x < y and y <= z\" implies \"x < z\"\\n'\n'\\n'\n'* Inverse comparison should result in the boolean negation. '\n'In other\\n'\n' words, the following expressions should have the same '\n'result:\\n'\n'\\n'\n' \"x == y\" and \"not x != y\"\\n'\n'\\n'\n' \"x < y\" and \"not x >= y\" (for total ordering)\\n'\n'\\n'\n' \"x > y\" and \"not x <= y\" (for total ordering)\\n'\n'\\n'\n' The last two expressions apply to totally ordered '\n'collections (e.g.\\n'\n' to sequences, but not to sets or mappings). See also the\\n'\n' \"total_ordering()\" decorator.\\n'\n'\\n'\n'* The \"hash()\" result should be consistent with equality. '\n'Objects that\\n'\n' are equal should either have the same hash value, or be '\n'marked as\\n'\n' unhashable.\\n'\n'\\n'\n'Python does not enforce these consistency rules. In fact, '\n'the\\n'\n'not-a-number values are an example for not following these '\n'rules.\\n'\n'\\n'\n'\\n'\n'Membership test operations\\n'\n'==========================\\n'\n'\\n'\n'The operators \"in\" and \"not in\" test for membership. \"x in '\n's\"\\n'\n'evaluates to \"True\" if *x* is a member of *s*, and \"False\" '\n'otherwise.\\n'\n'\"x not in s\" returns the negation of \"x in s\". All built-in '\n'sequences\\n'\n'and set types support this as well as dictionary, for which '\n'\"in\" tests\\n'\n'whether the dictionary has a given key. For container types '\n'such as\\n'\n'list, tuple, set, frozenset, dict, or collections.deque, the\\n'\n'expression \"x in y\" is equivalent to \"any(x is e or x == e '\n'for e in\\n'\n'y)\".\\n'\n'\\n'\n'For the string and bytes types, \"x in y\" is \"True\" if and '\n'only if *x*\\n'\n'is a substring of *y*. 
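A few membership-test examples matching the rules above:

    print(3 in [1, 2, 3])          # True
    print("lo" in "hello")         # True -- substring test for strings
    print(b"ab" in b"cab")         # True -- same substring rule for bytes
    print("key" in {"key": 1})     # True -- for dicts, "in" tests the keys
    print(4 not in range(4))       # True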
An equivalent test is \"y.find(x) != '\n'-1\".\\n'\n'Empty strings are always considered to be a substring of any '\n'other\\n'\n'string, so \"\"\" in \"abc\"\" will return \"True\".\\n'\n'\\n'\n'For user-defined classes which define the \"__contains__()\" '\n'method, \"x\\n'\n'in y\" returns \"True\" if \"y.__contains__(x)\" returns a true '\n'value, and\\n'\n'\"False\" otherwise.\\n'\n'\\n'\n'For user-defined classes which do not define \"__contains__()\" '\n'but do\\n'\n'define \"__iter__()\", \"x in y\" is \"True\" if some value \"z\", '\n'for which\\n'\n'the expression \"x is z or x == z\" is true, is produced while '\n'iterating\\n'\n'over \"y\". If an exception is raised during the iteration, it '\n'is as if\\n'\n'\"in\" raised that exception.\\n'\n'\\n'\n'Lastly, the old-style iteration protocol is tried: if a class '\n'defines\\n'\n'\"__getitem__()\", \"x in y\" is \"True\" if and only if there is a '\n'non-\\n'\n'negative integer index *i* such that \"x is y[i] or x == '\n'y[i]\", and no\\n'\n'lower integer index raises the \"IndexError\" exception. (If '\n'any other\\n'\n'exception is raised, it is as if \"in\" raised that '\n'exception).\\n'\n'\\n'\n'The operator \"not in\" is defined to have the inverse truth '\n'value of\\n'\n'\"in\".\\n'\n'\\n'\n'\\n'\n'Identity comparisons\\n'\n'====================\\n'\n'\\n'\n'The operators \"is\" and \"is not\" test for an object\u2019s '\n'identity: \"x is\\n'\n'y\" is true if and only if *x* and *y* are the same object. '\n'An\\n'\n'Object\u2019s identity is determined using the \"id()\" function. '\n'\"x is not\\n'\n'y\" yields the inverse truth value. [4]\\n',\n'compound':'Compound statements\\n'\n'*******************\\n'\n'\\n'\n'Compound statements contain (groups of) other statements; they '\n'affect\\n'\n'or control the execution of those other statements in some way. '\n'In\\n'\n'general, compound statements span multiple lines, although in '\n'simple\\n'\n'incarnations a whole compound statement may be contained in one '\n'line.\\n'\n'\\n'\n'The \"if\", \"while\" and \"for\" statements implement traditional '\n'control\\n'\n'flow constructs. \"try\" specifies exception handlers and/or '\n'cleanup\\n'\n'code for a group of statements, while the \"with\" statement '\n'allows the\\n'\n'execution of initialization and finalization code around a block '\n'of\\n'\n'code. Function and class definitions are also syntactically '\n'compound\\n'\n'statements.\\n'\n'\\n'\n'A compound statement consists of one or more \u2018clauses.\u2019 A '\n'clause\\n'\n'consists of a header and a \u2018suite.\u2019 The clause headers of a\\n'\n'particular compound statement are all at the same indentation '\n'level.\\n'\n'Each clause header begins with a uniquely identifying keyword '\n'and ends\\n'\n'with a colon. A suite is a group of statements controlled by a\\n'\n'clause. A suite can be one or more semicolon-separated simple\\n'\n'statements on the same line as the header, following the '\n'header\u2019s\\n'\n'colon, or it can be one or more indented statements on '\n'subsequent\\n'\n'lines. 
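The lookup order for membership tests (`__contains__` first, then iteration) can be seen with two small illustrative classes; `Playlist` and `Countdown` are hypothetical names, not part of any library:

```python
class Playlist:
    """Membership answered directly via __contains__ (checked first)."""
    def __init__(self, *tracks):
        self._tracks = set(tracks)
    def __contains__(self, item):
        return item in self._tracks

class Countdown:
    """No __contains__: 'in' falls back to iterating __iter__."""
    def __init__(self, start):
        self.start = start
    def __iter__(self):
        return iter(range(self.start, 0, -1))

assert "intro" in Playlist("intro", "outro")
assert 3 in Countdown(5)          # found while iterating 5, 4, 3, ...
assert 9 not in Countdown(5)
```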
Only the latter form of a suite can contain nested '\n'compound\\n'\n'statements; the following is illegal, mostly because it wouldn\u2019t '\n'be\\n'\n'clear to which \"if\" clause a following \"else\" clause would '\n'belong:\\n'\n'\\n'\n' if test1: if test2: print(x)\\n'\n'\\n'\n'Also note that the semicolon binds tighter than the colon in '\n'this\\n'\n'context, so that in the following example, either all or none of '\n'the\\n'\n'\"print()\" calls are executed:\\n'\n'\\n'\n' if x < y < z: print(x); print(y); print(z)\\n'\n'\\n'\n'Summarizing:\\n'\n'\\n'\n' compound_stmt ::= if_stmt\\n'\n' | while_stmt\\n'\n' | for_stmt\\n'\n' | try_stmt\\n'\n' | with_stmt\\n'\n' | match_stmt\\n'\n' | funcdef\\n'\n' | classdef\\n'\n' | async_with_stmt\\n'\n' | async_for_stmt\\n'\n' | async_funcdef\\n'\n' suite ::= stmt_list NEWLINE | NEWLINE INDENT '\n'statement+ DEDENT\\n'\n' statement ::= stmt_list NEWLINE | compound_stmt\\n'\n' stmt_list ::= simple_stmt (\";\" simple_stmt)* [\";\"]\\n'\n'\\n'\n'Note that statements always end in a \"NEWLINE\" possibly followed '\n'by a\\n'\n'\"DEDENT\". Also note that optional continuation clauses always '\n'begin\\n'\n'with a keyword that cannot start a statement, thus there are no\\n'\n'ambiguities (the \u2018dangling \"else\"\u2019 problem is solved in Python '\n'by\\n'\n'requiring nested \"if\" statements to be indented).\\n'\n'\\n'\n'The formatting of the grammar rules in the following sections '\n'places\\n'\n'each clause on a separate line for clarity.\\n'\n'\\n'\n'\\n'\n'The \"if\" statement\\n'\n'==================\\n'\n'\\n'\n'The \"if\" statement is used for conditional execution:\\n'\n'\\n'\n' if_stmt ::= \"if\" assignment_expression \":\" suite\\n'\n' (\"elif\" assignment_expression \":\" suite)*\\n'\n' [\"else\" \":\" suite]\\n'\n'\\n'\n'It selects exactly one of the suites by evaluating the '\n'expressions one\\n'\n'by one until one is found to be true (see section Boolean '\n'operations\\n'\n'for the definition of true and false); then that suite is '\n'executed\\n'\n'(and no other part of the \"if\" statement is executed or '\n'evaluated).\\n'\n'If all expressions are false, the suite of the \"else\" clause, '\n'if\\n'\n'present, is executed.\\n'\n'\\n'\n'\\n'\n'The \"while\" statement\\n'\n'=====================\\n'\n'\\n'\n'The \"while\" statement is used for repeated execution as long as '\n'an\\n'\n'expression is true:\\n'\n'\\n'\n' while_stmt ::= \"while\" assignment_expression \":\" suite\\n'\n' [\"else\" \":\" suite]\\n'\n'\\n'\n'This repeatedly tests the expression and, if it is true, '\n'executes the\\n'\n'first suite; if the expression is false (which may be the first '\n'time\\n'\n'it is tested) the suite of the \"else\" clause, if present, is '\n'executed\\n'\n'and the loop terminates.\\n'\n'\\n'\n'A \"break\" statement executed in the first suite terminates the '\n'loop\\n'\n'without executing the \"else\" clause\u2019s suite. A \"continue\" '\n'statement\\n'\n'executed in the first suite skips the rest of the suite and goes '\n'back\\n'\n'to testing the expression.\\n'\n'\\n'\n'\\n'\n'The \"for\" statement\\n'\n'===================\\n'\n'\\n'\n'The \"for\" statement is used to iterate over the elements of a '\n'sequence\\n'\n'(such as a string, tuple or list) or other iterable object:\\n'\n'\\n'\n' for_stmt ::= \"for\" target_list \"in\" expression_list \":\" '\n'suite\\n'\n' [\"else\" \":\" suite]\\n'\n'\\n'\n'The expression list is evaluated once; it should yield an '\n'iterable\\n'\n'object. 
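A short sketch of the `while` loop with an `else` clause described above; the `else` suite runs only when the loop ends without `break`:

```python
def first_factor(n):
    """Return the smallest factor of n greater than 1, using while ... else."""
    candidate = 2
    while candidate * candidate <= n:
        if n % candidate == 0:
            break                  # leaves the loop, skipping the else clause
        candidate += 1
    else:
        return n                   # loop exhausted without break: n is prime
    return candidate

assert first_factor(15) == 3
assert first_factor(13) == 13
```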
An iterator is created for the result of the\\n'\n'\"expression_list\". The suite is then executed once for each '\n'item\\n'\n'provided by the iterator, in the order returned by the '\n'iterator. Each\\n'\n'item in turn is assigned to the target list using the standard '\n'rules\\n'\n'for assignments (see Assignment statements), and then the suite '\n'is\\n'\n'executed. When the items are exhausted (which is immediately '\n'when the\\n'\n'sequence is empty or an iterator raises a \"StopIteration\" '\n'exception),\\n'\n'the suite in the \"else\" clause, if present, is executed, and the '\n'loop\\n'\n'terminates.\\n'\n'\\n'\n'A \"break\" statement executed in the first suite terminates the '\n'loop\\n'\n'without executing the \"else\" clause\u2019s suite. A \"continue\" '\n'statement\\n'\n'executed in the first suite skips the rest of the suite and '\n'continues\\n'\n'with the next item, or with the \"else\" clause if there is no '\n'next\\n'\n'item.\\n'\n'\\n'\n'The for-loop makes assignments to the variables in the target '\n'list.\\n'\n'This overwrites all previous assignments to those variables '\n'including\\n'\n'those made in the suite of the for-loop:\\n'\n'\\n'\n' for i in range(10):\\n'\n' print(i)\\n'\n' i = 5 # this will not affect the for-loop\\n'\n' # because i will be overwritten with '\n'the next\\n'\n' # index in the range\\n'\n'\\n'\n'Names in the target list are not deleted when the loop is '\n'finished,\\n'\n'but if the sequence is empty, they will not have been assigned '\n'to at\\n'\n'all by the loop. Hint: the built-in function \"range()\" returns '\n'an\\n'\n'iterator of integers suitable to emulate the effect of Pascal\u2019s '\n'\"for i\\n'\n':= a to b do\"; e.g., \"list(range(3))\" returns the list \"[0, 1, '\n'2]\".\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' There is a subtlety when the sequence is being modified by the '\n'loop\\n'\n' (this can only occur for mutable sequences, e.g. lists). An\\n'\n' internal counter is used to keep track of which item is used '\n'next,\\n'\n' and this is incremented on each iteration. When this counter '\n'has\\n'\n' reached the length of the sequence the loop terminates. This '\n'means\\n'\n' that if the suite deletes the current (or a previous) item '\n'from the\\n'\n' sequence, the next item will be skipped (since it gets the '\n'index of\\n'\n' the current item which has already been treated). Likewise, '\n'if the\\n'\n' suite inserts an item in the sequence before the current item, '\n'the\\n'\n' current item will be treated again the next time through the '\n'loop.\\n'\n' This can lead to nasty bugs that can be avoided by making a\\n'\n' temporary copy using a slice of the whole sequence, e.g.,\\n'\n'\\n'\n' for x in a[:]:\\n'\n' if x < 0: a.remove(x)\\n'\n'\\n'\n'\\n'\n'The \"try\" statement\\n'\n'===================\\n'\n'\\n'\n'The \"try\" statement specifies exception handlers and/or cleanup '\n'code\\n'\n'for a group of statements:\\n'\n'\\n'\n' try_stmt ::= try1_stmt | try2_stmt\\n'\n' try1_stmt ::= \"try\" \":\" suite\\n'\n' (\"except\" [expression [\"as\" identifier]] \":\" '\n'suite)+\\n'\n' [\"else\" \":\" suite]\\n'\n' [\"finally\" \":\" suite]\\n'\n' try2_stmt ::= \"try\" \":\" suite\\n'\n' \"finally\" \":\" suite\\n'\n'\\n'\n'The \"except\" clause(s) specify one or more exception handlers. '\n'When no\\n'\n'exception occurs in the \"try\" clause, no exception handler is\\n'\n'executed. When an exception occurs in the \"try\" suite, a search '\n'for an\\n'\n'exception handler is started. 
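The `for ... else` behaviour described above follows the same rule as `while ... else`; a minimal illustration (the helper name `find` is arbitrary):

```python
def find(needle, haystack):
    """The else suite runs only when the loop was not left via break."""
    for index, value in enumerate(haystack):
        if value == needle:
            print(f"found at {index}")
            break
    else:
        print("not found")         # reached only on normal loop exhaustion

find(3, [1, 2, 3])                 # found at 2
find(9, [1, 2, 3])                 # not found
```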
This search inspects the except '\n'clauses\\n'\n'in turn until one is found that matches the exception. An '\n'expression-\\n'\n'less except clause, if present, must be last; it matches any\\n'\n'exception. For an except clause with an expression, that '\n'expression\\n'\n'is evaluated, and the clause matches the exception if the '\n'resulting\\n'\n'object is \u201ccompatible\u201d with the exception. An object is '\n'compatible\\n'\n'with an exception if it is the class or a base class of the '\n'exception\\n'\n'object, or a tuple containing an item that is the class or a '\n'base\\n'\n'class of the exception object.\\n'\n'\\n'\n'If no except clause matches the exception, the search for an '\n'exception\\n'\n'handler continues in the surrounding code and on the invocation '\n'stack.\\n'\n'[1]\\n'\n'\\n'\n'If the evaluation of an expression in the header of an except '\n'clause\\n'\n'raises an exception, the original search for a handler is '\n'canceled and\\n'\n'a search starts for the new exception in the surrounding code '\n'and on\\n'\n'the call stack (it is treated as if the entire \"try\" statement '\n'raised\\n'\n'the exception).\\n'\n'\\n'\n'When a matching except clause is found, the exception is '\n'assigned to\\n'\n'the target specified after the \"as\" keyword in that except '\n'clause, if\\n'\n'present, and the except clause\u2019s suite is executed. All except\\n'\n'clauses must have an executable block. When the end of this '\n'block is\\n'\n'reached, execution continues normally after the entire try '\n'statement.\\n'\n'(This means that if two nested handlers exist for the same '\n'exception,\\n'\n'and the exception occurs in the try clause of the inner handler, '\n'the\\n'\n'outer handler will not handle the exception.)\\n'\n'\\n'\n'When an exception has been assigned using \"as target\", it is '\n'cleared\\n'\n'at the end of the except clause. This is as if\\n'\n'\\n'\n' except E as N:\\n'\n' foo\\n'\n'\\n'\n'was translated to\\n'\n'\\n'\n' except E as N:\\n'\n' try:\\n'\n' foo\\n'\n' finally:\\n'\n' del N\\n'\n'\\n'\n'This means the exception must be assigned to a different name to '\n'be\\n'\n'able to refer to it after the except clause. Exceptions are '\n'cleared\\n'\n'because with the traceback attached to them, they form a '\n'reference\\n'\n'cycle with the stack frame, keeping all locals in that frame '\n'alive\\n'\n'until the next garbage collection occurs.\\n'\n'\\n'\n'Before an except clause\u2019s suite is executed, details about the\\n'\n'exception are stored in the \"sys\" module and can be accessed '\n'via\\n'\n'\"sys.exc_info()\". \"sys.exc_info()\" returns a 3-tuple consisting '\n'of the\\n'\n'exception class, the exception instance and a traceback object '\n'(see\\n'\n'section The standard type hierarchy) identifying the point in '\n'the\\n'\n'program where the exception occurred. The details about the '\n'exception\\n'\n'accessed via \"sys.exc_info()\" are restored to their previous '\n'values\\n'\n'when leaving an exception handler:\\n'\n'\\n'\n' >>> print(sys.exc_info())\\n'\n' (None, None, None)\\n'\n' >>> try:\\n'\n' ... raise TypeError\\n'\n' ... except:\\n'\n' ... print(sys.exc_info())\\n'\n' ... try:\\n'\n' ... raise ValueError\\n'\n' ... except:\\n'\n' ... print(sys.exc_info())\\n'\n' ... 
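Because the `as` target is implicitly deleted at the end of the except clause, the exception must be bound to another name to survive the handler. A small sketch, assuming nothing beyond the standard library:

```python
import sys

def handler_demo():
    try:
        raise ValueError("boom")
    except ValueError as exc:
        saved = exc                    # keep a reference under a different name
        print(sys.exc_info()[0])       # <class 'ValueError'> while handling
    # 'exc' is unbound here (the implicit 'del exc' at the end of the clause),
    # but 'saved' still refers to the exception object.
    return saved

print(repr(handler_demo()))            # ValueError('boom')
```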
print(sys.exc_info())\\n'\n' ...\\n'\n\" (, TypeError(), )\\n'\n\" (, ValueError(), )\\n'\n\" (, TypeError(), )\\n'\n' >>> print(sys.exc_info())\\n'\n' (None, None, None)\\n'\n'\\n'\n'The optional \"else\" clause is executed if the control flow '\n'leaves the\\n'\n'\"try\" suite, no exception was raised, and no \"return\", '\n'\"continue\", or\\n'\n'\"break\" statement was executed. Exceptions in the \"else\" clause '\n'are\\n'\n'not handled by the preceding \"except\" clauses.\\n'\n'\\n'\n'If \"finally\" is present, it specifies a \u2018cleanup\u2019 handler. The '\n'\"try\"\\n'\n'clause is executed, including any \"except\" and \"else\" clauses. '\n'If an\\n'\n'exception occurs in any of the clauses and is not handled, the\\n'\n'exception is temporarily saved. The \"finally\" clause is '\n'executed. If\\n'\n'there is a saved exception it is re-raised at the end of the '\n'\"finally\"\\n'\n'clause. If the \"finally\" clause raises another exception, the '\n'saved\\n'\n'exception is set as the context of the new exception. If the '\n'\"finally\"\\n'\n'clause executes a \"return\", \"break\" or \"continue\" statement, the '\n'saved\\n'\n'exception is discarded:\\n'\n'\\n'\n' >>> def f():\\n'\n' ... try:\\n'\n' ... 1/0\\n'\n' ... finally:\\n'\n' ... return 42\\n'\n' ...\\n'\n' >>> f()\\n'\n' 42\\n'\n'\\n'\n'The exception information is not available to the program '\n'during\\n'\n'execution of the \"finally\" clause.\\n'\n'\\n'\n'When a \"return\", \"break\" or \"continue\" statement is executed in '\n'the\\n'\n'\"try\" suite of a \"try\"\u2026\"finally\" statement, the \"finally\" clause '\n'is\\n'\n'also executed \u2018on the way out.\u2019\\n'\n'\\n'\n'The return value of a function is determined by the last '\n'\"return\"\\n'\n'statement executed. Since the \"finally\" clause always executes, '\n'a\\n'\n'\"return\" statement executed in the \"finally\" clause will always '\n'be the\\n'\n'last one executed:\\n'\n'\\n'\n' >>> def foo():\\n'\n' ... try:\\n'\n\" ... return 'try'\\n\"\n' ... finally:\\n'\n\" ... return 'finally'\\n\"\n' ...\\n'\n' >>> foo()\\n'\n\" 'finally'\\n\"\n'\\n'\n'Additional information on exceptions can be found in section\\n'\n'Exceptions, and information on using the \"raise\" statement to '\n'generate\\n'\n'exceptions may be found in section The raise statement.\\n'\n'\\n'\n'Changed in version 3.8: Prior to Python 3.8, a \"continue\" '\n'statement\\n'\n'was illegal in the \"finally\" clause due to a problem with the\\n'\n'implementation.\\n'\n'\\n'\n'\\n'\n'The \"with\" statement\\n'\n'====================\\n'\n'\\n'\n'The \"with\" statement is used to wrap the execution of a block '\n'with\\n'\n'methods defined by a context manager (see section With '\n'Statement\\n'\n'Context Managers). This allows common \"try\"\u2026\"except\"\u2026\"finally\" '\n'usage\\n'\n'patterns to be encapsulated for convenient reuse.\\n'\n'\\n'\n' with_stmt ::= \"with\" ( \"(\" with_stmt_contents \",\"? '\n'\")\" | with_stmt_contents ) \":\" suite\\n'\n' with_stmt_contents ::= with_item (\",\" with_item)*\\n'\n' with_item ::= expression [\"as\" target]\\n'\n'\\n'\n'The execution of the \"with\" statement with one \u201citem\u201d proceeds '\n'as\\n'\n'follows:\\n'\n'\\n'\n'1. The context expression (the expression given in the '\n'\"with_item\") is\\n'\n' evaluated to obtain a context manager.\\n'\n'\\n'\n'2. The context manager\u2019s \"__enter__()\" is loaded for later use.\\n'\n'\\n'\n'3. The context manager\u2019s \"__exit__()\" is loaded for later use.\\n'\n'\\n'\n'4. 
The context manager\u2019s \"__enter__()\" method is invoked.\\n'\n'\\n'\n'5. If a target was included in the \"with\" statement, the return '\n'value\\n'\n' from \"__enter__()\" is assigned to it.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' The \"with\" statement guarantees that if the \"__enter__()\" '\n'method\\n'\n' returns without an error, then \"__exit__()\" will always be\\n'\n' called. Thus, if an error occurs during the assignment to '\n'the\\n'\n' target list, it will be treated the same as an error '\n'occurring\\n'\n' within the suite would be. See step 6 below.\\n'\n'\\n'\n'6. The suite is executed.\\n'\n'\\n'\n'7. The context manager\u2019s \"__exit__()\" method is invoked. If an\\n'\n' exception caused the suite to be exited, its type, value, '\n'and\\n'\n' traceback are passed as arguments to \"__exit__()\". Otherwise, '\n'three\\n'\n' \"None\" arguments are supplied.\\n'\n'\\n'\n' If the suite was exited due to an exception, and the return '\n'value\\n'\n' from the \"__exit__()\" method was false, the exception is '\n'reraised.\\n'\n' If the return value was true, the exception is suppressed, '\n'and\\n'\n' execution continues with the statement following the \"with\"\\n'\n' statement.\\n'\n'\\n'\n' If the suite was exited for any reason other than an '\n'exception, the\\n'\n' return value from \"__exit__()\" is ignored, and execution '\n'proceeds\\n'\n' at the normal location for the kind of exit that was taken.\\n'\n'\\n'\n'The following code:\\n'\n'\\n'\n' with EXPRESSION as TARGET:\\n'\n' SUITE\\n'\n'\\n'\n'is semantically equivalent to:\\n'\n'\\n'\n' manager = (EXPRESSION)\\n'\n' enter = type(manager).__enter__\\n'\n' exit = type(manager).__exit__\\n'\n' value = enter(manager)\\n'\n' hit_except = False\\n'\n'\\n'\n' try:\\n'\n' TARGET = value\\n'\n' SUITE\\n'\n' except:\\n'\n' hit_except = True\\n'\n' if not exit(manager, *sys.exc_info()):\\n'\n' raise\\n'\n' finally:\\n'\n' if not hit_except:\\n'\n' exit(manager, None, None, None)\\n'\n'\\n'\n'With more than one item, the context managers are processed as '\n'if\\n'\n'multiple \"with\" statements were nested:\\n'\n'\\n'\n' with A() as a, B() as b:\\n'\n' SUITE\\n'\n'\\n'\n'is semantically equivalent to:\\n'\n'\\n'\n' with A() as a:\\n'\n' with B() as b:\\n'\n' SUITE\\n'\n'\\n'\n'You can also write multi-item context managers in multiple lines '\n'if\\n'\n'the items are surrounded by parentheses. For example:\\n'\n'\\n'\n' with (\\n'\n' A() as a,\\n'\n' B() as b,\\n'\n' ):\\n'\n' SUITE\\n'\n'\\n'\n'Changed in version 3.1: Support for multiple context '\n'expressions.\\n'\n'\\n'\n'Changed in version 3.10: Support for using grouping parentheses '\n'to\\n'\n'break the statement in multiple lines.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 343** - The \u201cwith\u201d statement\\n'\n' The specification, background, and examples for the Python '\n'\"with\"\\n'\n' statement.\\n'\n'\\n'\n'\\n'\n'The \"match\" statement\\n'\n'=====================\\n'\n'\\n'\n'New in version 3.10.\\n'\n'\\n'\n'The match statement is used for pattern matching. Syntax:\\n'\n'\\n'\n' match_stmt ::= \\'match\\' subject_expr \":\" NEWLINE INDENT '\n'case_block+ DEDENT\\n'\n' subject_expr ::= star_named_expression \",\" '\n'star_named_expressions?\\n'\n' | named_expression\\n'\n' case_block ::= \\'case\\' patterns [guard] \":\" block\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' This section uses single quotes to denote soft keywords.\\n'\n'\\n'\n'Pattern matching takes a pattern as input (following \"case\") and '\n'a\\n'\n'subject value (following \"match\"). 
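The `__enter__`/`__exit__` steps above can be followed with a small hand-written context manager; `Timer` is a hypothetical example class, and two items are used to show the nesting equivalence:

```python
import time

class Timer:
    """Minimal context manager following the __enter__/__exit__ protocol."""
    def __enter__(self):
        self.start = time.perf_counter()
        return self                    # bound to the 'as' target, if any
    def __exit__(self, exc_type, exc_value, traceback):
        self.elapsed = time.perf_counter() - self.start
        return False                   # a false return value re-raises exceptions

# Multiple items behave like nested with statements.
with Timer() as outer, Timer() as inner:
    sum(range(100_000))

print(inner.elapsed <= outer.elapsed)  # True: outer encloses inner
```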
The pattern (which may '\n'contain\\n'\n'subpatterns) is matched against the subject value. The outcomes '\n'are:\\n'\n'\\n'\n'* A match success or failure (also termed a pattern success or\\n'\n' failure).\\n'\n'\\n'\n'* Possible binding of matched values to a name. The '\n'prerequisites for\\n'\n' this are further discussed below.\\n'\n'\\n'\n'The \"match\" and \"case\" keywords are soft keywords.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' * **PEP 634** \u2013 Structural Pattern Matching: Specification\\n'\n'\\n'\n' * **PEP 636** \u2013 Structural Pattern Matching: Tutorial\\n'\n'\\n'\n'\\n'\n'Overview\\n'\n'--------\\n'\n'\\n'\n'Here\u2019s an overview of the logical flow of a match statement:\\n'\n'\\n'\n'1. The subject expression \"subject_expr\" is evaluated and a '\n'resulting\\n'\n' subject value obtained. If the subject expression contains a '\n'comma,\\n'\n' a tuple is constructed using the standard rules.\\n'\n'\\n'\n'2. Each pattern in a \"case_block\" is attempted to match with '\n'the\\n'\n' subject value. The specific rules for success or failure are\\n'\n' described below. The match attempt can also bind some or all '\n'of the\\n'\n' standalone names within the pattern. The precise pattern '\n'binding\\n'\n' rules vary per pattern type and are specified below. **Name\\n'\n' bindings made during a successful pattern match outlive the\\n'\n' executed block and can be used after the match statement**.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' During failed pattern matches, some subpatterns may '\n'succeed.\\n'\n' Do not rely on bindings being made for a failed match.\\n'\n' Conversely, do not rely on variables remaining unchanged '\n'after\\n'\n' a failed match. The exact behavior is dependent on\\n'\n' implementation and may vary. This is an intentional '\n'decision\\n'\n' made to allow different implementations to add '\n'optimizations.\\n'\n'\\n'\n'3. If the pattern succeeds, the corresponding guard (if present) '\n'is\\n'\n' evaluated. In this case all name bindings are guaranteed to '\n'have\\n'\n' happened.\\n'\n'\\n'\n' * If the guard evaluates as truthy or missing, the \"block\" '\n'inside\\n'\n' \"case_block\" is executed.\\n'\n'\\n'\n' * Otherwise, the next \"case_block\" is attempted as described '\n'above.\\n'\n'\\n'\n' * If there are no further case blocks, the match statement '\n'is\\n'\n' completed.\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' Users should generally never rely on a pattern being '\n'evaluated.\\n'\n' Depending on implementation, the interpreter may cache values '\n'or use\\n'\n' other optimizations which skip repeated evaluations.\\n'\n'\\n'\n'A sample match statement:\\n'\n'\\n'\n' >>> flag = False\\n'\n' >>> match (100, 200):\\n'\n' ... case (100, 300): # Mismatch: 200 != 300\\n'\n\" ... print('Case 1')\\n\"\n' ... case (100, 200) if flag: # Successful match, but '\n'guard fails\\n'\n\" ... print('Case 2')\\n\"\n' ... case (100, y): # Matches and binds y to 200\\n'\n\" ... print(f'Case 3, y: {y}')\\n\"\n' ... case _: # Pattern not attempted\\n'\n\" ... print('Case 4, I match anything!')\\n\"\n' ...\\n'\n' Case 3, y: 200\\n'\n'\\n'\n'In this case, \"if flag\" is a guard. Read more about that in the '\n'next\\n'\n'section.\\n'\n'\\n'\n'\\n'\n'Guards\\n'\n'------\\n'\n'\\n'\n' guard ::= \"if\" named_expression\\n'\n'\\n'\n'A \"guard\" (which is part of the \"case\") must succeed for code '\n'inside\\n'\n'the \"case\" block to execute. 
It takes the form: \"if\" followed '\n'by an\\n'\n'expression.\\n'\n'\\n'\n'The logical flow of a \"case\" block with a \"guard\" follows:\\n'\n'\\n'\n'1. Check that the pattern in the \"case\" block succeeded. If '\n'the\\n'\n' pattern failed, the \"guard\" is not evaluated and the next '\n'\"case\"\\n'\n' block is checked.\\n'\n'\\n'\n'2. If the pattern succeeded, evaluate the \"guard\".\\n'\n'\\n'\n' * If the \"guard\" condition evaluates to \u201ctruthy\u201d, the case '\n'block is\\n'\n' selected.\\n'\n'\\n'\n' * If the \"guard\" condition evaluates to \u201cfalsy\u201d, the case '\n'block is\\n'\n' not selected.\\n'\n'\\n'\n' * If the \"guard\" raises an exception during evaluation, the\\n'\n' exception bubbles up.\\n'\n'\\n'\n'Guards are allowed to have side effects as they are '\n'expressions.\\n'\n'Guard evaluation must proceed from the first to the last case '\n'block,\\n'\n'one at a time, skipping case blocks whose pattern(s) don\u2019t all\\n'\n'succeed. (I.e., guard evaluation must happen in order.) Guard\\n'\n'evaluation must stop once a case block is selected.\\n'\n'\\n'\n'\\n'\n'Irrefutable Case Blocks\\n'\n'-----------------------\\n'\n'\\n'\n'An irrefutable case block is a match-all case block. A match\\n'\n'statement may have at most one irrefutable case block, and it '\n'must be\\n'\n'last.\\n'\n'\\n'\n'A case block is considered irrefutable if it has no guard and '\n'its\\n'\n'pattern is irrefutable. A pattern is considered irrefutable if '\n'we can\\n'\n'prove from its syntax alone that it will always succeed. Only '\n'the\\n'\n'following patterns are irrefutable:\\n'\n'\\n'\n'* AS Patterns whose left-hand side is irrefutable\\n'\n'\\n'\n'* OR Patterns containing at least one irrefutable pattern\\n'\n'\\n'\n'* Capture Patterns\\n'\n'\\n'\n'* Wildcard Patterns\\n'\n'\\n'\n'* parenthesized irrefutable patterns\\n'\n'\\n'\n'\\n'\n'Patterns\\n'\n'--------\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' This section uses grammar notations beyond standard EBNF:\\n'\n'\\n'\n' * the notation \"SEP.RULE+\" is shorthand for \"RULE (SEP '\n'RULE)*\"\\n'\n'\\n'\n' * the notation \"!RULE\" is shorthand for a negative lookahead\\n'\n' assertion\\n'\n'\\n'\n'The top-level syntax for \"patterns\" is:\\n'\n'\\n'\n' patterns ::= open_sequence_pattern | pattern\\n'\n' pattern ::= as_pattern | or_pattern\\n'\n' closed_pattern ::= | literal_pattern\\n'\n' | capture_pattern\\n'\n' | wildcard_pattern\\n'\n' | value_pattern\\n'\n' | group_pattern\\n'\n' | sequence_pattern\\n'\n' | mapping_pattern\\n'\n' | class_pattern\\n'\n'\\n'\n'The descriptions below will include a description \u201cin simple '\n'terms\u201d of\\n'\n'what a pattern does for illustration purposes (credits to '\n'Raymond\\n'\n'Hettinger for a document that inspired most of the '\n'descriptions). Note\\n'\n'that these descriptions are purely for illustration purposes and '\n'**may\\n'\n'not** reflect the underlying implementation. Furthermore, they '\n'do not\\n'\n'cover all valid forms.\\n'\n'\\n'\n'\\n'\n'OR Patterns\\n'\n'~~~~~~~~~~~\\n'\n'\\n'\n'An OR pattern is two or more patterns separated by vertical bars '\n'\"|\".\\n'\n'Syntax:\\n'\n'\\n'\n' or_pattern ::= \"|\".closed_pattern+\\n'\n'\\n'\n'Only the final subpattern may be irrefutable, and each '\n'subpattern must\\n'\n'bind the same set of names to avoid ambiguity.\\n'\n'\\n'\n'An OR pattern matches each of its subpatterns in turn to the '\n'subject\\n'\n'value, until one succeeds. The OR pattern is then considered\\n'\n'successful. 
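An illustrative OR pattern with a guard (Python 3.10+); the helper name `classify` and the status codes are arbitrary:

```python
def classify(status):
    match status:
        case 301 | 302 | 307 | 308:            # OR pattern: any redirect code
            return "redirect"
        case int(code) if 400 <= code < 500:   # capture pattern plus a guard
            return "client error"
        case int():
            return "other"
        case _:                                # irrefutable wildcard, must be last
            return "not a status code"

assert classify(302) == "redirect"
assert classify(404) == "client error"
assert classify("?") == "not a status code"
```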
Otherwise, if none of the subpatterns succeed, the '\n'OR\\n'\n'pattern fails.\\n'\n'\\n'\n'In simple terms, \"P1 | P2 | ...\" will try to match \"P1\", if it '\n'fails\\n'\n'it will try to match \"P2\", succeeding immediately if any '\n'succeeds,\\n'\n'failing otherwise.\\n'\n'\\n'\n'\\n'\n'AS Patterns\\n'\n'~~~~~~~~~~~\\n'\n'\\n'\n'An AS pattern matches an OR pattern on the left of the \"as\" '\n'keyword\\n'\n'against a subject. Syntax:\\n'\n'\\n'\n' as_pattern ::= or_pattern \"as\" capture_pattern\\n'\n'\\n'\n'If the OR pattern fails, the AS pattern fails. Otherwise, the '\n'AS\\n'\n'pattern binds the subject to the name on the right of the as '\n'keyword\\n'\n'and succeeds. \"capture_pattern\" cannot be a a \"_\".\\n'\n'\\n'\n'In simple terms \"P as NAME\" will match with \"P\", and on success '\n'it\\n'\n'will set \"NAME = \".\\n'\n'\\n'\n'\\n'\n'Literal Patterns\\n'\n'~~~~~~~~~~~~~~~~\\n'\n'\\n'\n'A literal pattern corresponds to most literals in Python. '\n'Syntax:\\n'\n'\\n'\n' literal_pattern ::= signed_number\\n'\n' | signed_number \"+\" NUMBER\\n'\n' | signed_number \"-\" NUMBER\\n'\n' | strings\\n'\n' | \"None\"\\n'\n' | \"True\"\\n'\n' | \"False\"\\n'\n' | signed_number: NUMBER | \"-\" NUMBER\\n'\n'\\n'\n'The rule \"strings\" and the token \"NUMBER\" are defined in the '\n'standard\\n'\n'Python grammar. Triple-quoted strings are supported. Raw '\n'strings and\\n'\n'byte strings are supported. Formatted string literals are not\\n'\n'supported.\\n'\n'\\n'\n'The forms \"signed_number \\'+\\' NUMBER\" and \"signed_number \\'-\\' '\n'NUMBER\"\\n'\n'are for expressing complex numbers; they require a real number '\n'on the\\n'\n'left and an imaginary number on the right. E.g. \"3 + 4j\".\\n'\n'\\n'\n'In simple terms, \"LITERAL\" will succeed only if \" ==\\n'\n'LITERAL\". For the singletons \"None\", \"True\" and \"False\", the '\n'\"is\"\\n'\n'operator is used.\\n'\n'\\n'\n'\\n'\n'Capture Patterns\\n'\n'~~~~~~~~~~~~~~~~\\n'\n'\\n'\n'A capture pattern binds the subject value to a name. Syntax:\\n'\n'\\n'\n\" capture_pattern ::= !'_' NAME\\n\"\n'\\n'\n'A single underscore \"_\" is not a capture pattern (this is what '\n'\"!\\'_\\'\"\\n'\n'expresses). It is instead treated as a \"wildcard_pattern\".\\n'\n'\\n'\n'In a given pattern, a given name can only be bound once. E.g. '\n'\"case\\n'\n'x, x: ...\" is invalid while \"case [x] | x: ...\" is allowed.\\n'\n'\\n'\n'Capture patterns always succeed. The binding follows scoping '\n'rules\\n'\n'established by the assignment expression operator in **PEP '\n'572**; the\\n'\n'name becomes a local variable in the closest containing function '\n'scope\\n'\n'unless there\u2019s an applicable \"global\" or \"nonlocal\" statement.\\n'\n'\\n'\n'In simple terms \"NAME\" will always succeed and it will set \"NAME '\n'=\\n'\n'\".\\n'\n'\\n'\n'\\n'\n'Wildcard Patterns\\n'\n'~~~~~~~~~~~~~~~~~\\n'\n'\\n'\n'A wildcard pattern always succeeds (matches anything) and binds '\n'no\\n'\n'name. Syntax:\\n'\n'\\n'\n\" wildcard_pattern ::= '_'\\n\"\n'\\n'\n'\"_\" is a soft keyword within any pattern, but only within '\n'patterns.\\n'\n'It is an identifier, as usual, even within \"match\" subject\\n'\n'expressions, \"guard\"s, and \"case\" blocks.\\n'\n'\\n'\n'In simple terms, \"_\" will always succeed.\\n'\n'\\n'\n'\\n'\n'Value Patterns\\n'\n'~~~~~~~~~~~~~~\\n'\n'\\n'\n'A value pattern represents a named value in Python. 
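A brief sketch of AS patterns combined with literal and capture patterns (Python 3.10+); the function `describe` is made up for illustration:

```python
def describe(point):
    match point:
        case (0, 0) as origin:                 # literal subpatterns, whole value bound
            return f"origin {origin}"
        case (x, y) as pair if x == y:         # capture patterns plus a guard
            return f"diagonal point {pair}"
        case _:
            return "somewhere else"

assert describe((0, 0)) == "origin (0, 0)"
assert describe((2, 2)) == "diagonal point (2, 2)"
assert describe((1, 3)) == "somewhere else"
```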
Syntax:\\n'\n'\\n'\n' value_pattern ::= attr\\n'\n' attr ::= name_or_attr \".\" NAME\\n'\n' name_or_attr ::= attr | NAME\\n'\n'\\n'\n'The dotted name in the pattern is looked up using standard '\n'Python name\\n'\n'resolution rules. The pattern succeeds if the value found '\n'compares\\n'\n'equal to the subject value (using the \"==\" equality operator).\\n'\n'\\n'\n'In simple terms \"NAME1.NAME2\" will succeed only if \" '\n'==\\n'\n'NAME1.NAME2\"\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' If the same value occurs multiple times in the same match '\n'statement,\\n'\n' the interpreter may cache the first value found and reuse it '\n'rather\\n'\n' than repeat the same lookup. This cache is strictly tied to a '\n'given\\n'\n' execution of a given match statement.\\n'\n'\\n'\n'\\n'\n'Group Patterns\\n'\n'~~~~~~~~~~~~~~\\n'\n'\\n'\n'A group pattern allows users to add parentheses around patterns '\n'to\\n'\n'emphasize the intended grouping. Otherwise, it has no '\n'additional\\n'\n'syntax. Syntax:\\n'\n'\\n'\n' group_pattern ::= \"(\" pattern \")\"\\n'\n'\\n'\n'In simple terms \"(P)\" has the same effect as \"P\".\\n'\n'\\n'\n'\\n'\n'Sequence Patterns\\n'\n'~~~~~~~~~~~~~~~~~\\n'\n'\\n'\n'A sequence pattern contains several subpatterns to be matched '\n'against\\n'\n'sequence elements. The syntax is similar to the unpacking of a '\n'list or\\n'\n'tuple.\\n'\n'\\n'\n' sequence_pattern ::= \"[\" [maybe_sequence_pattern] \"]\"\\n'\n' | \"(\" [open_sequence_pattern] \")\"\\n'\n' open_sequence_pattern ::= maybe_star_pattern \",\" '\n'[maybe_sequence_pattern]\\n'\n' maybe_sequence_pattern ::= \",\".maybe_star_pattern+ \",\"?\\n'\n' maybe_star_pattern ::= star_pattern | pattern\\n'\n' star_pattern ::= \"*\" (capture_pattern | '\n'wildcard_pattern)\\n'\n'\\n'\n'There is no difference if parentheses or square brackets are '\n'used for\\n'\n'sequence patterns (i.e. \"(...)\" vs \"[...]\" ).\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' A single pattern enclosed in parentheses without a trailing '\n'comma\\n'\n' (e.g. \"(3 | 4)\") is a group pattern. While a single pattern '\n'enclosed\\n'\n' in square brackets (e.g. \"[3 | 4]\") is still a sequence '\n'pattern.\\n'\n'\\n'\n'At most one star subpattern may be in a sequence pattern. The '\n'star\\n'\n'subpattern may occur in any position. If no star subpattern is\\n'\n'present, the sequence pattern is a fixed-length sequence '\n'pattern;\\n'\n'otherwise it is a variable-length sequence pattern.\\n'\n'\\n'\n'The following is the logical flow for matching a sequence '\n'pattern\\n'\n'against a subject value:\\n'\n'\\n'\n'1. If the subject value is not a sequence [2], the sequence '\n'pattern\\n'\n' fails.\\n'\n'\\n'\n'2. If the subject value is an instance of \"str\", \"bytes\" or\\n'\n' \"bytearray\" the sequence pattern fails.\\n'\n'\\n'\n'3. The subsequent steps depend on whether the sequence pattern '\n'is\\n'\n' fixed or variable-length.\\n'\n'\\n'\n' If the sequence pattern is fixed-length:\\n'\n'\\n'\n' 1. If the length of the subject sequence is not equal to the '\n'number\\n'\n' of subpatterns, the sequence pattern fails\\n'\n'\\n'\n' 2. Subpatterns in the sequence pattern are matched to their\\n'\n' corresponding items in the subject sequence from left to '\n'right.\\n'\n' Matching stops as soon as a subpattern fails. If all\\n'\n' subpatterns succeed in matching their corresponding item, '\n'the\\n'\n' sequence pattern succeeds.\\n'\n'\\n'\n' Otherwise, if the sequence pattern is variable-length:\\n'\n'\\n'\n' 1. 
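Fixed-length and variable-length (star) sequence patterns in practice; `split_command` is an illustrative name only:

```python
def split_command(argv):
    match argv:
        case []:
            return "empty"
        case [cmd]:                            # fixed-length: exactly one element
            return f"{cmd} with no arguments"
        case [cmd, *args]:                     # star subpattern collects the rest
            return f"{cmd} with {len(args)} argument(s)"

assert split_command([]) == "empty"
assert split_command(["ls"]) == "ls with no arguments"
assert split_command(["cp", "a", "b"]) == "cp with 2 argument(s)"
# Note: str/bytes/bytearray subjects never match sequence patterns.
```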
If the length of the subject sequence is less than the '\n'number of\\n'\n' non-star subpatterns, the sequence pattern fails.\\n'\n'\\n'\n' 2. The leading non-star subpatterns are matched to their\\n'\n' corresponding items as for fixed-length sequences.\\n'\n'\\n'\n' 3. If the previous step succeeds, the star subpattern matches '\n'a\\n'\n' list formed of the remaining subject items, excluding the\\n'\n' remaining items corresponding to non-star subpatterns '\n'following\\n'\n' the star subpattern.\\n'\n'\\n'\n' 4. Remaining non-star subpatterns are matched to their\\n'\n' corresponding subject items, as for a fixed-length '\n'sequence.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' The length of the subject sequence is obtained via \"len()\" '\n'(i.e.\\n'\n' via the \"__len__()\" protocol). This length may be cached '\n'by the\\n'\n' interpreter in a similar manner as value patterns.\\n'\n'\\n'\n'In simple terms \"[P1, P2, P3,\" \u2026 \", P]\" matches only if all '\n'the\\n'\n'following happens:\\n'\n'\\n'\n'* check \"\" is a sequence\\n'\n'\\n'\n'* \"len(subject) == \"\\n'\n'\\n'\n'* \"P1\" matches \"[0]\" (note that this match can also '\n'bind\\n'\n' names)\\n'\n'\\n'\n'* \"P2\" matches \"[1]\" (note that this match can also '\n'bind\\n'\n' names)\\n'\n'\\n'\n'* \u2026 and so on for the corresponding pattern/element.\\n'\n'\\n'\n'\\n'\n'Mapping Patterns\\n'\n'~~~~~~~~~~~~~~~~\\n'\n'\\n'\n'A mapping pattern contains one or more key-value patterns. The '\n'syntax\\n'\n'is similar to the construction of a dictionary. Syntax:\\n'\n'\\n'\n' mapping_pattern ::= \"{\" [items_pattern] \"}\"\\n'\n' items_pattern ::= \",\".key_value_pattern+ \",\"?\\n'\n' key_value_pattern ::= (literal_pattern | value_pattern) \":\" '\n'pattern\\n'\n' | double_star_pattern\\n'\n' double_star_pattern ::= \"**\" capture_pattern\\n'\n'\\n'\n'At most one double star pattern may be in a mapping pattern. '\n'The\\n'\n'double star pattern must be the last subpattern in the mapping\\n'\n'pattern.\\n'\n'\\n'\n'Duplicate keys in mapping patterns are disallowed. Duplicate '\n'literal\\n'\n'keys will raise a \"SyntaxError\". Two keys that otherwise have '\n'the same\\n'\n'value will raise a \"ValueError\" at runtime.\\n'\n'\\n'\n'The following is the logical flow for matching a mapping '\n'pattern\\n'\n'against a subject value:\\n'\n'\\n'\n'1. If the subject value is not a mapping [3],the mapping '\n'pattern\\n'\n' fails.\\n'\n'\\n'\n'2. If every key given in the mapping pattern is present in the '\n'subject\\n'\n' mapping, and the pattern for each key matches the '\n'corresponding\\n'\n' item of the subject mapping, the mapping pattern succeeds.\\n'\n'\\n'\n'3. If duplicate keys are detected in the mapping pattern, the '\n'pattern\\n'\n' is considered invalid. A \"SyntaxError\" is raised for '\n'duplicate\\n'\n' literal values; or a \"ValueError\" for named keys of the same '\n'value.\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' Key-value pairs are matched using the two-argument form of '\n'the\\n'\n' mapping subject\u2019s \"get()\" method. Matched key-value pairs '\n'must\\n'\n' already be present in the mapping, and not created on-the-fly '\n'via\\n'\n' \"__missing__()\" or \"__getitem__()\".\\n'\n'\\n'\n'In simple terms \"{KEY1: P1, KEY2: P2, ... 
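A small sketch of mapping patterns, including a `**rest` double star pattern; the event dictionaries are invented for the example:

```python
def handle(event):
    match event:
        case {"type": "click", "x": x, "y": y}:
            return f"click at ({x}, {y})"
        case {"type": "key", **rest}:          # rest holds the unmatched items
            return f"key event with extra fields {sorted(rest)}"
        case {}:                               # empty pattern matches any mapping
            return "any other mapping"

assert handle({"type": "click", "x": 1, "y": 2}) == "click at (1, 2)"
assert handle({"type": "key", "code": 13}) == "key event with extra fields ['code']"
assert handle({"anything": True}) == "any other mapping"
```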
}\" matches only if all '\n'the\\n'\n'following happens:\\n'\n'\\n'\n'* check \"\" is a mapping\\n'\n'\\n'\n'* \"KEY1 in \"\\n'\n'\\n'\n'* \"P1\" matches \"[KEY1]\"\\n'\n'\\n'\n'* \u2026 and so on for the corresponding KEY/pattern pair.\\n'\n'\\n'\n'\\n'\n'Class Patterns\\n'\n'~~~~~~~~~~~~~~\\n'\n'\\n'\n'A class pattern represents a class and its positional and '\n'keyword\\n'\n'arguments (if any). Syntax:\\n'\n'\\n'\n' class_pattern ::= name_or_attr \"(\" [pattern_arguments '\n'\",\"?] \")\"\\n'\n' pattern_arguments ::= positional_patterns [\",\" '\n'keyword_patterns]\\n'\n' | keyword_patterns\\n'\n' positional_patterns ::= \",\".pattern+\\n'\n' keyword_patterns ::= \",\".keyword_pattern+\\n'\n' keyword_pattern ::= NAME \"=\" pattern\\n'\n'\\n'\n'The same keyword should not be repeated in class patterns.\\n'\n'\\n'\n'The following is the logical flow for matching a mapping '\n'pattern\\n'\n'against a subject value:\\n'\n'\\n'\n'1. If \"name_or_attr\" is not an instance of the builtin \"type\" , '\n'raise\\n'\n' \"TypeError\".\\n'\n'\\n'\n'2. If the subject value is not an instance of \"name_or_attr\" '\n'(tested\\n'\n' via \"isinstance()\"), the class pattern fails.\\n'\n'\\n'\n'3. If no pattern arguments are present, the pattern succeeds.\\n'\n' Otherwise, the subsequent steps depend on whether keyword or\\n'\n' positional argument patterns are present.\\n'\n'\\n'\n' For a number of built-in types (specified below), a single\\n'\n' positional subpattern is accepted which will match the '\n'entire\\n'\n' subject; for these types keyword patterns also work as for '\n'other\\n'\n' types.\\n'\n'\\n'\n' If only keyword patterns are present, they are processed as\\n'\n' follows, one by one:\\n'\n'\\n'\n' I. The keyword is looked up as an attribute on the subject.\\n'\n'\\n'\n' * If this raises an exception other than \"AttributeError\", '\n'the\\n'\n' exception bubbles up.\\n'\n'\\n'\n' * If this raises \"AttributeError\", the class pattern has '\n'failed.\\n'\n'\\n'\n' * Else, the subpattern associated with the keyword pattern '\n'is\\n'\n' matched against the subject\u2019s attribute value. If this '\n'fails,\\n'\n' the class pattern fails; if this succeeds, the match '\n'proceeds\\n'\n' to the next keyword.\\n'\n'\\n'\n' II. If all keyword patterns succeed, the class pattern '\n'succeeds.\\n'\n'\\n'\n' If any positional patterns are present, they are converted '\n'to\\n'\n' keyword patterns using the \"__match_args__\" attribute on the '\n'class\\n'\n' \"name_or_attr\" before matching:\\n'\n'\\n'\n' I. The equivalent of \"getattr(cls, \"__match_args__\", ()))\" '\n'is\\n'\n' called.\\n'\n'\\n'\n' * If this raises an exception, the exception bubbles up.\\n'\n'\\n'\n' * If the returned value is not a tuple, the conversion '\n'fails and\\n'\n' \"TypeError\" is raised.\\n'\n'\\n'\n' * If there are more positional patterns than\\n'\n' \"len(cls.__match_args__)\", \"TypeError\" is raised.\\n'\n'\\n'\n' * Otherwise, positional pattern \"i\" is converted to a '\n'keyword\\n'\n' pattern using \"__match_args__[i]\" as the keyword.\\n'\n' \"__match_args__[i]\" must be a string; if not \"TypeError\" '\n'is\\n'\n' raised.\\n'\n'\\n'\n' * If there are duplicate keywords, \"TypeError\" is raised.\\n'\n'\\n'\n' See also:\\n'\n'\\n'\n' Customizing positional arguments in class pattern '\n'matching\\n'\n'\\n'\n' II. 
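Class patterns with positional subpatterns rely on `__match_args__`, as described next; a minimal sketch with a hypothetical `Point` class:

```python
class Point:
    __match_args__ = ("x", "y")        # maps positional subpatterns to attributes
    def __init__(self, x, y):
        self.x, self.y = x, y

def where(p):
    match p:
        case Point(0, 0):              # positional, converted via __match_args__
            return "origin"
        case Point(x=0) | Point(y=0):  # keyword patterns, no names bound
            return "on an axis"
        case Point():
            return "elsewhere"
        case _:
            return "not a Point"

assert where(Point(0, 0)) == "origin"
assert where(Point(0, 5)) == "on an axis"
assert where(Point(3, 4)) == "elsewhere"
assert where("nope") == "not a Point"
```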
Once all positional patterns have been converted to '\n'keyword\\n'\n' patterns,\\n'\n' the match proceeds as if there were only keyword '\n'patterns.\\n'\n'\\n'\n' For the following built-in types the handling of positional\\n'\n' subpatterns is different:\\n'\n'\\n'\n' * \"bool\"\\n'\n'\\n'\n' * \"bytearray\"\\n'\n'\\n'\n' * \"bytes\"\\n'\n'\\n'\n' * \"dict\"\\n'\n'\\n'\n' * \"float\"\\n'\n'\\n'\n' * \"frozenset\"\\n'\n'\\n'\n' * \"int\"\\n'\n'\\n'\n' * \"list\"\\n'\n'\\n'\n' * \"set\"\\n'\n'\\n'\n' * \"str\"\\n'\n'\\n'\n' * \"tuple\"\\n'\n'\\n'\n' These classes accept a single positional argument, and the '\n'pattern\\n'\n' there is matched against the whole object rather than an '\n'attribute.\\n'\n' For example \"int(0|1)\" matches the value \"0\", but not the '\n'values\\n'\n' \"0.0\" or \"False\".\\n'\n'\\n'\n'In simple terms \"CLS(P1, attr=P2)\" matches only if the '\n'following\\n'\n'happens:\\n'\n'\\n'\n'* \"isinstance(, CLS)\"\\n'\n'\\n'\n'* convert \"P1\" to a keyword pattern using \"CLS.__match_args__\"\\n'\n'\\n'\n'* For each keyword argument \"attr=P2\":\\n'\n' * \"hasattr(, \"attr\")\"\\n'\n'\\n'\n' * \"P2\" matches \".attr\"\\n'\n'\\n'\n'* \u2026 and so on for the corresponding keyword argument/pattern '\n'pair.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' * **PEP 634** \u2013 Structural Pattern Matching: Specification\\n'\n'\\n'\n' * **PEP 636** \u2013 Structural Pattern Matching: Tutorial\\n'\n'\\n'\n'\\n'\n'Function definitions\\n'\n'====================\\n'\n'\\n'\n'A function definition defines a user-defined function object '\n'(see\\n'\n'section The standard type hierarchy):\\n'\n'\\n'\n' funcdef ::= [decorators] \"def\" funcname \"(\" '\n'[parameter_list] \")\"\\n'\n' [\"->\" expression] \":\" suite\\n'\n' decorators ::= decorator+\\n'\n' decorator ::= \"@\" assignment_expression '\n'NEWLINE\\n'\n' parameter_list ::= defparameter (\",\" '\n'defparameter)* \",\" \"/\" [\",\" [parameter_list_no_posonly]]\\n'\n' | parameter_list_no_posonly\\n'\n' parameter_list_no_posonly ::= defparameter (\",\" '\n'defparameter)* [\",\" [parameter_list_starargs]]\\n'\n' | parameter_list_starargs\\n'\n' parameter_list_starargs ::= \"*\" [parameter] (\",\" '\n'defparameter)* [\",\" [\"**\" parameter [\",\"]]]\\n'\n' | \"**\" parameter [\",\"]\\n'\n' parameter ::= identifier [\":\" expression]\\n'\n' defparameter ::= parameter [\"=\" expression]\\n'\n' funcname ::= identifier\\n'\n'\\n'\n'A function definition is an executable statement. Its execution '\n'binds\\n'\n'the function name in the current local namespace to a function '\n'object\\n'\n'(a wrapper around the executable code for the function). This\\n'\n'function object contains a reference to the current global '\n'namespace\\n'\n'as the global namespace to be used when the function is called.\\n'\n'\\n'\n'The function definition does not execute the function body; this '\n'gets\\n'\n'executed only when the function is called. [4]\\n'\n'\\n'\n'A function definition may be wrapped by one or more *decorator*\\n'\n'expressions. Decorator expressions are evaluated when the '\n'function is\\n'\n'defined, in the scope that contains the function definition. '\n'The\\n'\n'result must be a callable, which is invoked with the function '\n'object\\n'\n'as the only argument. The returned value is bound to the '\n'function name\\n'\n'instead of the function object. Multiple decorators are applied '\n'in\\n'\n'nested fashion. 
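Decorator expressions are evaluated when the function is defined, as the section below explains; a short illustrative decorator (the name `logged` is arbitrary):

```python
import functools

def logged(func):
    """Wrap 'func'; the wrapping happens once, at definition time."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        print(f"calling {func.__name__}{args}")
        return func(*args, **kwargs)
    return wrapper

@logged                      # roughly equivalent to: add = logged(add)
def add(a, b):
    return a + b

assert add(2, 3) == 5        # prints: calling add(2, 3)
```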
For example, the following code\\n'\n'\\n'\n' @f1(arg)\\n'\n' @f2\\n'\n' def func(): pass\\n'\n'\\n'\n'is roughly equivalent to\\n'\n'\\n'\n' def func(): pass\\n'\n' func = f1(arg)(f2(func))\\n'\n'\\n'\n'except that the original function is not temporarily bound to '\n'the name\\n'\n'\"func\".\\n'\n'\\n'\n'Changed in version 3.9: Functions may be decorated with any '\n'valid\\n'\n'\"assignment_expression\". Previously, the grammar was much more\\n'\n'restrictive; see **PEP 614** for details.\\n'\n'\\n'\n'When one or more *parameters* have the form *parameter* \"=\"\\n'\n'*expression*, the function is said to have \u201cdefault parameter '\n'values.\u201d\\n'\n'For a parameter with a default value, the corresponding '\n'*argument* may\\n'\n'be omitted from a call, in which case the parameter\u2019s default '\n'value is\\n'\n'substituted. If a parameter has a default value, all following\\n'\n'parameters up until the \u201c\"*\"\u201d must also have a default value \u2014 '\n'this is\\n'\n'a syntactic restriction that is not expressed by the grammar.\\n'\n'\\n'\n'**Default parameter values are evaluated from left to right when '\n'the\\n'\n'function definition is executed.** This means that the '\n'expression is\\n'\n'evaluated once, when the function is defined, and that the same '\n'\u201cpre-\\n'\n'computed\u201d value is used for each call. This is especially '\n'important\\n'\n'to understand when a default parameter value is a mutable '\n'object, such\\n'\n'as a list or a dictionary: if the function modifies the object '\n'(e.g.\\n'\n'by appending an item to a list), the default parameter value is '\n'in\\n'\n'effect modified. This is generally not what was intended. A '\n'way\\n'\n'around this is to use \"None\" as the default, and explicitly test '\n'for\\n'\n'it in the body of the function, e.g.:\\n'\n'\\n'\n' def whats_on_the_telly(penguin=None):\\n'\n' if penguin is None:\\n'\n' penguin = []\\n'\n' penguin.append(\"property of the zoo\")\\n'\n' return penguin\\n'\n'\\n'\n'Function call semantics are described in more detail in section '\n'Calls.\\n'\n'A function call always assigns values to all parameters '\n'mentioned in\\n'\n'the parameter list, either from positional arguments, from '\n'keyword\\n'\n'arguments, or from default values. If the form \u201c\"*identifier\"\u201d '\n'is\\n'\n'present, it is initialized to a tuple receiving any excess '\n'positional\\n'\n'parameters, defaulting to the empty tuple. If the form\\n'\n'\u201c\"**identifier\"\u201d is present, it is initialized to a new ordered\\n'\n'mapping receiving any excess keyword arguments, defaulting to a '\n'new\\n'\n'empty mapping of the same type. Parameters after \u201c\"*\"\u201d or\\n'\n'\u201c\"*identifier\"\u201d are keyword-only parameters and may only be '\n'passed by\\n'\n'keyword arguments. Parameters before \u201c\"/\"\u201d are positional-only\\n'\n'parameters and may only be passed by positional arguments.\\n'\n'\\n'\n'Changed in version 3.8: The \"/\" function parameter syntax may be '\n'used\\n'\n'to indicate positional-only parameters. See **PEP 570** for '\n'details.\\n'\n'\\n'\n'Parameters may have an *annotation* of the form \u201c\": '\n'expression\"\u201d\\n'\n'following the parameter name. Any parameter may have an '\n'annotation,\\n'\n'even those of the form \"*identifier\" or \"**identifier\". '\n'Functions may\\n'\n'have \u201creturn\u201d annotation of the form \u201c\"-> expression\"\u201d after '\n'the\\n'\n'parameter list. 
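The `/` and `*` separators mentioned above mark positional-only and keyword-only parameters; a small sketch with an invented `clamp` function:

```python
def clamp(value, /, lo=0.0, hi=1.0, *, strict=False):
    # 'value' is positional-only (before /); 'strict' is keyword-only (after *).
    if strict and not lo <= value <= hi:
        raise ValueError(f"{value} outside [{lo}, {hi}]")
    return min(max(value, lo), hi)

assert clamp(1.5) == 1.0
assert clamp(-2, lo=-1) == -1
try:
    clamp(value=1.5)          # TypeError: positional-only argument passed by keyword
except TypeError:
    pass
```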
These annotations can be any valid Python '\n'expression.\\n'\n'The presence of annotations does not change the semantics of a\\n'\n'function. The annotation values are available as values of a\\n'\n'dictionary keyed by the parameters\u2019 names in the '\n'\"__annotations__\"\\n'\n'attribute of the function object. If the \"annotations\" import '\n'from\\n'\n'\"__future__\" is used, annotations are preserved as strings at '\n'runtime\\n'\n'which enables postponed evaluation. Otherwise, they are '\n'evaluated\\n'\n'when the function definition is executed. In this case '\n'annotations\\n'\n'may be evaluated in a different order than they appear in the '\n'source\\n'\n'code.\\n'\n'\\n'\n'It is also possible to create anonymous functions (functions not '\n'bound\\n'\n'to a name), for immediate use in expressions. This uses lambda\\n'\n'expressions, described in section Lambdas. Note that the '\n'lambda\\n'\n'expression is merely a shorthand for a simplified function '\n'definition;\\n'\n'a function defined in a \u201c\"def\"\u201d statement can be passed around '\n'or\\n'\n'assigned to another name just like a function defined by a '\n'lambda\\n'\n'expression. The \u201c\"def\"\u201d form is actually more powerful since '\n'it\\n'\n'allows the execution of multiple statements and annotations.\\n'\n'\\n'\n'**Programmer\u2019s note:** Functions are first-class objects. A '\n'\u201c\"def\"\u201d\\n'\n'statement executed inside a function definition defines a local\\n'\n'function that can be returned or passed around. Free variables '\n'used\\n'\n'in the nested function can access the local variables of the '\n'function\\n'\n'containing the def. See section Naming and binding for '\n'details.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 3107** - Function Annotations\\n'\n' The original specification for function annotations.\\n'\n'\\n'\n' **PEP 484** - Type Hints\\n'\n' Definition of a standard meaning for annotations: type '\n'hints.\\n'\n'\\n'\n' **PEP 526** - Syntax for Variable Annotations\\n'\n' Ability to type hint variable declarations, including '\n'class\\n'\n' variables and instance variables\\n'\n'\\n'\n' **PEP 563** - Postponed Evaluation of Annotations\\n'\n' Support for forward references within annotations by '\n'preserving\\n'\n' annotations in a string form at runtime instead of eager\\n'\n' evaluation.\\n'\n'\\n'\n'\\n'\n'Class definitions\\n'\n'=================\\n'\n'\\n'\n'A class definition defines a class object (see section The '\n'standard\\n'\n'type hierarchy):\\n'\n'\\n'\n' classdef ::= [decorators] \"class\" classname [inheritance] '\n'\":\" suite\\n'\n' inheritance ::= \"(\" [argument_list] \")\"\\n'\n' classname ::= identifier\\n'\n'\\n'\n'A class definition is an executable statement. The inheritance '\n'list\\n'\n'usually gives a list of base classes (see Metaclasses for more\\n'\n'advanced uses), so each item in the list should evaluate to a '\n'class\\n'\n'object which allows subclassing. Classes without an inheritance '\n'list\\n'\n'inherit, by default, from the base class \"object\"; hence,\\n'\n'\\n'\n' class Foo:\\n'\n' pass\\n'\n'\\n'\n'is equivalent to\\n'\n'\\n'\n' class Foo(object):\\n'\n' pass\\n'\n'\\n'\n'The class\u2019s suite is then executed in a new execution frame '\n'(see\\n'\n'Naming and binding), using a newly created local namespace and '\n'the\\n'\n'original global namespace. (Usually, the suite contains mostly\\n'\n'function definitions.) 
When the class\u2019s suite finishes '\n'execution, its\\n'\n'execution frame is discarded but its local namespace is saved. '\n'[5] A\\n'\n'class object is then created using the inheritance list for the '\n'base\\n'\n'classes and the saved local namespace for the attribute '\n'dictionary.\\n'\n'The class name is bound to this class object in the original '\n'local\\n'\n'namespace.\\n'\n'\\n'\n'The order in which attributes are defined in the class body is\\n'\n'preserved in the new class\u2019s \"__dict__\". Note that this is '\n'reliable\\n'\n'only right after the class is created and only for classes that '\n'were\\n'\n'defined using the definition syntax.\\n'\n'\\n'\n'Class creation can be customized heavily using metaclasses.\\n'\n'\\n'\n'Classes can also be decorated: just like when decorating '\n'functions,\\n'\n'\\n'\n' @f1(arg)\\n'\n' @f2\\n'\n' class Foo: pass\\n'\n'\\n'\n'is roughly equivalent to\\n'\n'\\n'\n' class Foo: pass\\n'\n' Foo = f1(arg)(f2(Foo))\\n'\n'\\n'\n'The evaluation rules for the decorator expressions are the same '\n'as for\\n'\n'function decorators. The result is then bound to the class '\n'name.\\n'\n'\\n'\n'Changed in version 3.9: Classes may be decorated with any valid\\n'\n'\"assignment_expression\". Previously, the grammar was much more\\n'\n'restrictive; see **PEP 614** for details.\\n'\n'\\n'\n'**Programmer\u2019s note:** Variables defined in the class definition '\n'are\\n'\n'class attributes; they are shared by instances. Instance '\n'attributes\\n'\n'can be set in a method with \"self.name = value\". Both class '\n'and\\n'\n'instance attributes are accessible through the notation '\n'\u201c\"self.name\"\u201d,\\n'\n'and an instance attribute hides a class attribute with the same '\n'name\\n'\n'when accessed in this way. Class attributes can be used as '\n'defaults\\n'\n'for instance attributes, but using mutable values there can lead '\n'to\\n'\n'unexpected results. Descriptors can be used to create instance\\n'\n'variables with different implementation details.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 3115** - Metaclasses in Python 3000\\n'\n' The proposal that changed the declaration of metaclasses to '\n'the\\n'\n' current syntax, and the semantics for how classes with\\n'\n' metaclasses are constructed.\\n'\n'\\n'\n' **PEP 3129** - Class Decorators\\n'\n' The proposal that added class decorators. Function and '\n'method\\n'\n' decorators were introduced in **PEP 318**.\\n'\n'\\n'\n'\\n'\n'Coroutines\\n'\n'==========\\n'\n'\\n'\n'New in version 3.5.\\n'\n'\\n'\n'\\n'\n'Coroutine function definition\\n'\n'-----------------------------\\n'\n'\\n'\n' async_funcdef ::= [decorators] \"async\" \"def\" funcname \"(\" '\n'[parameter_list] \")\"\\n'\n' [\"->\" expression] \":\" suite\\n'\n'\\n'\n'Execution of Python coroutines can be suspended and resumed at '\n'many\\n'\n'points (see *coroutine*). 
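The warning above about mutable class attributes shared between instances is easy to reproduce; `Basket` and `SafeBasket` are illustrative names only:

```python
class Basket:
    items = []                 # class attribute: shared by ALL instances (pitfall)
    def __init__(self):
        self.count = 0         # instance attribute: one per instance

a, b = Basket(), Basket()
a.items.append("apple")
print(b.items)                 # ['apple'] -- the list is shared

class SafeBasket:
    def __init__(self):
        self.items = []        # per-instance list avoids the surprise

c, d = SafeBasket(), SafeBasket()
c.items.append("apple")
print(d.items)                 # []
```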
\"await\" expressions, \"async for\" and '\n'\"async\\n'\n'with\" can only be used in the body of a coroutine function.\\n'\n'\\n'\n'Functions defined with \"async def\" syntax are always coroutine\\n'\n'functions, even if they do not contain \"await\" or \"async\" '\n'keywords.\\n'\n'\\n'\n'It is a \"SyntaxError\" to use a \"yield from\" expression inside '\n'the body\\n'\n'of a coroutine function.\\n'\n'\\n'\n'An example of a coroutine function:\\n'\n'\\n'\n' async def func(param1, param2):\\n'\n' do_stuff()\\n'\n' await some_coroutine()\\n'\n'\\n'\n'Changed in version 3.7: \"await\" and \"async\" are now keywords;\\n'\n'previously they were only treated as such inside the body of a\\n'\n'coroutine function.\\n'\n'\\n'\n'\\n'\n'The \"async for\" statement\\n'\n'-------------------------\\n'\n'\\n'\n' async_for_stmt ::= \"async\" for_stmt\\n'\n'\\n'\n'An *asynchronous iterable* provides an \"__aiter__\" method that\\n'\n'directly returns an *asynchronous iterator*, which can call\\n'\n'asynchronous code in its \"__anext__\" method.\\n'\n'\\n'\n'The \"async for\" statement allows convenient iteration over\\n'\n'asynchronous iterables.\\n'\n'\\n'\n'The following code:\\n'\n'\\n'\n' async for TARGET in ITER:\\n'\n' SUITE\\n'\n' else:\\n'\n' SUITE2\\n'\n'\\n'\n'Is semantically equivalent to:\\n'\n'\\n'\n' iter = (ITER)\\n'\n' iter = type(iter).__aiter__(iter)\\n'\n' running = True\\n'\n'\\n'\n' while running:\\n'\n' try:\\n'\n' TARGET = await type(iter).__anext__(iter)\\n'\n' except StopAsyncIteration:\\n'\n' running = False\\n'\n' else:\\n'\n' SUITE\\n'\n' else:\\n'\n' SUITE2\\n'\n'\\n'\n'See also \"__aiter__()\" and \"__anext__()\" for details.\\n'\n'\\n'\n'It is a \"SyntaxError\" to use an \"async for\" statement outside '\n'the body\\n'\n'of a coroutine function.\\n'\n'\\n'\n'\\n'\n'The \"async with\" statement\\n'\n'--------------------------\\n'\n'\\n'\n' async_with_stmt ::= \"async\" with_stmt\\n'\n'\\n'\n'An *asynchronous context manager* is a *context manager* that is '\n'able\\n'\n'to suspend execution in its *enter* and *exit* methods.\\n'\n'\\n'\n'The following code:\\n'\n'\\n'\n' async with EXPRESSION as TARGET:\\n'\n' SUITE\\n'\n'\\n'\n'is semantically equivalent to:\\n'\n'\\n'\n' manager = (EXPRESSION)\\n'\n' aenter = type(manager).__aenter__\\n'\n' aexit = type(manager).__aexit__\\n'\n' value = await aenter(manager)\\n'\n' hit_except = False\\n'\n'\\n'\n' try:\\n'\n' TARGET = value\\n'\n' SUITE\\n'\n' except:\\n'\n' hit_except = True\\n'\n' if not await aexit(manager, *sys.exc_info()):\\n'\n' raise\\n'\n' finally:\\n'\n' if not hit_except:\\n'\n' await aexit(manager, None, None, None)\\n'\n'\\n'\n'See also \"__aenter__()\" and \"__aexit__()\" for details.\\n'\n'\\n'\n'It is a \"SyntaxError\" to use an \"async with\" statement outside '\n'the\\n'\n'body of a coroutine function.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 492** - Coroutines with async and await syntax\\n'\n' The proposal that made coroutines a proper standalone '\n'concept in\\n'\n' Python, and added supporting syntax.\\n'\n'\\n'\n'-[ Footnotes ]-\\n'\n'\\n'\n'[1] The exception is propagated to the invocation stack unless '\n'there\\n'\n' is a \"finally\" clause which happens to raise another '\n'exception.\\n'\n' That new exception causes the old one to be lost.\\n'\n'\\n'\n'[2] In pattern matching, a sequence is defined as one of the\\n'\n' following:\\n'\n'\\n'\n' * a class that inherits from \"collections.abc.Sequence\"\\n'\n'\\n'\n' * a Python class that has been registered as\\n'\n' 
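A sketch of an asynchronous iterator consumed with `async for`, run here with standard CPython `asyncio`; the class name `Ticker` is invented for the example:

```python
import asyncio

class Ticker:
    """__aiter__ returns the iterator; __anext__ is a coroutine."""
    def __init__(self, n):
        self.n = n
    def __aiter__(self):
        return self
    async def __anext__(self):
        if self.n <= 0:
            raise StopAsyncIteration
        self.n -= 1
        await asyncio.sleep(0)         # yield control to the event loop
        return self.n

async def main():
    async for value in Ticker(3):
        print(value)                   # 2, 1, 0

asyncio.run(main())
```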
\"collections.abc.Sequence\"\\n'\n'\\n'\n' * a builtin class that has its (CPython) '\n'\"Py_TPFLAGS_SEQUENCE\"\\n'\n' bit set\\n'\n'\\n'\n' * a class that inherits from any of the above\\n'\n'\\n'\n' The following standard library classes are sequences:\\n'\n'\\n'\n' * \"array.array\"\\n'\n'\\n'\n' * \"collections.deque\"\\n'\n'\\n'\n' * \"list\"\\n'\n'\\n'\n' * \"memoryview\"\\n'\n'\\n'\n' * \"range\"\\n'\n'\\n'\n' * \"tuple\"\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' Subject values of type \"str\", \"bytes\", and \"bytearray\" do '\n'not\\n'\n' match sequence patterns.\\n'\n'\\n'\n'[3] In pattern matching, a mapping is defined as one of the '\n'following:\\n'\n'\\n'\n' * a class that inherits from \"collections.abc.Mapping\"\\n'\n'\\n'\n' * a Python class that has been registered as\\n'\n' \"collections.abc.Mapping\"\\n'\n'\\n'\n' * a builtin class that has its (CPython) '\n'\"Py_TPFLAGS_MAPPING\"\\n'\n' bit set\\n'\n'\\n'\n' * a class that inherits from any of the above\\n'\n'\\n'\n' The standard library classes \"dict\" and '\n'\"types.MappingProxyType\"\\n'\n' are mappings.\\n'\n'\\n'\n'[4] A string literal appearing as the first statement in the '\n'function\\n'\n' body is transformed into the function\u2019s \"__doc__\" attribute '\n'and\\n'\n' therefore the function\u2019s *docstring*.\\n'\n'\\n'\n'[5] A string literal appearing as the first statement in the '\n'class\\n'\n' body is transformed into the namespace\u2019s \"__doc__\" item and\\n'\n' therefore the class\u2019s *docstring*.\\n',\n'context-managers':'With Statement Context Managers\\n'\n'*******************************\\n'\n'\\n'\n'A *context manager* is an object that defines the '\n'runtime context to\\n'\n'be established when executing a \"with\" statement. The '\n'context manager\\n'\n'handles the entry into, and the exit from, the desired '\n'runtime context\\n'\n'for the execution of the block of code. Context '\n'managers are normally\\n'\n'invoked using the \"with\" statement (described in section '\n'The with\\n'\n'statement), but can also be used by directly invoking '\n'their methods.\\n'\n'\\n'\n'Typical uses of context managers include saving and '\n'restoring various\\n'\n'kinds of global state, locking and unlocking resources, '\n'closing opened\\n'\n'files, etc.\\n'\n'\\n'\n'For more information on context managers, see Context '\n'Manager Types.\\n'\n'\\n'\n'object.__enter__(self)\\n'\n'\\n'\n' Enter the runtime context related to this object. The '\n'\"with\"\\n'\n' statement will bind this method\u2019s return value to the '\n'target(s)\\n'\n' specified in the \"as\" clause of the statement, if '\n'any.\\n'\n'\\n'\n'object.__exit__(self, exc_type, exc_value, traceback)\\n'\n'\\n'\n' Exit the runtime context related to this object. The '\n'parameters\\n'\n' describe the exception that caused the context to be '\n'exited. If the\\n'\n' context was exited without an exception, all three '\n'arguments will\\n'\n' be \"None\".\\n'\n'\\n'\n' If an exception is supplied, and the method wishes to '\n'suppress the\\n'\n' exception (i.e., prevent it from being propagated), '\n'it should\\n'\n' return a true value. 
Otherwise, the exception will be '\n'processed\\n'\n' normally upon exit from this method.\\n'\n'\\n'\n' Note that \"__exit__()\" methods should not reraise the '\n'passed-in\\n'\n' exception; this is the caller\u2019s responsibility.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 343** - The \u201cwith\u201d statement\\n'\n' The specification, background, and examples for the '\n'Python \"with\"\\n'\n' statement.\\n',\n'continue':'The \"continue\" statement\\n'\n'************************\\n'\n'\\n'\n' continue_stmt ::= \"continue\"\\n'\n'\\n'\n'\"continue\" may only occur syntactically nested in a \"for\" or '\n'\"while\"\\n'\n'loop, but not nested in a function or class definition within '\n'that\\n'\n'loop. It continues with the next cycle of the nearest enclosing '\n'loop.\\n'\n'\\n'\n'When \"continue\" passes control out of a \"try\" statement with a\\n'\n'\"finally\" clause, that \"finally\" clause is executed before '\n'really\\n'\n'starting the next loop cycle.\\n',\n'conversions':'Arithmetic conversions\\n'\n'**********************\\n'\n'\\n'\n'When a description of an arithmetic operator below uses the '\n'phrase\\n'\n'\u201cthe numeric arguments are converted to a common type\u201d, this '\n'means\\n'\n'that the operator implementation for built-in types works as '\n'follows:\\n'\n'\\n'\n'* If either argument is a complex number, the other is '\n'converted to\\n'\n' complex;\\n'\n'\\n'\n'* otherwise, if either argument is a floating point number, '\n'the other\\n'\n' is converted to floating point;\\n'\n'\\n'\n'* otherwise, both must be integers and no conversion is '\n'necessary.\\n'\n'\\n'\n'Some additional rules apply for certain operators (e.g., a '\n'string as a\\n'\n'left argument to the \u2018%\u2019 operator). Extensions must define '\n'their own\\n'\n'conversion behavior.\\n',\n'customization':'Basic customization\\n'\n'*******************\\n'\n'\\n'\n'object.__new__(cls[, ...])\\n'\n'\\n'\n' Called to create a new instance of class *cls*. '\n'\"__new__()\" is a\\n'\n' static method (special-cased so you need not declare it '\n'as such)\\n'\n' that takes the class of which an instance was requested '\n'as its\\n'\n' first argument. The remaining arguments are those '\n'passed to the\\n'\n' object constructor expression (the call to the class). '\n'The return\\n'\n' value of \"__new__()\" should be the new object instance '\n'(usually an\\n'\n' instance of *cls*).\\n'\n'\\n'\n' Typical implementations create a new instance of the '\n'class by\\n'\n' invoking the superclass\u2019s \"__new__()\" method using\\n'\n' \"super().__new__(cls[, ...])\" with appropriate arguments '\n'and then\\n'\n' modifying the newly-created instance as necessary before '\n'returning\\n'\n' it.\\n'\n'\\n'\n' If \"__new__()\" is invoked during object construction and '\n'it returns\\n'\n' an instance or subclass of *cls*, then the new '\n'instance\u2019s\\n'\n' \"__init__()\" method will be invoked like '\n'\"__init__(self[, ...])\",\\n'\n' where *self* is the new instance and the remaining '\n'arguments are\\n'\n' the same as were passed to the object constructor.\\n'\n'\\n'\n' If \"__new__()\" does not return an instance of *cls*, '\n'then the new\\n'\n' instance\u2019s \"__init__()\" method will not be invoked.\\n'\n'\\n'\n' \"__new__()\" is intended mainly to allow subclasses of '\n'immutable\\n'\n' types (like int, str, or tuple) to customize instance '\n'creation. 
It\\n'\n' is also commonly overridden in custom metaclasses in '\n'order to\\n'\n' customize class creation.\\n'\n'\\n'\n'object.__init__(self[, ...])\\n'\n'\\n'\n' Called after the instance has been created (by '\n'\"__new__()\"), but\\n'\n' before it is returned to the caller. The arguments are '\n'those\\n'\n' passed to the class constructor expression. If a base '\n'class has an\\n'\n' \"__init__()\" method, the derived class\u2019s \"__init__()\" '\n'method, if\\n'\n' any, must explicitly call it to ensure proper '\n'initialization of the\\n'\n' base class part of the instance; for example:\\n'\n' \"super().__init__([args...])\".\\n'\n'\\n'\n' Because \"__new__()\" and \"__init__()\" work together in '\n'constructing\\n'\n' objects (\"__new__()\" to create it, and \"__init__()\" to '\n'customize\\n'\n' it), no non-\"None\" value may be returned by '\n'\"__init__()\"; doing so\\n'\n' will cause a \"TypeError\" to be raised at runtime.\\n'\n'\\n'\n'object.__del__(self)\\n'\n'\\n'\n' Called when the instance is about to be destroyed. This '\n'is also\\n'\n' called a finalizer or (improperly) a destructor. If a '\n'base class\\n'\n' has a \"__del__()\" method, the derived class\u2019s '\n'\"__del__()\" method,\\n'\n' if any, must explicitly call it to ensure proper '\n'deletion of the\\n'\n' base class part of the instance.\\n'\n'\\n'\n' It is possible (though not recommended!) for the '\n'\"__del__()\" method\\n'\n' to postpone destruction of the instance by creating a '\n'new reference\\n'\n' to it. This is called object *resurrection*. It is\\n'\n' implementation-dependent whether \"__del__()\" is called a '\n'second\\n'\n' time when a resurrected object is about to be destroyed; '\n'the\\n'\n' current *CPython* implementation only calls it once.\\n'\n'\\n'\n' It is not guaranteed that \"__del__()\" methods are called '\n'for\\n'\n' objects that still exist when the interpreter exits.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' \"del x\" doesn\u2019t directly call \"x.__del__()\" \u2014 the '\n'former\\n'\n' decrements the reference count for \"x\" by one, and the '\n'latter is\\n'\n' only called when \"x\"\u2019s reference count reaches zero.\\n'\n'\\n'\n' **CPython implementation detail:** It is possible for a '\n'reference\\n'\n' cycle to prevent the reference count of an object from '\n'going to\\n'\n' zero. In this case, the cycle will be later detected '\n'and deleted\\n'\n' by the *cyclic garbage collector*. A common cause of '\n'reference\\n'\n' cycles is when an exception has been caught in a local '\n'variable.\\n'\n' The frame\u2019s locals then reference the exception, which '\n'references\\n'\n' its own traceback, which references the locals of all '\n'frames caught\\n'\n' in the traceback.\\n'\n'\\n'\n' See also: Documentation for the \"gc\" module.\\n'\n'\\n'\n' Warning:\\n'\n'\\n'\n' Due to the precarious circumstances under which '\n'\"__del__()\"\\n'\n' methods are invoked, exceptions that occur during '\n'their execution\\n'\n' are ignored, and a warning is printed to \"sys.stderr\" '\n'instead.\\n'\n' In particular:\\n'\n'\\n'\n' * \"__del__()\" can be invoked when arbitrary code is '\n'being\\n'\n' executed, including from any arbitrary thread. If '\n'\"__del__()\"\\n'\n' needs to take a lock or invoke any other blocking '\n'resource, it\\n'\n' may deadlock as the resource may already be taken by '\n'the code\\n'\n' that gets interrupted to execute \"__del__()\".\\n'\n'\\n'\n' * \"__del__()\" can be executed during interpreter '\n'shutdown. 
As a\\n'\n' consequence, the global variables it needs to access '\n'(including\\n'\n' other modules) may already have been deleted or set '\n'to \"None\".\\n'\n' Python guarantees that globals whose name begins '\n'with a single\\n'\n' underscore are deleted from their module before '\n'other globals\\n'\n' are deleted; if no other references to such globals '\n'exist, this\\n'\n' may help in assuring that imported modules are still '\n'available\\n'\n' at the time when the \"__del__()\" method is called.\\n'\n'\\n'\n'object.__repr__(self)\\n'\n'\\n'\n' Called by the \"repr()\" built-in function to compute the '\n'\u201cofficial\u201d\\n'\n' string representation of an object. If at all possible, '\n'this\\n'\n' should look like a valid Python expression that could be '\n'used to\\n'\n' recreate an object with the same value (given an '\n'appropriate\\n'\n' environment). If this is not possible, a string of the '\n'form\\n'\n' \"<...some useful description...>\" should be returned. '\n'The return\\n'\n' value must be a string object. If a class defines '\n'\"__repr__()\" but\\n'\n' not \"__str__()\", then \"__repr__()\" is also used when an '\n'\u201cinformal\u201d\\n'\n' string representation of instances of that class is '\n'required.\\n'\n'\\n'\n' This is typically used for debugging, so it is important '\n'that the\\n'\n' representation is information-rich and unambiguous.\\n'\n'\\n'\n'object.__str__(self)\\n'\n'\\n'\n' Called by \"str(object)\" and the built-in functions '\n'\"format()\" and\\n'\n' \"print()\" to compute the \u201cinformal\u201d or nicely printable '\n'string\\n'\n' representation of an object. The return value must be a '\n'string\\n'\n' object.\\n'\n'\\n'\n' This method differs from \"object.__repr__()\" in that '\n'there is no\\n'\n' expectation that \"__str__()\" return a valid Python '\n'expression: a\\n'\n' more convenient or concise representation can be used.\\n'\n'\\n'\n' The default implementation defined by the built-in type '\n'\"object\"\\n'\n' calls \"object.__repr__()\".\\n'\n'\\n'\n'object.__bytes__(self)\\n'\n'\\n'\n' Called by bytes to compute a byte-string representation '\n'of an\\n'\n' object. This should return a \"bytes\" object.\\n'\n'\\n'\n'object.__format__(self, format_spec)\\n'\n'\\n'\n' Called by the \"format()\" built-in function, and by '\n'extension,\\n'\n' evaluation of formatted string literals and the '\n'\"str.format()\"\\n'\n' method, to produce a \u201cformatted\u201d string representation '\n'of an\\n'\n' object. The *format_spec* argument is a string that '\n'contains a\\n'\n' description of the formatting options desired. 
The '\n'interpretation\\n'\n' of the *format_spec* argument is up to the type '\n'implementing\\n'\n' \"__format__()\", however most classes will either '\n'delegate\\n'\n' formatting to one of the built-in types, or use a '\n'similar\\n'\n' formatting option syntax.\\n'\n'\\n'\n' See Format Specification Mini-Language for a description '\n'of the\\n'\n' standard formatting syntax.\\n'\n'\\n'\n' The return value must be a string object.\\n'\n'\\n'\n' Changed in version 3.4: The __format__ method of '\n'\"object\" itself\\n'\n' raises a \"TypeError\" if passed any non-empty string.\\n'\n'\\n'\n' Changed in version 3.7: \"object.__format__(x, \\'\\')\" is '\n'now\\n'\n' equivalent to \"str(x)\" rather than \"format(str(x), '\n'\\'\\')\".\\n'\n'\\n'\n'object.__lt__(self, other)\\n'\n'object.__le__(self, other)\\n'\n'object.__eq__(self, other)\\n'\n'object.__ne__(self, other)\\n'\n'object.__gt__(self, other)\\n'\n'object.__ge__(self, other)\\n'\n'\\n'\n' These are the so-called \u201crich comparison\u201d methods. The\\n'\n' correspondence between operator symbols and method names '\n'is as\\n'\n' follows: \"x<y\" calls \"x.__lt__(y)\", \"x<=y\" calls \"x.__le__(y)\",\\n'\n' \"x==y\" calls \"x.__eq__(y)\", \"x!=y\" calls \"x.__ne__(y)\", \"x>y\" calls\\n'\n' \"x.__gt__(y)\", and \"x>=y\" calls \"x.__ge__(y)\".\\n'\n'\\n'\n' A rich comparison method may return the singleton '\n'\"NotImplemented\"\\n'\n' if it does not implement the operation for a given pair '\n'of\\n'\n' arguments. By convention, \"False\" and \"True\" are '\n'returned for a\\n'\n' successful comparison. However, these methods can return '\n'any value,\\n'\n' so if the comparison operator is used in a Boolean '\n'context (e.g.,\\n'\n' in the condition of an \"if\" statement), Python will call '\n'\"bool()\"\\n'\n' on the value to determine if the result is true or '\n'false.\\n'\n'\\n'\n' By default, \"object\" implements \"__eq__()\" by using '\n'\"is\", returning\\n'\n' \"NotImplemented\" in the case of a false comparison: '\n'\"True if x is y\\n'\n' else NotImplemented\". For \"__ne__()\", by default it '\n'delegates to\\n'\n' \"__eq__()\" and inverts the result unless it is '\n'\"NotImplemented\".\\n'\n' There are no other implied relationships among the '\n'comparison\\n'\n' operators or default implementations; for example, the '\n'truth of\\n'\n' \"(x<y or x==y)\" does not imply \"x<=y\". To automatically generate\\n'\n' ordering operations from a single root operation, see\\n'\n' \"functools.total_ordering()\".\\n'\n'\\n'\n' See the paragraph on \"__hash__()\" for some important notes on\\n'\n' creating *hashable* objects which support custom comparison\\n'\n' operations and are usable as dictionary keys.\\n'\n'\\n'\n' There is no swapped-argument version of these methods (to be used\\n'\n' when the left argument does not support the operation but the right\\n'\n' argument does); rather, \"__lt__()\" and \"__gt__()\" are each\\n'\n' other\u2019s reflection, and so are \"__le__()\" and \"__ge__()\". The\\n'\n' \"__eq__()\" and \"__ne__()\" methods are their own reflection. If the\\n'\n' operands are of different types, and right operand\u2019s type is a\\n'\n' direct or indirect subclass of the left operand\u2019s type, the\\n'\n' reflected method of the right operand has priority, otherwise the\\n'\n' left operand\u2019s method has priority. Virtual subclassing is not\\n'\n' considered.\\n'\n'\\n'\n'object.__hash__(self)\\n'\n'\\n'\n' Called by built-in function \"hash()\" and for operations on members\\n'\n' of hashed collections including \"set\", \"frozenset\", and \"dict\".\\n'\n' The \"__hash__()\" method should return an integer. The only required\\n'\n' property is that objects which compare equal have the same hash\\n'\n' value; it is advised to mix together the hash values of the\\n'\n' components of the object that also play a part in comparison of\\n'\n' objects by packing them into a tuple and hashing the tuple.\\n'\n' Example:\\n'\n'\\n'\n' def __hash__(self):\\n'\n' return hash((self.name, self.nick, self.color))\\n'\n'\\n'\n' If a class that overrides \"__eq__()\" needs to retain the hash\\n'\n' implementation from a parent class, the interpreter must be told\\n'\n' this explicitly by setting \"__hash__ = <ParentClass>.__hash__\".\\n'\n'\\n'\n' If a class that does not override \"__eq__()\" wishes to '\n'suppress\\n'\n' hash support, it should include \"__hash__ = None\" in the '\n'class\\n'\n' definition. A class which defines its own \"__hash__()\" '\n'that\\n'\n' explicitly raises a \"TypeError\" would be incorrectly '\n'identified as\\n'\n' hashable by an \"isinstance(obj, '\n'collections.abc.Hashable)\" call.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' By default, the \"__hash__()\" values of str and bytes '\n'objects are\\n'\n' \u201csalted\u201d with an unpredictable random value. Although '\n'they\\n'\n' remain constant within an individual Python process, '\n'they are not\\n'\n' predictable between repeated invocations of '\n'Python.This is\\n'\n' intended to provide protection against a '\n'denial-of-service caused\\n'\n' by carefully-chosen inputs that exploit the worst '\n'case\\n'\n' performance of a dict insertion, O(n^2) complexity. 
'\n'See\\n'\n' http://www.ocert.org/advisories/ocert-2011-003.html '\n'for\\n'\n' details.Changing hash values affects the iteration '\n'order of sets.\\n'\n' Python has never made guarantees about this ordering '\n'(and it\\n'\n' typically varies between 32-bit and 64-bit builds).See '\n'also\\n'\n' \"PYTHONHASHSEED\".\\n'\n'\\n'\n' Changed in version 3.3: Hash randomization is enabled by '\n'default.\\n'\n'\\n'\n'object.__bool__(self)\\n'\n'\\n'\n' Called to implement truth value testing and the built-in '\n'operation\\n'\n' \"bool()\"; should return \"False\" or \"True\". When this '\n'method is not\\n'\n' defined, \"__len__()\" is called, if it is defined, and '\n'the object is\\n'\n' considered true if its result is nonzero. If a class '\n'defines\\n'\n' neither \"__len__()\" nor \"__bool__()\", all its instances '\n'are\\n'\n' considered true.\\n',\n'debugger':'\"pdb\" \u2014 The Python Debugger\\n'\n'***************************\\n'\n'\\n'\n'**Source code:** Lib/pdb.py\\n'\n'\\n'\n'======================================================================\\n'\n'\\n'\n'The module \"pdb\" defines an interactive source code debugger '\n'for\\n'\n'Python programs. It supports setting (conditional) breakpoints '\n'and\\n'\n'single stepping at the source line level, inspection of stack '\n'frames,\\n'\n'source code listing, and evaluation of arbitrary Python code in '\n'the\\n'\n'context of any stack frame. It also supports post-mortem '\n'debugging\\n'\n'and can be called under program control.\\n'\n'\\n'\n'The debugger is extensible \u2013 it is actually defined as the '\n'class\\n'\n'\"Pdb\". This is currently undocumented but easily understood by '\n'reading\\n'\n'the source. The extension interface uses the modules \"bdb\" and '\n'\"cmd\".\\n'\n'\\n'\n'The debugger\u2019s prompt is \"(Pdb)\". Typical usage to run a program '\n'under\\n'\n'control of the debugger is:\\n'\n'\\n'\n' >>> import pdb\\n'\n' >>> import mymodule\\n'\n\" >>> pdb.run('mymodule.test()')\\n\"\n' > (0)?()\\n'\n' (Pdb) continue\\n'\n' > (1)?()\\n'\n' (Pdb) continue\\n'\n\" NameError: 'spam'\\n\"\n' > (1)?()\\n'\n' (Pdb)\\n'\n'\\n'\n'Changed in version 3.3: Tab-completion via the \"readline\" module '\n'is\\n'\n'available for commands and command arguments, e.g. the current '\n'global\\n'\n'and local names are offered as arguments of the \"p\" command.\\n'\n'\\n'\n'\"pdb.py\" can also be invoked as a script to debug other '\n'scripts. For\\n'\n'example:\\n'\n'\\n'\n' python3 -m pdb myscript.py\\n'\n'\\n'\n'When invoked as a script, pdb will automatically enter '\n'post-mortem\\n'\n'debugging if the program being debugged exits abnormally. After '\n'post-\\n'\n'mortem debugging (or after normal exit of the program), pdb '\n'will\\n'\n'restart the program. Automatic restarting preserves pdb\u2019s state '\n'(such\\n'\n'as breakpoints) and in most cases is more useful than quitting '\n'the\\n'\n'debugger upon program\u2019s exit.\\n'\n'\\n'\n'New in version 3.2: \"pdb.py\" now accepts a \"-c\" option that '\n'executes\\n'\n'commands as if given in a \".pdbrc\" file, see Debugger Commands.\\n'\n'\\n'\n'New in version 3.7: \"pdb.py\" now accepts a \"-m\" option that '\n'execute\\n'\n'modules similar to the way \"python3 -m\" does. 
As with a script, '\n'the\\n'\n'debugger will pause execution just before the first line of the\\n'\n'module.\\n'\n'\\n'\n'The typical usage to break into the debugger from a running '\n'program is\\n'\n'to insert\\n'\n'\\n'\n' import pdb; pdb.set_trace()\\n'\n'\\n'\n'at the location you want to break into the debugger. You can '\n'then\\n'\n'step through the code following this statement, and continue '\n'running\\n'\n'without the debugger using the \"continue\" command.\\n'\n'\\n'\n'New in version 3.7: The built-in \"breakpoint()\", when called '\n'with\\n'\n'defaults, can be used instead of \"import pdb; pdb.set_trace()\".\\n'\n'\\n'\n'The typical usage to inspect a crashed program is:\\n'\n'\\n'\n' >>> import pdb\\n'\n' >>> import mymodule\\n'\n' >>> mymodule.test()\\n'\n' Traceback (most recent call last):\\n'\n' File \"\", line 1, in \\n'\n' File \"./mymodule.py\", line 4, in test\\n'\n' test2()\\n'\n' File \"./mymodule.py\", line 3, in test2\\n'\n' print(spam)\\n'\n' NameError: spam\\n'\n' >>> pdb.pm()\\n'\n' > ./mymodule.py(3)test2()\\n'\n' -> print(spam)\\n'\n' (Pdb)\\n'\n'\\n'\n'The module defines the following functions; each enters the '\n'debugger\\n'\n'in a slightly different way:\\n'\n'\\n'\n'pdb.run(statement, globals=None, locals=None)\\n'\n'\\n'\n' Execute the *statement* (given as a string or a code object) '\n'under\\n'\n' debugger control. The debugger prompt appears before any '\n'code is\\n'\n' executed; you can set breakpoints and type \"continue\", or you '\n'can\\n'\n' step through the statement using \"step\" or \"next\" (all these\\n'\n' commands are explained below). The optional *globals* and '\n'*locals*\\n'\n' arguments specify the environment in which the code is '\n'executed; by\\n'\n' default the dictionary of the module \"__main__\" is used. '\n'(See the\\n'\n' explanation of the built-in \"exec()\" or \"eval()\" functions.)\\n'\n'\\n'\n'pdb.runeval(expression, globals=None, locals=None)\\n'\n'\\n'\n' Evaluate the *expression* (given as a string or a code '\n'object)\\n'\n' under debugger control. When \"runeval()\" returns, it returns '\n'the\\n'\n' value of the expression. Otherwise this function is similar '\n'to\\n'\n' \"run()\".\\n'\n'\\n'\n'pdb.runcall(function, *args, **kwds)\\n'\n'\\n'\n' Call the *function* (a function or method object, not a '\n'string)\\n'\n' with the given arguments. When \"runcall()\" returns, it '\n'returns\\n'\n' whatever the function call returned. The debugger prompt '\n'appears\\n'\n' as soon as the function is entered.\\n'\n'\\n'\n'pdb.set_trace(*, header=None)\\n'\n'\\n'\n' Enter the debugger at the calling stack frame. This is '\n'useful to\\n'\n' hard-code a breakpoint at a given point in a program, even if '\n'the\\n'\n' code is not otherwise being debugged (e.g. when an assertion\\n'\n' fails). If given, *header* is printed to the console just '\n'before\\n'\n' debugging begins.\\n'\n'\\n'\n' Changed in version 3.7: The keyword-only argument *header*.\\n'\n'\\n'\n'pdb.post_mortem(traceback=None)\\n'\n'\\n'\n' Enter post-mortem debugging of the given *traceback* object. 
'\n'If no\\n'\n' *traceback* is given, it uses the one of the exception that '\n'is\\n'\n' currently being handled (an exception must be being handled '\n'if the\\n'\n' default is to be used).\\n'\n'\\n'\n'pdb.pm()\\n'\n'\\n'\n' Enter post-mortem debugging of the traceback found in\\n'\n' \"sys.last_traceback\".\\n'\n'\\n'\n'The \"run*\" functions and \"set_trace()\" are aliases for '\n'instantiating\\n'\n'the \"Pdb\" class and calling the method of the same name. If you '\n'want\\n'\n'to access further features, you have to do this yourself:\\n'\n'\\n'\n\"class pdb.Pdb(completekey='tab', stdin=None, stdout=None, \"\n'skip=None, nosigint=False, readrc=True)\\n'\n'\\n'\n' \"Pdb\" is the debugger class.\\n'\n'\\n'\n' The *completekey*, *stdin* and *stdout* arguments are passed '\n'to the\\n'\n' underlying \"cmd.Cmd\" class; see the description there.\\n'\n'\\n'\n' The *skip* argument, if given, must be an iterable of '\n'glob-style\\n'\n' module name patterns. The debugger will not step into frames '\n'that\\n'\n' originate in a module that matches one of these patterns. '\n'[1]\\n'\n'\\n'\n' By default, Pdb sets a handler for the SIGINT signal (which '\n'is sent\\n'\n' when the user presses \"Ctrl-C\" on the console) when you give '\n'a\\n'\n' \"continue\" command. This allows you to break into the '\n'debugger\\n'\n' again by pressing \"Ctrl-C\". If you want Pdb not to touch '\n'the\\n'\n' SIGINT handler, set *nosigint* to true.\\n'\n'\\n'\n' The *readrc* argument defaults to true and controls whether '\n'Pdb\\n'\n' will load .pdbrc files from the filesystem.\\n'\n'\\n'\n' Example call to enable tracing with *skip*:\\n'\n'\\n'\n\" import pdb; pdb.Pdb(skip=['django.*']).set_trace()\\n\"\n'\\n'\n' Raises an auditing event \"pdb.Pdb\" with no arguments.\\n'\n'\\n'\n' New in version 3.1: The *skip* argument.\\n'\n'\\n'\n' New in version 3.2: The *nosigint* argument. Previously, a '\n'SIGINT\\n'\n' handler was never set by Pdb.\\n'\n'\\n'\n' Changed in version 3.6: The *readrc* argument.\\n'\n'\\n'\n' run(statement, globals=None, locals=None)\\n'\n' runeval(expression, globals=None, locals=None)\\n'\n' runcall(function, *args, **kwds)\\n'\n' set_trace()\\n'\n'\\n'\n' See the documentation for the functions explained above.\\n'\n'\\n'\n'\\n'\n'Debugger Commands\\n'\n'=================\\n'\n'\\n'\n'The commands recognized by the debugger are listed below. Most\\n'\n'commands can be abbreviated to one or two letters as indicated; '\n'e.g.\\n'\n'\"h(elp)\" means that either \"h\" or \"help\" can be used to enter '\n'the help\\n'\n'command (but not \"he\" or \"hel\", nor \"H\" or \"Help\" or \"HELP\").\\n'\n'Arguments to commands must be separated by whitespace (spaces '\n'or\\n'\n'tabs). Optional arguments are enclosed in square brackets '\n'(\"[]\") in\\n'\n'the command syntax; the square brackets must not be typed.\\n'\n'Alternatives in the command syntax are separated by a vertical '\n'bar\\n'\n'(\"|\").\\n'\n'\\n'\n'Entering a blank line repeats the last command entered. '\n'Exception: if\\n'\n'the last command was a \"list\" command, the next 11 lines are '\n'listed.\\n'\n'\\n'\n'Commands that the debugger doesn\u2019t recognize are assumed to be '\n'Python\\n'\n'statements and are executed in the context of the program being\\n'\n'debugged. Python statements can also be prefixed with an '\n'exclamation\\n'\n'point (\"!\"). 
This is a powerful way to inspect the program '\n'being\\n'\n'debugged; it is even possible to change a variable or call a '\n'function.\\n'\n'When an exception occurs in such a statement, the exception name '\n'is\\n'\n'printed but the debugger\u2019s state is not changed.\\n'\n'\\n'\n'The debugger supports aliases. Aliases can have parameters '\n'which\\n'\n'allows one a certain level of adaptability to the context under\\n'\n'examination.\\n'\n'\\n'\n'Multiple commands may be entered on a single line, separated by '\n'\";;\".\\n'\n'(A single \";\" is not used as it is the separator for multiple '\n'commands\\n'\n'in a line that is passed to the Python parser.) No intelligence '\n'is\\n'\n'applied to separating the commands; the input is split at the '\n'first\\n'\n'\";;\" pair, even if it is in the middle of a quoted string.\\n'\n'\\n'\n'If a file \".pdbrc\" exists in the user\u2019s home directory or in '\n'the\\n'\n'current directory, it is read in and executed as if it had been '\n'typed\\n'\n'at the debugger prompt. This is particularly useful for '\n'aliases. If\\n'\n'both files exist, the one in the home directory is read first '\n'and\\n'\n'aliases defined there can be overridden by the local file.\\n'\n'\\n'\n'Changed in version 3.2: \".pdbrc\" can now contain commands that\\n'\n'continue debugging, such as \"continue\" or \"next\". Previously, '\n'these\\n'\n'commands had no effect.\\n'\n'\\n'\n'h(elp) [command]\\n'\n'\\n'\n' Without argument, print the list of available commands. With '\n'a\\n'\n' *command* as argument, print help about that command. \"help '\n'pdb\"\\n'\n' displays the full documentation (the docstring of the \"pdb\"\\n'\n' module). Since the *command* argument must be an identifier, '\n'\"help\\n'\n' exec\" must be entered to get help on the \"!\" command.\\n'\n'\\n'\n'w(here)\\n'\n'\\n'\n' Print a stack trace, with the most recent frame at the '\n'bottom. An\\n'\n' arrow indicates the current frame, which determines the '\n'context of\\n'\n' most commands.\\n'\n'\\n'\n'd(own) [count]\\n'\n'\\n'\n' Move the current frame *count* (default one) levels down in '\n'the\\n'\n' stack trace (to a newer frame).\\n'\n'\\n'\n'u(p) [count]\\n'\n'\\n'\n' Move the current frame *count* (default one) levels up in the '\n'stack\\n'\n' trace (to an older frame).\\n'\n'\\n'\n'b(reak) [([filename:]lineno | function) [, condition]]\\n'\n'\\n'\n' With a *lineno* argument, set a break there in the current '\n'file.\\n'\n' With a *function* argument, set a break at the first '\n'executable\\n'\n' statement within that function. The line number may be '\n'prefixed\\n'\n' with a filename and a colon, to specify a breakpoint in '\n'another\\n'\n' file (probably one that hasn\u2019t been loaded yet). The file '\n'is\\n'\n' searched on \"sys.path\". Note that each breakpoint is '\n'assigned a\\n'\n' number to which all the other breakpoint commands refer.\\n'\n'\\n'\n' If a second argument is present, it is an expression which '\n'must\\n'\n' evaluate to true before the breakpoint is honored.\\n'\n'\\n'\n' Without argument, list all breaks, including for each '\n'breakpoint,\\n'\n' the number of times that breakpoint has been hit, the '\n'current\\n'\n' ignore count, and the associated condition if any.\\n'\n'\\n'\n'tbreak [([filename:]lineno | function) [, condition]]\\n'\n'\\n'\n' Temporary breakpoint, which is removed automatically when it '\n'is\\n'\n' first hit. 
The arguments are the same as for \"break\".\\n'\n'\\n'\n'cl(ear) [filename:lineno | bpnumber ...]\\n'\n'\\n'\n' With a *filename:lineno* argument, clear all the breakpoints '\n'at\\n'\n' this line. With a space separated list of breakpoint numbers, '\n'clear\\n'\n' those breakpoints. Without argument, clear all breaks (but '\n'first\\n'\n' ask confirmation).\\n'\n'\\n'\n'disable [bpnumber ...]\\n'\n'\\n'\n' Disable the breakpoints given as a space separated list of\\n'\n' breakpoint numbers. Disabling a breakpoint means it cannot '\n'cause\\n'\n' the program to stop execution, but unlike clearing a '\n'breakpoint, it\\n'\n' remains in the list of breakpoints and can be (re-)enabled.\\n'\n'\\n'\n'enable [bpnumber ...]\\n'\n'\\n'\n' Enable the breakpoints specified.\\n'\n'\\n'\n'ignore bpnumber [count]\\n'\n'\\n'\n' Set the ignore count for the given breakpoint number. If '\n'count is\\n'\n' omitted, the ignore count is set to 0. A breakpoint becomes '\n'active\\n'\n' when the ignore count is zero. When non-zero, the count is\\n'\n' decremented each time the breakpoint is reached and the '\n'breakpoint\\n'\n' is not disabled and any associated condition evaluates to '\n'true.\\n'\n'\\n'\n'condition bpnumber [condition]\\n'\n'\\n'\n' Set a new *condition* for the breakpoint, an expression which '\n'must\\n'\n' evaluate to true before the breakpoint is honored. If '\n'*condition*\\n'\n' is absent, any existing condition is removed; i.e., the '\n'breakpoint\\n'\n' is made unconditional.\\n'\n'\\n'\n'commands [bpnumber]\\n'\n'\\n'\n' Specify a list of commands for breakpoint number *bpnumber*. '\n'The\\n'\n' commands themselves appear on the following lines. Type a '\n'line\\n'\n' containing just \"end\" to terminate the commands. An example:\\n'\n'\\n'\n' (Pdb) commands 1\\n'\n' (com) p some_variable\\n'\n' (com) end\\n'\n' (Pdb)\\n'\n'\\n'\n' To remove all commands from a breakpoint, type \"commands\" '\n'and\\n'\n' follow it immediately with \"end\"; that is, give no commands.\\n'\n'\\n'\n' With no *bpnumber* argument, \"commands\" refers to the last\\n'\n' breakpoint set.\\n'\n'\\n'\n' You can use breakpoint commands to start your program up '\n'again.\\n'\n' Simply use the \"continue\" command, or \"step\", or any other '\n'command\\n'\n' that resumes execution.\\n'\n'\\n'\n' Specifying any command resuming execution (currently '\n'\"continue\",\\n'\n' \"step\", \"next\", \"return\", \"jump\", \"quit\" and their '\n'abbreviations)\\n'\n' terminates the command list (as if that command was '\n'immediately\\n'\n' followed by end). This is because any time you resume '\n'execution\\n'\n' (even with a simple next or step), you may encounter another\\n'\n' breakpoint\u2014which could have its own command list, leading to\\n'\n' ambiguities about which list to execute.\\n'\n'\\n'\n' If you use the \u2018silent\u2019 command in the command list, the '\n'usual\\n'\n' message about stopping at a breakpoint is not printed. This '\n'may be\\n'\n' desirable for breakpoints that are to print a specific '\n'message and\\n'\n' then continue. If none of the other commands print anything, '\n'you\\n'\n' see no sign that the breakpoint was reached.\\n'\n'\\n'\n's(tep)\\n'\n'\\n'\n' Execute the current line, stop at the first possible '\n'occasion\\n'\n' (either in a function that is called or on the next line in '\n'the\\n'\n' current function).\\n'\n'\\n'\n'n(ext)\\n'\n'\\n'\n' Continue execution until the next line in the current '\n'function is\\n'\n' reached or it returns. 
(The difference between \"next\" and '\n'\"step\"\\n'\n' is that \"step\" stops inside a called function, while \"next\"\\n'\n' executes called functions at (nearly) full speed, only '\n'stopping at\\n'\n' the next line in the current function.)\\n'\n'\\n'\n'unt(il) [lineno]\\n'\n'\\n'\n' Without argument, continue execution until the line with a '\n'number\\n'\n' greater than the current one is reached.\\n'\n'\\n'\n' With a line number, continue execution until a line with a '\n'number\\n'\n' greater or equal to that is reached. In both cases, also '\n'stop when\\n'\n' the current frame returns.\\n'\n'\\n'\n' Changed in version 3.2: Allow giving an explicit line '\n'number.\\n'\n'\\n'\n'r(eturn)\\n'\n'\\n'\n' Continue execution until the current function returns.\\n'\n'\\n'\n'c(ont(inue))\\n'\n'\\n'\n' Continue execution, only stop when a breakpoint is '\n'encountered.\\n'\n'\\n'\n'j(ump) lineno\\n'\n'\\n'\n' Set the next line that will be executed. Only available in '\n'the\\n'\n' bottom-most frame. This lets you jump back and execute code '\n'again,\\n'\n' or jump forward to skip code that you don\u2019t want to run.\\n'\n'\\n'\n' It should be noted that not all jumps are allowed \u2013 for '\n'instance it\\n'\n' is not possible to jump into the middle of a \"for\" loop or '\n'out of a\\n'\n' \"finally\" clause.\\n'\n'\\n'\n'l(ist) [first[, last]]\\n'\n'\\n'\n' List source code for the current file. Without arguments, '\n'list 11\\n'\n' lines around the current line or continue the previous '\n'listing.\\n'\n' With \".\" as argument, list 11 lines around the current line. '\n'With\\n'\n' one argument, list 11 lines around at that line. With two\\n'\n' arguments, list the given range; if the second argument is '\n'less\\n'\n' than the first, it is interpreted as a count.\\n'\n'\\n'\n' The current line in the current frame is indicated by \"->\". 
'\n'If an\\n'\n' exception is being debugged, the line where the exception '\n'was\\n'\n' originally raised or propagated is indicated by \">>\", if it '\n'differs\\n'\n' from the current line.\\n'\n'\\n'\n' New in version 3.2: The \">>\" marker.\\n'\n'\\n'\n'll | longlist\\n'\n'\\n'\n' List all source code for the current function or frame.\\n'\n' Interesting lines are marked as for \"list\".\\n'\n'\\n'\n' New in version 3.2.\\n'\n'\\n'\n'a(rgs)\\n'\n'\\n'\n' Print the argument list of the current function.\\n'\n'\\n'\n'p expression\\n'\n'\\n'\n' Evaluate the *expression* in the current context and print '\n'its\\n'\n' value.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' \"print()\" can also be used, but is not a debugger command \u2014 '\n'this\\n'\n' executes the Python \"print()\" function.\\n'\n'\\n'\n'pp expression\\n'\n'\\n'\n' Like the \"p\" command, except the value of the expression is '\n'pretty-\\n'\n' printed using the \"pprint\" module.\\n'\n'\\n'\n'whatis expression\\n'\n'\\n'\n' Print the type of the *expression*.\\n'\n'\\n'\n'source expression\\n'\n'\\n'\n' Try to get source code for the given object and display it.\\n'\n'\\n'\n' New in version 3.2.\\n'\n'\\n'\n'display [expression]\\n'\n'\\n'\n' Display the value of the expression if it changed, each time\\n'\n' execution stops in the current frame.\\n'\n'\\n'\n' Without expression, list all display expressions for the '\n'current\\n'\n' frame.\\n'\n'\\n'\n' New in version 3.2.\\n'\n'\\n'\n'undisplay [expression]\\n'\n'\\n'\n' Do not display the expression any more in the current frame.\\n'\n' Without expression, clear all display expressions for the '\n'current\\n'\n' frame.\\n'\n'\\n'\n' New in version 3.2.\\n'\n'\\n'\n'interact\\n'\n'\\n'\n' Start an interactive interpreter (using the \"code\" module) '\n'whose\\n'\n' global namespace contains all the (global and local) names '\n'found in\\n'\n' the current scope.\\n'\n'\\n'\n' New in version 3.2.\\n'\n'\\n'\n'alias [name [command]]\\n'\n'\\n'\n' Create an alias called *name* that executes *command*. The '\n'command\\n'\n' must *not* be enclosed in quotes. Replaceable parameters can '\n'be\\n'\n' indicated by \"%1\", \"%2\", and so on, while \"%*\" is replaced by '\n'all\\n'\n' the parameters. If no command is given, the current alias '\n'for\\n'\n' *name* is shown. If no arguments are given, all aliases are '\n'listed.\\n'\n'\\n'\n' Aliases may be nested and can contain anything that can be '\n'legally\\n'\n' typed at the pdb prompt. Note that internal pdb commands '\n'*can* be\\n'\n' overridden by aliases. Such a command is then hidden until '\n'the\\n'\n' alias is removed. Aliasing is recursively applied to the '\n'first\\n'\n' word of the command line; all other words in the line are '\n'left\\n'\n' alone.\\n'\n'\\n'\n' As an example, here are two useful aliases (especially when '\n'placed\\n'\n' in the \".pdbrc\" file):\\n'\n'\\n'\n' # Print instance variables (usage \"pi classInst\")\\n'\n' alias pi for k in %1.__dict__.keys(): '\n'print(\"%1.\",k,\"=\",%1.__dict__[k])\\n'\n' # Print instance variables in self\\n'\n' alias ps pi self\\n'\n'\\n'\n'unalias name\\n'\n'\\n'\n' Delete the specified alias.\\n'\n'\\n'\n'! statement\\n'\n'\\n'\n' Execute the (one-line) *statement* in the context of the '\n'current\\n'\n' stack frame. The exclamation point can be omitted unless the '\n'first\\n'\n' word of the statement resembles a debugger command. 
To set '\n'a\\n'\n' global variable, you can prefix the assignment command with '\n'a\\n'\n' \"global\" statement on the same line, e.g.:\\n'\n'\\n'\n\" (Pdb) global list_options; list_options = ['-l']\\n\"\n' (Pdb)\\n'\n'\\n'\n'run [args ...]\\n'\n'restart [args ...]\\n'\n'\\n'\n' Restart the debugged Python program. If an argument is '\n'supplied,\\n'\n' it is split with \"shlex\" and the result is used as the new\\n'\n' \"sys.argv\". History, breakpoints, actions and debugger '\n'options are\\n'\n' preserved. \"restart\" is an alias for \"run\".\\n'\n'\\n'\n'q(uit)\\n'\n'\\n'\n' Quit from the debugger. The program being executed is '\n'aborted.\\n'\n'\\n'\n'debug code\\n'\n'\\n'\n' Enter a recursive debugger that steps through the code '\n'argument\\n'\n' (which is an arbitrary expression or statement to be executed '\n'in\\n'\n' the current environment).\\n'\n'\\n'\n'retval\\n'\n'\\n'\n' Print the return value for the last return of a function.\\n'\n'\\n'\n'-[ Footnotes ]-\\n'\n'\\n'\n'[1] Whether a frame is considered to originate in a certain '\n'module is\\n'\n' determined by the \"__name__\" in the frame globals.\\n',\n'del':'The \"del\" statement\\n'\n'*******************\\n'\n'\\n'\n' del_stmt ::= \"del\" target_list\\n'\n'\\n'\n'Deletion is recursively defined very similar to the way assignment '\n'is\\n'\n'defined. Rather than spelling it out in full details, here are some\\n'\n'hints.\\n'\n'\\n'\n'Deletion of a target list recursively deletes each target, from left\\n'\n'to right.\\n'\n'\\n'\n'Deletion of a name removes the binding of that name from the local '\n'or\\n'\n'global namespace, depending on whether the name occurs in a \"global\"\\n'\n'statement in the same code block. If the name is unbound, a\\n'\n'\"NameError\" exception will be raised.\\n'\n'\\n'\n'Deletion of attribute references, subscriptions and slicings is '\n'passed\\n'\n'to the primary object involved; deletion of a slicing is in general\\n'\n'equivalent to assignment of an empty slice of the right type (but '\n'even\\n'\n'this is determined by the sliced object).\\n'\n'\\n'\n'Changed in version 3.2: Previously it was illegal to delete a name\\n'\n'from the local namespace if it occurs as a free variable in a nested\\n'\n'block.\\n',\n'dict':'Dictionary displays\\n'\n'*******************\\n'\n'\\n'\n'A dictionary display is a possibly empty series of key/datum pairs\\n'\n'enclosed in curly braces:\\n'\n'\\n'\n' dict_display ::= \"{\" [key_datum_list | dict_comprehension] '\n'\"}\"\\n'\n' key_datum_list ::= key_datum (\",\" key_datum)* [\",\"]\\n'\n' key_datum ::= expression \":\" expression | \"**\" or_expr\\n'\n' dict_comprehension ::= expression \":\" expression comp_for\\n'\n'\\n'\n'A dictionary display yields a new dictionary object.\\n'\n'\\n'\n'If a comma-separated sequence of key/datum pairs is given, they are\\n'\n'evaluated from left to right to define the entries of the '\n'dictionary:\\n'\n'each key object is used as a key into the dictionary to store the\\n'\n'corresponding datum. This means that you can specify the same key\\n'\n'multiple times in the key/datum list, and the final dictionary\u2019s '\n'value\\n'\n'for that key will be the last one given.\\n'\n'\\n'\n'A double asterisk \"**\" denotes *dictionary unpacking*. Its operand\\n'\n'must be a *mapping*. Each mapping item is added to the new\\n'\n'dictionary. 
Later values replace values already set by earlier\\n'\n'key/datum pairs and earlier dictionary unpackings.\\n'\n'\\n'\n'New in version 3.5: Unpacking into dictionary displays, originally\\n'\n'proposed by **PEP 448**.\\n'\n'\\n'\n'A dict comprehension, in contrast to list and set comprehensions,\\n'\n'needs two expressions separated with a colon followed by the usual\\n'\n'\u201cfor\u201d and \u201cif\u201d clauses. When the comprehension is run, the '\n'resulting\\n'\n'key and value elements are inserted in the new dictionary in the '\n'order\\n'\n'they are produced.\\n'\n'\\n'\n'Restrictions on the types of the key values are listed earlier in\\n'\n'section The standard type hierarchy. (To summarize, the key type\\n'\n'should be *hashable*, which excludes all mutable objects.) Clashes\\n'\n'between duplicate keys are not detected; the last datum (textually\\n'\n'rightmost in the display) stored for a given key value prevails.\\n'\n'\\n'\n'Changed in version 3.8: Prior to Python 3.8, in dict '\n'comprehensions,\\n'\n'the evaluation order of key and value was not well-defined. In\\n'\n'CPython, the value was evaluated before the key. Starting with '\n'3.8,\\n'\n'the key is evaluated before the value, as proposed by **PEP 572**.\\n',\n'dynamic-features':'Interaction with dynamic features\\n'\n'*********************************\\n'\n'\\n'\n'Name resolution of free variables occurs at runtime, not '\n'at compile\\n'\n'time. This means that the following code will print 42:\\n'\n'\\n'\n' i = 10\\n'\n' def f():\\n'\n' print(i)\\n'\n' i = 42\\n'\n' f()\\n'\n'\\n'\n'The \"eval()\" and \"exec()\" functions do not have access '\n'to the full\\n'\n'environment for resolving names. Names may be resolved '\n'in the local\\n'\n'and global namespaces of the caller. Free variables are '\n'not resolved\\n'\n'in the nearest enclosing namespace, but in the global '\n'namespace. [1]\\n'\n'The \"exec()\" and \"eval()\" functions have optional '\n'arguments to\\n'\n'override the global and local namespace. If only one '\n'namespace is\\n'\n'specified, it is used for both.\\n',\n'else':'The \"if\" statement\\n'\n'******************\\n'\n'\\n'\n'The \"if\" statement is used for conditional execution:\\n'\n'\\n'\n' if_stmt ::= \"if\" assignment_expression \":\" suite\\n'\n' (\"elif\" assignment_expression \":\" suite)*\\n'\n' [\"else\" \":\" suite]\\n'\n'\\n'\n'It selects exactly one of the suites by evaluating the expressions '\n'one\\n'\n'by one until one is found to be true (see section Boolean '\n'operations\\n'\n'for the definition of true and false); then that suite is executed\\n'\n'(and no other part of the \"if\" statement is executed or evaluated).\\n'\n'If all expressions are false, the suite of the \"else\" clause, if\\n'\n'present, is executed.\\n',\n'exceptions':'Exceptions\\n'\n'**********\\n'\n'\\n'\n'Exceptions are a means of breaking out of the normal flow of '\n'control\\n'\n'of a code block in order to handle errors or other '\n'exceptional\\n'\n'conditions. An exception is *raised* at the point where the '\n'error is\\n'\n'detected; it may be *handled* by the surrounding code block or '\n'by any\\n'\n'code block that directly or indirectly invoked the code block '\n'where\\n'\n'the error occurred.\\n'\n'\\n'\n'The Python interpreter raises an exception when it detects a '\n'run-time\\n'\n'error (such as division by zero). A Python program can also\\n'\n'explicitly raise an exception with the \"raise\" statement. 
'\n'Exception\\n'\n'handlers are specified with the \"try\" \u2026 \"except\" statement. '\n'The\\n'\n'\"finally\" clause of such a statement can be used to specify '\n'cleanup\\n'\n'code which does not handle the exception, but is executed '\n'whether an\\n'\n'exception occurred or not in the preceding code.\\n'\n'\\n'\n'Python uses the \u201ctermination\u201d model of error handling: an '\n'exception\\n'\n'handler can find out what happened and continue execution at '\n'an outer\\n'\n'level, but it cannot repair the cause of the error and retry '\n'the\\n'\n'failing operation (except by re-entering the offending piece '\n'of code\\n'\n'from the top).\\n'\n'\\n'\n'When an exception is not handled at all, the interpreter '\n'terminates\\n'\n'execution of the program, or returns to its interactive main '\n'loop. In\\n'\n'either case, it prints a stack traceback, except when the '\n'exception is\\n'\n'\"SystemExit\".\\n'\n'\\n'\n'Exceptions are identified by class instances. The \"except\" '\n'clause is\\n'\n'selected depending on the class of the instance: it must '\n'reference the\\n'\n'class of the instance or a base class thereof. The instance '\n'can be\\n'\n'received by the handler and can carry additional information '\n'about the\\n'\n'exceptional condition.\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' Exception messages are not part of the Python API. Their '\n'contents\\n'\n' may change from one version of Python to the next without '\n'warning\\n'\n' and should not be relied on by code which will run under '\n'multiple\\n'\n' versions of the interpreter.\\n'\n'\\n'\n'See also the description of the \"try\" statement in section The '\n'try\\n'\n'statement and \"raise\" statement in section The raise '\n'statement.\\n'\n'\\n'\n'-[ Footnotes ]-\\n'\n'\\n'\n'[1] This limitation occurs because the code that is executed '\n'by these\\n'\n' operations is not available at the time the module is '\n'compiled.\\n',\n'execmodel':'Execution model\\n'\n'***************\\n'\n'\\n'\n'\\n'\n'Structure of a program\\n'\n'======================\\n'\n'\\n'\n'A Python program is constructed from code blocks. A *block* is '\n'a piece\\n'\n'of Python program text that is executed as a unit. The '\n'following are\\n'\n'blocks: a module, a function body, and a class definition. '\n'Each\\n'\n'command typed interactively is a block. A script file (a file '\n'given\\n'\n'as standard input to the interpreter or specified as a command '\n'line\\n'\n'argument to the interpreter) is a code block. A script command '\n'(a\\n'\n'command specified on the interpreter command line with the '\n'\"-c\"\\n'\n'option) is a code block. A module run as a top level script (as '\n'module\\n'\n'\"__main__\") from the command line using a \"-m\" argument is also '\n'a code\\n'\n'block. The string argument passed to the built-in functions '\n'\"eval()\"\\n'\n'and \"exec()\" is a code block.\\n'\n'\\n'\n'A code block is executed in an *execution frame*. A frame '\n'contains\\n'\n'some administrative information (used for debugging) and '\n'determines\\n'\n'where and how execution continues after the code block\u2019s '\n'execution has\\n'\n'completed.\\n'\n'\\n'\n'\\n'\n'Naming and binding\\n'\n'==================\\n'\n'\\n'\n'\\n'\n'Binding of names\\n'\n'----------------\\n'\n'\\n'\n'*Names* refer to objects. 
Names are introduced by name '\n'binding\\n'\n'operations.\\n'\n'\\n'\n'The following constructs bind names: formal parameters to '\n'functions,\\n'\n'\"import\" statements, class and function definitions (these bind '\n'the\\n'\n'class or function name in the defining block), and targets that '\n'are\\n'\n'identifiers if occurring in an assignment, \"for\" loop header, '\n'or after\\n'\n'\"as\" in a \"with\" statement or \"except\" clause. The \"import\" '\n'statement\\n'\n'of the form \"from ... import *\" binds all names defined in the\\n'\n'imported module, except those beginning with an underscore. '\n'This form\\n'\n'may only be used at the module level.\\n'\n'\\n'\n'A target occurring in a \"del\" statement is also considered '\n'bound for\\n'\n'this purpose (though the actual semantics are to unbind the '\n'name).\\n'\n'\\n'\n'Each assignment or import statement occurs within a block '\n'defined by a\\n'\n'class or function definition or at the module level (the '\n'top-level\\n'\n'code block).\\n'\n'\\n'\n'If a name is bound in a block, it is a local variable of that '\n'block,\\n'\n'unless declared as \"nonlocal\" or \"global\". If a name is bound '\n'at the\\n'\n'module level, it is a global variable. (The variables of the '\n'module\\n'\n'code block are local and global.) If a variable is used in a '\n'code\\n'\n'block but not defined there, it is a *free variable*.\\n'\n'\\n'\n'Each occurrence of a name in the program text refers to the '\n'*binding*\\n'\n'of that name established by the following name resolution '\n'rules.\\n'\n'\\n'\n'\\n'\n'Resolution of names\\n'\n'-------------------\\n'\n'\\n'\n'A *scope* defines the visibility of a name within a block. If '\n'a local\\n'\n'variable is defined in a block, its scope includes that block. '\n'If the\\n'\n'definition occurs in a function block, the scope extends to any '\n'blocks\\n'\n'contained within the defining one, unless a contained block '\n'introduces\\n'\n'a different binding for the name.\\n'\n'\\n'\n'When a name is used in a code block, it is resolved using the '\n'nearest\\n'\n'enclosing scope. The set of all such scopes visible to a code '\n'block\\n'\n'is called the block\u2019s *environment*.\\n'\n'\\n'\n'When a name is not found at all, a \"NameError\" exception is '\n'raised. If\\n'\n'the current scope is a function scope, and the name refers to a '\n'local\\n'\n'variable that has not yet been bound to a value at the point '\n'where the\\n'\n'name is used, an \"UnboundLocalError\" exception is raised.\\n'\n'\"UnboundLocalError\" is a subclass of \"NameError\".\\n'\n'\\n'\n'If a name binding operation occurs anywhere within a code '\n'block, all\\n'\n'uses of the name within the block are treated as references to '\n'the\\n'\n'current block. This can lead to errors when a name is used '\n'within a\\n'\n'block before it is bound. This rule is subtle. Python lacks\\n'\n'declarations and allows name binding operations to occur '\n'anywhere\\n'\n'within a code block. The local variables of a code block can '\n'be\\n'\n'determined by scanning the entire text of the block for name '\n'binding\\n'\n'operations.\\n'\n'\\n'\n'If the \"global\" statement occurs within a block, all uses of '\n'the name\\n'\n'specified in the statement refer to the binding of that name in '\n'the\\n'\n'top-level namespace. Names are resolved in the top-level '\n'namespace by\\n'\n'searching the global namespace, i.e. 
the namespace of the '\n'module\\n'\n'containing the code block, and the builtins namespace, the '\n'namespace\\n'\n'of the module \"builtins\". The global namespace is searched '\n'first. If\\n'\n'the name is not found there, the builtins namespace is '\n'searched. The\\n'\n'\"global\" statement must precede all uses of the name.\\n'\n'\\n'\n'The \"global\" statement has the same scope as a name binding '\n'operation\\n'\n'in the same block. If the nearest enclosing scope for a free '\n'variable\\n'\n'contains a global statement, the free variable is treated as a '\n'global.\\n'\n'\\n'\n'The \"nonlocal\" statement causes corresponding names to refer '\n'to\\n'\n'previously bound variables in the nearest enclosing function '\n'scope.\\n'\n'\"SyntaxError\" is raised at compile time if the given name does '\n'not\\n'\n'exist in any enclosing function scope.\\n'\n'\\n'\n'The namespace for a module is automatically created the first '\n'time a\\n'\n'module is imported. The main module for a script is always '\n'called\\n'\n'\"__main__\".\\n'\n'\\n'\n'Class definition blocks and arguments to \"exec()\" and \"eval()\" '\n'are\\n'\n'special in the context of name resolution. A class definition '\n'is an\\n'\n'executable statement that may use and define names. These '\n'references\\n'\n'follow the normal rules for name resolution with an exception '\n'that\\n'\n'unbound local variables are looked up in the global namespace. '\n'The\\n'\n'namespace of the class definition becomes the attribute '\n'dictionary of\\n'\n'the class. The scope of names defined in a class block is '\n'limited to\\n'\n'the class block; it does not extend to the code blocks of '\n'methods \u2013\\n'\n'this includes comprehensions and generator expressions since '\n'they are\\n'\n'implemented using a function scope. This means that the '\n'following\\n'\n'will fail:\\n'\n'\\n'\n' class A:\\n'\n' a = 42\\n'\n' b = list(a + i for i in range(10))\\n'\n'\\n'\n'\\n'\n'Builtins and restricted execution\\n'\n'---------------------------------\\n'\n'\\n'\n'**CPython implementation detail:** Users should not touch\\n'\n'\"__builtins__\"; it is strictly an implementation detail. '\n'Users\\n'\n'wanting to override values in the builtins namespace should '\n'\"import\"\\n'\n'the \"builtins\" module and modify its attributes appropriately.\\n'\n'\\n'\n'The builtins namespace associated with the execution of a code '\n'block\\n'\n'is actually found by looking up the name \"__builtins__\" in its '\n'global\\n'\n'namespace; this should be a dictionary or a module (in the '\n'latter case\\n'\n'the module\u2019s dictionary is used). By default, when in the '\n'\"__main__\"\\n'\n'module, \"__builtins__\" is the built-in module \"builtins\"; when '\n'in any\\n'\n'other module, \"__builtins__\" is an alias for the dictionary of '\n'the\\n'\n'\"builtins\" module itself.\\n'\n'\\n'\n'\\n'\n'Interaction with dynamic features\\n'\n'---------------------------------\\n'\n'\\n'\n'Name resolution of free variables occurs at runtime, not at '\n'compile\\n'\n'time. This means that the following code will print 42:\\n'\n'\\n'\n' i = 10\\n'\n' def f():\\n'\n' print(i)\\n'\n' i = 42\\n'\n' f()\\n'\n'\\n'\n'The \"eval()\" and \"exec()\" functions do not have access to the '\n'full\\n'\n'environment for resolving names. Names may be resolved in the '\n'local\\n'\n'and global namespaces of the caller. Free variables are not '\n'resolved\\n'\n'in the nearest enclosing namespace, but in the global '\n'namespace. 
[1]\\n'\n'The \"exec()\" and \"eval()\" functions have optional arguments to\\n'\n'override the global and local namespace. If only one namespace '\n'is\\n'\n'specified, it is used for both.\\n'\n'\\n'\n'\\n'\n'Exceptions\\n'\n'==========\\n'\n'\\n'\n'Exceptions are a means of breaking out of the normal flow of '\n'control\\n'\n'of a code block in order to handle errors or other exceptional\\n'\n'conditions. An exception is *raised* at the point where the '\n'error is\\n'\n'detected; it may be *handled* by the surrounding code block or '\n'by any\\n'\n'code block that directly or indirectly invoked the code block '\n'where\\n'\n'the error occurred.\\n'\n'\\n'\n'The Python interpreter raises an exception when it detects a '\n'run-time\\n'\n'error (such as division by zero). A Python program can also\\n'\n'explicitly raise an exception with the \"raise\" statement. '\n'Exception\\n'\n'handlers are specified with the \"try\" \u2026 \"except\" statement. '\n'The\\n'\n'\"finally\" clause of such a statement can be used to specify '\n'cleanup\\n'\n'code which does not handle the exception, but is executed '\n'whether an\\n'\n'exception occurred or not in the preceding code.\\n'\n'\\n'\n'Python uses the \u201ctermination\u201d model of error handling: an '\n'exception\\n'\n'handler can find out what happened and continue execution at an '\n'outer\\n'\n'level, but it cannot repair the cause of the error and retry '\n'the\\n'\n'failing operation (except by re-entering the offending piece of '\n'code\\n'\n'from the top).\\n'\n'\\n'\n'When an exception is not handled at all, the interpreter '\n'terminates\\n'\n'execution of the program, or returns to its interactive main '\n'loop. In\\n'\n'either case, it prints a stack traceback, except when the '\n'exception is\\n'\n'\"SystemExit\".\\n'\n'\\n'\n'Exceptions are identified by class instances. The \"except\" '\n'clause is\\n'\n'selected depending on the class of the instance: it must '\n'reference the\\n'\n'class of the instance or a base class thereof. The instance '\n'can be\\n'\n'received by the handler and can carry additional information '\n'about the\\n'\n'exceptional condition.\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' Exception messages are not part of the Python API. Their '\n'contents\\n'\n' may change from one version of Python to the next without '\n'warning\\n'\n' and should not be relied on by code which will run under '\n'multiple\\n'\n' versions of the interpreter.\\n'\n'\\n'\n'See also the description of the \"try\" statement in section The '\n'try\\n'\n'statement and \"raise\" statement in section The raise '\n'statement.\\n'\n'\\n'\n'-[ Footnotes ]-\\n'\n'\\n'\n'[1] This limitation occurs because the code that is executed by '\n'these\\n'\n' operations is not available at the time the module is '\n'compiled.\\n',\n'exprlists':'Expression lists\\n'\n'****************\\n'\n'\\n'\n' expression_list ::= expression (\",\" expression)* [\",\"]\\n'\n' starred_list ::= starred_item (\",\" starred_item)* '\n'[\",\"]\\n'\n' starred_expression ::= expression | (starred_item \",\")* '\n'[starred_item]\\n'\n' starred_item ::= assignment_expression | \"*\" or_expr\\n'\n'\\n'\n'Except when part of a list or set display, an expression list\\n'\n'containing at least one comma yields a tuple. The length of '\n'the tuple\\n'\n'is the number of expressions in the list. The expressions are\\n'\n'evaluated from left to right.\\n'\n'\\n'\n'An asterisk \"*\" denotes *iterable unpacking*. Its operand must '\n'be an\\n'\n'*iterable*. 
The iterable is expanded into a sequence of items, '\n'which\\n'\n'are included in the new tuple, list, or set, at the site of '\n'the\\n'\n'unpacking.\\n'\n'\\n'\n'New in version 3.5: Iterable unpacking in expression lists, '\n'originally\\n'\n'proposed by **PEP 448**.\\n'\n'\\n'\n'The trailing comma is required only to create a single tuple '\n'(a.k.a. a\\n'\n'*singleton*); it is optional in all other cases. A single '\n'expression\\n'\n'without a trailing comma doesn\u2019t create a tuple, but rather '\n'yields the\\n'\n'value of that expression. (To create an empty tuple, use an '\n'empty pair\\n'\n'of parentheses: \"()\".)\\n',\n'floating':'Floating point literals\\n'\n'***********************\\n'\n'\\n'\n'Floating point literals are described by the following lexical\\n'\n'definitions:\\n'\n'\\n'\n' floatnumber ::= pointfloat | exponentfloat\\n'\n' pointfloat ::= [digitpart] fraction | digitpart \".\"\\n'\n' exponentfloat ::= (digitpart | pointfloat) exponent\\n'\n' digitpart ::= digit ([\"_\"] digit)*\\n'\n' fraction ::= \".\" digitpart\\n'\n' exponent ::= (\"e\" | \"E\") [\"+\" | \"-\"] digitpart\\n'\n'\\n'\n'Note that the integer and exponent parts are always interpreted '\n'using\\n'\n'radix 10. For example, \"077e010\" is legal, and denotes the same '\n'number\\n'\n'as \"77e10\". The allowed range of floating point literals is\\n'\n'implementation-dependent. As in integer literals, underscores '\n'are\\n'\n'supported for digit grouping.\\n'\n'\\n'\n'Some examples of floating point literals:\\n'\n'\\n'\n' 3.14 10. .001 1e100 3.14e-10 0e0 '\n'3.14_15_93\\n'\n'\\n'\n'Changed in version 3.6: Underscores are now allowed for '\n'grouping\\n'\n'purposes in literals.\\n',\n'for':'The \"for\" statement\\n'\n'*******************\\n'\n'\\n'\n'The \"for\" statement is used to iterate over the elements of a '\n'sequence\\n'\n'(such as a string, tuple or list) or other iterable object:\\n'\n'\\n'\n' for_stmt ::= \"for\" target_list \"in\" expression_list \":\" suite\\n'\n' [\"else\" \":\" suite]\\n'\n'\\n'\n'The expression list is evaluated once; it should yield an iterable\\n'\n'object. An iterator is created for the result of the\\n'\n'\"expression_list\". The suite is then executed once for each item\\n'\n'provided by the iterator, in the order returned by the iterator. '\n'Each\\n'\n'item in turn is assigned to the target list using the standard rules\\n'\n'for assignments (see Assignment statements), and then the suite is\\n'\n'executed. When the items are exhausted (which is immediately when '\n'the\\n'\n'sequence is empty or an iterator raises a \"StopIteration\" '\n'exception),\\n'\n'the suite in the \"else\" clause, if present, is executed, and the '\n'loop\\n'\n'terminates.\\n'\n'\\n'\n'A \"break\" statement executed in the first suite terminates the loop\\n'\n'without executing the \"else\" clause\u2019s suite. 
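# A short example of the "break"/"else" interaction just described: the "else"
# suite runs only when the loop is not terminated by "break" (sample data chosen
# arbitrarily).
# >>> for n in [4, 6, 9]:
# ...     if n % 2:
# ...         print(n, 'is odd')
# ...         break
# ... else:
# ...     print('no odd numbers found')
# ...
# 9 is odd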
A \"continue\" statement\\n'\n'executed in the first suite skips the rest of the suite and '\n'continues\\n'\n'with the next item, or with the \"else\" clause if there is no next\\n'\n'item.\\n'\n'\\n'\n'The for-loop makes assignments to the variables in the target list.\\n'\n'This overwrites all previous assignments to those variables '\n'including\\n'\n'those made in the suite of the for-loop:\\n'\n'\\n'\n' for i in range(10):\\n'\n' print(i)\\n'\n' i = 5 # this will not affect the for-loop\\n'\n' # because i will be overwritten with the '\n'next\\n'\n' # index in the range\\n'\n'\\n'\n'Names in the target list are not deleted when the loop is finished,\\n'\n'but if the sequence is empty, they will not have been assigned to at\\n'\n'all by the loop. Hint: the built-in function \"range()\" returns an\\n'\n'iterator of integers suitable to emulate the effect of Pascal\u2019s \"for '\n'i\\n'\n':= a to b do\"; e.g., \"list(range(3))\" returns the list \"[0, 1, 2]\".\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' There is a subtlety when the sequence is being modified by the '\n'loop\\n'\n' (this can only occur for mutable sequences, e.g. lists). An\\n'\n' internal counter is used to keep track of which item is used next,\\n'\n' and this is incremented on each iteration. When this counter has\\n'\n' reached the length of the sequence the loop terminates. This '\n'means\\n'\n' that if the suite deletes the current (or a previous) item from '\n'the\\n'\n' sequence, the next item will be skipped (since it gets the index '\n'of\\n'\n' the current item which has already been treated). Likewise, if '\n'the\\n'\n' suite inserts an item in the sequence before the current item, the\\n'\n' current item will be treated again the next time through the loop.\\n'\n' This can lead to nasty bugs that can be avoided by making a\\n'\n' temporary copy using a slice of the whole sequence, e.g.,\\n'\n'\\n'\n' for x in a[:]:\\n'\n' if x < 0: a.remove(x)\\n',\n'formatstrings':'Format String Syntax\\n'\n'********************\\n'\n'\\n'\n'The \"str.format()\" method and the \"Formatter\" class share '\n'the same\\n'\n'syntax for format strings (although in the case of '\n'\"Formatter\",\\n'\n'subclasses can define their own format string syntax). The '\n'syntax is\\n'\n'related to that of formatted string literals, but it is '\n'less\\n'\n'sophisticated and, in particular, does not support '\n'arbitrary\\n'\n'expressions.\\n'\n'\\n'\n'Format strings contain \u201creplacement fields\u201d surrounded by '\n'curly braces\\n'\n'\"{}\". Anything that is not contained in braces is '\n'considered literal\\n'\n'text, which is copied unchanged to the output. If you need '\n'to include\\n'\n'a brace character in the literal text, it can be escaped by '\n'doubling:\\n'\n'\"{{\" and \"}}\".\\n'\n'\\n'\n'The grammar for a replacement field is as follows:\\n'\n'\\n'\n' replacement_field ::= \"{\" [field_name] [\"!\" '\n'conversion] [\":\" format_spec] \"}\"\\n'\n' field_name ::= arg_name (\".\" attribute_name | '\n'\"[\" element_index \"]\")*\\n'\n' arg_name ::= [identifier | digit+]\\n'\n' attribute_name ::= identifier\\n'\n' element_index ::= digit+ | index_string\\n'\n' index_string ::= +\\n'\n' conversion ::= \"r\" | \"s\" | \"a\"\\n'\n' format_spec ::= \\n'\n'\\n'\n'In less formal terms, the replacement field can start with '\n'a\\n'\n'*field_name* that specifies the object whose value is to be '\n'formatted\\n'\n'and inserted into the output instead of the replacement '\n'field. 
The\\n'\n'*field_name* is optionally followed by a *conversion* '\n'field, which is\\n'\n'preceded by an exclamation point \"\\'!\\'\", and a '\n'*format_spec*, which is\\n'\n'preceded by a colon \"\\':\\'\". These specify a non-default '\n'format for the\\n'\n'replacement value.\\n'\n'\\n'\n'See also the Format Specification Mini-Language section.\\n'\n'\\n'\n'The *field_name* itself begins with an *arg_name* that is '\n'either a\\n'\n'number or a keyword. If it\u2019s a number, it refers to a '\n'positional\\n'\n'argument, and if it\u2019s a keyword, it refers to a named '\n'keyword\\n'\n'argument. If the numerical arg_names in a format string '\n'are 0, 1, 2,\\n'\n'\u2026 in sequence, they can all be omitted (not just some) and '\n'the numbers\\n'\n'0, 1, 2, \u2026 will be automatically inserted in that order. '\n'Because\\n'\n'*arg_name* is not quote-delimited, it is not possible to '\n'specify\\n'\n'arbitrary dictionary keys (e.g., the strings \"\\'10\\'\" or '\n'\"\\':-]\\'\") within\\n'\n'a format string. The *arg_name* can be followed by any '\n'number of index\\n'\n'or attribute expressions. An expression of the form '\n'\"\\'.name\\'\" selects\\n'\n'the named attribute using \"getattr()\", while an expression '\n'of the form\\n'\n'\"\\'[index]\\'\" does an index lookup using \"__getitem__()\".\\n'\n'\\n'\n'Changed in version 3.1: The positional argument specifiers '\n'can be\\n'\n'omitted for \"str.format()\", so \"\\'{} {}\\'.format(a, b)\" is '\n'equivalent to\\n'\n'\"\\'{0} {1}\\'.format(a, b)\".\\n'\n'\\n'\n'Changed in version 3.4: The positional argument specifiers '\n'can be\\n'\n'omitted for \"Formatter\".\\n'\n'\\n'\n'Some simple format string examples:\\n'\n'\\n'\n' \"First, thou shalt count to {0}\" # References first '\n'positional argument\\n'\n' \"Bring me a {}\" # Implicitly '\n'references the first positional argument\\n'\n' \"From {} to {}\" # Same as \"From {0} to '\n'{1}\"\\n'\n' \"My quest is {name}\" # References keyword '\n\"argument 'name'\\n\"\n' \"Weight in tons {0.weight}\" # \\'weight\\' attribute '\n'of first positional arg\\n'\n' \"Units destroyed: {players[0]}\" # First element of '\n\"keyword argument 'players'.\\n\"\n'\\n'\n'The *conversion* field causes a type coercion before '\n'formatting.\\n'\n'Normally, the job of formatting a value is done by the '\n'\"__format__()\"\\n'\n'method of the value itself. However, in some cases it is '\n'desirable to\\n'\n'force a type to be formatted as a string, overriding its '\n'own\\n'\n'definition of formatting. By converting the value to a '\n'string before\\n'\n'calling \"__format__()\", the normal formatting logic is '\n'bypassed.\\n'\n'\\n'\n'Three conversion flags are currently supported: \"\\'!s\\'\" '\n'which calls\\n'\n'\"str()\" on the value, \"\\'!r\\'\" which calls \"repr()\" and '\n'\"\\'!a\\'\" which\\n'\n'calls \"ascii()\".\\n'\n'\\n'\n'Some examples:\\n'\n'\\n'\n' \"Harold\\'s a clever {0!s}\" # Calls str() on the '\n'argument first\\n'\n' \"Bring out the holy {name!r}\" # Calls repr() on the '\n'argument first\\n'\n' \"More {!a}\" # Calls ascii() on the '\n'argument first\\n'\n'\\n'\n'The *format_spec* field contains a specification of how the '\n'value\\n'\n'should be presented, including such details as field width, '\n'alignment,\\n'\n'padding, decimal precision and so on. 
Each value type can '\n'define its\\n'\n'own \u201cformatting mini-language\u201d or interpretation of the '\n'*format_spec*.\\n'\n'\\n'\n'Most built-in types support a common formatting '\n'mini-language, which\\n'\n'is described in the next section.\\n'\n'\\n'\n'A *format_spec* field can also include nested replacement '\n'fields\\n'\n'within it. These nested replacement fields may contain a '\n'field name,\\n'\n'conversion flag and format specification, but deeper '\n'nesting is not\\n'\n'allowed. The replacement fields within the format_spec '\n'are\\n'\n'substituted before the *format_spec* string is interpreted. '\n'This\\n'\n'allows the formatting of a value to be dynamically '\n'specified.\\n'\n'\\n'\n'See the Format examples section for some examples.\\n'\n'\\n'\n'\\n'\n'Format Specification Mini-Language\\n'\n'==================================\\n'\n'\\n'\n'\u201cFormat specifications\u201d are used within replacement fields '\n'contained\\n'\n'within a format string to define how individual values are '\n'presented\\n'\n'(see Format String Syntax and Formatted string literals). '\n'They can\\n'\n'also be passed directly to the built-in \"format()\" '\n'function. Each\\n'\n'formattable type may define how the format specification is '\n'to be\\n'\n'interpreted.\\n'\n'\\n'\n'Most built-in types implement the following options for '\n'format\\n'\n'specifications, although some of the formatting options are '\n'only\\n'\n'supported by the numeric types.\\n'\n'\\n'\n'A general convention is that an empty format specification '\n'produces\\n'\n'the same result as if you had called \"str()\" on the value. '\n'A non-empty\\n'\n'format specification typically modifies the result.\\n'\n'\\n'\n'The general form of a *standard format specifier* is:\\n'\n'\\n'\n' format_spec ::= '\n'[[fill]align][sign][#][0][width][grouping_option][.precision][type]\\n'\n' fill ::= \\n'\n' align ::= \"<\" | \">\" | \"=\" | \"^\"\\n'\n' sign ::= \"+\" | \"-\" | \" \"\\n'\n' width ::= digit+\\n'\n' grouping_option ::= \"_\" | \",\"\\n'\n' precision ::= digit+\\n'\n' type ::= \"b\" | \"c\" | \"d\" | \"e\" | \"E\" | \"f\" | '\n'\"F\" | \"g\" | \"G\" | \"n\" | \"o\" | \"s\" | \"x\" | \"X\" | \"%\"\\n'\n'\\n'\n'If a valid *align* value is specified, it can be preceded '\n'by a *fill*\\n'\n'character that can be any character and defaults to a space '\n'if\\n'\n'omitted. It is not possible to use a literal curly brace '\n'(\u201d\"{\"\u201d or\\n'\n'\u201c\"}\"\u201d) as the *fill* character in a formatted string '\n'literal or when\\n'\n'using the \"str.format()\" method. However, it is possible '\n'to insert a\\n'\n'curly brace with a nested replacement field. This '\n'limitation doesn\u2019t\\n'\n'affect the \"format()\" function.\\n'\n'\\n'\n'The meaning of the various alignment options is as '\n'follows:\\n'\n'\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | Option | '\n'Meaning '\n'|\\n'\n' '\n'|===========|============================================================|\\n'\n' | \"\\'<\\'\" | Forces the field to be left-aligned '\n'within the available |\\n'\n' | | space (this is the default for most '\n'objects). |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'>\\'\" | Forces the field to be right-aligned '\n'within the available |\\n'\n' | | space (this is the default for '\n'numbers). 
|\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'=\\'\" | Forces the padding to be placed after '\n'the sign (if any) |\\n'\n' | | but before the digits. This is used for '\n'printing fields |\\n'\n' | | in the form \u2018+000000120\u2019. This alignment '\n'option is only |\\n'\n' | | valid for numeric types. It becomes the '\n'default for |\\n'\n' | | numbers when \u20180\u2019 immediately precedes the '\n'field width. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'^\\'\" | Forces the field to be centered within '\n'the available |\\n'\n' | | '\n'space. '\n'|\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n'\\n'\n'Note that unless a minimum field width is defined, the '\n'field width\\n'\n'will always be the same size as the data to fill it, so '\n'that the\\n'\n'alignment option has no meaning in this case.\\n'\n'\\n'\n'The *sign* option is only valid for number types, and can '\n'be one of\\n'\n'the following:\\n'\n'\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | Option | '\n'Meaning '\n'|\\n'\n' '\n'|===========|============================================================|\\n'\n' | \"\\'+\\'\" | indicates that a sign should be used for '\n'both positive as |\\n'\n' | | well as negative '\n'numbers. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'-\\'\" | indicates that a sign should be used '\n'only for negative |\\n'\n' | | numbers (this is the default '\n'behavior). |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | space | indicates that a leading space should be '\n'used on positive |\\n'\n' | | numbers, and a minus sign on negative '\n'numbers. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n'\\n'\n'The \"\\'#\\'\" option causes the \u201calternate form\u201d to be used '\n'for the\\n'\n'conversion. The alternate form is defined differently for '\n'different\\n'\n'types. This option is only valid for integer, float and '\n'complex\\n'\n'types. For integers, when binary, octal, or hexadecimal '\n'output is\\n'\n'used, this option adds the respective prefix \"\\'0b\\'\", '\n'\"\\'0o\\'\", \"\\'0x\\'\",\\n'\n'or \"\\'0X\\'\" to the output value. For float and complex the '\n'alternate\\n'\n'form causes the result of the conversion to always contain '\n'a decimal-\\n'\n'point character, even if no digits follow it. Normally, a '\n'decimal-\\n'\n'point character appears in the result of these conversions '\n'only if a\\n'\n'digit follows it. In addition, for \"\\'g\\'\" and \"\\'G\\'\" '\n'conversions,\\n'\n'trailing zeros are not removed from the result.\\n'\n'\\n'\n'The \"\\',\\'\" option signals the use of a comma for a '\n'thousands separator.\\n'\n'For a locale aware separator, use the \"\\'n\\'\" integer '\n'presentation type\\n'\n'instead.\\n'\n'\\n'\n'Changed in version 3.1: Added the \"\\',\\'\" option (see also '\n'**PEP 378**).\\n'\n'\\n'\n'The \"\\'_\\'\" option signals the use of an underscore for a '\n'thousands\\n'\n'separator for floating point presentation types and for '\n'integer\\n'\n'presentation type \"\\'d\\'\". For integer presentation types '\n'\"\\'b\\'\", \"\\'o\\'\",\\n'\n'\"\\'x\\'\", and \"\\'X\\'\", underscores will be inserted every 4 '\n'digits. 
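# Quick illustration of the "','" and "'_'" grouping options described above
# (values are arbitrary): commas group decimal output, while underscores group
# every four digits for the 'b', 'o', 'x' and 'X' presentation types.
# >>> '{:,}'.format(1234567)
# '1,234,567'
# >>> '{:_x}'.format(0xFFFFFFFF)
# 'ffff_ffff'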
For\\n'\n'other presentation types, specifying this option is an '\n'error.\\n'\n'\\n'\n'Changed in version 3.6: Added the \"\\'_\\'\" option (see also '\n'**PEP 515**).\\n'\n'\\n'\n'*width* is a decimal integer defining the minimum total '\n'field width,\\n'\n'including any prefixes, separators, and other formatting '\n'characters.\\n'\n'If not specified, then the field width will be determined '\n'by the\\n'\n'content.\\n'\n'\\n'\n'When no explicit alignment is given, preceding the *width* '\n'field by a\\n'\n'zero (\"\\'0\\'\") character enables sign-aware zero-padding '\n'for numeric\\n'\n'types. This is equivalent to a *fill* character of \"\\'0\\'\" '\n'with an\\n'\n'*alignment* type of \"\\'=\\'\".\\n'\n'\\n'\n'Changed in version 3.10: Preceding the *width* field by '\n'\"\\'0\\'\" no\\n'\n'longer affects the default alignment for strings.\\n'\n'\\n'\n'The *precision* is a decimal number indicating how many '\n'digits should\\n'\n'be displayed after the decimal point for a floating point '\n'value\\n'\n'formatted with \"\\'f\\'\" and \"\\'F\\'\", or before and after the '\n'decimal point\\n'\n'for a floating point value formatted with \"\\'g\\'\" or '\n'\"\\'G\\'\". For non-\\n'\n'number types the field indicates the maximum field size - '\n'in other\\n'\n'words, how many characters will be used from the field '\n'content. The\\n'\n'*precision* is not allowed for integer values.\\n'\n'\\n'\n'Finally, the *type* determines how the data should be '\n'presented.\\n'\n'\\n'\n'The available string presentation types are:\\n'\n'\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | Type | '\n'Meaning '\n'|\\n'\n' '\n'|===========|============================================================|\\n'\n' | \"\\'s\\'\" | String format. This is the default type '\n'for strings and |\\n'\n' | | may be '\n'omitted. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | None | The same as '\n'\"\\'s\\'\". |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n'\\n'\n'The available integer presentation types are:\\n'\n'\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | Type | '\n'Meaning '\n'|\\n'\n' '\n'|===========|============================================================|\\n'\n' | \"\\'b\\'\" | Binary format. Outputs the number in '\n'base 2. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'c\\'\" | Character. Converts the integer to the '\n'corresponding |\\n'\n' | | unicode character before '\n'printing. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'d\\'\" | Decimal Integer. Outputs the number in '\n'base 10. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'o\\'\" | Octal format. Outputs the number in base '\n'8. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'x\\'\" | Hex format. Outputs the number in base '\n'16, using lower- |\\n'\n' | | case letters for the digits above '\n'9. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'X\\'\" | Hex format. Outputs the number in base '\n'16, using upper- |\\n'\n' | | case letters for the digits above 9. 
In '\n'case \"\\'#\\'\" is |\\n'\n' | | specified, the prefix \"\\'0x\\'\" will be '\n'upper-cased to \"\\'0X\\'\" |\\n'\n' | | as '\n'well. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'n\\'\" | Number. This is the same as \"\\'d\\'\", '\n'except that it uses the |\\n'\n' | | current locale setting to insert the '\n'appropriate number |\\n'\n' | | separator '\n'characters. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | None | The same as '\n'\"\\'d\\'\". |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n'\\n'\n'In addition to the above presentation types, integers can '\n'be formatted\\n'\n'with the floating point presentation types listed below '\n'(except \"\\'n\\'\"\\n'\n'and \"None\"). When doing so, \"float()\" is used to convert '\n'the integer\\n'\n'to a floating point number before formatting.\\n'\n'\\n'\n'The available presentation types for \"float\" and \"Decimal\" '\n'values are:\\n'\n'\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | Type | '\n'Meaning '\n'|\\n'\n' '\n'|===========|============================================================|\\n'\n' | \"\\'e\\'\" | Scientific notation. For a given '\n'precision \"p\", formats |\\n'\n' | | the number in scientific notation with the '\n'letter \u2018e\u2019 |\\n'\n' | | separating the coefficient from the '\n'exponent. The |\\n'\n' | | coefficient has one digit before and \"p\" '\n'digits after the |\\n'\n' | | decimal point, for a total of \"p + 1\" '\n'significant digits. |\\n'\n' | | With no precision given, uses a precision '\n'of \"6\" digits |\\n'\n' | | after the decimal point for \"float\", and '\n'shows all |\\n'\n' | | coefficient digits for \"Decimal\". If no '\n'digits follow the |\\n'\n' | | decimal point, the decimal point is also '\n'removed unless |\\n'\n' | | the \"#\" option is '\n'used. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'E\\'\" | Scientific notation. Same as \"\\'e\\'\" '\n'except it uses an upper |\\n'\n' | | case \u2018E\u2019 as the separator '\n'character. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'f\\'\" | Fixed-point notation. For a given '\n'precision \"p\", formats |\\n'\n' | | the number as a decimal number with '\n'exactly \"p\" digits |\\n'\n' | | following the decimal point. With no '\n'precision given, uses |\\n'\n' | | a precision of \"6\" digits after the '\n'decimal point for |\\n'\n' | | \"float\", and uses a precision large enough '\n'to show all |\\n'\n' | | coefficient digits for \"Decimal\". If no '\n'digits follow the |\\n'\n' | | decimal point, the decimal point is also '\n'removed unless |\\n'\n' | | the \"#\" option is '\n'used. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'F\\'\" | Fixed-point notation. Same as \"\\'f\\'\", '\n'but converts \"nan\" to |\\n'\n' | | \"NAN\" and \"inf\" to '\n'\"INF\". |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'g\\'\" | General format. For a given precision '\n'\"p >= 1\", this |\\n'\n' | | rounds the number to \"p\" significant '\n'digits and then |\\n'\n' | | formats the result in either fixed-point '\n'format or in |\\n'\n' | | scientific notation, depending on its '\n'magnitude. 
A |\\n'\n' | | precision of \"0\" is treated as equivalent '\n'to a precision |\\n'\n' | | of \"1\". The precise rules are as follows: '\n'suppose that |\\n'\n' | | the result formatted with presentation '\n'type \"\\'e\\'\" and |\\n'\n' | | precision \"p-1\" would have exponent '\n'\"exp\". Then, if \"m <= |\\n'\n' | | exp < p\", where \"m\" is -4 for floats and '\n'-6 for |\\n'\n' | | \"Decimals\", the number is formatted with '\n'presentation type |\\n'\n' | | \"\\'f\\'\" and precision \"p-1-exp\". '\n'Otherwise, the number is |\\n'\n' | | formatted with presentation type \"\\'e\\'\" '\n'and precision |\\n'\n' | | \"p-1\". In both cases insignificant '\n'trailing zeros are |\\n'\n' | | removed from the significand, and the '\n'decimal point is |\\n'\n' | | also removed if there are no remaining '\n'digits following |\\n'\n' | | it, unless the \"\\'#\\'\" option is used. '\n'With no precision |\\n'\n' | | given, uses a precision of \"6\" significant '\n'digits for |\\n'\n' | | \"float\". For \"Decimal\", the coefficient of '\n'the result is |\\n'\n' | | formed from the coefficient digits of the '\n'value; |\\n'\n' | | scientific notation is used for values '\n'smaller than \"1e-6\" |\\n'\n' | | in absolute value and values where the '\n'place value of the |\\n'\n' | | least significant digit is larger than 1, '\n'and fixed-point |\\n'\n' | | notation is used otherwise. Positive and '\n'negative |\\n'\n' | | infinity, positive and negative zero, and '\n'nans, are |\\n'\n' | | formatted as \"inf\", \"-inf\", \"0\", \"-0\" and '\n'\"nan\" |\\n'\n' | | respectively, regardless of the '\n'precision. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'G\\'\" | General format. Same as \"\\'g\\'\" except '\n'switches to \"\\'E\\'\" if |\\n'\n' | | the number gets too large. The '\n'representations of infinity |\\n'\n' | | and NaN are uppercased, '\n'too. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'n\\'\" | Number. This is the same as \"\\'g\\'\", '\n'except that it uses the |\\n'\n' | | current locale setting to insert the '\n'appropriate number |\\n'\n' | | separator '\n'characters. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | \"\\'%\\'\" | Percentage. Multiplies the number by 100 '\n'and displays in |\\n'\n' | | fixed (\"\\'f\\'\") format, followed by a '\n'percent sign. |\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n' | None | For \"float\" this is the same as \"\\'g\\'\", '\n'except that when |\\n'\n' | | fixed-point notation is used to format the '\n'result, it |\\n'\n' | | always includes at least one digit past '\n'the decimal point. |\\n'\n' | | The precision used is as large as needed '\n'to represent the |\\n'\n' | | given value faithfully. For \"Decimal\", '\n'this is the same |\\n'\n' | | as either \"\\'g\\'\" or \"\\'G\\'\" depending on '\n'the value of |\\n'\n' | | \"context.capitals\" for the current decimal '\n'context. The |\\n'\n' | | overall effect is to match the output of '\n'\"str()\" as |\\n'\n' | | altered by the other format '\n'modifiers. 
|\\n'\n' '\n'+-----------+------------------------------------------------------------+\\n'\n'\\n'\n'\\n'\n'Format examples\\n'\n'===============\\n'\n'\\n'\n'This section contains examples of the \"str.format()\" syntax '\n'and\\n'\n'comparison with the old \"%\"-formatting.\\n'\n'\\n'\n'In most of the cases the syntax is similar to the old '\n'\"%\"-formatting,\\n'\n'with the addition of the \"{}\" and with \":\" used instead of '\n'\"%\". For\\n'\n'example, \"\\'%03.2f\\'\" can be translated to \"\\'{:03.2f}\\'\".\\n'\n'\\n'\n'The new format syntax also supports new and different '\n'options, shown\\n'\n'in the following examples.\\n'\n'\\n'\n'Accessing arguments by position:\\n'\n'\\n'\n\" >>> '{0}, {1}, {2}'.format('a', 'b', 'c')\\n\"\n\" 'a, b, c'\\n\"\n\" >>> '{}, {}, {}'.format('a', 'b', 'c') # 3.1+ only\\n\"\n\" 'a, b, c'\\n\"\n\" >>> '{2}, {1}, {0}'.format('a', 'b', 'c')\\n\"\n\" 'c, b, a'\\n\"\n\" >>> '{2}, {1}, {0}'.format(*'abc') # unpacking \"\n'argument sequence\\n'\n\" 'c, b, a'\\n\"\n\" >>> '{0}{1}{0}'.format('abra', 'cad') # arguments' \"\n'indices can be repeated\\n'\n\" 'abracadabra'\\n\"\n'\\n'\n'Accessing arguments by name:\\n'\n'\\n'\n\" >>> 'Coordinates: {latitude}, \"\n\"{longitude}'.format(latitude='37.24N', \"\n\"longitude='-115.81W')\\n\"\n\" 'Coordinates: 37.24N, -115.81W'\\n\"\n\" >>> coord = {'latitude': '37.24N', 'longitude': \"\n\"'-115.81W'}\\n\"\n\" >>> 'Coordinates: {latitude}, \"\n\"{longitude}'.format(**coord)\\n\"\n\" 'Coordinates: 37.24N, -115.81W'\\n\"\n'\\n'\n'Accessing arguments\u2019 attributes:\\n'\n'\\n'\n' >>> c = 3-5j\\n'\n\" >>> ('The complex number {0} is formed from the real \"\n\"part {0.real} '\\n\"\n\" ... 'and the imaginary part {0.imag}.').format(c)\\n\"\n\" 'The complex number (3-5j) is formed from the real part \"\n\"3.0 and the imaginary part -5.0.'\\n\"\n' >>> class Point:\\n'\n' ... def __init__(self, x, y):\\n'\n' ... self.x, self.y = x, y\\n'\n' ... def __str__(self):\\n'\n\" ... 
return 'Point({self.x}, \"\n\"{self.y})'.format(self=self)\\n\"\n' ...\\n'\n' >>> str(Point(4, 2))\\n'\n\" 'Point(4, 2)'\\n\"\n'\\n'\n'Accessing arguments\u2019 items:\\n'\n'\\n'\n' >>> coord = (3, 5)\\n'\n\" >>> 'X: {0[0]}; Y: {0[1]}'.format(coord)\\n\"\n\" 'X: 3; Y: 5'\\n\"\n'\\n'\n'Replacing \"%s\" and \"%r\":\\n'\n'\\n'\n' >>> \"repr() shows quotes: {!r}; str() doesn\\'t: '\n'{!s}\".format(\\'test1\\', \\'test2\\')\\n'\n' \"repr() shows quotes: \\'test1\\'; str() doesn\\'t: test2\"\\n'\n'\\n'\n'Aligning the text and specifying a width:\\n'\n'\\n'\n\" >>> '{:<30}'.format('left aligned')\\n\"\n\" 'left aligned '\\n\"\n\" >>> '{:>30}'.format('right aligned')\\n\"\n\" ' right aligned'\\n\"\n\" >>> '{:^30}'.format('centered')\\n\"\n\" ' centered '\\n\"\n\" >>> '{:*^30}'.format('centered') # use '*' as a fill \"\n'char\\n'\n\" '***********centered***********'\\n\"\n'\\n'\n'Replacing \"%+f\", \"%-f\", and \"% f\" and specifying a sign:\\n'\n'\\n'\n\" >>> '{:+f}; {:+f}'.format(3.14, -3.14) # show it \"\n'always\\n'\n\" '+3.140000; -3.140000'\\n\"\n\" >>> '{: f}; {: f}'.format(3.14, -3.14) # show a space \"\n'for positive numbers\\n'\n\" ' 3.140000; -3.140000'\\n\"\n\" >>> '{:-f}; {:-f}'.format(3.14, -3.14) # show only the \"\n\"minus -- same as '{:f}; {:f}'\\n\"\n\" '3.140000; -3.140000'\\n\"\n'\\n'\n'Replacing \"%x\" and \"%o\" and converting the value to '\n'different bases:\\n'\n'\\n'\n' >>> # format also supports binary numbers\\n'\n' >>> \"int: {0:d}; hex: {0:x}; oct: {0:o}; bin: '\n'{0:b}\".format(42)\\n'\n\" 'int: 42; hex: 2a; oct: 52; bin: 101010'\\n\"\n' >>> # with 0x, 0o, or 0b as prefix:\\n'\n' >>> \"int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: '\n'{0:#b}\".format(42)\\n'\n\" 'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010'\\n\"\n'\\n'\n'Using the comma as a thousands separator:\\n'\n'\\n'\n\" >>> '{:,}'.format(1234567890)\\n\"\n\" '1,234,567,890'\\n\"\n'\\n'\n'Expressing a percentage:\\n'\n'\\n'\n' >>> points = 19\\n'\n' >>> total = 22\\n'\n\" >>> 'Correct answers: {:.2%}'.format(points/total)\\n\"\n\" 'Correct answers: 86.36%'\\n\"\n'\\n'\n'Using type-specific formatting:\\n'\n'\\n'\n' >>> import datetime\\n'\n' >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\\n'\n\" >>> '{:%Y-%m-%d %H:%M:%S}'.format(d)\\n\"\n\" '2010-07-04 12:15:58'\\n\"\n'\\n'\n'Nesting arguments and more complex examples:\\n'\n'\\n'\n\" >>> for align, text in zip('<^>', ['left', 'center', \"\n\"'right']):\\n\"\n\" ... '{0:{fill}{align}16}'.format(text, fill=align, \"\n'align=align)\\n'\n' ...\\n'\n\" 'left<<<<<<<<<<<<'\\n\"\n\" '^^^^^center^^^^^'\\n\"\n\" '>>>>>>>>>>>right'\\n\"\n' >>>\\n'\n' >>> octets = [192, 168, 0, 1]\\n'\n\" >>> '{:02X}{:02X}{:02X}{:02X}'.format(*octets)\\n\"\n\" 'C0A80001'\\n\"\n' >>> int(_, 16)\\n'\n' 3232235521\\n'\n' >>>\\n'\n' >>> width = 5\\n'\n' >>> for num in range(5,12): \\n'\n\" ... for base in 'dXob':\\n\"\n\" ... print('{0:{width}{base}}'.format(num, \"\n\"base=base, width=width), end=' ')\\n\"\n' ... 
print()\\n'\n' ...\\n'\n' 5 5 5 101\\n'\n' 6 6 6 110\\n'\n' 7 7 7 111\\n'\n' 8 8 10 1000\\n'\n' 9 9 11 1001\\n'\n' 10 A 12 1010\\n'\n' 11 B 13 1011\\n',\n'function':'Function definitions\\n'\n'********************\\n'\n'\\n'\n'A function definition defines a user-defined function object '\n'(see\\n'\n'section The standard type hierarchy):\\n'\n'\\n'\n' funcdef ::= [decorators] \"def\" funcname \"(\" '\n'[parameter_list] \")\"\\n'\n' [\"->\" expression] \":\" suite\\n'\n' decorators ::= decorator+\\n'\n' decorator ::= \"@\" assignment_expression '\n'NEWLINE\\n'\n' parameter_list ::= defparameter (\",\" '\n'defparameter)* \",\" \"/\" [\",\" [parameter_list_no_posonly]]\\n'\n' | parameter_list_no_posonly\\n'\n' parameter_list_no_posonly ::= defparameter (\",\" '\n'defparameter)* [\",\" [parameter_list_starargs]]\\n'\n' | parameter_list_starargs\\n'\n' parameter_list_starargs ::= \"*\" [parameter] (\",\" '\n'defparameter)* [\",\" [\"**\" parameter [\",\"]]]\\n'\n' | \"**\" parameter [\",\"]\\n'\n' parameter ::= identifier [\":\" expression]\\n'\n' defparameter ::= parameter [\"=\" expression]\\n'\n' funcname ::= identifier\\n'\n'\\n'\n'A function definition is an executable statement. Its execution '\n'binds\\n'\n'the function name in the current local namespace to a function '\n'object\\n'\n'(a wrapper around the executable code for the function). This\\n'\n'function object contains a reference to the current global '\n'namespace\\n'\n'as the global namespace to be used when the function is called.\\n'\n'\\n'\n'The function definition does not execute the function body; this '\n'gets\\n'\n'executed only when the function is called. [4]\\n'\n'\\n'\n'A function definition may be wrapped by one or more *decorator*\\n'\n'expressions. Decorator expressions are evaluated when the '\n'function is\\n'\n'defined, in the scope that contains the function definition. '\n'The\\n'\n'result must be a callable, which is invoked with the function '\n'object\\n'\n'as the only argument. The returned value is bound to the '\n'function name\\n'\n'instead of the function object. Multiple decorators are applied '\n'in\\n'\n'nested fashion. For example, the following code\\n'\n'\\n'\n' @f1(arg)\\n'\n' @f2\\n'\n' def func(): pass\\n'\n'\\n'\n'is roughly equivalent to\\n'\n'\\n'\n' def func(): pass\\n'\n' func = f1(arg)(f2(func))\\n'\n'\\n'\n'except that the original function is not temporarily bound to '\n'the name\\n'\n'\"func\".\\n'\n'\\n'\n'Changed in version 3.9: Functions may be decorated with any '\n'valid\\n'\n'\"assignment_expression\". Previously, the grammar was much more\\n'\n'restrictive; see **PEP 614** for details.\\n'\n'\\n'\n'When one or more *parameters* have the form *parameter* \"=\"\\n'\n'*expression*, the function is said to have \u201cdefault parameter '\n'values.\u201d\\n'\n'For a parameter with a default value, the corresponding '\n'*argument* may\\n'\n'be omitted from a call, in which case the parameter\u2019s default '\n'value is\\n'\n'substituted. If a parameter has a default value, all following\\n'\n'parameters up until the \u201c\"*\"\u201d must also have a default value \u2014 '\n'this is\\n'\n'a syntactic restriction that is not expressed by the grammar.\\n'\n'\\n'\n'**Default parameter values are evaluated from left to right when '\n'the\\n'\n'function definition is executed.** This means that the '\n'expression is\\n'\n'evaluated once, when the function is defined, and that the same '\n'\u201cpre-\\n'\n'computed\u201d value is used for each call. 
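# A minimal demonstration of the "evaluated once at definition time" rule stated
# above: the mutable default below is shared between calls, which is what
# motivates the "None" workaround shown in the following paragraph (names are
# arbitrary).
# >>> def append_to(item, bucket=[]):
# ...     bucket.append(item)
# ...     return bucket
# ...
# >>> append_to(1)
# [1]
# >>> append_to(2)
# [1, 2]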
This is especially '\n'important\\n'\n'to understand when a default parameter value is a mutable '\n'object, such\\n'\n'as a list or a dictionary: if the function modifies the object '\n'(e.g.\\n'\n'by appending an item to a list), the default parameter value is '\n'in\\n'\n'effect modified. This is generally not what was intended. A '\n'way\\n'\n'around this is to use \"None\" as the default, and explicitly test '\n'for\\n'\n'it in the body of the function, e.g.:\\n'\n'\\n'\n' def whats_on_the_telly(penguin=None):\\n'\n' if penguin is None:\\n'\n' penguin = []\\n'\n' penguin.append(\"property of the zoo\")\\n'\n' return penguin\\n'\n'\\n'\n'Function call semantics are described in more detail in section '\n'Calls.\\n'\n'A function call always assigns values to all parameters '\n'mentioned in\\n'\n'the parameter list, either from positional arguments, from '\n'keyword\\n'\n'arguments, or from default values. If the form \u201c\"*identifier\"\u201d '\n'is\\n'\n'present, it is initialized to a tuple receiving any excess '\n'positional\\n'\n'parameters, defaulting to the empty tuple. If the form\\n'\n'\u201c\"**identifier\"\u201d is present, it is initialized to a new ordered\\n'\n'mapping receiving any excess keyword arguments, defaulting to a '\n'new\\n'\n'empty mapping of the same type. Parameters after \u201c\"*\"\u201d or\\n'\n'\u201c\"*identifier\"\u201d are keyword-only parameters and may only be '\n'passed by\\n'\n'keyword arguments. Parameters before \u201c\"/\"\u201d are positional-only\\n'\n'parameters and may only be passed by positional arguments.\\n'\n'\\n'\n'Changed in version 3.8: The \"/\" function parameter syntax may be '\n'used\\n'\n'to indicate positional-only parameters. See **PEP 570** for '\n'details.\\n'\n'\\n'\n'Parameters may have an *annotation* of the form \u201c\": '\n'expression\"\u201d\\n'\n'following the parameter name. Any parameter may have an '\n'annotation,\\n'\n'even those of the form \"*identifier\" or \"**identifier\". '\n'Functions may\\n'\n'have \u201creturn\u201d annotation of the form \u201c\"-> expression\"\u201d after '\n'the\\n'\n'parameter list. These annotations can be any valid Python '\n'expression.\\n'\n'The presence of annotations does not change the semantics of a\\n'\n'function. The annotation values are available as values of a\\n'\n'dictionary keyed by the parameters\u2019 names in the '\n'\"__annotations__\"\\n'\n'attribute of the function object. If the \"annotations\" import '\n'from\\n'\n'\"__future__\" is used, annotations are preserved as strings at '\n'runtime\\n'\n'which enables postponed evaluation. Otherwise, they are '\n'evaluated\\n'\n'when the function definition is executed. In this case '\n'annotations\\n'\n'may be evaluated in a different order than they appear in the '\n'source\\n'\n'code.\\n'\n'\\n'\n'It is also possible to create anonymous functions (functions not '\n'bound\\n'\n'to a name), for immediate use in expressions. This uses lambda\\n'\n'expressions, described in section Lambdas. Note that the '\n'lambda\\n'\n'expression is merely a shorthand for a simplified function '\n'definition;\\n'\n'a function defined in a \u201c\"def\"\u201d statement can be passed around '\n'or\\n'\n'assigned to another name just like a function defined by a '\n'lambda\\n'\n'expression. The \u201c\"def\"\u201d form is actually more powerful since '\n'it\\n'\n'allows the execution of multiple statements and annotations.\\n'\n'\\n'\n'**Programmer\u2019s note:** Functions are first-class objects. 
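# Sketch of the first-class-function point made here and elaborated just below:
# a nested "def" produces an ordinary object that can be returned, and its free
# variables keep referring to the enclosing function's locals (arbitrary names).
# >>> def make_counter():
# ...     count = 0
# ...     def bump():
# ...         nonlocal count
# ...         count += 1
# ...         return count
# ...     return bump
# ...
# >>> tick = make_counter()
# >>> tick(), tick()
# (1, 2)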
A '\n'\u201c\"def\"\u201d\\n'\n'statement executed inside a function definition defines a local\\n'\n'function that can be returned or passed around. Free variables '\n'used\\n'\n'in the nested function can access the local variables of the '\n'function\\n'\n'containing the def. See section Naming and binding for '\n'details.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 3107** - Function Annotations\\n'\n' The original specification for function annotations.\\n'\n'\\n'\n' **PEP 484** - Type Hints\\n'\n' Definition of a standard meaning for annotations: type '\n'hints.\\n'\n'\\n'\n' **PEP 526** - Syntax for Variable Annotations\\n'\n' Ability to type hint variable declarations, including '\n'class\\n'\n' variables and instance variables\\n'\n'\\n'\n' **PEP 563** - Postponed Evaluation of Annotations\\n'\n' Support for forward references within annotations by '\n'preserving\\n'\n' annotations in a string form at runtime instead of eager\\n'\n' evaluation.\\n',\n'global':'The \"global\" statement\\n'\n'**********************\\n'\n'\\n'\n' global_stmt ::= \"global\" identifier (\",\" identifier)*\\n'\n'\\n'\n'The \"global\" statement is a declaration which holds for the '\n'entire\\n'\n'current code block. It means that the listed identifiers are to '\n'be\\n'\n'interpreted as globals. It would be impossible to assign to a '\n'global\\n'\n'variable without \"global\", although free variables may refer to\\n'\n'globals without being declared global.\\n'\n'\\n'\n'Names listed in a \"global\" statement must not be used in the same '\n'code\\n'\n'block textually preceding that \"global\" statement.\\n'\n'\\n'\n'Names listed in a \"global\" statement must not be defined as '\n'formal\\n'\n'parameters, or as targets in \"with\" statements or \"except\" '\n'clauses, or\\n'\n'in a \"for\" target list, \"class\" definition, function definition,\\n'\n'\"import\" statement, or variable annotation.\\n'\n'\\n'\n'**CPython implementation detail:** The current implementation does '\n'not\\n'\n'enforce some of these restrictions, but programs should not abuse '\n'this\\n'\n'freedom, as future implementations may enforce them or silently '\n'change\\n'\n'the meaning of the program.\\n'\n'\\n'\n'**Programmer\u2019s note:** \"global\" is a directive to the parser. It\\n'\n'applies only to code parsed at the same time as the \"global\"\\n'\n'statement. In particular, a \"global\" statement contained in a '\n'string\\n'\n'or code object supplied to the built-in \"exec()\" function does '\n'not\\n'\n'affect the code block *containing* the function call, and code\\n'\n'contained in such a string is unaffected by \"global\" statements in '\n'the\\n'\n'code containing the function call. The same applies to the '\n'\"eval()\"\\n'\n'and \"compile()\" functions.\\n',\n'id-classes':'Reserved classes of identifiers\\n'\n'*******************************\\n'\n'\\n'\n'Certain classes of identifiers (besides keywords) have '\n'special\\n'\n'meanings. These classes are identified by the patterns of '\n'leading and\\n'\n'trailing underscore characters:\\n'\n'\\n'\n'\"_*\"\\n'\n' Not imported by \"from module import *\". The special '\n'identifier \"_\"\\n'\n' is used in the interactive interpreter to store the result '\n'of the\\n'\n' last evaluation; it is stored in the \"builtins\" module. 
'\n'When not\\n'\n' in interactive mode, \"_\" has no special meaning and is not '\n'defined.\\n'\n' See section The import statement.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' The name \"_\" is often used in conjunction with\\n'\n' internationalization; refer to the documentation for the\\n'\n' \"gettext\" module for more information on this '\n'convention.\\n'\n'\\n'\n'\"__*__\"\\n'\n' System-defined names, informally known as \u201cdunder\u201d names. '\n'These\\n'\n' names are defined by the interpreter and its '\n'implementation\\n'\n' (including the standard library). Current system names are\\n'\n' discussed in the Special method names section and '\n'elsewhere. More\\n'\n' will likely be defined in future versions of Python. *Any* '\n'use of\\n'\n' \"__*__\" names, in any context, that does not follow '\n'explicitly\\n'\n' documented use, is subject to breakage without warning.\\n'\n'\\n'\n'\"__*\"\\n'\n' Class-private names. Names in this category, when used '\n'within the\\n'\n' context of a class definition, are re-written to use a '\n'mangled form\\n'\n' to help avoid name clashes between \u201cprivate\u201d attributes of '\n'base and\\n'\n' derived classes. See section Identifiers (Names).\\n',\n'identifiers':'Identifiers and keywords\\n'\n'************************\\n'\n'\\n'\n'Identifiers (also referred to as *names*) are described by '\n'the\\n'\n'following lexical definitions.\\n'\n'\\n'\n'The syntax of identifiers in Python is based on the Unicode '\n'standard\\n'\n'annex UAX-31, with elaboration and changes as defined below; '\n'see also\\n'\n'**PEP 3131** for further details.\\n'\n'\\n'\n'Within the ASCII range (U+0001..U+007F), the valid characters '\n'for\\n'\n'identifiers are the same as in Python 2.x: the uppercase and '\n'lowercase\\n'\n'letters \"A\" through \"Z\", the underscore \"_\" and, except for '\n'the first\\n'\n'character, the digits \"0\" through \"9\".\\n'\n'\\n'\n'Python 3.0 introduces additional characters from outside the '\n'ASCII\\n'\n'range (see **PEP 3131**). For these characters, the '\n'classification\\n'\n'uses the version of the Unicode Character Database as '\n'included in the\\n'\n'\"unicodedata\" module.\\n'\n'\\n'\n'Identifiers are unlimited in length. 
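# An illustration, using arbitrary sample identifiers, of the rules above and the
# NFKC normalisation noted just below: non-ASCII letters are valid in names, and
# compatibility characters normalise to their ASCII equivalents.
# >>> côté = 1
# >>> côté
# 1
# >>> import unicodedata
# >>> unicodedata.normalize('NFKC', 'ﬁle') == 'file'
# True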
Case is significant.\\n'\n'\\n'\n' identifier ::= xid_start xid_continue*\\n'\n' id_start ::= \\n'\n' id_continue ::= \\n'\n' xid_start ::= \\n'\n' xid_continue ::= \\n'\n'\\n'\n'The Unicode category codes mentioned above stand for:\\n'\n'\\n'\n'* *Lu* - uppercase letters\\n'\n'\\n'\n'* *Ll* - lowercase letters\\n'\n'\\n'\n'* *Lt* - titlecase letters\\n'\n'\\n'\n'* *Lm* - modifier letters\\n'\n'\\n'\n'* *Lo* - other letters\\n'\n'\\n'\n'* *Nl* - letter numbers\\n'\n'\\n'\n'* *Mn* - nonspacing marks\\n'\n'\\n'\n'* *Mc* - spacing combining marks\\n'\n'\\n'\n'* *Nd* - decimal numbers\\n'\n'\\n'\n'* *Pc* - connector punctuations\\n'\n'\\n'\n'* *Other_ID_Start* - explicit list of characters in '\n'PropList.txt to\\n'\n' support backwards compatibility\\n'\n'\\n'\n'* *Other_ID_Continue* - likewise\\n'\n'\\n'\n'All identifiers are converted into the normal form NFKC while '\n'parsing;\\n'\n'comparison of identifiers is based on NFKC.\\n'\n'\\n'\n'A non-normative HTML file listing all valid identifier '\n'characters for\\n'\n'Unicode 4.1 can be found at\\n'\n'https://www.unicode.org/Public/13.0.0/ucd/DerivedCoreProperties.txt\\n'\n'\\n'\n'\\n'\n'Keywords\\n'\n'========\\n'\n'\\n'\n'The following identifiers are used as reserved words, or '\n'*keywords* of\\n'\n'the language, and cannot be used as ordinary identifiers. '\n'They must\\n'\n'be spelled exactly as written here:\\n'\n'\\n'\n' False await else import pass\\n'\n' None break except in raise\\n'\n' True class finally is return\\n'\n' and continue for lambda try\\n'\n' as def from nonlocal while\\n'\n' assert del global not with\\n'\n' async elif if or yield\\n'\n'\\n'\n'\\n'\n'Soft Keywords\\n'\n'=============\\n'\n'\\n'\n'New in version 3.10.\\n'\n'\\n'\n'Some identifiers are only reserved under specific contexts. '\n'These are\\n'\n'known as *soft keywords*. The identifiers \"match\", \"case\" '\n'and \"_\" can\\n'\n'syntactically act as keywords in contexts related to the '\n'pattern\\n'\n'matching statement, but this distinction is done at the '\n'parser level,\\n'\n'not when tokenizing.\\n'\n'\\n'\n'As soft keywords, their use with pattern matching is possible '\n'while\\n'\n'still preserving compatibility with existing code that uses '\n'\"match\",\\n'\n'\"case\" and \"_\" as identifier names.\\n'\n'\\n'\n'\\n'\n'Reserved classes of identifiers\\n'\n'===============================\\n'\n'\\n'\n'Certain classes of identifiers (besides keywords) have '\n'special\\n'\n'meanings. These classes are identified by the patterns of '\n'leading and\\n'\n'trailing underscore characters:\\n'\n'\\n'\n'\"_*\"\\n'\n' Not imported by \"from module import *\". The special '\n'identifier \"_\"\\n'\n' is used in the interactive interpreter to store the result '\n'of the\\n'\n' last evaluation; it is stored in the \"builtins\" module. '\n'When not\\n'\n' in interactive mode, \"_\" has no special meaning and is not '\n'defined.\\n'\n' See section The import statement.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' The name \"_\" is often used in conjunction with\\n'\n' internationalization; refer to the documentation for '\n'the\\n'\n' \"gettext\" module for more information on this '\n'convention.\\n'\n'\\n'\n'\"__*__\"\\n'\n' System-defined names, informally known as \u201cdunder\u201d names. '\n'These\\n'\n' names are defined by the interpreter and its '\n'implementation\\n'\n' (including the standard library). Current system names '\n'are\\n'\n' discussed in the Special method names section and '\n'elsewhere. 
More\\n'\n' will likely be defined in future versions of Python. '\n'*Any* use of\\n'\n' \"__*__\" names, in any context, that does not follow '\n'explicitly\\n'\n' documented use, is subject to breakage without warning.\\n'\n'\\n'\n'\"__*\"\\n'\n' Class-private names. Names in this category, when used '\n'within the\\n'\n' context of a class definition, are re-written to use a '\n'mangled form\\n'\n' to help avoid name clashes between \u201cprivate\u201d attributes of '\n'base and\\n'\n' derived classes. See section Identifiers (Names).\\n',\n'if':'The \"if\" statement\\n'\n'******************\\n'\n'\\n'\n'The \"if\" statement is used for conditional execution:\\n'\n'\\n'\n' if_stmt ::= \"if\" assignment_expression \":\" suite\\n'\n' (\"elif\" assignment_expression \":\" suite)*\\n'\n' [\"else\" \":\" suite]\\n'\n'\\n'\n'It selects exactly one of the suites by evaluating the expressions '\n'one\\n'\n'by one until one is found to be true (see section Boolean operations\\n'\n'for the definition of true and false); then that suite is executed\\n'\n'(and no other part of the \"if\" statement is executed or evaluated).\\n'\n'If all expressions are false, the suite of the \"else\" clause, if\\n'\n'present, is executed.\\n',\n'imaginary':'Imaginary literals\\n'\n'******************\\n'\n'\\n'\n'Imaginary literals are described by the following lexical '\n'definitions:\\n'\n'\\n'\n' imagnumber ::= (floatnumber | digitpart) (\"j\" | \"J\")\\n'\n'\\n'\n'An imaginary literal yields a complex number with a real part '\n'of 0.0.\\n'\n'Complex numbers are represented as a pair of floating point '\n'numbers\\n'\n'and have the same restrictions on their range. To create a '\n'complex\\n'\n'number with a nonzero real part, add a floating point number to '\n'it,\\n'\n'e.g., \"(3+4j)\". Some examples of imaginary literals:\\n'\n'\\n'\n' 3.14j 10.j 10j .001j 1e100j 3.14e-10j '\n'3.14_15_93j\\n',\n'import':'The \"import\" statement\\n'\n'**********************\\n'\n'\\n'\n' import_stmt ::= \"import\" module [\"as\" identifier] (\",\" '\n'module [\"as\" identifier])*\\n'\n' | \"from\" relative_module \"import\" identifier '\n'[\"as\" identifier]\\n'\n' (\",\" identifier [\"as\" identifier])*\\n'\n' | \"from\" relative_module \"import\" \"(\" '\n'identifier [\"as\" identifier]\\n'\n' (\",\" identifier [\"as\" identifier])* [\",\"] \")\"\\n'\n' | \"from\" relative_module \"import\" \"*\"\\n'\n' module ::= (identifier \".\")* identifier\\n'\n' relative_module ::= \".\"* module | \".\"+\\n'\n'\\n'\n'The basic import statement (no \"from\" clause) is executed in two\\n'\n'steps:\\n'\n'\\n'\n'1. find a module, loading and initializing it if necessary\\n'\n'\\n'\n'2. define a name or names in the local namespace for the scope '\n'where\\n'\n' the \"import\" statement occurs.\\n'\n'\\n'\n'When the statement contains multiple clauses (separated by commas) '\n'the\\n'\n'two steps are carried out separately for each clause, just as '\n'though\\n'\n'the clauses had been separated out into individual import '\n'statements.\\n'\n'\\n'\n'The details of the first step, finding and loading modules are\\n'\n'described in greater detail in the section on the import system, '\n'which\\n'\n'also describes the various types of packages and modules that can '\n'be\\n'\n'imported, as well as all the hooks that can be used to customize '\n'the\\n'\n'import system. 
Note that failures in this step may indicate '\n'either\\n'\n'that the module could not be located, *or* that an error occurred\\n'\n'while initializing the module, which includes execution of the\\n'\n'module\u2019s code.\\n'\n'\\n'\n'If the requested module is retrieved successfully, it will be '\n'made\\n'\n'available in the local namespace in one of three ways:\\n'\n'\\n'\n'* If the module name is followed by \"as\", then the name following '\n'\"as\"\\n'\n' is bound directly to the imported module.\\n'\n'\\n'\n'* If no other name is specified, and the module being imported is '\n'a\\n'\n' top level module, the module\u2019s name is bound in the local '\n'namespace\\n'\n' as a reference to the imported module\\n'\n'\\n'\n'* If the module being imported is *not* a top level module, then '\n'the\\n'\n' name of the top level package that contains the module is bound '\n'in\\n'\n' the local namespace as a reference to the top level package. '\n'The\\n'\n' imported module must be accessed using its full qualified name\\n'\n' rather than directly\\n'\n'\\n'\n'The \"from\" form uses a slightly more complex process:\\n'\n'\\n'\n'1. find the module specified in the \"from\" clause, loading and\\n'\n' initializing it if necessary;\\n'\n'\\n'\n'2. for each of the identifiers specified in the \"import\" clauses:\\n'\n'\\n'\n' 1. check if the imported module has an attribute by that name\\n'\n'\\n'\n' 2. if not, attempt to import a submodule with that name and '\n'then\\n'\n' check the imported module again for that attribute\\n'\n'\\n'\n' 3. if the attribute is not found, \"ImportError\" is raised.\\n'\n'\\n'\n' 4. otherwise, a reference to that value is stored in the local\\n'\n' namespace, using the name in the \"as\" clause if it is '\n'present,\\n'\n' otherwise using the attribute name\\n'\n'\\n'\n'Examples:\\n'\n'\\n'\n' import foo # foo imported and bound locally\\n'\n' import foo.bar.baz # foo.bar.baz imported, foo bound '\n'locally\\n'\n' import foo.bar.baz as fbb # foo.bar.baz imported and bound as '\n'fbb\\n'\n' from foo.bar import baz # foo.bar.baz imported and bound as '\n'baz\\n'\n' from foo import attr # foo imported and foo.attr bound as '\n'attr\\n'\n'\\n'\n'If the list of identifiers is replaced by a star (\"\\'*\\'\"), all '\n'public\\n'\n'names defined in the module are bound in the local namespace for '\n'the\\n'\n'scope where the \"import\" statement occurs.\\n'\n'\\n'\n'The *public names* defined by a module are determined by checking '\n'the\\n'\n'module\u2019s namespace for a variable named \"__all__\"; if defined, it '\n'must\\n'\n'be a sequence of strings which are names defined or imported by '\n'that\\n'\n'module. The names given in \"__all__\" are all considered public '\n'and\\n'\n'are required to exist. If \"__all__\" is not defined, the set of '\n'public\\n'\n'names includes all names found in the module\u2019s namespace which do '\n'not\\n'\n'begin with an underscore character (\"\\'_\\'\"). \"__all__\" should '\n'contain\\n'\n'the entire public API. It is intended to avoid accidentally '\n'exporting\\n'\n'items that are not part of the API (such as library modules which '\n'were\\n'\n'imported and used within the module).\\n'\n'\\n'\n'The wild card form of import \u2014 \"from module import *\" \u2014 is only\\n'\n'allowed at the module level. Attempting to use it in class or\\n'\n'function definitions will raise a \"SyntaxError\".\\n'\n'\\n'\n'When specifying what module to import you do not have to specify '\n'the\\n'\n'absolute name of the module. 
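# A small, hypothetical module layout illustrating the "__all__" convention
# described above: only the names listed there are bound by a wildcard import.
#
#   # contents of a hypothetical helpers.py
#   __all__ = ['useful']
#   def useful(): return 42
#   def _internal(): return -1
#
#   # elsewhere: "from helpers import *" binds useful() but not _internal()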
When a module or package is '\n'contained\\n'\n'within another package it is possible to make a relative import '\n'within\\n'\n'the same top package without having to mention the package name. '\n'By\\n'\n'using leading dots in the specified module or package after \"from\" '\n'you\\n'\n'can specify how high to traverse up the current package hierarchy\\n'\n'without specifying exact names. One leading dot means the current\\n'\n'package where the module making the import exists. Two dots means '\n'up\\n'\n'one package level. Three dots is up two levels, etc. So if you '\n'execute\\n'\n'\"from . import mod\" from a module in the \"pkg\" package then you '\n'will\\n'\n'end up importing \"pkg.mod\". If you execute \"from ..subpkg2 import '\n'mod\"\\n'\n'from within \"pkg.subpkg1\" you will import \"pkg.subpkg2.mod\". The\\n'\n'specification for relative imports is contained in the Package\\n'\n'Relative Imports section.\\n'\n'\\n'\n'\"importlib.import_module()\" is provided to support applications '\n'that\\n'\n'determine dynamically the modules to be loaded.\\n'\n'\\n'\n'Raises an auditing event \"import\" with arguments \"module\", '\n'\"filename\",\\n'\n'\"sys.path\", \"sys.meta_path\", \"sys.path_hooks\".\\n'\n'\\n'\n'\\n'\n'Future statements\\n'\n'=================\\n'\n'\\n'\n'A *future statement* is a directive to the compiler that a '\n'particular\\n'\n'module should be compiled using syntax or semantics that will be\\n'\n'available in a specified future release of Python where the '\n'feature\\n'\n'becomes standard.\\n'\n'\\n'\n'The future statement is intended to ease migration to future '\n'versions\\n'\n'of Python that introduce incompatible changes to the language. '\n'It\\n'\n'allows use of the new features on a per-module basis before the\\n'\n'release in which the feature becomes standard.\\n'\n'\\n'\n' future_stmt ::= \"from\" \"__future__\" \"import\" feature [\"as\" '\n'identifier]\\n'\n' (\",\" feature [\"as\" identifier])*\\n'\n' | \"from\" \"__future__\" \"import\" \"(\" feature '\n'[\"as\" identifier]\\n'\n' (\",\" feature [\"as\" identifier])* [\",\"] \")\"\\n'\n' feature ::= identifier\\n'\n'\\n'\n'A future statement must appear near the top of the module. The '\n'only\\n'\n'lines that can appear before a future statement are:\\n'\n'\\n'\n'* the module docstring (if any),\\n'\n'\\n'\n'* comments,\\n'\n'\\n'\n'* blank lines, and\\n'\n'\\n'\n'* other future statements.\\n'\n'\\n'\n'The only feature that requires using the future statement is\\n'\n'\"annotations\" (see **PEP 563**).\\n'\n'\\n'\n'All historical features enabled by the future statement are still\\n'\n'recognized by Python 3. The list includes \"absolute_import\",\\n'\n'\"division\", \"generators\", \"generator_stop\", \"unicode_literals\",\\n'\n'\"print_function\", \"nested_scopes\" and \"with_statement\". They are '\n'all\\n'\n'redundant because they are always enabled, and only kept for '\n'backwards\\n'\n'compatibility.\\n'\n'\\n'\n'A future statement is recognized and treated specially at compile\\n'\n'time: Changes to the semantics of core constructs are often\\n'\n'implemented by generating different code. It may even be the '\n'case\\n'\n'that a new feature introduces new incompatible syntax (such as a '\n'new\\n'\n'reserved word), in which case the compiler may need to parse the\\n'\n'module differently. 
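# Minimal sketch of the one feature that still requires a future statement,
# "annotations" (PEP 563): with the import in place, annotations are kept as
# strings at runtime instead of being evaluated (hypothetical module; the
# annotation below is a forward reference that need not exist at def time).
#
#   from __future__ import annotations
#   def greet(who: SomeLaterDefinedType) -> None:   # not evaluated here
#       print('hello', who)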
Such decisions cannot be pushed off until\\n'\n'runtime.\\n'\n'\\n'\n'For any given release, the compiler knows which feature names '\n'have\\n'\n'been defined, and raises a compile-time error if a future '\n'statement\\n'\n'contains a feature not known to it.\\n'\n'\\n'\n'The direct runtime semantics are the same as for any import '\n'statement:\\n'\n'there is a standard module \"__future__\", described later, and it '\n'will\\n'\n'be imported in the usual way at the time the future statement is\\n'\n'executed.\\n'\n'\\n'\n'The interesting runtime semantics depend on the specific feature\\n'\n'enabled by the future statement.\\n'\n'\\n'\n'Note that there is nothing special about the statement:\\n'\n'\\n'\n' import __future__ [as name]\\n'\n'\\n'\n'That is not a future statement; it\u2019s an ordinary import statement '\n'with\\n'\n'no special semantics or syntax restrictions.\\n'\n'\\n'\n'Code compiled by calls to the built-in functions \"exec()\" and\\n'\n'\"compile()\" that occur in a module \"M\" containing a future '\n'statement\\n'\n'will, by default, use the new syntax or semantics associated with '\n'the\\n'\n'future statement. This can be controlled by optional arguments '\n'to\\n'\n'\"compile()\" \u2014 see the documentation of that function for details.\\n'\n'\\n'\n'A future statement typed at an interactive interpreter prompt '\n'will\\n'\n'take effect for the rest of the interpreter session. If an\\n'\n'interpreter is started with the \"-i\" option, is passed a script '\n'name\\n'\n'to execute, and the script includes a future statement, it will be '\n'in\\n'\n'effect in the interactive session started after the script is\\n'\n'executed.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 236** - Back to the __future__\\n'\n' The original proposal for the __future__ mechanism.\\n',\n'in':'Membership test operations\\n'\n'**************************\\n'\n'\\n'\n'The operators \"in\" and \"not in\" test for membership. \"x in s\"\\n'\n'evaluates to \"True\" if *x* is a member of *s*, and \"False\" otherwise.\\n'\n'\"x not in s\" returns the negation of \"x in s\". All built-in '\n'sequences\\n'\n'and set types support this as well as dictionary, for which \"in\" '\n'tests\\n'\n'whether the dictionary has a given key. For container types such as\\n'\n'list, tuple, set, frozenset, dict, or collections.deque, the\\n'\n'expression \"x in y\" is equivalent to \"any(x is e or x == e for e in\\n'\n'y)\".\\n'\n'\\n'\n'For the string and bytes types, \"x in y\" is \"True\" if and only if *x*\\n'\n'is a substring of *y*. An equivalent test is \"y.find(x) != -1\".\\n'\n'Empty strings are always considered to be a substring of any other\\n'\n'string, so \"\"\" in \"abc\"\" will return \"True\".\\n'\n'\\n'\n'For user-defined classes which define the \"__contains__()\" method, \"x\\n'\n'in y\" returns \"True\" if \"y.__contains__(x)\" returns a true value, and\\n'\n'\"False\" otherwise.\\n'\n'\\n'\n'For user-defined classes which do not define \"__contains__()\" but do\\n'\n'define \"__iter__()\", \"x in y\" is \"True\" if some value \"z\", for which\\n'\n'the expression \"x is z or x == z\" is true, is produced while '\n'iterating\\n'\n'over \"y\". 
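# Illustrative sketch: "in" prefers __contains__() and otherwise falls back to
# iteration; both classes here are examples, not real APIs.
class Evens:
    def __contains__(self, item):
        return isinstance(item, int) and item % 2 == 0

class Countdown:
    def __iter__(self):            # no __contains__: membership iterates instead
        return iter((3, 2, 1))

print(4 in Evens())      # True, via __contains__
print(2 in Countdown())  # True, found while iterating
print("" in "abc")       # True, the empty string is a substring of every string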
If an exception is raised during the iteration, it is as if\\n'\n'\"in\" raised that exception.\\n'\n'\\n'\n'Lastly, the old-style iteration protocol is tried: if a class defines\\n'\n'\"__getitem__()\", \"x in y\" is \"True\" if and only if there is a non-\\n'\n'negative integer index *i* such that \"x is y[i] or x == y[i]\", and no\\n'\n'lower integer index raises the \"IndexError\" exception. (If any other\\n'\n'exception is raised, it is as if \"in\" raised that exception).\\n'\n'\\n'\n'The operator \"not in\" is defined to have the inverse truth value of\\n'\n'\"in\".\\n',\n'integers':'Integer literals\\n'\n'****************\\n'\n'\\n'\n'Integer literals are described by the following lexical '\n'definitions:\\n'\n'\\n'\n' integer ::= decinteger | bininteger | octinteger | '\n'hexinteger\\n'\n' decinteger ::= nonzerodigit ([\"_\"] digit)* | \"0\"+ ([\"_\"] '\n'\"0\")*\\n'\n' bininteger ::= \"0\" (\"b\" | \"B\") ([\"_\"] bindigit)+\\n'\n' octinteger ::= \"0\" (\"o\" | \"O\") ([\"_\"] octdigit)+\\n'\n' hexinteger ::= \"0\" (\"x\" | \"X\") ([\"_\"] hexdigit)+\\n'\n' nonzerodigit ::= \"1\"...\"9\"\\n'\n' digit ::= \"0\"...\"9\"\\n'\n' bindigit ::= \"0\" | \"1\"\\n'\n' octdigit ::= \"0\"...\"7\"\\n'\n' hexdigit ::= digit | \"a\"...\"f\" | \"A\"...\"F\"\\n'\n'\\n'\n'There is no limit for the length of integer literals apart from '\n'what\\n'\n'can be stored in available memory.\\n'\n'\\n'\n'Underscores are ignored for determining the numeric value of '\n'the\\n'\n'literal. They can be used to group digits for enhanced '\n'readability.\\n'\n'One underscore can occur between digits, and after base '\n'specifiers\\n'\n'like \"0x\".\\n'\n'\\n'\n'Note that leading zeros in a non-zero decimal number are not '\n'allowed.\\n'\n'This is for disambiguation with C-style octal literals, which '\n'Python\\n'\n'used before version 3.0.\\n'\n'\\n'\n'Some examples of integer literals:\\n'\n'\\n'\n' 7 2147483647 0o177 0b100110111\\n'\n' 3 79228162514264337593543950336 0o377 0xdeadbeef\\n'\n' 100_000_000_000 0b_1110_0101\\n'\n'\\n'\n'Changed in version 3.6: Underscores are now allowed for '\n'grouping\\n'\n'purposes in literals.\\n',\n'lambda':'Lambdas\\n'\n'*******\\n'\n'\\n'\n' lambda_expr ::= \"lambda\" [parameter_list] \":\" expression\\n'\n'\\n'\n'Lambda expressions (sometimes called lambda forms) are used to '\n'create\\n'\n'anonymous functions. The expression \"lambda parameters: '\n'expression\"\\n'\n'yields a function object. The unnamed object behaves like a '\n'function\\n'\n'object defined with:\\n'\n'\\n'\n' def (parameters):\\n'\n' return expression\\n'\n'\\n'\n'See section Function definitions for the syntax of parameter '\n'lists.\\n'\n'Note that functions created with lambda expressions cannot '\n'contain\\n'\n'statements or annotations.\\n',\n'lists':'List displays\\n'\n'*************\\n'\n'\\n'\n'A list display is a possibly empty series of expressions enclosed '\n'in\\n'\n'square brackets:\\n'\n'\\n'\n' list_display ::= \"[\" [starred_list | comprehension] \"]\"\\n'\n'\\n'\n'A list display yields a new list object, the contents being '\n'specified\\n'\n'by either a list of expressions or a comprehension. 
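# Illustrative sketch: underscore grouping in integer literals, a lambda next
# to its "def" equivalent, and a list display built from a comprehension.
assert 1_000_000_000 == 10 ** 9
assert 0b_1110_0101 == 0xE5

square = lambda x: x * x       # behaves like the function below
def square_def(x):
    return x * x

assert square(7) == square_def(7) == 49
assert [square(n) for n in range(5)] == [0, 1, 4, 9, 16]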
When a comma-\\n'\n'separated list of expressions is supplied, its elements are '\n'evaluated\\n'\n'from left to right and placed into the list object in that order.\\n'\n'When a comprehension is supplied, the list is constructed from the\\n'\n'elements resulting from the comprehension.\\n',\n'naming':'Naming and binding\\n'\n'******************\\n'\n'\\n'\n'\\n'\n'Binding of names\\n'\n'================\\n'\n'\\n'\n'*Names* refer to objects. Names are introduced by name binding\\n'\n'operations.\\n'\n'\\n'\n'The following constructs bind names: formal parameters to '\n'functions,\\n'\n'\"import\" statements, class and function definitions (these bind '\n'the\\n'\n'class or function name in the defining block), and targets that '\n'are\\n'\n'identifiers if occurring in an assignment, \"for\" loop header, or '\n'after\\n'\n'\"as\" in a \"with\" statement or \"except\" clause. The \"import\" '\n'statement\\n'\n'of the form \"from ... import *\" binds all names defined in the\\n'\n'imported module, except those beginning with an underscore. This '\n'form\\n'\n'may only be used at the module level.\\n'\n'\\n'\n'A target occurring in a \"del\" statement is also considered bound '\n'for\\n'\n'this purpose (though the actual semantics are to unbind the '\n'name).\\n'\n'\\n'\n'Each assignment or import statement occurs within a block defined '\n'by a\\n'\n'class or function definition or at the module level (the '\n'top-level\\n'\n'code block).\\n'\n'\\n'\n'If a name is bound in a block, it is a local variable of that '\n'block,\\n'\n'unless declared as \"nonlocal\" or \"global\". If a name is bound at '\n'the\\n'\n'module level, it is a global variable. (The variables of the '\n'module\\n'\n'code block are local and global.) If a variable is used in a '\n'code\\n'\n'block but not defined there, it is a *free variable*.\\n'\n'\\n'\n'Each occurrence of a name in the program text refers to the '\n'*binding*\\n'\n'of that name established by the following name resolution rules.\\n'\n'\\n'\n'\\n'\n'Resolution of names\\n'\n'===================\\n'\n'\\n'\n'A *scope* defines the visibility of a name within a block. If a '\n'local\\n'\n'variable is defined in a block, its scope includes that block. If '\n'the\\n'\n'definition occurs in a function block, the scope extends to any '\n'blocks\\n'\n'contained within the defining one, unless a contained block '\n'introduces\\n'\n'a different binding for the name.\\n'\n'\\n'\n'When a name is used in a code block, it is resolved using the '\n'nearest\\n'\n'enclosing scope. The set of all such scopes visible to a code '\n'block\\n'\n'is called the block\u2019s *environment*.\\n'\n'\\n'\n'When a name is not found at all, a \"NameError\" exception is '\n'raised. If\\n'\n'the current scope is a function scope, and the name refers to a '\n'local\\n'\n'variable that has not yet been bound to a value at the point where '\n'the\\n'\n'name is used, an \"UnboundLocalError\" exception is raised.\\n'\n'\"UnboundLocalError\" is a subclass of \"NameError\".\\n'\n'\\n'\n'If a name binding operation occurs anywhere within a code block, '\n'all\\n'\n'uses of the name within the block are treated as references to '\n'the\\n'\n'current block. This can lead to errors when a name is used within '\n'a\\n'\n'block before it is bound. This rule is subtle. Python lacks\\n'\n'declarations and allows name binding operations to occur anywhere\\n'\n'within a code block. 
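# Illustrative sketch: an assignment anywhere in a function makes the name
# local to the whole block, so reading it first raises UnboundLocalError;
# "global" restores access to the module-level binding.
counter = 0

def bump():
    try:
        print(counter)         # local here, because of the assignment below
    except UnboundLocalError as exc:
        print("caught:", exc)
    counter = 1                # this binding is what made "counter" local

def bump_global():
    global counter
    counter += 1

bump()
bump_global()
assert counter == 1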
The local variables of a code block can be\\n'\n'determined by scanning the entire text of the block for name '\n'binding\\n'\n'operations.\\n'\n'\\n'\n'If the \"global\" statement occurs within a block, all uses of the '\n'name\\n'\n'specified in the statement refer to the binding of that name in '\n'the\\n'\n'top-level namespace. Names are resolved in the top-level '\n'namespace by\\n'\n'searching the global namespace, i.e. the namespace of the module\\n'\n'containing the code block, and the builtins namespace, the '\n'namespace\\n'\n'of the module \"builtins\". The global namespace is searched '\n'first. If\\n'\n'the name is not found there, the builtins namespace is searched. '\n'The\\n'\n'\"global\" statement must precede all uses of the name.\\n'\n'\\n'\n'The \"global\" statement has the same scope as a name binding '\n'operation\\n'\n'in the same block. If the nearest enclosing scope for a free '\n'variable\\n'\n'contains a global statement, the free variable is treated as a '\n'global.\\n'\n'\\n'\n'The \"nonlocal\" statement causes corresponding names to refer to\\n'\n'previously bound variables in the nearest enclosing function '\n'scope.\\n'\n'\"SyntaxError\" is raised at compile time if the given name does '\n'not\\n'\n'exist in any enclosing function scope.\\n'\n'\\n'\n'The namespace for a module is automatically created the first time '\n'a\\n'\n'module is imported. The main module for a script is always '\n'called\\n'\n'\"__main__\".\\n'\n'\\n'\n'Class definition blocks and arguments to \"exec()\" and \"eval()\" '\n'are\\n'\n'special in the context of name resolution. A class definition is '\n'an\\n'\n'executable statement that may use and define names. These '\n'references\\n'\n'follow the normal rules for name resolution with an exception '\n'that\\n'\n'unbound local variables are looked up in the global namespace. '\n'The\\n'\n'namespace of the class definition becomes the attribute dictionary '\n'of\\n'\n'the class. The scope of names defined in a class block is limited '\n'to\\n'\n'the class block; it does not extend to the code blocks of methods '\n'\u2013\\n'\n'this includes comprehensions and generator expressions since they '\n'are\\n'\n'implemented using a function scope. This means that the '\n'following\\n'\n'will fail:\\n'\n'\\n'\n' class A:\\n'\n' a = 42\\n'\n' b = list(a + i for i in range(10))\\n'\n'\\n'\n'\\n'\n'Builtins and restricted execution\\n'\n'=================================\\n'\n'\\n'\n'**CPython implementation detail:** Users should not touch\\n'\n'\"__builtins__\"; it is strictly an implementation detail. Users\\n'\n'wanting to override values in the builtins namespace should '\n'\"import\"\\n'\n'the \"builtins\" module and modify its attributes appropriately.\\n'\n'\\n'\n'The builtins namespace associated with the execution of a code '\n'block\\n'\n'is actually found by looking up the name \"__builtins__\" in its '\n'global\\n'\n'namespace; this should be a dictionary or a module (in the latter '\n'case\\n'\n'the module\u2019s dictionary is used). By default, when in the '\n'\"__main__\"\\n'\n'module, \"__builtins__\" is the built-in module \"builtins\"; when in '\n'any\\n'\n'other module, \"__builtins__\" is an alias for the dictionary of '\n'the\\n'\n'\"builtins\" module itself.\\n'\n'\\n'\n'\\n'\n'Interaction with dynamic features\\n'\n'=================================\\n'\n'\\n'\n'Name resolution of free variables occurs at runtime, not at '\n'compile\\n'\n'time. 
This means that the following code will print 42:\\n'\n'\\n'\n' i = 10\\n'\n' def f():\\n'\n' print(i)\\n'\n' i = 42\\n'\n' f()\\n'\n'\\n'\n'The \"eval()\" and \"exec()\" functions do not have access to the '\n'full\\n'\n'environment for resolving names. Names may be resolved in the '\n'local\\n'\n'and global namespaces of the caller. Free variables are not '\n'resolved\\n'\n'in the nearest enclosing namespace, but in the global namespace. '\n'[1]\\n'\n'The \"exec()\" and \"eval()\" functions have optional arguments to\\n'\n'override the global and local namespace. If only one namespace '\n'is\\n'\n'specified, it is used for both.\\n',\n'nonlocal':'The \"nonlocal\" statement\\n'\n'************************\\n'\n'\\n'\n' nonlocal_stmt ::= \"nonlocal\" identifier (\",\" identifier)*\\n'\n'\\n'\n'The \"nonlocal\" statement causes the listed identifiers to refer '\n'to\\n'\n'previously bound variables in the nearest enclosing scope '\n'excluding\\n'\n'globals. This is important because the default behavior for '\n'binding is\\n'\n'to search the local namespace first. The statement allows\\n'\n'encapsulated code to rebind variables outside of the local '\n'scope\\n'\n'besides the global (module) scope.\\n'\n'\\n'\n'Names listed in a \"nonlocal\" statement, unlike those listed in '\n'a\\n'\n'\"global\" statement, must refer to pre-existing bindings in an\\n'\n'enclosing scope (the scope in which a new binding should be '\n'created\\n'\n'cannot be determined unambiguously).\\n'\n'\\n'\n'Names listed in a \"nonlocal\" statement must not collide with '\n'pre-\\n'\n'existing bindings in the local scope.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 3104** - Access to Names in Outer Scopes\\n'\n' The specification for the \"nonlocal\" statement.\\n',\n'numbers':'Numeric literals\\n'\n'****************\\n'\n'\\n'\n'There are three types of numeric literals: integers, floating '\n'point\\n'\n'numbers, and imaginary numbers. There are no complex literals\\n'\n'(complex numbers can be formed by adding a real number and an\\n'\n'imaginary number).\\n'\n'\\n'\n'Note that numeric literals do not include a sign; a phrase like '\n'\"-1\"\\n'\n'is actually an expression composed of the unary operator \u2018\"-\"\u2019 '\n'and the\\n'\n'literal \"1\".\\n',\n'numeric-types':'Emulating numeric types\\n'\n'***********************\\n'\n'\\n'\n'The following methods can be defined to emulate numeric '\n'objects.\\n'\n'Methods corresponding to operations that are not supported '\n'by the\\n'\n'particular kind of number implemented (e.g., bitwise '\n'operations for\\n'\n'non-integral numbers) should be left undefined.\\n'\n'\\n'\n'object.__add__(self, other)\\n'\n'object.__sub__(self, other)\\n'\n'object.__mul__(self, other)\\n'\n'object.__matmul__(self, other)\\n'\n'object.__truediv__(self, other)\\n'\n'object.__floordiv__(self, other)\\n'\n'object.__mod__(self, other)\\n'\n'object.__divmod__(self, other)\\n'\n'object.__pow__(self, other[, modulo])\\n'\n'object.__lshift__(self, other)\\n'\n'object.__rshift__(self, other)\\n'\n'object.__and__(self, other)\\n'\n'object.__xor__(self, other)\\n'\n'object.__or__(self, other)\\n'\n'\\n'\n' These methods are called to implement the binary '\n'arithmetic\\n'\n' operations (\"+\", \"-\", \"*\", \"@\", \"/\", \"//\", \"%\", '\n'\"divmod()\",\\n'\n' \"pow()\", \"**\", \"<<\", \">>\", \"&\", \"^\", \"|\"). 
For '\n'instance, to\\n'\n' evaluate the expression \"x + y\", where *x* is an '\n'instance of a\\n'\n' class that has an \"__add__()\" method, \"x.__add__(y)\" is '\n'called.\\n'\n' The \"__divmod__()\" method should be the equivalent to '\n'using\\n'\n' \"__floordiv__()\" and \"__mod__()\"; it should not be '\n'related to\\n'\n' \"__truediv__()\". Note that \"__pow__()\" should be '\n'defined to accept\\n'\n' an optional third argument if the ternary version of the '\n'built-in\\n'\n' \"pow()\" function is to be supported.\\n'\n'\\n'\n' If one of those methods does not support the operation '\n'with the\\n'\n' supplied arguments, it should return \"NotImplemented\".\\n'\n'\\n'\n'object.__radd__(self, other)\\n'\n'object.__rsub__(self, other)\\n'\n'object.__rmul__(self, other)\\n'\n'object.__rmatmul__(self, other)\\n'\n'object.__rtruediv__(self, other)\\n'\n'object.__rfloordiv__(self, other)\\n'\n'object.__rmod__(self, other)\\n'\n'object.__rdivmod__(self, other)\\n'\n'object.__rpow__(self, other[, modulo])\\n'\n'object.__rlshift__(self, other)\\n'\n'object.__rrshift__(self, other)\\n'\n'object.__rand__(self, other)\\n'\n'object.__rxor__(self, other)\\n'\n'object.__ror__(self, other)\\n'\n'\\n'\n' These methods are called to implement the binary '\n'arithmetic\\n'\n' operations (\"+\", \"-\", \"*\", \"@\", \"/\", \"//\", \"%\", '\n'\"divmod()\",\\n'\n' \"pow()\", \"**\", \"<<\", \">>\", \"&\", \"^\", \"|\") with reflected '\n'(swapped)\\n'\n' operands. These functions are only called if the left '\n'operand does\\n'\n' not support the corresponding operation [3] and the '\n'operands are of\\n'\n' different types. [4] For instance, to evaluate the '\n'expression \"x -\\n'\n' y\", where *y* is an instance of a class that has an '\n'\"__rsub__()\"\\n'\n' method, \"y.__rsub__(x)\" is called if \"x.__sub__(y)\" '\n'returns\\n'\n' *NotImplemented*.\\n'\n'\\n'\n' Note that ternary \"pow()\" will not try calling '\n'\"__rpow__()\" (the\\n'\n' coercion rules would become too complicated).\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' If the right operand\u2019s type is a subclass of the left '\n'operand\u2019s\\n'\n' type and that subclass provides a different '\n'implementation of the\\n'\n' reflected method for the operation, this method will '\n'be called\\n'\n' before the left operand\u2019s non-reflected method. This '\n'behavior\\n'\n' allows subclasses to override their ancestors\u2019 '\n'operations.\\n'\n'\\n'\n'object.__iadd__(self, other)\\n'\n'object.__isub__(self, other)\\n'\n'object.__imul__(self, other)\\n'\n'object.__imatmul__(self, other)\\n'\n'object.__itruediv__(self, other)\\n'\n'object.__ifloordiv__(self, other)\\n'\n'object.__imod__(self, other)\\n'\n'object.__ipow__(self, other[, modulo])\\n'\n'object.__ilshift__(self, other)\\n'\n'object.__irshift__(self, other)\\n'\n'object.__iand__(self, other)\\n'\n'object.__ixor__(self, other)\\n'\n'object.__ior__(self, other)\\n'\n'\\n'\n' These methods are called to implement the augmented '\n'arithmetic\\n'\n' assignments (\"+=\", \"-=\", \"*=\", \"@=\", \"/=\", \"//=\", \"%=\", '\n'\"**=\",\\n'\n' \"<<=\", \">>=\", \"&=\", \"^=\", \"|=\"). These methods should '\n'attempt to\\n'\n' do the operation in-place (modifying *self*) and return '\n'the result\\n'\n' (which could be, but does not have to be, *self*). If a '\n'specific\\n'\n' method is not defined, the augmented assignment falls '\n'back to the\\n'\n' normal methods. 
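# Illustrative sketch: a minimal wrapper showing __add__, the reflected
# __radd__, and NotImplemented; "Metres" is an example class, not a real API.
class Metres:
    def __init__(self, value):
        self.value = value

    def __add__(self, other):
        if isinstance(other, Metres):
            return Metres(self.value + other.value)
        if isinstance(other, (int, float)):
            return Metres(self.value + other)
        return NotImplemented       # let the other operand's method be tried

    __radd__ = __add__              # handles "2 + Metres(3)"

    def __repr__(self):
        return f"Metres({self.value})"

print(Metres(3) + 2)    # Metres(5), via __add__
print(2 + Metres(3))    # Metres(5): int.__add__ returns NotImplemented, then __radd__ runs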
For instance, if *x* is an instance of '\n'a class\\n'\n' with an \"__iadd__()\" method, \"x += y\" is equivalent to '\n'\"x =\\n'\n' x.__iadd__(y)\" . Otherwise, \"x.__add__(y)\" and '\n'\"y.__radd__(x)\" are\\n'\n' considered, as with the evaluation of \"x + y\". In '\n'certain\\n'\n' situations, augmented assignment can result in '\n'unexpected errors\\n'\n' (see Why does a_tuple[i] += [\u2018item\u2019] raise an exception '\n'when the\\n'\n' addition works?), but this behavior is in fact part of '\n'the data\\n'\n' model.\\n'\n'\\n'\n'object.__neg__(self)\\n'\n'object.__pos__(self)\\n'\n'object.__abs__(self)\\n'\n'object.__invert__(self)\\n'\n'\\n'\n' Called to implement the unary arithmetic operations '\n'(\"-\", \"+\",\\n'\n' \"abs()\" and \"~\").\\n'\n'\\n'\n'object.__complex__(self)\\n'\n'object.__int__(self)\\n'\n'object.__float__(self)\\n'\n'\\n'\n' Called to implement the built-in functions \"complex()\", '\n'\"int()\" and\\n'\n' \"float()\". Should return a value of the appropriate '\n'type.\\n'\n'\\n'\n'object.__index__(self)\\n'\n'\\n'\n' Called to implement \"operator.index()\", and whenever '\n'Python needs\\n'\n' to losslessly convert the numeric object to an integer '\n'object (such\\n'\n' as in slicing, or in the built-in \"bin()\", \"hex()\" and '\n'\"oct()\"\\n'\n' functions). Presence of this method indicates that the '\n'numeric\\n'\n' object is an integer type. Must return an integer.\\n'\n'\\n'\n' If \"__int__()\", \"__float__()\" and \"__complex__()\" are '\n'not defined\\n'\n' then corresponding built-in functions \"int()\", \"float()\" '\n'and\\n'\n' \"complex()\" fall back to \"__index__()\".\\n'\n'\\n'\n'object.__round__(self[, ndigits])\\n'\n'object.__trunc__(self)\\n'\n'object.__floor__(self)\\n'\n'object.__ceil__(self)\\n'\n'\\n'\n' Called to implement the built-in function \"round()\" and '\n'\"math\"\\n'\n' functions \"trunc()\", \"floor()\" and \"ceil()\". Unless '\n'*ndigits* is\\n'\n' passed to \"__round__()\" all these methods should return '\n'the value\\n'\n' of the object truncated to an \"Integral\" (typically an '\n'\"int\").\\n'\n'\\n'\n' If \"__int__()\" is not defined then the built-in function '\n'\"int()\"\\n'\n' falls back to \"__trunc__()\".\\n',\n'objects':'Objects, values and types\\n'\n'*************************\\n'\n'\\n'\n'*Objects* are Python\u2019s abstraction for data. All data in a '\n'Python\\n'\n'program is represented by objects or by relations between '\n'objects. (In\\n'\n'a sense, and in conformance to Von Neumann\u2019s model of a \u201cstored\\n'\n'program computer\u201d, code is also represented by objects.)\\n'\n'\\n'\n'Every object has an identity, a type and a value. An object\u2019s\\n'\n'*identity* never changes once it has been created; you may think '\n'of it\\n'\n'as the object\u2019s address in memory. The \u2018\"is\"\u2019 operator compares '\n'the\\n'\n'identity of two objects; the \"id()\" function returns an integer\\n'\n'representing its identity.\\n'\n'\\n'\n'**CPython implementation detail:** For CPython, \"id(x)\" is the '\n'memory\\n'\n'address where \"x\" is stored.\\n'\n'\\n'\n'An object\u2019s type determines the operations that the object '\n'supports\\n'\n'(e.g., \u201cdoes it have a length?\u201d) and also defines the possible '\n'values\\n'\n'for objects of that type. The \"type()\" function returns an '\n'object\u2019s\\n'\n'type (which is an object itself). Like its identity, an '\n'object\u2019s\\n'\n'*type* is also unchangeable. [1]\\n'\n'\\n'\n'The *value* of some objects can change. 
Objects whose value can\\n'\n'change are said to be *mutable*; objects whose value is '\n'unchangeable\\n'\n'once they are created are called *immutable*. (The value of an\\n'\n'immutable container object that contains a reference to a '\n'mutable\\n'\n'object can change when the latter\u2019s value is changed; however '\n'the\\n'\n'container is still considered immutable, because the collection '\n'of\\n'\n'objects it contains cannot be changed. So, immutability is not\\n'\n'strictly the same as having an unchangeable value, it is more '\n'subtle.)\\n'\n'An object\u2019s mutability is determined by its type; for instance,\\n'\n'numbers, strings and tuples are immutable, while dictionaries '\n'and\\n'\n'lists are mutable.\\n'\n'\\n'\n'Objects are never explicitly destroyed; however, when they '\n'become\\n'\n'unreachable they may be garbage-collected. An implementation is\\n'\n'allowed to postpone garbage collection or omit it altogether \u2014 it '\n'is a\\n'\n'matter of implementation quality how garbage collection is\\n'\n'implemented, as long as no objects are collected that are still\\n'\n'reachable.\\n'\n'\\n'\n'**CPython implementation detail:** CPython currently uses a '\n'reference-\\n'\n'counting scheme with (optional) delayed detection of cyclically '\n'linked\\n'\n'garbage, which collects most objects as soon as they become\\n'\n'unreachable, but is not guaranteed to collect garbage containing\\n'\n'circular references. See the documentation of the \"gc\" module '\n'for\\n'\n'information on controlling the collection of cyclic garbage. '\n'Other\\n'\n'implementations act differently and CPython may change. Do not '\n'depend\\n'\n'on immediate finalization of objects when they become unreachable '\n'(so\\n'\n'you should always close files explicitly).\\n'\n'\\n'\n'Note that the use of the implementation\u2019s tracing or debugging\\n'\n'facilities may keep objects alive that would normally be '\n'collectable.\\n'\n'Also note that catching an exception with a \u2018\"try\"\u2026\"except\"\u2019 '\n'statement\\n'\n'may keep objects alive.\\n'\n'\\n'\n'Some objects contain references to \u201cexternal\u201d resources such as '\n'open\\n'\n'files or windows. It is understood that these resources are '\n'freed\\n'\n'when the object is garbage-collected, but since garbage '\n'collection is\\n'\n'not guaranteed to happen, such objects also provide an explicit '\n'way to\\n'\n'release the external resource, usually a \"close()\" method. '\n'Programs\\n'\n'are strongly recommended to explicitly close such objects. The\\n'\n'\u2018\"try\"\u2026\"finally\"\u2019 statement and the \u2018\"with\"\u2019 statement provide\\n'\n'convenient ways to do this.\\n'\n'\\n'\n'Some objects contain references to other objects; these are '\n'called\\n'\n'*containers*. Examples of containers are tuples, lists and\\n'\n'dictionaries. The references are part of a container\u2019s value. '\n'In\\n'\n'most cases, when we talk about the value of a container, we imply '\n'the\\n'\n'values, not the identities of the contained objects; however, '\n'when we\\n'\n'talk about the mutability of a container, only the identities of '\n'the\\n'\n'immediately contained objects are implied. So, if an immutable\\n'\n'container (like a tuple) contains a reference to a mutable '\n'object, its\\n'\n'value changes if that mutable object is changed.\\n'\n'\\n'\n'Types affect almost all aspects of object behavior. 
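# Illustrative sketch: an immutable container's value can still change when it
# holds a mutable object, but its own items cannot be rebound.
box = ([1, 2], "label")    # a tuple (immutable) containing a list (mutable)
box[0].append(3)           # fine: the list changes in place
print(box)                 # ([1, 2, 3], 'label')

try:
    box[0] = [9]           # rebinding an item of the tuple is refused
except TypeError as exc:
    print("caught:", exc)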
Even the\\n'\n'importance of object identity is affected in some sense: for '\n'immutable\\n'\n'types, operations that compute new values may actually return a\\n'\n'reference to any existing object with the same type and value, '\n'while\\n'\n'for mutable objects this is not allowed. E.g., after \"a = 1; b = '\n'1\",\\n'\n'\"a\" and \"b\" may or may not refer to the same object with the '\n'value\\n'\n'one, depending on the implementation, but after \"c = []; d = []\", '\n'\"c\"\\n'\n'and \"d\" are guaranteed to refer to two different, unique, newly\\n'\n'created empty lists. (Note that \"c = d = []\" assigns the same '\n'object\\n'\n'to both \"c\" and \"d\".)\\n',\n'operator-summary':'Operator precedence\\n'\n'*******************\\n'\n'\\n'\n'The following table summarizes the operator precedence '\n'in Python, from\\n'\n'highest precedence (most binding) to lowest precedence '\n'(least\\n'\n'binding). Operators in the same box have the same '\n'precedence. Unless\\n'\n'the syntax is explicitly given, operators are binary. '\n'Operators in\\n'\n'the same box group left to right (except for '\n'exponentiation, which\\n'\n'groups from right to left).\\n'\n'\\n'\n'Note that comparisons, membership tests, and identity '\n'tests, all have\\n'\n'the same precedence and have a left-to-right chaining '\n'feature as\\n'\n'described in the Comparisons section.\\n'\n'\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| Operator | '\n'Description |\\n'\n'|=================================================|=======================================|\\n'\n'| \"(expressions...)\", \"[expressions...]\", \"{key: | '\n'Binding or parenthesized expression, |\\n'\n'| value...}\", \"{expressions...}\" | list '\n'display, dictionary display, set |\\n'\n'| | '\n'display |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"x[index]\", \"x[index:index]\", | '\n'Subscription, slicing, call, |\\n'\n'| \"x(arguments...)\", \"x.attribute\" | '\n'attribute reference |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"await\" \"x\" | '\n'Await expression |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"**\" | '\n'Exponentiation [5] |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"+x\", \"-x\", \"~x\" | '\n'Positive, negative, bitwise NOT |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"*\", \"@\", \"/\", \"//\", \"%\" | '\n'Multiplication, matrix |\\n'\n'| | '\n'multiplication, division, floor |\\n'\n'| | '\n'division, remainder [6] |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"+\", \"-\" | '\n'Addition and subtraction |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"<<\", \">>\" | '\n'Shifts |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"&\" | '\n'Bitwise AND |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"^\" | '\n'Bitwise XOR |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"|\" | '\n'Bitwise OR 
|\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"in\", \"not in\", \"is\", \"is not\", \"<\", \"<=\", \">\", | '\n'Comparisons, including membership |\\n'\n'| \">=\", \"!=\", \"==\" | '\n'tests and identity tests |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"not\" \"x\" | '\n'Boolean NOT |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"and\" | '\n'Boolean AND |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"or\" | '\n'Boolean OR |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"if\" \u2013 \"else\" | '\n'Conditional expression |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \"lambda\" | '\n'Lambda expression |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'| \":=\" | '\n'Assignment expression |\\n'\n'+-------------------------------------------------+---------------------------------------+\\n'\n'\\n'\n'-[ Footnotes ]-\\n'\n'\\n'\n'[1] While \"abs(x%y) < abs(y)\" is true mathematically, '\n'for floats it\\n'\n' may not be true numerically due to roundoff. For '\n'example, and\\n'\n' assuming a platform on which a Python float is an '\n'IEEE 754 double-\\n'\n' precision number, in order that \"-1e-100 % 1e100\" '\n'have the same\\n'\n' sign as \"1e100\", the computed result is \"-1e-100 + '\n'1e100\", which\\n'\n' is numerically exactly equal to \"1e100\". The '\n'function\\n'\n' \"math.fmod()\" returns a result whose sign matches '\n'the sign of the\\n'\n' first argument instead, and so returns \"-1e-100\" in '\n'this case.\\n'\n' Which approach is more appropriate depends on the '\n'application.\\n'\n'\\n'\n'[2] If x is very close to an exact integer multiple of '\n'y, it\u2019s\\n'\n' possible for \"x//y\" to be one larger than '\n'\"(x-x%y)//y\" due to\\n'\n' rounding. In such cases, Python returns the latter '\n'result, in\\n'\n' order to preserve that \"divmod(x,y)[0] * y + x % y\" '\n'be very close\\n'\n' to \"x\".\\n'\n'\\n'\n'[3] The Unicode standard distinguishes between *code '\n'points* (e.g.\\n'\n' U+0041) and *abstract characters* (e.g. \u201cLATIN '\n'CAPITAL LETTER A\u201d).\\n'\n' While most abstract characters in Unicode are only '\n'represented\\n'\n' using one code point, there is a number of abstract '\n'characters\\n'\n' that can in addition be represented using a sequence '\n'of more than\\n'\n' one code point. For example, the abstract character '\n'\u201cLATIN\\n'\n' CAPITAL LETTER C WITH CEDILLA\u201d can be represented as '\n'a single\\n'\n' *precomposed character* at code position U+00C7, or '\n'as a sequence\\n'\n' of a *base character* at code position U+0043 (LATIN '\n'CAPITAL\\n'\n' LETTER C), followed by a *combining character* at '\n'code position\\n'\n' U+0327 (COMBINING CEDILLA).\\n'\n'\\n'\n' The comparison operators on strings compare at the '\n'level of\\n'\n' Unicode code points. This may be counter-intuitive '\n'to humans. 
For\\n'\n' example, \"\"\\\\u00C7\" == \"\\\\u0043\\\\u0327\"\" is \"False\", '\n'even though both\\n'\n' strings represent the same abstract character \u201cLATIN '\n'CAPITAL\\n'\n' LETTER C WITH CEDILLA\u201d.\\n'\n'\\n'\n' To compare strings at the level of abstract '\n'characters (that is,\\n'\n' in a way intuitive to humans), use '\n'\"unicodedata.normalize()\".\\n'\n'\\n'\n'[4] Due to automatic garbage-collection, free lists, and '\n'the dynamic\\n'\n' nature of descriptors, you may notice seemingly '\n'unusual behaviour\\n'\n' in certain uses of the \"is\" operator, like those '\n'involving\\n'\n' comparisons between instance methods, or constants. '\n'Check their\\n'\n' documentation for more info.\\n'\n'\\n'\n'[5] The power operator \"**\" binds less tightly than an '\n'arithmetic or\\n'\n' bitwise unary operator on its right, that is, '\n'\"2**-1\" is \"0.5\".\\n'\n'\\n'\n'[6] The \"%\" operator is also used for string formatting; '\n'the same\\n'\n' precedence applies.\\n',\n'pass':'The \"pass\" statement\\n'\n'********************\\n'\n'\\n'\n' pass_stmt ::= \"pass\"\\n'\n'\\n'\n'\"pass\" is a null operation \u2014 when it is executed, nothing happens. '\n'It\\n'\n'is useful as a placeholder when a statement is required '\n'syntactically,\\n'\n'but no code needs to be executed, for example:\\n'\n'\\n'\n' def f(arg): pass # a function that does nothing (yet)\\n'\n'\\n'\n' class C: pass # a class with no methods (yet)\\n',\n'power':'The power operator\\n'\n'******************\\n'\n'\\n'\n'The power operator binds more tightly than unary operators on its\\n'\n'left; it binds less tightly than unary operators on its right. '\n'The\\n'\n'syntax is:\\n'\n'\\n'\n' power ::= (await_expr | primary) [\"**\" u_expr]\\n'\n'\\n'\n'Thus, in an unparenthesized sequence of power and unary operators, '\n'the\\n'\n'operators are evaluated from right to left (this does not '\n'constrain\\n'\n'the evaluation order for the operands): \"-1**2\" results in \"-1\".\\n'\n'\\n'\n'The power operator has the same semantics as the built-in \"pow()\"\\n'\n'function, when called with two arguments: it yields its left '\n'argument\\n'\n'raised to the power of its right argument. The numeric arguments '\n'are\\n'\n'first converted to a common type, and the result is of that type.\\n'\n'\\n'\n'For int operands, the result has the same type as the operands '\n'unless\\n'\n'the second argument is negative; in that case, all arguments are\\n'\n'converted to float and a float result is delivered. For example,\\n'\n'\"10**2\" returns \"100\", but \"10**-2\" returns \"0.01\".\\n'\n'\\n'\n'Raising \"0.0\" to a negative power results in a '\n'\"ZeroDivisionError\".\\n'\n'Raising a negative number to a fractional power results in a '\n'\"complex\"\\n'\n'number. (In earlier versions it raised a \"ValueError\".)\\n'\n'\\n'\n'This operation can be customized using the special \"__pow__()\" '\n'method.\\n',\n'raise':'The \"raise\" statement\\n'\n'*********************\\n'\n'\\n'\n' raise_stmt ::= \"raise\" [expression [\"from\" expression]]\\n'\n'\\n'\n'If no expressions are present, \"raise\" re-raises the last '\n'exception\\n'\n'that was active in the current scope. If no exception is active '\n'in\\n'\n'the current scope, a \"RuntimeError\" exception is raised indicating\\n'\n'that this is an error.\\n'\n'\\n'\n'Otherwise, \"raise\" evaluates the first expression as the exception\\n'\n'object. It must be either a subclass or an instance of\\n'\n'\"BaseException\". 
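# Illustrative sketch: "**" binds tighter than a unary operator on its left
# and looser than one on its right.
assert -1 ** 2 == -1       # parsed as -(1 ** 2)
assert 2 ** -1 == 0.5      # the exponent's unary minus binds first
assert 10 ** -2 == 0.01    # a negative integer exponent yields a float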
If it is a class, the exception instance will be\\n'\n'obtained when needed by instantiating the class with no arguments.\\n'\n'\\n'\n'The *type* of the exception is the exception instance\u2019s class, the\\n'\n'*value* is the instance itself.\\n'\n'\\n'\n'A traceback object is normally created automatically when an '\n'exception\\n'\n'is raised and attached to it as the \"__traceback__\" attribute, '\n'which\\n'\n'is writable. You can create an exception and set your own traceback '\n'in\\n'\n'one step using the \"with_traceback()\" exception method (which '\n'returns\\n'\n'the same exception instance, with its traceback set to its '\n'argument),\\n'\n'like so:\\n'\n'\\n'\n' raise Exception(\"foo occurred\").with_traceback(tracebackobj)\\n'\n'\\n'\n'The \"from\" clause is used for exception chaining: if given, the '\n'second\\n'\n'*expression* must be another exception class or instance. If the\\n'\n'second expression is an exception instance, it will be attached to '\n'the\\n'\n'raised exception as the \"__cause__\" attribute (which is writable). '\n'If\\n'\n'the expression is an exception class, the class will be '\n'instantiated\\n'\n'and the resulting exception instance will be attached to the '\n'raised\\n'\n'exception as the \"__cause__\" attribute. If the raised exception is '\n'not\\n'\n'handled, both exceptions will be printed:\\n'\n'\\n'\n' >>> try:\\n'\n' ... print(1 / 0)\\n'\n' ... except Exception as exc:\\n'\n' ... raise RuntimeError(\"Something bad happened\") from exc\\n'\n' ...\\n'\n' Traceback (most recent call last):\\n'\n' File \"\", line 2, in \\n'\n' ZeroDivisionError: division by zero\\n'\n'\\n'\n' The above exception was the direct cause of the following '\n'exception:\\n'\n'\\n'\n' Traceback (most recent call last):\\n'\n' File \"\", line 4, in \\n'\n' RuntimeError: Something bad happened\\n'\n'\\n'\n'A similar mechanism works implicitly if an exception is raised '\n'inside\\n'\n'an exception handler or a \"finally\" clause: the previous exception '\n'is\\n'\n'then attached as the new exception\u2019s \"__context__\" attribute:\\n'\n'\\n'\n' >>> try:\\n'\n' ... print(1 / 0)\\n'\n' ... except:\\n'\n' ... raise RuntimeError(\"Something bad happened\")\\n'\n' ...\\n'\n' Traceback (most recent call last):\\n'\n' File \"\", line 2, in \\n'\n' ZeroDivisionError: division by zero\\n'\n'\\n'\n' During handling of the above exception, another exception '\n'occurred:\\n'\n'\\n'\n' Traceback (most recent call last):\\n'\n' File \"\", line 4, in \\n'\n' RuntimeError: Something bad happened\\n'\n'\\n'\n'Exception chaining can be explicitly suppressed by specifying '\n'\"None\"\\n'\n'in the \"from\" clause:\\n'\n'\\n'\n' >>> try:\\n'\n' ... print(1 / 0)\\n'\n' ... except:\\n'\n' ... 
raise RuntimeError(\"Something bad happened\") from None\\n'\n' ...\\n'\n' Traceback (most recent call last):\\n'\n' File \"\", line 4, in \\n'\n' RuntimeError: Something bad happened\\n'\n'\\n'\n'Additional information on exceptions can be found in section\\n'\n'Exceptions, and information about handling exceptions is in '\n'section\\n'\n'The try statement.\\n'\n'\\n'\n'Changed in version 3.3: \"None\" is now permitted as \"Y\" in \"raise X\\n'\n'from Y\".\\n'\n'\\n'\n'New in version 3.3: The \"__suppress_context__\" attribute to '\n'suppress\\n'\n'automatic display of the exception context.\\n',\n'return':'The \"return\" statement\\n'\n'**********************\\n'\n'\\n'\n' return_stmt ::= \"return\" [expression_list]\\n'\n'\\n'\n'\"return\" may only occur syntactically nested in a function '\n'definition,\\n'\n'not within a nested class definition.\\n'\n'\\n'\n'If an expression list is present, it is evaluated, else \"None\" is\\n'\n'substituted.\\n'\n'\\n'\n'\"return\" leaves the current function call with the expression list '\n'(or\\n'\n'\"None\") as return value.\\n'\n'\\n'\n'When \"return\" passes control out of a \"try\" statement with a '\n'\"finally\"\\n'\n'clause, that \"finally\" clause is executed before really leaving '\n'the\\n'\n'function.\\n'\n'\\n'\n'In a generator function, the \"return\" statement indicates that '\n'the\\n'\n'generator is done and will cause \"StopIteration\" to be raised. '\n'The\\n'\n'returned value (if any) is used as an argument to construct\\n'\n'\"StopIteration\" and becomes the \"StopIteration.value\" attribute.\\n'\n'\\n'\n'In an asynchronous generator function, an empty \"return\" '\n'statement\\n'\n'indicates that the asynchronous generator is done and will cause\\n'\n'\"StopAsyncIteration\" to be raised. A non-empty \"return\" statement '\n'is\\n'\n'a syntax error in an asynchronous generator function.\\n',\n'sequence-types':'Emulating container types\\n'\n'*************************\\n'\n'\\n'\n'The following methods can be defined to implement '\n'container objects.\\n'\n'Containers usually are sequences (such as lists or tuples) '\n'or mappings\\n'\n'(like dictionaries), but can represent other containers as '\n'well. The\\n'\n'first set of methods is used either to emulate a sequence '\n'or to\\n'\n'emulate a mapping; the difference is that for a sequence, '\n'the\\n'\n'allowable keys should be the integers *k* for which \"0 <= '\n'k < N\" where\\n'\n'*N* is the length of the sequence, or slice objects, which '\n'define a\\n'\n'range of items. It is also recommended that mappings '\n'provide the\\n'\n'methods \"keys()\", \"values()\", \"items()\", \"get()\", '\n'\"clear()\",\\n'\n'\"setdefault()\", \"pop()\", \"popitem()\", \"copy()\", and '\n'\"update()\"\\n'\n'behaving similar to those for Python\u2019s standard dictionary '\n'objects.\\n'\n'The \"collections.abc\" module provides a \"MutableMapping\" '\n'abstract base\\n'\n'class to help create those methods from a base set of '\n'\"__getitem__()\",\\n'\n'\"__setitem__()\", \"__delitem__()\", and \"keys()\". Mutable '\n'sequences\\n'\n'should provide methods \"append()\", \"count()\", \"index()\", '\n'\"extend()\",\\n'\n'\"insert()\", \"pop()\", \"remove()\", \"reverse()\" and \"sort()\", '\n'like Python\\n'\n'standard list objects. 
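# Illustrative sketch: "return" inside a generator ends it and carries the
# returned value on the resulting StopIteration.
def gen():
    yield 1
    return "done"          # becomes StopIteration("done")

g = gen()
print(next(g))             # 1
try:
    next(g)
except StopIteration as exc:
    print(exc.value)       # 'done'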
Finally, sequence types should '\n'implement\\n'\n'addition (meaning concatenation) and multiplication '\n'(meaning\\n'\n'repetition) by defining the methods \"__add__()\", '\n'\"__radd__()\",\\n'\n'\"__iadd__()\", \"__mul__()\", \"__rmul__()\" and \"__imul__()\" '\n'described\\n'\n'below; they should not define other numerical operators. '\n'It is\\n'\n'recommended that both mappings and sequences implement '\n'the\\n'\n'\"__contains__()\" method to allow efficient use of the \"in\" '\n'operator;\\n'\n'for mappings, \"in\" should search the mapping\u2019s keys; for '\n'sequences, it\\n'\n'should search through the values. It is further '\n'recommended that both\\n'\n'mappings and sequences implement the \"__iter__()\" method '\n'to allow\\n'\n'efficient iteration through the container; for mappings, '\n'\"__iter__()\"\\n'\n'should iterate through the object\u2019s keys; for sequences, '\n'it should\\n'\n'iterate through the values.\\n'\n'\\n'\n'object.__len__(self)\\n'\n'\\n'\n' Called to implement the built-in function \"len()\". '\n'Should return\\n'\n' the length of the object, an integer \">=\" 0. Also, an '\n'object that\\n'\n' doesn\u2019t define a \"__bool__()\" method and whose '\n'\"__len__()\" method\\n'\n' returns zero is considered to be false in a Boolean '\n'context.\\n'\n'\\n'\n' **CPython implementation detail:** In CPython, the '\n'length is\\n'\n' required to be at most \"sys.maxsize\". If the length is '\n'larger than\\n'\n' \"sys.maxsize\" some features (such as \"len()\") may '\n'raise\\n'\n' \"OverflowError\". To prevent raising \"OverflowError\" by '\n'truth value\\n'\n' testing, an object must define a \"__bool__()\" method.\\n'\n'\\n'\n'object.__length_hint__(self)\\n'\n'\\n'\n' Called to implement \"operator.length_hint()\". Should '\n'return an\\n'\n' estimated length for the object (which may be greater '\n'or less than\\n'\n' the actual length). The length must be an integer \">=\" '\n'0. The\\n'\n' return value may also be \"NotImplemented\", which is '\n'treated the\\n'\n' same as if the \"__length_hint__\" method didn\u2019t exist at '\n'all. This\\n'\n' method is purely an optimization and is never required '\n'for\\n'\n' correctness.\\n'\n'\\n'\n' New in version 3.4.\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' Slicing is done exclusively with the following three '\n'methods. A\\n'\n' call like\\n'\n'\\n'\n' a[1:2] = b\\n'\n'\\n'\n' is translated to\\n'\n'\\n'\n' a[slice(1, 2, None)] = b\\n'\n'\\n'\n' and so forth. Missing slice items are always filled in '\n'with \"None\".\\n'\n'\\n'\n'object.__getitem__(self, key)\\n'\n'\\n'\n' Called to implement evaluation of \"self[key]\". For '\n'sequence types,\\n'\n' the accepted keys should be integers and slice '\n'objects. Note that\\n'\n' the special interpretation of negative indexes (if the '\n'class wishes\\n'\n' to emulate a sequence type) is up to the '\n'\"__getitem__()\" method. If\\n'\n' *key* is of an inappropriate type, \"TypeError\" may be '\n'raised; if of\\n'\n' a value outside the set of indexes for the sequence '\n'(after any\\n'\n' special interpretation of negative values), '\n'\"IndexError\" should be\\n'\n' raised. 
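# Illustrative sketch: a minimal sequence built from __len__ and __getitem__;
# raising IndexError for out-of-range indexes lets "for" loops, list() and
# even "in" (via the old iteration protocol) know where the sequence ends.
class Squares:
    def __init__(self, n):
        self.n = n

    def __len__(self):
        return self.n

    def __getitem__(self, index):
        if isinstance(index, slice):     # slices arrive as slice objects
            return [self[i] for i in range(*index.indices(self.n))]
        if index < 0:                    # negative indexes are our own job
            index += self.n
        if not 0 <= index < self.n:
            raise IndexError(index)
        return index * index

s = Squares(5)
print(len(s), list(s), s[1:4], 9 in s)   # 5 [0, 1, 4, 9, 16] [1, 4, 9] True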
For mapping types, if *key* is missing (not in '\n'the\\n'\n' container), \"KeyError\" should be raised.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' \"for\" loops expect that an \"IndexError\" will be '\n'raised for\\n'\n' illegal indexes to allow proper detection of the end '\n'of the\\n'\n' sequence.\\n'\n'\\n'\n'object.__setitem__(self, key, value)\\n'\n'\\n'\n' Called to implement assignment to \"self[key]\". Same '\n'note as for\\n'\n' \"__getitem__()\". This should only be implemented for '\n'mappings if\\n'\n' the objects support changes to the values for keys, or '\n'if new keys\\n'\n' can be added, or for sequences if elements can be '\n'replaced. The\\n'\n' same exceptions should be raised for improper *key* '\n'values as for\\n'\n' the \"__getitem__()\" method.\\n'\n'\\n'\n'object.__delitem__(self, key)\\n'\n'\\n'\n' Called to implement deletion of \"self[key]\". Same note '\n'as for\\n'\n' \"__getitem__()\". This should only be implemented for '\n'mappings if\\n'\n' the objects support removal of keys, or for sequences '\n'if elements\\n'\n' can be removed from the sequence. The same exceptions '\n'should be\\n'\n' raised for improper *key* values as for the '\n'\"__getitem__()\" method.\\n'\n'\\n'\n'object.__missing__(self, key)\\n'\n'\\n'\n' Called by \"dict\".\"__getitem__()\" to implement '\n'\"self[key]\" for dict\\n'\n' subclasses when key is not in the dictionary.\\n'\n'\\n'\n'object.__iter__(self)\\n'\n'\\n'\n' This method is called when an iterator is required for '\n'a container.\\n'\n' This method should return a new iterator object that '\n'can iterate\\n'\n' over all the objects in the container. For mappings, '\n'it should\\n'\n' iterate over the keys of the container.\\n'\n'\\n'\n' Iterator objects also need to implement this method; '\n'they are\\n'\n' required to return themselves. For more information on '\n'iterator\\n'\n' objects, see Iterator Types.\\n'\n'\\n'\n'object.__reversed__(self)\\n'\n'\\n'\n' Called (if present) by the \"reversed()\" built-in to '\n'implement\\n'\n' reverse iteration. It should return a new iterator '\n'object that\\n'\n' iterates over all the objects in the container in '\n'reverse order.\\n'\n'\\n'\n' If the \"__reversed__()\" method is not provided, the '\n'\"reversed()\"\\n'\n' built-in will fall back to using the sequence protocol '\n'(\"__len__()\"\\n'\n' and \"__getitem__()\"). Objects that support the '\n'sequence protocol\\n'\n' should only provide \"__reversed__()\" if they can '\n'provide an\\n'\n' implementation that is more efficient than the one '\n'provided by\\n'\n' \"reversed()\".\\n'\n'\\n'\n'The membership test operators (\"in\" and \"not in\") are '\n'normally\\n'\n'implemented as an iteration through a container. However, '\n'container\\n'\n'objects can supply the following special method with a '\n'more efficient\\n'\n'implementation, which also does not require the object be '\n'iterable.\\n'\n'\\n'\n'object.__contains__(self, item)\\n'\n'\\n'\n' Called to implement membership test operators. Should '\n'return true\\n'\n' if *item* is in *self*, false otherwise. 
For mapping '\n'objects, this\\n'\n' should consider the keys of the mapping rather than the '\n'values or\\n'\n' the key-item pairs.\\n'\n'\\n'\n' For objects that don\u2019t define \"__contains__()\", the '\n'membership test\\n'\n' first tries iteration via \"__iter__()\", then the old '\n'sequence\\n'\n' iteration protocol via \"__getitem__()\", see this '\n'section in the\\n'\n' language reference.\\n',\n'shifting':'Shifting operations\\n'\n'*******************\\n'\n'\\n'\n'The shifting operations have lower priority than the arithmetic\\n'\n'operations:\\n'\n'\\n'\n' shift_expr ::= a_expr | shift_expr (\"<<\" | \">>\") a_expr\\n'\n'\\n'\n'These operators accept integers as arguments. They shift the '\n'first\\n'\n'argument to the left or right by the number of bits given by '\n'the\\n'\n'second argument.\\n'\n'\\n'\n'This operation can be customized using the special '\n'\"__lshift__()\" and\\n'\n'\"__rshift__()\" methods.\\n'\n'\\n'\n'A right shift by *n* bits is defined as floor division by '\n'\"pow(2,n)\".\\n'\n'A left shift by *n* bits is defined as multiplication with '\n'\"pow(2,n)\".\\n',\n'slicings':'Slicings\\n'\n'********\\n'\n'\\n'\n'A slicing selects a range of items in a sequence object (e.g., '\n'a\\n'\n'string, tuple or list). Slicings may be used as expressions or '\n'as\\n'\n'targets in assignment or \"del\" statements. The syntax for a '\n'slicing:\\n'\n'\\n'\n' slicing ::= primary \"[\" slice_list \"]\"\\n'\n' slice_list ::= slice_item (\",\" slice_item)* [\",\"]\\n'\n' slice_item ::= expression | proper_slice\\n'\n' proper_slice ::= [lower_bound] \":\" [upper_bound] [ \":\" '\n'[stride] ]\\n'\n' lower_bound ::= expression\\n'\n' upper_bound ::= expression\\n'\n' stride ::= expression\\n'\n'\\n'\n'There is ambiguity in the formal syntax here: anything that '\n'looks like\\n'\n'an expression list also looks like a slice list, so any '\n'subscription\\n'\n'can be interpreted as a slicing. Rather than further '\n'complicating the\\n'\n'syntax, this is disambiguated by defining that in this case the\\n'\n'interpretation as a subscription takes priority over the\\n'\n'interpretation as a slicing (this is the case if the slice list\\n'\n'contains no proper slice).\\n'\n'\\n'\n'The semantics for a slicing are as follows. The primary is '\n'indexed\\n'\n'(using the same \"__getitem__()\" method as normal subscription) '\n'with a\\n'\n'key that is constructed from the slice list, as follows. If the '\n'slice\\n'\n'list contains at least one comma, the key is a tuple containing '\n'the\\n'\n'conversion of the slice items; otherwise, the conversion of the '\n'lone\\n'\n'slice item is the key. The conversion of a slice item that is '\n'an\\n'\n'expression is that expression. The conversion of a proper slice '\n'is a\\n'\n'slice object (see section The standard type hierarchy) whose '\n'\"start\",\\n'\n'\"stop\" and \"step\" attributes are the values of the expressions '\n'given\\n'\n'as lower bound, upper bound and stride, respectively, '\n'substituting\\n'\n'\"None\" for missing expressions.\\n',\n'specialattrs':'Special Attributes\\n'\n'******************\\n'\n'\\n'\n'The implementation adds a few special read-only attributes '\n'to several\\n'\n'object types, where they are relevant. 
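# Illustrative sketch: subscription with a colon hands __getitem__ a slice
# object, and a comma turns the key into a tuple; "Probe" is just an example.
class Probe:
    def __getitem__(self, key):
        return key

p = Probe()
print(p[1:2])               # slice(1, 2, None)
print(p[1:10:2])            # slice(1, 10, 2)
print(p[1:2, 7])            # (slice(1, 2, None), 7)
print(p[::2].indices(9))    # (0, 9, 2): concrete bounds for a length-9 sequence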
Some of these are '\n'not reported\\n'\n'by the \"dir()\" built-in function.\\n'\n'\\n'\n'object.__dict__\\n'\n'\\n'\n' A dictionary or other mapping object used to store an '\n'object\u2019s\\n'\n' (writable) attributes.\\n'\n'\\n'\n'instance.__class__\\n'\n'\\n'\n' The class to which a class instance belongs.\\n'\n'\\n'\n'class.__bases__\\n'\n'\\n'\n' The tuple of base classes of a class object.\\n'\n'\\n'\n'definition.__name__\\n'\n'\\n'\n' The name of the class, function, method, descriptor, or '\n'generator\\n'\n' instance.\\n'\n'\\n'\n'definition.__qualname__\\n'\n'\\n'\n' The *qualified name* of the class, function, method, '\n'descriptor, or\\n'\n' generator instance.\\n'\n'\\n'\n' New in version 3.3.\\n'\n'\\n'\n'class.__mro__\\n'\n'\\n'\n' This attribute is a tuple of classes that are considered '\n'when\\n'\n' looking for base classes during method resolution.\\n'\n'\\n'\n'class.mro()\\n'\n'\\n'\n' This method can be overridden by a metaclass to customize '\n'the\\n'\n' method resolution order for its instances. It is called '\n'at class\\n'\n' instantiation, and its result is stored in \"__mro__\".\\n'\n'\\n'\n'class.__subclasses__()\\n'\n'\\n'\n' Each class keeps a list of weak references to its '\n'immediate\\n'\n' subclasses. This method returns a list of all those '\n'references\\n'\n' still alive. The list is in definition order. Example:\\n'\n'\\n'\n' >>> int.__subclasses__()\\n'\n\" []\\n\"\n'\\n'\n'-[ Footnotes ]-\\n'\n'\\n'\n'[1] Additional information on these special methods may be '\n'found in\\n'\n' the Python Reference Manual (Basic customization).\\n'\n'\\n'\n'[2] As a consequence, the list \"[1, 2]\" is considered equal '\n'to \"[1.0,\\n'\n' 2.0]\", and similarly for tuples.\\n'\n'\\n'\n'[3] They must have since the parser can\u2019t tell the type of '\n'the\\n'\n' operands.\\n'\n'\\n'\n'[4] Cased characters are those with general category '\n'property being\\n'\n' one of \u201cLu\u201d (Letter, uppercase), \u201cLl\u201d (Letter, '\n'lowercase), or \u201cLt\u201d\\n'\n' (Letter, titlecase).\\n'\n'\\n'\n'[5] To format only a tuple you should therefore provide a '\n'singleton\\n'\n' tuple whose only element is the tuple to be formatted.\\n',\n'specialnames':'Special method names\\n'\n'********************\\n'\n'\\n'\n'A class can implement certain operations that are invoked by '\n'special\\n'\n'syntax (such as arithmetic operations or subscripting and '\n'slicing) by\\n'\n'defining methods with special names. This is Python\u2019s '\n'approach to\\n'\n'*operator overloading*, allowing classes to define their own '\n'behavior\\n'\n'with respect to language operators. For instance, if a '\n'class defines\\n'\n'a method named \"__getitem__()\", and \"x\" is an instance of '\n'this class,\\n'\n'then \"x[i]\" is roughly equivalent to \"type(x).__getitem__(x, '\n'i)\".\\n'\n'Except where mentioned, attempts to execute an operation '\n'raise an\\n'\n'exception when no appropriate method is defined (typically\\n'\n'\"AttributeError\" or \"TypeError\").\\n'\n'\\n'\n'Setting a special method to \"None\" indicates that the '\n'corresponding\\n'\n'operation is not available. For example, if a class sets '\n'\"__iter__()\"\\n'\n'to \"None\", the class is not iterable, so calling \"iter()\" on '\n'its\\n'\n'instances will raise a \"TypeError\" (without falling back to\\n'\n'\"__getitem__()\"). 
[2]\\n'\n'\\n'\n'When implementing a class that emulates any built-in type, '\n'it is\\n'\n'important that the emulation only be implemented to the '\n'degree that it\\n'\n'makes sense for the object being modelled. For example, '\n'some\\n'\n'sequences may work well with retrieval of individual '\n'elements, but\\n'\n'extracting a slice may not make sense. (One example of this '\n'is the\\n'\n'\"NodeList\" interface in the W3C\u2019s Document Object Model.)\\n'\n'\\n'\n'\\n'\n'Basic customization\\n'\n'===================\\n'\n'\\n'\n'object.__new__(cls[, ...])\\n'\n'\\n'\n' Called to create a new instance of class *cls*. '\n'\"__new__()\" is a\\n'\n' static method (special-cased so you need not declare it '\n'as such)\\n'\n' that takes the class of which an instance was requested '\n'as its\\n'\n' first argument. The remaining arguments are those passed '\n'to the\\n'\n' object constructor expression (the call to the class). '\n'The return\\n'\n' value of \"__new__()\" should be the new object instance '\n'(usually an\\n'\n' instance of *cls*).\\n'\n'\\n'\n' Typical implementations create a new instance of the '\n'class by\\n'\n' invoking the superclass\u2019s \"__new__()\" method using\\n'\n' \"super().__new__(cls[, ...])\" with appropriate arguments '\n'and then\\n'\n' modifying the newly-created instance as necessary before '\n'returning\\n'\n' it.\\n'\n'\\n'\n' If \"__new__()\" is invoked during object construction and '\n'it returns\\n'\n' an instance or subclass of *cls*, then the new '\n'instance\u2019s\\n'\n' \"__init__()\" method will be invoked like \"__init__(self[, '\n'...])\",\\n'\n' where *self* is the new instance and the remaining '\n'arguments are\\n'\n' the same as were passed to the object constructor.\\n'\n'\\n'\n' If \"__new__()\" does not return an instance of *cls*, then '\n'the new\\n'\n' instance\u2019s \"__init__()\" method will not be invoked.\\n'\n'\\n'\n' \"__new__()\" is intended mainly to allow subclasses of '\n'immutable\\n'\n' types (like int, str, or tuple) to customize instance '\n'creation. It\\n'\n' is also commonly overridden in custom metaclasses in '\n'order to\\n'\n' customize class creation.\\n'\n'\\n'\n'object.__init__(self[, ...])\\n'\n'\\n'\n' Called after the instance has been created (by '\n'\"__new__()\"), but\\n'\n' before it is returned to the caller. The arguments are '\n'those\\n'\n' passed to the class constructor expression. If a base '\n'class has an\\n'\n' \"__init__()\" method, the derived class\u2019s \"__init__()\" '\n'method, if\\n'\n' any, must explicitly call it to ensure proper '\n'initialization of the\\n'\n' base class part of the instance; for example:\\n'\n' \"super().__init__([args...])\".\\n'\n'\\n'\n' Because \"__new__()\" and \"__init__()\" work together in '\n'constructing\\n'\n' objects (\"__new__()\" to create it, and \"__init__()\" to '\n'customize\\n'\n' it), no non-\"None\" value may be returned by \"__init__()\"; '\n'doing so\\n'\n' will cause a \"TypeError\" to be raised at runtime.\\n'\n'\\n'\n'object.__del__(self)\\n'\n'\\n'\n' Called when the instance is about to be destroyed. This '\n'is also\\n'\n' called a finalizer or (improperly) a destructor. If a '\n'base class\\n'\n' has a \"__del__()\" method, the derived class\u2019s \"__del__()\" '\n'method,\\n'\n' if any, must explicitly call it to ensure proper deletion '\n'of the\\n'\n' base class part of the instance.\\n'\n'\\n'\n' It is possible (though not recommended!) 
for the '\n'\"__del__()\" method\\n'\n' to postpone destruction of the instance by creating a new '\n'reference\\n'\n' to it. This is called object *resurrection*. It is\\n'\n' implementation-dependent whether \"__del__()\" is called a '\n'second\\n'\n' time when a resurrected object is about to be destroyed; '\n'the\\n'\n' current *CPython* implementation only calls it once.\\n'\n'\\n'\n' It is not guaranteed that \"__del__()\" methods are called '\n'for\\n'\n' objects that still exist when the interpreter exits.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' \"del x\" doesn\u2019t directly call \"x.__del__()\" \u2014 the '\n'former\\n'\n' decrements the reference count for \"x\" by one, and the '\n'latter is\\n'\n' only called when \"x\"\u2019s reference count reaches zero.\\n'\n'\\n'\n' **CPython implementation detail:** It is possible for a '\n'reference\\n'\n' cycle to prevent the reference count of an object from '\n'going to\\n'\n' zero. In this case, the cycle will be later detected and '\n'deleted\\n'\n' by the *cyclic garbage collector*. A common cause of '\n'reference\\n'\n' cycles is when an exception has been caught in a local '\n'variable.\\n'\n' The frame\u2019s locals then reference the exception, which '\n'references\\n'\n' its own traceback, which references the locals of all '\n'frames caught\\n'\n' in the traceback.\\n'\n'\\n'\n' See also: Documentation for the \"gc\" module.\\n'\n'\\n'\n' Warning:\\n'\n'\\n'\n' Due to the precarious circumstances under which '\n'\"__del__()\"\\n'\n' methods are invoked, exceptions that occur during their '\n'execution\\n'\n' are ignored, and a warning is printed to \"sys.stderr\" '\n'instead.\\n'\n' In particular:\\n'\n'\\n'\n' * \"__del__()\" can be invoked when arbitrary code is '\n'being\\n'\n' executed, including from any arbitrary thread. If '\n'\"__del__()\"\\n'\n' needs to take a lock or invoke any other blocking '\n'resource, it\\n'\n' may deadlock as the resource may already be taken by '\n'the code\\n'\n' that gets interrupted to execute \"__del__()\".\\n'\n'\\n'\n' * \"__del__()\" can be executed during interpreter '\n'shutdown. As a\\n'\n' consequence, the global variables it needs to access '\n'(including\\n'\n' other modules) may already have been deleted or set '\n'to \"None\".\\n'\n' Python guarantees that globals whose name begins with '\n'a single\\n'\n' underscore are deleted from their module before other '\n'globals\\n'\n' are deleted; if no other references to such globals '\n'exist, this\\n'\n' may help in assuring that imported modules are still '\n'available\\n'\n' at the time when the \"__del__()\" method is called.\\n'\n'\\n'\n'object.__repr__(self)\\n'\n'\\n'\n' Called by the \"repr()\" built-in function to compute the '\n'\u201cofficial\u201d\\n'\n' string representation of an object. If at all possible, '\n'this\\n'\n' should look like a valid Python expression that could be '\n'used to\\n'\n' recreate an object with the same value (given an '\n'appropriate\\n'\n' environment). If this is not possible, a string of the '\n'form\\n'\n' \"<...some useful description...>\" should be returned. The '\n'return\\n'\n' value must be a string object. 
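As a minimal sketch of the "__repr__()" contract just described (the "Point" class and its fields are illustrative assumptions, not part of the reference text):

    >>> class Point:
    ...     def __init__(self, x, y):
    ...         self.x = x
    ...         self.y = y
    ...     def __repr__(self):
    ...         # reads like an expression that could recreate the object
    ...         return f'Point({self.x!r}, {self.y!r})'
    ...
    >>> Point(1, 2)
    Point(1, 2)
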
If a class defines '\n'\"__repr__()\" but\\n'\n' not \"__str__()\", then \"__repr__()\" is also used when an '\n'\u201cinformal\u201d\\n'\n' string representation of instances of that class is '\n'required.\\n'\n'\\n'\n' This is typically used for debugging, so it is important '\n'that the\\n'\n' representation is information-rich and unambiguous.\\n'\n'\\n'\n'object.__str__(self)\\n'\n'\\n'\n' Called by \"str(object)\" and the built-in functions '\n'\"format()\" and\\n'\n' \"print()\" to compute the \u201cinformal\u201d or nicely printable '\n'string\\n'\n' representation of an object. The return value must be a '\n'string\\n'\n' object.\\n'\n'\\n'\n' This method differs from \"object.__repr__()\" in that '\n'there is no\\n'\n' expectation that \"__str__()\" return a valid Python '\n'expression: a\\n'\n' more convenient or concise representation can be used.\\n'\n'\\n'\n' The default implementation defined by the built-in type '\n'\"object\"\\n'\n' calls \"object.__repr__()\".\\n'\n'\\n'\n'object.__bytes__(self)\\n'\n'\\n'\n' Called by bytes to compute a byte-string representation '\n'of an\\n'\n' object. This should return a \"bytes\" object.\\n'\n'\\n'\n'object.__format__(self, format_spec)\\n'\n'\\n'\n' Called by the \"format()\" built-in function, and by '\n'extension,\\n'\n' evaluation of formatted string literals and the '\n'\"str.format()\"\\n'\n' method, to produce a \u201cformatted\u201d string representation of '\n'an\\n'\n' object. The *format_spec* argument is a string that '\n'contains a\\n'\n' description of the formatting options desired. The '\n'interpretation\\n'\n' of the *format_spec* argument is up to the type '\n'implementing\\n'\n' \"__format__()\", however most classes will either '\n'delegate\\n'\n' formatting to one of the built-in types, or use a '\n'similar\\n'\n' formatting option syntax.\\n'\n'\\n'\n' See Format Specification Mini-Language for a description '\n'of the\\n'\n' standard formatting syntax.\\n'\n'\\n'\n' The return value must be a string object.\\n'\n'\\n'\n' Changed in version 3.4: The __format__ method of \"object\" '\n'itself\\n'\n' raises a \"TypeError\" if passed any non-empty string.\\n'\n'\\n'\n' Changed in version 3.7: \"object.__format__(x, \\'\\')\" is '\n'now\\n'\n' equivalent to \"str(x)\" rather than \"format(str(x), '\n'\\'\\')\".\\n'\n'\\n'\n'object.__lt__(self, other)\\n'\n'object.__le__(self, other)\\n'\n'object.__eq__(self, other)\\n'\n'object.__ne__(self, other)\\n'\n'object.__gt__(self, other)\\n'\n'object.__ge__(self, other)\\n'\n'\\n'\n' These are the so-called \u201crich comparison\u201d methods. The\\n'\n' correspondence between operator symbols and method names '\n'is as\\n'\n' follows: \"xy\" calls\\n'\n' \"x.__gt__(y)\", and \"x>=y\" calls \"x.__ge__(y)\".\\n'\n'\\n'\n' A rich comparison method may return the singleton '\n'\"NotImplemented\"\\n'\n' if it does not implement the operation for a given pair '\n'of\\n'\n' arguments. By convention, \"False\" and \"True\" are returned '\n'for a\\n'\n' successful comparison. However, these methods can return '\n'any value,\\n'\n' so if the comparison operator is used in a Boolean '\n'context (e.g.,\\n'\n' in the condition of an \"if\" statement), Python will call '\n'\"bool()\"\\n'\n' on the value to determine if the result is true or '\n'false.\\n'\n'\\n'\n' By default, \"object\" implements \"__eq__()\" by using \"is\", '\n'returning\\n'\n' \"NotImplemented\" in the case of a false comparison: \"True '\n'if x is y\\n'\n' else NotImplemented\". 
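A hedged sketch of the rich-comparison and "NotImplemented" conventions described above (the "Version" class is hypothetical; "functools.total_ordering", a real stdlib helper not mentioned in the surrounding text, fills in the remaining comparison methods from "__eq__()" and "__lt__()"):

    >>> import functools
    >>> @functools.total_ordering
    ... class Version:
    ...     def __init__(self, major, minor):
    ...         self.major, self.minor = major, minor
    ...     def __eq__(self, other):
    ...         if not isinstance(other, Version):
    ...             return NotImplemented      # let the other operand try
    ...         return (self.major, self.minor) == (other.major, other.minor)
    ...     def __lt__(self, other):
    ...         if not isinstance(other, Version):
    ...             return NotImplemented
    ...         return (self.major, self.minor) < (other.major, other.minor)
    ...
    >>> Version(3, 12) <= Version(3, 13)       # __le__ supplied by total_ordering
    True
    >>> Version(3, 12) == '3.12'               # both sides return NotImplemented
    False
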
For \"__ne__()\", by default it '\n'delegates to\\n'\n' \"__eq__()\" and inverts the result unless it is '\n'\"NotImplemented\".\\n'\n' There are no other implied relationships among the '\n'comparison\\n'\n' operators or default implementations; for example, the '\n'truth of\\n'\n' \"(x.__hash__\".\\n'\n'\\n'\n' If a class that does not override \"__eq__()\" wishes to '\n'suppress\\n'\n' hash support, it should include \"__hash__ = None\" in the '\n'class\\n'\n' definition. A class which defines its own \"__hash__()\" '\n'that\\n'\n' explicitly raises a \"TypeError\" would be incorrectly '\n'identified as\\n'\n' hashable by an \"isinstance(obj, '\n'collections.abc.Hashable)\" call.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' By default, the \"__hash__()\" values of str and bytes '\n'objects are\\n'\n' \u201csalted\u201d with an unpredictable random value. Although '\n'they\\n'\n' remain constant within an individual Python process, '\n'they are not\\n'\n' predictable between repeated invocations of Python.This '\n'is\\n'\n' intended to provide protection against a '\n'denial-of-service caused\\n'\n' by carefully-chosen inputs that exploit the worst case\\n'\n' performance of a dict insertion, O(n^2) complexity. '\n'See\\n'\n' http://www.ocert.org/advisories/ocert-2011-003.html '\n'for\\n'\n' details.Changing hash values affects the iteration '\n'order of sets.\\n'\n' Python has never made guarantees about this ordering '\n'(and it\\n'\n' typically varies between 32-bit and 64-bit builds).See '\n'also\\n'\n' \"PYTHONHASHSEED\".\\n'\n'\\n'\n' Changed in version 3.3: Hash randomization is enabled by '\n'default.\\n'\n'\\n'\n'object.__bool__(self)\\n'\n'\\n'\n' Called to implement truth value testing and the built-in '\n'operation\\n'\n' \"bool()\"; should return \"False\" or \"True\". When this '\n'method is not\\n'\n' defined, \"__len__()\" is called, if it is defined, and the '\n'object is\\n'\n' considered true if its result is nonzero. If a class '\n'defines\\n'\n' neither \"__len__()\" nor \"__bool__()\", all its instances '\n'are\\n'\n' considered true.\\n'\n'\\n'\n'\\n'\n'Customizing attribute access\\n'\n'============================\\n'\n'\\n'\n'The following methods can be defined to customize the '\n'meaning of\\n'\n'attribute access (use of, assignment to, or deletion of '\n'\"x.name\") for\\n'\n'class instances.\\n'\n'\\n'\n'object.__getattr__(self, name)\\n'\n'\\n'\n' Called when the default attribute access fails with an\\n'\n' \"AttributeError\" (either \"__getattribute__()\" raises an\\n'\n' \"AttributeError\" because *name* is not an instance '\n'attribute or an\\n'\n' attribute in the class tree for \"self\"; or \"__get__()\" of '\n'a *name*\\n'\n' property raises \"AttributeError\"). This method should '\n'either\\n'\n' return the (computed) attribute value or raise an '\n'\"AttributeError\"\\n'\n' exception.\\n'\n'\\n'\n' Note that if the attribute is found through the normal '\n'mechanism,\\n'\n' \"__getattr__()\" is not called. (This is an intentional '\n'asymmetry\\n'\n' between \"__getattr__()\" and \"__setattr__()\".) This is '\n'done both for\\n'\n' efficiency reasons and because otherwise \"__getattr__()\" '\n'would have\\n'\n' no way to access other attributes of the instance. Note '\n'that at\\n'\n' least for instance variables, you can fake total control '\n'by not\\n'\n' inserting any values in the instance attribute dictionary '\n'(but\\n'\n' instead inserting them in another object). 
See the\\n'\n' \"__getattribute__()\" method below for a way to actually '\n'get total\\n'\n' control over attribute access.\\n'\n'\\n'\n'object.__getattribute__(self, name)\\n'\n'\\n'\n' Called unconditionally to implement attribute accesses '\n'for\\n'\n' instances of the class. If the class also defines '\n'\"__getattr__()\",\\n'\n' the latter will not be called unless \"__getattribute__()\" '\n'either\\n'\n' calls it explicitly or raises an \"AttributeError\". This '\n'method\\n'\n' should return the (computed) attribute value or raise an\\n'\n' \"AttributeError\" exception. In order to avoid infinite '\n'recursion in\\n'\n' this method, its implementation should always call the '\n'base class\\n'\n' method with the same name to access any attributes it '\n'needs, for\\n'\n' example, \"object.__getattribute__(self, name)\".\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' This method may still be bypassed when looking up '\n'special methods\\n'\n' as the result of implicit invocation via language '\n'syntax or\\n'\n' built-in functions. See Special method lookup.\\n'\n'\\n'\n' For certain sensitive attribute accesses, raises an '\n'auditing event\\n'\n' \"object.__getattr__\" with arguments \"obj\" and \"name\".\\n'\n'\\n'\n'object.__setattr__(self, name, value)\\n'\n'\\n'\n' Called when an attribute assignment is attempted. This '\n'is called\\n'\n' instead of the normal mechanism (i.e. store the value in '\n'the\\n'\n' instance dictionary). *name* is the attribute name, '\n'*value* is the\\n'\n' value to be assigned to it.\\n'\n'\\n'\n' If \"__setattr__()\" wants to assign to an instance '\n'attribute, it\\n'\n' should call the base class method with the same name, for '\n'example,\\n'\n' \"object.__setattr__(self, name, value)\".\\n'\n'\\n'\n' For certain sensitive attribute assignments, raises an '\n'auditing\\n'\n' event \"object.__setattr__\" with arguments \"obj\", \"name\", '\n'\"value\".\\n'\n'\\n'\n'object.__delattr__(self, name)\\n'\n'\\n'\n' Like \"__setattr__()\" but for attribute deletion instead '\n'of\\n'\n' assignment. This should only be implemented if \"del '\n'obj.name\" is\\n'\n' meaningful for the object.\\n'\n'\\n'\n' For certain sensitive attribute deletions, raises an '\n'auditing event\\n'\n' \"object.__delattr__\" with arguments \"obj\" and \"name\".\\n'\n'\\n'\n'object.__dir__(self)\\n'\n'\\n'\n' Called when \"dir()\" is called on the object. A sequence '\n'must be\\n'\n' returned. \"dir()\" converts the returned sequence to a '\n'list and\\n'\n' sorts it.\\n'\n'\\n'\n'\\n'\n'Customizing module attribute access\\n'\n'-----------------------------------\\n'\n'\\n'\n'Special names \"__getattr__\" and \"__dir__\" can be also used '\n'to\\n'\n'customize access to module attributes. The \"__getattr__\" '\n'function at\\n'\n'the module level should accept one argument which is the '\n'name of an\\n'\n'attribute and return the computed value or raise an '\n'\"AttributeError\".\\n'\n'If an attribute is not found on a module object through the '\n'normal\\n'\n'lookup, i.e. \"object.__getattribute__()\", then \"__getattr__\" '\n'is\\n'\n'searched in the module \"__dict__\" before raising an '\n'\"AttributeError\".\\n'\n'If found, it is called with the attribute name and the '\n'result is\\n'\n'returned.\\n'\n'\\n'\n'The \"__dir__\" function should accept no arguments, and '\n'return a\\n'\n'sequence of strings that represents the names accessible on '\n'module. 
If\\n'\n'present, this function overrides the standard \"dir()\" search '\n'on a\\n'\n'module.\\n'\n'\\n'\n'For a more fine grained customization of the module behavior '\n'(setting\\n'\n'attributes, properties, etc.), one can set the \"__class__\" '\n'attribute\\n'\n'of a module object to a subclass of \"types.ModuleType\". For '\n'example:\\n'\n'\\n'\n' import sys\\n'\n' from types import ModuleType\\n'\n'\\n'\n' class VerboseModule(ModuleType):\\n'\n' def __repr__(self):\\n'\n\" return f'Verbose {self.__name__}'\\n\"\n'\\n'\n' def __setattr__(self, attr, value):\\n'\n\" print(f'Setting {attr}...')\\n\"\n' super().__setattr__(attr, value)\\n'\n'\\n'\n' sys.modules[__name__].__class__ = VerboseModule\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' Defining module \"__getattr__\" and setting module '\n'\"__class__\" only\\n'\n' affect lookups made using the attribute access syntax \u2013 '\n'directly\\n'\n' accessing the module globals (whether by code within the '\n'module, or\\n'\n' via a reference to the module\u2019s globals dictionary) is '\n'unaffected.\\n'\n'\\n'\n'Changed in version 3.5: \"__class__\" module attribute is now '\n'writable.\\n'\n'\\n'\n'New in version 3.7: \"__getattr__\" and \"__dir__\" module '\n'attributes.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 562** - Module __getattr__ and __dir__\\n'\n' Describes the \"__getattr__\" and \"__dir__\" functions on '\n'modules.\\n'\n'\\n'\n'\\n'\n'Implementing Descriptors\\n'\n'------------------------\\n'\n'\\n'\n'The following methods only apply when an instance of the '\n'class\\n'\n'containing the method (a so-called *descriptor* class) '\n'appears in an\\n'\n'*owner* class (the descriptor must be in either the owner\u2019s '\n'class\\n'\n'dictionary or in the class dictionary for one of its '\n'parents). In the\\n'\n'examples below, \u201cthe attribute\u201d refers to the attribute '\n'whose name is\\n'\n'the key of the property in the owner class\u2019 \"__dict__\".\\n'\n'\\n'\n'object.__get__(self, instance, owner=None)\\n'\n'\\n'\n' Called to get the attribute of the owner class (class '\n'attribute\\n'\n' access) or of an instance of that class (instance '\n'attribute\\n'\n' access). The optional *owner* argument is the owner '\n'class, while\\n'\n' *instance* is the instance that the attribute was '\n'accessed through,\\n'\n' or \"None\" when the attribute is accessed through the '\n'*owner*.\\n'\n'\\n'\n' This method should return the computed attribute value or '\n'raise an\\n'\n' \"AttributeError\" exception.\\n'\n'\\n'\n' **PEP 252** specifies that \"__get__()\" is callable with '\n'one or two\\n'\n' arguments. Python\u2019s own built-in descriptors support '\n'this\\n'\n' specification; however, it is likely that some '\n'third-party tools\\n'\n' have descriptors that require both arguments. Python\u2019s '\n'own\\n'\n' \"__getattribute__()\" implementation always passes in both '\n'arguments\\n'\n' whether they are required or not.\\n'\n'\\n'\n'object.__set__(self, instance, value)\\n'\n'\\n'\n' Called to set the attribute on an instance *instance* of '\n'the owner\\n'\n' class to a new value, *value*.\\n'\n'\\n'\n' Note, adding \"__set__()\" or \"__delete__()\" changes the '\n'kind of\\n'\n' descriptor to a \u201cdata descriptor\u201d. 
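A minimal sketch of a data descriptor combining "__get__()" and "__set__()" as just described (the "Positive" and "Order" classes are hypothetical examples, not part of the reference text):

    class Positive:
        """A small data descriptor: defines both __get__() and __set__()."""
        def __init__(self, name):
            self._name = '_' + name
        def __get__(self, instance, owner=None):
            if instance is None:
                return self                  # attribute looked up on the class
            return getattr(instance, self._name)
        def __set__(self, instance, value):
            if value <= 0:
                raise ValueError('value must be positive')
            setattr(instance, self._name, value)

    class Order:
        quantity = Positive('quantity')      # stored on the instance as _quantity
        def __init__(self, quantity):
            self.quantity = quantity         # routed through Positive.__set__()

    # Order(3).quantity == 3; Order(0) raises ValueError
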
See Invoking '\n'Descriptors for\\n'\n' more details.\\n'\n'\\n'\n'object.__delete__(self, instance)\\n'\n'\\n'\n' Called to delete the attribute on an instance *instance* '\n'of the\\n'\n' owner class.\\n'\n'\\n'\n'object.__set_name__(self, owner, name)\\n'\n'\\n'\n' Called at the time the owning class *owner* is created. '\n'The\\n'\n' descriptor has been assigned to *name*.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' \"__set_name__()\" is only called implicitly as part of '\n'the \"type\"\\n'\n' constructor, so it will need to be called explicitly '\n'with the\\n'\n' appropriate parameters when a descriptor is added to a '\n'class\\n'\n' after initial creation:\\n'\n'\\n'\n' class A:\\n'\n' pass\\n'\n' descr = custom_descriptor()\\n'\n' A.attr = descr\\n'\n\" descr.__set_name__(A, 'attr')\\n\"\n'\\n'\n' See Creating the class object for more details.\\n'\n'\\n'\n' New in version 3.6.\\n'\n'\\n'\n'The attribute \"__objclass__\" is interpreted by the \"inspect\" '\n'module as\\n'\n'specifying the class where this object was defined (setting '\n'this\\n'\n'appropriately can assist in runtime introspection of dynamic '\n'class\\n'\n'attributes). For callables, it may indicate that an instance '\n'of the\\n'\n'given type (or a subclass) is expected or required as the '\n'first\\n'\n'positional argument (for example, CPython sets this '\n'attribute for\\n'\n'unbound methods that are implemented in C).\\n'\n'\\n'\n'\\n'\n'Invoking Descriptors\\n'\n'--------------------\\n'\n'\\n'\n'In general, a descriptor is an object attribute with '\n'\u201cbinding\\n'\n'behavior\u201d, one whose attribute access has been overridden by '\n'methods\\n'\n'in the descriptor protocol: \"__get__()\", \"__set__()\", and\\n'\n'\"__delete__()\". If any of those methods are defined for an '\n'object, it\\n'\n'is said to be a descriptor.\\n'\n'\\n'\n'The default behavior for attribute access is to get, set, or '\n'delete\\n'\n'the attribute from an object\u2019s dictionary. For instance, '\n'\"a.x\" has a\\n'\n'lookup chain starting with \"a.__dict__[\\'x\\']\", then\\n'\n'\"type(a).__dict__[\\'x\\']\", and continuing through the base '\n'classes of\\n'\n'\"type(a)\" excluding metaclasses.\\n'\n'\\n'\n'However, if the looked-up value is an object defining one of '\n'the\\n'\n'descriptor methods, then Python may override the default '\n'behavior and\\n'\n'invoke the descriptor method instead. Where this occurs in '\n'the\\n'\n'precedence chain depends on which descriptor methods were '\n'defined and\\n'\n'how they were called.\\n'\n'\\n'\n'The starting point for descriptor invocation is a binding, '\n'\"a.x\". 
How\\n'\n'the arguments are assembled depends on \"a\":\\n'\n'\\n'\n'Direct Call\\n'\n' The simplest and least common call is when user code '\n'directly\\n'\n' invokes a descriptor method: \"x.__get__(a)\".\\n'\n'\\n'\n'Instance Binding\\n'\n' If binding to an object instance, \"a.x\" is transformed '\n'into the\\n'\n' call: \"type(a).__dict__[\\'x\\'].__get__(a, type(a))\".\\n'\n'\\n'\n'Class Binding\\n'\n' If binding to a class, \"A.x\" is transformed into the '\n'call:\\n'\n' \"A.__dict__[\\'x\\'].__get__(None, A)\".\\n'\n'\\n'\n'Super Binding\\n'\n' If \"a\" is an instance of \"super\", then the binding '\n'\"super(B,\\n'\n' obj).m()\" searches \"obj.__class__.__mro__\" for the base '\n'class \"A\"\\n'\n' immediately preceding \"B\" and then invokes the descriptor '\n'with the\\n'\n' call: \"A.__dict__[\\'m\\'].__get__(obj, obj.__class__)\".\\n'\n'\\n'\n'For instance bindings, the precedence of descriptor '\n'invocation depends\\n'\n'on which descriptor methods are defined. A descriptor can '\n'define any\\n'\n'combination of \"__get__()\", \"__set__()\" and \"__delete__()\". '\n'If it\\n'\n'does not define \"__get__()\", then accessing the attribute '\n'will return\\n'\n'the descriptor object itself unless there is a value in the '\n'object\u2019s\\n'\n'instance dictionary. If the descriptor defines \"__set__()\" '\n'and/or\\n'\n'\"__delete__()\", it is a data descriptor; if it defines '\n'neither, it is\\n'\n'a non-data descriptor. Normally, data descriptors define '\n'both\\n'\n'\"__get__()\" and \"__set__()\", while non-data descriptors have '\n'just the\\n'\n'\"__get__()\" method. Data descriptors with \"__get__()\" and '\n'\"__set__()\"\\n'\n'(and/or \"__delete__()\") defined always override a '\n'redefinition in an\\n'\n'instance dictionary. In contrast, non-data descriptors can '\n'be\\n'\n'overridden by instances.\\n'\n'\\n'\n'Python methods (including \"staticmethod()\" and '\n'\"classmethod()\") are\\n'\n'implemented as non-data descriptors. Accordingly, instances '\n'can\\n'\n'redefine and override methods. This allows individual '\n'instances to\\n'\n'acquire behaviors that differ from other instances of the '\n'same class.\\n'\n'\\n'\n'The \"property()\" function is implemented as a data '\n'descriptor.\\n'\n'Accordingly, instances cannot override the behavior of a '\n'property.\\n'\n'\\n'\n'\\n'\n'__slots__\\n'\n'---------\\n'\n'\\n'\n'*__slots__* allow us to explicitly declare data members '\n'(like\\n'\n'properties) and deny the creation of *__dict__* and '\n'*__weakref__*\\n'\n'(unless explicitly declared in *__slots__* or available in a '\n'parent.)\\n'\n'\\n'\n'The space saved over using *__dict__* can be significant. '\n'Attribute\\n'\n'lookup speed can be significantly improved as well.\\n'\n'\\n'\n'object.__slots__\\n'\n'\\n'\n' This class variable can be assigned a string, iterable, '\n'or sequence\\n'\n' of strings with variable names used by instances. '\n'*__slots__*\\n'\n' reserves space for the declared variables and prevents '\n'the\\n'\n' automatic creation of *__dict__* and *__weakref__* for '\n'each\\n'\n' instance.\\n'\n'\\n'\n'\\n'\n'Notes on using *__slots__*\\n'\n'~~~~~~~~~~~~~~~~~~~~~~~~~~\\n'\n'\\n'\n'* When inheriting from a class without *__slots__*, the '\n'*__dict__* and\\n'\n' *__weakref__* attribute of the instances will always be '\n'accessible.\\n'\n'\\n'\n'* Without a *__dict__* variable, instances cannot be '\n'assigned new\\n'\n' variables not listed in the *__slots__* definition. 
'\n'Attempts to\\n'\n' assign to an unlisted variable name raises '\n'\"AttributeError\". If\\n'\n' dynamic assignment of new variables is desired, then add\\n'\n' \"\\'__dict__\\'\" to the sequence of strings in the '\n'*__slots__*\\n'\n' declaration.\\n'\n'\\n'\n'* Without a *__weakref__* variable for each instance, '\n'classes defining\\n'\n' *__slots__* do not support weak references to its '\n'instances. If weak\\n'\n' reference support is needed, then add \"\\'__weakref__\\'\" to '\n'the\\n'\n' sequence of strings in the *__slots__* declaration.\\n'\n'\\n'\n'* *__slots__* are implemented at the class level by '\n'creating\\n'\n' descriptors (Implementing Descriptors) for each variable '\n'name. As a\\n'\n' result, class attributes cannot be used to set default '\n'values for\\n'\n' instance variables defined by *__slots__*; otherwise, the '\n'class\\n'\n' attribute would overwrite the descriptor assignment.\\n'\n'\\n'\n'* The action of a *__slots__* declaration is not limited to '\n'the class\\n'\n' where it is defined. *__slots__* declared in parents are '\n'available\\n'\n' in child classes. However, child subclasses will get a '\n'*__dict__*\\n'\n' and *__weakref__* unless they also define *__slots__* '\n'(which should\\n'\n' only contain names of any *additional* slots).\\n'\n'\\n'\n'* If a class defines a slot also defined in a base class, '\n'the instance\\n'\n' variable defined by the base class slot is inaccessible '\n'(except by\\n'\n' retrieving its descriptor directly from the base class). '\n'This\\n'\n' renders the meaning of the program undefined. In the '\n'future, a\\n'\n' check may be added to prevent this.\\n'\n'\\n'\n'* Nonempty *__slots__* does not work for classes derived '\n'from\\n'\n' \u201cvariable-length\u201d built-in types such as \"int\", \"bytes\" '\n'and \"tuple\".\\n'\n'\\n'\n'* Any non-string iterable may be assigned to *__slots__*. '\n'Mappings may\\n'\n' also be used; however, in the future, special meaning may '\n'be\\n'\n' assigned to the values corresponding to each key.\\n'\n'\\n'\n'* *__class__* assignment works only if both classes have the '\n'same\\n'\n' *__slots__*.\\n'\n'\\n'\n'* Multiple inheritance with multiple slotted parent classes '\n'can be\\n'\n' used, but only one parent is allowed to have attributes '\n'created by\\n'\n' slots (the other bases must have empty slot layouts) - '\n'violations\\n'\n' raise \"TypeError\".\\n'\n'\\n'\n'* If an iterator is used for *__slots__* then a descriptor '\n'is created\\n'\n' for each of the iterator\u2019s values. However, the '\n'*__slots__*\\n'\n' attribute will be an empty iterator.\\n'\n'\\n'\n'\\n'\n'Customizing class creation\\n'\n'==========================\\n'\n'\\n'\n'Whenever a class inherits from another class, '\n'*__init_subclass__* is\\n'\n'called on that class. This way, it is possible to write '\n'classes which\\n'\n'change the behavior of subclasses. This is closely related '\n'to class\\n'\n'decorators, but where class decorators only affect the '\n'specific class\\n'\n'they\u2019re applied to, \"__init_subclass__\" solely applies to '\n'future\\n'\n'subclasses of the class defining the method.\\n'\n'\\n'\n'classmethod object.__init_subclass__(cls)\\n'\n'\\n'\n' This method is called whenever the containing class is '\n'subclassed.\\n'\n' *cls* is then the new subclass. 
If defined as a normal '\n'instance\\n'\n' method, this method is implicitly converted to a class '\n'method.\\n'\n'\\n'\n' Keyword arguments which are given to a new class are '\n'passed to the\\n'\n' parent\u2019s class \"__init_subclass__\". For compatibility '\n'with other\\n'\n' classes using \"__init_subclass__\", one should take out '\n'the needed\\n'\n' keyword arguments and pass the others over to the base '\n'class, as\\n'\n' in:\\n'\n'\\n'\n' class Philosopher:\\n'\n' def __init_subclass__(cls, /, default_name, '\n'**kwargs):\\n'\n' super().__init_subclass__(**kwargs)\\n'\n' cls.default_name = default_name\\n'\n'\\n'\n' class AustralianPhilosopher(Philosopher, '\n'default_name=\"Bruce\"):\\n'\n' pass\\n'\n'\\n'\n' The default implementation \"object.__init_subclass__\" '\n'does nothing,\\n'\n' but raises an error if it is called with any arguments.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' The metaclass hint \"metaclass\" is consumed by the rest '\n'of the\\n'\n' type machinery, and is never passed to '\n'\"__init_subclass__\"\\n'\n' implementations. The actual metaclass (rather than the '\n'explicit\\n'\n' hint) can be accessed as \"type(cls)\".\\n'\n'\\n'\n' New in version 3.6.\\n'\n'\\n'\n'\\n'\n'Metaclasses\\n'\n'-----------\\n'\n'\\n'\n'By default, classes are constructed using \"type()\". The '\n'class body is\\n'\n'executed in a new namespace and the class name is bound '\n'locally to the\\n'\n'result of \"type(name, bases, namespace)\".\\n'\n'\\n'\n'The class creation process can be customized by passing the\\n'\n'\"metaclass\" keyword argument in the class definition line, '\n'or by\\n'\n'inheriting from an existing class that included such an '\n'argument. In\\n'\n'the following example, both \"MyClass\" and \"MySubclass\" are '\n'instances\\n'\n'of \"Meta\":\\n'\n'\\n'\n' class Meta(type):\\n'\n' pass\\n'\n'\\n'\n' class MyClass(metaclass=Meta):\\n'\n' pass\\n'\n'\\n'\n' class MySubclass(MyClass):\\n'\n' pass\\n'\n'\\n'\n'Any other keyword arguments that are specified in the class '\n'definition\\n'\n'are passed through to all metaclass operations described '\n'below.\\n'\n'\\n'\n'When a class definition is executed, the following steps '\n'occur:\\n'\n'\\n'\n'* MRO entries are resolved;\\n'\n'\\n'\n'* the appropriate metaclass is determined;\\n'\n'\\n'\n'* the class namespace is prepared;\\n'\n'\\n'\n'* the class body is executed;\\n'\n'\\n'\n'* the class object is created.\\n'\n'\\n'\n'\\n'\n'Resolving MRO entries\\n'\n'---------------------\\n'\n'\\n'\n'If a base that appears in class definition is not an '\n'instance of\\n'\n'\"type\", then an \"__mro_entries__\" method is searched on it. '\n'If found,\\n'\n'it is called with the original bases tuple. This method must '\n'return a\\n'\n'tuple of classes that will be used instead of this base. 
The '\n'tuple may\\n'\n'be empty, in such case the original base is ignored.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 560** - Core support for typing module and generic '\n'types\\n'\n'\\n'\n'\\n'\n'Determining the appropriate metaclass\\n'\n'-------------------------------------\\n'\n'\\n'\n'The appropriate metaclass for a class definition is '\n'determined as\\n'\n'follows:\\n'\n'\\n'\n'* if no bases and no explicit metaclass are given, then '\n'\"type()\" is\\n'\n' used;\\n'\n'\\n'\n'* if an explicit metaclass is given and it is *not* an '\n'instance of\\n'\n' \"type()\", then it is used directly as the metaclass;\\n'\n'\\n'\n'* if an instance of \"type()\" is given as the explicit '\n'metaclass, or\\n'\n' bases are defined, then the most derived metaclass is '\n'used.\\n'\n'\\n'\n'The most derived metaclass is selected from the explicitly '\n'specified\\n'\n'metaclass (if any) and the metaclasses (i.e. \"type(cls)\") of '\n'all\\n'\n'specified base classes. The most derived metaclass is one '\n'which is a\\n'\n'subtype of *all* of these candidate metaclasses. If none of '\n'the\\n'\n'candidate metaclasses meets that criterion, then the class '\n'definition\\n'\n'will fail with \"TypeError\".\\n'\n'\\n'\n'\\n'\n'Preparing the class namespace\\n'\n'-----------------------------\\n'\n'\\n'\n'Once the appropriate metaclass has been identified, then the '\n'class\\n'\n'namespace is prepared. If the metaclass has a \"__prepare__\" '\n'attribute,\\n'\n'it is called as \"namespace = metaclass.__prepare__(name, '\n'bases,\\n'\n'**kwds)\" (where the additional keyword arguments, if any, '\n'come from\\n'\n'the class definition). The \"__prepare__\" method should be '\n'implemented\\n'\n'as a \"classmethod()\". The namespace returned by '\n'\"__prepare__\" is\\n'\n'passed in to \"__new__\", but when the final class object is '\n'created the\\n'\n'namespace is copied into a new \"dict\".\\n'\n'\\n'\n'If the metaclass has no \"__prepare__\" attribute, then the '\n'class\\n'\n'namespace is initialised as an empty ordered mapping.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 3115** - Metaclasses in Python 3000\\n'\n' Introduced the \"__prepare__\" namespace hook\\n'\n'\\n'\n'\\n'\n'Executing the class body\\n'\n'------------------------\\n'\n'\\n'\n'The class body is executed (approximately) as \"exec(body, '\n'globals(),\\n'\n'namespace)\". The key difference from a normal call to '\n'\"exec()\" is that\\n'\n'lexical scoping allows the class body (including any '\n'methods) to\\n'\n'reference names from the current and outer scopes when the '\n'class\\n'\n'definition occurs inside a function.\\n'\n'\\n'\n'However, even when the class definition occurs inside the '\n'function,\\n'\n'methods defined inside the class still cannot see names '\n'defined at the\\n'\n'class scope. Class variables must be accessed through the '\n'first\\n'\n'parameter of instance or class methods, or through the '\n'implicit\\n'\n'lexically scoped \"__class__\" reference described in the next '\n'section.\\n'\n'\\n'\n'\\n'\n'Creating the class object\\n'\n'-------------------------\\n'\n'\\n'\n'Once the class namespace has been populated by executing the '\n'class\\n'\n'body, the class object is created by calling '\n'\"metaclass(name, bases,\\n'\n'namespace, **kwds)\" (the additional keywords passed here are '\n'the same\\n'\n'as those passed to \"__prepare__\").\\n'\n'\\n'\n'This class object is the one that will be referenced by the '\n'zero-\\n'\n'argument form of \"super()\". 
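A hedged sketch of these class-creation steps (the "RecordingMeta" and "Service" names are illustrative): a metaclass "__new__()" sees the namespace produced by executing the class body before delegating actual creation to "type.__new__":

    class RecordingMeta(type):
        def __new__(mcls, name, bases, namespace):
            # namespace is the mapping populated by executing the class body
            cls = super().__new__(mcls, name, bases, namespace)
            cls._method_names = [k for k, v in namespace.items() if callable(v)]
            return cls

    class Service(metaclass=RecordingMeta):
        def start(self): ...
        def stop(self): ...

    # Service._method_names == ['start', 'stop']
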
\"__class__\" is an implicit '\n'closure\\n'\n'reference created by the compiler if any methods in a class '\n'body refer\\n'\n'to either \"__class__\" or \"super\". This allows the zero '\n'argument form\\n'\n'of \"super()\" to correctly identify the class being defined '\n'based on\\n'\n'lexical scoping, while the class or instance that was used '\n'to make the\\n'\n'current call is identified based on the first argument '\n'passed to the\\n'\n'method.\\n'\n'\\n'\n'**CPython implementation detail:** In CPython 3.6 and later, '\n'the\\n'\n'\"__class__\" cell is passed to the metaclass as a '\n'\"__classcell__\" entry\\n'\n'in the class namespace. If present, this must be propagated '\n'up to the\\n'\n'\"type.__new__\" call in order for the class to be '\n'initialised\\n'\n'correctly. Failing to do so will result in a \"RuntimeError\" '\n'in Python\\n'\n'3.8.\\n'\n'\\n'\n'When using the default metaclass \"type\", or any metaclass '\n'that\\n'\n'ultimately calls \"type.__new__\", the following additional\\n'\n'customisation steps are invoked after creating the class '\n'object:\\n'\n'\\n'\n'* first, \"type.__new__\" collects all of the descriptors in '\n'the class\\n'\n' namespace that define a \"__set_name__()\" method;\\n'\n'\\n'\n'* second, all of these \"__set_name__\" methods are called '\n'with the\\n'\n' class being defined and the assigned name of that '\n'particular\\n'\n' descriptor;\\n'\n'\\n'\n'* finally, the \"__init_subclass__()\" hook is called on the '\n'immediate\\n'\n' parent of the new class in its method resolution order.\\n'\n'\\n'\n'After the class object is created, it is passed to the '\n'class\\n'\n'decorators included in the class definition (if any) and the '\n'resulting\\n'\n'object is bound in the local namespace as the defined '\n'class.\\n'\n'\\n'\n'When a new class is created by \"type.__new__\", the object '\n'provided as\\n'\n'the namespace parameter is copied to a new ordered mapping '\n'and the\\n'\n'original object is discarded. The new copy is wrapped in a '\n'read-only\\n'\n'proxy, which becomes the \"__dict__\" attribute of the class '\n'object.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 3135** - New super\\n'\n' Describes the implicit \"__class__\" closure reference\\n'\n'\\n'\n'\\n'\n'Uses for metaclasses\\n'\n'--------------------\\n'\n'\\n'\n'The potential uses for metaclasses are boundless. Some ideas '\n'that have\\n'\n'been explored include enum, logging, interface checking, '\n'automatic\\n'\n'delegation, automatic property creation, proxies, '\n'frameworks, and\\n'\n'automatic resource locking/synchronization.\\n'\n'\\n'\n'\\n'\n'Customizing instance and subclass checks\\n'\n'========================================\\n'\n'\\n'\n'The following methods are used to override the default '\n'behavior of the\\n'\n'\"isinstance()\" and \"issubclass()\" built-in functions.\\n'\n'\\n'\n'In particular, the metaclass \"abc.ABCMeta\" implements these '\n'methods in\\n'\n'order to allow the addition of Abstract Base Classes (ABCs) '\n'as\\n'\n'\u201cvirtual base classes\u201d to any class or type (including '\n'built-in\\n'\n'types), including other ABCs.\\n'\n'\\n'\n'class.__instancecheck__(self, instance)\\n'\n'\\n'\n' Return true if *instance* should be considered a (direct '\n'or\\n'\n' indirect) instance of *class*. 
If defined, called to '\n'implement\\n'\n' \"isinstance(instance, class)\".\\n'\n'\\n'\n'class.__subclasscheck__(self, subclass)\\n'\n'\\n'\n' Return true if *subclass* should be considered a (direct '\n'or\\n'\n' indirect) subclass of *class*. If defined, called to '\n'implement\\n'\n' \"issubclass(subclass, class)\".\\n'\n'\\n'\n'Note that these methods are looked up on the type '\n'(metaclass) of a\\n'\n'class. They cannot be defined as class methods in the '\n'actual class.\\n'\n'This is consistent with the lookup of special methods that '\n'are called\\n'\n'on instances, only in this case the instance is itself a '\n'class.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 3119** - Introducing Abstract Base Classes\\n'\n' Includes the specification for customizing '\n'\"isinstance()\" and\\n'\n' \"issubclass()\" behavior through \"__instancecheck__()\" '\n'and\\n'\n' \"__subclasscheck__()\", with motivation for this '\n'functionality in\\n'\n' the context of adding Abstract Base Classes (see the '\n'\"abc\"\\n'\n' module) to the language.\\n'\n'\\n'\n'\\n'\n'Emulating generic types\\n'\n'=======================\\n'\n'\\n'\n'One can implement the generic class syntax as specified by '\n'**PEP 484**\\n'\n'(for example \"List[int]\") by defining a special method:\\n'\n'\\n'\n'classmethod object.__class_getitem__(cls, key)\\n'\n'\\n'\n' Return an object representing the specialization of a '\n'generic class\\n'\n' by type arguments found in *key*.\\n'\n'\\n'\n'This method is looked up on the class object itself, and '\n'when defined\\n'\n'in the class body, this method is implicitly a class '\n'method. Note,\\n'\n'this mechanism is primarily reserved for use with static '\n'type hints,\\n'\n'other usage is discouraged.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 560** - Core support for typing module and generic '\n'types\\n'\n'\\n'\n'\\n'\n'Emulating callable objects\\n'\n'==========================\\n'\n'\\n'\n'object.__call__(self[, args...])\\n'\n'\\n'\n' Called when the instance is \u201ccalled\u201d as a function; if '\n'this method\\n'\n' is defined, \"x(arg1, arg2, ...)\" roughly translates to\\n'\n' \"type(x).__call__(x, arg1, ...)\".\\n'\n'\\n'\n'\\n'\n'Emulating container types\\n'\n'=========================\\n'\n'\\n'\n'The following methods can be defined to implement container '\n'objects.\\n'\n'Containers usually are sequences (such as lists or tuples) '\n'or mappings\\n'\n'(like dictionaries), but can represent other containers as '\n'well. The\\n'\n'first set of methods is used either to emulate a sequence or '\n'to\\n'\n'emulate a mapping; the difference is that for a sequence, '\n'the\\n'\n'allowable keys should be the integers *k* for which \"0 <= k '\n'< N\" where\\n'\n'*N* is the length of the sequence, or slice objects, which '\n'define a\\n'\n'range of items. It is also recommended that mappings '\n'provide the\\n'\n'methods \"keys()\", \"values()\", \"items()\", \"get()\", '\n'\"clear()\",\\n'\n'\"setdefault()\", \"pop()\", \"popitem()\", \"copy()\", and '\n'\"update()\"\\n'\n'behaving similar to those for Python\u2019s standard dictionary '\n'objects.\\n'\n'The \"collections.abc\" module provides a \"MutableMapping\" '\n'abstract base\\n'\n'class to help create those methods from a base set of '\n'\"__getitem__()\",\\n'\n'\"__setitem__()\", \"__delitem__()\", and \"keys()\". 
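A hedged sketch of the "collections.abc.MutableMapping" approach just mentioned (the "LowerDict" class is illustrative; the ABC's abstract set is "__getitem__()", "__setitem__()", "__delitem__()", "__iter__()" and "__len__()", from which it derives "get()", "update()", "keys()" and the rest):

    from collections.abc import MutableMapping

    class LowerDict(MutableMapping):
        """Mapping that normalizes keys to lowercase."""
        def __init__(self, *args, **kwargs):
            self._data = {}
            self.update(*args, **kwargs)     # update() comes from the ABC
        def __getitem__(self, key):
            return self._data[key.lower()]
        def __setitem__(self, key, value):
            self._data[key.lower()] = value
        def __delitem__(self, key):
            del self._data[key.lower()]
        def __iter__(self):
            return iter(self._data)
        def __len__(self):
            return len(self._data)

    # d = LowerDict(Host='example.com'); d['HOST'] == 'example.com'; 'host' in d
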
Mutable '\n'sequences\\n'\n'should provide methods \"append()\", \"count()\", \"index()\", '\n'\"extend()\",\\n'\n'\"insert()\", \"pop()\", \"remove()\", \"reverse()\" and \"sort()\", '\n'like Python\\n'\n'standard list objects. Finally, sequence types should '\n'implement\\n'\n'addition (meaning concatenation) and multiplication '\n'(meaning\\n'\n'repetition) by defining the methods \"__add__()\", '\n'\"__radd__()\",\\n'\n'\"__iadd__()\", \"__mul__()\", \"__rmul__()\" and \"__imul__()\" '\n'described\\n'\n'below; they should not define other numerical operators. It '\n'is\\n'\n'recommended that both mappings and sequences implement the\\n'\n'\"__contains__()\" method to allow efficient use of the \"in\" '\n'operator;\\n'\n'for mappings, \"in\" should search the mapping\u2019s keys; for '\n'sequences, it\\n'\n'should search through the values. It is further recommended '\n'that both\\n'\n'mappings and sequences implement the \"__iter__()\" method to '\n'allow\\n'\n'efficient iteration through the container; for mappings, '\n'\"__iter__()\"\\n'\n'should iterate through the object\u2019s keys; for sequences, it '\n'should\\n'\n'iterate through the values.\\n'\n'\\n'\n'object.__len__(self)\\n'\n'\\n'\n' Called to implement the built-in function \"len()\". '\n'Should return\\n'\n' the length of the object, an integer \">=\" 0. Also, an '\n'object that\\n'\n' doesn\u2019t define a \"__bool__()\" method and whose '\n'\"__len__()\" method\\n'\n' returns zero is considered to be false in a Boolean '\n'context.\\n'\n'\\n'\n' **CPython implementation detail:** In CPython, the length '\n'is\\n'\n' required to be at most \"sys.maxsize\". If the length is '\n'larger than\\n'\n' \"sys.maxsize\" some features (such as \"len()\") may raise\\n'\n' \"OverflowError\". To prevent raising \"OverflowError\" by '\n'truth value\\n'\n' testing, an object must define a \"__bool__()\" method.\\n'\n'\\n'\n'object.__length_hint__(self)\\n'\n'\\n'\n' Called to implement \"operator.length_hint()\". Should '\n'return an\\n'\n' estimated length for the object (which may be greater or '\n'less than\\n'\n' the actual length). The length must be an integer \">=\" 0. '\n'The\\n'\n' return value may also be \"NotImplemented\", which is '\n'treated the\\n'\n' same as if the \"__length_hint__\" method didn\u2019t exist at '\n'all. This\\n'\n' method is purely an optimization and is never required '\n'for\\n'\n' correctness.\\n'\n'\\n'\n' New in version 3.4.\\n'\n'\\n'\n'Note:\\n'\n'\\n'\n' Slicing is done exclusively with the following three '\n'methods. A\\n'\n' call like\\n'\n'\\n'\n' a[1:2] = b\\n'\n'\\n'\n' is translated to\\n'\n'\\n'\n' a[slice(1, 2, None)] = b\\n'\n'\\n'\n' and so forth. Missing slice items are always filled in '\n'with \"None\".\\n'\n'\\n'\n'object.__getitem__(self, key)\\n'\n'\\n'\n' Called to implement evaluation of \"self[key]\". For '\n'sequence types,\\n'\n' the accepted keys should be integers and slice objects. '\n'Note that\\n'\n' the special interpretation of negative indexes (if the '\n'class wishes\\n'\n' to emulate a sequence type) is up to the \"__getitem__()\" '\n'method. If\\n'\n' *key* is of an inappropriate type, \"TypeError\" may be '\n'raised; if of\\n'\n' a value outside the set of indexes for the sequence '\n'(after any\\n'\n' special interpretation of negative values), \"IndexError\" '\n'should be\\n'\n' raised. 
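A minimal sketch of the sequence side of "__getitem__()" as just described (the "Deck" class and its ranks are illustrative assumptions): delegating to an internal list gives the expected "IndexError" past the end, which is also what lets iteration, "reversed()" and membership tests fall back on this method:

    class Deck:
        """Sequence sketch: integer keys from 0 to len-1, slices supported."""
        _ranks = ['7', '8', '9', '10', 'J', 'Q', 'K', 'A']
        def __len__(self):
            return len(self._ranks)
        def __getitem__(self, position):
            # the list handles negative indexes, slices and IndexError
            return self._ranks[position]

    # list(Deck()) == ['7', '8', '9', '10', 'J', 'Q', 'K', 'A']
    # Deck()[-1] == 'A'; Deck()[1:3] == ['8', '9']
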
For mapping types, if *key* is missing (not in '\n'the\\n'\n' container), \"KeyError\" should be raised.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' \"for\" loops expect that an \"IndexError\" will be raised '\n'for\\n'\n' illegal indexes to allow proper detection of the end of '\n'the\\n'\n' sequence.\\n'\n'\\n'\n'object.__setitem__(self, key, value)\\n'\n'\\n'\n' Called to implement assignment to \"self[key]\". Same note '\n'as for\\n'\n' \"__getitem__()\". This should only be implemented for '\n'mappings if\\n'\n' the objects support changes to the values for keys, or if '\n'new keys\\n'\n' can be added, or for sequences if elements can be '\n'replaced. The\\n'\n' same exceptions should be raised for improper *key* '\n'values as for\\n'\n' the \"__getitem__()\" method.\\n'\n'\\n'\n'object.__delitem__(self, key)\\n'\n'\\n'\n' Called to implement deletion of \"self[key]\". Same note '\n'as for\\n'\n' \"__getitem__()\". This should only be implemented for '\n'mappings if\\n'\n' the objects support removal of keys, or for sequences if '\n'elements\\n'\n' can be removed from the sequence. The same exceptions '\n'should be\\n'\n' raised for improper *key* values as for the '\n'\"__getitem__()\" method.\\n'\n'\\n'\n'object.__missing__(self, key)\\n'\n'\\n'\n' Called by \"dict\".\"__getitem__()\" to implement \"self[key]\" '\n'for dict\\n'\n' subclasses when key is not in the dictionary.\\n'\n'\\n'\n'object.__iter__(self)\\n'\n'\\n'\n' This method is called when an iterator is required for a '\n'container.\\n'\n' This method should return a new iterator object that can '\n'iterate\\n'\n' over all the objects in the container. For mappings, it '\n'should\\n'\n' iterate over the keys of the container.\\n'\n'\\n'\n' Iterator objects also need to implement this method; they '\n'are\\n'\n' required to return themselves. For more information on '\n'iterator\\n'\n' objects, see Iterator Types.\\n'\n'\\n'\n'object.__reversed__(self)\\n'\n'\\n'\n' Called (if present) by the \"reversed()\" built-in to '\n'implement\\n'\n' reverse iteration. It should return a new iterator '\n'object that\\n'\n' iterates over all the objects in the container in reverse '\n'order.\\n'\n'\\n'\n' If the \"__reversed__()\" method is not provided, the '\n'\"reversed()\"\\n'\n' built-in will fall back to using the sequence protocol '\n'(\"__len__()\"\\n'\n' and \"__getitem__()\"). Objects that support the sequence '\n'protocol\\n'\n' should only provide \"__reversed__()\" if they can provide '\n'an\\n'\n' implementation that is more efficient than the one '\n'provided by\\n'\n' \"reversed()\".\\n'\n'\\n'\n'The membership test operators (\"in\" and \"not in\") are '\n'normally\\n'\n'implemented as an iteration through a container. However, '\n'container\\n'\n'objects can supply the following special method with a more '\n'efficient\\n'\n'implementation, which also does not require the object be '\n'iterable.\\n'\n'\\n'\n'object.__contains__(self, item)\\n'\n'\\n'\n' Called to implement membership test operators. Should '\n'return true\\n'\n' if *item* is in *self*, false otherwise. 
For mapping '\n'objects, this\\n'\n' should consider the keys of the mapping rather than the '\n'values or\\n'\n' the key-item pairs.\\n'\n'\\n'\n' For objects that don\u2019t define \"__contains__()\", the '\n'membership test\\n'\n' first tries iteration via \"__iter__()\", then the old '\n'sequence\\n'\n' iteration protocol via \"__getitem__()\", see this section '\n'in the\\n'\n' language reference.\\n'\n'\\n'\n'\\n'\n'Emulating numeric types\\n'\n'=======================\\n'\n'\\n'\n'The following methods can be defined to emulate numeric '\n'objects.\\n'\n'Methods corresponding to operations that are not supported '\n'by the\\n'\n'particular kind of number implemented (e.g., bitwise '\n'operations for\\n'\n'non-integral numbers) should be left undefined.\\n'\n'\\n'\n'object.__add__(self, other)\\n'\n'object.__sub__(self, other)\\n'\n'object.__mul__(self, other)\\n'\n'object.__matmul__(self, other)\\n'\n'object.__truediv__(self, other)\\n'\n'object.__floordiv__(self, other)\\n'\n'object.__mod__(self, other)\\n'\n'object.__divmod__(self, other)\\n'\n'object.__pow__(self, other[, modulo])\\n'\n'object.__lshift__(self, other)\\n'\n'object.__rshift__(self, other)\\n'\n'object.__and__(self, other)\\n'\n'object.__xor__(self, other)\\n'\n'object.__or__(self, other)\\n'\n'\\n'\n' These methods are called to implement the binary '\n'arithmetic\\n'\n' operations (\"+\", \"-\", \"*\", \"@\", \"/\", \"//\", \"%\", '\n'\"divmod()\",\\n'\n' \"pow()\", \"**\", \"<<\", \">>\", \"&\", \"^\", \"|\"). For instance, '\n'to\\n'\n' evaluate the expression \"x + y\", where *x* is an instance '\n'of a\\n'\n' class that has an \"__add__()\" method, \"x.__add__(y)\" is '\n'called.\\n'\n' The \"__divmod__()\" method should be the equivalent to '\n'using\\n'\n' \"__floordiv__()\" and \"__mod__()\"; it should not be '\n'related to\\n'\n' \"__truediv__()\". Note that \"__pow__()\" should be defined '\n'to accept\\n'\n' an optional third argument if the ternary version of the '\n'built-in\\n'\n' \"pow()\" function is to be supported.\\n'\n'\\n'\n' If one of those methods does not support the operation '\n'with the\\n'\n' supplied arguments, it should return \"NotImplemented\".\\n'\n'\\n'\n'object.__radd__(self, other)\\n'\n'object.__rsub__(self, other)\\n'\n'object.__rmul__(self, other)\\n'\n'object.__rmatmul__(self, other)\\n'\n'object.__rtruediv__(self, other)\\n'\n'object.__rfloordiv__(self, other)\\n'\n'object.__rmod__(self, other)\\n'\n'object.__rdivmod__(self, other)\\n'\n'object.__rpow__(self, other[, modulo])\\n'\n'object.__rlshift__(self, other)\\n'\n'object.__rrshift__(self, other)\\n'\n'object.__rand__(self, other)\\n'\n'object.__rxor__(self, other)\\n'\n'object.__ror__(self, other)\\n'\n'\\n'\n' These methods are called to implement the binary '\n'arithmetic\\n'\n' operations (\"+\", \"-\", \"*\", \"@\", \"/\", \"//\", \"%\", '\n'\"divmod()\",\\n'\n' \"pow()\", \"**\", \"<<\", \">>\", \"&\", \"^\", \"|\") with reflected '\n'(swapped)\\n'\n' operands. These functions are only called if the left '\n'operand does\\n'\n' not support the corresponding operation [3] and the '\n'operands are of\\n'\n' different types. 
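A hedged sketch of the "NotImplemented" convention for binary and reflected operations described above (the "Money" class is hypothetical):

    class Money:
        def __init__(self, cents):
            self.cents = cents
        def __add__(self, other):
            if isinstance(other, Money):
                return Money(self.cents + other.cents)
            if isinstance(other, int):           # allow Money + int (cents)
                return Money(self.cents + other)
            return NotImplemented                # let the other operand try
        def __radd__(self, other):
            # reflected form: reached when the left operand gives up,
            # e.g. 10 + Money(5) or sum() starting from 0
            return self.__add__(other)

    # Money(50) + Money(25) -> Money(75); 10 + Money(5) uses __radd__()
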
[4] For instance, to evaluate the '\n'expression \"x -\\n'\n' y\", where *y* is an instance of a class that has an '\n'\"__rsub__()\"\\n'\n' method, \"y.__rsub__(x)\" is called if \"x.__sub__(y)\" '\n'returns\\n'\n' *NotImplemented*.\\n'\n'\\n'\n' Note that ternary \"pow()\" will not try calling '\n'\"__rpow__()\" (the\\n'\n' coercion rules would become too complicated).\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' If the right operand\u2019s type is a subclass of the left '\n'operand\u2019s\\n'\n' type and that subclass provides a different '\n'implementation of the\\n'\n' reflected method for the operation, this method will be '\n'called\\n'\n' before the left operand\u2019s non-reflected method. This '\n'behavior\\n'\n' allows subclasses to override their ancestors\u2019 '\n'operations.\\n'\n'\\n'\n'object.__iadd__(self, other)\\n'\n'object.__isub__(self, other)\\n'\n'object.__imul__(self, other)\\n'\n'object.__imatmul__(self, other)\\n'\n'object.__itruediv__(self, other)\\n'\n'object.__ifloordiv__(self, other)\\n'\n'object.__imod__(self, other)\\n'\n'object.__ipow__(self, other[, modulo])\\n'\n'object.__ilshift__(self, other)\\n'\n'object.__irshift__(self, other)\\n'\n'object.__iand__(self, other)\\n'\n'object.__ixor__(self, other)\\n'\n'object.__ior__(self, other)\\n'\n'\\n'\n' These methods are called to implement the augmented '\n'arithmetic\\n'\n' assignments (\"+=\", \"-=\", \"*=\", \"@=\", \"/=\", \"//=\", \"%=\", '\n'\"**=\",\\n'\n' \"<<=\", \">>=\", \"&=\", \"^=\", \"|=\"). These methods should '\n'attempt to\\n'\n' do the operation in-place (modifying *self*) and return '\n'the result\\n'\n' (which could be, but does not have to be, *self*). If a '\n'specific\\n'\n' method is not defined, the augmented assignment falls '\n'back to the\\n'\n' normal methods. For instance, if *x* is an instance of a '\n'class\\n'\n' with an \"__iadd__()\" method, \"x += y\" is equivalent to \"x '\n'=\\n'\n' x.__iadd__(y)\" . Otherwise, \"x.__add__(y)\" and '\n'\"y.__radd__(x)\" are\\n'\n' considered, as with the evaluation of \"x + y\". In '\n'certain\\n'\n' situations, augmented assignment can result in unexpected '\n'errors\\n'\n' (see Why does a_tuple[i] += [\u2018item\u2019] raise an exception '\n'when the\\n'\n' addition works?), but this behavior is in fact part of '\n'the data\\n'\n' model.\\n'\n'\\n'\n'object.__neg__(self)\\n'\n'object.__pos__(self)\\n'\n'object.__abs__(self)\\n'\n'object.__invert__(self)\\n'\n'\\n'\n' Called to implement the unary arithmetic operations (\"-\", '\n'\"+\",\\n'\n' \"abs()\" and \"~\").\\n'\n'\\n'\n'object.__complex__(self)\\n'\n'object.__int__(self)\\n'\n'object.__float__(self)\\n'\n'\\n'\n' Called to implement the built-in functions \"complex()\", '\n'\"int()\" and\\n'\n' \"float()\". Should return a value of the appropriate '\n'type.\\n'\n'\\n'\n'object.__index__(self)\\n'\n'\\n'\n' Called to implement \"operator.index()\", and whenever '\n'Python needs\\n'\n' to losslessly convert the numeric object to an integer '\n'object (such\\n'\n' as in slicing, or in the built-in \"bin()\", \"hex()\" and '\n'\"oct()\"\\n'\n' functions). Presence of this method indicates that the '\n'numeric\\n'\n' object is an integer type. 
Must return an integer.\\n'\n'\\n'\n' If \"__int__()\", \"__float__()\" and \"__complex__()\" are not '\n'defined\\n'\n' then corresponding built-in functions \"int()\", \"float()\" '\n'and\\n'\n' \"complex()\" fall back to \"__index__()\".\\n'\n'\\n'\n'object.__round__(self[, ndigits])\\n'\n'object.__trunc__(self)\\n'\n'object.__floor__(self)\\n'\n'object.__ceil__(self)\\n'\n'\\n'\n' Called to implement the built-in function \"round()\" and '\n'\"math\"\\n'\n' functions \"trunc()\", \"floor()\" and \"ceil()\". Unless '\n'*ndigits* is\\n'\n' passed to \"__round__()\" all these methods should return '\n'the value\\n'\n' of the object truncated to an \"Integral\" (typically an '\n'\"int\").\\n'\n'\\n'\n' If \"__int__()\" is not defined then the built-in function '\n'\"int()\"\\n'\n' falls back to \"__trunc__()\".\\n'\n'\\n'\n'\\n'\n'With Statement Context Managers\\n'\n'===============================\\n'\n'\\n'\n'A *context manager* is an object that defines the runtime '\n'context to\\n'\n'be established when executing a \"with\" statement. The '\n'context manager\\n'\n'handles the entry into, and the exit from, the desired '\n'runtime context\\n'\n'for the execution of the block of code. Context managers '\n'are normally\\n'\n'invoked using the \"with\" statement (described in section The '\n'with\\n'\n'statement), but can also be used by directly invoking their '\n'methods.\\n'\n'\\n'\n'Typical uses of context managers include saving and '\n'restoring various\\n'\n'kinds of global state, locking and unlocking resources, '\n'closing opened\\n'\n'files, etc.\\n'\n'\\n'\n'For more information on context managers, see Context '\n'Manager Types.\\n'\n'\\n'\n'object.__enter__(self)\\n'\n'\\n'\n' Enter the runtime context related to this object. The '\n'\"with\"\\n'\n' statement will bind this method\u2019s return value to the '\n'target(s)\\n'\n' specified in the \"as\" clause of the statement, if any.\\n'\n'\\n'\n'object.__exit__(self, exc_type, exc_value, traceback)\\n'\n'\\n'\n' Exit the runtime context related to this object. The '\n'parameters\\n'\n' describe the exception that caused the context to be '\n'exited. If the\\n'\n' context was exited without an exception, all three '\n'arguments will\\n'\n' be \"None\".\\n'\n'\\n'\n' If an exception is supplied, and the method wishes to '\n'suppress the\\n'\n' exception (i.e., prevent it from being propagated), it '\n'should\\n'\n' return a true value. Otherwise, the exception will be '\n'processed\\n'\n' normally upon exit from this method.\\n'\n'\\n'\n' Note that \"__exit__()\" methods should not reraise the '\n'passed-in\\n'\n' exception; this is the caller\u2019s responsibility.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 343** - The \u201cwith\u201d statement\\n'\n' The specification, background, and examples for the '\n'Python \"with\"\\n'\n' statement.\\n'\n'\\n'\n'\\n'\n'Customizing positional arguments in class pattern matching\\n'\n'==========================================================\\n'\n'\\n'\n'When using a class name in a pattern, positional arguments '\n'in the\\n'\n'pattern are not allowed by default, i.e. \"case MyClass(x, '\n'y)\" is\\n'\n'typically invalid without special support in \"MyClass\". To '\n'be able to\\n'\n'use that kind of patterns, the class needs to define a\\n'\n'*__match_args__* attribute.\\n'\n'\\n'\n'object.__match_args__\\n'\n'\\n'\n' This class variable can be assigned a tuple of strings. 
'\n'When this\\n'\n' class is used in a class pattern with positional '\n'arguments, each\\n'\n' positional argument will be converted into a keyword '\n'argument,\\n'\n' using the corresponding value in *__match_args__* as the '\n'keyword.\\n'\n' The absence of this attribute is equivalent to setting it '\n'to \"()\".\\n'\n'\\n'\n'For example, if \"MyClass.__match_args__\" is \"(\"left\", '\n'\"center\",\\n'\n'\"right\")\" that means that \"case MyClass(x, y)\" is equivalent '\n'to \"case\\n'\n'MyClass(left=x, center=y)\". Note that the number of '\n'arguments in the\\n'\n'pattern must be smaller than or equal to the number of '\n'elements in\\n'\n'*__match_args__*; if it is larger, the pattern match attempt '\n'will\\n'\n'raise a \"TypeError\".\\n'\n'\\n'\n'New in version 3.10.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 634** - Structural Pattern Matching\\n'\n' The specification for the Python \"match\" statement.\\n'\n'\\n'\n'\\n'\n'Special method lookup\\n'\n'=====================\\n'\n'\\n'\n'For custom classes, implicit invocations of special methods '\n'are only\\n'\n'guaranteed to work correctly if defined on an object\u2019s type, '\n'not in\\n'\n'the object\u2019s instance dictionary. That behaviour is the '\n'reason why\\n'\n'the following code raises an exception:\\n'\n'\\n'\n' >>> class C:\\n'\n' ... pass\\n'\n' ...\\n'\n' >>> c = C()\\n'\n' >>> c.__len__ = lambda: 5\\n'\n' >>> len(c)\\n'\n' Traceback (most recent call last):\\n'\n' File \"\", line 1, in \\n'\n\" TypeError: object of type 'C' has no len()\\n\"\n'\\n'\n'The rationale behind this behaviour lies with a number of '\n'special\\n'\n'methods such as \"__hash__()\" and \"__repr__()\" that are '\n'implemented by\\n'\n'all objects, including type objects. If the implicit lookup '\n'of these\\n'\n'methods used the conventional lookup process, they would '\n'fail when\\n'\n'invoked on the type object itself:\\n'\n'\\n'\n' >>> 1 .__hash__() == hash(1)\\n'\n' True\\n'\n' >>> int.__hash__() == hash(int)\\n'\n' Traceback (most recent call last):\\n'\n' File \"\", line 1, in \\n'\n\" TypeError: descriptor '__hash__' of 'int' object needs an \"\n'argument\\n'\n'\\n'\n'Incorrectly attempting to invoke an unbound method of a '\n'class in this\\n'\n'way is sometimes referred to as \u2018metaclass confusion\u2019, and '\n'is avoided\\n'\n'by bypassing the instance when looking up special methods:\\n'\n'\\n'\n' >>> type(1).__hash__(1) == hash(1)\\n'\n' True\\n'\n' >>> type(int).__hash__(int) == hash(int)\\n'\n' True\\n'\n'\\n'\n'In addition to bypassing any instance attributes in the '\n'interest of\\n'\n'correctness, implicit special method lookup generally also '\n'bypasses\\n'\n'the \"__getattribute__()\" method even of the object\u2019s '\n'metaclass:\\n'\n'\\n'\n' >>> class Meta(type):\\n'\n' ... def __getattribute__(*args):\\n'\n' ... print(\"Metaclass getattribute invoked\")\\n'\n' ... return type.__getattribute__(*args)\\n'\n' ...\\n'\n' >>> class C(object, metaclass=Meta):\\n'\n' ... def __len__(self):\\n'\n' ... return 10\\n'\n' ... def __getattribute__(*args):\\n'\n' ... print(\"Class getattribute invoked\")\\n'\n' ... 
return object.__getattribute__(*args)\\n'\n' ...\\n'\n' >>> c = C()\\n'\n' >>> c.__len__() # Explicit lookup via '\n'instance\\n'\n' Class getattribute invoked\\n'\n' 10\\n'\n' >>> type(c).__len__(c) # Explicit lookup via '\n'type\\n'\n' Metaclass getattribute invoked\\n'\n' 10\\n'\n' >>> len(c) # Implicit lookup\\n'\n' 10\\n'\n'\\n'\n'Bypassing the \"__getattribute__()\" machinery in this fashion '\n'provides\\n'\n'significant scope for speed optimisations within the '\n'interpreter, at\\n'\n'the cost of some flexibility in the handling of special '\n'methods (the\\n'\n'special method *must* be set on the class object itself in '\n'order to be\\n'\n'consistently invoked by the interpreter).\\n',\n'string-methods':'String Methods\\n'\n'**************\\n'\n'\\n'\n'Strings implement all of the common sequence operations, '\n'along with\\n'\n'the additional methods described below.\\n'\n'\\n'\n'Strings also support two styles of string formatting, one '\n'providing a\\n'\n'large degree of flexibility and customization (see '\n'\"str.format()\",\\n'\n'Format String Syntax and Custom String Formatting) and the '\n'other based\\n'\n'on C \"printf\" style formatting that handles a narrower '\n'range of types\\n'\n'and is slightly harder to use correctly, but is often '\n'faster for the\\n'\n'cases it can handle (printf-style String Formatting).\\n'\n'\\n'\n'The Text Processing Services section of the standard '\n'library covers a\\n'\n'number of other modules that provide various text related '\n'utilities\\n'\n'(including regular expression support in the \"re\" '\n'module).\\n'\n'\\n'\n'str.capitalize()\\n'\n'\\n'\n' Return a copy of the string with its first character '\n'capitalized\\n'\n' and the rest lowercased.\\n'\n'\\n'\n' Changed in version 3.8: The first character is now put '\n'into\\n'\n' titlecase rather than uppercase. This means that '\n'characters like\\n'\n' digraphs will only have their first letter capitalized, '\n'instead of\\n'\n' the full character.\\n'\n'\\n'\n'str.casefold()\\n'\n'\\n'\n' Return a casefolded copy of the string. Casefolded '\n'strings may be\\n'\n' used for caseless matching.\\n'\n'\\n'\n' Casefolding is similar to lowercasing but more '\n'aggressive because\\n'\n' it is intended to remove all case distinctions in a '\n'string. For\\n'\n' example, the German lowercase letter \"\\'\u00df\\'\" is '\n'equivalent to \"\"ss\"\".\\n'\n' Since it is already lowercase, \"lower()\" would do '\n'nothing to \"\\'\u00df\\'\";\\n'\n' \"casefold()\" converts it to \"\"ss\"\".\\n'\n'\\n'\n' The casefolding algorithm is described in section 3.13 '\n'of the\\n'\n' Unicode Standard.\\n'\n'\\n'\n' New in version 3.3.\\n'\n'\\n'\n'str.center(width[, fillchar])\\n'\n'\\n'\n' Return centered in a string of length *width*. Padding '\n'is done\\n'\n' using the specified *fillchar* (default is an ASCII '\n'space). The\\n'\n' original string is returned if *width* is less than or '\n'equal to\\n'\n' \"len(s)\".\\n'\n'\\n'\n'str.count(sub[, start[, end]])\\n'\n'\\n'\n' Return the number of non-overlapping occurrences of '\n'substring *sub*\\n'\n' in the range [*start*, *end*]. Optional arguments '\n'*start* and\\n'\n' *end* are interpreted as in slice notation.\\n'\n'\\n'\n\"str.encode(encoding='utf-8', errors='strict')\\n\"\n'\\n'\n' Return an encoded version of the string as a bytes '\n'object. Default\\n'\n' encoding is \"\\'utf-8\\'\". *errors* may be given to set a '\n'different\\n'\n' error handling scheme. 
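A hedged sketch of the `__match_args__` mechanism just described; the `Point` class and its values are hypothetical, and the example requires Python 3.10 or later.

```python
class Point:
    __match_args__ = ("x", "y")   # positional patterns map to these keywords

    def __init__(self, x, y):
        self.x = x
        self.y = y


match Point(1, 2):
    case Point(x, y):      # equivalent to: case Point(x=x, y=y)
        print(x, y)        # -> 1 2
```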
The default for *errors* is '\n'\"\\'strict\\'\",\\n'\n' meaning that encoding errors raise a \"UnicodeError\". '\n'Other possible\\n'\n' values are \"\\'ignore\\'\", \"\\'replace\\'\", '\n'\"\\'xmlcharrefreplace\\'\",\\n'\n' \"\\'backslashreplace\\'\" and any other name registered '\n'via\\n'\n' \"codecs.register_error()\", see section Error Handlers. '\n'For a list\\n'\n' of possible encodings, see section Standard Encodings.\\n'\n'\\n'\n' By default, the *errors* argument is not checked for '\n'best\\n'\n' performances, but only used at the first encoding '\n'error. Enable the\\n'\n' Python Development Mode, or use a debug build to check '\n'*errors*.\\n'\n'\\n'\n' Changed in version 3.1: Support for keyword arguments '\n'added.\\n'\n'\\n'\n' Changed in version 3.9: The *errors* is now checked in '\n'development\\n'\n' mode and in debug mode.\\n'\n'\\n'\n'str.endswith(suffix[, start[, end]])\\n'\n'\\n'\n' Return \"True\" if the string ends with the specified '\n'*suffix*,\\n'\n' otherwise return \"False\". *suffix* can also be a tuple '\n'of suffixes\\n'\n' to look for. With optional *start*, test beginning at '\n'that\\n'\n' position. With optional *end*, stop comparing at that '\n'position.\\n'\n'\\n'\n'str.expandtabs(tabsize=8)\\n'\n'\\n'\n' Return a copy of the string where all tab characters '\n'are replaced\\n'\n' by one or more spaces, depending on the current column '\n'and the\\n'\n' given tab size. Tab positions occur every *tabsize* '\n'characters\\n'\n' (default is 8, giving tab positions at columns 0, 8, 16 '\n'and so on).\\n'\n' To expand the string, the current column is set to zero '\n'and the\\n'\n' string is examined character by character. If the '\n'character is a\\n'\n' tab (\"\\\\t\"), one or more space characters are inserted '\n'in the result\\n'\n' until the current column is equal to the next tab '\n'position. (The\\n'\n' tab character itself is not copied.) If the character '\n'is a newline\\n'\n' (\"\\\\n\") or return (\"\\\\r\"), it is copied and the current '\n'column is\\n'\n' reset to zero. Any other character is copied unchanged '\n'and the\\n'\n' current column is incremented by one regardless of how '\n'the\\n'\n' character is represented when printed.\\n'\n'\\n'\n\" >>> '01\\\\t012\\\\t0123\\\\t01234'.expandtabs()\\n\"\n\" '01 012 0123 01234'\\n\"\n\" >>> '01\\\\t012\\\\t0123\\\\t01234'.expandtabs(4)\\n\"\n\" '01 012 0123 01234'\\n\"\n'\\n'\n'str.find(sub[, start[, end]])\\n'\n'\\n'\n' Return the lowest index in the string where substring '\n'*sub* is\\n'\n' found within the slice \"s[start:end]\". Optional '\n'arguments *start*\\n'\n' and *end* are interpreted as in slice notation. Return '\n'\"-1\" if\\n'\n' *sub* is not found.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' The \"find()\" method should be used only if you need '\n'to know the\\n'\n' position of *sub*. To check if *sub* is a substring '\n'or not, use\\n'\n' the \"in\" operator:\\n'\n'\\n'\n\" >>> 'Py' in 'Python'\\n\"\n' True\\n'\n'\\n'\n'str.format(*args, **kwargs)\\n'\n'\\n'\n' Perform a string formatting operation. The string on '\n'which this\\n'\n' method is called can contain literal text or '\n'replacement fields\\n'\n' delimited by braces \"{}\". Each replacement field '\n'contains either\\n'\n' the numeric index of a positional argument, or the name '\n'of a\\n'\n' keyword argument. 
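A small assumed example exercising `str.endswith()` with a tuple of suffixes, `str.find()` returning -1, and `str.expandtabs()` with a custom tab size, as described above; the input strings are illustrative only.

```python
name = "report.tar.gz"
print(name.endswith((".zip", ".gz")))   # True: any suffix in the tuple matches
print(name.find("csv"))                 # -1: substring absent, no exception raised
print("01\t012\t0123".expandtabs(4))    # '01  012 0123': tab stops every 4 columns
```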
Returns a copy of the string where '\n'each\\n'\n' replacement field is replaced with the string value of '\n'the\\n'\n' corresponding argument.\\n'\n'\\n'\n' >>> \"The sum of 1 + 2 is {0}\".format(1+2)\\n'\n\" 'The sum of 1 + 2 is 3'\\n\"\n'\\n'\n' See Format String Syntax for a description of the '\n'various\\n'\n' formatting options that can be specified in format '\n'strings.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' When formatting a number (\"int\", \"float\", \"complex\",\\n'\n' \"decimal.Decimal\" and subclasses) with the \"n\" type '\n'(ex:\\n'\n' \"\\'{:n}\\'.format(1234)\"), the function temporarily '\n'sets the\\n'\n' \"LC_CTYPE\" locale to the \"LC_NUMERIC\" locale to '\n'decode\\n'\n' \"decimal_point\" and \"thousands_sep\" fields of '\n'\"localeconv()\" if\\n'\n' they are non-ASCII or longer than 1 byte, and the '\n'\"LC_NUMERIC\"\\n'\n' locale is different than the \"LC_CTYPE\" locale. This '\n'temporary\\n'\n' change affects other threads.\\n'\n'\\n'\n' Changed in version 3.7: When formatting a number with '\n'the \"n\" type,\\n'\n' the function sets temporarily the \"LC_CTYPE\" locale to '\n'the\\n'\n' \"LC_NUMERIC\" locale in some cases.\\n'\n'\\n'\n'str.format_map(mapping)\\n'\n'\\n'\n' Similar to \"str.format(**mapping)\", except that '\n'\"mapping\" is used\\n'\n' directly and not copied to a \"dict\". This is useful if '\n'for example\\n'\n' \"mapping\" is a dict subclass:\\n'\n'\\n'\n' >>> class Default(dict):\\n'\n' ... def __missing__(self, key):\\n'\n' ... return key\\n'\n' ...\\n'\n\" >>> '{name} was born in \"\n\"{country}'.format_map(Default(name='Guido'))\\n\"\n\" 'Guido was born in country'\\n\"\n'\\n'\n' New in version 3.2.\\n'\n'\\n'\n'str.index(sub[, start[, end]])\\n'\n'\\n'\n' Like \"find()\", but raise \"ValueError\" when the '\n'substring is not\\n'\n' found.\\n'\n'\\n'\n'str.isalnum()\\n'\n'\\n'\n' Return \"True\" if all characters in the string are '\n'alphanumeric and\\n'\n' there is at least one character, \"False\" otherwise. A '\n'character\\n'\n' \"c\" is alphanumeric if one of the following returns '\n'\"True\":\\n'\n' \"c.isalpha()\", \"c.isdecimal()\", \"c.isdigit()\", or '\n'\"c.isnumeric()\".\\n'\n'\\n'\n'str.isalpha()\\n'\n'\\n'\n' Return \"True\" if all characters in the string are '\n'alphabetic and\\n'\n' there is at least one character, \"False\" otherwise. '\n'Alphabetic\\n'\n' characters are those characters defined in the Unicode '\n'character\\n'\n' database as \u201cLetter\u201d, i.e., those with general category '\n'property\\n'\n' being one of \u201cLm\u201d, \u201cLt\u201d, \u201cLu\u201d, \u201cLl\u201d, or \u201cLo\u201d. Note '\n'that this is\\n'\n' different from the \u201cAlphabetic\u201d property defined in the '\n'Unicode\\n'\n' Standard.\\n'\n'\\n'\n'str.isascii()\\n'\n'\\n'\n' Return \"True\" if the string is empty or all characters '\n'in the\\n'\n' string are ASCII, \"False\" otherwise. ASCII characters '\n'have code\\n'\n' points in the range U+0000-U+007F.\\n'\n'\\n'\n' New in version 3.7.\\n'\n'\\n'\n'str.isdecimal()\\n'\n'\\n'\n' Return \"True\" if all characters in the string are '\n'decimal\\n'\n' characters and there is at least one character, \"False\" '\n'otherwise.\\n'\n' Decimal characters are those that can be used to form '\n'numbers in\\n'\n' base 10, e.g. U+0660, ARABIC-INDIC DIGIT ZERO. 
'\n'Formally a decimal\\n'\n' character is a character in the Unicode General '\n'Category \u201cNd\u201d.\\n'\n'\\n'\n'str.isdigit()\\n'\n'\\n'\n' Return \"True\" if all characters in the string are '\n'digits and there\\n'\n' is at least one character, \"False\" otherwise. Digits '\n'include\\n'\n' decimal characters and digits that need special '\n'handling, such as\\n'\n' the compatibility superscript digits. This covers '\n'digits which\\n'\n' cannot be used to form numbers in base 10, like the '\n'Kharosthi\\n'\n' numbers. Formally, a digit is a character that has the '\n'property\\n'\n' value Numeric_Type=Digit or Numeric_Type=Decimal.\\n'\n'\\n'\n'str.isidentifier()\\n'\n'\\n'\n' Return \"True\" if the string is a valid identifier '\n'according to the\\n'\n' language definition, section Identifiers and keywords.\\n'\n'\\n'\n' Call \"keyword.iskeyword()\" to test whether string \"s\" '\n'is a reserved\\n'\n' identifier, such as \"def\" and \"class\".\\n'\n'\\n'\n' Example:\\n'\n'\\n'\n' >>> from keyword import iskeyword\\n'\n'\\n'\n\" >>> 'hello'.isidentifier(), iskeyword('hello')\\n\"\n' True, False\\n'\n\" >>> 'def'.isidentifier(), iskeyword('def')\\n\"\n' True, True\\n'\n'\\n'\n'str.islower()\\n'\n'\\n'\n' Return \"True\" if all cased characters [4] in the string '\n'are\\n'\n' lowercase and there is at least one cased character, '\n'\"False\"\\n'\n' otherwise.\\n'\n'\\n'\n'str.isnumeric()\\n'\n'\\n'\n' Return \"True\" if all characters in the string are '\n'numeric\\n'\n' characters, and there is at least one character, '\n'\"False\" otherwise.\\n'\n' Numeric characters include digit characters, and all '\n'characters\\n'\n' that have the Unicode numeric value property, e.g. '\n'U+2155, VULGAR\\n'\n' FRACTION ONE FIFTH. Formally, numeric characters are '\n'those with\\n'\n' the property value Numeric_Type=Digit, '\n'Numeric_Type=Decimal or\\n'\n' Numeric_Type=Numeric.\\n'\n'\\n'\n'str.isprintable()\\n'\n'\\n'\n' Return \"True\" if all characters in the string are '\n'printable or the\\n'\n' string is empty, \"False\" otherwise. Nonprintable '\n'characters are\\n'\n' those characters defined in the Unicode character '\n'database as\\n'\n' \u201cOther\u201d or \u201cSeparator\u201d, excepting the ASCII space '\n'(0x20) which is\\n'\n' considered printable. (Note that printable characters '\n'in this\\n'\n' context are those which should not be escaped when '\n'\"repr()\" is\\n'\n' invoked on a string. 
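Assumed inputs illustrating the classification methods covered above (`isalnum`, `isalpha`, `isdigit`, `isnumeric`, `isdecimal`, `isidentifier`); the specific characters are examples, not taken from the vendored text.

```python
print("abc123".isalnum())      # True
print("abc123".isalpha())      # False: digits are not alphabetic
print("½".isdigit())           # False: numeric, but not a digit character
print("½".isnumeric())         # True: has a Unicode numeric value
print("٠".isdecimal())         # True: U+0660 ARABIC-INDIC DIGIT ZERO
print("class".isidentifier())  # True: syntactically a valid identifier, though a keyword
```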
It has no bearing on the handling '\n'of strings\\n'\n' written to \"sys.stdout\" or \"sys.stderr\".)\\n'\n'\\n'\n'str.isspace()\\n'\n'\\n'\n' Return \"True\" if there are only whitespace characters '\n'in the string\\n'\n' and there is at least one character, \"False\" '\n'otherwise.\\n'\n'\\n'\n' A character is *whitespace* if in the Unicode character '\n'database\\n'\n' (see \"unicodedata\"), either its general category is '\n'\"Zs\"\\n'\n' (\u201cSeparator, space\u201d), or its bidirectional class is one '\n'of \"WS\",\\n'\n' \"B\", or \"S\".\\n'\n'\\n'\n'str.istitle()\\n'\n'\\n'\n' Return \"True\" if the string is a titlecased string and '\n'there is at\\n'\n' least one character, for example uppercase characters '\n'may only\\n'\n' follow uncased characters and lowercase characters only '\n'cased ones.\\n'\n' Return \"False\" otherwise.\\n'\n'\\n'\n'str.isupper()\\n'\n'\\n'\n' Return \"True\" if all cased characters [4] in the string '\n'are\\n'\n' uppercase and there is at least one cased character, '\n'\"False\"\\n'\n' otherwise.\\n'\n'\\n'\n\" >>> 'BANANA'.isupper()\\n\"\n' True\\n'\n\" >>> 'banana'.isupper()\\n\"\n' False\\n'\n\" >>> 'baNana'.isupper()\\n\"\n' False\\n'\n\" >>> ' '.isupper()\\n\"\n' False\\n'\n'\\n'\n'str.join(iterable)\\n'\n'\\n'\n' Return a string which is the concatenation of the '\n'strings in\\n'\n' *iterable*. A \"TypeError\" will be raised if there are '\n'any non-\\n'\n' string values in *iterable*, including \"bytes\" '\n'objects. The\\n'\n' separator between elements is the string providing this '\n'method.\\n'\n'\\n'\n'str.ljust(width[, fillchar])\\n'\n'\\n'\n' Return the string left justified in a string of length '\n'*width*.\\n'\n' Padding is done using the specified *fillchar* (default '\n'is an ASCII\\n'\n' space). The original string is returned if *width* is '\n'less than or\\n'\n' equal to \"len(s)\".\\n'\n'\\n'\n'str.lower()\\n'\n'\\n'\n' Return a copy of the string with all the cased '\n'characters [4]\\n'\n' converted to lowercase.\\n'\n'\\n'\n' The lowercasing algorithm used is described in section '\n'3.13 of the\\n'\n' Unicode Standard.\\n'\n'\\n'\n'str.lstrip([chars])\\n'\n'\\n'\n' Return a copy of the string with leading characters '\n'removed. The\\n'\n' *chars* argument is a string specifying the set of '\n'characters to be\\n'\n' removed. If omitted or \"None\", the *chars* argument '\n'defaults to\\n'\n' removing whitespace. The *chars* argument is not a '\n'prefix; rather,\\n'\n' all combinations of its values are stripped:\\n'\n'\\n'\n\" >>> ' spacious '.lstrip()\\n\"\n\" 'spacious '\\n\"\n\" >>> 'www.example.com'.lstrip('cmowz.')\\n\"\n\" 'example.com'\\n\"\n'\\n'\n' See \"str.removeprefix()\" for a method that will remove '\n'a single\\n'\n' prefix string rather than all of a set of characters. 
'\n'For example:\\n'\n'\\n'\n\" >>> 'Arthur: three!'.lstrip('Arthur: ')\\n\"\n\" 'ee!'\\n\"\n\" >>> 'Arthur: three!'.removeprefix('Arthur: ')\\n\"\n\" 'three!'\\n\"\n'\\n'\n'static str.maketrans(x[, y[, z]])\\n'\n'\\n'\n' This static method returns a translation table usable '\n'for\\n'\n' \"str.translate()\".\\n'\n'\\n'\n' If there is only one argument, it must be a dictionary '\n'mapping\\n'\n' Unicode ordinals (integers) or characters (strings of '\n'length 1) to\\n'\n' Unicode ordinals, strings (of arbitrary lengths) or '\n'\"None\".\\n'\n' Character keys will then be converted to ordinals.\\n'\n'\\n'\n' If there are two arguments, they must be strings of '\n'equal length,\\n'\n' and in the resulting dictionary, each character in x '\n'will be mapped\\n'\n' to the character at the same position in y. If there '\n'is a third\\n'\n' argument, it must be a string, whose characters will be '\n'mapped to\\n'\n' \"None\" in the result.\\n'\n'\\n'\n'str.partition(sep)\\n'\n'\\n'\n' Split the string at the first occurrence of *sep*, and '\n'return a\\n'\n' 3-tuple containing the part before the separator, the '\n'separator\\n'\n' itself, and the part after the separator. If the '\n'separator is not\\n'\n' found, return a 3-tuple containing the string itself, '\n'followed by\\n'\n' two empty strings.\\n'\n'\\n'\n'str.removeprefix(prefix, /)\\n'\n'\\n'\n' If the string starts with the *prefix* string, return\\n'\n' \"string[len(prefix):]\". Otherwise, return a copy of the '\n'original\\n'\n' string:\\n'\n'\\n'\n\" >>> 'TestHook'.removeprefix('Test')\\n\"\n\" 'Hook'\\n\"\n\" >>> 'BaseTestCase'.removeprefix('Test')\\n\"\n\" 'BaseTestCase'\\n\"\n'\\n'\n' New in version 3.9.\\n'\n'\\n'\n'str.removesuffix(suffix, /)\\n'\n'\\n'\n' If the string ends with the *suffix* string and that '\n'*suffix* is\\n'\n' not empty, return \"string[:-len(suffix)]\". Otherwise, '\n'return a copy\\n'\n' of the original string:\\n'\n'\\n'\n\" >>> 'MiscTests'.removesuffix('Tests')\\n\"\n\" 'Misc'\\n\"\n\" >>> 'TmpDirMixin'.removesuffix('Tests')\\n\"\n\" 'TmpDirMixin'\\n\"\n'\\n'\n' New in version 3.9.\\n'\n'\\n'\n'str.replace(old, new[, count])\\n'\n'\\n'\n' Return a copy of the string with all occurrences of '\n'substring *old*\\n'\n' replaced by *new*. If the optional argument *count* is '\n'given, only\\n'\n' the first *count* occurrences are replaced.\\n'\n'\\n'\n'str.rfind(sub[, start[, end]])\\n'\n'\\n'\n' Return the highest index in the string where substring '\n'*sub* is\\n'\n' found, such that *sub* is contained within '\n'\"s[start:end]\".\\n'\n' Optional arguments *start* and *end* are interpreted as '\n'in slice\\n'\n' notation. Return \"-1\" on failure.\\n'\n'\\n'\n'str.rindex(sub[, start[, end]])\\n'\n'\\n'\n' Like \"rfind()\" but raises \"ValueError\" when the '\n'substring *sub* is\\n'\n' not found.\\n'\n'\\n'\n'str.rjust(width[, fillchar])\\n'\n'\\n'\n' Return the string right justified in a string of length '\n'*width*.\\n'\n' Padding is done using the specified *fillchar* (default '\n'is an ASCII\\n'\n' space). The original string is returned if *width* is '\n'less than or\\n'\n' equal to \"len(s)\".\\n'\n'\\n'\n'str.rpartition(sep)\\n'\n'\\n'\n' Split the string at the last occurrence of *sep*, and '\n'return a\\n'\n' 3-tuple containing the part before the separator, the '\n'separator\\n'\n' itself, and the part after the separator. 
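An illustrative sketch (assumed strings) of `str.partition()` and the Python 3.9+ `removeprefix()`, contrasted with the character-set behaviour of `lstrip()` noted above.

```python
head, sep, tail = "key=value=extra".partition("=")
print(head, sep, tail)                            # key = value=extra (split at first '=')

print("TestHook".removeprefix("Test"))            # 'Hook'
print("Arthur: three!".lstrip("Arthur: "))        # 'ee!'    (strips a *set* of characters)
print("Arthur: three!".removeprefix("Arthur: "))  # 'three!' (strips one prefix string)
```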
If the '\n'separator is not\\n'\n' found, return a 3-tuple containing two empty strings, '\n'followed by\\n'\n' the string itself.\\n'\n'\\n'\n'str.rsplit(sep=None, maxsplit=- 1)\\n'\n'\\n'\n' Return a list of the words in the string, using *sep* '\n'as the\\n'\n' delimiter string. If *maxsplit* is given, at most '\n'*maxsplit* splits\\n'\n' are done, the *rightmost* ones. If *sep* is not '\n'specified or\\n'\n' \"None\", any whitespace string is a separator. Except '\n'for splitting\\n'\n' from the right, \"rsplit()\" behaves like \"split()\" which '\n'is\\n'\n' described in detail below.\\n'\n'\\n'\n'str.rstrip([chars])\\n'\n'\\n'\n' Return a copy of the string with trailing characters '\n'removed. The\\n'\n' *chars* argument is a string specifying the set of '\n'characters to be\\n'\n' removed. If omitted or \"None\", the *chars* argument '\n'defaults to\\n'\n' removing whitespace. The *chars* argument is not a '\n'suffix; rather,\\n'\n' all combinations of its values are stripped:\\n'\n'\\n'\n\" >>> ' spacious '.rstrip()\\n\"\n\" ' spacious'\\n\"\n\" >>> 'mississippi'.rstrip('ipz')\\n\"\n\" 'mississ'\\n\"\n'\\n'\n' See \"str.removesuffix()\" for a method that will remove '\n'a single\\n'\n' suffix string rather than all of a set of characters. '\n'For example:\\n'\n'\\n'\n\" >>> 'Monty Python'.rstrip(' Python')\\n\"\n\" 'M'\\n\"\n\" >>> 'Monty Python'.removesuffix(' Python')\\n\"\n\" 'Monty'\\n\"\n'\\n'\n'str.split(sep=None, maxsplit=- 1)\\n'\n'\\n'\n' Return a list of the words in the string, using *sep* '\n'as the\\n'\n' delimiter string. If *maxsplit* is given, at most '\n'*maxsplit*\\n'\n' splits are done (thus, the list will have at most '\n'\"maxsplit+1\"\\n'\n' elements). If *maxsplit* is not specified or \"-1\", '\n'then there is\\n'\n' no limit on the number of splits (all possible splits '\n'are made).\\n'\n'\\n'\n' If *sep* is given, consecutive delimiters are not '\n'grouped together\\n'\n' and are deemed to delimit empty strings (for example,\\n'\n' \"\\'1,,2\\'.split(\\',\\')\" returns \"[\\'1\\', \\'\\', '\n'\\'2\\']\"). The *sep* argument\\n'\n' may consist of multiple characters (for example,\\n'\n' \"\\'1<>2<>3\\'.split(\\'<>\\')\" returns \"[\\'1\\', \\'2\\', '\n'\\'3\\']\"). Splitting an\\n'\n' empty string with a specified separator returns '\n'\"[\\'\\']\".\\n'\n'\\n'\n' For example:\\n'\n'\\n'\n\" >>> '1,2,3'.split(',')\\n\"\n\" ['1', '2', '3']\\n\"\n\" >>> '1,2,3'.split(',', maxsplit=1)\\n\"\n\" ['1', '2,3']\\n\"\n\" >>> '1,2,,3,'.split(',')\\n\"\n\" ['1', '2', '', '3', '']\\n\"\n'\\n'\n' If *sep* is not specified or is \"None\", a different '\n'splitting\\n'\n' algorithm is applied: runs of consecutive whitespace '\n'are regarded\\n'\n' as a single separator, and the result will contain no '\n'empty strings\\n'\n' at the start or end if the string has leading or '\n'trailing\\n'\n' whitespace. Consequently, splitting an empty string or '\n'a string\\n'\n' consisting of just whitespace with a \"None\" separator '\n'returns \"[]\".\\n'\n'\\n'\n' For example:\\n'\n'\\n'\n\" >>> '1 2 3'.split()\\n\"\n\" ['1', '2', '3']\\n\"\n\" >>> '1 2 3'.split(maxsplit=1)\\n\"\n\" ['1', '2 3']\\n\"\n\" >>> ' 1 2 3 '.split()\\n\"\n\" ['1', '2', '3']\\n\"\n'\\n'\n'str.splitlines([keepends])\\n'\n'\\n'\n' Return a list of the lines in the string, breaking at '\n'line\\n'\n' boundaries. Line breaks are not included in the '\n'resulting list\\n'\n' unless *keepends* is given and true.\\n'\n'\\n'\n' This method splits on the following line boundaries. 
'\n'In\\n'\n' particular, the boundaries are a superset of *universal '\n'newlines*.\\n'\n'\\n'\n' '\n'+-------------------------+-------------------------------+\\n'\n' | Representation | '\n'Description |\\n'\n' '\n'|=========================|===============================|\\n'\n' | \"\\\\n\" | Line '\n'Feed |\\n'\n' '\n'+-------------------------+-------------------------------+\\n'\n' | \"\\\\r\" | Carriage '\n'Return |\\n'\n' '\n'+-------------------------+-------------------------------+\\n'\n' | \"\\\\r\\\\n\" | Carriage Return + Line '\n'Feed |\\n'\n' '\n'+-------------------------+-------------------------------+\\n'\n' | \"\\\\v\" or \"\\\\x0b\" | Line '\n'Tabulation |\\n'\n' '\n'+-------------------------+-------------------------------+\\n'\n' | \"\\\\f\" or \"\\\\x0c\" | Form '\n'Feed |\\n'\n' '\n'+-------------------------+-------------------------------+\\n'\n' | \"\\\\x1c\" | File '\n'Separator |\\n'\n' '\n'+-------------------------+-------------------------------+\\n'\n' | \"\\\\x1d\" | Group '\n'Separator |\\n'\n' '\n'+-------------------------+-------------------------------+\\n'\n' | \"\\\\x1e\" | Record '\n'Separator |\\n'\n' '\n'+-------------------------+-------------------------------+\\n'\n' | \"\\\\x85\" | Next Line (C1 Control '\n'Code) |\\n'\n' '\n'+-------------------------+-------------------------------+\\n'\n' | \"\\\\u2028\" | Line '\n'Separator |\\n'\n' '\n'+-------------------------+-------------------------------+\\n'\n' | \"\\\\u2029\" | Paragraph '\n'Separator |\\n'\n' '\n'+-------------------------+-------------------------------+\\n'\n'\\n'\n' Changed in version 3.2: \"\\\\v\" and \"\\\\f\" added to list '\n'of line\\n'\n' boundaries.\\n'\n'\\n'\n' For example:\\n'\n'\\n'\n\" >>> 'ab c\\\\n\\\\nde fg\\\\rkl\\\\r\\\\n'.splitlines()\\n\"\n\" ['ab c', '', 'de fg', 'kl']\\n\"\n\" >>> 'ab c\\\\n\\\\nde \"\n\"fg\\\\rkl\\\\r\\\\n'.splitlines(keepends=True)\\n\"\n\" ['ab c\\\\n', '\\\\n', 'de fg\\\\r', 'kl\\\\r\\\\n']\\n\"\n'\\n'\n' Unlike \"split()\" when a delimiter string *sep* is '\n'given, this\\n'\n' method returns an empty list for the empty string, and '\n'a terminal\\n'\n' line break does not result in an extra line:\\n'\n'\\n'\n' >>> \"\".splitlines()\\n'\n' []\\n'\n' >>> \"One line\\\\n\".splitlines()\\n'\n\" ['One line']\\n\"\n'\\n'\n' For comparison, \"split(\\'\\\\n\\')\" gives:\\n'\n'\\n'\n\" >>> ''.split('\\\\n')\\n\"\n\" ['']\\n\"\n\" >>> 'Two lines\\\\n'.split('\\\\n')\\n\"\n\" ['Two lines', '']\\n\"\n'\\n'\n'str.startswith(prefix[, start[, end]])\\n'\n'\\n'\n' Return \"True\" if string starts with the *prefix*, '\n'otherwise return\\n'\n' \"False\". *prefix* can also be a tuple of prefixes to '\n'look for.\\n'\n' With optional *start*, test string beginning at that '\n'position.\\n'\n' With optional *end*, stop comparing string at that '\n'position.\\n'\n'\\n'\n'str.strip([chars])\\n'\n'\\n'\n' Return a copy of the string with the leading and '\n'trailing\\n'\n' characters removed. The *chars* argument is a string '\n'specifying the\\n'\n' set of characters to be removed. If omitted or \"None\", '\n'the *chars*\\n'\n' argument defaults to removing whitespace. The *chars* '\n'argument is\\n'\n' not a prefix or suffix; rather, all combinations of its '\n'values are\\n'\n' stripped:\\n'\n'\\n'\n\" >>> ' spacious '.strip()\\n\"\n\" 'spacious'\\n\"\n\" >>> 'www.example.com'.strip('cmowz.')\\n\"\n\" 'example'\\n\"\n'\\n'\n' The outermost leading and trailing *chars* argument '\n'values are\\n'\n' stripped from the string. 
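Assumed examples contrasting `split()` with and without an explicit separator, and `splitlines()`, per the behaviour documented above.

```python
print("1,,2".split(","))          # ['1', '', '2']   explicit sep keeps empty fields
print("  1  2  3  ".split())      # ['1', '2', '3']  whitespace runs collapse, no empties
print("".splitlines())            # []               empty string -> empty list
print("one\ntwo\n".splitlines())  # ['one', 'two']   no extra item for the final newline
print("one\ntwo\n".split("\n"))   # ['one', 'two', ''] for comparison
```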
Characters are removed from '\n'the leading\\n'\n' end until reaching a string character that is not '\n'contained in the\\n'\n' set of characters in *chars*. A similar action takes '\n'place on the\\n'\n' trailing end. For example:\\n'\n'\\n'\n\" >>> comment_string = '#....... Section 3.2.1 Issue \"\n\"#32 .......'\\n\"\n\" >>> comment_string.strip('.#! ')\\n\"\n\" 'Section 3.2.1 Issue #32'\\n\"\n'\\n'\n'str.swapcase()\\n'\n'\\n'\n' Return a copy of the string with uppercase characters '\n'converted to\\n'\n' lowercase and vice versa. Note that it is not '\n'necessarily true that\\n'\n' \"s.swapcase().swapcase() == s\".\\n'\n'\\n'\n'str.title()\\n'\n'\\n'\n' Return a titlecased version of the string where words '\n'start with an\\n'\n' uppercase character and the remaining characters are '\n'lowercase.\\n'\n'\\n'\n' For example:\\n'\n'\\n'\n\" >>> 'Hello world'.title()\\n\"\n\" 'Hello World'\\n\"\n'\\n'\n' The algorithm uses a simple language-independent '\n'definition of a\\n'\n' word as groups of consecutive letters. The definition '\n'works in\\n'\n' many contexts but it means that apostrophes in '\n'contractions and\\n'\n' possessives form word boundaries, which may not be the '\n'desired\\n'\n' result:\\n'\n'\\n'\n' >>> \"they\\'re bill\\'s friends from the UK\".title()\\n'\n' \"They\\'Re Bill\\'S Friends From The Uk\"\\n'\n'\\n'\n' A workaround for apostrophes can be constructed using '\n'regular\\n'\n' expressions:\\n'\n'\\n'\n' >>> import re\\n'\n' >>> def titlecase(s):\\n'\n' ... return re.sub(r\"[A-Za-z]+(\\'[A-Za-z]+)?\",\\n'\n' ... lambda mo: '\n'mo.group(0).capitalize(),\\n'\n' ... s)\\n'\n' ...\\n'\n' >>> titlecase(\"they\\'re bill\\'s friends.\")\\n'\n' \"They\\'re Bill\\'s Friends.\"\\n'\n'\\n'\n'str.translate(table)\\n'\n'\\n'\n' Return a copy of the string in which each character has '\n'been mapped\\n'\n' through the given translation table. The table must be '\n'an object\\n'\n' that implements indexing via \"__getitem__()\", typically '\n'a *mapping*\\n'\n' or *sequence*. When indexed by a Unicode ordinal (an '\n'integer), the\\n'\n' table object can do any of the following: return a '\n'Unicode ordinal\\n'\n' or a string, to map the character to one or more other '\n'characters;\\n'\n' return \"None\", to delete the character from the return '\n'string; or\\n'\n' raise a \"LookupError\" exception, to map the character '\n'to itself.\\n'\n'\\n'\n' You can use \"str.maketrans()\" to create a translation '\n'map from\\n'\n' character-to-character mappings in different formats.\\n'\n'\\n'\n' See also the \"codecs\" module for a more flexible '\n'approach to custom\\n'\n' character mappings.\\n'\n'\\n'\n'str.upper()\\n'\n'\\n'\n' Return a copy of the string with all the cased '\n'characters [4]\\n'\n' converted to uppercase. Note that '\n'\"s.upper().isupper()\" might be\\n'\n' \"False\" if \"s\" contains uncased characters or if the '\n'Unicode\\n'\n' category of the resulting character(s) is not \u201cLu\u201d '\n'(Letter,\\n'\n' uppercase), but e.g. \u201cLt\u201d (Letter, titlecase).\\n'\n'\\n'\n' The uppercasing algorithm used is described in section '\n'3.13 of the\\n'\n' Unicode Standard.\\n'\n'\\n'\n'str.zfill(width)\\n'\n'\\n'\n' Return a copy of the string left filled with ASCII '\n'\"\\'0\\'\" digits to\\n'\n' make a string of length *width*. A leading sign prefix\\n'\n' (\"\\'+\\'\"/\"\\'-\\'\") is handled by inserting the padding '\n'*after* the sign\\n'\n' character rather than before. 
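A short sketch (assumed mapping and strings) combining `str.maketrans()` with `str.translate()`, plus `swapcase()` and `zfill()` from the entries above.

```python
table = str.maketrans("abc", "xyz", "!")   # map a->x, b->y, c->z; delete '!'
print("cab!".translate(table))             # 'zxy'
print("Hello".swapcase())                  # 'hELLO'
print("-42".zfill(6))                      # '-00042': padding is inserted after the sign
```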
The original string is '\n'returned if\\n'\n' *width* is less than or equal to \"len(s)\".\\n'\n'\\n'\n' For example:\\n'\n'\\n'\n' >>> \"42\".zfill(5)\\n'\n\" '00042'\\n\"\n' >>> \"-42\".zfill(5)\\n'\n\" '-0042'\\n\",\n'strings':'String and Bytes literals\\n'\n'*************************\\n'\n'\\n'\n'String literals are described by the following lexical '\n'definitions:\\n'\n'\\n'\n' stringliteral ::= [stringprefix](shortstring | longstring)\\n'\n' stringprefix ::= \"r\" | \"u\" | \"R\" | \"U\" | \"f\" | \"F\"\\n'\n' | \"fr\" | \"Fr\" | \"fR\" | \"FR\" | \"rf\" | \"rF\" | '\n'\"Rf\" | \"RF\"\\n'\n' shortstring ::= \"\\'\" shortstringitem* \"\\'\" | \\'\"\\' '\n'shortstringitem* \\'\"\\'\\n'\n' longstring ::= \"\\'\\'\\'\" longstringitem* \"\\'\\'\\'\" | '\n'\\'\"\"\"\\' longstringitem* \\'\"\"\"\\'\\n'\n' shortstringitem ::= shortstringchar | stringescapeseq\\n'\n' longstringitem ::= longstringchar | stringescapeseq\\n'\n' shortstringchar ::= \\n'\n' longstringchar ::= \\n'\n' stringescapeseq ::= \"\\\\\" \\n'\n'\\n'\n' bytesliteral ::= bytesprefix(shortbytes | longbytes)\\n'\n' bytesprefix ::= \"b\" | \"B\" | \"br\" | \"Br\" | \"bR\" | \"BR\" | '\n'\"rb\" | \"rB\" | \"Rb\" | \"RB\"\\n'\n' shortbytes ::= \"\\'\" shortbytesitem* \"\\'\" | \\'\"\\' '\n'shortbytesitem* \\'\"\\'\\n'\n' longbytes ::= \"\\'\\'\\'\" longbytesitem* \"\\'\\'\\'\" | \\'\"\"\"\\' '\n'longbytesitem* \\'\"\"\"\\'\\n'\n' shortbytesitem ::= shortbyteschar | bytesescapeseq\\n'\n' longbytesitem ::= longbyteschar | bytesescapeseq\\n'\n' shortbyteschar ::= \\n'\n' longbyteschar ::= \\n'\n' bytesescapeseq ::= \"\\\\\" \\n'\n'\\n'\n'One syntactic restriction not indicated by these productions is '\n'that\\n'\n'whitespace is not allowed between the \"stringprefix\" or '\n'\"bytesprefix\"\\n'\n'and the rest of the literal. The source character set is defined '\n'by\\n'\n'the encoding declaration; it is UTF-8 if no encoding declaration '\n'is\\n'\n'given in the source file; see section Encoding declarations.\\n'\n'\\n'\n'In plain English: Both types of literals can be enclosed in '\n'matching\\n'\n'single quotes (\"\\'\") or double quotes (\"\"\"). They can also be '\n'enclosed\\n'\n'in matching groups of three single or double quotes (these are\\n'\n'generally referred to as *triple-quoted strings*). The '\n'backslash\\n'\n'(\"\\\\\") character is used to escape characters that otherwise have '\n'a\\n'\n'special meaning, such as newline, backslash itself, or the quote\\n'\n'character.\\n'\n'\\n'\n'Bytes literals are always prefixed with \"\\'b\\'\" or \"\\'B\\'\"; they '\n'produce\\n'\n'an instance of the \"bytes\" type instead of the \"str\" type. They '\n'may\\n'\n'only contain ASCII characters; bytes with a numeric value of 128 '\n'or\\n'\n'greater must be expressed with escapes.\\n'\n'\\n'\n'Both string and bytes literals may optionally be prefixed with a\\n'\n'letter \"\\'r\\'\" or \"\\'R\\'\"; such strings are called *raw strings* '\n'and treat\\n'\n'backslashes as literal characters. 
As a result, in string '\n'literals,\\n'\n'\"\\'\\\\U\\'\" and \"\\'\\\\u\\'\" escapes in raw strings are not treated '\n'specially.\\n'\n'Given that Python 2.x\u2019s raw unicode literals behave differently '\n'than\\n'\n'Python 3.x\u2019s the \"\\'ur\\'\" syntax is not supported.\\n'\n'\\n'\n'New in version 3.3: The \"\\'rb\\'\" prefix of raw bytes literals has '\n'been\\n'\n'added as a synonym of \"\\'br\\'\".\\n'\n'\\n'\n'New in version 3.3: Support for the unicode legacy literal\\n'\n'(\"u\\'value\\'\") was reintroduced to simplify the maintenance of '\n'dual\\n'\n'Python 2.x and 3.x codebases. See **PEP 414** for more '\n'information.\\n'\n'\\n'\n'A string literal with \"\\'f\\'\" or \"\\'F\\'\" in its prefix is a '\n'*formatted\\n'\n'string literal*; see Formatted string literals. The \"\\'f\\'\" may '\n'be\\n'\n'combined with \"\\'r\\'\", but not with \"\\'b\\'\" or \"\\'u\\'\", therefore '\n'raw\\n'\n'formatted strings are possible, but formatted bytes literals are '\n'not.\\n'\n'\\n'\n'In triple-quoted literals, unescaped newlines and quotes are '\n'allowed\\n'\n'(and are retained), except that three unescaped quotes in a row\\n'\n'terminate the literal. (A \u201cquote\u201d is the character used to open '\n'the\\n'\n'literal, i.e. either \"\\'\" or \"\"\".)\\n'\n'\\n'\n'Unless an \"\\'r\\'\" or \"\\'R\\'\" prefix is present, escape sequences '\n'in string\\n'\n'and bytes literals are interpreted according to rules similar to '\n'those\\n'\n'used by Standard C. The recognized escape sequences are:\\n'\n'\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| Escape Sequence | Meaning | Notes '\n'|\\n'\n'|===================|===================================|=========|\\n'\n'| \"\\\\newline\" | Backslash and newline ignored '\n'| |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\\\\\\" | Backslash (\"\\\\\") '\n'| |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\\\'\" | Single quote (\"\\'\") '\n'| |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\\"\" | Double quote (\"\"\") '\n'| |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\a\" | ASCII Bell (BEL) '\n'| |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\b\" | ASCII Backspace (BS) '\n'| |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\f\" | ASCII Formfeed (FF) '\n'| |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\n\" | ASCII Linefeed (LF) '\n'| |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\r\" | ASCII Carriage Return (CR) '\n'| |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\t\" | ASCII Horizontal Tab (TAB) '\n'| |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\v\" | ASCII Vertical Tab (VT) '\n'| |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\ooo\" | Character with octal value *ooo* | '\n'(1,3) |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\xhh\" | Character with hex value *hh* | '\n'(2,3) |\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'\\n'\n'Escape sequences only recognized in string literals 
are:\\n'\n'\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| Escape Sequence | Meaning | Notes '\n'|\\n'\n'|===================|===================================|=========|\\n'\n'| \"\\\\N{name}\" | Character named *name* in the | '\n'(4) |\\n'\n'| | Unicode database | '\n'|\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\uxxxx\" | Character with 16-bit hex value | '\n'(5) |\\n'\n'| | *xxxx* | '\n'|\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'| \"\\\\Uxxxxxxxx\" | Character with 32-bit hex value | '\n'(6) |\\n'\n'| | *xxxxxxxx* | '\n'|\\n'\n'+-------------------+-----------------------------------+---------+\\n'\n'\\n'\n'Notes:\\n'\n'\\n'\n'1. As in Standard C, up to three octal digits are accepted.\\n'\n'\\n'\n'2. Unlike in Standard C, exactly two hex digits are required.\\n'\n'\\n'\n'3. In a bytes literal, hexadecimal and octal escapes denote the '\n'byte\\n'\n' with the given value. In a string literal, these escapes '\n'denote a\\n'\n' Unicode character with the given value.\\n'\n'\\n'\n'4. Changed in version 3.3: Support for name aliases [1] has been\\n'\n' added.\\n'\n'\\n'\n'5. Exactly four hex digits are required.\\n'\n'\\n'\n'6. Any Unicode character can be encoded this way. Exactly eight '\n'hex\\n'\n' digits are required.\\n'\n'\\n'\n'Unlike Standard C, all unrecognized escape sequences are left in '\n'the\\n'\n'string unchanged, i.e., *the backslash is left in the result*. '\n'(This\\n'\n'behavior is useful when debugging: if an escape sequence is '\n'mistyped,\\n'\n'the resulting output is more easily recognized as broken.) It is '\n'also\\n'\n'important to note that the escape sequences only recognized in '\n'string\\n'\n'literals fall into the category of unrecognized escapes for '\n'bytes\\n'\n'literals.\\n'\n'\\n'\n' Changed in version 3.6: Unrecognized escape sequences produce '\n'a\\n'\n' \"DeprecationWarning\". In a future Python version they will be '\n'a\\n'\n' \"SyntaxWarning\" and eventually a \"SyntaxError\".\\n'\n'\\n'\n'Even in a raw literal, quotes can be escaped with a backslash, '\n'but the\\n'\n'backslash remains in the result; for example, \"r\"\\\\\"\"\" is a '\n'valid\\n'\n'string literal consisting of two characters: a backslash and a '\n'double\\n'\n'quote; \"r\"\\\\\"\" is not a valid string literal (even a raw string '\n'cannot\\n'\n'end in an odd number of backslashes). Specifically, *a raw '\n'literal\\n'\n'cannot end in a single backslash* (since the backslash would '\n'escape\\n'\n'the following quote character). Note also that a single '\n'backslash\\n'\n'followed by a newline is interpreted as those two characters as '\n'part\\n'\n'of the literal, *not* as a line continuation.\\n',\n'subscriptions':'Subscriptions\\n'\n'*************\\n'\n'\\n'\n'Subscription of a sequence (string, tuple or list) or '\n'mapping\\n'\n'(dictionary) object usually selects an item from the '\n'collection:\\n'\n'\\n'\n' subscription ::= primary \"[\" expression_list \"]\"\\n'\n'\\n'\n'The primary must evaluate to an object that supports '\n'subscription\\n'\n'(lists or dictionaries for example). 
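Assumed examples of the escape-sequence and raw-string rules described above; the exact strings are illustrative.

```python
print(len("\n"), len(r"\n"))   # 1 2   raw strings keep the backslash
print("\x41\u0041")            # AA    hex and 16-bit Unicode escapes
print(b"\x80")                 # b'\x80'  bytes >= 128 must be written as escapes
print("\d")                    # '\d'  unrecognized escape: the backslash stays (a warning is emitted)
```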
User-defined objects '\n'can support\\n'\n'subscription by defining a \"__getitem__()\" method.\\n'\n'\\n'\n'For built-in objects, there are two types of objects that '\n'support\\n'\n'subscription:\\n'\n'\\n'\n'If the primary is a mapping, the expression list must '\n'evaluate to an\\n'\n'object whose value is one of the keys of the mapping, and '\n'the\\n'\n'subscription selects the value in the mapping that '\n'corresponds to that\\n'\n'key. (The expression list is a tuple except if it has '\n'exactly one\\n'\n'item.)\\n'\n'\\n'\n'If the primary is a sequence, the expression list must '\n'evaluate to an\\n'\n'integer or a slice (as discussed in the following '\n'section).\\n'\n'\\n'\n'The formal syntax makes no special provision for negative '\n'indices in\\n'\n'sequences; however, built-in sequences all provide a '\n'\"__getitem__()\"\\n'\n'method that interprets negative indices by adding the '\n'length of the\\n'\n'sequence to the index (so that \"x[-1]\" selects the last '\n'item of \"x\").\\n'\n'The resulting value must be a nonnegative integer less than '\n'the number\\n'\n'of items in the sequence, and the subscription selects the '\n'item whose\\n'\n'index is that value (counting from zero). Since the support '\n'for\\n'\n'negative indices and slicing occurs in the object\u2019s '\n'\"__getitem__()\"\\n'\n'method, subclasses overriding this method will need to '\n'explicitly add\\n'\n'that support.\\n'\n'\\n'\n'A string\u2019s items are characters. A character is not a '\n'separate data\\n'\n'type but a string of exactly one character.\\n'\n'\\n'\n'Subscription of certain *classes* or *types* creates a '\n'generic alias.\\n'\n'In this case, user-defined classes can support subscription '\n'by\\n'\n'providing a \"__class_getitem__()\" classmethod.\\n',\n'truth':'Truth Value Testing\\n'\n'*******************\\n'\n'\\n'\n'Any object can be tested for truth value, for use in an \"if\" or\\n'\n'\"while\" condition or as operand of the Boolean operations below.\\n'\n'\\n'\n'By default, an object is considered true unless its class defines\\n'\n'either a \"__bool__()\" method that returns \"False\" or a \"__len__()\"\\n'\n'method that returns zero, when called with the object. [1] Here '\n'are\\n'\n'most of the built-in objects considered false:\\n'\n'\\n'\n'* constants defined to be false: \"None\" and \"False\".\\n'\n'\\n'\n'* zero of any numeric type: \"0\", \"0.0\", \"0j\", \"Decimal(0)\",\\n'\n' \"Fraction(0, 1)\"\\n'\n'\\n'\n'* empty sequences and collections: \"\\'\\'\", \"()\", \"[]\", \"{}\", '\n'\"set()\",\\n'\n' \"range(0)\"\\n'\n'\\n'\n'Operations and built-in functions that have a Boolean result '\n'always\\n'\n'return \"0\" or \"False\" for false and \"1\" or \"True\" for true, unless\\n'\n'otherwise stated. (Important exception: the Boolean operations '\n'\"or\"\\n'\n'and \"and\" always return one of their operands.)\\n',\n'try':'The \"try\" statement\\n'\n'*******************\\n'\n'\\n'\n'The \"try\" statement specifies exception handlers and/or cleanup code\\n'\n'for a group of statements:\\n'\n'\\n'\n' try_stmt ::= try1_stmt | try2_stmt\\n'\n' try1_stmt ::= \"try\" \":\" suite\\n'\n' (\"except\" [expression [\"as\" identifier]] \":\" '\n'suite)+\\n'\n' [\"else\" \":\" suite]\\n'\n' [\"finally\" \":\" suite]\\n'\n' try2_stmt ::= \"try\" \":\" suite\\n'\n' \"finally\" \":\" suite\\n'\n'\\n'\n'The \"except\" clause(s) specify one or more exception handlers. When '\n'no\\n'\n'exception occurs in the \"try\" clause, no exception handler is\\n'\n'executed. 
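A minimal sketch (hypothetical class) of the truth-testing rules quoted above: with no `__bool__()`, truth value falls back to `__len__()`.

```python
class Box:
    def __init__(self, items):
        self.items = list(items)

    def __len__(self):
        # With no __bool__ defined, bool(box) is len(box) != 0.
        return len(self.items)


print(bool(Box([])))      # False: __len__ returns 0
print(bool(Box([1, 2])))  # True
print(bool(object()))     # True: default when neither method is defined
```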
When an exception occurs in the \"try\" suite, a search for '\n'an\\n'\n'exception handler is started. This search inspects the except '\n'clauses\\n'\n'in turn until one is found that matches the exception. An '\n'expression-\\n'\n'less except clause, if present, must be last; it matches any\\n'\n'exception. For an except clause with an expression, that expression\\n'\n'is evaluated, and the clause matches the exception if the resulting\\n'\n'object is \u201ccompatible\u201d with the exception. An object is compatible\\n'\n'with an exception if it is the class or a base class of the '\n'exception\\n'\n'object, or a tuple containing an item that is the class or a base\\n'\n'class of the exception object.\\n'\n'\\n'\n'If no except clause matches the exception, the search for an '\n'exception\\n'\n'handler continues in the surrounding code and on the invocation '\n'stack.\\n'\n'[1]\\n'\n'\\n'\n'If the evaluation of an expression in the header of an except clause\\n'\n'raises an exception, the original search for a handler is canceled '\n'and\\n'\n'a search starts for the new exception in the surrounding code and on\\n'\n'the call stack (it is treated as if the entire \"try\" statement '\n'raised\\n'\n'the exception).\\n'\n'\\n'\n'When a matching except clause is found, the exception is assigned to\\n'\n'the target specified after the \"as\" keyword in that except clause, '\n'if\\n'\n'present, and the except clause\u2019s suite is executed. All except\\n'\n'clauses must have an executable block. When the end of this block '\n'is\\n'\n'reached, execution continues normally after the entire try '\n'statement.\\n'\n'(This means that if two nested handlers exist for the same '\n'exception,\\n'\n'and the exception occurs in the try clause of the inner handler, the\\n'\n'outer handler will not handle the exception.)\\n'\n'\\n'\n'When an exception has been assigned using \"as target\", it is cleared\\n'\n'at the end of the except clause. This is as if\\n'\n'\\n'\n' except E as N:\\n'\n' foo\\n'\n'\\n'\n'was translated to\\n'\n'\\n'\n' except E as N:\\n'\n' try:\\n'\n' foo\\n'\n' finally:\\n'\n' del N\\n'\n'\\n'\n'This means the exception must be assigned to a different name to be\\n'\n'able to refer to it after the except clause. Exceptions are cleared\\n'\n'because with the traceback attached to them, they form a reference\\n'\n'cycle with the stack frame, keeping all locals in that frame alive\\n'\n'until the next garbage collection occurs.\\n'\n'\\n'\n'Before an except clause\u2019s suite is executed, details about the\\n'\n'exception are stored in the \"sys\" module and can be accessed via\\n'\n'\"sys.exc_info()\". \"sys.exc_info()\" returns a 3-tuple consisting of '\n'the\\n'\n'exception class, the exception instance and a traceback object (see\\n'\n'section The standard type hierarchy) identifying the point in the\\n'\n'program where the exception occurred. The details about the '\n'exception\\n'\n'accessed via \"sys.exc_info()\" are restored to their previous values\\n'\n'when leaving an exception handler:\\n'\n'\\n'\n' >>> print(sys.exc_info())\\n'\n' (None, None, None)\\n'\n' >>> try:\\n'\n' ... raise TypeError\\n'\n' ... except:\\n'\n' ... print(sys.exc_info())\\n'\n' ... try:\\n'\n' ... raise ValueError\\n'\n' ... except:\\n'\n' ... print(sys.exc_info())\\n'\n' ... 
print(sys.exc_info())\\n'\n' ...\\n'\n\" (, TypeError(), )\\n'\n\" (, ValueError(), )\\n'\n\" (, TypeError(), )\\n'\n' >>> print(sys.exc_info())\\n'\n' (None, None, None)\\n'\n'\\n'\n'The optional \"else\" clause is executed if the control flow leaves '\n'the\\n'\n'\"try\" suite, no exception was raised, and no \"return\", \"continue\", '\n'or\\n'\n'\"break\" statement was executed. Exceptions in the \"else\" clause are\\n'\n'not handled by the preceding \"except\" clauses.\\n'\n'\\n'\n'If \"finally\" is present, it specifies a \u2018cleanup\u2019 handler. The '\n'\"try\"\\n'\n'clause is executed, including any \"except\" and \"else\" clauses. If '\n'an\\n'\n'exception occurs in any of the clauses and is not handled, the\\n'\n'exception is temporarily saved. The \"finally\" clause is executed. '\n'If\\n'\n'there is a saved exception it is re-raised at the end of the '\n'\"finally\"\\n'\n'clause. If the \"finally\" clause raises another exception, the saved\\n'\n'exception is set as the context of the new exception. If the '\n'\"finally\"\\n'\n'clause executes a \"return\", \"break\" or \"continue\" statement, the '\n'saved\\n'\n'exception is discarded:\\n'\n'\\n'\n' >>> def f():\\n'\n' ... try:\\n'\n' ... 1/0\\n'\n' ... finally:\\n'\n' ... return 42\\n'\n' ...\\n'\n' >>> f()\\n'\n' 42\\n'\n'\\n'\n'The exception information is not available to the program during\\n'\n'execution of the \"finally\" clause.\\n'\n'\\n'\n'When a \"return\", \"break\" or \"continue\" statement is executed in the\\n'\n'\"try\" suite of a \"try\"\u2026\"finally\" statement, the \"finally\" clause is\\n'\n'also executed \u2018on the way out.\u2019\\n'\n'\\n'\n'The return value of a function is determined by the last \"return\"\\n'\n'statement executed. Since the \"finally\" clause always executes, a\\n'\n'\"return\" statement executed in the \"finally\" clause will always be '\n'the\\n'\n'last one executed:\\n'\n'\\n'\n' >>> def foo():\\n'\n' ... try:\\n'\n\" ... return 'try'\\n\"\n' ... finally:\\n'\n\" ... return 'finally'\\n\"\n' ...\\n'\n' >>> foo()\\n'\n\" 'finally'\\n\"\n'\\n'\n'Additional information on exceptions can be found in section\\n'\n'Exceptions, and information on using the \"raise\" statement to '\n'generate\\n'\n'exceptions may be found in section The raise statement.\\n'\n'\\n'\n'Changed in version 3.8: Prior to Python 3.8, a \"continue\" statement\\n'\n'was illegal in the \"finally\" clause due to a problem with the\\n'\n'implementation.\\n',\n'types':'The standard type hierarchy\\n'\n'***************************\\n'\n'\\n'\n'Below is a list of the types that are built into Python. '\n'Extension\\n'\n'modules (written in C, Java, or other languages, depending on the\\n'\n'implementation) can define additional types. Future versions of\\n'\n'Python may add types to the type hierarchy (e.g., rational '\n'numbers,\\n'\n'efficiently stored arrays of integers, etc.), although such '\n'additions\\n'\n'will often be provided via the standard library instead.\\n'\n'\\n'\n'Some of the type descriptions below contain a paragraph listing\\n'\n'\u2018special attributes.\u2019 These are attributes that provide access to '\n'the\\n'\n'implementation and are not intended for general use. Their '\n'definition\\n'\n'may change in the future.\\n'\n'\\n'\n'None\\n'\n' This type has a single value. There is a single object with '\n'this\\n'\n' value. This object is accessed through the built-in name \"None\". 
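An assumed example of the `else`/`finally` ordering described above: `else` runs only when the `try` suite raises nothing, and `finally` always runs "on the way out".

```python
def parse(text):
    try:
        value = int(text)
    except ValueError:
        print("not an integer")
    else:
        print("parsed", value)   # runs only if the try suite raised nothing
    finally:
        print("cleanup")         # always runs

parse("42")    # parsed 42 / cleanup
parse("abc")   # not an integer / cleanup
```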
'\n'It\\n'\n' is used to signify the absence of a value in many situations, '\n'e.g.,\\n'\n' it is returned from functions that don\u2019t explicitly return\\n'\n' anything. Its truth value is false.\\n'\n'\\n'\n'NotImplemented\\n'\n' This type has a single value. There is a single object with '\n'this\\n'\n' value. This object is accessed through the built-in name\\n'\n' \"NotImplemented\". Numeric methods and rich comparison methods\\n'\n' should return this value if they do not implement the operation '\n'for\\n'\n' the operands provided. (The interpreter will then try the\\n'\n' reflected operation, or some other fallback, depending on the\\n'\n' operator.) It should not be evaluated in a boolean context.\\n'\n'\\n'\n' See Implementing the arithmetic operations for more details.\\n'\n'\\n'\n' Changed in version 3.9: Evaluating \"NotImplemented\" in a '\n'boolean\\n'\n' context is deprecated. While it currently evaluates as true, it\\n'\n' will emit a \"DeprecationWarning\". It will raise a \"TypeError\" in '\n'a\\n'\n' future version of Python.\\n'\n'\\n'\n'Ellipsis\\n'\n' This type has a single value. There is a single object with '\n'this\\n'\n' value. This object is accessed through the literal \"...\" or the\\n'\n' built-in name \"Ellipsis\". Its truth value is true.\\n'\n'\\n'\n'\"numbers.Number\"\\n'\n' These are created by numeric literals and returned as results '\n'by\\n'\n' arithmetic operators and arithmetic built-in functions. '\n'Numeric\\n'\n' objects are immutable; once created their value never changes.\\n'\n' Python numbers are of course strongly related to mathematical\\n'\n' numbers, but subject to the limitations of numerical '\n'representation\\n'\n' in computers.\\n'\n'\\n'\n' The string representations of the numeric classes, computed by\\n'\n' \"__repr__()\" and \"__str__()\", have the following properties:\\n'\n'\\n'\n' * They are valid numeric literals which, when passed to their '\n'class\\n'\n' constructor, produce an object having the value of the '\n'original\\n'\n' numeric.\\n'\n'\\n'\n' * The representation is in base 10, when possible.\\n'\n'\\n'\n' * Leading zeros, possibly excepting a single zero before a '\n'decimal\\n'\n' point, are not shown.\\n'\n'\\n'\n' * Trailing zeros, possibly excepting a single zero after a '\n'decimal\\n'\n' point, are not shown.\\n'\n'\\n'\n' * A sign is shown only when the number is negative.\\n'\n'\\n'\n' Python distinguishes between integers, floating point numbers, '\n'and\\n'\n' complex numbers:\\n'\n'\\n'\n' \"numbers.Integral\"\\n'\n' These represent elements from the mathematical set of '\n'integers\\n'\n' (positive and negative).\\n'\n'\\n'\n' There are two types of integers:\\n'\n'\\n'\n' Integers (\"int\")\\n'\n' These represent numbers in an unlimited range, subject to\\n'\n' available (virtual) memory only. For the purpose of '\n'shift\\n'\n' and mask operations, a binary representation is assumed, '\n'and\\n'\n' negative numbers are represented in a variant of 2\u2019s\\n'\n' complement which gives the illusion of an infinite string '\n'of\\n'\n' sign bits extending to the left.\\n'\n'\\n'\n' Booleans (\"bool\")\\n'\n' These represent the truth values False and True. The two\\n'\n' objects representing the values \"False\" and \"True\" are '\n'the\\n'\n' only Boolean objects. 
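A hypothetical sketch of a rich comparison returning `NotImplemented`, so the interpreter can try the reflected operation on the other operand, as described in the `NotImplemented` entry above; the `Meters` class is an assumption.

```python
class Meters:
    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        if isinstance(other, Meters):
            return self.value == other.value
        return NotImplemented   # give the other operand's __eq__ a chance


print(Meters(3) == Meters(3))   # True
print(Meters(3) == "3 m")       # False: both sides return NotImplemented,
                                # so == falls back to identity comparison
```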
The Boolean type is a subtype of '\n'the\\n'\n' integer type, and Boolean values behave like the values 0 '\n'and\\n'\n' 1, respectively, in almost all contexts, the exception '\n'being\\n'\n' that when converted to a string, the strings \"\"False\"\" or\\n'\n' \"\"True\"\" are returned, respectively.\\n'\n'\\n'\n' The rules for integer representation are intended to give '\n'the\\n'\n' most meaningful interpretation of shift and mask operations\\n'\n' involving negative integers.\\n'\n'\\n'\n' \"numbers.Real\" (\"float\")\\n'\n' These represent machine-level double precision floating '\n'point\\n'\n' numbers. You are at the mercy of the underlying machine\\n'\n' architecture (and C or Java implementation) for the accepted\\n'\n' range and handling of overflow. Python does not support '\n'single-\\n'\n' precision floating point numbers; the savings in processor '\n'and\\n'\n' memory usage that are usually the reason for using these are\\n'\n' dwarfed by the overhead of using objects in Python, so there '\n'is\\n'\n' no reason to complicate the language with two kinds of '\n'floating\\n'\n' point numbers.\\n'\n'\\n'\n' \"numbers.Complex\" (\"complex\")\\n'\n' These represent complex numbers as a pair of machine-level\\n'\n' double precision floating point numbers. The same caveats '\n'apply\\n'\n' as for floating point numbers. The real and imaginary parts '\n'of a\\n'\n' complex number \"z\" can be retrieved through the read-only\\n'\n' attributes \"z.real\" and \"z.imag\".\\n'\n'\\n'\n'Sequences\\n'\n' These represent finite ordered sets indexed by non-negative\\n'\n' numbers. The built-in function \"len()\" returns the number of '\n'items\\n'\n' of a sequence. When the length of a sequence is *n*, the index '\n'set\\n'\n' contains the numbers 0, 1, \u2026, *n*-1. Item *i* of sequence *a* '\n'is\\n'\n' selected by \"a[i]\".\\n'\n'\\n'\n' Sequences also support slicing: \"a[i:j]\" selects all items with\\n'\n' index *k* such that *i* \"<=\" *k* \"<\" *j*. When used as an\\n'\n' expression, a slice is a sequence of the same type. This '\n'implies\\n'\n' that the index set is renumbered so that it starts at 0.\\n'\n'\\n'\n' Some sequences also support \u201cextended slicing\u201d with a third '\n'\u201cstep\u201d\\n'\n' parameter: \"a[i:j:k]\" selects all items of *a* with index *x* '\n'where\\n'\n' \"x = i + n*k\", *n* \">=\" \"0\" and *i* \"<=\" *x* \"<\" *j*.\\n'\n'\\n'\n' Sequences are distinguished according to their mutability:\\n'\n'\\n'\n' Immutable sequences\\n'\n' An object of an immutable sequence type cannot change once it '\n'is\\n'\n' created. (If the object contains references to other '\n'objects,\\n'\n' these other objects may be mutable and may be changed; '\n'however,\\n'\n' the collection of objects directly referenced by an '\n'immutable\\n'\n' object cannot change.)\\n'\n'\\n'\n' The following types are immutable sequences:\\n'\n'\\n'\n' Strings\\n'\n' A string is a sequence of values that represent Unicode '\n'code\\n'\n' points. All the code points in the range \"U+0000 - '\n'U+10FFFF\"\\n'\n' can be represented in a string. Python doesn\u2019t have a '\n'*char*\\n'\n' type; instead, every code point in the string is '\n'represented\\n'\n' as a string object with length \"1\". The built-in '\n'function\\n'\n' \"ord()\" converts a code point from its string form to an\\n'\n' integer in the range \"0 - 10FFFF\"; \"chr()\" converts an\\n'\n' integer in the range \"0 - 10FFFF\" to the corresponding '\n'length\\n'\n' \"1\" string object. 
\"str.encode()\" can be used to convert '\n'a\\n'\n' \"str\" to \"bytes\" using the given text encoding, and\\n'\n' \"bytes.decode()\" can be used to achieve the opposite.\\n'\n'\\n'\n' Tuples\\n'\n' The items of a tuple are arbitrary Python objects. Tuples '\n'of\\n'\n' two or more items are formed by comma-separated lists of\\n'\n' expressions. A tuple of one item (a \u2018singleton\u2019) can be\\n'\n' formed by affixing a comma to an expression (an expression '\n'by\\n'\n' itself does not create a tuple, since parentheses must be\\n'\n' usable for grouping of expressions). An empty tuple can '\n'be\\n'\n' formed by an empty pair of parentheses.\\n'\n'\\n'\n' Bytes\\n'\n' A bytes object is an immutable array. The items are '\n'8-bit\\n'\n' bytes, represented by integers in the range 0 <= x < 256.\\n'\n' Bytes literals (like \"b\\'abc\\'\") and the built-in '\n'\"bytes()\"\\n'\n' constructor can be used to create bytes objects. Also, '\n'bytes\\n'\n' objects can be decoded to strings via the \"decode()\" '\n'method.\\n'\n'\\n'\n' Mutable sequences\\n'\n' Mutable sequences can be changed after they are created. '\n'The\\n'\n' subscription and slicing notations can be used as the target '\n'of\\n'\n' assignment and \"del\" (delete) statements.\\n'\n'\\n'\n' There are currently two intrinsic mutable sequence types:\\n'\n'\\n'\n' Lists\\n'\n' The items of a list are arbitrary Python objects. Lists '\n'are\\n'\n' formed by placing a comma-separated list of expressions '\n'in\\n'\n' square brackets. (Note that there are no special cases '\n'needed\\n'\n' to form lists of length 0 or 1.)\\n'\n'\\n'\n' Byte Arrays\\n'\n' A bytearray object is a mutable array. They are created '\n'by\\n'\n' the built-in \"bytearray()\" constructor. Aside from being\\n'\n' mutable (and hence unhashable), byte arrays otherwise '\n'provide\\n'\n' the same interface and functionality as immutable \"bytes\"\\n'\n' objects.\\n'\n'\\n'\n' The extension module \"array\" provides an additional example '\n'of a\\n'\n' mutable sequence type, as does the \"collections\" module.\\n'\n'\\n'\n'Set types\\n'\n' These represent unordered, finite sets of unique, immutable\\n'\n' objects. As such, they cannot be indexed by any subscript. '\n'However,\\n'\n' they can be iterated over, and the built-in function \"len()\"\\n'\n' returns the number of items in a set. Common uses for sets are '\n'fast\\n'\n' membership testing, removing duplicates from a sequence, and\\n'\n' computing mathematical operations such as intersection, union,\\n'\n' difference, and symmetric difference.\\n'\n'\\n'\n' For set elements, the same immutability rules apply as for\\n'\n' dictionary keys. Note that numeric types obey the normal rules '\n'for\\n'\n' numeric comparison: if two numbers compare equal (e.g., \"1\" and\\n'\n' \"1.0\"), only one of them can be contained in a set.\\n'\n'\\n'\n' There are currently two intrinsic set types:\\n'\n'\\n'\n' Sets\\n'\n' These represent a mutable set. They are created by the '\n'built-in\\n'\n' \"set()\" constructor and can be modified afterwards by '\n'several\\n'\n' methods, such as \"add()\".\\n'\n'\\n'\n' Frozen sets\\n'\n' These represent an immutable set. They are created by the\\n'\n' built-in \"frozenset()\" constructor. As a frozenset is '\n'immutable\\n'\n' and *hashable*, it can be used again as an element of '\n'another\\n'\n' set, or as a dictionary key.\\n'\n'\\n'\n'Mappings\\n'\n' These represent finite sets of objects indexed by arbitrary '\n'index\\n'\n' sets. 
The subscript notation \"a[k]\" selects the item indexed by '\n'\"k\"\\n'\n' from the mapping \"a\"; this can be used in expressions and as '\n'the\\n'\n' target of assignments or \"del\" statements. The built-in '\n'function\\n'\n' \"len()\" returns the number of items in a mapping.\\n'\n'\\n'\n' There is currently a single intrinsic mapping type:\\n'\n'\\n'\n' Dictionaries\\n'\n' These represent finite sets of objects indexed by nearly\\n'\n' arbitrary values. The only types of values not acceptable '\n'as\\n'\n' keys are values containing lists or dictionaries or other\\n'\n' mutable types that are compared by value rather than by '\n'object\\n'\n' identity, the reason being that the efficient implementation '\n'of\\n'\n' dictionaries requires a key\u2019s hash value to remain constant.\\n'\n' Numeric types used for keys obey the normal rules for '\n'numeric\\n'\n' comparison: if two numbers compare equal (e.g., \"1\" and '\n'\"1.0\")\\n'\n' then they can be used interchangeably to index the same\\n'\n' dictionary entry.\\n'\n'\\n'\n' Dictionaries preserve insertion order, meaning that keys will '\n'be\\n'\n' produced in the same order they were added sequentially over '\n'the\\n'\n' dictionary. Replacing an existing key does not change the '\n'order,\\n'\n' however removing a key and re-inserting it will add it to '\n'the\\n'\n' end instead of keeping its old place.\\n'\n'\\n'\n' Dictionaries are mutable; they can be created by the \"{...}\"\\n'\n' notation (see section Dictionary displays).\\n'\n'\\n'\n' The extension modules \"dbm.ndbm\" and \"dbm.gnu\" provide\\n'\n' additional examples of mapping types, as does the '\n'\"collections\"\\n'\n' module.\\n'\n'\\n'\n' Changed in version 3.7: Dictionaries did not preserve '\n'insertion\\n'\n' order in versions of Python before 3.6. In CPython 3.6,\\n'\n' insertion order was preserved, but it was considered an\\n'\n' implementation detail at that time rather than a language\\n'\n' guarantee.\\n'\n'\\n'\n'Callable types\\n'\n' These are the types to which the function call operation (see\\n'\n' section Calls) can be applied:\\n'\n'\\n'\n' User-defined functions\\n'\n' A user-defined function object is created by a function\\n'\n' definition (see section Function definitions). It should be\\n'\n' called with an argument list containing the same number of '\n'items\\n'\n' as the function\u2019s formal parameter list.\\n'\n'\\n'\n' Special attributes:\\n'\n'\\n'\n' '\n'+---------------------------+---------------------------------+-------------+\\n'\n' | Attribute | Meaning '\n'| |\\n'\n' '\n'|===========================|=================================|=============|\\n'\n' | \"__doc__\" | The function\u2019s documentation '\n'| Writable |\\n'\n' | | string, or \"None\" if '\n'| |\\n'\n' | | unavailable; not inherited by '\n'| |\\n'\n' | | subclasses. '\n'| |\\n'\n' '\n'+---------------------------+---------------------------------+-------------+\\n'\n' | \"__name__\" | The function\u2019s name. '\n'| Writable |\\n'\n' '\n'+---------------------------+---------------------------------+-------------+\\n'\n' | \"__qualname__\" | The function\u2019s *qualified '\n'| Writable |\\n'\n' | | name*. New in version 3.3. '\n'| |\\n'\n' '\n'+---------------------------+---------------------------------+-------------+\\n'\n' | \"__module__\" | The name of the module the '\n'| Writable |\\n'\n' | | function was defined in, or '\n'| |\\n'\n' | | \"None\" if unavailable. 
'\n'| |\\n'\n' '\n'+---------------------------+---------------------------------+-------------+\\n'\n' | \"__defaults__\" | A tuple containing default '\n'| Writable |\\n'\n' | | argument values for those '\n'| |\\n'\n' | | arguments that have defaults, '\n'| |\\n'\n' | | or \"None\" if no arguments have '\n'| |\\n'\n' | | a default value. '\n'| |\\n'\n' '\n'+---------------------------+---------------------------------+-------------+\\n'\n' | \"__code__\" | The code object representing '\n'| Writable |\\n'\n' | | the compiled function body. '\n'| |\\n'\n' '\n'+---------------------------+---------------------------------+-------------+\\n'\n' | \"__globals__\" | A reference to the dictionary '\n'| Read-only |\\n'\n' | | that holds the function\u2019s '\n'| |\\n'\n' | | global variables \u2014 the global '\n'| |\\n'\n' | | namespace of the module in '\n'| |\\n'\n' | | which the function was defined. '\n'| |\\n'\n' '\n'+---------------------------+---------------------------------+-------------+\\n'\n' | \"__dict__\" | The namespace supporting '\n'| Writable |\\n'\n' | | arbitrary function attributes. '\n'| |\\n'\n' '\n'+---------------------------+---------------------------------+-------------+\\n'\n' | \"__closure__\" | \"None\" or a tuple of cells that '\n'| Read-only |\\n'\n' | | contain bindings for the '\n'| |\\n'\n' | | function\u2019s free variables. See '\n'| |\\n'\n' | | below for information on the '\n'| |\\n'\n' | | \"cell_contents\" attribute. '\n'| |\\n'\n' '\n'+---------------------------+---------------------------------+-------------+\\n'\n' | \"__annotations__\" | A dict containing annotations '\n'| Writable |\\n'\n' | | of parameters. The keys of the '\n'| |\\n'\n' | | dict are the parameter names, '\n'| |\\n'\n' | | and \"\\'return\\'\" for the '\n'return | |\\n'\n' | | annotation, if provided. For '\n'| |\\n'\n' | | more information on working '\n'| |\\n'\n' | | with this attribute, see '\n'| |\\n'\n' | | Annotations Best Practices. '\n'| |\\n'\n' '\n'+---------------------------+---------------------------------+-------------+\\n'\n' | \"__kwdefaults__\" | A dict containing defaults for '\n'| Writable |\\n'\n' | | keyword-only parameters. '\n'| |\\n'\n' '\n'+---------------------------+---------------------------------+-------------+\\n'\n'\\n'\n' Most of the attributes labelled \u201cWritable\u201d check the type of '\n'the\\n'\n' assigned value.\\n'\n'\\n'\n' Function objects also support getting and setting arbitrary\\n'\n' attributes, which can be used, for example, to attach '\n'metadata\\n'\n' to functions. Regular attribute dot-notation is used to get '\n'and\\n'\n' set such attributes. *Note that the current implementation '\n'only\\n'\n' supports function attributes on user-defined functions. '\n'Function\\n'\n' attributes on built-in functions may be supported in the\\n'\n' future.*\\n'\n'\\n'\n' A cell object has the attribute \"cell_contents\". This can be\\n'\n' used to get the value of the cell, as well as set the value.\\n'\n'\\n'\n' Additional information about a function\u2019s definition can be\\n'\n' retrieved from its code object; see the description of '\n'internal\\n'\n' types below. 
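A brief sketch exercising the function attributes listed in the table above (the `make_adder` helper is illustrative, not taken from this file):

    def make_adder(n: int):
        def add(x: int, *, scale: int = 1) -> int:
            return (x + n) * scale
        return add

    add3 = make_adder(3)
    assert add3.__name__ == "add"
    assert add3.__kwdefaults__ == {"scale": 1}
    assert add3.__annotations__["return"] is int

    # Free variables are held in cells, reachable through __closure__.
    cell = add3.__closure__[0]
    assert cell.cell_contents == 3

    # Arbitrary attributes can be attached to user-defined functions.
    add3.author = "example"
    assert add3.__dict__["author"] == "example"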
The \"cell\" type can be accessed in the \"types\"\\n'\n' module.\\n'\n'\\n'\n' Instance methods\\n'\n' An instance method object combines a class, a class instance '\n'and\\n'\n' any callable object (normally a user-defined function).\\n'\n'\\n'\n' Special read-only attributes: \"__self__\" is the class '\n'instance\\n'\n' object, \"__func__\" is the function object; \"__doc__\" is the\\n'\n' method\u2019s documentation (same as \"__func__.__doc__\"); '\n'\"__name__\"\\n'\n' is the method name (same as \"__func__.__name__\"); '\n'\"__module__\"\\n'\n' is the name of the module the method was defined in, or '\n'\"None\"\\n'\n' if unavailable.\\n'\n'\\n'\n' Methods also support accessing (but not setting) the '\n'arbitrary\\n'\n' function attributes on the underlying function object.\\n'\n'\\n'\n' User-defined method objects may be created when getting an\\n'\n' attribute of a class (perhaps via an instance of that class), '\n'if\\n'\n' that attribute is a user-defined function object or a class\\n'\n' method object.\\n'\n'\\n'\n' When an instance method object is created by retrieving a '\n'user-\\n'\n' defined function object from a class via one of its '\n'instances,\\n'\n' its \"__self__\" attribute is the instance, and the method '\n'object\\n'\n' is said to be bound. The new method\u2019s \"__func__\" attribute '\n'is\\n'\n' the original function object.\\n'\n'\\n'\n' When an instance method object is created by retrieving a '\n'class\\n'\n' method object from a class or instance, its \"__self__\" '\n'attribute\\n'\n' is the class itself, and its \"__func__\" attribute is the\\n'\n' function object underlying the class method.\\n'\n'\\n'\n' When an instance method object is called, the underlying\\n'\n' function (\"__func__\") is called, inserting the class '\n'instance\\n'\n' (\"__self__\") in front of the argument list. For instance, '\n'when\\n'\n' \"C\" is a class which contains a definition for a function '\n'\"f()\",\\n'\n' and \"x\" is an instance of \"C\", calling \"x.f(1)\" is equivalent '\n'to\\n'\n' calling \"C.f(x, 1)\".\\n'\n'\\n'\n' When an instance method object is derived from a class '\n'method\\n'\n' object, the \u201cclass instance\u201d stored in \"__self__\" will '\n'actually\\n'\n' be the class itself, so that calling either \"x.f(1)\" or '\n'\"C.f(1)\"\\n'\n' is equivalent to calling \"f(C,1)\" where \"f\" is the '\n'underlying\\n'\n' function.\\n'\n'\\n'\n' Note that the transformation from function object to '\n'instance\\n'\n' method object happens each time the attribute is retrieved '\n'from\\n'\n' the instance. In some cases, a fruitful optimization is to\\n'\n' assign the attribute to a local variable and call that local\\n'\n' variable. Also notice that this transformation only happens '\n'for\\n'\n' user-defined functions; other callable objects (and all non-\\n'\n' callable objects) are retrieved without transformation. 
It '\n'is\\n'\n' also important to note that user-defined functions which are\\n'\n' attributes of a class instance are not converted to bound\\n'\n' methods; this *only* happens when the function is an '\n'attribute\\n'\n' of the class.\\n'\n'\\n'\n' Generator functions\\n'\n' A function or method which uses the \"yield\" statement (see\\n'\n' section The yield statement) is called a *generator '\n'function*.\\n'\n' Such a function, when called, always returns an iterator '\n'object\\n'\n' which can be used to execute the body of the function: '\n'calling\\n'\n' the iterator\u2019s \"iterator.__next__()\" method will cause the\\n'\n' function to execute until it provides a value using the '\n'\"yield\"\\n'\n' statement. When the function executes a \"return\" statement '\n'or\\n'\n' falls off the end, a \"StopIteration\" exception is raised and '\n'the\\n'\n' iterator will have reached the end of the set of values to '\n'be\\n'\n' returned.\\n'\n'\\n'\n' Coroutine functions\\n'\n' A function or method which is defined using \"async def\" is\\n'\n' called a *coroutine function*. Such a function, when '\n'called,\\n'\n' returns a *coroutine* object. It may contain \"await\"\\n'\n' expressions, as well as \"async with\" and \"async for\" '\n'statements.\\n'\n' See also the Coroutine Objects section.\\n'\n'\\n'\n' Asynchronous generator functions\\n'\n' A function or method which is defined using \"async def\" and\\n'\n' which uses the \"yield\" statement is called a *asynchronous\\n'\n' generator function*. Such a function, when called, returns '\n'an\\n'\n' asynchronous iterator object which can be used in an \"async '\n'for\"\\n'\n' statement to execute the body of the function.\\n'\n'\\n'\n' Calling the asynchronous iterator\u2019s \"aiterator.__anext__()\"\\n'\n' method will return an *awaitable* which when awaited will\\n'\n' execute until it provides a value using the \"yield\" '\n'expression.\\n'\n' When the function executes an empty \"return\" statement or '\n'falls\\n'\n' off the end, a \"StopAsyncIteration\" exception is raised and '\n'the\\n'\n' asynchronous iterator will have reached the end of the set '\n'of\\n'\n' values to be yielded.\\n'\n'\\n'\n' Built-in functions\\n'\n' A built-in function object is a wrapper around a C function.\\n'\n' Examples of built-in functions are \"len()\" and \"math.sin()\"\\n'\n' (\"math\" is a standard built-in module). The number and type '\n'of\\n'\n' the arguments are determined by the C function. Special '\n'read-\\n'\n' only attributes: \"__doc__\" is the function\u2019s documentation\\n'\n' string, or \"None\" if unavailable; \"__name__\" is the '\n'function\u2019s\\n'\n' name; \"__self__\" is set to \"None\" (but see the next item);\\n'\n' \"__module__\" is the name of the module the function was '\n'defined\\n'\n' in or \"None\" if unavailable.\\n'\n'\\n'\n' Built-in methods\\n'\n' This is really a different disguise of a built-in function, '\n'this\\n'\n' time containing an object passed to the C function as an\\n'\n' implicit extra argument. An example of a built-in method is\\n'\n' \"alist.append()\", assuming *alist* is a list object. In this\\n'\n' case, the special read-only attribute \"__self__\" is set to '\n'the\\n'\n' object denoted by *alist*.\\n'\n'\\n'\n' Classes\\n'\n' Classes are callable. These objects normally act as '\n'factories\\n'\n' for new instances of themselves, but variations are possible '\n'for\\n'\n' class types that override \"__new__()\". 
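A minimal generator-function sketch matching the description above, including the "StopIteration" raised when the function returns (the `countdown` name is illustrative):

    def countdown(n):
        while n > 0:
            yield n          # each yield hands one value to the caller
            n -= 1
        return "done"        # the return value travels on StopIteration

    it = countdown(2)
    assert next(it) == 2 and next(it) == 1
    try:
        next(it)
    except StopIteration as exc:
        assert exc.value == "done"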
The arguments of the\\n'\n' call are passed to \"__new__()\" and, in the typical case, to\\n'\n' \"__init__()\" to initialize the new instance.\\n'\n'\\n'\n' Class Instances\\n'\n' Instances of arbitrary classes can be made callable by '\n'defining\\n'\n' a \"__call__()\" method in their class.\\n'\n'\\n'\n'Modules\\n'\n' Modules are a basic organizational unit of Python code, and are\\n'\n' created by the import system as invoked either by the \"import\"\\n'\n' statement, or by calling functions such as\\n'\n' \"importlib.import_module()\" and built-in \"__import__()\". A '\n'module\\n'\n' object has a namespace implemented by a dictionary object (this '\n'is\\n'\n' the dictionary referenced by the \"__globals__\" attribute of\\n'\n' functions defined in the module). Attribute references are\\n'\n' translated to lookups in this dictionary, e.g., \"m.x\" is '\n'equivalent\\n'\n' to \"m.__dict__[\"x\"]\". A module object does not contain the code\\n'\n' object used to initialize the module (since it isn\u2019t needed '\n'once\\n'\n' the initialization is done).\\n'\n'\\n'\n' Attribute assignment updates the module\u2019s namespace dictionary,\\n'\n' e.g., \"m.x = 1\" is equivalent to \"m.__dict__[\"x\"] = 1\".\\n'\n'\\n'\n' Predefined (writable) attributes:\\n'\n'\\n'\n' \"__name__\"\\n'\n' The module\u2019s name.\\n'\n'\\n'\n' \"__doc__\"\\n'\n' The module\u2019s documentation string, or \"None\" if '\n'unavailable.\\n'\n'\\n'\n' \"__file__\"\\n'\n' The pathname of the file from which the module was loaded, '\n'if\\n'\n' it was loaded from a file. The \"__file__\" attribute may '\n'be\\n'\n' missing for certain types of modules, such as C modules '\n'that\\n'\n' are statically linked into the interpreter. For '\n'extension\\n'\n' modules loaded dynamically from a shared library, it\u2019s '\n'the\\n'\n' pathname of the shared library file.\\n'\n'\\n'\n' \"__annotations__\"\\n'\n' A dictionary containing *variable annotations* collected\\n'\n' during module body execution. For best practices on '\n'working\\n'\n' with \"__annotations__\", please see Annotations Best\\n'\n' Practices.\\n'\n'\\n'\n' Special read-only attribute: \"__dict__\" is the module\u2019s '\n'namespace\\n'\n' as a dictionary object.\\n'\n'\\n'\n' **CPython implementation detail:** Because of the way CPython\\n'\n' clears module dictionaries, the module dictionary will be '\n'cleared\\n'\n' when the module falls out of scope even if the dictionary still '\n'has\\n'\n' live references. To avoid this, copy the dictionary or keep '\n'the\\n'\n' module around while using its dictionary directly.\\n'\n'\\n'\n'Custom classes\\n'\n' Custom class types are typically created by class definitions '\n'(see\\n'\n' section Class definitions). A class has a namespace implemented '\n'by\\n'\n' a dictionary object. Class attribute references are translated '\n'to\\n'\n' lookups in this dictionary, e.g., \"C.x\" is translated to\\n'\n' \"C.__dict__[\"x\"]\" (although there are a number of hooks which '\n'allow\\n'\n' for other means of locating attributes). When the attribute name '\n'is\\n'\n' not found there, the attribute search continues in the base\\n'\n' classes. This search of the base classes uses the C3 method\\n'\n' resolution order which behaves correctly even in the presence '\n'of\\n'\n' \u2018diamond\u2019 inheritance structures where there are multiple\\n'\n' inheritance paths leading back to a common ancestor. 
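A short sketch of callable instances and of a module namespace backed by a dictionary, as described above (names are illustrative; assumes the standard `types` module):

    import types

    class Greeter:
        def __init__(self, greeting):
            self.greeting = greeting

        def __call__(self, name):      # instances of this class become callable
            return f"{self.greeting}, {name}!"

    hello = Greeter("Hello")
    assert hello("world") == "Hello, world!"

    # A module's namespace is a plain dict; attribute access is a dict lookup.
    m = types.ModuleType("example_mod")
    m.x = 1
    assert m.__dict__["x"] == 1
    m.__dict__["y"] = 2
    assert m.y == 2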
Additional\\n'\n' details on the C3 MRO used by Python can be found in the\\n'\n' documentation accompanying the 2.3 release at\\n'\n' https://www.python.org/download/releases/2.3/mro/.\\n'\n'\\n'\n' When a class attribute reference (for class \"C\", say) would '\n'yield a\\n'\n' class method object, it is transformed into an instance method\\n'\n' object whose \"__self__\" attribute is \"C\". When it would yield '\n'a\\n'\n' static method object, it is transformed into the object wrapped '\n'by\\n'\n' the static method object. See section Implementing Descriptors '\n'for\\n'\n' another way in which attributes retrieved from a class may '\n'differ\\n'\n' from those actually contained in its \"__dict__\".\\n'\n'\\n'\n' Class attribute assignments update the class\u2019s dictionary, '\n'never\\n'\n' the dictionary of a base class.\\n'\n'\\n'\n' A class object can be called (see above) to yield a class '\n'instance\\n'\n' (see below).\\n'\n'\\n'\n' Special attributes:\\n'\n'\\n'\n' \"__name__\"\\n'\n' The class name.\\n'\n'\\n'\n' \"__module__\"\\n'\n' The name of the module in which the class was defined.\\n'\n'\\n'\n' \"__dict__\"\\n'\n' The dictionary containing the class\u2019s namespace.\\n'\n'\\n'\n' \"__bases__\"\\n'\n' A tuple containing the base classes, in the order of '\n'their\\n'\n' occurrence in the base class list.\\n'\n'\\n'\n' \"__doc__\"\\n'\n' The class\u2019s documentation string, or \"None\" if undefined.\\n'\n'\\n'\n' \"__annotations__\"\\n'\n' A dictionary containing *variable annotations* collected\\n'\n' during class body execution. For best practices on '\n'working\\n'\n' with \"__annotations__\", please see Annotations Best\\n'\n' Practices.\\n'\n'\\n'\n'Class instances\\n'\n' A class instance is created by calling a class object (see '\n'above).\\n'\n' A class instance has a namespace implemented as a dictionary '\n'which\\n'\n' is the first place in which attribute references are searched.\\n'\n' When an attribute is not found there, and the instance\u2019s class '\n'has\\n'\n' an attribute by that name, the search continues with the class\\n'\n' attributes. If a class attribute is found that is a '\n'user-defined\\n'\n' function object, it is transformed into an instance method '\n'object\\n'\n' whose \"__self__\" attribute is the instance. Static method and\\n'\n' class method objects are also transformed; see above under\\n'\n' \u201cClasses\u201d. See section Implementing Descriptors for another way '\n'in\\n'\n' which attributes of a class retrieved via its instances may '\n'differ\\n'\n' from the objects actually stored in the class\u2019s \"__dict__\". If '\n'no\\n'\n' class attribute is found, and the object\u2019s class has a\\n'\n' \"__getattr__()\" method, that is called to satisfy the lookup.\\n'\n'\\n'\n' Attribute assignments and deletions update the instance\u2019s\\n'\n' dictionary, never a class\u2019s dictionary. If the class has a\\n'\n' \"__setattr__()\" or \"__delattr__()\" method, this is called '\n'instead\\n'\n' of updating the instance dictionary directly.\\n'\n'\\n'\n' Class instances can pretend to be numbers, sequences, or '\n'mappings\\n'\n' if they have methods with certain special names. See section\\n'\n' Special method names.\\n'\n'\\n'\n' Special attributes: \"__dict__\" is the attribute dictionary;\\n'\n' \"__class__\" is the instance\u2019s class.\\n'\n'\\n'\n'I/O objects (also known as file objects)\\n'\n' A *file object* represents an open file. 
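An illustrative sketch of the C3 method resolution order and of instance attribute lookup falling back to "__getattr__()" (class names are invented for this example):

    class A: pass
    class B(A): pass
    class C(A): pass
    class D(B, C): pass

    # C3 linearization resolves 'diamond' inheritance deterministically.
    assert D.__mro__ == (D, B, C, A, object)

    class Lazy:
        def __getattr__(self, name):       # called only when normal lookup fails
            return f"computed:{name}"

    obj = Lazy()
    obj.existing = 1                       # stored in the instance __dict__
    assert obj.existing == 1
    assert obj.missing == "computed:missing"
    assert obj.__class__ is Lazy and "existing" in obj.__dict__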
Various shortcuts are\\n'\n' available to create file objects: the \"open()\" built-in '\n'function,\\n'\n' and also \"os.popen()\", \"os.fdopen()\", and the \"makefile()\" '\n'method\\n'\n' of socket objects (and perhaps by other functions or methods\\n'\n' provided by extension modules).\\n'\n'\\n'\n' The objects \"sys.stdin\", \"sys.stdout\" and \"sys.stderr\" are\\n'\n' initialized to file objects corresponding to the interpreter\u2019s\\n'\n' standard input, output and error streams; they are all open in '\n'text\\n'\n' mode and therefore follow the interface defined by the\\n'\n' \"io.TextIOBase\" abstract class.\\n'\n'\\n'\n'Internal types\\n'\n' A few types used internally by the interpreter are exposed to '\n'the\\n'\n' user. Their definitions may change with future versions of the\\n'\n' interpreter, but they are mentioned here for completeness.\\n'\n'\\n'\n' Code objects\\n'\n' Code objects represent *byte-compiled* executable Python '\n'code,\\n'\n' or *bytecode*. The difference between a code object and a\\n'\n' function object is that the function object contains an '\n'explicit\\n'\n' reference to the function\u2019s globals (the module in which it '\n'was\\n'\n' defined), while a code object contains no context; also the\\n'\n' default argument values are stored in the function object, '\n'not\\n'\n' in the code object (because they represent values calculated '\n'at\\n'\n' run-time). Unlike function objects, code objects are '\n'immutable\\n'\n' and contain no references (directly or indirectly) to '\n'mutable\\n'\n' objects.\\n'\n'\\n'\n' Special read-only attributes: \"co_name\" gives the function '\n'name;\\n'\n' \"co_argcount\" is the total number of positional arguments\\n'\n' (including positional-only arguments and arguments with '\n'default\\n'\n' values); \"co_posonlyargcount\" is the number of '\n'positional-only\\n'\n' arguments (including arguments with default values);\\n'\n' \"co_kwonlyargcount\" is the number of keyword-only arguments\\n'\n' (including arguments with default values); \"co_nlocals\" is '\n'the\\n'\n' number of local variables used by the function (including\\n'\n' arguments); \"co_varnames\" is a tuple containing the names of '\n'the\\n'\n' local variables (starting with the argument names);\\n'\n' \"co_cellvars\" is a tuple containing the names of local '\n'variables\\n'\n' that are referenced by nested functions; \"co_freevars\" is a\\n'\n' tuple containing the names of free variables; \"co_code\" is a\\n'\n' string representing the sequence of bytecode instructions;\\n'\n' \"co_consts\" is a tuple containing the literals used by the\\n'\n' bytecode; \"co_names\" is a tuple containing the names used by '\n'the\\n'\n' bytecode; \"co_filename\" is the filename from which the code '\n'was\\n'\n' compiled; \"co_firstlineno\" is the first line number of the\\n'\n' function; \"co_lnotab\" is a string encoding the mapping from\\n'\n' bytecode offsets to line numbers (for details see the source\\n'\n' code of the interpreter); \"co_stacksize\" is the required '\n'stack\\n'\n' size; \"co_flags\" is an integer encoding a number of flags '\n'for\\n'\n' the interpreter.\\n'\n'\\n'\n' The following flag bits are defined for \"co_flags\": bit '\n'\"0x04\"\\n'\n' is set if the function uses the \"*arguments\" syntax to accept '\n'an\\n'\n' arbitrary number of positional arguments; bit \"0x08\" is set '\n'if\\n'\n' the function uses the \"**keywords\" syntax to accept '\n'arbitrary\\n'\n' keyword arguments; bit \"0x20\" is set if the function is a\\n'\n' 
generator.\\n'\n'\\n'\n' Future feature declarations (\"from __future__ import '\n'division\")\\n'\n' also use bits in \"co_flags\" to indicate whether a code '\n'object\\n'\n' was compiled with a particular feature enabled: bit \"0x2000\" '\n'is\\n'\n' set if the function was compiled with future division '\n'enabled;\\n'\n' bits \"0x10\" and \"0x1000\" were used in earlier versions of\\n'\n' Python.\\n'\n'\\n'\n' Other bits in \"co_flags\" are reserved for internal use.\\n'\n'\\n'\n' If a code object represents a function, the first item in\\n'\n' \"co_consts\" is the documentation string of the function, or\\n'\n' \"None\" if undefined.\\n'\n'\\n'\n' Frame objects\\n'\n' Frame objects represent execution frames. They may occur in\\n'\n' traceback objects (see below), and are also passed to '\n'registered\\n'\n' trace functions.\\n'\n'\\n'\n' Special read-only attributes: \"f_back\" is to the previous '\n'stack\\n'\n' frame (towards the caller), or \"None\" if this is the bottom\\n'\n' stack frame; \"f_code\" is the code object being executed in '\n'this\\n'\n' frame; \"f_locals\" is the dictionary used to look up local\\n'\n' variables; \"f_globals\" is used for global variables;\\n'\n' \"f_builtins\" is used for built-in (intrinsic) names; '\n'\"f_lasti\"\\n'\n' gives the precise instruction (this is an index into the\\n'\n' bytecode string of the code object).\\n'\n'\\n'\n' Accessing \"f_code\" raises an auditing event '\n'\"object.__getattr__\"\\n'\n' with arguments \"obj\" and \"\"f_code\"\".\\n'\n'\\n'\n' Special writable attributes: \"f_trace\", if not \"None\", is a\\n'\n' function called for various events during code execution '\n'(this\\n'\n' is used by the debugger). Normally an event is triggered for\\n'\n' each new source line - this can be disabled by setting\\n'\n' \"f_trace_lines\" to \"False\".\\n'\n'\\n'\n' Implementations *may* allow per-opcode events to be requested '\n'by\\n'\n' setting \"f_trace_opcodes\" to \"True\". Note that this may lead '\n'to\\n'\n' undefined interpreter behaviour if exceptions raised by the\\n'\n' trace function escape to the function being traced.\\n'\n'\\n'\n' \"f_lineno\" is the current line number of the frame \u2014 writing '\n'to\\n'\n' this from within a trace function jumps to the given line '\n'(only\\n'\n' for the bottom-most frame). A debugger can implement a Jump\\n'\n' command (aka Set Next Statement) by writing to f_lineno.\\n'\n'\\n'\n' Frame objects support one method:\\n'\n'\\n'\n' frame.clear()\\n'\n'\\n'\n' This method clears all references to local variables held '\n'by\\n'\n' the frame. Also, if the frame belonged to a generator, '\n'the\\n'\n' generator is finalized. This helps break reference '\n'cycles\\n'\n' involving frame objects (for example when catching an\\n'\n' exception and storing its traceback for later use).\\n'\n'\\n'\n' \"RuntimeError\" is raised if the frame is currently '\n'executing.\\n'\n'\\n'\n' New in version 3.4.\\n'\n'\\n'\n' Traceback objects\\n'\n' Traceback objects represent a stack trace of an exception. '\n'A\\n'\n' traceback object is implicitly created when an exception '\n'occurs,\\n'\n' and may also be explicitly created by calling\\n'\n' \"types.TracebackType\".\\n'\n'\\n'\n' For implicitly created tracebacks, when the search for an\\n'\n' exception handler unwinds the execution stack, at each '\n'unwound\\n'\n' level a traceback object is inserted in front of the current\\n'\n' traceback. When an exception handler is entered, the stack\\n'\n' trace is made available to the program. 
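A small sketch of inspecting code objects and frames as described above; `sys._getframe()` is a CPython implementation detail and may be unavailable in other implementations (function names are illustrative):

    import inspect
    import sys

    def sample(a, b=1, *args, **kwargs):
        return a + b

    code = sample.__code__
    assert code.co_name == "sample"
    assert code.co_argcount == 2                    # a and b
    assert code.co_flags & inspect.CO_VARARGS       # *args  -> bit 0x04
    assert code.co_flags & inspect.CO_VARKEYWORDS   # **kwargs -> bit 0x08

    def where_am_i():
        frame = sys._getframe()          # current frame (CPython-specific)
        return frame.f_code.co_name, frame.f_lineno

    name, lineno = where_am_i()
    assert name == "where_am_i" and lineno > 0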
(See section The try\\n'\n' statement.) It is accessible as the third item of the tuple\\n'\n' returned by \"sys.exc_info()\", and as the \"__traceback__\"\\n'\n' attribute of the caught exception.\\n'\n'\\n'\n' When the program contains no suitable handler, the stack '\n'trace\\n'\n' is written (nicely formatted) to the standard error stream; '\n'if\\n'\n' the interpreter is interactive, it is also made available to '\n'the\\n'\n' user as \"sys.last_traceback\".\\n'\n'\\n'\n' For explicitly created tracebacks, it is up to the creator '\n'of\\n'\n' the traceback to determine how the \"tb_next\" attributes '\n'should\\n'\n' be linked to form a full stack trace.\\n'\n'\\n'\n' Special read-only attributes: \"tb_frame\" points to the '\n'execution\\n'\n' frame of the current level; \"tb_lineno\" gives the line '\n'number\\n'\n' where the exception occurred; \"tb_lasti\" indicates the '\n'precise\\n'\n' instruction. The line number and last instruction in the\\n'\n' traceback may differ from the line number of its frame object '\n'if\\n'\n' the exception occurred in a \"try\" statement with no matching\\n'\n' except clause or with a finally clause.\\n'\n'\\n'\n' Accessing \"tb_frame\" raises an auditing event\\n'\n' \"object.__getattr__\" with arguments \"obj\" and \"\"tb_frame\"\".\\n'\n'\\n'\n' Special writable attribute: \"tb_next\" is the next level in '\n'the\\n'\n' stack trace (towards the frame where the exception occurred), '\n'or\\n'\n' \"None\" if there is no next level.\\n'\n'\\n'\n' Changed in version 3.7: Traceback objects can now be '\n'explicitly\\n'\n' instantiated from Python code, and the \"tb_next\" attribute '\n'of\\n'\n' existing instances can be updated.\\n'\n'\\n'\n' Slice objects\\n'\n' Slice objects are used to represent slices for '\n'\"__getitem__()\"\\n'\n' methods. They are also created by the built-in \"slice()\"\\n'\n' function.\\n'\n'\\n'\n' Special read-only attributes: \"start\" is the lower bound; '\n'\"stop\"\\n'\n' is the upper bound; \"step\" is the step value; each is \"None\" '\n'if\\n'\n' omitted. These attributes can have any type.\\n'\n'\\n'\n' Slice objects support one method:\\n'\n'\\n'\n' slice.indices(self, length)\\n'\n'\\n'\n' This method takes a single integer argument *length* and\\n'\n' computes information about the slice that the slice '\n'object\\n'\n' would describe if applied to a sequence of *length* '\n'items.\\n'\n' It returns a tuple of three integers; respectively these '\n'are\\n'\n' the *start* and *stop* indices and the *step* or stride\\n'\n' length of the slice. Missing or out-of-bounds indices are\\n'\n' handled in a manner consistent with regular slices.\\n'\n'\\n'\n' Static method objects\\n'\n' Static method objects provide a way of defeating the\\n'\n' transformation of function objects to method objects '\n'described\\n'\n' above. A static method object is a wrapper around any other\\n'\n' object, usually a user-defined method object. When a static\\n'\n' method object is retrieved from a class or a class instance, '\n'the\\n'\n' object actually returned is the wrapped object, which is not\\n'\n' subject to any further transformation. Static method objects '\n'are\\n'\n' also callable. Static method objects are created by the '\n'built-in\\n'\n' \"staticmethod()\" constructor.\\n'\n'\\n'\n' Class method objects\\n'\n' A class method object, like a static method object, is a '\n'wrapper\\n'\n' around another object that alters the way in which that '\n'object\\n'\n' is retrieved from classes and class instances. 
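A short example of slice objects, "slice.indices()", and a static method retrieved without binding (the `Util` class is illustrative):

    s = slice(1, 10, 2)
    assert (s.start, s.stop, s.step) == (1, 10, 2)

    # indices() clips the slice to a sequence of the given length.
    data = list(range(5))
    start, stop, step = s.indices(len(data))
    assert (start, stop, step) == (1, 5, 2)
    assert data[s] == [data[i] for i in range(start, stop, step)] == [1, 3]

    class Util:
        @staticmethod
        def double(x):          # returned unwrapped, not bound to an instance
            return 2 * x

    assert Util.double(3) == Util().double(3) == 6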
The behaviour '\n'of\\n'\n' class method objects upon such retrieval is described above,\\n'\n' under \u201cUser-defined methods\u201d. Class method objects are '\n'created\\n'\n' by the built-in \"classmethod()\" constructor.\\n',\n'typesfunctions':'Functions\\n'\n'*********\\n'\n'\\n'\n'Function objects are created by function definitions. The '\n'only\\n'\n'operation on a function object is to call it: '\n'\"func(argument-list)\".\\n'\n'\\n'\n'There are really two flavors of function objects: built-in '\n'functions\\n'\n'and user-defined functions. Both support the same '\n'operation (to call\\n'\n'the function), but the implementation is different, hence '\n'the\\n'\n'different object types.\\n'\n'\\n'\n'See Function definitions for more information.\\n',\n'typesmapping':'Mapping Types \u2014 \"dict\"\\n'\n'**********************\\n'\n'\\n'\n'A *mapping* object maps *hashable* values to arbitrary '\n'objects.\\n'\n'Mappings are mutable objects. There is currently only one '\n'standard\\n'\n'mapping type, the *dictionary*. (For other containers see '\n'the built-\\n'\n'in \"list\", \"set\", and \"tuple\" classes, and the \"collections\" '\n'module.)\\n'\n'\\n'\n'A dictionary\u2019s keys are *almost* arbitrary values. Values '\n'that are\\n'\n'not *hashable*, that is, values containing lists, '\n'dictionaries or\\n'\n'other mutable types (that are compared by value rather than '\n'by object\\n'\n'identity) may not be used as keys. Numeric types used for '\n'keys obey\\n'\n'the normal rules for numeric comparison: if two numbers '\n'compare equal\\n'\n'(such as \"1\" and \"1.0\") then they can be used '\n'interchangeably to index\\n'\n'the same dictionary entry. (Note however, that since '\n'computers store\\n'\n'floating-point numbers as approximations it is usually '\n'unwise to use\\n'\n'them as dictionary keys.)\\n'\n'\\n'\n'Dictionaries can be created by placing a comma-separated '\n'list of \"key:\\n'\n'value\" pairs within braces, for example: \"{\\'jack\\': 4098, '\n\"'sjoerd':\\n\"\n'4127}\" or \"{4098: \\'jack\\', 4127: \\'sjoerd\\'}\", or by the '\n'\"dict\"\\n'\n'constructor.\\n'\n'\\n'\n'class dict(**kwarg)\\n'\n'class dict(mapping, **kwarg)\\n'\n'class dict(iterable, **kwarg)\\n'\n'\\n'\n' Return a new dictionary initialized from an optional '\n'positional\\n'\n' argument and a possibly empty set of keyword arguments.\\n'\n'\\n'\n' Dictionaries can be created by several means:\\n'\n'\\n'\n' * Use a comma-separated list of \"key: value\" pairs within '\n'braces:\\n'\n' \"{\\'jack\\': 4098, \\'sjoerd\\': 4127}\" or \"{4098: '\n\"'jack', 4127:\\n\"\n' \\'sjoerd\\'}\"\\n'\n'\\n'\n' * Use a dict comprehension: \"{}\", \"{x: x ** 2 for x in '\n'range(10)}\"\\n'\n'\\n'\n' * Use the type constructor: \"dict()\", \"dict([(\\'foo\\', '\n\"100), ('bar',\\n\"\n' 200)])\", \"dict(foo=100, bar=200)\"\\n'\n'\\n'\n' If no positional argument is given, an empty dictionary '\n'is created.\\n'\n' If a positional argument is given and it is a mapping '\n'object, a\\n'\n' dictionary is created with the same key-value pairs as '\n'the mapping\\n'\n' object. Otherwise, the positional argument must be an '\n'*iterable*\\n'\n' object. Each item in the iterable must itself be an '\n'iterable with\\n'\n' exactly two objects. The first object of each item '\n'becomes a key\\n'\n' in the new dictionary, and the second object the '\n'corresponding\\n'\n' value. 
If a key occurs more than once, the last value '\n'for that key\\n'\n' becomes the corresponding value in the new dictionary.\\n'\n'\\n'\n' If keyword arguments are given, the keyword arguments and '\n'their\\n'\n' values are added to the dictionary created from the '\n'positional\\n'\n' argument. If a key being added is already present, the '\n'value from\\n'\n' the keyword argument replaces the value from the '\n'positional\\n'\n' argument.\\n'\n'\\n'\n' To illustrate, the following examples all return a '\n'dictionary equal\\n'\n' to \"{\"one\": 1, \"two\": 2, \"three\": 3}\":\\n'\n'\\n'\n' >>> a = dict(one=1, two=2, three=3)\\n'\n\" >>> b = {'one': 1, 'two': 2, 'three': 3}\\n\"\n\" >>> c = dict(zip(['one', 'two', 'three'], [1, 2, 3]))\\n\"\n\" >>> d = dict([('two', 2), ('one', 1), ('three', 3)])\\n\"\n\" >>> e = dict({'three': 3, 'one': 1, 'two': 2})\\n\"\n\" >>> f = dict({'one': 1, 'three': 3}, two=2)\\n\"\n' >>> a == b == c == d == e == f\\n'\n' True\\n'\n'\\n'\n' Providing keyword arguments as in the first example only '\n'works for\\n'\n' keys that are valid Python identifiers. Otherwise, any '\n'valid keys\\n'\n' can be used.\\n'\n'\\n'\n' These are the operations that dictionaries support (and '\n'therefore,\\n'\n' custom mapping types should support too):\\n'\n'\\n'\n' list(d)\\n'\n'\\n'\n' Return a list of all the keys used in the dictionary '\n'*d*.\\n'\n'\\n'\n' len(d)\\n'\n'\\n'\n' Return the number of items in the dictionary *d*.\\n'\n'\\n'\n' d[key]\\n'\n'\\n'\n' Return the item of *d* with key *key*. Raises a '\n'\"KeyError\" if\\n'\n' *key* is not in the map.\\n'\n'\\n'\n' If a subclass of dict defines a method \"__missing__()\" '\n'and *key*\\n'\n' is not present, the \"d[key]\" operation calls that '\n'method with\\n'\n' the key *key* as argument. The \"d[key]\" operation '\n'then returns\\n'\n' or raises whatever is returned or raised by the\\n'\n' \"__missing__(key)\" call. No other operations or '\n'methods invoke\\n'\n' \"__missing__()\". If \"__missing__()\" is not defined, '\n'\"KeyError\"\\n'\n' is raised. \"__missing__()\" must be a method; it cannot '\n'be an\\n'\n' instance variable:\\n'\n'\\n'\n' >>> class Counter(dict):\\n'\n' ... def __missing__(self, key):\\n'\n' ... return 0\\n'\n' >>> c = Counter()\\n'\n\" >>> c['red']\\n\"\n' 0\\n'\n\" >>> c['red'] += 1\\n\"\n\" >>> c['red']\\n\"\n' 1\\n'\n'\\n'\n' The example above shows part of the implementation of\\n'\n' \"collections.Counter\". A different \"__missing__\" '\n'method is used\\n'\n' by \"collections.defaultdict\".\\n'\n'\\n'\n' d[key] = value\\n'\n'\\n'\n' Set \"d[key]\" to *value*.\\n'\n'\\n'\n' del d[key]\\n'\n'\\n'\n' Remove \"d[key]\" from *d*. Raises a \"KeyError\" if '\n'*key* is not\\n'\n' in the map.\\n'\n'\\n'\n' key in d\\n'\n'\\n'\n' Return \"True\" if *d* has a key *key*, else \"False\".\\n'\n'\\n'\n' key not in d\\n'\n'\\n'\n' Equivalent to \"not key in d\".\\n'\n'\\n'\n' iter(d)\\n'\n'\\n'\n' Return an iterator over the keys of the dictionary. '\n'This is a\\n'\n' shortcut for \"iter(d.keys())\".\\n'\n'\\n'\n' clear()\\n'\n'\\n'\n' Remove all items from the dictionary.\\n'\n'\\n'\n' copy()\\n'\n'\\n'\n' Return a shallow copy of the dictionary.\\n'\n'\\n'\n' classmethod fromkeys(iterable[, value])\\n'\n'\\n'\n' Create a new dictionary with keys from *iterable* and '\n'values set\\n'\n' to *value*.\\n'\n'\\n'\n' \"fromkeys()\" is a class method that returns a new '\n'dictionary.\\n'\n' *value* defaults to \"None\". 
All of the values refer '\n'to just a\\n'\n' single instance, so it generally doesn\u2019t make sense '\n'for *value*\\n'\n' to be a mutable object such as an empty list. To get '\n'distinct\\n'\n' values, use a dict comprehension instead.\\n'\n'\\n'\n' get(key[, default])\\n'\n'\\n'\n' Return the value for *key* if *key* is in the '\n'dictionary, else\\n'\n' *default*. If *default* is not given, it defaults to '\n'\"None\", so\\n'\n' that this method never raises a \"KeyError\".\\n'\n'\\n'\n' items()\\n'\n'\\n'\n' Return a new view of the dictionary\u2019s items (\"(key, '\n'value)\"\\n'\n' pairs). See the documentation of view objects.\\n'\n'\\n'\n' keys()\\n'\n'\\n'\n' Return a new view of the dictionary\u2019s keys. See the\\n'\n' documentation of view objects.\\n'\n'\\n'\n' pop(key[, default])\\n'\n'\\n'\n' If *key* is in the dictionary, remove it and return '\n'its value,\\n'\n' else return *default*. If *default* is not given and '\n'*key* is\\n'\n' not in the dictionary, a \"KeyError\" is raised.\\n'\n'\\n'\n' popitem()\\n'\n'\\n'\n' Remove and return a \"(key, value)\" pair from the '\n'dictionary.\\n'\n' Pairs are returned in LIFO (last-in, first-out) '\n'order.\\n'\n'\\n'\n' \"popitem()\" is useful to destructively iterate over a\\n'\n' dictionary, as often used in set algorithms. If the '\n'dictionary\\n'\n' is empty, calling \"popitem()\" raises a \"KeyError\".\\n'\n'\\n'\n' Changed in version 3.7: LIFO order is now guaranteed. '\n'In prior\\n'\n' versions, \"popitem()\" would return an arbitrary '\n'key/value pair.\\n'\n'\\n'\n' reversed(d)\\n'\n'\\n'\n' Return a reverse iterator over the keys of the '\n'dictionary. This\\n'\n' is a shortcut for \"reversed(d.keys())\".\\n'\n'\\n'\n' New in version 3.8.\\n'\n'\\n'\n' setdefault(key[, default])\\n'\n'\\n'\n' If *key* is in the dictionary, return its value. If '\n'not, insert\\n'\n' *key* with a value of *default* and return *default*. '\n'*default*\\n'\n' defaults to \"None\".\\n'\n'\\n'\n' update([other])\\n'\n'\\n'\n' Update the dictionary with the key/value pairs from '\n'*other*,\\n'\n' overwriting existing keys. Return \"None\".\\n'\n'\\n'\n' \"update()\" accepts either another dictionary object or '\n'an\\n'\n' iterable of key/value pairs (as tuples or other '\n'iterables of\\n'\n' length two). If keyword arguments are specified, the '\n'dictionary\\n'\n' is then updated with those key/value pairs: '\n'\"d.update(red=1,\\n'\n' blue=2)\".\\n'\n'\\n'\n' values()\\n'\n'\\n'\n' Return a new view of the dictionary\u2019s values. See '\n'the\\n'\n' documentation of view objects.\\n'\n'\\n'\n' An equality comparison between one \"dict.values()\" '\n'view and\\n'\n' another will always return \"False\". This also applies '\n'when\\n'\n' comparing \"dict.values()\" to itself:\\n'\n'\\n'\n\" >>> d = {'a': 1}\\n\"\n' >>> d.values() == d.values()\\n'\n' False\\n'\n'\\n'\n' d | other\\n'\n'\\n'\n' Create a new dictionary with the merged keys and '\n'values of *d*\\n'\n' and *other*, which must both be dictionaries. The '\n'values of\\n'\n' *other* take priority when *d* and *other* share '\n'keys.\\n'\n'\\n'\n' New in version 3.9.\\n'\n'\\n'\n' d |= other\\n'\n'\\n'\n' Update the dictionary *d* with keys and values from '\n'*other*,\\n'\n' which may be either a *mapping* or an *iterable* of '\n'key/value\\n'\n' pairs. 
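An illustrative sketch of the dictionary methods and merge operators described above, including the "fromkeys()" shared-value caveat (keys and values are invented; the `|`/`|=` operators require Python 3.9+):

    # fromkeys() reuses the same value object for every key.
    shared = dict.fromkeys(["a", "b"], [])
    shared["a"].append(1)
    assert shared == {"a": [1], "b": [1]}          # both keys see the same list

    distinct = {k: [] for k in ["a", "b"]}         # dict comprehension: separate lists
    distinct["a"].append(1)
    assert distinct == {"a": [1], "b": []}

    d = {"red": 1}
    assert d.get("blue") is None and d.setdefault("blue", 2) == 2
    assert d.pop("red") == 1 and d.popitem() == ("blue", 2)

    # Merge operators: the right-hand side wins on shared keys.
    a, b = {"x": 1, "y": 2}, {"y": 20, "z": 3}
    assert (a | b) == {"x": 1, "y": 20, "z": 3}
    a |= b
    assert a == {"x": 1, "y": 20, "z": 3}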
The values of *other* take priority when *d* '\n'and *other*\\n'\n' share keys.\\n'\n'\\n'\n' New in version 3.9.\\n'\n'\\n'\n' Dictionaries compare equal if and only if they have the '\n'same \"(key,\\n'\n' value)\" pairs (regardless of ordering). Order comparisons '\n'(\u2018<\u2019,\\n'\n' \u2018<=\u2019, \u2018>=\u2019, \u2018>\u2019) raise \"TypeError\".\\n'\n'\\n'\n' Dictionaries preserve insertion order. Note that '\n'updating a key\\n'\n' does not affect the order. Keys added after deletion are '\n'inserted\\n'\n' at the end.\\n'\n'\\n'\n' >>> d = {\"one\": 1, \"two\": 2, \"three\": 3, \"four\": 4}\\n'\n' >>> d\\n'\n\" {'one': 1, 'two': 2, 'three': 3, 'four': 4}\\n\"\n' >>> list(d)\\n'\n\" ['one', 'two', 'three', 'four']\\n\"\n' >>> list(d.values())\\n'\n' [1, 2, 3, 4]\\n'\n' >>> d[\"one\"] = 42\\n'\n' >>> d\\n'\n\" {'one': 42, 'two': 2, 'three': 3, 'four': 4}\\n\"\n' >>> del d[\"two\"]\\n'\n' >>> d[\"two\"] = None\\n'\n' >>> d\\n'\n\" {'one': 42, 'three': 3, 'four': 4, 'two': None}\\n\"\n'\\n'\n' Changed in version 3.7: Dictionary order is guaranteed to '\n'be\\n'\n' insertion order. This behavior was an implementation '\n'detail of\\n'\n' CPython from 3.6.\\n'\n'\\n'\n' Dictionaries and dictionary views are reversible.\\n'\n'\\n'\n' >>> d = {\"one\": 1, \"two\": 2, \"three\": 3, \"four\": 4}\\n'\n' >>> d\\n'\n\" {'one': 1, 'two': 2, 'three': 3, 'four': 4}\\n\"\n' >>> list(reversed(d))\\n'\n\" ['four', 'three', 'two', 'one']\\n\"\n' >>> list(reversed(d.values()))\\n'\n' [4, 3, 2, 1]\\n'\n' >>> list(reversed(d.items()))\\n'\n\" [('four', 4), ('three', 3), ('two', 2), ('one', 1)]\\n\"\n'\\n'\n' Changed in version 3.8: Dictionaries are now reversible.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' \"types.MappingProxyType\" can be used to create a read-only '\n'view of a\\n'\n' \"dict\".\\n'\n'\\n'\n'\\n'\n'Dictionary view objects\\n'\n'=======================\\n'\n'\\n'\n'The objects returned by \"dict.keys()\", \"dict.values()\" and\\n'\n'\"dict.items()\" are *view objects*. They provide a dynamic '\n'view on the\\n'\n'dictionary\u2019s entries, which means that when the dictionary '\n'changes,\\n'\n'the view reflects these changes.\\n'\n'\\n'\n'Dictionary views can be iterated over to yield their '\n'respective data,\\n'\n'and support membership tests:\\n'\n'\\n'\n'len(dictview)\\n'\n'\\n'\n' Return the number of entries in the dictionary.\\n'\n'\\n'\n'iter(dictview)\\n'\n'\\n'\n' Return an iterator over the keys, values or items '\n'(represented as\\n'\n' tuples of \"(key, value)\") in the dictionary.\\n'\n'\\n'\n' Keys and values are iterated over in insertion order. '\n'This allows\\n'\n' the creation of \"(value, key)\" pairs using \"zip()\": '\n'\"pairs =\\n'\n' zip(d.values(), d.keys())\". Another way to create the '\n'same list is\\n'\n' \"pairs = [(v, k) for (k, v) in d.items()]\".\\n'\n'\\n'\n' Iterating views while adding or deleting entries in the '\n'dictionary\\n'\n' may raise a \"RuntimeError\" or fail to iterate over all '\n'entries.\\n'\n'\\n'\n' Changed in version 3.7: Dictionary order is guaranteed to '\n'be\\n'\n' insertion order.\\n'\n'\\n'\n'x in dictview\\n'\n'\\n'\n' Return \"True\" if *x* is in the underlying dictionary\u2019s '\n'keys, values\\n'\n' or items (in the latter case, *x* should be a \"(key, '\n'value)\"\\n'\n' tuple).\\n'\n'\\n'\n'reversed(dictview)\\n'\n'\\n'\n' Return a reverse iterator over the keys, values or items '\n'of the\\n'\n' dictionary. 
The view will be iterated in reverse order of '\n'the\\n'\n' insertion.\\n'\n'\\n'\n' Changed in version 3.8: Dictionary views are now '\n'reversible.\\n'\n'\\n'\n'dictview.mapping\\n'\n'\\n'\n' Return a \"types.MappingProxyType\" that wraps the '\n'original\\n'\n' dictionary to which the view refers.\\n'\n'\\n'\n' New in version 3.10.\\n'\n'\\n'\n'Keys views are set-like since their entries are unique and '\n'hashable.\\n'\n'If all values are hashable, so that \"(key, value)\" pairs are '\n'unique\\n'\n'and hashable, then the items view is also set-like. (Values '\n'views are\\n'\n'not treated as set-like since the entries are generally not '\n'unique.)\\n'\n'For set-like views, all of the operations defined for the '\n'abstract\\n'\n'base class \"collections.abc.Set\" are available (for example, '\n'\"==\",\\n'\n'\"<\", or \"^\").\\n'\n'\\n'\n'An example of dictionary view usage:\\n'\n'\\n'\n\" >>> dishes = {'eggs': 2, 'sausage': 1, 'bacon': 1, \"\n\"'spam': 500}\\n\"\n' >>> keys = dishes.keys()\\n'\n' >>> values = dishes.values()\\n'\n'\\n'\n' >>> # iteration\\n'\n' >>> n = 0\\n'\n' >>> for val in values:\\n'\n' ... n += val\\n'\n' >>> print(n)\\n'\n' 504\\n'\n'\\n'\n' >>> # keys and values are iterated over in the same order '\n'(insertion order)\\n'\n' >>> list(keys)\\n'\n\" ['eggs', 'sausage', 'bacon', 'spam']\\n\"\n' >>> list(values)\\n'\n' [2, 1, 1, 500]\\n'\n'\\n'\n' >>> # view objects are dynamic and reflect dict changes\\n'\n\" >>> del dishes['eggs']\\n\"\n\" >>> del dishes['sausage']\\n\"\n' >>> list(keys)\\n'\n\" ['bacon', 'spam']\\n\"\n'\\n'\n' >>> # set operations\\n'\n\" >>> keys & {'eggs', 'bacon', 'salad'}\\n\"\n\" {'bacon'}\\n\"\n\" >>> keys ^ {'sausage', 'juice'}\\n\"\n\" {'juice', 'sausage', 'bacon', 'spam'}\\n\"\n'\\n'\n' >>> # get back a read-only proxy for the original '\n'dictionary\\n'\n' >>> values.mapping\\n'\n\" mappingproxy({'eggs': 2, 'sausage': 1, 'bacon': 1, \"\n\"'spam': 500})\\n\"\n\" >>> values.mapping['spam']\\n\"\n' 500\\n',\n'typesmethods':'Methods\\n'\n'*******\\n'\n'\\n'\n'Methods are functions that are called using the attribute '\n'notation.\\n'\n'There are two flavors: built-in methods (such as \"append()\" '\n'on lists)\\n'\n'and class instance methods. Built-in methods are described '\n'with the\\n'\n'types that support them.\\n'\n'\\n'\n'If you access a method (a function defined in a class '\n'namespace)\\n'\n'through an instance, you get a special object: a *bound '\n'method* (also\\n'\n'called *instance method*) object. When called, it will add '\n'the \"self\"\\n'\n'argument to the argument list. Bound methods have two '\n'special read-\\n'\n'only attributes: \"m.__self__\" is the object on which the '\n'method\\n'\n'operates, and \"m.__func__\" is the function implementing the '\n'method.\\n'\n'Calling \"m(arg-1, arg-2, ..., arg-n)\" is completely '\n'equivalent to\\n'\n'calling \"m.__func__(m.__self__, arg-1, arg-2, ..., arg-n)\".\\n'\n'\\n'\n'Like function objects, bound method objects support getting '\n'arbitrary\\n'\n'attributes. However, since method attributes are actually '\n'stored on\\n'\n'the underlying function object (\"meth.__func__\"), setting '\n'method\\n'\n'attributes on bound methods is disallowed. Attempting to '\n'set an\\n'\n'attribute on a method results in an \"AttributeError\" being '\n'raised. In\\n'\n'order to set a method attribute, you need to explicitly set '\n'it on the\\n'\n'underlying function object:\\n'\n'\\n'\n' >>> class C:\\n'\n' ... def method(self):\\n'\n' ... 
pass\\n'\n' ...\\n'\n' >>> c = C()\\n'\n\" >>> c.method.whoami = 'my name is method' # can't set on \"\n'the method\\n'\n' Traceback (most recent call last):\\n'\n' File \"\", line 1, in \\n'\n\" AttributeError: 'method' object has no attribute \"\n\"'whoami'\\n\"\n\" >>> c.method.__func__.whoami = 'my name is method'\\n\"\n' >>> c.method.whoami\\n'\n\" 'my name is method'\\n\"\n'\\n'\n'See The standard type hierarchy for more information.\\n',\n'typesmodules':'Modules\\n'\n'*******\\n'\n'\\n'\n'The only special operation on a module is attribute access: '\n'\"m.name\",\\n'\n'where *m* is a module and *name* accesses a name defined in '\n'*m*\u2019s\\n'\n'symbol table. Module attributes can be assigned to. (Note '\n'that the\\n'\n'\"import\" statement is not, strictly speaking, an operation '\n'on a module\\n'\n'object; \"import foo\" does not require a module object named '\n'*foo* to\\n'\n'exist, rather it requires an (external) *definition* for a '\n'module\\n'\n'named *foo* somewhere.)\\n'\n'\\n'\n'A special attribute of every module is \"__dict__\". This is '\n'the\\n'\n'dictionary containing the module\u2019s symbol table. Modifying '\n'this\\n'\n'dictionary will actually change the module\u2019s symbol table, '\n'but direct\\n'\n'assignment to the \"__dict__\" attribute is not possible (you '\n'can write\\n'\n'\"m.__dict__[\\'a\\'] = 1\", which defines \"m.a\" to be \"1\", but '\n'you can\u2019t\\n'\n'write \"m.__dict__ = {}\"). Modifying \"__dict__\" directly is '\n'not\\n'\n'recommended.\\n'\n'\\n'\n'Modules built into the interpreter are written like this: '\n'\"\". If loaded from a file, they are '\n'written as\\n'\n'\"\".\\n',\n'typesseq':'Sequence Types \u2014 \"list\", \"tuple\", \"range\"\\n'\n'*****************************************\\n'\n'\\n'\n'There are three basic sequence types: lists, tuples, and range\\n'\n'objects. Additional sequence types tailored for processing of '\n'binary\\n'\n'data and text strings are described in dedicated sections.\\n'\n'\\n'\n'\\n'\n'Common Sequence Operations\\n'\n'==========================\\n'\n'\\n'\n'The operations in the following table are supported by most '\n'sequence\\n'\n'types, both mutable and immutable. The '\n'\"collections.abc.Sequence\" ABC\\n'\n'is provided to make it easier to correctly implement these '\n'operations\\n'\n'on custom sequence types.\\n'\n'\\n'\n'This table lists the sequence operations sorted in ascending '\n'priority.\\n'\n'In the table, *s* and *t* are sequences of the same type, *n*, '\n'*i*,\\n'\n'*j* and *k* are integers and *x* is an arbitrary object that '\n'meets any\\n'\n'type and value restrictions imposed by *s*.\\n'\n'\\n'\n'The \"in\" and \"not in\" operations have the same priorities as '\n'the\\n'\n'comparison operations. The \"+\" (concatenation) and \"*\" '\n'(repetition)\\n'\n'operations have the same priority as the corresponding numeric\\n'\n'operations. 
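A minimal sketch of the module "__dict__" behaviour described above (module name is illustrative; the AttributeError shown is the behaviour documented for CPython and may differ in other implementations):

    import types

    m = types.ModuleType("demo")
    m.__dict__["a"] = 1            # modifying the namespace dict defines m.a
    assert m.a == 1

    try:
        m.__dict__ = {}            # direct assignment to __dict__ is not allowed
    except AttributeError:
        replaced = False
    else:
        replaced = True
    assert replaced is False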
[3]\\n'\n'\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'| Operation | Result '\n'| Notes |\\n'\n'|============================|==================================|============|\\n'\n'| \"x in s\" | \"True\" if an item of *s* is '\n'| (1) |\\n'\n'| | equal to *x*, else \"False\" '\n'| |\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'| \"x not in s\" | \"False\" if an item of *s* is '\n'| (1) |\\n'\n'| | equal to *x*, else \"True\" '\n'| |\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'| \"s + t\" | the concatenation of *s* and *t* '\n'| (6)(7) |\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'| \"s * n\" or \"n * s\" | equivalent to adding *s* to '\n'| (2)(7) |\\n'\n'| | itself *n* times '\n'| |\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'| \"s[i]\" | *i*th item of *s*, origin 0 '\n'| (3) |\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'| \"s[i:j]\" | slice of *s* from *i* to *j* '\n'| (3)(4) |\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'| \"s[i:j:k]\" | slice of *s* from *i* to *j* '\n'| (3)(5) |\\n'\n'| | with step *k* '\n'| |\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'| \"len(s)\" | length of *s* '\n'| |\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'| \"min(s)\" | smallest item of *s* '\n'| |\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'| \"max(s)\" | largest item of *s* '\n'| |\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'| \"s.index(x[, i[, j]])\" | index of the first occurrence of '\n'| (8) |\\n'\n'| | *x* in *s* (at or after index '\n'| |\\n'\n'| | *i* and before index *j*) '\n'| |\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'| \"s.count(x)\" | total number of occurrences of '\n'| |\\n'\n'| | *x* in *s* '\n'| |\\n'\n'+----------------------------+----------------------------------+------------+\\n'\n'\\n'\n'Sequences of the same type also support comparisons. In '\n'particular,\\n'\n'tuples and lists are compared lexicographically by comparing\\n'\n'corresponding elements. This means that to compare equal, every\\n'\n'element must compare equal and the two sequences must be of the '\n'same\\n'\n'type and have the same length. (For full details see '\n'Comparisons in\\n'\n'the language reference.)\\n'\n'\\n'\n'Notes:\\n'\n'\\n'\n'1. While the \"in\" and \"not in\" operations are used only for '\n'simple\\n'\n' containment testing in the general case, some specialised '\n'sequences\\n'\n' (such as \"str\", \"bytes\" and \"bytearray\") also use them for\\n'\n' subsequence testing:\\n'\n'\\n'\n' >>> \"gg\" in \"eggs\"\\n'\n' True\\n'\n'\\n'\n'2. Values of *n* less than \"0\" are treated as \"0\" (which yields '\n'an\\n'\n' empty sequence of the same type as *s*). 
Note that items in '\n'the\\n'\n' sequence *s* are not copied; they are referenced multiple '\n'times.\\n'\n' This often haunts new Python programmers; consider:\\n'\n'\\n'\n' >>> lists = [[]] * 3\\n'\n' >>> lists\\n'\n' [[], [], []]\\n'\n' >>> lists[0].append(3)\\n'\n' >>> lists\\n'\n' [[3], [3], [3]]\\n'\n'\\n'\n' What has happened is that \"[[]]\" is a one-element list '\n'containing\\n'\n' an empty list, so all three elements of \"[[]] * 3\" are '\n'references\\n'\n' to this single empty list. Modifying any of the elements of\\n'\n' \"lists\" modifies this single list. You can create a list of\\n'\n' different lists this way:\\n'\n'\\n'\n' >>> lists = [[] for i in range(3)]\\n'\n' >>> lists[0].append(3)\\n'\n' >>> lists[1].append(5)\\n'\n' >>> lists[2].append(7)\\n'\n' >>> lists\\n'\n' [[3], [5], [7]]\\n'\n'\\n'\n' Further explanation is available in the FAQ entry How do I '\n'create a\\n'\n' multidimensional list?.\\n'\n'\\n'\n'3. If *i* or *j* is negative, the index is relative to the end '\n'of\\n'\n' sequence *s*: \"len(s) + i\" or \"len(s) + j\" is substituted. '\n'But\\n'\n' note that \"-0\" is still \"0\".\\n'\n'\\n'\n'4. The slice of *s* from *i* to *j* is defined as the sequence '\n'of\\n'\n' items with index *k* such that \"i <= k < j\". If *i* or *j* '\n'is\\n'\n' greater than \"len(s)\", use \"len(s)\". If *i* is omitted or '\n'\"None\",\\n'\n' use \"0\". If *j* is omitted or \"None\", use \"len(s)\". If *i* '\n'is\\n'\n' greater than or equal to *j*, the slice is empty.\\n'\n'\\n'\n'5. The slice of *s* from *i* to *j* with step *k* is defined as '\n'the\\n'\n' sequence of items with index \"x = i + n*k\" such that \"0 <= n '\n'<\\n'\n' (j-i)/k\". In other words, the indices are \"i\", \"i+k\", '\n'\"i+2*k\",\\n'\n' \"i+3*k\" and so on, stopping when *j* is reached (but never\\n'\n' including *j*). When *k* is positive, *i* and *j* are '\n'reduced to\\n'\n' \"len(s)\" if they are greater. When *k* is negative, *i* and '\n'*j* are\\n'\n' reduced to \"len(s) - 1\" if they are greater. If *i* or *j* '\n'are\\n'\n' omitted or \"None\", they become \u201cend\u201d values (which end '\n'depends on\\n'\n' the sign of *k*). Note, *k* cannot be zero. If *k* is '\n'\"None\", it\\n'\n' is treated like \"1\".\\n'\n'\\n'\n'6. Concatenating immutable sequences always results in a new '\n'object.\\n'\n' This means that building up a sequence by repeated '\n'concatenation\\n'\n' will have a quadratic runtime cost in the total sequence '\n'length.\\n'\n' To get a linear runtime cost, you must switch to one of the\\n'\n' alternatives below:\\n'\n'\\n'\n' * if concatenating \"str\" objects, you can build a list and '\n'use\\n'\n' \"str.join()\" at the end or else write to an \"io.StringIO\"\\n'\n' instance and retrieve its value when complete\\n'\n'\\n'\n' * if concatenating \"bytes\" objects, you can similarly use\\n'\n' \"bytes.join()\" or \"io.BytesIO\", or you can do in-place\\n'\n' concatenation with a \"bytearray\" object. \"bytearray\" '\n'objects are\\n'\n' mutable and have an efficient overallocation mechanism\\n'\n'\\n'\n' * if concatenating \"tuple\" objects, extend a \"list\" instead\\n'\n'\\n'\n' * for other types, investigate the relevant class '\n'documentation\\n'\n'\\n'\n'7. Some sequence types (such as \"range\") only support item '\n'sequences\\n'\n' that follow specific patterns, and hence don\u2019t support '\n'sequence\\n'\n' concatenation or repetition.\\n'\n'\\n'\n'8. \"index\" raises \"ValueError\" when *x* is not found in *s*. 
Not '\n'all\\n'\n' implementations support passing the additional arguments *i* '\n'and\\n'\n' *j*. These arguments allow efficient searching of subsections '\n'of\\n'\n' the sequence. Passing the extra arguments is roughly '\n'equivalent to\\n'\n' using \"s[i:j].index(x)\", only without copying any data and '\n'with the\\n'\n' returned index being relative to the start of the sequence '\n'rather\\n'\n' than the start of the slice.\\n'\n'\\n'\n'\\n'\n'Immutable Sequence Types\\n'\n'========================\\n'\n'\\n'\n'The only operation that immutable sequence types generally '\n'implement\\n'\n'that is not also implemented by mutable sequence types is '\n'support for\\n'\n'the \"hash()\" built-in.\\n'\n'\\n'\n'This support allows immutable sequences, such as \"tuple\" '\n'instances, to\\n'\n'be used as \"dict\" keys and stored in \"set\" and \"frozenset\" '\n'instances.\\n'\n'\\n'\n'Attempting to hash an immutable sequence that contains '\n'unhashable\\n'\n'values will result in \"TypeError\".\\n'\n'\\n'\n'\\n'\n'Mutable Sequence Types\\n'\n'======================\\n'\n'\\n'\n'The operations in the following table are defined on mutable '\n'sequence\\n'\n'types. The \"collections.abc.MutableSequence\" ABC is provided to '\n'make\\n'\n'it easier to correctly implement these operations on custom '\n'sequence\\n'\n'types.\\n'\n'\\n'\n'In the table *s* is an instance of a mutable sequence type, *t* '\n'is any\\n'\n'iterable object and *x* is an arbitrary object that meets any '\n'type and\\n'\n'value restrictions imposed by *s* (for example, \"bytearray\" '\n'only\\n'\n'accepts integers that meet the value restriction \"0 <= x <= '\n'255\").\\n'\n'\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| Operation | '\n'Result | Notes |\\n'\n'|================================|==================================|=======================|\\n'\n'| \"s[i] = x\" | item *i* of *s* is replaced '\n'by | |\\n'\n'| | '\n'*x* | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s[i:j] = t\" | slice of *s* from *i* to *j* '\n'is | |\\n'\n'| | replaced by the contents of '\n'the | |\\n'\n'| | iterable '\n'*t* | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"del s[i:j]\" | same as \"s[i:j] = '\n'[]\" | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s[i:j:k] = t\" | the elements of \"s[i:j:k]\" '\n'are | (1) |\\n'\n'| | replaced by those of '\n'*t* | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"del s[i:j:k]\" | removes the elements '\n'of | |\\n'\n'| | \"s[i:j:k]\" from the '\n'list | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.append(x)\" | appends *x* to the end of '\n'the | |\\n'\n'| | sequence (same '\n'as | |\\n'\n'| | \"s[len(s):len(s)] = '\n'[x]\") | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.clear()\" | removes all items from *s* '\n'(same | (5) |\\n'\n'| | as \"del '\n's[:]\") | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.copy()\" | creates a shallow copy of '\n'*s* | (5) |\\n'\n'| | (same as '\n'\"s[:]\") | 
|\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.extend(t)\" or \"s += t\" | extends *s* with the contents '\n'of | |\\n'\n'| | *t* (for the most part the '\n'same | |\\n'\n'| | as \"s[len(s):len(s)] = '\n't\") | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s *= n\" | updates *s* with its '\n'contents | (6) |\\n'\n'| | repeated *n* '\n'times | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.insert(i, x)\" | inserts *x* into *s* at '\n'the | |\\n'\n'| | index given by *i* (same '\n'as | |\\n'\n'| | \"s[i:i] = '\n'[x]\") | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.pop()\" or \"s.pop(i)\" | retrieves the item at *i* '\n'and | (2) |\\n'\n'| | also removes it from '\n'*s* | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.remove(x)\" | remove the first item from '\n'*s* | (3) |\\n'\n'| | where \"s[i]\" is equal to '\n'*x* | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.reverse()\" | reverses the items of *s* '\n'in | (4) |\\n'\n'| | '\n'place | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'\\n'\n'Notes:\\n'\n'\\n'\n'1. *t* must have the same length as the slice it is replacing.\\n'\n'\\n'\n'2. The optional argument *i* defaults to \"-1\", so that by '\n'default the\\n'\n' last item is removed and returned.\\n'\n'\\n'\n'3. \"remove()\" raises \"ValueError\" when *x* is not found in *s*.\\n'\n'\\n'\n'4. The \"reverse()\" method modifies the sequence in place for '\n'economy\\n'\n' of space when reversing a large sequence. To remind users '\n'that it\\n'\n' operates by side effect, it does not return the reversed '\n'sequence.\\n'\n'\\n'\n'5. \"clear()\" and \"copy()\" are included for consistency with the\\n'\n' interfaces of mutable containers that don\u2019t support slicing\\n'\n' operations (such as \"dict\" and \"set\"). \"copy()\" is not part '\n'of the\\n'\n' \"collections.abc.MutableSequence\" ABC, but most concrete '\n'mutable\\n'\n' sequence classes provide it.\\n'\n'\\n'\n' New in version 3.3: \"clear()\" and \"copy()\" methods.\\n'\n'\\n'\n'6. The value *n* is an integer, or an object implementing\\n'\n' \"__index__()\". Zero and negative values of *n* clear the '\n'sequence.\\n'\n' Items in the sequence are not copied; they are referenced '\n'multiple\\n'\n' times, as explained for \"s * n\" under Common Sequence '\n'Operations.\\n'\n'\\n'\n'\\n'\n'Lists\\n'\n'=====\\n'\n'\\n'\n'Lists are mutable sequences, typically used to store collections '\n'of\\n'\n'homogeneous items (where the precise degree of similarity will '\n'vary by\\n'\n'application).\\n'\n'\\n'\n'class list([iterable])\\n'\n'\\n'\n' Lists may be constructed in several ways:\\n'\n'\\n'\n' * Using a pair of square brackets to denote the empty list: '\n'\"[]\"\\n'\n'\\n'\n' * Using square brackets, separating items with commas: \"[a]\", '\n'\"[a,\\n'\n' b, c]\"\\n'\n'\\n'\n' * Using a list comprehension: \"[x for x in iterable]\"\\n'\n'\\n'\n' * Using the type constructor: \"list()\" or \"list(iterable)\"\\n'\n'\\n'\n' The constructor builds a list whose items are the same and in '\n'the\\n'\n' same order as *iterable*\u2019s items. 
*iterable* may be either '\n'a\\n'\n' sequence, a container that supports iteration, or an '\n'iterator\\n'\n' object. If *iterable* is already a list, a copy is made and\\n'\n' returned, similar to \"iterable[:]\". For example, '\n'\"list(\\'abc\\')\"\\n'\n' returns \"[\\'a\\', \\'b\\', \\'c\\']\" and \"list( (1, 2, 3) )\" '\n'returns \"[1, 2,\\n'\n' 3]\". If no argument is given, the constructor creates a new '\n'empty\\n'\n' list, \"[]\".\\n'\n'\\n'\n' Many other operations also produce lists, including the '\n'\"sorted()\"\\n'\n' built-in.\\n'\n'\\n'\n' Lists implement all of the common and mutable sequence '\n'operations.\\n'\n' Lists also provide the following additional method:\\n'\n'\\n'\n' sort(*, key=None, reverse=False)\\n'\n'\\n'\n' This method sorts the list in place, using only \"<\" '\n'comparisons\\n'\n' between items. Exceptions are not suppressed - if any '\n'comparison\\n'\n' operations fail, the entire sort operation will fail (and '\n'the\\n'\n' list will likely be left in a partially modified state).\\n'\n'\\n'\n' \"sort()\" accepts two arguments that can only be passed by\\n'\n' keyword (keyword-only arguments):\\n'\n'\\n'\n' *key* specifies a function of one argument that is used '\n'to\\n'\n' extract a comparison key from each list element (for '\n'example,\\n'\n' \"key=str.lower\"). The key corresponding to each item in '\n'the list\\n'\n' is calculated once and then used for the entire sorting '\n'process.\\n'\n' The default value of \"None\" means that list items are '\n'sorted\\n'\n' directly without calculating a separate key value.\\n'\n'\\n'\n' The \"functools.cmp_to_key()\" utility is available to '\n'convert a\\n'\n' 2.x style *cmp* function to a *key* function.\\n'\n'\\n'\n' *reverse* is a boolean value. If set to \"True\", then the '\n'list\\n'\n' elements are sorted as if each comparison were reversed.\\n'\n'\\n'\n' This method modifies the sequence in place for economy of '\n'space\\n'\n' when sorting a large sequence. To remind users that it '\n'operates\\n'\n' by side effect, it does not return the sorted sequence '\n'(use\\n'\n' \"sorted()\" to explicitly request a new sorted list '\n'instance).\\n'\n'\\n'\n' The \"sort()\" method is guaranteed to be stable. A sort '\n'is\\n'\n' stable if it guarantees not to change the relative order '\n'of\\n'\n' elements that compare equal \u2014 this is helpful for sorting '\n'in\\n'\n' multiple passes (for example, sort by department, then by '\n'salary\\n'\n' grade).\\n'\n'\\n'\n' For sorting examples and a brief sorting tutorial, see '\n'Sorting\\n'\n' HOW TO.\\n'\n'\\n'\n' **CPython implementation detail:** While a list is being '\n'sorted,\\n'\n' the effect of attempting to mutate, or even inspect, the '\n'list is\\n'\n' undefined. The C implementation of Python makes the list '\n'appear\\n'\n' empty for the duration, and raises \"ValueError\" if it can '\n'detect\\n'\n' that the list has been mutated during a sort.\\n'\n'\\n'\n'\\n'\n'Tuples\\n'\n'======\\n'\n'\\n'\n'Tuples are immutable sequences, typically used to store '\n'collections of\\n'\n'heterogeneous data (such as the 2-tuples produced by the '\n'\"enumerate()\"\\n'\n'built-in). 
Tuples are also used for cases where an immutable '\n'sequence\\n'\n'of homogeneous data is needed (such as allowing storage in a '\n'\"set\" or\\n'\n'\"dict\" instance).\\n'\n'\\n'\n'class tuple([iterable])\\n'\n'\\n'\n' Tuples may be constructed in a number of ways:\\n'\n'\\n'\n' * Using a pair of parentheses to denote the empty tuple: '\n'\"()\"\\n'\n'\\n'\n' * Using a trailing comma for a singleton tuple: \"a,\" or '\n'\"(a,)\"\\n'\n'\\n'\n' * Separating items with commas: \"a, b, c\" or \"(a, b, c)\"\\n'\n'\\n'\n' * Using the \"tuple()\" built-in: \"tuple()\" or '\n'\"tuple(iterable)\"\\n'\n'\\n'\n' The constructor builds a tuple whose items are the same and '\n'in the\\n'\n' same order as *iterable*\u2019s items. *iterable* may be either '\n'a\\n'\n' sequence, a container that supports iteration, or an '\n'iterator\\n'\n' object. If *iterable* is already a tuple, it is returned\\n'\n' unchanged. For example, \"tuple(\\'abc\\')\" returns \"(\\'a\\', '\n'\\'b\\', \\'c\\')\"\\n'\n' and \"tuple( [1, 2, 3] )\" returns \"(1, 2, 3)\". If no argument '\n'is\\n'\n' given, the constructor creates a new empty tuple, \"()\".\\n'\n'\\n'\n' Note that it is actually the comma which makes a tuple, not '\n'the\\n'\n' parentheses. The parentheses are optional, except in the '\n'empty\\n'\n' tuple case, or when they are needed to avoid syntactic '\n'ambiguity.\\n'\n' For example, \"f(a, b, c)\" is a function call with three '\n'arguments,\\n'\n' while \"f((a, b, c))\" is a function call with a 3-tuple as the '\n'sole\\n'\n' argument.\\n'\n'\\n'\n' Tuples implement all of the common sequence operations.\\n'\n'\\n'\n'For heterogeneous collections of data where access by name is '\n'clearer\\n'\n'than access by index, \"collections.namedtuple()\" may be a more\\n'\n'appropriate choice than a simple tuple object.\\n'\n'\\n'\n'\\n'\n'Ranges\\n'\n'======\\n'\n'\\n'\n'The \"range\" type represents an immutable sequence of numbers and '\n'is\\n'\n'commonly used for looping a specific number of times in \"for\" '\n'loops.\\n'\n'\\n'\n'class range(stop)\\n'\n'class range(start, stop[, step])\\n'\n'\\n'\n' The arguments to the range constructor must be integers '\n'(either\\n'\n' built-in \"int\" or any object that implements the \"__index__\"\\n'\n' special method). If the *step* argument is omitted, it '\n'defaults to\\n'\n' \"1\". If the *start* argument is omitted, it defaults to \"0\". '\n'If\\n'\n' *step* is zero, \"ValueError\" is raised.\\n'\n'\\n'\n' For a positive *step*, the contents of a range \"r\" are '\n'determined\\n'\n' by the formula \"r[i] = start + step*i\" where \"i >= 0\" and '\n'\"r[i] <\\n'\n' stop\".\\n'\n'\\n'\n' For a negative *step*, the contents of the range are still\\n'\n' determined by the formula \"r[i] = start + step*i\", but the\\n'\n' constraints are \"i >= 0\" and \"r[i] > stop\".\\n'\n'\\n'\n' A range object will be empty if \"r[0]\" does not meet the '\n'value\\n'\n' constraint. 
Ranges do support negative indices, but these '\n'are\\n'\n' interpreted as indexing from the end of the sequence '\n'determined by\\n'\n' the positive indices.\\n'\n'\\n'\n' Ranges containing absolute values larger than \"sys.maxsize\" '\n'are\\n'\n' permitted but some features (such as \"len()\") may raise\\n'\n' \"OverflowError\".\\n'\n'\\n'\n' Range examples:\\n'\n'\\n'\n' >>> list(range(10))\\n'\n' [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\\n'\n' >>> list(range(1, 11))\\n'\n' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\\n'\n' >>> list(range(0, 30, 5))\\n'\n' [0, 5, 10, 15, 20, 25]\\n'\n' >>> list(range(0, 10, 3))\\n'\n' [0, 3, 6, 9]\\n'\n' >>> list(range(0, -10, -1))\\n'\n' [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]\\n'\n' >>> list(range(0))\\n'\n' []\\n'\n' >>> list(range(1, 0))\\n'\n' []\\n'\n'\\n'\n' Ranges implement all of the common sequence operations '\n'except\\n'\n' concatenation and repetition (due to the fact that range '\n'objects\\n'\n' can only represent sequences that follow a strict pattern '\n'and\\n'\n' repetition and concatenation will usually violate that '\n'pattern).\\n'\n'\\n'\n' start\\n'\n'\\n'\n' The value of the *start* parameter (or \"0\" if the '\n'parameter was\\n'\n' not supplied)\\n'\n'\\n'\n' stop\\n'\n'\\n'\n' The value of the *stop* parameter\\n'\n'\\n'\n' step\\n'\n'\\n'\n' The value of the *step* parameter (or \"1\" if the parameter '\n'was\\n'\n' not supplied)\\n'\n'\\n'\n'The advantage of the \"range\" type over a regular \"list\" or '\n'\"tuple\" is\\n'\n'that a \"range\" object will always take the same (small) amount '\n'of\\n'\n'memory, no matter the size of the range it represents (as it '\n'only\\n'\n'stores the \"start\", \"stop\" and \"step\" values, calculating '\n'individual\\n'\n'items and subranges as needed).\\n'\n'\\n'\n'Range objects implement the \"collections.abc.Sequence\" ABC, and\\n'\n'provide features such as containment tests, element index '\n'lookup,\\n'\n'slicing and support for negative indices (see Sequence Types \u2014 '\n'list,\\n'\n'tuple, range):\\n'\n'\\n'\n'>>> r = range(0, 20, 2)\\n'\n'>>> r\\n'\n'range(0, 20, 2)\\n'\n'>>> 11 in r\\n'\n'False\\n'\n'>>> 10 in r\\n'\n'True\\n'\n'>>> r.index(10)\\n'\n'5\\n'\n'>>> r[5]\\n'\n'10\\n'\n'>>> r[:5]\\n'\n'range(0, 10, 2)\\n'\n'>>> r[-1]\\n'\n'18\\n'\n'\\n'\n'Testing range objects for equality with \"==\" and \"!=\" compares '\n'them as\\n'\n'sequences. That is, two range objects are considered equal if '\n'they\\n'\n'represent the same sequence of values. (Note that two range '\n'objects\\n'\n'that compare equal might have different \"start\", \"stop\" and '\n'\"step\"\\n'\n'attributes, for example \"range(0) == range(2, 1, 3)\" or '\n'\"range(0, 3,\\n'\n'2) == range(0, 4, 2)\".)\\n'\n'\\n'\n'Changed in version 3.2: Implement the Sequence ABC. Support '\n'slicing\\n'\n'and negative indices. 
Test \"int\" objects for membership in '\n'constant\\n'\n'time instead of iterating through all items.\\n'\n'\\n'\n'Changed in version 3.3: Define \u2018==\u2019 and \u2018!=\u2019 to compare range '\n'objects\\n'\n'based on the sequence of values they define (instead of '\n'comparing\\n'\n'based on object identity).\\n'\n'\\n'\n'New in version 3.3: The \"start\", \"stop\" and \"step\" attributes.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' * The linspace recipe shows how to implement a lazy version of '\n'range\\n'\n' suitable for floating point applications.\\n',\n'typesseq-mutable':'Mutable Sequence Types\\n'\n'**********************\\n'\n'\\n'\n'The operations in the following table are defined on '\n'mutable sequence\\n'\n'types. The \"collections.abc.MutableSequence\" ABC is '\n'provided to make\\n'\n'it easier to correctly implement these operations on '\n'custom sequence\\n'\n'types.\\n'\n'\\n'\n'In the table *s* is an instance of a mutable sequence '\n'type, *t* is any\\n'\n'iterable object and *x* is an arbitrary object that '\n'meets any type and\\n'\n'value restrictions imposed by *s* (for example, '\n'\"bytearray\" only\\n'\n'accepts integers that meet the value restriction \"0 <= x '\n'<= 255\").\\n'\n'\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| Operation | '\n'Result | Notes '\n'|\\n'\n'|================================|==================================|=======================|\\n'\n'| \"s[i] = x\" | item *i* of *s* is '\n'replaced by | |\\n'\n'| | '\n'*x* | '\n'|\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s[i:j] = t\" | slice of *s* from *i* '\n'to *j* is | |\\n'\n'| | replaced by the '\n'contents of the | |\\n'\n'| | iterable '\n'*t* | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"del s[i:j]\" | same as \"s[i:j] = '\n'[]\" | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s[i:j:k] = t\" | the elements of '\n'\"s[i:j:k]\" are | (1) |\\n'\n'| | replaced by those of '\n'*t* | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"del s[i:j:k]\" | removes the elements '\n'of | |\\n'\n'| | \"s[i:j:k]\" from the '\n'list | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.append(x)\" | appends *x* to the '\n'end of the | |\\n'\n'| | sequence (same '\n'as | |\\n'\n'| | \"s[len(s):len(s)] = '\n'[x]\") | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.clear()\" | removes all items '\n'from *s* (same | (5) |\\n'\n'| | as \"del '\n's[:]\") | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.copy()\" | creates a shallow '\n'copy of *s* | (5) |\\n'\n'| | (same as '\n'\"s[:]\") | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.extend(t)\" or \"s += t\" | extends *s* with the '\n'contents of | |\\n'\n'| | *t* (for the most '\n'part the same | |\\n'\n'| | as \"s[len(s):len(s)] '\n'= t\") | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s *= n\" | updates *s* with its '\n'contents | (6) |\\n'\n'| | repeated *n* '\n'times | 
|\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.insert(i, x)\" | inserts *x* into *s* '\n'at the | |\\n'\n'| | index given by *i* '\n'(same as | |\\n'\n'| | \"s[i:i] = '\n'[x]\") | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.pop()\" or \"s.pop(i)\" | retrieves the item at '\n'*i* and | (2) |\\n'\n'| | also removes it from '\n'*s* | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.remove(x)\" | remove the first item '\n'from *s* | (3) |\\n'\n'| | where \"s[i]\" is equal '\n'to *x* | |\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'| \"s.reverse()\" | reverses the items of '\n'*s* in | (4) |\\n'\n'| | '\n'place | '\n'|\\n'\n'+--------------------------------+----------------------------------+-----------------------+\\n'\n'\\n'\n'Notes:\\n'\n'\\n'\n'1. *t* must have the same length as the slice it is '\n'replacing.\\n'\n'\\n'\n'2. The optional argument *i* defaults to \"-1\", so that '\n'by default the\\n'\n' last item is removed and returned.\\n'\n'\\n'\n'3. \"remove()\" raises \"ValueError\" when *x* is not found '\n'in *s*.\\n'\n'\\n'\n'4. The \"reverse()\" method modifies the sequence in place '\n'for economy\\n'\n' of space when reversing a large sequence. To remind '\n'users that it\\n'\n' operates by side effect, it does not return the '\n'reversed sequence.\\n'\n'\\n'\n'5. \"clear()\" and \"copy()\" are included for consistency '\n'with the\\n'\n' interfaces of mutable containers that don\u2019t support '\n'slicing\\n'\n' operations (such as \"dict\" and \"set\"). \"copy()\" is '\n'not part of the\\n'\n' \"collections.abc.MutableSequence\" ABC, but most '\n'concrete mutable\\n'\n' sequence classes provide it.\\n'\n'\\n'\n' New in version 3.3: \"clear()\" and \"copy()\" methods.\\n'\n'\\n'\n'6. The value *n* is an integer, or an object '\n'implementing\\n'\n' \"__index__()\". Zero and negative values of *n* clear '\n'the sequence.\\n'\n' Items in the sequence are not copied; they are '\n'referenced multiple\\n'\n' times, as explained for \"s * n\" under Common Sequence '\n'Operations.\\n',\n'unary':'Unary arithmetic and bitwise operations\\n'\n'***************************************\\n'\n'\\n'\n'All unary arithmetic and bitwise operations have the same '\n'priority:\\n'\n'\\n'\n' u_expr ::= power | \"-\" u_expr | \"+\" u_expr | \"~\" u_expr\\n'\n'\\n'\n'The unary \"-\" (minus) operator yields the negation of its numeric\\n'\n'argument; the operation can be overridden with the \"__neg__()\" '\n'special\\n'\n'method.\\n'\n'\\n'\n'The unary \"+\" (plus) operator yields its numeric argument '\n'unchanged;\\n'\n'the operation can be overridden with the \"__pos__()\" special '\n'method.\\n'\n'\\n'\n'The unary \"~\" (invert) operator yields the bitwise inversion of '\n'its\\n'\n'integer argument. The bitwise inversion of \"x\" is defined as\\n'\n'\"-(x+1)\". 
It only applies to integral numbers or to custom '\n'objects\\n'\n'that override the \"__invert__()\" special method.\\n'\n'\\n'\n'In all three cases, if the argument does not have the proper type, '\n'a\\n'\n'\"TypeError\" exception is raised.\\n',\n'while':'The \"while\" statement\\n'\n'*********************\\n'\n'\\n'\n'The \"while\" statement is used for repeated execution as long as an\\n'\n'expression is true:\\n'\n'\\n'\n' while_stmt ::= \"while\" assignment_expression \":\" suite\\n'\n' [\"else\" \":\" suite]\\n'\n'\\n'\n'This repeatedly tests the expression and, if it is true, executes '\n'the\\n'\n'first suite; if the expression is false (which may be the first '\n'time\\n'\n'it is tested) the suite of the \"else\" clause, if present, is '\n'executed\\n'\n'and the loop terminates.\\n'\n'\\n'\n'A \"break\" statement executed in the first suite terminates the '\n'loop\\n'\n'without executing the \"else\" clause\u2019s suite. A \"continue\" '\n'statement\\n'\n'executed in the first suite skips the rest of the suite and goes '\n'back\\n'\n'to testing the expression.\\n',\n'with':'The \"with\" statement\\n'\n'********************\\n'\n'\\n'\n'The \"with\" statement is used to wrap the execution of a block with\\n'\n'methods defined by a context manager (see section With Statement\\n'\n'Context Managers). This allows common \"try\"\u2026\"except\"\u2026\"finally\" '\n'usage\\n'\n'patterns to be encapsulated for convenient reuse.\\n'\n'\\n'\n' with_stmt ::= \"with\" ( \"(\" with_stmt_contents \",\"? \")\" | '\n'with_stmt_contents ) \":\" suite\\n'\n' with_stmt_contents ::= with_item (\",\" with_item)*\\n'\n' with_item ::= expression [\"as\" target]\\n'\n'\\n'\n'The execution of the \"with\" statement with one \u201citem\u201d proceeds as\\n'\n'follows:\\n'\n'\\n'\n'1. The context expression (the expression given in the \"with_item\") '\n'is\\n'\n' evaluated to obtain a context manager.\\n'\n'\\n'\n'2. The context manager\u2019s \"__enter__()\" is loaded for later use.\\n'\n'\\n'\n'3. The context manager\u2019s \"__exit__()\" is loaded for later use.\\n'\n'\\n'\n'4. The context manager\u2019s \"__enter__()\" method is invoked.\\n'\n'\\n'\n'5. If a target was included in the \"with\" statement, the return '\n'value\\n'\n' from \"__enter__()\" is assigned to it.\\n'\n'\\n'\n' Note:\\n'\n'\\n'\n' The \"with\" statement guarantees that if the \"__enter__()\" '\n'method\\n'\n' returns without an error, then \"__exit__()\" will always be\\n'\n' called. Thus, if an error occurs during the assignment to the\\n'\n' target list, it will be treated the same as an error occurring\\n'\n' within the suite would be. See step 6 below.\\n'\n'\\n'\n'6. The suite is executed.\\n'\n'\\n'\n'7. The context manager\u2019s \"__exit__()\" method is invoked. If an\\n'\n' exception caused the suite to be exited, its type, value, and\\n'\n' traceback are passed as arguments to \"__exit__()\". 
Otherwise, '\n'three\\n'\n' \"None\" arguments are supplied.\\n'\n'\\n'\n' If the suite was exited due to an exception, and the return '\n'value\\n'\n' from the \"__exit__()\" method was false, the exception is '\n'reraised.\\n'\n' If the return value was true, the exception is suppressed, and\\n'\n' execution continues with the statement following the \"with\"\\n'\n' statement.\\n'\n'\\n'\n' If the suite was exited for any reason other than an exception, '\n'the\\n'\n' return value from \"__exit__()\" is ignored, and execution '\n'proceeds\\n'\n' at the normal location for the kind of exit that was taken.\\n'\n'\\n'\n'The following code:\\n'\n'\\n'\n' with EXPRESSION as TARGET:\\n'\n' SUITE\\n'\n'\\n'\n'is semantically equivalent to:\\n'\n'\\n'\n' manager = (EXPRESSION)\\n'\n' enter = type(manager).__enter__\\n'\n' exit = type(manager).__exit__\\n'\n' value = enter(manager)\\n'\n' hit_except = False\\n'\n'\\n'\n' try:\\n'\n' TARGET = value\\n'\n' SUITE\\n'\n' except:\\n'\n' hit_except = True\\n'\n' if not exit(manager, *sys.exc_info()):\\n'\n' raise\\n'\n' finally:\\n'\n' if not hit_except:\\n'\n' exit(manager, None, None, None)\\n'\n'\\n'\n'With more than one item, the context managers are processed as if\\n'\n'multiple \"with\" statements were nested:\\n'\n'\\n'\n' with A() as a, B() as b:\\n'\n' SUITE\\n'\n'\\n'\n'is semantically equivalent to:\\n'\n'\\n'\n' with A() as a:\\n'\n' with B() as b:\\n'\n' SUITE\\n'\n'\\n'\n'You can also write multi-item context managers in multiple lines if\\n'\n'the items are surrounded by parentheses. For example:\\n'\n'\\n'\n' with (\\n'\n' A() as a,\\n'\n' B() as b,\\n'\n' ):\\n'\n' SUITE\\n'\n'\\n'\n'Changed in version 3.1: Support for multiple context expressions.\\n'\n'\\n'\n'Changed in version 3.10: Support for using grouping parentheses to\\n'\n'break the statement in multiple lines.\\n'\n'\\n'\n'See also:\\n'\n'\\n'\n' **PEP 343** - The \u201cwith\u201d statement\\n'\n' The specification, background, and examples for the Python '\n'\"with\"\\n'\n' statement.\\n',\n'yield':'The \"yield\" statement\\n'\n'*********************\\n'\n'\\n'\n' yield_stmt ::= yield_expression\\n'\n'\\n'\n'A \"yield\" statement is semantically equivalent to a yield '\n'expression.\\n'\n'The yield statement can be used to omit the parentheses that would\\n'\n'otherwise be required in the equivalent yield expression '\n'statement.\\n'\n'For example, the yield statements\\n'\n'\\n'\n' yield \\n'\n' yield from \\n'\n'\\n'\n'are equivalent to the yield expression statements\\n'\n'\\n'\n' (yield )\\n'\n' (yield from )\\n'\n'\\n'\n'Yield expressions and statements are only used when defining a\\n'\n'*generator* function, and are only used in the body of the '\n'generator\\n'\n'function. 
Using yield in a function definition is sufficient to '\n'cause\\n'\n'that definition to create a generator function instead of a normal\\n'\n'function.\\n'\n'\\n'\n'For full details of \"yield\" semantics, refer to the Yield '\n'expressions\\n'\n'section.\\n'}\n", []], "collections": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\n'ChainMap',\n'Counter',\n'OrderedDict',\n'UserDict',\n'UserList',\n'UserString',\n'defaultdict',\n'deque',\n'namedtuple',\n]\n\nimport _collections_abc\nimport sys as _sys\n\nfrom itertools import chain as _chain\nfrom itertools import repeat as _repeat\nfrom itertools import starmap as _starmap\nfrom keyword import iskeyword as _iskeyword\nfrom operator import eq as _eq\nfrom operator import itemgetter as _itemgetter\nfrom reprlib import recursive_repr as _recursive_repr\nfrom _weakref import proxy as _proxy\n\ntry:\n from _collections import deque\nexcept ImportError:\n pass\nelse:\n _collections_abc.MutableSequence.register(deque)\n \ntry:\n from _collections import _deque_iterator\nexcept ImportError:\n pass\n \ntry:\n from _collections import defaultdict\nexcept ImportError:\n pass\n \n \n \n \n \n \nclass _OrderedDictKeysView(_collections_abc.KeysView):\n\n def __reversed__(self):\n yield from reversed(self._mapping)\n \nclass _OrderedDictItemsView(_collections_abc.ItemsView):\n\n def __reversed__(self):\n for key in reversed(self._mapping):\n yield(key,self._mapping[key])\n \nclass _OrderedDictValuesView(_collections_abc.ValuesView):\n\n def __reversed__(self):\n for key in reversed(self._mapping):\n yield self._mapping[key]\n \nclass _Link(object):\n __slots__='prev','next','key','__weakref__'\n \nclass OrderedDict(dict):\n ''\n \n \n \n \n \n \n \n \n \n \n \n \n \n def __new__(cls,/,*args,**kwds):\n ''\n self=dict.__new__(cls)\n self.__hardroot=_Link()\n self.__root=root=_proxy(self.__hardroot)\n root.prev=root.next=root\n self.__map={}\n return self\n \n def __init__(self,other=(),/,**kwds):\n ''\n\n \n self.__update(other,**kwds)\n \n def __setitem__(self,key,value,\n dict_setitem=dict.__setitem__,proxy=_proxy,Link=_Link):\n ''\n \n \n if key not in self:\n self.__map[key]=link=Link()\n root=self.__root\n last=root.prev\n link.prev,link.next,link.key=last,root,key\n last.next=link\n root.prev=proxy(link)\n dict_setitem(self,key,value)\n \n def __delitem__(self,key,dict_delitem=dict.__delitem__):\n ''\n \n \n dict_delitem(self,key)\n link=self.__map.pop(key)\n link_prev=link.prev\n link_next=link.next\n link_prev.next=link_next\n link_next.prev=link_prev\n link.prev=None\n link.next=None\n \n def __iter__(self):\n ''\n \n root=self.__root\n curr=root.next\n while curr is not root:\n yield curr.key\n curr=curr.next\n \n def __reversed__(self):\n ''\n \n root=self.__root\n curr=root.prev\n while curr is not root:\n yield curr.key\n curr=curr.prev\n \n def clear(self):\n ''\n root=self.__root\n root.prev=root.next=root\n self.__map.clear()\n dict.clear(self)\n \n def popitem(self,last=True):\n ''\n\n\n \n if not self:\n raise KeyError('dictionary is empty')\n root=self.__root\n if last:\n link=root.prev\n link_prev=link.prev\n link_prev.next=root\n root.prev=link_prev\n else:\n link=root.next\n link_next=link.next\n root.next=link_next\n link_next.prev=root\n key=link.key\n del self.__map[key]\n value=dict.pop(self,key)\n return key,value\n \n def move_to_end(self,key,last=True):\n ''\n\n\n \n link=self.__map[key]\n link_prev=link.prev\n link_next=link.next\n soft_link=link_next.prev\n link_prev.next=link_next\n link_next.prev=link_prev\n 
root=self.__root\n if last:\n last=root.prev\n link.prev=last\n link.next=root\n root.prev=soft_link\n last.next=link\n else:\n first=root.next\n link.prev=root\n link.next=first\n first.prev=soft_link\n root.next=link\n \n def __sizeof__(self):\n sizeof=_sys.getsizeof\n n=len(self)+1\n size=sizeof(self.__dict__)\n size +=sizeof(self.__map)*2\n size +=sizeof(self.__hardroot)*n\n size +=sizeof(self.__root)*n\n return size\n \n update=__update=_collections_abc.MutableMapping.update\n \n def keys(self):\n ''\n return _OrderedDictKeysView(self)\n \n def items(self):\n ''\n return _OrderedDictItemsView(self)\n \n def values(self):\n ''\n return _OrderedDictValuesView(self)\n \n __ne__=_collections_abc.MutableMapping.__ne__\n \n __marker=object()\n \n def pop(self,key,default=__marker):\n ''\n\n\n\n \n marker=self.__marker\n result=dict.pop(self,key,marker)\n if result is not marker:\n \n link=self.__map.pop(key)\n link_prev=link.prev\n link_next=link.next\n link_prev.next=link_next\n link_next.prev=link_prev\n link.prev=None\n link.next=None\n return result\n if default is marker:\n raise KeyError(key)\n return default\n \n def setdefault(self,key,default=None):\n ''\n\n\n \n if key in self:\n return self[key]\n self[key]=default\n return default\n \n @_recursive_repr()\n def __repr__(self):\n ''\n if not self:\n return '%s()'%(self.__class__.__name__,)\n return '%s(%r)'%(self.__class__.__name__,dict(self.items()))\n \n def __reduce__(self):\n ''\n state=self.__getstate__()\n if state:\n if isinstance(state,tuple):\n state,slots=state\n else:\n slots={}\n state=state.copy()\n slots=slots.copy()\n for k in vars(OrderedDict()):\n state.pop(k,None)\n slots.pop(k,None)\n if slots:\n state=state,slots\n else:\n state=state or None\n return self.__class__,(),state,None,iter(self.items())\n \n def copy(self):\n ''\n return self.__class__(self)\n \n @classmethod\n def fromkeys(cls,iterable,value=None):\n ''\n \n self=cls()\n for key in iterable:\n self[key]=value\n return self\n \n def __eq__(self,other):\n ''\n\n\n \n if isinstance(other,OrderedDict):\n return dict.__eq__(self,other)and all(map(_eq,self,other))\n return dict.__eq__(self,other)\n \n def __ior__(self,other):\n self.update(other)\n return self\n \n def __or__(self,other):\n if not isinstance(other,dict):\n return NotImplemented\n new=self.__class__(self)\n new.update(other)\n return new\n \n def __ror__(self,other):\n if not isinstance(other,dict):\n return NotImplemented\n new=self.__class__(other)\n new.update(self)\n return new\n \n \ntry:\n from _collections import OrderedDict\nexcept ImportError:\n\n pass\n \n \n \n \n \n \ntry:\n from _collections import _tuplegetter\nexcept ImportError:\n _tuplegetter=lambda index,doc:property(_itemgetter(index),doc=doc)\n \ndef namedtuple(typename,field_names,*,rename=False,defaults=None,module=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n if isinstance(field_names,str):\n field_names=field_names.replace(',',' ').split()\n field_names=list(map(str,field_names))\n typename=_sys.intern(str(typename))\n \n if rename:\n seen=set()\n for index,name in enumerate(field_names):\n if(not name.isidentifier()\n or _iskeyword(name)\n or name.startswith('_')\n or name in seen):\n field_names[index]=f'_{index}'\n seen.add(name)\n \n for name in[typename]+field_names:\n if type(name)is not str:\n raise TypeError('Type names and field names must be strings')\n if not name.isidentifier():\n raise ValueError('Type names and field names must be valid '\n f'identifiers: {name !r}')\n if 
_iskeyword(name):\n raise ValueError('Type names and field names cannot be a '\n f'keyword: {name !r}')\n \n seen=set()\n for name in field_names:\n if name.startswith('_')and not rename:\n raise ValueError('Field names cannot start with an underscore: '\n f'{name !r}')\n if name in seen:\n raise ValueError(f'Encountered duplicate field name: {name !r}')\n seen.add(name)\n \n field_defaults={}\n if defaults is not None:\n defaults=tuple(defaults)\n if len(defaults)>len(field_names):\n raise TypeError('Got more default values than field names')\n field_defaults=dict(reversed(list(zip(reversed(field_names),\n reversed(defaults)))))\n \n \n field_names=tuple(map(_sys.intern,field_names))\n num_fields=len(field_names)\n arg_list=', '.join(field_names)\n if num_fields ==1:\n arg_list +=','\n repr_fmt='('+', '.join(f'{name}=%r'for name in field_names)+')'\n tuple_new=tuple.__new__\n _dict,_tuple,_len,_map,_zip=dict,tuple,len,map,zip\n \n \n \n namespace={\n '_tuple_new':tuple_new,\n '__builtins__':{},\n '__name__':f'namedtuple_{typename}',\n }\n code=f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'\n __new__=eval(code,namespace)\n __new__.__name__='__new__'\n __new__.__doc__=f'Create new instance of {typename}({arg_list})'\n if defaults is not None:\n __new__.__defaults__=defaults\n \n @classmethod\n def _make(cls,iterable):\n result=tuple_new(cls,iterable)\n if _len(result)!=num_fields:\n raise TypeError(f'Expected {num_fields} arguments, got {len(result)}')\n return result\n \n _make.__func__.__doc__=(f'Make a new {typename} object from a sequence '\n 'or iterable')\n \n def _replace(self,/,**kwds):\n result=self._make(_map(kwds.pop,field_names,self))\n if kwds:\n raise ValueError(f'Got unexpected field names: {list(kwds)!r}')\n return result\n \n _replace.__doc__=(f'Return a new {typename} object replacing specified '\n 'fields with new values')\n \n def __repr__(self):\n ''\n return self.__class__.__name__+repr_fmt %self\n \n def _asdict(self):\n ''\n return _dict(_zip(self._fields,self))\n \n def __getnewargs__(self):\n ''\n return _tuple(self)\n \n \n for method in(\n __new__,\n _make.__func__,\n _replace,\n __repr__,\n _asdict,\n __getnewargs__,\n ):\n method.__qualname__=f'{typename}.{method.__name__}'\n \n \n \n class_namespace={\n '__doc__':f'{typename}({arg_list})',\n '__slots__':(),\n '_fields':field_names,\n '_field_defaults':field_defaults,\n '__new__':__new__,\n '_make':_make,\n '_replace':_replace,\n '__repr__':__repr__,\n '_asdict':_asdict,\n '__getnewargs__':__getnewargs__,\n '__match_args__':field_names,\n }\n for index,name in enumerate(field_names):\n doc=_sys.intern(f'Alias for field number {index}')\n class_namespace[name]=_tuplegetter(index,doc)\n \n result=type(typename,(tuple,),class_namespace)\n \n \n \n \n \n \n if module is None:\n try:\n module=_sys._getframemodulename(1)or '__main__'\n except AttributeError:\n try:\n module=_sys._getframe(1).f_globals.get('__name__','__main__')\n except(AttributeError,ValueError):\n pass\n if module is not None:\n result.__module__=module\n \n return result\n \n \n \n \n \n \ndef _count_elements(mapping,iterable):\n ''\n mapping_get=mapping.get\n for elem in iterable:\n mapping[elem]=mapping_get(elem,0)+1\n \ntry:\n from _collections import _count_elements\nexcept ImportError:\n pass\n \nclass Counter(dict):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n def __init__(self,iterable=None,/,**kwds):\n ''\n\n\n\n\n\n\n\n\n \n super().__init__()\n 
self.update(iterable,**kwds)\n \n def __missing__(self,key):\n ''\n \n return 0\n \n def total(self):\n ''\n return sum(self.values())\n \n def most_common(self,n=None):\n ''\n\n\n\n\n\n \n \n if n is None:\n return sorted(self.items(),key=_itemgetter(1),reverse=True)\n \n \n import heapq\n return heapq.nlargest(n,self.items(),key=_itemgetter(1))\n \n def elements(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n return _chain.from_iterable(_starmap(_repeat,self.items()))\n \n \n \n @classmethod\n def fromkeys(cls,iterable,v=None):\n \n \n \n \n \n \n \n raise NotImplementedError(\n 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.')\n \n def update(self,iterable=None,/,**kwds):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n if iterable is not None:\n if isinstance(iterable,_collections_abc.Mapping):\n if self:\n self_get=self.get\n for elem,count in iterable.items():\n self[elem]=count+self_get(elem,0)\n else:\n \n super().update(iterable)\n else:\n _count_elements(self,iterable)\n if kwds:\n self.update(kwds)\n \n def subtract(self,iterable=None,/,**kwds):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if iterable is not None:\n self_get=self.get\n if isinstance(iterable,_collections_abc.Mapping):\n for elem,count in iterable.items():\n self[elem]=self_get(elem,0)-count\n else:\n for elem in iterable:\n self[elem]=self_get(elem,0)-1\n if kwds:\n self.subtract(kwds)\n \n def copy(self):\n ''\n return self.__class__(self)\n \n def __reduce__(self):\n return self.__class__,(dict(self),)\n \n def __delitem__(self,elem):\n ''\n if elem in self:\n super().__delitem__(elem)\n \n def __repr__(self):\n if not self:\n return f'{self.__class__.__name__}()'\n try:\n \n d=dict(self.most_common())\n except TypeError:\n \n d=dict(self)\n return f'{self.__class__.__name__}({d !r})'\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def __eq__(self,other):\n ''\n if not isinstance(other,Counter):\n return NotImplemented\n return all(self[e]==other[e]for c in(self,other)for e in c)\n \n def __ne__(self,other):\n ''\n if not isinstance(other,Counter):\n return NotImplemented\n return not self ==other\n \n def __le__(self,other):\n ''\n if not isinstance(other,Counter):\n return NotImplemented\n return all(self[e]<=other[e]for c in(self,other)for e in c)\n \n def __lt__(self,other):\n ''\n if not isinstance(other,Counter):\n return NotImplemented\n return self <=other and self !=other\n \n def __ge__(self,other):\n ''\n if not isinstance(other,Counter):\n return NotImplemented\n return all(self[e]>=other[e]for c in(self,other)for e in c)\n \n def __gt__(self,other):\n ''\n if not isinstance(other,Counter):\n return NotImplemented\n return self >=other and self !=other\n \n def __add__(self,other):\n ''\n\n\n\n\n \n if not isinstance(other,Counter):\n return NotImplemented\n result=Counter()\n for elem,count in self.items():\n newcount=count+other[elem]\n if newcount >0:\n result[elem]=newcount\n for elem,count in other.items():\n if elem not in self and count >0:\n result[elem]=count\n return result\n \n def __sub__(self,other):\n ''\n\n\n\n\n \n if not isinstance(other,Counter):\n return NotImplemented\n result=Counter()\n for elem,count in self.items():\n newcount=count -other[elem]\n if newcount >0:\n result[elem]=newcount\n for elem,count in other.items():\n if elem not in self and count <0:\n result[elem]=0 -count\n return result\n \n def __or__(self,other):\n ''\n\n\n\n\n \n if not isinstance(other,Counter):\n return NotImplemented\n 
result=Counter()\n for elem,count in self.items():\n other_count=other[elem]\n newcount=other_count if count 0:\n result[elem]=newcount\n for elem,count in other.items():\n if elem not in self and count >0:\n result[elem]=count\n return result\n \n def __and__(self,other):\n ''\n\n\n\n\n \n if not isinstance(other,Counter):\n return NotImplemented\n result=Counter()\n for elem,count in self.items():\n other_count=other[elem]\n newcount=count if count 0:\n result[elem]=newcount\n return result\n \n def __pos__(self):\n ''\n result=Counter()\n for elem,count in self.items():\n if count >0:\n result[elem]=count\n return result\n \n def __neg__(self):\n ''\n\n\n \n result=Counter()\n for elem,count in self.items():\n if count <0:\n result[elem]=0 -count\n return result\n \n def _keep_positive(self):\n ''\n nonpositive=[elem for elem,count in self.items()if not count >0]\n for elem in nonpositive:\n del self[elem]\n return self\n \n def __iadd__(self,other):\n ''\n\n\n\n\n\n\n \n for elem,count in other.items():\n self[elem]+=count\n return self._keep_positive()\n \n def __isub__(self,other):\n ''\n\n\n\n\n\n\n \n for elem,count in other.items():\n self[elem]-=count\n return self._keep_positive()\n \n def __ior__(self,other):\n ''\n\n\n\n\n\n\n \n for elem,other_count in other.items():\n count=self[elem]\n if other_count >count:\n self[elem]=other_count\n return self._keep_positive()\n \n def __iand__(self,other):\n ''\n\n\n\n\n\n\n \n for elem,count in self.items():\n other_count=other[elem]\n if other_count self.__cast(other)\n \n def __ge__(self,other):\n return self.data >=self.__cast(other)\n \n def __cast(self,other):\n return other.data if isinstance(other,UserList)else other\n \n def __contains__(self,item):\n return item in self.data\n \n def __len__(self):\n return len(self.data)\n \n def __getitem__(self,i):\n if isinstance(i,slice):\n return self.__class__(self.data[i])\n else:\n return self.data[i]\n \n def __setitem__(self,i,item):\n self.data[i]=item\n \n def __delitem__(self,i):\n del self.data[i]\n \n def __add__(self,other):\n if isinstance(other,UserList):\n return self.__class__(self.data+other.data)\n elif isinstance(other,type(self.data)):\n return self.__class__(self.data+other)\n return self.__class__(self.data+list(other))\n \n def __radd__(self,other):\n if isinstance(other,UserList):\n return self.__class__(other.data+self.data)\n elif isinstance(other,type(self.data)):\n return self.__class__(other+self.data)\n return self.__class__(list(other)+self.data)\n \n def __iadd__(self,other):\n if isinstance(other,UserList):\n self.data +=other.data\n elif isinstance(other,type(self.data)):\n self.data +=other\n else:\n self.data +=list(other)\n return self\n \n def __mul__(self,n):\n return self.__class__(self.data *n)\n \n __rmul__=__mul__\n \n def __imul__(self,n):\n self.data *=n\n return self\n \n def __copy__(self):\n inst=self.__class__.__new__(self.__class__)\n inst.__dict__.update(self.__dict__)\n \n inst.__dict__[\"data\"]=self.__dict__[\"data\"][:]\n return inst\n \n def append(self,item):\n self.data.append(item)\n \n def insert(self,i,item):\n self.data.insert(i,item)\n \n def pop(self,i=-1):\n return self.data.pop(i)\n \n def remove(self,item):\n self.data.remove(item)\n \n def clear(self):\n self.data.clear()\n \n def copy(self):\n return self.__class__(self)\n \n def count(self,item):\n return self.data.count(item)\n \n def index(self,item,*args):\n return self.data.index(item,*args)\n \n def reverse(self):\n self.data.reverse()\n \n def 
sort(self,/,*args,**kwds):\n self.data.sort(*args,**kwds)\n \n def extend(self,other):\n if isinstance(other,UserList):\n self.data.extend(other.data)\n else:\n self.data.extend(other)\n \n \n \n \n \n \nclass UserString(_collections_abc.Sequence):\n\n def __init__(self,seq):\n if isinstance(seq,str):\n self.data=seq\n elif isinstance(seq,UserString):\n self.data=seq.data[:]\n else:\n self.data=str(seq)\n \n def __str__(self):\n return str(self.data)\n \n def __repr__(self):\n return repr(self.data)\n \n def __int__(self):\n return int(self.data)\n \n def __float__(self):\n return float(self.data)\n \n def __complex__(self):\n return complex(self.data)\n \n def __hash__(self):\n return hash(self.data)\n \n def __getnewargs__(self):\n return(self.data[:],)\n \n def __eq__(self,string):\n if isinstance(string,UserString):\n return self.data ==string.data\n return self.data ==string\n \n def __lt__(self,string):\n if isinstance(string,UserString):\n return self.data string.data\n return self.data >string\n \n def __ge__(self,string):\n if isinstance(string,UserString):\n return self.data >=string.data\n return self.data >=string\n \n def __contains__(self,char):\n if isinstance(char,UserString):\n char=char.data\n return char in self.data\n \n def __len__(self):\n return len(self.data)\n \n def __getitem__(self,index):\n return self.__class__(self.data[index])\n \n def __add__(self,other):\n if isinstance(other,UserString):\n return self.__class__(self.data+other.data)\n elif isinstance(other,str):\n return self.__class__(self.data+other)\n return self.__class__(self.data+str(other))\n \n def __radd__(self,other):\n if isinstance(other,str):\n return self.__class__(other+self.data)\n return self.__class__(str(other)+self.data)\n \n def __mul__(self,n):\n return self.__class__(self.data *n)\n \n __rmul__=__mul__\n \n def __mod__(self,args):\n return self.__class__(self.data %args)\n \n def __rmod__(self,template):\n return self.__class__(str(template)%self)\n \n \n def capitalize(self):\n return self.__class__(self.data.capitalize())\n \n def casefold(self):\n return self.__class__(self.data.casefold())\n \n def center(self,width,*args):\n return self.__class__(self.data.center(width,*args))\n \n def count(self,sub,start=0,end=_sys.maxsize):\n if isinstance(sub,UserString):\n sub=sub.data\n return self.data.count(sub,start,end)\n \n def removeprefix(self,prefix,/):\n if isinstance(prefix,UserString):\n prefix=prefix.data\n return self.__class__(self.data.removeprefix(prefix))\n \n def removesuffix(self,suffix,/):\n if isinstance(suffix,UserString):\n suffix=suffix.data\n return self.__class__(self.data.removesuffix(suffix))\n \n def encode(self,encoding='utf-8',errors='strict'):\n encoding='utf-8'if encoding is None else encoding\n errors='strict'if errors is None else errors\n return self.data.encode(encoding,errors)\n \n def endswith(self,suffix,start=0,end=_sys.maxsize):\n return self.data.endswith(suffix,start,end)\n \n def expandtabs(self,tabsize=8):\n return self.__class__(self.data.expandtabs(tabsize))\n \n def find(self,sub,start=0,end=_sys.maxsize):\n if isinstance(sub,UserString):\n sub=sub.data\n return self.data.find(sub,start,end)\n \n def format(self,/,*args,**kwds):\n return self.data.format(*args,**kwds)\n \n def format_map(self,mapping):\n return self.data.format_map(mapping)\n \n def index(self,sub,start=0,end=_sys.maxsize):\n return self.data.index(sub,start,end)\n \n def isalpha(self):\n return self.data.isalpha()\n \n def isalnum(self):\n return self.data.isalnum()\n \n 
def isascii(self):\n return self.data.isascii()\n \n def isdecimal(self):\n return self.data.isdecimal()\n \n def isdigit(self):\n return self.data.isdigit()\n \n def isidentifier(self):\n return self.data.isidentifier()\n \n def islower(self):\n return self.data.islower()\n \n def isnumeric(self):\n return self.data.isnumeric()\n \n def isprintable(self):\n return self.data.isprintable()\n \n def isspace(self):\n return self.data.isspace()\n \n def istitle(self):\n return self.data.istitle()\n \n def isupper(self):\n return self.data.isupper()\n \n def join(self,seq):\n return self.data.join(seq)\n \n def ljust(self,width,*args):\n return self.__class__(self.data.ljust(width,*args))\n \n def lower(self):\n return self.__class__(self.data.lower())\n \n def lstrip(self,chars=None):\n return self.__class__(self.data.lstrip(chars))\n \n maketrans=str.maketrans\n \n def partition(self,sep):\n return self.data.partition(sep)\n \n def replace(self,old,new,maxsplit=-1):\n if isinstance(old,UserString):\n old=old.data\n if isinstance(new,UserString):\n new=new.data\n return self.__class__(self.data.replace(old,new,maxsplit))\n \n def rfind(self,sub,start=0,end=_sys.maxsize):\n if isinstance(sub,UserString):\n sub=sub.data\n return self.data.rfind(sub,start,end)\n \n def rindex(self,sub,start=0,end=_sys.maxsize):\n return self.data.rindex(sub,start,end)\n \n def rjust(self,width,*args):\n return self.__class__(self.data.rjust(width,*args))\n \n def rpartition(self,sep):\n return self.data.rpartition(sep)\n \n def rstrip(self,chars=None):\n return self.__class__(self.data.rstrip(chars))\n \n def split(self,sep=None,maxsplit=-1):\n return self.data.split(sep,maxsplit)\n \n def rsplit(self,sep=None,maxsplit=-1):\n return self.data.rsplit(sep,maxsplit)\n \n def splitlines(self,keepends=False):\n return self.data.splitlines(keepends)\n \n def startswith(self,prefix,start=0,end=_sys.maxsize):\n return self.data.startswith(prefix,start,end)\n \n def strip(self,chars=None):\n return self.__class__(self.data.strip(chars))\n \n def swapcase(self):\n return self.__class__(self.data.swapcase())\n \n def title(self):\n return self.__class__(self.data.title())\n \n def translate(self,*args):\n return self.__class__(self.data.translate(*args))\n \n def upper(self):\n return self.__class__(self.data.upper())\n \n def zfill(self,width):\n return self.__class__(self.data.zfill(width))\n", ["_collections", "_collections_abc", "_weakref", "copy", "heapq", "itertools", "keyword", "operator", "reprlib", "sys"], 1], "collections.abc": [".py", "from _collections_abc import *\nfrom _collections_abc import __all__\nfrom _collections_abc import _CallableGenericAlias\n", ["_collections_abc"]], "logging.config": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\"\"\"\nConfiguration functions for the logging package for Python. The core package\nis based on PEP 282 and comments thereto in comp.lang.python, and influenced\nby Apache's log4j system.\n\nCopyright (C) 2001-2022 Vinay Sajip. 
All Rights Reserved.\n\nTo use, simply 'import logging' and log away!\n\"\"\"\n\nimport errno\nimport functools\nimport io\nimport logging\nimport logging.handlers\nimport os\nimport queue\nimport re\nimport struct\nimport threading\nimport traceback\n\nfrom socketserver import ThreadingTCPServer,StreamRequestHandler\n\n\nDEFAULT_LOGGING_CONFIG_PORT=9030\n\nRESET_ERROR=errno.ECONNRESET\n\n\n\n\n\n\n_listener=None\n\ndef fileConfig(fname,defaults=None,disable_existing_loggers=True,encoding=None):\n ''\n\n\n\n\n\n\n \n import configparser\n \n if isinstance(fname,str):\n if not os.path.exists(fname):\n raise FileNotFoundError(f\"{fname} doesn't exist\")\n elif not os.path.getsize(fname):\n raise RuntimeError(f'{fname} is an empty file')\n \n if isinstance(fname,configparser.RawConfigParser):\n cp=fname\n else:\n try:\n cp=configparser.ConfigParser(defaults)\n if hasattr(fname,'readline'):\n cp.read_file(fname)\n else:\n encoding=io.text_encoding(encoding)\n cp.read(fname,encoding=encoding)\n except configparser.ParsingError as e:\n raise RuntimeError(f'{fname} is invalid: {e}')\n \n formatters=_create_formatters(cp)\n \n \n logging._acquireLock()\n try:\n _clearExistingHandlers()\n \n \n handlers=_install_handlers(cp,formatters)\n _install_loggers(cp,handlers,disable_existing_loggers)\n finally:\n logging._releaseLock()\n \n \ndef _resolve(name):\n ''\n name=name.split('.')\n used=name.pop(0)\n found=__import__(used)\n for n in name:\n used=used+'.'+n\n try:\n found=getattr(found,n)\n except AttributeError:\n __import__(used)\n found=getattr(found,n)\n return found\n \ndef _strip_spaces(alist):\n return map(str.strip,alist)\n \ndef _create_formatters(cp):\n ''\n flist=cp[\"formatters\"][\"keys\"]\n if not len(flist):\n return{}\n flist=flist.split(\",\")\n flist=_strip_spaces(flist)\n formatters={}\n for form in flist:\n sectname=\"formatter_%s\"%form\n fs=cp.get(sectname,\"format\",raw=True,fallback=None)\n dfs=cp.get(sectname,\"datefmt\",raw=True,fallback=None)\n stl=cp.get(sectname,\"style\",raw=True,fallback='%')\n defaults=cp.get(sectname,\"defaults\",raw=True,fallback=None)\n \n c=logging.Formatter\n class_name=cp[sectname].get(\"class\")\n if class_name:\n c=_resolve(class_name)\n \n if defaults is not None:\n defaults=eval(defaults,vars(logging))\n f=c(fs,dfs,stl,defaults=defaults)\n else:\n f=c(fs,dfs,stl)\n formatters[form]=f\n return formatters\n \n \ndef _install_handlers(cp,formatters):\n ''\n hlist=cp[\"handlers\"][\"keys\"]\n if not len(hlist):\n return{}\n hlist=hlist.split(\",\")\n hlist=_strip_spaces(hlist)\n handlers={}\n fixups=[]\n for hand in hlist:\n section=cp[\"handler_%s\"%hand]\n klass=section[\"class\"]\n fmt=section.get(\"formatter\",\"\")\n try:\n klass=eval(klass,vars(logging))\n except(AttributeError,NameError):\n klass=_resolve(klass)\n args=section.get(\"args\",'()')\n args=eval(args,vars(logging))\n kwargs=section.get(\"kwargs\",'{}')\n kwargs=eval(kwargs,vars(logging))\n h=klass(*args,**kwargs)\n h.name=hand\n if \"level\"in section:\n level=section[\"level\"]\n h.setLevel(level)\n if len(fmt):\n h.setFormatter(formatters[fmt])\n if issubclass(klass,logging.handlers.MemoryHandler):\n target=section.get(\"target\",\"\")\n if len(target):\n fixups.append((h,target))\n handlers[hand]=h\n \n for h,t in fixups:\n h.setTarget(handlers[t])\n return handlers\n \ndef _handle_existing_loggers(existing,child_loggers,disable_existing):\n ''\n\n\n\n\n\n\n\n\n \n root=logging.root\n for log in existing:\n logger=root.manager.loggerDict[log]\n if log in child_loggers:\n 
if not isinstance(logger,logging.PlaceHolder):\n logger.setLevel(logging.NOTSET)\n logger.handlers=[]\n logger.propagate=True\n else:\n logger.disabled=disable_existing\n \ndef _install_loggers(cp,handlers,disable_existing):\n ''\n \n \n llist=cp[\"loggers\"][\"keys\"]\n llist=llist.split(\",\")\n llist=list(_strip_spaces(llist))\n llist.remove(\"root\")\n section=cp[\"logger_root\"]\n root=logging.root\n log=root\n if \"level\"in section:\n level=section[\"level\"]\n log.setLevel(level)\n for h in root.handlers[:]:\n root.removeHandler(h)\n hlist=section[\"handlers\"]\n if len(hlist):\n hlist=hlist.split(\",\")\n hlist=_strip_spaces(hlist)\n for hand in hlist:\n log.addHandler(handlers[hand])\n \n \n \n \n \n \n \n \n \n \n existing=list(root.manager.loggerDict.keys())\n \n \n \n \n existing.sort()\n \n \n child_loggers=[]\n \n for log in llist:\n section=cp[\"logger_%s\"%log]\n qn=section[\"qualname\"]\n propagate=section.getint(\"propagate\",fallback=1)\n logger=logging.getLogger(qn)\n if qn in existing:\n i=existing.index(qn)+1\n prefixed=qn+\".\"\n pflen=len(prefixed)\n num_existing=len(existing)\n while i [a-z]+)://(?P.*)$')\n \n WORD_PATTERN=re.compile(r'^\\s*(\\w+)\\s*')\n DOT_PATTERN=re.compile(r'^\\.\\s*(\\w+)\\s*')\n INDEX_PATTERN=re.compile(r'^\\[\\s*(\\w+)\\s*\\]\\s*')\n DIGIT_PATTERN=re.compile(r'^\\d+$')\n \n value_converters={\n 'ext':'ext_convert',\n 'cfg':'cfg_convert',\n }\n \n \n importer=staticmethod(__import__)\n \n def __init__(self,config):\n self.config=ConvertingDict(config)\n self.config.configurator=self\n \n def resolve(self,s):\n ''\n\n\n \n name=s.split('.')\n used=name.pop(0)\n try:\n found=self.importer(used)\n for frag in name:\n used +='.'+frag\n try:\n found=getattr(found,frag)\n except AttributeError:\n self.importer(used)\n found=getattr(found,frag)\n return found\n except ImportError as e:\n v=ValueError('Cannot resolve %r: %s'%(s,e))\n raise v from e\n \n def ext_convert(self,value):\n ''\n return self.resolve(value)\n \n def cfg_convert(self,value):\n ''\n rest=value\n m=self.WORD_PATTERN.match(rest)\n if m is None:\n raise ValueError(\"Unable to convert %r\"%value)\n else:\n rest=rest[m.end():]\n d=self.config[m.groups()[0]]\n \n while rest:\n m=self.DOT_PATTERN.match(rest)\n if m:\n d=d[m.groups()[0]]\n else:\n m=self.INDEX_PATTERN.match(rest)\n if m:\n idx=m.groups()[0]\n if not self.DIGIT_PATTERN.match(idx):\n d=d[idx]\n else:\n try:\n n=int(idx)\n d=d[n]\n except TypeError:\n d=d[idx]\n if m:\n rest=rest[m.end():]\n else:\n raise ValueError('Unable to convert '\n '%r at %r'%(value,rest))\n \n return d\n \n def convert(self,value):\n ''\n\n\n\n \n if not isinstance(value,ConvertingDict)and isinstance(value,dict):\n value=ConvertingDict(value)\n value.configurator=self\n elif not isinstance(value,ConvertingList)and isinstance(value,list):\n value=ConvertingList(value)\n value.configurator=self\n elif not isinstance(value,ConvertingTuple)and\\\n isinstance(value,tuple)and not hasattr(value,'_fields'):\n value=ConvertingTuple(value)\n value.configurator=self\n elif isinstance(value,str):\n m=self.CONVERT_PATTERN.match(value)\n if m:\n d=m.groupdict()\n prefix=d['prefix']\n converter=self.value_converters.get(prefix,None)\n if converter:\n suffix=d['suffix']\n converter=getattr(self,converter)\n value=converter(suffix)\n return value\n \n def configure_custom(self,config):\n ''\n c=config.pop('()')\n if not callable(c):\n c=self.resolve(c)\n props=config.pop('.',None)\n \n kwargs={k:config[k]for k in config if valid_ident(k)}\n result=c(**kwargs)\n 
if props:\n for name,value in props.items():\n setattr(result,name,value)\n return result\n \n def as_tuple(self,value):\n ''\n if isinstance(value,list):\n value=tuple(value)\n return value\n \nclass DictConfigurator(BaseConfigurator):\n ''\n\n\n \n \n def configure(self):\n ''\n \n config=self.config\n if 'version'not in config:\n raise ValueError(\"dictionary doesn't specify a version\")\n if config['version']!=1:\n raise ValueError(\"Unsupported version: %s\"%config['version'])\n incremental=config.pop('incremental',False)\n EMPTY_DICT={}\n logging._acquireLock()\n try:\n if incremental:\n handlers=config.get('handlers',EMPTY_DICT)\n for name in handlers:\n if name not in logging._handlers:\n raise ValueError('No handler found with '\n 'name %r'%name)\n else:\n try:\n handler=logging._handlers[name]\n handler_config=handlers[name]\n level=handler_config.get('level',None)\n if level:\n handler.setLevel(logging._checkLevel(level))\n except Exception as e:\n raise ValueError('Unable to configure handler '\n '%r'%name)from e\n loggers=config.get('loggers',EMPTY_DICT)\n for name in loggers:\n try:\n self.configure_logger(name,loggers[name],True)\n except Exception as e:\n raise ValueError('Unable to configure logger '\n '%r'%name)from e\n root=config.get('root',None)\n if root:\n try:\n self.configure_root(root,True)\n except Exception as e:\n raise ValueError('Unable to configure root '\n 'logger')from e\n else:\n disable_existing=config.pop('disable_existing_loggers',True)\n \n _clearExistingHandlers()\n \n \n formatters=config.get('formatters',EMPTY_DICT)\n for name in formatters:\n try:\n formatters[name]=self.configure_formatter(\n formatters[name])\n except Exception as e:\n raise ValueError('Unable to configure '\n 'formatter %r'%name)from e\n \n filters=config.get('filters',EMPTY_DICT)\n for name in filters:\n try:\n filters[name]=self.configure_filter(filters[name])\n except Exception as e:\n raise ValueError('Unable to configure '\n 'filter %r'%name)from e\n \n \n \n \n handlers=config.get('handlers',EMPTY_DICT)\n deferred=[]\n for name in sorted(handlers):\n try:\n handler=self.configure_handler(handlers[name])\n handler.name=name\n handlers[name]=handler\n except Exception as e:\n if ' not configured yet'in str(e.__cause__):\n deferred.append(name)\n else:\n raise ValueError('Unable to configure handler '\n '%r'%name)from e\n \n \n for name in deferred:\n try:\n handler=self.configure_handler(handlers[name])\n handler.name=name\n handlers[name]=handler\n except Exception as e:\n raise ValueError('Unable to configure handler '\n '%r'%name)from e\n \n \n \n \n \n \n \n \n \n \n \n root=logging.root\n existing=list(root.manager.loggerDict.keys())\n \n \n \n \n existing.sort()\n \n \n child_loggers=[]\n \n loggers=config.get('loggers',EMPTY_DICT)\n for name in loggers:\n if name in existing:\n i=existing.index(name)+1\n prefixed=name+\".\"\n pflen=len(prefixed)\n num_existing=len(existing)\n while i L\",chunk)[0]\n chunk=self.connection.recv(slen)\n while len(chunk)0:\n mode='a'\n if \"b\"not in mode:\n encoding=io.text_encoding(encoding)\n BaseRotatingHandler.__init__(self,filename,mode,encoding=encoding,\n delay=delay,errors=errors)\n self.maxBytes=maxBytes\n self.backupCount=backupCount\n \n def doRollover(self):\n ''\n\n \n if self.stream:\n self.stream.close()\n self.stream=None\n if self.backupCount >0:\n for i in range(self.backupCount -1,0,-1):\n sfn=self.rotation_filename(\"%s.%d\"%(self.baseFilename,i))\n dfn=self.rotation_filename(\"%s.%d\"%(self.baseFilename,\n i+1))\n 
if os.path.exists(sfn):\n if os.path.exists(dfn):\n os.remove(dfn)\n os.rename(sfn,dfn)\n dfn=self.rotation_filename(self.baseFilename+\".1\")\n if os.path.exists(dfn):\n os.remove(dfn)\n self.rotate(self.baseFilename,dfn)\n if not self.delay:\n self.stream=self._open()\n \n def shouldRollover(self,record):\n ''\n\n\n\n\n \n \n if os.path.exists(self.baseFilename)and not os.path.isfile(self.baseFilename):\n return False\n if self.stream is None:\n self.stream=self._open()\n if self.maxBytes >0:\n msg=\"%s\\n\"%self.format(record)\n self.stream.seek(0,2)\n if self.stream.tell()+len(msg)>=self.maxBytes:\n return True\n return False\n \nclass TimedRotatingFileHandler(BaseRotatingHandler):\n ''\n\n\n\n\n\n \n def __init__(self,filename,when='h',interval=1,backupCount=0,\n encoding=None,delay=False,utc=False,atTime=None,\n errors=None):\n encoding=io.text_encoding(encoding)\n BaseRotatingHandler.__init__(self,filename,'a',encoding=encoding,\n delay=delay,errors=errors)\n self.when=when.upper()\n self.backupCount=backupCount\n self.utc=utc\n self.atTime=atTime\n \n \n \n \n \n \n \n \n \n \n \n \n if self.when =='S':\n self.interval=1\n self.suffix=\"%Y-%m-%d_%H-%M-%S\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when =='M':\n self.interval=60\n self.suffix=\"%Y-%m-%d_%H-%M\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when =='H':\n self.interval=60 *60\n self.suffix=\"%Y-%m-%d_%H\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}(\\.\\w+)?$\"\n elif self.when =='D'or self.when =='MIDNIGHT':\n self.interval=60 *60 *24\n self.suffix=\"%Y-%m-%d\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when.startswith('W'):\n self.interval=60 *60 *24 *7\n if len(self.when)!=2:\n raise ValueError(\"You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s\"%self.when)\n if self.when[1]<'0'or self.when[1]>'6':\n raise ValueError(\"Invalid day specified for weekly rollover: %s\"%self.when)\n self.dayOfWeek=int(self.when[1])\n self.suffix=\"%Y-%m-%d\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n else:\n raise ValueError(\"Invalid rollover interval specified: %s\"%self.when)\n \n self.extMatch=re.compile(self.extMatch,re.ASCII)\n self.interval=self.interval *interval\n \n \n filename=self.baseFilename\n if os.path.exists(filename):\n t=os.stat(filename)[ST_MTIME]\n else:\n t=int(time.time())\n self.rolloverAt=self.computeRollover(t)\n \n def computeRollover(self,currentTime):\n ''\n\n \n result=currentTime+self.interval\n \n \n \n \n \n \n \n if self.when =='MIDNIGHT'or self.when.startswith('W'):\n \n if self.utc:\n t=time.gmtime(currentTime)\n else:\n t=time.localtime(currentTime)\n currentHour=t[3]\n currentMinute=t[4]\n currentSecond=t[5]\n currentDay=t[6]\n \n if self.atTime is None:\n rotate_ts=_MIDNIGHT\n else:\n rotate_ts=((self.atTime.hour *60+self.atTime.minute)*60+\n self.atTime.second)\n \n r=rotate_ts -((currentHour *60+currentMinute)*60+\n currentSecond)\n if r <0:\n \n \n \n r +=_MIDNIGHT\n currentDay=(currentDay+1)%7\n result=currentTime+r\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.when.startswith('W'):\n day=currentDay\n if day !=self.dayOfWeek:\n if day =self.rolloverAt:\n \n if os.path.exists(self.baseFilename)and not os.path.isfile(self.baseFilename):\n \n \n self.rolloverAt=self.computeRollover(t)\n return False\n \n return True\n return False\n \n def getFilesToDelete(self):\n ''\n\n\n\n \n dirName,baseName=os.path.split(self.baseFilename)\n 
fileNames=os.listdir(dirName)\n result=[]\n \n n,e=os.path.splitext(baseName)\n prefix=n+'.'\n plen=len(prefix)\n for fileName in fileNames:\n if self.namer is None:\n \n if not fileName.startswith(baseName):\n continue\n else:\n \n \n \n if(not fileName.startswith(baseName)and fileName.endswith(e)and\n len(fileName)>(plen+1)and not fileName[plen+1].isdigit()):\n continue\n \n if fileName[:plen]==prefix:\n suffix=fileName[plen:]\n \n \n parts=suffix.split('.')\n for part in parts:\n if self.extMatch.match(part):\n result.append(os.path.join(dirName,fileName))\n break\n if len(result)0:\n for s in self.getFilesToDelete():\n os.remove(s)\n if not self.delay:\n self.stream=self._open()\n newRolloverAt=self.computeRollover(currentTime)\n while newRolloverAt <=currentTime:\n newRolloverAt=newRolloverAt+self.interval\n \n if(self.when =='MIDNIGHT'or self.when.startswith('W'))and not self.utc:\n dstAtRollover=time.localtime(newRolloverAt)[-1]\n if dstNow !=dstAtRollover:\n if not dstNow:\n addend=-3600\n else:\n addend=3600\n newRolloverAt +=addend\n self.rolloverAt=newRolloverAt\n \nclass WatchedFileHandler(logging.FileHandler):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,filename,mode='a',encoding=None,delay=False,\n errors=None):\n if \"b\"not in mode:\n encoding=io.text_encoding(encoding)\n logging.FileHandler.__init__(self,filename,mode=mode,\n encoding=encoding,delay=delay,\n errors=errors)\n self.dev,self.ino=-1,-1\n self._statstream()\n \n def _statstream(self):\n if self.stream:\n sres=os.fstat(self.stream.fileno())\n self.dev,self.ino=sres[ST_DEV],sres[ST_INO]\n \n def reopenIfNeeded(self):\n ''\n\n\n\n\n\n \n \n \n \n \n try:\n \n sres=os.stat(self.baseFilename)\n except FileNotFoundError:\n sres=None\n \n if not sres or sres[ST_DEV]!=self.dev or sres[ST_INO]!=self.ino:\n if self.stream is not None:\n \n self.stream.flush()\n self.stream.close()\n self.stream=None\n \n self.stream=self._open()\n self._statstream()\n \n def emit(self,record):\n ''\n\n\n\n\n \n self.reopenIfNeeded()\n logging.FileHandler.emit(self,record)\n \n \nclass SocketHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,host,port):\n ''\n\n\n\n\n\n \n logging.Handler.__init__(self)\n self.host=host\n self.port=port\n if port is None:\n self.address=host\n else:\n self.address=(host,port)\n self.sock=None\n self.closeOnError=False\n self.retryTime=None\n \n \n \n self.retryStart=1.0\n self.retryMax=30.0\n self.retryFactor=2.0\n \n def makeSocket(self,timeout=1):\n ''\n\n\n \n if self.port is not None:\n result=socket.create_connection(self.address,timeout=timeout)\n else:\n result=socket.socket(socket.AF_UNIX,socket.SOCK_STREAM)\n result.settimeout(timeout)\n try:\n result.connect(self.address)\n except OSError:\n result.close()\n raise\n return result\n \n def createSocket(self):\n ''\n\n\n\n \n now=time.time()\n \n \n \n if self.retryTime is None:\n attempt=True\n else:\n attempt=(now >=self.retryTime)\n if attempt:\n try:\n self.sock=self.makeSocket()\n self.retryTime=None\n except OSError:\n \n if self.retryTime is None:\n self.retryPeriod=self.retryStart\n else:\n self.retryPeriod=self.retryPeriod *self.retryFactor\n if self.retryPeriod >self.retryMax:\n self.retryPeriod=self.retryMax\n self.retryTime=now+self.retryPeriod\n \n def send(self,s):\n ''\n\n\n\n\n \n if self.sock is None:\n self.createSocket()\n \n \n \n if self.sock:\n try:\n self.sock.sendall(s)\n except OSError:\n self.sock.close()\n self.sock=None\n \n def makePickle(self,record):\n ''\n\n\n \n 
ei=record.exc_info\n if ei:\n \n dummy=self.format(record)\n \n \n \n d=dict(record.__dict__)\n d['msg']=record.getMessage()\n d['args']=None\n d['exc_info']=None\n \n d.pop('message',None)\n s=pickle.dumps(d,1)\n slen=struct.pack(\">L\",len(s))\n return slen+s\n \n def handleError(self,record):\n ''\n\n\n\n\n\n \n if self.closeOnError and self.sock:\n self.sock.close()\n self.sock=None\n else:\n logging.Handler.handleError(self,record)\n \n def emit(self,record):\n ''\n\n\n\n\n\n\n \n try:\n s=self.makePickle(record)\n self.send(s)\n except Exception:\n self.handleError(record)\n \n def close(self):\n ''\n\n \n self.acquire()\n try:\n sock=self.sock\n if sock:\n self.sock=None\n sock.close()\n logging.Handler.close(self)\n finally:\n self.release()\n \nclass DatagramHandler(SocketHandler):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,host,port):\n ''\n\n \n SocketHandler.__init__(self,host,port)\n self.closeOnError=False\n \n def makeSocket(self):\n ''\n\n\n \n if self.port is None:\n family=socket.AF_UNIX\n else:\n family=socket.AF_INET\n s=socket.socket(family,socket.SOCK_DGRAM)\n return s\n \n def send(self,s):\n ''\n\n\n\n\n\n \n if self.sock is None:\n self.createSocket()\n self.sock.sendto(s,self.address)\n \nclass SysLogHandler(logging.Handler):\n ''\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n LOG_EMERG=0\n LOG_ALERT=1\n LOG_CRIT=2\n LOG_ERR=3\n LOG_WARNING=4\n LOG_NOTICE=5\n LOG_INFO=6\n LOG_DEBUG=7\n \n \n LOG_KERN=0\n LOG_USER=1\n LOG_MAIL=2\n LOG_DAEMON=3\n LOG_AUTH=4\n LOG_SYSLOG=5\n LOG_LPR=6\n LOG_NEWS=7\n LOG_UUCP=8\n LOG_CRON=9\n LOG_AUTHPRIV=10\n LOG_FTP=11\n LOG_NTP=12\n LOG_SECURITY=13\n LOG_CONSOLE=14\n LOG_SOLCRON=15\n \n \n LOG_LOCAL0=16\n LOG_LOCAL1=17\n LOG_LOCAL2=18\n LOG_LOCAL3=19\n LOG_LOCAL4=20\n LOG_LOCAL5=21\n LOG_LOCAL6=22\n LOG_LOCAL7=23\n \n priority_names={\n \"alert\":LOG_ALERT,\n \"crit\":LOG_CRIT,\n \"critical\":LOG_CRIT,\n \"debug\":LOG_DEBUG,\n \"emerg\":LOG_EMERG,\n \"err\":LOG_ERR,\n \"error\":LOG_ERR,\n \"info\":LOG_INFO,\n \"notice\":LOG_NOTICE,\n \"panic\":LOG_EMERG,\n \"warn\":LOG_WARNING,\n \"warning\":LOG_WARNING,\n }\n \n facility_names={\n \"auth\":LOG_AUTH,\n \"authpriv\":LOG_AUTHPRIV,\n \"console\":LOG_CONSOLE,\n \"cron\":LOG_CRON,\n \"daemon\":LOG_DAEMON,\n \"ftp\":LOG_FTP,\n \"kern\":LOG_KERN,\n \"lpr\":LOG_LPR,\n \"mail\":LOG_MAIL,\n \"news\":LOG_NEWS,\n \"ntp\":LOG_NTP,\n \"security\":LOG_SECURITY,\n \"solaris-cron\":LOG_SOLCRON,\n \"syslog\":LOG_SYSLOG,\n \"user\":LOG_USER,\n \"uucp\":LOG_UUCP,\n \"local0\":LOG_LOCAL0,\n \"local1\":LOG_LOCAL1,\n \"local2\":LOG_LOCAL2,\n \"local3\":LOG_LOCAL3,\n \"local4\":LOG_LOCAL4,\n \"local5\":LOG_LOCAL5,\n \"local6\":LOG_LOCAL6,\n \"local7\":LOG_LOCAL7,\n }\n \n \n \n \n \n priority_map={\n \"DEBUG\":\"debug\",\n \"INFO\":\"info\",\n \"WARNING\":\"warning\",\n \"ERROR\":\"error\",\n \"CRITICAL\":\"critical\"\n }\n \n def __init__(self,address=('localhost',SYSLOG_UDP_PORT),\n facility=LOG_USER,socktype=None):\n ''\n\n\n\n\n\n\n\n\n\n \n logging.Handler.__init__(self)\n \n self.address=address\n self.facility=facility\n self.socktype=socktype\n self.socket=None\n self.createSocket()\n \n def _connect_unixsocket(self,address):\n use_socktype=self.socktype\n if use_socktype is None:\n use_socktype=socket.SOCK_DGRAM\n self.socket=socket.socket(socket.AF_UNIX,use_socktype)\n try:\n self.socket.connect(address)\n \n self.socktype=use_socktype\n except OSError:\n self.socket.close()\n if self.socktype is not None:\n \n raise\n use_socktype=socket.SOCK_STREAM\n 
self.socket=socket.socket(socket.AF_UNIX,use_socktype)\n try:\n self.socket.connect(address)\n \n self.socktype=use_socktype\n except OSError:\n self.socket.close()\n raise\n \n def createSocket(self):\n ''\n\n\n\n\n\n \n address=self.address\n socktype=self.socktype\n \n if isinstance(address,str):\n self.unixsocket=True\n \n \n \n \n try:\n self._connect_unixsocket(address)\n except OSError:\n pass\n else:\n self.unixsocket=False\n if socktype is None:\n socktype=socket.SOCK_DGRAM\n host,port=address\n ress=socket.getaddrinfo(host,port,0,socktype)\n if not ress:\n raise OSError(\"getaddrinfo returns an empty list\")\n for res in ress:\n af,socktype,proto,_,sa=res\n err=sock=None\n try:\n sock=socket.socket(af,socktype,proto)\n if socktype ==socket.SOCK_STREAM:\n sock.connect(sa)\n break\n except OSError as exc:\n err=exc\n if sock is not None:\n sock.close()\n if err is not None:\n raise err\n self.socket=sock\n self.socktype=socktype\n \n def encodePriority(self,facility,priority):\n ''\n\n\n\n\n \n if isinstance(facility,str):\n facility=self.facility_names[facility]\n if isinstance(priority,str):\n priority=self.priority_names[priority]\n return(facility <<3)|priority\n \n def close(self):\n ''\n\n \n self.acquire()\n try:\n sock=self.socket\n if sock:\n self.socket=None\n sock.close()\n logging.Handler.close(self)\n finally:\n self.release()\n \n def mapPriority(self,levelName):\n ''\n\n\n\n\n\n \n return self.priority_map.get(levelName,\"warning\")\n \n ident=''\n append_nul=True\n \n def emit(self,record):\n ''\n\n\n\n\n \n try:\n msg=self.format(record)\n if self.ident:\n msg=self.ident+msg\n if self.append_nul:\n msg +='\\000'\n \n \n \n prio='<%d>'%self.encodePriority(self.facility,\n self.mapPriority(record.levelname))\n prio=prio.encode('utf-8')\n \n msg=msg.encode('utf-8')\n msg=prio+msg\n \n if not self.socket:\n self.createSocket()\n \n if self.unixsocket:\n try:\n self.socket.send(msg)\n except OSError:\n self.socket.close()\n self._connect_unixsocket(self.address)\n self.socket.send(msg)\n elif self.socktype ==socket.SOCK_DGRAM:\n self.socket.sendto(msg,self.address)\n else:\n self.socket.sendall(msg)\n except Exception:\n self.handleError(record)\n \nclass SMTPHandler(logging.Handler):\n ''\n\n \n def __init__(self,mailhost,fromaddr,toaddrs,subject,\n credentials=None,secure=None,timeout=5.0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n logging.Handler.__init__(self)\n if isinstance(mailhost,(list,tuple)):\n self.mailhost,self.mailport=mailhost\n else:\n self.mailhost,self.mailport=mailhost,None\n if isinstance(credentials,(list,tuple)):\n self.username,self.password=credentials\n else:\n self.username=None\n self.fromaddr=fromaddr\n if isinstance(toaddrs,str):\n toaddrs=[toaddrs]\n self.toaddrs=toaddrs\n self.subject=subject\n self.secure=secure\n self.timeout=timeout\n \n def getSubject(self,record):\n ''\n\n\n\n\n \n return self.subject\n \n def emit(self,record):\n ''\n\n\n\n \n try:\n import smtplib\n from email.message import EmailMessage\n import email.utils\n \n port=self.mailport\n if not port:\n port=smtplib.SMTP_PORT\n smtp=smtplib.SMTP(self.mailhost,port,timeout=self.timeout)\n msg=EmailMessage()\n msg['From']=self.fromaddr\n msg['To']=','.join(self.toaddrs)\n msg['Subject']=self.getSubject(record)\n msg['Date']=email.utils.localtime()\n msg.set_content(self.format(record))\n if self.username:\n if self.secure is not None:\n smtp.ehlo()\n smtp.starttls(*self.secure)\n smtp.ehlo()\n smtp.login(self.username,self.password)\n smtp.send_message(msg)\n smtp.quit()\n 
except Exception:\n self.handleError(record)\n \nclass NTEventLogHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n \n def __init__(self,appname,dllname=None,logtype=\"Application\"):\n logging.Handler.__init__(self)\n try:\n import win32evtlogutil,win32evtlog\n self.appname=appname\n self._welu=win32evtlogutil\n if not dllname:\n dllname=os.path.split(self._welu.__file__)\n dllname=os.path.split(dllname[0])\n dllname=os.path.join(dllname[0],r'win32service.pyd')\n self.dllname=dllname\n self.logtype=logtype\n \n \n \n try:\n self._welu.AddSourceToRegistry(appname,dllname,logtype)\n except Exception as e:\n \n \n if getattr(e,'winerror',None)!=5:\n raise\n self.deftype=win32evtlog.EVENTLOG_ERROR_TYPE\n self.typemap={\n logging.DEBUG:win32evtlog.EVENTLOG_INFORMATION_TYPE,\n logging.INFO:win32evtlog.EVENTLOG_INFORMATION_TYPE,\n logging.WARNING:win32evtlog.EVENTLOG_WARNING_TYPE,\n logging.ERROR:win32evtlog.EVENTLOG_ERROR_TYPE,\n logging.CRITICAL:win32evtlog.EVENTLOG_ERROR_TYPE,\n }\n except ImportError:\n print(\"The Python Win32 extensions for NT (service, event \"\\\n \"logging) appear not to be available.\")\n self._welu=None\n \n def getMessageID(self,record):\n ''\n\n\n\n\n\n \n return 1\n \n def getEventCategory(self,record):\n ''\n\n\n\n\n \n return 0\n \n def getEventType(self,record):\n ''\n\n\n\n\n\n\n\n\n \n return self.typemap.get(record.levelno,self.deftype)\n \n def emit(self,record):\n ''\n\n\n\n\n \n if self._welu:\n try:\n id=self.getMessageID(record)\n cat=self.getEventCategory(record)\n type=self.getEventType(record)\n msg=self.format(record)\n self._welu.ReportEvent(self.appname,id,cat,type,[msg])\n except Exception:\n self.handleError(record)\n \n def close(self):\n ''\n\n\n\n\n\n\n\n \n \n logging.Handler.close(self)\n \nclass HTTPHandler(logging.Handler):\n ''\n\n\n \n def __init__(self,host,url,method=\"GET\",secure=False,credentials=None,\n context=None):\n ''\n\n\n \n logging.Handler.__init__(self)\n method=method.upper()\n if method not in[\"GET\",\"POST\"]:\n raise ValueError(\"method must be GET or POST\")\n if not secure and context is not None:\n raise ValueError(\"context parameter only makes sense \"\n \"with secure=True\")\n self.host=host\n self.url=url\n self.method=method\n self.secure=secure\n self.credentials=credentials\n self.context=context\n \n def mapLogRecord(self,record):\n ''\n\n\n\n \n return record.__dict__\n \n def getConnection(self,host,secure):\n ''\n\n\n\n\n \n import http.client\n if secure:\n connection=http.client.HTTPSConnection(host,context=self.context)\n else:\n connection=http.client.HTTPConnection(host)\n return connection\n \n def emit(self,record):\n ''\n\n\n\n \n try:\n import urllib.parse\n host=self.host\n h=self.getConnection(host,self.secure)\n url=self.url\n data=urllib.parse.urlencode(self.mapLogRecord(record))\n if self.method ==\"GET\":\n if(url.find('?')>=0):\n sep='&'\n else:\n sep='?'\n url=url+\"%c%s\"%(sep,data)\n h.putrequest(self.method,url)\n \n \n i=host.find(\":\")\n if i >=0:\n host=host[:i]\n \n \n \n if self.method ==\"POST\":\n h.putheader(\"Content-type\",\n \"application/x-www-form-urlencoded\")\n h.putheader(\"Content-length\",str(len(data)))\n if self.credentials:\n import base64\n s=('%s:%s'%self.credentials).encode('utf-8')\n s='Basic '+base64.b64encode(s).strip().decode('ascii')\n h.putheader('Authorization',s)\n h.endheaders()\n if self.method ==\"POST\":\n h.send(data.encode('utf-8'))\n h.getresponse()\n except Exception:\n self.handleError(record)\n \nclass BufferingHandler(logging.Handler):\n 
''\n\n\n\n \n def __init__(self,capacity):\n ''\n\n \n logging.Handler.__init__(self)\n self.capacity=capacity\n self.buffer=[]\n \n def shouldFlush(self,record):\n ''\n\n\n\n\n \n return(len(self.buffer)>=self.capacity)\n \n def emit(self,record):\n ''\n\n\n\n\n \n self.buffer.append(record)\n if self.shouldFlush(record):\n self.flush()\n \n def flush(self):\n ''\n\n\n\n \n self.acquire()\n try:\n self.buffer.clear()\n finally:\n self.release()\n \n def close(self):\n ''\n\n\n\n \n try:\n self.flush()\n finally:\n logging.Handler.close(self)\n \nclass MemoryHandler(BufferingHandler):\n ''\n\n\n\n \n def __init__(self,capacity,flushLevel=logging.ERROR,target=None,\n flushOnClose=True):\n ''\n\n\n\n\n\n\n\n\n\n\n \n BufferingHandler.__init__(self,capacity)\n self.flushLevel=flushLevel\n self.target=target\n \n self.flushOnClose=flushOnClose\n \n def shouldFlush(self,record):\n ''\n\n \n return(len(self.buffer)>=self.capacity)or\\\n (record.levelno >=self.flushLevel)\n \n def setTarget(self,target):\n ''\n\n \n self.acquire()\n try:\n self.target=target\n finally:\n self.release()\n \n def flush(self):\n ''\n\n\n\n\n\n \n self.acquire()\n try:\n if self.target:\n for record in self.buffer:\n self.target.handle(record)\n self.buffer.clear()\n finally:\n self.release()\n \n def close(self):\n ''\n\n\n \n try:\n if self.flushOnClose:\n self.flush()\n finally:\n self.acquire()\n try:\n self.target=None\n BufferingHandler.close(self)\n finally:\n self.release()\n \n \nclass QueueHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,queue):\n ''\n\n \n logging.Handler.__init__(self)\n self.queue=queue\n self.listener=None\n \n def enqueue(self,record):\n ''\n\n\n\n\n\n \n self.queue.put_nowait(record)\n \n def prepare(self,record):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n msg=self.format(record)\n \n record=copy.copy(record)\n record.message=msg\n record.msg=msg\n record.args=None\n record.exc_info=None\n record.exc_text=None\n record.stack_info=None\n return record\n \n def emit(self,record):\n ''\n\n\n\n \n try:\n self.enqueue(self.prepare(record))\n except Exception:\n self.handleError(record)\n \n \nclass QueueListener(object):\n ''\n\n\n\n \n _sentinel=None\n \n def __init__(self,queue,*handlers,respect_handler_level=False):\n ''\n\n\n \n self.queue=queue\n self.handlers=handlers\n self._thread=None\n self.respect_handler_level=respect_handler_level\n \n def dequeue(self,block):\n ''\n\n\n\n\n \n return self.queue.get(block)\n \n def start(self):\n ''\n\n\n\n\n \n self._thread=t=threading.Thread(target=self._monitor)\n t.daemon=True\n t.start()\n \n def prepare(self,record):\n ''\n\n\n\n\n\n \n return record\n \n def handle(self,record):\n ''\n\n\n\n\n \n record=self.prepare(record)\n for handler in self.handlers:\n if not self.respect_handler_level:\n process=True\n else:\n process=record.levelno >=handler.level\n if process:\n handler.handle(record)\n \n def _monitor(self):\n ''\n\n\n\n\n\n \n q=self.queue\n has_task_done=hasattr(q,'task_done')\n while True:\n try:\n record=self.dequeue(True)\n if record is self._sentinel:\n if has_task_done:\n q.task_done()\n break\n self.handle(record)\n if has_task_done:\n q.task_done()\n except queue.Empty:\n break\n \n def enqueue_sentinel(self):\n ''\n\n\n\n\n\n \n self.queue.put_nowait(self._sentinel)\n \n def stop(self):\n ''\n\n\n\n\n\n \n self.enqueue_sentinel()\n self._thread.join()\n self._thread=None\n", ["base64", "copy", "email.message", "email.utils", "http.client", "io", "logging", "os", "pickle", "queue", 
"re", "smtplib", "socket", "stat", "struct", "threading", "time", "urllib.parse", "win32evtlog", "win32evtlogutil"]], "logging": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\"\"\"\nLogging package for Python. Based on PEP 282 and comments thereto in\ncomp.lang.python.\n\nCopyright (C) 2001-2022 Vinay Sajip. All Rights Reserved.\n\nTo use, simply 'import logging' and log away!\n\"\"\"\n\nimport sys,os,time,io,re,traceback,warnings,weakref,collections.abc\n\nfrom types import GenericAlias\nfrom string import Template\nfrom string import Formatter as StrFormatter\n\n\n__all__=['BASIC_FORMAT','BufferingFormatter','CRITICAL','DEBUG','ERROR',\n'FATAL','FileHandler','Filter','Formatter','Handler','INFO',\n'LogRecord','Logger','LoggerAdapter','NOTSET','NullHandler',\n'StreamHandler','WARN','WARNING','addLevelName','basicConfig',\n'captureWarnings','critical','debug','disable','error',\n'exception','fatal','getLevelName','getLogger','getLoggerClass',\n'info','log','makeLogRecord','setLoggerClass','shutdown',\n'warn','warning','getLogRecordFactory','setLogRecordFactory',\n'lastResort','raiseExceptions','getLevelNamesMapping',\n'getHandlerByName','getHandlerNames']\n\nimport threading\n\n__author__=\"Vinay Sajip \"\n__status__=\"production\"\n\n__version__=\"0.5.1.2\"\n__date__=\"07 February 2010\"\n\n\n\n\n\n\n\n\n_startTime=time.time()\n\n\n\n\n\nraiseExceptions=True\n\n\n\n\nlogThreads=True\n\n\n\n\nlogMultiprocessing=True\n\n\n\n\nlogProcesses=True\n\n\n\n\nlogAsyncioTasks=True\n\n\n\n\n\n\n\n\n\n\n\n\nCRITICAL=50\nFATAL=CRITICAL\nERROR=40\nWARNING=30\nWARN=WARNING\nINFO=20\nDEBUG=10\nNOTSET=0\n\n_levelToName={\nCRITICAL:'CRITICAL',\nERROR:'ERROR',\nWARNING:'WARNING',\nINFO:'INFO',\nDEBUG:'DEBUG',\nNOTSET:'NOTSET',\n}\n_nameToLevel={\n'CRITICAL':CRITICAL,\n'FATAL':FATAL,\n'ERROR':ERROR,\n'WARN':WARNING,\n'WARNING':WARNING,\n'INFO':INFO,\n'DEBUG':DEBUG,\n'NOTSET':NOTSET,\n}\n\ndef getLevelNamesMapping():\n return _nameToLevel.copy()\n \ndef getLevelName(level):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n result=_levelToName.get(level)\n if result is not None:\n return result\n result=_nameToLevel.get(level)\n if result is not None:\n return result\n return \"Level %s\"%level\n \ndef addLevelName(level,levelName):\n ''\n\n\n\n \n _acquireLock()\n try:\n _levelToName[level]=levelName\n _nameToLevel[levelName]=level\n finally:\n _releaseLock()\n \nif hasattr(sys,\"_getframe\"):\n currentframe=lambda:sys._getframe(1)\nelse:\n def currentframe():\n ''\n try:\n raise Exception\n except Exception as exc:\n return exc.__traceback__.tb_frame.f_back\n \n \n \n \n \n \n \n \n \n \n \n \n \n_srcfile=os.path.normcase(addLevelName.__code__.co_filename)\n\n\n\n\n\n\n\n\ndef _is_internal_frame(frame):\n ''\n filename=os.path.normcase(frame.f_code.co_filename)\n return filename ==_srcfile or(\n \"importlib\"in filename and \"_bootstrap\"in filename\n )\n \n \ndef _checkLevel(level):\n if isinstance(level,int):\n rv=level\n elif str(level)==level:\n if level not in _nameToLevel:\n raise ValueError(\"Unknown level: %r\"%level)\n rv=_nameToLevel[level]\n else:\n raise TypeError(\"Level not an integer or a valid string: %r\"\n %(level,))\n return rv\n \n \n \n \n \n \n \n \n \n \n \n \n \n_lock=threading.RLock()\n\ndef _acquireLock():\n ''\n\n\n\n \n if _lock:\n _lock.acquire()\n \ndef _releaseLock():\n ''\n\n \n if _lock:\n _lock.release()\n \n \n \n \nif not hasattr(os,'register_at_fork'):\n def _register_at_fork_reinit_lock(instance):\n pass\nelse:\n\n\n\n _at_fork_reinit_lock_weakset=weakref.WeakSet()\n \n def 
_register_at_fork_reinit_lock(instance):\n _acquireLock()\n try:\n _at_fork_reinit_lock_weakset.add(instance)\n finally:\n _releaseLock()\n \n def _after_at_fork_child_reinit_locks():\n for handler in _at_fork_reinit_lock_weakset:\n handler._at_fork_reinit()\n \n \n \n _lock._at_fork_reinit()\n \n os.register_at_fork(before=_acquireLock,\n after_in_child=_after_at_fork_child_reinit_locks,\n after_in_parent=_releaseLock)\n \n \n \n \n \n \nclass LogRecord(object):\n ''\n\n\n\n\n\n\n\n\n\n \n def __init__(self,name,level,pathname,lineno,\n msg,args,exc_info,func=None,sinfo=None,**kwargs):\n ''\n\n \n ct=time.time()\n self.name=name\n self.msg=msg\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if(args and len(args)==1 and isinstance(args[0],collections.abc.Mapping)\n and args[0]):\n args=args[0]\n self.args=args\n self.levelname=getLevelName(level)\n self.levelno=level\n self.pathname=pathname\n try:\n self.filename=os.path.basename(pathname)\n self.module=os.path.splitext(self.filename)[0]\n except(TypeError,ValueError,AttributeError):\n self.filename=pathname\n self.module=\"Unknown module\"\n self.exc_info=exc_info\n self.exc_text=None\n self.stack_info=sinfo\n self.lineno=lineno\n self.funcName=func\n self.created=ct\n self.msecs=int((ct -int(ct))*1000)+0.0\n self.relativeCreated=(self.created -_startTime)*1000\n if logThreads:\n self.thread=threading.get_ident()\n self.threadName=threading.current_thread().name\n else:\n self.thread=None\n self.threadName=None\n if not logMultiprocessing:\n self.processName=None\n else:\n self.processName='MainProcess'\n mp=sys.modules.get('multiprocessing')\n if mp is not None:\n \n \n \n \n try:\n self.processName=mp.current_process().name\n except Exception:\n pass\n if logProcesses and hasattr(os,'getpid'):\n self.process=os.getpid()\n else:\n self.process=None\n \n self.taskName=None\n if logAsyncioTasks:\n asyncio=sys.modules.get('asyncio')\n if asyncio:\n try:\n self.taskName=asyncio.current_task().get_name()\n except Exception:\n pass\n \n def __repr__(self):\n return ''%(self.name,self.levelno,\n self.pathname,self.lineno,self.msg)\n \n def getMessage(self):\n ''\n\n\n\n\n \n msg=str(self.msg)\n if self.args:\n msg=msg %self.args\n return msg\n \n \n \n \n_logRecordFactory=LogRecord\n\ndef setLogRecordFactory(factory):\n ''\n\n\n\n\n \n global _logRecordFactory\n _logRecordFactory=factory\n \ndef getLogRecordFactory():\n ''\n\n \n \n return _logRecordFactory\n \ndef makeLogRecord(dict):\n ''\n\n\n\n\n \n rv=_logRecordFactory(None,None,\"\",0,\"\",(),None,None)\n rv.__dict__.update(dict)\n return rv\n \n \n \n \n \n_str_formatter=StrFormatter()\ndel StrFormatter\n\n\nclass PercentStyle(object):\n\n default_format='%(message)s'\n asctime_format='%(asctime)s'\n asctime_search='%(asctime)'\n validation_pattern=re.compile(r'%\\(\\w+\\)[#0+ -]*(\\*|\\d+)?(\\.(\\*|\\d+))?[diouxefgcrsa%]',re.I)\n \n def __init__(self,fmt,*,defaults=None):\n self._fmt=fmt or self.default_format\n self._defaults=defaults\n \n def usesTime(self):\n return self._fmt.find(self.asctime_search)>=0\n \n def validate(self):\n ''\n if not self.validation_pattern.search(self._fmt):\n raise ValueError(\"Invalid format '%s' for '%s' style\"%(self._fmt,self.default_format[0]))\n \n def _format(self,record):\n if defaults :=self._defaults:\n values=defaults |record.__dict__\n else:\n values=record.__dict__\n return self._fmt %values\n \n def format(self,record):\n try:\n return self._format(record)\n except KeyError as e:\n raise ValueError('Formatting field not found in 
record: %s'%e)\n \n \nclass StrFormatStyle(PercentStyle):\n default_format='{message}'\n asctime_format='{asctime}'\n asctime_search='{asctime'\n \n fmt_spec=re.compile(r'^(.?[<>=^])?[+ -]?#?0?(\\d+|{\\w+})?[,_]?(\\.(\\d+|{\\w+}))?[bcdefgnosx%]?$',re.I)\n field_spec=re.compile(r'^(\\d+|\\w+)(\\.\\w+|\\[[^]]+\\])*$')\n \n def _format(self,record):\n if defaults :=self._defaults:\n values=defaults |record.__dict__\n else:\n values=record.__dict__\n return self._fmt.format(**values)\n \n def validate(self):\n ''\n fields=set()\n try:\n for _,fieldname,spec,conversion in _str_formatter.parse(self._fmt):\n if fieldname:\n if not self.field_spec.match(fieldname):\n raise ValueError('invalid field name/expression: %r'%fieldname)\n fields.add(fieldname)\n if conversion and conversion not in 'rsa':\n raise ValueError('invalid conversion: %r'%conversion)\n if spec and not self.fmt_spec.match(spec):\n raise ValueError('bad specifier: %r'%spec)\n except ValueError as e:\n raise ValueError('invalid format: %s'%e)\n if not fields:\n raise ValueError('invalid format: no fields')\n \n \nclass StringTemplateStyle(PercentStyle):\n default_format='${message}'\n asctime_format='${asctime}'\n asctime_search='${asctime}'\n \n def __init__(self,*args,**kwargs):\n super().__init__(*args,**kwargs)\n self._tpl=Template(self._fmt)\n \n def usesTime(self):\n fmt=self._fmt\n return fmt.find('$asctime')>=0 or fmt.find(self.asctime_search)>=0\n \n def validate(self):\n pattern=Template.pattern\n fields=set()\n for m in pattern.finditer(self._fmt):\n d=m.groupdict()\n if d['named']:\n fields.add(d['named'])\n elif d['braced']:\n fields.add(d['braced'])\n elif m.group(0)=='$':\n raise ValueError('invalid format: bare \\'$\\' not allowed')\n if not fields:\n raise ValueError('invalid format: no fields')\n \n def _format(self,record):\n if defaults :=self._defaults:\n values=defaults |record.__dict__\n else:\n values=record.__dict__\n return self._tpl.substitute(**values)\n \n \nBASIC_FORMAT=\"%(levelname)s:%(name)s:%(message)s\"\n\n_STYLES={\n'%':(PercentStyle,BASIC_FORMAT),\n'{':(StrFormatStyle,'{levelname}:{name}:{message}'),\n'$':(StringTemplateStyle,'${levelname}:${name}:${message}'),\n}\n\nclass Formatter(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n converter=time.localtime\n \n def __init__(self,fmt=None,datefmt=None,style='%',validate=True,*,\n defaults=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if style not in _STYLES:\n raise ValueError('Style must be one of: %s'%','.join(\n _STYLES.keys()))\n self._style=_STYLES[style][0](fmt,defaults=defaults)\n if validate:\n self._style.validate()\n \n self._fmt=self._style._fmt\n self.datefmt=datefmt\n \n default_time_format='%Y-%m-%d %H:%M:%S'\n default_msec_format='%s,%03d'\n \n def formatTime(self,record,datefmt=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n ct=self.converter(record.created)\n if datefmt:\n s=time.strftime(datefmt,ct)\n else:\n s=time.strftime(self.default_time_format,ct)\n if self.default_msec_format:\n s=self.default_msec_format %(s,record.msecs)\n return s\n \n def formatException(self,ei):\n ''\n\n\n\n\n \n sio=io.StringIO()\n tb=ei[2]\n \n \n \n traceback.print_exception(ei[0],ei[1],tb,None,sio)\n s=sio.getvalue()\n sio.close()\n if s[-1:]==\"\\n\":\n s=s[:-1]\n return s\n \n def usesTime(self):\n ''\n\n \n return self._style.usesTime()\n \n def formatMessage(self,record):\n return self._style.format(record)\n \n def formatStack(self,stack_info):\n ''\n\n\n\n\n\n\n\n\n \n return stack_info\n \n 
def format(self,record):\n ''\n\n\n\n\n\n\n\n\n\n\n \n record.message=record.getMessage()\n if self.usesTime():\n record.asctime=self.formatTime(record,self.datefmt)\n s=self.formatMessage(record)\n if record.exc_info:\n \n \n if not record.exc_text:\n record.exc_text=self.formatException(record.exc_info)\n if record.exc_text:\n if s[-1:]!=\"\\n\":\n s=s+\"\\n\"\n s=s+record.exc_text\n if record.stack_info:\n if s[-1:]!=\"\\n\":\n s=s+\"\\n\"\n s=s+self.formatStack(record.stack_info)\n return s\n \n \n \n \n_defaultFormatter=Formatter()\n\nclass BufferingFormatter(object):\n ''\n\n \n def __init__(self,linefmt=None):\n ''\n\n\n \n if linefmt:\n self.linefmt=linefmt\n else:\n self.linefmt=_defaultFormatter\n \n def formatHeader(self,records):\n ''\n\n \n return \"\"\n \n def formatFooter(self,records):\n ''\n\n \n return \"\"\n \n def format(self,records):\n ''\n\n \n rv=\"\"\n if len(records)>0:\n rv=rv+self.formatHeader(records)\n for record in records:\n rv=rv+self.linefmt.format(record)\n rv=rv+self.formatFooter(records)\n return rv\n \n \n \n \n \nclass Filter(object):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,name=''):\n ''\n\n\n\n\n\n \n self.name=name\n self.nlen=len(name)\n \n def filter(self,record):\n ''\n\n\n\n\n \n if self.nlen ==0:\n return True\n elif self.name ==record.name:\n return True\n elif record.name.find(self.name,0,self.nlen)!=0:\n return False\n return(record.name[self.nlen]==\".\")\n \nclass Filterer(object):\n ''\n\n\n \n def __init__(self):\n ''\n\n \n self.filters=[]\n \n def addFilter(self,filter):\n ''\n\n \n if not(filter in self.filters):\n self.filters.append(filter)\n \n def removeFilter(self,filter):\n ''\n\n \n if filter in self.filters:\n self.filters.remove(filter)\n \n def filter(self,record):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n for f in self.filters:\n if hasattr(f,'filter'):\n result=f.filter(record)\n else:\n result=f(record)\n if not result:\n return False\n if isinstance(result,LogRecord):\n record=result\n return record\n \n \n \n \n \n_handlers=weakref.WeakValueDictionary()\n_handlerList=[]\n\ndef _removeHandlerRef(wr):\n ''\n\n \n \n \n \n \n acquire,release,handlers=_acquireLock,_releaseLock,_handlerList\n if acquire and release and handlers:\n acquire()\n try:\n handlers.remove(wr)\n except ValueError:\n pass\n finally:\n release()\n \ndef _addHandlerRef(handler):\n ''\n\n \n _acquireLock()\n try:\n _handlerList.append(weakref.ref(handler,_removeHandlerRef))\n finally:\n _releaseLock()\n \n \ndef getHandlerByName(name):\n ''\n\n\n \n return _handlers.get(name)\n \n \ndef getHandlerNames():\n ''\n\n \n result=set(_handlers.keys())\n return frozenset(result)\n \n \nclass Handler(Filterer):\n ''\n\n\n\n\n\n\n \n def __init__(self,level=NOTSET):\n ''\n\n\n \n Filterer.__init__(self)\n self._name=None\n self.level=_checkLevel(level)\n self.formatter=None\n self._closed=False\n \n _addHandlerRef(self)\n self.createLock()\n \n def get_name(self):\n return self._name\n \n def set_name(self,name):\n _acquireLock()\n try:\n if self._name in _handlers:\n del _handlers[self._name]\n self._name=name\n if name:\n _handlers[name]=self\n finally:\n _releaseLock()\n \n name=property(get_name,set_name)\n \n def createLock(self):\n ''\n\n \n self.lock=threading.RLock()\n _register_at_fork_reinit_lock(self)\n \n def _at_fork_reinit(self):\n self.lock._at_fork_reinit()\n \n def acquire(self):\n ''\n\n \n if self.lock:\n self.lock.acquire()\n \n def release(self):\n ''\n\n \n if self.lock:\n self.lock.release()\n \n def 
setLevel(self,level):\n ''\n\n \n self.level=_checkLevel(level)\n \n def format(self,record):\n ''\n\n\n\n\n \n if self.formatter:\n fmt=self.formatter\n else:\n fmt=_defaultFormatter\n return fmt.format(record)\n \n def emit(self,record):\n ''\n\n\n\n\n \n raise NotImplementedError('emit must be implemented '\n 'by Handler subclasses')\n \n def handle(self,record):\n ''\n\n\n\n\n\n\n\n\n \n rv=self.filter(record)\n if isinstance(rv,LogRecord):\n record=rv\n if rv:\n self.acquire()\n try:\n self.emit(record)\n finally:\n self.release()\n return rv\n \n def setFormatter(self,fmt):\n ''\n\n \n self.formatter=fmt\n \n def flush(self):\n ''\n\n\n\n\n \n pass\n \n def close(self):\n ''\n\n\n\n\n\n\n \n \n _acquireLock()\n try:\n self._closed=True\n if self._name and self._name in _handlers:\n del _handlers[self._name]\n finally:\n _releaseLock()\n \n def handleError(self,record):\n ''\n\n\n\n\n\n\n\n\n\n \n if raiseExceptions and sys.stderr:\n t,v,tb=sys.exc_info()\n try:\n sys.stderr.write('--- Logging error ---\\n')\n traceback.print_exception(t,v,tb,None,sys.stderr)\n sys.stderr.write('Call stack:\\n')\n \n \n frame=tb.tb_frame\n while(frame and os.path.dirname(frame.f_code.co_filename)==\n __path__[0]):\n frame=frame.f_back\n if frame:\n traceback.print_stack(frame,file=sys.stderr)\n else:\n \n sys.stderr.write('Logged from file %s, line %s\\n'%(\n record.filename,record.lineno))\n \n try:\n sys.stderr.write('Message: %r\\n'\n 'Arguments: %s\\n'%(record.msg,\n record.args))\n except RecursionError:\n raise\n except Exception:\n sys.stderr.write('Unable to print the message and arguments'\n ' - possible formatting error.\\nUse the'\n ' traceback above to help find the error.\\n'\n )\n except OSError:\n pass\n finally:\n del t,v,tb\n \n def __repr__(self):\n level=getLevelName(self.level)\n return '<%s (%s)>'%(self.__class__.__name__,level)\n \nclass StreamHandler(Handler):\n ''\n\n\n\n \n \n terminator='\\n'\n \n def __init__(self,stream=None):\n ''\n\n\n\n \n Handler.__init__(self)\n if stream is None:\n stream=sys.stderr\n self.stream=stream\n \n def flush(self):\n ''\n\n \n self.acquire()\n try:\n if self.stream and hasattr(self.stream,\"flush\"):\n self.stream.flush()\n finally:\n self.release()\n \n def emit(self,record):\n ''\n\n\n\n\n\n\n\n\n \n try:\n msg=self.format(record)\n stream=self.stream\n \n stream.write(msg+self.terminator)\n self.flush()\n except RecursionError:\n raise\n except Exception:\n self.handleError(record)\n \n def setStream(self,stream):\n ''\n\n\n\n\n\n \n if stream is self.stream:\n result=None\n else:\n result=self.stream\n self.acquire()\n try:\n self.flush()\n self.stream=stream\n finally:\n self.release()\n return result\n \n def __repr__(self):\n level=getLevelName(self.level)\n name=getattr(self.stream,'name','')\n \n name=str(name)\n if name:\n name +=' '\n return '<%s %s(%s)>'%(self.__class__.__name__,name,level)\n \n __class_getitem__=classmethod(GenericAlias)\n \n \nclass FileHandler(StreamHandler):\n ''\n\n \n def __init__(self,filename,mode='a',encoding=None,delay=False,errors=None):\n ''\n\n \n \n filename=os.fspath(filename)\n \n \n self.baseFilename=os.path.abspath(filename)\n self.mode=mode\n self.encoding=encoding\n if \"b\"not in mode:\n self.encoding=io.text_encoding(encoding)\n self.errors=errors\n self.delay=delay\n \n \n \n self._builtin_open=open\n if delay:\n \n \n Handler.__init__(self)\n self.stream=None\n else:\n StreamHandler.__init__(self,self._open())\n \n def close(self):\n ''\n\n \n self.acquire()\n try:\n try:\n if 
self.stream:\n try:\n self.flush()\n finally:\n stream=self.stream\n self.stream=None\n if hasattr(stream,\"close\"):\n stream.close()\n finally:\n \n \n \n \n StreamHandler.close(self)\n finally:\n self.release()\n \n def _open(self):\n ''\n\n\n \n open_func=self._builtin_open\n return open_func(self.baseFilename,self.mode,\n encoding=self.encoding,errors=self.errors)\n \n def emit(self,record):\n ''\n\n\n\n\n\n\n\n \n if self.stream is None:\n if self.mode !='w'or not self._closed:\n self.stream=self._open()\n if self.stream:\n StreamHandler.emit(self,record)\n \n def __repr__(self):\n level=getLevelName(self.level)\n return '<%s %s (%s)>'%(self.__class__.__name__,self.baseFilename,level)\n \n \nclass _StderrHandler(StreamHandler):\n ''\n\n\n\n \n def __init__(self,level=NOTSET):\n ''\n\n \n Handler.__init__(self,level)\n \n @property\n def stream(self):\n return sys.stderr\n \n \n_defaultLastResort=_StderrHandler(WARNING)\nlastResort=_defaultLastResort\n\n\n\n\n\nclass PlaceHolder(object):\n ''\n\n\n\n \n def __init__(self,alogger):\n ''\n\n \n self.loggerMap={alogger:None}\n \n def append(self,alogger):\n ''\n\n \n if alogger not in self.loggerMap:\n self.loggerMap[alogger]=None\n \n \n \n \n \ndef setLoggerClass(klass):\n ''\n\n\n\n \n if klass !=Logger:\n if not issubclass(klass,Logger):\n raise TypeError(\"logger not derived from logging.Logger: \"\n +klass.__name__)\n global _loggerClass\n _loggerClass=klass\n \ndef getLoggerClass():\n ''\n\n \n return _loggerClass\n \nclass Manager(object):\n ''\n\n\n \n def __init__(self,rootnode):\n ''\n\n \n self.root=rootnode\n self.disable=0\n self.emittedNoHandlerWarning=False\n self.loggerDict={}\n self.loggerClass=None\n self.logRecordFactory=None\n \n @property\n def disable(self):\n return self._disable\n \n @disable.setter\n def disable(self,value):\n self._disable=_checkLevel(value)\n \n def getLogger(self,name):\n ''\n\n\n\n\n\n\n\n\n \n rv=None\n if not isinstance(name,str):\n raise TypeError('A logger name must be a string')\n _acquireLock()\n try:\n if name in self.loggerDict:\n rv=self.loggerDict[name]\n if isinstance(rv,PlaceHolder):\n ph=rv\n rv=(self.loggerClass or _loggerClass)(name)\n rv.manager=self\n self.loggerDict[name]=rv\n self._fixupChildren(ph,rv)\n self._fixupParents(rv)\n else:\n rv=(self.loggerClass or _loggerClass)(name)\n rv.manager=self\n self.loggerDict[name]=rv\n self._fixupParents(rv)\n finally:\n _releaseLock()\n return rv\n \n def setLoggerClass(self,klass):\n ''\n\n \n if klass !=Logger:\n if not issubclass(klass,Logger):\n raise TypeError(\"logger not derived from logging.Logger: \"\n +klass.__name__)\n self.loggerClass=klass\n \n def setLogRecordFactory(self,factory):\n ''\n\n\n \n self.logRecordFactory=factory\n \n def _fixupParents(self,alogger):\n ''\n\n\n \n name=alogger.name\n i=name.rfind(\".\")\n rv=None\n while(i >0)and not rv:\n substr=name[:i]\n if substr not in self.loggerDict:\n self.loggerDict[substr]=PlaceHolder(alogger)\n else:\n obj=self.loggerDict[substr]\n if isinstance(obj,Logger):\n rv=obj\n else:\n assert isinstance(obj,PlaceHolder)\n obj.append(alogger)\n i=name.rfind(\".\",0,i -1)\n if not rv:\n rv=self.root\n alogger.parent=rv\n \n def _fixupChildren(self,ph,alogger):\n ''\n\n\n \n name=alogger.name\n namelen=len(name)\n for c in ph.loggerMap.keys():\n \n if c.parent.name[:namelen]!=name:\n alogger.parent=c.parent\n c.parent=alogger\n \n def _clear_cache(self):\n ''\n\n\n \n \n _acquireLock()\n for logger in self.loggerDict.values():\n if isinstance(logger,Logger):\n 
logger._cache.clear()\n self.root._cache.clear()\n _releaseLock()\n \n \n \n \n \nclass Logger(Filterer):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,name,level=NOTSET):\n ''\n\n \n Filterer.__init__(self)\n self.name=name\n self.level=_checkLevel(level)\n self.parent=None\n self.propagate=True\n self.handlers=[]\n self.disabled=False\n self._cache={}\n \n def setLevel(self,level):\n ''\n\n \n self.level=_checkLevel(level)\n self.manager._clear_cache()\n \n def debug(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(DEBUG):\n self._log(DEBUG,msg,args,**kwargs)\n \n def info(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(INFO):\n self._log(INFO,msg,args,**kwargs)\n \n def warning(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(WARNING):\n self._log(WARNING,msg,args,**kwargs)\n \n def warn(self,msg,*args,**kwargs):\n warnings.warn(\"The 'warn' method is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n self.warning(msg,*args,**kwargs)\n \n def error(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(ERROR):\n self._log(ERROR,msg,args,**kwargs)\n \n def exception(self,msg,*args,exc_info=True,**kwargs):\n ''\n\n \n self.error(msg,*args,exc_info=exc_info,**kwargs)\n \n def critical(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(CRITICAL):\n self._log(CRITICAL,msg,args,**kwargs)\n \n def fatal(self,msg,*args,**kwargs):\n ''\n\n \n self.critical(msg,*args,**kwargs)\n \n def log(self,level,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if not isinstance(level,int):\n if raiseExceptions:\n raise TypeError(\"level must be an integer\")\n else:\n return\n if self.isEnabledFor(level):\n self._log(level,msg,args,**kwargs)\n \n def findCaller(self,stack_info=False,stacklevel=1):\n ''\n\n\n \n f=currentframe()\n \n \n if f is None:\n return \"(unknown file)\",0,\"(unknown function)\",None\n while stacklevel >0:\n next_f=f.f_back\n if next_f is None:\n \n \n break\n \n \n \n \n f=next_f\n if not _is_internal_frame(f):\n stacklevel -=1\n co=f.f_code\n sinfo=None\n if stack_info:\n with io.StringIO()as sio:\n sio.write(\"Stack (most recent call last):\\n\")\n traceback.print_stack(f,file=sio)\n sinfo=sio.getvalue()\n if sinfo[-1]=='\\n':\n sinfo=sinfo[:-1]\n return co.co_filename,f.f_lineno,co.co_name,sinfo\n \n def makeRecord(self,name,level,fn,lno,msg,args,exc_info,\n func=None,extra=None,sinfo=None):\n ''\n\n\n \n rv=_logRecordFactory(name,level,fn,lno,msg,args,exc_info,func,\n sinfo)\n if extra is not None:\n for key in extra:\n if(key in[\"message\",\"asctime\"])or(key in rv.__dict__):\n raise KeyError(\"Attempt to overwrite %r in LogRecord\"%key)\n rv.__dict__[key]=extra[key]\n return rv\n \n def _log(self,level,msg,args,exc_info=None,extra=None,stack_info=False,\n stacklevel=1):\n ''\n\n\n \n sinfo=None\n if _srcfile:\n \n \n \n try:\n fn,lno,func,sinfo=self.findCaller(stack_info,stacklevel)\n except ValueError:\n fn,lno,func=\"(unknown file)\",0,\"(unknown function)\"\n else:\n fn,lno,func=\"(unknown file)\",0,\"(unknown function)\"\n if exc_info:\n if isinstance(exc_info,BaseException):\n exc_info=(type(exc_info),exc_info,exc_info.__traceback__)\n elif not isinstance(exc_info,tuple):\n exc_info=sys.exc_info()\n record=self.makeRecord(self.name,level,fn,lno,msg,args,\n exc_info,func,extra,sinfo)\n self.handle(record)\n \n def handle(self,record):\n ''\n\n\n\n\n \n if self.disabled:\n return\n maybe_record=self.filter(record)\n if not maybe_record:\n return\n if 
isinstance(maybe_record,LogRecord):\n record=maybe_record\n self.callHandlers(record)\n \n def addHandler(self,hdlr):\n ''\n\n \n _acquireLock()\n try:\n if not(hdlr in self.handlers):\n self.handlers.append(hdlr)\n finally:\n _releaseLock()\n \n def removeHandler(self,hdlr):\n ''\n\n \n _acquireLock()\n try:\n if hdlr in self.handlers:\n self.handlers.remove(hdlr)\n finally:\n _releaseLock()\n \n def hasHandlers(self):\n ''\n\n\n\n\n\n\n\n \n c=self\n rv=False\n while c:\n if c.handlers:\n rv=True\n break\n if not c.propagate:\n break\n else:\n c=c.parent\n return rv\n \n def callHandlers(self,record):\n ''\n\n\n\n\n\n\n\n \n c=self\n found=0\n while c:\n for hdlr in c.handlers:\n found=found+1\n if record.levelno >=hdlr.level:\n hdlr.handle(record)\n if not c.propagate:\n c=None\n else:\n c=c.parent\n if(found ==0):\n if lastResort:\n if record.levelno >=lastResort.level:\n lastResort.handle(record)\n elif raiseExceptions and not self.manager.emittedNoHandlerWarning:\n sys.stderr.write(\"No handlers could be found for logger\"\n \" \\\"%s\\\"\\n\"%self.name)\n self.manager.emittedNoHandlerWarning=True\n \n def getEffectiveLevel(self):\n ''\n\n\n\n\n \n logger=self\n while logger:\n if logger.level:\n return logger.level\n logger=logger.parent\n return NOTSET\n \n def isEnabledFor(self,level):\n ''\n\n \n if self.disabled:\n return False\n \n try:\n return self._cache[level]\n except KeyError:\n _acquireLock()\n try:\n if self.manager.disable >=level:\n is_enabled=self._cache[level]=False\n else:\n is_enabled=self._cache[level]=(\n level >=self.getEffectiveLevel()\n )\n finally:\n _releaseLock()\n return is_enabled\n \n def getChild(self,suffix):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.root is not self:\n suffix='.'.join((self.name,suffix))\n return self.manager.getLogger(suffix)\n \n def getChildren(self):\n \n def _hierlevel(logger):\n if logger is logger.manager.root:\n return 0\n return 1+logger.name.count('.')\n \n d=self.manager.loggerDict\n _acquireLock()\n try:\n \n \n \n return set(item for item in d.values()\n if isinstance(item,Logger)and item.parent is self and\n _hierlevel(item)==1+_hierlevel(item.parent))\n finally:\n _releaseLock()\n \n def __repr__(self):\n level=getLevelName(self.getEffectiveLevel())\n return '<%s %s (%s)>'%(self.__class__.__name__,self.name,level)\n \n def __reduce__(self):\n if getLogger(self.name)is not self:\n import pickle\n raise pickle.PicklingError('logger cannot be pickled')\n return getLogger,(self.name,)\n \n \nclass RootLogger(Logger):\n ''\n\n\n\n \n def __init__(self,level):\n ''\n\n \n Logger.__init__(self,\"root\",level)\n \n def __reduce__(self):\n return getLogger,()\n \n_loggerClass=Logger\n\nclass LoggerAdapter(object):\n ''\n\n\n \n \n def __init__(self,logger,extra=None):\n ''\n\n\n\n\n\n\n\n\n \n self.logger=logger\n self.extra=extra\n \n def process(self,msg,kwargs):\n ''\n\n\n\n\n\n\n\n \n kwargs[\"extra\"]=self.extra\n return msg,kwargs\n \n \n \n \n def debug(self,msg,*args,**kwargs):\n ''\n\n \n self.log(DEBUG,msg,*args,**kwargs)\n \n def info(self,msg,*args,**kwargs):\n ''\n\n \n self.log(INFO,msg,*args,**kwargs)\n \n def warning(self,msg,*args,**kwargs):\n ''\n\n \n self.log(WARNING,msg,*args,**kwargs)\n \n def warn(self,msg,*args,**kwargs):\n warnings.warn(\"The 'warn' method is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n self.warning(msg,*args,**kwargs)\n \n def error(self,msg,*args,**kwargs):\n ''\n\n \n self.log(ERROR,msg,*args,**kwargs)\n \n def exception(self,msg,*args,exc_info=True,**kwargs):\n 
''\n\n \n self.log(ERROR,msg,*args,exc_info=exc_info,**kwargs)\n \n def critical(self,msg,*args,**kwargs):\n ''\n\n \n self.log(CRITICAL,msg,*args,**kwargs)\n \n def log(self,level,msg,*args,**kwargs):\n ''\n\n\n \n if self.isEnabledFor(level):\n msg,kwargs=self.process(msg,kwargs)\n self.logger.log(level,msg,*args,**kwargs)\n \n def isEnabledFor(self,level):\n ''\n\n \n return self.logger.isEnabledFor(level)\n \n def setLevel(self,level):\n ''\n\n \n self.logger.setLevel(level)\n \n def getEffectiveLevel(self):\n ''\n\n \n return self.logger.getEffectiveLevel()\n \n def hasHandlers(self):\n ''\n\n \n return self.logger.hasHandlers()\n \n def _log(self,level,msg,args,exc_info=None,extra=None,stack_info=False):\n ''\n\n \n return self.logger._log(\n level,\n msg,\n args,\n exc_info=exc_info,\n extra=extra,\n stack_info=stack_info,\n )\n \n @property\n def manager(self):\n return self.logger.manager\n \n @manager.setter\n def manager(self,value):\n self.logger.manager=value\n \n @property\n def name(self):\n return self.logger.name\n \n def __repr__(self):\n logger=self.logger\n level=getLevelName(logger.getEffectiveLevel())\n return '<%s %s (%s)>'%(self.__class__.__name__,logger.name,level)\n \n __class_getitem__=classmethod(GenericAlias)\n \nroot=RootLogger(WARNING)\nLogger.root=root\nLogger.manager=Manager(Logger.root)\n\n\n\n\n\ndef basicConfig(**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n _acquireLock()\n try:\n force=kwargs.pop('force',False)\n encoding=kwargs.pop('encoding',None)\n errors=kwargs.pop('errors','backslashreplace')\n if force:\n for h in root.handlers[:]:\n root.removeHandler(h)\n h.close()\n if len(root.handlers)==0:\n handlers=kwargs.pop(\"handlers\",None)\n if handlers is None:\n if \"stream\"in kwargs and \"filename\"in kwargs:\n raise ValueError(\"'stream' and 'filename' should not be \"\n \"specified together\")\n else:\n if \"stream\"in kwargs or \"filename\"in kwargs:\n raise ValueError(\"'stream' or 'filename' should not be \"\n \"specified together with 'handlers'\")\n if handlers is None:\n filename=kwargs.pop(\"filename\",None)\n mode=kwargs.pop(\"filemode\",'a')\n if filename:\n if 'b'in mode:\n errors=None\n else:\n encoding=io.text_encoding(encoding)\n h=FileHandler(filename,mode,\n encoding=encoding,errors=errors)\n else:\n stream=kwargs.pop(\"stream\",None)\n h=StreamHandler(stream)\n handlers=[h]\n dfs=kwargs.pop(\"datefmt\",None)\n style=kwargs.pop(\"style\",'%')\n if style not in _STYLES:\n raise ValueError('Style must be one of: %s'%','.join(\n _STYLES.keys()))\n fs=kwargs.pop(\"format\",_STYLES[style][1])\n fmt=Formatter(fs,dfs,style)\n for h in handlers:\n if h.formatter is None:\n h.setFormatter(fmt)\n root.addHandler(h)\n level=kwargs.pop(\"level\",None)\n if level is not None:\n root.setLevel(level)\n if kwargs:\n keys=', '.join(kwargs.keys())\n raise ValueError('Unrecognised argument(s): %s'%keys)\n finally:\n _releaseLock()\n \n \n \n \n \n \ndef getLogger(name=None):\n ''\n\n\n\n \n if not name or isinstance(name,str)and name ==root.name:\n return root\n return Logger.manager.getLogger(name)\n \ndef critical(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.critical(msg,*args,**kwargs)\n \ndef fatal(msg,*args,**kwargs):\n ''\n\n \n critical(msg,*args,**kwargs)\n \ndef error(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.error(msg,*args,**kwargs)\n \ndef 
exception(msg,*args,exc_info=True,**kwargs):\n ''\n\n\n\n \n error(msg,*args,exc_info=exc_info,**kwargs)\n \ndef warning(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.warning(msg,*args,**kwargs)\n \ndef warn(msg,*args,**kwargs):\n warnings.warn(\"The 'warn' function is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n warning(msg,*args,**kwargs)\n \ndef info(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.info(msg,*args,**kwargs)\n \ndef debug(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.debug(msg,*args,**kwargs)\n \ndef log(level,msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.log(level,msg,*args,**kwargs)\n \ndef disable(level=CRITICAL):\n ''\n\n \n root.manager.disable=level\n root.manager._clear_cache()\n \ndef shutdown(handlerList=_handlerList):\n ''\n\n\n\n\n \n for wr in reversed(handlerList[:]):\n \n \n try:\n h=wr()\n if h:\n try:\n h.acquire()\n \n \n \n if getattr(h,'flushOnClose',True):\n h.flush()\n h.close()\n except(OSError,ValueError):\n \n \n \n \n pass\n finally:\n h.release()\n except:\n if raiseExceptions:\n raise\n \n \n \nimport atexit\natexit.register(shutdown)\n\n\n\nclass NullHandler(Handler):\n ''\n\n\n\n\n\n\n\n \n def handle(self,record):\n ''\n \n def emit(self,record):\n ''\n \n def createLock(self):\n self.lock=None\n \n def _at_fork_reinit(self):\n pass\n \n \n \n_warnings_showwarning=None\n\ndef _showwarning(message,category,filename,lineno,file=None,line=None):\n ''\n\n\n\n\n\n \n if file is not None:\n if _warnings_showwarning is not None:\n _warnings_showwarning(message,category,filename,lineno,file,line)\n else:\n s=warnings.formatwarning(message,category,filename,lineno,line)\n logger=getLogger(\"py.warnings\")\n if not logger.handlers:\n logger.addHandler(NullHandler())\n \n \n logger.warning(str(s))\n \ndef captureWarnings(capture):\n ''\n\n\n\n \n global _warnings_showwarning\n if capture:\n if _warnings_showwarning is None:\n _warnings_showwarning=warnings.showwarning\n warnings.showwarning=_showwarning\n else:\n if _warnings_showwarning is not None:\n warnings.showwarning=_warnings_showwarning\n _warnings_showwarning=None\n", ["atexit", "collections.abc", "io", "os", "pickle", "re", "string", "sys", "threading", "time", "traceback", "types", "warnings", "weakref"], 1], "logging.brython_handlers": [".py", "import logging\n\nfrom browser.ajax import ajax\n\n\nclass XMLHTTPHandler(logging.Handler):\n ''\n\n\n \n def __init__(self,url,method=\"GET\"):\n ''\n\n\n \n logging.Handler.__init__(self)\n method=method.upper()\n if method not in [\"GET\",\"POST\"]:\n raise ValueError(\"method must be GET or POST\")\n self.url=url\n self.method=method\n \n def mapLogRecord(self,record):\n ''\n\n\n\n \n return record.__dict__\n \n def emit(self,record):\n ''\n\n\n\n \n try :\n req=ajax.open(self.method,self.url,sync=False )\n req.send(self.mapLogRecord(record))\n except :\n self.handleError(record)\n", ["browser.ajax", "logging"]], "email.contentmanager": [".py", "import binascii\nimport email.charset\nimport email.message\nimport email.errors\nfrom email import quoprimime\n\nclass ContentManager:\n\n def __init__(self):\n self.get_handlers={}\n self.set_handlers={}\n \n def add_get_handler(self,key,handler):\n self.get_handlers[key]=handler\n \n def get_content(self,msg,*args,**kw):\n content_type=msg.get_content_type()\n if content_type in self.get_handlers:\n return 
self.get_handlers[content_type](msg,*args,**kw)\n maintype=msg.get_content_maintype()\n if maintype in self.get_handlers:\n return self.get_handlers[maintype](msg,*args,**kw)\n if ''in self.get_handlers:\n return self.get_handlers[''](msg,*args,**kw)\n raise KeyError(content_type)\n \n def add_set_handler(self,typekey,handler):\n self.set_handlers[typekey]=handler\n \n def set_content(self,msg,obj,*args,**kw):\n if msg.get_content_maintype()=='multipart':\n \n \n raise TypeError(\"set_content not valid on multipart\")\n handler=self._find_set_handler(msg,obj)\n msg.clear_content()\n handler(msg,obj,*args,**kw)\n \n def _find_set_handler(self,msg,obj):\n full_path_for_error=None\n for typ in type(obj).__mro__:\n if typ in self.set_handlers:\n return self.set_handlers[typ]\n qname=typ.__qualname__\n modname=getattr(typ,'__module__','')\n full_path='.'.join((modname,qname))if modname else qname\n if full_path_for_error is None:\n full_path_for_error=full_path\n if full_path in self.set_handlers:\n return self.set_handlers[full_path]\n if qname in self.set_handlers:\n return self.set_handlers[qname]\n name=typ.__name__\n if name in self.set_handlers:\n return self.set_handlers[name]\n if None in self.set_handlers:\n return self.set_handlers[None]\n raise KeyError(full_path_for_error)\n \n \nraw_data_manager=ContentManager()\n\n\ndef get_text_content(msg,errors='replace'):\n content=msg.get_payload(decode=True)\n charset=msg.get_param('charset','ASCII')\n return content.decode(charset,errors=errors)\nraw_data_manager.add_get_handler('text',get_text_content)\n\n\ndef get_non_text_content(msg):\n return msg.get_payload(decode=True)\nfor maintype in 'audio image video application'.split():\n raw_data_manager.add_get_handler(maintype,get_non_text_content)\ndel maintype\n\n\ndef get_message_content(msg):\n return msg.get_payload(0)\nfor subtype in 'rfc822 external-body'.split():\n raw_data_manager.add_get_handler('message/'+subtype,get_message_content)\ndel subtype\n\n\ndef get_and_fixup_unknown_message_content(msg):\n\n\n\n\n\n\n return bytes(msg.get_payload(0))\nraw_data_manager.add_get_handler('message',\nget_and_fixup_unknown_message_content)\n\n\ndef _prepare_set(msg,maintype,subtype,headers):\n msg['Content-Type']='/'.join((maintype,subtype))\n if headers:\n if not hasattr(headers[0],'name'):\n mp=msg.policy\n headers=[mp.header_factory(*mp.header_source_parse([header]))\n for header in headers]\n try:\n for header in headers:\n if header.defects:\n raise header.defects[0]\n msg[header.name]=header\n except email.errors.HeaderDefect as exc:\n raise ValueError(\"Invalid header: {}\".format(\n header.fold(policy=msg.policy)))from exc\n \n \ndef _finalize_set(msg,disposition,filename,cid,params):\n if disposition is None and filename is not None:\n disposition='attachment'\n if disposition is not None:\n msg['Content-Disposition']=disposition\n if filename is not None:\n msg.set_param('filename',\n filename,\n header='Content-Disposition',\n replace=True)\n if cid is not None:\n msg['Content-ID']=cid\n if params is not None:\n for key,value in params.items():\n msg.set_param(key,value)\n \n \n \n \n \n \ndef _encode_base64(data,max_line_length):\n encoded_lines=[]\n unencoded_bytes_per_line=max_line_length //4 *3\n for i in range(0,len(data),unencoded_bytes_per_line):\n thisline=data[i:i+unencoded_bytes_per_line]\n encoded_lines.append(binascii.b2a_base64(thisline).decode('ascii'))\n return ''.join(encoded_lines)\n \n \ndef _encode_text(string,charset,cte,policy):\n 
lines=string.encode(charset).splitlines()\n linesep=policy.linesep.encode('ascii')\n def embedded_body(lines):return linesep.join(lines)+linesep\n def normal_body(lines):return b'\\n'.join(lines)+b'\\n'\n if cte is None:\n \n if max((len(x)for x in lines),default=0)<=policy.max_line_length:\n try:\n return '7bit',normal_body(lines).decode('ascii')\n except UnicodeDecodeError:\n pass\n if policy.cte_type =='8bit':\n return '8bit',normal_body(lines).decode('ascii','surrogateescape')\n sniff=embedded_body(lines[:10])\n sniff_qp=quoprimime.body_encode(sniff.decode('latin-1'),\n policy.max_line_length)\n sniff_base64=binascii.b2a_base64(sniff)\n \n if len(sniff_qp)>len(sniff_base64):\n cte='base64'\n else:\n cte='quoted-printable'\n if len(lines)<=10:\n return cte,sniff_qp\n if cte =='7bit':\n data=normal_body(lines).decode('ascii')\n elif cte =='8bit':\n data=normal_body(lines).decode('ascii','surrogateescape')\n elif cte =='quoted-printable':\n data=quoprimime.body_encode(normal_body(lines).decode('latin-1'),\n policy.max_line_length)\n elif cte =='base64':\n data=_encode_base64(embedded_body(lines),policy.max_line_length)\n else:\n raise ValueError(\"Unknown content transfer encoding {}\".format(cte))\n return cte,data\n \n \ndef set_text_content(msg,string,subtype=\"plain\",charset='utf-8',cte=None,\ndisposition=None,filename=None,cid=None,\nparams=None,headers=None):\n _prepare_set(msg,'text',subtype,headers)\n cte,payload=_encode_text(string,charset,cte,msg.policy)\n msg.set_payload(payload)\n msg.set_param('charset',\n email.charset.ALIASES.get(charset,charset),\n replace=True)\n msg['Content-Transfer-Encoding']=cte\n _finalize_set(msg,disposition,filename,cid,params)\nraw_data_manager.add_set_handler(str,set_text_content)\n\n\ndef set_message_content(msg,message,subtype=\"rfc822\",cte=None,\ndisposition=None,filename=None,cid=None,\nparams=None,headers=None):\n if subtype =='partial':\n raise ValueError(\"message/partial is not supported for Message objects\")\n if subtype =='rfc822':\n if cte not in(None,'7bit','8bit','binary'):\n \n raise ValueError(\n \"message/rfc822 parts do not support cte={}\".format(cte))\n \n \n \n \n \n cte='8bit'if cte is None else cte\n elif subtype =='external-body':\n if cte not in(None,'7bit'):\n \n raise ValueError(\n \"message/external-body parts do not support cte={}\".format(cte))\n cte='7bit'\n elif cte is None:\n \n \n cte='7bit'\n _prepare_set(msg,'message',subtype,headers)\n msg.set_payload([message])\n msg['Content-Transfer-Encoding']=cte\n _finalize_set(msg,disposition,filename,cid,params)\nraw_data_manager.add_set_handler(email.message.Message,set_message_content)\n\n\ndef set_bytes_content(msg,data,maintype,subtype,cte='base64',\ndisposition=None,filename=None,cid=None,\nparams=None,headers=None):\n _prepare_set(msg,maintype,subtype,headers)\n if cte =='base64':\n data=_encode_base64(data,max_line_length=msg.policy.max_line_length)\n elif cte =='quoted-printable':\n \n \n \n data=binascii.b2a_qp(data,istext=False,header=False,quotetabs=True)\n data=data.decode('ascii')\n elif cte =='7bit':\n data=data.decode('ascii')\n elif cte in('8bit','binary'):\n data=data.decode('ascii','surrogateescape')\n msg.set_payload(data)\n msg['Content-Transfer-Encoding']=cte\n _finalize_set(msg,disposition,filename,cid,params)\nfor typ in(bytes,bytearray,memoryview):\n raw_data_manager.add_set_handler(typ,set_bytes_content)\ndel typ\n", ["binascii", "email", "email.charset", "email.errors", "email.message", "email.quoprimime"]], "email._policybase": [".py", 
"''\n\n\n\n\nimport abc\nfrom email import header\nfrom email import charset as _charset\nfrom email.utils import _has_surrogates\n\n__all__=[\n'Policy',\n'Compat32',\n'compat32',\n]\n\n\nclass _PolicyBase:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,**kw):\n ''\n\n\n\n \n for name,value in kw.items():\n if hasattr(self,name):\n super(_PolicyBase,self).__setattr__(name,value)\n else :\n raise TypeError(\n \"{!r} is an invalid keyword argument for {}\".format(\n name,self.__class__.__name__))\n \n def __repr__(self):\n args=[\"{}={!r}\".format(name,value)\n for name,value in self.__dict__.items()]\n return \"{}({})\".format(self.__class__.__name__,', '.join(args))\n \n def clone(self,**kw):\n ''\n\n\n\n\n \n newpolicy=self.__class__.__new__(self.__class__)\n for attr,value in self.__dict__.items():\n object.__setattr__(newpolicy,attr,value)\n for attr,value in kw.items():\n if not hasattr(self,attr):\n raise TypeError(\n \"{!r} is an invalid keyword argument for {}\".format(\n attr,self.__class__.__name__))\n object.__setattr__(newpolicy,attr,value)\n return newpolicy\n \n def __setattr__(self,name,value):\n if hasattr(self,name):\n msg=\"{!r} object attribute {!r} is read-only\"\n else :\n msg=\"{!r} object has no attribute {!r}\"\n raise AttributeError(msg.format(self.__class__.__name__,name))\n \n def __add__(self,other):\n ''\n\n\n\n \n return self.clone(**other.__dict__)\n \n \ndef _append_doc(doc,added_doc):\n doc=doc.rsplit('\\n',1)[0]\n added_doc=added_doc.split('\\n',1)[1]\n return doc+'\\n'+added_doc\n \ndef _extend_docstrings(cls):\n if cls.__doc__ and cls.__doc__.startswith('+'):\n cls.__doc__=_append_doc(cls.__bases__[0].__doc__,cls.__doc__)\n for name,attr in cls.__dict__.items():\n if attr.__doc__ and attr.__doc__.startswith('+'):\n for c in (c for base in cls.__bases__ for c in base.mro()):\n doc=getattr(getattr(c,name),'__doc__')\n if doc:\n attr.__doc__=_append_doc(doc,attr.__doc__)\n break\n return cls\n \n \nclass Policy(_PolicyBase,metaclass=abc.ABCMeta):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n raise_on_defect=False\n linesep='\\n'\n cte_type='8bit'\n max_line_length=78\n mangle_from_=False\n message_factory=None\n \n def handle_defect(self,obj,defect):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.raise_on_defect:\n raise defect\n self.register_defect(obj,defect)\n \n def register_defect(self,obj,defect):\n ''\n\n\n\n\n\n\n\n\n \n obj.defects.append(defect)\n \n def header_max_count(self,name):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return None\n \n @abc.abstractmethod\n def header_source_parse(self,sourcelines):\n ''\n\n\n\n\n \n raise NotImplementedError\n \n @abc.abstractmethod\n def header_store_parse(self,name,value):\n ''\n\n \n raise NotImplementedError\n \n @abc.abstractmethod\n def header_fetch_parse(self,name,value):\n ''\n\n\n\n\n\n \n raise NotImplementedError\n \n @abc.abstractmethod\n def fold(self,name,value):\n ''\n\n\n\n\n\n\n \n raise NotImplementedError\n \n @abc.abstractmethod\n def fold_binary(self,name,value):\n ''\n\n\n\n\n \n raise NotImplementedError\n \n \n@_extend_docstrings\nclass Compat32(Policy):\n\n ''\n\n\n \n \n mangle_from_=True\n \n def _sanitize_header(self,name,value):\n \n \n if not isinstance(value,str):\n \n return value\n if _has_surrogates(value):\n return header.Header(value,charset=_charset.UNKNOWN8BIT,\n header_name=name)\n else :\n return value\n \n def header_source_parse(self,sourcelines):\n ''\n\n\n\n\n\n \n 
name,value=sourcelines[0].split(':',1)\n value=value.lstrip(' \\t')+''.join(sourcelines[1:])\n return (name,value.rstrip('\\r\\n'))\n \n def header_store_parse(self,name,value):\n ''\n\n \n return (name,value)\n \n def header_fetch_parse(self,name,value):\n ''\n\n\n \n return self._sanitize_header(name,value)\n \n def fold(self,name,value):\n ''\n\n\n\n\n\n \n return self._fold(name,value,sanitize=True )\n \n def fold_binary(self,name,value):\n ''\n\n\n\n\n\n\n \n folded=self._fold(name,value,sanitize=self.cte_type =='7bit')\n return folded.encode('ascii','surrogateescape')\n \n def _fold(self,name,value,sanitize):\n parts=[]\n parts.append('%s: '%name)\n if isinstance(value,str):\n if _has_surrogates(value):\n if sanitize:\n h=header.Header(value,\n charset=_charset.UNKNOWN8BIT,\n header_name=name)\n else :\n \n \n \n \n \n \n parts.append(value)\n h=None\n else :\n h=header.Header(value,header_name=name)\n else :\n \n h=value\n if h is not None :\n \n \n maxlinelen=0\n if self.max_line_length is not None :\n maxlinelen=self.max_line_length\n parts.append(h.encode(linesep=self.linesep,maxlinelen=maxlinelen))\n parts.append(self.linesep)\n return ''.join(parts)\n \n \ncompat32=Compat32()\n", ["abc", "email", "email.charset", "email.header", "email.utils"]], "email.header": [".py", "\n\n\n\n\"\"\"Header encoding and decoding functionality.\"\"\"\n\n__all__=[\n'Header',\n'decode_header',\n'make_header',\n]\n\nimport re\nimport binascii\n\nimport email.quoprimime\nimport email.base64mime\n\nfrom email.errors import HeaderParseError\nfrom email import charset as _charset\nCharset=_charset.Charset\n\nNL='\\n'\nSPACE=' '\nBSPACE=b' '\nSPACE8=' '*8\nEMPTYSTRING=''\nMAXLINELEN=78\nFWS=' \\t'\n\nUSASCII=Charset('us-ascii')\nUTF8=Charset('utf-8')\n\n\necre=re.compile(r'''\n =\\? # literal =?\n (?P[^?]*?) # non-greedy up to the next ? is the charset\n \\? # literal ?\n (?P[qQbB]) # either a \"q\" or a \"b\", case insensitive\n \\? # literal ?\n (?P.*?) 
# non-greedy up to the next ?= is the encoded string\n \\?= # literal ?=\n ''',re.VERBOSE |re.MULTILINE)\n\n\n\n\nfcre=re.compile(r'[\\041-\\176]+:$')\n\n\n\n_embedded_header=re.compile(r'\\n[^ \\t]+:')\n\n\n\n_max_append=email.quoprimime._max_append\n\n\ndef decode_header(header):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n if hasattr(header,'_chunks'):\n return[(_charset._encode(string,str(charset)),str(charset))\n for string,charset in header._chunks]\n \n if not ecre.search(header):\n return[(header,None)]\n \n \n \n words=[]\n for line in header.splitlines():\n parts=ecre.split(line)\n first=True\n while parts:\n unencoded=parts.pop(0)\n if first:\n unencoded=unencoded.lstrip()\n first=False\n if unencoded:\n words.append((unencoded,None,None))\n if parts:\n charset=parts.pop(0).lower()\n encoding=parts.pop(0).lower()\n encoded=parts.pop(0)\n words.append((encoded,encoding,charset))\n \n \n droplist=[]\n for n,w in enumerate(words):\n if n >1 and w[1]and words[n -2][1]and words[n -1][0].isspace():\n droplist.append(n -1)\n for d in reversed(droplist):\n del words[d]\n \n \n \n \n decoded_words=[]\n for encoded_string,encoding,charset in words:\n if encoding is None:\n \n decoded_words.append((encoded_string,charset))\n elif encoding =='q':\n word=email.quoprimime.header_decode(encoded_string)\n decoded_words.append((word,charset))\n elif encoding =='b':\n paderr=len(encoded_string)%4\n if paderr:\n encoded_string +='==='[:4 -paderr]\n try:\n word=email.base64mime.decode(encoded_string)\n except binascii.Error:\n raise HeaderParseError('Base64 decoding error')\n else:\n decoded_words.append((word,charset))\n else:\n raise AssertionError('Unexpected encoding: '+encoding)\n \n \n collapsed=[]\n last_word=last_charset=None\n for word,charset in decoded_words:\n if isinstance(word,str):\n word=bytes(word,'raw-unicode-escape')\n if last_word is None:\n last_word=word\n last_charset=charset\n elif charset !=last_charset:\n collapsed.append((last_word,last_charset))\n last_word=word\n last_charset=charset\n elif last_charset is None:\n last_word +=BSPACE+word\n else:\n last_word +=word\n collapsed.append((last_word,last_charset))\n return collapsed\n \n \ndef make_header(decoded_seq,maxlinelen=None,header_name=None,\ncontinuation_ws=' '):\n ''\n\n\n\n\n\n\n\n\n \n h=Header(maxlinelen=maxlinelen,header_name=header_name,\n continuation_ws=continuation_ws)\n for s,charset in decoded_seq:\n \n if charset is not None and not isinstance(charset,Charset):\n charset=Charset(charset)\n h.append(s,charset)\n return h\n \n \nclass Header:\n def __init__(self,s=None,charset=None,\n maxlinelen=None,header_name=None,\n continuation_ws=' ',errors='strict'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if charset is None:\n charset=USASCII\n elif not isinstance(charset,Charset):\n charset=Charset(charset)\n self._charset=charset\n self._continuation_ws=continuation_ws\n self._chunks=[]\n if s is not None:\n self.append(s,charset,errors)\n if maxlinelen is None:\n maxlinelen=MAXLINELEN\n self._maxlinelen=maxlinelen\n if header_name is None:\n self._headerlen=0\n else:\n \n self._headerlen=len(header_name)+2\n \n def __str__(self):\n ''\n self._normalize()\n uchunks=[]\n lastcs=None\n lastspace=None\n for string,charset in self._chunks:\n \n \n \n \n \n \n nextcs=charset\n if nextcs ==_charset.UNKNOWN8BIT:\n original_bytes=string.encode('ascii','surrogateescape')\n string=original_bytes.decode('ascii','replace')\n if uchunks:\n hasspace=string and self._nonctext(string[0])\n if lastcs not 
in(None,'us-ascii'):\n if nextcs in(None,'us-ascii')and not hasspace:\n uchunks.append(SPACE)\n nextcs=None\n elif nextcs not in(None,'us-ascii')and not lastspace:\n uchunks.append(SPACE)\n lastspace=string and self._nonctext(string[-1])\n lastcs=nextcs\n uchunks.append(string)\n return EMPTYSTRING.join(uchunks)\n \n \n \n def __eq__(self,other):\n \n \n \n return other ==str(self)\n \n def append(self,s,charset=None,errors='strict'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if charset is None:\n charset=self._charset\n elif not isinstance(charset,Charset):\n charset=Charset(charset)\n if not isinstance(s,str):\n input_charset=charset.input_codec or 'us-ascii'\n if input_charset ==_charset.UNKNOWN8BIT:\n s=s.decode('us-ascii','surrogateescape')\n else:\n s=s.decode(input_charset,errors)\n \n \n output_charset=charset.output_codec or 'us-ascii'\n if output_charset !=_charset.UNKNOWN8BIT:\n try:\n s.encode(output_charset,errors)\n except UnicodeEncodeError:\n if output_charset !='us-ascii':\n raise\n charset=UTF8\n self._chunks.append((s,charset))\n \n def _nonctext(self,s):\n ''\n \n return s.isspace()or s in('(',')','\\\\')\n \n def encode(self,splitchars=';, \\t',maxlinelen=None,linesep='\\n'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._normalize()\n if maxlinelen is None:\n maxlinelen=self._maxlinelen\n \n \n \n if maxlinelen ==0:\n maxlinelen=1000000\n formatter=_ValueFormatter(self._headerlen,maxlinelen,\n self._continuation_ws,splitchars)\n lastcs=None\n hasspace=lastspace=None\n for string,charset in self._chunks:\n if hasspace is not None:\n hasspace=string and self._nonctext(string[0])\n if lastcs not in(None,'us-ascii'):\n if not hasspace or charset not in(None,'us-ascii'):\n formatter.add_transition()\n elif charset not in(None,'us-ascii')and not lastspace:\n formatter.add_transition()\n lastspace=string and self._nonctext(string[-1])\n lastcs=charset\n hasspace=False\n lines=string.splitlines()\n if lines:\n formatter.feed('',lines[0],charset)\n else:\n formatter.feed('','',charset)\n for line in lines[1:]:\n formatter.newline()\n if charset.header_encoding is not None:\n formatter.feed(self._continuation_ws,' '+line.lstrip(),\n charset)\n else:\n sline=line.lstrip()\n fws=line[:len(line)-len(sline)]\n formatter.feed(fws,sline,charset)\n if len(lines)>1:\n formatter.newline()\n if self._chunks:\n formatter.add_transition()\n value=formatter._str(linesep)\n if _embedded_header.search(value):\n raise HeaderParseError(\"header value appears to contain \"\n \"an embedded header: {!r}\".format(value))\n return value\n \n def _normalize(self):\n \n \n chunks=[]\n last_charset=None\n last_chunk=[]\n for string,charset in self._chunks:\n if charset ==last_charset:\n last_chunk.append(string)\n else:\n if last_charset is not None:\n chunks.append((SPACE.join(last_chunk),last_charset))\n last_chunk=[string]\n last_charset=charset\n if last_chunk:\n chunks.append((SPACE.join(last_chunk),last_charset))\n self._chunks=chunks\n \n \nclass _ValueFormatter:\n def __init__(self,headerlen,maxlen,continuation_ws,splitchars):\n self._maxlen=maxlen\n self._continuation_ws=continuation_ws\n self._continuation_ws_len=len(continuation_ws)\n self._splitchars=splitchars\n self._lines=[]\n self._current_line=_Accumulator(headerlen)\n \n def _str(self,linesep):\n self.newline()\n return linesep.join(self._lines)\n \n def __str__(self):\n return self._str(NL)\n \n def newline(self):\n end_of_line=self._current_line.pop()\n if end_of_line !=(' ',''):\n 
self._current_line.push(*end_of_line)\n if len(self._current_line)>0:\n if self._current_line.is_onlyws()and self._lines:\n self._lines[-1]+=str(self._current_line)\n else:\n self._lines.append(str(self._current_line))\n self._current_line.reset()\n \n def add_transition(self):\n self._current_line.push(' ','')\n \n def feed(self,fws,string,charset):\n \n \n \n \n \n if charset.header_encoding is None:\n self._ascii_split(fws,string,self._splitchars)\n return\n \n \n \n \n \n \n \n encoded_lines=charset.header_encode_lines(string,self._maxlengths())\n \n \n try:\n first_line=encoded_lines.pop(0)\n except IndexError:\n \n return\n if first_line is not None:\n self._append_chunk(fws,first_line)\n try:\n last_line=encoded_lines.pop()\n except IndexError:\n \n return\n self.newline()\n self._current_line.push(self._continuation_ws,last_line)\n \n for line in encoded_lines:\n self._lines.append(self._continuation_ws+line)\n \n def _maxlengths(self):\n \n yield self._maxlen -len(self._current_line)\n while True:\n yield self._maxlen -self._continuation_ws_len\n \n def _ascii_split(self,fws,string,splitchars):\n \n \n \n \n \n \n \n \n \n \n \n \n \n parts=re.split(\"([\"+FWS+\"]+)\",fws+string)\n if parts[0]:\n parts[:0]=['']\n else:\n parts.pop(0)\n for fws,part in zip(*[iter(parts)]*2):\n self._append_chunk(fws,part)\n \n def _append_chunk(self,fws,string):\n self._current_line.push(fws,string)\n if len(self._current_line)>self._maxlen:\n \n \n for ch in self._splitchars:\n for i in range(self._current_line.part_count()-1,0,-1):\n if ch.isspace():\n fws=self._current_line[i][0]\n if fws and fws[0]==ch:\n break\n prevpart=self._current_line[i -1][1]\n if prevpart and prevpart[-1]==ch:\n break\n else:\n continue\n break\n else:\n fws,part=self._current_line.pop()\n if self._current_line._initial_size >0:\n \n self.newline()\n if not fws:\n \n \n fws=' '\n self._current_line.push(fws,part)\n return\n remainder=self._current_line.pop_from(i)\n self._lines.append(str(self._current_line))\n self._current_line.reset(remainder)\n \n \nclass _Accumulator(list):\n\n def __init__(self,initial_size=0):\n self._initial_size=initial_size\n super().__init__()\n \n def push(self,fws,string):\n self.append((fws,string))\n \n def pop_from(self,i=0):\n popped=self[i:]\n self[i:]=[]\n return popped\n \n def pop(self):\n if self.part_count()==0:\n return('','')\n return super().pop()\n \n def __len__(self):\n return sum((len(fws)+len(part)for fws,part in self),\n self._initial_size)\n \n def __str__(self):\n return EMPTYSTRING.join((EMPTYSTRING.join((fws,part))\n for fws,part in self))\n \n def reset(self,startval=None):\n if startval is None:\n startval=[]\n self[:]=startval\n self._initial_size=0\n \n def is_onlyws(self):\n return self._initial_size ==0 and(not self or str(self).isspace())\n \n def part_count(self):\n return super().__len__()\n", ["binascii", "email", "email.base64mime", "email.charset", "email.errors", "email.quoprimime", "re"]], "email._encoded_words": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport re\nimport base64\nimport binascii\nimport functools\nfrom string import ascii_letters,digits\nfrom email import errors\n\n__all__=['decode_q',\n'encode_q',\n'decode_b',\n'encode_b',\n'len_q',\n'len_b',\n'decode',\n'encode',\n]\n\n\n\n\n\n\n_q_byte_subber=functools.partial(re.compile(br'=([a-fA-F0-9]{2})').sub,\nlambda m:bytes.fromhex(m.group(1).decode()))\n\ndef decode_q(encoded):\n encoded=encoded.replace(b'_',b' ')\n return 
_q_byte_subber(encoded),[]\n \n \n \nclass _QByteMap(dict):\n\n safe=b'-!*+/'+ascii_letters.encode('ascii')+digits.encode('ascii')\n \n def __missing__(self,key):\n if key in self.safe:\n self[key]=chr(key)\n else:\n self[key]=\"={:02X}\".format(key)\n return self[key]\n \n_q_byte_map=_QByteMap()\n\n\n_q_byte_map[ord(' ')]='_'\n\ndef encode_q(bstring):\n return ''.join(_q_byte_map[x]for x in bstring)\n \ndef len_q(bstring):\n return sum(len(_q_byte_map[x])for x in bstring)\n \n \n \n \n \n \ndef decode_b(encoded):\n\n\n pad_err=len(encoded)%4\n missing_padding=b'==='[:4 -pad_err]if pad_err else b''\n try:\n return(\n base64.b64decode(encoded+missing_padding,validate=True),\n [errors.InvalidBase64PaddingDefect()]if pad_err else[],\n )\n except binascii.Error:\n \n \n \n \n \n try:\n return(\n base64.b64decode(encoded,validate=False),\n [errors.InvalidBase64CharactersDefect()],\n )\n except binascii.Error:\n \n \n try:\n return(\n base64.b64decode(encoded+b'==',validate=False),\n [errors.InvalidBase64CharactersDefect(),\n errors.InvalidBase64PaddingDefect()],\n )\n except binascii.Error:\n \n \n \n \n \n return encoded,[errors.InvalidBase64LengthDefect()]\n \ndef encode_b(bstring):\n return base64.b64encode(bstring).decode('ascii')\n \ndef len_b(bstring):\n groups_of_3,leftover=divmod(len(bstring),3)\n \n return groups_of_3 *4+(4 if leftover else 0)\n \n \n_cte_decoders={\n'q':decode_q,\n'b':decode_b,\n}\n\ndef decode(ew):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n _,charset,cte,cte_string,_=ew.split('?')\n charset,_,lang=charset.partition('*')\n cte=cte.lower()\n \n bstring=cte_string.encode('ascii','surrogateescape')\n bstring,defects=_cte_decoders[cte](bstring)\n \n try:\n string=bstring.decode(charset)\n except UnicodeDecodeError:\n defects.append(errors.UndecodableBytesDefect(\"Encoded word \"\n f\"contains bytes not decodable using {charset !r} charset\"))\n string=bstring.decode(charset,'surrogateescape')\n except(LookupError,UnicodeEncodeError):\n string=bstring.decode('ascii','surrogateescape')\n if charset.lower()!='unknown-8bit':\n defects.append(errors.CharsetError(f\"Unknown charset {charset !r} \"\n f\"in encoded word; decoded as unknown bytes\"))\n return string,charset,lang,defects\n \n \n_cte_encoders={\n'q':encode_q,\n'b':encode_b,\n}\n\n_cte_encode_length={\n'q':len_q,\n'b':len_b,\n}\n\ndef encode(string,charset='utf-8',encoding=None,lang=''):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if charset =='unknown-8bit':\n bstring=string.encode('ascii','surrogateescape')\n else:\n bstring=string.encode(charset)\n if encoding is None:\n qlen=_cte_encode_length['q'](bstring)\n blen=_cte_encode_length['b'](bstring)\n \n encoding='q'if qlen -blen <5 else 'b'\n encoded=_cte_encoders[encoding](bstring)\n if lang:\n lang='*'+lang\n return \"=?{}{}?{}?{}?=\".format(charset,lang,encoding,encoded)\n", ["base64", "binascii", "email", "email.errors", "functools", "re", "string"]], "email._header_value_parser": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport re\nimport sys\nimport urllib\nfrom string import hexdigits\nfrom operator import itemgetter\nfrom email import _encoded_words as _ew\nfrom email import errors\nfrom email import utils\n\n\n\n\n\nWSP=set(' \\t')\nCFWS_LEADER=WSP |set('(')\nSPECIALS=set(r'()<>@,:;.\\\"[]')\nATOM_ENDS=SPECIALS |WSP\nDOT_ATOM_ENDS=ATOM_ENDS -set('.')\n\nPHRASE_ENDS=SPECIALS -set('.\"(')\nTSPECIALS=(SPECIALS |set('/?='))-set('.')\nTOKEN_ENDS=TSPECIALS 
|WSP\nASPECIALS=TSPECIALS |set(\"*'%\")\nATTRIBUTE_ENDS=ASPECIALS |WSP\nEXTENDED_ATTRIBUTE_ENDS=ATTRIBUTE_ENDS -set('%')\n\ndef quote_string(value):\n return '\"'+str(value).replace('\\\\','\\\\\\\\').replace('\"',r'\\\"')+'\"'\n \n \nrfc2047_matcher=re.compile(r'''\n =\\? # literal =?\n [^?]* # charset\n \\? # literal ?\n [qQbB] # literal 'q' or 'b', case insensitive\n \\? # literal ?\n .*? # encoded word\n \\?= # literal ?=\n''',re.VERBOSE |re.MULTILINE)\n\n\n\n\n\n\nclass TokenList(list):\n\n token_type=None\n syntactic_break=True\n ew_combine_allowed=True\n \n def __init__(self,*args,**kw):\n super().__init__(*args,**kw)\n self.defects=[]\n \n def __str__(self):\n return ''.join(str(x)for x in self)\n \n def __repr__(self):\n return '{}({})'.format(self.__class__.__name__,\n super().__repr__())\n \n @property\n def value(self):\n return ''.join(x.value for x in self if x.value)\n \n @property\n def all_defects(self):\n return sum((x.all_defects for x in self),self.defects)\n \n def startswith_fws(self):\n return self[0].startswith_fws()\n \n @property\n def as_ew_allowed(self):\n ''\n return all(part.as_ew_allowed for part in self)\n \n @property\n def comments(self):\n comments=[]\n for token in self:\n comments.extend(token.comments)\n return comments\n \n def fold(self,*,policy):\n return _refold_parse_tree(self,policy=policy)\n \n def pprint(self,indent=''):\n print(self.ppstr(indent=indent))\n \n def ppstr(self,indent=''):\n return '\\n'.join(self._pp(indent=indent))\n \n def _pp(self,indent=''):\n yield '{}{}/{}('.format(\n indent,\n self.__class__.__name__,\n self.token_type)\n for token in self:\n if not hasattr(token,'_pp'):\n yield(indent+' !! invalid element in token '\n 'list: {!r}'.format(token))\n else:\n yield from token._pp(indent+' ')\n if self.defects:\n extra=' Defects: {}'.format(self.defects)\n else:\n extra=''\n yield '{}){}'.format(indent,extra)\n \n \nclass WhiteSpaceTokenList(TokenList):\n\n @property\n def value(self):\n return ' '\n \n @property\n def comments(self):\n return[x.content for x in self if x.token_type =='comment']\n \n \nclass UnstructuredTokenList(TokenList):\n token_type='unstructured'\n \n \nclass Phrase(TokenList):\n token_type='phrase'\n \nclass Word(TokenList):\n token_type='word'\n \n \nclass CFWSList(WhiteSpaceTokenList):\n token_type='cfws'\n \n \nclass Atom(TokenList):\n token_type='atom'\n \n \nclass Token(TokenList):\n token_type='token'\n encode_as_ew=False\n \n \nclass EncodedWord(TokenList):\n token_type='encoded-word'\n cte=None\n charset=None\n lang=None\n \n \nclass QuotedString(TokenList):\n\n token_type='quoted-string'\n \n @property\n def content(self):\n for x in self:\n if x.token_type =='bare-quoted-string':\n return x.value\n \n @property\n def quoted_value(self):\n res=[]\n for x in self:\n if x.token_type =='bare-quoted-string':\n res.append(str(x))\n else:\n res.append(x.value)\n return ''.join(res)\n \n @property\n def stripped_value(self):\n for token in self:\n if token.token_type =='bare-quoted-string':\n return token.value\n \n \nclass BareQuotedString(QuotedString):\n\n token_type='bare-quoted-string'\n \n def __str__(self):\n return quote_string(''.join(str(x)for x in self))\n \n @property\n def value(self):\n return ''.join(str(x)for x in self)\n \n \nclass Comment(WhiteSpaceTokenList):\n\n token_type='comment'\n \n def __str__(self):\n return ''.join(sum([\n [\"(\"],\n [self.quote(x)for x in self],\n [\")\"],\n ],[]))\n \n def quote(self,value):\n if value.token_type =='comment':\n return str(value)\n return 
str(value).replace('\\\\','\\\\\\\\').replace(\n '(',r'\\(').replace(\n ')',r'\\)')\n \n @property\n def content(self):\n return ''.join(str(x)for x in self)\n \n @property\n def comments(self):\n return[self.content]\n \nclass AddressList(TokenList):\n\n token_type='address-list'\n \n @property\n def addresses(self):\n return[x for x in self if x.token_type =='address']\n \n @property\n def mailboxes(self):\n return sum((x.mailboxes\n for x in self if x.token_type =='address'),[])\n \n @property\n def all_mailboxes(self):\n return sum((x.all_mailboxes\n for x in self if x.token_type =='address'),[])\n \n \nclass Address(TokenList):\n\n token_type='address'\n \n @property\n def display_name(self):\n if self[0].token_type =='group':\n return self[0].display_name\n \n @property\n def mailboxes(self):\n if self[0].token_type =='mailbox':\n return[self[0]]\n elif self[0].token_type =='invalid-mailbox':\n return[]\n return self[0].mailboxes\n \n @property\n def all_mailboxes(self):\n if self[0].token_type =='mailbox':\n return[self[0]]\n elif self[0].token_type =='invalid-mailbox':\n return[self[0]]\n return self[0].all_mailboxes\n \nclass MailboxList(TokenList):\n\n token_type='mailbox-list'\n \n @property\n def mailboxes(self):\n return[x for x in self if x.token_type =='mailbox']\n \n @property\n def all_mailboxes(self):\n return[x for x in self\n if x.token_type in('mailbox','invalid-mailbox')]\n \n \nclass GroupList(TokenList):\n\n token_type='group-list'\n \n @property\n def mailboxes(self):\n if not self or self[0].token_type !='mailbox-list':\n return[]\n return self[0].mailboxes\n \n @property\n def all_mailboxes(self):\n if not self or self[0].token_type !='mailbox-list':\n return[]\n return self[0].all_mailboxes\n \n \nclass Group(TokenList):\n\n token_type=\"group\"\n \n @property\n def mailboxes(self):\n if self[2].token_type !='group-list':\n return[]\n return self[2].mailboxes\n \n @property\n def all_mailboxes(self):\n if self[2].token_type !='group-list':\n return[]\n return self[2].all_mailboxes\n \n @property\n def display_name(self):\n return self[0].display_name\n \n \nclass NameAddr(TokenList):\n\n token_type='name-addr'\n \n @property\n def display_name(self):\n if len(self)==1:\n return None\n return self[0].display_name\n \n @property\n def local_part(self):\n return self[-1].local_part\n \n @property\n def domain(self):\n return self[-1].domain\n \n @property\n def route(self):\n return self[-1].route\n \n @property\n def addr_spec(self):\n return self[-1].addr_spec\n \n \nclass AngleAddr(TokenList):\n\n token_type='angle-addr'\n \n @property\n def local_part(self):\n for x in self:\n if x.token_type =='addr-spec':\n return x.local_part\n \n @property\n def domain(self):\n for x in self:\n if x.token_type =='addr-spec':\n return x.domain\n \n @property\n def route(self):\n for x in self:\n if x.token_type =='obs-route':\n return x.domains\n \n @property\n def addr_spec(self):\n for x in self:\n if x.token_type =='addr-spec':\n if x.local_part:\n return x.addr_spec\n else:\n return quote_string(x.local_part)+x.addr_spec\n else:\n return '<>'\n \n \nclass ObsRoute(TokenList):\n\n token_type='obs-route'\n \n @property\n def domains(self):\n return[x.domain for x in self if x.token_type =='domain']\n \n \nclass Mailbox(TokenList):\n\n token_type='mailbox'\n \n @property\n def display_name(self):\n if self[0].token_type =='name-addr':\n return self[0].display_name\n \n @property\n def local_part(self):\n return self[0].local_part\n \n @property\n def domain(self):\n return 
self[0].domain\n \n @property\n def route(self):\n if self[0].token_type =='name-addr':\n return self[0].route\n \n @property\n def addr_spec(self):\n return self[0].addr_spec\n \n \nclass InvalidMailbox(TokenList):\n\n token_type='invalid-mailbox'\n \n @property\n def display_name(self):\n return None\n \n local_part=domain=route=addr_spec=display_name\n \n \nclass Domain(TokenList):\n\n token_type='domain'\n as_ew_allowed=False\n \n @property\n def domain(self):\n return ''.join(super().value.split())\n \n \nclass DotAtom(TokenList):\n token_type='dot-atom'\n \n \nclass DotAtomText(TokenList):\n token_type='dot-atom-text'\n as_ew_allowed=True\n \n \nclass NoFoldLiteral(TokenList):\n token_type='no-fold-literal'\n as_ew_allowed=False\n \n \nclass AddrSpec(TokenList):\n\n token_type='addr-spec'\n as_ew_allowed=False\n \n @property\n def local_part(self):\n return self[0].local_part\n \n @property\n def domain(self):\n if len(self)<3:\n return None\n return self[-1].domain\n \n @property\n def value(self):\n if len(self)<3:\n return self[0].value\n return self[0].value.rstrip()+self[1].value+self[2].value.lstrip()\n \n @property\n def addr_spec(self):\n nameset=set(self.local_part)\n if len(nameset)>len(nameset -DOT_ATOM_ENDS):\n lp=quote_string(self.local_part)\n else:\n lp=self.local_part\n if self.domain is not None:\n return lp+'@'+self.domain\n return lp\n \n \nclass ObsLocalPart(TokenList):\n\n token_type='obs-local-part'\n as_ew_allowed=False\n \n \nclass DisplayName(Phrase):\n\n token_type='display-name'\n ew_combine_allowed=False\n \n @property\n def display_name(self):\n res=TokenList(self)\n if len(res)==0:\n return res.value\n if res[0].token_type =='cfws':\n res.pop(0)\n else:\n if res[0][0].token_type =='cfws':\n res[0]=TokenList(res[0][1:])\n if res[-1].token_type =='cfws':\n res.pop()\n else:\n if res[-1][-1].token_type =='cfws':\n res[-1]=TokenList(res[-1][:-1])\n return res.value\n \n @property\n def value(self):\n quote=False\n if self.defects:\n quote=True\n else:\n for x in self:\n if x.token_type =='quoted-string':\n quote=True\n if len(self)!=0 and quote:\n pre=post=''\n if self[0].token_type =='cfws'or self[0][0].token_type =='cfws':\n pre=' '\n if self[-1].token_type =='cfws'or self[-1][-1].token_type =='cfws':\n post=' '\n return pre+quote_string(self.display_name)+post\n else:\n return super().value\n \n \nclass LocalPart(TokenList):\n\n token_type='local-part'\n as_ew_allowed=False\n \n @property\n def value(self):\n if self[0].token_type ==\"quoted-string\":\n return self[0].quoted_value\n else:\n return self[0].value\n \n @property\n def local_part(self):\n \n res=[DOT]\n last=DOT\n last_is_tl=False\n for tok in self[0]+[DOT]:\n if tok.token_type =='cfws':\n continue\n if(last_is_tl and tok.token_type =='dot'and\n last[-1].token_type =='cfws'):\n res[-1]=TokenList(last[:-1])\n is_tl=isinstance(tok,TokenList)\n if(is_tl and last.token_type =='dot'and\n tok[0].token_type =='cfws'):\n res.append(TokenList(tok[1:]))\n else:\n res.append(tok)\n last=res[-1]\n last_is_tl=is_tl\n res=TokenList(res[1:-1])\n return res.value\n \n \nclass DomainLiteral(TokenList):\n\n token_type='domain-literal'\n as_ew_allowed=False\n \n @property\n def domain(self):\n return ''.join(super().value.split())\n \n @property\n def ip(self):\n for x in self:\n if x.token_type =='ptext':\n return x.value\n \n \nclass MIMEVersion(TokenList):\n\n token_type='mime-version'\n major=None\n minor=None\n \n \nclass Parameter(TokenList):\n\n token_type='parameter'\n sectioned=False\n extended=False\n 
charset='us-ascii'\n \n @property\n def section_number(self):\n \n \n return self[1].number if self.sectioned else 0\n \n @property\n def param_value(self):\n \n for token in self:\n if token.token_type =='value':\n return token.stripped_value\n if token.token_type =='quoted-string':\n for token in token:\n if token.token_type =='bare-quoted-string':\n for token in token:\n if token.token_type =='value':\n return token.stripped_value\n return ''\n \n \nclass InvalidParameter(Parameter):\n\n token_type='invalid-parameter'\n \n \nclass Attribute(TokenList):\n\n token_type='attribute'\n \n @property\n def stripped_value(self):\n for token in self:\n if token.token_type.endswith('attrtext'):\n return token.value\n \nclass Section(TokenList):\n\n token_type='section'\n number=None\n \n \nclass Value(TokenList):\n\n token_type='value'\n \n @property\n def stripped_value(self):\n token=self[0]\n if token.token_type =='cfws':\n token=self[1]\n if token.token_type.endswith(\n ('quoted-string','attribute','extended-attribute')):\n return token.stripped_value\n return self.value\n \n \nclass MimeParameters(TokenList):\n\n token_type='mime-parameters'\n syntactic_break=False\n \n @property\n def params(self):\n \n \n \n \n \n params={}\n for token in self:\n if not token.token_type.endswith('parameter'):\n continue\n if token[0].token_type !='attribute':\n continue\n name=token[0].value.strip()\n if name not in params:\n params[name]=[]\n params[name].append((token.section_number,token))\n for name,parts in params.items():\n parts=sorted(parts,key=itemgetter(0))\n first_param=parts[0][1]\n charset=first_param.charset\n \n \n \n if not first_param.extended and len(parts)>1:\n if parts[1][0]==0:\n parts[1][1].defects.append(errors.InvalidHeaderDefect(\n 'duplicate parameter name; duplicate(s) ignored'))\n parts=parts[:1]\n \n \n value_parts=[]\n i=0\n for section_number,param in parts:\n if section_number !=i:\n \n \n \n if not param.extended:\n param.defects.append(errors.InvalidHeaderDefect(\n 'duplicate parameter name; duplicate ignored'))\n continue\n else:\n param.defects.append(errors.InvalidHeaderDefect(\n \"inconsistent RFC2231 parameter numbering\"))\n i +=1\n value=param.param_value\n if param.extended:\n try:\n value=urllib.parse.unquote_to_bytes(value)\n except UnicodeEncodeError:\n \n \n \n value=urllib.parse.unquote(value,encoding='latin-1')\n else:\n try:\n value=value.decode(charset,'surrogateescape')\n except(LookupError,UnicodeEncodeError):\n \n \n \n \n value=value.decode('us-ascii','surrogateescape')\n if utils._has_surrogates(value):\n param.defects.append(errors.UndecodableBytesDefect())\n value_parts.append(value)\n value=''.join(value_parts)\n yield name,value\n \n def __str__(self):\n params=[]\n for name,value in self.params:\n if value:\n params.append('{}={}'.format(name,quote_string(value)))\n else:\n params.append(name)\n params='; '.join(params)\n return ' '+params if params else ''\n \n \nclass ParameterizedHeaderValue(TokenList):\n\n\n\n syntactic_break=False\n \n @property\n def params(self):\n for token in reversed(self):\n if token.token_type =='mime-parameters':\n return token.params\n return{}\n \n \nclass ContentType(ParameterizedHeaderValue):\n token_type='content-type'\n as_ew_allowed=False\n maintype='text'\n subtype='plain'\n \n \nclass ContentDisposition(ParameterizedHeaderValue):\n token_type='content-disposition'\n as_ew_allowed=False\n content_disposition=None\n \n \nclass ContentTransferEncoding(TokenList):\n token_type='content-transfer-encoding'\n 
as_ew_allowed=False\n cte='7bit'\n \n \nclass HeaderLabel(TokenList):\n token_type='header-label'\n as_ew_allowed=False\n \n \nclass MsgID(TokenList):\n token_type='msg-id'\n as_ew_allowed=False\n \n def fold(self,policy):\n \n return str(self)+policy.linesep\n \n \nclass MessageID(MsgID):\n token_type='message-id'\n \n \nclass InvalidMessageID(MessageID):\n token_type='invalid-message-id'\n \n \nclass Header(TokenList):\n token_type='header'\n \n \n \n \n \n \nclass Terminal(str):\n\n as_ew_allowed=True\n ew_combine_allowed=True\n syntactic_break=True\n \n def __new__(cls,value,token_type):\n self=super().__new__(cls,value)\n self.token_type=token_type\n self.defects=[]\n return self\n \n def __repr__(self):\n return \"{}({})\".format(self.__class__.__name__,super().__repr__())\n \n def pprint(self):\n print(self.__class__.__name__+'/'+self.token_type)\n \n @property\n def all_defects(self):\n return list(self.defects)\n \n def _pp(self,indent=''):\n return[\"{}{}/{}({}){}\".format(\n indent,\n self.__class__.__name__,\n self.token_type,\n super().__repr__(),\n ''if not self.defects else ' {}'.format(self.defects),\n )]\n \n def pop_trailing_ws(self):\n \n return None\n \n @property\n def comments(self):\n return[]\n \n def __getnewargs__(self):\n return(str(self),self.token_type)\n \n \nclass WhiteSpaceTerminal(Terminal):\n\n @property\n def value(self):\n return ' '\n \n def startswith_fws(self):\n return True\n \n \nclass ValueTerminal(Terminal):\n\n @property\n def value(self):\n return self\n \n def startswith_fws(self):\n return False\n \n \nclass EWWhiteSpaceTerminal(WhiteSpaceTerminal):\n\n @property\n def value(self):\n return ''\n \n def __str__(self):\n return ''\n \n \nclass _InvalidEwError(errors.HeaderParseError):\n ''\n \n \n \n \n \nDOT=ValueTerminal('.','dot')\nListSeparator=ValueTerminal(',','list-separator')\nRouteComponentMarker=ValueTerminal('@','route-component-marker')\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_wsp_splitter=re.compile(r'([{}]+)'.format(''.join(WSP))).split\n_non_atom_end_matcher=re.compile(r\"[^{}]+\".format(\nre.escape(''.join(ATOM_ENDS)))).match\n_non_printable_finder=re.compile(r\"[\\x00-\\x20\\x7F]\").findall\n_non_token_end_matcher=re.compile(r\"[^{}]+\".format(\nre.escape(''.join(TOKEN_ENDS)))).match\n_non_attribute_end_matcher=re.compile(r\"[^{}]+\".format(\nre.escape(''.join(ATTRIBUTE_ENDS)))).match\n_non_extended_attribute_end_matcher=re.compile(r\"[^{}]+\".format(\nre.escape(''.join(EXTENDED_ATTRIBUTE_ENDS)))).match\n\ndef _validate_xtext(xtext):\n ''\n \n non_printables=_non_printable_finder(xtext)\n if non_printables:\n xtext.defects.append(errors.NonPrintableDefect(non_printables))\n if utils._has_surrogates(xtext):\n xtext.defects.append(errors.UndecodableBytesDefect(\n \"Non-ASCII characters found in header token\"))\n \ndef _get_ptext_to_endchars(value,endchars):\n ''\n\n\n\n\n\n\n \n fragment,*remainder=_wsp_splitter(value,1)\n vchars=[]\n escape=False\n had_qp=False\n for pos in range(len(fragment)):\n if fragment[pos]=='\\\\':\n if escape:\n escape=False\n had_qp=True\n else:\n escape=True\n continue\n if escape:\n escape=False\n elif fragment[pos]in endchars:\n break\n vchars.append(fragment[pos])\n else:\n pos=pos+1\n return ''.join(vchars),''.join([fragment[pos:]]+remainder),had_qp\n \ndef get_fws(value):\n ''\n\n\n\n\n\n \n newvalue=value.lstrip()\n fws=WhiteSpaceTerminal(value[:len(value)-len(newvalue)],'fws')\n return fws,newvalue\n \ndef get_encoded_word(value):\n ''\n\n \n ew=EncodedWord()\n if not 
value.startswith('=?'):\n raise errors.HeaderParseError(\n \"expected encoded word but found {}\".format(value))\n tok,*remainder=value[2:].split('?=',1)\n if tok ==value[2:]:\n raise errors.HeaderParseError(\n \"expected encoded word but found {}\".format(value))\n remstr=''.join(remainder)\n if(len(remstr)>1 and\n remstr[0]in hexdigits and\n remstr[1]in hexdigits and\n tok.count('?')<2):\n \n rest,*remainder=remstr.split('?=',1)\n tok=tok+'?='+rest\n if len(tok.split())>1:\n ew.defects.append(errors.InvalidHeaderDefect(\n \"whitespace inside encoded word\"))\n ew.cte=value\n value=''.join(remainder)\n try:\n text,charset,lang,defects=_ew.decode('=?'+tok+'?=')\n except(ValueError,KeyError):\n raise _InvalidEwError(\n \"encoded word format invalid: '{}'\".format(ew.cte))\n ew.charset=charset\n ew.lang=lang\n ew.defects.extend(defects)\n while text:\n if text[0]in WSP:\n token,text=get_fws(text)\n ew.append(token)\n continue\n chars,*remainder=_wsp_splitter(text,1)\n vtext=ValueTerminal(chars,'vtext')\n _validate_xtext(vtext)\n ew.append(vtext)\n text=''.join(remainder)\n \n if value and value[0]not in WSP:\n ew.defects.append(errors.InvalidHeaderDefect(\n \"missing trailing whitespace after encoded-word\"))\n return ew,value\n \ndef get_unstructured(value):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n unstructured=UnstructuredTokenList()\n while value:\n if value[0]in WSP:\n token,value=get_fws(value)\n unstructured.append(token)\n continue\n valid_ew=True\n if value.startswith('=?'):\n try:\n token,value=get_encoded_word(value)\n except _InvalidEwError:\n valid_ew=False\n except errors.HeaderParseError:\n \n \n pass\n else:\n have_ws=True\n if len(unstructured)>0:\n if unstructured[-1].token_type !='fws':\n unstructured.defects.append(errors.InvalidHeaderDefect(\n \"missing whitespace before encoded word\"))\n have_ws=False\n if have_ws and len(unstructured)>1:\n if unstructured[-2].token_type =='encoded-word':\n unstructured[-1]=EWWhiteSpaceTerminal(\n unstructured[-1],'fws')\n unstructured.append(token)\n continue\n tok,*remainder=_wsp_splitter(value,1)\n \n \n \n \n \n \n if valid_ew and rfc2047_matcher.search(tok):\n tok,*remainder=value.partition('=?')\n vtext=ValueTerminal(tok,'vtext')\n _validate_xtext(vtext)\n unstructured.append(vtext)\n value=''.join(remainder)\n return unstructured\n \ndef get_qp_ctext(value):\n ''\n\n\n\n\n\n\n\n\n\n \n ptext,value,_=_get_ptext_to_endchars(value,'()')\n ptext=WhiteSpaceTerminal(ptext,'ptext')\n _validate_xtext(ptext)\n return ptext,value\n \ndef get_qcontent(value):\n ''\n\n\n\n\n\n\n\n \n ptext,value,_=_get_ptext_to_endchars(value,'\"')\n ptext=ValueTerminal(ptext,'ptext')\n _validate_xtext(ptext)\n return ptext,value\n \ndef get_atext(value):\n ''\n\n\n\n \n m=_non_atom_end_matcher(value)\n if not m:\n raise errors.HeaderParseError(\n \"expected atext but found '{}'\".format(value))\n atext=m.group()\n value=value[len(atext):]\n atext=ValueTerminal(atext,'atext')\n _validate_xtext(atext)\n return atext,value\n \ndef get_bare_quoted_string(value):\n ''\n\n\n\n\n \n if value[0]!='\"':\n raise errors.HeaderParseError(\n \"expected '\\\"' but found '{}'\".format(value))\n bare_quoted_string=BareQuotedString()\n value=value[1:]\n if value and value[0]=='\"':\n token,value=get_qcontent(value)\n bare_quoted_string.append(token)\n while value and value[0]!='\"':\n if value[0]in WSP:\n token,value=get_fws(value)\n elif value[:2]=='=?':\n valid_ew=False\n try:\n token,value=get_encoded_word(value)\n 
bare_quoted_string.defects.append(errors.InvalidHeaderDefect(\n \"encoded word inside quoted string\"))\n valid_ew=True\n except errors.HeaderParseError:\n token,value=get_qcontent(value)\n \n \n if valid_ew and len(bare_quoted_string)>1:\n if(bare_quoted_string[-1].token_type =='fws'and\n bare_quoted_string[-2].token_type =='encoded-word'):\n bare_quoted_string[-1]=EWWhiteSpaceTerminal(\n bare_quoted_string[-1],'fws')\n else:\n token,value=get_qcontent(value)\n bare_quoted_string.append(token)\n if not value:\n bare_quoted_string.defects.append(errors.InvalidHeaderDefect(\n \"end of header inside quoted string\"))\n return bare_quoted_string,value\n return bare_quoted_string,value[1:]\n \ndef get_comment(value):\n ''\n\n\n\n \n if value and value[0]!='(':\n raise errors.HeaderParseError(\n \"expected '(' but found '{}'\".format(value))\n comment=Comment()\n value=value[1:]\n while value and value[0]!=\")\":\n if value[0]in WSP:\n token,value=get_fws(value)\n elif value[0]=='(':\n token,value=get_comment(value)\n else:\n token,value=get_qp_ctext(value)\n comment.append(token)\n if not value:\n comment.defects.append(errors.InvalidHeaderDefect(\n \"end of header inside comment\"))\n return comment,value\n return comment,value[1:]\n \ndef get_cfws(value):\n ''\n\n \n cfws=CFWSList()\n while value and value[0]in CFWS_LEADER:\n if value[0]in WSP:\n token,value=get_fws(value)\n else:\n token,value=get_comment(value)\n cfws.append(token)\n return cfws,value\n \ndef get_quoted_string(value):\n ''\n\n\n\n\n \n quoted_string=QuotedString()\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n quoted_string.append(token)\n token,value=get_bare_quoted_string(value)\n quoted_string.append(token)\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n quoted_string.append(token)\n return quoted_string,value\n \ndef get_atom(value):\n ''\n\n\n \n atom=Atom()\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n atom.append(token)\n if value and value[0]in ATOM_ENDS:\n raise errors.HeaderParseError(\n \"expected atom but found '{}'\".format(value))\n if value.startswith('=?'):\n try:\n token,value=get_encoded_word(value)\n except errors.HeaderParseError:\n \n \n token,value=get_atext(value)\n else:\n token,value=get_atext(value)\n atom.append(token)\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n atom.append(token)\n return atom,value\n \ndef get_dot_atom_text(value):\n ''\n\n \n dot_atom_text=DotAtomText()\n if not value or value[0]in ATOM_ENDS:\n raise errors.HeaderParseError(\"expected atom at a start of \"\n \"dot-atom-text but found '{}'\".format(value))\n while value and value[0]not in ATOM_ENDS:\n token,value=get_atext(value)\n dot_atom_text.append(token)\n if value and value[0]=='.':\n dot_atom_text.append(DOT)\n value=value[1:]\n if dot_atom_text[-1]is DOT:\n raise errors.HeaderParseError(\"expected atom at end of dot-atom-text \"\n \"but found '{}'\".format('.'+value))\n return dot_atom_text,value\n \ndef get_dot_atom(value):\n ''\n\n\n\n \n dot_atom=DotAtom()\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n dot_atom.append(token)\n if value.startswith('=?'):\n try:\n token,value=get_encoded_word(value)\n except errors.HeaderParseError:\n \n \n token,value=get_dot_atom_text(value)\n else:\n token,value=get_dot_atom_text(value)\n dot_atom.append(token)\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n dot_atom.append(token)\n return dot_atom,value\n \ndef get_word(value):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n else:\n leader=None\n if not value:\n raise errors.HeaderParseError(\n \"Expected 'atom' or 'quoted-string' but found nothing.\")\n if value[0]=='\"':\n token,value=get_quoted_string(value)\n elif value[0]in SPECIALS:\n raise errors.HeaderParseError(\"Expected 'atom' or 'quoted-string' \"\n \"but found '{}'\".format(value))\n else:\n token,value=get_atom(value)\n if leader is not None:\n token[:0]=[leader]\n return token,value\n \ndef get_phrase(value):\n ''\n\n\n\n\n\n\n\n\n\n \n phrase=Phrase()\n try:\n token,value=get_word(value)\n phrase.append(token)\n except errors.HeaderParseError:\n phrase.defects.append(errors.InvalidHeaderDefect(\n \"phrase does not start with word\"))\n while value and value[0]not in PHRASE_ENDS:\n if value[0]=='.':\n phrase.append(DOT)\n phrase.defects.append(errors.ObsoleteHeaderDefect(\n \"period in 'phrase'\"))\n value=value[1:]\n else:\n try:\n token,value=get_word(value)\n except errors.HeaderParseError:\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n phrase.defects.append(errors.ObsoleteHeaderDefect(\n \"comment found without atom\"))\n else:\n raise\n phrase.append(token)\n return phrase,value\n \ndef get_local_part(value):\n ''\n\n \n local_part=LocalPart()\n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value:\n raise errors.HeaderParseError(\n \"expected local-part but found '{}'\".format(value))\n try:\n token,value=get_dot_atom(value)\n except errors.HeaderParseError:\n try:\n token,value=get_word(value)\n except errors.HeaderParseError:\n if value[0]!='\\\\'and value[0]in PHRASE_ENDS:\n raise\n token=TokenList()\n if leader is not None:\n token[:0]=[leader]\n local_part.append(token)\n if value and(value[0]=='\\\\'or value[0]not in PHRASE_ENDS):\n obs_local_part,value=get_obs_local_part(str(local_part)+value)\n if obs_local_part.token_type =='invalid-obs-local-part':\n local_part.defects.append(errors.InvalidHeaderDefect(\n \"local-part is not dot-atom, quoted-string, or obs-local-part\"))\n else:\n local_part.defects.append(errors.ObsoleteHeaderDefect(\n \"local-part is not a dot-atom (contains CFWS)\"))\n local_part[0]=obs_local_part\n try:\n local_part.value.encode('ascii')\n except UnicodeEncodeError:\n local_part.defects.append(errors.NonASCIILocalPartDefect(\n \"local-part contains non-ASCII characters)\"))\n return local_part,value\n \ndef get_obs_local_part(value):\n ''\n \n obs_local_part=ObsLocalPart()\n last_non_ws_was_dot=False\n while value and(value[0]=='\\\\'or value[0]not in PHRASE_ENDS):\n if value[0]=='.':\n if last_non_ws_was_dot:\n obs_local_part.defects.append(errors.InvalidHeaderDefect(\n \"invalid repeated '.'\"))\n obs_local_part.append(DOT)\n last_non_ws_was_dot=True\n value=value[1:]\n continue\n elif value[0]=='\\\\':\n obs_local_part.append(ValueTerminal(value[0],\n 'misplaced-special'))\n value=value[1:]\n obs_local_part.defects.append(errors.InvalidHeaderDefect(\n \"'\\\\' character outside of quoted-string/ccontent\"))\n last_non_ws_was_dot=False\n continue\n if obs_local_part and obs_local_part[-1].token_type !='dot':\n obs_local_part.defects.append(errors.InvalidHeaderDefect(\n \"missing '.' 
between words\"))\n try:\n token,value=get_word(value)\n last_non_ws_was_dot=False\n except errors.HeaderParseError:\n if value[0]not in CFWS_LEADER:\n raise\n token,value=get_cfws(value)\n obs_local_part.append(token)\n if(obs_local_part[0].token_type =='dot'or\n obs_local_part[0].token_type =='cfws'and\n obs_local_part[1].token_type =='dot'):\n obs_local_part.defects.append(errors.InvalidHeaderDefect(\n \"Invalid leading '.' in local part\"))\n if(obs_local_part[-1].token_type =='dot'or\n obs_local_part[-1].token_type =='cfws'and\n obs_local_part[-2].token_type =='dot'):\n obs_local_part.defects.append(errors.InvalidHeaderDefect(\n \"Invalid trailing '.' in local part\"))\n if obs_local_part.defects:\n obs_local_part.token_type='invalid-obs-local-part'\n return obs_local_part,value\n \ndef get_dtext(value):\n ''\n\n\n\n\n\n\n\n\n\n \n ptext,value,had_qp=_get_ptext_to_endchars(value,'[]')\n ptext=ValueTerminal(ptext,'ptext')\n if had_qp:\n ptext.defects.append(errors.ObsoleteHeaderDefect(\n \"quoted printable found in domain-literal\"))\n _validate_xtext(ptext)\n return ptext,value\n \ndef _check_for_early_dl_end(value,domain_literal):\n if value:\n return False\n domain_literal.append(errors.InvalidHeaderDefect(\n \"end of input inside domain-literal\"))\n domain_literal.append(ValueTerminal(']','domain-literal-end'))\n return True\n \ndef get_domain_literal(value):\n ''\n\n \n domain_literal=DomainLiteral()\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n domain_literal.append(token)\n if not value:\n raise errors.HeaderParseError(\"expected domain-literal\")\n if value[0]!='[':\n raise errors.HeaderParseError(\"expected '[' at start of domain-literal \"\n \"but found '{}'\".format(value))\n value=value[1:]\n if _check_for_early_dl_end(value,domain_literal):\n return domain_literal,value\n domain_literal.append(ValueTerminal('[','domain-literal-start'))\n if value[0]in WSP:\n token,value=get_fws(value)\n domain_literal.append(token)\n token,value=get_dtext(value)\n domain_literal.append(token)\n if _check_for_early_dl_end(value,domain_literal):\n return domain_literal,value\n if value[0]in WSP:\n token,value=get_fws(value)\n domain_literal.append(token)\n if _check_for_early_dl_end(value,domain_literal):\n return domain_literal,value\n if value[0]!=']':\n raise errors.HeaderParseError(\"expected ']' at end of domain-literal \"\n \"but found '{}'\".format(value))\n domain_literal.append(ValueTerminal(']','domain-literal-end'))\n value=value[1:]\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n domain_literal.append(token)\n return domain_literal,value\n \ndef get_domain(value):\n ''\n\n\n \n domain=Domain()\n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value:\n raise errors.HeaderParseError(\n \"expected domain but found '{}'\".format(value))\n if value[0]=='[':\n token,value=get_domain_literal(value)\n if leader is not None:\n token[:0]=[leader]\n domain.append(token)\n return domain,value\n try:\n token,value=get_dot_atom(value)\n except errors.HeaderParseError:\n token,value=get_atom(value)\n if value and value[0]=='@':\n raise errors.HeaderParseError('Invalid Domain')\n if leader is not None:\n token[:0]=[leader]\n domain.append(token)\n if value and value[0]=='.':\n domain.defects.append(errors.ObsoleteHeaderDefect(\n \"domain is not a dot-atom (contains CFWS)\"))\n if domain[0].token_type =='dot-atom':\n domain[:]=domain[0]\n while value and value[0]=='.':\n domain.append(DOT)\n token,value=get_atom(value[1:])\n 
domain.append(token)\n return domain,value\n \ndef get_addr_spec(value):\n ''\n\n \n addr_spec=AddrSpec()\n token,value=get_local_part(value)\n addr_spec.append(token)\n if not value or value[0]!='@':\n addr_spec.defects.append(errors.InvalidHeaderDefect(\n \"addr-spec local part with no domain\"))\n return addr_spec,value\n addr_spec.append(ValueTerminal('@','address-at-symbol'))\n token,value=get_domain(value[1:])\n addr_spec.append(token)\n return addr_spec,value\n \ndef get_obs_route(value):\n ''\n\n\n\n\n \n obs_route=ObsRoute()\n while value and(value[0]==','or value[0]in CFWS_LEADER):\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n obs_route.append(token)\n elif value[0]==',':\n obs_route.append(ListSeparator)\n value=value[1:]\n if not value or value[0]!='@':\n raise errors.HeaderParseError(\n \"expected obs-route domain but found '{}'\".format(value))\n obs_route.append(RouteComponentMarker)\n token,value=get_domain(value[1:])\n obs_route.append(token)\n while value and value[0]==',':\n obs_route.append(ListSeparator)\n value=value[1:]\n if not value:\n break\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n obs_route.append(token)\n if value[0]=='@':\n obs_route.append(RouteComponentMarker)\n token,value=get_domain(value[1:])\n obs_route.append(token)\n if not value:\n raise errors.HeaderParseError(\"end of header while parsing obs-route\")\n if value[0]!=':':\n raise errors.HeaderParseError(\"expected ':' marking end of \"\n \"obs-route but found '{}'\".format(value))\n obs_route.append(ValueTerminal(':','end-of-obs-route-marker'))\n return obs_route,value[1:]\n \ndef get_angle_addr(value):\n ''\n\n\n \n angle_addr=AngleAddr()\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n angle_addr.append(token)\n if not value or value[0]!='<':\n raise errors.HeaderParseError(\n \"expected angle-addr but found '{}'\".format(value))\n angle_addr.append(ValueTerminal('<','angle-addr-start'))\n value=value[1:]\n \n \n if value[0]=='>':\n angle_addr.append(ValueTerminal('>','angle-addr-end'))\n angle_addr.defects.append(errors.InvalidHeaderDefect(\n \"null addr-spec in angle-addr\"))\n value=value[1:]\n return angle_addr,value\n try:\n token,value=get_addr_spec(value)\n except errors.HeaderParseError:\n try:\n token,value=get_obs_route(value)\n angle_addr.defects.append(errors.ObsoleteHeaderDefect(\n \"obsolete route specification in angle-addr\"))\n except errors.HeaderParseError:\n raise errors.HeaderParseError(\n \"expected addr-spec or obs-route but found '{}'\".format(value))\n angle_addr.append(token)\n token,value=get_addr_spec(value)\n angle_addr.append(token)\n if value and value[0]=='>':\n value=value[1:]\n else:\n angle_addr.defects.append(errors.InvalidHeaderDefect(\n \"missing trailing '>' on angle-addr\"))\n angle_addr.append(ValueTerminal('>','angle-addr-end'))\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n angle_addr.append(token)\n return angle_addr,value\n \ndef get_display_name(value):\n ''\n\n\n\n\n\n \n display_name=DisplayName()\n token,value=get_phrase(value)\n display_name.extend(token[:])\n display_name.defects=token.defects[:]\n return display_name,value\n \n \ndef get_name_addr(value):\n ''\n\n \n name_addr=NameAddr()\n \n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value:\n raise errors.HeaderParseError(\n \"expected name-addr but found '{}'\".format(leader))\n if value[0]!='<':\n if value[0]in PHRASE_ENDS:\n raise errors.HeaderParseError(\n \"expected name-addr but found 
'{}'\".format(value))\n token,value=get_display_name(value)\n if not value:\n raise errors.HeaderParseError(\n \"expected name-addr but found '{}'\".format(token))\n if leader is not None:\n token[0][:0]=[leader]\n leader=None\n name_addr.append(token)\n token,value=get_angle_addr(value)\n if leader is not None:\n token[:0]=[leader]\n name_addr.append(token)\n return name_addr,value\n \ndef get_mailbox(value):\n ''\n\n \n \n \n mailbox=Mailbox()\n try:\n token,value=get_name_addr(value)\n except errors.HeaderParseError:\n try:\n token,value=get_addr_spec(value)\n except errors.HeaderParseError:\n raise errors.HeaderParseError(\n \"expected mailbox but found '{}'\".format(value))\n if any(isinstance(x,errors.InvalidHeaderDefect)\n for x in token.all_defects):\n mailbox.token_type='invalid-mailbox'\n mailbox.append(token)\n return mailbox,value\n \ndef get_invalid_mailbox(value,endchars):\n ''\n\n\n\n\n \n invalid_mailbox=InvalidMailbox()\n while value and value[0]not in endchars:\n if value[0]in PHRASE_ENDS:\n invalid_mailbox.append(ValueTerminal(value[0],\n 'misplaced-special'))\n value=value[1:]\n else:\n token,value=get_phrase(value)\n invalid_mailbox.append(token)\n return invalid_mailbox,value\n \ndef get_mailbox_list(value):\n ''\n\n\n\n\n\n\n\n\n\n \n mailbox_list=MailboxList()\n while value and value[0]!=';':\n try:\n token,value=get_mailbox(value)\n mailbox_list.append(token)\n except errors.HeaderParseError:\n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value or value[0]in ',;':\n mailbox_list.append(leader)\n mailbox_list.defects.append(errors.ObsoleteHeaderDefect(\n \"empty element in mailbox-list\"))\n else:\n token,value=get_invalid_mailbox(value,',;')\n if leader is not None:\n token[:0]=[leader]\n mailbox_list.append(token)\n mailbox_list.defects.append(errors.InvalidHeaderDefect(\n \"invalid mailbox in mailbox-list\"))\n elif value[0]==',':\n mailbox_list.defects.append(errors.ObsoleteHeaderDefect(\n \"empty element in mailbox-list\"))\n else:\n token,value=get_invalid_mailbox(value,',;')\n if leader is not None:\n token[:0]=[leader]\n mailbox_list.append(token)\n mailbox_list.defects.append(errors.InvalidHeaderDefect(\n \"invalid mailbox in mailbox-list\"))\n if value and value[0]not in ',;':\n \n \n mailbox=mailbox_list[-1]\n mailbox.token_type='invalid-mailbox'\n token,value=get_invalid_mailbox(value,',;')\n mailbox.extend(token)\n mailbox_list.defects.append(errors.InvalidHeaderDefect(\n \"invalid mailbox in mailbox-list\"))\n if value and value[0]==',':\n mailbox_list.append(ListSeparator)\n value=value[1:]\n return mailbox_list,value\n \n \ndef get_group_list(value):\n ''\n\n\n \n group_list=GroupList()\n if not value:\n group_list.defects.append(errors.InvalidHeaderDefect(\n \"end of header before group-list\"))\n return group_list,value\n leader=None\n if value and value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value:\n \n \n \n group_list.defects.append(errors.InvalidHeaderDefect(\n \"end of header in group-list\"))\n group_list.append(leader)\n return group_list,value\n if value[0]==';':\n group_list.append(leader)\n return group_list,value\n token,value=get_mailbox_list(value)\n if len(token.all_mailboxes)==0:\n if leader is not None:\n group_list.append(leader)\n group_list.extend(token)\n group_list.defects.append(errors.ObsoleteHeaderDefect(\n \"group-list with empty entries\"))\n return group_list,value\n if leader is not None:\n token[:0]=[leader]\n group_list.append(token)\n return group_list,value\n 
\ndef get_group(value):\n ''\n\n \n group=Group()\n token,value=get_display_name(value)\n if not value or value[0]!=':':\n raise errors.HeaderParseError(\"expected ':' at end of group \"\n \"display name but found '{}'\".format(value))\n group.append(token)\n group.append(ValueTerminal(':','group-display-name-terminator'))\n value=value[1:]\n if value and value[0]==';':\n group.append(ValueTerminal(';','group-terminator'))\n return group,value[1:]\n token,value=get_group_list(value)\n group.append(token)\n if not value:\n group.defects.append(errors.InvalidHeaderDefect(\n \"end of header in group\"))\n elif value[0]!=';':\n raise errors.HeaderParseError(\n \"expected ';' at end of group but found {}\".format(value))\n group.append(ValueTerminal(';','group-terminator'))\n value=value[1:]\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n group.append(token)\n return group,value\n \ndef get_address(value):\n ''\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n address=Address()\n try:\n token,value=get_group(value)\n except errors.HeaderParseError:\n try:\n token,value=get_mailbox(value)\n except errors.HeaderParseError:\n raise errors.HeaderParseError(\n \"expected address but found '{}'\".format(value))\n address.append(token)\n return address,value\n \ndef get_address_list(value):\n ''\n\n\n\n\n\n\n\n \n address_list=AddressList()\n while value:\n try:\n token,value=get_address(value)\n address_list.append(token)\n except errors.HeaderParseError:\n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value or value[0]==',':\n address_list.append(leader)\n address_list.defects.append(errors.ObsoleteHeaderDefect(\n \"address-list entry with no content\"))\n else:\n token,value=get_invalid_mailbox(value,',')\n if leader is not None:\n token[:0]=[leader]\n address_list.append(Address([token]))\n address_list.defects.append(errors.InvalidHeaderDefect(\n \"invalid address in address-list\"))\n elif value[0]==',':\n address_list.defects.append(errors.ObsoleteHeaderDefect(\n \"empty element in address-list\"))\n else:\n token,value=get_invalid_mailbox(value,',')\n if leader is not None:\n token[:0]=[leader]\n address_list.append(Address([token]))\n address_list.defects.append(errors.InvalidHeaderDefect(\n \"invalid address in address-list\"))\n if value and value[0]!=',':\n \n \n mailbox=address_list[-1][0]\n mailbox.token_type='invalid-mailbox'\n token,value=get_invalid_mailbox(value,',')\n mailbox.extend(token)\n address_list.defects.append(errors.InvalidHeaderDefect(\n \"invalid address in address-list\"))\n if value:\n address_list.append(ValueTerminal(',','list-separator'))\n value=value[1:]\n return address_list,value\n \n \ndef get_no_fold_literal(value):\n ''\n \n no_fold_literal=NoFoldLiteral()\n if not value:\n raise errors.HeaderParseError(\n \"expected no-fold-literal but found '{}'\".format(value))\n if value[0]!='[':\n raise errors.HeaderParseError(\n \"expected '[' at the start of no-fold-literal \"\n \"but found '{}'\".format(value))\n no_fold_literal.append(ValueTerminal('[','no-fold-literal-start'))\n value=value[1:]\n token,value=get_dtext(value)\n no_fold_literal.append(token)\n if not value or value[0]!=']':\n raise errors.HeaderParseError(\n \"expected ']' at the end of no-fold-literal \"\n \"but found '{}'\".format(value))\n no_fold_literal.append(ValueTerminal(']','no-fold-literal-end'))\n return no_fold_literal,value[1:]\n \ndef get_msg_id(value):\n ''\n\n\n\n \n msg_id=MsgID()\n if value and value[0]in CFWS_LEADER:\n 
token,value=get_cfws(value)\n msg_id.append(token)\n if not value or value[0]!='<':\n raise errors.HeaderParseError(\n \"expected msg-id but found '{}'\".format(value))\n msg_id.append(ValueTerminal('<','msg-id-start'))\n value=value[1:]\n \n try:\n token,value=get_dot_atom_text(value)\n except errors.HeaderParseError:\n try:\n \n token,value=get_obs_local_part(value)\n msg_id.defects.append(errors.ObsoleteHeaderDefect(\n \"obsolete id-left in msg-id\"))\n except errors.HeaderParseError:\n raise errors.HeaderParseError(\n \"expected dot-atom-text or obs-id-left\"\n \" but found '{}'\".format(value))\n msg_id.append(token)\n if not value or value[0]!='@':\n msg_id.defects.append(errors.InvalidHeaderDefect(\n \"msg-id with no id-right\"))\n \n \n \n if value and value[0]=='>':\n msg_id.append(ValueTerminal('>','msg-id-end'))\n value=value[1:]\n return msg_id,value\n msg_id.append(ValueTerminal('@','address-at-symbol'))\n value=value[1:]\n \n try:\n token,value=get_dot_atom_text(value)\n except errors.HeaderParseError:\n try:\n token,value=get_no_fold_literal(value)\n except errors.HeaderParseError:\n try:\n token,value=get_domain(value)\n msg_id.defects.append(errors.ObsoleteHeaderDefect(\n \"obsolete id-right in msg-id\"))\n except errors.HeaderParseError:\n raise errors.HeaderParseError(\n \"expected dot-atom-text, no-fold-literal or obs-id-right\"\n \" but found '{}'\".format(value))\n msg_id.append(token)\n if value and value[0]=='>':\n value=value[1:]\n else:\n msg_id.defects.append(errors.InvalidHeaderDefect(\n \"missing trailing '>' on msg-id\"))\n msg_id.append(ValueTerminal('>','msg-id-end'))\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n msg_id.append(token)\n return msg_id,value\n \n \ndef parse_message_id(value):\n ''\n \n message_id=MessageID()\n try:\n token,value=get_msg_id(value)\n message_id.append(token)\n except errors.HeaderParseError as ex:\n token=get_unstructured(value)\n message_id=InvalidMessageID(token)\n message_id.defects.append(\n errors.InvalidHeaderDefect(\"Invalid msg-id: {!r}\".format(ex)))\n else:\n \n if value:\n message_id.defects.append(errors.InvalidHeaderDefect(\n \"Unexpected {!r}\".format(value)))\n \n return message_id\n \n \n \n \n \n \n \n \n \ndef parse_mime_version(value):\n ''\n\n \n \n \n mime_version=MIMEVersion()\n if not value:\n mime_version.defects.append(errors.HeaderMissingRequiredValue(\n \"Missing MIME version number (eg: 1.0)\"))\n return mime_version\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n mime_version.append(token)\n if not value:\n mime_version.defects.append(errors.HeaderMissingRequiredValue(\n \"Expected MIME version number but found only CFWS\"))\n digits=''\n while value and value[0]!='.'and value[0]not in CFWS_LEADER:\n digits +=value[0]\n value=value[1:]\n if not digits.isdigit():\n mime_version.defects.append(errors.InvalidHeaderDefect(\n \"Expected MIME major version number but found {!r}\".format(digits)))\n mime_version.append(ValueTerminal(digits,'xtext'))\n else:\n mime_version.major=int(digits)\n mime_version.append(ValueTerminal(digits,'digits'))\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n mime_version.append(token)\n if not value or value[0]!='.':\n if mime_version.major is not None:\n mime_version.defects.append(errors.InvalidHeaderDefect(\n \"Incomplete MIME version; found only major number\"))\n if value:\n mime_version.append(ValueTerminal(value,'xtext'))\n return mime_version\n mime_version.append(ValueTerminal('.','version-separator'))\n 
value=value[1:]\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n mime_version.append(token)\n if not value:\n if mime_version.major is not None:\n mime_version.defects.append(errors.InvalidHeaderDefect(\n \"Incomplete MIME version; found only major number\"))\n return mime_version\n digits=''\n while value and value[0]not in CFWS_LEADER:\n digits +=value[0]\n value=value[1:]\n if not digits.isdigit():\n mime_version.defects.append(errors.InvalidHeaderDefect(\n \"Expected MIME minor version number but found {!r}\".format(digits)))\n mime_version.append(ValueTerminal(digits,'xtext'))\n else:\n mime_version.minor=int(digits)\n mime_version.append(ValueTerminal(digits,'digits'))\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n mime_version.append(token)\n if value:\n mime_version.defects.append(errors.InvalidHeaderDefect(\n \"Excess non-CFWS text after MIME version\"))\n mime_version.append(ValueTerminal(value,'xtext'))\n return mime_version\n \ndef get_invalid_parameter(value):\n ''\n\n\n\n\n \n invalid_parameter=InvalidParameter()\n while value and value[0]!=';':\n if value[0]in PHRASE_ENDS:\n invalid_parameter.append(ValueTerminal(value[0],\n 'misplaced-special'))\n value=value[1:]\n else:\n token,value=get_phrase(value)\n invalid_parameter.append(token)\n return invalid_parameter,value\n \ndef get_ttext(value):\n ''\n\n\n\n\n\n\n \n m=_non_token_end_matcher(value)\n if not m:\n raise errors.HeaderParseError(\n \"expected ttext but found '{}'\".format(value))\n ttext=m.group()\n value=value[len(ttext):]\n ttext=ValueTerminal(ttext,'ttext')\n _validate_xtext(ttext)\n return ttext,value\n \ndef get_token(value):\n ''\n\n\n\n\n\n\n \n mtoken=Token()\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n mtoken.append(token)\n if value and value[0]in TOKEN_ENDS:\n raise errors.HeaderParseError(\n \"expected token but found '{}'\".format(value))\n token,value=get_ttext(value)\n mtoken.append(token)\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n mtoken.append(token)\n return mtoken,value\n \ndef get_attrtext(value):\n ''\n\n\n\n\n\n\n \n m=_non_attribute_end_matcher(value)\n if not m:\n raise errors.HeaderParseError(\n \"expected attrtext but found {!r}\".format(value))\n attrtext=m.group()\n value=value[len(attrtext):]\n attrtext=ValueTerminal(attrtext,'attrtext')\n _validate_xtext(attrtext)\n return attrtext,value\n \ndef get_attribute(value):\n ''\n\n\n\n\n\n\n \n attribute=Attribute()\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n attribute.append(token)\n if value and value[0]in ATTRIBUTE_ENDS:\n raise errors.HeaderParseError(\n \"expected token but found '{}'\".format(value))\n token,value=get_attrtext(value)\n attribute.append(token)\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n attribute.append(token)\n return attribute,value\n \ndef get_extended_attrtext(value):\n ''\n\n\n\n\n\n \n m=_non_extended_attribute_end_matcher(value)\n if not m:\n raise errors.HeaderParseError(\n \"expected extended attrtext but found {!r}\".format(value))\n attrtext=m.group()\n value=value[len(attrtext):]\n attrtext=ValueTerminal(attrtext,'extended-attrtext')\n _validate_xtext(attrtext)\n return attrtext,value\n \ndef get_extended_attribute(value):\n ''\n\n\n\n\n \n \n attribute=Attribute()\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n attribute.append(token)\n if value and value[0]in EXTENDED_ATTRIBUTE_ENDS:\n raise errors.HeaderParseError(\n \"expected 
token but found '{}'\".format(value))\n token,value=get_extended_attrtext(value)\n attribute.append(token)\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n attribute.append(token)\n return attribute,value\n \ndef get_section(value):\n ''\n\n\n\n\n\n\n \n section=Section()\n if not value or value[0]!='*':\n raise errors.HeaderParseError(\"Expected section but found {}\".format(\n value))\n section.append(ValueTerminal('*','section-marker'))\n value=value[1:]\n if not value or not value[0].isdigit():\n raise errors.HeaderParseError(\"Expected section number but \"\n \"found {}\".format(value))\n digits=''\n while value and value[0].isdigit():\n digits +=value[0]\n value=value[1:]\n if digits[0]=='0'and digits !='0':\n section.defects.append(errors.InvalidHeaderDefect(\n \"section number has an invalid leading 0\"))\n section.number=int(digits)\n section.append(ValueTerminal(digits,'digits'))\n return section,value\n \n \ndef get_value(value):\n ''\n\n \n v=Value()\n if not value:\n raise errors.HeaderParseError(\"Expected value but found end of string\")\n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value:\n raise errors.HeaderParseError(\"Expected value but found \"\n \"only {}\".format(leader))\n if value[0]=='\"':\n token,value=get_quoted_string(value)\n else:\n token,value=get_extended_attribute(value)\n if leader is not None:\n token[:0]=[leader]\n v.append(token)\n return v,value\n \ndef get_parameter(value):\n ''\n\n\n\n\n\n \n \n \n \n param=Parameter()\n token,value=get_attribute(value)\n param.append(token)\n if not value or value[0]==';':\n param.defects.append(errors.InvalidHeaderDefect(\"Parameter contains \"\n \"name ({}) but no value\".format(token)))\n return param,value\n if value[0]=='*':\n try:\n token,value=get_section(value)\n param.sectioned=True\n param.append(token)\n except errors.HeaderParseError:\n pass\n if not value:\n raise errors.HeaderParseError(\"Incomplete parameter\")\n if value[0]=='*':\n param.append(ValueTerminal('*','extended-parameter-marker'))\n value=value[1:]\n param.extended=True\n if value[0]!='=':\n raise errors.HeaderParseError(\"Parameter not followed by '='\")\n param.append(ValueTerminal('=','parameter-separator'))\n value=value[1:]\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n param.append(token)\n remainder=None\n appendto=param\n if param.extended and value and value[0]=='\"':\n \n \n \n qstring,remainder=get_quoted_string(value)\n inner_value=qstring.stripped_value\n semi_valid=False\n if param.section_number ==0:\n if inner_value and inner_value[0]==\"'\":\n semi_valid=True\n else:\n token,rest=get_attrtext(inner_value)\n if rest and rest[0]==\"'\":\n semi_valid=True\n else:\n try:\n token,rest=get_extended_attrtext(inner_value)\n except:\n pass\n else:\n if not rest:\n semi_valid=True\n if semi_valid:\n param.defects.append(errors.InvalidHeaderDefect(\n \"Quoted string value for extended parameter is invalid\"))\n param.append(qstring)\n for t in qstring:\n if t.token_type =='bare-quoted-string':\n t[:]=[]\n appendto=t\n break\n value=inner_value\n else:\n remainder=None\n param.defects.append(errors.InvalidHeaderDefect(\n \"Parameter marked as extended but appears to have a \"\n \"quoted string value that is non-encoded\"))\n if value and value[0]==\"'\":\n token=None\n else:\n token,value=get_value(value)\n if not param.extended or param.section_number >0:\n if not value or value[0]!=\"'\":\n appendto.append(token)\n if remainder is not None:\n assert 
not value,value\n value=remainder\n return param,value\n param.defects.append(errors.InvalidHeaderDefect(\n \"Apparent initial-extended-value but attribute \"\n \"was not marked as extended or was not initial section\"))\n if not value:\n \n param.defects.append(errors.InvalidHeaderDefect(\n \"Missing required charset/lang delimiters\"))\n appendto.append(token)\n if remainder is None:\n return param,value\n else:\n if token is not None:\n for t in token:\n if t.token_type =='extended-attrtext':\n break\n t.token_type =='attrtext'\n appendto.append(t)\n param.charset=t.value\n if value[0]!=\"'\":\n raise errors.HeaderParseError(\"Expected RFC2231 char/lang encoding \"\n \"delimiter, but found {!r}\".format(value))\n appendto.append(ValueTerminal(\"'\",'RFC2231-delimiter'))\n value=value[1:]\n if value and value[0]!=\"'\":\n token,value=get_attrtext(value)\n appendto.append(token)\n param.lang=token.value\n if not value or value[0]!=\"'\":\n raise errors.HeaderParseError(\"Expected RFC2231 char/lang encoding \"\n \"delimiter, but found {}\".format(value))\n appendto.append(ValueTerminal(\"'\",'RFC2231-delimiter'))\n value=value[1:]\n if remainder is not None:\n \n v=Value()\n while value:\n if value[0]in WSP:\n token,value=get_fws(value)\n elif value[0]=='\"':\n token=ValueTerminal('\"','DQUOTE')\n value=value[1:]\n else:\n token,value=get_qcontent(value)\n v.append(token)\n token=v\n else:\n token,value=get_value(value)\n appendto.append(token)\n if remainder is not None:\n assert not value,value\n value=remainder\n return param,value\n \ndef parse_mime_parameters(value):\n ''\n\n\n\n\n\n\n\n\n\n\n \n mime_parameters=MimeParameters()\n while value:\n try:\n token,value=get_parameter(value)\n mime_parameters.append(token)\n except errors.HeaderParseError:\n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value:\n mime_parameters.append(leader)\n return mime_parameters\n if value[0]==';':\n if leader is not None:\n mime_parameters.append(leader)\n mime_parameters.defects.append(errors.InvalidHeaderDefect(\n \"parameter entry with no content\"))\n else:\n token,value=get_invalid_parameter(value)\n if leader:\n token[:0]=[leader]\n mime_parameters.append(token)\n mime_parameters.defects.append(errors.InvalidHeaderDefect(\n \"invalid parameter {!r}\".format(token)))\n if value and value[0]!=';':\n \n \n param=mime_parameters[-1]\n param.token_type='invalid-parameter'\n token,value=get_invalid_parameter(value)\n param.extend(token)\n mime_parameters.defects.append(errors.InvalidHeaderDefect(\n \"parameter with invalid trailing text {!r}\".format(token)))\n if value:\n \n mime_parameters.append(ValueTerminal(';','parameter-separator'))\n value=value[1:]\n return mime_parameters\n \ndef _find_mime_parameters(tokenlist,value):\n ''\n\n \n while value and value[0]!=';':\n if value[0]in PHRASE_ENDS:\n tokenlist.append(ValueTerminal(value[0],'misplaced-special'))\n value=value[1:]\n else:\n token,value=get_phrase(value)\n tokenlist.append(token)\n if not value:\n return\n tokenlist.append(ValueTerminal(';','parameter-separator'))\n tokenlist.append(parse_mime_parameters(value[1:]))\n \ndef parse_content_type_header(value):\n ''\n\n\n\n\n \n ctype=ContentType()\n if not value:\n ctype.defects.append(errors.HeaderMissingRequiredValue(\n \"Missing content type specification\"))\n return ctype\n try:\n token,value=get_token(value)\n except errors.HeaderParseError:\n ctype.defects.append(errors.InvalidHeaderDefect(\n \"Expected content maintype but found 
{!r}\".format(value)))\n _find_mime_parameters(ctype,value)\n return ctype\n ctype.append(token)\n \n \n if not value or value[0]!='/':\n ctype.defects.append(errors.InvalidHeaderDefect(\n \"Invalid content type\"))\n if value:\n _find_mime_parameters(ctype,value)\n return ctype\n ctype.maintype=token.value.strip().lower()\n ctype.append(ValueTerminal('/','content-type-separator'))\n value=value[1:]\n try:\n token,value=get_token(value)\n except errors.HeaderParseError:\n ctype.defects.append(errors.InvalidHeaderDefect(\n \"Expected content subtype but found {!r}\".format(value)))\n _find_mime_parameters(ctype,value)\n return ctype\n ctype.append(token)\n ctype.subtype=token.value.strip().lower()\n if not value:\n return ctype\n if value[0]!=';':\n ctype.defects.append(errors.InvalidHeaderDefect(\n \"Only parameters are valid after content type, but \"\n \"found {!r}\".format(value)))\n \n \n \n del ctype.maintype,ctype.subtype\n _find_mime_parameters(ctype,value)\n return ctype\n ctype.append(ValueTerminal(';','parameter-separator'))\n ctype.append(parse_mime_parameters(value[1:]))\n return ctype\n \ndef parse_content_disposition_header(value):\n ''\n\n \n disp_header=ContentDisposition()\n if not value:\n disp_header.defects.append(errors.HeaderMissingRequiredValue(\n \"Missing content disposition\"))\n return disp_header\n try:\n token,value=get_token(value)\n except errors.HeaderParseError:\n disp_header.defects.append(errors.InvalidHeaderDefect(\n \"Expected content disposition but found {!r}\".format(value)))\n _find_mime_parameters(disp_header,value)\n return disp_header\n disp_header.append(token)\n disp_header.content_disposition=token.value.strip().lower()\n if not value:\n return disp_header\n if value[0]!=';':\n disp_header.defects.append(errors.InvalidHeaderDefect(\n \"Only parameters are valid after content disposition, but \"\n \"found {!r}\".format(value)))\n _find_mime_parameters(disp_header,value)\n return disp_header\n disp_header.append(ValueTerminal(';','parameter-separator'))\n disp_header.append(parse_mime_parameters(value[1:]))\n return disp_header\n \ndef parse_content_transfer_encoding_header(value):\n ''\n\n \n \n cte_header=ContentTransferEncoding()\n if not value:\n cte_header.defects.append(errors.HeaderMissingRequiredValue(\n \"Missing content transfer encoding\"))\n return cte_header\n try:\n token,value=get_token(value)\n except errors.HeaderParseError:\n cte_header.defects.append(errors.InvalidHeaderDefect(\n \"Expected content transfer encoding but found {!r}\".format(value)))\n else:\n cte_header.append(token)\n cte_header.cte=token.value.strip().lower()\n if not value:\n return cte_header\n while value:\n cte_header.defects.append(errors.InvalidHeaderDefect(\n \"Extra text after content transfer encoding\"))\n if value[0]in PHRASE_ENDS:\n cte_header.append(ValueTerminal(value[0],'misplaced-special'))\n value=value[1:]\n else:\n token,value=get_phrase(value)\n cte_header.append(token)\n return cte_header\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef _steal_trailing_WSP_if_exists(lines):\n wsp=''\n if lines and lines[-1]and lines[-1][-1]in WSP:\n wsp=lines[-1][-1]\n lines[-1]=lines[-1][:-1]\n return wsp\n \ndef _refold_parse_tree(parse_tree,*,policy):\n ''\n\n \n \n maxlen=policy.max_line_length or sys.maxsize\n encoding='utf-8'if policy.utf8 else 'us-ascii'\n lines=['']\n last_ew=None\n wrap_as_ew_blocked=0\n want_encoding=False\n end_ew_not_allowed=Terminal('','wrap_as_ew_blocked')\n parts=list(parse_tree)\n while parts:\n part=parts.pop(0)\n 
if part is end_ew_not_allowed:\n wrap_as_ew_blocked -=1\n continue\n tstr=str(part)\n if part.token_type =='ptext'and set(tstr)&SPECIALS:\n \n want_encoding=True\n try:\n tstr.encode(encoding)\n charset=encoding\n except UnicodeEncodeError:\n if any(isinstance(x,errors.UndecodableBytesDefect)\n for x in part.all_defects):\n charset='unknown-8bit'\n else:\n \n \n charset='utf-8'\n want_encoding=True\n if part.token_type =='mime-parameters':\n \n _fold_mime_parameters(part,lines,maxlen,encoding)\n continue\n if want_encoding and not wrap_as_ew_blocked:\n if not part.as_ew_allowed:\n want_encoding=False\n last_ew=None\n if part.syntactic_break:\n encoded_part=part.fold(policy=policy)[:-len(policy.linesep)]\n if policy.linesep not in encoded_part:\n \n if len(encoded_part)>maxlen -len(lines[-1]):\n \n newline=_steal_trailing_WSP_if_exists(lines)\n \n lines.append(newline)\n lines[-1]+=encoded_part\n continue\n \n \n \n \n if not hasattr(part,'encode'):\n \n parts=list(part)+parts\n else:\n \n \n last_ew=_fold_as_ew(tstr,lines,maxlen,last_ew,\n part.ew_combine_allowed,charset)\n want_encoding=False\n continue\n if len(tstr)<=maxlen -len(lines[-1]):\n lines[-1]+=tstr\n continue\n \n \n \n if(part.syntactic_break and\n len(tstr)+1 <=maxlen):\n newline=_steal_trailing_WSP_if_exists(lines)\n if newline or part.startswith_fws():\n lines.append(newline+tstr)\n last_ew=None\n continue\n if not hasattr(part,'encode'):\n \n newparts=list(part)\n if not part.as_ew_allowed:\n wrap_as_ew_blocked +=1\n newparts.append(end_ew_not_allowed)\n parts=newparts+parts\n continue\n if part.as_ew_allowed and not wrap_as_ew_blocked:\n \n \n parts.insert(0,part)\n want_encoding=True\n continue\n \n newline=_steal_trailing_WSP_if_exists(lines)\n if newline or part.startswith_fws():\n lines.append(newline+tstr)\n else:\n \n lines[-1]+=tstr\n return policy.linesep.join(lines)+policy.linesep\n \ndef _fold_as_ew(to_encode,lines,maxlen,last_ew,ew_combine_allowed,charset):\n ''\n\n\n\n\n\n\n\n\n \n if last_ew is not None and ew_combine_allowed:\n to_encode=str(\n get_unstructured(lines[-1][last_ew:]+to_encode))\n lines[-1]=lines[-1][:last_ew]\n if to_encode[0]in WSP:\n \n \n leading_wsp=to_encode[0]\n to_encode=to_encode[1:]\n if(len(lines[-1])==maxlen):\n lines.append(_steal_trailing_WSP_if_exists(lines))\n lines[-1]+=leading_wsp\n trailing_wsp=''\n if to_encode[-1]in WSP:\n \n trailing_wsp=to_encode[-1]\n to_encode=to_encode[:-1]\n new_last_ew=len(lines[-1])if last_ew is None else last_ew\n \n encode_as='utf-8'if charset =='us-ascii'else charset\n \n \n \n chrome_len=len(encode_as)+7\n \n if(chrome_len+1)>=maxlen:\n raise errors.HeaderParseError(\n \"max_line_length is too small to fit an encoded word\")\n \n while to_encode:\n remaining_space=maxlen -len(lines[-1])\n text_space=remaining_space -chrome_len\n if text_space <=0:\n lines.append(' ')\n continue\n \n to_encode_word=to_encode[:text_space]\n encoded_word=_ew.encode(to_encode_word,charset=encode_as)\n excess=len(encoded_word)-remaining_space\n while excess >0:\n \n \n to_encode_word=to_encode_word[:-1]\n encoded_word=_ew.encode(to_encode_word,charset=encode_as)\n excess=len(encoded_word)-remaining_space\n lines[-1]+=encoded_word\n to_encode=to_encode[len(to_encode_word):]\n \n if to_encode:\n lines.append(' ')\n new_last_ew=len(lines[-1])\n lines[-1]+=trailing_wsp\n return new_last_ew if ew_combine_allowed else None\n \ndef _fold_mime_parameters(part,lines,maxlen,encoding):\n ''\n\n\n\n\n\n\n \n \n \n \n \n \n \n for name,value in part.params:\n \n \n \n \n \n if 
not lines[-1].rstrip().endswith(';'):\n lines[-1]+=';'\n charset=encoding\n error_handler='strict'\n try:\n value.encode(encoding)\n encoding_required=False\n except UnicodeEncodeError:\n encoding_required=True\n if utils._has_surrogates(value):\n charset='unknown-8bit'\n error_handler='surrogateescape'\n else:\n charset='utf-8'\n if encoding_required:\n encoded_value=urllib.parse.quote(\n value,safe='',errors=error_handler)\n tstr=\"{}*={}''{}\".format(name,charset,encoded_value)\n else:\n tstr='{}={}'.format(name,quote_string(value))\n if len(lines[-1])+len(tstr)+1 1:\n \n \n raise ValueError(\"Header values may not contain linefeed \"\n \"or carriage return characters\")\n return (name,self.header_factory(name,value))\n \n def header_fetch_parse(self,name,value):\n ''\n\n\n\n\n\n\n \n if hasattr(value,'name'):\n return value\n \n value=''.join(linesep_splitter.split(value))\n return self.header_factory(name,value)\n \n def fold(self,name,value):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return self._fold(name,value,refold_binary=True )\n \n def fold_binary(self,name,value):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n folded=self._fold(name,value,refold_binary=self.cte_type =='7bit')\n charset='utf8'if self.utf8 else 'ascii'\n return folded.encode(charset,'surrogateescape')\n \n def _fold(self,name,value,refold_binary=False ):\n if hasattr(value,'name'):\n return value.fold(policy=self)\n maxlen=self.max_line_length if self.max_line_length else sys.maxsize\n lines=value.splitlines()\n refold=(self.refold_source =='all'or\n self.refold_source =='long'and\n (lines and len(lines[0])+len(name)+2 >maxlen or\n any(len(x)>maxlen for x in lines[1:])))\n if refold or refold_binary and _has_surrogates(value):\n return self.header_factory(name,''.join(lines)).fold(policy=self)\n return name+': '+self.linesep.join(lines)+self.linesep\n \n \ndefault=EmailPolicy()\n\ndel default.header_factory\nstrict=default.clone(raise_on_defect=True )\nSMTP=default.clone(linesep='\\r\\n')\nHTTP=default.clone(linesep='\\r\\n',max_line_length=None )\nSMTPUTF8=SMTP.clone(utf8=True )\n", ["email._policybase", "email.contentmanager", "email.headerregistry", "email.message", "email.utils", "re", "sys"]], "email": [".py", "\n\n\n\n\"\"\"A package for parsing, handling, and generating email messages.\"\"\"\n\n__all__=[\n'base64mime',\n'charset',\n'encoders',\n'errors',\n'feedparser',\n'generator',\n'header',\n'iterators',\n'message',\n'message_from_file',\n'message_from_binary_file',\n'message_from_string',\n'message_from_bytes',\n'mime',\n'parser',\n'quoprimime',\n'utils',\n]\n\n\n\n\n\ndef message_from_string(s,*args,**kws):\n ''\n\n\n \n from email.parser import Parser\n return Parser(*args,**kws).parsestr(s)\n \ndef message_from_bytes(s,*args,**kws):\n ''\n\n\n \n from email.parser import BytesParser\n return BytesParser(*args,**kws).parsebytes(s)\n \ndef message_from_file(fp,*args,**kws):\n ''\n\n\n \n from email.parser import Parser\n return Parser(*args,**kws).parse(fp)\n \ndef message_from_binary_file(fp,*args,**kws):\n ''\n\n\n \n from email.parser import BytesParser\n return BytesParser(*args,**kws).parse(fp)\n", ["email.parser"], 1], "email.message": [".py", "\n\n\n\n\"\"\"Basic message object for the email package object model.\"\"\"\n\n__all__=['Message','EmailMessage']\n\nimport binascii\nimport re\nimport quopri\nfrom io import BytesIO,StringIO\n\n\nfrom email import utils\nfrom email import errors\nfrom email._policybase import compat32\nfrom email import charset as _charset\nfrom email._encoded_words import 
decode_b\nCharset=_charset.Charset\n\nSEMISPACE='; '\n\n\n\ntspecials=re.compile(r'[ \\(\\)<>@,;:\\\\\"/\\[\\]\\?=]')\n\n\ndef _splitparam(param):\n\n\n\n\n a,sep,b=str(param).partition(';')\n if not sep:\n return a.strip(),None\n return a.strip(),b.strip()\n \ndef _formatparam(param,value=None,quote=True):\n ''\n\n\n\n\n\n\n \n if value is not None and len(value)>0:\n \n \n \n if isinstance(value,tuple):\n \n param +='*'\n value=utils.encode_rfc2231(value[2],value[0],value[1])\n return '%s=%s'%(param,value)\n else:\n try:\n value.encode('ascii')\n except UnicodeEncodeError:\n param +='*'\n value=utils.encode_rfc2231(value,'utf-8','')\n return '%s=%s'%(param,value)\n \n \n if quote or tspecials.search(value):\n return '%s=\"%s\"'%(param,utils.quote(value))\n else:\n return '%s=%s'%(param,value)\n else:\n return param\n \ndef _parseparam(s):\n\n s=';'+str(s)\n plist=[]\n while s[:1]==';':\n s=s[1:]\n end=s.find(';')\n while end >0 and(s.count('\"',0,end)-s.count('\\\\\"',0,end))%2:\n end=s.find(';',end+1)\n if end <0:\n end=len(s)\n f=s[:end]\n if '='in f:\n i=f.index('=')\n f=f[:i].strip().lower()+'='+f[i+1:].strip()\n plist.append(f.strip())\n s=s[end:]\n return plist\n \n \ndef _unquotevalue(value):\n\n\n\n\n if isinstance(value,tuple):\n return value[0],value[1],utils.unquote(value[2])\n else:\n return utils.unquote(value)\n \n \ndef _decode_uu(encoded):\n ''\n decoded_lines=[]\n encoded_lines_iter=iter(encoded.splitlines())\n for line in encoded_lines_iter:\n if line.startswith(b\"begin \"):\n mode,_,path=line.removeprefix(b\"begin \").partition(b\" \")\n try:\n int(mode,base=8)\n except ValueError:\n continue\n else:\n break\n else:\n raise ValueError(\"`begin` line not found\")\n for line in encoded_lines_iter:\n if not line:\n raise ValueError(\"Truncated input\")\n elif line.strip(b' \\t\\r\\n\\f')==b'end':\n break\n try:\n decoded_line=binascii.a2b_uu(line)\n except binascii.Error:\n \n nbytes=(((line[0]-32)&63)*4+5)//3\n decoded_line=binascii.a2b_uu(line[:nbytes])\n decoded_lines.append(decoded_line)\n \n return b''.join(decoded_lines)\n \n \nclass Message:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,policy=compat32):\n self.policy=policy\n self._headers=[]\n self._unixfrom=None\n self._payload=None\n self._charset=None\n \n self.preamble=self.epilogue=None\n self.defects=[]\n \n self._default_type='text/plain'\n \n def __str__(self):\n ''\n \n return self.as_string()\n \n def as_string(self,unixfrom=False,maxheaderlen=0,policy=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n from email.generator import Generator\n policy=self.policy if policy is None else policy\n fp=StringIO()\n g=Generator(fp,\n mangle_from_=False,\n maxheaderlen=maxheaderlen,\n policy=policy)\n g.flatten(self,unixfrom=unixfrom)\n return fp.getvalue()\n \n def __bytes__(self):\n ''\n \n return self.as_bytes()\n \n def as_bytes(self,unixfrom=False,policy=None):\n ''\n\n\n\n\n\n \n from email.generator import BytesGenerator\n policy=self.policy if policy is None else policy\n fp=BytesIO()\n g=BytesGenerator(fp,mangle_from_=False,policy=policy)\n g.flatten(self,unixfrom=unixfrom)\n return fp.getvalue()\n \n def is_multipart(self):\n ''\n return isinstance(self._payload,list)\n \n \n \n \n def set_unixfrom(self,unixfrom):\n self._unixfrom=unixfrom\n \n def get_unixfrom(self):\n return self._unixfrom\n \n \n \n \n def attach(self,payload):\n ''\n\n\n\n\n \n if self._payload is None:\n self._payload=[payload]\n else:\n try:\n self._payload.append(payload)\n except AttributeError:\n raise TypeError(\"Attach is not 
valid on a message with a\"\n \" non-multipart payload\")\n \n def get_payload(self,i=None,decode=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.is_multipart():\n if decode:\n return None\n if i is None:\n return self._payload\n else:\n return self._payload[i]\n \n \n if i is not None and not isinstance(self._payload,list):\n raise TypeError('Expected list, got %s'%type(self._payload))\n payload=self._payload\n \n cte=str(self.get('content-transfer-encoding','')).lower()\n \n if isinstance(payload,str):\n if utils._has_surrogates(payload):\n bpayload=payload.encode('ascii','surrogateescape')\n if not decode:\n try:\n payload=bpayload.decode(self.get_param('charset','ascii'),'replace')\n except LookupError:\n payload=bpayload.decode('ascii','replace')\n elif decode:\n try:\n bpayload=payload.encode('ascii')\n except UnicodeError:\n \n \n \n \n bpayload=payload.encode('raw-unicode-escape')\n if not decode:\n return payload\n if cte =='quoted-printable':\n return quopri.decodestring(bpayload)\n elif cte =='base64':\n \n \n value,defects=decode_b(b''.join(bpayload.splitlines()))\n for defect in defects:\n self.policy.handle_defect(self,defect)\n return value\n elif cte in('x-uuencode','uuencode','uue','x-uue'):\n try:\n return _decode_uu(bpayload)\n except ValueError:\n \n return bpayload\n if isinstance(payload,str):\n return bpayload\n return payload\n \n def set_payload(self,payload,charset=None):\n ''\n\n\n\n \n if hasattr(payload,'encode'):\n if charset is None:\n self._payload=payload\n return\n if not isinstance(charset,Charset):\n charset=Charset(charset)\n payload=payload.encode(charset.output_charset)\n if hasattr(payload,'decode'):\n self._payload=payload.decode('ascii','surrogateescape')\n else:\n self._payload=payload\n if charset is not None:\n self.set_charset(charset)\n \n def set_charset(self,charset):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if charset is None:\n self.del_param('charset')\n self._charset=None\n return\n if not isinstance(charset,Charset):\n charset=Charset(charset)\n self._charset=charset\n if 'MIME-Version'not in self:\n self.add_header('MIME-Version','1.0')\n if 'Content-Type'not in self:\n self.add_header('Content-Type','text/plain',\n charset=charset.get_output_charset())\n else:\n self.set_param('charset',charset.get_output_charset())\n if charset !=charset.get_output_charset():\n self._payload=charset.body_encode(self._payload)\n if 'Content-Transfer-Encoding'not in self:\n cte=charset.get_body_encoding()\n try:\n cte(self)\n except TypeError:\n \n \n \n payload=self._payload\n if payload:\n try:\n payload=payload.encode('ascii','surrogateescape')\n except UnicodeError:\n payload=payload.encode(charset.output_charset)\n self._payload=charset.body_encode(payload)\n self.add_header('Content-Transfer-Encoding',cte)\n \n def get_charset(self):\n ''\n \n return self._charset\n \n \n \n \n def __len__(self):\n ''\n return len(self._headers)\n \n def __getitem__(self,name):\n ''\n\n\n\n\n\n\n \n return self.get(name)\n \n def __setitem__(self,name,val):\n ''\n\n\n\n \n max_count=self.policy.header_max_count(name)\n if max_count:\n lname=name.lower()\n found=0\n for k,v in self._headers:\n if k.lower()==lname:\n found +=1\n if found >=max_count:\n raise ValueError(\"There may be at most {} {} headers \"\n \"in a message\".format(max_count,name))\n self._headers.append(self.policy.header_store_parse(name,val))\n \n def __delitem__(self,name):\n ''\n\n\n \n name=name.lower()\n newheaders=[]\n for k,v in 
self._headers:\n if k.lower()!=name:\n newheaders.append((k,v))\n self._headers=newheaders\n \n def __contains__(self,name):\n name_lower=name.lower()\n for k,v in self._headers:\n if name_lower ==k.lower():\n return True\n return False\n \n def __iter__(self):\n for field,value in self._headers:\n yield field\n \n def keys(self):\n ''\n\n\n\n\n\n \n return[k for k,v in self._headers]\n \n def values(self):\n ''\n\n\n\n\n\n \n return[self.policy.header_fetch_parse(k,v)\n for k,v in self._headers]\n \n def items(self):\n ''\n\n\n\n\n\n \n return[(k,self.policy.header_fetch_parse(k,v))\n for k,v in self._headers]\n \n def get(self,name,failobj=None):\n ''\n\n\n\n \n name=name.lower()\n for k,v in self._headers:\n if k.lower()==name:\n return self.policy.header_fetch_parse(k,v)\n return failobj\n \n \n \n \n \n \n def set_raw(self,name,value):\n ''\n\n\n \n self._headers.append((name,value))\n \n def raw_items(self):\n ''\n\n\n \n return iter(self._headers.copy())\n \n \n \n \n \n def get_all(self,name,failobj=None):\n ''\n\n\n\n\n\n\n \n values=[]\n name=name.lower()\n for k,v in self._headers:\n if k.lower()==name:\n values.append(self.policy.header_fetch_parse(k,v))\n if not values:\n return failobj\n return values\n \n def add_header(self,_name,_value,**_params):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n parts=[]\n for k,v in _params.items():\n if v is None:\n parts.append(k.replace('_','-'))\n else:\n parts.append(_formatparam(k.replace('_','-'),v))\n if _value is not None:\n parts.insert(0,_value)\n self[_name]=SEMISPACE.join(parts)\n \n def replace_header(self,_name,_value):\n ''\n\n\n\n\n \n _name=_name.lower()\n for i,(k,v)in zip(range(len(self._headers)),self._headers):\n if k.lower()==_name:\n self._headers[i]=self.policy.header_store_parse(k,_value)\n break\n else:\n raise KeyError(_name)\n \n \n \n \n \n def get_content_type(self):\n ''\n\n\n\n\n\n\n\n\n\n\n \n missing=object()\n value=self.get('content-type',missing)\n if value is missing:\n \n return self.get_default_type()\n ctype=_splitparam(value)[0].lower()\n \n if ctype.count('/')!=1:\n return 'text/plain'\n return ctype\n \n def get_content_maintype(self):\n ''\n\n\n\n \n ctype=self.get_content_type()\n return ctype.split('/')[0]\n \n def get_content_subtype(self):\n ''\n\n\n\n \n ctype=self.get_content_type()\n return ctype.split('/')[1]\n \n def get_default_type(self):\n ''\n\n\n\n\n \n return self._default_type\n \n def set_default_type(self,ctype):\n ''\n\n\n\n\n \n self._default_type=ctype\n \n def _get_params_preserve(self,failobj,header):\n \n \n missing=object()\n value=self.get(header,missing)\n if value is missing:\n return failobj\n params=[]\n for p in _parseparam(value):\n try:\n name,val=p.split('=',1)\n name=name.strip()\n val=val.strip()\n except ValueError:\n \n name=p.strip()\n val=''\n params.append((name,val))\n params=utils.decode_params(params)\n return params\n \n def get_params(self,failobj=None,header='content-type',unquote=True):\n ''\n\n\n\n\n\n\n\n\n\n\n \n missing=object()\n params=self._get_params_preserve(missing,header)\n if params is missing:\n return failobj\n if unquote:\n return[(k,_unquotevalue(v))for k,v in params]\n else:\n return params\n \n def get_param(self,param,failobj=None,header='content-type',\n unquote=True):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if header not in self:\n return failobj\n for k,v in self._get_params_preserve(failobj,header):\n if k.lower()==param.lower():\n if unquote:\n return _unquotevalue(v)\n else:\n return v\n return failobj\n \n def 
set_param(self,param,value,header='Content-Type',requote=True,\n charset=None,language='',replace=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not isinstance(value,tuple)and charset:\n value=(charset,language,value)\n \n if header not in self and header.lower()=='content-type':\n ctype='text/plain'\n else:\n ctype=self.get(header)\n if not self.get_param(param,header=header):\n if not ctype:\n ctype=_formatparam(param,value,requote)\n else:\n ctype=SEMISPACE.join(\n [ctype,_formatparam(param,value,requote)])\n else:\n ctype=''\n for old_param,old_value in self.get_params(header=header,\n unquote=requote):\n append_param=''\n if old_param.lower()==param.lower():\n append_param=_formatparam(param,value,requote)\n else:\n append_param=_formatparam(old_param,old_value,requote)\n if not ctype:\n ctype=append_param\n else:\n ctype=SEMISPACE.join([ctype,append_param])\n if ctype !=self.get(header):\n if replace:\n self.replace_header(header,ctype)\n else:\n del self[header]\n self[header]=ctype\n \n def del_param(self,param,header='content-type',requote=True):\n ''\n\n\n\n\n\n \n if header not in self:\n return\n new_ctype=''\n for p,v in self.get_params(header=header,unquote=requote):\n if p.lower()!=param.lower():\n if not new_ctype:\n new_ctype=_formatparam(p,v,requote)\n else:\n new_ctype=SEMISPACE.join([new_ctype,\n _formatparam(p,v,requote)])\n if new_ctype !=self.get(header):\n del self[header]\n self[header]=new_ctype\n \n def set_type(self,type,header='Content-Type',requote=True):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if not type.count('/')==1:\n raise ValueError\n \n if header.lower()=='content-type':\n del self['mime-version']\n self['MIME-Version']='1.0'\n if header not in self:\n self[header]=type\n return\n params=self.get_params(header=header,unquote=requote)\n del self[header]\n self[header]=type\n \n for p,v in params[1:]:\n self.set_param(p,v,header,requote)\n \n def get_filename(self,failobj=None):\n ''\n\n\n\n\n\n \n missing=object()\n filename=self.get_param('filename',missing,'content-disposition')\n if filename is missing:\n filename=self.get_param('name',missing,'content-type')\n if filename is missing:\n return failobj\n return utils.collapse_rfc2231_value(filename).strip()\n \n def get_boundary(self,failobj=None):\n ''\n\n\n\n \n missing=object()\n boundary=self.get_param('boundary',missing)\n if boundary is missing:\n return failobj\n \n return utils.collapse_rfc2231_value(boundary).rstrip()\n \n def set_boundary(self,boundary):\n ''\n\n\n\n\n\n\n\n \n missing=object()\n params=self._get_params_preserve(missing,'content-type')\n if params is missing:\n \n \n raise errors.HeaderParseError('No Content-Type header found')\n newparams=[]\n foundp=False\n for pk,pv in params:\n if pk.lower()=='boundary':\n newparams.append(('boundary','\"%s\"'%boundary))\n foundp=True\n else:\n newparams.append((pk,pv))\n if not foundp:\n \n \n \n newparams.append(('boundary','\"%s\"'%boundary))\n \n newheaders=[]\n for h,v in self._headers:\n if h.lower()=='content-type':\n parts=[]\n for k,v in newparams:\n if v =='':\n parts.append(k)\n else:\n parts.append('%s=%s'%(k,v))\n val=SEMISPACE.join(parts)\n newheaders.append(self.policy.header_store_parse(h,val))\n \n else:\n newheaders.append((h,v))\n self._headers=newheaders\n \n def get_content_charset(self,failobj=None):\n ''\n\n\n\n\n \n missing=object()\n charset=self.get_param('charset',missing)\n if charset is missing:\n return failobj\n if isinstance(charset,tuple):\n \n pcharset=charset[0]or 'us-ascii'\n try:\n \n \n \n 
as_bytes=charset[2].encode('raw-unicode-escape')\n charset=str(as_bytes,pcharset)\n except(LookupError,UnicodeError):\n charset=charset[2]\n \n try:\n charset.encode('us-ascii')\n except UnicodeError:\n return failobj\n \n return charset.lower()\n \n def get_charsets(self,failobj=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return[part.get_content_charset(failobj)for part in self.walk()]\n \n def get_content_disposition(self):\n ''\n\n\n\n \n value=self.get('content-disposition')\n if value is None:\n return None\n c_d=_splitparam(value)[0].lower()\n return c_d\n \n \n from email.iterators import walk\n \n \nclass MIMEPart(Message):\n\n def __init__(self,policy=None):\n if policy is None:\n from email.policy import default\n policy=default\n super().__init__(policy)\n \n \n def as_string(self,unixfrom=False,maxheaderlen=None,policy=None):\n ''\n\n\n\n\n\n\n\n\n \n policy=self.policy if policy is None else policy\n if maxheaderlen is None:\n maxheaderlen=policy.max_line_length\n return super().as_string(unixfrom,maxheaderlen,policy)\n \n def __str__(self):\n return self.as_string(policy=self.policy.clone(utf8=True))\n \n def is_attachment(self):\n c_d=self.get('content-disposition')\n return False if c_d is None else c_d.content_disposition =='attachment'\n \n def _find_body(self,part,preferencelist):\n if part.is_attachment():\n return\n maintype,subtype=part.get_content_type().split('/')\n if maintype =='text':\n if subtype in preferencelist:\n yield(preferencelist.index(subtype),part)\n return\n if maintype !='multipart'or not self.is_multipart():\n return\n if subtype !='related':\n for subpart in part.iter_parts():\n yield from self._find_body(subpart,preferencelist)\n return\n if 'related'in preferencelist:\n yield(preferencelist.index('related'),part)\n candidate=None\n start=part.get_param('start')\n if start:\n for subpart in part.iter_parts():\n if subpart['content-id']==start:\n candidate=subpart\n break\n if candidate is None:\n subparts=part.get_payload()\n candidate=subparts[0]if subparts else None\n if candidate is not None:\n yield from self._find_body(candidate,preferencelist)\n \n def get_body(self,preferencelist=('related','html','plain')):\n ''\n\n\n\n\n\n\n\n \n best_prio=len(preferencelist)\n body=None\n for prio,part in self._find_body(self,preferencelist):\n if prio From ',payload)\n self._write_lines(payload)\n \n \n _writeBody=_handle_text\n \n def _handle_multipart(self,msg):\n \n \n \n msgtexts=[]\n subparts=msg.get_payload()\n if subparts is None:\n subparts=[]\n elif isinstance(subparts,str):\n \n self.write(subparts)\n return\n elif not isinstance(subparts,list):\n \n subparts=[subparts]\n for part in subparts:\n s=self._new_buffer()\n g=self.clone(s)\n g.flatten(part,unixfrom=False,linesep=self._NL)\n msgtexts.append(s.getvalue())\n \n boundary=msg.get_boundary()\n if not boundary:\n \n \n alltext=self._encoded_NL.join(msgtexts)\n boundary=self._make_boundary(alltext)\n msg.set_boundary(boundary)\n \n if msg.preamble is not None:\n if self._mangle_from_:\n preamble=fcre.sub('>From ',msg.preamble)\n else:\n preamble=msg.preamble\n self._write_lines(preamble)\n self.write(self._NL)\n \n self.write('--'+boundary+self._NL)\n \n if msgtexts:\n self._fp.write(msgtexts.pop(0))\n \n \n \n for body_part in msgtexts:\n \n self.write(self._NL+'--'+boundary+self._NL)\n \n self._fp.write(body_part)\n \n self.write(self._NL+'--'+boundary+'--'+self._NL)\n if msg.epilogue is not None:\n if self._mangle_from_:\n epilogue=fcre.sub('>From ',msg.epilogue)\n else:\n 
epilogue=msg.epilogue\n self._write_lines(epilogue)\n \n def _handle_multipart_signed(self,msg):\n \n \n \n p=self.policy\n self.policy=p.clone(max_line_length=0)\n try:\n self._handle_multipart(msg)\n finally:\n self.policy=p\n \n def _handle_message_delivery_status(self,msg):\n \n \n \n blocks=[]\n for part in msg.get_payload():\n s=self._new_buffer()\n g=self.clone(s)\n g.flatten(part,unixfrom=False,linesep=self._NL)\n text=s.getvalue()\n lines=text.split(self._encoded_NL)\n \n if lines and lines[-1]==self._encoded_EMPTY:\n blocks.append(self._encoded_NL.join(lines[:-1]))\n else:\n blocks.append(text)\n \n \n \n self._fp.write(self._encoded_NL.join(blocks))\n \n def _handle_message(self,msg):\n s=self._new_buffer()\n g=self.clone(s)\n \n \n \n \n \n \n \n \n \n payload=msg._payload\n if isinstance(payload,list):\n g.flatten(msg.get_payload(0),unixfrom=False,linesep=self._NL)\n payload=s.getvalue()\n else:\n payload=self._encode(payload)\n self._fp.write(payload)\n \n \n \n \n \n \n @classmethod\n def _make_boundary(cls,text=None):\n \n \n token=random.randrange(sys.maxsize)\n boundary=('='*15)+(_fmt %token)+'=='\n if text is None:\n return boundary\n b=boundary\n counter=0\n while True:\n cre=cls._compile_re('^--'+re.escape(b)+'(--)?$',re.MULTILINE)\n if not cre.search(text):\n break\n b=boundary+'.'+str(counter)\n counter +=1\n return b\n \n @classmethod\n def _compile_re(cls,s,flags):\n return re.compile(s,flags)\n \n \nclass BytesGenerator(Generator):\n ''\n\n\n\n\n\n\n\n\n\n \n \n def write(self,s):\n self._fp.write(s.encode('ascii','surrogateescape'))\n \n def _new_buffer(self):\n return BytesIO()\n \n def _encode(self,s):\n return s.encode('ascii')\n \n def _write_headers(self,msg):\n \n \n for h,v in msg.raw_items():\n self._fp.write(self.policy.fold_binary(h,v))\n \n self.write(self._NL)\n \n def _handle_text(self,msg):\n \n \n if msg._payload is None:\n return\n if _has_surrogates(msg._payload)and not self.policy.cte_type =='7bit':\n if self._mangle_from_:\n msg._payload=fcre.sub(\">From \",msg._payload)\n self._write_lines(msg._payload)\n else:\n super(BytesGenerator,self)._handle_text(msg)\n \n \n _writeBody=_handle_text\n \n @classmethod\n def _compile_re(cls,s,flags):\n return re.compile(s.encode('ascii'),flags)\n \n \n_FMT='[Non-text (%(type)s) part of message omitted, filename %(filename)s]'\n\nclass DecodedGenerator(Generator):\n ''\n\n\n\n \n def __init__(self,outfp,mangle_from_=None,maxheaderlen=None,fmt=None,*,\n policy=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n Generator.__init__(self,outfp,mangle_from_,maxheaderlen,\n policy=policy)\n if fmt is None:\n self._fmt=_FMT\n else:\n self._fmt=fmt\n \n def _dispatch(self,msg):\n for part in msg.walk():\n maintype=part.get_content_maintype()\n if maintype =='text':\n print(part.get_payload(decode=False),file=self)\n elif maintype =='multipart':\n \n pass\n else:\n print(self._fmt %{\n 'type':part.get_content_type(),\n 'maintype':part.get_content_maintype(),\n 'subtype':part.get_content_subtype(),\n 'filename':part.get_filename('[no filename]'),\n 'description':part.get('Content-Description',\n '[no description]'),\n 'encoding':part.get('Content-Transfer-Encoding',\n '[no encoding]'),\n },file=self)\n \n \n \n_width=len(repr(sys.maxsize -1))\n_fmt='%%0%dd'%_width\n\n\n_make_boundary=Generator._make_boundary\n", ["copy", "email.utils", "io", "random", "re", "sys", "time"]], "email.utils": [".py", "\n\n\n\n\"\"\"Miscellaneous 
utilities.\"\"\"\n\n__all__=[\n'collapse_rfc2231_value',\n'decode_params',\n'decode_rfc2231',\n'encode_rfc2231',\n'formataddr',\n'formatdate',\n'format_datetime',\n'getaddresses',\n'make_msgid',\n'mktime_tz',\n'parseaddr',\n'parsedate',\n'parsedate_tz',\n'parsedate_to_datetime',\n'unquote',\n]\n\nimport os\nimport re\nimport time\nimport random\nimport socket\nimport datetime\nimport urllib.parse\n\nfrom email._parseaddr import quote\nfrom email._parseaddr import AddressList as _AddressList\nfrom email._parseaddr import mktime_tz\n\nfrom email._parseaddr import parsedate,parsedate_tz,_parsedate_tz\n\n\nfrom email.charset import Charset\n\nCOMMASPACE=', '\nEMPTYSTRING=''\nUEMPTYSTRING=''\nCRLF='\\r\\n'\nTICK=\"'\"\n\nspecialsre=re.compile(r'[][\\\\()<>@,:;\".]')\nescapesre=re.compile(r'[\\\\\"]')\n\ndef _has_surrogates(s):\n ''\n \n \n \n try:\n s.encode()\n return False\n except UnicodeEncodeError:\n return True\n \n \n \ndef _sanitize(string):\n\n\n\n\n original_bytes=string.encode('utf-8','surrogateescape')\n return original_bytes.decode('utf-8','replace')\n \n \n \n \n \ndef formataddr(pair,charset='utf-8'):\n ''\n\n\n\n\n\n\n\n\n\n\n \n name,address=pair\n \n address.encode('ascii')\n if name:\n try:\n name.encode('ascii')\n except UnicodeEncodeError:\n if isinstance(charset,str):\n charset=Charset(charset)\n encoded_name=charset.header_encode(name)\n return \"%s <%s>\"%(encoded_name,address)\n else:\n quotes=''\n if specialsre.search(name):\n quotes='\"'\n name=escapesre.sub(r'\\\\\\g<0>',name)\n return '%s%s%s <%s>'%(quotes,name,quotes,address)\n return address\n \n \n \ndef getaddresses(fieldvalues):\n ''\n all=COMMASPACE.join(str(v)for v in fieldvalues)\n a=_AddressList(all)\n return a.addresslist\n \n \ndef _format_timetuple_and_zone(timetuple,zone):\n return '%s, %02d %s %04d %02d:%02d:%02d %s'%(\n ['Mon','Tue','Wed','Thu','Fri','Sat','Sun'][timetuple[6]],\n timetuple[2],\n ['Jan','Feb','Mar','Apr','May','Jun',\n 'Jul','Aug','Sep','Oct','Nov','Dec'][timetuple[1]-1],\n timetuple[0],timetuple[3],timetuple[4],timetuple[5],\n zone)\n \ndef formatdate(timeval=None,localtime=False,usegmt=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n if timeval is None:\n timeval=time.time()\n dt=datetime.datetime.fromtimestamp(timeval,datetime.timezone.utc)\n \n if localtime:\n dt=dt.astimezone()\n usegmt=False\n elif not usegmt:\n dt=dt.replace(tzinfo=None)\n return format_datetime(dt,usegmt)\n \ndef format_datetime(dt,usegmt=False):\n ''\n\n\n\n\n \n now=dt.timetuple()\n if usegmt:\n if dt.tzinfo is None or dt.tzinfo !=datetime.timezone.utc:\n raise ValueError(\"usegmt option requires a UTC datetime\")\n zone='GMT'\n elif dt.tzinfo is None:\n zone='-0000'\n else:\n zone=dt.strftime(\"%z\")\n return _format_timetuple_and_zone(now,zone)\n \n \ndef make_msgid(idstring=None,domain=None):\n ''\n\n\n\n\n\n\n\n \n timeval=int(time.time()*100)\n pid=os.getpid()\n randint=random.getrandbits(64)\n if idstring is None:\n idstring=''\n else:\n idstring='.'+idstring\n if domain is None:\n domain=socket.getfqdn()\n msgid='<%d.%d.%d%s@%s>'%(timeval,pid,randint,idstring,domain)\n return msgid\n \n \ndef parsedate_to_datetime(data):\n parsed_date_tz=_parsedate_tz(data)\n if parsed_date_tz is None:\n raise ValueError('Invalid date value or format \"%s\"'%str(data))\n *dtuple,tz=parsed_date_tz\n if tz is None:\n return datetime.datetime(*dtuple[:6])\n return datetime.datetime(*dtuple[:6],\n tzinfo=datetime.timezone(datetime.timedelta(seconds=tz)))\n \n \ndef parseaddr(addr):\n ''\n\n\n\n\n \n 
addrs=_AddressList(addr).addresslist\n if not addrs:\n return '',''\n return addrs[0]\n \n \n \ndef unquote(str):\n ''\n if len(str)>1:\n if str.startswith('\"')and str.endswith('\"'):\n return str[1:-1].replace('\\\\\\\\','\\\\').replace('\\\\\"','\"')\n if str.startswith('<')and str.endswith('>'):\n return str[1:-1]\n return str\n \n \n \n \ndef decode_rfc2231(s):\n ''\n parts=s.split(TICK,2)\n if len(parts)<=2:\n return None,None,s\n return parts\n \n \ndef encode_rfc2231(s,charset=None,language=None):\n ''\n\n\n\n\n \n s=urllib.parse.quote(s,safe='',encoding=charset or 'ascii')\n if charset is None and language is None:\n return s\n if language is None:\n language=''\n return \"%s'%s'%s\"%(charset,language,s)\n \n \nrfc2231_continuation=re.compile(r'^(?P\\w+)\\*((?P[0-9]+)\\*?)?$',\nre.ASCII)\n\ndef decode_params(params):\n ''\n\n\n \n new_params=[params[0]]\n \n \n \n rfc2231_params={}\n for name,value in params[1:]:\n encoded=name.endswith('*')\n value=unquote(value)\n mo=rfc2231_continuation.match(name)\n if mo:\n name,num=mo.group('name','num')\n if num is not None:\n num=int(num)\n rfc2231_params.setdefault(name,[]).append((num,value,encoded))\n else:\n new_params.append((name,'\"%s\"'%quote(value)))\n if rfc2231_params:\n for name,continuations in rfc2231_params.items():\n value=[]\n extended=False\n \n continuations.sort()\n \n \n \n \n \n for num,s,encoded in continuations:\n if encoded:\n \n \n \n s=urllib.parse.unquote(s,encoding=\"latin-1\")\n extended=True\n value.append(s)\n value=quote(EMPTYSTRING.join(value))\n if extended:\n charset,language,value=decode_rfc2231(value)\n new_params.append((name,(charset,language,'\"%s\"'%value)))\n else:\n new_params.append((name,'\"%s\"'%value))\n return new_params\n \ndef collapse_rfc2231_value(value,errors='replace',\nfallback_charset='us-ascii'):\n if not isinstance(value,tuple)or len(value)!=3:\n return unquote(value)\n \n \n \n charset,language,text=value\n if charset is None:\n \n \n charset=fallback_charset\n rawbytes=bytes(text,'raw-unicode-escape')\n try:\n return str(rawbytes,charset,errors)\n except LookupError:\n \n return unquote(text)\n \n \n \n \n \n \n \n \ndef localtime(dt=None,isdst=None):\n ''\n\n\n\n\n\n\n\n \n if isdst is not None:\n import warnings\n warnings._deprecated(\n \"The 'isdst' parameter to 'localtime'\",\n message='{name} is deprecated and slated for removal in Python {remove}',\n remove=(3,14),\n )\n if dt is None:\n dt=datetime.datetime.now()\n return dt.astimezone()\n", ["datetime", "email._parseaddr", "email.charset", "os", "random", "re", "socket", "time", "urllib.parse", "warnings"]], "email.charset": [".py", "\n\n\n\n__all__=[\n'Charset',\n'add_alias',\n'add_charset',\n'add_codec',\n]\n\nfrom functools import partial\n\nimport email.base64mime\nimport email.quoprimime\n\nfrom email import errors\nfrom email.encoders import 
encode_7or8bit\n\n\n\nQP=1\nBASE64=2\nSHORTEST=3\n\n\nRFC2047_CHROME_LEN=7\n\nDEFAULT_CHARSET='us-ascii'\nUNKNOWN8BIT='unknown-8bit'\nEMPTYSTRING=''\n\n\n\nCHARSETS={\n\n'iso-8859-1':(QP,QP,None),\n'iso-8859-2':(QP,QP,None),\n'iso-8859-3':(QP,QP,None),\n'iso-8859-4':(QP,QP,None),\n\n\n\n\n'iso-8859-9':(QP,QP,None),\n'iso-8859-10':(QP,QP,None),\n\n'iso-8859-13':(QP,QP,None),\n'iso-8859-14':(QP,QP,None),\n'iso-8859-15':(QP,QP,None),\n'iso-8859-16':(QP,QP,None),\n'windows-1252':(QP,QP,None),\n'viscii':(QP,QP,None),\n'us-ascii':(None,None,None),\n'big5':(BASE64,BASE64,None),\n'gb2312':(BASE64,BASE64,None),\n'euc-jp':(BASE64,None,'iso-2022-jp'),\n'shift_jis':(BASE64,None,'iso-2022-jp'),\n'iso-2022-jp':(BASE64,None,None),\n'koi8-r':(BASE64,BASE64,None),\n'utf-8':(SHORTEST,BASE64,'utf-8'),\n}\n\n\n\nALIASES={\n'latin_1':'iso-8859-1',\n'latin-1':'iso-8859-1',\n'latin_2':'iso-8859-2',\n'latin-2':'iso-8859-2',\n'latin_3':'iso-8859-3',\n'latin-3':'iso-8859-3',\n'latin_4':'iso-8859-4',\n'latin-4':'iso-8859-4',\n'latin_5':'iso-8859-9',\n'latin-5':'iso-8859-9',\n'latin_6':'iso-8859-10',\n'latin-6':'iso-8859-10',\n'latin_7':'iso-8859-13',\n'latin-7':'iso-8859-13',\n'latin_8':'iso-8859-14',\n'latin-8':'iso-8859-14',\n'latin_9':'iso-8859-15',\n'latin-9':'iso-8859-15',\n'latin_10':'iso-8859-16',\n'latin-10':'iso-8859-16',\n'cp949':'ks_c_5601-1987',\n'euc_jp':'euc-jp',\n'euc_kr':'euc-kr',\n'ascii':'us-ascii',\n}\n\n\n\nCODEC_MAP={\n'gb2312':'eucgb2312_cn',\n'big5':'big5_tw',\n\n\n\n'us-ascii':None,\n}\n\n\n\ndef add_charset(charset,header_enc=None,body_enc=None,output_charset=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if body_enc ==SHORTEST:\n raise ValueError('SHORTEST not allowed for body_enc')\n CHARSETS[charset]=(header_enc,body_enc,output_charset)\n \n \ndef add_alias(alias,canonical):\n ''\n\n\n\n \n ALIASES[alias]=canonical\n \n \ndef add_codec(charset,codecname):\n ''\n\n\n\n\n \n CODEC_MAP[charset]=codecname\n \n \n \n \ndef _encode(string,codec):\n if codec ==UNKNOWN8BIT:\n return string.encode('ascii','surrogateescape')\n else:\n return string.encode(codec)\n \n \nclass Charset:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,input_charset=DEFAULT_CHARSET):\n \n \n \n \n try:\n if isinstance(input_charset,str):\n input_charset.encode('ascii')\n else:\n input_charset=str(input_charset,'ascii')\n except UnicodeError:\n raise errors.CharsetError(input_charset)\n input_charset=input_charset.lower()\n \n self.input_charset=ALIASES.get(input_charset,input_charset)\n \n \n \n henc,benc,conv=CHARSETS.get(self.input_charset,\n (SHORTEST,BASE64,None))\n if not conv:\n conv=self.input_charset\n \n self.header_encoding=henc\n self.body_encoding=benc\n self.output_charset=ALIASES.get(conv,conv)\n \n \n self.input_codec=CODEC_MAP.get(self.input_charset,\n self.input_charset)\n self.output_codec=CODEC_MAP.get(self.output_charset,\n self.output_charset)\n \n def __repr__(self):\n return self.input_charset.lower()\n \n def __eq__(self,other):\n return str(self)==str(other).lower()\n \n def get_body_encoding(self):\n ''\n\n\n\n\n\n\n\n\n\n\n \n assert self.body_encoding !=SHORTEST\n if self.body_encoding ==QP:\n return 'quoted-printable'\n elif self.body_encoding ==BASE64:\n return 'base64'\n else:\n return encode_7or8bit\n \n def get_output_charset(self):\n ''\n\n\n\n \n return self.output_charset or self.input_charset\n \n def header_encode(self,string):\n ''\n\n\n\n\n\n\n\n\n \n codec=self.output_codec or 'us-ascii'\n 
header_bytes=_encode(string,codec)\n \n encoder_module=self._get_encoder(header_bytes)\n if encoder_module is None:\n return string\n return encoder_module.header_encode(header_bytes,codec)\n \n def header_encode_lines(self,string,maxlengths):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n codec=self.output_codec or 'us-ascii'\n header_bytes=_encode(string,codec)\n encoder_module=self._get_encoder(header_bytes)\n encoder=partial(encoder_module.header_encode,charset=codec)\n \n \n charset=self.get_output_charset()\n extra=len(charset)+RFC2047_CHROME_LEN\n \n \n \n \n \n \n \n \n \n \n \n lines=[]\n current_line=[]\n maxlen=next(maxlengths)-extra\n for character in string:\n current_line.append(character)\n this_line=EMPTYSTRING.join(current_line)\n length=encoder_module.header_length(_encode(this_line,charset))\n if length >maxlen:\n \n current_line.pop()\n \n if not lines and not current_line:\n lines.append(None)\n else:\n joined_line=EMPTYSTRING.join(current_line)\n header_bytes=_encode(joined_line,codec)\n lines.append(encoder(header_bytes))\n current_line=[character]\n maxlen=next(maxlengths)-extra\n joined_line=EMPTYSTRING.join(current_line)\n header_bytes=_encode(joined_line,codec)\n lines.append(encoder(header_bytes))\n return lines\n \n def _get_encoder(self,header_bytes):\n if self.header_encoding ==BASE64:\n return email.base64mime\n elif self.header_encoding ==QP:\n return email.quoprimime\n elif self.header_encoding ==SHORTEST:\n len64=email.base64mime.header_length(header_bytes)\n lenqp=email.quoprimime.header_length(header_bytes)\n if len64 '\nb'?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`'\nb'abcdefghijklmnopqrstuvwxyz{|}~\\t'):\n _QUOPRI_BODY_MAP[c]=chr(c)\n \n \n \n \ndef header_check(octet):\n ''\n return chr(octet)!=_QUOPRI_HEADER_MAP[octet]\n \n \ndef body_check(octet):\n ''\n return chr(octet)!=_QUOPRI_BODY_MAP[octet]\n \n \ndef header_length(bytearray):\n ''\n\n\n\n\n\n\n\n \n return sum(len(_QUOPRI_HEADER_MAP[octet])for octet in bytearray)\n \n \ndef body_length(bytearray):\n ''\n\n\n\n\n \n return sum(len(_QUOPRI_BODY_MAP[octet])for octet in bytearray)\n \n \ndef _max_append(L,s,maxlen,extra=''):\n if not isinstance(s,str):\n s=chr(s)\n if not L:\n L.append(s.lstrip())\n elif len(L[-1])+len(s)<=maxlen:\n L[-1]+=extra+s\n else:\n L.append(s.lstrip())\n \n \ndef unquote(s):\n ''\n return chr(int(s[1:3],16))\n \n \ndef quote(c):\n return _QUOPRI_MAP[ord(c)]\n \n \ndef header_encode(header_bytes,charset='iso-8859-1'):\n ''\n\n\n\n\n\n\n\n\n \n \n if not header_bytes:\n return ''\n \n encoded=header_bytes.decode('latin1').translate(_QUOPRI_HEADER_MAP)\n \n \n return '=?%s?q?%s?='%(charset,encoded)\n \n \n_QUOPRI_BODY_ENCODE_MAP=_QUOPRI_BODY_MAP[:]\nfor c in b'\\r\\n':\n _QUOPRI_BODY_ENCODE_MAP[c]=chr(c)\ndel c\n\ndef body_encode(body,maxlinelen=76,eol=NL):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if maxlinelen <4:\n raise ValueError(\"maxlinelen must be at least 4\")\n if not body:\n return body\n \n \n body=body.translate(_QUOPRI_BODY_ENCODE_MAP)\n \n soft_break='='+eol\n \n maxlinelen1=maxlinelen -1\n \n encoded_body=[]\n append=encoded_body.append\n \n for line in body.splitlines():\n \n start=0\n laststart=len(line)-1 -maxlinelen\n while start <=laststart:\n stop=start+maxlinelen1\n \n if line[stop -2]=='=':\n append(line[start:stop -1])\n start=stop -2\n elif line[stop -1]=='=':\n append(line[start:stop])\n start=stop -1\n else:\n append(line[start:stop]+'=')\n start=stop\n \n \n if line and line[-1]in ' \\t':\n room=start -laststart\n if room >=3:\n \n \n q=quote(line[-1])\n 
elif room ==2:\n \n q=line[-1]+soft_break\n else:\n \n \n q=soft_break+quote(line[-1])\n append(line[start:-1]+q)\n else:\n append(line[start:])\n \n \n if body[-1]in CRLF:\n append('')\n \n return eol.join(encoded_body)\n \n \n \n \n \ndef decode(encoded,eol=NL):\n ''\n\n\n \n if not encoded:\n return encoded\n \n \n \n decoded=''\n \n for line in encoded.splitlines():\n line=line.rstrip()\n if not line:\n decoded +=eol\n continue\n \n i=0\n n=len(line)\n while i '+re.escape(separator)+\n r')(?P--)?(?P[ \\t]*)(?P\\r\\n|\\r|\\n)?$')\n capturing_preamble=True\n preamble=[]\n linesep=False\n close_boundary_seen=False\n while True:\n line=self._input.readline()\n if line is NeedMoreData:\n yield NeedMoreData\n continue\n if line =='':\n break\n mo=boundaryre.match(line)\n if mo:\n \n \n \n \n if mo.group('end'):\n close_boundary_seen=True\n linesep=mo.group('linesep')\n break\n \n if capturing_preamble:\n if preamble:\n \n \n lastline=preamble[-1]\n eolmo=NLCRE_eol.search(lastline)\n if eolmo:\n preamble[-1]=lastline[:-len(eolmo.group(0))]\n self._cur.preamble=EMPTYSTRING.join(preamble)\n capturing_preamble=False\n self._input.unreadline(line)\n continue\n \n \n \n \n while True:\n line=self._input.readline()\n if line is NeedMoreData:\n yield NeedMoreData\n continue\n mo=boundaryre.match(line)\n if not mo:\n self._input.unreadline(line)\n break\n \n \n self._input.push_eof_matcher(boundaryre.match)\n for retval in self._parsegen():\n if retval is NeedMoreData:\n yield NeedMoreData\n continue\n break\n \n \n \n \n if self._last.get_content_maintype()=='multipart':\n epilogue=self._last.epilogue\n if epilogue =='':\n self._last.epilogue=None\n elif epilogue is not None:\n mo=NLCRE_eol.search(epilogue)\n if mo:\n end=len(mo.group(0))\n self._last.epilogue=epilogue[:-end]\n else:\n payload=self._last._payload\n if isinstance(payload,str):\n mo=NLCRE_eol.search(payload)\n if mo:\n payload=payload[:-len(mo.group(0))]\n self._last._payload=payload\n self._input.pop_eof_matcher()\n self._pop_message()\n \n \n self._last=self._cur\n else:\n \n assert capturing_preamble\n preamble.append(line)\n \n \n \n if capturing_preamble:\n defect=errors.StartBoundaryNotFoundDefect()\n self.policy.handle_defect(self._cur,defect)\n self._cur.set_payload(EMPTYSTRING.join(preamble))\n epilogue=[]\n for line in self._input:\n if line is NeedMoreData:\n yield NeedMoreData\n continue\n self._cur.epilogue=EMPTYSTRING.join(epilogue)\n return\n \n \n if not close_boundary_seen:\n defect=errors.CloseBoundaryNotFoundDefect()\n self.policy.handle_defect(self._cur,defect)\n return\n \n \n \n if linesep:\n epilogue=['']\n else:\n epilogue=[]\n for line in self._input:\n if line is NeedMoreData:\n yield NeedMoreData\n continue\n epilogue.append(line)\n \n \n \n if epilogue:\n firstline=epilogue[0]\n bolmo=NLCRE_bol.match(firstline)\n if bolmo:\n epilogue[0]=firstline[len(bolmo.group(0)):]\n self._cur.epilogue=EMPTYSTRING.join(epilogue)\n return\n \n \n lines=[]\n for line in self._input:\n if line is NeedMoreData:\n yield NeedMoreData\n continue\n lines.append(line)\n self._cur.set_payload(EMPTYSTRING.join(lines))\n \n def _parse_headers(self,lines):\n \n lastheader=''\n lastvalue=[]\n for lineno,line in enumerate(lines):\n \n if line[0]in ' \\t':\n if not lastheader:\n \n \n \n defect=errors.FirstHeaderLineIsContinuationDefect(line)\n self.policy.handle_defect(self._cur,defect)\n continue\n lastvalue.append(line)\n continue\n if lastheader:\n self._cur.set_raw(*self.policy.header_source_parse(lastvalue))\n 
lastheader,lastvalue='',[]\n \n if line.startswith('From '):\n if lineno ==0:\n \n mo=NLCRE_eol.search(line)\n if mo:\n line=line[:-len(mo.group(0))]\n self._cur.set_unixfrom(line)\n continue\n elif lineno ==len(lines)-1:\n \n \n \n self._input.unreadline(line)\n return\n else:\n \n \n defect=errors.MisplacedEnvelopeHeaderDefect(line)\n self._cur.defects.append(defect)\n continue\n \n \n \n i=line.find(':')\n \n \n \n \n if i ==0:\n defect=errors.InvalidHeaderDefect(\"Missing header name.\")\n self._cur.defects.append(defect)\n continue\n \n assert i >0,\"_parse_headers fed line with no : and no leading WS\"\n lastheader=line[:i]\n lastvalue=[line]\n \n if lastheader:\n self._cur.set_raw(*self.policy.header_source_parse(lastvalue))\n \n \nclass BytesFeedParser(FeedParser):\n ''\n \n def feed(self,data):\n super().feed(data.decode('ascii','surrogateescape'))\n", ["collections", "email", "email._policybase", "email.errors", "email.message", "io", "re"]], "email._parseaddr": [".py", "\n\n\n\"\"\"Email address parsing code.\n\nLifted directly from rfc822.py. This should eventually be rewritten.\n\"\"\"\n\n__all__=[\n'mktime_tz',\n'parsedate',\n'parsedate_tz',\n'quote',\n]\n\nimport time,calendar\n\nSPACE=' '\nEMPTYSTRING=''\nCOMMASPACE=', '\n\n\n_monthnames=['jan','feb','mar','apr','may','jun','jul',\n'aug','sep','oct','nov','dec',\n'january','february','march','april','may','june','july',\n'august','september','october','november','december']\n\n_daynames=['mon','tue','wed','thu','fri','sat','sun']\n\n\n\n\n\n\n\n_timezones={'UT':0,'UTC':0,'GMT':0,'Z':0,\n'AST':-400,'ADT':-300,\n'EST':-500,'EDT':-400,\n'CST':-600,'CDT':-500,\n'MST':-700,'MDT':-600,\n'PST':-800,'PDT':-700\n}\n\n\ndef parsedate_tz(data):\n ''\n\n\n \n res=_parsedate_tz(data)\n if not res:\n return\n if res[9]is None:\n res[9]=0\n return tuple(res)\n \ndef _parsedate_tz(data):\n ''\n\n\n\n\n\n\n\n \n if not data:\n return None\n data=data.split()\n if not data:\n return None\n \n \n if data[0].endswith(',')or data[0].lower()in _daynames:\n \n del data[0]\n else:\n i=data[0].rfind(',')\n if i >=0:\n data[0]=data[0][i+1:]\n if len(data)==3:\n stuff=data[0].split('-')\n if len(stuff)==3:\n data=stuff+data[1:]\n if len(data)==4:\n s=data[3]\n i=s.find('+')\n if i ==-1:\n i=s.find('-')\n if i >0:\n data[3:]=[s[:i],s[i:]]\n else:\n data.append('')\n if len(data)<5:\n return None\n data=data[:5]\n [dd,mm,yy,tm,tz]=data\n if not(dd and mm and yy):\n return None\n mm=mm.lower()\n if mm not in _monthnames:\n dd,mm=mm,dd.lower()\n if mm not in _monthnames:\n return None\n mm=_monthnames.index(mm)+1\n if mm >12:\n mm -=12\n if dd[-1]==',':\n dd=dd[:-1]\n i=yy.find(':')\n if i >0:\n yy,tm=tm,yy\n if yy[-1]==',':\n yy=yy[:-1]\n if not yy:\n return None\n if not yy[0].isdigit():\n yy,tz=tz,yy\n if tm[-1]==',':\n tm=tm[:-1]\n tm=tm.split(':')\n if len(tm)==2:\n [thh,tmm]=tm\n tss='0'\n elif len(tm)==3:\n [thh,tmm,tss]=tm\n elif len(tm)==1 and '.'in tm[0]:\n \n tm=tm[0].split('.')\n if len(tm)==2:\n [thh,tmm]=tm\n tss=0\n elif len(tm)==3:\n [thh,tmm,tss]=tm\n else:\n return None\n else:\n return None\n try:\n yy=int(yy)\n dd=int(dd)\n thh=int(thh)\n tmm=int(tmm)\n tss=int(tss)\n except ValueError:\n return None\n \n \n \n \n \n if yy <100:\n \n if yy >68:\n yy +=1900\n \n else:\n yy +=2000\n tzoffset=None\n tz=tz.upper()\n if tz in _timezones:\n tzoffset=_timezones[tz]\n else:\n try:\n tzoffset=int(tz)\n except ValueError:\n pass\n if tzoffset ==0 and tz.startswith('-'):\n tzoffset=None\n \n if tzoffset:\n if tzoffset <0:\n tzsign=-1\n 
tzoffset=-tzoffset\n else:\n tzsign=1\n tzoffset=tzsign *((tzoffset //100)*3600+(tzoffset %100)*60)\n \n return[yy,mm,dd,thh,tmm,tss,0,1,-1,tzoffset]\n \n \ndef parsedate(data):\n ''\n t=parsedate_tz(data)\n if isinstance(t,tuple):\n return t[:9]\n else:\n return t\n \n \ndef mktime_tz(data):\n ''\n if data[9]is None:\n \n return time.mktime(data[:8]+(-1,))\n else:\n t=calendar.timegm(data)\n return t -data[9]\n \n \ndef quote(str):\n ''\n\n\n\n\n \n return str.replace('\\\\','\\\\\\\\').replace('\"','\\\\\"')\n \n \nclass AddrlistClass:\n ''\n\n\n\n\n\n\n \n \n def __init__(self,field):\n ''\n\n\n\n \n self.specials='()<>@,:;.\\\"[]'\n self.pos=0\n self.LWS=' \\t'\n self.CR='\\r\\n'\n self.FWS=self.LWS+self.CR\n self.atomends=self.specials+self.LWS+self.CR\n \n \n \n self.phraseends=self.atomends.replace('.','')\n self.field=field\n self.commentlist=[]\n \n def gotonext(self):\n ''\n wslist=[]\n while self.pos =len(self.field):\n \n if plist:\n returnlist=[(SPACE.join(self.commentlist),plist[0])]\n \n elif self.field[self.pos]in '.@':\n \n \n self.pos=oldpos\n self.commentlist=oldcl\n addrspec=self.getaddrspec()\n returnlist=[(SPACE.join(self.commentlist),addrspec)]\n \n elif self.field[self.pos]==':':\n \n returnlist=[]\n \n fieldlen=len(self.field)\n self.pos +=1\n while self.pos ':\n self.pos +=1\n break\n elif self.field[self.pos]=='@':\n self.pos +=1\n expectroute=True\n elif self.field[self.pos]==':':\n self.pos +=1\n else:\n adlist=self.getaddrspec()\n self.pos +=1\n break\n self.gotonext()\n \n return adlist\n \n def getaddrspec(self):\n ''\n aslist=[]\n \n self.gotonext()\n while self.pos =len(self.field)or self.field[self.pos]!='@':\n return EMPTYSTRING.join(aslist)\n \n aslist.append('@')\n self.pos +=1\n self.gotonext()\n domain=self.getdomain()\n if not domain:\n \n \n return EMPTYSTRING\n return EMPTYSTRING.join(aslist)+domain\n \n def getdomain(self):\n ''\n sdlist=[]\n while self.pos '\n return lp\n \n def __repr__(self):\n return \"{}(display_name={!r}, username={!r}, domain={!r})\".format(\n self.__class__.__name__,\n self.display_name,self.username,self.domain)\n \n def __str__(self):\n disp=self.display_name\n if not parser.SPECIALS.isdisjoint(disp):\n disp=parser.quote_string(disp)\n if disp:\n addr_spec=''if self.addr_spec =='<>'else self.addr_spec\n return \"{} <{}>\".format(disp,addr_spec)\n return self.addr_spec\n \n def __eq__(self,other):\n if not isinstance(other,Address):\n return NotImplemented\n return(self.display_name ==other.display_name and\n self.username ==other.username and\n self.domain ==other.domain)\n \n \nclass Group:\n\n def __init__(self,display_name=None,addresses=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._display_name=display_name\n self._addresses=tuple(addresses)if addresses else tuple()\n \n @property\n def display_name(self):\n return self._display_name\n \n @property\n def addresses(self):\n return self._addresses\n \n def __repr__(self):\n return \"{}(display_name={!r}, addresses={!r}\".format(\n self.__class__.__name__,\n self.display_name,self.addresses)\n \n def __str__(self):\n if self.display_name is None and len(self.addresses)==1:\n return str(self.addresses[0])\n disp=self.display_name\n if disp is not None and not parser.SPECIALS.isdisjoint(disp):\n disp=parser.quote_string(disp)\n adrstr=\", \".join(str(x)for x in self.addresses)\n adrstr=' '+adrstr if adrstr else adrstr\n return \"{}:{};\".format(disp,adrstr)\n \n def __eq__(self,other):\n if not isinstance(other,Group):\n return NotImplemented\n return(self.display_name 
==other.display_name and\n self.addresses ==other.addresses)\n \n \n \n \nclass BaseHeader(str):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __new__(cls,name,value):\n kwds={'defects':[]}\n cls.parse(value,kwds)\n if utils._has_surrogates(kwds['decoded']):\n kwds['decoded']=utils._sanitize(kwds['decoded'])\n self=str.__new__(cls,kwds['decoded'])\n del kwds['decoded']\n self.init(name,**kwds)\n return self\n \n def init(self,name,*,parse_tree,defects):\n self._name=name\n self._parse_tree=parse_tree\n self._defects=defects\n \n @property\n def name(self):\n return self._name\n \n @property\n def defects(self):\n return tuple(self._defects)\n \n def __reduce__(self):\n return(\n _reconstruct_header,\n (\n self.__class__.__name__,\n self.__class__.__bases__,\n str(self),\n ),\n self.__getstate__())\n \n @classmethod\n def _reconstruct(cls,value):\n return str.__new__(cls,value)\n \n def fold(self,*,policy):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n header=parser.Header([\n parser.HeaderLabel([\n parser.ValueTerminal(self.name,'header-name'),\n parser.ValueTerminal(':','header-sep')]),\n ])\n if self._parse_tree:\n header.append(\n parser.CFWSList([parser.WhiteSpaceTerminal(' ','fws')]))\n header.append(self._parse_tree)\n return header.fold(policy=policy)\n \n \ndef _reconstruct_header(cls_name,bases,value):\n return type(cls_name,bases,{})._reconstruct(value)\n \n \nclass UnstructuredHeader:\n\n max_count=None\n value_parser=staticmethod(parser.get_unstructured)\n \n @classmethod\n def parse(cls,value,kwds):\n kwds['parse_tree']=cls.value_parser(value)\n kwds['decoded']=str(kwds['parse_tree'])\n \n \nclass UniqueUnstructuredHeader(UnstructuredHeader):\n\n max_count=1\n \n \nclass DateHeader:\n\n ''\n\n\n\n\n\n\n \n \n max_count=None\n \n \n value_parser=staticmethod(parser.get_unstructured)\n \n @classmethod\n def parse(cls,value,kwds):\n if not value:\n kwds['defects'].append(errors.HeaderMissingRequiredValue())\n kwds['datetime']=None\n kwds['decoded']=''\n kwds['parse_tree']=parser.TokenList()\n return\n if isinstance(value,str):\n kwds['decoded']=value\n try:\n value=utils.parsedate_to_datetime(value)\n except ValueError:\n kwds['defects'].append(errors.InvalidDateDefect('Invalid date value or format'))\n kwds['datetime']=None\n kwds['parse_tree']=parser.TokenList()\n return\n kwds['datetime']=value\n kwds['decoded']=utils.format_datetime(kwds['datetime'])\n kwds['parse_tree']=cls.value_parser(kwds['decoded'])\n \n def init(self,*args,**kw):\n self._datetime=kw.pop('datetime')\n super().init(*args,**kw)\n \n @property\n def datetime(self):\n return self._datetime\n \n \nclass UniqueDateHeader(DateHeader):\n\n max_count=1\n \n \nclass AddressHeader:\n\n max_count=None\n \n @staticmethod\n def value_parser(value):\n address_list,value=parser.get_address_list(value)\n assert not value,'this should not happen'\n return address_list\n \n @classmethod\n def parse(cls,value,kwds):\n if isinstance(value,str):\n \n \n kwds['parse_tree']=address_list=cls.value_parser(value)\n groups=[]\n for addr in address_list.addresses:\n groups.append(Group(addr.display_name,\n [Address(mb.display_name or '',\n mb.local_part or '',\n mb.domain or '')\n for mb in addr.all_mailboxes]))\n defects=list(address_list.all_defects)\n else:\n \n if not hasattr(value,'__iter__'):\n value=[value]\n groups=[Group(None,[item])if not hasattr(item,'addresses')\n else item\n for item in value]\n defects=[]\n kwds['groups']=groups\n kwds['defects']=defects\n kwds['decoded']=', '.join([str(item)for 
item in groups])\n if 'parse_tree'not in kwds:\n kwds['parse_tree']=cls.value_parser(kwds['decoded'])\n \n def init(self,*args,**kw):\n self._groups=tuple(kw.pop('groups'))\n self._addresses=None\n super().init(*args,**kw)\n \n @property\n def groups(self):\n return self._groups\n \n @property\n def addresses(self):\n if self._addresses is None:\n self._addresses=tuple(address for group in self._groups\n for address in group.addresses)\n return self._addresses\n \n \nclass UniqueAddressHeader(AddressHeader):\n\n max_count=1\n \n \nclass SingleAddressHeader(AddressHeader):\n\n @property\n def address(self):\n if len(self.addresses)!=1:\n raise ValueError((\"value of single address header {} is not \"\n \"a single address\").format(self.name))\n return self.addresses[0]\n \n \nclass UniqueSingleAddressHeader(SingleAddressHeader):\n\n max_count=1\n \n \nclass MIMEVersionHeader:\n\n max_count=1\n \n value_parser=staticmethod(parser.parse_mime_version)\n \n @classmethod\n def parse(cls,value,kwds):\n kwds['parse_tree']=parse_tree=cls.value_parser(value)\n kwds['decoded']=str(parse_tree)\n kwds['defects'].extend(parse_tree.all_defects)\n kwds['major']=None if parse_tree.minor is None else parse_tree.major\n kwds['minor']=parse_tree.minor\n if parse_tree.minor is not None:\n kwds['version']='{}.{}'.format(kwds['major'],kwds['minor'])\n else:\n kwds['version']=None\n \n def init(self,*args,**kw):\n self._version=kw.pop('version')\n self._major=kw.pop('major')\n self._minor=kw.pop('minor')\n super().init(*args,**kw)\n \n @property\n def major(self):\n return self._major\n \n @property\n def minor(self):\n return self._minor\n \n @property\n def version(self):\n return self._version\n \n \nclass ParameterizedMIMEHeader:\n\n\n\n\n max_count=1\n \n @classmethod\n def parse(cls,value,kwds):\n kwds['parse_tree']=parse_tree=cls.value_parser(value)\n kwds['decoded']=str(parse_tree)\n kwds['defects'].extend(parse_tree.all_defects)\n if parse_tree.params is None:\n kwds['params']={}\n else:\n \n kwds['params']={utils._sanitize(name).lower():\n utils._sanitize(value)\n for name,value in parse_tree.params}\n \n def init(self,*args,**kw):\n self._params=kw.pop('params')\n super().init(*args,**kw)\n \n @property\n def params(self):\n return MappingProxyType(self._params)\n \n \nclass ContentTypeHeader(ParameterizedMIMEHeader):\n\n value_parser=staticmethod(parser.parse_content_type_header)\n \n def init(self,*args,**kw):\n super().init(*args,**kw)\n self._maintype=utils._sanitize(self._parse_tree.maintype)\n self._subtype=utils._sanitize(self._parse_tree.subtype)\n \n @property\n def maintype(self):\n return self._maintype\n \n @property\n def subtype(self):\n return self._subtype\n \n @property\n def content_type(self):\n return self.maintype+'/'+self.subtype\n \n \nclass ContentDispositionHeader(ParameterizedMIMEHeader):\n\n value_parser=staticmethod(parser.parse_content_disposition_header)\n \n def init(self,*args,**kw):\n super().init(*args,**kw)\n cd=self._parse_tree.content_disposition\n self._content_disposition=cd if cd is None else utils._sanitize(cd)\n \n @property\n def content_disposition(self):\n return self._content_disposition\n \n \nclass ContentTransferEncodingHeader:\n\n max_count=1\n \n value_parser=staticmethod(parser.parse_content_transfer_encoding_header)\n \n @classmethod\n def parse(cls,value,kwds):\n kwds['parse_tree']=parse_tree=cls.value_parser(value)\n kwds['decoded']=str(parse_tree)\n kwds['defects'].extend(parse_tree.all_defects)\n \n def init(self,*args,**kw):\n 
super().init(*args,**kw)\n self._cte=utils._sanitize(self._parse_tree.cte)\n \n @property\n def cte(self):\n return self._cte\n \n \nclass MessageIDHeader:\n\n max_count=1\n value_parser=staticmethod(parser.parse_message_id)\n \n @classmethod\n def parse(cls,value,kwds):\n kwds['parse_tree']=parse_tree=cls.value_parser(value)\n kwds['decoded']=str(parse_tree)\n kwds['defects'].extend(parse_tree.all_defects)\n \n \n \n \n_default_header_map={\n'subject':UniqueUnstructuredHeader,\n'date':UniqueDateHeader,\n'resent-date':DateHeader,\n'orig-date':UniqueDateHeader,\n'sender':UniqueSingleAddressHeader,\n'resent-sender':SingleAddressHeader,\n'to':UniqueAddressHeader,\n'resent-to':AddressHeader,\n'cc':UniqueAddressHeader,\n'resent-cc':AddressHeader,\n'bcc':UniqueAddressHeader,\n'resent-bcc':AddressHeader,\n'from':UniqueAddressHeader,\n'resent-from':AddressHeader,\n'reply-to':UniqueAddressHeader,\n'mime-version':MIMEVersionHeader,\n'content-type':ContentTypeHeader,\n'content-disposition':ContentDispositionHeader,\n'content-transfer-encoding':ContentTransferEncodingHeader,\n'message-id':MessageIDHeader,\n}\n\nclass HeaderRegistry:\n\n ''\n \n def __init__(self,base_class=BaseHeader,default_class=UnstructuredHeader,\n use_default_map=True):\n ''\n\n\n\n\n\n\n\n\n \n self.registry={}\n self.base_class=base_class\n self.default_class=default_class\n if use_default_map:\n self.registry.update(_default_header_map)\n \n def map_to_type(self,name,cls):\n ''\n\n \n self.registry[name.lower()]=cls\n \n def __getitem__(self,name):\n cls=self.registry.get(name.lower(),self.default_class)\n return type('_'+cls.__name__,(cls,self.base_class),{})\n \n def __call__(self,name,value):\n ''\n\n\n\n\n\n\n\n \n return self[name](name,value)\n", ["email", "email._header_value_parser", "email.errors", "email.utils", "types"]], "email.mime.multipart": [".py", "\n\n\n\n\"\"\"Base class for MIME multipart/* type messages.\"\"\"\n\n__all__=['MIMEMultipart']\n\nfrom email.mime.base import MIMEBase\n\n\nclass MIMEMultipart(MIMEBase):\n ''\n \n def __init__(self,_subtype='mixed',boundary=None,_subparts=None,\n *,policy=None,\n **_params):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n MIMEBase.__init__(self,'multipart',_subtype,policy=policy,**_params)\n \n \n \n \n self._payload=[]\n \n if _subparts:\n for p in _subparts:\n self.attach(p)\n if boundary:\n self.set_boundary(boundary)\n", ["email.mime.base"]], "email.mime": [".py", "", [], 1], "email.mime.message": [".py", "\n\n\n\n\"\"\"Class representing message/* MIME documents.\"\"\"\n\n__all__=['MIMEMessage']\n\nfrom email import message\nfrom email.mime.nonmultipart import MIMENonMultipart\n\n\nclass MIMEMessage(MIMENonMultipart):\n ''\n \n def __init__(self,_msg,_subtype='rfc822',*,policy=None):\n ''\n\n\n\n\n\n\n\n \n MIMENonMultipart.__init__(self,'message',_subtype,policy=policy)\n if not isinstance(_msg,message.Message):\n raise TypeError('Argument is not an instance of Message')\n \n \n message.Message.attach(self,_msg)\n \n self.set_default_type('message/rfc822')\n", ["email", "email.message", "email.mime.nonmultipart"]], "email.mime.application": [".py", "\n\n\n\n\"\"\"Class representing application/* type MIME documents.\"\"\"\n\n__all__=[\"MIMEApplication\"]\n\nfrom email import encoders\nfrom email.mime.nonmultipart import MIMENonMultipart\n\n\nclass MIMEApplication(MIMENonMultipart):\n ''\n \n def __init__(self,_data,_subtype='octet-stream',\n _encoder=encoders.encode_base64,*,policy=None,**_params):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _subtype is None:\n raise 
TypeError('Invalid application MIME subtype')\n MIMENonMultipart.__init__(self,'application',_subtype,policy=policy,\n **_params)\n self.set_payload(_data)\n _encoder(self)\n", ["email", "email.encoders", "email.mime.nonmultipart"]], "email.mime.nonmultipart": [".py", "\n\n\n\n\"\"\"Base class for MIME type messages that are not multipart.\"\"\"\n\n__all__=['MIMENonMultipart']\n\nfrom email import errors\nfrom email.mime.base import MIMEBase\n\n\nclass MIMENonMultipart(MIMEBase):\n ''\n \n def attach(self,payload):\n \n \n \n raise errors.MultipartConversionError(\n 'Cannot attach additional subparts to non-multipart/*')\n", ["email", "email.errors", "email.mime.base"]], "email.mime.text": [".py", "\n\n\n\n\"\"\"Class representing text/* type MIME documents.\"\"\"\n\n__all__=['MIMEText']\n\nfrom email.mime.nonmultipart import MIMENonMultipart\n\n\nclass MIMEText(MIMENonMultipart):\n ''\n \n def __init__(self,_text,_subtype='plain',_charset=None,*,policy=None):\n ''\n\n\n\n\n\n\n\n\n \n \n \n \n \n if _charset is None:\n try:\n _text.encode('us-ascii')\n _charset='us-ascii'\n except UnicodeEncodeError:\n _charset='utf-8'\n \n MIMENonMultipart.__init__(self,'text',_subtype,policy=policy,\n charset=str(_charset))\n \n self.set_payload(_text,_charset)\n", ["email.mime.nonmultipart"]], "email.mime.audio": [".py", "\n\n\n\n\"\"\"Class representing audio/* type MIME documents.\"\"\"\n\n__all__=['MIMEAudio']\n\nfrom io import BytesIO\nfrom email import encoders\nfrom email.mime.nonmultipart import MIMENonMultipart\n\n\nclass MIMEAudio(MIMENonMultipart):\n ''\n \n def __init__(self,_audiodata,_subtype=None,\n _encoder=encoders.encode_base64,*,policy=None,**_params):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _subtype is None:\n _subtype=_what(_audiodata)\n if _subtype is None:\n raise TypeError('Could not find audio MIME subtype')\n MIMENonMultipart.__init__(self,'audio',_subtype,policy=policy,\n **_params)\n self.set_payload(_audiodata)\n _encoder(self)\n \n \n_rules=[]\n\n\n\n\n\n\ndef _what(data):\n\n\n\n\n\n hdr=data[:512]\n fakefile=BytesIO(hdr)\n for testfn in _rules:\n if res :=testfn(hdr,fakefile):\n return res\n else:\n return None\n \n \ndef rule(rulefunc):\n _rules.append(rulefunc)\n return rulefunc\n \n \n@rule\ndef _aiff(h,f):\n if not h.startswith(b'FORM'):\n return None\n if h[8:12]in{b'AIFC',b'AIFF'}:\n return 'x-aiff'\n else:\n return None\n \n \n@rule\ndef _au(h,f):\n if h.startswith(b'.snd'):\n return 'basic'\n else:\n return None\n \n \n@rule\ndef _wav(h,f):\n\n if not h.startswith(b'RIFF')or h[8:12]!=b'WAVE'or h[12:16]!=b'fmt ':\n return None\n else:\n return \"x-wav\"\n", ["email", "email.encoders", "email.mime.nonmultipart", "io"]], "email.mime.image": [".py", "\n\n\n\n\"\"\"Class representing image/* type MIME documents.\"\"\"\n\n__all__=['MIMEImage']\n\nfrom email import encoders\nfrom email.mime.nonmultipart import MIMENonMultipart\n\n\nclass MIMEImage(MIMENonMultipart):\n ''\n \n def __init__(self,_imagedata,_subtype=None,\n _encoder=encoders.encode_base64,*,policy=None,**_params):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n _subtype=_what(_imagedata)if _subtype is None else _subtype\n if _subtype is None:\n raise TypeError('Could not guess image MIME subtype')\n MIMENonMultipart.__init__(self,'image',_subtype,policy=policy,\n **_params)\n self.set_payload(_imagedata)\n _encoder(self)\n \n \n_rules=[]\n\n\n\ndef _what(data):\n for rule in _rules:\n if res :=rule(data):\n return res\n else:\n return None\n \n \ndef rule(rulefunc):\n _rules.append(rulefunc)\n return 
rulefunc\n \n \n@rule\ndef _jpeg(h):\n ''\n if h[6:10]in(b'JFIF',b'Exif'):\n return 'jpeg'\n elif h[:4]==b'\\xff\\xd8\\xff\\xdb':\n return 'jpeg'\n \n \n@rule\ndef _png(h):\n if h.startswith(b'\\211PNG\\r\\n\\032\\n'):\n return 'png'\n \n \n@rule\ndef _gif(h):\n ''\n if h[:6]in(b'GIF87a',b'GIF89a'):\n return 'gif'\n \n \n@rule\ndef _tiff(h):\n ''\n if h[:2]in(b'MM',b'II'):\n return 'tiff'\n \n \n@rule\ndef _rgb(h):\n ''\n if h.startswith(b'\\001\\332'):\n return 'rgb'\n \n \n@rule\ndef _pbm(h):\n ''\n if len(h)>=3 and\\\n h[0]==ord(b'P')and h[1]in b'14'and h[2]in b' \\t\\n\\r':\n return 'pbm'\n \n \n@rule\ndef _pgm(h):\n ''\n if len(h)>=3 and\\\n h[0]==ord(b'P')and h[1]in b'25'and h[2]in b' \\t\\n\\r':\n return 'pgm'\n \n \n@rule\ndef _ppm(h):\n ''\n if len(h)>=3 and\\\n h[0]==ord(b'P')and h[1]in b'36'and h[2]in b' \\t\\n\\r':\n return 'ppm'\n \n \n@rule\ndef _rast(h):\n ''\n if h.startswith(b'\\x59\\xA6\\x6A\\x95'):\n return 'rast'\n \n \n@rule\ndef _xbm(h):\n ''\n if h.startswith(b'#define '):\n return 'xbm'\n \n \n@rule\ndef _bmp(h):\n if h.startswith(b'BM'):\n return 'bmp'\n \n \n@rule\ndef _webp(h):\n if h.startswith(b'RIFF')and h[8:12]==b'WEBP':\n return 'webp'\n \n \n@rule\ndef _exr(h):\n if h.startswith(b'\\x76\\x2f\\x31\\x01'):\n return 'exr'\n", ["email", "email.encoders", "email.mime.nonmultipart"]], "email.mime.base": [".py", "\n\n\n\n\"\"\"Base class for MIME specializations.\"\"\"\n\n__all__=['MIMEBase']\n\nimport email.policy\n\nfrom email import message\n\n\nclass MIMEBase(message.Message):\n ''\n \n def __init__(self,_maintype,_subtype,*,policy=None,**_params):\n ''\n\n\n\n\n \n if policy is None:\n policy=email.policy.compat32\n message.Message.__init__(self,policy=policy)\n ctype='%s/%s'%(_maintype,_subtype)\n self.add_header('Content-Type',ctype,**_params)\n self['MIME-Version']='1.0'\n", ["email", "email.message", "email.policy"]]} +var scripts = {"$timestamp": 1709623255701, "pyexpat": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nconst XML_PARAM_ENTITY_PARSING_NEVER = 0,\n XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE = 1,\n XML_PARAM_ENTITY_PARSING_ALWAYS = 2\n\nconst FAIL = {}\n\nconst xml_entities = {\n '>': '>',\n '<': '<',\n '"': '\"',\n ''': \"'\",\n '&': '&'\n }\n\nvar xmlparser = $B.make_class('xmlparser',\n function(encoding, namespace_separator, intern){\n return {\n __class__: xmlparser,\n encoding,\n namespace_separator,\n intern,\n buffer_text: false,\n _buffer: '',\n _state: 'data',\n _data_buffer: '',\n _initialized: false,\n _maybe_entity: null,\n _element_stack: [],\n _chunk_size: 2 << 14\n }\n }\n)\n\nxmlparser._handle_stack = function(self){\n if(! (self._element instanceof ELEMENT)){\n return\n }\n if(self._element.name === undefined){\n console.log('name undefined', self._element)\n alert()\n }\n if(self._element.is_end){\n if(self._element_stack.length == 0){\n raise_error(self, 'no opening tag for closing ' + self._element.name)\n }else{\n var expected = $B.last(self._element_stack)\n if(expected !== self._element.name){\n console.log('error handle stack, stack', self._element_stack, self._element)\n raise_error(self, `tag mismatch, ` +\n `expected closing tag ${expected}, ` +\n `got: ${self._element.name}`)\n }\n self._element_stack.pop()\n if(self._element_stack.length == 0){\n flush_char_data(self)\n }\n }\n }else if(! 
self._element.self_closing){\n self._element_stack.push(self._element.name)\n }\n}\n\nxmlparser.CharacterDataHandler = _b_.None\n\nxmlparser.CommentHandler = _b_.None\n\nxmlparser.EndElementHandler = _b_.None\n\nfunction check_entity(parser, pos){\n var entity = parser._maybe_entity\n var decimal = /&#(\\d+);$/.exec(entity)\n if(decimal){\n return _b_.chr(parseInt(decimal[1]))\n }\n var hexa = /&#x(\\d+);$/.exec(entity)\n if(hexa){\n return _b_.chr(parseInt(hexa[1], 16))\n }\n var xml_entity = xml_entities[entity]\n if(xml_entity){\n return xml_entity\n }\n raise_error_known_position(parser, `unknown entity: \"${entity}\"`, pos)\n}\n\nfunction flush_char_data(parser){\n var buf = parser._data_buffer\n if(buf.length > 0){\n let handler = parser._handlers.CharacterDataHandler\n if(handler !== _b_.None){\n handler(buf)\n }\n }\n parser._data_buffer = ''\n}\n\nfunction flush_final_char_data(parser){\n var buf = parser._data_buffer\n for(var i = 0; i < buf.length; i++){\n if(! buf[i].match(/\\s/)){\n var pos = parser._pos - buf.length + i - 1\n console.log('rest', buf)\n var msg = `junk after document element: line 1, column ${pos}`\n raise_error(parser, msg)\n }\n }\n}\n\nconst encoding_re = /<\\?xml .*encoding\\s*=\\s*\"(.*?)\"/\n\nconst handler_names = [\n 'CharacterDataHandler',\n 'CommentHandler',\n 'StartElementHandler',\n 'EndElementHandler'\n ]\n\nxmlparser.Parse = function(){\n var $ = $B.args('Parse', 3,\n {self: null, data: null, isfinal: null},\n ['self', 'data', 'isfinal'], arguments,\n {}, null, null),\n self = $.self,\n data = $.data,\n isfinal = $.isfinal,\n decoder,\n array\n if(self.finished){\n throw Error('parsing finished')\n }\n if(_b_.isinstance(data, _b_.bytes)){\n if(self.encoding === _b_.None){\n // try getting encoding from prolog\n decoder = new TextDecoder('iso-8859-1')\n array = new Uint8Array(data.source.slice(0, 200))\n var head = decoder.decode(array)\n var mo = encoding_re.exec(head)\n if(mo){\n self.encoding = mo[1]\n }else{\n self.encoding = 'utf-8' // default\n }\n }\n // decode bytes\n decoder = new TextDecoder(self.encoding)\n array = new Uint8Array(data.source)\n data = decoder.decode(array)\n }\n if(! self._initialized){\n if(data[0] != '<'){\n throw Error(\"XML or text declaration not at start of entity\")\n }\n self._initialized = true\n }\n self._buffer = data\n self._buffer_length = _b_.len(data)\n self._pos = 0\n\n var handlers = self._handlers = {}\n for(var handler_name of handler_names){\n let handler = $B.$getattr(self, handler_name)\n if(handler !== _b_.None){\n handlers[handler_name] = $B.$call(handler)\n }else{\n handlers[handler_name] = _b_.None\n }\n }\n\n for(var token of xmlparser.xml_tokenizer(self)){\n if(token instanceof ELEMENT){\n if(! token.is_declaration && ! 
token.is_end){\n if(handlers.StartElementHandler !== _b_.None){\n flush_char_data(self)\n handlers.StartElementHandler(token.name, token.attrs)\n }\n if(token.self_closing &&\n handlers.EndElementHandler !== _b_.None){\n handlers.EndElementHandler(token.name)\n }\n }else if(token.is_end &&\n handlers.EndElementHandler !== _b_.None){\n flush_char_data(self)\n handlers.EndElementHandler(token.name)\n }\n }else if(token instanceof DATA &&\n handlers.CharacterDataHandler !== _b_.None){\n handlers.CharacterDataHandler(token.value)\n }else if(token instanceof COMMENT &&\n handlers.CommentHandler !== _b_.None){\n flush_char_data(self)\n handlers.CommentHandler(token.value)\n }\n }\n flush_final_char_data(self)\n if(isfinal){\n self.finished = true\n }\n}\n\nxmlparser.ParseFile = function(){\n var $ = $B.args('ParseFile', 2,\n {self: null, file: null},\n ['self', 'file'], arguments,\n {}, null, null),\n self = $.self,\n file = $.file\n var reader = $B.$call($B.$getattr(file, 'read'))\n while(true){\n var data = reader(self._chunk_size)\n if(data.length == 0){\n return xmlparser.Parse(self, data, true)\n }else{\n xmlparser.Parse(self, data, false)\n }\n }\n}\n\nxmlparser.SetBase = function(self, base){\n self._base = base\n return _b_.None\n}\n\nxmlparser.SetParamEntityParsing = function(self, peParsing){\n self._peParsing = peParsing\n return peParsing\n}\n\nxmlparser.StartElementHandler = _b_.None\n\nxmlparser.xml_tokenizer = function*(self){\n // convert bytes to string\n self._element = new DOCUMENT(self)\n while(self._pos < self._buffer_length){\n var char = self._buffer[self._pos]\n self._element = self._element.feed(char)\n if(self._element.closed){\n yield self._element\n }\n self._pos++\n }\n console.log('fini')\n}\n\n$B.set_func_names(xmlparser, 'expat')\n\nfunction raise_error_known_position(parser, message, pos){\n message += ' at position ' + pos\n var ix = pos\n while(ix >= 0 && parser._buffer[ix] !== '\\n'){\n ix--\n }\n message += '\\n' + parser._buffer.substring(ix, pos + 1)\n message += '\\n' + ' '.repeat(pos - ix - 1) + '^'\n throw error.$factory(message)\n}\n\nfunction raise_error(parser, message){\n throw error.$factory(message)\n}\n\nfunction raise_error1(element, char){\n var head = element\n while(head.origin){\n head = head.origin\n }\n console.log(head)\n var cls = element.constructor.name,\n message = cls + ' expected ' + element.expect +\n ', got: ' + char\n var pos = head.parser._pos\n raise_error_known_position(head.parser, message, pos)\n}\n\nvar error = $B.make_class(\"error\",\n function(message){\n return {\n __class__: error,\n msg: message,\n args: $B.fast_tuple([message]),\n __cause__: _b_.None,\n __context__: _b_.None,\n __suppress_context__: false\n }\n })\nerror.__bases__ = [_b_.Exception, _b_.object]\nerror.__mro__ = [_b_.Exception, _b_.BaseException, _b_.object]\n\n$B.set_func_names(error, \"expat\")\n\nfunction expect_chars(element, char, stop){\n var res\n if(! 
element.hasOwnProperty('expected_chars')){\n element.expected_chars = ''\n }\n if(is_char(char)){\n element.expected_chars += char\n if(stop){\n var end_pos = element.expected_chars.length - stop.length\n var tail = element.expected_chars.substr(end_pos)\n if(tail == stop){\n res = {value: element.expected_chars.substr(0, end_pos)}\n delete element.expected_chars\n return res\n }\n }\n }else{\n res = {value: element.expected_chars}\n if(element.expected_pos == literal.length){\n delete element.expected_pos\n return {value: literal}\n }\n }\n return {value: null}\n}\n\n\nfunction expect_name(element, char){\n if(! element.hasOwnProperty('expected_name')){\n if(is_id_start(char)){\n element.expected_name = char\n }else if(! is_whitespace(char)){\n raise_error(element.parser, 'expected name start, got: ' + char)\n }\n }else if(is_id_continue(char)){\n element.expected_name += char\n }else if(is_whitespace(char)){\n var res = {value: element.expected_name}\n delete element.expected_name\n return res\n }else{\n raise_error(element.parser, 'name expected id, got: ' + char)\n }\n return {}\n}\n\nfunction expect_literal(element, literal, char){\n if(! element.hasOwnProperty('expected_pos')){\n element.expected_pos = 0\n }\n if(literal[element.expected_pos] == char){\n element.expected_pos++\n if(element.expected_pos == literal.length){\n delete element.expected_pos\n return {value: literal}\n }else{\n return {value: null}\n }\n }\n return FAIL\n}\n\nfunction get_parser(element){\n while(element.origin){\n element = element.origin\n }\n return element.parser\n}\n\nfunction get_pos(element){\n while(element.origin){\n element = element.origin\n }\n return element.parser._pos\n}\n\n/*\ndocument ::= prolog element Misc*\n\nprolog ::= XMLDecl? Misc* (doctypedecl Misc*)?\nXMLDecl ::= ''\nMisc ::= Comment | PI | S\nComment ::= ''\nPI ::= '' Char*)))? 
'?>'\ndoctypedecl ::= ''\n*/\nfunction DOCUMENT(parser){\n this.parser = parser\n this.expect = 'prolog'\n this.names = []\n}\n\nDOCUMENT.prototype.feed = function(char){\n if(this.expect == 'prolog'){\n this.expect = 'element'\n return (new prolog(this)).feed(char)\n if(char !== '<'){\n raise_error(this.parser, 'expected <')\n }\n this.expect = 'name_start_or_special'\n }else if(this.expect == 'name_start_or_special'){\n if(char == '!'){\n this.expect = 'comment_or_doctype'\n }else if(char == '?'){\n this.expect = 'xmldecl_or_pi'\n }else if(is_id_start(char)){\n this.expect = 'prolog'\n return new ELEMENT(this).feed(char)\n }else{\n raise_error1(this, char)\n }\n }else if(this.expect == 'comment_or_doctype'){\n if(char == '-'){\n this.expect = 'comment'\n }else if(char == 'D'){\n this.expect = 'DOCTYPE'\n return this.feed(char)\n }else{\n raise_error('expected comment or DOCTYPE, got: ' + char)\n }\n }else if(this.expect == 'DOCTYPE'){\n var res = expect_literal(this, 'DOCTYPE', char)\n if(res.value){\n return new DOCTYPE(this.parser, this)\n }\n }else if(this.expect == 'xmldecl_or_pi'){\n var res = expect_name(this, char)\n if(res.value){\n if(res.value == 'xml'){\n this.expect = 'prolog'\n return new XMLDECL(this.parser, this)\n }else{\n this.expect = 'prolog'\n var pi = new PI(this.parser, this)\n pi.name = res.value\n pi.expect = 'content'\n return pi\n }\n }\n return this\n }else if(this.expect == 'comment'){\n if(char == '-'){\n this.expect = 'prolog'\n return new COMMENT(this.parser, this)\n }else{\n raise_error(this.parser, 'DOCUMENT, expected -, got: ' + char)\n }\n }else{\n raise_error(this.parser, 'DOCUMENT, unhandled expect: ' + this.expect)\n }\n return this\n}\n\n/*\nprolog ::= XMLDecl? Misc* (doctypedecl Misc*)?\n*/\nfunction prolog(origin){\n this.origin = origin\n this.expect = 'XMLDecl?'\n}\n\nprolog.prototype.feed = function(char){\n if(this.expect == 'XMLDecl?'){\n return (new XMLDecl(this)).feed(char)\n }\n return this\n}\n\n/*\nXMLDecl ::= ''\n*/\nfunction XMLDecl(origin){\n this.origin = origin\n this.expect = ''\nintSubset ::= (markupdecl | DeclSep)*\nmarkupdecl ::= elementdecl | AttlistDecl | EntityDecl | NotationDecl\n | PI | Comment\nDeclSep ::= PEReference | S\n*/\n\nfunction DOCTYPE(parser, origin){\n this.parser = parser\n this.origin = origin\n this.expect = 'element_start'\n}\n\nDOCTYPE.prototype.feed = function(char){\n console.log('DOCTYPE feed', this.expect, 'char', char)\n if(this.expect == 'element_start'){\n var res = expect_name(this, char)\n if(res.value){\n this.name = res.value\n this.expect = 'external_id_or_[_or_>'\n }\n }else if(this.expect == 'external_id_or_[_or_>'){\n if(char == '['){\n this.expect = '>'\n return new intSubset(this)\n }else if(char == '>'){\n this.expect == 'no_whitespace'\n }else if(char == 'S' || char == 'P'){\n this.expect = '[_or_>'\n var res = new ExternalID(this)\n return res.feed(char)\n }else{\n raise_error(this.parser, 'DOCTYPE expected SYSTEM, PUBLIC, [ or >, got: ' + char)\n }\n }else if(this.expect == '[_or_>'){\n if(char == '['){\n this.expect = '>'\n return new intSubset(this)\n }else if(char == '>'){\n this.expect = 'no_whitespace'\n }else if(! is_whitespace(char)){\n raise_error(this.parser, 'DOCTYPE expected [ or >, got: ' + char)\n }\n }else if(this.expect == '>'){\n if(! is_whitespace(char)){\n if(char == '>'){\n this.expect = 'no_whitespace'\n }else{\n raise_error(this.parser, 'DOCTYPE expected >, got: ' + char)\n }\n }\n }else if(this.expect = 'no_whitespace'){\n if(! 
is_whitespace(char)){\n return this.origin.feed(char)\n }\n }\n return this\n}\n\n/*\nXMLDecl ::= ''\nVersionInfo ::= S 'version' Eq (\"'\" VersionNum \"'\" | '\"' VersionNum '\"')\nEq ::= S? '=' S?\nVersionNum ::= '1.0'\nEncodingDecl ::= S 'encoding' Eq ('\"' EncName '\"' | \"'\" EncName \"'\" )\nEncName ::= [A-Za-z] ([A-Za-z0-9._] | '-')*\nSDDecl ::= S 'standalone' Eq\n ((\"'\" ('yes' | 'no') \"'\") | ('\"' ('yes' | 'no') '\"'))\n*/\nfunction XMLDECL(parser, origin){\n this.parser = parser\n this.expect = 'version_info'\n this.origin = origin\n}\n\nXMLDECL.prototype.feed = function(char){\n switch(this.expect){\n case 'version_info':\n var res = expect_literal(this, 'version', char)\n if(res.value){\n this.expect = 'eq'\n this.attr_name = 'version'\n }\n break\n case 'eq':\n if(char == '='){\n this.expect = 'quote'\n }else if(! is_whitespace(char)){\n raise_error(this.parser, 'expect =, got: ' + char)\n }\n break\n case 'quote':\n if(is_quote(char)){\n this.expect = char\n this.quoted = ''\n }else if(! is_whitespace(char)){\n raise_error(this.parser, 'expected quote, got: ' + char)\n }\n break\n case '\"':\n case \"'\":\n var res = expect_literal(this, this.expect, char)\n if(res.value){\n this[this.attr_name] = this.quoted\n this.expect = 'encoding_or_sd_or_close'\n }else{\n this.quoted += char\n }\n break\n case 'encoding_or_sd_or_close':\n switch(char){\n case 'e':\n if(! this.hasOwnProperty('encoding')){\n this.expect = 'encoding'\n return this.feed(char)\n }\n break\n case 's':\n if(! this.hasOwnProperty('standalone')){\n this.expect = 'standalone'\n return this.feed(char)\n }\n break\n case '?':\n this.expect = '>'\n break\n default:\n if(! is_whitespace(char)){\n raise_error(this.parser,\n 'expected encoding, standalone or ?, got: ' + char)\n }\n }\n break\n case 'encoding':\n case 'standalone':\n var res = expect_literal(this, this.expect, char)\n if(res.value){\n this.attr_name = this.expect\n this.expect = 'eq'\n }\n break\n case '>':\n if(char == '>'){\n this.closed = true\n }else if(! is_whitespace(char)){\n if(this.closed){\n return this.origin.feed(char)\n }\n raise_error(this.parser, 'expected >, got: ' + char)\n }\n break\n default:\n raise_error(this.parser, 'unhandled case: ' + this.expect)\n }\n return this\n}\n\n/*\nPI ::= '' Char*)))? '?>'\nPITarget ::= Name - (('X' | 'x') ('M' | 'm') ('L' | 'l'))\n*/\nfunction PI(parser, origin){\n this.parser = parser\n this.origin = origin\n this.expect = 'pi_target'\n}\n\nPI.prototype.feed = function(char){\n if(this.expect == 'pi_target'){\n var res = expect_name(this, char)\n if(res.value){\n this.pi_target = res.value\n this.expect = 'content'\n }\n }else if(this.expect == 'content'){\n var res = expect_chars(this, char, '?>')\n if(res.value){\n this.content = res.value\n this.closed = true\n this.expect = 'no_whitespace'\n }\n }else if(this.expect == 'no_whitespace'){\n if(! 
is_whitespace(char)){\n return this.origin.feed(char)\n }\n }\n return this\n}\n\nfunction CDATA(){\n this.content = ''\n this.expect = ']'\n this.level = 1\n}\n\nCDATA.prototype.feed = function(char){\n switch(this.expect){\n case ']':\n if(char == '>'){\n throw Error('closed without closing ]')\n }else if(char == '['){\n this.level++\n }else if(char == ']'){\n if(this.level == 1){\n this.expect = '>'\n }else{\n this.level--\n }\n }else{\n this.content += char\n }\n break\n case '>':\n if(char != '>'){\n console.log('-- error', this, 'char', char)\n throw Error('expected \">\", got: ' + char)\n }\n this.closed = true\n break\n }\n return this\n}\n\nfunction DTD(parser){\n this.parser = parser\n this.expect = 'name_start'\n this.items = []\n}\n\nDTD.prototype.feed = function(char){\n if(this.expect == 'name_start'){\n if(is_id_start(char)){\n this.name = char\n this.expect = 'name_continue'\n }else if(char == '-'){\n this.expect = '-' // maybe comment start\n }else if(char == '['){\n return new CDATA()\n }else{\n throw Error('expected name, got ' + char)\n }\n }else if(this.expect == 'name_continue'){\n if(is_id_continue(char)){\n this.name += char\n }else{\n console.log('DD, name', this.name)\n if(this.name == 'DOCTYPE'){\n return new DOCTYPE(this.parser)\n }else if(this.name == 'ENTITY'){\n return new ENTITY(this.parser)\n }\n if(char == '>'){\n this.closed = true\n }else{\n this.expect == 'any'\n }\n }\n }else if(this.expect == '-'){\n if(char == '-'){\n // comment\n this.is_comment = true\n }else{\n throw Error('expected -, got: ' + char)\n }\n }else{\n if(char == '>'){\n this.closed = true\n }else{\n this.items.push(char)\n }\n }\n return this\n}\n\nDTD.prototype.toString = function(){\n var res = ` 0){\n res += ' '\n var items = this.items.map(x => x.toString())\n res += items.join(' ')\n }\n return res + '>'\n}\n\nfunction COMMENT(parser, origin){\n this.parser = parser\n this.origin = origin\n this.value = ''\n this.expect = '-->'\n}\n\nCOMMENT.prototype.feed = function(char){\n if(this.expect == '-->'){\n var res = expect_chars(this, char, '-->')\n if(res.value){\n this.content = res.value\n this.expect = 'no_whitespace'\n }\n }else if(this.expect == 'no_whitespace'){\n if(! is_whitespace(char)){\n return this.origin.feed(char)\n }\n }\n return this\n}\n\n/*\nelement ::= EmptyElemTag | STag content ETag\nSTag ::= '<' Name (S Attribute)* S? '>'\nAttribute ::= Name Eq AttValue\nETag ::= ''\ncontent ::= CharData?\n ((element | Reference | CDSect | PI | Comment) CharData?)*\nEmptyElemTag ::= '<' Name (S Attribute)* S? 
'/>'\n*/\n\nfunction ELEMENT(origin) {\n this.origin = origin\n this.expect = '?_/_or_name_start'\n this.attrs = $B.empty_dict()\n}\n\nELEMENT.prototype.add_attribute_name = function(attr_name){\n if(_b_.dict.$contains(this.attrs, attr_name)){\n throw Error(`duplicate attribute name: ${attr_name}`)\n }\n _b_.dict.$setitem(this.attrs, attr_name, _b_.None)\n}\n\nELEMENT.prototype.set_attribute_value = function(value){\n _b_.dict.$setitem(this.attrs, this.attr_name, value)\n}\n\nELEMENT.prototype.feed = function(char){\n console.log('ELEMENT feed, expects', this.expect, 'char', char)\n if(this.expect == 'name_start'){\n if(char == '?'){\n if(this.is_declaration){\n throw Error('already got ?')\n }\n this.is_declaration = true\n }else if(char == '/'){\n if(this.is_end){\n throw Error('already got /')\n }\n this.is_end = true\n }else if(is_id_start(char)){\n this.name = char\n this.expect = 'name_continue'\n }\n }else if(this.expect == 'name_continue'){\n if(is_id_continue(char)){\n this.name += char\n }else{\n // end of element name\n if(this.is_declaration){\n if(this.name == 'xml'){\n this.is_xml_header = true\n }else{\n return new PROCESSING_INSTRUCTION(this.parser, this.name)\n }\n }\n if(is_whitespace(char)){\n this.expect = 'attr_name_start'\n }else if(char == '>'){\n this.closed = true\n }else if(char == '/'){\n this.self_closing = true\n this.expect = '>'\n }else{\n throw Error('unexpected at end of element name: ' + char)\n }\n }\n }else if(this.expect == 'attr_name_start'){\n if(char == '/'){\n this.self_closing = true\n }else if(char == '>'){\n this.expect = 'no_whitespace'\n }else if(is_id_start(char)){\n this.attr_name = char\n this.expect = 'attr_name_continue'\n }else if(char == '?' && this.is_declaration){\n this.expect = '>'\n }else if(! is_whitespace(char)){\n throw Error('expected attribute name, got: ' + char)\n }\n }else if(this.expect == 'attr_name_continue'){\n if(is_id_continue(char)){\n this.attr_name += char\n }else if(char == '='){\n this.add_attribute_name(this.attr_name)\n this.expect = 'attr_value_start'\n this.attr_value = ''\n }else if(is_whitespace(char)){\n this.add_attribute_name(this.attr_name)\n this.expect = '='\n }else if(char == '>'){\n this.add_attribute_name(this.attr_name)\n this.closed = true\n }else{\n throw Error('unexpected character in attribute name: ' + char)\n }\n }else if(this.expect == '='){\n if(char == '='){\n this.expect = 'attr_value_start'\n }else if(! is_whitespace(char)){\n raise_error1(this, char)\n }\n }else if(this.expect == 'attr_value'){\n if(char == '='){\n this.expect = 'attr_value_start'\n this.attr_value = ''\n }else if(char == '>'){\n this.closed = true\n }else if(is_id_start(char)){\n this.attr_name = char\n this.expect = 'attr_name_continue'\n }else if(! is_whitespace(char)){\n throw Error('expected attribute value or name, got: ' + char)\n }\n }else if(this.expect == 'attr_value_start'){\n if(char == '\"' || char == \"'\"){\n this.expect = 'quote'\n this.quote = char\n this.attr_value = ''\n }else if(! 
is_whitespace(char)){\n throw Error('unexpect attribute value start: ' + char)\n }\n }else if(this.expect == \"quote\"){\n if(char == this.quote){\n this.set_attribute_value(this.attr_value)\n this.expect = 'attr_name_start'\n }else{\n this.attr_value += char\n }\n }else if(this.expect == '>'){\n if(char == '>'){\n this.closed = true\n }else{\n throw Error('expected >, got: ' + char)\n }\n }else if(this.expect == 'attr_name'){\n if(char instanceof Name){\n if(_b_.dict.__contains__(this.attrs, char.value)){\n throw Error('duplicate value ' + char.value)\n }\n _b_.dict.$setitem(this.attrs, char.value, _b_.None)\n this.last_attr = char.value\n }else if(char.value == '?' && this.is_declaration){\n if(this.question_mark){\n throw Error('already ?')\n }\n this.question_mark = true\n }else if(char == END){\n if(this.is_declaration && ! this.question_mark){\n throw Error('missing ')\n }\n }else if(char instanceof Punctuation && char.value == '/'){\n this.no_end = true\n this.expect = END\n }else{\n throw Error('expected attribute name, got ' + char)\n }\n }else if(this.expect == 'attr_value'){\n _b_.dict.$setitem(this.attrs, this.last_attr, char)\n this.expect = 'attr_name'\n }else if(this.expect == END){\n // after \"/\"\n if(char != END){\n throw Error('nothing after /')\n }\n }else if(this.expect == 'no_whitespace'){\n if(! is_whitespace(char)){\n return this.origin.feed(char)\n }\n }else{\n raise_error1(this, char)\n }\n return this\n}\n\nELEMENT.prototype.toString = function() {\n var res = `<`\n res += this.is_end ? '/' : ''\n res += this.name\n if(this.attrs.length > 0){\n res += ' '\n }\n var attrs = []\n for(var item of _b_.dict.$iter_items(this.attrs)){\n console.log('item', item)\n attrs.push(`${item.key}: ${item.value.toString()}`)\n }\n res += attrs.join(' ')\n if(this.no_end){\n res += '/'\n }\n return res + '>'\n}\n\n/*\nEntityDecl ::= GEDecl | PEDecl\nPEDecl ::= ''\nPEDef ::= EntityValue | ExternalID\n*/\nfunction ENTITY(parser){\n this.parser = parser\n}\n\nENTITY.prototype.feed = function(char){\n if(! is_whitespace(char)){\n if(is_id_start(char)){\n return new GEDecl(this.parser, char)\n }else if(char == \"%\"){\n return new PEDecl(this.parser)\n }\n throw Error('unexpected after ENTITY: ' + char)\n }\n}\n\n/*\nGEDecl ::= ''\nEntityDef ::= EntityValue | (ExternalID NDataDecl?)\nExternalID ::= 'SYSTEM' S SystemLiteral\n | 'PUBLIC' S PubidLiteral S SystemLiteral\nNDataDecl ::= S 'NDATA' S Name\nEntityValue ::= '\"' ([^%&\"] | PEReference | Reference)* '\"'\n | \"'\" ([^%&'] | PEReference | Reference)* \"'\"\n\n*/\nfunction GEDecl(parser, char){\n this.parser = parser\n this.expect = 'name_continue'\n this.name = char\n this.state = 'name'\n}\n\nGEDecl.prototype.feed = function(char){\n switch(this.expect){\n case 'name_start':\n if(is_id_start(char)){\n if(this.state == 'NDATA'){\n this.ndata_name = char\n }\n this.expect = 'name_continue'\n }else if(! 
is_whitespace(char)){\n throw Error('GEDecl expected name start, got: ' + char)\n }\n break\n case 'name_continue':\n if(is_id_continue(char)){\n if(this.state == 'name'){\n this.name += char\n }else if(this.state == 'NDATA'){\n this.ndata_name += char\n }\n }else if(is_whitespace(char)){\n if(this.state == 'NDATA'){\n this.expect = '>'\n }else{\n this.expect = 'entity_def'\n }\n }else if(char == '>' && this.state == 'NDATA'){\n this.closed = true\n }else{\n throw Error('GEDecl expected name, got: ' + char)\n }\n break\n case 'entity_def':\n if(is_quote(char)){\n this.quoted = ''\n this.state = this.expect\n this.expect = char\n }else if(char == 'S' || char == 'P'){\n this.expect = char == 'S' ? 'SYSTEM' : 'PUBLIC'\n this.expect_pos = 1\n this.external_id = this.expect\n }else if(! is_whitespace(char)){\n throw Error('GEDCL expect quote, SYSTEM or PUBLIC, got: ' + char)\n }\n break\n case 'SYSTEM':\n case 'PUBLIC':\n if(char == this.expect[this.expect_pos]){\n this.expect_pos++\n if(this.expect_pos == this.expect.length){\n this.expect = this.expect == 'SYSTEM' ? 'system_literal' :\n 'pubid_literal'\n }\n }else{\n throw Error(`GEDecl expected ${this.expect}, got: ${char}`)\n }\n break\n case 'NDATA':\n if(char == this.expect[this.expect_pos]){\n this.expect_pos++\n if(this.expect_pos == this.expect.length){\n this.expect = 'name_start'\n this.ndata_name = ''\n this.state = 'NDATA'\n }\n }else{\n throw Error(`GEDecl expected ${this.expect}, got: ${char}`)\n }\n break\n case '\"':\n case \"'\":\n if(this.state == 'entity_def'){\n if(char == this.expect){\n this.entity_def = this.quoted\n this.expect = '>'\n }else{\n this.quoted += char\n }\n }else if(this.state == 'system_literal'){\n if(char == this.expect){\n this.system_literal = this.quoted\n this.expect = 'n_data_decl_or_close'\n }else{\n this.quoted += char\n }\n }\n break\n case 'system_literal':\n if(is_quote(char)){\n this.expect = char\n this.state = 'system_literal'\n this.quoted = ''\n }else if(! is_whitespace(char)){\n throw Error('GEDecl expected SystemLiteral, got: ' + char)\n }\n break\n case '>':\n if(! is_whitespace(char)){\n if(char == '>'){\n this.closed = true\n }else{\n throw Error('GEDecl expected >, got: ' + char)\n }\n }\n break\n case 'n_data_decl_or_close':\n if(char == '>'){\n this.closed = true\n }else if(char == 'N'){\n this.expect = 'NDATA'\n this.expect_pos = 1\n }else if(! is_whitespace(char)){\n throw Error('GEDecl expected NDATA or >, got: ' + char)\n }\n break\n default:\n console.log(this.parser._buffer.substr(0, this.parser._pos))\n throw Error('pas fini...')\n }\n return this\n}\n\n/*\nExternalID ::= 'SYSTEM' S SystemLiteral\n | 'PUBLIC' S PubidLiteral S SystemLiteral\n*/\nfunction ExternalID(origin){\n this.origin = origin\n this.expect = 'first'\n}\n\nExternalID.prototype.feed = function(char){\n if(this.expect == 'first'){\n if(! is_whitespace(char)){\n if(char == 'S'){\n this.expect = 'SYSTEM'\n return this.feed(char)\n }else if(char == 'P'){\n this.expect = 'PUBLIC'\n return this.feed(char)\n }else{\n raise_error(this, 'ExternalID expected SYSTME or PUBLIC, got: ' + char)\n }\n }\n }else if(this.expect == 'SYSTEM' || this.expect == 'PUBLIC'){\n var res = expect_literal(this, this.expect, char)\n if(res.value){\n this.type = this.expect\n if(this.type == 'SYSTEM'){\n this.expect = '[_or_>'\n return new SystemLiteral(this)\n }else{\n this.expect = 'system_after_pubid'\n return new PubidLiteral(this)\n }\n }\n }else if(this.expect == 'system_after_pubid'){\n if(! 
is_whitespace(char)){\n this.expect = '[_or_>'\n return (new SystemLiteral(this)).feed(char)\n }\n }else if(this.expect == '[_or_>'){\n if(char == '['){\n this.expect = '>'\n return new intSubset(this)\n }else if(char == '>'){\n return this.origin.feed(char)\n }else{\n raise_error1(this, char)\n }\n }else if(this.expect == '>'){\n if(char == '>'){\n this.expect = 'no_whitespace'\n }else if(! is_whitespace(char)){\n raise_error1(this, char)\n }\n }else if(this.expect == 'no_whitespace'){\n if(! is_whitespace(char)){\n console.log('return to origin', this.origin, 'char', char)\n return this.origin.feed(char)\n }\n }\n return this\n}\n\n/*\nPubidLiteral ::= '\"' PubidChar* '\"' | \"'\" (PubidChar - \"'\")* \"'\"\nPubidChar ::= #x20 | #xD | #xA | [a-zA-Z0-9]\n | [-'()+,./:=?;!*#@$_%]\n*/\nfunction PubidLiteral(origin){\n this.origin = origin\n this.expect = 'quote'\n}\n\n\nfunction is_pubid_char(char){\n /*\n#x20 | #xD | #xA | [a-zA-Z0-9]\n | [-'()+,./:=?;!*#@$_%]\n*/\n return char.match(new RegExp(\"[a-zA-Z0-9-'()+,./:=?;!*#@$_%]\")) ||\n ' \\n\\r'.includes(char)\n}\n\nPubidLiteral.prototype.feed = function(char){\n if(this.expect == 'quote'){\n if(is_quote(char)){\n this.expect = char\n this.content = ''\n }else if(! is_whitespace(char)){\n raise_error1(this, char)\n }\n }else if(this.expect == 'no_whitespace'){\n if(! is_whitespace(char)){\n return this.origin.feed(char)\n }\n }else{\n if(char == this.expect){\n this.expect = 'no_whitespace'\n }else if(is_pubid_char(char)){\n this.content += char\n }else{\n console.log('PubidLiteral expects', this.expect, 'char', char)\n console.log(is_pubid_char(char))\n raise_error1(this, char)\n }\n }\n return this\n}\n\nfunction SystemLiteral(origin){\n this.origin = origin\n this.expect = 'quote'\n}\n\nSystemLiteral.prototype.feed = function(char){\n console.log('SystemLiteral expects', this.expect, 'char', char)\n if(this.expect == 'quote'){\n if(is_quote(char)){\n this.expect = char\n this.content = ''\n }else if(! is_whitespace(char)){\n raise_error1(this, char)\n }\n }else if(this.expect == 'no_whitespace'){\n if(! is_whitespace(char)){\n return this.origin.feed(char)\n }\n }else{\n if(char == this.expect){\n this.expect = 'no_whitespace'\n }else{\n this.content += char\n }\n }\n return this\n}\n\nfunction PROCESSING_INSTRUCTION(parser, name){\n this.parser = parser\n this.name = name\n this.expect = '?'\n this.content = ''\n}\n\nPROCESSING_INSTRUCTION.prototype.feed = function(char){\n // capture everything until the sequence ?>\n if(this.expect == '?'){\n if(char == '?'){\n this.expect = '>'\n }else{\n this.content += char\n }\n }else if(this.expect == '>'){\n if(char == '>'){\n this.closed = true\n }else{\n this.content += '?' 
+ char\n this.expect = '-'\n }\n }\n return this\n}\n\nfunction ATTR(name){\n this.name = name\n}\n\nATTR.prototype.toString = function(){\n var res = this.name\n if(this.hasOwnProperty('value')){\n res += '=' + this.value\n }\n return res\n}\n\nfunction DATA(value) {\n this.value = value\n}\n\nDATA.prototype.toString = function() {\n return `${this.value}`\n}\n\nvar START = 'START'\nvar END = 'END'\n\n\nfunction Name(value){\n this.value = value\n}\n\nName.prototype.toString = function(){\n return this.value\n}\n\nfunction Punctuation(value){\n this.value = value\n}\n\nfunction String(quote, value){\n this.quote = quote\n this.value = value\n}\n\nString.prototype.toString = function(){\n return this.quote + this.value + this.quote\n}\n\nconst punctuations = '!?/'\n\nfunction open(url){\n var xhr = new XMLHttpRequest()\n xhr.open('GET', url, false)\n xhr.onreadystatechange = function(ev){\n if(this.readyState == 4){\n process(this.responseText)\n }\n }\n xhr.send()\n}\n\nfunction create_parser(){\n var $ = $B.args('ParserCreate', 3,\n {encoding: null, namespace_separator: null, intern: null},\n ['encoding', 'namespace_separator', 'intern'], arguments,\n {encoding: _b_.None, namespace_separator: _b_.None, intern: _b_.None},\n null, null),\n encoding = $.encoding,\n ns_sep = $.namespace_separator,\n intern = $.intern\n if(encoding !== _b_.None && ! _b_.isinstance(encoding, _b_.str)){\n throw _b_.TypeError.$factory(\n `ParserCreate() argument 'encoding' must be ` +\n `str or None, not ${$B.class_name(encoding)}`)\n }\n if(ns_sep !== _b_.None){\n if(! _b_.isinstance(ns_sep, _b_.str)){\n throw _b_.TypeError.$factory(\n `ParserCreate() argument 'namespace_separator' must be ` +\n `str or None, not ${$B.class_name(ns_sep)}`)\n }\n if(ns_sep.length != 1){\n throw _b_.ValueError.$factory(\"namespace_separator must be at \" +\n \"most one character, omitted, or None\")\n }\n }\n if(intern === _b_.None){\n intern = $B.empty_dict()\n }else if(! _b_.isinstance(intern, _b_.dict)){\n throw _b_.TypeError.$factory('intern must be a dictionary')\n }\n return xmlparser.$factory(encoding, ns_sep, intern)\n}\n\nfunction display(text){\n report.value += text + '\\n'\n}\n\nfunction process(src){\n var indent = 0\n for(var token of xml_tokenizer(src)){\n if(indent > 50){\n break\n }\n var head = ' '.repeat(indent)\n if(token instanceof DATA){\n display(head + ' ' + token.toString())\n }else if(token instanceof ELEMENT){\n if(token.is_end){\n indent--\n }\n head = ' '.repeat(indent)\n display(head + token.toString())\n if(token.is_end || token.self_closing || token.is_declaration){\n //\n }else{\n indent++\n }\n }else if(token instanceof DECLARATION){\n display(head + token.toString())\n }else{\n console.log(head + 'token', token, token.toString())\n }\n }\n}\n\nfunction is_id_start(char){\n return char.match(/\\p{L}/u) || char == \"_\"\n}\n\nfunction is_id_continue(char){\n return char.match(/\\p{L}/u) || \"-_:\".includes(char) || char.match(/\\d/)\n}\n\nfunction is_whitespace(s){\n for(let char of s){\n if(! 
' \\n\\r\\t'.includes(char)){\n return false\n }\n }\n return s.length > 0\n}\n\nfunction is_quote(char){\n return char == '\"' || char == \"'\"\n}\n\nfunction is_char(char){\n // #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]\n var cp = char.codePointAt(0)\n return ([0x9, 0xa, 0xd].includes(cp)) ||\n (0x20 <= cp && cp <= 0xd7ff) ||\n (0xe000 <= cp && cp <= 0xfffd) ||\n (0x10000 <= cp && cp <= 0x10ffff)\n}\n\nvar model = 'model',\n errors = 'errors'\n\n$B.addToImported('pyexpat',\n {\n create_parser,\n ParserCreate: create_parser,\n model,\n error,\n errors,\n XML_PARAM_ENTITY_PARSING_NEVER,\n XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE,\n XML_PARAM_ENTITY_PARSING_ALWAYS\n }\n)\n\n})(__BRYTHON__)"], "_svg": [".js", "// creation of a SVG element\n(function($B){\n\nvar _b_ = $B.builtins\nvar TagSum = $B.TagSum // defined in py_dom.js\n\nvar $svgNS = \"http://www.w3.org/2000/svg\"\nvar $xlinkNS = \"http://www.w3.org/1999/xlink\"\n\nfunction makeTagDict(tagName){\n // return the dictionary for the class associated with tagName\n var dict = $B.make_class(tagName)\n\n dict.__init__ = function(){\n var $ns = $B.args('__init__', 1, {self: null}, ['self'],\n arguments, {}, 'args', 'kw'),\n self = $ns['self'],\n args = $ns['args']\n if(args.length == 1){\n var first = args[0]\n if($B.$isinstance(first, [_b_.str, _b_.int, _b_.float])){\n self.appendChild(document.createTextNode(_b_.str.$factory(first)))\n }else if(first.__class__ === TagSum){\n for(var i = 0, len = first.children.length; i < len; i++){\n self.appendChild(first.children[i].elt)\n }\n }else{ // argument is another DOMNode instance\n try{self.appendChild(first.elt)}\n catch(err){throw _b_.ValueError.$factory('wrong element ' + first)}\n }\n }\n\n // attributes\n var items = _b_.list.$factory(_b_.dict.items($ns['kw']))\n for(var item of _b_.dict.$iter_items($ns.kw)){\n // keyword arguments\n var arg = item.key,\n value = $B.py_immutable_to_js(item.value)\n if(arg.toLowerCase().substr(0,2) == \"on\"){\n // Event binding passed as argument \"onclick\", \"onfocus\"...\n // Better use method bind of DOMNode objects\n $B.DOMNode.bind(self,\n arg.toLowerCase().substr(2),\n value)\n }else if(arg.toLowerCase() == \"style\"){\n $B.DOMNode.set_style(self, value)\n }else if(arg.toLowerCase().indexOf(\"href\") !== -1){ // xlink:href\n self.setAttributeNS( \"http://www.w3.org/1999/xlink\",\n \"href\",value)\n }else{\n if(value !== false){\n // option.selected=false sets it to true :-)\n try{\n arg = arg.replace('_', '-')\n self.setAttributeNS(null, arg, value)\n }catch(err){\n throw _b_.ValueError.$factory(\"can't set attribute \" + arg)\n }\n }\n }\n }\n }\n\n dict.__mro__ = [$B.DOMNode, $B.builtins.object]\n\n dict.__new__ = function(cls){\n var res = $B.DOMNode.$factory(document.createElementNS($svgNS, tagName))\n res.__class__ = cls\n return res\n }\n\n dict.$factory = function(){\n var res = $B.DOMNode.$factory(\n document.createElementNS($svgNS, tagName))\n res.__class__ = dict\n // apply __init__\n dict.__init__(res, ...arguments)\n return res\n }\n\n $B.set_func_names(dict, \"browser.svg\")\n\n return dict\n}\n\n\n// SVG\nvar $svg_tags = ['a',\n'altGlyph',\n'altGlyphDef',\n'altGlyphItem',\n'animate',\n'animateColor',\n'animateMotion',\n'animateTransform',\n'circle',\n'clipPath',\n'color_profile', // instead of color-profile\n'cursor',\n'defs',\n'desc',\n'ellipse',\n'feBlend',\n'foreignObject', //patch to enable foreign 
objects\n'g',\n'image',\n'line',\n'linearGradient',\n'marker',\n'mask',\n'path',\n'pattern',\n'polygon',\n'polyline',\n'radialGradient',\n'rect',\n'set',\n'stop',\n'svg',\n'text',\n'tref',\n'tspan',\n'use']\n\n// create classes\nvar obj = new Object()\nvar dicts = {}\nfor(var i = 0, len = $svg_tags.length; i < len; i++){\n var tag = $svg_tags[i]\n obj[tag] = makeTagDict(tag)\n}\n\n$B.imported._svg = obj\n})(__BRYTHON__)\n"], "_locale": [".js", "var am = {\n \"C\": \"AM\",\n \"aa\": \"saaku\",\n \"ab\": \"AM\",\n \"ae\": \"AM\",\n \"af\": \"vm.\",\n \"ak\": \"AN\",\n \"am\": \"\\u1325\\u12cb\\u1275\",\n \"an\": \"AM\",\n \"ar\": \"\\u0635\",\n \"as\": \"\\u09f0\\u09be\\u09a4\\u09bf\\u09aa\\u09c1\",\n \"av\": \"AM\",\n \"ay\": \"AM\",\n \"az\": \"AM\",\n \"ba\": \"\",\n \"be\": \"\",\n \"bg\": \"\",\n \"bh\": \"AM\",\n \"bi\": \"AM\",\n \"bm\": \"AM\",\n \"bn\": \"AM\",\n \"bo\": \"\\u0f66\\u0f94\\u0f0b\\u0f51\\u0fb2\\u0f7c\",\n \"br\": \"A.M.\",\n \"bs\": \"prijepodne\",\n \"ca\": \"a. m.\",\n \"ce\": \"AM\",\n \"ch\": \"AM\",\n \"co\": \"\",\n \"cr\": \"AM\",\n \"cs\": \"dop.\",\n \"cu\": \"\\u0414\\u041f\",\n \"cv\": \"AM\",\n \"cy\": \"yb\",\n \"da\": \"\",\n \"de\": \"\",\n \"dv\": \"\\u0789\\u0786\",\n \"dz\": \"\\u0f66\\u0f94\\u0f0b\\u0f46\\u0f0b\",\n \"ee\": \"\\u014bdi\",\n \"el\": \"\\u03c0\\u03bc\",\n \"en\": \"AM\",\n \"eo\": \"atm\",\n \"es\": \"\",\n \"et\": \"AM\",\n \"eu\": \"AM\",\n \"fa\": \"\\u0642.\\u0638\",\n \"ff\": \"\",\n \"fi\": \"ap.\",\n \"fj\": \"AM\",\n \"fo\": \"um fyr.\",\n \"fr\": \"\",\n \"fy\": \"AM\",\n \"ga\": \"r.n.\",\n \"gd\": \"m\",\n \"gl\": \"a.m.\",\n \"gn\": \"a.m.\",\n \"gu\": \"\\u0aaa\\u0ac2\\u0ab0\\u0acd\\u0ab5\\u00a0\\u0aae\\u0aa7\\u0acd\\u0aaf\\u0abe\\u0ab9\\u0acd\\u0aa8\",\n \"gv\": \"a.m.\",\n \"ha\": \"AM\",\n \"he\": \"AM\",\n \"hi\": \"\\u092a\\u0942\\u0930\\u094d\\u0935\\u093e\\u0939\\u094d\\u0928\",\n \"ho\": \"AM\",\n \"hr\": \"\",\n \"ht\": \"AM\",\n \"hu\": \"de.\",\n \"hy\": \"\",\n \"hz\": \"AM\",\n \"ia\": \"a.m.\",\n \"id\": \"AM\",\n \"ie\": \"AM\",\n \"ig\": \"A.M.\",\n \"ii\": \"\\ua0b5\\ua1aa\\ua20c\\ua210\",\n \"ik\": \"AM\",\n \"io\": \"AM\",\n \"is\": \"f.h.\",\n \"it\": \"\",\n \"iu\": \"AM\",\n \"ja\": \"\\u5348\\u524d\",\n \"jv\": \"\",\n \"ka\": \"AM\",\n \"kg\": \"AM\",\n \"ki\": \"Kiroko\",\n \"kj\": \"AM\",\n \"kk\": \"AM\",\n \"kl\": \"\",\n \"km\": \"\\u1796\\u17d2\\u179a\\u17b9\\u1780\",\n \"kn\": \"\\u0caa\\u0cc2\\u0cb0\\u0ccd\\u0cb5\\u0cbe\\u0cb9\\u0ccd\\u0ca8\",\n \"ko\": \"\\uc624\\uc804\",\n \"kr\": \"AM\",\n \"ks\": \"AM\",\n \"ku\": \"\\u067e.\\u0646\",\n \"kv\": \"AM\",\n \"kw\": \"a.m.\",\n \"ky\": \"\",\n \"la\": \"\",\n \"lb\": \"\",\n \"lg\": \"AM\",\n \"li\": \"AM\",\n \"ln\": \"nt\\u0254\\u0301ng\\u0254\\u0301\",\n \"lo\": \"\\u0e81\\u0ec8\\u0ead\\u0e99\\u0e97\\u0ec8\\u0ebd\\u0e87\",\n \"lt\": \"prie\\u0161piet\",\n \"lu\": \"Dinda\",\n \"lv\": \"priek\\u0161p.\",\n \"mg\": \"AM\",\n \"mh\": \"AM\",\n \"mi\": \"a.m.\",\n \"mk\": \"\\u043f\\u0440\\u0435\\u0442\\u043f\\u043b.\",\n \"ml\": \"AM\",\n \"mn\": \"??\",\n \"mo\": \"AM\",\n \"mr\": \"\\u092e.\\u092a\\u0942.\",\n \"ms\": \"PG\",\n \"mt\": \"AM\",\n \"my\": \"\\u1014\\u1036\\u1014\\u1000\\u103a\",\n \"na\": \"AM\",\n \"nb\": \"a.m.\",\n \"nd\": \"AM\",\n \"ne\": \"\\u092a\\u0942\\u0930\\u094d\\u0935\\u093e\\u0939\\u094d\\u0928\",\n \"ng\": \"AM\",\n \"nl\": \"\",\n \"nn\": \"f.m.\",\n \"no\": \"a.m.\",\n \"nr\": \"AM\",\n \"nv\": \"AM\",\n \"ny\": \"AM\",\n \"oc\": \"AM\",\n \"oj\": \"AM\",\n \"om\": \"WD\",\n \"or\": \"AM\",\n \"os\": 
\"AM\",\n \"pa\": \"\\u0a38\\u0a35\\u0a47\\u0a30\",\n \"pi\": \"AM\",\n \"pl\": \"AM\",\n \"ps\": \"\\u063a.\\u0645.\",\n \"pt\": \"\",\n \"qu\": \"a.m.\",\n \"rc\": \"AM\",\n \"rm\": \"AM\",\n \"rn\": \"Z.MU.\",\n \"ro\": \"a.m.\",\n \"ru\": \"\",\n \"rw\": \"AM\",\n \"sa\": \"\\u092e\\u0927\\u094d\\u092f\\u093e\\u0928\\u092a\\u0942\\u0930\\u094d\\u0935\",\n \"sc\": \"AM\",\n \"sd\": \"AM\",\n \"se\": \"i.b.\",\n \"sg\": \"ND\",\n \"sh\": \"AM\",\n \"si\": \"\\u0db4\\u0dd9.\\u0dc0.\",\n \"sk\": \"AM\",\n \"sl\": \"dop.\",\n \"sm\": \"AM\",\n \"sn\": \"AM\",\n \"so\": \"sn.\",\n \"sq\": \"e paradites\",\n \"sr\": \"pre podne\",\n \"ss\": \"AM\",\n \"st\": \"AM\",\n \"su\": \"AM\",\n \"sv\": \"\",\n \"sw\": \"AM\",\n \"ta\": \"\\u0b95\\u0bbe\\u0bb2\\u0bc8\",\n \"te\": \"\\u0c2a\\u0c42\\u0c30\\u0c4d\\u0c35\\u0c3e\\u0c39\\u0c4d\\u0c28\",\n \"tg\": \"\",\n \"th\": \"AM\",\n \"ti\": \"\\u1295\\u1309\\u1206 \\u1230\\u12d3\\u1270\",\n \"tk\": \"\",\n \"tl\": \"AM\",\n \"tn\": \"AM\",\n \"to\": \"AM\",\n \"tr\": \"\\u00d6\\u00d6\",\n \"ts\": \"AM\",\n \"tt\": \"\",\n \"tw\": \"AM\",\n \"ty\": \"AM\",\n \"ug\": \"\\u0686?\\u0634\\u062a\\u0649\\u0646 \\u0628?\\u0631?\\u0646\",\n \"uk\": \"AM\",\n \"ur\": \"AM\",\n \"uz\": \"TO\",\n \"ve\": \"AM\",\n \"vi\": \"SA\",\n \"vo\": \"AM\",\n \"wa\": \"AM\",\n \"wo\": \"\",\n \"xh\": \"AM\",\n \"yi\": \"\\ua0b5\\ua1aa\\ua20c\\ua210\",\n \"yo\": \"\\u00c0\\u00e1r?`\",\n \"za\": \"AM\",\n \"zh\": \"\\u4e0a\\u5348\",\n \"zu\": \"AM\"\n}\nvar pm = {\n \"C\": \"PM\",\n \"aa\": \"carra\",\n \"ab\": \"PM\",\n \"ae\": \"PM\",\n \"af\": \"nm.\",\n \"ak\": \"EW\",\n \"am\": \"\\u12a8\\u1230\\u12d3\\u1275\",\n \"an\": \"PM\",\n \"ar\": \"\\u0645\",\n \"as\": \"\\u0986\\u09ac\\u09c7\\u09b2\\u09bf\",\n \"av\": \"PM\",\n \"ay\": \"PM\",\n \"az\": \"PM\",\n \"ba\": \"\",\n \"be\": \"\",\n \"bg\": \"\",\n \"bh\": \"PM\",\n \"bi\": \"PM\",\n \"bm\": \"PM\",\n \"bn\": \"PM\",\n \"bo\": \"\\u0f55\\u0fb1\\u0f72\\u0f0b\\u0f51\\u0fb2\\u0f7c\",\n \"br\": \"G.M.\",\n \"bs\": \"popodne\",\n \"ca\": \"p. 
m.\",\n \"ce\": \"PM\",\n \"ch\": \"PM\",\n \"co\": \"\",\n \"cr\": \"PM\",\n \"cs\": \"odp.\",\n \"cu\": \"\\u041f\\u041f\",\n \"cv\": \"PM\",\n \"cy\": \"yh\",\n \"da\": \"\",\n \"de\": \"\",\n \"dv\": \"\\u0789\\u078a\",\n \"dz\": \"\\u0f55\\u0fb1\\u0f72\\u0f0b\\u0f46\\u0f0b\",\n \"ee\": \"\\u0263etr\\u0254\",\n \"el\": \"\\u03bc\\u03bc\",\n \"en\": \"PM\",\n \"eo\": \"ptm\",\n \"es\": \"\",\n \"et\": \"PM\",\n \"eu\": \"PM\",\n \"fa\": \"\\u0628.\\u0638\",\n \"ff\": \"\",\n \"fi\": \"ip.\",\n \"fj\": \"PM\",\n \"fo\": \"um sein.\",\n \"fr\": \"\",\n \"fy\": \"PM\",\n \"ga\": \"i.n.\",\n \"gd\": \"f\",\n \"gl\": \"p.m.\",\n \"gn\": \"p.m.\",\n \"gu\": \"\\u0a89\\u0aa4\\u0acd\\u0aa4\\u0ab0\\u00a0\\u0aae\\u0aa7\\u0acd\\u0aaf\\u0abe\\u0ab9\\u0acd\\u0aa8\",\n \"gv\": \"p.m.\",\n \"ha\": \"PM\",\n \"he\": \"PM\",\n \"hi\": \"\\u0905\\u092a\\u0930\\u093e\\u0939\\u094d\\u0928\",\n \"ho\": \"PM\",\n \"hr\": \"\",\n \"ht\": \"PM\",\n \"hu\": \"du.\",\n \"hy\": \"\",\n \"hz\": \"PM\",\n \"ia\": \"p.m.\",\n \"id\": \"PM\",\n \"ie\": \"PM\",\n \"ig\": \"P.M.\",\n \"ii\": \"\\ua0b5\\ua1aa\\ua20c\\ua248\",\n \"ik\": \"PM\",\n \"io\": \"PM\",\n \"is\": \"e.h.\",\n \"it\": \"\",\n \"iu\": \"PM\",\n \"ja\": \"\\u5348\\u5f8c\",\n \"jv\": \"\",\n \"ka\": \"PM\",\n \"kg\": \"PM\",\n \"ki\": \"Hwa\\u0129-in\\u0129\",\n \"kj\": \"PM\",\n \"kk\": \"PM\",\n \"kl\": \"\",\n \"km\": \"\\u179b\\u17d2\\u1784\\u17b6\\u1785\",\n \"kn\": \"\\u0c85\\u0caa\\u0cb0\\u0cbe\\u0cb9\\u0ccd\\u0ca8\",\n \"ko\": \"\\uc624\\ud6c4\",\n \"kr\": \"PM\",\n \"ks\": \"PM\",\n \"ku\": \"\\u062f.\\u0646\",\n \"kv\": \"PM\",\n \"kw\": \"p.m.\",\n \"ky\": \"\",\n \"la\": \"\",\n \"lb\": \"\",\n \"lg\": \"PM\",\n \"li\": \"PM\",\n \"ln\": \"mp\\u00f3kwa\",\n \"lo\": \"\\u0eab\\u0ebc\\u0eb1\\u0e87\\u0e97\\u0ec8\\u0ebd\\u0e87\",\n \"lt\": \"popiet\",\n \"lu\": \"Dilolo\",\n \"lv\": \"p\\u0113cp.\",\n \"mg\": \"PM\",\n \"mh\": \"PM\",\n \"mi\": \"p.m.\",\n \"mk\": \"\\u043f\\u043e\\u043f\\u043b.\",\n \"ml\": \"PM\",\n \"mn\": \"?\\u0425\",\n \"mo\": \"PM\",\n \"mr\": \"\\u092e.\\u0928\\u0902.\",\n \"ms\": \"PTG\",\n \"mt\": \"PM\",\n \"my\": \"\\u100a\\u1014\\u1031\",\n \"na\": \"PM\",\n \"nb\": \"p.m.\",\n \"nd\": \"PM\",\n \"ne\": \"\\u0905\\u092a\\u0930\\u093e\\u0939\\u094d\\u0928\",\n \"ng\": \"PM\",\n \"nl\": \"\",\n \"nn\": \"e.m.\",\n \"no\": \"p.m.\",\n \"nr\": \"PM\",\n \"nv\": \"PM\",\n \"ny\": \"PM\",\n \"oc\": \"PM\",\n \"oj\": \"PM\",\n \"om\": \"WB\",\n \"or\": \"PM\",\n \"os\": \"PM\",\n \"pa\": \"\\u0a36\\u0a3e\\u0a2e\",\n \"pi\": \"PM\",\n \"pl\": \"PM\",\n \"ps\": \"\\u063a.\\u0648.\",\n \"pt\": \"\",\n \"qu\": \"p.m.\",\n \"rc\": \"PM\",\n \"rm\": \"PM\",\n \"rn\": \"Z.MW.\",\n \"ro\": \"p.m.\",\n \"ru\": \"\",\n \"rw\": \"PM\",\n \"sa\": \"\\u092e\\u0927\\u094d\\u092f\\u093e\\u0928\\u092a\\u091a\\u094d\\u092f\\u093e\\u0924\",\n \"sc\": \"PM\",\n \"sd\": \"PM\",\n \"se\": \"e.b.\",\n \"sg\": \"LK\",\n \"sh\": \"PM\",\n \"si\": \"\\u0db4.\\u0dc0.\",\n \"sk\": \"PM\",\n \"sl\": \"pop.\",\n \"sm\": \"PM\",\n \"sn\": \"PM\",\n \"so\": \"gn.\",\n \"sq\": \"e pasdites\",\n \"sr\": \"po podne\",\n \"ss\": \"PM\",\n \"st\": \"PM\",\n \"su\": \"PM\",\n \"sv\": \"\",\n \"sw\": \"PM\",\n \"ta\": \"\\u0bae\\u0bbe\\u0bb2\\u0bc8\",\n \"te\": \"\\u0c05\\u0c2a\\u0c30\\u0c3e\\u0c39\\u0c4d\\u0c28\",\n \"tg\": \"\",\n \"th\": \"PM\",\n \"ti\": \"\\u12f5\\u1215\\u122d \\u1230\\u12d3\\u1275\",\n \"tk\": \"\",\n \"tl\": \"PM\",\n \"tn\": \"PM\",\n \"to\": \"PM\",\n \"tr\": \"\\u00d6S\",\n \"ts\": \"PM\",\n \"tt\": \"\",\n \"tw\": \"PM\",\n 
\"ty\": \"PM\",\n \"ug\": \"\\u0686?\\u0634\\u062a\\u0649\\u0646 \\u0643?\\u064a\\u0649\\u0646\",\n \"uk\": \"PM\",\n \"ur\": \"PM\",\n \"uz\": \"TK\",\n \"ve\": \"PM\",\n \"vi\": \"CH\",\n \"vo\": \"PM\",\n \"wa\": \"PM\",\n \"wo\": \"\",\n \"xh\": \"PM\",\n \"yi\": \"\\ua0b5\\ua1aa\\ua20c\\ua248\",\n \"yo\": \"?`s\\u00e1n\",\n \"za\": \"PM\",\n \"zh\": \"\\u4e0b\\u5348\",\n \"zu\": \"PM\"\n}\n\nvar X_format = {\n \"%H:%M:%S\": [\n \"C\",\n \"ab\",\n \"ae\",\n \"af\",\n \"an\",\n \"av\",\n \"ay\",\n \"az\",\n \"ba\",\n \"be\",\n \"bg\",\n \"bh\",\n \"bi\",\n \"bm\",\n \"bo\",\n \"br\",\n \"bs\",\n \"ca\",\n \"ce\",\n \"ch\",\n \"co\",\n \"cr\",\n \"cs\",\n \"cu\",\n \"cv\",\n \"cy\",\n \"da\",\n \"de\",\n \"dv\",\n \"eo\",\n \"es\",\n \"et\",\n \"eu\",\n \"ff\",\n \"fj\",\n \"fo\",\n \"fr\",\n \"fy\",\n \"ga\",\n \"gd\",\n \"gl\",\n \"gn\",\n \"gu\",\n \"gv\",\n \"ha\",\n \"he\",\n \"hi\",\n \"ho\",\n \"hr\",\n \"ht\",\n \"hu\",\n \"hy\",\n \"hz\",\n \"ia\",\n \"ie\",\n \"ig\",\n \"ik\",\n \"io\",\n \"is\",\n \"it\",\n \"ja\",\n \"ka\",\n \"kg\",\n \"ki\",\n \"kj\",\n \"kk\",\n \"kl\",\n \"km\",\n \"kn\",\n \"kv\",\n \"kw\",\n \"ky\",\n \"la\",\n \"lb\",\n \"lg\",\n \"li\",\n \"ln\",\n \"lo\",\n \"lt\",\n \"lu\",\n \"lv\",\n \"mg\",\n \"mh\",\n \"mk\",\n \"mn\",\n \"mo\",\n \"mr\",\n \"mt\",\n \"my\",\n \"na\",\n \"nb\",\n \"nd\",\n \"ng\",\n \"nl\",\n \"nn\",\n \"no\",\n \"nr\",\n \"nv\",\n \"ny\",\n \"oj\",\n \"or\",\n \"os\",\n \"pi\",\n \"pl\",\n \"ps\",\n \"pt\",\n \"rc\",\n \"rm\",\n \"rn\",\n \"ro\",\n \"ru\",\n \"rw\",\n \"sa\",\n \"sc\",\n \"se\",\n \"sg\",\n \"sh\",\n \"sk\",\n \"sl\",\n \"sm\",\n \"sn\",\n \"sr\",\n \"ss\",\n \"st\",\n \"su\",\n \"sv\",\n \"sw\",\n \"ta\",\n \"te\",\n \"tg\",\n \"th\",\n \"tk\",\n \"tl\",\n \"tn\",\n \"tr\",\n \"ts\",\n \"tt\",\n \"tw\",\n \"ty\",\n \"ug\",\n \"uk\",\n \"uz\",\n \"ve\",\n \"vo\",\n \"wa\",\n \"wo\",\n \"xh\",\n \"yo\",\n \"za\",\n \"zh\",\n \"zu\"\n ],\n \"%i:%M:%S %p\": [\n \"aa\",\n \"ak\",\n \"am\",\n \"bn\",\n \"el\",\n \"en\",\n \"iu\",\n \"kr\",\n \"ks\",\n \"mi\",\n \"ml\",\n \"ms\",\n \"ne\",\n \"om\",\n \"sd\",\n \"so\",\n \"sq\",\n \"ti\",\n \"to\",\n \"ur\",\n \"vi\"\n ],\n \"%I:%M:%S %p\": [\n \"ar\",\n \"fa\",\n \"ku\",\n \"qu\"\n ],\n \"%p %i:%M:%S\": [\n \"as\",\n \"ii\",\n \"ko\",\n \"yi\"\n ],\n \"\\u0f46\\u0f74\\u0f0b\\u0f5a\\u0f7c\\u0f51\\u0f0b%i:%M:%S %p\": [\n \"dz\"\n ],\n \"%p ga %i:%M:%S\": [\n \"ee\"\n ],\n \"%H.%M.%S\": [\n \"fi\",\n \"id\",\n \"jv\",\n \"oc\",\n \"si\"\n ],\n \"%p %I:%M:%S\": [\n \"pa\"\n ]\n}\nvar x_format = {\n \"%m/%d/%y\": [\n \"C\"\n ],\n \"%d/%m/%Y\": [\n \"aa\",\n \"am\",\n \"bm\",\n \"bn\",\n \"ca\",\n \"co\",\n \"cy\",\n \"el\",\n \"es\",\n \"ff\",\n \"fr\",\n \"ga\",\n \"gd\",\n \"gl\",\n \"gn\",\n \"gv\",\n \"ha\",\n \"he\",\n \"id\",\n \"ig\",\n \"it\",\n \"iu\",\n \"jv\",\n \"ki\",\n \"kr\",\n \"kw\",\n \"la\",\n \"lg\",\n \"ln\",\n \"lo\",\n \"lu\",\n \"mi\",\n \"ml\",\n \"ms\",\n \"mt\",\n \"nd\",\n \"oc\",\n \"om\",\n \"pt\",\n \"qu\",\n \"rn\",\n \"sd\",\n \"sg\",\n \"so\",\n \"sw\",\n \"ti\",\n \"to\",\n \"uk\",\n \"ur\",\n \"uz\",\n \"vi\",\n \"wo\",\n \"yo\"\n ],\n \"%m/%d/%Y\": [\n \"ab\",\n \"ae\",\n \"an\",\n \"av\",\n \"ay\",\n \"bh\",\n \"bi\",\n \"ch\",\n \"cr\",\n \"cv\",\n \"ee\",\n \"en\",\n \"fj\",\n \"ho\",\n \"ht\",\n \"hz\",\n \"ie\",\n \"ik\",\n \"io\",\n \"kg\",\n \"kj\",\n \"ks\",\n \"kv\",\n \"li\",\n \"mh\",\n \"mo\",\n \"na\",\n \"ne\",\n \"ng\",\n \"nv\",\n \"ny\",\n \"oj\",\n \"pi\",\n \"rc\",\n \"sc\",\n \"sh\",\n \"sm\",\n \"su\",\n \"tl\",\n 
\"tw\",\n \"ty\",\n \"wa\",\n \"za\",\n \"zu\"\n ],\n \"%Y-%m-%d\": [\n \"af\",\n \"br\",\n \"ce\",\n \"dz\",\n \"eo\",\n \"ko\",\n \"lt\",\n \"mg\",\n \"nr\",\n \"rw\",\n \"se\",\n \"si\",\n \"sn\",\n \"ss\",\n \"st\",\n \"sv\",\n \"tn\",\n \"ts\",\n \"ug\",\n \"ve\",\n \"vo\",\n \"xh\"\n ],\n \"%Y/%m/%d\": [\n \"ak\",\n \"bo\",\n \"eu\",\n \"ia\",\n \"ii\",\n \"ja\",\n \"ku\",\n \"yi\",\n \"zh\"\n ],\n \"null\": [\n \"ar\",\n \"fa\",\n \"ps\",\n \"th\"\n ],\n \"%d-%m-%Y\": [\n \"as\",\n \"da\",\n \"fy\",\n \"hi\",\n \"kl\",\n \"mr\",\n \"my\",\n \"nl\",\n \"rm\",\n \"sa\",\n \"ta\"\n ],\n \"%d.%m.%Y\": [\n \"az\",\n \"cs\",\n \"de\",\n \"et\",\n \"fi\",\n \"fo\",\n \"hy\",\n \"is\",\n \"ka\",\n \"kk\",\n \"lv\",\n \"mk\",\n \"nb\",\n \"nn\",\n \"no\",\n \"os\",\n \"pl\",\n \"ro\",\n \"ru\",\n \"sq\",\n \"tg\",\n \"tr\",\n \"tt\"\n ],\n \"%d.%m.%y\": [\n \"ba\",\n \"be\",\n \"lb\"\n ],\n \"%d.%m.%Y \\u0433.\": [\n \"bg\"\n ],\n \"%d.%m.%Y.\": [\n \"bs\",\n \"hr\",\n \"sr\"\n ],\n \"%Y.%m.%d\": [\n \"cu\",\n \"mn\"\n ],\n \"%d/%m/%y\": [\n \"dv\",\n \"km\"\n ],\n \"%d-%m-%y\": [\n \"gu\",\n \"kn\",\n \"or\",\n \"pa\",\n \"te\"\n ],\n \"%Y. %m. %d.\": [\n \"hu\"\n ],\n \"%d-%b %y\": [\n \"ky\"\n ],\n \"%d. %m. %Y\": [\n \"sk\",\n \"sl\"\n ],\n \"%d.%m.%y \\u00fd.\": [\n \"tk\"\n ]\n}\n\n\n\n__BRYTHON__.imported._locale = (function($B){\n var _b_ = $B.builtins\n return {\n CHAR_MAX: 127,\n LC_ALL: 6,\n LC_COLLATE: 3,\n LC_CTYPE: 0,\n LC_MESSAGES: 5,\n LC_MONETARY: 4,\n LC_NUMERIC: 1,\n LC_TIME: 2,\n Error: _b_.ValueError,\n\n _date_format: function(spec, hour){\n var t,\n locale = __BRYTHON__.locale.substr(0, 2)\n\n if(spec == \"p\"){\n var res = hours < 12 ? am[locale] : pm[locale]\n if(res === undefined){\n throw _b_.ValueError.$factory(\"no format \" + spec + \" for locale \" +\n locale)\n }\n return res\n }\n else if(spec == \"x\"){\n t = x_format\n }else if(spec == \"X\"){\n t = X_format\n }else{\n throw _b_.ValueError.$factory(\"invalid format\", spec)\n }\n for(var key in t){\n if(t[key].indexOf(locale) > -1){\n return key\n }\n }\n throw _b_.ValueError.$factory(\"no format \" + spec + \" for locale \" +\n locale)\n },\n\n localeconv: function(){\n var conv = {'grouping': [127],\n 'currency_symbol': '',\n 'n_sign_posn': 127,\n 'p_cs_precedes': 127,\n 'n_cs_precedes': 127,\n 'mon_grouping': [],\n 'n_sep_by_space': 127,\n 'decimal_point': '.',\n 'negative_sign': '',\n 'positive_sign': '',\n 'p_sep_by_space': 127,\n 'int_curr_symbol': '',\n 'p_sign_posn': 127,\n 'thousands_sep': '',\n 'mon_thousands_sep': '',\n 'frac_digits': 127,\n 'mon_decimal_point': '',\n 'int_frac_digits': 127\n }\n var res = $B.empty_dict()\n for(var key in conv){\n _b_.dict.$setitem(res, key, conv[key])\n }\n\n return res\n },\n\n setlocale : function(){\n var $ = $B.args(\"setlocale\", 2, {category: null, locale: null},\n [\"category\", \"locale\"], arguments, {locale: _b_.None},\n null, null)\n /// XXX category is currently ignored\n if($.locale == \"\"){\n // use browser language setting, if it is set\n var LANG = ($B.language || \"\").substr(0, 2)\n if(am.hasOwnProperty(LANG)){\n $B.locale = LANG\n return LANG\n }else{\n console.log(\"Unknown locale: \" + LANG)\n }\n }else if($.locale === _b_.None){\n // return current locale\n return $B.locale\n }else{\n // Only use 2 first characters\n try{$.locale.substr(0, 2)}\n catch(err){\n throw $module.Error.$factory(\"Invalid locale: \" + $.locale)\n }\n if(am.hasOwnProperty($.locale.substr(0, 2))){\n $B.locale = $.locale\n return $.locale\n }else{\n throw 
$module.Error.$factory(\"Unknown locale: \" + $.locale)\n }\n }\n }\n }\n})(__BRYTHON__)\n"], "modulefinder": [".js", "(function($B){\n\nvar _b_=$B.builtins\nvar _mod = {}\n\n$ModuleFinderDict = {__class__:_b_.type,__name__:'ModuleFinder'}\n$ModuleFinderDict.__mro__ = [_b_.object]\n\n$ModuleFinderDict.run_script = function(self, pathname){\n // pathname is the url of a Python script\n var py_src = _b_.$open(pathname).read()\n // transform into internal Brython tree structure\n var root = $B.py2js(py_src)\n // walk the tree to find occurences of imports\n function walk(node){\n var modules = []\n var ctx = node.context\n if(ctx && ctx.type=='node'){ctx = ctx.tree[0]}\n\n if(ctx && ctx.type==\"import\"){\n for(var i=0, _len_i = ctx.tree.length; i < _len_i;i++){\n if(modules.indexOf(ctx.tree[i].name)==-1){\n modules.push(ctx.tree[i].name)\n }\n }\n }else if(ctx && ctx.type==\"from\"){\n if(modules.indexOf(ctx.module)==-1){\n modules.push(ctx.module)\n }\n }\n\n for(var i=0, _len_i = node.children.length; i < _len_i;i++){\n mods = walk(node.children[i])\n for(var j=0, _len_j = mods.length; j < _len_j;j++){\n if(modules.indexOf(mods[j])==-1){modules.push(mods[j])}\n }\n }\n return modules\n }\n self.modules = walk(root)\n}\n\n_mod.ModuleFinder = function(){return {__class__:$ModuleFinderDict}\n}\n_mod.ModuleFinder.$dict = $ModuleFinderDict\n_mod.ModuleFinder.__class__ = $B.$factory\n$ModuleFinderDict.$factory = _mod.ModuleFinder\n\n$B.addToImported('modulefinder', _mod)\n\n})(__BRYTHON__)\n"], "_profile": [".js", "// Private interface to the profiling instrumentation implemented in py_utils.js.\n// Uses local a copy of the eval function from py_builtin_functions.js\n\nvar $module=(function($B) {\n eval($B.InjectBuiltins());\n return {\n brython:$B,\n data:$B.$profile_data,\n start:$B.$profile.start,\n stop:$B.$profile.stop,\n pause:$B.$profile.pause,\n status:$B.$profile.status,\n clear:$B.$profile.clear,\n elapsed:$B.$profile.elapsed,\n run:function(src,_globals,_locals,nruns) {\n var current_frame = $B.frames_stack[$B.frames_stack.length-1]\n if(current_frame!==undefined){\n var current_locals_id = current_frame[0].replace(/\\./,'_'),\n current_globals_id = current_frame[2].replace(/\\./,'_')\n }\n\n var is_exec = true,\n leave = false\n\n // code will be run in a specific block\n var globals_id = '$profile_'+$B.UUID(),\n locals_id\n\n if(_locals===_globals){\n locals_id = globals_id\n }else{\n locals_id = '$profile_'+$B.UUID()\n }\n // Initialise the object for block namespaces\n eval('var $locals_'+globals_id+' = {}\\nvar $locals_'+locals_id+' = {}')\n\n // Initialise block globals\n\n // A _globals dictionary is provided, set or reuse its attribute\n // globals_id\n _globals.globals_id = _globals.globals_id || globals_id\n globals_id = _globals.globals_id\n\n if(_locals === _globals || _locals === undefined){\n locals_id = globals_id\n parent_scope = $B.builtins_scope\n }else{\n // The parent block of locals must be set to globals\n parent_scope = {\n id: globals_id,\n parent_block: $B.builtins_scope,\n binding: {}\n }\n for(var attr of _b_.dict.$keys_string(_globals)){\n parent_scope.binding[attr] = true\n }\n }\n\n // Initialise block globals\n if(_globals.$jsobj){\n var items = _globals.$jsobj\n }else{\n var items = {}\n for(var key of _b_.dict.$keys_string(_globals)){\n items[key] = _b_.dict.$getitem_string(_globals, key)\n }\n }\n for(var item in items){\n item1 = to_alias(item)\n try{\n eval('$locals_' + globals_id + '[\"' + item1 +\n '\"] = items[item]')\n }catch(err){\n 
console.log(err)\n console.log('error setting', item)\n break\n }\n }\n\n // Initialise block locals\n var items = _b_.dict.items(_locals), item\n if(_locals.$jsobj){\n var items = _locals.$jsobj\n }else{\n var items = {}\n for(var key of _b_.dict.$keys_string(_locals)){\n items[key] = _b_.dict.$getitem_string(_locals, key)\n } }\n for(var item in items){\n item1 = to_alias(item)\n try{\n eval('$locals_' + locals_id + '[\"' + item[0] + '\"] = item[1]')\n }catch(err){\n console.log(err)\n console.log('error setting', item)\n break\n }\n }\n //var nb_modules = Object.keys(__BRYTHON__.modules).length\n //console.log('before exec', nb_modules)\n\n console.log(\"call py2js\", src, globals_id, locals_id, parent_scope)\n var root = $B.py2js(src, globals_id, locals_id, parent_scope),\n js, gns, lns\n\n try{\n\n var js = root.to_js()\n\n var i,res,gns;\n for(i=0;i 2){\n var brython_scripts = [\n 'brython_builtins',\n \n 'py_ast_classes',\n 'stdlib_paths',\n 'unicode_data',\n 'version_info',\n \n 'py_tokens',\n 'python_tokenizer',\n 'py_ast',\n 'py2js',\n 'loaders',\n 'py_utils',\n 'py_object',\n 'py_type',\n 'py_builtin_functions',\n 'py_sort',\n 'py_exceptions',\n 'py_range_slice',\n 'py_bytes',\n 'py_set',\n 'py_import',\n 'py_string',\n 'py_int',\n 'py_long_int',\n 'py_float',\n 'py_complex',\n 'py_dict',\n 'py_list',\n 'js_objects',\n 'py_generator',\n 'py_dom',\n 'py_pattern_matching',\n 'async',\n 'py_flags',\n 'builtin_modules',\n 'ast_to_js',\n 'symtable',\n \n 'action_helpers_generated_version',\n 'string_parser',\n 'number_parser',\n 'python_parser_peg_version',\n 'pegen',\n 'gen_parse',\n 'brython_ready'\n ]\n }else{\n var brython_scripts = ['brython']\n }\n\n if(VFS !== null){\n brython_scripts.push(VFS)\n }\n return brython_scripts\n}\n\nvar wclass = $B.make_class(\"Worker\",\n function(worker){\n return {\n __class__: wclass,\n worker\n }\n }\n)\n\nwclass.send = function(){\n var $ = $B.args('send', 2, {self: null, message: null}, ['self', 'message'],\n arguments, {}, 'args', null)\n var message = $B.pyobj2structuredclone($.message)\n return $.self.worker.postMessage(message, ...$.args)\n}\n\nwclass.__mro__ = [$B.JSObj, _b_.object]\n\n$B.set_func_names(wclass, \"browser.worker\")\n\n\nvar _Worker = $B.make_class(\"Worker\", function(id, onmessage, onerror){\n $B.warn(_b_.DeprecationWarning,\n \"worker.Worker is deprecated in version 3.12. \" +\n \"Use worker.create_worker instead\")\n var $ = $B.args(\"__init__\", 3, {id: null, onmessage: null, onerror: null},\n ['id', 'onmessage', 'onerror'], arguments,\n {onmessage: _b_.None, onerror: _b_.None}, null, null),\n id = $.id,\n worker_script = $B.webworkers[id]\n\n if(worker_script === undefined){\n throw _b_.KeyError.$factory(id)\n }\n var filepath = worker_script.src ? 
worker_script.src : $B.script_path + \"#\" + id,\n filename = $B.strip_host(filepath),\n src = $B.file_cache[filename]\n\n var indexedDB = worker_script.attributes &&\n worker_script.attributes.getNamedItem('indexedDB')\n var script_id = \"worker\" + $B.UUID(),\n filename = $B.script_path + \"#\" + id\n $B.url2name[filename] = script_id\n\n var js = $B.py2js({src, filename}, script_id).to_js(),\n header = '';\n var brython_scripts = scripts_to_load(\n $B.get_option_from_filename('debug', filename))\n brython_scripts.forEach(function(script){\n if(script != VFS || VFS == \"brython_stdlib\"){\n var url = $B.brython_path + script + \".js\"\n }else{\n // attribute $B.brython_modules is set to the path of\n // brython_modules.js by the script itself\n var url = $B.brython_modules\n }\n if(! $B.get_option('cache')){ // cf. issue 1954\n url += '?' + (new Date()).getTime()\n }\n header += 'importScripts(\"' + url + '\")\\n'\n })\n // set __BRYTHON__.imported[script_id]\n header += `\n var $B = __BRYTHON__,\n _b_ = $B.builtins\n var module = $B.module.$factory(\"${script_id}\")\n module.__file__ = \"${filename}\"\n module.__doc__ = _b_.None\n $B.imported[\"${script_id}\"] = module\\n`\n // restore brython_path\n header += `$B.brython_path = \"${$B.brython_path}\"\\n`\n // restore path for imports (cf. issue #1305)\n header += `$B.make_import_paths(\"${filename}\")\\n`\n // Call brython() to initialize internal Brython values\n header += `brython(${JSON.stringify($B.$options)})\\n`\n js = header + js\n js = `try{${js}}catch(err){$B.handle_error(err)}`\n\n var blob = new Blob([js], {type: \"application/js\"}),\n url = URL.createObjectURL(blob),\n w = new Worker(url),\n res = wclass.$factory(w)\n return res\n})\n\nfunction create_worker(){\n var $ = $B.args(\"__init__\", 4,\n {id: null, onready: null, onmessage: null, onerror: null},\n ['id', 'onready', 'onmessage', 'onerror'], arguments,\n {onready: _b_.None, onmessage: _b_.None, onerror: _b_.None},\n null, null),\n id = $.id,\n worker_script = $B.webworkers[id],\n onready = $.onready === _b_.None ? _b_.None : $B.$call($.onready),\n onmessage = $.onmessage === _b_.None ? _b_.None : $B.$call($.onmessage),\n onerror = $.onerror === _b_.None ? _b_.None : $B.$call($.onerror)\n\n if(worker_script === undefined){\n throw _b_.RuntimeError.$factory(`No webworker with id '${id}'`)\n }\n var script_id = \"worker\" + $B.UUID(),\n filepath = worker_script.src ? worker_script.src : $B.script_path + \"#\" + id,\n filename = $B.strip_host(filepath),\n src = $B.file_cache[filename]\n $B.url2name[filename] = script_id\n\n var brython_scripts = scripts_to_load(\n $B.get_option_from_filename('debug', filename))\n\n var js = $B.py2js({src, filename}, script_id).to_js(),\n header = '';\n for(var script of brython_scripts){\n if(script != VFS || VFS == \"brython_stdlib\"){\n var url = $B.brython_path + script + \".js\"\n }else{\n // attribute $B.brython_modules is set to the path of\n // brython_modules.js by the script itself\n var url = $B.brython_modules\n }\n if(! $B.get_option('cache')){ // cf. issue 1954\n url += '?' 
+ (new Date()).getTime()\n }\n header += 'importScripts(\"' + url + '\")\\n'\n }\n // set __BRYTHON__.imported[script_id]\n header += `\n var $B = __BRYTHON__,\n _b_ = $B.builtins\n var module = $B.module.$factory(\"${script_id}\")\n module.__file__ = \"${filename}\"\n module.__doc__ = _b_.None\n $B.imported[\"${script_id}\"] = module\\n`\n\n header += '$B.file_cache[module.__file__] = `' + src + '`\\n'\n // restore brython_path\n header += `$B.brython_path = \"${$B.brython_path}\"\\n`\n // restore path for imports (cf. issue #1305)\n header += `$B.make_import_paths(\"${filename}\")\\n`\n\n // Call brython() to initialize internal Brython values\n var save_option = JSON.stringify($B.save_options)\n header += `brython(${save_option})\\n`\n\n // send dummy message to trigger resolution of Promise\n var ok_token = Math.random().toString(36).substr(2, 8),\n error_token = Math.random().toString(36).substr(2, 8)\n\n // open indexedDB cache before running worker code\n js = `$B.idb_open_promise().then(function(){\\n` +\n `try{\\n` +\n `${js}\\n` +\n `self.postMessage('${ok_token}')\\n` +\n `}catch(err){\\n` +\n `self.postMessage(\"${error_token}Error in worker ${id}\\\\n\" + $B.error_trace(err))\\n` +\n `}\\n})`\n js = header + js\n\n var p = new Promise(function(resolve, reject){\n try{\n var blob = new Blob([js], {type: \"application/js\"}),\n url = URL.createObjectURL(blob),\n w = new Worker(url),\n res = wclass.$factory(w)\n }catch(err){\n reject(err)\n }\n\n w.onmessage = function(ev){\n if(ev.data == ok_token){\n resolve(res)\n }else if(typeof ev.data == 'string' &&\n ev.data.startsWith(error_token)){\n reject(ev.data.substr(error_token.length))\n }else{\n if(onmessage !== _b_.None){\n onmessage(ev)\n }\n try{\n resolve(res)\n }catch(err){\n reject(err)\n }\n }\n }\n\n return res\n })\n\n var error_func = onerror === _b_.None ? $B.handle_error : onerror\n\n if(onready !== _b_.None){\n p.then(onready).catch(error_func)\n }else{\n p.catch(error_func)\n }\n return _b_.None\n}\n\nvar module = {\n Worker: _Worker,\n create_worker\n}\n\n$B.addToImported('_webworker', module)\n\n})(__BRYTHON__)\n"], "_ast": [".js", "(function($B){\n\nvar _b_ = $B.builtins,\n ast = $B.ast, // created in py2js\n mod = {}\nmod.PyCF_ONLY_AST = $B.PyCF_ONLY_AST\nmod.PyCF_TYPE_COMMENTS = $B.PyCF_TYPE_COMMENTS\nmod.AST = $B.AST // in builtin_modules.js\n$B.create_python_ast_classes() // in py_ast.js\nfor(var klass in ast){\n mod[klass] = $B.python_ast_classes[klass]\n}\n\nvar Load = 'Load',\n Store = 'Store',\n Del = 'Del'\n\n// Note: the ensure_literal_* functions are only used to validate a restricted\n// set of non-recursive literals that have already been checked with\n// validate_expr, so they don't accept the validator state\nfunction ensure_literal_number(exp, allow_real, allow_imaginary){\n if(exp.__class__ !== mod.Constant){\n return false\n }\n var value = exp.value\n if(allow_real && $B.$isinstance(value, [_b_.int, _b_.float])){\n return true\n }\n if(allow_imaginary && $B.$isinstance(value, _b_.complex)){\n return true\n }\n return false\n}\n\nfunction ensure_literal_negative(exp, allow_real, allow_imaginary){\n if(exp.__class__ !== mod.UnaryOp){\n return false\n }\n // Must be negation ...\n if(exp.op !== mod.USub) {\n return false\n }\n // ... of a constant ...\n var operand = exp.operand\n if(operand.__class__ !== mod.Constant){\n return false\n }\n // ... 
number\n return ensure_literal_number(operand, allow_real, allow_imaginary)\n}\n\nfunction ensure_literal_complex(exp){\n if(exp.__class__ !== mod.BinOp){\n return false\n }\n var left = exp.left,\n right = exp.right;\n // Ensure op is addition or subtraction\n if(exp.op !== mod.Add && exp.op !== mod.Sub){\n return false\n }\n // Check LHS is a real number (potentially signed)\n switch(left.__class__){\n case mod.Constant:\n if(!ensure_literal_number(left, true, false)){\n return false\n }\n break;\n case mod.UnaryOp:\n if(!ensure_literal_negative(left, true, false)){\n return false\n }\n break;\n default:\n return false\n }\n // Check RHS is an imaginary number (no separate sign allowed)\n switch(right.__class__){\n case mod.Constant:\n if(!ensure_literal_number(right, false, true)){\n return false\n }\n break;\n default:\n return false\n }\n return true\n}\n\nfunction validate_arguments(args){\n validate_args(args.posonlyargs)\n validate_args(args.args)\n if(args.vararg && args.vararg.annotation){\n validate_expr(args.vararg.annotation, Load)\n }\n validate_args(args.kwonlyargs)\n if(args.kwarg && args.kwarg.annotation){\n validate_expr(args.kwarg.annotation, Load)\n }\n if(args.defaults.length > args.posonlyargs.length + args.args.length){\n throw _b_.ValueError.$factory(\n \"more positional defaults than args on arguments\")\n }\n if(args.kw_defaults.length != args.kwonlyargs.length){\n throw _b_.ValueError.$factory(\n \"length of kwonlyargs is not the same as \" +\n \"kw_defaults on arguments\")\n }\n validate_exprs(args.defaults, Load, 0)\n validate_exprs(args.kw_defaults, Load, 1)\n}\n\nfunction validate_pattern(p, star_ok){\n var ret = -1\n switch(p.__class__) {\n case mod.MatchValue:\n validate_pattern_match_value(p.value)\n break;\n case mod.MatchSingleton:\n if([_b_.None, _b_.True, _b_.False].indexOf(p.value) == -1){\n throw _b_.ValueError(\n \"MatchSingleton can only contain True, False and None\")\n }\n break;\n case mod.MatchSequence:\n validate_patterns(p.patterns, 1);\n break;\n case mod.MatchMapping:\n if(p.keys.length != p.patterns.length){\n throw _b_.ValueError.$factory(\n \"MatchMapping doesn't have the same number of keys as patterns\");\n }\n if(p.rest){\n validate_capture(p.rest)\n }\n\n var keys = p.keys;\n for(var key of keys){\n if(key.__class__ === mod.Constant) {\n var literal = key.value;\n if([_b_.None, _b_.True, _b_.False].indexOf(literal) > -1){\n /* validate_pattern_match_value will ensure the key\n doesn't contain True, False and None but it is\n syntactically valid, so we will pass those on in\n a special case. 
*/\n continue;\n }\n }\n validate_pattern_match_value(key)\n }\n validate_patterns(p.patterns, 0);\n break;\n case mod.MatchClass:\n if(p.kwd_attrs.length != p.kwd_patterns.length){\n throw _b_.ValueError.$factory(\n \"MatchClass doesn't have the same number of \" +\n \"keyword attributes as patterns\")\n }\n validate_expr(p.cls, Load)\n var cls = p.cls;\n while(true){\n if(cls.__class__ === mod.Name){\n break\n }else if(cls.__class__ === mod.Attribute) {\n cls = cls.value;\n continue;\n }else {\n throw _b_.ValueError.$factory(\n \"MatchClass cls field can only contain Name \" +\n \"or Attribute nodes.\")\n }\n }\n\n for(var identifier of p.kwd_attrs){\n validate_name(identifier)\n }\n\n validate_patterns(p.patterns, 0)\n validate_patterns(p.kwd_patterns, 0);\n break;\n case mod.MatchStar:\n if (!star_ok) {\n throw _b_.ValueError.$factory(\"can't use MatchStar here\")\n }\n if(p.name === undefined){\n validate_capture(p.name)\n }\n break;\n case mod.MatchAs:\n if(p.name){\n validate_capture(p.name)\n }\n if(p.pattern == undefined){\n ret = 1;\n }else if(p.name == undefined){\n throw _b_.ValueError.$factory(\n \"MatchAs must specify a target name if a pattern is given\")\n }else{\n validate_pattern(p.pattern, 0);\n }\n break;\n case mod.MatchOr:\n if(p.patterns.length < 2){\n throw _b_.ValueError.$factory(\n \"MatchOr requires at least 2 patterns\")\n }\n validate_patterns(p.patterns, 0)\n break;\n // No default case, so the compiler will emit a warning if new pattern\n // kinds are added without being handled here\n }\n if(ret < 0){\n throw _b_.SystemError.$factory(\"unexpected pattern\")\n }\n return true\n}\n\nfunction validate_patterns(patterns, star_ok){\n for(var pattern of patterns){\n validate_pattern(pattern, star_ok)\n }\n return true\n}\n\nfunction validate_pattern_match_value(exp){\n validate_expr(exp, Load)\n switch (exp.__class__){\n case mod.Constant:\n /* Ellipsis and immutable sequences are not allowed.\n For True, False and None, MatchSingleton() should\n be used */\n validate_expr(exp, Load)\n var literal = exp.value\n if($B.$isinstance(literal, [_b_.int, _b_.float, _b_.bytes,\n _b_.complex, _b_.str])){\n return true\n }\n throw _b_.ValueError.$factory(\n \"unexpected constant inside of a literal pattern\")\n case mod.Attribute:\n // Constants and attribute lookups are always permitted\n return true\n case mod.UnaryOp:\n // Negated numbers are permitted (whether real or imaginary)\n // Compiler will complain if AST folding doesn't create a constant\n if(ensure_literal_negative(exp, true, true)){\n return true\n }\n break;\n case mod.BinOp:\n // Complex literals are permitted\n // Compiler will complain if AST folding doesn't create a constant\n if(ensure_literal_complex(exp)){\n return true\n }\n break;\n case mod.JoinedStr:\n // Handled in the later stages\n return 1;\n default:\n break;\n }\n throw _b_.ValueError.$factory(\n \"patterns may only match literals and attribute lookups\")\n}\n\nfunction validate_capture(name){\n if(name == \"_\"){\n throw _b_.ValueError.$factory(\"can't capture name '_' in patterns\")\n }\n validate_name(name)\n}\n\nfunction validate_name(name){\n var forbidden = [\"None\", \"True\", \"False\"]\n if(forbidden.indexOf(name) > -1){\n throw _b_.ValueError.$factory(`identifier field can't represent` +\n ` '${name}' constant\", forbidden[i]`)\n }\n return true\n}\n\nfunction validate_comprehension(gens){\n if(gens.length == 0) {\n throw _b_.ValueError.$factory(\"comprehension with no generators\")\n }\n for(var comp of gens){\n 
validate_expr(comp.target, Store)\n validate_expr(comp.iter, Load)\n validate_exprs(comp.ifs, Load, 0)\n }\n return true\n}\n\nfunction validate_keywords(keywords){\n for(var keyword of keywords){\n validate_expr(keyword.value, Load)\n }\n return true\n}\n\nfunction validate_args(args){\n for(var arg of args){\n if(arg.annotation){\n validate_expr(arg.annotation, Load)\n }\n }\n return true\n}\n\nfunction validate_nonempty_seq(seq, what, owner){\n if(seq.length > 0){\n return true\n }\n throw _b_.ValueError.$factory(`empty ${what} on ${owner}`)\n}\n\nfunction validate_assignlist(targets, ctx){\n validate_nonempty_seq(targets, \"targets\", ctx == Del ? \"Delete\" : \"Assign\")\n validate_exprs(targets, ctx, 0)\n}\n\nfunction validate_body(body, owner){\n validate_nonempty_seq(body, \"body\", owner)\n validate_stmts(body)\n}\n\nfunction validate_exprs(exprs, ctx, null_ok){\n for(var expr of exprs){\n if(expr !== _b_.None){\n validate_expr(expr, ctx)\n }else if(!null_ok){\n throw _b_.ValueError.$factory(\n \"None disallowed in expression list\")\n }\n\n }\n return true\n}\n\nfunction validate_expr(exp, ctx){\n var check_ctx = 1,\n actual_ctx;\n\n /* First check expression context. */\n switch (exp.__class__) {\n case mod.Name:\n validate_name(exp.id)\n actual_ctx = exp.ctx\n break;\n case mod.Attribute:\n case mod.Subscript:\n case mod.Starred:\n case mod.List:\n case mod.Tuple:\n actual_ctx = exp.ctx;\n break\n default:\n if(ctx != Load){\n throw _b_.ValueError.$factory(\"expression which can't be \" +\n `assigned to in ${ctx} context`)\n }\n check_ctx = 0;\n /* set actual_ctx to prevent gcc warning */\n actual_ctx = 0;\n }\n actual_ctx = actual_ctx === 0 ? actual_ctx :\n actual_ctx.__class__.__name__\n if(check_ctx && actual_ctx != ctx){\n throw _b_.ValueError.$factory(`expression must have ` +\n `${ctx} context but has ${actual_ctx} instead`)\n }\n\n /* Now validate expression. */\n switch (exp.__class__) {\n case mod.BoolOp:\n if(exp.values.length < 2){\n throw _b_.ValueError.$factory(\"BoolOp with less than 2 values\")\n }\n validate_exprs(exp.values, Load, 0);\n break;\n case mod.BinOp:\n validate_expr(exp.left, Load)\n validate_expr(exp.right, Load)\n break;\n case mod.UnaryOp:\n validate_expr(exp.operand, Load);\n break;\n case mod.Lambda:\n validate_arguments(exp.args)\n validate_expr(exp.body, Load);\n break;\n case mod.IfExp:\n validate_expr(exp.test, Load)\n validate_expr(exp.body, Load)\n validate_expr(exp.orelse, Load)\n break;\n case mod.Dict:\n if(exp.keys.length != exp.values.length){\n throw _b_.ValueError.$factory(\n \"Dict doesn't have the same number of keys as values\");\n }\n /* null_ok=1 for keys expressions to allow dict unpacking to work in\n dict literals, i.e. 
``{**{a:b}}`` */\n validate_exprs(exp.keys, Load, 1)\n validate_exprs(exp.values, Load, 0);\n break;\n case mod.Set:\n validate_exprs(exp.elts, Load, 0);\n break;\n case mod.ListComp:\n case mod.SetComp:\n case mod.GeneratorExp:\n validate_comprehension(exp.generators)\n validate_expr(exp.elt, Load)\n break;\n case mod.DictComp:\n validate_comprehension(exp.generators)\n validate_expr(exp.key, Load)\n validate_expr(exp.value, Load)\n break;\n case mod.Yield:\n if(exp.value){\n validate_expr(exp.value, Load)\n }\n break;\n case mod.YieldFrom:\n validate_expr(exp.value, Load)\n break;\n case mod.Await:\n validate_expr(exp.value, Load)\n break;\n case mod.Compare:\n if(exp.comparators.length == 0){\n throw _b_.ValueError.$factory(\"Compare with no comparators\")\n }\n if(exp.comparators.length != exp.ops){\n throw _b_.ValueError.$factory(\"Compare has a different number \" +\n \"of comparators and operands\")\n }\n validate_exprs(exp.comparators, Load, 0)\n validate_expr(exp.left, Load)\n break;\n case mod.Call:\n validate_expr(exp.func, Load)\n validate_exprs(exp.args, Load, 0)\n validate_keywords(exp.keywords)\n break;\n case mod.Constant:\n validate_constant(exp.value)\n break;\n case mod.JoinedStr:\n validate_exprs(exp.values, Load, 0)\n break;\n case mod.FormattedValue:\n validate_expr(exp.value, Load)\n if (exp.format_spec) {\n validate_expr(exp.format_spec, Load)\n break;\n }\n break;\n case mod.Attribute:\n validate_expr(exp.value, Load)\n break;\n case mod.Subscript:\n validate_expr(exp.slice, Load)\n validate_expr(exp.value, Load)\n break;\n case mod.Starred:\n validate_expr(exp.value, ctx)\n break;\n case mod.Slice:\n if(exp.lower){\n validate_expr(exp.lower, Load)\n }\n if(exp.upper){\n validate_expr(exp.upper, Load)\n }\n if(exp.step){\n validate_expr(exp.step, Load)\n }\n break;\n case mod.List:\n validate_exprs(exp.elts, ctx, 0)\n break;\n case mod.Tuple:\n validate_exprs(exp.elts, ctx, 0)\n break;\n case mod.NamedExpr:\n validate_expr(exp.value, Load)\n break;\n /* This last case doesn't have any checking. 
*/\n case mod.Name:\n ret = 1;\n break;\n // No default case mod.so compiler emits warning for unhandled cases\n }\n return true\n}\n\nfunction validate_constant(value){\n if (value == _b_.None || value == _b_.Ellipsis){\n return true\n }\n if($B.$isinstance(value,\n [_b_.int, _b_.float, _b_.complex, _b_.bool, _b_.bytes, _b_.str])){\n return true\n }\n\n if($B.$isinstance(value, [_b_.tuple, _b_.frozenset])){\n var it = _b_.iter(value)\n while(true){\n try{\n var item = _b_.next(it)\n validate_constant(item)\n }catch(err){\n if($B.is_exc(err, [_b_.StopIteration])){\n return true\n }\n throw err\n }\n }\n }\n}\n\nfunction validate_stmts(seq){\n for(var stmt of seq) {\n if(stmt !== _b_.None){\n validate_stmt(stmt)\n }else{\n throw _b_.ValueError.$factory(\"None disallowed in statement list\");\n }\n }\n}\n\nfunction validate_stmt(stmt){\n switch (stmt.__class__) {\n case mod.FunctionDef:\n validate_body(stmt.body, \"FunctionDef\")\n validate_arguments(stmt.args)\n validate_exprs(stmt.decorator_list, Load, 0)\n if(stmt.returns){\n validate_expr(stmt.returns, Load)\n }\n break;\n case mod.ClassDef:\n validate_body(stmt.body, \"ClassDef\")\n validate_exprs(stmt.bases, Load, 0)\n validate_keywords(stmt.keywords)\n validate_exprs(stmtdecorator_list, Load, 0)\n break;\n case mod.Return:\n if(stmt.value){\n validate_expr(stmt.value, Load)\n }\n break;\n case mod.Delete:\n validate_assignlist(stmt.targets, Del);\n break;\n case mod.Assign:\n validate_assignlist(stmt.targets, Store)\n validate_expr(stmt.value, Load)\n break;\n case mod.AugAssign:\n validate_expr(stmt.target, Store) &&\n validate_expr(stmt.value, Load);\n break;\n case mod.AnnAssign:\n if(stmt.target.__class__ != mod.Name && stmt.simple){\n throw _b_.TypeError.$factory(\n \"AnnAssign with simple non-Name target\")\n }\n validate_expr(stmt.target, Store)\n if(stmt.value){\n validate_expr(stmt.value, Load)\n validate_expr(stmt.annotation, Load);\n }\n break;\n case mod.For:\n validate_expr(stmt.target, Store)\n validate_expr(stmt.iter, Load)\n validate_body(stmt.body, \"For\")\n validate_stmts(stmt.orelse)\n break;\n case mod.AsyncFor:\n validate_expr(stmt.target, Store)\n validate_expr(stmt.iter, Load)\n validate_body(stmt.body, \"AsyncFor\")\n validate_stmts(stmt.orelse)\n break;\n case mod.While:\n validate_expr(stmt.test, Load)\n validate_body(stmt.body, \"While\")\n validate_stmts(stmt.orelse)\n break;\n case mod.If:\n validate_expr(stmt.test, Load)\n validate_body(stmt.body, \"If\")\n validate_stmts(stmt.orelse)\n break;\n case mod.With:\n validate_nonempty_seq(stmt.items, \"items\", \"With\")\n for (var item of stmt.items){\n validate_expr(item.context_expr, Load) &&\n (! 
item.optional_vars || validate_expr(item.optional_vars, Store))\n }\n validate_body(stmt.body, \"With\");\n break;\n case mod.AsyncWith:\n validate_nonempty_seq(stmt.items, \"items\", \"AsyncWith\")\n for(var item of stmt.items){\n validate_expr(item.context_expr, Load)\n if(item.optional_vars){\n validate_expr(item.optional_vars, Store)\n }\n }\n validate_body(stmt.body, \"AsyncWith\");\n break;\n case mod.Match:\n validate_expr(stmt.subject, Load)\n validate_nonempty_seq(stmt.cases, \"cases\", \"Match\")\n for(var m of stmt.cases){\n validate_pattern(m.pattern, 0)\n if(m.guard){\n validate_expr(m.guard, Load)\n }\n validate_body(m.body, \"match_case\")\n }\n break;\n case mod.Raise:\n if(stmt.exc){\n validate_expr(stmt.exc, Load)\n if(stmt.cause){\n validate_expr(stmt.cause, Load)\n }\n break;\n }\n if(stmt.cause) {\n throw _b_.ValueError.$factory(\"Raise with cause but no exception\");\n }\n break;\n case mod.Try:\n validate_body(stmt.body, \"Try\")\n if(stmt.handlers.length == 0 + stmt.finalbody.length == 0){\n throw _b_.ValueError.$factor(\n \"Try has neither except handlers nor finalbody\");\n }\n if(stmt.handlers.length == 0 && stmt.orelse.length > 0){\n throw _b_.ValueError.$factory(\n \"Try has orelse but no except handlers\");\n }\n for(var handler of stmt.handlers){\n if(handler.type){\n validate_expr(handler.type, Load)\n validate_body(handler.body, \"ExceptHandler\")\n }\n }\n if(stmt.finalbody.length > 0){\n validate_stmts(stmt.finalbody)\n }\n if(stmt.orelse.length > 0){\n validate_stmts(stmt.orelse)\n }\n break;\n case mod.TryStar:\n validate_body(stmt.body, \"TryStar\")\n if(stmt.handlers.length + stmt.finalbody.length == 0){\n throw _b_.ValueError.$factory(\n \"TryStar has neither except handlers nor finalbody\");\n }\n if(stmt.handlers.length == 0 && stmt.orelse.length > 0){\n throw _b_.ValueError.$factory(\n \"TryStar has orelse but no except handlers\");\n }\n for(var handler of stm.handlers){\n if(handler.type){\n validate_expr(handler.type, Load)\n validate_body(handler.body, \"ExceptHandler\")\n }\n }\n if(stmt.finalbody.length > 0){\n validate_stmts(stmt.finalbody)\n }\n if(stmt.orelse.length > 0){\n validate_stmts(stmt.orelse)\n }\n break;\n case mod.Assert:\n validate_expr(stmt.test, Load)\n if(stmt.msg){\n validate_expr(stmt.msg, Load)\n }\n break;\n case mod.Import:\n validate_nonempty_seq(stmt.names, \"names\", \"Import\");\n break;\n case mod.ImportFrom:\n if(stmt.level < 0) {\n throw _b_.ValueError.$factory(\"Negative ImportFrom level\")\n }\n validate_nonempty_seq(stmt.names, \"names\", \"ImportFrom\");\n break;\n case mod.Global:\n validate_nonempty_seq(stmt.names, \"names\", \"Global\");\n break;\n case mod.Nonlocal:\n validate_nonempty_seq(stmt.names, \"names\", \"Nonlocal\");\n break;\n case mod.Expr:\n validate_expr(stmt.value, Load);\n break;\n case mod.AsyncFunctionDef:\n validate_body(stmt.body, \"AsyncFunctionDef\")\n validate_arguments(stmt.args)\n validate_exprs(stmt.decorator_list, Load, 0)\n if(stmt.returns){\n validate_expr(stmt.returns, Load)\n }\n break;\n case mod.Pass:\n case mod.Break:\n case mod.Continue:\n break;\n // No default case so compiler emits warning for unhandled cases\n }\n}\n\n\nmod._validate = function(ast_obj){\n switch (ast_obj.__class__) {\n case mod.Module:\n validate_stmts(ast_obj.body);\n break;\n case mod.Interactive:\n validate_stmts(ast_obj.body);\n break;\n case mod.Expression:\n validate_expr(ast_obj.body, Load);\n break;\n case mod.FunctionType:\n validate_exprs(ast_obj.argtypes, Load, 0) &&\n 
validate_expr(ast_obj.returns, Load);\n break;\n // No default case so compiler emits warning for unhandled cases\n }\n}\n\n$B.imported._ast = mod\n\n}\n)(__BRYTHON__)\n"], "_strptime": [".js", "\n(function($B){\n var _b_ = __BRYTHON__.builtins\n $B.imported._strptime = {\n _strptime_datetime: function(cls, s, fmt){\n var pos_s = 0,\n pos_fmt = 0,\n dt = {}\n function error(time_data, format){\n throw _b_.ValueError.$factory(\n `time data '${time_data}' does not match format '${format}'`)\n }\n\n var locale = __BRYTHON__.locale,\n shortdays = [],\n longdays = [],\n conv_func = locale == \"C\" ?\n function(d, options){\n return d.toLocaleDateString('en-EN', options)\n } :\n function(d, options){\n return d.toLocaleDateString(locale, options)\n }\n\n for(var day = 16; day < 23; day++){\n var d = new Date(Date.UTC(2012, 11, day, 3, 0, 0))\n shortdays.push(conv_func(d, {weekday: 'short'}))\n longdays.push(conv_func(d, {weekday: 'long'}))\n }\n\n var shortmonths = [],\n longmonths = []\n\n for(var month = 0; month < 12; month++){\n var d = new Date(Date.UTC(2012, month, 11, 3, 0, 0))\n shortmonths.push(conv_func(d, {month: 'short'}))\n longmonths.push(conv_func(d, {month: 'long'}))\n }\n\n var shortdays_re = new RegExp(shortdays.join(\"|\").replace(\".\", \"\\\\.\")),\n longdays_re = new RegExp(longdays.join(\"|\")),\n shortmonths_re = new RegExp(shortmonths.join(\"|\").replace(\".\", \"\\\\.\")),\n longmonths_re = new RegExp(longmonths.join(\"|\"))\n\n var regexps = {\n d: [\"day\", new RegExp(\"^[123][0-9]|0?[1-9]\")],\n f: [\"microsecond\", new RegExp(\"^\\\\d{1,6}\")],\n H: [\"hour\", new RegExp(\"^[01][0-9]|2[0-3]|\\\\d\")],\n I: [\"hour\", new RegExp(\"^1[0-2]|0?[0-9]\")],\n m: [\"month\", new RegExp(\"^1[012]|0?[1-9]\")],\n M: [\"minute\", new RegExp(\"^[1-5][0-9]|0?[0-9]\")],\n S: [\"second\", new RegExp(\"^[1-5]\\\\d|0?\\\\d\")],\n y: [\"year\", new RegExp(\"^0{0,2}\\\\d{2}\")],\n Y: [\"year\", new RegExp(\"^\\\\d{4}\")],\n z: [\"tzinfo\", new RegExp(\"Z\")]\n }\n\n for(var key in regexps){\n var re = new RegExp('%' + key, \"g\"),\n mo = fmt.match(re)\n if(mo && mo.length > 1){\n throw _b_.ValueError.$factory('strptime directive %' +\n key + ' defined more than once')\n }\n }\n\n while(pos_fmt < fmt.length){\n var car = fmt.charAt(pos_fmt)\n if(car == \"%\"){\n var spec = fmt.charAt(pos_fmt + 1),\n regexp = regexps[spec]\n if(regexp !== undefined){\n var re = regexp[1],\n attr = regexp[0],\n res = re.exec(s.substr(pos_s))\n if(res === null){\n error(s, fmt)\n }else{\n dt[attr] = parseInt(res[0])\n if(attr == \"microsecond\"){\n while(dt[attr] < 100000){\n dt[attr] *= 10\n }\n }else if(attr == \"tzinfo\"){\n // Only value supported for the moment : Z\n // (UTC)\n var dt_module = $B.imported[cls.__module__]\n dt.tzinfo = dt_module.timezone.utc\n }\n pos_fmt += 2\n pos_s += res[0].length\n }\n }else if(spec == \"a\" || spec == \"A\"){\n // Locale's abbreviated (a) or full (A) weekday name\n var attr = \"weekday\",\n re = spec == \"a\" ? shortdays_re : longdays_re,\n t = spec == \"a\" ? shortdays : longdays\n res = re.exec(s.substr(pos_s))\n if(res === null){\n console.log('error', re, 'string', s.substr(pos_s), 'fmt', fmt)\n error(s, fmt)\n }else{\n var match = res[0],\n ix = t.indexOf(match)\n }\n dt.weekday = ix\n pos_fmt += 2\n pos_s += match.length\n }else if(spec == \"b\" || spec == \"B\"){\n // Locales's abbreviated (b) or full (B) month\n var attr = \"month\",\n re = spec == \"b\" ? shortmonths_re : longmonths_re,\n t = spec == \"b\" ? 
shortmonths : longmonths,\n res = re.exec(s.substr(pos_s))\n if(res === null){\n error(s, fmt)\n }else{\n var match = res[0],\n ix = t.indexOf(match)\n }\n dt.month = ix + 1\n pos_fmt += 2\n pos_s += match.length\n }else if(spec == \"c\"){\n // Locale's appropriate date and time representation\n var fmt1 = fmt.substr(0, pos_fmt - 1) + _locale_c_format() +\n fmt.substr(pos_fmt + 2)\n fmt = fmt1\n }else if(spec == \"%\"){\n if(s.charAt(pos_s) == \"%\"){\n pos_fmt++\n pos_s++\n }else{\n error(s, fmt)\n }\n }else{\n pos_fmt++\n }\n }else{\n if(car == s.charAt(pos_s)){\n pos_fmt++\n pos_s++\n }else{\n error(s, fmt)\n }\n }\n }\n\n if(pos_s < s.length){\n throw _b_.ValueError.$factory('unconverted data remains: ' +\n s.substr(pos_s))\n }\n\n return $B.$call(cls)(dt.year, dt.month, dt.day,\n dt.hour || 0, dt.minute || 0, dt.second || 0,\n dt.microsecond || 0, dt.tzinfo || _b_.None)\n }\n }\n})(__BRYTHON__)\n"], "dis": [".js", "(function($B){\n\nvar dict = $B.builtins.dict\nvar mod = {\n dis:function(src){\n $B.$py_module_path['__main__'] = $B.brython_path\n return __BRYTHON__.py2js(src,'__main__','__main__',\n $B.builtins_scope).to_js()\n },\n OPTIMIZED: 1,\n NEWLOCALS: 2,\n VARARGS: 4,\n VARKEYWORDS: 8,\n NESTED: 16,\n GENERATOR: 32,\n NOFREE: 64,\n COROUTINE: 128,\n ITERABLE_COROUTINE: 256,\n ASYNC_GENERATOR: 512,\n COMPILER_FLAG_NAMES: $B.builtins.dict.$factory()\n}\nmod.COMPILER_FLAG_NAMES = dict.$factory([\n [1, \"OPTIMIZED\"],\n [2, \"NEWLOCALS\"],\n [4, \"VARARGS\"],\n [8, \"VARKEYWORDS\"],\n [16, \"NESTED\"],\n [32, \"GENERATOR\"],\n [64, \"NOFREE\"],\n [128, \"COROUTINE\"],\n [256, \"ITERABLE_COROUTINE\"],\n [512, \"ASYNC_GENERATOR\"]\n])\n\n$B.addToImported('dis', mod)\n\n})(__BRYTHON__)"], "_zlib_utils": [".js", "\n\n(function($B){\n\n\nfunction rfind(buf, seq){\n var buflen = buf.length,\n len = seq.length\n for(var i = buflen - len; i >= 0; i--){\n var chunk = buf.slice(i, i + len),\n found = true\n for(var j = 0; j < len; j++){\n if(chunk[j] != seq[j]){\n found = false\n break\n }\n }\n if(found){return i}\n }\n return -1\n}\n\n\nvar c;\nvar crcTable = [];\nfor(var n =0; n < 256; n++){\n c = n;\n for(var k =0; k < 8; k++){\n c = ((c&1) ? 
(0xEDB88320 ^ (c >>> 1)) : (c >>> 1));\n }\n crcTable[n] = c;\n}\n\nvar mod = {\n crc32: function(bytes, crc) {\n var crc = crc ^ (-1);\n\n for (var byte of bytes.source) {\n crc = (crc >>> 8) ^ crcTable[(crc ^ byte) & 0xFF];\n }\n\n return (crc ^ (-1)) >>> 0;\n },\n\n lz_generator: function(text, size, min_len){\n /*\n Returns a list of items based on the LZ algorithm, using the\n specified window size and a minimum match length.\n The items are a tuple (length, distance) if a match has been\n found, and a byte otherwise.\n */\n // 'text' is an instance of Python 'bytes' class, the actual\n // bytes are in text.source\n text = text.source\n if(min_len === undefined){\n min_len = 3\n }\n var pos = 0, // position in text\n items = [] // returned items\n while(pos < text.length){\n sequence = text.slice(pos, pos + min_len)\n if(sequence.length < 3){\n for(var i = pos; i < text.length; i++){\n items.push(text[i])\n }\n break\n }\n // Search the sequence in the 'size' previous bytes\n buf = text.slice(pos - size, pos)\n buf_pos = rfind(buf, sequence)\n if(buf_pos > -1){\n // Match of length 3 found; search a longer one\n var len = 1\n while(len < 259 &&\n buf_pos + len < buf.length &&\n pos + len < text.length &&\n text[pos + len] == buf[buf_pos + len]){\n len += 1\n }\n match = text.slice(pos, pos + len)\n // \"Lazy matching\": search longer match starting at next\n // position\n longer_match = false\n if(pos + len < text.length - 2){\n match2 = text.slice(pos + 1, pos + len + 2)\n longer_buf_pos = rfind(buf, match2)\n if(longer_buf_pos > -1){\n // found longer match : emit current byte as\n // literal and move 1 byte forward\n longer_match = true\n char = text[pos]\n items.push(char)\n pos += 1\n }\n }\n if(! longer_match){\n distance = buf.length - buf_pos\n items.push($B.fast_tuple([len, distance]))\n if(pos + len == text.length){\n break\n }else{\n pos += len\n items.push(text[pos])\n pos += 1\n }\n }\n }else{\n char = text[pos]\n items.push(char)\n pos += 1\n }\n }\n return items\n }\n}\n\n$B.addToImported('_zlib_utils', mod)\n\n})(__BRYTHON__)"], "marshal": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nvar module = {\n loads: function(){\n var $ = $B.args('loads', 1, {obj:null}, ['obj'], arguments, {},\n null, null)\n return $B.structuredclone2pyobj(JSON.parse($.obj))\n },\n load: function(){\n var $ = $B.args('load', 1, {file:null}, ['file'], arguments, {},\n null, null)\n var content = $B.$call($B.$getattr($.file, \"read\"))()\n return $module.loads(_b_.bytes.decode(content, \"latin-1\"));\n },\n dump: function(){\n var $ = $B.args('dump', 2, {value:null, file: null},\n ['value', 'file'], arguments, {}, null, null)\n var s = JSON.stringify($B.pyobj2structuredclone($.value))\n $B.$getattr($.file, \"write\")(_b_.str.encode(s, 'latin-1'))\n var flush = $B.$getattr($.file, \"flush\", null)\n if(flush !== null){\n $B.$call(flush)()\n }\n return _b_.None\n },\n dumps: function(){\n var $ = $B.args('dumps', 1, {obj:null}, ['obj'], arguments, {},\n null, null)\n return JSON.stringify($B.pyobj2structuredclone($.obj))\n }\n}\n\n$B.addToImported('marshal', module)\n\n})(__BRYTHON__)\n"], "_json": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nfunction simple(obj){\n switch(typeof obj){\n case 'string':\n case 'number':\n case 'boolean':\n return true\n }\n if(obj instanceof Number ||\n Array.isArray(obj) ||\n $B.$isinstance(obj, [_b_.list, _b_.tuple, _b_.dict])){\n return true\n }\n return false\n}\n\nfunction to_json(obj, level){\n var $defaults = {skipkeys:_b_.False, 
ensure_ascii:_b_.True,\n check_circular:_b_.True, allow_nan:_b_.True, cls:_b_.None,\n indent:_b_.None, separators:_b_.None, \"default\":_b_.None,\n sort_keys:_b_.False},\n $ = $B.args(\"to_json\", 2, {obj: null, level: null}, ['obj', 'level'],\n arguments, {level: 1}, null, \"kw\")\n\n var kw = _b_.dict.$to_obj($.kw)\n for(var key in $defaults){\n if(! kw.hasOwnProperty(key)){\n kw[key] = $defaults[key]\n }\n }\n\n var indent = kw.indent,\n ensure_ascii = kw.ensure_ascii,\n separators = kw.separators === _b_.None ?\n kw.indent === _b_.None ? [', ', ': '] : [',', ': '] :\n kw.separators,\n skipkeys = kw.skipkeys,\n _default = kw.default,\n sort_keys = kw.sort_keys,\n allow_nan = kw.allow_nan,\n check_circular = kw.check_circular\n\n var item_separator = separators[0],\n key_separator = separators[1]\n if(indent !== _b_.None){\n var indent_str\n if(typeof indent == \"string\"){\n indent_str = indent\n }else if(typeof indent == \"number\" && indent >= 1){\n indent_str = \" \".repeat(indent)\n }else{\n throw _b_.ValueError.$factory(\"invalid indent: \" +\n _b_.str.$factory(indent))\n }\n }\n var kwarg = {$kw: [{}]}\n for(var key in kw){\n kwarg.$kw[0][key] = kw[key]\n }\n\n switch(typeof obj){\n case 'string':\n var res = JSON.stringify(obj)\n if(ensure_ascii){\n var escaped = ''\n for(var i = 0, len = res.length; i < len; i++){\n var u = res.codePointAt(i)\n if(u > 127){\n u = u.toString(16)\n while(u.length < 4){\n u = \"0\" + u\n }\n escaped += '\\\\u' + u\n }else{\n escaped += res.charAt(i)\n }\n }\n return escaped\n }\n return res\n case 'boolean':\n return obj.toString()\n case 'number':\n if([Infinity, -Infinity].indexOf(obj) > -1 ||\n isNaN(obj)){\n if(! allow_nan){\n throw _b_.ValueError.$factory(\n 'Out of range float values are not JSON compliant')\n }\n }\n return obj.toString()\n }\n if(obj instanceof String){\n if(! ensure_ascii){\n return $B.String(obj)\n }\n // string with surrogate pairs. cf. 
issue #1903.\n var res = ''\n if(obj.surrogates){\n var s_ix = 0,\n s_pos = obj.surrogates[s_ix]\n for(var i = 0, len = obj.length; i < len; i++){\n if(i == s_pos){\n var code = obj.codePointAt(i) - 0x10000\n res += '\\\\u' + (0xD800 | (code >> 10)).toString(16) +\n '\\\\u' + (0xDC00 | (code & 0x3FF)).toString(16)\n i++\n s_ix++\n s_pos = obj.surrogates[s_ix]\n }else{\n var code = obj.charCodeAt(i)\n if(code < 127){\n var x = _b_.repr(obj[i])\n res += x.substr(1, x.length - 2)\n }else{\n var x = code.toString(16)\n while(x.length < 4){\n x = '0' + x\n }\n res += '\\\\u' + x\n }\n }\n }\n }\n return '\"' + res.replace(new RegExp('\"', \"g\"), '\\\\\"') + '\"'\n }\n\n if($B.$isinstance(obj, _b_.list)){\n var res = []\n var sep = item_separator,\n first = '[',\n last = ']'\n if(indent !== _b_.None){\n sep += \"\\n\" + indent_str.repeat(level)\n first = '[' + '\\n' + indent_str.repeat(level)\n last = '\\n' + indent_str.repeat(level - 1) + ']'\n level++\n }\n for(var i = 0, len = obj.length; i < len; i++){\n res.push(to_json(obj[i], level, kwarg))\n }\n return first + res.join(sep) + last\n }else if($B.$isinstance(obj, _b_.float)){\n return obj.value\n }else if(obj.__class__ === $B.long_int){\n return obj.value.toString()\n }else if(obj === _b_.None){\n return \"null\"\n }else if($B.$isinstance(obj, _b_.dict)){\n var res = [],\n items = Array.from($B.make_js_iterator(_b_.dict.items(obj)))\n if(sort_keys){\n // Sort keys by alphabetical order\n items.sort()\n }\n var sep = item_separator,\n first = '{',\n last = '}'\n if(indent !== _b_.None){\n sep += \"\\n\" + indent_str.repeat(level)\n first = '{' + '\\n' + indent_str.repeat(level)\n last = '\\n' + indent_str.repeat(level - 1) + '}'\n level++\n }\n for(var i = 0, len = items.length; i < len; i++){\n var item = items[i]\n if(! simple(item[0])){\n if(! skipkeys){\n throw _b_.TypeError.$factory(\"keys must be str, int, \" +\n \"float, bool or None, not \" + $B.class_name(obj))\n }\n }else{\n // In the result, key must be a string\n var key = _b_.str.$factory(item[0])\n // Check circular reference\n if(check_circular && $B.repr.enter(item[1])){\n throw _b_.ValueError.$factory(\"Circular reference detected\")\n }\n res.push(\n [to_json(key, level, kwarg), to_json(item[1], level, kwarg)].\n join(key_separator))\n if(check_circular){\n $B.repr.leave(item[1])\n }\n }\n }\n return first + res.join(sep) + last\n }\n // For other types, use function default if provided\n if(_default == _b_.None){\n throw _b_.TypeError.$factory(\"Object of type \" + $B.class_name(obj) +\n \" is not JSON serializable\")\n }else{\n return to_json($B.$call(_default)(obj), level, kwarg)\n }\n}\n\nfunction loads(s){\n var args = []\n for(var i = 1, len = arguments.length; i < len; i++){\n args.push(arguments[i])\n }\n var decoder = JSONDecoder.$factory.apply(null, args)\n return JSONDecoder.decode(decoder, s)\n}\n\nfunction to_py(obj, kw){\n // Conversion to Python objects\n // kw are the keyword arguments to loads()\n var res\n if(obj instanceof List){\n return obj.items.map(x => to_py(x, kw))\n }else if(obj instanceof Dict){\n if(kw.object_pairs_hook !== _b_.None){\n var pairs = []\n for(var i = 0, len = obj.keys.length; i < len; i++){\n pairs.push($B.fast_tuple([obj.keys[i],\n to_py(obj.values[i], kw)]))\n }\n return $B.$call(kw.object_pairs_hook)(pairs)\n }else{\n var dict = $B.empty_dict()\n for(var i = 0, len = obj.keys.length; i < len; i++){\n _b_.dict.$setitem(dict, obj.keys[i], to_py(obj.values[i], kw))\n }\n return kw.object_hook === _b_.None ? 
dict :\n $B.$call(kw.object_hook)(dict)\n }\n }else if(obj.type == 'str'){\n return obj.value\n }else if(obj.type == 'num'){\n if(obj.value.search(/[.eE]/) > -1){\n // float\n if(kw.parse_float !== _b_.None){\n return $B.$call(kw.parse_float)(obj.value)\n }\n return $B.fast_float(parseFloat(obj.value))\n }else{\n // integer\n if(kw.parse_int !== _b_.None){\n return $B.$call(kw.parse_int)(obj.value)\n }\n var int = parseInt(obj.value)\n if(Math.abs(int) < $B.max_int){\n return int\n }else{\n return $B.fast_long_int(BigInt(obj.value))\n }\n }\n }else{\n if(obj instanceof Number && kw.parse_float !== _b_.None){\n return $B.$call(kw.parse_float)(obj)\n }else if(kw.parse_int !== _b_.None &&\n (typeof obj == 'number' || obj.__class__ === $B.long_int)){\n return $B.$call(kw.parse_int)(obj)\n }else if(kw.parse_constant !== _b_.None && ! isFinite(obj)){\n return kw.parse_constant(obj)\n }\n return obj\n }\n}\n\nvar escapes = {'n': '\\n',\n 't': '\\t',\n 'b': '\\b',\n 'r': '\\r',\n 'f': '\\f',\n '\\\\': '\\\\',\n '\"': '\\\"',\n \"'\": \"\\\\'\",\n '/': '/'\n }\n\nfunction string_at(s, i){\n var error = $B.$call($B.imported[\"json\"].JSONDecodeError)\n\n var j = i + 1,\n escaped = false,\n len = s.length,\n value = ''\n while(j < len){\n if(s[j] == '\"' && ! escaped){\n return [{type: 'str', value}, j + 1]\n }else if(! escaped && s[j] == '\\\\'){\n escaped = ! escaped\n j++\n }else if(escaped){\n var esc = escapes[s[j]]\n if(esc){\n value += esc\n j++\n escaped = false\n }else if(s[j] == 'u' &&\n s.substr(j + 1, 4).match(/[0-9a-fA-f]{4}/)){\n // unicode escape\n value += String.fromCharCode(parseInt(s.substr(j + 1, 4), 16))\n j += 5\n escaped = ! escaped\n }else{\n throw error('invalid escape \"' + s[j] + '\"', s, j)\n }\n }else{\n value += s[j]\n j++\n }\n }\n}\n\nfunction to_num(num_string, nb_dots, exp){\n // convert to correct Brython type\n if(exp || nb_dots){\n return new Number(num_string)\n }else{\n var int = parseInt(num_string)\n if(Math.abs(int) < $B.max_int){\n return int\n }else{\n if(num_string.startsWith('-')){\n return $B.fast_long_int(num_string.substr(1), false)\n }else{\n return $B.fast_long_int(num_string, true)\n }\n }\n }\n}\n\nfunction num_at(s, i){\n var res = s[i],\n j = i + 1,\n nb_dots = 0,\n exp = false,\n len = s.length\n while(j < len){\n if(s[j].match(/\\d/)){\n j++\n }else if(s[j] == '.' && nb_dots == 0){\n nb_dots++\n j++\n }else if('eE'.indexOf(s[j]) > -1 && ! exp){\n exp = ! 
exp\n j++\n }else if(s[j] == '-' && 'eE'.includes(s[j-1])){\n j++\n }else{\n return [{type: 'num', value: s.substring(i, j)}, j]\n }\n }\n return [{type: 'num', value: s.substring(i, j)}, j]\n}\n\nvar JSONError = $B.make_class('json.decoder.JSONError')\nJSONError.__bases__ = [_b_.Exception]\nJSONError.__mro__ = _b_.type.mro(JSONError)\n\n\nfunction* tokenize(s){\n var i = 0,\n len = s.length,\n line_num = 1,\n column_start = 0,\n value,\n end\n while(i < len){\n if(s[i] == \" \" || s[i] == '\\r' || s[i] == '\\n' || s[i] == '\\t'){\n i++\n line_num++\n column_start = i\n }else if('[]{}:,'.indexOf(s[i]) > -1){\n yield [s[i], i]\n i++\n }else if(s.substr(i, 4) == 'null'){\n yield [_b_.None , i]\n i += 4\n }else if(s.substr(i, 4) == 'true'){\n yield [true, i]\n i += 4\n }else if(s.substr(i, 5) == 'false'){\n yield [false, i]\n i += 5\n }else if(s.substr(i, 8) == 'Infinity'){\n yield [{type: 'num', value: 'Infinity'}, i]\n i += 8\n }else if(s.substr(i, 9) == '-Infinity'){\n yield [{type: 'num', value: '-Infinity'}, i]\n i += 9\n }else if(s.substr(i, 3) == 'NaN'){\n yield [{type: 'num', value: 'NaN'}, i]\n i += 3\n }else if(s[i] == '\"'){\n value = string_at(s, i)\n yield value\n i = value[1]\n }else if(s[i].match(/\\d/) || s[i] == '-'){\n value = num_at(s, i)\n yield value\n i = value[1]\n }else{\n throw $B.$call(JSONError)('Extra data: ' +\n `line ${line_num} column ${1 + i - column_start}`)\n }\n }\n}\n\nfunction Node(parent){\n this.parent = parent\n if(parent instanceof List){\n this.list = parent.items\n }else if(parent instanceof Dict){\n this.list = parent.values\n }else if(parent === undefined){\n this.list = []\n }\n}\n\nNode.prototype.transition = function(token){\n if([true, false, _b_.None].includes(token) ||\n ['str', 'num'].includes(token.type)){\n if(this.parent === undefined &&\n (this.list.length > 0 || this.content)){\n throw Error('Extra data')\n }\n this.list.push(token)\n return this.parent ? 
this.parent : this\n }else if(token == '{'){\n if(this.parent === undefined){\n this.content = new Dict(this)\n return this.content\n }\n return new Dict(this.parent)\n }else if(token == '['){\n if(this.parent === undefined){\n this.content = new List(this)\n return this.content\n }\n return new List(this.parent)\n }else{\n throw Error('unexpected item:' + token)\n }\n}\n\nfunction Dict(parent){\n this.parent = parent\n this.keys = []\n this.values = []\n this.expect = 'key'\n if(parent instanceof List){\n parent.items.push(this)\n }else if(parent instanceof Dict){\n parent.values.push(this)\n }\n}\n\nDict.prototype.transition = function(token){\n if(this.expect == 'key'){\n if(token.type == 'str'){\n this.keys.push(token.value)\n this.expect = ':'\n return this\n }else if(token == '}' && this.keys.length == 0){\n return this.parent\n }else{\n throw Error('expected str')\n }\n }else if(this.expect == ':'){\n if(token == ':'){\n this.expect = '}'\n return new Node(this)\n }else{\n throw Error('expected :')\n }\n }else if(this.expect == '}'){\n if(token == '}'){\n return this.parent\n }else if(token == ','){\n this.expect = 'key'\n return this\n }\n throw Error('expected }')\n }\n}\n\nfunction List(parent){\n if(parent instanceof List){\n parent.items.push(this)\n }\n this.parent = parent\n this.items = []\n this.expect = 'item'\n}\n\nList.prototype.transition = function(token){\n if(this.expect == 'item'){\n this.expect = ','\n if([true, false, _b_.None].indexOf(token) > -1){\n this.items.push(token)\n return this\n }else if(token.type == 'num' || token.type == 'str'){\n this.items.push(token)\n return this\n }else if(token == '{'){\n return new Dict(this)\n }else if(token == '['){\n return new List(this)\n }else if(token == ']'){\n if(this.items.length == 0){\n if(this.parent instanceof Dict){\n this.parent.values.push(this)\n }\n return this.parent\n }\n throw Error('unexpected ]')\n }else{\n console.log('token', token)\n throw Error('unexpected item:' + token)\n }\n\n }else if(this.expect == ','){\n this.expect = 'item'\n if(token == ','){\n return this\n }else if(token == ']'){\n if(this.parent instanceof Dict){\n this.parent.values.push(this)\n }\n return this.parent\n }else{\n throw Error('expected :')\n }\n }\n}\n\nfunction parse(s){\n var res,\n state,\n node = new Node(),\n root = node,\n token\n for(var item of tokenize(s)){\n token = item[0]\n try{\n node = node.transition(token)\n }catch(err){\n console.log('error, item', item)\n console.log(err, err.message)\n console.log('node', node)\n if(err.__class__){\n throw err\n }else{\n var error = $B.$call($B.imported[\"json\"].JSONDecodeError)\n throw error(err.message, s, item[1])\n }\n }\n }\n return root.content ? 
root.content : root.list[0]\n}\n\nvar JSONDecoder = $B.make_class(\"JSONDecoder\",\n function(){\n var $defaults = {cls: _b_.None, object_hook: _b_.None,\n parse_float: _b_.None, parse_int: _b_.None,\n parse_constant: _b_.None, object_pairs_hook: _b_.None},\n $ = $B.args(\"decode\", 0, {}, [], arguments, {}, null, \"kw\")\n var kw = _b_.dict.$to_obj($.kw)\n for(var key in $defaults){\n if(kw[key] === undefined){\n kw[key] = $defaults[key]\n }\n }\n return {\n __class__: JSONDecoder,\n object_hook: kw.object_hook,\n parse_float: kw.parse_float,\n parse_int: kw.parse_int,\n parse_constant: kw.parse_constant,\n object_pairs_hook: kw.object_pairs_hook,\n memo: $B.empty_dict()\n }\n }\n)\n\nJSONDecoder.decode = function(self, s){\n return to_py(parse(s), self)\n}\n\n$B.imported._json = {\n dumps: function(){\n return _b_.str.$factory(to_json.apply(null, arguments))\n },\n loads,\n JSONDecoder\n}\n\n})(__BRYTHON__)"], "_binascii": [".js", "(function($B){\n\nvar _b_ = $B.builtins,\n _keyStr = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\"\n\nvar error = $B.make_class(\"error\", _b_.Exception.$factory)\nerror.__bases__ = [_b_.Exception]\n$B.set_func_names(error, \"binascii\")\n\nfunction decode(bytes, altchars, validate){\n var output = [],\n chr1, chr2, chr3,\n enc1, enc2, enc3, enc4\n\n var alphabet = make_alphabet(altchars)\n\n var input = bytes.source\n\n // If validate is set, check that all characters in input\n // are in the alphabet\n var _input = ''\n var padding = 0\n for(var i = 0, len = input.length; i < len; i++){\n var car = String.fromCharCode(input[i])\n var char_num = alphabet.indexOf(car)\n if(char_num == -1){\n if(validate){throw error.$factory(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i < input.length - 2){\n if(validate){throw error.$factory(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i >= input.length - 2){\n padding++\n _input += car\n }else{\n _input += car\n }\n }\n input = _input\n if(_input.length == padding){return _b_.bytes.$factory([])}\n if( _input.length % 4 > 0){throw error.$factory(\"Incorrect padding\")}\n\n var i = 0\n while(i < input.length){\n\n enc1 = alphabet.indexOf(input.charAt(i++))\n enc2 = alphabet.indexOf(input.charAt(i++))\n enc3 = alphabet.indexOf(input.charAt(i++))\n enc4 = alphabet.indexOf(input.charAt(i++))\n\n chr1 = (enc1 << 2) | (enc2 >> 4)\n chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)\n chr3 = ((enc3 & 3) << 6) | enc4\n\n output.push(chr1)\n\n if(enc3 != 64){output.push(chr2)}\n if(enc4 != 64){output.push(chr3)}\n\n }\n // return Python bytes\n return _b_.bytes.$factory(output)\n}\n\n\nvar hex2int = {},\n hex = '0123456789abcdef'\nfor(var i = 0; i < hex.length; i++){\n hex2int[hex[i]] = i\n hex2int[hex[i].toUpperCase()] = i\n}\n\nfunction make_alphabet(altchars){\n var alphabet = _keyStr\n if(altchars !== undefined && altchars !== _b_.None){\n // altchars is an instance of Python bytes\n var source = altchars.source\n alphabet = alphabet.substr(0,alphabet.length-3) +\n _b_.chr(source[0]) + _b_.chr(source[1]) + '='\n }\n return alphabet\n}\n\nvar module = {\n a2b_base64: function(){\n var $ = $B.args(\"a2b_base64\", 2, {s: null, strict_mode: null}, \n ['s', 'strict_mode'],\n arguments, {strict_mode: false}, null, null)\n var bytes\n if($B.$isinstance($.s, _b_.str)){\n bytes = _b_.str.encode($.s, 'ascii')\n }else if($B.$isinstance($.s, [_b_.bytes, _b_.bytearray])){\n bytes = $.s\n }else{\n throw _b_.TypeError.$factory('wrong type: ' + $B.class_name($.s))\n }\n return 
decode(bytes)\n },\n a2b_hex: function(){\n var $ = $B.args(\"a2b_hex\", 1, {s: null}, ['s'],\n arguments, {}, null, null),\n s = $.s\n if($B.$isinstance(s, _b_.bytes)){\n s = _b_.bytes.decode(s, 'ascii')\n }\n if(typeof s !== \"string\"){\n throw _b_.TypeError.$factory(\"argument should be bytes, \" +\n \"buffer or ASCII string, not '\" + $B.class_name(s) + \"'\")\n }\n\n var len = s.length\n if(len % 2 == 1){\n throw _b_.TypeError.$factory('Odd-length string')\n }\n\n var res = []\n for(var i = 0; i < len; i += 2){\n res.push((hex2int[s.charAt(i)] << 4) + hex2int[s.charAt(i + 1)])\n }\n return _b_.bytes.$factory(res)\n },\n b2a_base64: function(){\n var $ = $B.args(\"b2a_base64\", 1, {data: null}, ['data'],\n arguments, {}, null, \"kw\")\n var newline = _b_.dict.$get_string($.kw, 'newline', false)\n\n var string = $B.to_bytes($.data),\n res = btoa(String.fromCharCode.apply(null, string))\n if(newline){res += \"\\n\"}\n return _b_.bytes.$factory(res, \"ascii\")\n },\n b2a_hex: function(obj){\n var string = $B.to_bytes(obj),\n res = []\n function conv(c){\n if(c > 9){\n c = c + 'a'.charCodeAt(0) - 10\n }else{\n c = c + '0'.charCodeAt(0)\n }\n return c\n }\n string.forEach(function(char){\n res.push(conv((char >> 4) & 0xf))\n res.push(conv(char & 0xf))\n })\n return _b_.bytes.$factory(res)\n },\n b2a_uu: function(obj){\n var string = _b_.bytes.decode(obj, 'ascii')\n var len = string.length,\n res = String.fromCharCode((0x20 + len) & 0x3F)\n while(string.length > 0){\n var s = string.slice(0, 3)\n while(s.length < 3){s.push(String.fromCharCode(0))}\n var A = s[0],\n B = s[1],\n C = s[2]\n var a = (A >> 2) & 0x3F,\n b = ((A << 4) | ((B >> 4) & 0xF)) & 0x3F,\n c = (((B << 2) | ((C >> 6) & 0x3)) & 0x3F),\n d = C & 0x3F\n res += String.fromCharCode(0x20 + a, 0x20 + b, 0x20 + c, 0x20 + d)\n string = string.slice(3)\n }\n return _b_.bytes.$factory(res + \"\\n\", \"ascii\")\n },\n error: error\n}\n\nmodule.hexlify = module.b2a_hex\nmodule.unhexlify = module.a2b_hex\n\n$B.imported._binascii = module\n}\n)(__BRYTHON__)"], "_string": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nfunction parts(format_string){\n var result = [],\n _parts = $B.split_format(format_string) // defined in py_string.js\n for(var i = 0; i < _parts.length; i+= 2){\n result.push({pre: _parts[i], fmt: _parts[i + 1]})\n }\n return result\n}\n\nfunction Tuple(){\n var args = []\n for(var i=0, len=arguments.length; i < len; i++){\n args.push(arguments[i])\n }\n return _b_.tuple.$factory(args)\n}\n\n$B.imported._string = {\n\n formatter_field_name_split: function(fieldname){\n // Split the argument as a field name\n var parsed = $B.parse_format(fieldname),\n first = parsed.name,\n rest = []\n if(first.match(/\\d+/)){first = parseInt(first)}\n parsed.name_ext.forEach(function(ext){\n if(ext.startsWith(\"[\")){\n var item = ext.substr(1, ext.length - 2)\n if(item.match(/\\d+/)){\n rest.push(Tuple(false, parseInt(item)))\n }else{\n rest.push(Tuple(false, item))\n }\n }else{\n rest.push(Tuple(true, ext.substr(1)))\n }\n })\n return Tuple(first, _b_.iter(rest))\n },\n formatter_parser: function(format_string){\n // Parse the argument as a format string\n\n if(! _b_.isinstance(format_string, _b_.str)){\n throw _b_.ValueError.$factory(\"Invalid format string type: \" +\n $B.class_name(format_string))\n }\n\n var result = []\n parts(format_string).forEach(function(item){\n var pre = item.pre === undefined ? 
\"\" : item.pre,\n fmt = item.fmt\n if(fmt === undefined){\n result.push(Tuple(pre, _b_.None, _b_.None, _b_.None))\n }else if(fmt.string == ''){\n result.push(Tuple(pre, '', '', _b_.None))\n }else{\n result.push(Tuple(pre,\n fmt.raw_name + fmt.name_ext.join(\"\"),\n fmt.raw_spec,\n fmt.conv || _b_.None))\n }\n })\n return result\n }\n}\n})(__BRYTHON__)"], "_webcomponent": [".js", "// module for Web Components\n(function($B){\n\nvar _b_ = $B.builtins\n\nfunction define(tag_name, cls, options){\n var $ = $B.args(\"define\", 3, {tag_name: null, cls: null, options: null},\n [\"tag_name\", \"cls\", \"options\"], arguments, {options: _b_.None},\n null, null),\n tag_name = $.tag_name,\n cls = $.cls,\n options = $.options,\n _extends,\n extend_dom_name = 'HTMLElement'\n if(options !== _b_.None){\n if(! $B.$isinstance(options, _b_.dict)){\n throw _b_.TypeError.$factory('options can only be None or a ' +\n `dict, not '${$B.class_name(options)}'`)\n }\n try{\n _extends = _b_.dict.$getitem(options, 'extends')\n }catch(err){\n // ignore\n }\n }else{\n let stack = [...cls.__bases__];\n while(stack.length) {\n base = stack.pop();\n if(base.__module__ === 'browser.html'){\n _extends = base.__name__.toLowerCase()\n break\n }\n\n stack.push(...base.__bases__);\n }\n }\n\n if(_extends){\n if(typeof _extends != 'string'){\n throw _b_.TypeError.$factory('value for extends must be a ' +\n `string, not '${$B.class_name(_extends)}'`)\n }\n var elt = document.createElement(_extends)\n if(elt instanceof HTMLUnknownElement){\n throw _b_.ValueError.$factory(`'${_extends}' is not a valid ` +\n 'tag name')\n }\n var extend_tag = _extends.toLowerCase()\n extend_dom_name = Object.getPrototypeOf(elt).constructor.name\n }\n if(typeof tag_name != \"string\"){\n throw _b_.TypeError.$factory(\"first argument of define() \" +\n \"must be a string, not '\" + $B.class_name(tag_name) + \"'\")\n }else if(tag_name.indexOf(\"-\") == -1){\n throw _b_.ValueError.$factory(\"custom tag name must \" +\n \"contain a hyphen (-)\")\n }\n if(!$B.$isinstance(cls, _b_.type)){\n throw _b_.TypeError.$factory(\"second argument of define() \" +\n \"must be a class, not '\" + $B.class_name(tag_name) + \"'\")\n }\n cls.$webcomponent = true\n\n // Create the Javascript class used for the component. It must have\n // the same name as the Python class\n var src = String.raw`var WebComponent = class extends HTMLElement {\n constructor(){\n // Always call super first in constructor\n super()\n var html = $B.imported['browser.html']\n // Create tag in module html\n if(html['tag_name'] === undefined){\n html.maketag('tag_name', WebComponent)\n }\n var init = $B.$getattr(cls, \"__init__\", _b_.None)\n if(init !== _b_.None){\n try{\n var _self = $B.DOMNode.$factory(this),\n attrs_before_init = []\n for(var i = 0, len = _self.attributes.length; i < len; i++){\n attrs_before_init.push(_self.attributes.item(i))\n }\n _self.__class__ = cls\n $B.$call(init)(_self)\n if(WebComponent.initialized){\n // Check that init() did not introduce new attributes,\n // which is illegal\n // cf. 
https://html.spec.whatwg.org/multipage/custom-elements.html#custom-element-conformance\n for(var i = 0, len = _self.attributes.length; i < len; i++){\n var item = _self.attributes.item(i)\n if(attrs_before_init.indexOf(item) == -1){\n throw _b_.TypeError.$factory(\"Custom element \" +\n \"must not create attributes, found: \" +\n item.name + '=\"' + item.value + '\"')\n }\n }\n }\n }catch(err){\n $B.handle_error(err)\n }\n }\n }\n static get observedAttributes(){\n var obs_attr = $B.$getattr(cls, \"observedAttributes\", null)\n if(obs_attr === null){\n return []\n }else if(typeof obs_attr == \"function\"){\n var warning = _b_.DeprecationWarning.$factory(\n \"Setting observedAttributes as a method \" +\n \"is deprecated. Set it as a class attribute.\")\n // module _warning is in builtin_modules.js\n $B.imported._warnings.warn(warning)\n return $B.$call(obs_attr)(this)\n }else if(Array.isArray(obs_attr)){\n return obs_attr\n }else{\n throw _b_.TypeError.$factory(\n \"wrong type for observedAttributes: \" +\n $B.class_name(obs_attr))\n }\n }\n }\n `\n var name = cls.__name__,\n code = src.replace(/WebComponent/g, name).\n replace(/tag_name/g, tag_name).\n replace(/HTMLElement/, extend_dom_name)\n var src = eval(code)\n var webcomp = eval(name) // JS class for component\n webcomp.$cls = cls\n\n // Override __getattribute__ to handle DOMNode attributes such as\n // attachShadow\n cls.__getattribute__ = function(self, attr){\n try{\n return $B.DOMNode.__getattribute__(self, attr)\n }catch(err){\n if($B.DOMNode[attr]){\n if(typeof $B.DOMNode[attr] == 'function'){\n return function(){\n var args = [self]\n for(var i = 0, len = arguments.length; i < len; i++){\n args.push(arguments[i])\n }\n return $B.DOMNode[attr].apply(null, args)\n }\n }else{\n return $B.DOMNode[attr]\n }\n }\n throw err\n }\n }\n\n var mro = [cls].concat(cls.__mro__).reverse()\n for(var i = 0, len = mro.length; i < len; i++){\n var pcls = mro[i]\n for(var key in pcls){\n if((! 
webcomp.hasOwnProperty(key)) &&\n typeof pcls[key] == \"function\" &&\n // don't set $factory (would make it a class)\n key !== '$factory'\n ){\n webcomp.prototype[key] = (function(attr, klass){\n return function(){\n try{\n return $B.$call(klass[attr])($B.DOMNode.$factory(this), ...arguments)\n }catch(err){\n $B.show_error(err)\n }\n }\n })(key, pcls)\n }\n }\n }\n\n // define WebComp as the class to use for the specified tag name\n if(_extends){\n customElements.define(tag_name, webcomp, {extends: extend_tag})\n }else{\n customElements.define(tag_name, webcomp)\n }\n webcomp.initialized = true\n}\n\nfunction get(name){\n var ce = customElements.get(name)\n if(ce && ce.$cls){return ce.$cls}\n return _b_.None\n}\n\nvar module = {\n define: define,\n get: get\n}\n\n$B.addToImported('_webcomponent', module)\n\n})(__BRYTHON__)\n"], "html_parser": [".js", "(function($B){\n\n_b_ = $B.builtins\n\nvar ELEMENT_NODE = 1,\n TEXT_NODE = 3,\n COMMENT_NODE = 8,\n DOCUMENT_TYPE_NODE = 10\n\nvar HTMLNode = $B.make_class(\"HTMLNode\",\n function(){\n return {\n __class__: HTMLNode,\n nodeType: TEXT_NODE,\n text: \"\"\n }\n }\n)\n\nHTMLNode.__str__ = function(self){\n return self.text\n}\n\n$B.set_func_names(HTMLNode, \"_html_parser\")\n\nfunction* tokenize(src){\n var node = HTMLNode.$factory(),\n pos = 0,\n tag = \"\",\n type = \"text\"\n while(pos < src.length){\n var char = src[pos]\n switch(type){\n case \"text\":\n if(char == \"<\"){\n // starts a tag if immediately followed by a letter or by /\n var tag_mo = /^(\\/?)[a-zA-Z]+/.exec(src.substr(pos + 1))\n if(tag_mo){\n yield node\n node = HTMLNode.$factory()\n type = \"tag\"\n node.tagName = \"\"\n node.nodeType = ELEMENT_NODE\n node.closing = tag_mo[1] != \"\"\n node.attrs = []\n }else{\n // doctype declaration\n var decl_mo = /^/i.exec(src.substr(pos))\n if(decl_mo){\n yield node\n node = HTMLNode.$factory()\n node.text = decl_mo[0]\n node.doctype = decl_mo[1]\n node.nodeType = DOCUMENT_TYPE_NODE\n yield node\n node = HTMLNode.$factory()\n type = \"text\"\n pos += decl_mo[0].length\n break\n }else{\n // comment\n var comment_mo = /^\\/.exec(src.substr(pos))\n if(comment_mo){\n yield node\n node = HTMLNode.$factory()\n node.text = comment_mo[0]\n node.comment = comment_mo[1]\n node.nodeType = COMMENT_NODE\n yield node\n node = HTMLNode.$factory()\n type = \"text\"\n pos += comment_mo[0].length\n break\n }\n }\n }\n }\n pos++\n node.text += char\n break\n case \"tag\":\n if(char.search(/[_a-zA-Z]/) > -1){\n var mo = /\\w+/.exec(src.substr(pos))\n if(mo !== null){\n pos += mo[0].length\n if(node.tagName == \"\"){\n node.tagName = mo[0].toUpperCase()\n }\n node.text += mo[0]\n }else{\n pos++\n }\n }else if(char == \">\"){\n node.text += char\n yield node\n node = HTMLNode.$factory()\n type = \"text\"\n pos++\n }else if(char == \"=\"){\n node.text += char\n pos++\n }else if(char == \"'\" || char == '\"'){\n var i = pos + 1,\n found_string_end = false\n while(i < src.length){\n if(src[i] == char){\n var nb_escape = 0\n while(src[i - 1 - nb_escape] == '/'){\n nb_escape++\n }\n if(nb_escape % 2 == 0){\n node.text += src.substr(pos, i + 1 - pos)\n pos = i + 1\n found_string_end = true\n break\n }else{\n i++\n }\n }else if(src[i] == '>'){\n break\n }else{\n i++\n }\n }\n if(! 
found_string_end){\n // unterminated string: ignore\n pos++\n }\n }else{\n node.text += char\n pos++\n }\n break\n default:\n pos++\n }\n }\n yield node\n}\nvar module = {\n ELEMENT_NODE: 1,\n TEXT_NODE: 3,\n COMMENT_NODE: 8,\n DOCUMENT_TYPE_NODE: 10,\n tokenize: tokenize\n}\n\n$B.addToImported('html_parser', module)\n\n})(__BRYTHON__)\n"], "_ajax": [".js", "// ajax\n__BRYTHON__.imported._ajax = (function($B){\n\n\nvar $N = $B.builtins.None,\n _b_ = $B.builtins\n\nvar add_to_res = function(res, key, val) {\n if($B.$isinstance(val, _b_.list)){\n for (j = 0; j < val.length; j++) {\n add_to_res(res, key, val[j])\n }\n }else if (val instanceof File || val instanceof Blob){\n res.append(key, val)\n }else{res.append(key, _b_.str.$factory(val))}\n}\n\nfunction set_timeout(self, timeout){\n if(timeout.seconds !== undefined){\n self.js.$requestTimer = setTimeout(\n function() {\n self.js.abort()\n if(timeout.func){\n timeout.func()\n }\n },\n timeout.seconds * 1000)\n }\n}\n\nfunction _read(req){\n var xhr = req.js\n if(xhr.responseType == \"json\"){\n return $B.structuredclone2pyobj(xhr.response)\n }\n if(req.charset_user_defined){\n // on blocking mode, xhr.response is a string\n var bytes = []\n for(var i = 0, len = xhr.response.length; i < len; i++){\n var cp = xhr.response.codePointAt(i)\n if(cp > 0xf700){\n bytes.push(cp - 0xf700)\n }else{\n bytes.push(cp)\n }\n }\n }else if(typeof xhr.response == \"string\"){\n if(req.mode == 'binary'){\n return _b_.str.encode(xhr.response, req.encoding || 'utf-8')\n }\n return xhr.response\n }else{\n // else it's an ArrayBuffer\n var buf = new Uint8Array(xhr.response),\n bytes = Array.from(buf.values())\n }\n var b = _b_.bytes.$factory(bytes)\n if(req.mode == \"binary\"){\n return b\n }else if(req.mode == \"document\"){\n return $B.jsobj2pyobj(xhr.response)\n }else{\n var encoding = req.encoding || \"utf-8\"\n return _b_.bytes.decode(b, encoding)\n }\n}\n\nfunction stringify(d){\n var items = []\n for(var entry of _b_.dict.$iter_items(d)){\n items.push(encodeURIComponent(entry.key) + \"=\" +\n encodeURIComponent(entry.value))\n }\n return items.join(\"&\")\n}\n\nfunction handle_kwargs(self, kw, method){\n // kw was created with $B.obj_dict(), its keys/values are in kw.$jsobj\n var data,\n encoding,\n headers={},\n cache,\n mode = \"text\",\n timeout = {},\n rawdata\n\n for(var item of _b_.dict.$iter_items(kw)){\n var key = item.key\n if(key == \"data\"){\n var rawdata = item.value\n if(typeof rawdata == \"string\" || rawdata instanceof FormData){\n data = rawdata\n }else if(rawdata.__class__ === _b_.dict){\n data = stringify(rawdata)\n }else{\n throw _b_.TypeError.$factory(\"wrong type for data: \" +\n $B.class_name(rawdata))\n }\n }else if(key == \"encoding\"){\n encoding = item.value\n }else if(key == \"headers\"){\n var value = item.value\n if(! 
$B.$isinstance(value, _b_.dict)){\n throw _b_.ValueError.$factory(\n \"headers must be a dict, not \" + $B.class_name(value))\n }\n for(var subitem of _b_.dict.$iter_items(value)){\n headers[subitem.key.toLowerCase()] = subitem.value\n }\n }else if(key.startsWith(\"on\")){\n var event = key.substr(2)\n if(event == \"timeout\"){\n timeout.func = item.value\n }else{\n var f = item.value\n ajax.bind(self, event, f)\n }\n }else if(key == \"mode\"){\n var mode = item.value\n }else if(key == \"timeout\"){\n timeout.seconds = item.value\n }else if(key == \"cache\"){\n cache = item.value\n }\n }\n if(encoding && mode != \"text\"){\n throw _b_.ValueError.$factory(\"encoding not supported for mode \" +\n mode)\n }\n if((method == \"post\" || method == \"put\") && ! headers){\n // For POST requests, set default header\n self.js.setRequestHeader(\"Content-type\",\n \"application/x-www-form-urlencoded\")\n }\n\n return {cache, data, rawdata, encoding, headers, mode, timeout}\n}\n\nvar ajax = $B.make_class('ajax')\n\najax.__repr__ = function(self){\n return ''\n}\n\najax.__getattribute__ = function(self, attr){\n if(ajax[attr] !== undefined){\n return function(){\n return ajax[attr].call(null, self, ...arguments)\n }\n }else if(attr == \"text\"){\n return _read(self)\n }else if(attr == \"json\"){\n if(self.js.responseType == \"json\"){\n return _read(self)\n }else{\n var resp = _read(self)\n try{\n return $B.structuredclone2pyobj(JSON.parse(resp))\n }catch(err){\n console.log('attr json, invalid resp', resp)\n throw err\n }\n }\n }else if(self.js[attr] !== undefined){\n if(typeof self.js[attr] == \"function\"){\n return function(){\n if(attr == \"setRequestHeader\"){\n ajax.set_header.call(null, self, ...arguments)\n }else{\n if(attr == 'overrideMimeType'){\n console.log('override mime type')\n self.hasMimeType = true\n }\n return self.js[attr](...arguments)\n }\n }\n }else{\n return self.js[attr]\n }\n }else if(attr == \"xml\"){\n return $B.jsobj2pyobj(self.js.responseXML)\n }\n}\n\najax.bind = function(self, evt, func){\n // req.bind(evt,func) is the same as req.onevt = func\n self.js['on' + evt] = function(){\n try{\n return func.apply(null, arguments)\n }catch(err){\n $B.handle_error(err)\n }\n }\n return _b_.None\n}\n\najax.open = function(){\n var $ = $B.args('open', 4,\n {self: null, method: null, url: null, async: null},\n ['self', 'method', 'url', 'async'], arguments,\n {async: true}, null, null),\n self = $.self,\n method = $.method,\n url = $.url,\n async = $.async\n if(typeof method !== \"string\"){\n throw _b_.TypeError.$factory(\n 'open() argument method should be string, got ' +\n $B.class_name(method))\n }\n if(typeof url !== \"string\"){\n throw _b_.TypeError.$factory(\n 'open() argument url should be string, got ' +\n $B.class_name(url))\n }\n self.$method = method\n self.blocking = ! self.async\n self.js.open(method, url, async)\n}\n\najax.read = function(self){\n return _read(self)\n}\n\najax.send = function(self, params){\n // params can be Python dictionary or string\n var content_type\n for(var key in self.headers){\n var value = self.headers[key]\n self.js.setRequestHeader(key, value)\n if(key == 'content-type'){\n content_type = value\n }\n }\n if((self.encoding || self.blocking) && ! 
self.hasMimeType){\n // On blocking mode, or if an encoding has been specified,\n // override Mime type so that bytes are not processed\n // (unless the Mime type has been explicitely set)\n self.js.overrideMimeType('text/plain;charset=x-user-defined')\n self.charset_user_defined = true\n }\n var res = ''\n if(! params){\n self.js.send()\n return _b_.None\n }\n if($B.$isinstance(params, _b_.str)){\n res = params\n }else if($B.$isinstance(params, _b_.dict)){\n if(content_type == 'multipart/form-data'){\n // The FormData object serializes the data in the 'multipart/form-data'\n // content-type so we may as well override that header if it was set\n // by the user.\n res = new FormData()\n var items = _b_.list.$factory(_b_.dict.items(params))\n for(var i = 0, len = items.length; i < len; i++){\n add_to_res(res, _b_.str.$factory(items[i][0]), items[i][1])\n }\n }else{\n if(self.$method && self.$method.toUpperCase() == \"POST\" &&\n ! content_type){\n // Set default Content-Type for POST requests\n self.js.setRequestHeader(\"Content-Type\",\n \"application/x-www-form-urlencoded\")\n }\n var items = _b_.list.$factory(_b_.dict.items(params))\n for(var i = 0, len = items.length; i < len; i++){\n var key = encodeURIComponent(_b_.str.$factory(items[i][0]));\n if($B.$isinstance(items[i][1], _b_.list)){\n for (j = 0; j < items[i][1].length; j++) {\n res += key +'=' +\n encodeURIComponent(_b_.str.$factory(items[i][1][j])) + '&'\n }\n }else{\n res += key + '=' +\n encodeURIComponent(_b_.str.$factory(items[i][1])) + '&'\n }\n }\n res = res.substr(0, res.length - 1)\n }\n }else if(params instanceof FormData){\n res = params\n }else{\n throw _b_.TypeError.$factory(\n \"send() argument must be string or dictionary, not '\" +\n _b_.str.$factory(params.__class__) + \"'\")\n }\n self.js.send(res)\n return _b_.None\n}\n\najax.responseType = _b_.property.$factory(\n function(_self){\n return _self.responseType\n },\n function(_self, value){\n console.log('set response type', value)\n _self.js.responseType = value\n }\n)\n\najax.set_header = function(self, key, value){\n self.headers[key.toLowerCase()] = value\n}\n\najax.set_timeout = function(self, seconds, func){\n self.js.$requestTimer = setTimeout(\n function() {\n self.js.abort()\n func()\n },\n seconds * 1000)\n}\n\najax.$factory = function(){\n\n var xmlhttp = new XMLHttpRequest()\n\n xmlhttp.onreadystatechange = function(){\n // here, \"this\" refers to xmlhttp\n var state = this.readyState\n if(this.responseType == \"\" || this.responseType == \"text\"){\n res.js.text = this.responseText\n }\n var timer = this.$requestTimer\n if(state == 0 && this.onuninitialized){\n this.onuninitialized(res)\n }else if(state == 1 && this.onloading){\n this.onloading(res)\n }else if(state == 2 && this.onloaded){\n this.onloaded(res)\n }else if(state == 3 && this.oninteractive){\n this.oninteractive(res)\n }else if(state == 4 && this.oncomplete){\n if(timer !== null){\n globalThis.clearTimeout(timer)\n }\n this.oncomplete(res)\n }\n }\n var res = {\n __class__: ajax,\n js: xmlhttp,\n headers: {}\n }\n return res\n}\n\n\nfunction _request_without_body(method){\n var $ = $B.args(method, 3, {method: null, url: null, blocking: null},\n [\"method\", \"url\", \"blocking\"], arguments, {blocking: false},\n null, \"kw\"),\n method = $.method,\n url = $.url,\n async = !$.blocking,\n kw = $.kw\n\n var self = ajax.$factory()\n self.blocking = $.blocking\n var items = handle_kwargs(self, kw, method),\n mode = self.mode = items.mode,\n encoding = self.encoding = items.encoding,\n qs = 
items.data\n if(qs){\n url += \"?\" + qs\n }\n if(! (items.cache === true)){\n url += (qs ? \"&\" : \"?\") + (new Date()).getTime()\n }\n self.js.open(method.toUpperCase(), url, async)\n\n if(async){\n if(mode == \"json\" || mode == \"document\"){\n self.js.responseType = mode\n }else{\n self.js.responseType = \"arraybuffer\"\n if(mode != \"text\" && mode != \"binary\"){\n throw _b_.ValueError.$factory(\"invalid mode: \" + mode)\n }\n }\n }else{\n self.js.overrideMimeType('text/plain;charset=x-user-defined')\n self.charset_user_defined = true\n }\n for(var key in items.headers){\n self.js.setRequestHeader(key, items.headers[key])\n }\n var timeout = items.timeout\n if(timeout.seconds){\n ajax.set_timeout(self, timeout.seconds, timeout.func)\n }\n // Add function read() to return str or bytes according to mode\n self.js.send()\n}\n\nfunction _request_with_body(method){\n var $ = $B.args(method, 3, {method: null, url: null, blocking: null},\n [\"method\", \"url\", \"blocking\"], arguments, {blocking: false},\n null, \"kw\"),\n method = $.method,\n url = $.url,\n async = !$.blocking,\n kw = $.kw,\n content_type\n var self = ajax.$factory()\n self.js.open(method.toUpperCase(), url, async)\n var items = handle_kwargs(self, kw, method), // common with browser.aio\n data = items.data\n\n if($B.$isinstance(data, _b_.dict)){\n data = stringify(data)\n }\n for(var key in items.headers){\n var value = items.headers[key]\n self.js.setRequestHeader(key, value)\n if(key == 'content-type'){\n content_type = value\n }\n }\n if(method.toUpperCase() == 'POST' && !content_type){\n // set default Content-Type for POST requests\n self.js.setRequestHeader('Content-Type',\n 'application/x-www-form-urlencoded')\n }\n\n // Add function read() to return str or bytes according to mode\n self.js.read = function(){\n return _read(self)\n }\n self.js.send(data)\n}\n\nfunction form_data(form){\n var missing = {},\n $ = $B.args('form_data', 1, {form: null}, ['form'], arguments,\n {form: missing}, null, null)\n if($.form === missing){\n return new FormData()\n }else{\n return new FormData($.form)\n }\n}\n\nfunction connect(){\n _request_without_body.call(null, \"connect\", ...arguments)\n}\n\nfunction _delete(){\n _request_without_body.call(null, \"delete\", ...arguments)\n}\n\nfunction get(){\n _request_without_body.call(null, \"get\", ...arguments)\n}\n\nfunction head(){\n _request_without_body.call(null, \"head\", ...arguments)\n}\n\nfunction options(){\n _request_without_body.call(null, \"options\", ...arguments)\n}\n\nfunction patch(){\n _request_with_body.call(null, \"put\", ...arguments)\n}\n\nfunction post(){\n _request_with_body.call(null, \"post\", ...arguments)\n}\n\nfunction put(){\n _request_with_body.call(null, \"put\", ...arguments)\n}\n\nfunction trace(){\n _request_without_body.call(null, \"trace\", ...arguments)\n}\n\nfunction file_upload(){\n // ajax.file_upload(url, file, method=\"POST\", **callbacks)\n var $ = $B.args(\"file_upload\", 2, {url: null, \"file\": file},\n [\"url\", \"file\"], arguments, {}, null, \"kw\"),\n url = $.url,\n file = $.file,\n kw = $.kw\n\n var self = ajax.$factory()\n\n var items = handle_kwargs(self, kw, method),\n rawdata = items.rawdata,\n headers = items.headers\n\n for(var key in headers){\n var value = headers[key]\n self.js.setRequestHeader(key, value)\n if(key == 'content-type'){\n content_type = value\n }\n }\n\n var timeout = items.timeout\n if(timeout.seconds){\n ajax.set_timeout(self, timeout.seconds, timeout.func)\n }\n\n var method = _b_.dict.$get_string(kw, 
'method', 'POST'),\n field_name = _b_.dict.$get_string(kw, 'field_name', 'filetosave')\n\n var formdata = new FormData()\n formdata.append(field_name, file, file.name)\n\n if(rawdata){\n if(rawdata instanceof FormData){\n // append additional data\n for(var d of rawdata){\n formdata.append(d[0], d[1])\n }\n }else if($B.$isinstance(rawdata, _b_.dict)){\n for(var item of _b_.dict.$iter_items(rawdata)){\n formdata.append(item.key, item.value)\n }\n }else{\n throw _b_.ValueError.$factory(\n 'data value must be a dict of form_data')\n }\n }\n\n self.js.open(method, url, _b_.True)\n self.js.send(formdata)\n\n}\n\n$B.set_func_names(ajax)\n\nreturn {\n ajax: ajax,\n Ajax: ajax,\n delete: _delete,\n file_upload: file_upload,\n connect,\n form_data,\n get,\n head,\n options,\n patch,\n post,\n put,\n trace\n}\n\n})(__BRYTHON__)\n"], "array": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nvar typecodes = {\n 'b': Int8Array, // signed char, 1 byte\n 'B': Uint8Array, // unsigned char, 1\n 'u': Uint32Array, // Py_UNICODE Unicode character, 2 (deprecated)\n 'h': Int16Array, // signed short, 2\n 'H': Uint16Array, // unsigned short, 2\n 'i': Int16Array, // signed int, 2\n 'I': Uint16Array, // unsigned int, 2\n 'l': Int32Array, // signed long, 4\n 'L': Uint32Array, // unsigned long, 4\n 'q': null, // signed long, 8 (not implemented)\n 'Q': null, // unsigned long, 8 (not implemented)\n 'f': Float32Array, // float, 4\n 'd': Float64Array // double float, 8\n}\n\nvar array = $B.make_class(\"array\",\n function(){\n var missing = {},\n $ = $B.args(\"array\", 2, {typecode: null, initializer: null},\n [\"typecode\", \"initializer\"], arguments, {initializer: missing},\n null, null),\n typecode = $.typecode,\n initializer = $.initializer\n if(! typecodes.hasOwnProperty(typecode)){\n throw _b_.ValueError.$factory(\"bad typecode (must be b, \" +\n \"B, u, h, H, i, I, l, L, q, Q, f or d)\")\n }\n if(typecodes[typecode] === null){\n console.log(\"array factory, $\", $, typecode)\n throw _b_.NotImplementedError.$factory(\"type code \" +\n typecode + \" is not implemented\")\n }\n var res = {\n __class__: array,\n typecode: typecode,\n obj: null\n }\n if(initializer !== missing){\n if(Array.isArray(initializer)){\n array.fromlist(res, initializer)\n }else if($B.$isinstance(initializer, _b_.bytes)){\n array.frombytes(res, initializer)\n }else{\n array.extend(res, initializer)\n }\n }\n return res\n }\n)\n\narray.$buffer_protocol = true\narray.$match_sequence_pattern = true // for Pattern Matching (PEP 634)\n\narray.__getitem__ = function(self, key){\n if(self.obj && self.obj[key] !== undefined){\n return self.obj[key]\n }\n throw _b_.IndexError.$factory(\"array index out of range\")\n}\n\nvar array_iterator = $B.make_iterator_class(\"array_iterator\")\narray.__iter__ = function(self){\n return array_iterator.$factory(self.obj === null ? [] : self.obj)\n}\n\narray.__len__ = function(self){\n return self.obj === null ? 
0 : self.obj.length\n}\n\narray.__mul__ = function(self, nb){\n if(typeof nb == \"number\" || $B.$isinstance(nb, _b_.int)){\n var t = [],\n copy = self.obj.slice()\n for(var i = 0; i < nb; i++){\n t = t.concat(copy)\n }\n return {\n __class__: array,\n typecode: self.typecode,\n obj: t\n }\n }\n throw _b_.ValueError.$factory(\"cannot multiply array by \" +\n $B.class_name(nb))\n}\n\narray.__setitem__ = function(_self, index, value){\n if(_self.obj[index] === undefined){\n throw _b_.IndexError.$factory(\"array index out of range\")\n }\n _self.obj[index] = value\n}\n\narray.__str__ = function(self){\n $B.args(\"__str__\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n var res = \"array('\" + self.typecode + \"'\"\n if(self.obj !== null){\n res += \", [\" + self.obj + \"]\"\n }\n return res + \")\"\n}\n\nfunction normalize_index(self, i){\n // return an index i between 0 and self.obj.length - 1\n if(i < 0){\n i = self.obj.length + i\n }\n if(i < 0){i = 0}\n else if(i > self.obj.length - 1){\n i = self.obj.length\n }\n return i\n}\n\narray.append = function(self, value){\n $B.args(\"append\", 2, {self: null, value: null},\n [\"self\", \"value\"], arguments, {}, null, null)\n var pos = self.obj === null ? 0 : self.obj.length\n return array.insert(self, pos, value)\n}\n\narray.count = function(self, x){\n $B.args(\"count\", 2, {self: null, x: null},\n [\"self\", \"x\"], arguments, {}, null, null)\n if(self.obj === null){return 0}\n return self.obj.filter(function(item){return item == x}).length\n}\n\narray.extend = function(self, iterable){\n $B.args(\"extend\", 2, {self: null, iterable: null},\n [\"self\", \"iterable\"], arguments, {}, null, null)\n if(iterable.__class__ === array){\n if(iterable.typecode !== self.typecode){\n throw _b_.TypeError.$factory(\"can only extend with array \" +\n \"of same kind\")\n }\n if(iterable.obj === null){return _b_.None}\n // create new object with length = sum of lengths\n var newobj = new typecodes[self.typecode](self.obj.length +\n iterable.obj.length)\n // copy self.obj\n newobj.set(self.obj)\n // copy iterable.obj\n newobj.set(iterable.obj, self.obj.length)\n self.obj = newobj\n }else{\n var it = _b_.iter(iterable)\n while(true){\n try{\n var item = _b_.next(it)\n array.append(self, item)\n }catch(err){\n if(err.__class__ !== _b_.StopIteration){\n throw err\n }\n break\n }\n }\n }\n return _b_.None\n}\n\narray.frombytes = function(self, s){\n $B.args(\"frombytes\", 2, {self: null, s: null},\n [\"self\", \"s\"], arguments, {}, null, null)\n if(! 
$B.$isinstance(s, _b_.bytes)){\n throw _b_.TypeError.$factory(\"a bytes-like object is required, \" +\n \"not '\" + $B.class_name(s) + \"'\")\n }\n self.obj = new typecodes[self.typecode](s.source)\n return _b_.None\n}\n\narray.fromlist = function(self, list){\n $B.args(\"fromlist\", 2, {self: null, list: null},\n [\"self\", \"list\"], arguments, {}, null, null)\n var it = _b_.iter(list)\n while(true){\n try{\n var item = _b_.next(it)\n try{\n array.append(self, item)\n }catch(err){\n console.log(err)\n return _b_.None\n }\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n return _b_.None\n }\n throw err\n }\n }\n}\n\narray.fromstring = array.frombytes\n\narray.index = function(self, x){\n $B.args(\"index\", 2, {self: null, x: null},\n [\"self\", \"x\"], arguments, {}, null, null)\n var res = self.obj.findIndex(function(item){return x == item})\n if(res == -1){\n throw _b_.ValueError.$factory(\"array.index(x): x not in array\")\n }\n return res\n}\n\narray.insert = function(self, i, value){\n $B.args(\"insert\", 3, {self: null, i: null, value: null},\n [\"self\", \"i\", \"value\"], arguments, {}, null, null)\n if(self.obj === null){\n self.obj = [value]\n }else{\n self.obj.splice(i, 0, value)\n }\n return _b_.None\n}\n\narray.itemsize = function(self){\n return typecodes[self.typecode].BYTES_PER_ELEMENT\n}\n\narray.pop = function(self, i){\n var $ = $B.args(\"count\", 2, {self: null, i: null},\n [\"self\", \"i\"], arguments, {i: -1}, null, null)\n i = $.i\n if(self.obj === null){\n throw _b_.IndexError.$factory(\"pop from empty array\")\n }else if(self.obj.length == 1){\n var res = self.obj[0]\n self.obj = null\n return res\n }\n i = normalize_index(self, i)\n // store value to return\n var res = self.obj[i]\n // create new array, size = previous size - 1\n var newobj = new typecodes[self.typecode](self.obj.length - 1)\n // fill new array with values until i excluded\n newobj.set(self.obj.slice(0, i))\n // fill with values after i\n newobj.set(self.obj.slice(i + 1), i)\n // set self.obj to new array\n self.obj = newobj\n // return stored value\n return res\n}\n\narray.remove = function(self, x){\n $B.args(\"remove\", 2, {self: null, x: null},\n [\"self\", \"x\"], arguments, {}, null, null)\n var res = self.obj.findIndex(function(item){return x == item})\n if(res == -1){\n throw _b_.ValueError.$factory(\"array.remove(x): x not in array\")\n }\n array.pop(self, res)\n return _b_.None\n}\n\narray.reverse = function(self){\n $B.args(\"reverse\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n if(self.obj === null){return _b_.None}\n self.obj.reverse()\n return _b_.None\n}\n\narray.tobytes = function(self){\n $B.args(\"tobytes\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n var items = Array.prototype.slice.call(self.obj),\n res = []\n items.forEach(function(item){\n while(item > 256){\n res.push(item % 256)\n item = Math.floor(item / 256)\n }\n res.push(item)\n })\n return _b_.bytes.$factory(res)\n}\n\narray.tolist = function(self){\n $B.args(\"tolist\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n if(self.obj === null){\n return $B.$list([])\n }\n return Array.prototype.slice.call(self.obj)\n}\n\narray.tostring = array.tobytes\n\narray.typecode = function(self){\n return self.typecode\n}\n\n$B.set_func_names(array, \"array\")\n\nvar module = {\n array: array,\n typecodes: Object.keys(typecodes).join('')\n}\n\n$B.addToImported('array', module)\n\n})(__BRYTHON__)\n"], "_tokenize": [".js", "(function($B){\n\nvar _b_ = 
$B.builtins\n\n$B.$import('token')\n\nvar TokenizerIter = $B.make_class('TokenizerIter',\n function(it){\n return {\n __class__: TokenizerIter,\n it\n }\n }\n)\n\nTokenizerIter.__iter__ = function(self){\n var js_iter = function*(){\n var line_num = 0\n while(true){\n try{\n var bytes = self.it()\n }catch(err){\n if($B.is_exc(err, [_b_.StopIteration])){\n token = endmarker\n token.lineno++\n token.end_lineno++\n yield $B.fast_tuple([token.num_type, token.string,\n $B.fast_tuple([token.lineno, token.col_offset]),\n $B.fast_tuple([token.end_lineno, token.end_col_offset]),\n token.line])\n }\n throw err\n }\n line_num++\n var line = _b_.bytes.decode(bytes, 'utf-8')\n for(var token of $B.tokenizer(line, 'test')){\n if(token.num_type == $B.py_tokens.ENCODING){ // skip encoding token\n continue\n }else if(token.num_type == $B.py_tokens.ENDMARKER){\n var endmarker = token\n continue\n }\n token.type = token.num_type\n token.lineno = line_num\n token.end_lineno = line_num\n yield $B.fast_tuple([token.num_type, token.string,\n $B.fast_tuple([token.lineno, token.col_offset]),\n $B.fast_tuple([token.end_lineno, token.end_col_offset]),\n token.line])\n }\n }\n\n }\n return $B.generator.$factory(js_iter)()\n}\n\nTokenizerIter.__next__ = function*(self){\n\n}\n\n$B.set_func_names(TokenizerIter, '_tokenize')\n\n$B.addToImported('_tokenize', {TokenizerIter})\n\n\n})(__BRYTHON__)"], "_base64": [".js", "(function($B){\n\nvar _b_ = $B.builtins,\n _keyStr = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\"\n\nfunction make_alphabet(altchars){\n var alphabet = _keyStr\n if(altchars !== undefined && altchars !== _b_.None){\n // altchars is an instance of Python bytes\n var source = altchars.source\n alphabet = alphabet.substr(0,alphabet.length-3) +\n _b_.chr(source[0]) + _b_.chr(source[1]) + '='\n }\n return alphabet\n}\n\nvar Base64 = {\n error: function(){return 'binascii_error'},\n\n encode: function(bytes, altchars){\n\n var input = bytes.source,\n output = \"\",\n chr1, chr2, chr3, enc1, enc2, enc3, enc4\n var i = 0\n\n var alphabet = make_alphabet(altchars)\n\n while(i < input.length){\n\n chr1 = input[i++]\n chr2 = input[i++]\n chr3 = input[i++]\n\n enc1 = chr1 >> 2\n enc2 = ((chr1 & 3) << 4) | (chr2 >> 4)\n enc3 = ((chr2 & 15) << 2) | (chr3 >> 6)\n enc4 = chr3 & 63\n\n if(isNaN(chr2)){\n enc3 = enc4 = 64\n }else if(isNaN(chr3)){\n enc4 = 64\n }\n\n output = output + alphabet.charAt(enc1) +\n alphabet.charAt(enc2) +\n alphabet.charAt(enc3) +\n alphabet.charAt(enc4)\n\n }\n return _b_.bytes.$factory(output, 'utf-8', 'strict')\n },\n\n\n decode: function(bytes, altchars, validate){\n var output = [],\n chr1, chr2, chr3,\n enc1, enc2, enc3, enc4\n\n var alphabet = make_alphabet(altchars)\n\n var input = bytes.source\n\n // If validate is set, check that all characters in input\n // are in the alphabet\n var _input = ''\n var padding = 0\n for(var i = 0, len = input.length; i < len; i++){\n var car = String.fromCharCode(input[i])\n var char_num = alphabet.indexOf(car)\n if(char_num == -1){\n if(validate){throw Base64.error(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i < input.length - 2){\n if(validate){throw Base64.error(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i >= input.length - 2){\n padding++\n _input += car\n }else{\n _input += car\n }\n }\n input = _input\n if(_input.length == padding){return _b_.bytes.$factory([])}\n if( _input.length % 4 > 0){throw Base64.error(\"Incorrect padding\")}\n\n var i = 0\n while(i < input.length){\n\n 
enc1 = alphabet.indexOf(input.charAt(i++))\n enc2 = alphabet.indexOf(input.charAt(i++))\n enc3 = alphabet.indexOf(input.charAt(i++))\n enc4 = alphabet.indexOf(input.charAt(i++))\n\n chr1 = (enc1 << 2) | (enc2 >> 4)\n chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)\n chr3 = ((enc3 & 3) << 6) | enc4\n\n output.push(chr1)\n\n if(enc3 != 64){output.push(chr2)}\n if(enc4 != 64){output.push(chr3)}\n\n }\n // return Python bytes\n return _b_.bytes.$factory(output, 'utf-8', 'strict')\n\n },\n\n _utf8_encode: function(string) {\n string = string.replace(/\\r\\n/g, \"\\n\")\n var utftext = \"\";\n\n for(var n = 0; n < string.length; n++){\n\n var c = string.charCodeAt(n)\n\n if(c < 128){\n utftext += String.fromCharCode(c)\n }else if((c > 127) && (c < 2048)){\n utftext += String.fromCharCode((c >> 6) | 192)\n utftext += String.fromCharCode((c & 63) | 128)\n }else{\n utftext += String.fromCharCode((c >> 12) | 224)\n utftext += String.fromCharCode(((c >> 6) & 63) | 128)\n utftext += String.fromCharCode((c & 63) | 128)\n }\n\n }\n\n return utftext\n },\n\n _utf8_decode: function(utftext) {\n var string = \"\",\n i = 0,\n c = c1 = c2 = 0\n\n while(i < utftext.length){\n\n c = utftext.charCodeAt(i)\n\n if(c < 128){\n string += String.fromCharCode(c)\n i++\n }else if((c > 191) && (c < 224)){\n c2 = utftext.charCodeAt(i + 1)\n string += String.fromCharCode(((c & 31) << 6) | (c2 & 63))\n i += 2\n }else{\n c2 = utftext.charCodeAt(i + 1)\n c3 = utftext.charCodeAt(i + 2)\n string += String.fromCharCode(\n ((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63))\n i += 3\n }\n\n }\n\n return string\n }\n\n}\n\n$B.addToImported('_base64', {Base64:Base64})\n}\n\n)(__BRYTHON__)"], "posix": [".js", "/*\nThis module provides access to operating system functionality that is\nstandardized by the C Standard and the POSIX standard (a thinly\ndisguised Unix interface). Refer to the library manual and\ncorresponding Unix manual entries for more information on calls.\n*/\nvar $B = __BRYTHON__,\n _b_ = $B.builtins\n\nfunction _randint(a, b){\n return parseInt(Math.random() * (b - a + 1) + a)\n}\n\nvar stat_result = $B.make_class(\"stat_result\",\n function(filename){\n filename = _b_.str.$factory(filename)\n if($B.file_cache && $B.file_cache.hasOwnProperty(filename)){\n var f = $B.file_cache[filename],\n res = {\n __class__: stat_result,\n st_atime: __BRYTHON__.timestamp,\n st_ctime: f.ctime,\n st_mtime: f.mtime,\n st_uid: -1,\n st_gid: -1,\n st_ino: -1,\n st_mode: 0,\n st_size: f.length\n };\n [\"mtime\", \"ctime\", \"atime_ns\", \"mtime_ns\", \"ctime_ns\"].\n forEach(function(item){\n res[\"st_\" + item] = res.st_atime\n });\n return res\n }else if($B.files && $B.files.hasOwnProperty(filename)){\n var f = $B.files[filename],\n res = {\n __class__: stat_result,\n st_atime: __BRYTHON__.timestamp,\n st_ctime: f.ctime,\n st_mtime: f.mtime,\n st_uid: -1,\n st_gid: -1,\n st_ino: -1,\n st_mode: 0,\n st_size: f.content.length\n };\n for(var item of [\"mtime\", \"ctime\", \"atime_ns\", \"mtime_ns\", \"ctime_ns\"]){\n res[\"st_\" + item] = res.st_atime\n }\n return res\n\n }else{\n var res = {\n __class__: stat_result,\n st_atime: __BRYTHON__.timestamp,\n st_uid: -1,\n st_gid: -1,\n st_ino: -1,\n st_mode: filename.endsWith('/') ? 
16895 : 33206,\n st_size: 1 // fake\n };\n [\"mtime\", \"ctime\", \"atime_ns\", \"mtime_ns\", \"ctime_ns\"].\n forEach(function(item){\n res[\"st_\" + item] = res.st_atime\n });\n return res\n }\n }\n)\n$B.set_func_names(stat_result, \"posix\")\n\nvar module = {\n F_OK: 0,\n O_APPEND: 8,\n O_BINARY: 32768,\n O_CREAT: 256,\n O_EXCL: 1024,\n O_NOINHERIT: 128,\n O_RANDOM: 16,\n O_RDONLY: 0,\n O_RDWR: 2,\n O_SEQUENTIAL: 32,\n O_SHORT_LIVED: 4096,\n O_TEMPORARY: 64,\n O_TEXT: 16384,\n O_TRUNC: 512,\n O_WRONLY: 1,\n P_DETACH: 4,\n P_NOWAIT: 1,\n P_NOWAITO: 3,\n P_OVERLAY: 2,\n P_WAIT: 0,\n R_OK: 4,\n TMP_MAX: 32767,\n W_OK: 2,\n X_OK: 1,\n _have_functions: ['MS_WINDOWS'],\n environ: _b_.dict.$factory(\n [['PYTHONPATH', $B.brython_path],\n ['PYTHONUSERBASE', ' ']]),\n error: _b_.OSError,\n fspath: function(path){\n return path\n },\n getcwd: function(){return $B.brython_path},\n getpid: function(){return 0},\n lstat: function(filename){\n return stat_result.$factory(filename)\n },\n open: function(path, flags){return _b_.open(path, flags)},\n remove: function(path) {\n var $ = $B.args(\"remove\", 1, { path: null }, [\"path\"], arguments, {}, null, null)\n console.log($)\n\n var path = $.path\n var found_file = false\n\n if ($B.file_cache && $B.file_cache.hasOwnProperty(path)){\n delete $B.file_cache[path]\n found_file = true\n }\n if ($B.files && $B.files.hasOwnProperty(path)){\n delete $B.files[path]\n found_file = true\n }\n\n if(!found_file) {\n throw _b_.FileNotFoundError.$factory(`No such file or directory: '${path}'`)\n }\n\n return _b_.None\n },\n stat: function(filename){return stat_result.$factory(filename)},\n stat_result: function(filename){return stat_result.$factory(filename)},\n urandom: function(n){\n const randbytes = new Uint8Array(n);\n crypto.getRandomValues(randbytes);\n return _b_.bytes.$factory(Array.from(randbytes));\n },\n WTERMSIG: function(){return 0},\n WNOHANG: function(){return _b_.tuple.$factory([0, 0])}\n};\n\n[\"WCOREDUMP\", \"WIFCONTINUED\", \"WIFSTOPPED\", \"WIFSIGNALED\", \"WIFEXITED\"].forEach(function(funcname){\n module[funcname] = function(){return false}\n });\n\n[\"WEXITSTATUS\", \"WSTOPSIG\", \"WTERMSIG\"].\n forEach(function(funcname){\n module[funcname] = function(){return _b_.None}\n });\n\n[\"_exit\", \"_getdiskusage\", \"_getfileinformation\", \"_getfinalpathname\",\n \"_getfullpathname\", \"_isdir\", \"abort\", \"access\", \"chdir\", \"chmod\",\n \"close\", \"closerange\", \"device_encoding\", \"dup\", \"dup2\",\n \"execv\", \"execve\", \"fsat\", \"fsync\", \"get_terminal_size\", \"getcwdb\",\n \"getlogin\", \"getppid\", \"isatty\", \"kill\", \"link\", \"listdir\", \"lseek\",\n \"mkdir\", \"pipe\", \"putenv\", \"read\", \"readlink\", \"rename\",\n \"replace\", \"rmdir\", \"spawnv\", \"spawnve\", \"startfile\", \"stat_float_times\",\n \"statvfs_result\", \"strerror\", \"symlink\", \"system\", \"terminal_size\",\n \"times\", \"times_result\", \"umask\", \"uname_result\", \"unlink\", \"utime\",\n \"waitpid\", \"write\"].forEach(function(funcname){\n module[funcname] = function(){\n throw _b_.NotImplementedError.$factory(\"posix.\" + funcname +\n \" is not implemented\")\n }\n });\n\n$B.addToImported('posix', module)"], "_io_classes": [".js", "var _b_ = __BRYTHON__.builtins\n\nfunction get_self(name, args){\n return $B.args(name, 1, {self: null}, [\"self\"], args, {}, null, null).self\n}\n\nvar _IOBase = $B.make_class(\"_IOBase\")\n_IOBase.__mro__ = [_b_.object]\n\n_IOBase.close = function(){\n get_self(\"close\", arguments).__closed = 
true\n}\n\n_IOBase.flush = function(){\n get_self(\"flush\", arguments)\n return _b_.None\n}\n\n$B.set_func_names(_IOBase, '_io')\n\n// Base class for binary streams that support some kind of buffering.\nvar _BufferedIOBase = $B.make_class(\"_BufferedIOBase\")\n_BufferedIOBase.__mro__ = [_IOBase, _b_.object]\n\n_BufferedIOBase.__enter__ = function(self){\n return self\n}\n_BufferedIOBase.__exit__ = function(self, type, value, traceback){\n try{\n $B.$call($B.$getattr(self, 'close'))()\n self.__closed = true\n return true\n }catch(err){\n return false\n }\n}\n\n$B.set_func_names(_BufferedIOBase, '_io')\n\n// Base class for raw binary I/O.\nvar _RawIOBase = $B.make_class(\"_RawIOBase\")\n\n_RawIOBase.__mro__ = [_IOBase, _b_.object]\n\n_RawIOBase.read = function(){\n var $ = $B.args(\"read\", 2, {self: null, size: null}, [\"self\", \"size\"],\n arguments, {size: -1}, null, null),\n self = $.self,\n size = $.size,\n res\n self.$pos = self.$pos || 0\n if(size == -1){\n if(self.$pos == 0){\n res = self.$content\n }else{\n res = _b_.bytes.$factory(self.$content.source.slice(self.$pos))\n }\n self.$pos = self.$content.source.length - 1\n }else{\n res = _b_.bytes.$factory(self.$content.source.slice(self.$pos, size))\n self.$pos += size\n }\n return res\n}\n\n_RawIOBase.readall = function(){\n return _RawIOBase.read(get_self(\"readall\", arguments))\n}\n\n$B.set_func_names(_RawIOBase, '_io')\n\n// Base class for text streams.\n_TextIOBase = $B.make_class(\"_TextIOBase\")\n_TextIOBase.__mro__ = [_IOBase, _b_.object]\n\nvar StringIO = $B.make_class(\"StringIO\",\n function(){\n var $ = $B.args(\"StringIO\", 2, {value: null, newline: null},\n [\"value\", \"newline\"], arguments, {value: '', newline: \"\\n\"},\n null, null)\n return {\n __class__: StringIO,\n $counter: 0,\n $content: $.value\n }\n }\n)\n\nStringIO.__mro__ = [$B.Reader, _b_.object]\n\nStringIO.getvalue = function(){\n var $ = $B.args(\"getvalue\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n return $.self.$content.substr(0) // copy\n}\n\nStringIO.truncate = function(self, size){\n var $ = $B.args('truncate', 2, {self: null, size: null}, ['self', 'size'],\n arguments, {size: _b_.None}, null, null),\n self = $.self,\n size = $.size\n if(size === _b_.None){\n size = self.$counter\n }\n self.$content = self.$content.substr(0, size)\n self.$counter = self.$content.length\n return self.$counter\n}\n\nStringIO.write = function(){\n var $ = $B.args(\"write\", 2, {self: null, data: null},\n [\"self\", \"data\"], arguments, {}, null, null)\n if(! 
$B.$isinstance($.data, _b_.str)){\n throw _b_.TypeError.$factory('string argument expected, got ' +\n `'${$B.class_name($.data)}'`)\n }\n var text = $.self.$content,\n position = $.self.$counter\n text = text.substr(0, position) + $.data +\n text.substr(position + $.data.length)\n $.self.$content = text\n $.self.$counter = position + $.data.length\n return $.data.length\n}\n\n$B.set_func_names(StringIO, \"_io\")\n\nvar BytesIO = $B.make_class(\"BytesIO\",\n function(){\n var $ = $B.args(\"BytesIO\", 1, {value: null},\n [\"value\"], arguments, {value: _b_.bytes.$factory()},\n null, null)\n return {\n __class__: BytesIO,\n $binary: true,\n $content: $.value,\n $length: $.value.source.length,\n $counter: 0\n }\n }\n)\nBytesIO.__mro__ = [$B.Reader, _b_.object]\n\nBytesIO.getbuffer = function(){\n var self = get_self(\"getbuffer\", arguments)\n return self.$content\n}\n\nBytesIO.getvalue = function(){\n var self = get_self(\"getvalue\", arguments)\n return self.$content\n}\n\nBytesIO.read = function(){\n var $ = $B.args(\"read\", 2, {self: null, nbytes: null},\n [\"self\", \"nbytes\"], arguments, {nbytes: _b_.None}, null, null),\n self = $.self,\n nbytes = $.nbytes,\n res\n var source = self.$content.source\n if(nbytes === _b_.None){\n res = $B.fast_bytes(source.slice(self.$counter))\n self.$counter = source.length\n }else if(! _b_.isinstance(nbytes, _b_.int)){\n throw _b_.TypeError.$factory('number of bytes should be int, not ' +\n $B.class_name(nbytes))\n }else{\n res = $B.fast_bytes(source.slice(self.$counter,\n self.$counter + nbytes))\n self.$counter = Math.min(self.$counter + nbytes, source.length)\n }\n return res\n}\n\nBytesIO.write = function(){\n var $ = $B.args(\"write\", 2, {self: null, data: null},\n [\"self\", \"data\"], arguments, {}, null, null)\n $.self.$content.source = $.self.$content.source.concat(\n $.data.source)\n $.self.$counter += $.data.source.length\n return _b_.None\n}\n\n$B.set_func_names(BytesIO, \"_io\")\n\nvar BlockingIOError = $B.make_class('BlockingIOError')\nBlockingIOError.__bases__ = [_b_.OSError]\n\n$B.set_func_names(BlockingIOError, '_io')\n\nvar $module = (function($B){\n return {\n _BufferedIOBase,\n _IOBase,\n _RawIOBase,\n _TextIOBase: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n BlockingIOError,\n BytesIO: BytesIO,\n FileIO: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n StringIO: StringIO,\n BufferedReader: $B.BufferedReader,\n BufferedWriter: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n BufferedRWPair: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n BufferedRandom: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n IncrementalNewlineDecoder: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n TextIOWrapper: $B.TextIOWrapper\n }\n})(__BRYTHON__)\n$module._IOBase.__doc__ = \"_IOBase\"\n\n__BRYTHON__.imported._io_classes = $module"], "math": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nconst INF = $B.fast_float(Number.POSITIVE_INFINITY),\n NINF = $B.fast_float(Number.NEGATIVE_INFINITY),\n ZERO = $B.fast_float(0),\n NAN = $B.fast_float(Number.NaN)\n\nvar float_check = function(x) {\n // Returns a Javascript number\n if(x.__class__ === $B.long_int){\n var res = parseInt(x.value)\n if(! 
isFinite(res)){\n throw _b_.OverflowError.$factory('int too big for float')\n }\n return res\n }else if(x.__class__ === _b_.float){\n return x.value\n }\n try{\n return _b_.float.$factory(x).value\n }catch(err){\n throw _b_.TypeError.$factory('must be real number, not ' +\n $B.class_name(x))\n }\n}\n\nfunction check_int(x){\n if(! $B.$isinstance(x, _b_.int)){\n throw _b_.TypeError.$factory(\"'\" + $B.class_name(x) +\n \"' object cannot be interpreted as an integer\")\n }\n}\n\nfunction check_int_or_round_float(x){\n return ($B.$isinstance(x, _b_.float) && Number.isInteger(x.value)) ||\n $B.$isinstance(x, _b_.int)\n}\n\nvar isWholeNumber = function(x){return (x * 10) % 10 == 0}\n\nvar isOdd = function(x) {return isWholeNumber(x) && 2 * Math.floor(x / 2) != x}\n\nvar isNegZero = function(x) {return x === 0 && Math.atan2(x,x) < 0}\n\nfunction overflow(){\n throw _b_.OverflowError.$factory(\"math range error\")\n}\n\nfunction value_error(){\n throw _b_.ValueError.$factory(\"math range error\")\n}\n\nvar EPSILON = Math.pow(2, -52),\n MAX_VALUE = (2 - EPSILON) * Math.pow(2, 1023),\n MIN_VALUE = Math.pow(2, -1022),\n Py_HUGE_VAL = Number.POSITIVE_INFINITY,\n logpi = 1.144729885849400174143427351353058711647,\n sqrtpi = 1.772453850905516027298167483341145182798\n\nfunction nextUp(x){\n if(x !== x){ // NaN\n return x\n }\n if(_b_.float.$funcs.isinf(x)){\n if(_b_.float.$funcs.isninf(x)){\n return -MAX_VALUE\n }\n return _mod.inf\n }\n if($B.$isinstance(x, $B.long_int)){\n x = Number(x.value)\n }else if($B.$isinstance(x, _b_.float)){\n x = x.value\n }\n\n if(x == +MAX_VALUE){\n return +1 / 0\n }\n if(typeof x == \"number\"){\n var y = x * (x < 0 ? 1 - EPSILON / 2 : 1 + EPSILON)\n if(y == x){\n y = MIN_VALUE * EPSILON > 0 ? x + MIN_VALUE * EPSILON : x + MIN_VALUE\n }\n if(y === +1 / 0){\n y = +MAX_VALUE\n }\n var b = x + (y - x) / 2\n if(x < b && b < y){\n y = b;\n }\n var c = (y + x) / 2\n if(x < c && c < y){\n y = c;\n }\n return y === 0 ? -0 : y\n }else{\n var factor = $B.rich_comp('__lt__', x, 0) ? 1 - EPSILON / 2 :\n 1 + EPSILON\n var y = $B.rich_op(\"__mul__\", x , factor)\n if(y == x){\n y = MIN_VALUE * EPSILON > 0 ?\n $B.rich_op('__add__', x, MIN_VALUE * EPSILON) :\n $B.rich_op('__add__', x, MIN_VALUE)\n }\n if(y === +1 / 0){\n y = +MAX_VALUE\n }\n var y_minus_x = $B.rich_op('__sub__', y, x)\n var z = $B.rich_op('__truediv__', y_minus_x, 2) // (y - x) / 2\n\n var b = $B.rich_op('__add__', x, z)\n if($B.rich_comp('__lt__', x, b) && $B.rich_comp('__lt__', b, y)){\n y = b;\n }\n var c = $B.rich_op('__truediv__', $B.rich_op('__add__', y, x), 2)\n if($B.rich_comp('__lt__', x, c) && $B.rich_comp('__lt__', c, y)){\n y = c;\n }\n return y === 0 ? 
-0 : y\n }\n}\n\nfunction gcd2(a, b){\n // GCD of 2 factors\n if($B.rich_comp(\"__gt__\", b, a)){\n var temp = a\n a = b\n b = temp\n }\n while(true){\n if(b == 0){\n return a\n }\n a = $B.rich_op(\"__mod__\", a, b)\n if(a == 0){\n return b\n }\n b = $B.rich_op(\"__mod__\", b, a)\n }\n}\n\nconst LANCZOS_N = 13,\n lanczos_g = 6.024680040776729583740234375,\n lanczos_g_minus_half = 5.524680040776729583740234375,\n lanczos_num_coeffs = [\n 23531376880.410759688572007674451636754734846804940,\n 42919803642.649098768957899047001988850926355848959,\n 35711959237.355668049440185451547166705960488635843,\n 17921034426.037209699919755754458931112671403265390,\n 6039542586.3520280050642916443072979210699388420708,\n 1439720407.3117216736632230727949123939715485786772,\n 248874557.86205415651146038641322942321632125127801,\n 31426415.585400194380614231628318205362874684987640,\n 2876370.6289353724412254090516208496135991145378768,\n 186056.26539522349504029498971604569928220784236328,\n 8071.6720023658162106380029022722506138218516325024,\n 210.82427775157934587250973392071336271166969580291,\n 2.5066282746310002701649081771338373386264310793408\n ],\n /* denominator is x*(x+1)*...*(x+LANCZOS_N-2) */\n lanczos_den_coeffs = [\n 0.0, 39916800.0, 120543840.0, 150917976.0, 105258076.0, 45995730.0,\n 13339535.0, 2637558.0, 357423.0, 32670.0, 1925.0, 66.0, 1.0],\n /* gamma values for small positive integers, 1 though NGAMMA_INTEGRAL */\n NGAMMA_INTEGRAL = 23,\n gamma_integral = [\n 1.0, 1.0, 2.0, 6.0, 24.0, 120.0, 720.0, 5040.0, 40320.0, 362880.0,\n 3628800.0, 39916800.0, 479001600.0, 6227020800.0, 87178291200.0,\n 1307674368000.0, 20922789888000.0, 355687428096000.0,\n 6402373705728000.0, 121645100408832000.0, 2432902008176640000.0,\n 51090942171709440000.0, 1124000727777607680000.0]\n\n/* Lanczos' sum L_g(x), for positive x */\nfunction lanczos_sum(x){\n var num = 0.0,\n den = 0.0,\n i\n /* evaluate the rational function lanczos_sum(x). For large\n x, the obvious algorithm risks overflow, so we instead\n rescale the denominator and numerator of the rational\n function by x**(1-LANCZOS_N) and treat this as a\n rational function in 1/x. This also reduces the error for\n larger x values. The choice of cutoff point (5.0 below) is\n somewhat arbitrary; in tests, smaller cutoff values than\n this resulted in lower accuracy. */\n if (x < 5.0) {\n for (i = LANCZOS_N; --i >= 0; ) {\n num = num * x + lanczos_num_coeffs[i];\n den = den * x + lanczos_den_coeffs[i];\n }\n }else{\n for (i = 0; i < LANCZOS_N; i++) {\n num = num / x + lanczos_num_coeffs[i];\n den = den / x + lanczos_den_coeffs[i];\n }\n }\n return num/den;\n}\n\nfunction m_sinpi(x){\n // x is float\n // returns a float\n var r,\n y = fmod(fabs(x), 2.0), // float\n n = _b_.round($B.fast_float(2.0 * y.value)) // int\n switch(n){\n case 0:\n r = sin(pi.value * y.value);\n break;\n case 1:\n r = cos(pi.value * (y.value - 0.5));\n break;\n case 2:\n /* N.B. -sin(pi*(y-1.0)) is *not* equivalent: it would give\n -0.0 instead of 0.0 when y == 1.0. */\n r = sin(pi.value * (1.0 - y.value));\n break;\n case 3:\n r = _b_.float.__neg__(cos(pi.value *(y.value - 1.5)))\n break;\n case 4:\n r = sin(pi.value * (y.value - 2.0));\n break;\n }\n return $B.fast_float(copysign(1.0, x).value * r.value);\n}\n\n/*\n lgamma: natural log of the absolute value of the Gamma function.\n For large arguments, Lanczos' formula works extremely well here.\n*/\nfunction m_lgamma(x){\n var r,\n absx\n\n /* special cases */\n if(! 
isfinite(x)){\n if(isnan(x)){\n return x; /* lgamma(nan) = nan */\n }else{\n return $B.fast_float(Number.POSITIVE_INFINITY); /* lgamma(+-inf) = +inf */\n }\n }\n\n /* integer arguments */\n var x1 = float_check(x)\n if(Number.isInteger(x1) && x1 <= 2.0){\n if(x1 <= 0.0){\n value_error()\n }else{\n return $B.fast_float(0.0); /* lgamma(1) = lgamma(2) = 0.0 */\n }\n }\n\n absx = fabs(x)\n /* tiny arguments: lgamma(x) ~ -log(fabs(x)) for small x */\n if (absx.value < 1e-20){\n return $B.fast_float(-log(absx).value);\n }\n /* Lanczos' formula. We could save a fraction of a ulp in accuracy by\n having a second set of numerator coefficients for lanczos_sum that\n absorbed the exp(-lanczos_g) term, and throwing out the lanczos_g\n subtraction below; it's probably not worth it. */\n var lsum = $B.fast_float(lanczos_sum(absx.value))\n r = log(lsum).value - lanczos_g;\n r += (absx.value - 0.5) *\n (log($B.fast_float(absx.value + lanczos_g - 0.5)).value - 1)\n if (x1 < 0.0){\n /* Use reflection formula to get value for negative x. */\n r = logpi - log(fabs(m_sinpi(absx))).value - log(absx).value - r\n }\n r = $B.fast_float(r)\n if(isinf(r)){\n overflow()\n }\n return r;\n}\n\nfunction acos(x){\n $B.check_nb_args('acos', 1, arguments)\n $B.check_no_kw('acos', x)\n if(_mod.isinf(x)){\n throw _b_.ValueError.$factory(\"math domain error\")\n }else if(_mod.isnan(x)){\n return _mod.nan\n }else{\n x = float_check(x)\n if(x > 1 || x < -1){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n return _b_.float.$factory(Math.acos(x))\n }\n}\n\nfunction acosh(x){\n $B.check_nb_args('acosh', 1, arguments)\n $B.check_no_kw('acosh', x)\n\n if(_b_.float.$funcs.isinf(x)){\n if(_b_.float.$funcs.isninf(x)){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n return _mod.inf\n }else if(_mod.isnan(x)){\n return _mod.nan\n }\n var y = float_check(x)\n if(y <= 0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n if(y > Math.pow(2, 28)){ // issue 1590\n return _b_.float.$factory(_mod.log(y).value + _mod.log(2).value)\n }\n return _b_.float.$factory(Math.log(y + Math.sqrt(y * y - 1)))\n}\n\nfunction asin(x){\n $B.check_nb_args('asin', 1, arguments)\n $B.check_no_kw('asin', x)\n if(_mod.isinf(x)){\n throw _b_.ValueError.$factory(\"math domain error\")\n }else if(_mod.isnan(x)){\n return _mod.nan\n }else{\n x = float_check(x)\n if(x > 1 || x < -1){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n return _b_.float.$factory(Math.asin(x))\n }\n}\n\nfunction asinh(x){\n $B.check_nb_args('asinh', 1, arguments)\n $B.check_no_kw('asinh', x)\n\n var y = float_check(x)\n if(_b_.float.$funcs.isninf(x)){\n return NINF\n }else if(_b_.float.$funcs.isinf(x)){\n return INF\n }\n if(y == 0 && 1 / y === -Infinity){\n return $B.fast_float(-0.0)\n }\n return _b_.float.$factory(Math.asinh(y))\n}\n\nfunction atan(x){\n $B.check_nb_args('atan', 1, arguments)\n $B.check_no_kw('atan', x)\n\n if(_b_.float.$funcs.isninf(x)){return _b_.float.$factory(-Math.PI / 2)}\n if(_b_.float.$funcs.isinf(x)){return _b_.float.$factory(Math.PI / 2)}\n return _b_.float.$factory(Math.atan(float_check(x)))\n}\n\nfunction atan2(x, y){\n $B.check_nb_args('atan2', 2, arguments)\n $B.check_no_kw('atan2', x, y)\n\n return _b_.float.$factory(Math.atan2(float_check(x), float_check(y)))\n}\n\nfunction atanh(x){\n $B.check_nb_args('atanh', 1, arguments)\n $B.check_no_kw('atanh', x)\n if(_b_.float.$funcs.isinf(x)){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n var y = float_check(x)\n if(y == 0){\n return 0\n }else if(y <= -1 
|| y >= 1){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n return _b_.float.$factory(0.5 * Math.log((1 / y + 1)/(1 / y - 1)));\n}\n\nfunction cbrt(x){\n // Cubic root\n $B.check_nb_args('cbrt ', 1, arguments)\n $B.check_no_kw('cbrt ', x)\n\n var y = float_check(x)\n if(_b_.float.$funcs.isninf(x)){\n return NINF\n }else if(_b_.float.$funcs.isinf(x)){\n return INF\n }\n var _r = $B.fast_float(Math.cbrt(y))\n if(_b_.float.$funcs.isinf(_r)){\n throw _b_.OverflowError.$factory(\"math range error\")\n }\n return _r\n}\n\nfunction ceil(x){\n $B.check_nb_args('ceil', 1, arguments)\n $B.check_no_kw('ceil', x)\n\n var res\n\n if($B.$isinstance(x, _b_.float)){\n if(_b_.float.$funcs.isinf(x)){\n throw _b_.OverflowError.$factory(\n \"cannot convert float infinity to integer\")\n }else if(_mod.isnan(x)){\n throw _b_.OverflowError.$factory(\n \"cannot convert float NaN to integer\")\n }\n }\n\n var klass = x.__class__ || $B.get_class(x)\n\n try{\n // Use attribute of the object's class, not of the object\n // itself (special method)\n return $B.$call($B.$getattr(klass, '__ceil__'))(x)\n }catch(err){\n if(! $B.is_exc(err, [_b_.AttributeError])){\n throw err\n }\n }\n\n try{\n x = $B.$call($B.$getattr(klass, '__float__'))(x)\n }catch(err){\n if(! $B.is_exc(err, [_b_.AttributeError])){\n throw err\n }else{\n throw _b_.TypeError.$factory(\"must be real number, not \" +\n $B.class_name(x))\n }\n }\n return _mod.ceil(x)\n}\n\nconst ULLONG_MAX = 2n ** 64n - 1n,\n LONG_MAX = 2147483647,\n LONG_MIN = -2147483647,\n LLONG_MAX = 9223372036854775807n,\n LLONG_MIN = -9223372036854775807n,\n p2_64 = 2n ** 64n\n\nconst reduced_factorial_odd_part = [\n 0x0000000000000001n, 0x0000000000000001n, 0x0000000000000001n, 0x0000000000000003n,\n 0x0000000000000003n, 0x000000000000000fn, 0x000000000000002dn, 0x000000000000013bn,\n 0x000000000000013bn, 0x0000000000000b13n, 0x000000000000375fn, 0x0000000000026115n,\n 0x000000000007233fn, 0x00000000005cca33n, 0x0000000002898765n, 0x00000000260eeeebn,\n 0x00000000260eeeebn, 0x0000000286fddd9bn, 0x00000016beecca73n, 0x000001b02b930689n,\n 0x00000870d9df20adn, 0x0000b141df4dae31n, 0x00079dd498567c1bn, 0x00af2e19afc5266dn,\n 0x020d8a4d0f4f7347n, 0x335281867ec241efn, 0x9b3093d46fdd5923n, 0x5e1f9767cc5866b1n,\n 0x92dd23d6966aced7n, 0xa30d0f4f0a196e5bn, 0x8dc3e5a1977d7755n, 0x2ab8ce915831734bn,\n 0x2ab8ce915831734bn, 0x81d2a0bc5e5fdcabn, 0x9efcac82445da75bn, 0xbc8b95cf58cde171n,\n 0xa0e8444a1f3cecf9n, 0x4191deb683ce3ffdn, 0xddd3878bc84ebfc7n, 0xcb39a64b83ff3751n,\n 0xf8203f7993fc1495n, 0xbd2a2a78b35f4bddn, 0x84757be6b6d13921n, 0x3fbbcfc0b524988bn,\n 0xbd11ed47c8928df9n, 0x3c26b59e41c2f4c5n, 0x677a5137e883fdb3n, 0xff74e943b03b93ddn,\n 0xfe5ebbcb10b2bb97n, 0xb021f1de3235e7e7n, 0x33509eb2e743a58fn, 0x390f9da41279fb7dn,\n 0xe5cb0154f031c559n, 0x93074695ba4ddb6dn, 0x81c471caa636247fn, 0xe1347289b5a1d749n,\n 0x286f21c3f76ce2ffn, 0x00be84a2173e8ac7n, 0x1595065ca215b88bn, 0xf95877595b018809n,\n 0x9c2efe3c5516f887n, 0x373294604679382bn, 0xaf1ff7a888adcd35n, 0x18ddf279a2c5800bn,\n 0x18ddf279a2c5800bn, 0x505a90e2542582cbn, 0x5bacad2cd8d5dc2bn, 0xfe3152bcbff89f41n,\n 0xe1467e88bf829351n, 0xb8001adb9e31b4d5n, 0x2803ac06a0cbb91fn, 0x1904b5d698805799n,\n 0xe12a648b5c831461n, 0x3516abbd6160cfa9n, 0xac46d25f12fe036dn, 0x78bfa1da906b00efn,\n 0xf6390338b7f111bdn, 0x0f25f80f538255d9n, 0x4ec8ca55b8db140fn, 0x4ff670740b9b30a1n,\n 0x8fd032443a07f325n, 0x80dfe7965c83eeb5n, 0xa3dc1714d1213afdn, 0x205b7bbfcdc62007n,\n 0xa78126bbe140a093n, 0x9de1dc61ca7550cfn, 0x84f0046d01b492c5n, 
0x2d91810b945de0f3n,\n 0xf5408b7f6008aa71n, 0x43707f4863034149n, 0xdac65fb9679279d5n, 0xc48406e7d1114eb7n,\n 0xa7dc9ed3c88e1271n, 0xfb25b2efdb9cb30dn, 0x1bebda0951c4df63n, 0x5c85e975580ee5bdn,\n 0x1591bc60082cb137n, 0x2c38606318ef25d7n, 0x76ca72f7c5c63e27n, 0xf04a75d17baa0915n,\n 0x77458175139ae30dn, 0x0e6c1330bc1b9421n, 0xdf87d2b5797e8293n, 0xefa5c703e1e68925n,\n 0x2b6b1b3278b4f6e1n, 0xceee27b382394249n, 0xd74e3829f5dab91dn, 0xfdb17989c26b5f1fn,\n 0xc1b7d18781530845n, 0x7b4436b2105a8561n, 0x7ba7c0418372a7d7n, 0x9dbc5c67feb6c639n,\n 0x502686d7f6ff6b8fn, 0x6101855406be7a1fn, 0x9956afb5806930e7n, 0xe1f0ee88af40f7c5n,\n 0x984b057bda5c1151n, 0x9a49819acc13ea05n, 0x8ef0dead0896ef27n, 0x71f7826efe292b21n,\n 0xad80a480e46986efn, 0x01cdc0ebf5e0c6f7n, 0x6e06f839968f68dbn, 0xdd5943ab56e76139n,\n 0xcdcf31bf8604c5e7n, 0x7e2b4a847054a1cbn, 0x0ca75697a4d3d0f5n, 0x4703f53ac514a98bn,\n];\n\nconst inverted_factorial_odd_part = [\n 0x0000000000000001n, 0x0000000000000001n, 0x0000000000000001n, 0xaaaaaaaaaaaaaaabn,\n 0xaaaaaaaaaaaaaaabn, 0xeeeeeeeeeeeeeeefn, 0x4fa4fa4fa4fa4fa5n, 0x2ff2ff2ff2ff2ff3n,\n 0x2ff2ff2ff2ff2ff3n, 0x938cc70553e3771bn, 0xb71c27cddd93e49fn, 0xb38e3229fcdee63dn,\n 0xe684bb63544a4cbfn, 0xc2f684917ca340fbn, 0xf747c9cba417526dn, 0xbb26eb51d7bd49c3n,\n 0xbb26eb51d7bd49c3n, 0xb0a7efb985294093n, 0xbe4b8c69f259eabbn, 0x6854d17ed6dc4fb9n,\n 0xe1aa904c915f4325n, 0x3b8206df131cead1n, 0x79c6009fea76fe13n, 0xd8c5d381633cd365n,\n 0x4841f12b21144677n, 0x4a91ff68200b0d0fn, 0x8f9513a58c4f9e8bn, 0x2b3e690621a42251n,\n 0x4f520f00e03c04e7n, 0x2edf84ee600211d3n, 0xadcaa2764aaacdfdn, 0x161f4f9033f4fe63n,\n 0x161f4f9033f4fe63n, 0xbada2932ea4d3e03n, 0xcec189f3efaa30d3n, 0xf7475bb68330bf91n,\n 0x37eb7bf7d5b01549n, 0x46b35660a4e91555n, 0xa567c12d81f151f7n, 0x4c724007bb2071b1n,\n 0x0f4a0cce58a016bdn, 0xfa21068e66106475n, 0x244ab72b5a318ae1n, 0x366ce67e080d0f23n,\n 0xd666fdae5dd2a449n, 0xd740ddd0acc06a0dn, 0xb050bbbb28e6f97bn, 0x70b003fe890a5c75n,\n 0xd03aabff83037427n, 0x13ec4ca72c783bd7n, 0x90282c06afdbd96fn, 0x4414ddb9db4a95d5n,\n 0xa2c68735ae6832e9n, 0xbf72d71455676665n, 0xa8469fab6b759b7fn, 0xc1e55b56e606caf9n,\n 0x40455630fc4a1cffn, 0x0120a7b0046d16f7n, 0xa7c3553b08faef23n, 0x9f0bfd1b08d48639n,\n 0xa433ffce9a304d37n, 0xa22ad1d53915c683n, 0xcb6cbc723ba5dd1dn, 0x547fb1b8ab9d0ba3n,\n 0x547fb1b8ab9d0ba3n, 0x8f15a826498852e3n, 0x32e1a03f38880283n, 0x3de4cce63283f0c1n,\n 0x5dfe6667e4da95b1n, 0xfda6eeeef479e47dn, 0xf14de991cc7882dfn, 0xe68db79247630ca9n,\n 0xa7d6db8207ee8fa1n, 0x255e1f0fcf034499n, 0xc9a8990e43dd7e65n, 0x3279b6f289702e0fn,\n 0xe7b5905d9b71b195n, 0x03025ba41ff0da69n, 0xb7df3d6d3be55aefn, 0xf89b212ebff2b361n,\n 0xfe856d095996f0adn, 0xd6e533e9fdf20f9dn, 0xf8c0e84a63da3255n, 0xa677876cd91b4db7n,\n 0x07ed4f97780d7d9bn, 0x90a8705f258db62fn, 0xa41bbb2be31b1c0dn, 0x6ec28690b038383bn,\n 0xdb860c3bb2edd691n, 0x0838286838a980f9n, 0x558417a74b36f77dn, 0x71779afc3646ef07n,\n 0x743cda377ccb6e91n, 0x7fdf9f3fe89153c5n, 0xdc97d25df49b9a4bn, 0x76321a778eb37d95n,\n 0x7cbb5e27da3bd487n, 0x9cff4ade1a009de7n, 0x70eb166d05c15197n, 0xdcf0460b71d5fe3dn,\n 0x5ac1ee5260b6a3c5n, 0xc922dedfdd78efe1n, 0xe5d381dc3b8eeb9bn, 0xd57e5347bafc6aadn,\n 0x86939040983acd21n, 0x395b9d69740a4ff9n, 0x1467299c8e43d135n, 0x5fe440fcad975cdfn,\n 0xcaa9a39794a6ca8dn, 0xf61dbd640868dea1n, 0xac09d98d74843be7n, 0x2b103b9e1a6b4809n,\n 0x2ab92d16960f536fn, 0x6653323d5e3681dfn, 0xefd48c1c0624e2d7n, 0xa496fefe04816f0dn,\n 0x1754a7b07bbdd7b1n, 0x23353c829a3852cdn, 0xbf831261abd59097n, 0x57a8e656df0618e1n,\n 0x16e9206c3100680fn, 0xadad4c6ee921dac7n, 
0x635f2b3860265353n, 0xdd6d0059f44b3d09n,\n 0xac4dd6b894447dd7n, 0x42ea183eeaa87be3n, 0x15612d1550ee5b5dn, 0x226fa19d656cb623n,\n]\n\nconst factorial_trailing_zeros = [\n 0, 0, 1, 1, 3, 3, 4, 4, 7, 7, 8, 8, 10, 10, 11, 11, // 0-15\n 15, 15, 16, 16, 18, 18, 19, 19, 22, 22, 23, 23, 25, 25, 26, 26, // 16-31\n 31, 31, 32, 32, 34, 34, 35, 35, 38, 38, 39, 39, 41, 41, 42, 42, // 32-47\n 46, 46, 47, 47, 49, 49, 50, 50, 53, 53, 54, 54, 56, 56, 57, 57, // 48-63\n 63, 63, 64, 64, 66, 66, 67, 67, 70, 70, 71, 71, 73, 73, 74, 74, // 64-79\n 78, 78, 79, 79, 81, 81, 82, 82, 85, 85, 86, 86, 88, 88, 89, 89, // 80-95\n 94, 94, 95, 95, 97, 97, 98, 98, 101, 101, 102, 102, 104, 104, 105, 105, // 96-111\n 109, 109, 110, 110, 112, 112, 113, 113, 116, 116, 117, 117, 119, 119, 120, 120, // 112-127\n].map(BigInt)\n\nconst NULL = undefined\n\n/* Calculate C(n, k) for n in the 63-bit range. */\n\nfunction perm_comb_small(n, k, iscomb){\n if(k == 0){\n return 1n\n }\n\n /* For small enough n and k the result fits in the 64-bit range and can\n * be calculated without allocating intermediate PyLong objects. */\n if(iscomb){\n /* Maps k to the maximal n so that 2*k-1 <= n <= 127 and C(n, k)\n * fits into a uint64_t. Exclude k = 1, because the second fast\n * path is faster for this case.*/\n var fast_comb_limits1 = [\n 0, 0, 127, 127, 127, 127, 127, 127, // 0-7\n 127, 127, 127, 127, 127, 127, 127, 127, // 8-15\n 116, 105, 97, 91, 86, 82, 78, 76, // 16-23\n 74, 72, 71, 70, 69, 68, 68, 67, // 24-31\n 67, 67, 67 // 32-34\n ];\n if(k < fast_comb_limits1.length && n <= fast_comb_limits1[k]){\n /*\n comb(n, k) fits into a uint64_t. We compute it as\n comb_odd_part << shift\n where 2**shift is the largest power of two dividing comb(n, k)\n and comb_odd_part is comb(n, k) >> shift. comb_odd_part can be\n calculated efficiently via arithmetic modulo 2**64, using three\n lookups and two uint64_t multiplications.\n */\n var comb_odd_part = reduced_factorial_odd_part[n]\n * inverted_factorial_odd_part[k]\n * inverted_factorial_odd_part[n - k];\n comb_odd_part %= p2_64\n var shift = factorial_trailing_zeros[n]\n - factorial_trailing_zeros[k]\n - factorial_trailing_zeros[n - k];\n return comb_odd_part << shift;\n }\n\n /* Maps k to the maximal n so that 2*k-1 <= n <= 127 and C(n, k)*k\n * fits into a long long (which is at least 64 bit). Only contains\n * items larger than in fast_comb_limits1. */\n var fast_comb_limits2 = [\n 0, ULLONG_MAX, 4294967296, 3329022, 102570, 13467, 3612, 1449, // 0-7\n 746, 453, 308, 227, 178, 147 // 8-13\n ];\n if (k < fast_comb_limits2.length && n <= fast_comb_limits2[k]) {\n /* C(n, k) = C(n, k-1) * (n-k+1) / k */\n var result = n,\n i = 1n;\n while(i < k){\n result *= --n;\n result /= ++i;\n }\n return result;\n }\n }else{\n /* Maps k to the maximal n so that k <= n and P(n, k)\n * fits into a long long (which is at least 64 bit). */\n var fast_perm_limits = [\n 0, ULLONG_MAX, 4294967296, 2642246, 65537, 7133, 1627, 568, // 0-7\n 259, 142, 88, 61, 45, 36, 30, 26, // 8-15\n 24, 22, 21, 20, 20 // 16-20\n ];\n if (k < fast_perm_limits.length && n <= fast_perm_limits[k]) {\n if(n <= 127){\n /* P(n, k) fits into a uint64_t. 
*/\n var perm_odd_part = reduced_factorial_odd_part[n]\n * inverted_factorial_odd_part[n - k];\n perm_odd_part %= p2_64\n var shift = factorial_trailing_zeros[n]\n - factorial_trailing_zeros[n - k];\n var res = perm_odd_part << shift\n\n return res;\n }\n\n /* P(n, k) = P(n, k-1) * (n-k+1) */\n var result = n;\n for (var i = 1; i < k; i++) {\n result *= --n;\n }\n return result\n }\n }\n\n /* For larger n use recursive formulas:\n *\n * P(n, k) = P(n, j) * P(n-j, k-j)\n * C(n, k) = C(n, j) * C(n-j, k-j) // C(k, j)\n */\n var j = k / 2n;\n var a = perm_comb_small(n, j, iscomb);\n var b = perm_comb_small(n - j, k - j, iscomb);\n a = a * b;\n if(iscomb){\n b = perm_comb_small(k, j, 1);\n a = a / b;\n }\n return a;\n}\n\n/* Calculate P(n, k) or C(n, k) using recursive formulas.\n * It is more efficient than sequential multiplication thanks to\n * Karatsuba multiplication.\n */\nfunction perm_comb(n, k, iscomb){\n if(k == 0){\n return 1;\n }\n if(k == 1){\n return n;\n }\n\n /* P(n, k) = P(n, j) * P(n-j, k-j) */\n /* C(n, k) = C(n, j) * C(n-j, k-j) // C(k, j) */\n var j = k / 2n\n var a = perm_comb(n, j, iscomb);\n //var t = j\n //n = n - t;\n var b = perm_comb(n - j, k - j, iscomb);\n a = a * b;\n if(iscomb){\n b = perm_comb_small(k, j, 1);\n a = a / b;\n }\n return a;\n}\n\nfunction comb(n, k){\n var $ = $B.args('comb', 2, {n: null, k: null}, ['n', 'k'],\n arguments, {}, null, null),\n n = $.n,\n k = $.k\n\n var result = NULL,\n temp,\n overflow, cmp;\n\n // accept integers or objects with __index__\n n = $B.PyNumber_Index(n)\n k = $B.PyNumber_Index(k)\n\n n = _b_.int.$to_bigint(n);\n k = _b_.int.$to_bigint(k);\n\n if(n < 0){\n throw _b_.ValueError.$factory(\n \"n must be a non-negative integer\");\n }\n if(k < 0){\n throw _b_.ValueError.$factory(\n \"k must be a non-negative integer\");\n }\n\n overflow = n > LLONG_MAX || n < LLONG_MIN\n if(! overflow){\n overflow = k > LLONG_MAX || k < LLONG_MIN\n if (overflow || k > n) {\n result = 0n;\n }else{\n if(n - k < k){\n k = n - k\n }\n if (k > 1) {\n result = perm_comb_small(n, k, 1);\n }\n }\n /* For k == 1 just return the original n in perm_comb(). */\n }else{\n /* k = min(k, n - k) */\n temp = n - k\n if(temp < 0) {\n result = 0n;\n }\n if (temp < k) {\n k = temp\n }\n\n overflow = k > LLONG_MAX || k < LLONG_MIN\n if (overflow) {\n throw _b_.OverflowError.$factory(\n \"min(n - k, k) must not exceed \" +\n LLONG_MAX);\n }\n }\n if(result === undefined){\n result = perm_comb(n, k, 1);\n }\n\n return _b_.int.$int_or_long(result)\n}\n\n\nfunction copysign(x, y){\n $B.check_nb_args_no_kw('copysign', 2, arguments)\n\n var x1 = Math.abs(float_check(x))\n var y1 = float_check(y)\n var sign = Math.sign(y1)\n sign = (sign == 1 || Object.is(sign, +0)) ? 
1 : - 1\n return _b_.float.$factory(x1 * sign)\n}\n\nfunction cos(x){\n $B.check_nb_args('cos ', 1, arguments)\n $B.check_no_kw('cos ', x)\n return _b_.float.$factory(Math.cos(float_check(x)))\n}\n\nfunction cosh(x){\n $B.check_nb_args('cosh', 1, arguments)\n $B.check_no_kw('cosh', x)\n\n if(_b_.float.$funcs.isinf(x)){return INF}\n var y = float_check(x)\n if(Math.cosh !== undefined){return _b_.float.$factory(Math.cosh(y))}\n return _b_.float.$factory((Math.pow(Math.E, y) +\n Math.pow(Math.E, -y)) / 2)\n}\n\nfunction degrees(x){\n $B.check_nb_args('degrees', 1, arguments)\n $B.check_no_kw('degrees', x)\n return _b_.float.$factory(float_check(x) * 180 / Math.PI)\n}\n\nfunction dist(p, q){\n $B.check_nb_args_no_kw('dist', 2, arguments)\n\n function test(x){\n if(typeof x === \"number\"){\n return x\n }else if(x.__class__ === _b_.float){\n return x.value\n }\n var y = $B.$getattr(x, '__float__', null)\n if(y === null){\n throw _b_.TypeError.$factory('not a float')\n }\n return $B.$call(y)().value\n }\n\n // build list of differences (as floats) between coordinates of p and q\n var diffs = [],\n diff\n\n if(Array.isArray(p) && Array.isArray(q)){\n // simple case : p and q are lists of tuples\n if(p.length != q.length){\n throw _b_.ValueError.$factory(\"both points must have \" +\n \"the same number of dimensions\")\n }\n p = p.map(test)\n q = q.map(test)\n for(var i = 0, len = p.length; i < len; i++){\n var next_p = p[i],\n next_q = q[i]\n var diff = Math.abs(next_p - next_q)\n diffs.push(diff)\n }\n }else{\n var itp = _b_.iter(p),\n itq = _b_.iter(q),\n res = 0\n\n while(true){\n try{\n var next_p = _b_.next(itp)\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n // check that the other iterator is also exhausted\n try{\n var next_q = _b_.next(itq)\n throw _b_.ValueError.$factory(\"both points must have \" +\n \"the same number of dimensions\")\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n break\n }\n throw err\n }\n }\n throw err\n }\n next_p = test(next_p)\n try{\n var next_q = _b_.next(itq)\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n throw _b_.ValueError.$factory(\"both points must have \" +\n \"the same number of dimensions\")\n }\n throw err\n }\n next_q = test(next_q)\n diff = Math.abs(next_p - next_q)\n diffs.push(diff)\n }\n }\n for(var diff of diffs){\n if(! isFinite(diff) && ! 
isNaN(diff)){\n return _mod.inf\n }\n }\n for(var diff of diffs){\n if(isNaN(diff)){\n return _mod.nan\n }\n }\n\n var res = 0,\n scale = 1,\n max_diff = Math.max(...diffs),\n min_diff = Math.min(...diffs)\n max_value = Math.sqrt(Number.MAX_VALUE) / p.length,\n min_value = Math.sqrt(Number.MIN_VALUE) * p.length\n if(max_diff > max_value){\n var nb = 0\n while(max_diff > max_value){\n scale *= 2\n max_diff /= 2\n nb++\n }\n for(var diff of diffs){\n diff = diff / scale\n res += diff * diff\n }\n return $B.fast_float(scale * Math.sqrt(res))\n }else if(min_diff !== 0 && min_diff < min_value){\n while(min_diff < min_value){\n scale *= 2\n min_diff *= 2\n }\n for(var diff of diffs){\n diff = diff * scale\n res += diff * diff\n }\n return $B.fast_float(Math.sqrt(res) / scale)\n }else{\n for(var diff of diffs){\n res += Math.pow(diff, 2)\n }\n return $B.fast_float(Math.sqrt(res))\n }\n}\n\nconst e = _b_.float.$factory(Math.E)\n\nconst ERF_SERIES_CUTOFF = 1.5,\n ERF_SERIES_TERMS = 25,\n ERFC_CONTFRAC_CUTOFF = 30.0,\n ERFC_CONTFRAC_TERMS = 50\n\n/*\n Error function, via power series.\n Given a finite float x, return an approximation to erf(x).\n Converges reasonably fast for small x.\n*/\n\nfunction m_erf_series(x){\n var x2, acc, fk, result\n var i\n\n x2 = x * x\n acc = 0.0\n fk = ERF_SERIES_TERMS + 0.5\n for(i = 0; i < ERF_SERIES_TERMS; i++){\n acc = 2.0 + x2 * acc / fk\n fk -= 1.0\n }\n result = acc * x * exp(-x2).value / sqrtpi\n return result\n}\n\nfunction m_erfc_contfrac(x){\n var x2, a, da, p, p_last, q, q_last, b, result;\n var i\n\n if(x >= ERFC_CONTFRAC_CUTOFF){\n return 0.0\n }\n\n x2 = x * x\n a = 0.0\n da = 0.5\n p = 1.0\n p_last = 0.0\n q = da + x2\n q_last = 1.0\n for(i = 0; i < ERFC_CONTFRAC_TERMS; i++){\n var temp\n a += da\n da += 2.0\n b = da + x2\n temp = p; p = b * p - a * p_last; p_last = temp\n temp = q; q = b * q - a * q_last; q_last = temp\n }\n result = p / q * x * exp(-x2).value / sqrtpi\n return result\n}\n\n\nfunction erf(x){\n var absx,\n cf\n var x1 = float_check(x)\n if(isNaN(x1)){\n return x\n }\n absx = fabs(x)\n if(absx.value < ERF_SERIES_CUTOFF){\n return $B.fast_float(m_erf_series(x1))\n }else{\n cf = m_erfc_contfrac(absx.value)\n return $B.fast_float(x1 > 0.0 ? 1.0 - cf : cf - 1.0)\n }\n}\n\nfunction erfc(x){\n\n // inspired from\n // http://stackoverflow.com/questions/457408/is-there-an-easily-available-implementation-of-erf-for-python\n var y = float_check(x)\n var t = 1.0 / (1.0 + 0.5 * Math.abs(y))\n var ans = 1 - t * Math.exp( -y * y - 1.26551223 +\n t * ( 1.00002368 +\n t * ( 0.37409196 +\n t * ( 0.09678418 +\n t * (-0.18628806 +\n t * ( 0.27886807 +\n t * (-1.13520398 +\n t * ( 1.48851587 +\n t * (-0.82215223 +\n t * 0.17087277)))))))))\n if(y >= 0.0){return 1 - ans}\n return 1 + ans\n}\n\nfunction erfc(x){\n $B.check_nb_args_no_kw('erfc', 1, arguments)\n var absx, cf;\n\n var x1 = float_check(x)\n if(isNaN(x1)){\n return x\n }\n absx = fabs(x);\n if(absx.value < ERF_SERIES_CUTOFF){\n return $B.fast_float(1.0 - m_erf_series(x1))\n }else{\n cf = m_erfc_contfrac(absx.value)\n return $B.fast_float(x1 > 0.0 ? cf : 2.0 - cf)\n }\n}\n\nfunction exp(x){\n $B.check_nb_args('exp', 1, arguments)\n $B.check_no_kw('exp', x)\n\n if(_b_.float.$funcs.isninf(x)){\n return _b_.float.$factory(0)\n }\n if(_b_.float.$funcs.isinf(x)){\n return INF\n }\n var _r = Math.exp(float_check(x))\n if(! isNaN(_r) && ! 
isFinite(_r)){\n throw _b_.OverflowError.$factory(\"math range error\")\n }\n return _b_.float.$factory(_r)\n}\n\nfunction exp2(x){\n return pow(2, x)\n}\n\nfunction expm1(x){\n $B.check_nb_args('expm1', 1, arguments)\n $B.check_no_kw('expm1', x)\n\n if(_b_.float.$funcs.isninf(x)){\n return $B.fast_float(-1)\n }else if(_b_.float.$funcs.isinf(x)){\n return INF\n }\n var _r = Math.expm1(float_check(x))\n if((! isNaN(_r)) && ! isFinite(_r)){\n overflow()\n }\n return $B.fast_float(_r)\n}\n\nfunction fabs(x){\n $B.check_nb_args_no_kw('fabs', 1, arguments)\n return _b_.float.$funcs.fabs(float_check(x)) // located in py_float.js\n}\n\n// factorial implementation, adapted from CPython's mathmodule.c\n\nconst SmallFactorials = [\n 1n, 1n, 2n, 6n, 24n, 120n, 720n, 5040n, 40320n,\n 362880n, 3628800n, 39916800n, 479001600n,\n 6227020800n, 87178291200n, 1307674368000n,\n 20922789888000n, 355687428096000n, 6402373705728000n,\n 121645100408832000n, 2432902008176640000n\n ]\n\nconst SIZEOF_LONG = 4\n\nfunction _Py_bit_length(x){\n const BIT_LENGTH_TABLE = [\n 0, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4,\n 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5\n ]\n var msb = 0;\n while(x >= 32n){\n msb += 6;\n x >>= 6n;\n }\n msb += BIT_LENGTH_TABLE[parseInt(x)];\n return msb\n}\nfunction count_set_bits(n){\n var count = 0n;\n while(n != 0){\n ++count;\n n &= n - 1n; /* clear least significant bit */\n }\n return count;\n}\n\nfunction factorial_partial_product(start, stop, max_bits){\n var midpoint,\n num_operands,\n left,\n right,\n result\n\n /* If the return value will fit an unsigned long, then we can\n * multiply in a tight, fast loop where each multiply is O(1).\n * Compute an upper bound on the number of bits required to store\n * the answer.\n *\n * Storing some integer z requires floor(lg(z))+1 bits, which is\n * conveniently the value returned by bit_length(z). The\n * product x*y will require at most\n * bit_length(x) + bit_length(y) bits to store, based\n * on the idea that lg product = lg x + lg y.\n *\n * We know that stop - 2 is the largest number to be multiplied. From\n * there, we have: bit_length(answer) <= num_operands *\n * bit_length(stop - 2)\n */\n\n num_operands = (stop - start) / 2n;\n max_bits = BigInt(max_bits)\n /* The \"num_operands <= 8 * SIZEOF_LONG\" check guards against the\n * unlikely case of an overflow in num_operands * max_bits. */\n if(num_operands <= 8 * SIZEOF_LONG &&\n num_operands * max_bits <= 8 * SIZEOF_LONG) {\n var j,\n total;\n for (total = start, j = start + 2n; j < stop; j += 2n){\n total *= j;\n }\n return total\n }\n\n /* find midpoint of range(start, stop), rounded up to next odd number. */\n midpoint = (start + num_operands) | 1n;\n left = factorial_partial_product(start, midpoint,\n _Py_bit_length(midpoint - 2n));\n right = factorial_partial_product(midpoint, stop, max_bits);\n result = left * right\n return result;\n}\n\n\nfunction factorial_odd_part(n){\n var i,\n v, lower, upper,\n partial, tmp, inner, outer;\n\n inner = 1n\n outer = inner;\n upper = 3n;\n for (i = BigInt(_Py_bit_length(n)) - 2n; i >= 0; i--) {\n v = n >> i;\n if (v <= 2){\n continue\n }\n lower = upper;\n /* (v + 1) | 1 = least odd integer strictly larger than n / 2**i */\n upper = (v + 1n) | 1n;\n /* Here inner is the product of all odd integers j in the range (0,\n n/2**(i+1)]. The factorial_partial_product call below gives the\n product of all odd integers j in the range (n/2**(i+1), n/2**i]. 
*/\n partial = factorial_partial_product(lower, upper,\n _Py_bit_length(upper-2n));\n /* inner *= partial */\n tmp = inner * partial\n inner = tmp;\n /* Now inner is the product of all odd integers j in the range (0,\n n/2**i], giving the inner product in the formula above. */\n\n /* outer *= inner; */\n tmp = outer * inner\n outer = tmp;\n }\n return outer;\n}\n\nfunction factorial(arg){\n var x,\n two_valuation,\n overflow,\n result,\n odd_part;\n // Check that arg can be converted to an integer, and transform it to\n // a bigint\n x = _b_.int.$to_bigint($B.PyNumber_Index(arg))\n overflow = x > LONG_MAX || x < LONG_MIN\n if(x > LONG_MAX) {\n throw _b_.OverflowError.$factory(\n \"factorial() argument should not exceed \" +\n LONG_MAX)\n }else if(x < 0) {\n throw _b_.ValueError.$factory(\n \"factorial() not defined for negative values\");\n }\n\n /* use lookup table if x is small */\n if (x < SmallFactorials.length){\n return _b_.int.$int_or_long(SmallFactorials[x]);\n }\n /* else express in the form odd_part * 2**two_valuation, and compute as\n odd_part << two_valuation. */\n odd_part = factorial_odd_part(x);\n two_valuation = x - count_set_bits(x);\n return _b_.int.$int_or_long(odd_part << two_valuation);\n}\n\nfunction floor(x){\n $B.check_nb_args_no_kw('floor', 1, arguments)\n\n if(typeof x == \"number\" || x.__class__ === _b_.float){\n return Math.floor(float_check(x))\n }\n var klass = $B.get_class(x)\n try{\n return $B.$call($B.$getattr(klass, \"__floor__\"))(x)\n }catch(err){\n if($B.is_exc(err, [_b_.AttributeError])){\n try{\n var float = $B.$call($B.$getattr(klass, \"__float__\"))(x)\n return floor(float)\n }catch(err){\n if($B.is_exc(err, [_b_.AttributeError])){\n throw _b_.TypeError.$factory(\"no __float__\")\n }\n throw err\n }\n }\n }\n}\n\nfunction fmod(x, y){\n $B.check_nb_args_no_kw('fmod', 2, arguments)\n if($B.$isinstance(x, _b_.float)){\n if(_b_.float.$funcs.isinf(x)){\n throw _b_.ValueError.$factory('math domain error')\n }\n }\n y = float_check(y)\n if(y == 0){\n throw _b_.ValueError.$factory('math domain error')\n }\n return _b_.float.$factory(float_check(x) % float_check(y))\n}\n\nfunction frexp(x){\n $B.check_nb_args_no_kw('frexp', 1, arguments)\n\n var _l = _b_.float.$funcs.frexp(x)\n return _b_.tuple.$factory([_b_.float.$factory(_l[0]), _l[1]])\n}\n\nfunction fsum(x){\n $B.check_nb_args_no_kw('fsum', 1, arguments)\n\n /* Translation into Javascript of the function msum in an Active\n State Cookbook recipe : https://code.activestate.com/recipes/393090/\n by Raymond Hettinger\n */\n var partials = [],\n res = new Number(),\n _it = _b_.iter(x)\n while(true){\n try{\n var x = _b_.next(_it),\n i = 0\n x = float_check(x)\n for(var j = 0, len = partials.length; j < len; j++){\n var y = float_check(partials[j])\n if(Math.abs(x) < Math.abs(y)){\n var z = x\n x = y\n y = z\n }\n var hi = x + y,\n lo = y - (hi - x)\n if(lo){\n partials[i] = lo\n i++\n }\n x = hi\n }\n partials = partials.slice(0, i).concat([x])\n }catch(err){\n if($B.$isinstance(err, _b_.StopIteration)){break}\n throw err\n }\n }\n var res = 0\n for(var i = 0; i < partials.length; i++){\n res += partials[i]\n }\n return $B.fast_float(res)\n}\n\nfunction gamma(x){\n $B.check_nb_args('gamma', 1, arguments)\n $B.check_no_kw('gamma', x)\n var x_as_number = x,\n r,\n y,\n z,\n sqrtpow\n\n /* special cases */\n if($B.$isinstance(x, _b_.float)){\n x_as_number = x.value\n }else if(! 
$B.$isinstance(x, _b_.int)){\n throw _b_.TypeError.$factory(\"must be real number, not \" +\n $B.class_name(x))\n }\n if(x_as_number === Number.POSITIVE_INFINITY || isNaN(x_as_number)){\n return x\n }else if(x_as_number === Number.NEGATIVE_INFINITY || x_as_number == 0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n\n /* integer arguments */\n if(Number.isInteger(x_as_number)){\n if($B.rich_comp('__lt__', x, 0.0)){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n if($B.rich_comp('__le__', x, NGAMMA_INTEGRAL)){\n return $B.fast_float(gamma_integral[x_as_number - 1])\n }\n }\n var absx = fabs(x)\n\n /* tiny arguments: tgamma(x) ~ 1/x for x near 0 */\n if(absx.value < 1e-20){\n r = 1.0 / x_as_number\n if(r === Infinity || r === -Infinity){\n overflow()\n }\n return $B.fast_float(r)\n }\n\n /* large arguments: assuming IEEE 754 doubles, tgamma(x) overflows for\n x > 200, and underflows to +-0.0 for x < -200, not a negative\n integer. */\n if(absx.value > 200.0){\n if(x_as_number < 0.0){\n return $B.fast_float(0.0 / m_sinpi(x).value);\n }else{\n overflow()\n }\n }\n\n y = absx.value + lanczos_g_minus_half;\n /* compute error in sum */\n if (absx.value > lanczos_g_minus_half) {\n /* note: the correction can be foiled by an optimizing\n compiler that (incorrectly) thinks that an expression like\n a + b - a - b can be optimized to 0.0. This shouldn't\n happen in a standards-conforming compiler. */\n var q = y - absx.value;\n z = q - lanczos_g_minus_half;\n }else{\n var q = y - lanczos_g_minus_half;\n z = q - absx.value;\n }\n z = z * lanczos_g / y;\n if (x_as_number < 0.0) {\n r = -pi.value / m_sinpi(absx).value /\n absx.value * _mod.exp(y).value /\n lanczos_sum(absx.value);\n r -= z * r;\n if(absx.value < 140.0){\n r /= pow(y, absx.value - 0.5).value;\n }else{\n sqrtpow = pow(y, absx.value / 2.0 - 0.25);\n r /= sqrtpow.value;\n r /= sqrtpow.value;\n }\n }else{\n r = lanczos_sum(absx.value) / exp(y).value;\n r += z * r;\n if(absx.value < 140.0){\n r *= pow(y, absx.value - 0.5).value;\n }else{\n sqrtpow = pow(y, absx.value / 2.0 - 0.25);\n r *= sqrtpow.value;\n r *= sqrtpow.value;\n }\n }\n if(r === Number.POSITIVE_INFINITY){\n overflow()\n }\n return $B.fast_float(r);\n}\n\n\n// GCD algorithm. Javascript adaptation of Python script at\n// https://gist.github.com/cmpute/baa545f0c2b6be8b628e9ded3c19f6c1\n// by Jacob Zhong\nfunction bit_length(x){\n return x.toString(2).length\n}\n\n$B.nb_simple_gcd = 0\n\nfunction simple_gcd(a, b){\n /* a fits into a long, so b must too */\n $B.nb_simple_gcd++\n var x = a >= 0 ? a : -a,\n y = b >= 0 ? b : -b\n\n /* usual Euclidean algorithm for longs */\n while (y != 0) {\n t = y;\n y = x % y;\n x = t;\n }\n return x\n}\n\nfunction lgcd(x, y){\n var a, b, c, d\n if(x < y){\n return lgcd(y, x)\n }\n var shift = BigInt(Math.max(Math.floor(bit_length(x) / 64),\n Math.floor(bit_length(y) / 64))),\n xbar = x >> (shift * 64n),\n ybar = y >> (shift * 64n)\n while(y > p2_64){\n [a, b, c, d] = [1n, 0n, 0n, 1n]\n while(ybar + c != 0 && ybar + d != 0){\n q = (xbar + a) / (ybar + c)\n p = (xbar + b) / (ybar + d)\n if(q != p){\n break\n }\n [a, c] = [c, a - q * c]\n [b, d] = [d, b - q * d]\n [xbar, ybar] = [ybar, xbar - q * ybar]\n }\n if(b == 0){\n [x, y] = [y, x % y]\n }else{\n [x, y] = [a * x + b * y, c * x + d * y]\n }\n }\n return simple_gcd(x, y)\n}\n\nfunction xgcd(x, y){\n var xneg = x < 0 ? -1n : 1n,\n yneg = y < 0 ? -1n : 1n,\n last_r,\n last_s,\n last_t,\n q, r, s, t;\n\n [x, y] = [x >= 0 ? x : -x, y >= 0 ? 
y : -y];\n\n // it's maintained that r = s * x + t * y, last_r = last_s * x + last_t * y\n [last_r, r] = [x, y];\n [last_s, s] = [1n, 0n];\n [last_t, t] = [0n, 1n];\n\n while(r > 0){\n q = last_r / r;\n [last_r, r] = [r, last_r - q * r];\n [last_s, s] = [s, last_s - q * s];\n [last_t, t] = [t, last_t - q * t];\n }\n return [last_r, last_s * xneg, last_t * yneg]\n}\n\nfunction lxgcd(x, y){\n var g, cy, cx,\n s, last_s,\n t, last_t,\n a, b, c, d\n x = x >= 0 ? x : -x\n y = y >= 0 ? y : -y\n\n if(x < y){\n [g, cy, cx] = xgcd(y, x)\n return [g, cx, cy]\n }\n\n var shift = BigInt(Math.max(Math.floor(bit_length(x) / 64),\n Math.floor(bit_length(y) / 64))),\n xbar = x >> (shift * 64n),\n ybar = y >> (shift * 64n);\n\n [last_s, s] = [1n, 0n];\n [last_t, t] = [0n, 1n];\n\n while(y > p2_64){\n [a, b, c, d] = [1n, 0n, 0n, 1n]\n while(ybar + c != 0 && ybar + d != 0){\n q = (xbar + a) / (ybar + c)\n p = (xbar + b) / (ybar + d)\n if(q != p){\n break\n };\n [a, c = c], [a - q * c];\n [b, d = d], [b - q * d];\n [xbar, ybar] = [ybar, xbar - q * ybar];\n }\n if(b == 0){\n q = x / y;\n [x, y] = [y, x % y];\n [last_s, s] = [s, last_s - q * s];\n [last_t, t] = [t, last_t - q * t];\n }else{\n [x, y] = [a * x + b * y, c * x + d * y];\n [last_s, s] = [a * last_s + b * s, c * last_s + d * s];\n [last_t, t] = [a * last_t + b * t, c * last_t + d * t];\n }\n }\n // notice that here x, y could be negative\n [g, cx, cy] = xgcd(x, y)\n\n return [g, cx * last_s + cy * s, cx * last_t + cy * t]\n}\n\nfunction gcd(x, y){\n var $ = $B.args(\"gcd\", 0, {}, [], arguments, {}, 'args', null)\n var args = $.args.map($B.PyNumber_Index)\n if(args.length == 0){\n return 0\n }else if(args.length == 1){\n return _b_.abs(args[0])\n }\n x = _b_.int.$to_bigint(args[0])\n y = _b_.int.$to_bigint(args[1])\n var res = lxgcd(x, y)[0],\n i = 2\n while(i < args.length){\n res = lxgcd(res, _b_.int.$to_bigint(args[i]))[0]\n i++\n }\n return _b_.int.$int_or_long(res)\n}\n\n\nfunction hypot(x, y){\n var $ = $B.args(\"hypot\", 0, {}, [],\n arguments, {}, \"args\", null)\n var args = []\n for(var arg of $.args){\n try{\n args.push(float_check(arg))\n }catch(err){\n if($B.is_exc(err, [_b_.ValueError])){\n throw _b_.TypeError.$factory('must be real number, not ' +\n $B.class_name(arg))\n }\n throw err\n }\n }\n return $B.fast_float(Math.hypot(...args))\n}\n\nvar inf = INF\n\nfunction isclose(){\n var $ = $B.args(\"isclose\",\n 4,\n {a: null, b: null, rel_tol: null, abs_tol: null},\n ['a', 'b', 'rel_tol', 'abs_tol'],\n arguments,\n {rel_tol: $B.fast_float(1e-09),\n abs_tol: $B.fast_float(0.0)},\n '*',\n null)\n var a = float_check($.a),\n b = float_check($.b),\n rel_tol = float_check($.rel_tol),\n abs_tol = float_check($.abs_tol)\n\n if(rel_tol < 0.0 || abs_tol < 0.0){\n throw _b_.ValueError.$factory('tolerances must be non-negative')\n }\n\n if(a == b){\n return _b_.True\n }\n if(_b_.float.$funcs.isinf(a) || _b_.float.$funcs.isinf(b)){\n return a === b\n }\n // isclose(a, b, rel_tol, abs_tol) is the same as\n // abs_diff = abs(a - b)\n // max_ab = max(abs(a), abs(b))\n // abs_diff <= abs_tol or abs_diff / max_ab <= rel_tol\n // This is more correct than in Python docs:\n // \"abs(a-b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)\"\n // because this fails for Decimal instances, which do not support\n // multiplication by floats\n\n var diff = b - a,\n abs_diff = Math.abs(diff)\n if(abs_diff <= abs_tol){\n return true\n }\n var abs_a = Math.abs(a),\n abs_b = Math.abs(b),\n max_ab = Math.max(abs_a, abs_b)\n return abs_diff / max_ab <= 
rel_tol\n}\n\nfunction isfinite(x){\n $B.check_nb_args('isfinite', 1, arguments)\n $B.check_no_kw('isfinite', x)\n return isFinite(float_check(x))\n}\n\nfunction isinf(x){\n $B.check_nb_args('isinf', 1, arguments)\n $B.check_no_kw('isinf', x)\n return _b_.float.$funcs.isinf(x)\n}\n\nfunction isnan(x){\n $B.check_nb_args('isnan', 1, arguments)\n $B.check_no_kw('isnan', x)\n return isNaN(float_check(x))\n}\n\nfunction isqrt(x){\n $B.check_nb_args_no_kw('isqrt', 1, arguments)\n\n x = $B.PyNumber_Index(x)\n if($B.rich_comp(\"__lt__\", x, 0)){\n throw _b_.ValueError.$factory(\n \"isqrt() argument must be nonnegative\")\n }\n if(typeof x == \"number\"){\n return Math.floor(Math.sqrt(x))\n }else{ // big integer\n // adapted from code in mathmodule.c\n var n = x.value,\n bit_length = n.toString(2).length,\n c = BigInt(Math.floor((bit_length - 1) / 2)),\n c_bit_length = c.toString(2).length,\n a = 1n,\n d = 0n,\n e\n\n for(var s = BigInt(c_bit_length - 1); s >= 0; s--){\n // Loop invariant: (a-1)**2 < (n >> 2*(c - d)) < (a+1)**2\n e = d\n d = c >> s\n a = (a << d - e - 1n) + (n >> 2n*c - e - d + 1n) / a\n }\n return _b_.int.$int_or_long(a - (a * a > n ? 1n : 0n))\n }\n}\n\nfunction lcm(){\n var $ = $B.args(\"lcm\", 0, {}, [], arguments, {}, 'args', null),\n product = 1\n\n var args = $.args.map($B.PyNumber_Index)\n if(args.length == 0){\n return 1\n }else if(args.length == 1){\n return _b_.abs(args[0])\n }\n var a = _b_.abs(args[0]),\n b,\n product, gcd\n for(var i = 0, len = args.length; i < len; i++){\n b = _b_.abs(args[i])\n if(b == 0){\n return 0\n }\n gcd = gcd2(a, b)\n product = $B.rich_op('__mul__', a, b)\n a = $B.$getattr(product, \"__floordiv__\")(gcd)\n }\n return a\n}\n\nfunction ldexp(x, i){\n $B.check_nb_args('ldexp', 2, arguments)\n $B.check_no_kw('ldexp', x, i)\n return _b_.float.$funcs.ldexp(x, i) // in py_float.js\n}\n\nfunction lgamma(x){\n $B.check_nb_args('lgamma', 1, arguments)\n $B.check_no_kw('lgamma', x)\n\n return m_lgamma(x)\n}\n\nfunction longint_mant_exp(long_int){\n // Returns mantissa and exponent of a long integer\n var value = long_int.value,\n exp = value.toString(2).length,\n exp1 = exp,\n nb = 0n\n // 2 ** exp is infinite if n > 1023\n var nb = Math.floor(exp / 1023),\n exp1 = BigInt(exp - 1023 * nb)\n nb = BigInt(nb)\n var reduced_value = long_int.value / 2n ** (nb * 1023n)\n var mant = Number(reduced_value) / Number(2n ** exp1)\n return [mant, exp]\n}\n\nvar log10_func = Math.log10 || (x => Math.log(x) / Math.log(10)),\n log2_func = Math.log2 || (x => Math.log(x) / Math.log(2))\n\nfunction log(x, base){\n var $ = $B.args(\"log\", 2, {x: null, base: null}, ['x', 'base'],\n arguments, {base: _b_.None}, null, null),\n x = $.x,\n base = $.base\n if(base == 10){\n return log10(x)\n }else if(base == 2){\n return log2(x)\n }\n var log\n if($B.$isinstance(x, $B.long_int)){\n if(x.value <= 0){\n throw _b_.ValueError.$factory('math domain error')\n }\n var mant_exp = longint_mant_exp(x)\n log = Math.log(mant_exp[0]) + Math.log(2) * mant_exp[1]\n }else if($B.$isinstance(x, _b_.int)){\n x = _b_.int.$int_value(x)\n if(x <= 0){\n throw _b_.ValueError.$factory('math domain error')\n }\n log = Math.log(x)\n }else{\n var x1 = float_check(x)\n if(x1 <= 0){\n throw _b_.ValueError.$factory('math domain error')\n }\n log = Math.log(x1)\n }\n if(x1 <= 0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n if(base === _b_.None){\n return $B.fast_float(log)\n }\n var denom = _mod.log(base).value\n if(denom == 0){\n throw _b_.ZeroDivisionError.$factory('float division by 
zero')\n }\n return $B.fast_float(log / denom)\n}\n\nfunction log1p(x){\n $B.check_nb_args('log1p', 1, arguments)\n $B.check_no_kw('log1p', x)\n if($B.$isinstance(x, $B.long_int)){\n if($B.long_int.bit_length(x) > 1024){\n throw _b_.OverflowError.$factory(\n \"int too large to convert to float\")\n }\n x = $B.long_int.$log2($B.fast_long_int(x.value + 1n))\n return $B.fast_float(Number(x.value) * Math.LN2)\n }\n x = float_check(x)\n if(x + 1 <= 0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n return $B.fast_float(Math.log1p(x))\n}\n\nfunction log2(x){\n $B.check_nb_args('log2', 1, arguments)\n $B.check_no_kw('log2', x)\n var log2_func = Math.log2 || (x => Math.log(x) / Math.LN2)\n if($B.$isinstance(x, $B.long_int)){\n if(x.value <= 0){\n throw _b_.ValueError.$factory('math domain error')\n }\n var mant_exp = longint_mant_exp(x)\n return $B.fast_float(log2_func(mant_exp[0]) + mant_exp[1])\n }\n if(_b_.float.$funcs.isninf(x)){\n throw _b_.ValueError.$factory('')\n }\n x = float_check(x)\n if(x == 0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n if(isNaN(x)){\n return _b_.float.$factory('nan')\n }\n if(x < 0.0){\n throw _b_.ValueError.$factory('math domain error')\n }\n return $B.fast_float(log2_func(x))\n}\n\nfunction log10(x){\n $B.check_nb_args('log10', 1, arguments)\n $B.check_no_kw('log10', x)\n if($B.$isinstance(x, $B.long_int)){\n return $B.fast_float($B.long_int.$log10(x).value)\n }\n x = float_check(x)\n if(x <= 0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n return $B.fast_float(Math.log10(x))\n}\n\nfunction modf(x){\n $B.check_nb_args('modf', 1, arguments)\n $B.check_no_kw('modf', x)\n\n if(_b_.float.$funcs.isninf(x)){\n return _b_.tuple.$factory([0.0, NINF])\n }\n if(_b_.float.$funcs.isinf(x)){\n return _b_.tuple.$factory([0.0, INF])\n }\n var x1 = float_check(x)\n\n if(isNaN(x1)){\n return _b_.tuple.$factory([_b_.float.$factory('nan'),\n _b_.float.$factory('nan')])\n }\n\n if(x1 > 0){\n var i = _b_.float.$factory(x1 - Math.floor(x1))\n return _b_.tuple.$factory([i, _b_.float.$factory(x1 - i.value)])\n }\n\n var x2 = Math.ceil(x1)\n var i = _b_.float.$factory(x1 - x2)\n return _b_.tuple.$factory([i, _b_.float.$factory(x2)])\n}\n\nvar nan = _b_.float.$factory('nan')\n\nfunction _nextafter(x, y){\n // always returns a Javascript number\n if($B.rich_comp('__lt__', y, x)){\n var nu = nextUp($B.rich_op('__mul__', -1, x))\n return -nu\n }else if($B.rich_comp('__gt__', y, x)){\n return nextUp(x)\n }else{\n var res = x !== x ? x : y\n res = typeof res == 'number' ? res : res.value\n return res\n }\n}\n\nfunction make_float(x){\n return typeof x == 'number' ? $B.fast_float(x) : x\n}\n\nfunction make_number(x){\n return typeof x == 'number' ? 
x : x.value\n}\n\nfunction doubleToByteArray(number) {\n // adapted from https://stackoverflow.com/questions/\n // 25942516/double-to-byte-array-conversion-in-javascript\n var buffer = new ArrayBuffer(8); // JS numbers are 8 bytes long, or 64 bits\n var longNum = new Float64Array(buffer); // so equivalent to Float64\n\n longNum[0] = number;\n\n return Array.from(new Uint8Array(buffer)).reverse(); // reverse to get little endian\n}\n\nfunction byteArrayToDouble(bytearray) {\n // adapted from https://stackoverflow.com/questions/\n // 42699162/javascript-convert-array-of-4-bytes-into-a-float-value-from-modbustcp-read\n // Create a buffer\n var buf = new ArrayBuffer(8);\n // Create a data view of it\n var view = new DataView(buf);\n\n // set bytes\n bytearray.forEach(function (b, i) {\n view.setUint8(i, b);\n });\n\n // Read the bits as a float\n var num = view.getFloat64(0);\n // Done\n return num\n}\n\nfunction addSteps(array, steps){\n // convert to BigInt, avoids issue when steps >= 2 ** 32\n if(steps.__class__ == $B.long_int){\n steps = steps.value\n }else{\n steps = BigInt(steps)\n }\n var positive = steps > 0n\n if(steps < 0n){\n steps = -steps\n }\n var x1 = steps >> 32n,\n x2 = steps - x1 * 2n ** 32n\n var buffer = new ArrayBuffer(8)\n var longStep = new BigInt64Array(buffer)\n longStep[0] = steps\n var stepArray = Array.from(new Uint8Array(buffer)).reverse()\n if(positive){\n var carry = 0\n for(var i = 7; i >= 0; i--){\n array[i] += stepArray[i] + carry\n if(array[i] > 255){\n carry = 1\n array[i] -= 256\n }else{\n carry = 0\n }\n }\n }else{\n var carry = 0\n for(var i = 7; i >= 0; i--){\n array[i] -= stepArray[i] - carry\n if(array[i] < 0){\n carry = -1\n array[i] += 256\n }else{\n carry = 0\n }\n }\n }\n}\n\nfunction nextafter(){\n var $ = $B.args(\"nextafter\", 3, {x: null, y: null, steps: null},\n ['x', 'y', 'steps'], arguments, {steps: _b_.None}, null, null),\n x = $.x,\n y = $.y,\n steps = $.steps\n if(! $B.$isinstance(x, [_b_.int, _b_.float])){\n throw _b_.TypeError.$factory('must be a real number, not ' +\n $B.class_name(x))\n }\n if(! $B.$isinstance(y, [_b_.int, _b_.float])){\n throw _b_.TypeError.$factory('must be a real number, not ' +\n $B.class_name(y))\n }\n if(isnan(x)){\n return make_float(x)\n }\n if(isnan(y)){\n return make_float(y)\n }\n if(steps === _b_.None){\n return $B.fast_float(_nextafter(x, y))\n }\n steps = $B.PyNumber_Index(steps);\n if(steps < 0) {\n throw _b_.ValueError.$factory(\n \"steps must be a non-negative integer\");\n }\n if(steps == 0){\n return make_float(x)\n }\n if(isnan(x)){\n return make_float(x)\n }\n if(isnan(y)){\n return make_float(y)\n }\n var x1 = make_number(x),\n y1 = make_number(y)\n\n if(y1 == x1){\n return make_float(y)\n }else if(y1 > x1){\n var x_uint64 = doubleToByteArray(x1)\n addSteps(x_uint64, steps)\n var res = byteArrayToDouble(x_uint64)\n return res >= y1 ? y : make_float(res)\n }else{\n var x_uint64 = doubleToByteArray(x1)\n addSteps(x_uint64, -steps)\n var res = byteArrayToDouble(x_uint64)\n return res <= y1 ? 
y : make_float(res)\n }\n}\n\nfunction perm(n, k){\n var $ = $B.args(\"perm\", 2, {n: null, k: null}, ['n', 'k'],\n arguments, {k: _b_.None}, null, null),\n n = $.n,\n k = $.k\n\n if(k === _b_.None){\n check_int(n)\n return _mod.factorial(n)\n }\n // raise TypeError if n or k is not an integer\n n = $B.PyNumber_Index(n)\n k = $B.PyNumber_Index(k)\n\n // transform to Javascript BigInt\n var n1 = _b_.int.$to_bigint(n),\n k1 = _b_.int.$to_bigint(k);\n\n if(k1 < 0){\n throw _b_.ValueError.$factory(\"k must be a non-negative integer\")\n }\n if(n1 < 0){\n throw _b_.ValueError.$factory(\"n must be a non-negative integer\")\n }\n if(k1 == 0){\n return 1\n }\n if(k1 == 1){\n return n\n }\n if(k1 == 2){\n return _b_.int.$int_or_long(n1 * (n1 - 1n))\n }\n if(k1 > n1){\n return 0\n }\n // Evaluates to n! / (n - k)!\n var fn = _mod.factorial(n),\n fn_k = _mod.factorial(n - k)\n return $B.rich_op('__floordiv__', fn, fn_k)\n}\n\nconst pi = $B.fast_float(Math.PI)\n\nfunction pow(){\n var $ = $B.args(\"pow\", 2, {base: null, exp: null}, ['base', 'exp'],\n arguments, {}, null, null),\n x = $.base,\n y = $.exp\n\n var x1 = float_check(x)\n var y1 = float_check(y)\n\n if(y1 == 0){\n return _b_.float.$factory(1)\n }\n if(x1 == 0 && y1 < 0){\n if(y1 === -Infinity){\n return INF\n }\n throw _b_.ValueError.$factory('math domain error')\n }\n if(isFinite(x1) && x1 < 0 && isFinite(y1) && ! Number.isInteger(y1)){\n throw _b_.ValueError.$factory('math domain error')\n }\n\n if(isNaN(y1)){\n if(x1 == 1){return _b_.float.$factory(1)}\n return NAN\n }\n if(x1 == 0){\n return ZERO\n }\n\n if(_b_.float.$funcs.isninf(y)){\n if(_b_.float.$funcs.isinf(x)){ // pow(INF, NINF) = 0.0\n return ZERO\n }else if(_b_.float.$funcs.isninf(x)){ // pow(NINF, NINF) = 0.0\n return ZERO\n }\n if(x1 == 1 || x1 == -1){return _b_.float.$factory(1)}\n if(x1 < 1 && x1 > -1){return INF}\n return ZERO\n }\n if(_b_.float.$funcs.isinf(y)){\n if(_b_.float.$funcs.isinf(x)){ // pow(INF, INF)\n return INF\n }\n if(_b_.float.$funcs.isninf(x)){\n return INF\n }\n if(x1 == 1 || x1 == -1){return _b_.float.$factory(1)}\n if(x1 < 1 && x1 > -1){return ZERO}\n return INF\n }\n\n if(isNaN(x1)){return _b_.float.$factory('nan')}\n if(_b_.float.$funcs.isninf(x)){\n if(y1 > 0 && isOdd(y1)){return NINF}\n if(y1 > 0){return INF} // this is even or a float\n if(y1 < 0){return ZERO}\n if(_b_.float.$float.isinf(y)){return INF}\n return _b_.float.$factory(1)\n }\n\n if(_b_.float.$funcs.isinf(x)){\n if(y1 > 0){return INF}\n if(y1 < 0){return ZERO}\n return _b_.float.$factory(1)\n }\n\n var r = Math.pow(x1, y1)\n if(isNaN(r)){\n return NAN\n }\n if(! 
isFinite(r)){\n overflow()\n }\n return _b_.float.$factory(r)\n}\n\nfunction prod(){\n var $ = $B.args(\"prod\", 1, {iterable:null, start:null},\n [\"iterable\", \"start\"], arguments, {start: 1}, \"*\",\n null),\n iterable = $.iterable,\n start = $.start\n var res = start,\n it = _b_.iter(iterable),\n x\n while(true){\n try{\n x = _b_.next(it)\n if(x == 0){\n return 0\n }\n res = $B.rich_op('__mul__', res, x)\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n return res\n }\n throw err\n }\n }\n}\n\nfunction radians(x){\n $B.check_nb_args('radians', 1, arguments)\n $B.check_no_kw('radians', x)\n\n return _b_.float.$factory(float_check(x) * Math.PI / 180)\n}\n\nfunction is_finite(x){\n return typeof x == \"number\" ||\n (x.__class__ === _b_.floar && isFinite(x.value)) ||\n $B.$isinstance(x, _b_.int) ||\n ($B.$isinstance(x, _b_.float) && isFinite(x.value))\n}\n\nfunction remainder(x, y){\n $B.check_nb_args_no_kw('remainder', 2, arguments)\n float_check(x) // might raise TypeError\n /* Deal with most common case first. */\n if(is_finite(x) && is_finite(y)){\n var absx,\n absy,\n c,\n m,\n r;\n\n if(float_check(y) == 0.0){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n\n absx = fabs(x);\n absy = fabs(y);\n m = fmod(absx, absy);\n\n c = absy.value - m.value\n if(m.value < c){\n r = m.value\n }else if(m.value > c){\n r = -c\n }else{\n r = m.value -\n 2.0 * fmod($B.fast_float(0.5 * (absx.value - m.value)), absy).value;\n }\n return $B.fast_float(copysign(1.0, x).value * r);\n }\n\n /* Special values. */\n if(float_check(y) == 0){\n if(isnan(x)){\n return x\n }\n }\n if(isinf(x)){\n if(isnan(y)){\n return y\n }\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n if(isnan(y)){\n return y;\n }\n return x;\n}\n\nfunction sin(x){\n $B.check_nb_args('sin ', 1, arguments)\n $B.check_no_kw('sin ', x)\n return _b_.float.$factory(Math.sin(float_check(x)))\n}\n\nfunction sinh(x) {\n $B.check_nb_args('sinh', 1, arguments)\n $B.check_no_kw('sinh', x)\n\n var y = float_check(x)\n if(Math.sinh !== undefined){\n return _b_.float.$factory(Math.sinh(y))\n }\n return _b_.float.$factory(\n (Math.pow(Math.E, y) - Math.pow(Math.E, -y)) / 2)\n}\n\nfunction sqrt(x){\n $B.check_nb_args('sqrt ', 1, arguments)\n $B.check_no_kw('sqrt ', x)\n\n if(_b_.float.$funcs.isninf(x)){\n value_error()\n }else if(_b_.float.$funcs.isinf(x)){\n return INF\n }\n var y = float_check(x)\n if(y < 0){\n value_error()\n }\n var _r = $B.fast_float(Math.sqrt(y))\n if(_b_.float.$funcs.isinf(_r)){\n overflow()\n }\n return _r\n}\n\n/*[clinic input]\nmath.sumprod\n\n p: object\n q: object\n /\n\nReturn the sum of products of values from two iterables p and q.\n\nRoughly equivalent to:\n\n sum(itertools.starmap(operator.mul, zip(p, q, strict=True)))\n\nFor float and mixed int/float inputs, the intermediate products\nand sums are computed with extended precision.\n[clinic start generated code]*/\n\nconst tl_zero = {hi: 0, lo: 0, tiny: 0}\n\nfunction _check_long_mult_overflow(a, b) {\n\n /* From Python2's int_mul code:\n\n Integer overflow checking for * is painful: Python tried a couple ways, but\n they didn't work on all platforms, or failed in endcases (a product of\n -sys.maxint-1 has been a particular pain).\n\n Here's another way:\n\n The native long product x*y is either exactly right or *way* off, being\n just the last n bits of the true product, where n is the number of bits\n in a long (the delivered product is the true product plus i*2**n for\n some integer i).\n\n The native double product (double)x * 
(double)y is subject to three\n rounding errors: on a sizeof(long)==8 box, each cast to double can lose\n info, and even on a sizeof(long)==4 box, the multiplication can lose info.\n But, unlike the native long product, it's not in *range* trouble: even\n if sizeof(long)==32 (256-bit longs), the product easily fits in the\n dynamic range of a double. So the leading 50 (or so) bits of the double\n product are correct.\n\n We check these two ways against each other, and declare victory if they're\n approximately the same. Else, because the native long product is the only\n one that can lose catastrophic amounts of information, it's the native long\n product that must have overflowed.\n\n */\n\n /*\n\n var longprod = (long)((unsigned long)a * b);\n double doubleprod = (double)a * (double)b;\n double doubled_longprod = (double)longprod;\n\n if (doubled_longprod == doubleprod) {\n return 0;\n }\n\n const double diff = doubled_longprod - doubleprod;\n const double absdiff = diff >= 0.0 ? diff : -diff;\n const double absprod = doubleprod >= 0.0 ? doubleprod : -doubleprod;\n\n if (32.0 * absdiff <= absprod) {\n return 0;\n }\n\n return 1;\n */\n return 0\n}\n\nfunction long_add_would_overflow(a, b){\n return (a > 0n) ? (b > BigInt(LONG_MAX) - a) : (b < BigInt(LONG_MIN) - a);\n}\n\nfunction PyLong_CheckExact(n){\n return typeof n == 'number' || n.__class__ === $B.long_int\n}\n\n/*\n The default implementation of dl_mul() depends on the C math library\n having an accurate fma() function as required by \u00a7 7.12.13.1 of the\n C99 standard.\n\n The UNRELIABLE_FMA option is provided as a slower but accurate\n alternative for builds where the fma() function is found wanting.\n The speed penalty may be modest (17% slower on an Apple M1 Max),\n so don't hesitate to enable this build option.\n\n The algorithms are from the T. J. 
Dekker paper:\n A Floating-Point Technique for Extending the Available Precision\n https://csclub.uwaterloo.ca/~pbarfuss/dekker1971.pdf\n*/\n\nfunction dl_split(x) {\n // Dekker (5.5) and (5.6).\n var t = x * 134217729.0; // Veltkamp constant = 2.0 ** 27 + 1\n var hi = t - (t - x);\n var lo = x - hi;\n return {hi, lo};\n}\n\nfunction dl_mul(x, y){\n // Dekker (5.12) and mul12()\n var xx = dl_split(x);\n var yy = dl_split(y);\n var p = xx.hi * yy.hi;\n var q = xx.hi * yy.lo + xx.lo * yy.hi;\n var z = p + q;\n var zz = p - z + q + xx.lo * yy.lo;\n return {hi: z, lo: zz};\n}\n\nfunction dl_sum(a, b){\n /* Algorithm 3.1 Error-free transformation of the sum */\n var x = a + b;\n var z = x - a;\n var y = (a - (x - z)) + (b - z);\n return {hi: x, lo: y};\n}\n\nfunction tl_fma(x, y, total){\n /* Algorithm 5.10 with SumKVert for K=3 */\n var pr = dl_mul(x, y);\n var sm = dl_sum(total.hi, pr.hi);\n var r1 = dl_sum(total.lo, pr.lo);\n var r2 = dl_sum(r1.hi, sm.lo);\n return {hi: sm.hi, lo: r2.hi, tiny: total.tiny + r1.lo + r2.lo}\n}\n\nfunction tl_to_d(total){\n var last = dl_sum(total.lo, total.hi);\n return total.tiny + last.lo + last.hi;\n}\n\nfunction sumprod(p, q){\n var $ = $B.args('sumprod', 2, {p: null, q: null}, ['p', 'q'],\n arguments, {}, null, null)\n var p_i = NULL,\n q_i = NULL,\n term_i = NULL,\n new_total = NULL;\n var p_it, q_it, total;\n var p_next, q_next;\n var p_stopped = false, q_stopped = false;\n var int_path_enabled = true,\n int_total_in_use = false;\n var flt_path_enabled = true,\n flt_total_in_use = false;\n var int_total = 0n;\n var flt_total = tl_zero;\n\n p_it = $B.make_js_iterator(p);\n q_it = $B.make_js_iterator(q);\n total = 0\n p_next = p_it.next\n q_next = q_it.next\n while (1) {\n var finished;\n p_i = p_it.next()\n if (p_i.done) {\n /*\n if (PyErr_Occurred()) {\n if (!PyErr_ExceptionMatches(PyExc_StopIteration)) {\n goto err_exit;\n }\n PyErr_Clear();\n }\n */\n p_stopped = true;\n }else{\n p_i = p_i.value\n }\n q_i = q_it.next()\n if (q_i.done) {\n /*\n if (PyErr_Occurred()) {\n if (!PyErr_ExceptionMatches(PyExc_StopIteration)) {\n goto err_exit;\n }\n PyErr_Clear();\n }\n */\n q_stopped = true;\n }else{\n q_i = q_i.value\n }\n if (p_stopped != q_stopped) {\n throw _b_.ValueError.$factory(\"Inputs are not the same length\");\n }\n\n finished = p_stopped & q_stopped;\n\n if (int_path_enabled) {\n\n if (! 
finished && PyLong_CheckExact(p_i) & PyLong_CheckExact(q_i)) {\n var overflow;\n var int_p, int_q, int_prod;\n\n int_p = _b_.int.$to_bigint($B.PyNumber_Index(p_i))\n overflow = int_p > LONG_MAX || int_p < LONG_MIN\n\n if (overflow) {\n finalize_int_path()\n }\n int_q = _b_.int.$to_bigint($B.PyNumber_Index(q_i));\n overflow = int_q > LONG_MAX || int_q < LONG_MIN\n if (overflow) {\n finalize_int_path()\n }\n if (_check_long_mult_overflow(int_p, int_q)) {\n finalize_int_path()\n }\n int_prod = int_p * int_q;\n if (long_add_would_overflow(int_total, int_prod)) {\n finalize_int_path()\n }\n if(int_path_enabled){\n int_total = int_total + int_prod;\n int_total_in_use = true;\n continue;\n }\n }\n\n if(finished){\n finalize_int_path()\n }\n\n function finalize_int_path(){\n // We're finished, overflowed, or have a non-int\n int_path_enabled = false;\n if (int_total_in_use) {\n term_i = _b_.int.$int_or_long(int_total);\n new_total = $B.rich_op('__add__', total, term_i);\n total = new_total\n new_total = NULL;\n int_total = 0; // An ounce of prevention, ...\n int_total_in_use = false;\n }\n }\n }\n\n if (flt_path_enabled) {\n\n if (!finished) {\n var flt_p, flt_q;\n var p_type_float = p_i.__class__ === _b_.float;\n var q_type_float = q_i.__class__ === _b_.float\n if(p_type_float && q_type_float) {\n flt_p = p_i;\n flt_q = q_i;\n }else if (p_type_float && (PyLong_CheckExact(q_i) ||\n typeof q_i == 'boolean')){\n /* We care about float/int pairs and int/float pairs because\n they arise naturally in several use cases such as price\n times quantity, measurements with integer weights, or\n data selected by a vector of bools. */\n flt_p = p_i\n flt_q = _b_.int.$int_value(q_i)\n }else if(q_type_float && (PyLong_CheckExact(p_i) ||\n typeof p_i == 'boolean')) {\n flt_q = q_i\n flt_p = _b_.int.$int_value(p_i)\n }else{\n finalize_flt_path()\n }\n if(flt_path_enabled){\n var new_flt_total = tl_fma(flt_p.value, flt_q.value, flt_total);\n if (isfinite(new_flt_total.hi)) {\n flt_total = new_flt_total;\n flt_total_in_use = true;\n continue;\n }\n }\n }\n if(finished){\n finalize_flt_path()\n }\n\n function finalize_flt_path(){\n // We're finished, overflowed, have a non-float, or got a non-finite value\n flt_path_enabled = false;\n if(flt_total_in_use){\n term_i = $B.fast_float(tl_to_d(flt_total));\n if (term_i == NULL) {\n err_exit()\n }\n new_total = $B.rich_op('__add__', total, term_i);\n total = new_total\n new_total = NULL\n flt_total = tl_zero;\n flt_total_in_use = false;\n }\n }\n }\n\n if (finished) {\n return total\n }\n term_i = $B.rich_op('__mul__', p_i, q_i);\n new_total = $B.rich_op('__add__', total, term_i);\n total = new_total\n new_total = NULL;\n }\n\n}\n\n\n\nfunction tan(x) {\n $B.check_nb_args('tan', 1, arguments)\n $B.check_no_kw('tan', x)\n\n var y = float_check(x)\n return _b_.float.$factory(Math.tan(y))\n}\n\nfunction tanh(x) {\n $B.check_nb_args('tanh', 1, arguments)\n $B.check_no_kw('tanh', x)\n\n var y = float_check(x)\n if(Math.tanh !== undefined){return _b_.float.$factory(Math.tanh(y))}\n return _b_.float.$factory((Math.pow(Math.E, y) - Math.pow(Math.E, -y))/\n (Math.pow(Math.E, y) + Math.pow(Math.E, -y)))\n}\n\nconst tau = $B.fast_float(2 * Math.PI)\n\nfunction trunc(x) {\n $B.check_nb_args('trunc', 1, arguments)\n $B.check_no_kw('trunc', x)\n\n try{return $B.$getattr(x, '__trunc__')()}catch(err){}\n var x1 = float_check(x)\n if(!isNaN(parseFloat(x1)) && isFinite(x1)){\n if(Math.trunc !== undefined){return _b_.int.$factory(Math.trunc(x1))}\n if(x1 > 0){return 
_b_.int.$factory(Math.floor(x1))}\n return _b_.int.$factory(Math.ceil(x1)) // x1 < 0\n }\n throw _b_.ValueError.$factory(\n 'object is not a number and does not contain __trunc__')\n}\n\nfunction ulp(){\n var $ = $B.args(\"ulp\", 1, {x: null}, ['x'], arguments, {}, null, null),\n x = $.x\n if($B.$isinstance(x, _b_.float)){\n if(_b_.float.$funcs.isinf(x)){\n return _mod.inf\n }else if(_b_.float.$funcs.isnan(x)){\n return _mod.nan\n }\n }\n if(typeof x == \"number\"){\n return x >= 0 ? $B.fast_float(nextUp(x) - x) :\n $B.fast_float(x - (-nextUp(-x)))\n }else if($B.$isinstance(x, $B.long_int)){\n x = Number(_b_.int.$to_bigint(x))\n return x > 0 ? $B.fast_float(nextUp(x) - x) :\n $B.fast_float(x - (-nextUp(-x)))\n }else{\n if($B.rich_comp('__ge__', x, 0)){\n return $B.rich_op('__sub__', $B.fast_float(nextUp(x.value)), x)\n }else{\n var neg_x = $B.$call($B.$getattr(x, \"__neg__\"))()\n return $B.rich_op('__sub__', x,\n $B.$call($B.$getattr($B.fast_float(nextUp(neg_x.value)), '__neg__'))())\n }\n }\n}\n\nvar _mod = {\n acos,\n acosh,\n asin,\n asinh,\n atan,\n atan2,\n atanh,\n cbrt,\n ceil,\n comb,\n copysign,\n cos,\n cosh,\n degrees,\n dist,\n e,\n erf,\n erfc,\n exp,\n exp2,\n expm1,\n fabs,\n factorial,\n floor,\n fmod,\n frexp,\n fsum,\n gamma,\n gcd,\n hypot,\n inf,\n isclose,\n isfinite,\n isinf,\n isnan,\n isqrt,\n lcm,\n ldexp,\n lgamma,\n log,\n log1p,\n log2,\n log10,\n modf,\n nan,\n nextafter,\n perm,\n pi,\n pow,\n prod,\n radians,\n remainder,\n sin,\n sinh,\n sqrt,\n sumprod,\n tan,\n tanh,\n tau,\n trunc,\n ulp\n}\n\nfor(var $attr in _mod){\n if(typeof _mod[$attr] === 'function'){\n _mod[$attr].__class__ = $B.builtin_function_or_method\n }\n}\n\n$B.addToImported('math', _mod)\n\n})(__BRYTHON__)\n"], "python_re": [".js", "// Regular expression\n(function($B){\n\nvar _debug = {value: 0}\n\nvar _b_ = $B.builtins\n\nvar MAXGROUPS = 2147483647,\n MAXREPEAT = 2147483648\n\nvar word_gcs = ['Ll', 'Lu', 'Lm', 'Lt', 'Lo',\n 'Nd',\n 'Mc', 'Me', 'Mn',\n 'Pc']\n\nfunction is_word(cp){\n if((cp >= 97 && cp <= 122) // a-z\n || (cp >= 65 && cp <= 90) // A-Z\n ){\n return true\n }\n for(var word_gc of word_gcs){\n if($B.in_unicode_category(word_gc, cp)){\n return true\n }\n }\n return false\n}\n\nvar ascii_word = {}\n\nfor(var cp = 0; cp <= 127; cp++){\n if(is_word(cp)){\n ascii_word[cp] = true\n }\n}\n\nfunction is_ascii_word(cp){\n return ascii_word[cp] !== undefined\n}\n\nfunction is_digit(cp){\n if(cp >= 48 && cp <= 57){\n return true\n }\n return $B.in_unicode_category('Nd', cp)\n}\n\nfunction is_ascii_digit(cp){\n return cp <= 127 && is_digit(cp)\n}\n\nvar $error_2 = {\n $name: \"error\",\n $qualname: \"error\",\n $is_class: true,\n __module__: \"re\"\n}\n\nvar error = $B.make_class(\"error\",\n function(message){\n return {\n __class__: error,\n msg: message,\n args: $B.fast_tuple([]),\n __cause__: _b_.None,\n __context__: _b_.None,\n __suppress_context__: false\n }\n })\nerror.__bases__ = [_b_.Exception, _b_.object]\nerror.__mro__ = [_b_.Exception, _b_.BaseException, _b_.object]\n\nerror.__str__ = function(self){\n var s = self.msg + ' at position ' + self.pos\n if(self.lineno > 1){\n s += ` (line ${self.lineno}, column ${self.colno})`\n }\n return s\n}\n\n$B.set_func_names(error, \"re\")\n\nfunction $last(t){\n return t[t.length - 1]\n}\n\nfunction fail(message, pos, pattern){\n var err = error.$factory(message)\n err.msg = message\n err.pos = pos\n if(pattern){\n err.pattern = pattern.py_obj // Python object passed to compile()\n err.lineno = 1\n var linestart = 0\n for(var i = 0, len 
= pattern.string.length; i < pos; i++){\n if(pattern.string[i] == '\\n'){\n err.lineno++\n linestart = i + 1\n }\n }\n err.colno = pos - linestart + 1\n }\n throw err\n}\n\nfunction warn(klass, message, pos, text){\n var frame = $B.frame_obj.frame,\n file = frame[3].__file__,\n src = $B.file_cache[file]\n if(text === undefined){\n var lineno = frame[1].$lineno\n var lines = src.split('\\n'),\n line = lines[lineno - 1]\n }else{\n if(Array.isArray(text)){\n text = from_codepoint_list(text)\n }\n var lineno = 1,\n line_start = 0\n for(var i = 0; i < pos; i++){\n if(text[i] == '\\n'){\n lineno++\n line_start = i + 1\n }\n }\n var line_end = text.substr(line_start).search('\\n'),\n line\n if(line_end == -1){\n line = text.substr(line_start)\n }else{\n line = text.substr(line_start, line_end)\n }\n var col_offset = pos - line_start\n }\n var warning = klass.$factory(message)\n warning.pos = pos\n warning.args[1] = [file, lineno, col_offset, lineno, col_offset,\n line]\n warning.filename = file\n warning.lineno = warning.end_lineno = lineno\n warning.offset = warning.end_offset = col_offset\n warning.line = line\n // module _warning is in builtin_modules.js\n $B.imported._warnings.warn(warning)\n}\n\nfunction chr(i){\n if(i < 0 || i > 1114111){\n throw _b_.ValueError.$factory('Outside valid range')\n }else if(i >= 0x10000 && i <= 0x10FFFF){\n var code = (i - 0x10000)\n return String.fromCodePoint(0xD800 | (code >> 10)) +\n String.fromCodePoint(0xDC00 | (code & 0x3FF))\n }else{\n return String.fromCodePoint(i)\n }\n}\n\nfunction ord(char){\n return char.charCodeAt(0)\n}\n\nconst LETTERS = {\n b: ord('b'),\n N: ord('N'),\n P: ord('P'),\n u: ord('u'),\n U: ord('U'),\n x: ord('x')\n}\n\nconst PARENTH_OPEN = ord('('),\n PARENTH_CLOSE = ord(')'),\n BRACKET_OPEN = ord('['),\n BRACKET_CLOSE = ord(']'),\n BRACE_OPEN = ord('{'),\n BRACE_CLOSE = ord('}'),\n EQUAL = ord('='),\n SUP = ord('>'),\n INF = ord('<'),\n MINUS = ord('-'),\n PLUS = ord('+'),\n OR = ord('|'),\n DOT = ord('.'),\n QUESTION_MARK = ord('?'),\n EXCLAMATION_MARK = ord('!'),\n COLON = ord(':'),\n BACKSLASH = ord('\\\\'),\n DOLLAR = ord('$'),\n CARET = ord('^'),\n LINEFEED = ord('\\n')\n\n// pattern tokenizer\n\nfunction is_ascii(name){\n return /^[\\x00-\\x7F]*$/.test(name)\n}\n\nfunction open_unicode_db(){\n if($B.unicodedb === undefined){\n var xhr = new XMLHttpRequest\n xhr.open(\"GET\",\n $B.brython_path + \"unicode.txt?\" + (new Date()).getTime(), false)\n xhr.onreadystatechange = function(){\n if(this.readyState == 4){\n if(this.status == 200){\n $B.unicodedb = this.responseText\n }else{\n console.log(\n \"Warning - could not load unicode.txt\")\n }\n }\n }\n xhr.send()\n }\n}\n\nfunction validate_named_char(description, pos){\n // validate that \\N{} is in the Unicode db\n // Load unicode table if not already loaded\n if(description.length == 0){\n fail(\"missing character name\", pos)\n }\n open_unicode_db()\n if($B.unicodedb !== undefined){\n var re = new RegExp(\"^([0-9A-F]+);\" +\n description.toUpperCase() + \";.*$\", \"m\")\n search = re.exec($B.unicodedb)\n if(search === null){\n fail(`undefined character name '${description}'`, pos)\n }\n return parseInt(search[1], 16)\n }else{\n fail(\"could not load unicode.txt\", pos)\n }\n}\n\nfunction validate_group_name(sname, pos, is_bytes){\n // sname is an instance of StringObj\n if(! _b_.str.isidentifier(sname.string)){\n fail(`bad character in group name '${sname.string}'`, pos + 4)\n }\n if(is_bytes && ! 
is_ascii(sname.string)){\n var s = _b_.bytes.decode(_b_.bytes.$factory(sname.codepoints),\n 'ascii', 'backslashreplace')\n warn(_b_.DeprecationWarning,\n `bad character in group name '${s}' at position ${pos + 4}`)\n }\n return true\n}\n\nfunction validate_group_num(so, pos){\n var s = so.string\n if(s.match(/^\\d+$/)){\n return true\n }\n try{\n var num = _b_.int.$factory(s)\n warn(_b_.DeprecationWarning,\n `bad character in group name '${s}' at position ${pos + 3}`,\n pos + 3, s)\n so.string = num + ''\n return true\n }catch(err){\n return false\n }\n}\n\nfunction validate_num_or_name(so, pos, is_bytes){\n return validate_group_num(so, pos, is_bytes) ||\n validate_group_name(so, pos - 1, is_bytes)\n}\n\nvar character_classes = {\n in_charset: to_codepoint_list('dDsSwW'),\n in_re: to_codepoint_list('AbBdDsSwWZ')\n}\n\nfunction escaped_char(args){\n var cps = args.codepoints,\n pos = args.pos,\n in_charset = args.in_charset,\n is_bytes = args.is_bytes // if pattern is bytes\n var special = cps[pos + 1]\n if(special === undefined){\n fail('bad escape (end of pattern)', pos)\n }\n var key = in_charset ? 'in_charset' : 'in_re'\n if(in_charset && special == LETTERS.b){\n // Inside a character range, \\b represents the backspace character,\n // for compatibility with Python\u2019s string literals.\n return '\\b'\n }\n if(character_classes[key].indexOf(special) > -1){\n return new CharacterClass(pos, special, 2)\n }else if(special == LETTERS.N && ! is_bytes){\n if(cps[pos + 2] != BRACE_OPEN){\n fail('missing {', pos)\n }\n var i = pos + 3,\n description = []\n while(i < cps.length){\n if(cps[i] == BRACE_CLOSE){\n break\n }\n description.push(cps[i])\n i++\n }\n if(description.length == 0){\n fail(\"missing character name\", pos)\n }\n if(i == cps.length){\n fail(\"missing }, unterminated name\", pos)\n }\n var cp = validate_named_char(from_codepoint_list(description), pos)\n return {\n type: 'N',\n ord: cp,\n char: chr(cp),\n length: i - pos + 1\n }\n }else if(special == LETTERS.x){\n // \\xhh = character with hex value hh\n var rest = from_codepoint_list(cps.slice(pos + 2)),\n mo = /^[0-9a-fA-F]{0,2}/.exec(rest),\n hh = mo ? mo[0] : ''\n if(mo && mo[0].length == 2){\n var cp = parseInt(mo[0], 16)\n return {\n type: 'x',\n ord: cp,\n char: chr(cp),\n length: 2 + mo[0].length\n }\n }\n fail('incomplete escape \\\\x' + hh, pos)\n }else if(special == LETTERS.u){\n // \\uxxxx = character with 16-bit hex value xxxx\n var rest = from_codepoint_list(cps.slice(pos + 2)),\n mo = /^[0-9a-fA-F]{0,4}/.exec(rest),\n xx = mo ? mo[0] : ''\n if(mo && mo[0].length == 4){\n var cp = parseInt(mo[0], 16)\n return {\n type: 'u',\n ord: cp,\n char: chr(cp),\n length: 2 + mo[0].length\n }\n }\n fail('incomplete escape \\\\u' + xx, pos)\n }else if(special == LETTERS.U){\n // \\Uxxxxxxxx = character with 32-bit hex value xxxxxxxx\n var rest = from_codepoint_list(cps.slice(pos + 2)),\n mo = /^[0-9a-fA-F]{0,8}/.exec(rest),\n xx = mo ? 
mo[0] : ''\n if(mo && mo[0].length == 8){\n var cp = parseInt(mo[0], 16)\n if(cp > 0x10FFFF){\n fail(`bad escape \\\\U${mo[0]}`, pos)\n }\n return {\n type: 'U',\n ord: cp,\n char: chr(cp),\n length: 2 + mo[0].length\n }\n }\n fail('incomplete escape \\\\U' + xx, pos)\n }else{\n // octal ?\n // If the first digit of number is 0, or number is 3 octal digits\n // long, it will not be interpreted as a group match, but as the\n // character with octal value number\n var rest = from_codepoint_list(cps.slice(pos + 1)),\n mo = /^[0-7]{3}/.exec(rest)\n if(in_charset){\n try{\n var res = $B.test_escape(rest, -1)\n if(res){\n return {\n type: 'u',\n ord: res[0].codePointAt(0),\n char: res[0],\n length: res[1]\n }\n }\n }catch(err){\n // ignore\n }\n }\n if(mo == null){\n mo = /^0[0-7]*/.exec(rest)\n }\n if(mo){\n var octal_value = parseInt(mo[0], 8)\n if(octal_value > 0o377){\n fail(`octal escape value \\\\` +\n `${mo[0]} outside of range 0-0o377`, pos)\n }\n return {\n type: 'o',\n ord: octal_value,\n char: chr(octal_value),\n length: 1 + mo[0].length\n }\n }\n var mo = /^\\d{1,2}/.exec(rest) // backref is at most 99\n if(mo){\n return {\n type: 'backref',\n value: parseInt(mo[0]),\n length: 1 + mo[0].length\n }\n }\n var trans = {a: chr(7), f: '\\f', n: '\\n', r: '\\r', t: '\\t', v: '\\v'},\n res = trans[chr(special)]\n if(res){\n return ord(res)\n }\n if(chr(special).match(/[a-zA-Z]/)){\n fail(\"bad escape \\\\\" + chr(special), pos)\n }else{\n return special\n }\n }\n}\n\nfunction check_character_range(t, positions){\n // Check if last 2 items in t are a valid character range\n var start = t[t.length - 2],\n end = t[t.length - 1]\n if(start instanceof CharacterClass || end instanceof CharacterClass){\n fail(`bad character range ${start}-${end}`,\n positions[positions.length - 2])\n }else if(end < start){\n fail(`bad character range ${start}-${end}`,\n positions[positions.length - 2])\n }\n t.splice(t.length - 2, 2, {\n type: 'character_range',\n start: start,\n end: end,\n ord: [start.ord, end.ord]\n })\n}\n\nfunction parse_character_set(text, pos, is_bytes){\n // Parse character set starting at position \"pos\" in \"text\"\n // pos is the position of the leading \"[\"\n var start = pos,\n result = {items: []},\n positions = []\n pos++\n if(text[pos] == CARET){\n result.neg = true\n pos++\n }else if(text[pos] == BRACKET_CLOSE){\n // a leading ] is the character \"]\", not the set end\n result.items.push(']')\n positions.push(pos)\n pos++\n }else if(text[pos] == BRACKET_OPEN){\n // send FutureWarning\n warn(_b_.FutureWarning, \"Possible nested set\", pos, text)\n }\n var range = false\n while(pos < text.length){\n var cp = text[pos],\n char = chr(cp)\n if(char == ']'){\n if(pos == start + 2 && result.neg){\n // in \"[^]]\", the first ] is the character \"]\"\n result.items.push(']')\n }else{\n return [result, pos]\n }\n }\n if(char == '\\\\'){\n var escape = escaped_char({\n codepoints: text,\n pos,\n in_charset: true,\n is_bytes\n })\n if(typeof escape == \"number\"){\n var s = chr(escape)\n escape = {\n ord: escape,\n length: 2,\n toString: function(){\n return s\n }\n }\n }\n if(escape.type == \"num\"){\n // [\\9] is invalid\n fail(\"bad escape 1 \\\\\" +\n escape.value.toString()[0], pos)\n }\n result.items.push(escape)\n positions.push(pos)\n if(range){\n check_character_range(result.items, positions)\n }\n range = false\n pos += escape.length\n }else if(char == '-'){\n // Character range, or character \"-\"\n if(pos == start + 1 ||\n (result.neg && pos == start + 2) ||\n pos == text.length 
- 2 || // [a-]\n range ||\n (result.items.length > 0 &&\n result.items[result.items.length - 1].type ==\n \"character_range\")){\n result.items.push({\n ord: cp,\n char,\n toString: function(){\n return this.char\n }\n })\n if(text[pos + 1] == cp){\n warn(_b_.FutureWarning, \"Possible set difference\", pos, text)\n }\n pos++\n if(range){\n check_character_range(result.items, positions)\n }\n range = false\n }else{\n range = true\n if(text[pos + 1] == cp){\n warn(_b_.FutureWarning, \"Possible set difference\", pos, text)\n }\n pos++\n }\n }else{\n positions.push(pos)\n result.items.push({\n ord: cp,\n char,\n toString: function(){\n return this.char\n }\n })\n if(range){\n check_character_range(result.items, positions)\n }\n range = false\n // FutureWarning for consecutive \"&\", \"|\" or \"~\"\n if(char == \"&\" && text[pos + 1] == cp){\n warn(_b_.FutureWarning, \"Possible set intersection\", pos, text)\n }else if(char == \"|\" && text[pos + 1] == cp){\n warn(_b_.FutureWarning, \"Possible set union\", pos, text)\n }else if(char == \"~\" && text[pos + 1] == cp){\n warn(_b_.FutureWarning, \"Possible set symmetric difference\",\n pos, text)\n }\n pos++\n }\n }\n fail(\"unterminated character set\", start)\n}\n\nfunction* tokenize(pattern, type, _verbose){\n // pattern is a list of codepoints\n var is_bytes = type == \"bytes\"\n // verbose_stack is the stack of verbose state for each group in the regex\n var verbose_stack = [_verbose],\n verbose = _verbose,\n parenth_pos\n var pos = 0\n while(pos < pattern.length){\n var cp = pattern[pos],\n char = String.fromCharCode(cp)\n if(verbose){\n // current group is in verbose mode\n if(char == \"#\"){\n // skip until next line feed\n while(pos < pattern.length && pattern[pos] != 10){\n pos++\n }\n pos++\n continue\n }else{\n while(pos < pattern.length &&\n [9, 10, 11, 12, 13, 32].indexOf(pattern[pos]) > -1){\n pos++\n }\n }\n cp = pattern[pos]\n if(cp === undefined){\n break\n }\n char = String.fromCharCode(cp)\n if(char == '#'){\n continue\n }\n }\n if(char == '('){\n parenth_pos = pos\n if(pattern[pos + 1] == QUESTION_MARK){\n if(pattern[pos + 2] == LETTERS.P){\n if(pattern[pos + 3] == INF){\n var name = [],\n i = pos + 4\n while(i < pattern.length){\n if(pattern[i] == SUP){\n break\n }else if(pattern[i] == PARENTH_CLOSE){\n fail(\"missing >, unterminated name\", pos)\n }\n name.push(pattern[i])\n i++\n }\n var sname = StringObj.from_codepoints(name)\n validate_group_name(sname, pos, is_bytes)\n name = sname\n if(i == pattern.length){\n fail(\"missing >, unterminated name\", pos)\n }\n yield new Group(pos, {type: 'name_def', value: name})\n verbose_stack.push(verbose)\n pos = i + 1\n continue\n }else if(pattern[pos + 3] == EQUAL){\n var name = [],\n i = pos + 4\n while(i < pattern.length){\n if(pattern[i] == PARENTH_CLOSE){\n break\n }\n name.push(pattern[i])\n i++\n }\n name = StringObj.from_codepoints(name)\n validate_group_name(name, pos, is_bytes)\n if(i == pattern.length){\n fail(\"missing ), unterminated name\", pos)\n }\n yield new BackReference(pos, 'name', name.string)\n pos = i + 1\n continue\n }else if(pattern[pos + 3] === undefined){\n fail(\"unexpected end of pattern\", pos)\n }else{\n fail(\"unknown extension ?P\" + chr(pattern[pos + 3]), pos)\n }\n }else if(pattern[pos + 2] == PARENTH_OPEN){\n var ref = [],\n i = pos + 3\n while(i < pattern.length){\n if(pattern[i] == PARENTH_CLOSE){\n break\n }\n ref.push(pattern[i])\n i++\n }\n var sref = StringObj.from_codepoints(ref)\n if(sref.string.match(/^\\d+$/)){\n ref = 
parseInt(sref.string)\n }else{\n validate_num_or_name(sref, pos, is_bytes)\n ref = sref.string\n }\n if(i == pattern.length){\n fail(\"missing ), unterminated name\", pos)\n }\n yield new ConditionalBackref(pos, ref)\n pos = i + 1\n continue\n }else if(pattern[pos + 2] == EQUAL){\n // (?=...) : lookahead assertion\n yield new Group(pos, {type: 'lookahead_assertion'})\n verbose_stack.push(verbose)\n pos += 3\n continue\n }else if(pattern[pos + 2] == EXCLAMATION_MARK){\n // (?!...) : negative lookahead assertion\n yield new Group(pos, {type: 'negative_lookahead_assertion'})\n verbose_stack.push(verbose)\n pos += 3\n continue\n }else if(from_codepoint_list(pattern.slice(pos + 2, pos + 4)) == ' -1){\n if(pattern[pos + 2] == MINUS){\n var on_flags = [],\n has_off = true,\n off_flags = []\n pos += 3\n }else{\n var on_flags = [chr(pattern[pos + 2])],\n has_off = false,\n off_flags = [],\n auL = auL_flags.indexOf(pattern[pos + 2]) > -1 ?\n 1 : 0,\n closed = false\n pos += 3\n while(pos < pattern.length){\n if(flags.indexOf(pattern[pos]) > -1){\n if(auL_flags.indexOf(pattern[pos]) > -1){\n auL++\n if(auL > 1){\n fail(\"bad inline flags: flags 'a', 'u'\" +\n \" and 'L' are incompatible\", pos)\n }\n }\n on_flags.push(chr(pattern[pos]))\n pos++\n }else if(pattern[pos] == MINUS){\n has_off = true\n closed = true\n pos++\n break\n }else if(String.fromCharCode(pattern[pos]).\n match(/[a-zA-Z]/)){\n fail(\"unknown flag\", pos)\n }else if(pattern[pos] == PARENTH_CLOSE){\n closed = true\n break\n }else if(pattern[pos] == COLON){\n yield new Group(pos, {name: \"Group\", type: \"flags\"})\n verbose_stack.push(verbose)\n closed = true\n break\n }else{\n fail(\"missing -, : or )\", pos)\n }\n }\n if(! closed){\n fail(\"missing -, : or )\", pos)\n }\n }\n if(has_off){\n while(pos < pattern.length){\n if(flags.indexOf(pattern[pos]) > -1){\n if(auL_flags.indexOf(pattern[pos]) > -1){\n fail(\"bad inline flags: cannot turn off \" +\n \"flags 'a', 'u' and 'L'\", pos)\n }\n if(on_flags.indexOf(chr(pattern[pos])) > -1){\n fail(\"bad inline flags: flag turned on and off\", pos)\n }\n off_flags.push(chr(pattern[pos]))\n pos++\n }else if(pattern[pos] == COLON){\n yield new Group(pos, {name: \"Group\", type: \"flags\"})\n verbose_stack.push(verbose)\n break\n }else if(String.fromCharCode(pattern[pos]).\n match(/[a-zA-Z]/)){\n fail(\"unknown flag\", pos)\n }else if(off_flags.length == 0){\n fail(\"missing flag\", pos)\n }else{\n fail(\"missing :\", pos)\n }\n }\n if(off_flags.length == 0){\n fail(\"missing flag\", pos)\n }\n }\n if(has_off && pattern[pos] != COLON){\n fail(\"missing :\", pos)\n }\n if(on_flags.length == 0 && off_flags.length == 0){\n fail(\"missing flag\", pos)\n }\n var set_flags = new SetFlags(flags_start,\n {on_flags, off_flags})\n\n yield set_flags\n // reset verbose\n if(on_flags.indexOf('x') > -1){\n verbose = true\n verbose_stack.push(verbose)\n }\n if(off_flags.indexOf('x') > -1){\n verbose = false\n }\n if(! 
closed){\n node = set_flags\n }\n pos++\n }else if(pattern[pos + 2] == ord('#')){\n pos += 3\n while(pos < pattern.length){\n if(pattern[pos] == PARENTH_CLOSE){\n break\n }\n pos++\n }\n if(pos == pattern.length){\n fail(\"missing ), unterminated comment\", pos)\n }\n pos++\n continue\n }else{\n fail(\"unknown extension ?\" + _b_.chr(pattern[pos + 2]),\n pos)\n }\n }else{\n yield new Group(pos)\n verbose_stack.push(verbose)\n pos++\n }\n }else if(cp == PARENTH_CLOSE){\n yield new GroupEnd(pos)\n verbose_stack.pop()\n verbose = $last(verbose_stack)\n pos++\n }else if(cp == BACKSLASH){\n var escape = escaped_char({codepoints: pattern, pos, is_bytes})\n if(escape instanceof CharacterClass){\n yield escape\n pos += escape.length\n }else if(escape.char !== undefined){\n yield new Char(pos, escape.ord)\n pos += escape.length\n }else if(escape.type == \"backref\"){\n var len = escape.length\n if(escape.value.length > 2){\n escape.value = escape.value.substr(0, 2)\n len = 2\n }\n yield new BackReference(pos, \"num\", escape.value)\n pos += len\n }else if(typeof escape == \"number\"){\n // eg \"\\.\"\n var esc = new Char(pos, escape)\n esc.escaped = true\n yield esc\n pos += 2\n }else{\n yield new Char(pos, escape)\n pos += escape.length\n }\n }else if(cp == BRACKET_OPEN){\n // Set of characters\n var set,\n end_pos\n [set, end_pos] = parse_character_set(pattern, pos, is_bytes)\n yield new CharacterSet(pos, set)\n pos = end_pos + 1\n }else if('+?*'.indexOf(char) > -1){\n yield new Repeater(pos, char)\n pos++\n }else if(cp == BRACE_OPEN){\n var reps = /\\{(\\d*)((,)(\\d*))?\\}/.exec(\n from_codepoint_list(pattern.slice(pos)))\n if(reps && reps[0] != '{}'){\n if(reps[1] == \"\"){\n var limits = [0]\n }else{\n var limits = [parseInt(reps[1])]\n }\n if(reps[4] !== undefined){\n if(reps[4] == \"\"){\n var max = Number.POSITIVE_INFINITY\n }else{\n var max = parseInt(reps[4])\n }\n limits.push(max)\n }\n yield new Repeater(pos, limits)\n pos += reps[0].length\n }else if(pattern[pos + 1] == BRACE_CLOSE){\n // {} is the characters \"{\" and \"}\"\n yield new Char(pos, BRACE_OPEN)\n pos++\n }else{\n yield new Char(pos, BRACE_OPEN)\n pos++\n }\n }else if(cp == OR){\n yield new Or(pos)\n pos++\n }else if(cp == DOT){\n yield new CharacterClass(pos, cp, 1)\n pos++\n }else if(cp == CARET){\n yield new StringStart(pos)\n pos++\n }else if(cp == DOLLAR){\n yield new StringEnd(pos)\n pos++\n }else{\n yield new Char(pos, cp)\n pos++\n }\n }\n}\n\nfunction transform_repl(data, pattern){\n // data.repl is a StringObj instance\n var repl = data.repl.string\n repl = repl.replace(/\\\\n/g, '\\n')\n repl = repl.replace(/\\\\r/g, '\\r')\n repl = repl.replace(/\\\\t/g, '\\t')\n repl = repl.replace(/\\\\b/g, '\\b')\n repl = repl.replace(/\\\\v/g, '\\v')\n repl = repl.replace(/\\\\f/g, '\\f')\n repl = repl.replace(/\\\\a/g, '\\x07')\n // detect backreferences\n var pos = 0,\n escaped = false,\n br = false,\n repl1 = \"\",\n has_backref = false\n while(pos < repl.length){\n br = false\n if(repl[pos] == \"\\\\\"){\n escaped = ! 
escaped\n if(escaped){\n pos++\n continue\n }\n }else if(escaped){\n escaped = false\n var mo = /^\\d+/.exec(repl.substr(pos))\n if(mo){\n var cps = to_codepoint_list(repl)\n var escape = escaped_char({\n codepoints: cps,\n pos: pos - 1,\n is_bytes: cps.type == \"bytes\"\n })\n if(escape.type == \"o\"){\n if(escape.ord > 0o377){\n fail(`octal escape value \\\\${mo[0]} ` +\n \" outside of range 0-0o377\", pos)\n }\n repl1 += escape.char\n pos += escape.length - 1\n continue\n }else if(escape.type != \"backref\"){\n var group_num = mo[0].substr(0,\n Math.min(2, mo[0].length))\n fail(`invalid group reference ${group_num}`, pos)\n }else{\n // only keep first 2 digits\n var group_num = mo[0].substr(0,\n Math.min(2, mo[0].length))\n // check that pattern has the specified group num\n if(pattern.groups === undefined){\n throw _b_.AttributeError.$factory(\"$groups\")\n }\n if(pattern.groups[group_num] === undefined){\n fail(`invalid group reference ${group_num}`,\n pos)\n }else{\n mo[0] = group_num\n }\n }\n if(! has_backref){\n var parts = [repl.substr(0, pos - 1),\n parseInt(mo[0])]\n }else{\n parts.push(repl.substring(next_pos, pos - 1))\n parts.push(parseInt(mo[0]))\n }\n has_backref = true\n var next_pos = pos + mo[0].length\n br = true\n pos += mo[0].length\n }else if(repl[pos] == \"g\"){\n pos++\n if(repl[pos] != '<'){\n fail(\"missing <\", pos)\n }\n pos++\n mo = /(.*?)>/.exec(repl.substr(pos))\n if(mo){\n if(mo[1] == \"\"){\n pos += mo[0].length\n fail(\"missing group name\", pos - 1)\n }\n var group_name = mo[1]\n if(group_name == '0'){\n // The backreference \\g<0> substitutes in the entire\n // substring matched by the RE.\n }else if(/^\\d+$/.exec(group_name)){\n if(pattern.groups[group_name] === undefined){\n fail(`invalid group reference ${group_name}`,\n pos)\n }\n }else{\n try{\n var group_num = _b_.int.$factory(group_name)\n if(group_num < 0){\n fail(`bad character in group name ` +\n `'${group_name}' at position ${pos}`, pos)\n }\n warn(_b_.DeprecationWarning,\n `bad character in group name '${group_name}' ` +\n `at position ${pos}`)\n mo[1] = group_name = group_num + ''\n }catch(err){\n if(! _b_.str.isidentifier(group_name)){\n var cps = to_codepoint_list(group_name)\n if(! $B.is_XID_Start(cps[0])){\n fail(\"bad character in group name '\" +\n group_name + \"'\", pos)\n }else{\n for(cp of cps.slice(1)){\n if(! $B.is_XID_Continue(cp)){\n fail(\"bad character in group name '\" +\n group_name + \"'\", pos)\n }\n }\n }\n }else if(data.type == \"bytes\" && ! is_ascii(group_name)){\n var b = _b_.bytes.$factory(group_name, 'latin-1'),\n s = _b_.bytes.decode(b, 'ascii', 'backslashreplace')\n warn(_b_.DeprecationWarning,\n `bad character in group name '${s}'` +\n ` at position ${pos}`)\n }\n }\n if(pattern.groups[group_name] === undefined){\n throw _b_.IndexError.$factory(\n `unknown group name '${group_name}'`,\n pos)\n }\n }\n if(! has_backref){\n var parts = [repl.substr(0, pos - 3),\n mo[1]]\n }else{\n parts.push(repl.substring(next_pos, pos - 3))\n parts.push(mo[1])\n }\n has_backref = true\n var next_pos = pos + mo[0].length\n br = true\n pos = next_pos\n }else{\n if(repl.substr(pos).length > 0){\n fail(\"missing >, unterminated name\", pos)\n }else{\n fail(\"missing group name\", pos)\n }\n }\n }else{\n if(/[a-zA-Z]/.exec(repl[pos])){\n fail(\"unknown escape\", pos)\n }\n pos += repl[pos]\n }\n }\n if(! 
br){\n repl1 += repl[pos]\n pos ++\n }\n }\n data.repl1 = repl1\n if(has_backref){\n parts.push(repl.substr(next_pos))\n data.repl = function(bmo){\n var mo = bmo.mo,\n res = parts[0],\n groups = mo.$groups,\n s = mo.string,\n group,\n is_bytes = s.type == 'bytes'\n for(var i = 1, len = parts.length; i < len; i += 2){\n if(parts[i] == 0){\n var x = s.substring(mo.start, mo.end)\n if(is_bytes){\n x = _b_.bytes.decode(x, 'latin-1')\n }\n res += x\n }else if(groups[parts[i]] === undefined){\n if(mo.node.$groups[parts[i]] !== undefined){\n // group is defined in the RE, but didn't contribute\n // to the match\n // groups[parts[i]] = ''\n }else{\n // group is not defined in the RE\n pos++\n group_num = parts[i].toString().substr(0, 2)\n fail(`invalid group reference ${group_num}`, pos)\n }\n }else{\n group = groups[parts[i]]\n var x = s.substring(group.start, group.end)\n if(is_bytes){\n x = _b_.bytes.decode(x, 'latin-1')\n }\n res += x\n }\n res += parts[i + 1]\n }\n return res\n }\n }else{\n data.repl = new StringObj(repl)\n }\n return data\n}\n\n\n\nvar Flag = $B.make_class(\"Flag\",\n function(value){\n return {\n __class__: Flag,\n value\n }\n }\n)\n\nFlag.__and__ = function(self, other){\n if(other.__class__ === Flag){\n return Flag.$factory(self.value & other.value)\n }else if(typeof other == \"number\" || typeof other == \"boolean\"){\n return Flag.$factory(self.value & other)\n }\n return _b_.NotImplemented\n}\n\nFlag.__index__ = function(self){\n return self.value\n}\n\nFlag.__invert__ = function(self){\n return Flag.$factory(~self.value)\n}\n\nFlag.__eq__ = function(self, other){\n return self.value == other.value\n}\n\nFlag.__or__ = function(self, other){\n if(other.__class__ === Flag){\n return Flag.$factory(self.value | other.value)\n }else if(typeof other == \"number\" || typeof other == \"boolean\"){\n return Flag.$factory(self.value | other)\n }\n return _b_.NotImplemented\n}\n\nFlag.__rand__ = function(self, other){\n if(typeof other == \"number\" || $B.$isinstance(other, _b_.int)){\n if(other == 0){\n return false // Flag.$factory(self.value)\n }\n return self.value & other\n }\n return _b_.NotImplemented\n}\n\nFlag.__ror__ = function(self, other){\n if(typeof other == \"number\" || $B.$isinstance(other, _b_.int)){\n if(other == 0){\n return self.value\n }\n return self.value | other\n }\n return _b_.NotImplemented\n}\n\nFlag.__repr__ = Flag.__str__ = function(self){\n if(self.value == 0){\n return \"re.none\"\n }\n var inverted = self.value < 0\n\n var t = [],\n value = inverted ? ~self.value : self.value\n for(var flag in inline_flags){\n if(value & inline_flags[flag].value){\n t.push('re.' + flag_names[flag])\n value &= ~inline_flags[flag].value\n }\n }\n if(value > 0){\n t.push('0x' + value.toString(16))\n }\n var res = t.join('|')\n if(inverted){\n if(t.length > 1){\n return '~(' + res + ')'\n }else{\n return '~' + res\n }\n }\n return res\n}\n\nFlag.__xor__ = function(self, other){\n return Flag.$factory(self.value ^ other.value)\n}\n\n$B.set_func_names(Flag, \"re\")\n\nvar no_flag = {}\n\nvar Scanner = $B.make_class(\"Scanner\",\n function(pattern, string, pos, endpos){\n var $ = $B.args('__init__', 4,\n {pattern: null, string: null, pos: null, endpos:null},\n ['pattern', 'string', 'pos', 'endpos'],\n arguments, {pos: 0, endpos: _b_.None}, null, null),\n endpos = endpos === _b_.None ? 
$.string.length : endpos\n return {\n __class__: Scanner,\n $string: $.string,\n pattern: $.pattern,\n pos: $.pos,\n endpos\n }\n }\n)\n\nScanner.match = function(self){\n return Pattern.match(self.pattern, self.$string)\n}\n\nScanner.search = function(self){\n if(! self.$iterator){\n self.$iterator = module.finditer(self.pattern, self.$string)\n }\n // return last match\n var mo = _b_.None\n for(mo of self.$iterator.js_gen){\n // set mo\n }\n return mo\n}\n\nvar GroupIndex = $B.make_class(\"GroupIndex\",\n function(self, _default){\n var res = $B.empty_dict()\n res.__class__ = GroupIndex\n for(var key in self.$groups){\n if(isNaN(parseInt(key))){\n _b_.dict.$setitem(res, key, self.$groups[key].num)\n }\n }\n return res\n }\n)\nGroupIndex.__mro__ = [_b_.dict, _b_.object]\nGroupIndex.__setitem__ = function(){\n throw _b_.TypeError.$factory(\"read only\")\n}\n\n$B.set_func_names(GroupIndex, \"re\")\n\nvar Pattern = $B.make_class(\"Pattern\",\n function(pattern){\n var nb_groups = 0\n for(var key in pattern.groups){\n if(isFinite(key)){\n nb_groups++\n }\n }\n return {\n __class__: Pattern,\n pattern: pattern.text,\n groups: nb_groups,\n flags: pattern.flags,\n $groups: pattern.groups,\n $pattern: pattern\n }\n }\n)\n\nPattern.__copy__ = function(self){\n return self\n}\n\nPattern.__deepcopy__ = function(self){\n return self\n}\n\nPattern.__eq__ = function(self, other){\n if(other.$pattern && self.$pattern.type != other.$pattern.$type){\n // warn(_b_.BytesWarning, \"cannot compare str and bytes pattern\", 1)\n }\n return self.pattern == other.pattern &&\n self.flags.value == other.flags.value\n}\n\nPattern.__hash__ = function(self){\n // best effort ;-)\n return _b_.hash(self.pattern) + self.flags.value\n}\n\nPattern.__new__ = Pattern.$factory\n\nPattern.__reduce__ = function(self){\n return Pattern.__reduce_ex__(self, 4)\n}\n\nPattern.__reduce_ex__ = function(self, protocol){\n var res = _reconstructor,\n state = [self.__class__].concat(self.__class__.__mro__)\n var d = $B.empty_dict()\n _b_.dict.$setitem(d, 'pattern', self.pattern)\n _b_.dict.$setitem(d, 'flags', self.flags.value)\n state.push(d)\n return $B.fast_tuple([res, $B.fast_tuple(state)])\n}\n\nfunction _reconstructor(cls, base, state){\n var pattern = _b_.dict.$getitem(state, 'pattern'),\n flags = Flag.$factory(_b_.dict.$getitem(state, 'flags'))\n return module.compile(pattern, flags)\n}\n\nPattern.__repr__ = Pattern.__str__ = function(self){\n var text = self.$pattern.text,\n s = text\n if(self.$pattern.type == \"bytes\"){\n s = _b_.str.$factory(_b_.str.encode(s, 'latin-1'))\n }else{\n s = _b_.repr(s)\n }\n s = s.substr(0, 200)\n var res = `re.compile(${s}`,\n flags = self.$pattern.flags\n if(flags === no_flag){\n return res + ')'\n }\n // mask UNICODE flag\n if(flags.__class__ === Flag){\n // copy flag, otherwise U.value would become 0\n flags = Flag.$factory(flags.value)\n flags.value &= ~U.value\n }else if(typeof flags == \"number\"){\n flags &= ~U.value\n }\n if(flags != 0 && flags.value != 0){\n res += `, ${_b_.str.$factory(flags)}`\n }\n return res + ')'\n}\n\nPattern.findall = function(self){\n var iter = Pattern.finditer.apply(null, arguments).js_gen,\n res = []\n\n while(true){\n var next = iter.next()\n if(next.done){\n return res\n }\n var bmo = next.value,\n mo = bmo.mo,\n groups = MatchObject.groups(bmo)\n\n // replace None by the empty string\n for(var i = 0, len = groups.length; i < len; i++){\n groups[i] = groups[i] === _b_.None ? 
\"\" : groups[i]\n }\n if(groups.length > 0){\n if(groups.length == 1){\n res.push(groups[0])\n }else{\n res.push($B.fast_tuple(groups))\n }\n }else{\n res.push(mo.string.substring(mo.start, mo.end))\n }\n }\n}\n\nPattern.finditer = function(self){\n var $ = $B.args(\"finditer\", 4,\n {self: null, string: null, pos: null, endpos: null},\n 'self string pos endpos'.split(' '), arguments,\n {pos: 0, endpos: _b_.None}, null, null)\n var data = prepare({string: $.string})\n var endpos = $.endpos === _b_.None ? data.string.length : $.endpos\n return $B.generator.$factory(iterator)(self.$pattern, data.string,\n self.flags, $.string, $.pos, endpos)\n}\n\nPattern.fullmatch = function(self, string){\n var $ = $B.args(\"match\", 4,\n {self: null, string: null, pos: null, endpos: null},\n [\"self\", \"string\", \"pos\", \"endpos\"], arguments,\n {pos: 0, endpos: _b_.None}, null, null)\n if($.endpos === _b_.None){\n $.endpos = $.string.length\n }\n var data = prepare({string: $.string})\n if(self.$pattern.type != data.string.type){\n throw _b_.TypeError.$factory(\"not the same type for pattern \" +\n \"and string\")\n }\n var fullmatch_pattern = create_fullmatch_pattern($.self.$pattern)\n var mo = match(fullmatch_pattern, data.string, $.pos, $.endpos)\n if(mo && mo.end - mo.start == $.endpos - $.pos){\n return MatchObject.$factory(mo)\n }else{\n return _b_.None\n }\n}\n\nPattern.groupindex = {\n __get__: function(self){\n return GroupIndex.$factory(self)\n }\n}\n\nPattern.match = function(self, string){\n var $ = $B.args(\"match\", 4,\n {self: null, string: null, pos: null, endpos: null},\n [\"self\", \"string\", \"pos\", \"endpos\"], arguments,\n {pos: 0, endpos: _b_.None}, null, null)\n if($.endpos === _b_.None){\n $.endpos = $.string.length\n }\n var data = prepare({string: $.string})\n if(self.$pattern.type != data.string.type){\n throw _b_.TypeError.$factory(\"not the same type for pattern \" +\n \"and string\")\n }\n var mo = match($.self.$pattern, data.string, $.pos,\n $.endpos)\n return mo ? 
MatchObject.$factory(mo) : _b_.None\n}\n\nPattern.scanner = function(self, string, pos, endpos){\n return Scanner.$factory.apply(null, arguments) // self, string, pos, endpos)\n}\n\nPattern.search = function(self, string){\n var $ = $B.args(\"match\", 4,\n {self: null, string: null, pos: null, endpos: null},\n [\"self\", \"string\", \"pos\", \"endpos\"], arguments,\n {pos: 0, endpos: _b_.None}, null, null)\n var data = prepare({string: $.string})\n if(self.$pattern.type != data.string.type){\n throw _b_.TypeError.$factory(\"not the same type for pattern \" +\n \"and string\")\n }\n if($.endpos === _b_.None){\n $.endpos = data.string.length\n }\n var pos = $.pos\n while(pos <= $.endpos){\n var mo = match(self.$pattern, data.string, pos)\n if(mo){\n return MatchObject.$factory(mo)\n }else{\n pos++\n }\n }\n return _b_.None\n}\n\nPattern.split = function(){\n return module.split.apply(null, arguments)\n}\n\nPattern.sub = function(){\n var $ = $B.args(\"match\", 4,\n {self: null, repl: null, string: null, count: null},\n \"self repl string count\".split(' '), arguments,\n {count: 0}, null, null)\n var data = prepare({string: $.string})\n if($.self.$pattern.type != data.string.type){\n throw _b_.TypeError.$factory(\"not the same type for pattern \" +\n \"and string\")\n }\n\n return module.sub($.self, $.repl, $.string, $.count)\n}\n\n$B.set_func_names(Pattern, \"re\")\n\nfunction Node(parent){\n this.parent = parent\n this.items = []\n}\n\nNode.prototype.add = function(item){\n this.items.push(item)\n item.parent = this\n}\n\nNode.prototype.fixed_length = function(){\n // Return the sum of items lengths if fixed, else undefined\n if(this.repeat){\n return false\n }\n var len = 0\n for(var item of this.items){\n if(item.fixed_length === undefined){\n console.log(\"pas de fixed length\", item)\n alert()\n }\n var sublen = item.fixed_length()\n if(sublen === false){\n return false\n }\n len += sublen\n }\n return len\n}\n\nfunction get_top(node){\n var top = node.parent\n while(top.parent){\n top = top.parent\n }\n return top\n}\n\nvar BackReference = function(pos, type, value){\n // for \"\\number\"\n this.name = \"BackReference\"\n this.pos = pos\n this.type = type // \"name\" or \"num\"\n this.value = value\n this.groups = []\n}\n\nBackReference.prototype.fixed_length = function(){\n // Return length of referenced group if it is fixed, else undefined\n if(this.repeat){\n return undefined\n }\n var group = this.get_group()\n if(group.fixed_length === undefined){\n console.log(\"group\", group, \"no fixed length\")\n }\n return group === undefined ? 
false : group.fixed_length()\n}\n\nBackReference.prototype.get_group = function(){\n var top = get_top(this)\n return top.$groups[this.value]\n}\n\nBackReference.prototype.match = function(string, pos, endpos, groups){\n this.repeat = this.repeat || {min: 1, max: 1}\n\n var group = groups[this.value]\n if(group === undefined){\n if(this.repeat.min == 0){\n return {\n nb_min: 0,\n nb_max: 0\n }\n }\n return false\n }\n\n // Get the codepoints matched by the referenced group\n group_cps = string.codepoints.slice(group.start, group.end)\n\n // search (repetitions of) the matched group codepoints\n var _pos = pos,\n nb = 0,\n group_len = group_cps.length,\n flag,\n cp\n while(string.cp_at(_pos) !== undefined && nb < this.repeat.max){\n flag = true\n for(var i = 0; i < group_len; i++){\n cp = string.cp_at(_pos + i)\n if(cp != group_cps[i]){\n flag = false\n break\n }\n }\n if(flag){\n nb++\n _pos += group_len\n }else{\n break\n }\n }\n if(nb >= this.repeat.min){\n // Returns the accepted minimum and maximum number of repeats\n // and the length of each repeat\n return {\n nb_min: this.repeat.min,\n nb_max: nb,\n group_len\n }\n }\n return false\n}\n\nBackReference.prototype.toString = function(){\n return \"BackRef to group\" + this.value\n}\n\nvar Case = function(){\n this.name = \"Case\"\n this.items = []\n this.groups = []\n this.text = 'Case '\n}\n\nCase.prototype.add = function(item){\n this.items.push(item)\n item.parent = this\n}\n\nCase.prototype.fixed_length = function(){\n var len\n for(var item of this.items){\n var fl = item.fixed_length()\n if(fl === false){\n return false\n }else if(len === undefined){\n len = fl\n }else{\n len += fl\n }\n }\n return len\n}\n\nCase.prototype.toString = function(){\n var res = 'Case '\n res += this.items.map(x => x + '').join(' ')\n return this.text = res\n}\n\nvar Choice = function(){\n this.type = \"choice\"\n this.items = []\n this.groups = []\n}\n\nChoice.prototype.add = Node.prototype.add\n\nChoice.prototype.fixed_length = function(){\n var len\n for(var item of this.items){\n var fl = item.fixed_length()\n if(fl === false){\n return false\n }else if(len === undefined){\n len = fl\n }else if(len != fl){\n return false\n }\n }\n return len\n}\n\nChoice.prototype.toString = function(){\n return 'Choice'\n}\n\nvar EmptyString = {\n toString: function(){\n return ''\n },\n match: function(string, pos, endpos){\n return {nb_min: 0, nb_max: 0}\n },\n fixed_length: function(){\n return 1\n },\n length: 0\n },\n Flags = function(flags){\n this.flags = flags\n },\n GroupEnd = function(pos){\n this.name = \"GroupEnd\"\n this.pos = pos\n this.text = ')'\n this.toString = function(){\n return '[end of group #' + this.group.num + ']'\n }\n },\n Or = function(pos){\n this.name = \"Or\"\n this.pos = pos\n this.text = '|'\n this.toString = function(){\n return '|'\n }\n },\n Repeater = function(pos, op){\n this.name = \"Repeater\"\n this.pos = pos\n this.op = op\n }\n\nfunction cased_cps(cp, ignore_case, ascii){\n // If cp is the codepoint of a cased Unicode character, return the list\n // of the codepoints that match the character in a case-insensitive way\n\n // ignore_case = this.flags && this.flags.value & IGNORECASE.value\n // ascii = this.flags.value & ASCII.value\n var cps,\n char = $B.codepoint2jsstring(cp)\n if(! 
ignore_case){\n return [cp]\n }\n if(ascii){\n // only test ASCII letters\n ignore_case = ignore_case && (\n (char >= 'a' && char <= 'z') ||\n (char >= 'A' && char <= 'Z'))\n }\n if(ignore_case){\n var char_up = char.toUpperCase(),\n char_low = char.toLowerCase(),\n cps = new Set([cp, $B.jsstring2codepoint(char_low),\n $B.jsstring2codepoint(char_up)])\n // special cases\n if(char.toLowerCase() == \"k\"){\n cps.add(0x212a) // Kelvin sign\n }\n if(cp == 0x212a){\n cps.add(ord('k'))\n cps.add(ord('K'))\n }\n if(char.toLowerCase() == \"s\"){\n cps.add(0x017f) // (Latin small letter long s)\n }\n if(cp == 0x017f){\n cps.add(ord('s'))\n cps.add(ord('S'))\n }\n if(char.toLowerCase() == 'i'){\n cps.add(0x0130) // (Latin capital letter I with dot above)\n cps.add(0x0131) // (Latin small letter dotless i)\n }\n if(cp == 0x0130 || cp == 0x0131){\n cps.add(ord('i'))\n cps.add(ord('I'))\n }\n return Array.from(cps)\n }else{\n cps = [cp]\n }\n return cps\n}\n\nvar Char = function(pos, cp, groups){\n // character in a regular expression or in a character set\n // pos : position of the character in the pattern string\n // cp : the character's codepoint\n // groups (optional) : the groups that contain the character\n this.pos = pos\n this.cp = cp\n this.char = chr(this.cp)\n this.text = this.char\n}\n\nChar.prototype.fixed_length = function(){\n if(this.repeat){\n return this.repeat.min\n }\n return this.char === EmptyString ? 0 : 1\n}\n\nChar.prototype.match = function(string, pos, endpos){\n // Returns {pos1, pos2} such that \"this\" matches all the substrings\n // string[pos:i] with pos1 <= i < pos2, or false if no match\n this.repeat = this.repeat || {min: 1, max: 1}\n\n var i = 0\n\n // browse string codepoints until they don't match, or the number of\n // matches is above the maximum allowed\n if(this.flags){\n if(this.flags.value & ASCII.value){\n if(this.cp > 127){\n return false\n }\n }\n if(this.flags.value & IGNORECASE.value &&\n (! this.is_bytes || this.cp <= 127)){\n // Flag IGNORECASE set\n // For bytes pattern, case insensitive matching only works\n // for ASCII characters\n var char_upper = this.char.toUpperCase(),\n char_lower = this.char.toLowerCase(),\n cp\n while(i < this.repeat.max && pos + i < endpos){\n cp = string.cp_at(pos + i)\n var char = chr(cp)\n if(char.toUpperCase() != char_upper &&\n char.toLowerCase() != char_lower){\n break\n }\n i++\n }\n }else{\n while(pos + i < endpos &&\n string.cp_at(pos + i) == this.cp &&\n i < this.repeat.max){\n i++\n }\n }\n }else{\n while(pos + i < endpos &&\n string.cp_at(pos + i) == this.cp &&\n i < this.repeat.max){\n i++\n }\n }\n var nb = i\n if(nb >= this.repeat.min){\n // Number of repeats ok\n return {\n nb_min: this.repeat.min,\n nb_max: nb\n }\n }else{\n return false\n }\n}\n\nChar.prototype.toString = function(){\n var res = 'Char ' + this.text\n if(this.repeat !== undefined){\n res += ' repeat {' + this.repeat.min + ',' + this.repeat.max + '}'\n if(this.non_greedy){\n res += '?'\n }\n }\n return res\n}\n\nfunction CharSeq(chars, flags){\n // sequence of consecutive characters\n this.chars = chars\n this.flags = flags\n this.merge_same_chars()\n}\n\nCharSeq.prototype.add_char = function(char){\n this.chars.push(char)\n this.merge_same_chars()\n}\n\nCharSeq.prototype.fixed_length = function(){\n var len = 0,\n cps = [],\n char_len\n for(var char of this.chars){\n if(! 
char.repeat){\n char_len = 1\n }else if(char.repeat.min == char.repeat.max){\n char_len = char.repeat.min\n }else{\n len = false\n break\n }\n for(var i = 0; i < char_len; i++){\n cps.push(char.cp)\n }\n len += char_len\n }\n this.cps = cps\n return this.len = len\n}\n\nCharSeq.prototype.match = function(string, pos, endpos){\n var mos = [],\n i = 0,\n backtrack,\n nb\n this.len = this.len === undefined ? this.fixed_length() : this.len\n // optimization if character sequence has a fixed length\n if(this.len !== false && ! (this.flags.value & IGNORECASE.value)){\n for(var i = 0; i < this.len; i++){\n if(string.cp_at(pos + i) !== this.cps[i]){\n return false\n }\n }\n return {nb_min: this.len, nb_max: this.len}\n }\n for(var i = 0, len = this.chars.length; i < len; i++){\n var char = this.chars[i],\n mo = char.match(string, pos, endpos) // form {nb_min, nb_max}\n if(_debug.value){\n console.log('CharSeq match, pos', pos, 'char', char, 'mo', mo)\n alert()\n }\n if(mo){\n nb = char.non_greedy ? mo.nb_min : mo.nb_max\n mos.push({nb,\n nb_min: mo.nb_min,\n nb_max: mo.nb_max,\n non_greedy: !!char.non_greedy\n })\n pos += nb\n }else{\n // backtrack\n backtrack = false\n while(mos.length > 0){\n i--\n mo = mos.pop()\n pos -= mo.nb\n nb = mo.nb\n if(mo.non_greedy && nb < mo.nb_max){\n nb += 1\n backtrack = true\n }else if(! mo.non_greedy && nb - 1 >= mo.nb_min){\n nb -= 1\n backtrack = true\n }\n if(backtrack){\n pos += nb\n mo.nb = nb\n mos.push(mo)\n break\n }\n }\n if(mos.length == 0){\n return false\n }\n }\n }\n var nb = 0,\n last_mo = $B.last(mos)\n for(var mo of mos.slice(0, mos.length - 1)){\n nb += mo.nb\n }\n var res = {\n nb_min: nb + last_mo.nb_min,\n nb_max: nb + last_mo.nb_max\n }\n return res\n}\n\nCharSeq.prototype.merge_same_chars = function(){\n // b?b merged into b+ etc.\n var current,\n chars = [],\n merged\n for(var item of this.chars){\n if(current && current.char == item.char &&\n current.non_greedy === item.non_greedy){\n if(! current.repeat){\n current.repeat = {min: 1, max: 1}\n }\n if(item.repeat){\n current.repeat.min += item.repeat.min\n current.repeat.max += item.repeat.max\n }else{\n current.repeat.min += 1\n current.repeat.max += 1\n }\n merged = true\n }else{\n chars.push(item)\n }\n current = item\n }\n if(merged){\n this.chars = chars\n }\n}\n\nCharSeq.prototype.toString = function(){\n var res = ''\n for(var char of this.chars){\n res += char.text\n }\n return 'CharSeq ' + res\n}\n\nfunction CharacterClass(pos, cp, length, groups){\n this.cp = cp\n this.value = chr(cp)\n this.length = length\n this.pos = pos\n\n var flags = this.flags\n\n // Test function : test(string, pos) returns:\n // - true if \"this\" matches 1 character string[pos]\n // - [true, 0] if \"this\" matches the empty string at pos\n // - false or undefined if \"this\" doesn't match\n switch(this.value){\n case 'A':\n this.test_func = function(string, pos){\n if(pos == 0){\n return [true, 0]\n }\n }\n break\n case 's':\n this.test_func = function(string, pos){\n var cp = string.cp_at(pos)\n return $B.in_unicode_category('Zs', cp) ||\n $B.unicode_bidi_whitespace.indexOf(cp) > -1\n }\n break\n case 'S':\n this.test_func = function(string, pos){\n var cp = string.cp_at(pos)\n return cp !== undefined &&\n ! 
$B.in_unicode_category('Zs', cp) &&\n $B.unicode_bidi_whitespace.indexOf(cp) == -1\n }\n break\n case '.':\n this.test_func = function(string, pos){\n if(string.cp_at(pos) === undefined){\n return false\n }\n if(this.flags.value & DOTALL.value){\n return true\n }else{\n return string.cp_at(pos) != 10\n }\n }\n break\n case 'd':\n this.test_func = function(string, pos){\n if(this.flags === undefined){\n console.log(\"\\\\d, no flags\", this)\n }\n var cp = string.cp_at(pos),\n tester = (this.flags.value & ASCII.value) ?\n is_ascii_digit : is_digit\n return tester(cp)\n }\n break\n case 'D':\n this.test_func = function(string, pos){\n var cp = string.cp_at(pos),\n tester = (this.flags.value & ASCII.value) ?\n is_ascii_digit : is_digit\n return ! tester(cp)\n }\n break\n case 'b':\n this.test_func = function(string, pos){\n var tester = is_word\n if(this.is_bytes || (this.flags.value & ASCII.value)){\n tester = is_ascii_word\n }\n var cp = string.cp_at(pos),\n ok = {nb_min: 0, nb_max: 0}\n\n // return true if char at pos is at the beginning or start\n // of a word\n if(pos == 0 && tester(cp)){\n return ok\n }\n if(string.cp_at(pos) === undefined && tester(string.cp_at(pos - 1))){\n return ok\n }\n if(pos > 0 && string.cp_at(pos) !== undefined){\n if((tester(string.cp_at(pos - 1))) !==\n tester(cp)){\n return ok\n }\n }\n return false\n }\n break\n case 'B':\n this.test_func = function(string, pos){\n var tester = is_word\n if(this.is_bytes || (this.flags.value & ASCII.value)){\n tester = is_ascii_word\n }\n\n var cp = string.cp_at(pos),\n ok = {nb_min: 0, nb_max: 0}\n // test is true if char at pos is not at the beginning or\n // start of a word\n if(pos == 0 && cp === undefined){\n // empty string\n return false\n }\n if(pos == 0 && tester(cp)){\n return false\n }\n if(cp === undefined &&\n tester(string.cp_at(pos - 1))){\n return false\n }\n if(pos > 0 && cp !== undefined){\n if(tester(string.cp_at(pos - 1)) !== tester(cp)){\n return false\n }\n }\n return ok\n }\n break\n case 'w':\n this.test_func = function(string, pos){\n var tester = is_word\n if(this.is_bytes || (this.flags.value & ASCII.value)){\n tester = is_ascii_word\n }\n return tester(string.cp_at(pos))\n }\n break\n case 'W':\n this.test_func = function(string, pos){\n var tester = is_word\n if(this.is_bytes || (this.flags.value & ASCII.value)){\n tester = is_ascii_word\n }\n return ! tester(string.cp_at(pos))\n }\n break\n case 'Z':\n this.test_func = function(string, pos){\n if(string.cp_at(pos) === undefined){\n return {nb_min: 0, nb_max: 0}\n }\n }\n break\n }\n}\n\nCharacterClass.prototype.fixed_length = function(){\n return this.repeat ? false : 1\n}\n\nCharacterClass.prototype.match = function(string, pos, endpos){\n // Returns {pos1, pos2} such that \"this\" matches all the substrings\n // string[pos:i] with pos1 <= i < pos2, or false if no match\n if(pos === undefined){\n console.log('no pos')\n throw Error()\n }\n var len = string.length\n this.repeat = this.repeat || {min: 1, max: 1}\n\n // browse string codepoints until they don't match, or the number of\n // matches is above the maximum allowed\n var i = 0\n while(i < this.repeat.max && i < len){\n var test = this.test_func(string, pos + i, this.flags)\n if(! 
test){\n break\n }\n i++\n }\n\n var nb = i\n if(nb >= this.repeat.min){\n // Number of repeats ok\n if('bBAZ'.indexOf(this.value) > -1 ){\n return {nb_min: 0, nb_max: 0}\n }\n return {\n nb_min: this.repeat.min,\n nb_max: nb\n }\n }else{\n return false\n }\n}\n\nCharacterClass.prototype.nb_repeats = Char.prototype.nb_repeats\n\nCharacterClass.prototype.toString = function(){\n return '\\\\' + this.value\n}\n\nvar CharacterSet = function(pos, set, groups){\n // character set\n this.pos = pos\n this.set = set\n this.neg = set.neg\n}\n\nCharacterSet.prototype.fixed_length = function(){\n return 1\n}\n\nCharacterSet.prototype.match = function(string, pos, endpos){\n var ignore_case = this.flags && (this.flags.value & IGNORECASE.value),\n test,\n match = false,\n i = 0,\n cp\n\n this.repeat = this.repeat || {min: 1, max: 1}\n\n while(i < this.repeat.max && (cp = string.cp_at(pos + i)) !== undefined){\n test = false\n\n if(string.cp_at(pos) === undefined){\n cp = EmptyString\n }\n try{\n $B.codepoint2jsstring(cp)\n }catch(err){\n console.log(err.message)\n console.log('cp', cp, '\\nstring', string, 'pos', pos)\n console.log($B.print_stack())\n throw _b_.Exception.$factory('bad codepoint')\n }\n var char = $B.codepoint2jsstring(cp),\n cps = cased_cps(cp, ignore_case, this.flags.value & ASCII.value),\n char_is_cased = cps.length > 1\n\n for(var cp1 of cps){\n for(var item of this.set.items){\n if(typeof item == 'string'){\n\n }\n if(Array.isArray(item.ord)){\n if(cp1 >= item.ord[0] &&\n cp1 <= item.ord[1]){\n test = true\n break\n }else if(ignore_case && char_is_cased){\n var start1 = chr(item.ord[0]).toUpperCase(),\n end1 = chr(item.ord[1]).toUpperCase(),\n char1 = char.toUpperCase()\n if(char1 >= start1 && char1 <= end1){\n test = true\n }\n var start1 = chr(item.ord[0]).toLowerCase(),\n end1 = chr(item.ord[1]).toLowerCase(),\n char1 = char.toLowerCase()\n if(char1 >= start1 && char1 <= end1){\n test = true\n }\n }\n }else if(item instanceof CharacterClass){\n test = !! item.match(string, pos + i, endpos) // boolean\n }else{\n if(item.ord == cp1){\n test = true\n break\n }\n item_str = typeof item == 'string' ? item : chr(item.ord)\n if(item_str == char){\n test = true\n break\n }\n if(ignore_case && char_is_cased &&\n (char.toUpperCase() == item_str.toUpperCase() ||\n char.toLowerCase() == item_str.toLowerCase())){\n test = true\n break\n }\n }\n }\n }\n if(this.neg){\n test = ! 
test\n }\n if(test){\n i++\n }else{\n break\n }\n }\n var nb = i\n if(nb >= this.repeat.min){\n // Number of repeats ok\n return {\n nb_min: this.repeat.min,\n nb_max: nb\n }\n }else{\n return false\n }\n\n}\n\nCharacterSet.prototype.nb_repeats = Char.prototype.nb_repeats\n\nCharacterSet.prototype.toString = function(){\n return 'CharSet'\n}\n\nvar ConditionalBackref = function(pos, group_ref){\n this.type = \"conditional backref\"\n this.pos = pos\n this.group_ref = group_ref\n this.chars = []\n this.match_codepoints = []\n this.nb_success = 0\n this.re_if_exists = new Group(pos)\n this.re_if_not_exists = new Group(pos)\n this.nb_options = 1\n}\n\nConditionalBackref.prototype.add = function(item){\n if(this.nb_options == 1){\n this.re_if_exists.add(item)\n }else if(this.nb_options == 2){\n this.re_if_not_exists.add(item)\n }\n item.parent = this\n}\n\nConditionalBackref.prototype.fixed_length = function(){\n var len = this.re_if_exists.fixed_length()\n if(len !== false && len == this.re_if_not_exists.fixed_length()){\n return len\n }\n return false\n}\n\nConditionalBackref.prototype.match = function(string, pos, endpos, groups){\n var re = groups[this.group_ref] ? this.re_if_exists :\n this.re_if_not_exists,\n pattern = {node: re, text: re + ''},\n mo = match(pattern, string, pos, endpos, false, groups)\n if(mo){\n return {nb_min: mo.end - mo.start, nb_max: mo.end - mo.start}\n }\n return false\n}\n\nConditionalBackref.prototype.toString = function(){\n return 'ConditionalBackref'\n}\n\nvar Group = function(pos, extension){\n this.type = \"group\"\n this.pos = pos\n this.items = []\n this.chars = []\n this.groups = []\n for(var key in extension){\n this[key] = extension[key]\n }\n if(extension && extension.type){\n if(extension.type.indexOf('lookahead') > -1){\n this.is_lookahead = true\n }else if(extension.type.indexOf('lookbehind') > -1){\n this.is_lookbehind = true\n }\n }\n}\n\nGroup.prototype.add = Node.prototype.add\n\nGroup.prototype.toString = function(){\n if(this.num === undefined){\n var res = 'Group ' + this.type + ' ' + this.pattern\n }else{\n var res = 'Group #' + this.num + ' ' + this.pattern\n }\n if(this.repeat !== undefined){\n res += ' repeat {' + this.repeat.min + ',' + this.repeat.max + '}'\n if(this.non_greedy){\n res += '?'\n }\n }\n return res\n}\n\nBackReference.prototype.nb_repeats = Group.prototype.nb_repeats\n\nGroup.prototype.fixed_length = Node.prototype.fixed_length\n\nfunction groups_in(pattern, group_list){\n if(group_list === undefined){\n group_list = new Set()\n }\n if(pattern instanceof Group && pattern.hasOwnProperty('num')){\n group_list.add(pattern.num)\n }\n if(pattern.items){\n for(var subpattern of pattern.items){\n for(var group of groups_in(subpattern, group_list)){\n group_list.add(group)\n }\n }\n }\n return group_list\n}\n\nfunction GroupRef(group_num, item){\n this.num = group_num\n this.item = item\n}\n\nGroupRef.prototype.fixed_length = function(){\n return this.item.fixed_length()\n}\n\nfunction Lookbehind(item){\n this.re = item\n this.neg = this.re.type == \"negative_lookbehind\"\n}\n\nLookbehind.prototype.match = function(string, pos, endpos, groups){\n var ok = {nb_min: 0, nb_max: 0},\n pattern = {node: this.re, text: this.re + ''},\n length = this.re.length,\n mo\n if(pos - length < 0){\n mo = false\n }else{\n mo = match(pattern, string, pos - length, endpos, false, groups)\n }\n if(mo){\n return this.neg ? false : ok\n }else{\n return this.neg ? 
ok : false\n }\n}\n\nLookbehind.prototype.fixed_length = function(){\n return this.re.fixed_length()\n}\n\nLookbehind.prototype.toString = function(){\n return \"Lookbehind\"\n}\n\nfunction SetFlags(pos, flags){\n this.pos = pos\n this.on_flags = flags.on_flags\n this.off_flags = flags.off_flags\n this.items = []\n}\n\nSetFlags.prototype.add = Node.prototype.add\n\nfunction StringStart(pos){\n this.pos = pos\n}\n\nStringStart.prototype.match = function(string, pos, endpos){\n var ok = {nb_min:0, nb_max: 0}\n if(this.flags.value & MULTILINE.value){\n return (pos == 0 || string.cp_at(pos - 1) == 10) ? ok : false\n }\n return pos == 0 ? ok : false\n}\n\nStringStart.prototype.fixed_length = function(){\n return 0\n}\n\nStringStart.prototype.toString = function(){\n return '^'\n}\n\nfunction StringEnd(pos){\n this.pos = pos\n}\n\nStringEnd.prototype.match = function(string, pos, endpos){\n var ok = {nb_min:0, nb_max: 0},\n cp = string.cp_at(pos)\n if(this.flags.value & MULTILINE.value){\n return (pos > string.codepoints.length - 1 ||\n cp == 10) ? ok : false\n }\n return pos > endpos - 1 ? ok :\n (pos == endpos - 1 && cp == 10) ? ok : false\n}\n\nStringEnd.prototype.fixed_length = function(){\n return 0\n}\n\nStringEnd.prototype.toString = function(){\n return '$'\n}\n\nvar cache = new Map()\n\nfunction compile(pattern, flags){\n if(pattern.__class__ === Pattern){\n if(flags !== no_flag){\n throw _b_.ValueError.$factory(\"no flags\")\n }\n return pattern\n }\n if(cache.has(pattern.py_obj)){\n if(cache.get(pattern.py_obj).has(flags.value || 0)){\n return cache.get(pattern.py_obj).get(flags.value || 0)\n }\n }\n var original_pattern = pattern,\n original_flags = flags,\n type = pattern.type,\n choices,\n allow_global_flags = true\n pattern = pattern.codepoints\n var is_bytes = type !== \"str\"\n if(is_bytes && flags && (flags.value & U.value)){\n throw _b_.ValueError.$factory(\"cannot use UNICODE flag with \" +\n \"a bytes pattern\")\n }\n if(flags && (flags.value & U.value) &&\n (flags.value & ASCII.value)){\n throw _b_.ValueError.$factory(\"ASCII and UNICODE flags \" +\n \"are incompatible\")\n }\n if(is_bytes){\n // bytes patterns ignore re.ASCII flag\n flags = Flag.$factory(flags.value || 0)\n //flags.value &= ~ASCII.value\n }\n var group_num = 0,\n group_stack = [],\n groups = {},\n pos,\n lookbehind,\n node = new Node(),\n accept_inline_flag = true,\n verbose = (flags.value || 0) & VERBOSE.value,\n comment = false,\n backrefs = {}\n node.$groups = groups\n for(var item of tokenize(pattern, type, verbose)){\n item.flags = flags\n item.is_bytes = is_bytes\n if(lookbehind){\n item.lookbehind = lookbehind\n lookbehind.parent = item\n lookbehind = false\n }\n if(allow_global_flags &&\n (group_stack.length > 0 || ! 
(item instanceof SetFlags))){\n allow_global_flags = false\n }\n if(item instanceof Group){\n group_stack.push(item)\n node.add(item)\n item.state = \"open\"\n group_num++\n item.num = group_num\n node = item // next items will be stored as group's items\n pos = item.pos\n if(item.non_capturing){\n delete item.num\n group_num--\n }else if(item.type == \"name_def\"){\n var value = item.value\n if(groups[value.string] !== undefined){\n fail(`redefinition of group name` +\n ` '${value.string}' as group ${group_num}; was group` +\n ` ${groups[value.string].num}`, pos)\n }\n item.name = value.string\n groups[value.string] = groups[group_num] =\n new GroupRef(group_num, item)\n }else if(item.is_lookahead){\n // a lookahead assertion is relative to the previous regexp\n group_num--\n while(node.items.length > 0){\n item.add(node.items.shift())\n }\n node = item\n }else if(item.is_lookbehind){\n // a lookbehind assertion is relative to the next regexp\n node.parent.items.pop() // remove from node items\n // temporarily create a group\n groups[group_num] = new GroupRef(group_num, item)\n }else if(item.type == \"flags\"){\n // save flags before a group with inline flags, eg \"(?i:a)\"\n item.flags_before = Flag.$factory(flags.value | 0)\n }else{\n groups[group_num] = new GroupRef(group_num, item)\n }\n }else if(item instanceof GroupEnd){\n end_pos = item.pos\n if(group_stack.length == 0){\n fail(\"unbalanced parenthesis\", end_pos, original_pattern)\n }\n var item = group_stack.pop()\n item.end_pos = end_pos\n try{\n item.pattern = from_codepoint_list(\n pattern.slice(item.pos, end_pos + 1))\n }catch(err){\n console.log(\"err avec pattern substring\", pattern)\n throw err\n }\n if(item.is_lookbehind){\n delete groups[group_num]\n group_num--\n // check that all elements have a fixed length\n item.length = item.fixed_length()\n if(item.length === false){\n fail(\"look-behind requires fixed-width pattern\", pos)\n }\n item.parent.add(new Lookbehind(item))\n item.non_capturing = true\n // store in variable \"lookbehind\", will be applied to next item\n lookbehind = item\n }else if(item.is_lookahead){\n delete item.num\n }\n if(item instanceof Group && item.items.length == 0){\n item.add(EmptyString)\n }else if(item instanceof ConditionalBackref){\n if(groups[item.group_ref] === undefined){\n // might be defined later; store in backrefs and check\n // when all items have been processed\n backrefs[item.group_ref] = backrefs[item.group_ref] | pos + 3\n }\n if(item.re_if_exists.items.length == 0){\n item.re_if_exists.add(EmptyString)\n }else if(item.re_if_not_exists.items.length == 0){\n item.re_if_not_exists.pos = pos\n item.re_if_not_exists.add(EmptyString)\n }\n }else if(item.type == \"flags\"){\n // restore flags when entering the group\n flags = Flag.$factory(item.flags_before.value)\n }\n item.state = 'closed'\n node = item.parent\n }else if(item instanceof ConditionalBackref){\n var pos = item.pos,\n group_ref = item.group_ref\n if(typeof group_ref == \"number\"){\n if(group_ref == 0){\n fail(`bad group number`, pos + 3)\n }else if(group_ref >= MAXGROUPS){\n fail(`invalid group reference ${group_ref}`, pos + 1)\n }else if(groups[group_ref] &&\n groups[group_ref].item.state == \"open\"){\n fail(\"cannot refer to an open group\", pos)\n }\n }else if(groups[group_ref] !== undefined){\n if(groups[group_ref].item.state == \"open\"){\n fail(\"cannot refer to an open group\", pos)\n }\n }else{\n fail(`unknown group name '${group_ref}'`, pos)\n }\n group_stack.push(item)\n node.add(item)\n item.state = 
\"open\"\n node = item // next items will be stored as group's items\n }else if(item instanceof BackReference){\n pos = item.pos\n if(item.type == \"num\" && item.value > 99){\n var head = item.value.toString().substr(0, 2)\n fail(`invalid group reference ${head}`, pos + 1)\n }\n if(groups[item.value] !== undefined){\n if(groups[item.value].item.state == \"open\"){\n fail(\"cannot refer to an open group\", pos)\n }\n var ref_item = groups[item.value].item.parent\n while(ref_item){\n if(ref_item.is_lookbehind){\n fail(\"cannot refer to group defined in the same lookbehind subpattern\", pos)\n }\n ref_item = ref_item.parent\n }\n }else if(item.type == \"name\"){\n fail(`unknown group name '${item.value}'`, pos)\n }else if(item.type == \"num\"){\n fail(`invalid group reference ${item.value}`, pos)\n }\n node.add(item)\n }else if(item instanceof Char ||\n item instanceof CharacterClass ||\n item instanceof CharacterSet){\n if(item instanceof CharacterSet){\n for(var elt of item.set.items){\n elt.flags = flags\n }\n }\n var added_to_charseq = false\n if(item instanceof Char){\n if(node.items && node.items.length > 0){\n var previous = $last(node.items)\n if(previous instanceof CharSeq){\n previous.add_char(item)\n added_to_charseq = true\n }else if(previous instanceof Char && ! previous.repeater){\n node.items.pop()\n node.items.push(new CharSeq([previous, item], flags))\n added_to_charseq = true\n }\n }\n }\n if(! added_to_charseq){\n node.add(item)\n }\n }else if(item instanceof Repeater){\n // check that item is not in a lookbehind group\n var pnode = node\n while(pnode){\n if(pnode.extension && pnode.extension.type &&\n pnode.extension.type.indexOf(\"lookbehind\") > -1){\n fail(\"look-behind requires fixed-width pattern\", pos)\n }\n pnode = pnode.parent\n }\n pos = item.pos\n if(node.items.length == 0){\n fail(\"nothing to repeat\", pos)\n }\n previous = $last(node.items)\n if(previous instanceof Char ||\n previous instanceof CharSeq ||\n previous instanceof CharacterClass ||\n previous instanceof CharacterSet ||\n previous instanceof Group ||\n previous instanceof BackReference){\n if(previous instanceof GroupEnd){\n // associate repeat with Group\n previous = previous.group\n }else if(previous instanceof CharSeq){\n previous = $last(previous.chars)\n }\n if(previous.repeater){\n if(item.op == '?' && ! previous.non_greedy){\n if(previous.possessive){\n fail('multiple repeat', pos)\n }\n previous.non_greedy = true\n if(previous instanceof CharacterClass &&\n previous.value == '.'){\n previous.min_repeat_one = true\n }\n }else{\n if(item instanceof Repeater && item.op == '+'){\n if(previous.possessive || previous.non_greedy){\n fail('multiple repeat', pos)\n }\n previous.possessive = true\n }else{\n fail(\"multiple repeat\", pos)\n }\n }\n }else{\n // convert to minimum and maximum number of repeats\n var min = 1,\n max = 1\n if(Array.isArray(item.op)){\n min = item.op[0]\n if(min >= MAXREPEAT){\n throw _b_.OverflowError.$factory(\n \"the repetition number is too large\")\n }\n max = item.op[1] === undefined ? 
min : item.op[1]\n if(isFinite(max) && max >= MAXREPEAT){\n throw _b_.OverflowError.$factory(\n \"the repetition number is too large\")\n }\n if(max < min){\n fail('min repeat greater than max repeat', pos)\n }\n }else if(item.op == \"?\"){\n min = 0\n max = 1\n }else if(item.op == \"*\"){\n min = 0\n max = Number.POSITIVE_INFINITY\n }else if(item.op == \"+\"){\n min = 1\n max = Number.POSITIVE_INFINITY\n }\n previous.repeater = item\n previous.repeat = {min, max}\n // mark all parents of item as no fixed length\n var parent = item\n while(parent){\n parent.fixed_length = false\n parent = parent.parent\n }\n }\n }else{\n fail(\"nothing to repeat\", pos)\n }\n }else if(item instanceof Or){\n if(group_stack.length > 0){\n item.group = group_stack[group_stack.length - 1]\n }else{\n item.group = false\n }\n pos = item.pos\n if(node instanceof ConditionalBackref){\n // case '(?(num)a|'\n if(node.nb_options == 1){\n node.nb_options++\n }else{\n fail('conditional backref with more than ' +\n 'two branches', pos)\n }\n }else if(node.items.length == 0){\n // token \"|\" in \"(|...)\" : first option is the empty string\n var choice = new Choice(),\n case1 = new Case()\n case1.add(new Char(pos, EmptyString))\n choice.add(case1)\n node.add(choice)\n var case2 = new Case()\n choice.add(case2)\n node = case2\n }else if(node instanceof Case){\n // node.parent is already a Choice\n var new_case = new Case()\n node.parent.add(new_case)\n node = new_case\n }else{\n // token \"|\" in \"(ab|...)\"\n var previous = node.items[node.items.length - 1]\n if(previous instanceof Case){\n var new_case = new Case()\n previous.add(new_case)\n node = new_case\n }else{\n var choice = new Choice(),\n case1 = new Case(),\n first_rank = node.items[0].rank\n while(node.items.length > 0){\n case1.add(node.items.shift())\n }\n case1.groups = node.$groups\n for(var group of group_stack){\n choice.groups.push(group)\n }\n choice.add(case1)\n node.add(choice)\n var case2 = new Case()\n choice.add(case2)\n node = case2\n }\n }\n }else if(item instanceof StringStart ||\n item instanceof StringEnd){\n node.add(item)\n }else if(item instanceof SetFlags){\n if(group_stack.length == 0 && ! allow_global_flags){\n // pattern like (?x) only allowed as first in reg exp\n fail('global flags not at the start of the ' +\n 'expression', item.pos)\n }\n // copy flags, otherwise re.ASCII etc might be modified\n flags = Flag.$factory(flags.value || U.value)\n if(item.on_flags.indexOf('u') > -1){\n if(is_bytes){\n fail(\"re.error: bad inline flags: cannot use 'u' flag \" +\n \"with a bytes pattern\", pos)\n }\n if(flags && flags.value & ASCII.value){\n // switch to Unicode\n flags.value ^= ASCII.value\n }\n if(group_stack.length == 0 &&\n original_flags && original_flags.value & ASCII.value){\n throw _b_.ValueError.$factory(\"ASCII and UNICODE flags \" +\n \"are incompatible\")\n }\n if(item.on_flags.indexOf('a') > -1){\n throw _b_.ValueError.$factory(\"ASCII and UNICODE flags \" +\n \"are incompatible\")\n }\n }\n if(item.on_flags.indexOf('a') > -1){\n if(group_stack.length == 0 &&\n original_flags && original_flags.value & U.value){\n throw _b_.ValueError.$factory(\"ASCII and UNICODE flags \" +\n \"are incompatible\")\n }\n if(flags && flags.value & U.value){\n // switch to ASCII\n flags.value ^= U.value\n }\n if(item.on_flags.indexOf('u') > -1){\n throw _b_.ValueError.$factory(\"ASCII and UNICODE flags \" +\n \"are incompatible\")\n }\n }\n if(flags.value === undefined){\n flags.value = 32\n }\n if(item.items.length == 0){\n if(! 
accept_inline_flag && group_stack.length == 0){\n var s = from_codepoint_list(pattern)\n warn(_b_.DeprecationWarning,\n `Flags not at the start of the expression '${s}'`,\n pos)\n }\n for(var on_flag of item.on_flags){\n if(! is_bytes || on_flag !== 'a'){\n flags.value |= inline_flags[on_flag].value\n }\n }\n for(var off_flag of item.off_flags){\n if(! is_bytes || off_flag !== 'a'){\n flags.value ^= inline_flags[off_flag].value\n }\n }\n }else{\n node.add(item)\n }\n }else{\n fail(\"unknown item type \" + item, pos)\n }\n if(! (item instanceof SetFlags) &&\n ! (item instanceof Group && item.type == \"flags\")){\n accept_inline_flag = false\n }\n }\n for(ref in backrefs){\n if(groups[ref] === undefined){\n fail('invalid group name ' + ref, backrefs[ref])\n }\n }\n if(group_stack.length > 0){\n var last = group_stack[group_stack.length - 1]\n fail(\"missing ), unterminated subpattern\", last.pos)\n }\n while(node.parent){\n node = node.parent\n }\n node.pattern = from_codepoint_list(pattern)\n node.groups = group_num\n flags = flags === no_flag ? 32 : flags\n node.flags = flags\n var res = {\n node,\n groups,\n flags,\n original_flags,\n text: from_codepoint_list(pattern),\n type, // \"str\" or \"bytes\"\n fixed_length: node.fixed_length()\n }\n if(! cache.has(original_pattern.py_obj)){\n cache.set(original_pattern.py_obj, new Map())\n }\n cache.get(original_pattern.py_obj).set(original_flags.value || 0, res)\n if(_debug.value){\n show(node)\n }\n return res\n}\n\nfunction show(node, indent){\n indent = indent === undefined ? 0 : indent\n if(indent == 0){\n log('root', node)\n }\n log(' '.repeat(indent) + node)\n if(node.items !== undefined){\n for(var item of node.items){\n show(item, indent + 1)\n }\n }\n}\n\nfunction to_codepoint_list(s){\n var items = []\n if(typeof s == \"string\" || $B.$isinstance(s, _b_.str)){\n if(typeof s != \"string\"){\n s = s.valueOf()\n }\n for(var char of s){\n items.push(char.codePointAt(0))\n }\n items.type = \"unicode\"\n }else if($B.$isinstance(s, [_b_.bytes, _b_.bytearray, _b_.memoryview])){\n if($B.$isinstance(s, _b_.memoryview)){\n items = s.obj.source\n }else{\n items = s.source\n }\n items.type = \"bytes\"\n }else{\n throw Error('invalid type ' + $B.class_name(s))\n }\n return items\n}\n\n$B.nb_from_cp = 0\nfunction from_codepoint_list(codepoints, type){\n $B.nb_from_cp++\n // Return a string\n if(type == \"bytes\"){\n return _b_.bytes.$factory(codepoints)\n }\n var s = ''\n for(var cp of codepoints){\n s += _b_.chr(cp)\n }\n return $B.String(s)\n}\n\nfunction string2bytes(s){\n var t = []\n for(var i = 0, len = s.length; i < len; i++){\n t.push(s.charCodeAt(i))\n }\n return _b_.bytes.$factory(t)\n}\n\nfunction check_pattern_flags(pattern, flags){\n if(pattern.__class__ === Pattern){\n if(flags !== no_flag){\n throw _b_.ValueError.$factory(\n \"cannot process flags argument with a compiled pattern\")\n }\n }\n return pattern\n}\n\nfunction StringObj(obj){\n // A StringObj object is a bridge between a Python string or bytes-like\n // object and Javascript\n // obj is the Python object\n // this.string is a Javascript string\n this.py_obj = obj\n this.codepoints = []\n this.type = \"str\"\n this.is_string = typeof obj == 'string'\n if(typeof obj == \"string\" ||\n (obj instanceof String && ! 
obj.codepoints)){\n // Python object represented as a Javascript string\n this.string = obj\n // Maps a position in codepoints to position in string\n this.index_map = {}\n for(var i = 0, len = obj.length; i < len; i++){\n this.index_map[this.codepoints.length] = i\n var cp = obj.codePointAt(i)\n this.codepoints.push(cp)\n if(cp >= 0x10000){\n i++\n }\n }\n this.length = _b_.str.__len__(obj)\n if(obj instanceof String){\n // store for next use\n obj.codepoints = this.codepoints\n obj.index_map = this.index_map\n }\n }else if(obj instanceof String){\n // string with surrogate pairs\n this.string = obj.string\n this.codepoints = obj.codepoints\n this.index_map = obj.index_map\n this.length = _b_.str.__len__(obj)\n }else if($B.$isinstance(obj, _b_.str)){ // str subclass\n var so = new StringObj(_b_.str.$factory(obj))\n this.string = so.string\n this.codepoints = so.codepoints\n this.length = _b_.str.__len__(obj)\n }else if($B.$isinstance(obj, [_b_.bytes, _b_.bytearray])){\n this.string = _b_.bytes.decode(obj, 'latin1')\n this.codepoints = obj.source\n this.type = \"bytes\"\n }else if($B.$isinstance(obj, _b_.memoryview)){\n this.string = _b_.bytes.decode(obj.obj, 'latin1')\n this.codepoints = obj.obj.source\n this.type = \"bytes\"\n }else if(obj.__class__ && obj.__class__.$buffer_protocol){\n // eg array.array\n this.codepoints = _b_.list.$factory(obj)\n this.string = from_codepoint_list(this.codepoints, \"bytes\")\n this.type = \"bytes\"\n }else if(Array.isArray(obj)){\n // list of codepoints\n this.codepoints = obj\n }else{\n throw _b_.TypeError.$factory(\n `expected string or bytes-like object, got '${$B.class_name(obj)}'`)\n }\n if(this.length === undefined){\n this.length = this.codepoints.length\n }\n}\n\nStringObj.prototype.cp_at = function(pos){\n if(pos >= this.length){\n return undefined\n }\n /*\n if(typeof this.string == 'string'){\n return this.string.charCodeAt(pos)\n }\n */\n var res = this.codepoints[pos]\n if(res !== undefined){\n return res\n }\n}\n\nStringObj.prototype.substring = function(start, end){\n // Returns a string\n var s\n if(this.string && this.index_map){\n if(this.index_map[start] === undefined){\n return ''\n }\n if(end === undefined){\n return this.string.substr(this.index_map[start])\n }\n return this.string.substring(this.index_map[start],\n this.index_map[end])\n }\n var codepoints,\n res = ''\n if(end === undefined){\n codepoints = this.codepoints.slice(start)\n }else{\n codepoints = this.codepoints.slice(start, end)\n }\n return from_codepoint_list(codepoints, this.type)\n}\n\nStringObj.prototype.to_str = function(){\n if(this.hasOwnProperty('string')){\n return this.string\n }\n return from_codepoint_list(this.codepoints, this.type)\n}\n\nStringObj.from_codepoints = function(cps){\n var res = new StringObj('')\n res.codepoints = cps\n for(var cp of cps){\n res.string += _b_.chr(cp)\n }\n return res\n}\n\nfunction prepare(args){\n // Check that all arguments are of the same type (string or bytes-like).\n // Return an object with all attributes transformed into StringObj\n // instances\n var res = {},\n keys = Object.keys(args),\n first = keys[0]\n res[first] = new StringObj(args[first])\n res.type = res[first].type\n for(var key of keys.slice(1)){\n res[key] = new StringObj(args[key])\n if(res[key].type != res.type){\n throw _b_.TypeError.$factory(`not the same type for ${first} and ${key}`)\n }\n }\n return res\n}\n\n\nfunction subn(pattern, repl, string, count, flags){\n // string is a StringObj instance\n // pattern is either a Pattern instance or a 
StringObj instance\n var res = '',\n pos = 0,\n nb_sub = 0\n\n if(pattern instanceof StringObj){\n pattern = compile(pattern, flags)\n }\n if(typeof repl != \"function\"){\n var data1 = transform_repl({repl}, pattern)\n repl1 = data1.repl1\n }\n pos = 0\n var s = string.to_str()\n for(var bmo of module.finditer(Pattern.$factory(pattern), s).js_gen){\n // finditer yields instances of MatchObject\n var mo = bmo.mo // instance of MO\n res += from_codepoint_list(string.codepoints.slice(pos, mo.start))\n if(typeof repl == \"function\"){\n var x = $B.$call(repl)(bmo)\n if(x.__class__ === _b_.bytes){\n x = _b_.bytes.decode(x, 'latin-1')\n }\n res += x // $B.$call(repl)(bmo)\n }else{\n res += repl1\n }\n nb_sub++\n pos = mo.end\n if(count != 0 && nb_sub >= count){\n break\n }\n }\n if(string.is_string){\n res += string.string.substr(pos)\n }else{\n res += from_codepoint_list(string.codepoints.slice(pos))\n }\n if(pattern.type === \"bytes\"){\n res = _b_.str.encode(res, \"latin-1\")\n }\n return [res, nb_sub]\n}\n\n// escaped chars : '\\t\\n\\x0b\\x0c\\r #$&()*+-.?[\\\\]^{|}~'\nvar escaped = [9, 10, 11, 12, 13, 32, 35, 36, 38, 40, 41, 42, 43, 45, 46, 63,\n 91, 92, 93, 94, 123, 124, 125, 126]\n\nfunction starts_with_string_start(pattern){\n // returns true if the pattern starts with ^ or \\A\n if(pattern.node){\n pattern = pattern.node\n }\n if(pattern.items){\n if(pattern.items.length == 0){\n return false\n }\n return starts_with_string_start(pattern.items[0])\n }else if(pattern instanceof CharacterClass){\n return pattern.value == 'A'\n }else if(pattern instanceof StringStart){\n return true\n }else{\n return false\n }\n}\n\nfunction* iterator(pattern, string, flags, original_string, pos, endpos){\n var result = [],\n pos = pos | 0,\n cp,\n accept_one = true // used to test one position after string end\n while((cp = string.cp_at(pos)) !== undefined || accept_one){\n var mo = match(pattern, string, pos, endpos)\n if(mo){\n yield MatchObject.$factory(mo)\n if(mo.end == mo.start){\n // If match has zero with, retry at the same position but\n // with the flag no_zero_width set, to avoid infinite loops\n mo = match(pattern, string, pos, endpos, true)\n if(mo){\n yield MatchObject.$factory(mo)\n pos = mo.end\n }else{\n pos++ // at least 1, else infinite loop\n }\n }else{\n pos = mo.end\n }\n }else{\n pos++\n }\n if(cp === undefined){\n accept_one = false\n }\n if (starts_with_string_start(pattern) && !(flags.value & MULTILINE.value)) {\n break\n }\n }\n delete original_string.in_iteration\n}\n\n\nfunction MO(node, pos, mo, len){\n // Match Object\n this.node = node\n this.start = pos\n this.mo = mo\n this.nb_min = mo.nb_min\n this.nb_max = mo.nb_max\n this.len = len\n this.nb = this.node.non_greedy ? mo.nb_min : mo.nb_max\n this.end = pos + len * this.nb\n}\n\nMO.prototype.backtrack = function(string, groups){\n if(this.node.possessive){\n return false\n }\n if(this.node.non_greedy && this.nb < this.nb_max){\n this.nb++\n this.end = this.start + this.len * this.nb\n return true\n }else if((! 
this.node.non_greedy) && this.nb > this.nb_min){\n this.nb--\n this.end = this.start + this.len * this.nb\n return true\n }else{\n return false\n }\n}\n\nfunction del_groups(groups, node){\n if(node.num !== undefined){\n delete groups[node.num]\n groups.$last.splice(groups.$last.indexOf(node.num), 1)\n if(node.name !== undefined){\n delete groups[node.name]\n }\n }\n for(var child of node.items){\n if(child instanceof Group){\n del_groups(groups, child)\n }\n }\n}\n\nfunction GroupMO(node, start, matches, string, groups, endpos){\n // Match Object for Groups\n this.node = node\n this.start = start\n this.matches = matches\n this.string = string\n this.end = matches.length > 0 ? $last(matches).end : start\n this.endpos = endpos === undefined ? this.end : endpos\n this.$groups = groups\n}\n\nGroupMO.prototype.backtrack = function(string, groups){\n if(_debug.value){\n console.log('group MO backtrack, this', this)\n alert()\n }\n // Try backtracking in the last match\n if(this.node.possessive || this.node.atomic){\n return false\n }\n if(this.matches.length > 0){\n var _match = $last(this.matches),\n mos = _match.mos,\n nb0 = mos.length\n while(mos.length > 0){\n var mo = mos.pop()\n if(mo.node instanceof Case){\n var rank = mo.node.parent.items.indexOf(mo.node)\n for(var _case of mo.node.parent.items.slice(rank + 1)){\n var _mo = match({node: _case, text: _case.text},\n string, mo.start)\n if(_mo){\n // update GroupMO object\n mos.push(_mo)\n this.end = _mo.end\n if(this.$groups.$last.length > 0){\n var ix = this.$groups.$last[this.$groups.$last.length - 1]\n this.$groups[ix].end = _mo.end\n }\n return true\n }\n }\n }\n if(mo.backtrack(string, groups)){\n mos.push(mo)\n if(this.node.num !== undefined){\n groups[this.node.num].end = mo.end\n }\n this.end = mo.end\n return true\n }\n }\n }\n // Else, remove last match if possible\n if(this.matches.length > this.node.repeat.min &&\n this.matches.length >= 1){\n this.matches.pop()\n if(this.matches.length > 0){\n this.end = $last(this.matches).end\n }else{\n // remove this group and its children from groups\n del_groups(groups, this.node)\n this.end = this.start\n }\n return true\n }\n // Group fails; if some of its subgroups succeded, remove them from\n // groups\n if(this.node.repeat.min > 0){\n del_groups(groups, this.node)\n }\n return false\n}\n\nGroupMO.prototype.toString = function(){\n var repr = _b_.repr(this.string.substring(this.start, this.end))\n repr = repr.substring(0, 50)\n return ''\n}\n\nGroupMO.prototype.groups = function(_default){\n var res = [],\n groupobj = this.$groups\n\n for(var key in this.node.$groups){\n if(isFinite(key)){\n res[key] = groupobj[key] === undefined ? 
_default :\n this.string.substring(groupobj[key].start, groupobj[key].end)\n }\n }\n res.shift()\n return $B.fast_tuple(res)\n}\n\n// Brython MatchObject\nvar MatchObject = $B.make_class(\"Match\",\n function(mo){\n return {\n __class__: MatchObject,\n mo\n }\n }\n)\n\nMatchObject.__copy__ = function(self){\n return self\n}\n\nMatchObject.__deepcopy__ = function(self){\n return self\n}\n\nMatchObject.__getitem__ = function(){\n var $ = $B.args(\"__getitem__\", 2, {self: null, key: null},\n ['self', 'key'], arguments, {}, null, null),\n self = $.self,\n key = $.key\n if(Array.isArray(key)){\n throw _b_.IndexError.$factory(\"no such group\")\n }\n if(key == 0){\n return self.mo.string.substring(self.mo.start, self.mo.end)\n }\n var match = self.mo.$groups[key]\n if(match !== undefined){\n return self.mo.string.substring(match.start, match.end)\n }else if(self.mo.node.$groups[key] !== undefined){\n return _b_.None\n }\n throw _b_.IndexError.$factory(\"no such group\")\n}\n\nMatchObject.__repr__ = MatchObject.__str__ = function(self){\n return self.mo.toString()\n}\n\nMatchObject.end = function(self){\n var $ = $B.args('end', 2, {self: null, group: null}, ['self', 'group'],\n arguments, {group: 0}, null, null)\n var group = MatchObject.group(self, $.group)\n if(group === _b_.None){\n return -1\n }else if($.group == 0){\n return self.mo.end\n }else{\n return self.mo.$groups[$.group].end\n }\n}\n\nMatchObject.endpos = _b_.property.$factory(\n function(self){\n return self.mo.endpos\n }\n)\n\nMatchObject.expand = function(){\n var $ = $B.args(\"expand\", 2, {self: null, template: null},\n ['self', 'template'], arguments, {}, null, null)\n var data = {\n repl: new StringObj($.template),\n }\n data = transform_repl(data, {groups: $.self.mo.node.$groups})\n if(typeof data.repl == \"function\"){\n return $B.$call(data.repl)(MatchObject.$factory($.self.mo))\n }else{\n return data.repl1\n }\n}\n\nMatchObject.group = function(self){\n var $ = $B.args(\"group\", 1, {self: null}, ['self'], arguments,\n {}, 'args', null),\n self = $.self,\n args = $.args\n if(args.length == 0){\n args[0] = 0\n }\n var groupobj = self.mo.$groups,\n result = []\n for(var group_id of args){\n if($B.rich_comp('__eq__', group_id, 0)){\n result.push(self.mo.string.substring(self.mo.start, self.mo.end))\n continue\n }\n try{\n // Convert group_id to int if possible\n group_id = $B.PyNumber_Index(group_id) // in py_utils.js\n }catch(err){\n // group_id can be an identifier\n }\n if(self.mo.node.$groups[group_id] === undefined){\n throw _b_.IndexError.$factory(\"no such group\")\n }\n var group = groupobj[group_id] // found in match\n result.push(group === undefined ?\n _b_.None :\n self.mo.string.substring(group.start, group.end))\n }\n if(args.length == 1){\n return result[0]\n }\n return $B.fast_tuple(result)\n}\n\nMatchObject.groupdict = function(){\n /*\n Return a dictionary containing all the named subgroups of the match, keyed\n by the subgroup name. The default argument is used for groups that did not\n participate in the match; it defaults to None.\n */\n var $ = $B.args(\"groupdict\", 2, {self: null, default: null},\n ['self', 'default'], arguments, {default: _b_.None},\n null, null),\n self = $.self,\n groupobj = $.self.mo.$groups,\n d = $B.empty_dict()\n for(var key in $.self.mo.node.$groups){\n if(! isFinite(key)){\n var value = groupobj[key] === undefined ? 
$.default :\n groupobj[key]\n if(value !== $.default){\n value = self.mo.string.substring(value.start, value.end)\n }\n _b_.dict.$setitem(d, key, value)\n }\n }\n return d\n}\n\nMatchObject.groups = function(self){\n var $ = $B.args(\"group\", 2, {self: null, default: null},\n ['self', 'default'], arguments,\n {default: _b_.None}, null, null),\n self = $.self,\n _default = $.default\n return self.mo.groups(_default)\n}\n\nMatchObject.lastindex = _b_.property.$factory(\n function(self){\n /* The integer index of the last matched capturing group, or None if\n no group was matched at all.\n */\n var last = self.mo.$groups.$last\n if(last.length == 0){\n return _b_.None\n }\n return parseInt($last(last))\n }\n)\n\nMatchObject.lastgroup = _b_.property.$factory(\n function(self){\n /* The name of the last matched capturing group, or None if the group\n didn't have a name, or if no group was matched at all.\n */\n var lastindex = MatchObject.lastindex.fget(self)\n if(lastindex === _b_.None){\n return _b_.None\n }\n var group = self.mo.node.$groups[lastindex],\n name = group.item.name\n return name === undefined ? _b_.None : name\n }\n)\n\nMatchObject.pos = _b_.property.$factory(\n function(self){\n return self.mo.start\n }\n)\n\nMatchObject.re = _b_.property.$factory(\n function(self){\n return self.mo.node.pattern\n }\n)\n\nMatchObject.regs = _b_.property.$factory(\n function(self){\n var res = [$B.fast_tuple($B.fast_tuple([self.mo.start, self.mo.end]))]\n for(var group_num in self.mo.node.$groups){\n if(isFinite(group_num)){\n var group = self.mo.node.$groups[group_num].item\n // group.pattern includes the opening and closing brackets\n res.push($B.fast_tuple([group.pos,\n group.pos + group.pattern.length - 2]))\n }\n }\n return $B.fast_tuple(res)\n }\n)\n\nMatchObject.span = function(){\n /*\n Match.span([group])\n\n For a match m, return the 2-tuple (m.start(group), m.end(group)). Note\n that if group did not contribute to the match, this is (-1, -1). 
group\n defaults to zero, the entire match.\n */\n var $ = $B.args(\"span\", 2, {self: null, group: null},\n ['self', 'group'], arguments,\n {group: 0}, null, null),\n self = $.self,\n group = $.group\n if(group == 0){\n return $B.fast_tuple([self.mo.start, self.mo.end])\n }\n var span = self.mo.$groups[group]\n if(span === undefined){\n return $B.fast_tuple([-1, -1])\n }\n return $B.fast_tuple([span.start, span.end])\n}\n\nMatchObject.start = function(self){\n var $ = $B.args('end', 2, {self: null, group: null}, ['self', 'group'],\n arguments, {group: 0}, null, null)\n var group = MatchObject.group(self, $.group)\n if(group === _b_.None){\n return -1\n }else if($.group == 0){\n return self.mo.start\n }else{\n return self.mo.$groups[$.group].start\n }\n}\n\nMatchObject.string = _b_.property.$factory(\n function(self){\n return self.mo.string.to_str()\n }\n)\n\n$B.set_func_names(MatchObject, 're')\n\nfunction log(){\n if(_debug.value){\n console.log.apply(null, arguments)\n }\n}\n\nfunction create_fullmatch_pattern(pattern){\n // transform into \"(?:)$\"\n // use a new pattern object, otherwise if pattern is in cache the\n // value in cache would be changed\n var new_pattern = {}\n for(var key in pattern){\n if(key == 'node'){\n continue\n }\n new_pattern[key] = pattern[key]\n }\n\n var ncgroup = new Group() // non-capturing group\n ncgroup.pos = 0\n ncgroup.non_capturing = true\n for(var item of pattern.node.items){\n ncgroup.add(item)\n }\n var se = new StringEnd()\n se.flags = Flag.$factory(32)\n new_pattern.node = new Node()\n new_pattern.node.add(ncgroup)\n new_pattern.node.add(se)\n return new_pattern\n}\n\nfunction match(pattern, string, pos, endpos, no_zero_width, groups){\n // Follow the pattern tree structure\n if(_debug.value){\n console.log('match pattern', pattern.text, 'pos', pos, string.substring(pos))\n alert()\n }\n if(endpos !== undefined){\n if(endpos < pos){\n return false\n }\n }else{\n endpos = string.length\n }\n if(pattern.node instanceof Node){\n show(pattern.node)\n }\n if(groups === undefined){\n groups = {$last:[]}\n }\n if(pattern.text === undefined){\n console.log('no text', pattern)\n }\n var node = pattern.node,\n mo\n if(node.items){\n // node is either a Choice between several items, or a sequence of\n // items\n if(node instanceof Choice){\n mo = false\n for(var _case of node.items){\n mo = match({node: _case, text: _case.text}, string, pos,\n endpos, no_zero_width, groups)\n if(mo){\n // remove groups inside choice and before successful case\n // that did not contribute to the match\n var groups_succeed = groups_in(_case),\n min_num = Math.min(Array.from(groups_succeed))\n for(var group_num of groups_in(node)){\n if(group_num < min_num){\n delete groups[group_num]\n }\n }\n if(_debug.value){\n console.log('case', _case + '', 'of choice', node +\n ' succeeds, groups', groups)\n }\n return mo\n }else{\n if(_debug.value){\n console.log('case', _case + '', 'of choice', node +\n ' fails')\n }\n }\n }\n return false\n }else{\n // sequence of items\n node.repeat = node.repeat === undefined ? {min: 1, max: 1} :\n node.repeat\n var start = pos,\n nb_repeat = 0,\n nb_zerolength_repeat = 0,\n matches = [],\n mos,\n match_start,\n empty_matches = {}\n // loop until we get enough repetitions\n while(true){\n if(empty_matches[pos]){\n // no use trying again\n return matches.length == 0 ? 
false :\n new GroupMO(node, start, matches, string, groups,\n endpos)\n }\n var initial_groups = Object.keys(groups)\n mos = []\n match_start = pos\n if(_debug.value){\n console.log(\"pattern\", pattern.text,\n \"loop in group match, match start\", match_start)\n }\n var i = 0\n while(i < node.items.length){\n var item = node.items[i]\n if(_debug.value){\n console.log('item', i, '/', node.items.length - 1,\n 'of pattern', pattern.text)\n }\n var mo = match({node: item, text: item + ''}, string, pos,\n endpos, no_zero_width, groups)\n if(mo){\n if(item instanceof Group &&\n item.type == \"lookahead_assertion\"){\n log(\"lookahead assertion\", item + '',\n \"succeeds, mo\", mo)\n }else{\n mos.push(mo)\n pos = mo.end\n }\n i++\n }else if(false && item instanceof Group &&\n item.type == \"negative_lookahead_assertion\"){\n log(\"negative lookahead assertion\", item, \"fails : ok !\")\n i++\n }else{\n if(_debug.value){\n console.log('item ' + item, 'of group fails, nb_repeat',\n nb_repeat, 'node repeat', node.repeat)\n }\n var backtrack = false\n while(mos.length > 0){\n var mo = mos.pop()\n if(mo.backtrack === undefined){\n log('no backtrack for', mo)\n }\n if(_debug.value){\n console.log('try backtrack on mo', mo)\n }\n if(mo.backtrack(string, groups)){\n log('can backtrack, mo', mo)\n mos.push(mo)\n i = mos.length\n log('mos', mos, 'restart at item', i)\n pos = mo.end\n backtrack = true\n break\n }\n }\n if(backtrack){\n log('backtrack ok')\n continue\n }else{\n if(node.type == \"negative_lookahead_assertion\"){\n // If a negative lookahead assertion fails,\n // return a match\n var res = new GroupMO(node, start, matches,\n string, groups, endpos)\n return res\n }\n if(nb_repeat == 0){\n // remove the groups introduced before\n // reaching this point\n for(var key in groups){\n if(initial_groups.indexOf(key) == -1){\n delete groups[key]\n }\n }\n }\n if(nb_repeat >= node.repeat.min){\n log(\"enough repetitions for node\", node)\n if(node.type == \"negative_lookahead_assertion\"){\n return false\n }\n return new GroupMO(node, start, matches, string,\n groups, endpos)\n }\n return false\n }\n }\n }\n if(node.type == \"negative_lookahead_assertion\"){\n // If a negative lookahead succeeds, return false\n return false\n }\n nb_repeat++\n if(pos > match_start){\n nb_zerolength_repeat = 0\n }else{\n nb_zerolength_repeat++\n empty_matches[pos] = true\n }\n matches.push({start: match_start, end: pos, mos})\n if(node.num !== undefined){\n groups[node.num] = $last(matches)\n if(node.name !== undefined){\n groups[node.name] = groups[node.num]\n }\n if(node.num != $last(groups.$last)){\n var ix = groups.$last.indexOf(node.num)\n if(ix > -1){\n groups.$last.splice(ix, 1)\n }\n groups.$last.push(node.num)\n }\n }\n if(nb_repeat >= node.repeat.max){\n var res = new GroupMO(node, start, matches, string,\n groups, endpos)\n if(res.start == res.end && no_zero_width){\n // no_zero_width is set when previous match in\n // iterator() had length 0; avoids infinite loops\n return false\n }\n return res\n }\n log('loop on group', pattern.text, 'nb repeats', nb_repeat,\n 'nb zero length', nb_zerolength_repeat, 'groups', groups)\n if(nb_zerolength_repeat == 65535){\n return matches.length == 0 ? 
false :\n new GroupMO(node, start, matches, string, groups,\n endpos)\n }\n }\n }\n }else{\n // for BackReference, Char, CharSeq, CharacterClass, CharacterSet,\n // ConditionalBackref, Lookbehind, StringStart, StringEnd\n var mo = node.match(string, pos, endpos, groups)\n if(_debug.value){\n console.log(node + '', \"mo\", mo)\n }\n if(mo){\n var len = mo.group_len === undefined ? 1 : mo.group_len,\n ix = node.non_greedy ? mo.nb_min : mo.nb_max,\n end = pos + len * ix\n return new MO(node, pos, mo, len)\n }else{\n return false\n }\n }\n}\n\n// expose re module API\nvar module = {\n cache: cache,\n compile: function(){\n var $ = $B.args(\"compile\", 2, {pattern: null, flags: null},\n ['pattern', 'flags'], arguments, {flags: no_flag},\n null, null)\n if($.pattern && $.pattern.__class__ === Pattern){\n if($.flags !== no_flag){\n throw _b_.ValueError.$factory(\n \"cannot process flags argument with a compiled pattern\")\n }\n return $.pattern\n }\n $.pattern = check_pattern_flags($.pattern, $.flags)\n var data = prepare({pattern: $.pattern})\n if(typeof $.flags == \"number\"){\n $.flags = Flag.$factory($.flags)\n }\n var jspat = compile(data.pattern, $.flags)\n return Pattern.$factory(jspat)\n },\n error: error,\n escape: function(){\n var $ = $B.args(\"escape\", 1, {pattern: null}, ['pattern'], arguments,\n {}, null, null),\n data = prepare({pattern: $.pattern}),\n pattern = data.pattern,\n res = []\n for(var cp of pattern.codepoints){\n if(escaped.indexOf(cp) > -1){\n res.push(BACKSLASH)\n }\n res.push(cp)\n }\n res = from_codepoint_list(res, data.type)\n if(data.type == \"bytes\" && $B.$isinstance(res, _b_.str)){\n res = _b_.str.encode(res, 'latin1')\n }\n return res\n },\n findall: function(){\n /* Return all non-overlapping matches of pattern in string, as a list\n of strings. The string is scanned left-to-right, and matches are\n returned in the order found. If one or more groups are present in\n the pattern, return a list of groups; this will be a list of tuples\n if the pattern has more than one group. Empty matches are included\n in the result.\n */\n var $ = $B.args(\"findall\", 3,\n {pattern: null, string: null, flags: null},\n ['pattern', 'string', 'flags'], arguments,\n {flags: no_flag}, null, null),\n pattern = $.pattern,\n string = $.string,\n flags = $.flags,\n data\n pattern = check_pattern_flags(pattern, flags)\n if(pattern.__class__ === Pattern){\n data = prepare({string})\n }else{\n data = prepare({string, pattern})\n pattern = Pattern.$factory(compile(data.pattern, flags))\n }\n if(data.type === \"str\"){\n function conv(s){\n return s === EmptyString ? '' : s\n }\n }else{\n function conv(s){\n return string2bytes(s)\n }\n }\n\n var iter = module.finditer.apply(null, arguments).js_gen,\n res = []\n while(true){\n var next = iter.next()\n if(next.done){\n return res\n }\n var bmo = next.value,\n mo = bmo.mo,\n groups = MatchObject.groups(bmo)\n\n // replace None by the empty string\n for(var i = 0, len = groups.length; i < len; i++){\n groups[i] = groups[i] === _b_.None ? 
\"\" : groups[i]\n }\n if(groups.length > 0){\n if(groups.length == 1){\n res.push(groups[0])\n }else{\n res.push($B.fast_tuple(groups))\n }\n }else{\n res.push(mo.string.substring(mo.start, mo.end))\n }\n }\n console.log(\"end findall\")\n },\n finditer: function(){\n var $ = $B.args(\"finditer\", 3,\n {pattern: null, string: null, flags: null},\n ['pattern', 'string', 'flags'], arguments,\n {flags: no_flag}, null, null),\n pattern = $.pattern,\n string = $.string,\n flags = $.flags\n if($B.$isinstance(string, [_b_.bytearray, _b_.memoryview])){\n string.in_iteration = true\n }\n var original_string = string,\n data\n pattern = check_pattern_flags(pattern, flags)\n if(pattern.__class__ === Pattern){\n data = prepare({string})\n flags = pattern.flags\n }else{\n data = prepare({string, pattern})\n pattern = Pattern.$factory(compile(data.pattern, flags))\n }\n if(pattern.__class__ !== Pattern){\n throw Error(\"pattern not a Python object\")\n }\n return $B.generator.$factory(iterator)(pattern.$pattern, data.string,\n flags, original_string)\n },\n fullmatch: function(){\n var $ = $B.args(\"fullmatch\", 3, {pattern: null, string: null, flags: null},\n ['pattern', 'string', 'flags'], arguments,\n {flags: no_flag}, null, null),\n pattern = $.pattern,\n string = $.string,\n flags = $.flags\n pattern = check_pattern_flags(pattern, flags)\n var data\n if(pattern.__class__ === Pattern){\n data = prepare({string})\n pattern = pattern.$pattern\n }else{\n data = prepare({pattern, string})\n pattern = compile(data.pattern, flags)\n }\n\n var new_pattern = create_fullmatch_pattern(pattern)\n\n // match transformed RE\n var res = match(new_pattern, data.string, 0)\n var bmo = res === false ? _b_.None : MatchObject.$factory(res)\n if(bmo !== _b_.None){\n if(bmo.mo.string.codepoints.length != bmo.mo.end - bmo.mo.start){\n return _b_.None\n }else{\n return bmo\n }\n }\n return _b_.None\n },\n Match: MatchObject,\n match: function(){\n var $ = $B.args(\"match\", 3, {pattern: null, string: null, flags: null},\n ['pattern', 'string', 'flags'], arguments,\n {flags: no_flag}, null, null),\n pattern = $.pattern,\n string = $.string,\n flags = $.flags\n pattern = check_pattern_flags(pattern, flags)\n var data\n if(pattern.__class__ === Pattern){\n data = prepare({string})\n pattern = pattern.$pattern\n }else{\n data = prepare({pattern, string})\n pattern = compile(data.pattern, flags)\n }\n var res = match(pattern, data.string, 0)\n return res === false ? _b_.None : MatchObject.$factory(res)\n },\n Pattern,\n purge: function(){\n var $ = $B.args(\"purge\", 0, {}, [], arguments, {}, null, null)\n cache.clear()\n return _b_.None\n },\n _reconstructor,\n Scanner,\n search: function(){\n var $ = $B.args(\"search\", 3, {pattern: null, string: null, flags: null},\n ['pattern', 'string', 'flags'], arguments,\n {flags: no_flag}, null, null),\n pattern = $.pattern,\n string = $.string,\n flags = $.flags,\n data\n pattern = check_pattern_flags(pattern, flags)\n if(pattern.__class__ === Pattern){\n data = prepare({string})\n }else{\n data = prepare({string, pattern})\n pattern = Pattern.$factory(compile(data.pattern, flags))\n }\n data.pattern = pattern\n // optimizations\n if(pattern.pattern.startsWith('\\\\A') ||\n pattern.pattern.startsWith('^')){\n if(! 
(pattern.$pattern.node.items[0] instanceof Choice)){\n var mo = match(data.pattern.$pattern, data.string, 0)\n if(mo){\n return MatchObject.$factory(mo)\n }else if(pattern.flags.value & MULTILINE.value){\n var pos = 0,\n cp\n while((cp = data.string.cp_at(pos)) !== undefined){\n if(cp == LINEFEED){\n mo = match(data.pattern.$pattern, data.string, pos + 1)\n if(mo){\n return MatchObject.$factory(mo)\n }\n }\n pos++\n }\n }else{\n return _b_.None\n }\n }\n }\n if(pattern.$pattern.fixed_length !== false &&\n isFinite(pattern.$pattern.fixed_length) &&\n pattern.pattern.endsWith('$') &&\n ! (pattern.flags.value & MULTILINE.value)){\n var mo = match(data.pattern.$pattern, data.string,\n data.string.length - pattern.$pattern.fixed_length)\n if(mo){\n return MatchObject.$factory(mo)\n }\n return _b_.None\n }\n var pos = 0\n if(data.string.codepoints.length == 0){\n mo = match(data.pattern.$pattern, data.string, 0)\n if(mo){\n mo.start = mo.end = 0\n }\n return mo ? MatchObject.$factory(mo) : _b_.None\n }\n while(pos < data.string.codepoints.length){\n var mo = match(data.pattern.$pattern, data.string, pos)\n if(mo){\n return MatchObject.$factory(mo)\n }else{\n pos++\n }\n }\n return _b_.None\n },\n set_debug: function(value){\n _debug.value = value\n },\n split: function(){\n var $ = $B.args(\"split\", 4,\n {pattern: null, string: null, maxsplit: null, flags: null},\n ['pattern', 'string', 'maxsplit', 'flags'],\n arguments, {maxsplit: 0, flags: no_flag}, null, null)\n var res = [],\n pattern = $.pattern,\n string = $.string,\n flags = $.flags,\n pos = 0,\n nb_split = 0,\n data\n if(pattern.__class__ !== Pattern){\n data = prepare({pattern, string})\n var comp = compile(data.pattern, flags)\n pattern = Pattern.$factory(comp)\n }else{\n data = {pattern, string}\n }\n for(var bmo of module.finditer(pattern, $.string).js_gen){\n var mo = bmo.mo, // finditer returns instances of MatchObject\n groupobj = mo.$groups\n res.push(data.string.substring(pos, mo.start))\n for(var key in mo.node.$groups){\n if(isFinite(key)){\n if(groupobj[key] !== undefined){\n res.push(data.string.substring(groupobj[key].start,\n groupobj[key].end))\n }else{\n res.push(_b_.None)\n }\n }\n }\n nb_split++\n pos = mo.end\n if(pos >= $.string.length){\n break\n }\n if($.maxsplit != 0 && nb_split >= $.maxsplit){\n break\n }\n }\n res.push(data.string.substring(pos))\n if(data.type === \"bytes\"){\n res = res.map(\n function(x){\n return $B.$isinstance(x, _b_.bytes) ?\n x :\n _b_.str.encode(x, \"latin-1\")\n }\n )\n }\n return res\n },\n sub: function(){\n var $ = $B.args(\"sub\", 5,\n {pattern: null, repl: null, string: null, count: null, flags: null},\n ['pattern', 'repl', 'string', 'count', 'flags'],\n arguments, {count: 0, flags: no_flag}, null, null),\n pattern = $.pattern,\n repl = $.repl,\n string = $.string,\n count = $.count,\n flags = $.flags,\n data\n check_pattern_flags(pattern, flags)\n if(typeof repl != \"function\"){\n if(pattern.__class__ != Pattern){\n data = prepare({pattern, string, repl})\n pattern = compile(data.pattern, flags)\n }else{\n data = prepare({string, repl})\n flags = pattern.flags\n pattern = pattern.$pattern\n }\n data = transform_repl(data, pattern)\n }else{\n if(pattern.__class__ != Pattern){\n data = prepare({pattern, string})\n pattern = compile(data.pattern, flags)\n }else{\n data = prepare({string})\n flags = pattern.flags\n pattern = pattern.$pattern\n }\n data.repl = repl\n }\n return subn(pattern, data.repl, data.string, count, flags)[0]\n },\n subn: function(){\n var $ = $B.args(\"sub\", 
5,\n {pattern: null, repl: null, string: null, count: null, flags: null},\n ['pattern', 'repl', 'string', 'count', 'flags'],\n arguments, {count: 0, flags: no_flag}, null, null),\n pattern = $.pattern,\n repl = $.repl,\n string = $.string,\n count = $.count,\n flags = $.flags,\n data\n if(pattern.__class__ != Pattern){\n data = prepare({pattern, repl, string})\n }else{\n data = prepare({repl, string})\n data.pattern = pattern.$pattern\n }\n return $B.fast_tuple(subn(data.pattern, data.repl, data.string, count,\n flags))\n }\n\n}\n\nvar ASCII = module.A = module.ASCII = Flag.$factory(256)\nvar IGNORECASE = module.I = module.IGNORECASE = Flag.$factory(2)\nvar LOCALE = module.L = module.LOCALE = Flag.$factory(4)\nvar MULTILINE = module.M = module.MULTILINE = Flag.$factory(8)\nvar DOTALL = module.S = module.DOTALL = Flag.$factory(16)\nvar U = module.U = module.UNICODE = Flag.$factory(32)\nvar VERBOSE = module.X = module.VERBOSE = Flag.$factory(64)\nmodule.cache = cache\nmodule._compile = module.compile\n\n\nvar inline_flags = {\n i: IGNORECASE,\n L: LOCALE,\n m: MULTILINE,\n s: DOTALL,\n u: U,\n x: VERBOSE,\n a: ASCII\n}\n\nvar flag_names = {\n i: 'IGNORECASE',\n L: 'LOCALE',\n m: 'MULTILINE',\n s: 'DOTALL',\n u: 'U',\n x: 'VERBOSE',\n a: 'ASCII'\n}\n\n$B.addToImported('python_re', module)\n\n})(__BRYTHON__)"], "_sre_utils": [".js", "var $module=(function($B){\n\n function unicode_iscased(cp){\n // cp : Unicode code point\n var letter = String.fromCodePoint(cp)\n return (letter != letter.toLowerCase() ||\n letter != letter.toUpperCase())\n }\n\n function ascii_iscased(cp){\n if(cp > 255){return false}\n return unicode_iscased(cp)\n }\n\n function unicode_tolower(cp){\n var letter = String.fromCodePoint(cp),\n lower = letter.toLowerCase()\n return lower.charCodeAt(0)\n }\n\n function ascii_tolower(cp){\n return unicode_tolower(cp)\n }\n\nreturn {\n unicode_iscased: unicode_iscased,\n ascii_iscased: ascii_iscased,\n unicode_tolower: unicode_tolower,\n ascii_tolower: ascii_tolower\n}\n\n}\n\n)(__BRYTHON__)"], "_multiprocessing": [".js", "// multiprocessing\n(function($B){\n\nvar _b_ = $B.builtins\n\nvar Process = $B.make_class('Process')\n\nvar $convert_args=function(args) {\n var _list=[]\n for(var i=0, _len_i = args.length; i < _len_i; i++) {\n var _a=args[i]\n if($B.$isinstance(_a, _b_.str)){_list.push(\"'\"+_a+\"'\")} else {_list.push(_a)}\n }\n\n return _list.join(',')\n}\n\nProcess.is_alive = function(self){return self.$alive}\n\nProcess.join = function(self, timeout){\n // need to block until process is complete\n // could probably use a addEventListener to execute all existing code\n // after this join statement\n\n self.$worker.addEventListener('message', function (e) {\n var data=e.data\n if (data.stdout != '') { // output stdout from process\n $B.stdout.write(data.stdout)\n }\n }, false);\n}\n\nProcess.run = function(self){\n //fix me\n}\n\nProcess.start = function(self){\n self.$worker.postMessage({target: self.$target,\n args: $convert_args(self.$args),\n // kwargs: self.$kwargs\n })\n self.$worker.addEventListener('error', function(e) { throw e})\n self.$alive=true\n}\n\nProcess.terminate = function(self){\n self.$worker.terminate()\n self.$alive=false\n}\n\n// variables\n//name\n//daemon\n//pid\n//exitcode\n\nProcess. 
$factory = function(){\n //arguments group=None, target=None, name=None, args=(), kwargs=()\n\n var $ns=$B.args('Process',0,{},[],arguments,{},null,'kw')\n var kw=$ns['kw']\n\n var target=_b_.dict.get($ns['kw'],'target', _b_.None)\n var args=_b_.dict.get($ns['kw'],'args', _b_.tuple.$factory())\n\n var worker = new Worker('/src/web_workers/multiprocessing.js')\n\n var res = {\n __class__:Process,\n $worker: worker,\n name: $ns['name'] || _b_.None,\n $target: target+'',\n $args: args,\n //$kwargs: $ns['kw'],\n $alive: false\n }\n return res\n}\n\n$B.set_func_names(Process, \"multiprocessing\")\n\nvar Pool = $B.make_class(\"Pool\")\n\nPool.__enter__ = function(self){}\nPool.__exit__ = function(self){}\n\nPool.__str__ = Pool.toString = Pool.__repr__=function(self){\n return ''\n}\n\nPool.map = function(){\n\n var $ns=$B.args('Pool.map', 3,\n {self:null, func:null, fargs:null}, ['self', 'func', 'fargs'],\n arguments,{},'args','kw')\n var func = $ns['func']\n var fargs = $ns['fargs']\n\n var _results = []\n\n fargs = _b_.iter(fargs)\n\n var _pos = 0\n console.log(self.$processes)\n _workers =[]\n for(var i=0; i < self.$processes; i++) {\n _workers[i] = new Worker('/src/web_workers/multiprocessing.js')\n var arg\n\n try{arg = $B.$getattr(fargs, '__next__')()}\n catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n }\n console.log(arg)\n _workers[i].finished=false\n _workers[i].postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n\n _workers[i].addEventListener('message', function(e) {\n _results[e.data.pos]=e.data.result\n if (_results.length == args.length) return _results\n\n try {\n arg = $B.$getattr(fargs, '__next__')()\n e.currentTarget.postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n } catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n this.finished=true\n }\n }, false);\n }\n}\n\nPool.apply_async = function(){\n\n var $ns=$B.$MakeArgs('apply_async', 3,\n {self:null, func:null, fargs:null}, ['self', 'func', 'fargs'],\n arguments,{},'args','kw')\n var func = $ns['func']\n var fargs = $ns['fargs']\n\n fargs = _b_.iter(fargs)\n\n async_result = {}\n async_result.get = function(timeout){\n console.log(results)\n console.log(fargs)\n return this.results}\n async_result.results=[]\n\n var _pos=0\n\n _workers=[]\n for(var i=0; i < self.$processes; i++) {\n _workers[i] = new Worker('/src/web_workers/multiprocessing.js')\n var arg\n\n try{arg = $B.$getattr(fargs, '__next__')()}\n catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n }\n //console.log(arg)\n //_workers[i].finished=false\n _workers[i].postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n\n _workers[i].addEventListener('message', function(e) {\n async_result.results[e.data.pos]=e.data.result\n //if (_results.length == args.length) return _results\n\n try {\n arg = $B.$getattr(fargs, '__next__')()\n e.currentTarget.postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n } catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n this.finished=true\n }\n }, false);\n }\n\n console.log(\"return\", async_result)\n return async_result\n}\n\nPool.$factory = function(){\n console.log(\"pool\")\n console.log(arguments)\n var $ns=$B.args('Pool',1,\n {processes:null},['processes'],arguments,{},'args','kw')\n\n var processes = $ns['processes']\n\n if (processes === _b_.None) {\n // look to see if we have stored cpu_count in local storage\n // maybe we should create a brython 
config file with settings,etc..??\n\n // if not there use a tool such as Core Estimator to calculate number of cpu's\n // http://eligrey.com/blog/post/cpu-core-estimation-with-javascript\n }\n\n console.log(processes)\n var res = {\n __class__:Pool,\n $processes:processes\n }\n return res\n}\n\n$B.set_func_names(Pool, \"multiprocessing\")\n\n$B.imported._multiprocessing = {Process:Process, Pool:Pool}\n\n})(__BRYTHON__)\n"], "unicodedata": [".js", "// Implementation of unicodedata\n(function($B){\n\n var _b_ = $B.builtins\n\n // Load unicode table if not already loaded\n if($B.unicodedb === undefined){\n var xhr = new XMLHttpRequest\n xhr.open(\"GET\",\n $B.brython_path + \"unicode.txt\", false)\n xhr.onreadystatechange = function(){\n if(this.readyState == 4){\n if(this.status == 200){\n $B.unicodedb = this.responseText\n }else{\n console.log(\"Warning - could not \" +\n \"load unicode.txt\")\n }\n }\n }\n xhr.send()\n }\n\n function _info(chr){\n var ord = _b_.ord(chr),\n hex = ord.toString(16).toUpperCase()\n while(hex.length < 4){hex = \"0\" + hex}\n var re = new RegExp(\"^\" + hex +\";(.+?);(.*?);(.*?);(.*?);(.*?);(.*);(.*);(.*)$\",\n \"m\"),\n search = re.exec($B.unicodedb)\n if(search === null){\n return null\n }else{\n return {\n name: search[1],\n category: search[2],\n combining: search[3],\n bidirectional: search[4],\n decomposition: search[5],\n decimal: search[6],\n digit: search[7],\n numeric: search[8]\n }\n }\n }\n\n function bidirectional(chr){\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr, hex)\n throw _b_.KeyError.$factory(chr)\n }\n return search.bidirectional\n }\n\n function category(chr){\n // Returns the general category assigned to the character chr as\n // string.\n if(/\\p{Cn}/u.test(chr.charAt(0))){\n return \"Cn\"\n }\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return search.category\n }\n\n function combining(chr){\n // Returns the general category assigned to the character chr as\n // string.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return parseInt(search.combining)\n }\n\n function decimal(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return parseInt(search.decimal)\n }\n\n function decomposition(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return search.decomposition\n }\n\n function digit(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return parseInt(search.digit)\n }\n\n function lookup(name){\n // Look up character by name. If a character with the given name is\n // found, return the corresponding character. 
If not found, KeyError\n // is raised.\n var re = new RegExp(\"^([0-9A-F]+);\" +\n name + \";(.*)$\", \"m\")\n search = re.exec($B.unicodedb)\n if(search === null){\n throw _b_.KeyError.$factory(\"undefined character name '\" +\n name + \"'\")\n }\n var res = parseInt(search[1], 16)\n return _b_.chr(res)\n }\n\n function name(chr, _default){\n // Returns the name assigned to the character chr as a string. If no\n // name is defined, default is returned, or, if not given, ValueError\n // is raised.\n var search = _info(chr)\n if(search === null){\n if(_default){return _default}\n throw _b_.KeyError.$factory(\"undefined character name '\" +\n chr + \"'\")\n }\n return search.name\n }\n\n function _norm(form, chr){\n var search = _info(chr)\n if(search === null){\n throw _b_.KeyError.$factory(chr)\n }\n switch(form){\n case \"NFC\":\n return chr\n case \"NFD\":\n var decomp = decomposition(chr),\n parts = decomp.split(\" \"),\n res = \"\"\n if(parts[0].startsWith(\"<\")){\n return chr\n }\n parts.forEach(function(part){\n if(! part.startsWith(\"<\")){\n res += _b_.chr(parseInt(part, 16))\n }\n })\n return res\n case \"NFKC\":\n var decomp = decomposition(chr),\n parts = decomp.split(\" \")\n if(parts[0] == \"\"){\n var res = \"\"\n parts.slice(1).forEach(function(part){\n res += _b_.chr(parseInt(part, 16))\n })\n return res\n }\n return chr\n case \"NFKD\":\n var decomp = decomposition(chr),\n parts = decomp.split(\" \")\n if(parts[0] == \"\"){\n var res = \"\"\n parts.slice(1).forEach(function(part){\n res += _b_.chr(parseInt(part, 16))\n })\n return res\n }\n return chr\n\n default:\n throw _b_.ValueError.$factory(\"invalid normalization form\")\n }\n }\n\n function normalize(form, unistr){\n var res = \"\"\n for(var i = 0, len = unistr.length; i < len; i++){\n res += _norm(form, unistr.charAt(i))\n }\n return res\n }\n\n function numeric(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n if(_default){return _default}\n throw _b_.KeyError.$factory(chr)\n }\n var parts = search.numeric.split('/'),\n value\n if(parts.length == 1){\n value = parseFloat(search.numeric)\n }else{\n value = parseInt(parts[0]) / parseInt(parts[1])\n }\n return $B.fast_float(value)\n }\n\n var module = {\n bidirectional: bidirectional,\n category: category,\n combining: combining,\n decimal: decimal,\n decomposition: decomposition,\n digit: digit,\n lookup: lookup,\n name: name,\n normalize: normalize,\n numeric: numeric,\n unidata_version: \"11.0.0\"\n }\n module.ucd_3_2_0 = {}\n for(var key in module){\n if(key == \"unidata_version\"){\n module.ucd_3_2_0[key] = '3.2.0'\n }else{\n module.ucd_3_2_0[key] = module[key] // approximation...\n }\n }\n $B.addToImported('unicodedata', module)\n\n})(__BRYTHON__)"], "_random": [".js", "// Javascript implementation of the _random module\n// Based on Ian Bicking's implementation of the Mersenne twister\n\n(function($B){\n\nvar _b_ = $B.builtins\n\n// Code copied from https://github.com/ianb/whrandom/blob/master/mersenne.js\n// by Ian Bicking\n\n// this program is a JavaScript version of Mersenne Twister,\n// a straight conversion from the original program, mt19937ar.c,\n// translated by y. 
okada on july 17, 2006.\n// and modified a little at july 20, 2006, but there are not any substantial differences.\n// modularized by Ian Bicking, March 25, 2013 (found original version at http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/VERSIONS/JAVASCRIPT/java-script.html)\n// in this program, procedure descriptions and comments of original source code were not removed.\n// lines commented with //c// were originally descriptions of c procedure. and a few following lines are appropriate JavaScript descriptions.\n// lines commented with /* and */ are original comments.\n// lines commented with // are additional comments in this JavaScript version.\n/*\n A C-program for MT19937, with initialization improved 2002/1/26.\n Coded by Takuji Nishimura and Makoto Matsumoto.\n\n Before using, initialize the state by using init_genrand(seed)\n or init_by_array(init_key, key_length).\n\n Copyright (C) 1997 - 2002, Makoto Matsumoto and Takuji Nishimura,\n All rights reserved.\n\n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions\n are met:\n\n 1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n 2. Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n\n 3. The names of its contributors may not be used to endorse or promote\n products derived from this software without specific prior written\n permission.\n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\n EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\n PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\n PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\n LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n Any feedback is very welcome.\n http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html\n email: m-mat @ math.sci.hiroshima-u.ac.jp (remove space)\n*/\n\nfunction RandomStream(seed) {\n /*jshint bitwise:false */\n /* Period parameters */\n //c//#define N 624\n //c//#define M 397\n //c//#define MATRIX_A 0x9908b0dfUL /* constant vector a */\n //c//#define UPPER_MASK 0x80000000UL /* most significant w-r bits */\n //c//#define LOWER_MASK 0x7fffffffUL /* least significant r bits */\n var N = 624\n var M = 397\n var MATRIX_A = 0x9908b0df /* constant vector a */\n var UPPER_MASK = 0x80000000 /* most significant w-r bits */\n var LOWER_MASK = 0x7fffffff /* least significant r bits */\n //c//static unsigned long mt[N]; /* the array for the state vector */\n //c//static int mti=N+1; /* mti==N+1 means mt[N] is not initialized */\n var mt = new Array(N) /* the array for the state vector */\n var mti = N + 1 /* mti==N+1 means mt[N] is not initialized */\n\n function unsigned32(n1){\n // returns a 32-bits unsiged integer from an operand to which applied a\n // bit operator.\n return n1 < 0 ? 
(n1 ^ UPPER_MASK) + UPPER_MASK : n1\n }\n\n function subtraction32(n1, n2){\n // emulates lowerflow of a c 32-bits unsiged integer variable, instead of\n // the operator -. these both arguments must be non-negative integers\n // expressible using unsigned 32 bits.\n return n1 < n2 ? unsigned32((0x100000000 - (n2 - n1)) & 0xffffffff) :\n n1 - n2\n }\n\n function addition32(n1, n2){\n // emulates overflow of a c 32-bits unsiged integer variable, instead of\n // the operator +. these both arguments must be non-negative integers\n // expressible using unsigned 32 bits.\n return unsigned32((n1 + n2) & 0xffffffff)\n }\n\n function multiplication32(n1, n2){\n // emulates overflow of a c 32-bits unsiged integer variable, instead of the\n // operator *. these both arguments must be non-negative integers\n // expressible using unsigned 32 bits.\n var sum = 0\n for (var i = 0; i < 32; ++i){\n if((n1 >>> i) & 0x1){\n sum = addition32(sum, unsigned32(n2 << i))\n }\n }\n return sum\n }\n\n /* initializes mt[N] with a seed */\n //c//void init_genrand(unsigned long s)\n function init_genrand(s) {\n //c//mt[0]= s & 0xffffffff;\n mt[0] = unsigned32(s & 0xffffffff)\n for(mti = 1; mti < N; mti++){\n mt[mti] =\n //c//(1812433253 * (mt[mti-1] ^ (mt[mti-1] >> 30)) + mti);\n addition32(multiplication32(1812433253,\n unsigned32(mt[mti - 1] ^ (mt[mti - 1] >>> 30))), mti)\n /* See Knuth TAOCP Vol2. 3rd Ed. P.106 for multiplier. */\n /* In the previous versions, MSBs of the seed affect */\n /* only MSBs of the array mt[]. */\n /* 2002/01/09 modified by Makoto Matsumoto */\n //c//mt[mti] &= 0xffffffff;\n mt[mti] = unsigned32(mt[mti] & 0xffffffff);\n /* for >32 bit machines */\n }\n }\n\n /* initialize by an array with array-length */\n /* init_key is the array for initializing keys */\n /* key_length is its length */\n /* slight change for C++, 2004/2/26 */\n //c//void init_by_array(unsigned long init_key[], int key_length)\n function init_by_array(init_key, key_length) {\n //c//int i, j, k;\n var i, j, k\n init_genrand(19650218)\n i = 1\n j = 0\n k = (N > key_length ? 
N : key_length)\n for(; k; k--){\n //c//mt[i] = (mt[i] ^ ((mt[i-1] ^ (mt[i-1] >> 30)) * 1664525))\n //c// + init_key[j] + j; /* non linear */\n mt[i] = addition32(\n addition32(unsigned32(mt[i] ^\n multiplication32(unsigned32(mt[i - 1] ^ (mt[i - 1] >>> 30)),\n 1664525)),\n init_key[j]), j)\n mt[i] =\n //c//mt[i] &= 0xffffffff; /* for WORDSIZE > 32 machines */\n unsigned32(mt[i] & 0xffffffff)\n i++\n j++\n if(i >= N){mt[0] = mt[N - 1]; i = 1}\n if(j >= key_length){j = 0}\n }\n for(k = N - 1; k; k--){\n //c//mt[i] = (mt[i] ^ ((mt[i-1] ^ (mt[i-1] >> 30)) * 1566083941))\n //c//- i; /* non linear */\n mt[i] = subtraction32(\n unsigned32(\n (mt[i]) ^\n multiplication32(\n unsigned32(mt[i - 1] ^ (mt[i - 1] >>> 30)),\n 1566083941)),\n i\n )\n //c//mt[i] &= 0xffffffff; /* for WORDSIZE > 32 machines */\n mt[i] = unsigned32(mt[i] & 0xffffffff)\n i++\n if(i >= N){mt[0] = mt[N - 1]; i = 1}\n }\n mt[0] = 0x80000000; /* MSB is 1; assuring non-zero initial array */\n }\n\n /* generates a random number on [0,0xffffffff]-interval */\n //c//unsigned long genrand_int32(void)\n function genrand_int32() {\n //c//unsigned long y;\n //c//static unsigned long mag01[2]={0x0UL, MATRIX_A};\n var y;\n var mag01 = [0x0, MATRIX_A];\n /* mag01[x] = x * MATRIX_A for x=0,1 */\n\n if(mti >= N){ /* generate N words at one time */\n //c//int kk;\n var kk\n\n if(mti == N + 1){ /* if init_genrand() has not been called, */\n init_genrand(Date.now()) /* a default initial seed is used */\n }\n\n for(kk = 0; kk < N - M; kk++){\n //c//y = (mt[kk]&UPPER_MASK)|(mt[kk+1]&LOWER_MASK);\n //c//mt[kk] = mt[kk+M] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[kk]&UPPER_MASK) | (mt[kk + 1]&LOWER_MASK))\n mt[kk] = unsigned32(mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1])\n }\n for(;kk < N - 1; kk++){\n //c//y = (mt[kk]&UPPER_MASK)|(mt[kk+1]&LOWER_MASK);\n //c//mt[kk] = mt[kk+(M-N)] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[kk]&UPPER_MASK) | (mt[kk + 1]&LOWER_MASK))\n mt[kk] = unsigned32(mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1])\n }\n //c//y = (mt[N-1]&UPPER_MASK)|(mt[0]&LOWER_MASK);\n //c//mt[N-1] = mt[M-1] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK))\n mt[N - 1] = unsigned32(mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1])\n mti = 0\n }\n\n y = mt[mti++]\n\n /* Tempering */\n //c//y ^= (y >> 11);\n //c//y ^= (y << 7) & 0x9d2c5680;\n //c//y ^= (y << 15) & 0xefc60000;\n //c//y ^= (y >> 18);\n y = unsigned32(y ^ (y >>> 11))\n y = unsigned32(y ^ ((y << 7) & 0x9d2c5680))\n y = unsigned32(y ^ ((y << 15) & 0xefc60000))\n y = unsigned32(y ^ (y >>> 18))\n\n return y\n }\n\n /* generates a random number on [0,0x7fffffff]-interval */\n //c//long genrand_int31(void)\n function genrand_int31(){\n //c//return (genrand_int32()>>1);\n return (genrand_int32()>>>1)\n }\n\n /* generates a random number on [0,1]-real-interval */\n //c//double genrand_real1(void)\n function genrand_real1(){\n return genrand_int32()*(1.0/4294967295.0)\n /* divided by 2^32-1 */\n }\n\n /* generates a random number on [0,1)-real-interval */\n //c//double genrand_real2(void)\n function genrand_real2(){\n return genrand_int32() * (1.0 / 4294967296.0)\n /* divided by 2^32 */\n }\n\n /* generates a random number on (0,1)-real-interval */\n //c//double genrand_real3(void)\n function genrand_real3() {\n return ((genrand_int32()) + 0.5) * (1.0 / 4294967296.0)\n /* divided by 2^32 */\n }\n\n /* generates a random number on [0,1) with 53-bit resolution*/\n //c//double genrand_res53(void)\n function genrand_res53() {\n //c//unsigned long 
a=genrand_int32()>>5, b=genrand_int32()>>6;\n var a = genrand_int32() >>> 5,\n b = genrand_int32() >>> 6\n return (a * 67108864.0 + b) * (1.0 / 9007199254740992.0)\n }\n /* These real versions are due to Isaku Wada, 2002/01/09 added */\n\n var random = genrand_res53\n\n random.seed = function(seed){\n if(seed === undefined || $B.is_none(seed)){\n const entries = new Uint32Array(N)\n crypto.getRandomValues(entries)\n init_by_array(Array.from(entries), N)\n return\n }\n\n if(!$B.$isinstance(seed, _b_.int)){\n seed = _b_.hash(seed)\n }\n\n // Transform to long integer\n if(typeof seed == \"number\"){\n seed = BigInt(seed)\n }else if(seed.__class__ === $B.long_int){\n seed = seed.value\n }else{\n return random.seed(seed.$brython_value)\n }\n\n // Take abs(seed)\n seed = seed > 0 ? seed : -seed\n\n var keys = []\n var int32_1 = 2n ** 32n - 1n\n\n // decomposition in factors of 2 ** 32\n while(seed >= int32_1){\n var quot = seed / int32_1,\n rest = seed % int32_1\n // Rest is a JS number (< 2 ** 32)\n keys.push(Number(rest))\n // Quotient is either a JS number or a instance of long_int\n // but seed must be long_int\n seed = quot\n }\n keys.push(Number(seed))\n\n init_by_array(keys, keys.length)\n }\n\n random.seed(seed)\n\n random.int31 = genrand_int31\n random.int32 = genrand_int32\n random.real1 = genrand_real1\n random.real2 = genrand_real2\n random.real3 = genrand_real3\n random.res53 = genrand_res53\n\n // Added for compatibility with Python\n random.getstate = function(){\n return $B.fast_tuple(mt.concat([mti]))\n }\n\n random.setstate = function(state){\n mt = state.slice(0, state.length - 1)\n mti = state[state.length - 1]\n }\n\n return random\n\n}\n\nvar Random = $B.make_class(\"Random\",\n function(){\n return {\n __class__: Random,\n _random: RandomStream(Date.now())\n }\n }\n)\n\nRandom.getrandbits = function(){\n var $ = $B.args(\"getrandbits\", 2, {self: null, k:null}, [\"self\", \"k\"],\n arguments, {}, null, null),\n self = $.self,\n k = $B.$GetInt($.k)\n\n if(k < 0)\n throw _b_.ValueError.$factory('number of bits must be non-negative')\n\n if(k === 0)\n return 0\n\n const words = Math.floor((k - 1) / 32) + 1\n const wordarray = new ArrayBuffer(words * 4)\n const wordarray_view = new DataView(wordarray)\n\n /* Fill-out bits of long integer, by 32-bit words, from least significant\n to most significant. 
*/\n for(i = 0; i < words; i++, k -= 32){\n r = self._random.int32()\n if (k < 32)\n r >>>= (32 - k) /* Drop least significant bits */\n wordarray_view.setUint32(i * 4, r, true)\n }\n\n return _b_.int.from_bytes(_b_.bytes.$factory(Array.from(new Uint8Array(wordarray))), \"little\")\n}\n\nRandom.getstate = function(){\n var $ = $B.args('getstate', 1, {self: null},\n [\"self\"], arguments, {}, null, null),\n self = $.self\n return self._random.getstate()\n}\n\nRandom.random = function(){\n var $ = $B.args('random', 1, {self: null}, [\"self\"],\n arguments, {}, null, null),\n self = $.self\n return $B.fast_float(self._random())\n}\n\nRandom.seed = function(){\n var $ = $B.args('seed', 2, {self: null, n: null}, ['self', 'n'],\n arguments, {}, null, null),\n self = $.self,\n n = $.n\n\n if (self._random === undefined)\n self._random = RandomStream(n)\n else\n self._random.seed(n)\n}\n\nRandom.setstate = function(){\n var $ = $B.args('setstate', 2, {self: null, state:null}, ['self', 'state'],\n arguments, {}, null, null),\n self = $.self,\n state = $.state\n return self._random.setstate(state)\n}\n\n$B.set_func_names(Random, \"_random\")\n\n$B.imported._random = { Random }\n\n})(__BRYTHON__)\n"], "_symtable": [".js", "(function($B){\n\nvar _b_ = $B.builtins\n\nvar module = {\n CELL: 5,\n DEF_ANNOT: 256,\n DEF_BOUND: 134,\n DEF_FREE: 32,\n DEF_FREE_CLASS: 64,\n DEF_GLOBAL: 1,\n DEF_IMPORT: 128,\n DEF_LOCAL: 2,\n DEF_NONLOCAL: 8,\n DEF_PARAM: 4,\n FREE: 4,\n GLOBAL_EXPLICIT: 2,\n GLOBAL_IMPLICIT: 3,\n LOCAL: 1,\n SCOPE_MASK: 15,\n SCOPE_OFF: 11,\n TYPE_CLASS: 1,\n TYPE_FUNCTION: 0,\n TYPE_MODULE: 2,\n USE: 16,\n symtable: function(){\n var $ = $B.args('symtable', 3,\n {code: null, filename: null, compile_type: null},\n ['code', 'filename', 'compile_type'], arguments,\n {}, null, null)\n var ast = _b_.compile($.code, $.filename, $.compile_type,\n $B.PyCF_ONLY_AST)\n // ast is an instance of Python class\n // _Py_Symtable_Build in symtable.js uses the underlying JS object\n return $B._PySymtable_Build(ast.$js_ast, $.filename)\n }\n}\n\n$B.addToImported('_symtable', module)\n\n})(__BRYTHON__)"], "_sre": [".py", "\n''\n\n\n\n\n\n\n\nMAXREPEAT=2147483648\nMAXGROUPS=2147483647\n\nimport array\nimport operator,sys\nfrom sre_constants import ATCODES,OPCODES,CHCODES\nfrom sre_constants import SRE_INFO_PREFIX,SRE_INFO_LITERAL\nfrom sre_constants import SRE_FLAG_UNICODE,SRE_FLAG_LOCALE\n\n\nfrom _sre_utils import(unicode_iscased,ascii_iscased,unicode_tolower,\nascii_tolower)\n\nimport sys\n\n\n\nMAGIC=20171005\n\n\n\n\n\n\n\n\n\n\n\n\n\nCODESIZE=4\n\ncopyright=\"_sre.py 2.4c Copyright 2005 by Nik Haldimann\"\n\n\ndef getcodesize():\n return CODESIZE\n \ndef compile(pattern,flags,code,groups=0,groupindex={},indexgroup=[None]):\n ''\n \n return SRE_Pattern(pattern,flags,code,groups,groupindex,indexgroup)\n \ndef getlower(char_ord,flags):\n if(char_ord <128)or(flags&SRE_FLAG_UNICODE)\\\n or(flags&SRE_FLAG_LOCALE and char_ord <256):\n \n return ord(chr(char_ord).lower())\n else:\n return char_ord\n \n \nclass SRE_Pattern:\n\n def __init__(self,pattern,flags,code,groups=0,groupindex={},indexgroup=[None]):\n self.pattern=pattern\n self.flags=flags\n self.groups=groups\n self.groupindex=groupindex\n self._indexgroup=indexgroup\n self._code=code\n \n def match(self,string,pos=0,endpos=sys.maxsize):\n ''\n\n \n state=_State(string,pos,endpos,self.flags)\n if state.match(self._code):\n return SRE_Match(self,state)\n return None\n \n def fullmatch(self,string,pos=0,endpos=sys.maxsize):\n ''\n\n \n end=\"$\"if 
isinstance(string,str)else b\"$\"\n if not string.endswith(end):\n string +=end\n state=_State(string,pos,endpos,self.flags)\n if state.match(self._code):\n return SRE_Match(self,state)\n return None\n \n def search(self,string,pos=0,endpos=sys.maxsize):\n ''\n\n\n \n state=_State(string,pos,endpos,self.flags)\n if state.search(self._code):\n return SRE_Match(self,state)\n else:\n return None\n \n def findall(self,string,pos=0,endpos=sys.maxsize):\n ''\n matchlist=[]\n state=_State(string,pos,endpos,self.flags)\n while state.start <=state.end:\n state.reset()\n state.string_position=state.start\n if not state.search(self._code):\n break\n match=SRE_Match(self,state)\n if self.groups ==0 or self.groups ==1:\n item=match.group(self.groups)\n else:\n item=match.groups(\"\")\n matchlist.append(item)\n if state.string_position ==state.start:\n state.start +=1\n else:\n state.start=state.string_position\n return matchlist\n \n def _subx(self,template,string,count=0,subn=False):\n filter=template\n if not callable(template)and \"\\\\\"in template:\n \n \n \n \n import re as sre\n filter=sre._subx(self,template)\n state=_State(string,0,sys.maxsize,self.flags)\n sublist=[]\n \n n=last_pos=0\n while not count or n 0):\n \n if callable(filter):\n sublist.append(filter(SRE_Match(self,state)))\n else:\n sublist.append(filter)\n last_pos=state.string_position\n n +=1\n if state.string_position ==state.start:\n state.start +=1\n else:\n state.start=state.string_position\n \n if last_pos =0 and group <=self.re.groups:\n return group\n else:\n if group in self.re.groupindex:\n return self.re.groupindex[group]\n raise IndexError(\"no such group\")\n \n def _get_slice(self,group,default):\n group_indices=self.regs[group]\n if group_indices[0]>=0:\n return self.string[group_indices[0]:group_indices[1]]\n else:\n return default\n \n def start(self,group=0):\n ''\n\n \n return self.regs[self._get_index(group)][0]\n \n def end(self,group=0):\n ''\n\n \n return self.regs[self._get_index(group)][1]\n \n def span(self,group=0):\n ''\n return self.start(group),self.end(group)\n \n def expand(self,template):\n ''\n \n import sre\n return sre._expand(self.re,self,template)\n \n def groups(self,default=None):\n ''\n\n \n groups=[]\n for indices in self.regs[1:]:\n if indices[0]>=0:\n groups.append(self.string[indices[0]:indices[1]])\n else:\n groups.append(default)\n return tuple(groups)\n \n def groupdict(self,default=None):\n ''\n\n \n groupdict={}\n for key,value in self.re.groupindex.items():\n groupdict[key]=self._get_slice(value,default)\n return groupdict\n \n def group(self,*args):\n ''\n \n if len(args)==0:\n args=(0,)\n grouplist=[]\n for group in args:\n grouplist.append(self._get_slice(self._get_index(group),None))\n if len(grouplist)==1:\n return grouplist[0]\n else:\n return tuple(grouplist)\n \n def __copy__():\n raise TypeError(\"cannot copy this pattern object\")\n \n def __deepcopy__():\n raise TypeError(\"cannot copy this pattern object\")\n \n def __str__(self):\n start,end=self.start(0),self.end(0)\n return(f\"\")\n \nclass _State:\n\n def __init__(self,string,start,end,flags):\n if isinstance(string,bytearray):\n string=str(bytes(string),\"latin1\")\n if isinstance(string,bytes):\n string=str(string,\"latin1\")\n self.string=string\n if start <0:\n start=0\n if end >len(string):\n end=len(string)\n self.start=start\n self.string_position=self.start\n self.end=end\n self.pos=start\n self.flags=flags\n self.reset()\n \n def reset(self):\n self.marks=[]\n self.lastindex=-1\n self.marks_stack=[]\n 
self.context_stack=[]\n self.repeat=None\n \n def match(self,pattern_codes):\n \n \n \n \n \n \n \n \n dispatcher=_OpcodeDispatcher()\n self.context_stack.append(_MatchContext(self,pattern_codes))\n has_matched=None\n while len(self.context_stack)>0:\n context=self.context_stack[-1]\n has_matched=dispatcher.match(context)\n if has_matched is not None:\n self.context_stack.pop()\n return has_matched\n \n def search(self,pattern_codes):\n flags=0\n if OPCODES[pattern_codes[0]].name ==\"info\":\n \n \n if pattern_codes[2]&SRE_INFO_PREFIX and pattern_codes[5]>1:\n return self.fast_search(pattern_codes)\n flags=pattern_codes[2]\n pattern_codes=pattern_codes[pattern_codes[1]+1:]\n \n string_position=self.start\n if OPCODES[pattern_codes[0]].name ==\"literal\":\n \n \n character=pattern_codes[1]\n while True:\n while string_position =self.end:\n return False\n self.start=string_position\n string_position +=1\n self.string_position=string_position\n if flags&SRE_INFO_LITERAL:\n return True\n if self.match(pattern_codes[2:]):\n return True\n return False\n \n \n while string_position <=self.end:\n self.reset()\n self.start=self.string_position=string_position\n if self.match(pattern_codes):\n return True\n string_position +=1\n return False\n \n def fast_search(self,pattern_codes):\n ''\n \n \n \n flags=pattern_codes[2]\n prefix_len=pattern_codes[5]\n prefix_skip=pattern_codes[6]\n prefix=pattern_codes[7:7+prefix_len]\n overlap=pattern_codes[7+prefix_len -1:pattern_codes[1]+1]\n pattern_codes=pattern_codes[pattern_codes[1]+1:]\n i=0\n string_position=self.string_position\n while string_position =len(self.marks):\n self.marks.extend([None]*(mark_nr -len(self.marks)+1))\n self.marks[mark_nr]=position\n \n def get_marks(self,group_index):\n marks_index=2 *group_index\n if len(self.marks)>marks_index+1:\n return self.marks[marks_index],self.marks[marks_index+1]\n else:\n return None,None\n \n def marks_push(self):\n self.marks_stack.append((self.marks[:],self.lastindex))\n \n def marks_pop(self):\n self.marks,self.lastindex=self.marks_stack.pop()\n \n def marks_pop_keep(self):\n self.marks,self.lastindex=self.marks_stack[-1]\n \n def marks_pop_discard(self):\n self.marks_stack.pop()\n \n def lower(self,char_ord):\n return getlower(char_ord,self.flags)\n \n \nclass _MatchContext:\n\n def __init__(self,state,pattern_codes):\n self.state=state\n self.pattern_codes=pattern_codes\n self.string_position=state.string_position\n self.code_position=0\n self.has_matched=None\n \n def push_new_context(self,pattern_offset):\n ''\n\n \n child_context=_MatchContext(self.state,\n self.pattern_codes[self.code_position+pattern_offset:])\n \n \n \n \n self.state.context_stack.append(child_context)\n return child_context\n \n def peek_char(self,peek=0):\n return self.state.string[self.string_position+peek]\n \n def skip_char(self,skip_count):\n self.string_position +=skip_count\n \n def remaining_chars(self):\n return self.state.end -self.string_position\n \n def peek_code(self,peek=0):\n return self.pattern_codes[self.code_position+peek]\n \n def skip_code(self,skip_count):\n self.code_position +=skip_count\n \n def remaining_codes(self):\n return len(self.pattern_codes)-self.code_position\n \n def at_beginning(self):\n return self.string_position ==0\n \n def at_end(self):\n return self.string_position ==self.state.end\n \n def at_linebreak(self):\n return not self.at_end()and _is_linebreak(self.peek_char())\n \n def at_boundary(self,word_checker):\n if self.at_beginning()and self.at_end():\n return False\n that=not 
self.at_beginning()and word_checker(self.peek_char(-1))\n this=not self.at_end()and word_checker(self.peek_char())\n return this !=that\n \n \nclass _RepeatContext(_MatchContext):\n\n def __init__(self,context):\n _MatchContext.__init__(self,context.state,\n context.pattern_codes[context.code_position:])\n self.count=-1\n \n self.previous=context.state.repeat\n self.last_position=None\n \n \nclass _Dispatcher:\n\n DISPATCH_TABLE=None\n \n def dispatch(self,code,context):\n method=self.DISPATCH_TABLE.get(code,self.__class__.unknown)\n return method(self,context)\n \n def unknown(self,code,ctx):\n raise NotImplementedError()\n \n def build_dispatch_table(cls,items,method_prefix):\n if cls.DISPATCH_TABLE is not None:\n return\n table={}\n for item in items:\n key,value=item.name.lower(),int(item)\n if hasattr(cls,\"%s%s\"%(method_prefix,key)):\n table[value]=getattr(cls,\"%s%s\"%(method_prefix,key))\n cls.DISPATCH_TABLE=table\n \n build_dispatch_table=classmethod(build_dispatch_table)\n \n \nclass _OpcodeDispatcher(_Dispatcher):\n\n def __init__(self):\n self.executing_contexts={}\n self.at_dispatcher=_AtcodeDispatcher()\n self.ch_dispatcher=_ChcodeDispatcher()\n self.set_dispatcher=_CharsetDispatcher()\n \n def match(self,context):\n ''\n\n \n while context.remaining_codes()>0 and context.has_matched is None:\n opcode=context.peek_code()\n if not self.dispatch(opcode,context):\n return None\n if context.has_matched is None:\n context.has_matched=False\n return context.has_matched\n \n def dispatch(self,opcode,context):\n ''\n \n \n if id(context)in self.executing_contexts:\n generator=self.executing_contexts[id(context)]\n del self.executing_contexts[id(context)]\n has_finished=next(generator)\n else:\n method=self.DISPATCH_TABLE.get(opcode,_OpcodeDispatcher.unknown)\n has_finished=method(self,context)\n if hasattr(has_finished,\"__next__\"):\n generator=has_finished\n has_finished=next(generator)\n if not has_finished:\n self.executing_contexts[id(context)]=generator\n return has_finished\n \n def op_success(self,ctx):\n \n \n ctx.state.string_position=ctx.string_position\n ctx.has_matched=True\n return True\n \n def op_failure(self,ctx):\n \n \n ctx.has_matched=False\n return True\n \n def general_op_literal(self,ctx,compare,decorate=lambda x:x):\n if ctx.at_end()or not compare(decorate(ord(ctx.peek_char())),\n decorate(ctx.peek_code(1))):\n ctx.has_matched=False\n ctx.skip_code(2)\n ctx.skip_char(1)\n \n def op_literal(self,ctx):\n \n \n \n self.general_op_literal(ctx,operator.eq)\n return True\n \n def op_not_literal(self,ctx):\n \n \n \n self.general_op_literal(ctx,operator.ne)\n return True\n \n def op_literal_ignore(self,ctx):\n \n \n \n self.general_op_literal(ctx,operator.eq,ctx.state.lower)\n return True\n \n def op_literal_uni_ignore(self,ctx):\n self.general_op_literal(ctx,operator.eq,ctx.state.lower)\n return True\n \n def op_not_literal_ignore(self,ctx):\n \n \n \n self.general_op_literal(ctx,operator.ne,ctx.state.lower)\n return True\n \n def op_at(self,ctx):\n \n \n \n if not self.at_dispatcher.dispatch(ctx.peek_code(1),ctx):\n ctx.has_matched=False\n \n return True\n ctx.skip_code(2)\n return True\n \n def op_category(self,ctx):\n \n \n \n if ctx.at_end()or not self.ch_dispatcher.dispatch(ctx.peek_code(1),ctx):\n ctx.has_matched=False\n \n return True\n ctx.skip_code(2)\n ctx.skip_char(1)\n return True\n \n def op_any(self,ctx):\n \n \n \n if ctx.at_end()or ctx.at_linebreak():\n ctx.has_matched=False\n \n return True\n ctx.skip_code(1)\n ctx.skip_char(1)\n return True\n \n 
def op_any_all(self,ctx):\n \n \n \n if ctx.at_end():\n ctx.has_matched=False\n \n return True\n ctx.skip_code(1)\n ctx.skip_char(1)\n return True\n \n def general_op_in(self,ctx,decorate=lambda x:x):\n \n \n if ctx.at_end():\n ctx.has_matched=False\n \n return\n skip=ctx.peek_code(1)\n ctx.skip_code(2)\n \n \n if not self.check_charset(ctx,decorate(ord(ctx.peek_char()))):\n \n ctx.has_matched=False\n return\n ctx.skip_code(skip -1)\n ctx.skip_char(1)\n \n \n def op_in(self,ctx):\n \n \n \n self.general_op_in(ctx)\n return True\n \n def op_in_ignore(self,ctx):\n \n \n \n self.general_op_in(ctx,ctx.state.lower)\n return True\n \n def op_in_uni_ignore(self,ctx):\n self.general_op_in(ctx,ctx.state.lower)\n return True\n \n def op_jump(self,ctx):\n \n \n \n ctx.skip_code(ctx.peek_code(1)+1)\n return True\n \n \n \n op_info=op_jump\n \n def op_mark(self,ctx):\n \n \n \n ctx.state.set_mark(ctx.peek_code(1),ctx.string_position)\n ctx.skip_code(2)\n return True\n \n def op_branch(self,ctx):\n \n \n \n ctx.state.marks_push()\n ctx.skip_code(1)\n current_branch_length=ctx.peek_code(0)\n while current_branch_length:\n \n \n if not(OPCODES[ctx.peek_code(1)].name ==\"literal\"and\\\n (ctx.at_end()or ctx.peek_code(2)!=ord(ctx.peek_char()))):\n ctx.state.string_position=ctx.string_position\n child_context=ctx.push_new_context(1)\n \n yield False\n if child_context.has_matched:\n ctx.has_matched=True\n yield True\n ctx.state.marks_pop_keep()\n ctx.skip_code(current_branch_length)\n current_branch_length=ctx.peek_code(0)\n ctx.state.marks_pop_discard()\n ctx.has_matched=False\n \n yield True\n \n def op_repeat_one(self,ctx):\n \n \n \n \n mincount=ctx.peek_code(2)\n maxcount=ctx.peek_code(3)\n \n \n \n if ctx.remaining_chars()=mincount and\\\n (ctx.at_end()or ord(ctx.peek_char())!=char):\n ctx.skip_char(-1)\n count -=1\n if count =mincount:\n ctx.state.string_position=ctx.string_position\n child_context=ctx.push_new_context(ctx.peek_code(1)+1)\n yield False\n if child_context.has_matched:\n ctx.has_matched=True\n yield True\n ctx.skip_char(-1)\n count -=1\n ctx.state.marks_pop_keep()\n \n ctx.state.marks_pop_discard()\n ctx.has_matched=False\n \n yield True\n \n def op_min_repeat_one(self,ctx):\n \n \n mincount=ctx.peek_code(2)\n maxcount=ctx.peek_code(3)\n \n \n if ctx.remaining_chars()=maxcount and maxcount !=MAXREPEAT:\n ctx.has_matched=False\n \n yield True\n repeat.count=count\n child_context=repeat.push_new_context(4)\n yield False\n ctx.has_matched=child_context.has_matched\n if not ctx.has_matched:\n repeat.count=count -1\n ctx.state.string_position=ctx.string_position\n yield True\n \n def general_op_groupref(self,ctx,decorate=lambda x:x):\n group_start,group_end=ctx.state.get_marks(ctx.peek_code(1))\n if group_start is None or group_end is None or group_end =0:\n child_context=ctx.push_new_context(3)\n yield False\n if child_context.has_matched:\n ctx.has_matched=False\n yield True\n ctx.skip_code(ctx.peek_code(1)+1)\n yield True\n \n def unknown(self,ctx):\n \n raise RuntimeError(\"Internal re error. 
Unknown opcode: %s\"%ctx.peek_code())\n \n def check_charset(self,ctx,char):\n ''\n \n self.set_dispatcher.reset(char)\n save_position=ctx.code_position\n result=None\n while result is None:\n result=self.set_dispatcher.dispatch(ctx.peek_code(),ctx)\n ctx.code_position=save_position\n \n return result\n \n def count_repetitions(self,ctx,maxcount):\n ''\n\n \n count=0\n real_maxcount=ctx.state.end -ctx.string_position\n if maxcount >4)\\\n &(1 <<(char_code&15)):\n return self.ok\n ctx.skip_code(16)\n else:\n if char_code <256 and ctx.peek_code(char_code >>5)\\\n &(1 <<(char_code&31)):\n return self.ok\n ctx.skip_code(8)\n def set_range(self,ctx):\n \n if ctx.peek_code(1)<=self.char <=ctx.peek_code(2):\n return self.ok\n ctx.skip_code(3)\n def set_negate(self,ctx):\n self.ok=not self.ok\n ctx.skip_code(1)\n \n def set_bigcharset(self,ctx):\n \n char_code=self.char\n count=ctx.peek_code(1)\n ctx.skip_code(2)\n if char_code <65536:\n block_index=char_code >>8\n \n a=array.array(\"B\")\n a.fromstring(array.array(CODESIZE ==2 and \"H\"or \"I\",\n [ctx.peek_code(block_index //CODESIZE)]).tostring())\n block=a[block_index %CODESIZE]\n ctx.skip_code(256 //CODESIZE)\n block_value=ctx.peek_code(block *(32 //CODESIZE)\n +((char_code&255)>>(CODESIZE ==2 and 4 or 5)))\n if block_value&(1 <<(char_code&((8 *CODESIZE)-1))):\n return self.ok\n else:\n ctx.skip_code(256 //CODESIZE)\n ctx.skip_code(count *(32 //CODESIZE))\n \n def unknown(self,ctx):\n return False\n \n_CharsetDispatcher.build_dispatch_table(OPCODES,\"set_\")\n\n\nclass _AtcodeDispatcher(_Dispatcher):\n\n def at_beginning(self,ctx):\n return ctx.at_beginning()\n at_beginning_string=at_beginning\n def at_beginning_line(self,ctx):\n return ctx.at_beginning()or _is_linebreak(ctx.peek_char(-1))\n def at_end(self,ctx):\n return(ctx.remaining_chars()==1 and ctx.at_linebreak())or ctx.at_end()\n def at_end_line(self,ctx):\n return ctx.at_linebreak()or ctx.at_end()\n def at_end_string(self,ctx):\n return ctx.at_end()\n def at_boundary(self,ctx):\n return ctx.at_boundary(_is_word)\n def at_non_boundary(self,ctx):\n return not ctx.at_boundary(_is_word)\n def at_loc_boundary(self,ctx):\n return ctx.at_boundary(_is_loc_word)\n def at_loc_non_boundary(self,ctx):\n return not ctx.at_boundary(_is_loc_word)\n def at_uni_boundary(self,ctx):\n return ctx.at_boundary(_is_uni_word)\n def at_uni_non_boundary(self,ctx):\n return not ctx.at_boundary(_is_uni_word)\n def unknown(self,ctx):\n return False\n \n_AtcodeDispatcher.build_dispatch_table(ATCODES,\"\")\n\n\nclass _ChcodeDispatcher(_Dispatcher):\n\n def category_digit(self,ctx):\n return _is_digit(ctx.peek_char())\n def category_not_digit(self,ctx):\n return not _is_digit(ctx.peek_char())\n def category_space(self,ctx):\n return _is_space(ctx.peek_char())\n def category_not_space(self,ctx):\n return not _is_space(ctx.peek_char())\n def category_word(self,ctx):\n return _is_word(ctx.peek_char())\n def category_not_word(self,ctx):\n return not _is_word(ctx.peek_char())\n def category_linebreak(self,ctx):\n return _is_linebreak(ctx.peek_char())\n def category_not_linebreak(self,ctx):\n return not _is_linebreak(ctx.peek_char())\n def category_loc_word(self,ctx):\n return _is_loc_word(ctx.peek_char())\n def category_loc_not_word(self,ctx):\n return not _is_loc_word(ctx.peek_char())\n def category_uni_digit(self,ctx):\n return ctx.peek_char().isdigit()\n def category_uni_not_digit(self,ctx):\n return not ctx.peek_char().isdigit()\n def category_uni_space(self,ctx):\n return ctx.peek_char().isspace()\n def 
category_uni_not_space(self,ctx):\n return not ctx.peek_char().isspace()\n def category_uni_word(self,ctx):\n return _is_uni_word(ctx.peek_char())\n def category_uni_not_word(self,ctx):\n return not _is_uni_word(ctx.peek_char())\n def category_uni_linebreak(self,ctx):\n return ord(ctx.peek_char())in _uni_linebreaks\n def category_uni_not_linebreak(self,ctx):\n return ord(ctx.peek_char())not in _uni_linebreaks\n def unknown(self,ctx):\n return False\n \n_ChcodeDispatcher.build_dispatch_table(CHCODES,\"\")\n\n\n_ascii_char_info=[0,0,0,0,0,0,0,0,0,2,6,2,\n2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,\n0,0,0,0,0,0,0,0,0,0,0,0,0,25,25,25,25,25,25,25,25,\n25,25,0,0,0,0,0,0,0,24,24,24,24,24,24,24,24,24,24,\n24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,0,0,\n0,0,16,0,24,24,24,24,24,24,24,24,24,24,24,24,24,24,\n24,24,24,24,24,24,24,24,24,24,24,24,0,0,0,0,0]\n\ndef _is_digit(char):\n code=ord(char)\n return code <128 and _ascii_char_info[code]&1\n \ndef _is_space(char):\n code=ord(char)\n return code <128 and _ascii_char_info[code]&2\n \ndef _is_word(char):\n\n code=ord(char)\n return code <128 and _ascii_char_info[code]&16\n \ndef _is_loc_word(char):\n return(not(ord(char)&~255)and char.isalnum())or char =='_'\n \ndef _is_uni_word(char):\n\n\n return chr(ord(char)).isalnum()or char =='_'\n \ndef _is_linebreak(char):\n return char ==\"\\n\"\n \n \n_uni_linebreaks=[10,13,28,29,30,133,8232,8233]\n\ndef _log(message):\n if 0:\n print(message)\n", ["_sre_utils", "array", "operator", "re", "sre", "sre_constants", "sys"]], "encoding_cp932": [".js", "const cps = {\"0\": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 65377, 65378, 65379, 65380, 65381, 65382, 65383, 65384, 65385, 65386, 65387, 65388, 65389, 65390, 65391, 65392, 65393, 65394, 65395, 65396, 65397, 65398, 65399, 65400, 65401, 65402, 65403, 65404, 65405, 65406, 65407, 65408, 65409, 65410, 65411, 65412, 65413, 65414, 65415, 65416, 65417, 65418, 65419, 65420, 65421, 65422, 65423, 65424, 65425, 65426, 65427, 65428, 65429, 65430, 65431, 65432, 65433, 65434, 65435, 65436, 65437, 65438, 65439, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null], \"33088\": [12288, 12289, 12290, 65292, 65294, 12539, 65306, 65307, 65311, 65281, 12443, 12444, 180, 65344, 168, 65342, 65507, 65343, 12541, 12542, 12445, 12446, 12291, 20189, 12293, 12294, 12295, 12540, 8213, 8208, 65295, 65340, 65374, 8741, 65372, 8230, 8229, 8216, 8217, 8220, 8221, 65288, 65289, 12308, 12309, 65339, 65341, 65371, 65373, 12296, 12297, 12298, 12299, 12300, 12301, 12302, 12303, 12304, 12305, 65291, 65293, 177, 215], \"33152\": [247, 65309, 8800, 65308, 65310, 8806, 8807, 8734, 8756, 9794, 9792, 176, 8242, 8243, 8451, 65509, 65284, 65504, 65505, 65285, 65283, 65286, 65290, 65312, 167, 
9734, 9733, 9675, 9679, 9678, 9671, 9670, 9633, 9632, 9651, 9650, 9661, 9660, 8251, 12306, 8594, 8592, 8593, 8595, 12307], \"33208\": [8712, 8715, 8838, 8839, 8834, 8835, 8746, 8745], \"33224\": [8743, 8744, 65506, 8658, 8660, 8704, 8707], \"33242\": [8736, 8869, 8978, 8706, 8711, 8801, 8786, 8810, 8811, 8730, 8765, 8733, 8757, 8747, 8748], \"33264\": [8491, 8240, 9839, 9837, 9834, 8224, 8225, 182], \"33276\": [9711], \"33359\": [65296, 65297, 65298, 65299, 65300, 65301, 65302, 65303, 65304, 65305], \"33376\": [65313, 65314, 65315, 65316, 65317, 65318, 65319, 65320, 65321, 65322, 65323, 65324, 65325, 65326, 65327, 65328, 65329, 65330, 65331, 65332, 65333, 65334, 65335, 65336, 65337, 65338], \"33409\": [65345, 65346, 65347, 65348, 65349, 65350, 65351, 65352, 65353, 65354, 65355, 65356, 65357, 65358, 65359, 65360, 65361, 65362, 65363, 65364, 65365, 65366, 65367, 65368, 65369, 65370], \"33439\": [12353, 12354, 12355, 12356, 12357, 12358, 12359, 12360, 12361, 12362, 12363, 12364, 12365, 12366, 12367, 12368, 12369, 12370, 12371, 12372, 12373, 12374, 12375, 12376, 12377, 12378, 12379, 12380, 12381, 12382, 12383, 12384, 12385, 12386, 12387, 12388, 12389, 12390, 12391, 12392, 12393, 12394, 12395, 12396, 12397, 12398, 12399, 12400, 12401, 12402, 12403, 12404, 12405, 12406, 12407, 12408, 12409, 12410, 12411, 12412, 12413, 12414, 12415, 12416, 12417, 12418, 12419, 12420, 12421, 12422, 12423, 12424, 12425, 12426, 12427, 12428, 12429, 12430, 12431, 12432, 12433, 12434, 12435], \"33600\": [12449, 12450, 12451, 12452, 12453, 12454, 12455, 12456, 12457, 12458, 12459, 12460, 12461, 12462, 12463, 12464, 12465, 12466, 12467, 12468, 12469, 12470, 12471, 12472, 12473, 12474, 12475, 12476, 12477, 12478, 12479, 12480, 12481, 12482, 12483, 12484, 12485, 12486, 12487, 12488, 12489, 12490, 12491, 12492, 12493, 12494, 12495, 12496, 12497, 12498, 12499, 12500, 12501, 12502, 12503, 12504, 12505, 12506, 12507, 12508, 12509, 12510, 12511], \"33664\": [12512, 12513, 12514, 12515, 12516, 12517, 12518, 12519, 12520, 12521, 12522, 12523, 12524, 12525, 12526, 12527, 12528, 12529, 12530, 12531, 12532, 12533, 12534], \"33695\": [913, 914, 915, 916, 917, 918, 919, 920, 921, 922, 923, 924, 925, 926, 927, 928, 929, 931, 932, 933, 934, 935, 936, 937], \"33727\": [945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 958, 959, 960, 961, 963, 964, 965, 966, 967, 968, 969], \"33856\": [1040, 1041, 1042, 1043, 1044, 1045, 1025, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071], \"33904\": [1072, 1073, 1074, 1075, 1076, 1077, 1105, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085], \"33920\": [1086, 1087, 1088, 1089, 1090, 1091, 1092, 1093, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1103], \"33951\": [9472, 9474, 9484, 9488, 9496, 9492, 9500, 9516, 9508, 9524, 9532, 9473, 9475, 9487, 9491, 9499, 9495, 9507, 9523, 9515, 9531, 9547, 9504, 9519, 9512, 9527, 9535, 9501, 9520, 9509, 9528, 9538], \"34624\": [9312, 9313, 9314, 9315, 9316, 9317, 9318, 9319, 9320, 9321, 9322, 9323, 9324, 9325, 9326, 9327, 9328, 9329, 9330, 9331, 8544, 8545, 8546, 8547, 8548, 8549, 8550, 8551, 8552, 8553], \"34655\": [13129, 13076, 13090, 13133, 13080, 13095, 13059, 13110, 13137, 13143, 13069, 13094, 13091, 13099, 13130, 13115, 13212, 13213, 13214, 13198, 13199, 13252, 13217], \"34686\": [13179], \"34688\": [12317, 12319, 8470, 13261, 8481, 12964, 12965, 12966, 12967, 12968, 12849, 12850, 12857, 13182, 13181, 13180, 8786, 8801, 8747, 
8750, 8721, 8730, 8869, 8736, 8735, 8895, 8757, 8745, 8746], \"34975\": [20124, 21782, 23043, 38463, 21696, 24859, 25384, 23030, 36898, 33909, 33564, 31312, 24746, 25569, 28197, 26093, 33894, 33446, 39925, 26771, 22311, 26017, 25201, 23451, 22992, 34427, 39156, 32098, 32190, 39822, 25110, 31903, 34999, 23433, 24245, 25353, 26263, 26696, 38343, 38797, 26447, 20197, 20234, 20301, 20381, 20553, 22258, 22839, 22996, 23041, 23561, 24799, 24847, 24944, 26131, 26885, 28858, 30031, 30064, 31227, 32173, 32239, 32963, 33806, 34915, 35586, 36949, 36986, 21307, 20117, 20133, 22495, 32946, 37057, 30959, 19968, 22769, 28322, 36920, 31282, 33576, 33419, 39983, 20801, 21360, 21693, 21729, 22240, 23035, 24341, 39154, 28139, 32996, 34093], \"35136\": [38498, 38512, 38560, 38907, 21515, 21491, 23431, 28879, 32701, 36802, 38632, 21359, 40284, 31418, 19985, 30867, 33276, 28198, 22040, 21764, 27421, 34074, 39995, 23013, 21417, 28006, 29916, 38287, 22082, 20113, 36939, 38642, 33615, 39180, 21473, 21942, 23344, 24433, 26144, 26355, 26628, 27704, 27891, 27945, 29787, 30408, 31310, 38964, 33521, 34907, 35424, 37613, 28082, 30123, 30410, 39365, 24742, 35585, 36234, 38322, 27022, 21421, 20870], \"35200\": [22290, 22576, 22852, 23476, 24310, 24616, 25513, 25588, 27839, 28436, 28814, 28948, 29017, 29141, 29503, 32257, 33398, 33489, 34199, 36960, 37467, 40219, 22633, 26044, 27738, 29989, 20985, 22830, 22885, 24448, 24540, 25276, 26106, 27178, 27431, 27572, 29579, 32705, 35158, 40236, 40206, 40644, 23713, 27798, 33659, 20740, 23627, 25014, 33222, 26742, 29281, 20057, 20474, 21368, 24681, 28201, 31311, 38899, 19979, 21270, 20206, 20309, 20285, 20385, 20339, 21152, 21487, 22025, 22799, 23233, 23478, 23521, 31185, 26247, 26524, 26550, 27468, 27827, 28779, 29634, 31117, 31166, 31292, 31623, 33457, 33499, 33540, 33655, 33775, 33747, 34662, 35506, 22057, 36008, 36838, 36942, 38686, 34442, 20420, 23784, 25105, 29273, 30011, 33253, 33469, 34558, 36032, 38597, 39187, 39381, 20171, 20250, 35299, 22238, 22602, 22730, 24315, 24555, 24618, 24724, 24674, 25040, 25106, 25296, 25913], \"35392\": [39745, 26214, 26800, 28023, 28784, 30028, 30342, 32117, 33445, 34809, 38283, 38542, 35997, 20977, 21182, 22806, 21683, 23475, 23830, 24936, 27010, 28079, 30861, 33995, 34903, 35442, 37799, 39608, 28012, 39336, 34521, 22435, 26623, 34510, 37390, 21123, 22151, 21508, 24275, 25313, 25785, 26684, 26680, 27579, 29554, 30906, 31339, 35226, 35282, 36203, 36611, 37101, 38307, 38548, 38761, 23398, 23731, 27005, 38989, 38990, 25499, 31520, 27179], \"35456\": [27263, 26806, 39949, 28511, 21106, 21917, 24688, 25324, 27963, 28167, 28369, 33883, 35088, 36676, 19988, 39993, 21494, 26907, 27194, 38788, 26666, 20828, 31427, 33970, 37340, 37772, 22107, 40232, 26658, 33541, 33841, 31909, 21000, 33477, 29926, 20094, 20355, 20896, 23506, 21002, 21208, 21223, 24059, 21914, 22570, 23014, 23436, 23448, 23515, 24178, 24185, 24739, 24863, 24931, 25022, 25563, 25954, 26577, 26707, 26874, 27454, 27475, 27735, 28450, 28567, 28485, 29872, 29976, 30435, 30475, 31487, 31649, 31777, 32233, 32566, 32752, 32925, 33382, 33694, 35251, 35532, 36011, 36996, 37969, 38291, 38289, 38306, 38501, 38867, 39208, 33304, 20024, 21547, 23736, 24012, 29609, 30284, 30524, 23721, 32747, 36107, 38593, 38929, 38996, 39000, 20225, 20238, 21361, 21916, 22120, 22522, 22855, 23305, 23492, 23696, 24076, 24190, 24524, 25582, 26426, 26071, 26082, 26399, 26827, 26820], \"35648\": [27231, 24112, 27589, 27671, 27773, 30079, 31048, 23395, 31232, 32000, 24509, 35215, 35352, 36020, 36215, 36556, 36637, 39138, 
39438, 39740, 20096, 20605, 20736, 22931, 23452, 25135, 25216, 25836, 27450, 29344, 30097, 31047, 32681, 34811, 35516, 35696, 25516, 33738, 38816, 21513, 21507, 21931, 26708, 27224, 35440, 30759, 26485, 40653, 21364, 23458, 33050, 34384, 36870, 19992, 20037, 20167, 20241, 21450, 21560, 23470, 24339, 24613, 25937], \"35712\": [26429, 27714, 27762, 27875, 28792, 29699, 31350, 31406, 31496, 32026, 31998, 32102, 26087, 29275, 21435, 23621, 24040, 25298, 25312, 25369, 28192, 34394, 35377, 36317, 37624, 28417, 31142, 39770, 20136, 20139, 20140, 20379, 20384, 20689, 20807, 31478, 20849, 20982, 21332, 21281, 21375, 21483, 21932, 22659, 23777, 24375, 24394, 24623, 24656, 24685, 25375, 25945, 27211, 27841, 29378, 29421, 30703, 33016, 33029, 33288, 34126, 37111, 37857, 38911, 39255, 39514, 20208, 20957, 23597, 26241, 26989, 23616, 26354, 26997, 29577, 26704, 31873, 20677, 21220, 22343, 24062, 37670, 26020, 27427, 27453, 29748, 31105, 31165, 31563, 32202, 33465, 33740, 34943, 35167, 35641, 36817, 37329, 21535, 37504, 20061, 20534, 21477, 21306, 29399, 29590, 30697, 33510, 36527, 39366, 39368, 39378, 20855, 24858, 34398, 21936, 31354, 20598, 23507, 36935, 38533, 20018, 27355, 37351, 23633, 23624], \"35904\": [25496, 31391, 27795, 38772, 36705, 31402, 29066, 38536, 31874, 26647, 32368, 26705, 37740, 21234, 21531, 34219, 35347, 32676, 36557, 37089, 21350, 34952, 31041, 20418, 20670, 21009, 20804, 21843, 22317, 29674, 22411, 22865, 24418, 24452, 24693, 24950, 24935, 25001, 25522, 25658, 25964, 26223, 26690, 28179, 30054, 31293, 31995, 32076, 32153, 32331, 32619, 33550, 33610, 34509, 35336, 35427, 35686, 36605, 38938, 40335, 33464, 36814, 39912], \"35968\": [21127, 25119, 25731, 28608, 38553, 26689, 20625, 27424, 27770, 28500, 31348, 32080, 34880, 35363, 26376, 20214, 20537, 20518, 20581, 20860, 21048, 21091, 21927, 22287, 22533, 23244, 24314, 25010, 25080, 25331, 25458, 26908, 27177, 29309, 29356, 29486, 30740, 30831, 32121, 30476, 32937, 35211, 35609, 36066, 36562, 36963, 37749, 38522, 38997, 39443, 40568, 20803, 21407, 21427, 24187, 24358, 28187, 28304, 29572, 29694, 32067, 33335, 35328, 35578, 38480, 20046, 20491, 21476, 21628, 22266, 22993, 23396, 24049, 24235, 24359, 25144, 25925, 26543, 28246, 29392, 31946, 34996, 32929, 32993, 33776, 34382, 35463, 36328, 37431, 38599, 39015, 40723, 20116, 20114, 20237, 21320, 21577, 21566, 23087, 24460, 24481, 24735, 26791, 27278, 29786, 30849, 35486, 35492, 35703, 37264, 20062, 39881, 20132, 20348, 20399, 20505, 20502, 20809, 20844, 21151, 21177, 21246, 21402, 21475, 21521], \"36160\": [21518, 21897, 22353, 22434, 22909, 23380, 23389, 23439, 24037, 24039, 24055, 24184, 24195, 24218, 24247, 24344, 24658, 24908, 25239, 25304, 25511, 25915, 26114, 26179, 26356, 26477, 26657, 26775, 27083, 27743, 27946, 28009, 28207, 28317, 30002, 30343, 30828, 31295, 31968, 32005, 32024, 32094, 32177, 32789, 32771, 32943, 32945, 33108, 33167, 33322, 33618, 34892, 34913, 35611, 36002, 36092, 37066, 37237, 37489, 30783, 37628, 38308, 38477], \"36224\": [38917, 39321, 39640, 40251, 21083, 21163, 21495, 21512, 22741, 25335, 28640, 35946, 36703, 40633, 20811, 21051, 21578, 22269, 31296, 37239, 40288, 40658, 29508, 28425, 33136, 29969, 24573, 24794, 39592, 29403, 36796, 27492, 38915, 20170, 22256, 22372, 22718, 23130, 24680, 25031, 26127, 26118, 26681, 26801, 28151, 30165, 32058, 33390, 39746, 20123, 20304, 21449, 21766, 23919, 24038, 24046, 26619, 27801, 29811, 30722, 35408, 37782, 35039, 22352, 24231, 25387, 20661, 20652, 20877, 26368, 21705, 22622, 22971, 23472, 24425, 25165, 25505, 
26685, 27507, 28168, 28797, 37319, 29312, 30741, 30758, 31085, 25998, 32048, 33756, 35009, 36617, 38555, 21092, 22312, 26448, 32618, 36001, 20916, 22338, 38442, 22586, 27018, 32948, 21682, 23822, 22524, 30869, 40442, 20316, 21066, 21643, 25662, 26152, 26388, 26613, 31364, 31574, 32034, 37679, 26716, 39853, 31545, 21273, 20874, 21047], \"36416\": [23519, 25334, 25774, 25830, 26413, 27578, 34217, 38609, 30352, 39894, 25420, 37638, 39851, 30399, 26194, 19977, 20632, 21442, 23665, 24808, 25746, 25955, 26719, 29158, 29642, 29987, 31639, 32386, 34453, 35715, 36059, 37240, 39184, 26028, 26283, 27531, 20181, 20180, 20282, 20351, 21050, 21496, 21490, 21987, 22235, 22763, 22987, 22985, 23039, 23376, 23629, 24066, 24107, 24535, 24605, 25351, 25903, 23388, 26031, 26045, 26088, 26525, 27490], \"36480\": [27515, 27663, 29509, 31049, 31169, 31992, 32025, 32043, 32930, 33026, 33267, 35222, 35422, 35433, 35430, 35468, 35566, 36039, 36060, 38604, 39164, 27503, 20107, 20284, 20365, 20816, 23383, 23546, 24904, 25345, 26178, 27425, 28363, 27835, 29246, 29885, 30164, 30913, 31034, 32780, 32819, 33258, 33940, 36766, 27728, 40575, 24335, 35672, 40235, 31482, 36600, 23437, 38635, 19971, 21489, 22519, 22833, 23241, 23460, 24713, 28287, 28422, 30142, 36074, 23455, 34048, 31712, 20594, 26612, 33437, 23649, 34122, 32286, 33294, 20889, 23556, 25448, 36198, 26012, 29038, 31038, 32023, 32773, 35613, 36554, 36974, 34503, 37034, 20511, 21242, 23610, 26451, 28796, 29237, 37196, 37320, 37675, 33509, 23490, 24369, 24825, 20027, 21462, 23432, 25163, 26417, 27530, 29417, 29664, 31278, 33131, 36259, 37202, 39318, 20754, 21463, 21610, 23551, 25480, 27193, 32172, 38656, 22234, 21454, 21608], \"36672\": [23447, 23601, 24030, 20462, 24833, 25342, 27954, 31168, 31179, 32066, 32333, 32722, 33261, 33311, 33936, 34886, 35186, 35728, 36468, 36655, 36913, 37195, 37228, 38598, 37276, 20160, 20303, 20805, 21313, 24467, 25102, 26580, 27713, 28171, 29539, 32294, 37325, 37507, 21460, 22809, 23487, 28113, 31069, 32302, 31899, 22654, 29087, 20986, 34899, 36848, 20426, 23803, 26149, 30636, 31459, 33308, 39423, 20934, 24490, 26092, 26991, 27529, 28147], \"36736\": [28310, 28516, 30462, 32020, 24033, 36981, 37255, 38918, 20966, 21021, 25152, 26257, 26329, 28186, 24246, 32210, 32626, 26360, 34223, 34295, 35576, 21161, 21465, 22899, 24207, 24464, 24661, 37604, 38500, 20663, 20767, 21213, 21280, 21319, 21484, 21736, 21830, 21809, 22039, 22888, 22974, 23100, 23477, 23558, 23567, 23569, 23578, 24196, 24202, 24288, 24432, 25215, 25220, 25307, 25484, 25463, 26119, 26124, 26157, 26230, 26494, 26786, 27167, 27189, 27836, 28040, 28169, 28248, 28988, 28966, 29031, 30151, 30465, 30813, 30977, 31077, 31216, 31456, 31505, 31911, 32057, 32918, 33750, 33931, 34121, 34909, 35059, 35359, 35388, 35412, 35443, 35937, 36062, 37284, 37478, 37758, 37912, 38556, 38808, 19978, 19976, 19998, 20055, 20887, 21104, 22478, 22580, 22732, 23330, 24120, 24773, 25854, 26465, 26454, 27972, 29366, 30067, 31331, 33976, 35698, 37304, 37664, 22065, 22516, 39166], \"36928\": [25325, 26893, 27542, 29165, 32340, 32887, 33394, 35302, 39135, 34645, 36785, 23611, 20280, 20449, 20405, 21767, 23072, 23517, 23529, 24515, 24910, 25391, 26032, 26187, 26862, 27035, 28024, 28145, 30003, 30137, 30495, 31070, 31206, 32051, 33251, 33455, 34218, 35242, 35386, 36523, 36763, 36914, 37341, 38663, 20154, 20161, 20995, 22645, 22764, 23563, 29978, 23613, 33102, 35338, 36805, 38499, 38765, 31525, 35535, 38920, 37218, 22259, 21416], \"36992\": [36887, 21561, 22402, 24101, 25512, 27700, 28810, 30561, 31883, 
32736, 34928, 36930, 37204, 37648, 37656, 38543, 29790, 39620, 23815, 23913, 25968, 26530, 36264, 38619, 25454, 26441, 26905, 33733, 38935, 38592, 35070, 28548, 25722, 23544, 19990, 28716, 30045, 26159, 20932, 21046, 21218, 22995, 24449, 24615, 25104, 25919, 25972, 26143, 26228, 26866, 26646, 27491, 28165, 29298, 29983, 30427, 31934, 32854, 22768, 35069, 35199, 35488, 35475, 35531, 36893, 37266, 38738, 38745, 25993, 31246, 33030, 38587, 24109, 24796, 25114, 26021, 26132, 26512, 30707, 31309, 31821, 32318, 33034, 36012, 36196, 36321, 36447, 30889, 20999, 25305, 25509, 25666, 25240, 35373, 31363, 31680, 35500, 38634, 32118, 33292, 34633, 20185, 20808, 21315, 21344, 23459, 23554, 23574, 24029, 25126, 25159, 25776, 26643, 26676, 27849, 27973, 27927, 26579, 28508, 29006, 29053, 26059, 31359, 31661, 32218], \"37184\": [32330, 32680, 33146, 33307, 33337, 34214, 35438, 36046, 36341, 36984, 36983, 37549, 37521, 38275, 39854, 21069, 21892, 28472, 28982, 20840, 31109, 32341, 33203, 31950, 22092, 22609, 23720, 25514, 26366, 26365, 26970, 29401, 30095, 30094, 30990, 31062, 31199, 31895, 32032, 32068, 34311, 35380, 38459, 36961, 40736, 20711, 21109, 21452, 21474, 20489, 21930, 22766, 22863, 29245, 23435, 23652, 21277, 24803, 24819, 25436, 25475, 25407, 25531], \"37248\": [25805, 26089, 26361, 24035, 27085, 27133, 28437, 29157, 20105, 30185, 30456, 31379, 31967, 32207, 32156, 32865, 33609, 33624, 33900, 33980, 34299, 35013, 36208, 36865, 36973, 37783, 38684, 39442, 20687, 22679, 24974, 33235, 34101, 36104, 36896, 20419, 20596, 21063, 21363, 24687, 25417, 26463, 28204, 36275, 36895, 20439, 23646, 36042, 26063, 32154, 21330, 34966, 20854, 25539, 23384, 23403, 23562, 25613, 26449, 36956, 20182, 22810, 22826, 27760, 35409, 21822, 22549, 22949, 24816, 25171, 26561, 33333, 26965, 38464, 39364, 39464, 20307, 22534, 23550, 32784, 23729, 24111, 24453, 24608, 24907, 25140, 26367, 27888, 28382, 32974, 33151, 33492, 34955, 36024, 36864, 36910, 38538, 40667, 39899, 20195, 21488, 22823, 31532, 37261, 38988, 40441, 28381, 28711, 21331, 21828, 23429, 25176, 25246, 25299, 27810, 28655, 29730, 35351, 37944, 28609, 35582, 33592, 20967, 34552, 21482], \"37440\": [21481, 20294, 36948, 36784, 22890, 33073, 24061, 31466, 36799, 26842, 35895, 29432, 40008, 27197, 35504, 20025, 21336, 22022, 22374, 25285, 25506, 26086, 27470, 28129, 28251, 28845, 30701, 31471, 31658, 32187, 32829, 32966, 34507, 35477, 37723, 22243, 22727, 24382, 26029, 26262, 27264, 27573, 30007, 35527, 20516, 30693, 22320, 24347, 24677, 26234, 27744, 30196, 31258, 32622, 33268, 34584, 36933, 39347, 31689, 30044, 31481, 31569, 33988], \"37504\": [36880, 31209, 31378, 33590, 23265, 30528, 20013, 20210, 23449, 24544, 25277, 26172, 26609, 27880, 34411, 34935, 35387, 37198, 37619, 39376, 27159, 28710, 29482, 33511, 33879, 36015, 19969, 20806, 20939, 21899, 23541, 24086, 24115, 24193, 24340, 24373, 24427, 24500, 25074, 25361, 26274, 26397, 28526, 29266, 30010, 30522, 32884, 33081, 33144, 34678, 35519, 35548, 36229, 36339, 37530, 38263, 38914, 40165, 21189, 25431, 30452, 26389, 27784, 29645, 36035, 37806, 38515, 27941, 22684, 26894, 27084, 36861, 37786, 30171, 36890, 22618, 26626, 25524, 27131, 20291, 28460, 26584, 36795, 34086, 32180, 37716, 26943, 28528, 22378, 22775, 23340, 32044, 29226, 21514, 37347, 40372, 20141, 20302, 20572, 20597, 21059, 35998, 21576, 22564, 23450, 24093, 24213, 24237, 24311, 24351, 24716, 25269, 25402, 25552, 26799, 27712, 30855, 31118, 31243, 32224, 33351, 35330, 35558, 36420, 36883], \"37696\": [37048, 37165, 37336, 40718, 27877, 25688, 
25826, 25973, 28404, 30340, 31515, 36969, 37841, 28346, 21746, 24505, 25764, 36685, 36845, 37444, 20856, 22635, 22825, 23637, 24215, 28155, 32399, 29980, 36028, 36578, 39003, 28857, 20253, 27583, 28593, 30000, 38651, 20814, 21520, 22581, 22615, 22956, 23648, 24466, 26007, 26460, 28193, 30331, 33759, 36077, 36884, 37117, 37709, 30757, 30778, 21162, 24230, 22303, 22900, 24594, 20498, 20826, 20908], \"37760\": [20941, 20992, 21776, 22612, 22616, 22871, 23445, 23798, 23947, 24764, 25237, 25645, 26481, 26691, 26812, 26847, 30423, 28120, 28271, 28059, 28783, 29128, 24403, 30168, 31095, 31561, 31572, 31570, 31958, 32113, 21040, 33891, 34153, 34276, 35342, 35588, 35910, 36367, 36867, 36879, 37913, 38518, 38957, 39472, 38360, 20685, 21205, 21516, 22530, 23566, 24999, 25758, 27934, 30643, 31461, 33012, 33796, 36947, 37509, 23776, 40199, 21311, 24471, 24499, 28060, 29305, 30563, 31167, 31716, 27602, 29420, 35501, 26627, 27233, 20984, 31361, 26932, 23626, 40182, 33515, 23493, 37193, 28702, 22136, 23663, 24775, 25958, 27788, 35930, 36929, 38931, 21585, 26311, 37389, 22856, 37027, 20869, 20045, 20970, 34201, 35598, 28760, 25466, 37707, 26978, 39348, 32260, 30071, 21335, 26976, 36575, 38627, 27741, 20108, 23612, 24336, 36841, 21250, 36049, 32905, 34425, 24319, 26085, 20083, 20837], \"37952\": [22914, 23615, 38894, 20219, 22922, 24525, 35469, 28641, 31152, 31074, 23527, 33905, 29483, 29105, 24180, 24565, 25467, 25754, 29123, 31896, 20035, 24316, 20043, 22492, 22178, 24745, 28611, 32013, 33021, 33075, 33215, 36786, 35223, 34468, 24052, 25226, 25773, 35207, 26487, 27874, 27966, 29750, 30772, 23110, 32629, 33453, 39340, 20467, 24259, 25309, 25490, 25943, 26479, 30403, 29260, 32972, 32954, 36649, 37197, 20493, 22521, 23186, 26757], \"38016\": [26995, 29028, 29437, 36023, 22770, 36064, 38506, 36889, 34687, 31204, 30695, 33833, 20271, 21093, 21338, 25293, 26575, 27850, 30333, 31636, 31893, 33334, 34180, 36843, 26333, 28448, 29190, 32283, 33707, 39361, 40614, 20989, 31665, 30834, 31672, 32903, 31560, 27368, 24161, 32908, 30033, 30048, 20843, 37474, 28300, 30330, 37271, 39658, 20240, 32624, 25244, 31567, 38309, 40169, 22138, 22617, 34532, 38588, 20276, 21028, 21322, 21453, 21467, 24070, 25644, 26001, 26495, 27710, 27726, 29256, 29359, 29677, 30036, 32321, 33324, 34281, 36009, 31684, 37318, 29033, 38930, 39151, 25405, 26217, 30058, 30436, 30928, 34115, 34542, 21290, 21329, 21542, 22915, 24199, 24444, 24754, 25161, 25209, 25259, 26000, 27604, 27852, 30130, 30382, 30865, 31192, 32203, 32631, 32933, 34987, 35513, 36027, 36991, 38750, 39131, 27147, 31800, 20633, 23614, 24494, 26503, 27608, 29749, 30473, 32654], \"38208\": [40763, 26570, 31255, 21305, 30091, 39661, 24422, 33181, 33777, 32920, 24380, 24517, 30050, 31558, 36924, 26727, 23019, 23195, 32016, 30334, 35628, 20469, 24426, 27161, 27703, 28418, 29922, 31080, 34920, 35413, 35961, 24287, 25551, 30149, 31186, 33495, 37672, 37618, 33948, 34541, 39981, 21697, 24428, 25996, 27996, 28693, 36007, 36051, 38971, 25935, 29942, 19981, 20184, 22496, 22827, 23142, 23500, 20904, 24067, 24220, 24598, 25206, 25975], \"38272\": [26023, 26222, 28014, 29238, 31526, 33104, 33178, 33433, 35676, 36000, 36070, 36212, 38428, 38468, 20398, 25771, 27494, 33310, 33889, 34154, 37096, 23553, 26963, 39080, 33914, 34135, 20239, 21103, 24489, 24133, 26381, 31119, 33145, 35079, 35206, 28149, 24343, 25173, 27832, 20175, 29289, 39826, 20998, 21563, 22132, 22707, 24996, 25198, 28954, 22894, 31881, 31966, 32027, 38640, 25991, 32862, 19993, 20341, 20853, 22592, 24163, 24179, 24330, 26564, 20006, 
34109, 38281, 38491, 31859, 38913, 20731, 22721, 30294, 30887, 21029, 30629, 34065, 31622, 20559, 22793, 29255, 31687, 32232, 36794, 36820, 36941, 20415, 21193, 23081, 24321, 38829, 20445, 33303, 37610, 22275, 25429, 27497, 29995, 35036, 36628, 31298, 21215, 22675, 24917, 25098, 26286, 27597, 31807, 33769, 20515, 20472, 21253, 21574, 22577, 22857, 23453, 23792, 23791, 23849, 24214, 25265, 25447, 25918, 26041, 26379], \"38464\": [27861, 27873, 28921, 30770, 32299, 32990, 33459, 33804, 34028, 34562, 35090, 35370, 35914, 37030, 37586, 39165, 40179, 40300, 20047, 20129, 20621, 21078, 22346, 22952, 24125, 24536, 24537, 25151, 26292, 26395, 26576, 26834, 20882, 32033, 32938, 33192, 35584, 35980, 36031, 37502, 38450, 21536, 38956, 21271, 20693, 21340, 22696, 25778, 26420, 29287, 30566, 31302, 37350, 21187, 27809, 27526, 22528, 24140, 22868, 26412, 32763, 20961, 30406], \"38528\": [25705, 30952, 39764, 40635, 22475, 22969, 26151, 26522, 27598, 21737, 27097, 24149, 33180, 26517, 39850, 26622, 40018, 26717, 20134, 20451, 21448, 25273, 26411, 27819, 36804, 20397, 32365, 40639, 19975, 24930, 28288, 28459, 34067, 21619, 26410, 39749, 24051, 31637, 23724, 23494, 34588, 28234, 34001, 31252, 33032, 22937, 31885, 27665, 30496, 21209, 22818, 28961, 29279, 30683, 38695, 40289, 26891, 23167, 23064, 20901, 21517, 21629, 26126, 30431, 36855, 37528, 40180, 23018, 29277, 28357, 20813, 26825, 32191, 32236, 38754, 40634, 25720, 27169, 33538, 22916, 23391, 27611, 29467, 30450, 32178, 32791, 33945, 20786, 26408, 40665, 30446, 26466, 21247, 39173, 23588, 25147, 31870, 36016, 21839, 24758, 32011, 38272, 21249, 20063, 20918, 22812, 29242, 32822, 37326, 24357, 30690, 21380, 24441, 32004, 34220, 35379, 36493, 38742, 26611, 34222, 37971, 24841, 24840, 27833, 30290], \"38720\": [35565, 36664, 21807, 20305, 20778, 21191, 21451, 23461, 24189, 24736, 24962, 25558, 26377, 26586, 28263, 28044, 29494, 29495, 30001, 31056, 35029, 35480, 36938, 37009, 37109, 38596, 34701, 22805, 20104, 20313, 19982, 35465, 36671, 38928, 20653, 24188, 22934, 23481, 24248, 25562, 25594, 25793, 26332, 26954, 27096, 27915, 28342, 29076, 29992, 31407, 32650, 32768, 33865, 33993, 35201, 35617, 36362, 36965, 38525, 39178, 24958, 25233, 27442], \"38784\": [27779, 28020, 32716, 32764, 28096, 32645, 34746, 35064, 26469, 33713, 38972, 38647, 27931, 32097, 33853, 37226, 20081, 21365, 23888, 27396, 28651, 34253, 34349, 35239, 21033, 21519, 23653, 26446, 26792, 29702, 29827, 30178, 35023, 35041, 37324, 38626, 38520, 24459, 29575, 31435, 33870, 25504, 30053, 21129, 27969, 28316, 29705, 30041, 30827, 31890, 38534, 31452, 40845, 20406, 24942, 26053, 34396, 20102, 20142, 20698, 20001, 20940, 23534, 26009, 26753, 28092, 29471, 30274, 30637, 31260, 31975, 33391, 35538, 36988, 37327, 38517, 38936, 21147, 32209, 20523, 21400, 26519, 28107, 29136, 29747, 33256, 36650, 38563, 40023, 40607, 29792, 22593, 28057, 32047, 39006, 20196, 20278, 20363, 20919, 21169, 23994, 24604, 29618, 31036, 33491, 37428, 38583, 38646, 38666, 40599, 40802, 26278, 27508, 21015, 21155, 28872, 35010, 24265, 24651, 24976, 28451, 29001, 31806, 32244, 32879], \"38976\": [34030, 36899, 37676, 21570, 39791, 27347, 28809, 36034, 36335, 38706, 21172, 23105, 24266, 24324, 26391, 27004, 27028, 28010, 28431, 29282, 29436, 31725, 32769, 32894, 34635, 37070, 20845, 40595, 31108, 32907, 37682, 35542, 20525, 21644, 35441, 27498, 36036, 33031, 24785, 26528, 40434, 20121, 20120, 39952, 35435, 34241, 34152, 26880, 28286, 30871, 33109], \"39071\": [24332, 19984, 19989, 20010, 20017, 20022, 20028, 20031, 20034, 
20054, 20056, 20098, 20101, 35947, 20106, 33298, 24333, 20110, 20126, 20127, 20128, 20130, 20144, 20147, 20150, 20174, 20173, 20164, 20166, 20162, 20183, 20190, 20205, 20191, 20215, 20233, 20314, 20272, 20315, 20317, 20311, 20295, 20342, 20360, 20367, 20376, 20347, 20329, 20336, 20369, 20335, 20358, 20374, 20760, 20436, 20447, 20430, 20440, 20443, 20433, 20442, 20432, 20452, 20453, 20506, 20520, 20500, 20522, 20517, 20485, 20252, 20470, 20513, 20521, 20524, 20478, 20463, 20497, 20486, 20547, 20551, 26371, 20565, 20560, 20552, 20570, 20566, 20588, 20600, 20608, 20634, 20613, 20660, 20658], \"39232\": [20681, 20682, 20659, 20674, 20694, 20702, 20709, 20717, 20707, 20718, 20729, 20725, 20745, 20737, 20738, 20758, 20757, 20756, 20762, 20769, 20794, 20791, 20796, 20795, 20799, 20800, 20818, 20812, 20820, 20834, 31480, 20841, 20842, 20846, 20864, 20866, 22232, 20876, 20873, 20879, 20881, 20883, 20885, 20886, 20900, 20902, 20898, 20905, 20906, 20907, 20915, 20913, 20914, 20912, 20917, 20925, 20933, 20937, 20955, 20960, 34389, 20969, 20973], \"39296\": [20976, 20981, 20990, 20996, 21003, 21012, 21006, 21031, 21034, 21038, 21043, 21049, 21071, 21060, 21067, 21068, 21086, 21076, 21098, 21108, 21097, 21107, 21119, 21117, 21133, 21140, 21138, 21105, 21128, 21137, 36776, 36775, 21164, 21165, 21180, 21173, 21185, 21197, 21207, 21214, 21219, 21222, 39149, 21216, 21235, 21237, 21240, 21241, 21254, 21256, 30008, 21261, 21264, 21263, 21269, 21274, 21283, 21295, 21297, 21299, 21304, 21312, 21318, 21317, 19991, 21321, 21325, 20950, 21342, 21353, 21358, 22808, 21371, 21367, 21378, 21398, 21408, 21414, 21413, 21422, 21424, 21430, 21443, 31762, 38617, 21471, 26364, 29166, 21486, 21480, 21485, 21498, 21505, 21565, 21568, 21548, 21549, 21564, 21550, 21558, 21545, 21533, 21582, 21647, 21621, 21646, 21599, 21617, 21623, 21616, 21650, 21627, 21632, 21622, 21636, 21648, 21638, 21703, 21666, 21688, 21669, 21676, 21700, 21704, 21672], \"39488\": [21675, 21698, 21668, 21694, 21692, 21720, 21733, 21734, 21775, 21780, 21757, 21742, 21741, 21754, 21730, 21817, 21824, 21859, 21836, 21806, 21852, 21829, 21846, 21847, 21816, 21811, 21853, 21913, 21888, 21679, 21898, 21919, 21883, 21886, 21912, 21918, 21934, 21884, 21891, 21929, 21895, 21928, 21978, 21957, 21983, 21956, 21980, 21988, 21972, 22036, 22007, 22038, 22014, 22013, 22043, 22009, 22094, 22096, 29151, 22068, 22070, 22066, 22072], \"39552\": [22123, 22116, 22063, 22124, 22122, 22150, 22144, 22154, 22176, 22164, 22159, 22181, 22190, 22198, 22196, 22210, 22204, 22209, 22211, 22208, 22216, 22222, 22225, 22227, 22231, 22254, 22265, 22272, 22271, 22276, 22281, 22280, 22283, 22285, 22291, 22296, 22294, 21959, 22300, 22310, 22327, 22328, 22350, 22331, 22336, 22351, 22377, 22464, 22408, 22369, 22399, 22409, 22419, 22432, 22451, 22436, 22442, 22448, 22467, 22470, 22484, 22482, 22483, 22538, 22486, 22499, 22539, 22553, 22557, 22642, 22561, 22626, 22603, 22640, 27584, 22610, 22589, 22649, 22661, 22713, 22687, 22699, 22714, 22750, 22715, 22712, 22702, 22725, 22739, 22737, 22743, 22745, 22744, 22757, 22748, 22756, 22751, 22767, 22778, 22777, 22779, 22780, 22781, 22786, 22794, 22800, 22811, 26790, 22821, 22828, 22829, 22834, 22840, 22846, 31442, 22869, 22864, 22862, 22874, 22872, 22882, 22880, 22887, 22892, 22889], \"39744\": [22904, 22913, 22941, 20318, 20395, 22947, 22962, 22982, 23016, 23004, 22925, 23001, 23002, 23077, 23071, 23057, 23068, 23049, 23066, 23104, 23148, 23113, 23093, 23094, 23138, 23146, 23194, 23228, 23230, 23243, 23234, 23229, 23267, 23255, 23270, 23273, 23254, 
23290, 23291, 23308, 23307, 23318, 23346, 23248, 23338, 23350, 23358, 23363, 23365, 23360, 23377, 23381, 23386, 23387, 23397, 23401, 23408, 23411, 23413, 23416, 25992, 23418, 23424], \"39808\": [23427, 23462, 23480, 23491, 23495, 23497, 23508, 23504, 23524, 23526, 23522, 23518, 23525, 23531, 23536, 23542, 23539, 23557, 23559, 23560, 23565, 23571, 23584, 23586, 23592, 23608, 23609, 23617, 23622, 23630, 23635, 23632, 23631, 23409, 23660, 23662, 20066, 23670, 23673, 23692, 23697, 23700, 22939, 23723, 23739, 23734, 23740, 23735, 23749, 23742, 23751, 23769, 23785, 23805, 23802, 23789, 23948, 23786, 23819, 23829, 23831, 23900, 23839, 23835, 23825, 23828, 23842, 23834, 23833, 23832, 23884, 23890, 23886, 23883, 23916, 23923, 23926, 23943, 23940, 23938, 23970, 23965, 23980, 23982, 23997, 23952, 23991, 23996, 24009, 24013, 24019, 24018, 24022, 24027, 24043, 24050, 24053, 24075, 24090, 24089, 24081, 24091, 24118, 24119, 24132, 24131, 24128, 24142, 24151, 24148, 24159, 24162, 24164, 24135, 24181, 24182, 24186, 40636, 24191, 24224, 24257, 24258, 24264, 24272, 24271], \"40000\": [24278, 24291, 24285, 24282, 24283, 24290, 24289, 24296, 24297, 24300, 24305, 24307, 24304, 24308, 24312, 24318, 24323, 24329, 24413, 24412, 24331, 24337, 24342, 24361, 24365, 24376, 24385, 24392, 24396, 24398, 24367, 24401, 24406, 24407, 24409, 24417, 24429, 24435, 24439, 24451, 24450, 24447, 24458, 24456, 24465, 24455, 24478, 24473, 24472, 24480, 24488, 24493, 24508, 24534, 24571, 24548, 24568, 24561, 24541, 24755, 24575, 24609, 24672], \"40064\": [24601, 24592, 24617, 24590, 24625, 24603, 24597, 24619, 24614, 24591, 24634, 24666, 24641, 24682, 24695, 24671, 24650, 24646, 24653, 24675, 24643, 24676, 24642, 24684, 24683, 24665, 24705, 24717, 24807, 24707, 24730, 24708, 24731, 24726, 24727, 24722, 24743, 24715, 24801, 24760, 24800, 24787, 24756, 24560, 24765, 24774, 24757, 24792, 24909, 24853, 24838, 24822, 24823, 24832, 24820, 24826, 24835, 24865, 24827, 24817, 24845, 24846, 24903, 24894, 24872, 24871, 24906, 24895, 24892, 24876, 24884, 24893, 24898, 24900, 24947, 24951, 24920, 24921, 24922, 24939, 24948, 24943, 24933, 24945, 24927, 24925, 24915, 24949, 24985, 24982, 24967, 25004, 24980, 24986, 24970, 24977, 25003, 25006, 25036, 25034, 25033, 25079, 25032, 25027, 25030, 25018, 25035, 32633, 25037, 25062, 25059, 25078, 25082, 25076, 25087, 25085, 25084, 25086, 25088, 25096, 25097, 25101, 25100, 25108, 25115], \"40256\": [25118, 25121, 25130, 25134, 25136, 25138, 25139, 25153, 25166, 25182, 25187, 25179, 25184, 25192, 25212, 25218, 25225, 25214, 25234, 25235, 25238, 25300, 25219, 25236, 25303, 25297, 25275, 25295, 25343, 25286, 25812, 25288, 25308, 25292, 25290, 25282, 25287, 25243, 25289, 25356, 25326, 25329, 25383, 25346, 25352, 25327, 25333, 25424, 25406, 25421, 25628, 25423, 25494, 25486, 25472, 25515, 25462, 25507, 25487, 25481, 25503, 25525, 25451], \"40320\": [25449, 25534, 25577, 25536, 25542, 25571, 25545, 25554, 25590, 25540, 25622, 25652, 25606, 25619, 25638, 25654, 25885, 25623, 25640, 25615, 25703, 25711, 25718, 25678, 25898, 25749, 25747, 25765, 25769, 25736, 25788, 25818, 25810, 25797, 25799, 25787, 25816, 25794, 25841, 25831, 33289, 25824, 25825, 25260, 25827, 25839, 25900, 25846, 25844, 25842, 25850, 25856, 25853, 25880, 25884, 25861, 25892, 25891, 25899, 25908, 25909, 25911, 25910, 25912, 30027, 25928, 25942, 25941, 25933, 25944, 25950, 25949, 25970, 25976, 25986, 25987, 35722, 26011, 26015, 26027, 26039, 26051, 26054, 26049, 26052, 26060, 26066, 26075, 26073, 26080, 26081, 26097, 26482, 26122, 26115, 26107, 
26483, 26165, 26166, 26164, 26140, 26191, 26180, 26185, 26177, 26206, 26205, 26212, 26215, 26216, 26207, 26210, 26224, 26243, 26248, 26254, 26249, 26244, 26264, 26269, 26305, 26297, 26313, 26302, 26300], \"40512\": [26308, 26296, 26326, 26330, 26336, 26175, 26342, 26345, 26352, 26357, 26359, 26383, 26390, 26398, 26406, 26407, 38712, 26414, 26431, 26422, 26433, 26424, 26423, 26438, 26462, 26464, 26457, 26467, 26468, 26505, 26480, 26537, 26492, 26474, 26508, 26507, 26534, 26529, 26501, 26551, 26607, 26548, 26604, 26547, 26601, 26552, 26596, 26590, 26589, 26594, 26606, 26553, 26574, 26566, 26599, 27292, 26654, 26694, 26665, 26688, 26701, 26674, 26702], \"40576\": [26803, 26667, 26713, 26723, 26743, 26751, 26783, 26767, 26797, 26772, 26781, 26779, 26755, 27310, 26809, 26740, 26805, 26784, 26810, 26895, 26765, 26750, 26881, 26826, 26888, 26840, 26914, 26918, 26849, 26892, 26829, 26836, 26855, 26837, 26934, 26898, 26884, 26839, 26851, 26917, 26873, 26848, 26863, 26920, 26922, 26906, 26915, 26913, 26822, 27001, 26999, 26972, 27000, 26987, 26964, 27006, 26990, 26937, 26996, 26941, 26969, 26928, 26977, 26974, 26973, 27009, 26986, 27058, 27054, 27088, 27071, 27073, 27091, 27070, 27086, 23528, 27082, 27101, 27067, 27075, 27047, 27182, 27025, 27040, 27036, 27029, 27060, 27102, 27112, 27138, 27163, 27135, 27402, 27129, 27122, 27111, 27141, 27057, 27166, 27117, 27156, 27115, 27146, 27154, 27329, 27171, 27155, 27204, 27148, 27250, 27190, 27256, 27207, 27234, 27225, 27238, 27208, 27192, 27170, 27280, 27277, 27296, 27268, 27298, 27299], \"40768\": [27287, 34327, 27323, 27331, 27330, 27320, 27315, 27308, 27358, 27345, 27359, 27306, 27354, 27370, 27387, 27397, 34326, 27386, 27410, 27414, 39729, 27423, 27448, 27447, 30428, 27449, 39150, 27463, 27459, 27465, 27472, 27481, 27476, 27483, 27487, 27489, 27512, 27513, 27519, 27520, 27524, 27523, 27533, 27544, 27541, 27550, 27556, 27562, 27563, 27567, 27570, 27569, 27571, 27575, 27580, 27590, 27595, 27603, 27615, 27628, 27627, 27635, 27631], \"40832\": [40638, 27656, 27667, 27668, 27675, 27684, 27683, 27742, 27733, 27746, 27754, 27778, 27789, 27802, 27777, 27803, 27774, 27752, 27763, 27794, 27792, 27844, 27889, 27859, 27837, 27863, 27845, 27869, 27822, 27825, 27838, 27834, 27867, 27887, 27865, 27882, 27935, 34893, 27958, 27947, 27965, 27960, 27929, 27957, 27955, 27922, 27916, 28003, 28051, 28004, 27994, 28025, 27993, 28046, 28053, 28644, 28037, 28153, 28181, 28170, 28085, 28103, 28134, 28088, 28102, 28140, 28126, 28108, 28136, 28114, 28101, 28154, 28121, 28132, 28117, 28138, 28142, 28205, 28270, 28206, 28185, 28274, 28255, 28222, 28195, 28267, 28203, 28278, 28237, 28191, 28227, 28218, 28238, 28196, 28415, 28189, 28216, 28290, 28330, 28312, 28361, 28343, 28371, 28349, 28335, 28356, 28338, 28372, 28373, 28303, 28325, 28354, 28319, 28481, 28433, 28748, 28396, 28408, 28414, 28479, 28402, 28465, 28399, 28466, 28364], \"57408\": [28478, 28435, 28407, 28550, 28538, 28536, 28545, 28544, 28527, 28507, 28659, 28525, 28546, 28540, 28504, 28558, 28561, 28610, 28518, 28595, 28579, 28577, 28580, 28601, 28614, 28586, 28639, 28629, 28652, 28628, 28632, 28657, 28654, 28635, 28681, 28683, 28666, 28689, 28673, 28687, 28670, 28699, 28698, 28532, 28701, 28696, 28703, 28720, 28734, 28722, 28753, 28771, 28825, 28818, 28847, 28913, 28844, 28856, 28851, 28846, 28895, 28875, 28893], \"57472\": [28889, 28937, 28925, 28956, 28953, 29029, 29013, 29064, 29030, 29026, 29004, 29014, 29036, 29071, 29179, 29060, 29077, 29096, 29100, 29143, 29113, 29118, 29138, 29129, 29140, 29134, 29152, 29164, 
29159, 29173, 29180, 29177, 29183, 29197, 29200, 29211, 29224, 29229, 29228, 29232, 29234, 29243, 29244, 29247, 29248, 29254, 29259, 29272, 29300, 29310, 29314, 29313, 29319, 29330, 29334, 29346, 29351, 29369, 29362, 29379, 29382, 29380, 29390, 29394, 29410, 29408, 29409, 29433, 29431, 20495, 29463, 29450, 29468, 29462, 29469, 29492, 29487, 29481, 29477, 29502, 29518, 29519, 40664, 29527, 29546, 29544, 29552, 29560, 29557, 29563, 29562, 29640, 29619, 29646, 29627, 29632, 29669, 29678, 29662, 29858, 29701, 29807, 29733, 29688, 29746, 29754, 29781, 29759, 29791, 29785, 29761, 29788, 29801, 29808, 29795, 29802, 29814, 29822, 29835, 29854, 29863, 29898, 29903, 29908, 29681], \"57664\": [29920, 29923, 29927, 29929, 29934, 29938, 29936, 29937, 29944, 29943, 29956, 29955, 29957, 29964, 29966, 29965, 29973, 29971, 29982, 29990, 29996, 30012, 30020, 30029, 30026, 30025, 30043, 30022, 30042, 30057, 30052, 30055, 30059, 30061, 30072, 30070, 30086, 30087, 30068, 30090, 30089, 30082, 30100, 30106, 30109, 30117, 30115, 30146, 30131, 30147, 30133, 30141, 30136, 30140, 30129, 30157, 30154, 30162, 30169, 30179, 30174, 30206, 30207], \"57728\": [30204, 30209, 30192, 30202, 30194, 30195, 30219, 30221, 30217, 30239, 30247, 30240, 30241, 30242, 30244, 30260, 30256, 30267, 30279, 30280, 30278, 30300, 30296, 30305, 30306, 30312, 30313, 30314, 30311, 30316, 30320, 30322, 30326, 30328, 30332, 30336, 30339, 30344, 30347, 30350, 30358, 30355, 30361, 30362, 30384, 30388, 30392, 30393, 30394, 30402, 30413, 30422, 30418, 30430, 30433, 30437, 30439, 30442, 34351, 30459, 30472, 30471, 30468, 30505, 30500, 30494, 30501, 30502, 30491, 30519, 30520, 30535, 30554, 30568, 30571, 30555, 30565, 30591, 30590, 30585, 30606, 30603, 30609, 30624, 30622, 30640, 30646, 30649, 30655, 30652, 30653, 30651, 30663, 30669, 30679, 30682, 30684, 30691, 30702, 30716, 30732, 30738, 31014, 30752, 31018, 30789, 30862, 30836, 30854, 30844, 30874, 30860, 30883, 30901, 30890, 30895, 30929, 30918, 30923, 30932, 30910, 30908, 30917, 30922, 30956], \"57920\": [30951, 30938, 30973, 30964, 30983, 30994, 30993, 31001, 31020, 31019, 31040, 31072, 31063, 31071, 31066, 31061, 31059, 31098, 31103, 31114, 31133, 31143, 40779, 31146, 31150, 31155, 31161, 31162, 31177, 31189, 31207, 31212, 31201, 31203, 31240, 31245, 31256, 31257, 31264, 31263, 31104, 31281, 31291, 31294, 31287, 31299, 31319, 31305, 31329, 31330, 31337, 40861, 31344, 31353, 31357, 31368, 31383, 31381, 31384, 31382, 31401, 31432, 31408], \"57984\": [31414, 31429, 31428, 31423, 36995, 31431, 31434, 31437, 31439, 31445, 31443, 31449, 31450, 31453, 31457, 31458, 31462, 31469, 31472, 31490, 31503, 31498, 31494, 31539, 31512, 31513, 31518, 31541, 31528, 31542, 31568, 31610, 31492, 31565, 31499, 31564, 31557, 31605, 31589, 31604, 31591, 31600, 31601, 31596, 31598, 31645, 31640, 31647, 31629, 31644, 31642, 31627, 31634, 31631, 31581, 31641, 31691, 31681, 31692, 31695, 31668, 31686, 31709, 31721, 31761, 31764, 31718, 31717, 31840, 31744, 31751, 31763, 31731, 31735, 31767, 31757, 31734, 31779, 31783, 31786, 31775, 31799, 31787, 31805, 31820, 31811, 31828, 31823, 31808, 31824, 31832, 31839, 31844, 31830, 31845, 31852, 31861, 31875, 31888, 31908, 31917, 31906, 31915, 31905, 31912, 31923, 31922, 31921, 31918, 31929, 31933, 31936, 31941, 31938, 31960, 31954, 31964, 31970, 39739, 31983, 31986, 31988, 31990, 31994, 32006], \"58176\": [32002, 32028, 32021, 32010, 32069, 32075, 32046, 32050, 32063, 32053, 32070, 32115, 32086, 32078, 32114, 32104, 32110, 32079, 32099, 32147, 32137, 32091, 32143, 32125, 32155, 
32186, 32174, 32163, 32181, 32199, 32189, 32171, 32317, 32162, 32175, 32220, 32184, 32159, 32176, 32216, 32221, 32228, 32222, 32251, 32242, 32225, 32261, 32266, 32291, 32289, 32274, 32305, 32287, 32265, 32267, 32290, 32326, 32358, 32315, 32309, 32313, 32323, 32311], \"58240\": [32306, 32314, 32359, 32349, 32342, 32350, 32345, 32346, 32377, 32362, 32361, 32380, 32379, 32387, 32213, 32381, 36782, 32383, 32392, 32393, 32396, 32402, 32400, 32403, 32404, 32406, 32398, 32411, 32412, 32568, 32570, 32581, 32588, 32589, 32590, 32592, 32593, 32597, 32596, 32600, 32607, 32608, 32616, 32617, 32615, 32632, 32642, 32646, 32643, 32648, 32647, 32652, 32660, 32670, 32669, 32666, 32675, 32687, 32690, 32697, 32686, 32694, 32696, 35697, 32709, 32710, 32714, 32725, 32724, 32737, 32742, 32745, 32755, 32761, 39132, 32774, 32772, 32779, 32786, 32792, 32793, 32796, 32801, 32808, 32831, 32827, 32842, 32838, 32850, 32856, 32858, 32863, 32866, 32872, 32883, 32882, 32880, 32886, 32889, 32893, 32895, 32900, 32902, 32901, 32923, 32915, 32922, 32941, 20880, 32940, 32987, 32997, 32985, 32989, 32964, 32986, 32982, 33033, 33007, 33009, 33051, 33065, 33059, 33071, 33099], \"58432\": [38539, 33094, 33086, 33107, 33105, 33020, 33137, 33134, 33125, 33126, 33140, 33155, 33160, 33162, 33152, 33154, 33184, 33173, 33188, 33187, 33119, 33171, 33193, 33200, 33205, 33214, 33208, 33213, 33216, 33218, 33210, 33225, 33229, 33233, 33241, 33240, 33224, 33242, 33247, 33248, 33255, 33274, 33275, 33278, 33281, 33282, 33285, 33287, 33290, 33293, 33296, 33302, 33321, 33323, 33336, 33331, 33344, 33369, 33368, 33373, 33370, 33375, 33380], \"58496\": [33378, 33384, 33386, 33387, 33326, 33393, 33399, 33400, 33406, 33421, 33426, 33451, 33439, 33467, 33452, 33505, 33507, 33503, 33490, 33524, 33523, 33530, 33683, 33539, 33531, 33529, 33502, 33542, 33500, 33545, 33497, 33589, 33588, 33558, 33586, 33585, 33600, 33593, 33616, 33605, 33583, 33579, 33559, 33560, 33669, 33690, 33706, 33695, 33698, 33686, 33571, 33678, 33671, 33674, 33660, 33717, 33651, 33653, 33696, 33673, 33704, 33780, 33811, 33771, 33742, 33789, 33795, 33752, 33803, 33729, 33783, 33799, 33760, 33778, 33805, 33826, 33824, 33725, 33848, 34054, 33787, 33901, 33834, 33852, 34138, 33924, 33911, 33899, 33965, 33902, 33922, 33897, 33862, 33836, 33903, 33913, 33845, 33994, 33890, 33977, 33983, 33951, 34009, 33997, 33979, 34010, 34000, 33985, 33990, 34006, 33953, 34081, 34047, 34036, 34071, 34072, 34092, 34079, 34069, 34068, 34044, 34112, 34147, 34136, 34120], \"58688\": [34113, 34306, 34123, 34133, 34176, 34212, 34184, 34193, 34186, 34216, 34157, 34196, 34203, 34282, 34183, 34204, 34167, 34174, 34192, 34249, 34234, 34255, 34233, 34256, 34261, 34269, 34277, 34268, 34297, 34314, 34323, 34315, 34302, 34298, 34310, 34338, 34330, 34352, 34367, 34381, 20053, 34388, 34399, 34407, 34417, 34451, 34467, 34473, 34474, 34443, 34444, 34486, 34479, 34500, 34502, 34480, 34505, 34851, 34475, 34516, 34526, 34537, 34540], \"58752\": [34527, 34523, 34543, 34578, 34566, 34568, 34560, 34563, 34555, 34577, 34569, 34573, 34553, 34570, 34612, 34623, 34615, 34619, 34597, 34601, 34586, 34656, 34655, 34680, 34636, 34638, 34676, 34647, 34664, 34670, 34649, 34643, 34659, 34666, 34821, 34722, 34719, 34690, 34735, 34763, 34749, 34752, 34768, 38614, 34731, 34756, 34739, 34759, 34758, 34747, 34799, 34802, 34784, 34831, 34829, 34814, 34806, 34807, 34830, 34770, 34833, 34838, 34837, 34850, 34849, 34865, 34870, 34873, 34855, 34875, 34884, 34882, 34898, 34905, 34910, 34914, 34923, 34945, 34942, 34974, 34933, 34941, 34997, 34930, 
34946, 34967, 34962, 34990, 34969, 34978, 34957, 34980, 34992, 35007, 34993, 35011, 35012, 35028, 35032, 35033, 35037, 35065, 35074, 35068, 35060, 35048, 35058, 35076, 35084, 35082, 35091, 35139, 35102, 35109, 35114, 35115, 35137, 35140, 35131, 35126, 35128, 35148, 35101, 35168, 35166], \"58944\": [35174, 35172, 35181, 35178, 35183, 35188, 35191, 35198, 35203, 35208, 35210, 35219, 35224, 35233, 35241, 35238, 35244, 35247, 35250, 35258, 35261, 35263, 35264, 35290, 35292, 35293, 35303, 35316, 35320, 35331, 35350, 35344, 35340, 35355, 35357, 35365, 35382, 35393, 35419, 35410, 35398, 35400, 35452, 35437, 35436, 35426, 35461, 35458, 35460, 35496, 35489, 35473, 35493, 35494, 35482, 35491, 35524, 35533, 35522, 35546, 35563, 35571, 35559], \"59008\": [35556, 35569, 35604, 35552, 35554, 35575, 35550, 35547, 35596, 35591, 35610, 35553, 35606, 35600, 35607, 35616, 35635, 38827, 35622, 35627, 35646, 35624, 35649, 35660, 35663, 35662, 35657, 35670, 35675, 35674, 35691, 35679, 35692, 35695, 35700, 35709, 35712, 35724, 35726, 35730, 35731, 35734, 35737, 35738, 35898, 35905, 35903, 35912, 35916, 35918, 35920, 35925, 35938, 35948, 35960, 35962, 35970, 35977, 35973, 35978, 35981, 35982, 35988, 35964, 35992, 25117, 36013, 36010, 36029, 36018, 36019, 36014, 36022, 36040, 36033, 36068, 36067, 36058, 36093, 36090, 36091, 36100, 36101, 36106, 36103, 36111, 36109, 36112, 40782, 36115, 36045, 36116, 36118, 36199, 36205, 36209, 36211, 36225, 36249, 36290, 36286, 36282, 36303, 36314, 36310, 36300, 36315, 36299, 36330, 36331, 36319, 36323, 36348, 36360, 36361, 36351, 36381, 36382, 36368, 36383, 36418, 36405, 36400, 36404, 36426], \"59200\": [36423, 36425, 36428, 36432, 36424, 36441, 36452, 36448, 36394, 36451, 36437, 36470, 36466, 36476, 36481, 36487, 36485, 36484, 36491, 36490, 36499, 36497, 36500, 36505, 36522, 36513, 36524, 36528, 36550, 36529, 36542, 36549, 36552, 36555, 36571, 36579, 36604, 36603, 36587, 36606, 36618, 36613, 36629, 36626, 36633, 36627, 36636, 36639, 36635, 36620, 36646, 36659, 36667, 36665, 36677, 36674, 36670, 36684, 36681, 36678, 36686, 36695, 36700], \"59264\": [36706, 36707, 36708, 36764, 36767, 36771, 36781, 36783, 36791, 36826, 36837, 36834, 36842, 36847, 36999, 36852, 36869, 36857, 36858, 36881, 36885, 36897, 36877, 36894, 36886, 36875, 36903, 36918, 36917, 36921, 36856, 36943, 36944, 36945, 36946, 36878, 36937, 36926, 36950, 36952, 36958, 36968, 36975, 36982, 38568, 36978, 36994, 36989, 36993, 36992, 37002, 37001, 37007, 37032, 37039, 37041, 37045, 37090, 37092, 25160, 37083, 37122, 37138, 37145, 37170, 37168, 37194, 37206, 37208, 37219, 37221, 37225, 37235, 37234, 37259, 37257, 37250, 37282, 37291, 37295, 37290, 37301, 37300, 37306, 37312, 37313, 37321, 37323, 37328, 37334, 37343, 37345, 37339, 37372, 37365, 37366, 37406, 37375, 37396, 37420, 37397, 37393, 37470, 37463, 37445, 37449, 37476, 37448, 37525, 37439, 37451, 37456, 37532, 37526, 37523, 37531, 37466, 37583, 37561, 37559, 37609, 37647, 37626, 37700, 37678], \"59456\": [37657, 37666, 37658, 37667, 37690, 37685, 37691, 37724, 37728, 37756, 37742, 37718, 37808, 37804, 37805, 37780, 37817, 37846, 37847, 37864, 37861, 37848, 37827, 37853, 37840, 37832, 37860, 37914, 37908, 37907, 37891, 37895, 37904, 37942, 37931, 37941, 37921, 37946, 37953, 37970, 37956, 37979, 37984, 37986, 37982, 37994, 37417, 38000, 38005, 38007, 38013, 37978, 38012, 38014, 38017, 38015, 38274, 38279, 38282, 38292, 38294, 38296, 38297], \"59520\": [38304, 38312, 38311, 38317, 38332, 38331, 38329, 38334, 38346, 28662, 38339, 38349, 38348, 38357, 38356, 38358, 
38364, 38369, 38373, 38370, 38433, 38440, 38446, 38447, 38466, 38476, 38479, 38475, 38519, 38492, 38494, 38493, 38495, 38502, 38514, 38508, 38541, 38552, 38549, 38551, 38570, 38567, 38577, 38578, 38576, 38580, 38582, 38584, 38585, 38606, 38603, 38601, 38605, 35149, 38620, 38669, 38613, 38649, 38660, 38662, 38664, 38675, 38670, 38673, 38671, 38678, 38681, 38692, 38698, 38704, 38713, 38717, 38718, 38724, 38726, 38728, 38722, 38729, 38748, 38752, 38756, 38758, 38760, 21202, 38763, 38769, 38777, 38789, 38780, 38785, 38778, 38790, 38795, 38799, 38800, 38812, 38824, 38822, 38819, 38835, 38836, 38851, 38854, 38856, 38859, 38876, 38893, 40783, 38898, 31455, 38902, 38901, 38927, 38924, 38968, 38948, 38945, 38967, 38973, 38982, 38991, 38987, 39019, 39023, 39024], \"59712\": [39025, 39028, 39027, 39082, 39087, 39089, 39094, 39108, 39107, 39110, 39145, 39147, 39171, 39177, 39186, 39188, 39192, 39201, 39197, 39198, 39204, 39200, 39212, 39214, 39229, 39230, 39234, 39241, 39237, 39248, 39243, 39249, 39250, 39244, 39253, 39319, 39320, 39333, 39341, 39342, 39356, 39391, 39387, 39389, 39384, 39377, 39405, 39406, 39409, 39410, 39419, 39416, 39425, 39439, 39429, 39394, 39449, 39467, 39479, 39493, 39490, 39488, 39491], \"59776\": [39486, 39509, 39501, 39515, 39511, 39519, 39522, 39525, 39524, 39529, 39531, 39530, 39597, 39600, 39612, 39616, 39631, 39633, 39635, 39636, 39646, 39647, 39650, 39651, 39654, 39663, 39659, 39662, 39668, 39665, 39671, 39675, 39686, 39704, 39706, 39711, 39714, 39715, 39717, 39719, 39720, 39721, 39722, 39726, 39727, 39730, 39748, 39747, 39759, 39757, 39758, 39761, 39768, 39796, 39827, 39811, 39825, 39830, 39831, 39839, 39840, 39848, 39860, 39872, 39882, 39865, 39878, 39887, 39889, 39890, 39907, 39906, 39908, 39892, 39905, 39994, 39922, 39921, 39920, 39957, 39956, 39945, 39955, 39948, 39942, 39944, 39954, 39946, 39940, 39982, 39963, 39973, 39972, 39969, 39984, 40007, 39986, 40006, 39998, 40026, 40032, 40039, 40054, 40056, 40167, 40172, 40176, 40201, 40200, 40171, 40195, 40198, 40234, 40230, 40367, 40227, 40223, 40260, 40213, 40210, 40257, 40255, 40254, 40262, 40264], \"59968\": [40285, 40286, 40292, 40273, 40272, 40281, 40306, 40329, 40327, 40363, 40303, 40314, 40346, 40356, 40361, 40370, 40388, 40385, 40379, 40376, 40378, 40390, 40399, 40386, 40409, 40403, 40440, 40422, 40429, 40431, 40445, 40474, 40475, 40478, 40565, 40569, 40573, 40577, 40584, 40587, 40588, 40594, 40597, 40593, 40605, 40613, 40617, 40632, 40618, 40621, 38753, 40652, 40654, 40655, 40656, 40660, 40668, 40670, 40669, 40672, 40677, 40680, 40687], \"60032\": [40692, 40694, 40695, 40697, 40699, 40700, 40701, 40711, 40712, 30391, 40725, 40737, 40748, 40766, 40778, 40786, 40788, 40803, 40799, 40800, 40801, 40806, 40807, 40812, 40810, 40823, 40818, 40822, 40853, 40860, 40864, 22575, 27079, 36953, 29796, 20956, 29081], \"60736\": [32394, 35100, 37704, 37512, 34012, 20425, 28859, 26161, 26824, 37625, 26363, 24389, 20008, 20193, 20220, 20224, 20227, 20281, 20310, 20370, 20362, 20378, 20372, 20429, 20544, 20514, 20479, 20510, 20550, 20592, 20546, 20628, 20724, 20696, 20810, 20836, 20893, 20926, 20972, 21013, 21148, 21158, 21184, 21211, 21248, 21255, 21284, 21362, 21395, 21426, 21469, 64014, 21660, 21642, 21673, 21759, 21894, 22361, 22373, 22444, 22472, 22471, 64015], \"60800\": [64016, 22686, 22706, 22795, 22867, 22875, 22877, 22883, 22948, 22970, 23382, 23488, 29999, 23512, 23532, 23582, 23718, 23738, 23797, 23847, 23891, 64017, 23874, 23917, 23992, 23993, 24016, 24353, 24372, 24423, 24503, 24542, 24669, 24709, 24714, 24798, 
24789, 24864, 24818, 24849, 24887, 24880, 24984, 25107, 25254, 25589, 25696, 25757, 25806, 25934, 26112, 26133, 26171, 26121, 26158, 26142, 26148, 26213, 26199, 26201, 64018, 26227, 26265, 26272, 26290, 26303, 26362, 26382, 63785, 26470, 26555, 26706, 26560, 26625, 26692, 26831, 64019, 26984, 64020, 27032, 27106, 27184, 27243, 27206, 27251, 27262, 27362, 27364, 27606, 27711, 27740, 27782, 27759, 27866, 27908, 28039, 28015, 28054, 28076, 28111, 28152, 28146, 28156, 28217, 28252, 28199, 28220, 28351, 28552, 28597, 28661, 28677, 28679, 28712, 28805, 28843, 28943, 28932, 29020, 28998, 28999, 64021, 29121, 29182, 29361], \"60992\": [29374, 29476, 64022, 29559, 29629, 29641, 29654, 29667, 29650, 29703, 29685, 29734, 29738, 29737, 29742, 29794, 29833, 29855, 29953, 30063, 30338, 30364, 30366, 30363, 30374, 64023, 30534, 21167, 30753, 30798, 30820, 30842, 31024, 64024, 64025, 64026, 31124, 64027, 31131, 31441, 31463, 64028, 31467, 31646, 64029, 32072, 32092, 32183, 32160, 32214, 32338, 32583, 32673, 64030, 33537, 33634, 33663, 33735, 33782, 33864, 33972, 34131, 34137], \"61056\": [34155, 64031, 34224, 64032, 64033, 34823, 35061, 35346, 35383, 35449, 35495, 35518, 35551, 64034, 35574, 35667, 35711, 36080, 36084, 36114, 36214, 64035, 36559, 64036, 64037, 36967, 37086, 64038, 37141, 37159, 37338, 37335, 37342, 37357, 37358, 37348, 37349, 37382, 37392, 37386, 37434, 37440, 37436, 37454, 37465, 37457, 37433, 37479, 37543, 37495, 37496, 37607, 37591, 37593, 37584, 64039, 37589, 37600, 37587, 37669, 37665, 37627, 64040, 37662, 37631, 37661, 37634, 37744, 37719, 37796, 37830, 37854, 37880, 37937, 37957, 37960, 38290, 63964, 64041, 38557, 38575, 38707, 38715, 38723, 38733, 38735, 38737, 38741, 38999, 39013, 64042, 64043, 39207, 64044, 39326, 39502, 39641, 39644, 39797, 39794, 39823, 39857, 39867, 39936, 40304, 40299, 64045, 40473, 40657], \"61167\": [8560, 8561, 8562, 8563, 8564, 8565, 8566, 8567, 8568, 8569, 65506, 65508, 65287, 65282], \"64064\": [8560, 8561, 8562, 8563, 8564, 8565, 8566, 8567, 8568, 8569, 8544, 8545, 8546, 8547, 8548, 8549, 8550, 8551, 8552, 8553, 65506, 65508, 65287, 65282, 12849, 8470, 8481, 8757, 32394, 35100, 37704, 37512, 34012, 20425, 28859, 26161, 26824, 37625, 26363, 24389, 20008, 20193, 20220, 20224, 20227, 20281, 20310, 20370, 20362, 20378, 20372, 20429, 20544, 20514, 20479, 20510, 20550, 20592, 20546, 20628, 20724, 20696, 20810], \"64128\": [20836, 20893, 20926, 20972, 21013, 21148, 21158, 21184, 21211, 21248, 21255, 21284, 21362, 21395, 21426, 21469, 64014, 21660, 21642, 21673, 21759, 21894, 22361, 22373, 22444, 22472, 22471, 64015, 64016, 22686, 22706, 22795, 22867, 22875, 22877, 22883, 22948, 22970, 23382, 23488, 29999, 23512, 23532, 23582, 23718, 23738, 23797, 23847, 23891, 64017, 23874, 23917, 23992, 23993, 24016, 24353, 24372, 24423, 24503, 24542, 24669, 24709, 24714, 24798, 24789, 24864, 24818, 24849, 24887, 24880, 24984, 25107, 25254, 25589, 25696, 25757, 25806, 25934, 26112, 26133, 26171, 26121, 26158, 26142, 26148, 26213, 26199, 26201, 64018, 26227, 26265, 26272, 26290, 26303, 26362, 26382, 63785, 26470, 26555, 26706, 26560, 26625, 26692, 26831, 64019, 26984, 64020, 27032, 27106, 27184, 27243, 27206, 27251, 27262, 27362, 27364, 27606, 27711, 27740, 27782, 27759, 27866, 27908, 28039, 28015], \"64320\": [28054, 28076, 28111, 28152, 28146, 28156, 28217, 28252, 28199, 28220, 28351, 28552, 28597, 28661, 28677, 28679, 28712, 28805, 28843, 28943, 28932, 29020, 28998, 28999, 64021, 29121, 29182, 29361, 29374, 29476, 64022, 29559, 29629, 29641, 29654, 29667, 29650, 29703, 
29685, 29734, 29738, 29737, 29742, 29794, 29833, 29855, 29953, 30063, 30338, 30364, 30366, 30363, 30374, 64023, 30534, 21167, 30753, 30798, 30820, 30842, 31024, 64024, 64025], \"64384\": [64026, 31124, 64027, 31131, 31441, 31463, 64028, 31467, 31646, 64029, 32072, 32092, 32183, 32160, 32214, 32338, 32583, 32673, 64030, 33537, 33634, 33663, 33735, 33782, 33864, 33972, 34131, 34137, 34155, 64031, 34224, 64032, 64033, 34823, 35061, 35346, 35383, 35449, 35495, 35518, 35551, 64034, 35574, 35667, 35711, 36080, 36084, 36114, 36214, 64035, 36559, 64036, 64037, 36967, 37086, 64038, 37141, 37159, 37338, 37335, 37342, 37357, 37358, 37348, 37349, 37382, 37392, 37386, 37434, 37440, 37436, 37454, 37465, 37457, 37433, 37479, 37543, 37495, 37496, 37607, 37591, 37593, 37584, 64039, 37589, 37600, 37587, 37669, 37665, 37627, 64040, 37662, 37631, 37661, 37634, 37744, 37719, 37796, 37830, 37854, 37880, 37937, 37957, 37960, 38290, 63964, 64041, 38557, 38575, 38707, 38715, 38723, 38733, 38735, 38737, 38741, 38999, 39013, 64042, 64043, 39207, 64044, 39326, 39502, 39641], \"64576\": [39644, 39797, 39794, 39823, 39857, 39867, 39936, 40304, 40299, 64045, 40473, 40657]}\nvar decoding_table = [],\n encoding_table = []\nfor(let cp in cps){\n cp = parseInt(cp)\n for(let i = 0, len = cps[cp].length; i < len; i++){\n let key = cp + i,\n value = cps[cp][i]\n decoding_table[key] = value\n encoding_table[value] = key\n }\n}\nvar module = {encoding_table, decoding_table}\n__BRYTHON__.addToImported(\"encoding_cp932\", module)\n"], "_jsre": [".js", "(function($B){\n\n var _b_ = $B.builtins\n\n var MatchObject = $B.make_class(\"Match\",\n function(jsmatch, string, pattern){\n return {\n __class__: MatchObject,\n jsmatch: jsmatch,\n string: string\n }\n }\n )\n MatchObject.item = function(self, rank){\n return self.jsmatch[rank]\n }\n MatchObject.group = function(self){\n var res = []\n for(var i = 0, _len_i = arguments.length; i < _len_i; i++){\n if(self.jsmatch[arguments[i]] === undefined){res.push(_b_.None)}\n else{res.push(self.jsmatch[arguments[i]])}\n }\n if(arguments.length == 1){return res[0]}\n return _b_.tuple.$factory(res)\n }\n MatchObject.groups = function(self, _default){\n if(_default === undefined){_default = _b_.None}\n var res = []\n for(var i = 1, _len_i = self.length; i < _len_i; i++){\n if(self.jsmatch[i] === undefined){res.push(_default)}\n else{res.push(self.jsmatch[i])}\n }\n return _b_.tuple.$factory(res)\n }\n MatchObject.start = function(self){\n return self.index\n }\n MatchObject.end = function(self){\n return self.length - self.index\n }\n\n $B.set_func_names(MatchObject, '_jsre')\n\n var obj = {\n __str__: function(){return \"\"}\n }\n obj.A = obj.ASCII = 256\n obj.I = obj.IGNORECASE = 2 // 'i'\n obj.L = obj.LOCALE = 4\n obj.M = obj.MULTILINE = 8 // 'm'\n obj.S = obj.DOTALL = 16\n obj.U = obj.UNICODE = 32\n obj.X = obj.VERBOSE = 64\n obj._is_valid = function(pattern) {\n if ($B.$options.re == 'pyre'){return false} //force use of python's re module\n if ($B.$options.re == 'jsre'){return true} //force use of brythons re module\n // FIXME: Improve\n\n if(! $B.$isinstance(pattern, _b_.str)){\n // this is probably a SRE_PATTERN, so return false, and let\n // python's re module handle this.\n return false\n }\n var is_valid = false\n try{\n new RegExp(pattern)\n is_valid = true\n }\n catch(e){}\n if(! 
is_valid){return false} //if js won't parse the pattern return false\n\n // using reference http://www.regular-expressions.info/\n // to compare python re and javascript regex libraries\n\n // look for things javascript does not support\n // check for name capturing group\n var mylist = ['?P=', '?P<', '(?#', '(?<=', '(? -1) return false\n }\n\n var re_list=['\\{,\\d+\\}']\n for(var i=0, _len_i = re_list.length; i < _len_i; i++) {\n var _re = new RegExp(re_list[i])\n if (_re.test(pattern)){return false}\n }\n\n // it looks like the pattern has passed all our tests so lets assume\n // javascript can handle this pattern.\n return true\n }\n var $SRE_PatternDict = {\n __class__:_b_.type,\n $infos:{\n __name__:'SRE_Pattern'\n }\n }\n $SRE_PatternDict.__mro__ = [_b_.object]\n $SRE_PatternDict.findall = function(self, string){\n return obj.findall(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.finditer = function(self, string){\n return obj.finditer(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.match = function(self, string){\n return obj.match(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.search = function(self, string){\n return obj.search(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.sub = function(self,repl,string){\n return obj.sub(self.pattern,repl,string,self.flags)\n }\n $B.set_func_names($SRE_PatternDict, \"_jsre\")\n // TODO: groups\n // TODO: groupindex\n function normflags(flags){\n return ((flags & obj.I)? 'i' : '') + ((flags & obj.M)? 'm' : '');\n }\n // TODO: fullmatch()\n // TODO: split()\n // TODO: subn()\n obj.compile = function(pattern, flags){\n return {\n __class__: $SRE_PatternDict,\n pattern: pattern,\n flags: normflags(flags)\n }\n }\n obj.escape = function(string){\n // Escape all the characters in pattern except ASCII letters, numbers\n // and '_'. 
This is useful if you want to match an arbitrary literal\n // string that may have regular expression metacharacters in it.\n var res = ''\n var ok = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'\n for(var i = 0, _len_i = string.length; i < _len_i; i++){\n if(ok.search(string.charAt(i))>-1){res += string.charAt(i)}\n }\n return res\n }\n obj.findall = function(pattern, string, flags){\n var $ns=$B.args('re.findall', 2,\n {pattern:null, string:null}, ['pattern', 'string'],\n arguments,{}, 'args', 'kw') ,\n args = $ns['args'] ,\n _flags = 0;\n if(args.length>0){var flags = args[0]}\n else{var _flags = $B.$getattr($ns['kw'], 'get')('flags', 0)}\n\n var flags = normflags()\n flags += 'gm'\n var jsp = new RegExp(pattern,flags),\n jsmatch = string.match(jsp)\n if(jsmatch === null){return []}\n return jsmatch\n }\n obj.finditer = function(pattern, string, flags){\n var $ns=$B.args('re.finditer', 2,\n {pattern:null, string:null}, ['pattern', 'string'],\n arguments,{},'args','kw'),\n args = $ns['args'],\n _flags = 0;\n if(args.length>0){var flags=args[0]}\n else{var _flags = $B.$getattr($ns['kw'], 'get')('flags', 0)}\n\n var flags = normflags()\n flags += 'gm'\n var jsp = new RegExp(pattern, flags),\n jsmatch = string.match(jsp);\n if(jsmatch === null){return []}\n\n var _list = []\n for(var j = 0, _len_j = jsmatch.length; j < _len_j; j++) {\n var mo = {}\n mo._match=jsmatch[j]\n mo.group = function(){\n var res = []\n for(var i=0, _len_i = arguments.length; i < _len_i;i++){\n if(jsmatch[arguments[i]] === undefined){res.push(_b_.None)}\n else{res.push(jsmatch[arguments[i]])}\n }\n if(arguments.length == 1){return res[0]}\n return _b_.tuple.$factory(res)\n }\n mo.groups = function(_default){\n if(_default === undefined){_default = _b_.None}\n var res = []\n for(var i = 1, _len_i = jsmatch.length; i < _len_i; i++){\n if(jsmatch[i] === undefined){res.push(_default)}\n else{res.push(jsmatch[i])}\n }\n return _b_.tuple.$factory(res)\n }\n mo.start = function(){return mo._match.index}\n mo.end = function(){return mo._match.length - mo._match.index}\n mo.string = string\n _list.push(mo)\n }\n return _list\n }\n obj.search = function(pattern, string){\n var $ns = $B.args('re.search', 2,\n {pattern:null, string:null},['pattern', 'string'],\n arguments, {}, 'args', 'kw')\n var args = $ns['args']\n if(args.length>0){var flags = args[0]}\n else{var flags = $B.$getattr($ns['kw'], 'get')('flags', '')}\n flags = normflags(flags)\n var jsp = new RegExp(pattern, flags)\n var jsmatch = string.match(jsp)\n if(jsmatch === null){return _b_.None}\n return MatchObject.$factory(jsmatch, string, pattern)\n }\n obj.sub = function(pattern, repl, string){\n var $ns=$B.args('re.search', 3,\n {pattern: null, repl: null, string: null},\n ['pattern', 'repl', 'string'],\n arguments,{}, 'args', 'kw')\n for($var in $ns){eval(\"var \" + $var + \"=$ns[$var]\")}\n var args = $ns['args']\n var count = _b_.dict.get($ns['kw'], 'count', 0)\n var flags = _b_.dict.get($ns['kw'], 'flags', '')\n if(args.length > 0){var count = args[0]}\n if(args.length > 1){var flags = args[1]}\n flags = normflags(flags)\n if(typeof repl == \"string\"){\n // backreferences are \\1, \\2... in Python but $1,$2... 
in Javascript\n repl = repl.replace(/\\\\(\\d+)/g, '$$$1')\n }else if(typeof repl == \"function\"){\n // the argument passed to the Python function is the match object\n // the arguments passed to the Javascript function are :\n // - the matched substring\n // - the matched groups\n // - the offset of the matched substring inside the string\n // - the string being examined\n var $repl1 = function(){\n var mo = Object()\n mo.string = arguments[arguments.length - 1]\n var matched = arguments[0];\n var start = arguments[arguments.length - 2]\n var end = start + matched.length\n mo.start = function(){return start}\n mo.end = function(){return end}\n groups = []\n for(var i = 1, _len_i = arguments.length-2; i < _len_i; i++){\n groups.push(arguments[i])\n }\n mo.groups = function(_default){\n if(_default === undefined){_default = _b_.None}\n var res = []\n for(var i = 0, _len_i = groups.length; i < _len_i; i++){\n if(groups[i] === undefined){res.push(_default)}\n else{res.push(groups[i])}\n }\n return res\n }\n mo.group = function(i){\n if(i==0){return matched}\n return groups[i-1]\n }\n return repl(mo)\n }\n }\n if(count == 0){flags += 'g'}\n var jsp = new RegExp(pattern, flags)\n if(typeof repl == 'function'){return string.replace(jsp, $repl1)}\n else{return string.replace(jsp, repl)}\n }\n obj.match = (function(search_func){\n return function(){\n // match is like search but pattern must start with ^\n var pattern = arguments[0]\n if(pattern.charAt(0) != '^'){pattern = '^'+pattern}\n var args = [pattern]\n for(var i = 1, _len_i = arguments.length; i < _len_i; i++){\n args.push(arguments[i])\n }\n return search_func.apply(null, args)\n }\n })(obj.search)\n\n $B.addToImported('_jsre', obj)\n}\n)(__BRYTHON__)\n"], "crypto_js": [".py", "", [], 1], "crypto_js.rollups": [".py", "", [], 1], "crypto_js.rollups.sha1": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(e,m){var p={},j=p.lib={},l=function(){},f=j.Base={extend:function(a){l.prototype=this;var c=new l;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nn=j.WordArray=f.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=m?c:4*a.length},toString:function(a){return(a||h).stringify(this)},concat:function(a){var c=this.words,q=a.words,d=this.sigBytes;a=a.sigBytes;this.clamp();if(d%4)for(var b=0;b>>2]|=(q[b>>>2]>>>24-8*(b%4)&255)<<24-8*((d+b)%4);else if(65535>>2]=q[b>>>2];else c.push.apply(c,q);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=e.ceil(c/4)},clone:function(){var a=f.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],b=0;b>>2]>>>24-8*(d%4)&255;b.push((f>>>4).toString(16));b.push((f&15).toString(16))}return b.join(\"\")},parse:function(a){for(var c=a.length,b=[],d=0;d>>3]|=parseInt(a.substr(d,\n2),16)<<24-4*(d%8);return new n.init(b,c/2)}},g=b.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var b=[],d=0;d>>2]>>>24-8*(d%4)&255));return b.join(\"\")},parse:function(a){for(var c=a.length,b=[],d=0;d>>2]|=(a.charCodeAt(d)&255)<<24-8*(d%4);return new n.init(b,c)}},r=b.Utf8={stringify:function(a){try{return decodeURIComponent(escape(g.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return g.parse(unescape(encodeURIComponent(a)))}},\nk=j.BufferedBlockAlgorithm=f.extend({reset:function(){this._data=new n.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=r.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,b=c.words,d=c.sigBytes,f=this.blockSize,h=d/(4*f),h=a?e.ceil(h):e.max((h|0)-this._minBufferSize,0);a=h*f;d=e.min(4*a,d);if(a){for(var g=0;ga;a++){if(16>a)l[a]=f[n+a]|0;else{var c=l[a-3]^l[a-8]^l[a-14]^l[a-16];l[a]=c<<1|c>>>31}c=(h<<5|h>>>27)+j+l[a];c=20>a?c+((g&e|~g&k)+1518500249):40>a?c+((g^e^k)+1859775393):60>a?c+((g&e|g&k|e&k)-1894007588):c+((g^e^\nk)-899497514);j=k;k=e;e=g<<30|g>>>2;g=h;h=c}b[0]=b[0]+h|0;b[1]=b[1]+g|0;b[2]=b[2]+e|0;b[3]=b[3]+k|0;b[4]=b[4]+j|0},_doFinalize:function(){var f=this._data,e=f.words,b=8*this._nDataBytes,h=8*f.sigBytes;e[h>>>5]|=128<<24-h%32;e[(h+64>>>9<<4)+14]=Math.floor(b/4294967296);e[(h+64>>>9<<4)+15]=b;f.sigBytes=4*e.length;this._process();return this._hash},clone:function(){var e=j.clone.call(this);e._hash=this._hash.clone();return e}});e.SHA1=j._createHelper(m);e.HmacSHA1=j._createHmacHelper(m)})();\n"], "crypto_js.rollups.sha256": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(h,s){var f={},t=f.lib={},g=function(){},j=t.Base={extend:function(a){g.prototype=this;var c=new g;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=t.WordArray=j.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=s?c:4*a.length},toString:function(a){return(a||u).stringify(this)},concat:function(a){var c=this.words,d=a.words,b=this.sigBytes;a=a.sigBytes;this.clamp();if(b%4)for(var e=0;e>>2]|=(d[e>>>2]>>>24-8*(e%4)&255)<<24-8*((b+e)%4);else if(65535>>2]=d[e>>>2];else c.push.apply(c,d);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=h.ceil(c/4)},clone:function(){var a=j.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],d=0;d>>2]>>>24-8*(b%4)&255;d.push((e>>>4).toString(16));d.push((e&15).toString(16))}return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>3]|=parseInt(a.substr(b,\n2),16)<<24-4*(b%8);return new q.init(d,c/2)}},k=v.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var d=[],b=0;b>>2]>>>24-8*(b%4)&255));return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>2]|=(a.charCodeAt(b)&255)<<24-8*(b%4);return new q.init(d,c)}},l=v.Utf8={stringify:function(a){try{return decodeURIComponent(escape(k.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return k.parse(unescape(encodeURIComponent(a)))}},\nx=t.BufferedBlockAlgorithm=j.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=l.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,d=c.words,b=c.sigBytes,e=this.blockSize,f=b/(4*e),f=a?h.ceil(f):h.max((f|0)-this._minBufferSize,0);a=f*e;b=h.min(4*a,b);if(a){for(var m=0;mk;){var l;a:{l=u;for(var x=h.sqrt(l),w=2;w<=x;w++)if(!(l%w)){l=!1;break a}l=!0}l&&(8>k&&(j[k]=v(h.pow(u,0.5))),q[k]=v(h.pow(u,1/3)),k++);u++}var a=[],f=f.SHA256=g.extend({_doReset:function(){this._hash=new t.init(j.slice(0))},_doProcessBlock:function(c,d){for(var b=this._hash.words,e=b[0],f=b[1],m=b[2],h=b[3],p=b[4],j=b[5],k=b[6],l=b[7],n=0;64>n;n++){if(16>n)a[n]=\nc[d+n]|0;else{var r=a[n-15],g=a[n-2];a[n]=((r<<25|r>>>7)^(r<<14|r>>>18)^r>>>3)+a[n-7]+((g<<15|g>>>17)^(g<<13|g>>>19)^g>>>10)+a[n-16]}r=l+((p<<26|p>>>6)^(p<<21|p>>>11)^(p<<7|p>>>25))+(p&j^~p&k)+q[n]+a[n];g=((e<<30|e>>>2)^(e<<19|e>>>13)^(e<<10|e>>>22))+(e&f^e&m^f&m);l=k;k=j;j=p;p=h+r|0;h=m;m=f;f=e;e=r+g|0}b[0]=b[0]+e|0;b[1]=b[1]+f|0;b[2]=b[2]+m|0;b[3]=b[3]+h|0;b[4]=b[4]+p|0;b[5]=b[5]+j|0;b[6]=b[6]+k|0;b[7]=b[7]+l|0},_doFinalize:function(){var a=this._data,d=a.words,b=8*this._nDataBytes,e=8*a.sigBytes;\nd[e>>>5]|=128<<24-e%32;d[(e+64>>>9<<4)+14]=h.floor(b/4294967296);d[(e+64>>>9<<4)+15]=b;a.sigBytes=4*d.length;this._process();return this._hash},clone:function(){var a=g.clone.call(this);a._hash=this._hash.clone();return a}});s.SHA256=g._createHelper(f);s.HmacSHA256=g._createHmacHelper(f)})(Math);\n"], "crypto_js.rollups.sha384": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by 
Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(a,c){var d={},j=d.lib={},f=function(){},m=j.Base={extend:function(a){f.prototype=this;var b=new f;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nB=j.WordArray=m.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=c?b:4*a.length},toString:function(a){return(a||y).stringify(this)},concat:function(a){var b=this.words,g=a.words,e=this.sigBytes;a=a.sigBytes;this.clamp();if(e%4)for(var k=0;k>>2]|=(g[k>>>2]>>>24-8*(k%4)&255)<<24-8*((e+k)%4);else if(65535>>2]=g[k>>>2];else b.push.apply(b,g);this.sigBytes+=a;return this},clamp:function(){var n=this.words,b=this.sigBytes;n[b>>>2]&=4294967295<<\n32-8*(b%4);n.length=a.ceil(b/4)},clone:function(){var a=m.clone.call(this);a.words=this.words.slice(0);return a},random:function(n){for(var b=[],g=0;g>>2]>>>24-8*(e%4)&255;g.push((k>>>4).toString(16));g.push((k&15).toString(16))}return g.join(\"\")},parse:function(a){for(var b=a.length,g=[],e=0;e>>3]|=parseInt(a.substr(e,\n2),16)<<24-4*(e%8);return new B.init(g,b/2)}},F=v.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var g=[],e=0;e>>2]>>>24-8*(e%4)&255));return g.join(\"\")},parse:function(a){for(var b=a.length,g=[],e=0;e>>2]|=(a.charCodeAt(e)&255)<<24-8*(e%4);return new B.init(g,b)}},ha=v.Utf8={stringify:function(a){try{return decodeURIComponent(escape(F.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return F.parse(unescape(encodeURIComponent(a)))}},\nZ=j.BufferedBlockAlgorithm=m.extend({reset:function(){this._data=new B.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=ha.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(n){var b=this._data,g=b.words,e=b.sigBytes,k=this.blockSize,m=e/(4*k),m=n?a.ceil(m):a.max((m|0)-this._minBufferSize,0);n=m*k;e=a.min(4*n,e);if(n){for(var c=0;cy;y++)v[y]=a();j=j.SHA512=d.extend({_doReset:function(){this._hash=new m.init([new f.init(1779033703,4089235720),new f.init(3144134277,2227873595),new f.init(1013904242,4271175723),new f.init(2773480762,1595750129),new f.init(1359893119,2917565137),new f.init(2600822924,725511199),new f.init(528734635,4215389547),new f.init(1541459225,327033209)])},_doProcessBlock:function(a,c){for(var d=this._hash.words,\nf=d[0],j=d[1],b=d[2],g=d[3],e=d[4],k=d[5],m=d[6],d=d[7],y=f.high,M=f.low,$=j.high,N=j.low,aa=b.high,O=b.low,ba=g.high,P=g.low,ca=e.high,Q=e.low,da=k.high,R=k.low,ea=m.high,S=m.low,fa=d.high,T=d.low,s=y,p=M,G=$,D=N,H=aa,E=O,W=ba,I=P,t=ca,q=Q,U=da,J=R,V=ea,K=S,X=fa,L=T,u=0;80>u;u++){var z=v[u];if(16>u)var r=z.high=a[c+2*u]|0,h=z.low=a[c+2*u+1]|0;else{var r=v[u-15],h=r.high,w=r.low,r=(h>>>1|w<<31)^(h>>>8|w<<24)^h>>>7,w=(w>>>1|h<<31)^(w>>>8|h<<24)^(w>>>7|h<<25),C=v[u-2],h=C.high,l=C.low,C=(h>>>19|l<<\n13)^(h<<3|l>>>29)^h>>>6,l=(l>>>19|h<<13)^(l<<3|h>>>29)^(l>>>6|h<<26),h=v[u-7],Y=h.high,A=v[u-16],x=A.high,A=A.low,h=w+h.low,r=r+Y+(h>>>0>>0?1:0),h=h+l,r=r+C+(h>>>0>>0?1:0),h=h+A,r=r+x+(h>>>0>>0?1:0);z.high=r;z.low=h}var 
Y=t&U^~t&V,A=q&J^~q&K,z=s&G^s&H^G&H,ja=p&D^p&E^D&E,w=(s>>>28|p<<4)^(s<<30|p>>>2)^(s<<25|p>>>7),C=(p>>>28|s<<4)^(p<<30|s>>>2)^(p<<25|s>>>7),l=B[u],ka=l.high,ga=l.low,l=L+((q>>>14|t<<18)^(q>>>18|t<<14)^(q<<23|t>>>9)),x=X+((t>>>14|q<<18)^(t>>>18|q<<14)^(t<<23|q>>>9))+(l>>>0<\nL>>>0?1:0),l=l+A,x=x+Y+(l>>>0>>0?1:0),l=l+ga,x=x+ka+(l>>>0>>0?1:0),l=l+h,x=x+r+(l>>>0>>0?1:0),h=C+ja,z=w+z+(h>>>0>>0?1:0),X=V,L=K,V=U,K=J,U=t,J=q,q=I+l|0,t=W+x+(q>>>0>>0?1:0)|0,W=H,I=E,H=G,E=D,G=s,D=p,p=l+h|0,s=x+z+(p>>>0>>0?1:0)|0}M=f.low=M+p;f.high=y+s+(M>>>0
    >>0?1:0);N=j.low=N+D;j.high=$+G+(N>>>0>>0?1:0);O=b.low=O+E;b.high=aa+H+(O>>>0>>0?1:0);P=g.low=P+I;g.high=ba+W+(P>>>0>>0?1:0);Q=e.low=Q+q;e.high=ca+t+(Q>>>0>>0?1:0);R=k.low=R+J;k.high=da+U+(R>>>0>>0?1:0);\nS=m.low=S+K;m.high=ea+V+(S>>>0>>0?1:0);T=d.low=T+L;d.high=fa+X+(T>>>0>>0?1:0)},_doFinalize:function(){var a=this._data,c=a.words,d=8*this._nDataBytes,f=8*a.sigBytes;c[f>>>5]|=128<<24-f%32;c[(f+128>>>10<<5)+30]=Math.floor(d/4294967296);c[(f+128>>>10<<5)+31]=d;a.sigBytes=4*c.length;this._process();return this._hash.toX32()},clone:function(){var a=d.clone.call(this);a._hash=this._hash.clone();return a},blockSize:32});c.SHA512=d._createHelper(j);c.HmacSHA512=d._createHmacHelper(j)})();\n(function(){var a=CryptoJS,c=a.x64,d=c.Word,j=c.WordArray,c=a.algo,f=c.SHA512,c=c.SHA384=f.extend({_doReset:function(){this._hash=new j.init([new d.init(3418070365,3238371032),new d.init(1654270250,914150663),new d.init(2438529370,812702999),new d.init(355462360,4144912697),new d.init(1731405415,4290775857),new d.init(2394180231,1750603025),new d.init(3675008525,1694076839),new d.init(1203062813,3204075428)])},_doFinalize:function(){var a=f._doFinalize.call(this);a.sigBytes-=16;return a}});a.SHA384=\nf._createHelper(c);a.HmacSHA384=f._createHmacHelper(c)})();\n"], "crypto_js.rollups.sha512": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(a,m){var r={},f=r.lib={},g=function(){},l=f.Base={extend:function(a){g.prototype=this;var b=new g;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\np=f.WordArray=l.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=m?b:4*a.length},toString:function(a){return(a||q).stringify(this)},concat:function(a){var b=this.words,d=a.words,c=this.sigBytes;a=a.sigBytes;this.clamp();if(c%4)for(var j=0;j>>2]|=(d[j>>>2]>>>24-8*(j%4)&255)<<24-8*((c+j)%4);else if(65535>>2]=d[j>>>2];else b.push.apply(b,d);this.sigBytes+=a;return this},clamp:function(){var n=this.words,b=this.sigBytes;n[b>>>2]&=4294967295<<\n32-8*(b%4);n.length=a.ceil(b/4)},clone:function(){var a=l.clone.call(this);a.words=this.words.slice(0);return a},random:function(n){for(var b=[],d=0;d>>2]>>>24-8*(c%4)&255;d.push((j>>>4).toString(16));d.push((j&15).toString(16))}return d.join(\"\")},parse:function(a){for(var b=a.length,d=[],c=0;c>>3]|=parseInt(a.substr(c,\n2),16)<<24-4*(c%8);return new p.init(d,b/2)}},G=y.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var d=[],c=0;c>>2]>>>24-8*(c%4)&255));return d.join(\"\")},parse:function(a){for(var b=a.length,d=[],c=0;c>>2]|=(a.charCodeAt(c)&255)<<24-8*(c%4);return new p.init(d,b)}},fa=y.Utf8={stringify:function(a){try{return decodeURIComponent(escape(G.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return G.parse(unescape(encodeURIComponent(a)))}},\nh=f.BufferedBlockAlgorithm=l.extend({reset:function(){this._data=new p.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=fa.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(n){var 
b=this._data,d=b.words,c=b.sigBytes,j=this.blockSize,l=c/(4*j),l=n?a.ceil(l):a.max((l|0)-this._minBufferSize,0);n=l*j;c=a.min(4*n,c);if(n){for(var h=0;hq;q++)y[q]=a();f=f.SHA512=r.extend({_doReset:function(){this._hash=new l.init([new g.init(1779033703,4089235720),new g.init(3144134277,2227873595),new g.init(1013904242,4271175723),new g.init(2773480762,1595750129),new g.init(1359893119,2917565137),new g.init(2600822924,725511199),new g.init(528734635,4215389547),new g.init(1541459225,327033209)])},_doProcessBlock:function(a,f){for(var h=this._hash.words,\ng=h[0],n=h[1],b=h[2],d=h[3],c=h[4],j=h[5],l=h[6],h=h[7],q=g.high,m=g.low,r=n.high,N=n.low,Z=b.high,O=b.low,$=d.high,P=d.low,aa=c.high,Q=c.low,ba=j.high,R=j.low,ca=l.high,S=l.low,da=h.high,T=h.low,v=q,s=m,H=r,E=N,I=Z,F=O,W=$,J=P,w=aa,t=Q,U=ba,K=R,V=ca,L=S,X=da,M=T,x=0;80>x;x++){var B=y[x];if(16>x)var u=B.high=a[f+2*x]|0,e=B.low=a[f+2*x+1]|0;else{var u=y[x-15],e=u.high,z=u.low,u=(e>>>1|z<<31)^(e>>>8|z<<24)^e>>>7,z=(z>>>1|e<<31)^(z>>>8|e<<24)^(z>>>7|e<<25),D=y[x-2],e=D.high,k=D.low,D=(e>>>19|k<<13)^\n(e<<3|k>>>29)^e>>>6,k=(k>>>19|e<<13)^(k<<3|e>>>29)^(k>>>6|e<<26),e=y[x-7],Y=e.high,C=y[x-16],A=C.high,C=C.low,e=z+e.low,u=u+Y+(e>>>0>>0?1:0),e=e+k,u=u+D+(e>>>0>>0?1:0),e=e+C,u=u+A+(e>>>0>>0?1:0);B.high=u;B.low=e}var Y=w&U^~w&V,C=t&K^~t&L,B=v&H^v&I^H&I,ha=s&E^s&F^E&F,z=(v>>>28|s<<4)^(v<<30|s>>>2)^(v<<25|s>>>7),D=(s>>>28|v<<4)^(s<<30|v>>>2)^(s<<25|v>>>7),k=p[x],ia=k.high,ea=k.low,k=M+((t>>>14|w<<18)^(t>>>18|w<<14)^(t<<23|w>>>9)),A=X+((w>>>14|t<<18)^(w>>>18|t<<14)^(w<<23|t>>>9))+(k>>>0>>\n0?1:0),k=k+C,A=A+Y+(k>>>0>>0?1:0),k=k+ea,A=A+ia+(k>>>0>>0?1:0),k=k+e,A=A+u+(k>>>0>>0?1:0),e=D+ha,B=z+B+(e>>>0>>0?1:0),X=V,M=L,V=U,L=K,U=w,K=t,t=J+k|0,w=W+A+(t>>>0>>0?1:0)|0,W=I,J=F,I=H,F=E,H=v,E=s,s=k+e|0,v=A+B+(s>>>0>>0?1:0)|0}m=g.low=m+s;g.high=q+v+(m>>>0>>0?1:0);N=n.low=N+E;n.high=r+H+(N>>>0>>0?1:0);O=b.low=O+F;b.high=Z+I+(O>>>0>>0?1:0);P=d.low=P+J;d.high=$+W+(P>>>0>>0?1:0);Q=c.low=Q+t;c.high=aa+w+(Q>>>0>>0?1:0);R=j.low=R+K;j.high=ba+U+(R>>>0>>0?1:0);S=l.low=\nS+L;l.high=ca+V+(S>>>0>>0?1:0);T=h.low=T+M;h.high=da+X+(T>>>0>>0?1:0)},_doFinalize:function(){var a=this._data,f=a.words,h=8*this._nDataBytes,g=8*a.sigBytes;f[g>>>5]|=128<<24-g%32;f[(g+128>>>10<<5)+30]=Math.floor(h/4294967296);f[(g+128>>>10<<5)+31]=h;a.sigBytes=4*f.length;this._process();return this._hash.toX32()},clone:function(){var a=r.clone.call(this);a._hash=this._hash.clone();return a},blockSize:32});m.SHA512=r._createHelper(f);m.HmacSHA512=r._createHmacHelper(f)})();\n"], "crypto_js.rollups.md5": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(s,p){var m={},l=m.lib={},n=function(){},r=l.Base={extend:function(b){n.prototype=this;var h=new n;b&&h.mixIn(b);h.hasOwnProperty(\"init\")||(h.init=function(){h.$super.init.apply(this,arguments)});h.init.prototype=h;h.$super=this;return h},create:function(){var b=this.extend();b.init.apply(b,arguments);return b},init:function(){},mixIn:function(b){for(var h in b)b.hasOwnProperty(h)&&(this[h]=b[h]);b.hasOwnProperty(\"toString\")&&(this.toString=b.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=l.WordArray=r.extend({init:function(b,h){b=this.words=b||[];this.sigBytes=h!=p?h:4*b.length},toString:function(b){return(b||t).stringify(this)},concat:function(b){var h=this.words,a=b.words,j=this.sigBytes;b=b.sigBytes;this.clamp();if(j%4)for(var g=0;g>>2]|=(a[g>>>2]>>>24-8*(g%4)&255)<<24-8*((j+g)%4);else if(65535>>2]=a[g>>>2];else h.push.apply(h,a);this.sigBytes+=b;return this},clamp:function(){var b=this.words,h=this.sigBytes;b[h>>>2]&=4294967295<<\n32-8*(h%4);b.length=s.ceil(h/4)},clone:function(){var b=r.clone.call(this);b.words=this.words.slice(0);return b},random:function(b){for(var h=[],a=0;a>>2]>>>24-8*(j%4)&255;g.push((k>>>4).toString(16));g.push((k&15).toString(16))}return g.join(\"\")},parse:function(b){for(var a=b.length,g=[],j=0;j>>3]|=parseInt(b.substr(j,\n2),16)<<24-4*(j%8);return new q.init(g,a/2)}},a=v.Latin1={stringify:function(b){var a=b.words;b=b.sigBytes;for(var g=[],j=0;j>>2]>>>24-8*(j%4)&255));return g.join(\"\")},parse:function(b){for(var a=b.length,g=[],j=0;j>>2]|=(b.charCodeAt(j)&255)<<24-8*(j%4);return new q.init(g,a)}},u=v.Utf8={stringify:function(b){try{return decodeURIComponent(escape(a.stringify(b)))}catch(g){throw Error(\"Malformed UTF-8 data\");}},parse:function(b){return a.parse(unescape(encodeURIComponent(b)))}},\ng=l.BufferedBlockAlgorithm=r.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(b){\"string\"==typeof b&&(b=u.parse(b));this._data.concat(b);this._nDataBytes+=b.sigBytes},_process:function(b){var a=this._data,g=a.words,j=a.sigBytes,k=this.blockSize,m=j/(4*k),m=b?s.ceil(m):s.max((m|0)-this._minBufferSize,0);b=m*k;j=s.min(4*b,j);if(b){for(var l=0;l>>32-j)+k}function m(a,k,b,h,l,j,m){a=a+(k&h|b&~h)+l+m;return(a<>>32-j)+k}function l(a,k,b,h,l,j,m){a=a+(k^b^h)+l+m;return(a<>>32-j)+k}function n(a,k,b,h,l,j,m){a=a+(b^(k|~h))+l+m;return(a<>>32-j)+k}for(var r=CryptoJS,q=r.lib,v=q.WordArray,t=q.Hasher,q=r.algo,a=[],u=0;64>u;u++)a[u]=4294967296*s.abs(s.sin(u+1))|0;q=q.MD5=t.extend({_doReset:function(){this._hash=new v.init([1732584193,4023233417,2562383102,271733878])},\n_doProcessBlock:function(g,k){for(var b=0;16>b;b++){var h=k+b,w=g[h];g[h]=(w<<8|w>>>24)&16711935|(w<<24|w>>>8)&4278255360}var 
b=this._hash.words,h=g[k+0],w=g[k+1],j=g[k+2],q=g[k+3],r=g[k+4],s=g[k+5],t=g[k+6],u=g[k+7],v=g[k+8],x=g[k+9],y=g[k+10],z=g[k+11],A=g[k+12],B=g[k+13],C=g[k+14],D=g[k+15],c=b[0],d=b[1],e=b[2],f=b[3],c=p(c,d,e,f,h,7,a[0]),f=p(f,c,d,e,w,12,a[1]),e=p(e,f,c,d,j,17,a[2]),d=p(d,e,f,c,q,22,a[3]),c=p(c,d,e,f,r,7,a[4]),f=p(f,c,d,e,s,12,a[5]),e=p(e,f,c,d,t,17,a[6]),d=p(d,e,f,c,u,22,a[7]),\nc=p(c,d,e,f,v,7,a[8]),f=p(f,c,d,e,x,12,a[9]),e=p(e,f,c,d,y,17,a[10]),d=p(d,e,f,c,z,22,a[11]),c=p(c,d,e,f,A,7,a[12]),f=p(f,c,d,e,B,12,a[13]),e=p(e,f,c,d,C,17,a[14]),d=p(d,e,f,c,D,22,a[15]),c=m(c,d,e,f,w,5,a[16]),f=m(f,c,d,e,t,9,a[17]),e=m(e,f,c,d,z,14,a[18]),d=m(d,e,f,c,h,20,a[19]),c=m(c,d,e,f,s,5,a[20]),f=m(f,c,d,e,y,9,a[21]),e=m(e,f,c,d,D,14,a[22]),d=m(d,e,f,c,r,20,a[23]),c=m(c,d,e,f,x,5,a[24]),f=m(f,c,d,e,C,9,a[25]),e=m(e,f,c,d,q,14,a[26]),d=m(d,e,f,c,v,20,a[27]),c=m(c,d,e,f,B,5,a[28]),f=m(f,c,\nd,e,j,9,a[29]),e=m(e,f,c,d,u,14,a[30]),d=m(d,e,f,c,A,20,a[31]),c=l(c,d,e,f,s,4,a[32]),f=l(f,c,d,e,v,11,a[33]),e=l(e,f,c,d,z,16,a[34]),d=l(d,e,f,c,C,23,a[35]),c=l(c,d,e,f,w,4,a[36]),f=l(f,c,d,e,r,11,a[37]),e=l(e,f,c,d,u,16,a[38]),d=l(d,e,f,c,y,23,a[39]),c=l(c,d,e,f,B,4,a[40]),f=l(f,c,d,e,h,11,a[41]),e=l(e,f,c,d,q,16,a[42]),d=l(d,e,f,c,t,23,a[43]),c=l(c,d,e,f,x,4,a[44]),f=l(f,c,d,e,A,11,a[45]),e=l(e,f,c,d,D,16,a[46]),d=l(d,e,f,c,j,23,a[47]),c=n(c,d,e,f,h,6,a[48]),f=n(f,c,d,e,u,10,a[49]),e=n(e,f,c,d,\nC,15,a[50]),d=n(d,e,f,c,s,21,a[51]),c=n(c,d,e,f,A,6,a[52]),f=n(f,c,d,e,q,10,a[53]),e=n(e,f,c,d,y,15,a[54]),d=n(d,e,f,c,w,21,a[55]),c=n(c,d,e,f,v,6,a[56]),f=n(f,c,d,e,D,10,a[57]),e=n(e,f,c,d,t,15,a[58]),d=n(d,e,f,c,B,21,a[59]),c=n(c,d,e,f,r,6,a[60]),f=n(f,c,d,e,z,10,a[61]),e=n(e,f,c,d,j,15,a[62]),d=n(d,e,f,c,x,21,a[63]);b[0]=b[0]+c|0;b[1]=b[1]+d|0;b[2]=b[2]+e|0;b[3]=b[3]+f|0},_doFinalize:function(){var a=this._data,k=a.words,b=8*this._nDataBytes,h=8*a.sigBytes;k[h>>>5]|=128<<24-h%32;var l=s.floor(b/\n4294967296);k[(h+64>>>9<<4)+15]=(l<<8|l>>>24)&16711935|(l<<24|l>>>8)&4278255360;k[(h+64>>>9<<4)+14]=(b<<8|b>>>24)&16711935|(b<<24|b>>>8)&4278255360;a.sigBytes=4*(k.length+1);this._process();a=this._hash;k=a.words;for(b=0;4>b;b++)h=k[b],k[b]=(h<<8|h>>>24)&16711935|(h<<24|h>>>8)&4278255360;return a},clone:function(){var a=t.clone.call(this);a._hash=this._hash.clone();return a}});r.MD5=t._createHelper(q);r.HmacMD5=t._createHmacHelper(q)})(Math);\n"], "crypto_js.rollups.sha3": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(v,p){var d={},u=d.lib={},r=function(){},f=u.Base={extend:function(a){r.prototype=this;var b=new r;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\ns=u.WordArray=f.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=p?b:4*a.length},toString:function(a){return(a||y).stringify(this)},concat:function(a){var b=this.words,c=a.words,j=this.sigBytes;a=a.sigBytes;this.clamp();if(j%4)for(var n=0;n>>2]|=(c[n>>>2]>>>24-8*(n%4)&255)<<24-8*((j+n)%4);else if(65535>>2]=c[n>>>2];else b.push.apply(b,c);this.sigBytes+=a;return this},clamp:function(){var a=this.words,b=this.sigBytes;a[b>>>2]&=4294967295<<\n32-8*(b%4);a.length=v.ceil(b/4)},clone:function(){var a=f.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var b=[],c=0;c>>2]>>>24-8*(j%4)&255;c.push((n>>>4).toString(16));c.push((n&15).toString(16))}return c.join(\"\")},parse:function(a){for(var b=a.length,c=[],j=0;j>>3]|=parseInt(a.substr(j,\n2),16)<<24-4*(j%8);return new s.init(c,b/2)}},e=x.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var c=[],j=0;j>>2]>>>24-8*(j%4)&255));return c.join(\"\")},parse:function(a){for(var b=a.length,c=[],j=0;j>>2]|=(a.charCodeAt(j)&255)<<24-8*(j%4);return new s.init(c,b)}},q=x.Utf8={stringify:function(a){try{return decodeURIComponent(escape(e.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return e.parse(unescape(encodeURIComponent(a)))}},\nt=u.BufferedBlockAlgorithm=f.extend({reset:function(){this._data=new s.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=q.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var b=this._data,c=b.words,j=b.sigBytes,n=this.blockSize,e=j/(4*n),e=a?v.ceil(e):v.max((e|0)-this._minBufferSize,0);a=e*n;j=v.min(4*a,j);if(a){for(var f=0;ft;t++){s[e+5*q]=(t+1)*(t+2)/2%64;var w=(2*e+3*q)%5,e=q%5,q=w}for(e=0;5>e;e++)for(q=0;5>q;q++)x[e+5*q]=q+5*((2*e+3*q)%5);e=1;for(q=0;24>q;q++){for(var a=w=t=0;7>a;a++){if(e&1){var b=(1<b?w^=1<e;e++)c[e]=f.create();d=d.SHA3=r.extend({cfg:r.cfg.extend({outputLength:512}),_doReset:function(){for(var a=this._state=\n[],b=0;25>b;b++)a[b]=new f.init;this.blockSize=(1600-2*this.cfg.outputLength)/32},_doProcessBlock:function(a,b){for(var e=this._state,f=this.blockSize/2,h=0;h>>24)&16711935|(l<<24|l>>>8)&4278255360,m=(m<<8|m>>>24)&16711935|(m<<24|m>>>8)&4278255360,g=e[h];g.high^=m;g.low^=l}for(f=0;24>f;f++){for(h=0;5>h;h++){for(var d=l=0,k=0;5>k;k++)g=e[h+5*k],l^=g.high,d^=g.low;g=c[h];g.high=l;g.low=d}for(h=0;5>h;h++){g=c[(h+4)%5];l=c[(h+1)%5];m=l.high;k=l.low;l=g.high^\n(m<<1|k>>>31);d=g.low^(k<<1|m>>>31);for(k=0;5>k;k++)g=e[h+5*k],g.high^=l,g.low^=d}for(m=1;25>m;m++)g=e[m],h=g.high,g=g.low,k=s[m],32>k?(l=h<>>32-k,d=g<>>32-k):(l=g<>>64-k,d=h<>>64-k),g=c[x[m]],g.high=l,g.low=d;g=c[0];h=e[0];g.high=h.high;g.low=h.low;for(h=0;5>h;h++)for(k=0;5>k;k++)m=h+5*k,g=e[m],l=c[m],m=c[(h+1)%5+5*k],d=c[(h+2)%5+5*k],g.high=l.high^~m.high&d.high,g.low=l.low^~m.low&d.low;g=e[0];h=y[f];g.high^=h.high;g.low^=h.low}},_doFinalize:function(){var 
a=this._data,\nb=a.words,c=8*a.sigBytes,e=32*this.blockSize;b[c>>>5]|=1<<24-c%32;b[(v.ceil((c+1)/e)*e>>>5)-1]|=128;a.sigBytes=4*b.length;this._process();for(var a=this._state,b=this.cfg.outputLength/8,c=b/8,e=[],h=0;h>>24)&16711935|(f<<24|f>>>8)&4278255360,d=(d<<8|d>>>24)&16711935|(d<<24|d>>>8)&4278255360;e.push(d);e.push(f)}return new u.init(e,b)},clone:function(){for(var a=r.clone.call(this),b=a._state=this._state.slice(0),c=0;25>c;c++)b[c]=b[c].clone();return a}});\np.SHA3=r._createHelper(d);p.HmacSHA3=r._createHmacHelper(d)})(Math);\n"], "crypto_js.rollups.sha224": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(g,l){var f={},k=f.lib={},h=function(){},m=k.Base={extend:function(a){h.prototype=this;var c=new h;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=k.WordArray=m.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=l?c:4*a.length},toString:function(a){return(a||s).stringify(this)},concat:function(a){var c=this.words,d=a.words,b=this.sigBytes;a=a.sigBytes;this.clamp();if(b%4)for(var e=0;e>>2]|=(d[e>>>2]>>>24-8*(e%4)&255)<<24-8*((b+e)%4);else if(65535>>2]=d[e>>>2];else c.push.apply(c,d);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=g.ceil(c/4)},clone:function(){var a=m.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],d=0;d>>2]>>>24-8*(b%4)&255;d.push((e>>>4).toString(16));d.push((e&15).toString(16))}return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>3]|=parseInt(a.substr(b,\n2),16)<<24-4*(b%8);return new q.init(d,c/2)}},n=t.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var d=[],b=0;b>>2]>>>24-8*(b%4)&255));return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>2]|=(a.charCodeAt(b)&255)<<24-8*(b%4);return new q.init(d,c)}},j=t.Utf8={stringify:function(a){try{return decodeURIComponent(escape(n.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return n.parse(unescape(encodeURIComponent(a)))}},\nw=k.BufferedBlockAlgorithm=m.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=j.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,d=c.words,b=c.sigBytes,e=this.blockSize,f=b/(4*e),f=a?g.ceil(f):g.max((f|0)-this._minBufferSize,0);a=f*e;b=g.min(4*a,b);if(a){for(var u=0;un;){var j;a:{j=s;for(var w=g.sqrt(j),v=2;v<=w;v++)if(!(j%v)){j=!1;break a}j=!0}j&&(8>n&&(m[n]=t(g.pow(s,0.5))),q[n]=t(g.pow(s,1/3)),n++);s++}var a=[],f=f.SHA256=h.extend({_doReset:function(){this._hash=new k.init(m.slice(0))},_doProcessBlock:function(c,d){for(var b=this._hash.words,e=b[0],f=b[1],g=b[2],k=b[3],h=b[4],l=b[5],m=b[6],n=b[7],p=0;64>p;p++){if(16>p)a[p]=\nc[d+p]|0;else{var 
j=a[p-15],r=a[p-2];a[p]=((j<<25|j>>>7)^(j<<14|j>>>18)^j>>>3)+a[p-7]+((r<<15|r>>>17)^(r<<13|r>>>19)^r>>>10)+a[p-16]}j=n+((h<<26|h>>>6)^(h<<21|h>>>11)^(h<<7|h>>>25))+(h&l^~h&m)+q[p]+a[p];r=((e<<30|e>>>2)^(e<<19|e>>>13)^(e<<10|e>>>22))+(e&f^e&g^f&g);n=m;m=l;l=h;h=k+j|0;k=g;g=f;f=e;e=j+r|0}b[0]=b[0]+e|0;b[1]=b[1]+f|0;b[2]=b[2]+g|0;b[3]=b[3]+k|0;b[4]=b[4]+h|0;b[5]=b[5]+l|0;b[6]=b[6]+m|0;b[7]=b[7]+n|0},_doFinalize:function(){var a=this._data,d=a.words,b=8*this._nDataBytes,e=8*a.sigBytes;\nd[e>>>5]|=128<<24-e%32;d[(e+64>>>9<<4)+14]=g.floor(b/4294967296);d[(e+64>>>9<<4)+15]=b;a.sigBytes=4*d.length;this._process();return this._hash},clone:function(){var a=h.clone.call(this);a._hash=this._hash.clone();return a}});l.SHA256=h._createHelper(f);l.HmacSHA256=h._createHmacHelper(f)})(Math);\n(function(){var g=CryptoJS,l=g.lib.WordArray,f=g.algo,k=f.SHA256,f=f.SHA224=k.extend({_doReset:function(){this._hash=new l.init([3238371032,914150663,812702999,4144912697,4290775857,1750603025,1694076839,3204075428])},_doFinalize:function(){var f=k._doFinalize.call(this);f.sigBytes-=4;return f}});g.SHA224=k._createHelper(f);g.HmacSHA224=k._createHmacHelper(f)})();\n"], "zipfile": [".py", "''\n\n\n\n\nimport binascii\nimport importlib.util\nimport io\nimport itertools\nimport os\nimport posixpath\nimport shutil\nimport stat\nimport struct\nimport sys\nimport threading\nimport time\nimport contextlib\nimport pathlib\n\ntry:\n import zlib\n crc32=zlib.crc32\nexcept ImportError:\n zlib=None\n crc32=binascii.crc32\n \ntry:\n import bz2\nexcept ImportError:\n bz2=None\n \ntry:\n import lzma\nexcept ImportError:\n lzma=None\n \n__all__=[\"BadZipFile\",\"BadZipfile\",\"error\",\n\"ZIP_STORED\",\"ZIP_DEFLATED\",\"ZIP_BZIP2\",\"ZIP_LZMA\",\n\"is_zipfile\",\"ZipInfo\",\"ZipFile\",\"PyZipFile\",\"LargeZipFile\",\n\"Path\"]\n\nclass BadZipFile(Exception):\n pass\n \n \nclass LargeZipFile(Exception):\n ''\n\n\n \n \nerror=BadZipfile=BadZipFile\n\n\nZIP64_LIMIT=(1 <<31)-1\nZIP_FILECOUNT_LIMIT=(1 <<16)-1\nZIP_MAX_COMMENT=(1 <<16)-1\n\n\nZIP_STORED=0\nZIP_DEFLATED=8\nZIP_BZIP2=12\nZIP_LZMA=14\n\n\nDEFAULT_VERSION=20\nZIP64_VERSION=45\nBZIP2_VERSION=46\nLZMA_VERSION=63\n\nMAX_EXTRACT_VERSION=63\n\n\n\n\n\n\n\n\n\nstructEndArchive=b\"<4s4H2LH\"\nstringEndArchive=b\"PK\\005\\006\"\nsizeEndCentDir=struct.calcsize(structEndArchive)\n\n_ECD_SIGNATURE=0\n_ECD_DISK_NUMBER=1\n_ECD_DISK_START=2\n_ECD_ENTRIES_THIS_DISK=3\n_ECD_ENTRIES_TOTAL=4\n_ECD_SIZE=5\n_ECD_OFFSET=6\n_ECD_COMMENT_SIZE=7\n\n\n_ECD_COMMENT=8\n_ECD_LOCATION=9\n\n\n\nstructCentralDir=\"<4s4B4HL2L5H2L\"\nstringCentralDir=b\"PK\\001\\002\"\nsizeCentralDir=struct.calcsize(structCentralDir)\n\n\n_CD_SIGNATURE=0\n_CD_CREATE_VERSION=1\n_CD_CREATE_SYSTEM=2\n_CD_EXTRACT_VERSION=3\n_CD_EXTRACT_SYSTEM=4\n_CD_FLAG_BITS=5\n_CD_COMPRESS_TYPE=6\n_CD_TIME=7\n_CD_DATE=8\n_CD_CRC=9\n_CD_COMPRESSED_SIZE=10\n_CD_UNCOMPRESSED_SIZE=11\n_CD_FILENAME_LENGTH=12\n_CD_EXTRA_FIELD_LENGTH=13\n_CD_COMMENT_LENGTH=14\n_CD_DISK_NUMBER_START=15\n_CD_INTERNAL_FILE_ATTRIBUTES=16\n_CD_EXTERNAL_FILE_ATTRIBUTES=17\n_CD_LOCAL_HEADER_OFFSET=18\n\n\n\n_MASK_ENCRYPTED=1 <<0\n\n_MASK_COMPRESS_OPTION_1=1 <<1\n\n\n\n\n_MASK_USE_DATA_DESCRIPTOR=1 <<3\n\n\n_MASK_COMPRESSED_PATCH=1 <<5\n_MASK_STRONG_ENCRYPTION=1 <<6\n\n\n\n\n_MASK_UTF_FILENAME=1 
<<11\n\n\n\n\n\n\n\n\n\nstructFileHeader=\"<4s2B4HL2L2H\"\nstringFileHeader=b\"PK\\003\\004\"\nsizeFileHeader=struct.calcsize(structFileHeader)\n\n_FH_SIGNATURE=0\n_FH_EXTRACT_VERSION=1\n_FH_EXTRACT_SYSTEM=2\n_FH_GENERAL_PURPOSE_FLAG_BITS=3\n_FH_COMPRESSION_METHOD=4\n_FH_LAST_MOD_TIME=5\n_FH_LAST_MOD_DATE=6\n_FH_CRC=7\n_FH_COMPRESSED_SIZE=8\n_FH_UNCOMPRESSED_SIZE=9\n_FH_FILENAME_LENGTH=10\n_FH_EXTRA_FIELD_LENGTH=11\n\n\nstructEndArchive64Locator=\"<4sLQL\"\nstringEndArchive64Locator=b\"PK\\x06\\x07\"\nsizeEndCentDir64Locator=struct.calcsize(structEndArchive64Locator)\n\n\n\nstructEndArchive64=\"<4sQ2H2L4Q\"\nstringEndArchive64=b\"PK\\x06\\x06\"\nsizeEndCentDir64=struct.calcsize(structEndArchive64)\n\n_CD64_SIGNATURE=0\n_CD64_DIRECTORY_RECSIZE=1\n_CD64_CREATE_VERSION=2\n_CD64_EXTRACT_VERSION=3\n_CD64_DISK_NUMBER=4\n_CD64_DISK_NUMBER_START=5\n_CD64_NUMBER_ENTRIES_THIS_DISK=6\n_CD64_NUMBER_ENTRIES_TOTAL=7\n_CD64_DIRECTORY_SIZE=8\n_CD64_OFFSET_START_CENTDIR=9\n\n_DD_SIGNATURE=0x08074b50\n\n_EXTRA_FIELD_STRUCT=struct.Struct('1:\n raise BadZipFile(\"zipfiles that span multiple disks are not supported\")\n \n \n fpin.seek(offset -sizeEndCentDir64Locator -sizeEndCentDir64,2)\n data=fpin.read(sizeEndCentDir64)\n if len(data)!=sizeEndCentDir64:\n return endrec\n sig,sz,create_version,read_version,disk_num,disk_dir,\\\n dircount,dircount2,dirsize,diroffset=\\\n struct.unpack(structEndArchive64,data)\n if sig !=stringEndArchive64:\n return endrec\n \n \n endrec[_ECD_SIGNATURE]=sig\n endrec[_ECD_DISK_NUMBER]=disk_num\n endrec[_ECD_DISK_START]=disk_dir\n endrec[_ECD_ENTRIES_THIS_DISK]=dircount\n endrec[_ECD_ENTRIES_TOTAL]=dircount2\n endrec[_ECD_SIZE]=dirsize\n endrec[_ECD_OFFSET]=diroffset\n return endrec\n \n \ndef _EndRecData(fpin):\n ''\n\n\n \n \n \n fpin.seek(0,2)\n filesize=fpin.tell()\n \n \n \n \n try:\n fpin.seek(-sizeEndCentDir,2)\n except OSError:\n return None\n data=fpin.read()\n if(len(data)==sizeEndCentDir and\n data[0:4]==stringEndArchive and\n data[-2:]==b\"\\000\\000\"):\n \n endrec=struct.unpack(structEndArchive,data)\n endrec=list(endrec)\n \n \n endrec.append(b\"\")\n endrec.append(filesize -sizeEndCentDir)\n \n \n return _EndRecData64(fpin,-sizeEndCentDir,endrec)\n \n \n \n \n \n \n maxCommentStart=max(filesize -(1 <<16)-sizeEndCentDir,0)\n fpin.seek(maxCommentStart,0)\n data=fpin.read()\n start=data.rfind(stringEndArchive)\n if start >=0:\n \n recData=data[start:start+sizeEndCentDir]\n if len(recData)!=sizeEndCentDir:\n \n return None\n endrec=list(struct.unpack(structEndArchive,recData))\n commentSize=endrec[_ECD_COMMENT_SIZE]\n comment=data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]\n endrec.append(comment)\n endrec.append(maxCommentStart+start)\n \n \n return _EndRecData64(fpin,maxCommentStart+start -filesize,\n endrec)\n \n \n return None\n \n \nclass ZipInfo(object):\n ''\n \n __slots__=(\n 'orig_filename',\n 'filename',\n 'date_time',\n 'compress_type',\n '_compresslevel',\n 'comment',\n 'extra',\n 'create_system',\n 'create_version',\n 'extract_version',\n 'reserved',\n 'flag_bits',\n 'volume',\n 'internal_attr',\n 'external_attr',\n 'header_offset',\n 'CRC',\n 'compress_size',\n 'file_size',\n '_raw_time',\n )\n \n def __init__(self,filename=\"NoName\",date_time=(1980,1,1,0,0,0)):\n self.orig_filename=filename\n \n \n \n null_byte=filename.find(chr(0))\n if null_byte >=0:\n filename=filename[0:null_byte]\n \n \n \n if os.sep !=\"/\"and os.sep in filename:\n filename=filename.replace(os.sep,\"/\")\n \n self.filename=filename\n self.date_time=date_time\n \n if 
date_time[0]<1980:\n raise ValueError('ZIP does not support timestamps before 1980')\n \n \n self.compress_type=ZIP_STORED\n self._compresslevel=None\n self.comment=b\"\"\n self.extra=b\"\"\n if sys.platform =='win32':\n self.create_system=0\n else:\n \n self.create_system=3\n self.create_version=DEFAULT_VERSION\n self.extract_version=DEFAULT_VERSION\n self.reserved=0\n self.flag_bits=0\n self.volume=0\n self.internal_attr=0\n self.external_attr=0\n self.compress_size=0\n self.file_size=0\n \n \n \n \n def __repr__(self):\n result=['<%s filename=%r'%(self.__class__.__name__,self.filename)]\n if self.compress_type !=ZIP_STORED:\n result.append(' compress_type=%s'%\n compressor_names.get(self.compress_type,\n self.compress_type))\n hi=self.external_attr >>16\n lo=self.external_attr&0xFFFF\n if hi:\n result.append(' filemode=%r'%stat.filemode(hi))\n if lo:\n result.append(' external_attr=%#x'%lo)\n isdir=self.is_dir()\n if not isdir or self.file_size:\n result.append(' file_size=%r'%self.file_size)\n if((not isdir or self.compress_size)and\n (self.compress_type !=ZIP_STORED or\n self.file_size !=self.compress_size)):\n result.append(' compress_size=%r'%self.compress_size)\n result.append('>')\n return ''.join(result)\n \n def FileHeader(self,zip64=None):\n ''\n dt=self.date_time\n dosdate=(dt[0]-1980)<<9 |dt[1]<<5 |dt[2]\n dostime=dt[3]<<11 |dt[4]<<5 |(dt[5]//2)\n if self.flag_bits&_MASK_USE_DATA_DESCRIPTOR:\n \n CRC=compress_size=file_size=0\n else:\n CRC=self.CRC\n compress_size=self.compress_size\n file_size=self.file_size\n \n extra=self.extra\n \n min_version=0\n if zip64 is None:\n zip64=file_size >ZIP64_LIMIT or compress_size >ZIP64_LIMIT\n if zip64:\n fmt='ZIP64_LIMIT or compress_size >ZIP64_LIMIT:\n if not zip64:\n raise LargeZipFile(\"Filesize would require ZIP64 extensions\")\n \n \n file_size=0xffffffff\n compress_size=0xffffffff\n min_version=ZIP64_VERSION\n \n if self.compress_type ==ZIP_BZIP2:\n min_version=max(BZIP2_VERSION,min_version)\n elif self.compress_type ==ZIP_LZMA:\n min_version=max(LZMA_VERSION,min_version)\n \n self.extract_version=max(min_version,self.extract_version)\n self.create_version=max(min_version,self.create_version)\n filename,flag_bits=self._encodeFilenameFlags()\n header=struct.pack(structFileHeader,stringFileHeader,\n self.extract_version,self.reserved,flag_bits,\n self.compress_type,dostime,dosdate,CRC,\n compress_size,file_size,\n len(filename),len(extra))\n return header+filename+extra\n \n def _encodeFilenameFlags(self):\n try:\n return self.filename.encode('ascii'),self.flag_bits\n except UnicodeEncodeError:\n return self.filename.encode('utf-8'),self.flag_bits |_MASK_UTF_FILENAME\n \n def _decodeExtra(self):\n \n extra=self.extra\n unpack=struct.unpack\n while len(extra)>=4:\n tp,ln=unpack('len(extra):\n raise BadZipFile(\"Corrupt extra field %04x (size=%d)\"%(tp,ln))\n if tp ==0x0001:\n data=extra[4:ln+4]\n \n try:\n if self.file_size in(0xFFFF_FFFF_FFFF_FFFF,0xFFFF_FFFF):\n field=\"File size\"\n self.file_size,=unpack('2107:\n date_time=(2107,12,31,23,59,59)\n \n if arcname is None:\n arcname=filename\n arcname=os.path.normpath(os.path.splitdrive(arcname)[1])\n while arcname[0]in(os.sep,os.altsep):\n arcname=arcname[1:]\n if isdir:\n arcname +='/'\n zinfo=cls(arcname,date_time)\n zinfo.external_attr=(st.st_mode&0xFFFF)<<16\n if isdir:\n zinfo.file_size=0\n zinfo.external_attr |=0x10\n else:\n zinfo.file_size=st.st_size\n \n return zinfo\n \n def is_dir(self):\n ''\n return self.filename[-1]=='/'\n \n \n \n \n \n \n_crctable=None\ndef 
_gen_crc(crc):\n for j in range(8):\n if crc&1:\n crc=(crc >>1)^0xEDB88320\n else:\n crc >>=1\n return crc\n \n \n \n \n \n \n \n \n \ndef _ZipDecrypter(pwd):\n key0=305419896\n key1=591751049\n key2=878082192\n \n global _crctable\n if _crctable is None:\n _crctable=list(map(_gen_crc,range(256)))\n crctable=_crctable\n \n def crc32(ch,crc):\n ''\n return(crc >>8)^crctable[(crc ^ch)&0xFF]\n \n def update_keys(c):\n nonlocal key0,key1,key2\n key0=crc32(c,key0)\n key1=(key1+(key0&0xFF))&0xFFFFFFFF\n key1=(key1 *134775813+1)&0xFFFFFFFF\n key2=crc32(key1 >>24,key2)\n \n for p in pwd:\n update_keys(p)\n \n def decrypter(data):\n ''\n result=bytearray()\n append=result.append\n for c in data:\n k=key2 |2\n c ^=((k *(k ^1))>>8)&0xFF\n update_keys(c)\n append(c)\n return bytes(result)\n \n return decrypter\n \n \nclass LZMACompressor:\n\n def __init__(self):\n self._comp=None\n \n def _init(self):\n props=lzma._encode_filter_properties({'id':lzma.FILTER_LZMA1})\n self._comp=lzma.LZMACompressor(lzma.FORMAT_RAW,filters=[\n lzma._decode_filter_properties(lzma.FILTER_LZMA1,props)\n ])\n return struct.pack('>8)&0xff\n else:\n \n check_byte=(zipinfo.CRC >>24)&0xff\n h=self._init_decrypter()\n if h !=check_byte:\n raise RuntimeError(\"Bad password for file %r\"%zipinfo.orig_filename)\n \n \n def _init_decrypter(self):\n self._decrypter=_ZipDecrypter(self._pwd)\n \n \n \n \n \n header=self._fileobj.read(12)\n self._compress_left -=12\n return self._decrypter(header)[11]\n \n def __repr__(self):\n result=['<%s.%s'%(self.__class__.__module__,\n self.__class__.__qualname__)]\n if not self.closed:\n result.append(' name=%r mode=%r'%(self.name,self.mode))\n if self._compress_type !=ZIP_STORED:\n result.append(' compress_type=%s'%\n compressor_names.get(self._compress_type,\n self._compress_type))\n else:\n result.append(' [closed]')\n result.append('>')\n return ''.join(result)\n \n def readline(self,limit=-1):\n ''\n\n\n \n \n if limit <0:\n \n i=self._readbuffer.find(b'\\n',self._offset)+1\n if i >0:\n line=self._readbuffer[self._offset:i]\n self._offset=i\n return line\n \n return io.BufferedIOBase.readline(self,limit)\n \n def peek(self,n=1):\n ''\n if n >len(self._readbuffer)-self._offset:\n chunk=self.read(n)\n if len(chunk)>self._offset:\n self._readbuffer=chunk+self._readbuffer[self._offset:]\n self._offset=0\n else:\n self._offset -=len(chunk)\n \n \n return self._readbuffer[self._offset:self._offset+512]\n \n def readable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return True\n \n def read(self,n=-1):\n ''\n\n \n if self.closed:\n raise ValueError(\"read from closed file.\")\n if n is None or n <0:\n buf=self._readbuffer[self._offset:]\n self._readbuffer=b''\n self._offset=0\n while not self._eof:\n buf +=self._read1(self.MAX_N)\n return buf\n \n end=n+self._offset\n if end 0 and not self._eof:\n data=self._read1(n)\n if n 0:\n while not self._eof:\n data=self._read1(n)\n if n len(data):\n data +=self._read2(n -len(data))\n else:\n data=self._read2(n)\n \n if self._compress_type ==ZIP_STORED:\n self._eof=self._compress_left <=0\n elif self._compress_type ==ZIP_DEFLATED:\n n=max(n,self.MIN_READ_SIZE)\n data=self._decompressor.decompress(data,n)\n self._eof=(self._decompressor.eof or\n self._compress_left <=0 and\n not self._decompressor.unconsumed_tail)\n if self._eof:\n data +=self._decompressor.flush()\n else:\n data=self._decompressor.decompress(data)\n self._eof=self._decompressor.eof or self._compress_left <=0\n \n data=data[:self._left]\n self._left 
-=len(data)\n if self._left <=0:\n self._eof=True\n self._update_crc(data)\n return data\n \n def _read2(self,n):\n if self._compress_left <=0:\n return b''\n \n n=max(n,self.MIN_READ_SIZE)\n n=min(n,self._compress_left)\n \n data=self._fileobj.read(n)\n self._compress_left -=len(data)\n if not data:\n raise EOFError\n \n if self._decrypter is not None:\n data=self._decrypter(data)\n return data\n \n def close(self):\n try:\n if self._close_fileobj:\n self._fileobj.close()\n finally:\n super().close()\n \n def seekable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return self._seekable\n \n def seek(self,offset,whence=0):\n if self.closed:\n raise ValueError(\"seek on closed file.\")\n if not self._seekable:\n raise io.UnsupportedOperation(\"underlying stream is not seekable\")\n curr_pos=self.tell()\n if whence ==0:\n new_pos=offset\n elif whence ==1:\n new_pos=curr_pos+offset\n elif whence ==2:\n new_pos=self._orig_file_size+offset\n else:\n raise ValueError(\"whence must be os.SEEK_SET (0), \"\n \"os.SEEK_CUR (1), or os.SEEK_END (2)\")\n \n if new_pos >self._orig_file_size:\n new_pos=self._orig_file_size\n \n if new_pos <0:\n new_pos=0\n \n read_offset=new_pos -curr_pos\n buff_offset=read_offset+self._offset\n \n if buff_offset >=0 and buff_offset 0:\n read_len=min(self.MAX_SEEK_READ,read_offset)\n self.read(read_len)\n read_offset -=read_len\n \n return self.tell()\n \n def tell(self):\n if self.closed:\n raise ValueError(\"tell on closed file.\")\n if not self._seekable:\n raise io.UnsupportedOperation(\"underlying stream is not seekable\")\n filepos=self._orig_file_size -self._left -len(self._readbuffer)+self._offset\n return filepos\n \n \nclass _ZipWriteFile(io.BufferedIOBase):\n def __init__(self,zf,zinfo,zip64):\n self._zinfo=zinfo\n self._zip64=zip64\n self._zipfile=zf\n self._compressor=_get_compressor(zinfo.compress_type,\n zinfo._compresslevel)\n self._file_size=0\n self._compress_size=0\n self._crc=0\n \n @property\n def _fileobj(self):\n return self._zipfile.fp\n \n def writable(self):\n return True\n \n def write(self,data):\n if self.closed:\n raise ValueError('I/O operation on closed file.')\n \n \n if isinstance(data,(bytes,bytearray)):\n nbytes=len(data)\n else:\n data=memoryview(data)\n nbytes=data.nbytes\n self._file_size +=nbytes\n \n self._crc=crc32(data,self._crc)\n if self._compressor:\n data=self._compressor.compress(data)\n self._compress_size +=len(data)\n self._fileobj.write(data)\n return nbytes\n \n def close(self):\n if self.closed:\n return\n try:\n super().close()\n \n if self._compressor:\n buf=self._compressor.flush()\n self._compress_size +=len(buf)\n self._fileobj.write(buf)\n self._zinfo.compress_size=self._compress_size\n else:\n self._zinfo.compress_size=self._file_size\n self._zinfo.CRC=self._crc\n self._zinfo.file_size=self._file_size\n \n \n if self._zinfo.flag_bits&_MASK_USE_DATA_DESCRIPTOR:\n \n fmt='ZIP64_LIMIT:\n raise RuntimeError(\n 'File size unexpectedly exceeded ZIP64 limit')\n if self._compress_size >ZIP64_LIMIT:\n raise RuntimeError(\n 'Compressed size unexpectedly exceeded ZIP64 limit')\n \n \n \n \n self._zipfile.start_dir=self._fileobj.tell()\n self._fileobj.seek(self._zinfo.header_offset)\n self._fileobj.write(self._zinfo.FileHeader(self._zip64))\n self._fileobj.seek(self._zipfile.start_dir)\n \n \n self._zipfile.filelist.append(self._zinfo)\n self._zipfile.NameToInfo[self._zinfo.filename]=self._zinfo\n finally:\n self._zipfile._writing=False\n \n \n \nclass ZipFile:\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n fp=None\n _windows_illegal_name_trans_table=None\n \n def __init__(self,file,mode=\"r\",compression=ZIP_STORED,allowZip64=True,\n compresslevel=None,*,strict_timestamps=True,metadata_encoding=None):\n ''\n \n if mode not in('r','w','x','a'):\n raise ValueError(\"ZipFile requires mode 'r', 'w', 'x', or 'a'\")\n \n _check_compression(compression)\n \n self._allowZip64=allowZip64\n self._didModify=False\n self.debug=0\n self.NameToInfo={}\n self.filelist=[]\n self.compression=compression\n self.compresslevel=compresslevel\n self.mode=mode\n self.pwd=None\n self._comment=b''\n self._strict_timestamps=strict_timestamps\n self.metadata_encoding=metadata_encoding\n \n \n if self.metadata_encoding and mode !='r':\n raise ValueError(\n \"metadata_encoding is only supported for reading files\")\n \n \n if isinstance(file,os.PathLike):\n file=os.fspath(file)\n if isinstance(file,str):\n \n self._filePassed=0\n self.filename=file\n modeDict={'r':'rb','w':'w+b','x':'x+b','a':'r+b',\n 'r+b':'w+b','w+b':'wb','x+b':'xb'}\n filemode=modeDict[mode]\n while True:\n try:\n self.fp=io.open(file,filemode)\n except OSError:\n if filemode in modeDict:\n filemode=modeDict[filemode]\n continue\n raise\n break\n else:\n self._filePassed=1\n self.fp=file\n self.filename=getattr(file,'name',None)\n self._fileRefCnt=1\n self._lock=threading.RLock()\n self._seekable=True\n self._writing=False\n \n try:\n if mode =='r':\n self._RealGetContents()\n elif mode in('w','x'):\n \n \n self._didModify=True\n try:\n self.start_dir=self.fp.tell()\n except(AttributeError,OSError):\n self.fp=_Tellable(self.fp)\n self.start_dir=0\n self._seekable=False\n else:\n \n try:\n self.fp.seek(self.start_dir)\n except(AttributeError,OSError):\n self._seekable=False\n elif mode =='a':\n try:\n \n self._RealGetContents()\n \n self.fp.seek(self.start_dir)\n except BadZipFile:\n \n self.fp.seek(0,2)\n \n \n \n self._didModify=True\n self.start_dir=self.fp.tell()\n else:\n raise ValueError(\"Mode must be 'r', 'w', 'x', or 'a'\")\n except:\n fp=self.fp\n self.fp=None\n self._fpclose(fp)\n raise\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,traceback):\n self.close()\n \n def __repr__(self):\n result=['<%s.%s'%(self.__class__.__module__,\n self.__class__.__qualname__)]\n if self.fp is not None:\n if self._filePassed:\n result.append(' file=%r'%self.fp)\n elif self.filename is not None:\n result.append(' filename=%r'%self.filename)\n result.append(' mode=%r'%self.mode)\n else:\n result.append(' [closed]')\n result.append('>')\n return ''.join(result)\n \n def _RealGetContents(self):\n ''\n fp=self.fp\n try:\n endrec=_EndRecData(fp)\n except OSError:\n raise BadZipFile(\"File is not a zip file\")\n if not endrec:\n raise BadZipFile(\"File is not a zip file\")\n if self.debug >1:\n print(endrec)\n size_cd=endrec[_ECD_SIZE]\n offset_cd=endrec[_ECD_OFFSET]\n self._comment=endrec[_ECD_COMMENT]\n \n \n concat=endrec[_ECD_LOCATION]-size_cd -offset_cd\n if endrec[_ECD_SIGNATURE]==stringEndArchive64:\n \n concat -=(sizeEndCentDir64+sizeEndCentDir64Locator)\n \n if self.debug >2:\n inferred=concat+offset_cd\n print(\"given, inferred, offset\",offset_cd,inferred,concat)\n \n self.start_dir=offset_cd+concat\n if self.start_dir <0:\n raise BadZipFile(\"Bad offset for central directory\")\n fp.seek(self.start_dir,0)\n data=fp.read(size_cd)\n fp=io.BytesIO(data)\n total=0\n while total 2:\n print(centdir)\n filename=fp.read(centdir[_CD_FILENAME_LENGTH])\n flags=centdir[_CD_FLAG_BITS]\n if 
flags&_MASK_UTF_FILENAME:\n \n filename=filename.decode('utf-8')\n else:\n \n filename=filename.decode(self.metadata_encoding or 'cp437')\n \n x=ZipInfo(filename)\n x.extra=fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])\n x.comment=fp.read(centdir[_CD_COMMENT_LENGTH])\n x.header_offset=centdir[_CD_LOCAL_HEADER_OFFSET]\n (x.create_version,x.create_system,x.extract_version,x.reserved,\n x.flag_bits,x.compress_type,t,d,\n x.CRC,x.compress_size,x.file_size)=centdir[1:12]\n if x.extract_version >MAX_EXTRACT_VERSION:\n raise NotImplementedError(\"zip file version %.1f\"%\n (x.extract_version /10))\n x.volume,x.internal_attr,x.external_attr=centdir[15:18]\n \n x._raw_time=t\n x.date_time=((d >>9)+1980,(d >>5)&0xF,d&0x1F,\n t >>11,(t >>5)&0x3F,(t&0x1F)*2)\n \n x._decodeExtra()\n x.header_offset=x.header_offset+concat\n self.filelist.append(x)\n self.NameToInfo[x.filename]=x\n \n \n total=(total+sizeCentralDir+centdir[_CD_FILENAME_LENGTH]\n +centdir[_CD_EXTRA_FIELD_LENGTH]\n +centdir[_CD_COMMENT_LENGTH])\n \n if self.debug >2:\n print(\"total\",total)\n \n \n def namelist(self):\n ''\n return[data.filename for data in self.filelist]\n \n def infolist(self):\n ''\n \n return self.filelist\n \n def printdir(self,file=None):\n ''\n print(\"%-46s %19s %12s\"%(\"File Name\",\"Modified \",\"Size\"),\n file=file)\n for zinfo in self.filelist:\n date=\"%d-%02d-%02d %02d:%02d:%02d\"%zinfo.date_time[:6]\n print(\"%-46s %s %12d\"%(zinfo.filename,date,zinfo.file_size),\n file=file)\n \n def testzip(self):\n ''\n chunk_size=2 **20\n for zinfo in self.filelist:\n try:\n \n \n with self.open(zinfo.filename,\"r\")as f:\n while f.read(chunk_size):\n pass\n except BadZipFile:\n return zinfo.filename\n \n def getinfo(self,name):\n ''\n info=self.NameToInfo.get(name)\n if info is None:\n raise KeyError(\n 'There is no item named %r in the archive'%name)\n \n return info\n \n def setpassword(self,pwd):\n ''\n if pwd and not isinstance(pwd,bytes):\n raise TypeError(\"pwd: expected bytes, got %s\"%type(pwd).__name__)\n if pwd:\n self.pwd=pwd\n else:\n self.pwd=None\n \n @property\n def comment(self):\n ''\n return self._comment\n \n @comment.setter\n def comment(self,comment):\n if not isinstance(comment,bytes):\n raise TypeError(\"comment: expected bytes, got %s\"%type(comment).__name__)\n \n if len(comment)>ZIP_MAX_COMMENT:\n import warnings\n warnings.warn('Archive comment is too long; truncating to %d bytes'\n %ZIP_MAX_COMMENT,stacklevel=2)\n comment=comment[:ZIP_MAX_COMMENT]\n self._comment=comment\n self._didModify=True\n \n def read(self,name,pwd=None):\n ''\n with self.open(name,\"r\",pwd)as fp:\n return fp.read()\n \n def open(self,name,mode=\"r\",pwd=None,*,force_zip64=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if mode not in{\"r\",\"w\"}:\n raise ValueError('open() requires mode \"r\" or \"w\"')\n if pwd and(mode ==\"w\"):\n raise ValueError(\"pwd is only supported for reading files\")\n if not self.fp:\n raise ValueError(\n \"Attempt to use ZIP archive that was already closed\")\n \n \n if isinstance(name,ZipInfo):\n \n zinfo=name\n elif mode =='w':\n zinfo=ZipInfo(name)\n zinfo.compress_type=self.compression\n zinfo._compresslevel=self.compresslevel\n else:\n \n zinfo=self.getinfo(name)\n \n if mode =='w':\n return self._open_to_write(zinfo,force_zip64=force_zip64)\n \n if self._writing:\n raise ValueError(\"Can't read from the ZIP file while there \"\n \"is an open writing handle on it. 
\"\n \"Close the writing handle before trying to read.\")\n \n \n self._fileRefCnt +=1\n zef_file=_SharedFile(self.fp,zinfo.header_offset,\n self._fpclose,self._lock,lambda:self._writing)\n try:\n \n fheader=zef_file.read(sizeFileHeader)\n if len(fheader)!=sizeFileHeader:\n raise BadZipFile(\"Truncated file header\")\n fheader=struct.unpack(structFileHeader,fheader)\n if fheader[_FH_SIGNATURE]!=stringFileHeader:\n raise BadZipFile(\"Bad magic number for file header\")\n \n fname=zef_file.read(fheader[_FH_FILENAME_LENGTH])\n if fheader[_FH_EXTRA_FIELD_LENGTH]:\n zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])\n \n if zinfo.flag_bits&_MASK_COMPRESSED_PATCH:\n \n raise NotImplementedError(\"compressed patched data (flag bit 5)\")\n \n if zinfo.flag_bits&_MASK_STRONG_ENCRYPTION:\n \n raise NotImplementedError(\"strong encryption (flag bit 6)\")\n \n if fheader[_FH_GENERAL_PURPOSE_FLAG_BITS]&_MASK_UTF_FILENAME:\n \n fname_str=fname.decode(\"utf-8\")\n else:\n fname_str=fname.decode(self.metadata_encoding or \"cp437\")\n \n if fname_str !=zinfo.orig_filename:\n raise BadZipFile(\n 'File name in directory %r and header %r differ.'\n %(zinfo.orig_filename,fname))\n \n \n is_encrypted=zinfo.flag_bits&_MASK_ENCRYPTED\n if is_encrypted:\n if not pwd:\n pwd=self.pwd\n if pwd and not isinstance(pwd,bytes):\n raise TypeError(\"pwd: expected bytes, got %s\"%type(pwd).__name__)\n if not pwd:\n raise RuntimeError(\"File %r is encrypted, password \"\n \"required for extraction\"%name)\n else:\n pwd=None\n \n return ZipExtFile(zef_file,mode,zinfo,pwd,True)\n except:\n zef_file.close()\n raise\n \n def _open_to_write(self,zinfo,force_zip64=False):\n if force_zip64 and not self._allowZip64:\n raise ValueError(\n \"force_zip64 is True, but allowZip64 was False when opening \"\n \"the ZIP file.\"\n )\n if self._writing:\n raise ValueError(\"Can't write to the ZIP file while there is \"\n \"another write handle open on it. 
\"\n \"Close the first handle before opening another.\")\n \n \n zinfo.compress_size=0\n zinfo.CRC=0\n \n zinfo.flag_bits=0x00\n if zinfo.compress_type ==ZIP_LZMA:\n \n zinfo.flag_bits |=_MASK_COMPRESS_OPTION_1\n if not self._seekable:\n zinfo.flag_bits |=_MASK_USE_DATA_DESCRIPTOR\n \n if not zinfo.external_attr:\n zinfo.external_attr=0o600 <<16\n \n \n zip64=self._allowZip64 and\\\n (force_zip64 or zinfo.file_size *1.05 >ZIP64_LIMIT)\n \n if self._seekable:\n self.fp.seek(self.start_dir)\n zinfo.header_offset=self.fp.tell()\n \n self._writecheck(zinfo)\n self._didModify=True\n \n self.fp.write(zinfo.FileHeader(zip64))\n \n self._writing=True\n return _ZipWriteFile(self,zinfo,zip64)\n \n def extract(self,member,path=None,pwd=None):\n ''\n\n\n\n \n if path is None:\n path=os.getcwd()\n else:\n path=os.fspath(path)\n \n return self._extract_member(member,path,pwd)\n \n def extractall(self,path=None,members=None,pwd=None):\n ''\n\n\n\n \n if members is None:\n members=self.namelist()\n \n if path is None:\n path=os.getcwd()\n else:\n path=os.fspath(path)\n \n for zipinfo in members:\n self._extract_member(zipinfo,path,pwd)\n \n @classmethod\n def _sanitize_windows_name(cls,arcname,pathsep):\n ''\n table=cls._windows_illegal_name_trans_table\n if not table:\n illegal=':<>|\"?*'\n table=str.maketrans(illegal,'_'*len(illegal))\n cls._windows_illegal_name_trans_table=table\n arcname=arcname.translate(table)\n \n arcname=(x.rstrip('.')for x in arcname.split(pathsep))\n \n arcname=pathsep.join(x for x in arcname if x)\n return arcname\n \n def _extract_member(self,member,targetpath,pwd):\n ''\n\n \n if not isinstance(member,ZipInfo):\n member=self.getinfo(member)\n \n \n \n arcname=member.filename.replace('/',os.path.sep)\n \n if os.path.altsep:\n arcname=arcname.replace(os.path.altsep,os.path.sep)\n \n \n arcname=os.path.splitdrive(arcname)[1]\n invalid_path_parts=('',os.path.curdir,os.path.pardir)\n arcname=os.path.sep.join(x for x in arcname.split(os.path.sep)\n if x not in invalid_path_parts)\n if os.path.sep =='\\\\':\n \n arcname=self._sanitize_windows_name(arcname,os.path.sep)\n \n targetpath=os.path.join(targetpath,arcname)\n targetpath=os.path.normpath(targetpath)\n \n \n upperdirs=os.path.dirname(targetpath)\n if upperdirs and not os.path.exists(upperdirs):\n os.makedirs(upperdirs)\n \n if member.is_dir():\n if not os.path.isdir(targetpath):\n os.mkdir(targetpath)\n return targetpath\n \n with self.open(member,pwd=pwd)as source,\\\n open(targetpath,\"wb\")as target:\n shutil.copyfileobj(source,target)\n \n return targetpath\n \n def _writecheck(self,zinfo):\n ''\n if zinfo.filename in self.NameToInfo:\n import warnings\n warnings.warn('Duplicate name: %r'%zinfo.filename,stacklevel=3)\n if self.mode not in('w','x','a'):\n raise ValueError(\"write() requires mode 'w', 'x', or 'a'\")\n if not self.fp:\n raise ValueError(\n \"Attempt to write ZIP archive that was already closed\")\n _check_compression(zinfo.compress_type)\n if not self._allowZip64:\n requires_zip64=None\n if len(self.filelist)>=ZIP_FILECOUNT_LIMIT:\n requires_zip64=\"Files count\"\n elif zinfo.file_size >ZIP64_LIMIT:\n requires_zip64=\"Filesize\"\n elif zinfo.header_offset >ZIP64_LIMIT:\n requires_zip64=\"Zipfile size\"\n if requires_zip64:\n raise LargeZipFile(requires_zip64+\n \" would require ZIP64 extensions\")\n \n def write(self,filename,arcname=None,\n compress_type=None,compresslevel=None):\n ''\n \n if not self.fp:\n raise ValueError(\n \"Attempt to write to ZIP archive that was already closed\")\n if 
self._writing:\n raise ValueError(\n \"Can't write to ZIP archive while an open writing handle exists\"\n )\n \n zinfo=ZipInfo.from_file(filename,arcname,\n strict_timestamps=self._strict_timestamps)\n \n if zinfo.is_dir():\n zinfo.compress_size=0\n zinfo.CRC=0\n self.mkdir(zinfo)\n else:\n if compress_type is not None:\n zinfo.compress_type=compress_type\n else:\n zinfo.compress_type=self.compression\n \n if compresslevel is not None:\n zinfo._compresslevel=compresslevel\n else:\n zinfo._compresslevel=self.compresslevel\n \n with open(filename,\"rb\")as src,self.open(zinfo,'w')as dest:\n shutil.copyfileobj(src,dest,1024 *8)\n \n def writestr(self,zinfo_or_arcname,data,\n compress_type=None,compresslevel=None):\n ''\n\n\n\n \n if isinstance(data,str):\n data=data.encode(\"utf-8\")\n if not isinstance(zinfo_or_arcname,ZipInfo):\n zinfo=ZipInfo(filename=zinfo_or_arcname,\n date_time=time.localtime(time.time())[:6])\n zinfo.compress_type=self.compression\n zinfo._compresslevel=self.compresslevel\n if zinfo.filename[-1]=='/':\n zinfo.external_attr=0o40775 <<16\n zinfo.external_attr |=0x10\n else:\n zinfo.external_attr=0o600 <<16\n else:\n zinfo=zinfo_or_arcname\n \n if not self.fp:\n raise ValueError(\n \"Attempt to write to ZIP archive that was already closed\")\n if self._writing:\n raise ValueError(\n \"Can't write to ZIP archive while an open writing handle exists.\"\n )\n \n if compress_type is not None:\n zinfo.compress_type=compress_type\n \n if compresslevel is not None:\n zinfo._compresslevel=compresslevel\n \n zinfo.file_size=len(data)\n with self._lock:\n with self.open(zinfo,mode='w')as dest:\n dest.write(data)\n \n def mkdir(self,zinfo_or_directory_name,mode=511):\n ''\n if isinstance(zinfo_or_directory_name,ZipInfo):\n zinfo=zinfo_or_directory_name\n if not zinfo.is_dir():\n raise ValueError(\"The given ZipInfo does not describe a directory\")\n elif isinstance(zinfo_or_directory_name,str):\n directory_name=zinfo_or_directory_name\n if not directory_name.endswith(\"/\"):\n directory_name +=\"/\"\n zinfo=ZipInfo(directory_name)\n zinfo.compress_size=0\n zinfo.CRC=0\n zinfo.external_attr=((0o40000 |mode)&0xFFFF)<<16\n zinfo.file_size=0\n zinfo.external_attr |=0x10\n else:\n raise TypeError(\"Expected type str or ZipInfo\")\n \n with self._lock:\n if self._seekable:\n self.fp.seek(self.start_dir)\n zinfo.header_offset=self.fp.tell()\n if zinfo.compress_type ==ZIP_LZMA:\n \n zinfo.flag_bits |=_MASK_COMPRESS_OPTION_1\n \n self._writecheck(zinfo)\n self._didModify=True\n \n self.filelist.append(zinfo)\n self.NameToInfo[zinfo.filename]=zinfo\n self.fp.write(zinfo.FileHeader(False))\n self.start_dir=self.fp.tell()\n \n def __del__(self):\n ''\n self.close()\n \n def close(self):\n ''\n \n if self.fp is None:\n return\n \n if self._writing:\n raise ValueError(\"Can't close the ZIP file while there is \"\n \"an open writing handle on it. 
\"\n \"Close the writing handle before closing the zip.\")\n \n try:\n if self.mode in('w','x','a')and self._didModify:\n with self._lock:\n if self._seekable:\n self.fp.seek(self.start_dir)\n self._write_end_record()\n finally:\n fp=self.fp\n self.fp=None\n self._fpclose(fp)\n \n def _write_end_record(self):\n for zinfo in self.filelist:\n dt=zinfo.date_time\n dosdate=(dt[0]-1980)<<9 |dt[1]<<5 |dt[2]\n dostime=dt[3]<<11 |dt[4]<<5 |(dt[5]//2)\n extra=[]\n if zinfo.file_size >ZIP64_LIMIT\\\n or zinfo.compress_size >ZIP64_LIMIT:\n extra.append(zinfo.file_size)\n extra.append(zinfo.compress_size)\n file_size=0xffffffff\n compress_size=0xffffffff\n else:\n file_size=zinfo.file_size\n compress_size=zinfo.compress_size\n \n if zinfo.header_offset >ZIP64_LIMIT:\n extra.append(zinfo.header_offset)\n header_offset=0xffffffff\n else:\n header_offset=zinfo.header_offset\n \n extra_data=zinfo.extra\n min_version=0\n if extra:\n \n extra_data=_strip_extra(extra_data,(1,))\n extra_data=struct.pack(\n 'ZIP_FILECOUNT_LIMIT:\n requires_zip64=\"Files count\"\n elif centDirOffset >ZIP64_LIMIT:\n requires_zip64=\"Central directory offset\"\n elif centDirSize >ZIP64_LIMIT:\n requires_zip64=\"Central directory size\"\n if requires_zip64:\n \n if not self._allowZip64:\n raise LargeZipFile(requires_zip64+\n \" would require ZIP64 extensions\")\n zip64endrec=struct.pack(\n structEndArchive64,stringEndArchive64,\n 44,45,45,0,0,centDirCount,centDirCount,\n centDirSize,centDirOffset)\n self.fp.write(zip64endrec)\n \n zip64locrec=struct.pack(\n structEndArchive64Locator,\n stringEndArchive64Locator,0,pos2,1)\n self.fp.write(zip64locrec)\n centDirCount=min(centDirCount,0xFFFF)\n centDirSize=min(centDirSize,0xFFFFFFFF)\n centDirOffset=min(centDirOffset,0xFFFFFFFF)\n \n endrec=struct.pack(structEndArchive,stringEndArchive,\n 0,0,centDirCount,centDirCount,\n centDirSize,centDirOffset,len(self._comment))\n self.fp.write(endrec)\n self.fp.write(self._comment)\n if self.mode ==\"a\":\n self.fp.truncate()\n self.fp.flush()\n \n def _fpclose(self,fp):\n assert self._fileRefCnt >0\n self._fileRefCnt -=1\n if not self._fileRefCnt and not self._filePassed:\n fp.close()\n \n \nclass PyZipFile(ZipFile):\n ''\n \n def __init__(self,file,mode=\"r\",compression=ZIP_STORED,\n allowZip64=True,optimize=-1):\n ZipFile.__init__(self,file,mode=mode,compression=compression,\n allowZip64=allowZip64)\n self._optimize=optimize\n \n def writepy(self,pathname,basename=\"\",filterfunc=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n pathname=os.fspath(pathname)\n if filterfunc and not filterfunc(pathname):\n if self.debug:\n label='path'if os.path.isdir(pathname)else 'file'\n print('%s %r skipped by filterfunc'%(label,pathname))\n return\n dir,name=os.path.split(pathname)\n if os.path.isdir(pathname):\n initname=os.path.join(pathname,\"__init__.py\")\n if os.path.isfile(initname):\n \n if basename:\n basename=\"%s/%s\"%(basename,name)\n else:\n basename=name\n if self.debug:\n print(\"Adding package in\",pathname,\"as\",basename)\n fname,arcname=self._get_codename(initname[0:-3],basename)\n if self.debug:\n print(\"Adding\",arcname)\n self.write(fname,arcname)\n dirlist=sorted(os.listdir(pathname))\n dirlist.remove(\"__init__.py\")\n \n for filename in dirlist:\n path=os.path.join(pathname,filename)\n root,ext=os.path.splitext(filename)\n if os.path.isdir(path):\n if os.path.isfile(os.path.join(path,\"__init__.py\")):\n \n self.writepy(path,basename,\n filterfunc=filterfunc)\n elif ext ==\".py\":\n if filterfunc and not filterfunc(path):\n if self.debug:\n 
print('file %r skipped by filterfunc'%path)\n continue\n fname,arcname=self._get_codename(path[0:-3],\n basename)\n if self.debug:\n print(\"Adding\",arcname)\n self.write(fname,arcname)\n else:\n \n if self.debug:\n print(\"Adding files from directory\",pathname)\n for filename in sorted(os.listdir(pathname)):\n path=os.path.join(pathname,filename)\n root,ext=os.path.splitext(filename)\n if ext ==\".py\":\n if filterfunc and not filterfunc(path):\n if self.debug:\n print('file %r skipped by filterfunc'%path)\n continue\n fname,arcname=self._get_codename(path[0:-3],\n basename)\n if self.debug:\n print(\"Adding\",arcname)\n self.write(fname,arcname)\n else:\n if pathname[-3:]!=\".py\":\n raise RuntimeError(\n 'Files added with writepy() must end with \".py\"')\n fname,arcname=self._get_codename(pathname[0:-3],basename)\n if self.debug:\n print(\"Adding file\",arcname)\n self.write(fname,arcname)\n \n def _get_codename(self,pathname,basename):\n ''\n\n\n\n\n \n def _compile(file,optimize=-1):\n import py_compile\n if self.debug:\n print(\"Compiling\",file)\n try:\n py_compile.compile(file,doraise=True,optimize=optimize)\n except py_compile.PyCompileError as err:\n print(err.msg)\n return False\n return True\n \n file_py=pathname+\".py\"\n file_pyc=pathname+\".pyc\"\n pycache_opt0=importlib.util.cache_from_source(file_py,optimization='')\n pycache_opt1=importlib.util.cache_from_source(file_py,optimization=1)\n pycache_opt2=importlib.util.cache_from_source(file_py,optimization=2)\n if self._optimize ==-1:\n \n if(os.path.isfile(file_pyc)and\n os.stat(file_pyc).st_mtime >=os.stat(file_py).st_mtime):\n \n arcname=fname=file_pyc\n elif(os.path.isfile(pycache_opt0)and\n os.stat(pycache_opt0).st_mtime >=os.stat(file_py).st_mtime):\n \n \n fname=pycache_opt0\n arcname=file_pyc\n elif(os.path.isfile(pycache_opt1)and\n os.stat(pycache_opt1).st_mtime >=os.stat(file_py).st_mtime):\n \n \n fname=pycache_opt1\n arcname=file_pyc\n elif(os.path.isfile(pycache_opt2)and\n os.stat(pycache_opt2).st_mtime >=os.stat(file_py).st_mtime):\n \n \n fname=pycache_opt2\n arcname=file_pyc\n else:\n \n if _compile(file_py):\n if sys.flags.optimize ==0:\n fname=pycache_opt0\n elif sys.flags.optimize ==1:\n fname=pycache_opt1\n else:\n fname=pycache_opt2\n arcname=file_pyc\n else:\n fname=arcname=file_py\n else:\n \n if self._optimize ==0:\n fname=pycache_opt0\n arcname=file_pyc\n else:\n arcname=file_pyc\n if self._optimize ==1:\n fname=pycache_opt1\n elif self._optimize ==2:\n fname=pycache_opt2\n else:\n msg=\"invalid value for 'optimize': {!r}\".format(self._optimize)\n raise ValueError(msg)\n if not(os.path.isfile(fname)and\n os.stat(fname).st_mtime >=os.stat(file_py).st_mtime):\n if not _compile(file_py,optimize=self._optimize):\n fname=arcname=file_py\n archivename=os.path.split(arcname)[1]\n if basename:\n archivename=\"%s/%s\"%(basename,archivename)\n return(fname,archivename)\n \n \ndef _parents(path):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return itertools.islice(_ancestry(path),1,None)\n \n \ndef _ancestry(path):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n path=path.rstrip(posixpath.sep)\n while path and path !=posixpath.sep:\n yield path\n path,tail=posixpath.split(path)\n \n \n_dedupe=dict.fromkeys\n''\n\n\ndef _difference(minuend,subtrahend):\n ''\n\n\n \n return itertools.filterfalse(set(subtrahend).__contains__,minuend)\n \n \nclass CompleteDirs(ZipFile):\n ''\n\n\n \n \n @staticmethod\n def _implied_dirs(names):\n parents=itertools.chain.from_iterable(map(_parents,names))\n as_dirs=(p+posixpath.sep for p in 
parents)\n return _dedupe(_difference(as_dirs,names))\n \n def namelist(self):\n names=super(CompleteDirs,self).namelist()\n return names+list(self._implied_dirs(names))\n \n def _name_set(self):\n return set(self.namelist())\n \n def resolve_dir(self,name):\n ''\n\n\n \n names=self._name_set()\n dirname=name+'/'\n dir_match=name not in names and dirname in names\n return dirname if dir_match else name\n \n @classmethod\n def make(cls,source):\n ''\n\n\n \n if isinstance(source,CompleteDirs):\n return source\n \n if not isinstance(source,ZipFile):\n return cls(source)\n \n \n if 'r'not in source.mode:\n cls=CompleteDirs\n \n source.__class__=cls\n return source\n \n \nclass FastLookup(CompleteDirs):\n ''\n\n\n \n \n def namelist(self):\n with contextlib.suppress(AttributeError):\n return self.__names\n self.__names=super(FastLookup,self).namelist()\n return self.__names\n \n def _name_set(self):\n with contextlib.suppress(AttributeError):\n return self.__lookup\n self.__lookup=super(FastLookup,self)._name_set()\n return self.__lookup\n \n \nclass Path:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __repr=\"{self.__class__.__name__}({self.root.filename!r}, {self.at!r})\"\n \n def __init__(self,root,at=\"\"):\n ''\n\n\n\n\n\n\n\n \n self.root=FastLookup.make(root)\n self.at=at\n \n def open(self,mode='r',*args,pwd=None,**kwargs):\n ''\n\n\n\n \n if self.is_dir():\n raise IsADirectoryError(self)\n zip_mode=mode[0]\n if not self.exists()and zip_mode =='r':\n raise FileNotFoundError(self)\n stream=self.root.open(self.at,zip_mode,pwd=pwd)\n if 'b'in mode:\n if args or kwargs:\n raise ValueError(\"encoding args invalid for binary operation\")\n return stream\n else:\n kwargs[\"encoding\"]=io.text_encoding(kwargs.get(\"encoding\"))\n return io.TextIOWrapper(stream,*args,**kwargs)\n \n @property\n def name(self):\n return pathlib.Path(self.at).name or self.filename.name\n \n @property\n def suffix(self):\n return pathlib.Path(self.at).suffix or self.filename.suffix\n \n @property\n def suffixes(self):\n return pathlib.Path(self.at).suffixes or self.filename.suffixes\n \n @property\n def stem(self):\n return pathlib.Path(self.at).stem or self.filename.stem\n \n @property\n def filename(self):\n return pathlib.Path(self.root.filename).joinpath(self.at)\n \n def read_text(self,*args,**kwargs):\n kwargs[\"encoding\"]=io.text_encoding(kwargs.get(\"encoding\"))\n with self.open('r',*args,**kwargs)as strm:\n return strm.read()\n \n def read_bytes(self):\n with self.open('rb')as strm:\n return strm.read()\n \n def _is_child(self,path):\n return posixpath.dirname(path.at.rstrip(\"/\"))==self.at.rstrip(\"/\")\n \n def _next(self,at):\n return self.__class__(self.root,at)\n \n def is_dir(self):\n return not self.at or self.at.endswith(\"/\")\n \n def is_file(self):\n return self.exists()and not self.is_dir()\n \n def exists(self):\n return self.at in self.root._name_set()\n \n def iterdir(self):\n if not self.is_dir():\n raise ValueError(\"Can't listdir a file\")\n subs=map(self._next,self.root.namelist())\n return filter(self._is_child,subs)\n \n def __str__(self):\n return posixpath.join(self.root.filename,self.at)\n \n def __repr__(self):\n return self.__repr.format(self=self)\n \n def joinpath(self,*other):\n next=posixpath.join(self.at,*other)\n return self._next(self.root.resolve_dir(next))\n \n __truediv__=joinpath\n \n @property\n def parent(self):\n if not self.at:\n return 
self.filename.parent\n parent_at=posixpath.dirname(self.at.rstrip('/'))\n if parent_at:\n parent_at +='/'\n return self._next(parent_at)\n \n \ndef main(args=None):\n import argparse\n \n description='A simple command-line interface for zipfile module.'\n parser=argparse.ArgumentParser(description=description)\n group=parser.add_mutually_exclusive_group(required=True)\n group.add_argument('-l','--list',metavar='',\n help='Show listing of a zipfile')\n group.add_argument('-e','--extract',nargs=2,\n metavar=('',''),\n help='Extract zipfile into target dir')\n group.add_argument('-c','--create',nargs='+',\n metavar=('',''),\n help='Create zipfile from sources')\n group.add_argument('-t','--test',metavar='',\n help='Test if a zipfile is valid')\n parser.add_argument('--metadata-encoding',metavar='',\n help='Specify encoding of member names for -l, -e and -t')\n args=parser.parse_args(args)\n \n encoding=args.metadata_encoding\n \n if args.test is not None:\n src=args.test\n with ZipFile(src,'r',metadata_encoding=encoding)as zf:\n badfile=zf.testzip()\n if badfile:\n print(\"The following enclosed file is corrupted: {!r}\".format(badfile))\n print(\"Done testing\")\n \n elif args.list is not None:\n src=args.list\n with ZipFile(src,'r',metadata_encoding=encoding)as zf:\n zf.printdir()\n \n elif args.extract is not None:\n src,curdir=args.extract\n with ZipFile(src,'r',metadata_encoding=encoding)as zf:\n zf.extractall(curdir)\n \n elif args.create is not None:\n if encoding:\n print(\"Non-conforming encodings not supported with -c.\",\n file=sys.stderr)\n sys.exit(1)\n \n zip_name=args.create.pop(0)\n files=args.create\n \n def addToZip(zf,path,zippath):\n if os.path.isfile(path):\n zf.write(path,zippath,ZIP_DEFLATED)\n elif os.path.isdir(path):\n if zippath:\n zf.write(path,zippath)\n for nm in sorted(os.listdir(path)):\n addToZip(zf,\n os.path.join(path,nm),os.path.join(zippath,nm))\n \n \n with ZipFile(zip_name,'w')as zf:\n for path in files:\n zippath=os.path.basename(path)\n if not zippath:\n zippath=os.path.basename(os.path.dirname(path))\n if zippath in('',os.curdir,os.pardir):\n zippath=''\n addToZip(zf,path,zippath)\n \n \nif __name__ ==\"__main__\":\n main()\n", ["argparse", "binascii", "bz2", "contextlib", "importlib.util", "io", "itertools", "lzma", "os", "pathlib", "posixpath", "py_compile", "shutil", "stat", "struct", "sys", "threading", "time", "warnings", "zlib"]], "shutil": [".py", "''\n\n\n\n\n\nimport os\nimport sys\nimport stat\nimport fnmatch\nimport collections\nimport errno\nimport warnings\n\ntry:\n import zlib\n del zlib\n _ZLIB_SUPPORTED=True\nexcept ImportError:\n _ZLIB_SUPPORTED=False\n \ntry:\n import bz2\n del bz2\n _BZ2_SUPPORTED=True\nexcept ImportError:\n _BZ2_SUPPORTED=False\n \ntry:\n import lzma\n del lzma\n _LZMA_SUPPORTED=True\nexcept ImportError:\n _LZMA_SUPPORTED=False\n \n_WINDOWS=os.name =='nt'\nposix=nt=None\nif os.name =='posix':\n import posix\nelif _WINDOWS:\n import nt\n \nif sys.platform =='win32':\n import _winapi\nelse:\n _winapi=None\n \nCOPY_BUFSIZE=1024 *1024 if _WINDOWS else 64 *1024\n\n\n_USE_CP_SENDFILE=hasattr(os,\"sendfile\")and sys.platform.startswith(\"linux\")\n_HAS_FCOPYFILE=posix and 
hasattr(posix,\"_fcopyfile\")\n\n\n_WIN_DEFAULT_PATHEXT=\".COM;.EXE;.BAT;.CMD;.VBS;.JS;.WS;.MSC\"\n\n__all__=[\"copyfileobj\",\"copyfile\",\"copymode\",\"copystat\",\"copy\",\"copy2\",\n\"copytree\",\"move\",\"rmtree\",\"Error\",\"SpecialFileError\",\n\"ExecError\",\"make_archive\",\"get_archive_formats\",\n\"register_archive_format\",\"unregister_archive_format\",\n\"get_unpack_formats\",\"register_unpack_format\",\n\"unregister_unpack_format\",\"unpack_archive\",\n\"ignore_patterns\",\"chown\",\"which\",\"get_terminal_size\",\n\"SameFileError\"]\n\n\nclass Error(OSError):\n pass\n \nclass SameFileError(Error):\n ''\n \nclass SpecialFileError(OSError):\n ''\n \n \nclass ExecError(OSError):\n ''\n \nclass ReadError(OSError):\n ''\n \nclass RegistryError(Exception):\n ''\n \n \nclass _GiveupOnFastCopy(Exception):\n ''\n\n \n \ndef _fastcopy_fcopyfile(fsrc,fdst,flags):\n ''\n\n \n try:\n infd=fsrc.fileno()\n outfd=fdst.fileno()\n except Exception as err:\n raise _GiveupOnFastCopy(err)\n \n try:\n posix._fcopyfile(infd,outfd,flags)\n except OSError as err:\n err.filename=fsrc.name\n err.filename2=fdst.name\n if err.errno in{errno.EINVAL,errno.ENOTSUP}:\n raise _GiveupOnFastCopy(err)\n else:\n raise err from None\n \ndef _fastcopy_sendfile(fsrc,fdst):\n ''\n\n\n \n \n \n \n \n \n \n \n \n \n global _USE_CP_SENDFILE\n try:\n infd=fsrc.fileno()\n outfd=fdst.fileno()\n except Exception as err:\n raise _GiveupOnFastCopy(err)\n \n \n \n \n \n \n try:\n blocksize=max(os.fstat(infd).st_size,2 **23)\n except OSError:\n blocksize=2 **27\n \n \n if sys.maxsize <2 **32:\n blocksize=min(blocksize,2 **30)\n \n offset=0\n while True:\n try:\n sent=os.sendfile(outfd,infd,offset,blocksize)\n except OSError as err:\n \n err.filename=fsrc.name\n err.filename2=fdst.name\n \n if err.errno ==errno.ENOTSOCK:\n \n \n \n _USE_CP_SENDFILE=False\n raise _GiveupOnFastCopy(err)\n \n if err.errno ==errno.ENOSPC:\n raise err from None\n \n \n if offset ==0 and os.lseek(outfd,0,os.SEEK_CUR)==0:\n raise _GiveupOnFastCopy(err)\n \n raise err\n else:\n if sent ==0:\n break\n offset +=sent\n \ndef _copyfileobj_readinto(fsrc,fdst,length=COPY_BUFSIZE):\n ''\n\n\n \n \n fsrc_readinto=fsrc.readinto\n fdst_write=fdst.write\n with memoryview(bytearray(length))as mv:\n while True:\n n=fsrc_readinto(mv)\n if not n:\n break\n elif n 0:\n _copyfileobj_readinto(fsrc,fdst,min(file_size,COPY_BUFSIZE))\n return dst\n \n copyfileobj(fsrc,fdst)\n \n \n except IsADirectoryError as e:\n if not os.path.exists(dst):\n raise FileNotFoundError(f'Directory does not exist: {dst}')from e\n else:\n raise\n \n return dst\n \ndef copymode(src,dst,*,follow_symlinks=True):\n ''\n\n\n\n\n\n \n sys.audit(\"shutil.copymode\",src,dst)\n \n if not follow_symlinks and _islink(src)and os.path.islink(dst):\n if hasattr(os,'lchmod'):\n stat_func,chmod_func=os.lstat,os.lchmod\n else:\n return\n else:\n stat_func,chmod_func=_stat,os.chmod\n \n st=stat_func(src)\n chmod_func(dst,stat.S_IMODE(st.st_mode))\n \nif hasattr(os,'listxattr'):\n def _copyxattr(src,dst,*,follow_symlinks=True):\n ''\n\n\n\n\n\n \n \n try:\n names=os.listxattr(src,follow_symlinks=follow_symlinks)\n except OSError as e:\n if e.errno not in(errno.ENOTSUP,errno.ENODATA,errno.EINVAL):\n raise\n return\n for name in names:\n try:\n value=os.getxattr(src,name,follow_symlinks=follow_symlinks)\n os.setxattr(dst,name,value,follow_symlinks=follow_symlinks)\n except OSError as e:\n if e.errno not in(errno.EPERM,errno.ENOTSUP,errno.ENODATA,\n errno.EINVAL,errno.EACCES):\n raise\nelse:\n def 
_copyxattr(*args,**kwargs):\n pass\n \ndef copystat(src,dst,*,follow_symlinks=True):\n ''\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.copystat\",src,dst)\n \n def _nop(*args,ns=None,follow_symlinks=None):\n pass\n \n \n follow=follow_symlinks or not(_islink(src)and os.path.islink(dst))\n if follow:\n \n def lookup(name):\n return getattr(os,name,_nop)\n else:\n \n \n def lookup(name):\n fn=getattr(os,name,_nop)\n if fn in os.supports_follow_symlinks:\n return fn\n return _nop\n \n if isinstance(src,os.DirEntry):\n st=src.stat(follow_symlinks=follow)\n else:\n st=lookup(\"stat\")(src,follow_symlinks=follow)\n mode=stat.S_IMODE(st.st_mode)\n lookup(\"utime\")(dst,ns=(st.st_atime_ns,st.st_mtime_ns),\n follow_symlinks=follow)\n \n \n _copyxattr(src,dst,follow_symlinks=follow)\n try:\n lookup(\"chmod\")(dst,mode,follow_symlinks=follow)\n except NotImplementedError:\n \n \n \n \n \n \n \n \n \n \n pass\n if hasattr(st,'st_flags'):\n try:\n lookup(\"chflags\")(dst,st.st_flags,follow_symlinks=follow)\n except OSError as why:\n for err in 'EOPNOTSUPP','ENOTSUP':\n if hasattr(errno,err)and why.errno ==getattr(errno,err):\n break\n else:\n raise\n \ndef copy(src,dst,*,follow_symlinks=True):\n ''\n\n\n\n\n\n\n\n\n\n \n if os.path.isdir(dst):\n dst=os.path.join(dst,os.path.basename(src))\n copyfile(src,dst,follow_symlinks=follow_symlinks)\n copymode(src,dst,follow_symlinks=follow_symlinks)\n return dst\n \ndef copy2(src,dst,*,follow_symlinks=True):\n ''\n\n\n\n\n\n\n\n\n \n if os.path.isdir(dst):\n dst=os.path.join(dst,os.path.basename(src))\n \n if hasattr(_winapi,\"CopyFile2\"):\n src_=os.fsdecode(src)\n dst_=os.fsdecode(dst)\n flags=_winapi.COPY_FILE_ALLOW_DECRYPTED_DESTINATION\n if not follow_symlinks:\n flags |=_winapi.COPY_FILE_COPY_SYMLINK\n try:\n _winapi.CopyFile2(src_,dst_,flags)\n return dst\n except OSError as exc:\n if(exc.winerror ==_winapi.ERROR_PRIVILEGE_NOT_HELD\n and not follow_symlinks):\n \n \n pass\n elif exc.winerror ==_winapi.ERROR_ACCESS_DENIED:\n \n \n pass\n else:\n raise\n \n copyfile(src,dst,follow_symlinks=follow_symlinks)\n copystat(src,dst,follow_symlinks=follow_symlinks)\n return dst\n \ndef ignore_patterns(*patterns):\n ''\n\n\n \n def _ignore_patterns(path,names):\n ignored_names=[]\n for pattern in patterns:\n ignored_names.extend(fnmatch.filter(names,pattern))\n return set(ignored_names)\n return _ignore_patterns\n \ndef _copytree(entries,src,dst,symlinks,ignore,copy_function,\nignore_dangling_symlinks,dirs_exist_ok=False):\n if ignore is not None:\n ignored_names=ignore(os.fspath(src),[x.name for x in entries])\n else:\n ignored_names=set()\n \n os.makedirs(dst,exist_ok=dirs_exist_ok)\n errors=[]\n use_srcentry=copy_function is copy2 or copy_function is copy\n \n for srcentry in entries:\n if srcentry.name in ignored_names:\n continue\n srcname=os.path.join(src,srcentry.name)\n dstname=os.path.join(dst,srcentry.name)\n srcobj=srcentry if use_srcentry else srcname\n try:\n is_symlink=srcentry.is_symlink()\n if is_symlink and os.name =='nt':\n \n \n lstat=srcentry.stat(follow_symlinks=False)\n if lstat.st_reparse_tag ==stat.IO_REPARSE_TAG_MOUNT_POINT:\n is_symlink=False\n if is_symlink:\n linkto=os.readlink(srcname)\n if symlinks:\n \n \n \n os.symlink(linkto,dstname)\n copystat(srcobj,dstname,follow_symlinks=not symlinks)\n else:\n \n if not os.path.exists(linkto)and ignore_dangling_symlinks:\n continue\n \n if srcentry.is_dir():\n copytree(srcobj,dstname,symlinks,ignore,\n copy_function,ignore_dangling_symlinks,\n dirs_exist_ok)\n else:\n 
copy_function(srcobj,dstname)\n elif srcentry.is_dir():\n copytree(srcobj,dstname,symlinks,ignore,copy_function,\n ignore_dangling_symlinks,dirs_exist_ok)\n else:\n \n copy_function(srcobj,dstname)\n \n \n except Error as err:\n errors.extend(err.args[0])\n except OSError as why:\n errors.append((srcname,dstname,str(why)))\n try:\n copystat(src,dst)\n except OSError as why:\n \n if getattr(why,'winerror',None)is None:\n errors.append((src,dst,str(why)))\n if errors:\n raise Error(errors)\n return dst\n \ndef copytree(src,dst,symlinks=False,ignore=None,copy_function=copy2,\nignore_dangling_symlinks=False,dirs_exist_ok=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.copytree\",src,dst)\n with os.scandir(src)as itr:\n entries=list(itr)\n return _copytree(entries=entries,src=src,dst=dst,symlinks=symlinks,\n ignore=ignore,copy_function=copy_function,\n ignore_dangling_symlinks=ignore_dangling_symlinks,\n dirs_exist_ok=dirs_exist_ok)\n \nif hasattr(os.stat_result,'st_file_attributes'):\n def _rmtree_islink(path):\n try:\n st=os.lstat(path)\n return(stat.S_ISLNK(st.st_mode)or\n (st.st_file_attributes&stat.FILE_ATTRIBUTE_REPARSE_POINT\n and st.st_reparse_tag ==stat.IO_REPARSE_TAG_MOUNT_POINT))\n except OSError:\n return False\nelse:\n def _rmtree_islink(path):\n return os.path.islink(path)\n \n \ndef _rmtree_unsafe(path,onexc):\n try:\n with os.scandir(path)as scandir_it:\n entries=list(scandir_it)\n except OSError as err:\n onexc(os.scandir,path,err)\n entries=[]\n for entry in entries:\n fullname=entry.path\n try:\n is_dir=entry.is_dir(follow_symlinks=False)\n except OSError:\n is_dir=False\n \n if is_dir and not entry.is_junction():\n try:\n if entry.is_symlink():\n \n \n \n raise OSError(\"Cannot call rmtree on a symbolic link\")\n except OSError as err:\n onexc(os.path.islink,fullname,err)\n continue\n _rmtree_unsafe(fullname,onexc)\n else:\n try:\n os.unlink(fullname)\n except OSError as err:\n onexc(os.unlink,fullname,err)\n try:\n os.rmdir(path)\n except OSError as err:\n onexc(os.rmdir,path,err)\n \n \ndef _rmtree_safe_fd(topfd,path,onexc):\n try:\n with os.scandir(topfd)as scandir_it:\n entries=list(scandir_it)\n except OSError as err:\n err.filename=path\n onexc(os.scandir,path,err)\n return\n for entry in entries:\n fullname=os.path.join(path,entry.name)\n try:\n is_dir=entry.is_dir(follow_symlinks=False)\n except OSError:\n is_dir=False\n else:\n if is_dir:\n try:\n orig_st=entry.stat(follow_symlinks=False)\n is_dir=stat.S_ISDIR(orig_st.st_mode)\n except OSError as err:\n onexc(os.lstat,fullname,err)\n continue\n if is_dir:\n try:\n dirfd=os.open(entry.name,os.O_RDONLY,dir_fd=topfd)\n dirfd_closed=False\n except OSError as err:\n onexc(os.open,fullname,err)\n else:\n try:\n if os.path.samestat(orig_st,os.fstat(dirfd)):\n _rmtree_safe_fd(dirfd,fullname,onexc)\n try:\n os.close(dirfd)\n dirfd_closed=True\n os.rmdir(entry.name,dir_fd=topfd)\n except OSError as err:\n onexc(os.rmdir,fullname,err)\n else:\n try:\n \n \n \n raise OSError(\"Cannot call rmtree on a symbolic \"\n \"link\")\n except OSError as err:\n onexc(os.path.islink,fullname,err)\n finally:\n if not dirfd_closed:\n os.close(dirfd)\n else:\n try:\n os.unlink(entry.name,dir_fd=topfd)\n except OSError as err:\n onexc(os.unlink,fullname,err)\n \n_use_fd_functions=({os.open,os.stat,os.unlink,os.rmdir}<=\nos.supports_dir_fd and\nos.scandir in os.supports_fd and\nos.stat in os.supports_follow_symlinks)\n\ndef 
rmtree(path,ignore_errors=False,onerror=None,*,onexc=None,dir_fd=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if onerror is not None:\n warnings.warn(\"onerror argument is deprecated, use onexc instead\",\n DeprecationWarning,stacklevel=2)\n \n sys.audit(\"shutil.rmtree\",path,dir_fd)\n if ignore_errors:\n def onexc(*args):\n pass\n elif onerror is None and onexc is None:\n def onexc(*args):\n raise\n elif onexc is None:\n if onerror is None:\n def onexc(*args):\n raise\n else:\n \n def onexc(*args):\n func,path,exc=args\n if exc is None:\n exc_info=None,None,None\n else:\n exc_info=type(exc),exc,exc.__traceback__\n return onerror(func,path,exc_info)\n \n if _use_fd_functions:\n \n if isinstance(path,bytes):\n path=os.fsdecode(path)\n \n \n try:\n orig_st=os.lstat(path,dir_fd=dir_fd)\n except Exception as err:\n onexc(os.lstat,path,err)\n return\n try:\n fd=os.open(path,os.O_RDONLY,dir_fd=dir_fd)\n fd_closed=False\n except Exception as err:\n onexc(os.open,path,err)\n return\n try:\n if os.path.samestat(orig_st,os.fstat(fd)):\n _rmtree_safe_fd(fd,path,onexc)\n try:\n os.close(fd)\n fd_closed=True\n os.rmdir(path,dir_fd=dir_fd)\n except OSError as err:\n onexc(os.rmdir,path,err)\n else:\n try:\n \n raise OSError(\"Cannot call rmtree on a symbolic link\")\n except OSError as err:\n onexc(os.path.islink,path,err)\n finally:\n if not fd_closed:\n os.close(fd)\n else:\n if dir_fd is not None:\n raise NotImplementedError(\"dir_fd unavailable on this platform\")\n try:\n if _rmtree_islink(path):\n \n raise OSError(\"Cannot call rmtree on a symbolic link\")\n except OSError as err:\n onexc(os.path.islink,path,err)\n \n return\n return _rmtree_unsafe(path,onexc)\n \n \n \nrmtree.avoids_symlink_attacks=_use_fd_functions\n\ndef _basename(path):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n path=os.fspath(path)\n sep=os.path.sep+(os.path.altsep or '')\n return os.path.basename(path.rstrip(sep))\n \ndef move(src,dst,copy_function=copy2):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.move\",src,dst)\n real_dst=dst\n if os.path.isdir(dst):\n if _samefile(src,dst):\n \n \n os.rename(src,dst)\n return\n \n \n \n real_dst=os.path.join(dst,_basename(src))\n \n if os.path.exists(real_dst):\n raise Error(\"Destination path '%s' already exists\"%real_dst)\n try:\n os.rename(src,real_dst)\n except OSError:\n if os.path.islink(src):\n linkto=os.readlink(src)\n os.symlink(linkto,real_dst)\n os.unlink(src)\n elif os.path.isdir(src):\n if _destinsrc(src,dst):\n raise Error(\"Cannot move a directory '%s' into itself\"\n \" '%s'.\"%(src,dst))\n if(_is_immutable(src)\n or(not os.access(src,os.W_OK)and os.listdir(src)\n and sys.platform =='darwin')):\n raise PermissionError(\"Cannot move the non-empty directory \"\n \"'%s': Lacking write permission to '%s'.\"\n %(src,src))\n copytree(src,real_dst,copy_function=copy_function,\n symlinks=True)\n rmtree(src)\n else:\n copy_function(src,real_dst)\n os.unlink(src)\n return real_dst\n \ndef _destinsrc(src,dst):\n src=os.path.abspath(src)\n dst=os.path.abspath(dst)\n if not src.endswith(os.path.sep):\n src +=os.path.sep\n if not dst.endswith(os.path.sep):\n dst +=os.path.sep\n return dst.startswith(src)\n \ndef _is_immutable(src):\n st=_stat(src)\n immutable_states=[stat.UF_IMMUTABLE,stat.SF_IMMUTABLE]\n return hasattr(st,'st_flags')and st.st_flags in immutable_states\n \ndef _get_gid(name):\n ''\n if name is None:\n return None\n \n try:\n from grp import getgrnam\n except ImportError:\n return None\n \n try:\n result=getgrnam(name)\n except 
KeyError:\n result=None\n if result is not None:\n return result[2]\n return None\n \ndef _get_uid(name):\n ''\n if name is None:\n return None\n \n try:\n from pwd import getpwnam\n except ImportError:\n return None\n \n try:\n result=getpwnam(name)\n except KeyError:\n result=None\n if result is not None:\n return result[2]\n return None\n \ndef _make_tarball(base_name,base_dir,compress=\"gzip\",verbose=0,dry_run=0,\nowner=None,group=None,logger=None,root_dir=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if compress is None:\n tar_compression=''\n elif _ZLIB_SUPPORTED and compress =='gzip':\n tar_compression='gz'\n elif _BZ2_SUPPORTED and compress =='bzip2':\n tar_compression='bz2'\n elif _LZMA_SUPPORTED and compress =='xz':\n tar_compression='xz'\n else:\n raise ValueError(\"bad value for 'compress', or compression format not \"\n \"supported : {0}\".format(compress))\n \n import tarfile\n \n compress_ext='.'+tar_compression if compress else ''\n archive_name=base_name+'.tar'+compress_ext\n archive_dir=os.path.dirname(archive_name)\n \n if archive_dir and not os.path.exists(archive_dir):\n if logger is not None:\n logger.info(\"creating %s\",archive_dir)\n if not dry_run:\n os.makedirs(archive_dir)\n \n \n if logger is not None:\n logger.info('Creating tar archive')\n \n uid=_get_uid(owner)\n gid=_get_gid(group)\n \n def _set_uid_gid(tarinfo):\n if gid is not None:\n tarinfo.gid=gid\n tarinfo.gname=group\n if uid is not None:\n tarinfo.uid=uid\n tarinfo.uname=owner\n return tarinfo\n \n if not dry_run:\n tar=tarfile.open(archive_name,'w|%s'%tar_compression)\n arcname=base_dir\n if root_dir is not None:\n base_dir=os.path.join(root_dir,base_dir)\n try:\n tar.add(base_dir,arcname,filter=_set_uid_gid)\n finally:\n tar.close()\n \n if root_dir is not None:\n archive_name=os.path.abspath(archive_name)\n return archive_name\n \ndef _make_zipfile(base_name,base_dir,verbose=0,dry_run=0,\nlogger=None,owner=None,group=None,root_dir=None):\n ''\n\n\n\n \n import zipfile\n \n zip_filename=base_name+\".zip\"\n archive_dir=os.path.dirname(base_name)\n \n if archive_dir and not os.path.exists(archive_dir):\n if logger is not None:\n logger.info(\"creating %s\",archive_dir)\n if not dry_run:\n os.makedirs(archive_dir)\n \n if logger is not None:\n logger.info(\"creating '%s' and adding '%s' to it\",\n zip_filename,base_dir)\n \n if not dry_run:\n with zipfile.ZipFile(zip_filename,\"w\",\n compression=zipfile.ZIP_DEFLATED)as zf:\n arcname=os.path.normpath(base_dir)\n if root_dir is not None:\n base_dir=os.path.join(root_dir,base_dir)\n base_dir=os.path.normpath(base_dir)\n if arcname !=os.curdir:\n zf.write(base_dir,arcname)\n if logger is not None:\n logger.info(\"adding '%s'\",base_dir)\n for dirpath,dirnames,filenames in os.walk(base_dir):\n arcdirpath=dirpath\n if root_dir is not None:\n arcdirpath=os.path.relpath(arcdirpath,root_dir)\n arcdirpath=os.path.normpath(arcdirpath)\n for name in sorted(dirnames):\n path=os.path.join(dirpath,name)\n arcname=os.path.join(arcdirpath,name)\n zf.write(path,arcname)\n if logger is not None:\n logger.info(\"adding '%s'\",path)\n for name in filenames:\n path=os.path.join(dirpath,name)\n path=os.path.normpath(path)\n if os.path.isfile(path):\n arcname=os.path.join(arcdirpath,name)\n zf.write(path,arcname)\n if logger is not None:\n logger.info(\"adding '%s'\",path)\n \n if root_dir is not None:\n zip_filename=os.path.abspath(zip_filename)\n return zip_filename\n 
\n_make_tarball.supports_root_dir=True\n_make_zipfile.supports_root_dir=True\n\n\n\n\n\n_ARCHIVE_FORMATS={\n'tar':(_make_tarball,[('compress',None)],\n\"uncompressed tar file\"),\n}\n\nif _ZLIB_SUPPORTED:\n _ARCHIVE_FORMATS['gztar']=(_make_tarball,[('compress','gzip')],\n \"gzip'ed tar-file\")\n _ARCHIVE_FORMATS['zip']=(_make_zipfile,[],\"ZIP file\")\n \nif _BZ2_SUPPORTED:\n _ARCHIVE_FORMATS['bztar']=(_make_tarball,[('compress','bzip2')],\n \"bzip2'ed tar-file\")\n \nif _LZMA_SUPPORTED:\n _ARCHIVE_FORMATS['xztar']=(_make_tarball,[('compress','xz')],\n \"xz'ed tar-file\")\n \ndef get_archive_formats():\n ''\n\n\n \n formats=[(name,registry[2])for name,registry in\n _ARCHIVE_FORMATS.items()]\n formats.sort()\n return formats\n \ndef register_archive_format(name,function,extra_args=None,description=''):\n ''\n\n\n\n\n\n\n \n if extra_args is None:\n extra_args=[]\n if not callable(function):\n raise TypeError('The %s object is not callable'%function)\n if not isinstance(extra_args,(tuple,list)):\n raise TypeError('extra_args needs to be a sequence')\n for element in extra_args:\n if not isinstance(element,(tuple,list))or len(element)!=2:\n raise TypeError('extra_args elements are : (arg_name, value)')\n \n _ARCHIVE_FORMATS[name]=(function,extra_args,description)\n \ndef unregister_archive_format(name):\n del _ARCHIVE_FORMATS[name]\n \ndef make_archive(base_name,format,root_dir=None,base_dir=None,verbose=0,\ndry_run=0,owner=None,group=None,logger=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.make_archive\",base_name,format,root_dir,base_dir)\n try:\n format_info=_ARCHIVE_FORMATS[format]\n except KeyError:\n raise ValueError(\"unknown archive format '%s'\"%format)from None\n \n kwargs={'dry_run':dry_run,'logger':logger,\n 'owner':owner,'group':group}\n \n func=format_info[0]\n for arg,val in format_info[1]:\n kwargs[arg]=val\n \n if base_dir is None:\n base_dir=os.curdir\n \n supports_root_dir=getattr(func,'supports_root_dir',False)\n save_cwd=None\n if root_dir is not None:\n stmd=os.stat(root_dir).st_mode\n if not stat.S_ISDIR(stmd):\n raise NotADirectoryError(errno.ENOTDIR,'Not a directory',root_dir)\n \n if supports_root_dir:\n \n base_name=os.fspath(base_name)\n kwargs['root_dir']=root_dir\n else:\n save_cwd=os.getcwd()\n if logger is not None:\n logger.debug(\"changing into '%s'\",root_dir)\n base_name=os.path.abspath(base_name)\n if not dry_run:\n os.chdir(root_dir)\n \n try:\n filename=func(base_name,base_dir,**kwargs)\n finally:\n if save_cwd is not None:\n if logger is not None:\n logger.debug(\"changing back to '%s'\",save_cwd)\n os.chdir(save_cwd)\n \n return filename\n \n \ndef get_unpack_formats():\n ''\n\n\n\n \n formats=[(name,info[0],info[3])for name,info in\n _UNPACK_FORMATS.items()]\n formats.sort()\n return formats\n \ndef _check_unpack_options(extensions,function,extra_args):\n ''\n \n existing_extensions={}\n for name,info in _UNPACK_FORMATS.items():\n for ext in info[0]:\n existing_extensions[ext]=name\n \n for extension in extensions:\n if extension in existing_extensions:\n msg='%s is already registered for \"%s\"'\n raise RegistryError(msg %(extension,\n existing_extensions[extension]))\n \n if not callable(function):\n raise TypeError('The registered function must be a callable')\n \n \ndef register_unpack_format(name,extensions,function,extra_args=None,\ndescription=''):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if extra_args is None:\n extra_args=[]\n _check_unpack_options(extensions,function,extra_args)\n 
_UNPACK_FORMATS[name]=extensions,function,extra_args,description\n \ndef unregister_unpack_format(name):\n ''\n del _UNPACK_FORMATS[name]\n \ndef _ensure_directory(path):\n ''\n dirname=os.path.dirname(path)\n if not os.path.isdir(dirname):\n os.makedirs(dirname)\n \ndef _unpack_zipfile(filename,extract_dir):\n ''\n \n import zipfile\n \n if not zipfile.is_zipfile(filename):\n raise ReadError(\"%s is not a zip file\"%filename)\n \n zip=zipfile.ZipFile(filename)\n try:\n for info in zip.infolist():\n name=info.filename\n \n \n if name.startswith('/')or '..'in name:\n continue\n \n targetpath=os.path.join(extract_dir,*name.split('/'))\n if not targetpath:\n continue\n \n _ensure_directory(targetpath)\n if not name.endswith('/'):\n \n with zip.open(name,'r')as source,\\\n open(targetpath,'wb')as target:\n copyfileobj(source,target)\n finally:\n zip.close()\n \ndef _unpack_tarfile(filename,extract_dir,*,filter=None):\n ''\n \n import tarfile\n try:\n tarobj=tarfile.open(filename)\n except tarfile.TarError:\n raise ReadError(\n \"%s is not a compressed or uncompressed tar file\"%filename)\n try:\n tarobj.extractall(extract_dir,filter=filter)\n finally:\n tarobj.close()\n \n \n \n \n \n \n_UNPACK_FORMATS={\n'tar':(['.tar'],_unpack_tarfile,[],\"uncompressed tar file\"),\n'zip':(['.zip'],_unpack_zipfile,[],\"ZIP file\"),\n}\n\nif _ZLIB_SUPPORTED:\n _UNPACK_FORMATS['gztar']=(['.tar.gz','.tgz'],_unpack_tarfile,[],\n \"gzip'ed tar-file\")\n \nif _BZ2_SUPPORTED:\n _UNPACK_FORMATS['bztar']=(['.tar.bz2','.tbz2'],_unpack_tarfile,[],\n \"bzip2'ed tar-file\")\n \nif _LZMA_SUPPORTED:\n _UNPACK_FORMATS['xztar']=(['.tar.xz','.txz'],_unpack_tarfile,[],\n \"xz'ed tar-file\")\n \ndef _find_unpack_format(filename):\n for name,info in _UNPACK_FORMATS.items():\n for extension in info[0]:\n if filename.endswith(extension):\n return name\n return None\n \ndef unpack_archive(filename,extract_dir=None,format=None,*,filter=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.unpack_archive\",filename,extract_dir,format)\n \n if extract_dir is None:\n extract_dir=os.getcwd()\n \n extract_dir=os.fspath(extract_dir)\n filename=os.fspath(filename)\n \n if filter is None:\n filter_kwargs={}\n else:\n filter_kwargs={'filter':filter}\n if format is not None:\n try:\n format_info=_UNPACK_FORMATS[format]\n except KeyError:\n raise ValueError(\"Unknown unpack format '{0}'\".format(format))from None\n \n func=format_info[1]\n func(filename,extract_dir,**dict(format_info[2]),**filter_kwargs)\n else:\n \n format=_find_unpack_format(filename)\n if format is None:\n raise ReadError(\"Unknown archive format '{0}'\".format(filename))\n \n func=_UNPACK_FORMATS[format][1]\n kwargs=dict(_UNPACK_FORMATS[format][2])|filter_kwargs\n func(filename,extract_dir,**kwargs)\n \n \nif hasattr(os,'statvfs'):\n\n __all__.append('disk_usage')\n _ntuple_diskusage=collections.namedtuple('usage','total used free')\n _ntuple_diskusage.total.__doc__='Total space in bytes'\n _ntuple_diskusage.used.__doc__='Used space in bytes'\n _ntuple_diskusage.free.__doc__='Free space in bytes'\n \n def disk_usage(path):\n ''\n\n\n\n \n st=os.statvfs(path)\n free=st.f_bavail *st.f_frsize\n total=st.f_blocks *st.f_frsize\n used=(st.f_blocks -st.f_bfree)*st.f_frsize\n return _ntuple_diskusage(total,used,free)\n \nelif _WINDOWS:\n\n __all__.append('disk_usage')\n _ntuple_diskusage=collections.namedtuple('usage','total used free')\n \n def disk_usage(path):\n ''\n\n\n\n \n total,free=nt._getdiskusage(path)\n used=total -free\n return 
_ntuple_diskusage(total,used,free)\n \n \ndef chown(path,user=None,group=None):\n ''\n\n\n\n \n sys.audit('shutil.chown',path,user,group)\n \n if user is None and group is None:\n raise ValueError(\"user and/or group must be set\")\n \n _user=user\n _group=group\n \n \n if user is None:\n _user=-1\n \n elif isinstance(user,str):\n _user=_get_uid(user)\n if _user is None:\n raise LookupError(\"no such user: {!r}\".format(user))\n \n if group is None:\n _group=-1\n elif not isinstance(group,int):\n _group=_get_gid(group)\n if _group is None:\n raise LookupError(\"no such group: {!r}\".format(group))\n \n os.chown(path,_user,_group)\n \ndef get_terminal_size(fallback=(80,24)):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n try:\n columns=int(os.environ['COLUMNS'])\n except(KeyError,ValueError):\n columns=0\n \n try:\n lines=int(os.environ['LINES'])\n except(KeyError,ValueError):\n lines=0\n \n \n if columns <=0 or lines <=0:\n try:\n size=os.get_terminal_size(sys.__stdout__.fileno())\n except(AttributeError,ValueError,OSError):\n \n \n size=os.terminal_size(fallback)\n if columns <=0:\n columns=size.columns or fallback[0]\n if lines <=0:\n lines=size.lines or fallback[1]\n \n return os.terminal_size((columns,lines))\n \n \n \n \n \ndef _access_check(fn,mode):\n return(os.path.exists(fn)and os.access(fn,mode)\n and not os.path.isdir(fn))\n \n \ndef _win_path_needs_curdir(cmd,mode):\n ''\n\n\n\n \n return(not(mode&os.X_OK))or _winapi.NeedCurrentDirectoryForExePath(\n os.fsdecode(cmd))\n \n \ndef which(cmd,mode=os.F_OK |os.X_OK,path=None):\n ''\n\n\n\n\n\n\n\n \n use_bytes=isinstance(cmd,bytes)\n \n \n \n \n dirname,cmd=os.path.split(cmd)\n if dirname:\n path=[dirname]\n else:\n if path is None:\n path=os.environ.get(\"PATH\",None)\n if path is None:\n try:\n path=os.confstr(\"CS_PATH\")\n except(AttributeError,ValueError):\n \n path=os.defpath\n \n \n \n \n \n if not path:\n return None\n \n if use_bytes:\n path=os.fsencode(path)\n path=path.split(os.fsencode(os.pathsep))\n else:\n path=os.fsdecode(path)\n path=path.split(os.pathsep)\n \n if sys.platform ==\"win32\"and _win_path_needs_curdir(cmd,mode):\n curdir=os.curdir\n if use_bytes:\n curdir=os.fsencode(curdir)\n path.insert(0,curdir)\n \n if sys.platform ==\"win32\":\n \n pathext_source=os.getenv(\"PATHEXT\")or _WIN_DEFAULT_PATHEXT\n pathext=[ext for ext in pathext_source.split(os.pathsep)if ext]\n \n if use_bytes:\n pathext=[os.fsencode(ext)for ext in pathext]\n \n \n files=[cmd]+[cmd+ext for ext in pathext]\n else:\n \n \n files=[cmd]\n \n seen=set()\n for dir in path:\n normdir=os.path.normcase(dir)\n if not normdir in seen:\n seen.add(normdir)\n for thefile in files:\n name=os.path.join(dir,thefile)\n if _access_check(name,mode):\n return name\n return None\n", ["_winapi", "bz2", "collections", "errno", "fnmatch", "grp", "lzma", "nt", "os", "posix", "pwd", "stat", "sys", "tarfile", "warnings", "zipfile", "zlib"]], "tempfile": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\n\"NamedTemporaryFile\",\"TemporaryFile\",\n\"SpooledTemporaryFile\",\"TemporaryDirectory\",\n\"mkstemp\",\"mkdtemp\",\n\"mktemp\",\n\"TMP_MAX\",\"gettempprefix\",\n\"tempdir\",\"gettempdir\",\n\"gettempprefixb\",\"gettempdirb\",\n]\n\n\n\n\nimport functools as _functools\nimport warnings as _warnings\nimport io as _io\nimport os as _os\nimport shutil as _shutil\nimport errno as _errno\nfrom random import Random as _Random\nimport sys as _sys\nimport types as _types\nimport weakref as _weakref\nimport 
_thread\n_allocate_lock=_thread.allocate_lock\n\n_text_openflags=_os.O_RDWR |_os.O_CREAT |_os.O_EXCL\nif hasattr(_os,'O_NOFOLLOW'):\n _text_openflags |=_os.O_NOFOLLOW\n \n_bin_openflags=_text_openflags\nif hasattr(_os,'O_BINARY'):\n _bin_openflags |=_os.O_BINARY\n \nif hasattr(_os,'TMP_MAX'):\n TMP_MAX=_os.TMP_MAX\nelse:\n TMP_MAX=10000\n \n \n \n \n \ntemplate=\"tmp\"\n\n\n\n_once_lock=_allocate_lock()\n\n\ndef _exists(fn):\n try:\n _os.lstat(fn)\n except OSError:\n return False\n else:\n return True\n \n \ndef _infer_return_type(*args):\n ''\n return_type=None\n for arg in args:\n if arg is None:\n continue\n \n if isinstance(arg,_os.PathLike):\n arg=_os.fspath(arg)\n \n if isinstance(arg,bytes):\n if return_type is str:\n raise TypeError(\"Can't mix bytes and non-bytes in \"\n \"path components.\")\n return_type=bytes\n else:\n if return_type is bytes:\n raise TypeError(\"Can't mix bytes and non-bytes in \"\n \"path components.\")\n return_type=str\n if return_type is None:\n if tempdir is None or isinstance(tempdir,str):\n return str\n else:\n \n return bytes\n return return_type\n \n \ndef _sanitize_params(prefix,suffix,dir):\n ''\n output_type=_infer_return_type(prefix,suffix,dir)\n if suffix is None:\n suffix=output_type()\n if prefix is None:\n if output_type is str:\n prefix=template\n else:\n prefix=_os.fsencode(template)\n if dir is None:\n if output_type is str:\n dir=gettempdir()\n else:\n dir=gettempdirb()\n return prefix,suffix,dir,output_type\n \n \nclass _RandomNameSequence:\n ''\n\n\n\n\n \n \n characters=\"abcdefghijklmnopqrstuvwxyz0123456789_\"\n \n @property\n def rng(self):\n cur_pid=_os.getpid()\n if cur_pid !=getattr(self,'_rng_pid',None):\n self._rng=_Random()\n self._rng_pid=cur_pid\n return self._rng\n \n def __iter__(self):\n return self\n \n def __next__(self):\n return ''.join(self.rng.choices(self.characters,k=8))\n \ndef _candidate_tempdir_list():\n ''\n \n \n dirlist=[]\n \n \n for envname in 'TMPDIR','TEMP','TMP':\n dirname=_os.getenv(envname)\n if dirname:dirlist.append(dirname)\n \n \n if _os.name =='nt':\n dirlist.extend([_os.path.expanduser(r'~\\AppData\\Local\\Temp'),\n _os.path.expandvars(r'%SYSTEMROOT%\\Temp'),\n r'c:\\temp',r'c:\\tmp',r'\\temp',r'\\tmp'])\n else:\n dirlist.extend(['/tmp','/var/tmp','/usr/tmp'])\n \n \n try:\n dirlist.append(_os.getcwd())\n except(AttributeError,OSError):\n dirlist.append(_os.curdir)\n \n return dirlist\n \ndef _get_default_tempdir():\n ''\n\n\n\n\n\n \n \n namer=_RandomNameSequence()\n dirlist=_candidate_tempdir_list()\n \n for dir in dirlist:\n if dir !=_os.curdir:\n dir=_os.path.abspath(dir)\n \n for seq in range(100):\n name=next(namer)\n filename=_os.path.join(dir,name)\n try:\n fd=_os.open(filename,_bin_openflags,0o600)\n try:\n try:\n _os.write(fd,b'blat')\n finally:\n _os.close(fd)\n finally:\n _os.unlink(filename)\n return dir\n except FileExistsError:\n pass\n except PermissionError:\n \n \n if(_os.name =='nt'and _os.path.isdir(dir)and\n _os.access(dir,_os.W_OK)):\n continue\n break\n except OSError:\n break\n raise FileNotFoundError(_errno.ENOENT,\n \"No usable temporary directory found in %s\"%\n dirlist)\n \n_name_sequence=None\n\ndef _get_candidate_names():\n ''\n \n global _name_sequence\n if _name_sequence is None:\n _once_lock.acquire()\n try:\n if _name_sequence is None:\n _name_sequence=_RandomNameSequence()\n finally:\n _once_lock.release()\n return _name_sequence\n \n \ndef _mkstemp_inner(dir,pre,suf,flags,output_type):\n ''\n \n dir=_os.path.abspath(dir)\n names=_get_candidate_names()\n if 
output_type is bytes:\n names=map(_os.fsencode,names)\n \n for seq in range(TMP_MAX):\n name=next(names)\n file=_os.path.join(dir,pre+name+suf)\n _sys.audit(\"tempfile.mkstemp\",file)\n try:\n fd=_os.open(file,flags,0o600)\n except FileExistsError:\n continue\n except PermissionError:\n \n \n if(_os.name =='nt'and _os.path.isdir(dir)and\n _os.access(dir,_os.W_OK)):\n continue\n else:\n raise\n return fd,file\n \n raise FileExistsError(_errno.EEXIST,\n \"No usable temporary file name found\")\n \n \n \n \ndef gettempprefix():\n ''\n return _os.fsdecode(template)\n \ndef gettempprefixb():\n ''\n return _os.fsencode(template)\n \ntempdir=None\n\ndef _gettempdir():\n ''\n global tempdir\n if tempdir is None:\n _once_lock.acquire()\n try:\n if tempdir is None:\n tempdir=_get_default_tempdir()\n finally:\n _once_lock.release()\n return tempdir\n \ndef gettempdir():\n ''\n return _os.fsdecode(_gettempdir())\n \ndef gettempdirb():\n ''\n return _os.fsencode(_gettempdir())\n \ndef mkstemp(suffix=None,prefix=None,dir=None,text=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n if text:\n flags=_text_openflags\n else:\n flags=_bin_openflags\n \n return _mkstemp_inner(dir,prefix,suffix,flags,output_type)\n \n \ndef mkdtemp(suffix=None,prefix=None,dir=None):\n ''\n\n\n\n\n\n\n\n\n\n \n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n names=_get_candidate_names()\n if output_type is bytes:\n names=map(_os.fsencode,names)\n \n for seq in range(TMP_MAX):\n name=next(names)\n file=_os.path.join(dir,prefix+name+suffix)\n _sys.audit(\"tempfile.mkdtemp\",file)\n try:\n _os.mkdir(file,0o700)\n except FileExistsError:\n continue\n except PermissionError:\n \n \n if(_os.name =='nt'and _os.path.isdir(dir)and\n _os.access(dir,_os.W_OK)):\n continue\n else:\n raise\n return _os.path.abspath(file)\n \n raise FileExistsError(_errno.EEXIST,\n \"No usable temporary directory name found\")\n \ndef mktemp(suffix=\"\",prefix=template,dir=None):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n if dir is None:\n dir=gettempdir()\n \n names=_get_candidate_names()\n for seq in range(TMP_MAX):\n name=next(names)\n file=_os.path.join(dir,prefix+name+suffix)\n if not _exists(file):\n return file\n \n raise FileExistsError(_errno.EEXIST,\n \"No usable temporary filename found\")\n \n \nclass _TemporaryFileCloser:\n ''\n\n \n \n cleanup_called=False\n close_called=False\n \n def __init__(self,file,name,delete=True,delete_on_close=True):\n self.file=file\n self.name=name\n self.delete=delete\n self.delete_on_close=delete_on_close\n \n def cleanup(self,windows=(_os.name =='nt'),unlink=_os.unlink):\n if not self.cleanup_called:\n self.cleanup_called=True\n try:\n if not self.close_called:\n self.close_called=True\n self.file.close()\n finally:\n \n \n if self.delete and not(windows and self.delete_on_close):\n try:\n unlink(self.name)\n except FileNotFoundError:\n pass\n \n def close(self):\n if not self.close_called:\n self.close_called=True\n try:\n self.file.close()\n finally:\n if self.delete and self.delete_on_close:\n self.cleanup()\n \n def __del__(self):\n self.cleanup()\n \n \nclass _TemporaryFileWrapper:\n ''\n\n\n\n\n \n \n def __init__(self,file,name,delete=True,delete_on_close=True):\n self.file=file\n self.name=name\n self._closer=_TemporaryFileCloser(file,name,delete,\n delete_on_close)\n \n def __getattr__(self,name):\n \n \n \n file=self.__dict__['file']\n a=getattr(file,name)\n if hasattr(a,'__call__'):\n 
func=a\n @_functools.wraps(func)\n def func_wrapper(*args,**kwargs):\n return func(*args,**kwargs)\n \n \n func_wrapper._closer=self._closer\n a=func_wrapper\n if not isinstance(a,int):\n setattr(self,name,a)\n return a\n \n \n \n def __enter__(self):\n self.file.__enter__()\n return self\n \n \n \n def __exit__(self,exc,value,tb):\n result=self.file.__exit__(exc,value,tb)\n self._closer.cleanup()\n return result\n \n def close(self):\n ''\n\n \n self._closer.close()\n \n \n def __iter__(self):\n \n \n \n \n \n for line in self.file:\n yield line\n \ndef NamedTemporaryFile(mode='w+b',buffering=-1,encoding=None,\nnewline=None,suffix=None,prefix=None,\ndir=None,delete=True,*,errors=None,\ndelete_on_close=True):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n flags=_bin_openflags\n \n \n \n if _os.name =='nt'and delete and delete_on_close:\n flags |=_os.O_TEMPORARY\n \n if \"b\"not in mode:\n encoding=_io.text_encoding(encoding)\n \n name=None\n def opener(*args):\n nonlocal name\n fd,name=_mkstemp_inner(dir,prefix,suffix,flags,output_type)\n return fd\n try:\n file=_io.open(dir,mode,buffering=buffering,\n newline=newline,encoding=encoding,errors=errors,\n opener=opener)\n try:\n raw=getattr(file,'buffer',file)\n raw=getattr(raw,'raw',raw)\n raw.name=name\n return _TemporaryFileWrapper(file,name,delete,delete_on_close)\n except:\n file.close()\n raise\n except:\n if name is not None and not(\n _os.name =='nt'and delete and delete_on_close):\n _os.unlink(name)\n raise\n \nif _os.name !='posix'or _sys.platform =='cygwin':\n\n\n TemporaryFile=NamedTemporaryFile\n \nelse:\n\n\n\n _O_TMPFILE_WORKS=hasattr(_os,'O_TMPFILE')\n \n def TemporaryFile(mode='w+b',buffering=-1,encoding=None,\n newline=None,suffix=None,prefix=None,\n dir=None,*,errors=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n global _O_TMPFILE_WORKS\n \n if \"b\"not in mode:\n encoding=_io.text_encoding(encoding)\n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n flags=_bin_openflags\n if _O_TMPFILE_WORKS:\n fd=None\n def opener(*args):\n nonlocal fd\n flags2=(flags |_os.O_TMPFILE)&~_os.O_CREAT\n fd=_os.open(dir,flags2,0o600)\n return fd\n try:\n file=_io.open(dir,mode,buffering=buffering,\n newline=newline,encoding=encoding,\n errors=errors,opener=opener)\n raw=getattr(file,'buffer',file)\n raw=getattr(raw,'raw',raw)\n raw.name=fd\n return file\n except IsADirectoryError:\n \n \n \n \n \n _O_TMPFILE_WORKS=False\n except OSError:\n \n \n \n \n \n \n \n pass\n \n \n fd=None\n def opener(*args):\n nonlocal fd\n fd,name=_mkstemp_inner(dir,prefix,suffix,flags,output_type)\n try:\n _os.unlink(name)\n except BaseException as e:\n _os.close(fd)\n raise\n return fd\n file=_io.open(dir,mode,buffering=buffering,\n newline=newline,encoding=encoding,errors=errors,\n opener=opener)\n raw=getattr(file,'buffer',file)\n raw=getattr(raw,'raw',raw)\n raw.name=fd\n return file\n \nclass SpooledTemporaryFile(_io.IOBase):\n ''\n\n\n \n _rolled=False\n \n def __init__(self,max_size=0,mode='w+b',buffering=-1,\n encoding=None,newline=None,\n suffix=None,prefix=None,dir=None,*,errors=None):\n if 'b'in mode:\n self._file=_io.BytesIO()\n else:\n encoding=_io.text_encoding(encoding)\n self._file=_io.TextIOWrapper(_io.BytesIO(),\n encoding=encoding,errors=errors,\n newline=newline)\n self._max_size=max_size\n self._rolled=False\n self._TemporaryFileArgs={'mode':mode,'buffering':buffering,\n 'suffix':suffix,'prefix':prefix,\n 'encoding':encoding,'newline':newline,\n 
'dir':dir,'errors':errors}\n \n __class_getitem__=classmethod(_types.GenericAlias)\n \n def _check(self,file):\n if self._rolled:return\n max_size=self._max_size\n if max_size and file.tell()>max_size:\n self.rollover()\n \n def rollover(self):\n if self._rolled:return\n file=self._file\n newfile=self._file=TemporaryFile(**self._TemporaryFileArgs)\n del self._TemporaryFileArgs\n \n pos=file.tell()\n if hasattr(newfile,'buffer'):\n newfile.buffer.write(file.detach().getvalue())\n else:\n newfile.write(file.getvalue())\n newfile.seek(pos,0)\n \n self._rolled=True\n \n \n \n \n \n \n \n def __enter__(self):\n if self._file.closed:\n raise ValueError(\"Cannot enter context with closed file\")\n return self\n \n def __exit__(self,exc,value,tb):\n self._file.close()\n \n \n def __iter__(self):\n return self._file.__iter__()\n \n def __del__(self):\n if not self.closed:\n _warnings.warn(\n \"Unclosed file {!r}\".format(self),\n ResourceWarning,\n stacklevel=2,\n source=self\n )\n self.close()\n \n def close(self):\n self._file.close()\n \n @property\n def closed(self):\n return self._file.closed\n \n @property\n def encoding(self):\n return self._file.encoding\n \n @property\n def errors(self):\n return self._file.errors\n \n def fileno(self):\n self.rollover()\n return self._file.fileno()\n \n def flush(self):\n self._file.flush()\n \n def isatty(self):\n return self._file.isatty()\n \n @property\n def mode(self):\n try:\n return self._file.mode\n except AttributeError:\n return self._TemporaryFileArgs['mode']\n \n @property\n def name(self):\n try:\n return self._file.name\n except AttributeError:\n return None\n \n @property\n def newlines(self):\n return self._file.newlines\n \n def readable(self):\n return self._file.readable()\n \n def read(self,*args):\n return self._file.read(*args)\n \n def read1(self,*args):\n return self._file.read1(*args)\n \n def readinto(self,b):\n return self._file.readinto(b)\n \n def readinto1(self,b):\n return self._file.readinto1(b)\n \n def readline(self,*args):\n return self._file.readline(*args)\n \n def readlines(self,*args):\n return self._file.readlines(*args)\n \n def seekable(self):\n return self._file.seekable()\n \n def seek(self,*args):\n return self._file.seek(*args)\n \n def tell(self):\n return self._file.tell()\n \n def truncate(self,size=None):\n if size is None:\n return self._file.truncate()\n else:\n if size >self._max_size:\n self.rollover()\n return self._file.truncate(size)\n \n def writable(self):\n return self._file.writable()\n \n def write(self,s):\n file=self._file\n rv=file.write(s)\n self._check(file)\n return rv\n \n def writelines(self,iterable):\n file=self._file\n rv=file.writelines(iterable)\n self._check(file)\n return rv\n \n def detach(self):\n return self._file.detach()\n \n \nclass TemporaryDirectory:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,suffix=None,prefix=None,dir=None,\n ignore_cleanup_errors=False,*,delete=True):\n self.name=mkdtemp(suffix,prefix,dir)\n self._ignore_cleanup_errors=ignore_cleanup_errors\n self._delete=delete\n self._finalizer=_weakref.finalize(\n self,self._cleanup,self.name,\n warn_message=\"Implicitly cleaning up {!r}\".format(self),\n ignore_errors=self._ignore_cleanup_errors,delete=self._delete)\n \n @classmethod\n def _rmtree(cls,name,ignore_errors=False):\n def onexc(func,path,exc):\n if isinstance(exc,PermissionError):\n def resetperms(path):\n try:\n _os.chflags(path,0)\n except AttributeError:\n pass\n _os.chmod(path,0o700)\n \n try:\n if path !=name:\n 
resetperms(_os.path.dirname(path))\n resetperms(path)\n \n try:\n _os.unlink(path)\n \n except(IsADirectoryError,PermissionError):\n cls._rmtree(path,ignore_errors=ignore_errors)\n except FileNotFoundError:\n pass\n elif isinstance(exc,FileNotFoundError):\n pass\n else:\n if not ignore_errors:\n raise\n \n _shutil.rmtree(name,onexc=onexc)\n \n @classmethod\n def _cleanup(cls,name,warn_message,ignore_errors=False,delete=True):\n if delete:\n cls._rmtree(name,ignore_errors=ignore_errors)\n _warnings.warn(warn_message,ResourceWarning)\n \n def __repr__(self):\n return \"<{} {!r}>\".format(self.__class__.__name__,self.name)\n \n def __enter__(self):\n return self.name\n \n def __exit__(self,exc,value,tb):\n if self._delete:\n self.cleanup()\n \n def cleanup(self):\n if self._finalizer.detach()or _os.path.exists(self.name):\n self._rmtree(self.name,ignore_errors=self._ignore_cleanup_errors)\n \n __class_getitem__=classmethod(_types.GenericAlias)\n", ["_thread", "errno", "functools", "io", "os", "random", "shutil", "sys", "types", "warnings", "weakref"]], "queue": [".py", "''\n\nimport threading\nimport types\nfrom collections import deque\nfrom heapq import heappush,heappop\nfrom time import monotonic as time\ntry:\n from _queue import SimpleQueue\nexcept ImportError:\n SimpleQueue=None\n \n__all__=['Empty','Full','Queue','PriorityQueue','LifoQueue','SimpleQueue']\n\n\ntry:\n from _queue import Empty\nexcept ImportError:\n class Empty(Exception):\n ''\n pass\n \nclass Full(Exception):\n ''\n pass\n \n \nclass Queue:\n ''\n\n\n \n \n def __init__(self,maxsize=0):\n self.maxsize=maxsize\n self._init(maxsize)\n \n \n \n \n \n self.mutex=threading.Lock()\n \n \n \n self.not_empty=threading.Condition(self.mutex)\n \n \n \n self.not_full=threading.Condition(self.mutex)\n \n \n \n self.all_tasks_done=threading.Condition(self.mutex)\n self.unfinished_tasks=0\n \n def task_done(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n with self.all_tasks_done:\n unfinished=self.unfinished_tasks -1\n if unfinished <=0:\n if unfinished <0:\n raise ValueError('task_done() called too many times')\n self.all_tasks_done.notify_all()\n self.unfinished_tasks=unfinished\n \n def join(self):\n ''\n\n\n\n\n\n\n \n with self.all_tasks_done:\n while self.unfinished_tasks:\n self.all_tasks_done.wait()\n \n def qsize(self):\n ''\n with self.mutex:\n return self._qsize()\n \n def empty(self):\n ''\n\n\n\n\n\n\n\n\n \n with self.mutex:\n return not self._qsize()\n \n def full(self):\n ''\n\n\n\n\n\n \n with self.mutex:\n return 0 0:\n if not block:\n if self._qsize()>=self.maxsize:\n raise Full\n elif timeout is None:\n while self._qsize()>=self.maxsize:\n self.not_full.wait()\n elif timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n else:\n endtime=time()+timeout\n while self._qsize()>=self.maxsize:\n remaining=endtime -time()\n if remaining <=0.0:\n raise Full\n self.not_full.wait(remaining)\n self._put(item)\n self.unfinished_tasks +=1\n self.not_empty.notify()\n \n def get(self,block=True,timeout=None):\n ''\n\n\n\n\n\n\n\n\n \n with self.not_empty:\n if not block:\n if not self._qsize():\n raise Empty\n elif timeout is None:\n while not self._qsize():\n self.not_empty.wait()\n elif timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n else:\n endtime=time()+timeout\n while not self._qsize():\n remaining=endtime -time()\n if remaining <=0.0:\n raise Empty\n self.not_empty.wait(remaining)\n item=self._get()\n self.not_full.notify()\n return item\n \n def put_nowait(self,item):\n 
''\n\n\n\n \n return self.put(item,block=False)\n \n def get_nowait(self):\n ''\n\n\n\n \n return self.get(block=False)\n \n \n \n \n \n \n def _init(self,maxsize):\n self.queue=deque()\n \n def _qsize(self):\n return len(self.queue)\n \n \n def _put(self,item):\n self.queue.append(item)\n \n \n def _get(self):\n return self.queue.popleft()\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \nclass PriorityQueue(Queue):\n ''\n\n\n \n \n def _init(self,maxsize):\n self.queue=[]\n \n def _qsize(self):\n return len(self.queue)\n \n def _put(self,item):\n heappush(self.queue,item)\n \n def _get(self):\n return heappop(self.queue)\n \n \nclass LifoQueue(Queue):\n ''\n \n def _init(self,maxsize):\n self.queue=[]\n \n def _qsize(self):\n return len(self.queue)\n \n def _put(self,item):\n self.queue.append(item)\n \n def _get(self):\n return self.queue.pop()\n \n \nclass _PySimpleQueue:\n ''\n\n\n \n \n \n \n \n \n def __init__(self):\n self._queue=deque()\n self._count=threading.Semaphore(0)\n \n def put(self,item,block=True,timeout=None):\n ''\n\n\n\n \n self._queue.append(item)\n self._count.release()\n \n def get(self,block=True,timeout=None):\n ''\n\n\n\n\n\n\n\n\n \n if timeout is not None and timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n if not self._count.acquire(block,timeout):\n raise Empty\n return self._queue.popleft()\n \n def put_nowait(self,item):\n ''\n\n\n\n \n return self.put(item,block=False)\n \n def get_nowait(self):\n ''\n\n\n\n \n return self.get(block=False)\n \n def empty(self):\n ''\n return len(self._queue)==0\n \n def qsize(self):\n ''\n return len(self._queue)\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \nif SimpleQueue is None:\n SimpleQueue=_PySimpleQueue\n", ["_queue", "collections", "heapq", "threading", "time", "types"]], "pkgutil": [".py", "''\n\nfrom collections import namedtuple\nfrom functools import singledispatch as simplegeneric\nimport importlib\nimport importlib.util\nimport importlib.machinery\nimport os\nimport os.path\nimport sys\nfrom types import ModuleType\nimport warnings\n\n__all__=[\n'get_importer','iter_importers','get_loader','find_loader',\n'walk_packages','iter_modules','get_data',\n'read_code','extend_path',\n'ModuleInfo',\n]\n\n\nModuleInfo=namedtuple('ModuleInfo','module_finder name ispkg')\nModuleInfo.__doc__='A namedtuple with minimal info about a module.'\n\n\ndef read_code(stream):\n\n\n import marshal\n \n magic=stream.read(4)\n if magic !=importlib.util.MAGIC_NUMBER:\n return None\n \n stream.read(12)\n return marshal.load(stream)\n \n \ndef walk_packages(path=None,prefix='',onerror=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def seen(p,m={}):\n if p in m:\n return True\n m[p]=True\n \n for info in iter_modules(path,prefix):\n yield info\n \n if info.ispkg:\n try:\n __import__(info.name)\n except ImportError:\n if onerror is not None:\n onerror(info.name)\n except Exception:\n if onerror is not None:\n onerror(info.name)\n else:\n raise\n else:\n path=getattr(sys.modules[info.name],'__path__',None)or[]\n \n \n path=[p for p in path if not seen(p)]\n \n yield from walk_packages(path,info.name+'.',onerror)\n \n \ndef iter_modules(path=None,prefix=''):\n ''\n\n\n\n\n\n\n\n \n if path is None:\n importers=iter_importers()\n elif isinstance(path,str):\n raise ValueError(\"path must be None or list of paths to look for \"\n \"modules in\")\n else:\n importers=map(get_importer,path)\n \n yielded={}\n for i in importers:\n for name,ispkg in 
iter_importer_modules(i,prefix):\n if name not in yielded:\n yielded[name]=1\n yield ModuleInfo(i,name,ispkg)\n \n \n@simplegeneric\ndef iter_importer_modules(importer,prefix=''):\n if not hasattr(importer,'iter_modules'):\n return[]\n return importer.iter_modules(prefix)\n \n \n \ndef _iter_file_finder_modules(importer,prefix=''):\n if importer.path is None or not os.path.isdir(importer.path):\n return\n \n yielded={}\n import inspect\n try:\n filenames=os.listdir(importer.path)\n except OSError:\n \n filenames=[]\n filenames.sort()\n \n for fn in filenames:\n modname=inspect.getmodulename(fn)\n if modname =='__init__'or modname in yielded:\n continue\n \n path=os.path.join(importer.path,fn)\n ispkg=False\n \n if not modname and os.path.isdir(path)and '.'not in fn:\n modname=fn\n try:\n dircontents=os.listdir(path)\n except OSError:\n \n dircontents=[]\n for fn in dircontents:\n subname=inspect.getmodulename(fn)\n if subname =='__init__':\n ispkg=True\n break\n else:\n continue\n \n if modname and '.'not in modname:\n yielded[modname]=1\n yield prefix+modname,ispkg\n \niter_importer_modules.register(\nimportlib.machinery.FileFinder,_iter_file_finder_modules)\n\n\ntry:\n import zipimport\n from zipimport import zipimporter\n \n def iter_zipimport_modules(importer,prefix=''):\n dirlist=sorted(zipimport._zip_directory_cache[importer.archive])\n _prefix=importer.prefix\n plen=len(_prefix)\n yielded={}\n import inspect\n for fn in dirlist:\n if not fn.startswith(_prefix):\n continue\n \n fn=fn[plen:].split(os.sep)\n \n if len(fn)==2 and fn[1].startswith('__init__.py'):\n if fn[0]not in yielded:\n yielded[fn[0]]=1\n yield prefix+fn[0],True\n \n if len(fn)!=1:\n continue\n \n modname=inspect.getmodulename(fn[0])\n if modname =='__init__':\n continue\n \n if modname and '.'not in modname and modname not in yielded:\n yielded[modname]=1\n yield prefix+modname,False\n \n iter_importer_modules.register(zipimporter,iter_zipimport_modules)\n \nexcept ImportError:\n pass\n \n \ndef get_importer(path_item):\n ''\n\n\n\n\n\n\n \n path_item=os.fsdecode(path_item)\n try:\n importer=sys.path_importer_cache[path_item]\n except KeyError:\n for path_hook in sys.path_hooks:\n try:\n importer=path_hook(path_item)\n sys.path_importer_cache.setdefault(path_item,importer)\n break\n except ImportError:\n pass\n else:\n importer=None\n return importer\n \n \ndef iter_importers(fullname=\"\"):\n ''\n\n\n\n\n\n\n\n\n\n \n if fullname.startswith('.'):\n msg=\"Relative module name {!r} not supported\".format(fullname)\n raise ImportError(msg)\n if '.'in fullname:\n \n pkg_name=fullname.rpartition(\".\")[0]\n pkg=importlib.import_module(pkg_name)\n path=getattr(pkg,'__path__',None)\n if path is None:\n return\n else:\n yield from sys.meta_path\n path=sys.path\n for item in path:\n yield get_importer(item)\n \n \ndef get_loader(module_or_name):\n ''\n\n\n\n\n \n warnings._deprecated(\"pkgutil.get_loader\",\n f\"{warnings._DEPRECATED_MSG}; \"\n \"use importlib.util.find_spec() instead\",\n remove=(3,14))\n if module_or_name in sys.modules:\n module_or_name=sys.modules[module_or_name]\n if module_or_name is None:\n return None\n if isinstance(module_or_name,ModuleType):\n module=module_or_name\n loader=getattr(module,'__loader__',None)\n if loader is not None:\n return loader\n if getattr(module,'__spec__',None)is None:\n return None\n fullname=module.__name__\n else:\n fullname=module_or_name\n return find_loader(fullname)\n \n \ndef find_loader(fullname):\n ''\n\n\n\n\n \n warnings._deprecated(\"pkgutil.find_loader\",\n 
f\"{warnings._DEPRECATED_MSG}; \"\n \"use importlib.util.find_spec() instead\",\n remove=(3,14))\n if fullname.startswith('.'):\n msg=\"Relative module name {!r} not supported\".format(fullname)\n raise ImportError(msg)\n try:\n spec=importlib.util.find_spec(fullname)\n except(ImportError,AttributeError,TypeError,ValueError)as ex:\n \n \n \n msg=\"Error while finding loader for {!r} ({}: {})\"\n raise ImportError(msg.format(fullname,type(ex),ex))from ex\n return spec.loader if spec is not None else None\n \n \ndef extend_path(path,name):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if not isinstance(path,list):\n \n \n return path\n \n sname_pkg=name+\".pkg\"\n \n path=path[:]\n \n parent_package,_,final_name=name.rpartition('.')\n if parent_package:\n try:\n search_path=sys.modules[parent_package].__path__\n except(KeyError,AttributeError):\n \n \n return path\n else:\n search_path=sys.path\n \n for dir in search_path:\n if not isinstance(dir,str):\n continue\n \n finder=get_importer(dir)\n if finder is not None:\n portions=[]\n if hasattr(finder,'find_spec'):\n spec=finder.find_spec(final_name)\n if spec is not None:\n portions=spec.submodule_search_locations or[]\n \n elif hasattr(finder,'find_loader'):\n _,portions=finder.find_loader(final_name)\n \n for portion in portions:\n \n \n if portion not in path:\n path.append(portion)\n \n \n \n pkgfile=os.path.join(dir,sname_pkg)\n if os.path.isfile(pkgfile):\n try:\n f=open(pkgfile)\n except OSError as msg:\n sys.stderr.write(\"Can't open %s: %s\\n\"%\n (pkgfile,msg))\n else:\n with f:\n for line in f:\n line=line.rstrip('\\n')\n if not line or line.startswith('#'):\n continue\n path.append(line)\n \n return path\n \n \ndef get_data(package,resource):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n spec=importlib.util.find_spec(package)\n if spec is None:\n return None\n loader=spec.loader\n if loader is None or not hasattr(loader,'get_data'):\n return None\n \n mod=(sys.modules.get(package)or\n importlib._bootstrap._load(spec))\n if mod is None or not hasattr(mod,'__file__'):\n return None\n \n \n \n \n parts=resource.split('/')\n parts.insert(0,os.path.dirname(mod.__file__))\n resource_name=os.path.join(*parts)\n return loader.get_data(resource_name)\n \n \n_NAME_PATTERN=None\n\ndef resolve_name(name):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global _NAME_PATTERN\n if _NAME_PATTERN is None:\n \n import re\n dotted_words=r'(?!\\d)(\\w+)(\\.(?!\\d)(\\w+))*'\n _NAME_PATTERN=re.compile(f'^(?P{dotted_words})'\n f'(?P:(?P{dotted_words})?)?$',\n re.UNICODE)\n \n m=_NAME_PATTERN.match(name)\n if not m:\n raise ValueError(f'invalid format: {name !r}')\n gd=m.groupdict()\n if gd.get('cln'):\n \n mod=importlib.import_module(gd['pkg'])\n parts=gd.get('obj')\n parts=parts.split('.')if parts else[]\n else:\n \n parts=name.split('.')\n modname=parts.pop(0)\n \n mod=importlib.import_module(modname)\n while parts:\n p=parts[0]\n s=f'{modname}.{p}'\n try:\n mod=importlib.import_module(s)\n parts.pop(0)\n modname=s\n except ImportError:\n break\n \n \n \n result=mod\n for p in parts:\n result=getattr(result,p)\n return result\n", ["collections", "functools", "importlib", "importlib.machinery", "importlib.util", "inspect", "marshal", "os", "os.path", "re", "sys", "types", "warnings", "zipimport"]], "_dummy_thread": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['error','start_new_thread','exit','get_ident','allocate_lock',\n'interrupt_main','LockType','RLock']\n\n\nTIMEOUT_MAX=2 
**31\n\n\n\n\n\n\nerror=RuntimeError\n\ndef start_new_thread(function,args,kwargs={}):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if type(args)!=type(tuple()):\n raise TypeError(\"2nd arg must be a tuple\")\n if type(kwargs)!=type(dict()):\n raise TypeError(\"3rd arg must be a dict\")\n global _main\n _main=False\n try:\n function(*args,**kwargs)\n except SystemExit:\n pass\n except:\n import traceback\n traceback.print_exc()\n _main=True\n global _interrupt\n if _interrupt:\n _interrupt=False\n raise KeyboardInterrupt\n \ndef exit():\n ''\n raise SystemExit\n \ndef get_ident():\n ''\n\n\n\n\n \n return 1\n \ndef allocate_lock():\n ''\n return LockType()\n \ndef stack_size(size=None):\n ''\n if size is not None:\n raise error(\"setting thread stack size not supported\")\n return 0\n \ndef _set_sentinel():\n ''\n return LockType()\n \nclass LockType(object):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self):\n self.locked_status=False\n \n def acquire(self,waitflag=None,timeout=-1):\n ''\n\n\n\n\n\n\n\n\n \n if waitflag is None or waitflag:\n self.locked_status=True\n return True\n else:\n if not self.locked_status:\n self.locked_status=True\n return True\n else:\n if timeout >0:\n import time\n time.sleep(timeout)\n return False\n \n __enter__=acquire\n \n def __exit__(self,typ,val,tb):\n self.release()\n \n def release(self):\n ''\n \n \n if not self.locked_status:\n raise error\n self.locked_status=False\n return True\n \n def locked(self):\n return self.locked_status\n \n def __repr__(self):\n return \"<%s %s.%s object at %s>\"%(\n \"locked\"if self.locked_status else \"unlocked\",\n self.__class__.__module__,\n self.__class__.__qualname__,\n hex(id(self))\n )\n \n \nclass RLock(LockType):\n ''\n\n\n\n\n\n \n def __init__(self):\n super().__init__()\n self._levels=0\n \n def acquire(self,waitflag=None,timeout=-1):\n ''\n \n locked=super().acquire(waitflag,timeout)\n if locked:\n self._levels +=1\n return locked\n \n def release(self):\n ''\n \n if self._levels ==0:\n raise error\n if self._levels ==1:\n super().release()\n self._levels -=1\n \n \n_interrupt=False\n\n_main=True\n\ndef interrupt_main():\n ''\n \n if _main:\n raise KeyboardInterrupt\n else:\n global _interrupt\n _interrupt=True\n", ["time", "traceback"]], "_struct": [".py", "\n\n\n\n\n\n\n\n\n\n\n\"\"\"Functions to convert between Python values and C structs.\nPython strings are used to hold the data representing the C struct\nand also as format strings to describe the layout of data in the C struct.\n\nThe optional first format char indicates byte order, size and alignment:\n @: native order, size & alignment (default)\n =: native order, std. size & alignment\n <: little-endian, std. size & alignment\n >: big-endian, std. 
size & alignment\n !: same as >\n\nThe remaining chars indicate types of args and must match exactly;\nthese can be preceded by a decimal repeat count:\n x: pad byte (no data);\n c:char;\n b:signed byte;\n B:unsigned byte;\n h:short;\n H:unsigned short;\n i:int;\n I:unsigned int;\n l:long;\n L:unsigned long;\n f:float;\n d:double.\nSpecial cases (preceding decimal count indicates length):\n s:string (array of char); p: pascal string (with count byte).\nSpecial case (only available in native format):\n P:an integer type that is wide enough to hold a pointer.\nSpecial case (not in native mode unless 'long long' in platform C):\n q:long long;\n Q:unsigned long long\nWhitespace between formats is ignored.\n\nThe variable struct.error is an exception raised on errors.\"\"\"\n\nimport math\nimport re\nimport sys\n\n\nclass StructError(Exception):\n pass\n \n \nerror=StructError\n\ndef _normalize(fmt):\n ''\n \n if re.search(r\"\\d\\s+\",fmt):\n raise StructError(\"bad char in struct format\")\n return fmt.replace(\" \",\"\")\n \ndef unpack_int(data,index,size,le):\n bytes=[b for b in data[index:index+size]]\n if le =='little':\n bytes.reverse()\n number=0\n for b in bytes:\n number=number <<8 |b\n return int(number)\n \ndef unpack_signed_int(data,index,size,le):\n number=unpack_int(data,index,size,le)\n max=2 **(size *8)\n if number >2 **(size *8 -1)-1:\n number=int(-1 *(max -number))\n return number\n \nINFINITY=1e200 *1e200\nNAN=INFINITY /INFINITY\n\nBIG_ENDIAN=0\nLITTLE_ENDIAN=1\n\ndef unpack_char(data,index,size,le):\n return data[index:index+size]\n \ndef pack_int(number,size,le):\n x=number\n res=[]\n for i in range(size):\n res.append(x&0xff)\n x >>=8\n if le =='big':\n res.reverse()\n return bytes(res)\n \ndef pack_signed_int(number,size,le):\n if not isinstance(number,int):\n raise StructError(\"argument for i,I,l,L,q,Q,h,H must be integer\")\n if number >2 **(8 *size -1)-1 or number <-1 *2 **(8 *size -1):\n raise OverflowError(\"Number:%i too large to convert\"%number)\n return pack_int(number,size,le)\n \ndef pack_unsigned_int(number,size,le):\n if not isinstance(number,int):\n raise StructError(\"argument for i,I,l,L,q,Q,h,H must be integer\")\n if number <0:\n raise TypeError(\"can't convert negative long to unsigned\")\n if number >2 **(8 *size)-1:\n raise OverflowError(\"Number:%i too large to convert\"%number)\n return pack_int(number,size,le)\n \ndef pack_char(char,size,le):\n return bytes(char)\n \ndef isinf(x):\n return x !=0.0 and x /2 ==x\n \ndef isnan(v):\n return v !=v *1.0 or(v ==1.0 and v ==2.0)\n \ndef pack_float(x,size,le):\n unsigned=float_pack(x,size)\n result=[]\n for i in range(size):\n result.append((unsigned >>(i *8))&0xFF)\n if le ==\"big\":\n result.reverse()\n return bytes(result)\n \ndef unpack_float(data,index,size,le):\n binary=[data[i]for i in range(index,index+size)]\n if le ==\"big\":\n binary.reverse()\n unsigned=0\n for i in range(size):\n unsigned |=binary[i]<<(i *8)\n return float_unpack(unsigned,size,le)\n \ndef round_to_nearest(x):\n ''\n\n\n\n\n\n\n\n\n \n int_part=int(x)\n frac_part=x -int_part\n if frac_part >0.5 or frac_part ==0.5 and int_part&1 ==1:\n int_part +=1\n return int_part\n \ndef float_unpack(Q,size,order=LITTLE_ENDIAN):\n ''\n \n \n if size ==8:\n MIN_EXP=-1021\n MAX_EXP=1024\n MANT_DIG=53\n BITS=64\n elif size ==4:\n MIN_EXP=-125\n MAX_EXP=128\n MANT_DIG=24\n BITS=32\n else:\n raise ValueError(\"invalid size value\")\n \n if Q >>BITS:\n raise ValueError(\"input out of range\")\n \n \n sign=Q >>BITS -1\n exp=(Q&((1 <>MANT_DIG -1\n 
mant=Q&((1 <0:\n \n mant=round_to_nearest(m *(1 <=0:\n mant=round_to_nearest(m *(1 <=MAX_EXP -MIN_EXP+2:\n raise OverflowError(\"float too large to pack in this format\")\n \n \n assert 0 <=mant <1 <':(default,'big'),\n'!':(default,'big'),\n'=':(default,sys.byteorder),\n'@':(default,sys.byteorder)\n}\n\ndef _getmode(fmt):\n try:\n formatdef,endianness=formatmode[fmt[0]]\n alignment=fmt[0]not in formatmode or fmt[0]=='@'\n index=1\n except(IndexError,KeyError):\n formatdef,endianness=formatmode['@']\n alignment=True\n index=0\n return formatdef,endianness,index,alignment\n \ndef _getnum(fmt,i):\n num=None\n cur=fmt[i]\n while('0'<=cur)and(cur <='9'):\n if num ==None:\n num=int(cur)\n else:\n num=10 *num+int(cur)\n i +=1\n cur=fmt[i]\n return num,i\n \ndef calcsize(fmt):\n ''\n\n \n if isinstance(fmt,bytes):\n fmt=fmt.decode(\"ascii\")\n \n fmt=_normalize(fmt)\n \n formatdef,endianness,i,alignment=_getmode(fmt)\n num=0\n result=0\n while i 0:\n result +=[bytes([len(args[0])])+args[0][:num -1]+\n b'\\0'*padding]\n else:\n if num <255:\n result +=[bytes([num -1])+args[0][:num -1]]\n else:\n result +=[bytes([255])+args[0][:num -1]]\n args.pop(0)\n else:\n raise StructError(\"arg for string format not a string\")\n \n else:\n if len(args)=num:\n n=num -1\n result.append(data[j+1:j+n+1])\n j +=num\n else:\n \n if j >0 and alignment:\n padding=format['size']-j %format['size']\n j +=padding\n for n in range(num):\n result +=[format['unpack'](data,j,format['size'],\n endianness)]\n j +=format['size']\n \n return tuple(result)\n \ndef pack_into(fmt,buf,offset,*args):\n data=pack(fmt,*args)\n buf[offset:offset+len(data)]=data\n \ndef unpack_from(fmt,buf,offset=0):\n size=calcsize(fmt)\n data=buf[offset:offset+size]\n if len(data)!=size:\n raise error(\"unpack_from requires a buffer of at least %d bytes\"\n %(size,))\n return unpack(fmt,data)\n \ndef _clearcache():\n ''\n \n \nclass Struct:\n\n def __init__(self,fmt):\n self.format=fmt\n \n def pack(self,*args):\n return pack(self.format,*args)\n \n def pack_into(self,*args):\n return pack_into(self.format,*args)\n \n def unpack(self,*args):\n return unpack(self.format,*args)\n \n def unpack_from(self,*args):\n return unpack_from(self.format,*args)\n \nif __name__ =='__main__':\n t=pack('Bf',1,2)\n print(t,len(t))\n print(unpack('Bf',t))\n print(calcsize('Bf'))\n \n", ["math", "re", "sys"]], "time": [".py", "from browser import self as window\nimport _locale\nimport javascript\n\n\ndate=javascript.Date.new\nnow=javascript.Date.now\n\n\n\n\n\n\n\n_STRUCT_TM_ITEMS=9\n\n\n\n\n\ndef _get_day_of_year(arg):\n ''\n\n\n\n\n\n\n\n\n\n \n ml=[31,28,31,30,31,30,31,31,30,31,30,31]\n if arg[0]%4 ==0:\n ml[1]+=1\n i=1\n yday=0\n while i mm >13:\n raise ValueError(\"month out of range\")\n \n dd=t[2]\n if dd ==0:dd=1\n if -1 >dd >32:\n raise ValueError(\"day of month out of range\")\n \n hh=t[3]\n if -1 >hh >24:\n raise ValueError(\"hour out of range\")\n \n minu=t[4]\n if -1 >minu >60:\n raise ValueError(\"minute out of range\")\n \n ss=t[5]\n if -1 >ss >62:\n raise ValueError(\"seconds out of range\")\n \n wd=t[6]%7\n if wd <-2:\n raise ValueError(\"day of week out of range\")\n \n dy=t[7]\n if dy ==0:dy=1\n if -1 >dy >367:\n raise ValueError(\"day of year out of range\")\n \n return t[0],mm,dd,hh,minu,ss,wd,dy,t[-1]\n \n \ndef _is_dst(secs=None):\n ''\n d=date()\n if secs is not None:\n d=date(secs *1000)\n \n \n \n jan=date(d.getFullYear(),0,1)\n jul=date(d.getFullYear(),6,1)\n dst=int(d.getTimezoneOffset()=0 else 6\n tmp=struct_time([d.getUTCFullYear(),\n 
d.getUTCMonth()+1,d.getUTCDate(),\n d.getUTCHours(),d.getUTCMinutes(),d.getUTCSeconds(),\n wday,0,0])\n tmp.args[7]=_get_day_of_year(tmp.args)\n return tmp\n \ndef localtime(secs=None):\n d=date()\n if secs is not None:\n d=date(secs *1000)\n dst=_is_dst(secs)\n wday=d.getDay()-1 if d.getDay()-1 >=0 else 6\n tmp=struct_time([d.getFullYear(),\n d.getMonth()+1,d.getDate(),\n d.getHours(),d.getMinutes(),d.getSeconds(),\n wday,0,dst])\n tmp.args[7]=_get_day_of_year(tmp.args)\n return tmp\n \ndef mktime(t):\n if isinstance(t,struct_time):\n d1=date(t.tm_year,t.tm_mon -1,t.tm_mday,\n t.tm_hour,t.tm_min,t.tm_sec,0).getTime()\n elif isinstance(t,tuple):\n d1=date(t[0],t[1]-1,t[2],t[3],t[4],t[5],0).getTime()\n else:\n raise ValueError(\"Tuple or struct_time argument required\")\n d2=date(0).getTime()\n return(d1 -d2)/1000.\n \ndef monotonic():\n return now()/1000.\n \ndef perf_counter():\n return window.performance.now()/1000.\n \ndef process_time():\n return now()/1000.\n \ndef time():\n return float(date().getTime()/1000)\n \ndef sleep(secs):\n ''\n\n \n \n float(secs)\n raise NotImplementedError(\"Blocking functions like time.sleep() are not \"\n \"supported in the browser. Use functions in module browser.timer \"\n \"instead.\")\n \ndef strftime(_format,t=None):\n def ns(t,nb):\n \n res=str(t)\n while len(res)>4)&0x3)),\n chr(((B -0x20)&0xf)<<4 |(((C -0x20)>>2)&0xf)),\n chr(((C -0x20)&0x3)<<6 |((D -0x20)&0x3f))\n ])for A,B,C,D in quadruplets_gen(s[1:].rstrip())]\n except ValueError:\n raise Error('Illegal char')\n result=''.join(result)\n trailingdata=result[length:]\n if trailingdata.strip('\\x00'):\n raise Error('Trailing garbage')\n result=result[:length]\n if len(result)>2)&0x3F],\n table_b2a_base64[((A <<4)|((B >>4)&0xF))&0x3F],\n table_b2a_base64[((B <<2)|((C >>6)&0x3))&0x3F],\n table_b2a_base64[(C)&0x3F]])\n for A,B,C in a]\n \n final=s[length -final_length:]\n if final_length ==0:\n snippet=''\n elif final_length ==1:\n a=final[0]\n snippet=table_b2a_base64[(a >>2)&0x3F]+\\\n table_b2a_base64[(a <<4)&0x3F]+'=='\n else:\n a=final[0]\n b=final[1]\n snippet=table_b2a_base64[(a >>2)&0x3F]+\\\n table_b2a_base64[((a <<4)|(b >>4)&0xF)&0x3F]+\\\n table_b2a_base64[(b <<2)&0x3F]+'='\n \n result=''.join(result)+snippet\n if newline:\n result +='\\n'\n return bytes(result,__BRYTHON__.charset)\n \ndef a2b_qp(s,header=False):\n inp=0\n odata=[]\n while inp =len(s):\n break\n \n if(s[inp]=='\\n')or(s[inp]=='\\r'):\n if s[inp]!='\\n':\n while inp 0 and data[lf -1]=='\\r'\n \n inp=0\n linelen=0\n odata=[]\n while inp '~'or\n c =='='or\n (header and c =='_')or\n (c =='.'and linelen ==0 and(inp+1 ==len(data)or\n data[inp+1]=='\\n'or\n data[inp+1]=='\\r'))or\n (not istext and(c =='\\r'or c =='\\n'))or\n ((c =='\\t'or c ==' ')and(inp+1 ==len(data)))or\n (c <=' 'and c !='\\r'and c !='\\n'and\n (quotetabs or(not quotetabs and(c !='\\t'and c !=' '))))):\n linelen +=3\n if linelen >=MAXLINESIZE:\n odata.append('=')\n if crlf:odata.append('\\r')\n odata.append('\\n')\n linelen=3\n odata.append('='+two_hex_digits(ord(c)))\n inp +=1\n else:\n if(istext and\n (c =='\\n'or(inp+1 0 and\n (odata[-1]==' 'or odata[-1]=='\\t')):\n ch=ord(odata[-1])\n odata[-1]='='\n odata.append(two_hex_digits(ch))\n \n if crlf:odata.append('\\r')\n odata.append('\\n')\n if c =='\\r':\n inp +=2\n else:\n inp +=1\n else:\n if(inp+1 =MAXLINESIZE):\n odata.append('=')\n if crlf:odata.append('\\r')\n odata.append('\\n')\n linelen=0\n \n linelen +=1\n if header and c ==' ':\n c='_'\n odata.append(c)\n inp +=1\n return ''.join(odata)\n 
\nhex_numbers='0123456789ABCDEF'\ndef hex(n):\n if n ==0:\n return '0'\n \n if n <0:\n n=-n\n sign='-'\n else:\n sign=''\n arr=[]\n \n def hex_gen(n):\n ''\n while n:\n yield n %0x10\n n=n /0x10\n \n for nibble in hex_gen(n):\n arr=[hex_numbers[nibble]]+arr\n return sign+''.join(arr)\n \ndef two_hex_digits(n):\n return hex_numbers[n /0x10]+hex_numbers[n %0x10]\n \n \ndef strhex_to_int(s):\n i=0\n for c in s:\n i=i *0x10+hex_numbers.index(c)\n return i\n \nhqx_encoding='!\"#$%&\\'()*+,-012345689@ABCDEFGHIJKLMNPQRSTUVXYZ[`abcdefhijklmpqr'\n\nDONE=0x7f\nSKIP=0x7e\nFAIL=0x7d\n\ntable_a2b_hqx=[\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,SKIP,FAIL,FAIL,SKIP,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,0x00,0x01,0x02,0x03,0x04,0x05,0x06,\n\n0x07,0x08,0x09,0x0A,0x0B,0x0C,FAIL,FAIL,\n\n0x0D,0x0E,0x0F,0x10,0x11,0x12,0x13,FAIL,\n\n0x14,0x15,DONE,FAIL,FAIL,FAIL,FAIL,FAIL,\n\n0x16,0x17,0x18,0x19,0x1A,0x1B,0x1C,0x1D,\n\n0x1E,0x1F,0x20,0x21,0x22,0x23,0x24,FAIL,\n\n0x25,0x26,0x27,0x28,0x29,0x2A,0x2B,FAIL,\n\n0x2C,0x2D,0x2E,0x2F,FAIL,FAIL,FAIL,FAIL,\n\n0x30,0x31,0x32,0x33,0x34,0x35,0x36,FAIL,\n\n0x37,0x38,0x39,0x3A,0x3B,0x3C,FAIL,FAIL,\n\n0x3D,0x3E,0x3F,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n]\n\ndef a2b_hqx(s):\n result=[]\n \n def quadruples_gen(s):\n t=[]\n for c in s:\n res=table_a2b_hqx[ord(c)]\n if res ==SKIP:\n continue\n elif res ==FAIL:\n raise Error('Illegal character')\n elif res ==DONE:\n yield t\n raise Done\n else:\n t.append(res)\n if len(t)==4:\n yield t\n t=[]\n yield t\n \n done=0\n try:\n for snippet in quadruples_gen(s):\n length=len(snippet)\n if length ==4:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n result.append(chr(((snippet[1]&0x0f)<<4)|(snippet[2]>>2)))\n result.append(chr(((snippet[2]&0x03)<<6)|(snippet[3])))\n elif length ==3:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n result.append(chr(((snippet[1]&0x0f)<<4)|(snippet[2]>>2)))\n elif length ==2:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n except Done:\n done=1\n except Error:\n raise\n return(''.join(result),done)\n \n \n \ndef b2a_hqx(s):\n result=[]\n \n def triples_gen(s):\n while s:\n try:\n yield ord(s[0]),ord(s[1]),ord(s[2])\n except IndexError:\n yield tuple([ord(c)for c in s])\n s=s[3:]\n \n for snippet in triples_gen(s):\n length=len(snippet)\n if length ==3:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)|((snippet[1]&0xf0)>>4)])\n result.append(hqx_encoding[\n (snippet[1]&0x0f)<<2 |((snippet[2]&0xc0)>>6)])\n result.append(hqx_encoding[snippet[2]&0x3f])\n elif length ==2:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)|((snippet[1]&0xf0)>>4)])\n result.append(hqx_encoding[\n 
(snippet[1]&0x0f)<<2])\n elif length ==1:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)])\n return ''.join(result)\n \ncrctab_hqx=[\n0x0000,0x1021,0x2042,0x3063,0x4084,0x50a5,0x60c6,0x70e7,\n0x8108,0x9129,0xa14a,0xb16b,0xc18c,0xd1ad,0xe1ce,0xf1ef,\n0x1231,0x0210,0x3273,0x2252,0x52b5,0x4294,0x72f7,0x62d6,\n0x9339,0x8318,0xb37b,0xa35a,0xd3bd,0xc39c,0xf3ff,0xe3de,\n0x2462,0x3443,0x0420,0x1401,0x64e6,0x74c7,0x44a4,0x5485,\n0xa56a,0xb54b,0x8528,0x9509,0xe5ee,0xf5cf,0xc5ac,0xd58d,\n0x3653,0x2672,0x1611,0x0630,0x76d7,0x66f6,0x5695,0x46b4,\n0xb75b,0xa77a,0x9719,0x8738,0xf7df,0xe7fe,0xd79d,0xc7bc,\n0x48c4,0x58e5,0x6886,0x78a7,0x0840,0x1861,0x2802,0x3823,\n0xc9cc,0xd9ed,0xe98e,0xf9af,0x8948,0x9969,0xa90a,0xb92b,\n0x5af5,0x4ad4,0x7ab7,0x6a96,0x1a71,0x0a50,0x3a33,0x2a12,\n0xdbfd,0xcbdc,0xfbbf,0xeb9e,0x9b79,0x8b58,0xbb3b,0xab1a,\n0x6ca6,0x7c87,0x4ce4,0x5cc5,0x2c22,0x3c03,0x0c60,0x1c41,\n0xedae,0xfd8f,0xcdec,0xddcd,0xad2a,0xbd0b,0x8d68,0x9d49,\n0x7e97,0x6eb6,0x5ed5,0x4ef4,0x3e13,0x2e32,0x1e51,0x0e70,\n0xff9f,0xefbe,0xdfdd,0xcffc,0xbf1b,0xaf3a,0x9f59,0x8f78,\n0x9188,0x81a9,0xb1ca,0xa1eb,0xd10c,0xc12d,0xf14e,0xe16f,\n0x1080,0x00a1,0x30c2,0x20e3,0x5004,0x4025,0x7046,0x6067,\n0x83b9,0x9398,0xa3fb,0xb3da,0xc33d,0xd31c,0xe37f,0xf35e,\n0x02b1,0x1290,0x22f3,0x32d2,0x4235,0x5214,0x6277,0x7256,\n0xb5ea,0xa5cb,0x95a8,0x8589,0xf56e,0xe54f,0xd52c,0xc50d,\n0x34e2,0x24c3,0x14a0,0x0481,0x7466,0x6447,0x5424,0x4405,\n0xa7db,0xb7fa,0x8799,0x97b8,0xe75f,0xf77e,0xc71d,0xd73c,\n0x26d3,0x36f2,0x0691,0x16b0,0x6657,0x7676,0x4615,0x5634,\n0xd94c,0xc96d,0xf90e,0xe92f,0x99c8,0x89e9,0xb98a,0xa9ab,\n0x5844,0x4865,0x7806,0x6827,0x18c0,0x08e1,0x3882,0x28a3,\n0xcb7d,0xdb5c,0xeb3f,0xfb1e,0x8bf9,0x9bd8,0xabbb,0xbb9a,\n0x4a75,0x5a54,0x6a37,0x7a16,0x0af1,0x1ad0,0x2ab3,0x3a92,\n0xfd2e,0xed0f,0xdd6c,0xcd4d,0xbdaa,0xad8b,0x9de8,0x8dc9,\n0x7c26,0x6c07,0x5c64,0x4c45,0x3ca2,0x2c83,0x1ce0,0x0cc1,\n0xef1f,0xff3e,0xcf5d,0xdf7c,0xaf9b,0xbfba,0x8fd9,0x9ff8,\n0x6e17,0x7e36,0x4e55,0x5e74,0x2e93,0x3eb2,0x0ed1,0x1ef0,\n]\n\ndef crc_hqx(s,crc):\n for c in s:\n crc=((crc <<8)&0xff00)^crctab_hqx[((crc >>8)&0xff)^ord(c)]\n \n return crc\n \ndef rlecode_hqx(s):\n ''\n\n\n\n \n if not s:\n return ''\n result=[]\n prev=s[0]\n count=1\n \n \n \n \n if s[-1]=='!':\n s=s[1:]+'?'\n else:\n s=s[1:]+'!'\n \n for c in s:\n if c ==prev and count <255:\n count +=1\n else:\n if count ==1:\n if prev !='\\x90':\n result.append(prev)\n else:\n result.extend(['\\x90','\\x00'])\n elif count <4:\n if prev !='\\x90':\n result.extend([prev]*count)\n else:\n result.extend(['\\x90','\\x00']*count)\n else:\n if prev !='\\x90':\n result.extend([prev,'\\x90',chr(count)])\n else:\n result.extend(['\\x90','\\x00','\\x90',chr(count)])\n count=1\n prev=c\n \n return ''.join(result)\n \ndef rledecode_hqx(s):\n s=s.split('\\x90')\n result=[s[0]]\n prev=s[0]\n for snippet in s[1:]:\n count=ord(snippet[0])\n if count >0:\n result.append(prev[-1]*(count -1))\n prev=snippet\n else:\n result.append('\\x90')\n prev='\\x90'\n result.append(snippet[1:])\n \n return ''.join(result)\n 
\ncrc_32_tab=[\n0x00000000,0x77073096,0xee0e612c,0x990951ba,0x076dc419,\n0x706af48f,0xe963a535,0x9e6495a3,0x0edb8832,0x79dcb8a4,\n0xe0d5e91e,0x97d2d988,0x09b64c2b,0x7eb17cbd,0xe7b82d07,\n0x90bf1d91,0x1db71064,0x6ab020f2,0xf3b97148,0x84be41de,\n0x1adad47d,0x6ddde4eb,0xf4d4b551,0x83d385c7,0x136c9856,\n0x646ba8c0,0xfd62f97a,0x8a65c9ec,0x14015c4f,0x63066cd9,\n0xfa0f3d63,0x8d080df5,0x3b6e20c8,0x4c69105e,0xd56041e4,\n0xa2677172,0x3c03e4d1,0x4b04d447,0xd20d85fd,0xa50ab56b,\n0x35b5a8fa,0x42b2986c,0xdbbbc9d6,0xacbcf940,0x32d86ce3,\n0x45df5c75,0xdcd60dcf,0xabd13d59,0x26d930ac,0x51de003a,\n0xc8d75180,0xbfd06116,0x21b4f4b5,0x56b3c423,0xcfba9599,\n0xb8bda50f,0x2802b89e,0x5f058808,0xc60cd9b2,0xb10be924,\n0x2f6f7c87,0x58684c11,0xc1611dab,0xb6662d3d,0x76dc4190,\n0x01db7106,0x98d220bc,0xefd5102a,0x71b18589,0x06b6b51f,\n0x9fbfe4a5,0xe8b8d433,0x7807c9a2,0x0f00f934,0x9609a88e,\n0xe10e9818,0x7f6a0dbb,0x086d3d2d,0x91646c97,0xe6635c01,\n0x6b6b51f4,0x1c6c6162,0x856530d8,0xf262004e,0x6c0695ed,\n0x1b01a57b,0x8208f4c1,0xf50fc457,0x65b0d9c6,0x12b7e950,\n0x8bbeb8ea,0xfcb9887c,0x62dd1ddf,0x15da2d49,0x8cd37cf3,\n0xfbd44c65,0x4db26158,0x3ab551ce,0xa3bc0074,0xd4bb30e2,\n0x4adfa541,0x3dd895d7,0xa4d1c46d,0xd3d6f4fb,0x4369e96a,\n0x346ed9fc,0xad678846,0xda60b8d0,0x44042d73,0x33031de5,\n0xaa0a4c5f,0xdd0d7cc9,0x5005713c,0x270241aa,0xbe0b1010,\n0xc90c2086,0x5768b525,0x206f85b3,0xb966d409,0xce61e49f,\n0x5edef90e,0x29d9c998,0xb0d09822,0xc7d7a8b4,0x59b33d17,\n0x2eb40d81,0xb7bd5c3b,0xc0ba6cad,0xedb88320,0x9abfb3b6,\n0x03b6e20c,0x74b1d29a,0xead54739,0x9dd277af,0x04db2615,\n0x73dc1683,0xe3630b12,0x94643b84,0x0d6d6a3e,0x7a6a5aa8,\n0xe40ecf0b,0x9309ff9d,0x0a00ae27,0x7d079eb1,0xf00f9344,\n0x8708a3d2,0x1e01f268,0x6906c2fe,0xf762575d,0x806567cb,\n0x196c3671,0x6e6b06e7,0xfed41b76,0x89d32be0,0x10da7a5a,\n0x67dd4acc,0xf9b9df6f,0x8ebeeff9,0x17b7be43,0x60b08ed5,\n0xd6d6a3e8,0xa1d1937e,0x38d8c2c4,0x4fdff252,0xd1bb67f1,\n0xa6bc5767,0x3fb506dd,0x48b2364b,0xd80d2bda,0xaf0a1b4c,\n0x36034af6,0x41047a60,0xdf60efc3,0xa867df55,0x316e8eef,\n0x4669be79,0xcb61b38c,0xbc66831a,0x256fd2a0,0x5268e236,\n0xcc0c7795,0xbb0b4703,0x220216b9,0x5505262f,0xc5ba3bbe,\n0xb2bd0b28,0x2bb45a92,0x5cb36a04,0xc2d7ffa7,0xb5d0cf31,\n0x2cd99e8b,0x5bdeae1d,0x9b64c2b0,0xec63f226,0x756aa39c,\n0x026d930a,0x9c0906a9,0xeb0e363f,0x72076785,0x05005713,\n0x95bf4a82,0xe2b87a14,0x7bb12bae,0x0cb61b38,0x92d28e9b,\n0xe5d5be0d,0x7cdcefb7,0x0bdbdf21,0x86d3d2d4,0xf1d4e242,\n0x68ddb3f8,0x1fda836e,0x81be16cd,0xf6b9265b,0x6fb077e1,\n0x18b74777,0x88085ae6,0xff0f6a70,0x66063bca,0x11010b5c,\n0x8f659eff,0xf862ae69,0x616bffd3,0x166ccf45,0xa00ae278,\n0xd70dd2ee,0x4e048354,0x3903b3c2,0xa7672661,0xd06016f7,\n0x4969474d,0x3e6e77db,0xaed16a4a,0xd9d65adc,0x40df0b66,\n0x37d83bf0,0xa9bcae53,0xdebb9ec5,0x47b2cf7f,0x30b5ffe9,\n0xbdbdf21c,0xcabac28a,0x53b39330,0x24b4a3a6,0xbad03605,\n0xcdd70693,0x54de5729,0x23d967bf,0xb3667a2e,0xc4614ab8,\n0x5d681b02,0x2a6f2b94,0xb40bbe37,0xc30c8ea1,0x5a05df1b,\n0x2d02ef8d\n]\n\ndef crc32(s,crc=0):\n result=0\n crc=~int(crc)&0xffffffff\n \n for c in s:\n crc=crc_32_tab[(crc ^int(ord(c)))&0xff]^(crc >>8)\n \n \n \n result=crc ^0xffffffff\n \n if result >2 **31:\n result=((result+2 **31)%2 **32)-2 **31\n \n return result\n", ["_base64", "_binascii"]], "gzip": [".py", "''\n\n\n\n\n\n\nimport struct,sys,time,os\nimport zlib\nimport builtins\nimport io\nimport 
_compression\n\n__all__=[\"BadGzipFile\",\"GzipFile\",\"open\",\"compress\",\"decompress\"]\n\nFTEXT,FHCRC,FEXTRA,FNAME,FCOMMENT=1,2,4,8,16\n\nREAD,WRITE=1,2\n\n_COMPRESS_LEVEL_FAST=1\n_COMPRESS_LEVEL_TRADEOFF=6\n_COMPRESS_LEVEL_BEST=9\n\n\ndef open(filename,mode=\"rb\",compresslevel=_COMPRESS_LEVEL_BEST,\nencoding=None,errors=None,newline=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if \"t\"in mode:\n if \"b\"in mode:\n raise ValueError(\"Invalid mode: %r\"%(mode,))\n else:\n if encoding is not None:\n raise ValueError(\"Argument 'encoding' not supported in binary mode\")\n if errors is not None:\n raise ValueError(\"Argument 'errors' not supported in binary mode\")\n if newline is not None:\n raise ValueError(\"Argument 'newline' not supported in binary mode\")\n \n gz_mode=mode.replace(\"t\",\"\")\n if isinstance(filename,(str,bytes,os.PathLike)):\n binary_file=GzipFile(filename,gz_mode,compresslevel)\n elif hasattr(filename,\"read\")or hasattr(filename,\"write\"):\n binary_file=GzipFile(None,gz_mode,compresslevel,filename)\n else:\n raise TypeError(\"filename must be a str or bytes object, or a file\")\n \n if \"t\"in mode:\n encoding=io.text_encoding(encoding)\n return io.TextIOWrapper(binary_file,encoding,errors,newline)\n else:\n return binary_file\n \ndef write32u(output,value):\n\n\n output.write(struct.pack(\"'\n \n def _init_write(self,filename):\n self.name=filename\n self.crc=zlib.crc32(b\"\")\n self.size=0\n self.writebuf=[]\n self.bufsize=0\n self.offset=0\n \n def _write_gzip_header(self,compresslevel):\n self.fileobj.write(b'\\037\\213')\n self.fileobj.write(b'\\010')\n try:\n \n \n fname=os.path.basename(self.name)\n if not isinstance(fname,bytes):\n fname=fname.encode('latin-1')\n if fname.endswith(b'.gz'):\n fname=fname[:-3]\n except UnicodeEncodeError:\n fname=b''\n flags=0\n if fname:\n flags=FNAME\n self.fileobj.write(chr(flags).encode('latin-1'))\n mtime=self._write_mtime\n if mtime is None:\n mtime=time.time()\n write32u(self.fileobj,int(mtime))\n if compresslevel ==_COMPRESS_LEVEL_BEST:\n xfl=b'\\002'\n elif compresslevel ==_COMPRESS_LEVEL_FAST:\n xfl=b'\\004'\n else:\n xfl=b'\\000'\n self.fileobj.write(xfl)\n self.fileobj.write(b'\\377')\n if fname:\n self.fileobj.write(fname+b'\\000')\n \n def write(self,data):\n self._check_not_closed()\n if self.mode !=WRITE:\n import errno\n raise OSError(errno.EBADF,\"write() on read-only GzipFile object\")\n \n if self.fileobj is None:\n raise ValueError(\"write() on closed GzipFile object\")\n \n if isinstance(data,(bytes,bytearray)):\n length=len(data)\n else:\n \n data=memoryview(data)\n length=data.nbytes\n \n if length >0:\n self.fileobj.write(self.compress.compress(data))\n self.size +=length\n self.crc=zlib.crc32(data,self.crc)\n self.offset +=length\n \n return length\n \n def read(self,size=-1):\n self._check_not_closed()\n if self.mode !=READ:\n import errno\n raise OSError(errno.EBADF,\"read() on write-only GzipFile object\")\n return self._buffer.read(size)\n \n def read1(self,size=-1):\n ''\n\n \n self._check_not_closed()\n if self.mode !=READ:\n import errno\n raise OSError(errno.EBADF,\"read1() on write-only GzipFile object\")\n \n if size <0:\n size=io.DEFAULT_BUFFER_SIZE\n return self._buffer.read1(size)\n \n def peek(self,n):\n self._check_not_closed()\n if self.mode !=READ:\n import errno\n raise OSError(errno.EBADF,\"peek() on write-only GzipFile object\")\n return self._buffer.peek(n)\n \n @property\n def closed(self):\n return self.fileobj is None\n \n def close(self):\n fileobj=self.fileobj\n if 
fileobj is None:\n return\n self.fileobj=None\n try:\n if self.mode ==WRITE:\n fileobj.write(self.compress.flush())\n write32u(fileobj,self.crc)\n \n write32u(fileobj,self.size&0xffffffff)\n elif self.mode ==READ:\n self._buffer.close()\n finally:\n myfileobj=self.myfileobj\n if myfileobj:\n self.myfileobj=None\n myfileobj.close()\n \n def flush(self,zlib_mode=zlib.Z_SYNC_FLUSH):\n self._check_not_closed()\n if self.mode ==WRITE:\n \n self.fileobj.write(self.compress.flush(zlib_mode))\n self.fileobj.flush()\n \n def fileno(self):\n ''\n\n\n\n \n return self.fileobj.fileno()\n \n def rewind(self):\n ''\n \n if self.mode !=READ:\n raise OSError(\"Can't rewind in write mode\")\n self._buffer.seek(0)\n \n def readable(self):\n return self.mode ==READ\n \n def writable(self):\n return self.mode ==WRITE\n \n def seekable(self):\n return True\n \n def seek(self,offset,whence=io.SEEK_SET):\n if self.mode ==WRITE:\n if whence !=io.SEEK_SET:\n if whence ==io.SEEK_CUR:\n offset=self.offset+offset\n else:\n raise ValueError('Seek from end not supported')\n if offset 2:\n raise AddressValueError(f\"Only one '/' permitted in {address !r}\")\n return addr\n \n \ndef _find_address_range(addresses):\n ''\n\n\n\n\n\n\n\n \n it=iter(addresses)\n first=last=next(it)\n for ip in it:\n if ip._ip !=last._ip+1:\n yield first,last\n first=ip\n last=ip\n yield first,last\n \n \ndef _count_righthand_zero_bits(number,bits):\n ''\n\n\n\n\n\n\n\n\n \n if number ==0:\n return bits\n return min(bits,(~number&(number -1)).bit_length())\n \n \ndef summarize_address_range(first,last):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if(not(isinstance(first,_BaseAddress)and\n isinstance(last,_BaseAddress))):\n raise TypeError('first and last must be IP addresses, not networks')\n if first.version !=last.version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n first,last))\n if first >last:\n raise ValueError('last IP address must be greater than first')\n \n if first.version ==4:\n ip=IPv4Network\n elif first.version ==6:\n ip=IPv6Network\n else:\n raise ValueError('unknown IP version')\n \n ip_bits=first._max_prefixlen\n first_int=first._ip\n last_int=last._ip\n while first_int <=last_int:\n nbits=min(_count_righthand_zero_bits(first_int,ip_bits),\n (last_int -first_int+1).bit_length()-1)\n net=ip((first_int,ip_bits -nbits))\n yield net\n first_int +=1 <=net.broadcast_address:\n continue\n yield net\n last=net\n \n \ndef collapse_addresses(addresses):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n addrs=[]\n ips=[]\n nets=[]\n \n \n for ip in addresses:\n if isinstance(ip,_BaseAddress):\n if ips and ips[-1]._version !=ip._version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n ip,ips[-1]))\n ips.append(ip)\n elif ip._prefixlen ==ip._max_prefixlen:\n if ips and ips[-1]._version !=ip._version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n ip,ips[-1]))\n try:\n ips.append(ip.ip)\n except AttributeError:\n ips.append(ip.network_address)\n else:\n if nets and nets[-1]._version !=ip._version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n ip,nets[-1]))\n nets.append(ip)\n \n \n ips=sorted(set(ips))\n \n \n if ips:\n for first,last in _find_address_range(ips):\n addrs.extend(summarize_address_range(first,last))\n \n return _collapse_addresses_internal(addrs+nets)\n \n \ndef get_mixed_type_key(obj):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(obj,_BaseNetwork):\n return obj._get_networks_key()\n elif isinstance(obj,_BaseAddress):\n return 
obj._get_address_key()\n return NotImplemented\n \n \nclass _IPAddressBase:\n\n ''\n \n __slots__=()\n \n @property\n def exploded(self):\n ''\n return self._explode_shorthand_ip_string()\n \n @property\n def compressed(self):\n ''\n return str(self)\n \n @property\n def reverse_pointer(self):\n ''\n\n\n\n\n\n \n return self._reverse_pointer()\n \n @property\n def version(self):\n msg='%200s has no version specified'%(type(self),)\n raise NotImplementedError(msg)\n \n def _check_int_address(self,address):\n if address <0:\n msg=\"%d (< 0) is not permitted as an IPv%d address\"\n raise AddressValueError(msg %(address,self._version))\n if address >self._ALL_ONES:\n msg=\"%d (>= 2**%d) is not permitted as an IPv%d address\"\n raise AddressValueError(msg %(address,self._max_prefixlen,\n self._version))\n \n def _check_packed_address(self,address,expected_len):\n address_len=len(address)\n if address_len !=expected_len:\n msg=\"%r (len %d != %d) is not permitted as an IPv%d address\"\n raise AddressValueError(msg %(address,address_len,\n expected_len,self._version))\n \n @classmethod\n def _ip_int_from_prefix(cls,prefixlen):\n ''\n\n\n\n\n\n\n\n \n return cls._ALL_ONES ^(cls._ALL_ONES >>prefixlen)\n \n @classmethod\n def _prefix_from_ip_int(cls,ip_int):\n ''\n\n\n\n\n\n\n\n\n\n \n trailing_zeroes=_count_righthand_zero_bits(ip_int,\n cls._max_prefixlen)\n prefixlen=cls._max_prefixlen -trailing_zeroes\n leading_ones=ip_int >>trailing_zeroes\n all_ones=(1 <1:\n return address\n return address[0],cls._max_prefixlen\n \n def __reduce__(self):\n return self.__class__,(str(self),)\n \n \n_address_fmt_re=None\n\n@functools.total_ordering\nclass _BaseAddress(_IPAddressBase):\n\n ''\n\n\n\n \n \n __slots__=()\n \n def __int__(self):\n return self._ip\n \n def __eq__(self,other):\n try:\n return(self._ip ==other._ip\n and self._version ==other._version)\n except AttributeError:\n return NotImplemented\n \n def __lt__(self,other):\n if not isinstance(other,_BaseAddress):\n return NotImplemented\n if self._version !=other._version:\n raise TypeError('%s and %s are not of the same version'%(\n self,other))\n if self._ip !=other._ip:\n return self._ip =0:\n if network+n >broadcast:\n raise IndexError('address out of range')\n return self._address_class(network+n)\n else:\n n +=1\n if broadcast+n other.network_address:\n return 1\n \n if self.netmask other.netmask:\n return 1\n return 0\n \n def _get_networks_key(self):\n ''\n\n\n\n\n\n \n return(self._version,self.network_address,self.netmask)\n \n def subnets(self,prefixlen_diff=1,new_prefix=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self._prefixlen ==self._max_prefixlen:\n yield self\n return\n \n if new_prefix is not None:\n if new_prefix 0')\n new_prefixlen=self._prefixlen+prefixlen_diff\n \n if new_prefixlen >self._max_prefixlen:\n raise ValueError(\n 'prefix length diff %d is invalid for netblock %s'%(\n new_prefixlen,self))\n \n start=int(self.network_address)\n end=int(self.broadcast_address)+1\n step=(int(self.hostmask)+1)>>prefixlen_diff\n for new_addr in range(start,end,step):\n current=self.__class__((new_addr,new_prefixlen))\n yield current\n \n def supernet(self,prefixlen_diff=1,new_prefix=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self._prefixlen ==0:\n return self\n \n if new_prefix is not None:\n if new_prefix >self._prefixlen:\n raise ValueError('new prefix must be shorter')\n if prefixlen_diff !=1:\n raise ValueError('cannot set prefixlen_diff and new_prefix')\n prefixlen_diff=self._prefixlen 
-new_prefix\n \n new_prefixlen=self.prefixlen -prefixlen_diff\n if new_prefixlen <0:\n raise ValueError(\n 'current prefixlen is %d, cannot have a prefixlen_diff of %d'%\n (self.prefixlen,prefixlen_diff))\n return self.__class__((\n int(self.network_address)&(int(self.netmask)<=a.broadcast_address)\n except AttributeError:\n raise TypeError(f\"Unable to test subnet containment \"\n f\"between {a} and {b}\")\n \n def subnet_of(self,other):\n ''\n return self._is_subnet_of(self,other)\n \n def supernet_of(self,other):\n ''\n return self._is_subnet_of(other,self)\n \n @property\n def is_reserved(self):\n ''\n\n\n\n\n\n \n return(self.network_address.is_reserved and\n self.broadcast_address.is_reserved)\n \n @property\n def is_link_local(self):\n ''\n\n\n\n\n \n return(self.network_address.is_link_local and\n self.broadcast_address.is_link_local)\n \n @property\n def is_private(self):\n ''\n\n\n\n\n\n \n return(self.network_address.is_private and\n self.broadcast_address.is_private)\n \n @property\n def is_global(self):\n ''\n\n\n\n\n\n \n return not self.is_private\n \n @property\n def is_unspecified(self):\n ''\n\n\n\n\n\n \n return(self.network_address.is_unspecified and\n self.broadcast_address.is_unspecified)\n \n @property\n def is_loopback(self):\n ''\n\n\n\n\n\n \n return(self.network_address.is_loopback and\n self.broadcast_address.is_loopback)\n \nclass _BaseV4:\n\n ''\n\n\n\n\n \n \n __slots__=()\n _version=4\n \n _ALL_ONES=(2 **IPV4LENGTH)-1\n \n _max_prefixlen=IPV4LENGTH\n \n \n _netmask_cache={}\n \n def _explode_shorthand_ip_string(self):\n return str(self)\n \n @classmethod\n def _make_netmask(cls,arg):\n ''\n\n\n\n\n\n \n if arg not in cls._netmask_cache:\n if isinstance(arg,int):\n prefixlen=arg\n if not(0 <=prefixlen <=cls._max_prefixlen):\n cls._report_invalid_netmask(prefixlen)\n else:\n try:\n \n prefixlen=cls._prefix_from_prefix_string(arg)\n except NetmaskValueError:\n \n \n prefixlen=cls._prefix_from_ip_string(arg)\n netmask=IPv4Address(cls._ip_int_from_prefix(prefixlen))\n cls._netmask_cache[arg]=netmask,prefixlen\n return cls._netmask_cache[arg]\n \n @classmethod\n def _ip_int_from_string(cls,ip_str):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not ip_str:\n raise AddressValueError('Address cannot be empty')\n \n octets=ip_str.split('.')\n if len(octets)!=4:\n raise AddressValueError(\"Expected 4 octets in %r\"%ip_str)\n \n try:\n return int.from_bytes(map(cls._parse_octet,octets),'big')\n except ValueError as exc:\n raise AddressValueError(\"%s in %r\"%(exc,ip_str))from None\n \n @classmethod\n def _parse_octet(cls,octet_str):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not octet_str:\n raise ValueError(\"Empty octet not permitted\")\n \n if not(octet_str.isascii()and octet_str.isdigit()):\n msg=\"Only decimal digits permitted in %r\"\n raise ValueError(msg %octet_str)\n \n \n if len(octet_str)>3:\n msg=\"At most 3 characters permitted in %r\"\n raise ValueError(msg %octet_str)\n \n \n if octet_str !='0'and octet_str[0]=='0':\n msg=\"Leading zeros are not permitted in %r\"\n raise ValueError(msg %octet_str)\n \n octet_int=int(octet_str,10)\n if octet_int >255:\n raise ValueError(\"Octet %d (> 255) not permitted\"%octet_int)\n return octet_int\n \n @classmethod\n def _string_from_ip_int(cls,ip_int):\n ''\n\n\n\n\n\n\n\n \n return '.'.join(map(str,ip_int.to_bytes(4,'big')))\n \n def _reverse_pointer(self):\n ''\n\n\n\n \n reverse_octets=str(self).split('.')[::-1]\n return '.'.join(reverse_octets)+'.in-addr.arpa'\n \n @property\n def max_prefixlen(self):\n return self._max_prefixlen\n \n 
@property\n def version(self):\n return self._version\n \n \nclass IPv4Address(_BaseV4,_BaseAddress):\n\n ''\n \n __slots__=('_ip','__weakref__')\n \n def __init__(self,address):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if isinstance(address,int):\n self._check_int_address(address)\n self._ip=address\n return\n \n \n if isinstance(address,bytes):\n self._check_packed_address(address,4)\n self._ip=int.from_bytes(address)\n return\n \n \n \n addr_str=str(address)\n if '/'in addr_str:\n raise AddressValueError(f\"Unexpected '/' in {address !r}\")\n self._ip=self._ip_int_from_string(addr_str)\n \n @property\n def packed(self):\n ''\n return v4_int_to_packed(self._ip)\n \n @property\n def is_reserved(self):\n ''\n\n\n\n\n\n \n return self in self._constants._reserved_network\n \n @property\n @functools.lru_cache()\n def is_private(self):\n ''\n\n\n\n\n\n \n return any(self in net for net in self._constants._private_networks)\n \n @property\n @functools.lru_cache()\n def is_global(self):\n return self not in self._constants._public_network and not self.is_private\n \n @property\n def is_multicast(self):\n ''\n\n\n\n\n\n \n return self in self._constants._multicast_network\n \n @property\n def is_unspecified(self):\n ''\n\n\n\n\n\n \n return self ==self._constants._unspecified_address\n \n @property\n def is_loopback(self):\n ''\n\n\n\n\n \n return self in self._constants._loopback_network\n \n @property\n def is_link_local(self):\n ''\n\n\n\n\n \n return self in self._constants._linklocal_network\n \n \nclass IPv4Interface(IPv4Address):\n\n def __init__(self,address):\n addr,mask=self._split_addr_prefix(address)\n \n IPv4Address.__init__(self,addr)\n self.network=IPv4Network((addr,mask),strict=False)\n self.netmask=self.network.netmask\n self._prefixlen=self.network._prefixlen\n \n @functools.cached_property\n def hostmask(self):\n return self.network.hostmask\n \n def __str__(self):\n return '%s/%d'%(self._string_from_ip_int(self._ip),\n self._prefixlen)\n \n def __eq__(self,other):\n address_equal=IPv4Address.__eq__(self,other)\n if address_equal is NotImplemented or not address_equal:\n return address_equal\n try:\n return self.network ==other.network\n except AttributeError:\n \n \n \n return False\n \n def __lt__(self,other):\n address_less=IPv4Address.__lt__(self,other)\n if address_less is NotImplemented:\n return NotImplemented\n try:\n return(self.network >16)&0xFFFF))\n parts.append('%x'%(ipv4_int&0xFFFF))\n \n \n \n \n _max_parts=cls._HEXTET_COUNT+1\n if len(parts)>_max_parts:\n msg=\"At most %d colons permitted in %r\"%(_max_parts -1,ip_str)\n raise AddressValueError(msg)\n \n \n \n skip_index=None\n for i in range(1,len(parts)-1):\n if not parts[i]:\n if skip_index is not None:\n \n msg=\"At most one '::' permitted in %r\"%ip_str\n raise AddressValueError(msg)\n skip_index=i\n \n \n \n if skip_index is not None:\n \n parts_hi=skip_index\n parts_lo=len(parts)-skip_index -1\n if not parts[0]:\n parts_hi -=1\n if parts_hi:\n msg=\"Leading ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n if not parts[-1]:\n parts_lo -=1\n if parts_lo:\n msg=\"Trailing ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n parts_skipped=cls._HEXTET_COUNT -(parts_hi+parts_lo)\n if parts_skipped <1:\n msg=\"Expected at most %d other parts with '::' in %r\"\n raise AddressValueError(msg %(cls._HEXTET_COUNT -1,ip_str))\n else:\n \n \n \n if len(parts)!=cls._HEXTET_COUNT:\n msg=\"Exactly %d parts expected without '::' in %r\"\n raise 
AddressValueError(msg %(cls._HEXTET_COUNT,ip_str))\n if not parts[0]:\n msg=\"Leading ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n if not parts[-1]:\n msg=\"Trailing ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n parts_hi=len(parts)\n parts_lo=0\n parts_skipped=0\n \n try:\n \n ip_int=0\n for i in range(parts_hi):\n ip_int <<=16\n ip_int |=cls._parse_hextet(parts[i])\n ip_int <<=16 *parts_skipped\n for i in range(-parts_lo,0):\n ip_int <<=16\n ip_int |=cls._parse_hextet(parts[i])\n return ip_int\n except ValueError as exc:\n raise AddressValueError(\"%s in %r\"%(exc,ip_str))from None\n \n @classmethod\n def _parse_hextet(cls,hextet_str):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n if not cls._HEX_DIGITS.issuperset(hextet_str):\n raise ValueError(\"Only hex digits permitted in %r\"%hextet_str)\n \n \n if len(hextet_str)>4:\n msg=\"At most 4 characters permitted in %r\"\n raise ValueError(msg %hextet_str)\n \n return int(hextet_str,16)\n \n @classmethod\n def _compress_hextets(cls,hextets):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n best_doublecolon_start=-1\n best_doublecolon_len=0\n doublecolon_start=-1\n doublecolon_len=0\n for index,hextet in enumerate(hextets):\n if hextet =='0':\n doublecolon_len +=1\n if doublecolon_start ==-1:\n \n doublecolon_start=index\n if doublecolon_len >best_doublecolon_len:\n \n best_doublecolon_len=doublecolon_len\n best_doublecolon_start=doublecolon_start\n else:\n doublecolon_len=0\n doublecolon_start=-1\n \n if best_doublecolon_len >1:\n best_doublecolon_end=(best_doublecolon_start+\n best_doublecolon_len)\n \n if best_doublecolon_end ==len(hextets):\n hextets +=['']\n hextets[best_doublecolon_start:best_doublecolon_end]=['']\n \n if best_doublecolon_start ==0:\n hextets=['']+hextets\n \n return hextets\n \n @classmethod\n def _string_from_ip_int(cls,ip_int=None):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if ip_int is None:\n ip_int=int(cls._ip)\n \n if ip_int >cls._ALL_ONES:\n raise ValueError('IPv6 address is too large')\n \n hex_str='%032x'%ip_int\n hextets=['%x'%int(hex_str[x:x+4],16)for x in range(0,32,4)]\n \n hextets=cls._compress_hextets(hextets)\n return ':'.join(hextets)\n \n def _explode_shorthand_ip_string(self):\n ''\n\n\n\n\n\n\n\n \n if isinstance(self,IPv6Network):\n ip_str=str(self.network_address)\n elif isinstance(self,IPv6Interface):\n ip_str=str(self.ip)\n else:\n ip_str=str(self)\n \n ip_int=self._ip_int_from_string(ip_str)\n hex_str='%032x'%ip_int\n parts=[hex_str[x:x+4]for x in range(0,32,4)]\n if isinstance(self,(_BaseNetwork,IPv6Interface)):\n return '%s/%d'%(':'.join(parts),self._prefixlen)\n return ':'.join(parts)\n \n def _reverse_pointer(self):\n ''\n\n\n\n \n reverse_chars=self.exploded[::-1].replace(':','')\n return '.'.join(reverse_chars)+'.ip6.arpa'\n \n @staticmethod\n def _split_scope_id(ip_str):\n ''\n\n\n\n\n\n\n\n\n\n \n addr,sep,scope_id=ip_str.partition('%')\n if not sep:\n scope_id=None\n elif not scope_id or '%'in scope_id:\n raise AddressValueError('Invalid IPv6 address: \"%r\"'%ip_str)\n return addr,scope_id\n \n @property\n def max_prefixlen(self):\n return self._max_prefixlen\n \n @property\n def version(self):\n return self._version\n \n \nclass IPv6Address(_BaseV6,_BaseAddress):\n\n ''\n \n __slots__=('_ip','_scope_id','__weakref__')\n \n def __init__(self,address):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if isinstance(address,int):\n self._check_int_address(address)\n self._ip=address\n self._scope_id=None\n return\n \n \n if isinstance(address,bytes):\n 
self._check_packed_address(address,16)\n self._ip=int.from_bytes(address,'big')\n self._scope_id=None\n return\n \n \n \n addr_str=str(address)\n if '/'in addr_str:\n raise AddressValueError(f\"Unexpected '/' in {address !r}\")\n addr_str,self._scope_id=self._split_scope_id(addr_str)\n \n self._ip=self._ip_int_from_string(addr_str)\n \n def __str__(self):\n ip_str=super().__str__()\n return ip_str+'%'+self._scope_id if self._scope_id else ip_str\n \n def __hash__(self):\n return hash((self._ip,self._scope_id))\n \n def __eq__(self,other):\n address_equal=super().__eq__(other)\n if address_equal is NotImplemented:\n return NotImplemented\n if not address_equal:\n return False\n return self._scope_id ==getattr(other,'_scope_id',None)\n \n @property\n def scope_id(self):\n ''\n\n\n\n\n\n\n \n return self._scope_id\n \n @property\n def packed(self):\n ''\n return v6_int_to_packed(self._ip)\n \n @property\n def is_multicast(self):\n ''\n\n\n\n\n\n \n return self in self._constants._multicast_network\n \n @property\n def is_reserved(self):\n ''\n\n\n\n\n\n \n return any(self in x for x in self._constants._reserved_networks)\n \n @property\n def is_link_local(self):\n ''\n\n\n\n\n \n return self in self._constants._linklocal_network\n \n @property\n def is_site_local(self):\n ''\n\n\n\n\n\n\n\n\n \n return self in self._constants._sitelocal_network\n \n @property\n @functools.lru_cache()\n def is_private(self):\n ''\n\n\n\n\n\n\n \n ipv4_mapped=self.ipv4_mapped\n if ipv4_mapped is not None:\n return ipv4_mapped.is_private\n return any(self in net for net in self._constants._private_networks)\n \n @property\n def is_global(self):\n ''\n\n\n\n\n\n \n return not self.is_private\n \n @property\n def is_unspecified(self):\n ''\n\n\n\n\n\n \n return self._ip ==0\n \n @property\n def is_loopback(self):\n ''\n\n\n\n\n\n \n return self._ip ==1\n \n @property\n def ipv4_mapped(self):\n ''\n\n\n\n\n\n \n if(self._ip >>32)!=0xFFFF:\n return None\n return IPv4Address(self._ip&0xFFFFFFFF)\n \n @property\n def teredo(self):\n ''\n\n\n\n\n\n\n \n if(self._ip >>96)!=0x20010000:\n return None\n return(IPv4Address((self._ip >>64)&0xFFFFFFFF),\n IPv4Address(~self._ip&0xFFFFFFFF))\n \n @property\n def sixtofour(self):\n ''\n\n\n\n\n\n \n if(self._ip >>112)!=0x2002:\n return None\n return IPv4Address((self._ip >>80)&0xFFFFFFFF)\n \n \nclass IPv6Interface(IPv6Address):\n\n def __init__(self,address):\n addr,mask=self._split_addr_prefix(address)\n \n IPv6Address.__init__(self,addr)\n self.network=IPv6Network((addr,mask),strict=False)\n self.netmask=self.network.netmask\n self._prefixlen=self.network._prefixlen\n \n @functools.cached_property\n def hostmask(self):\n return self.network.hostmask\n \n def __str__(self):\n return '%s/%d'%(super().__str__(),\n self._prefixlen)\n \n def __eq__(self,other):\n address_equal=IPv6Address.__eq__(self,other)\n if address_equal is NotImplemented or not address_equal:\n return address_equal\n try:\n return self.network ==other.network\n except AttributeError:\n \n \n \n return False\n \n def __lt__(self,other):\n address_less=IPv6Address.__lt__(self,other)\n if address_less is NotImplemented:\n return address_less\n try:\n return(self.network =len(fmt):\n \n \n \n continue\n field_name=fmt[i]\n is_metadata=field_name.startswith(':')\n if i >=n_defaults and not is_metadata:\n \n \n h=field_name+\": \"\n if token and token[:len(h)].lower()!=h:\n raise NNTPDataError(\"OVER/XOVER response doesn't include \"\n \"names of additional headers\")\n token=token[len(h):]if token else None\n 
fields[fmt[i]]=token\n overview.append((article_number,fields))\n return overview\n \ndef _parse_datetime(date_str,time_str=None):\n ''\n\n\n \n if time_str is None:\n time_str=date_str[-6:]\n date_str=date_str[:-6]\n hours=int(time_str[:2])\n minutes=int(time_str[2:4])\n seconds=int(time_str[4:])\n year=int(date_str[:-4])\n month=int(date_str[-4:-2])\n day=int(date_str[-2:])\n \n \n if year <70:\n year +=2000\n elif year <100:\n year +=1900\n return datetime.datetime(year,month,day,hours,minutes,seconds)\n \ndef _unparse_datetime(dt,legacy=False):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not isinstance(dt,datetime.datetime):\n time_str=\"000000\"\n else:\n time_str=\"{0.hour:02d}{0.minute:02d}{0.second:02d}\".format(dt)\n y=dt.year\n if legacy:\n y=y %100\n date_str=\"{0:02d}{1.month:02d}{1.day:02d}\".format(y,dt)\n else:\n date_str=\"{0:04d}{1.month:02d}{1.day:02d}\".format(y,dt)\n return date_str,time_str\n \n \nif _have_ssl:\n\n def _encrypt_on(sock,context,hostname):\n ''\n\n\n\n\n \n \n if context is None:\n context=ssl._create_stdlib_context()\n return context.wrap_socket(sock,server_hostname=hostname)\n \n \n \nclass NNTP:\n\n\n\n\n\n\n\n\n\n\n\n\n encoding='utf-8'\n errors='surrogateescape'\n \n def __init__(self,host,port=NNTP_PORT,user=None,password=None,\n readermode=None,usenetrc=False,\n timeout=_GLOBAL_DEFAULT_TIMEOUT):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.host=host\n self.port=port\n self.sock=self._create_socket(timeout)\n self.file=None\n try:\n self.file=self.sock.makefile(\"rwb\")\n self._base_init(readermode)\n if user or usenetrc:\n self.login(user,password,usenetrc)\n except:\n if self.file:\n self.file.close()\n self.sock.close()\n raise\n \n def _base_init(self,readermode):\n ''\n\n \n self.debugging=0\n self.welcome=self._getresp()\n \n \n self._caps=None\n self.getcapabilities()\n \n \n \n \n \n \n \n \n self.readermode_afterauth=False\n if readermode and 'READER'not in self._caps:\n self._setreadermode()\n if not self.readermode_afterauth:\n \n self._caps=None\n self.getcapabilities()\n \n \n \n \n self.tls_on=False\n \n \n self.authenticated=False\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n is_connected=lambda:hasattr(self,\"file\")\n if is_connected():\n try:\n self.quit()\n except(OSError,EOFError):\n pass\n finally:\n if is_connected():\n self._close()\n \n def _create_socket(self,timeout):\n if timeout is not None and not timeout:\n raise ValueError('Non-blocking socket (timeout=0) is not supported')\n sys.audit(\"nntplib.connect\",self,self.host,self.port)\n return socket.create_connection((self.host,self.port),timeout)\n \n def getwelcome(self):\n ''\n\n\n \n \n if self.debugging:print('*welcome*',repr(self.welcome))\n return self.welcome\n \n def getcapabilities(self):\n ''\n\n \n if self._caps is None:\n self.nntp_version=1\n self.nntp_implementation=None\n try:\n resp,caps=self.capabilities()\n except(NNTPPermanentError,NNTPTemporaryError):\n \n self._caps={}\n else:\n self._caps=caps\n if 'VERSION'in caps:\n \n \n self.nntp_version=max(map(int,caps['VERSION']))\n if 'IMPLEMENTATION'in caps:\n self.nntp_implementation=' '.join(caps['IMPLEMENTATION'])\n return self._caps\n \n def set_debuglevel(self,level):\n ''\n\n\n \n \n self.debugging=level\n debug=set_debuglevel\n \n def _putline(self,line):\n ''\n \n sys.audit(\"nntplib.putline\",self,line)\n line=line+_CRLF\n if self.debugging >1:print('*put*',repr(line))\n self.file.write(line)\n self.file.flush()\n \n def _putcmd(self,line):\n ''\n \n if 
self.debugging:print('*cmd*',repr(line))\n line=line.encode(self.encoding,self.errors)\n self._putline(line)\n \n def _getline(self,strip_crlf=True):\n ''\n\n \n line=self.file.readline(_MAXLINE+1)\n if len(line)>_MAXLINE:\n raise NNTPDataError('line too long')\n if self.debugging >1:\n print('*get*',repr(line))\n if not line:raise EOFError\n if strip_crlf:\n if line[-2:]==_CRLF:\n line=line[:-2]\n elif line[-1:]in _CRLF:\n line=line[:-1]\n return line\n \n def _getresp(self):\n ''\n\n \n resp=self._getline()\n if self.debugging:print('*resp*',repr(resp))\n resp=resp.decode(self.encoding,self.errors)\n c=resp[:1]\n if c =='4':\n raise NNTPTemporaryError(resp)\n if c =='5':\n raise NNTPPermanentError(resp)\n if c not in '123':\n raise NNTPProtocolError(resp)\n return resp\n \n def _getlongresp(self,file=None):\n ''\n\n\n\n\n\n \n \n openedFile=None\n try:\n \n if isinstance(file,(str,bytes)):\n openedFile=file=open(file,\"wb\")\n \n resp=self._getresp()\n if resp[:3]not in _LONGRESP:\n raise NNTPReplyError(resp)\n \n lines=[]\n if file is not None:\n \n terminators=(b'.'+_CRLF,b'.\\n')\n while 1:\n line=self._getline(False)\n if line in terminators:\n break\n if line.startswith(b'..'):\n line=line[1:]\n file.write(line)\n else:\n terminator=b'.'\n while 1:\n line=self._getline()\n if line ==terminator:\n break\n if line.startswith(b'..'):\n line=line[1:]\n lines.append(line)\n finally:\n \n if openedFile:\n openedFile.close()\n \n return resp,lines\n \n def _shortcmd(self,line):\n ''\n \n self._putcmd(line)\n return self._getresp()\n \n def _longcmd(self,line,file=None):\n ''\n \n self._putcmd(line)\n return self._getlongresp(file)\n \n def _longcmdstring(self,line,file=None):\n ''\n\n\n \n self._putcmd(line)\n resp,list=self._getlongresp(file)\n return resp,[line.decode(self.encoding,self.errors)\n for line in list]\n \n def _getoverviewfmt(self):\n ''\n \n try:\n return self._cachedoverviewfmt\n except AttributeError:\n pass\n try:\n resp,lines=self._longcmdstring(\"LIST OVERVIEW.FMT\")\n except NNTPPermanentError:\n \n fmt=_DEFAULT_OVERVIEW_FMT[:]\n else:\n fmt=_parse_overview_fmt(lines)\n self._cachedoverviewfmt=fmt\n return fmt\n \n def _grouplist(self,lines):\n \n return[GroupInfo(*line.split())for line in lines]\n \n def capabilities(self):\n ''\n\n\n\n\n \n caps={}\n resp,lines=self._longcmdstring(\"CAPABILITIES\")\n for line in lines:\n name,*tokens=line.split()\n caps[name]=tokens\n return resp,caps\n \n def newgroups(self,date,*,file=None):\n ''\n\n\n\n\n \n if not isinstance(date,(datetime.date,datetime.date)):\n raise TypeError(\n \"the date parameter must be a date or datetime object, \"\n \"not '{:40}'\".format(date.__class__.__name__))\n date_str,time_str=_unparse_datetime(date,self.nntp_version <2)\n cmd='NEWGROUPS {0} {1}'.format(date_str,time_str)\n resp,lines=self._longcmdstring(cmd,file)\n return resp,self._grouplist(lines)\n \n def newnews(self,group,date,*,file=None):\n ''\n\n\n\n\n\n \n if not isinstance(date,(datetime.date,datetime.date)):\n raise TypeError(\n \"the date parameter must be a date or datetime object, \"\n \"not '{:40}'\".format(date.__class__.__name__))\n date_str,time_str=_unparse_datetime(date,self.nntp_version <2)\n cmd='NEWNEWS {0} {1} {2}'.format(group,date_str,time_str)\n return self._longcmdstring(cmd,file)\n \n def list(self,group_pattern=None,*,file=None):\n ''\n\n\n\n\n\n \n if group_pattern is not None:\n command='LIST ACTIVE '+group_pattern\n else:\n command='LIST'\n resp,lines=self._longcmdstring(command,file)\n return 
resp,self._grouplist(lines)\n \n def _getdescriptions(self,group_pattern,return_all):\n line_pat=re.compile('^(?P[^ \\t]+)[ \\t]+(.*)$')\n \n resp,lines=self._longcmdstring('LIST NEWSGROUPS '+group_pattern)\n if not resp.startswith('215'):\n \n \n \n resp,lines=self._longcmdstring('XGTITLE '+group_pattern)\n groups={}\n for raw_line in lines:\n match=line_pat.search(raw_line.strip())\n if match:\n name,desc=match.group(1,2)\n if not return_all:\n return desc\n groups[name]=desc\n if return_all:\n return resp,groups\n else:\n \n return ''\n \n def description(self,group):\n ''\n\n\n\n\n\n\n\n\n \n return self._getdescriptions(group,False)\n \n def descriptions(self,group_pattern):\n ''\n return self._getdescriptions(group_pattern,True)\n \n def group(self,name):\n ''\n\n\n\n\n\n\n\n \n resp=self._shortcmd('GROUP '+name)\n if not resp.startswith('211'):\n raise NNTPReplyError(resp)\n words=resp.split()\n count=first=last=0\n n=len(words)\n if n >1:\n count=words[1]\n if n >2:\n first=words[2]\n if n >3:\n last=words[3]\n if n >4:\n name=words[4].lower()\n return resp,int(count),int(first),int(last),name\n \n def help(self,*,file=None):\n ''\n\n\n\n\n\n \n return self._longcmdstring('HELP',file)\n \n def _statparse(self,resp):\n ''\n \n if not resp.startswith('22'):\n raise NNTPReplyError(resp)\n words=resp.split()\n art_num=int(words[1])\n message_id=words[2]\n return resp,art_num,message_id\n \n def _statcmd(self,line):\n ''\n resp=self._shortcmd(line)\n return self._statparse(resp)\n \n def stat(self,message_spec=None):\n ''\n\n\n\n\n\n\n \n if message_spec:\n return self._statcmd('STAT {0}'.format(message_spec))\n else:\n return self._statcmd('STAT')\n \n def next(self):\n ''\n return self._statcmd('NEXT')\n \n def last(self):\n ''\n return self._statcmd('LAST')\n \n def _artcmd(self,line,file=None):\n ''\n resp,lines=self._longcmd(line,file)\n resp,art_num,message_id=self._statparse(resp)\n return resp,ArticleInfo(art_num,message_id,lines)\n \n def head(self,message_spec=None,*,file=None):\n ''\n\n\n\n\n\n \n if message_spec is not None:\n cmd='HEAD {0}'.format(message_spec)\n else:\n cmd='HEAD'\n return self._artcmd(cmd,file)\n \n def body(self,message_spec=None,*,file=None):\n ''\n\n\n\n\n\n \n if message_spec is not None:\n cmd='BODY {0}'.format(message_spec)\n else:\n cmd='BODY'\n return self._artcmd(cmd,file)\n \n def article(self,message_spec=None,*,file=None):\n ''\n\n\n\n\n\n \n if message_spec is not None:\n cmd='ARTICLE {0}'.format(message_spec)\n else:\n cmd='ARTICLE'\n return self._artcmd(cmd,file)\n \n def slave(self):\n ''\n\n \n return self._shortcmd('SLAVE')\n \n def xhdr(self,hdr,str,*,file=None):\n ''\n\n\n\n\n\n\n \n pat=re.compile('^([0-9]+) ?(.*)\\n?')\n resp,lines=self._longcmdstring('XHDR {0} {1}'.format(hdr,str),file)\n def remove_number(line):\n m=pat.match(line)\n return m.group(1,2)if m else line\n return resp,[remove_number(line)for line in lines]\n \n def xover(self,start,end,*,file=None):\n ''\n\n\n\n\n\n\n \n resp,lines=self._longcmdstring('XOVER {0}-{1}'.format(start,end),\n file)\n fmt=self._getoverviewfmt()\n return resp,_parse_overview(lines,fmt)\n \n def over(self,message_spec,*,file=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n cmd='OVER'if 'OVER'in self._caps else 'XOVER'\n if isinstance(message_spec,(tuple,list)):\n start,end=message_spec\n cmd +=' {0}-{1}'.format(start,end or '')\n elif message_spec is not None:\n cmd=cmd+' '+message_spec\n resp,lines=self._longcmdstring(cmd,file)\n fmt=self._getoverviewfmt()\n return 
resp,_parse_overview(lines,fmt)\n \n def date(self):\n ''\n\n\n\n \n resp=self._shortcmd(\"DATE\")\n if not resp.startswith('111'):\n raise NNTPReplyError(resp)\n elem=resp.split()\n if len(elem)!=2:\n raise NNTPDataError(resp)\n date=elem[1]\n if len(date)!=14:\n raise NNTPDataError(resp)\n return resp,_parse_datetime(date,None)\n \n def _post(self,command,f):\n resp=self._shortcmd(command)\n \n if not resp.startswith('3'):\n raise NNTPReplyError(resp)\n if isinstance(f,(bytes,bytearray)):\n f=f.splitlines()\n \n \n \n \n for line in f:\n if not line.endswith(_CRLF):\n line=line.rstrip(b\"\\r\\n\")+_CRLF\n if line.startswith(b'.'):\n line=b'.'+line\n self.file.write(line)\n self.file.write(b\".\\r\\n\")\n self.file.flush()\n return self._getresp()\n \n def post(self,data):\n ''\n\n\n \n return self._post('POST',data)\n \n def ihave(self,message_id,data):\n ''\n\n\n\n\n \n return self._post('IHAVE {0}'.format(message_id),data)\n \n def _close(self):\n try:\n if self.file:\n self.file.close()\n del self.file\n finally:\n self.sock.close()\n \n def quit(self):\n ''\n \n try:\n resp=self._shortcmd('QUIT')\n finally:\n self._close()\n return resp\n \n def login(self,user=None,password=None,usenetrc=True):\n if self.authenticated:\n raise ValueError(\"Already logged in.\")\n if not user and not usenetrc:\n raise ValueError(\n \"At least one of `user` and `usenetrc` must be specified\")\n \n \n \n try:\n if usenetrc and not user:\n import netrc\n credentials=netrc.netrc()\n auth=credentials.authenticators(self.host)\n if auth:\n user=auth[0]\n password=auth[2]\n except OSError:\n pass\n \n if not user:\n return\n resp=self._shortcmd('authinfo user '+user)\n if resp.startswith('381'):\n if not password:\n raise NNTPReplyError(resp)\n else:\n resp=self._shortcmd('authinfo pass '+password)\n if not resp.startswith('281'):\n raise NNTPPermanentError(resp)\n \n self._caps=None\n self.getcapabilities()\n \n \n if self.readermode_afterauth and 'READER'not in self._caps:\n self._setreadermode()\n \n self._caps=None\n self.getcapabilities()\n \n def _setreadermode(self):\n try:\n self.welcome=self._shortcmd('mode reader')\n except NNTPPermanentError:\n \n pass\n except NNTPTemporaryError as e:\n if e.response.startswith('480'):\n \n self.readermode_afterauth=True\n else:\n raise\n \n if _have_ssl:\n def starttls(self,context=None):\n ''\n\n \n \n \n if self.tls_on:\n raise ValueError(\"TLS is already enabled.\")\n if self.authenticated:\n raise ValueError(\"TLS cannot be started after authentication.\")\n resp=self._shortcmd('STARTTLS')\n if resp.startswith('382'):\n self.file.close()\n self.sock=_encrypt_on(self.sock,context,self.host)\n self.file=self.sock.makefile(\"rwb\")\n self.tls_on=True\n \n \n self._caps=None\n self.getcapabilities()\n else:\n raise NNTPError(\"TLS failed to start.\")\n \n \nif _have_ssl:\n class NNTP_SSL(NNTP):\n \n def __init__(self,host,port=NNTP_SSL_PORT,\n user=None,password=None,ssl_context=None,\n readermode=None,usenetrc=False,\n timeout=_GLOBAL_DEFAULT_TIMEOUT):\n ''\n\n \n self.ssl_context=ssl_context\n super().__init__(host,port,user,password,readermode,\n usenetrc,timeout)\n \n def _create_socket(self,timeout):\n sock=super()._create_socket(timeout)\n try:\n sock=_encrypt_on(sock,self.ssl_context,self.host)\n except:\n sock.close()\n raise\n else:\n return sock\n \n __all__.append(\"NNTP_SSL\")\n \n \n \nif __name__ =='__main__':\n import argparse\n \n parser=argparse.ArgumentParser(description=\"\"\"\\\n nntplib built-in demo - display the latest articles in a 
newsgroup\"\"\")\n parser.add_argument('-g','--group',default='gmane.comp.python.general',\n help='group to fetch messages from (default: %(default)s)')\n parser.add_argument('-s','--server',default='news.gmane.io',\n help='NNTP server hostname (default: %(default)s)')\n parser.add_argument('-p','--port',default=-1,type=int,\n help='NNTP port number (default: %s / %s)'%(NNTP_PORT,NNTP_SSL_PORT))\n parser.add_argument('-n','--nb-articles',default=10,type=int,\n help='number of articles to fetch (default: %(default)s)')\n parser.add_argument('-S','--ssl',action='store_true',default=False,\n help='use NNTP over SSL')\n args=parser.parse_args()\n \n port=args.port\n if not args.ssl:\n if port ==-1:\n port=NNTP_PORT\n s=NNTP(host=args.server,port=port)\n else:\n if port ==-1:\n port=NNTP_SSL_PORT\n s=NNTP_SSL(host=args.server,port=port)\n \n caps=s.getcapabilities()\n if 'STARTTLS'in caps:\n s.starttls()\n resp,count,first,last,name=s.group(args.group)\n print('Group',name,'has',count,'articles, range',first,'to',last)\n \n def cut(s,lim):\n if len(s)>lim:\n s=s[:lim -4]+\"...\"\n return s\n \n first=str(int(last)-args.nb_articles+1)\n resp,overviews=s.xover(first,last)\n for artnum,over in overviews:\n author=decode_header(over['from']).split('<',1)[0]\n subject=decode_header(over['subject'])\n lines=int(over[':lines'])\n print(\"{:7} {:20} {:42} ({})\".format(\n artnum,cut(author,20),cut(subject,42),lines)\n )\n \n s.quit()\n", ["argparse", "collections", "datetime", "email.header", "netrc", "re", "socket", "ssl", "sys", "warnings"]], "_compat_pickle": [".py", "\n\n\n\n\n\n\nIMPORT_MAPPING={\n'__builtin__':'builtins',\n'copy_reg':'copyreg',\n'Queue':'queue',\n'SocketServer':'socketserver',\n'ConfigParser':'configparser',\n'repr':'reprlib',\n'tkFileDialog':'tkinter.filedialog',\n'tkSimpleDialog':'tkinter.simpledialog',\n'tkColorChooser':'tkinter.colorchooser',\n'tkCommonDialog':'tkinter.commondialog',\n'Dialog':'tkinter.dialog',\n'Tkdnd':'tkinter.dnd',\n'tkFont':'tkinter.font',\n'tkMessageBox':'tkinter.messagebox',\n'ScrolledText':'tkinter.scrolledtext',\n'Tkconstants':'tkinter.constants',\n'Tix':'tkinter.tix',\n'ttk':'tkinter.ttk',\n'Tkinter':'tkinter',\n'markupbase':'_markupbase',\n'_winreg':'winreg',\n'thread':'_thread',\n'dummy_thread':'_dummy_thread',\n'dbhash':'dbm.bsd',\n'dumbdbm':'dbm.dumb',\n'dbm':'dbm.ndbm',\n'gdbm':'dbm.gnu',\n'xmlrpclib':'xmlrpc.client',\n'SimpleXMLRPCServer':'xmlrpc.server',\n'httplib':'http.client',\n'htmlentitydefs':'html.entities',\n'HTMLParser':'html.parser',\n'Cookie':'http.cookies',\n'cookielib':'http.cookiejar',\n'BaseHTTPServer':'http.server',\n'test.test_support':'test.support',\n'commands':'subprocess',\n'urlparse':'urllib.parse',\n'robotparser':'urllib.robotparser',\n'urllib2':'urllib.request',\n'anydbm':'dbm',\n'_abcoll':'collections.abc',\n}\n\n\n\n\n\nNAME_MAPPING={\n('__builtin__','xrange'):('builtins','range'),\n('__builtin__','reduce'):('functools','reduce'),\n('__builtin__','intern'):('sys','intern'),\n('__builtin__','unichr'):('builtins','chr'),\n('__builtin__','unicode'):('builtins','str'),\n('__builtin__','long'):('builtins','int'),\n('itertools','izip'):('builtins','zip'),\n('itertools','imap'):('builtins','map'),\n('itertools','ifilter'):('builtins','filter'),\n('itertools','ifilterfalse'):('itertools','filterfalse'),\n('itertools','izip_longest'):('itertools','zip_longest'),\n('UserDict','IterableUserDict'):('collections','UserDict'),\n('UserList','UserList'):('collections','UserList'),\n('UserString','UserString'):('collections','UserString
'),\n('whichdb','whichdb'):('dbm','whichdb'),\n('_socket','fromfd'):('socket','fromfd'),\n('_multiprocessing','Connection'):('multiprocessing.connection','Connection'),\n('multiprocessing.process','Process'):('multiprocessing.context','Process'),\n('multiprocessing.forking','Popen'):('multiprocessing.popen_fork','Popen'),\n('urllib','ContentTooShortError'):('urllib.error','ContentTooShortError'),\n('urllib','getproxies'):('urllib.request','getproxies'),\n('urllib','pathname2url'):('urllib.request','pathname2url'),\n('urllib','quote_plus'):('urllib.parse','quote_plus'),\n('urllib','quote'):('urllib.parse','quote'),\n('urllib','unquote_plus'):('urllib.parse','unquote_plus'),\n('urllib','unquote'):('urllib.parse','unquote'),\n('urllib','url2pathname'):('urllib.request','url2pathname'),\n('urllib','urlcleanup'):('urllib.request','urlcleanup'),\n('urllib','urlencode'):('urllib.parse','urlencode'),\n('urllib','urlopen'):('urllib.request','urlopen'),\n('urllib','urlretrieve'):('urllib.request','urlretrieve'),\n('urllib2','HTTPError'):('urllib.error','HTTPError'),\n('urllib2','URLError'):('urllib.error','URLError'),\n}\n\nPYTHON2_EXCEPTIONS=(\n\"ArithmeticError\",\n\"AssertionError\",\n\"AttributeError\",\n\"BaseException\",\n\"BufferError\",\n\"BytesWarning\",\n\"DeprecationWarning\",\n\"EOFError\",\n\"EnvironmentError\",\n\"Exception\",\n\"FloatingPointError\",\n\"FutureWarning\",\n\"GeneratorExit\",\n\"IOError\",\n\"ImportError\",\n\"ImportWarning\",\n\"IndentationError\",\n\"IndexError\",\n\"KeyError\",\n\"KeyboardInterrupt\",\n\"LookupError\",\n\"MemoryError\",\n\"NameError\",\n\"NotImplementedError\",\n\"OSError\",\n\"OverflowError\",\n\"PendingDeprecationWarning\",\n\"ReferenceError\",\n\"RuntimeError\",\n\"RuntimeWarning\",\n\n\"StopIteration\",\n\"SyntaxError\",\n\"SyntaxWarning\",\n\"SystemError\",\n\"SystemExit\",\n\"TabError\",\n\"TypeError\",\n\"UnboundLocalError\",\n\"UnicodeDecodeError\",\n\"UnicodeEncodeError\",\n\"UnicodeError\",\n\"UnicodeTranslateError\",\n\"UnicodeWarning\",\n\"UserWarning\",\n\"ValueError\",\n\"Warning\",\n\"ZeroDivisionError\",\n)\n\ntry:\n WindowsError\nexcept NameError:\n pass\nelse:\n PYTHON2_EXCEPTIONS +=(\"WindowsError\",)\n \nfor excname in PYTHON2_EXCEPTIONS:\n NAME_MAPPING[(\"exceptions\",excname)]=(\"builtins\",excname)\n \nMULTIPROCESSING_EXCEPTIONS=(\n'AuthenticationError',\n'BufferTooShort',\n'ProcessError',\n'TimeoutError',\n)\n\nfor excname in MULTIPROCESSING_EXCEPTIONS:\n NAME_MAPPING[(\"multiprocessing\",excname)]=(\"multiprocessing.context\",excname)\n \n \nREVERSE_IMPORT_MAPPING=dict((v,k)for(k,v)in IMPORT_MAPPING.items())\nassert len(REVERSE_IMPORT_MAPPING)==len(IMPORT_MAPPING)\nREVERSE_NAME_MAPPING=dict((v,k)for(k,v)in NAME_MAPPING.items())\nassert 
len(REVERSE_NAME_MAPPING)==len(NAME_MAPPING)\n\n\n\nIMPORT_MAPPING.update({\n'cPickle':'pickle',\n'_elementtree':'xml.etree.ElementTree',\n'FileDialog':'tkinter.filedialog',\n'SimpleDialog':'tkinter.simpledialog',\n'DocXMLRPCServer':'xmlrpc.server',\n'SimpleHTTPServer':'http.server',\n'CGIHTTPServer':'http.server',\n\n'UserDict':'collections',\n'UserList':'collections',\n'UserString':'collections',\n'whichdb':'dbm',\n'StringIO':'io',\n'cStringIO':'io',\n})\n\nREVERSE_IMPORT_MAPPING.update({\n'_bz2':'bz2',\n'_dbm':'dbm',\n'_functools':'functools',\n'_gdbm':'gdbm',\n'_pickle':'pickle',\n})\n\nNAME_MAPPING.update({\n('__builtin__','basestring'):('builtins','str'),\n('exceptions','StandardError'):('builtins','Exception'),\n('UserDict','UserDict'):('collections','UserDict'),\n('socket','_socketobject'):('socket','SocketType'),\n})\n\nREVERSE_NAME_MAPPING.update({\n('_functools','reduce'):('__builtin__','reduce'),\n('tkinter.filedialog','FileDialog'):('FileDialog','FileDialog'),\n('tkinter.filedialog','LoadFileDialog'):('FileDialog','LoadFileDialog'),\n('tkinter.filedialog','SaveFileDialog'):('FileDialog','SaveFileDialog'),\n('tkinter.simpledialog','SimpleDialog'):('SimpleDialog','SimpleDialog'),\n('xmlrpc.server','ServerHTMLDoc'):('DocXMLRPCServer','ServerHTMLDoc'),\n('xmlrpc.server','XMLRPCDocGenerator'):\n('DocXMLRPCServer','XMLRPCDocGenerator'),\n('xmlrpc.server','DocXMLRPCRequestHandler'):\n('DocXMLRPCServer','DocXMLRPCRequestHandler'),\n('xmlrpc.server','DocXMLRPCServer'):\n('DocXMLRPCServer','DocXMLRPCServer'),\n('xmlrpc.server','DocCGIXMLRPCRequestHandler'):\n('DocXMLRPCServer','DocCGIXMLRPCRequestHandler'),\n('http.server','SimpleHTTPRequestHandler'):\n('SimpleHTTPServer','SimpleHTTPRequestHandler'),\n('http.server','CGIHTTPRequestHandler'):\n('CGIHTTPServer','CGIHTTPRequestHandler'),\n('_socket','socket'):('socket','_socketobject'),\n})\n\nPYTHON3_OSERROR_EXCEPTIONS=(\n'BrokenPipeError',\n'ChildProcessError',\n'ConnectionAbortedError',\n'ConnectionError',\n'ConnectionRefusedError',\n'ConnectionResetError',\n'FileExistsError',\n'FileNotFoundError',\n'InterruptedError',\n'IsADirectoryError',\n'NotADirectoryError',\n'PermissionError',\n'ProcessLookupError',\n'TimeoutError',\n)\n\nfor excname in PYTHON3_OSERROR_EXCEPTIONS:\n REVERSE_NAME_MAPPING[('builtins',excname)]=('exceptions','OSError')\n \nPYTHON3_IMPORTERROR_EXCEPTIONS=(\n'ModuleNotFoundError',\n)\n\nfor excname in PYTHON3_IMPORTERROR_EXCEPTIONS:\n REVERSE_NAME_MAPPING[('builtins',excname)]=('exceptions','ImportError')\ndel excname\n", []], "formatter": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport sys\nimport warnings\nwarnings.warn('the formatter module is deprecated',DeprecationWarning,\nstacklevel=2)\n\n\nAS_IS=None\n\n\nclass NullFormatter:\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,writer=None):\n if writer is None:\n writer=NullWriter()\n self.writer=writer\n def end_paragraph(self,blankline):pass\n def add_line_break(self):pass\n def add_hor_rule(self,*args,**kw):pass\n def add_label_data(self,format,counter,blankline=None):pass\n def add_flowing_data(self,data):pass\n def add_literal_data(self,data):pass\n def flush_softspace(self):pass\n def push_alignment(self,align):pass\n def pop_alignment(self):pass\n def push_font(self,x):pass\n def pop_font(self):pass\n def push_margin(self,margin):pass\n def pop_margin(self):pass\n def set_spacing(self,spacing):pass\n def push_style(self,*styles):pass\n def pop_style(self,n=1):pass\n def assert_line_data(self,flag=1):pass\n \n \nclass AbstractFormatter:\n 
''\n\n\n\n\n\n \n \n \n \n \n \n \n def __init__(self,writer):\n self.writer=writer\n self.align=None\n self.align_stack=[]\n self.font_stack=[]\n self.margin_stack=[]\n self.spacing=None\n self.style_stack=[]\n self.nospace=1\n self.softspace=0\n self.para_end=1\n self.parskip=0\n self.hard_break=1\n self.have_label=0\n \n def end_paragraph(self,blankline):\n if not self.hard_break:\n self.writer.send_line_break()\n self.have_label=0\n if self.parskip 0:\n label=label+self.format_letter(c,counter)\n elif c in 'iI':\n if counter >0:\n label=label+self.format_roman(c,counter)\n else:\n label=label+c\n return label\n \n def format_letter(self,case,counter):\n label=''\n while counter >0:\n counter,x=divmod(counter -1,26)\n \n \n \n s=chr(ord(case)+x)\n label=s+label\n return label\n \n def format_roman(self,case,counter):\n ones=['i','x','c','m']\n fives=['v','l','d']\n label,index='',0\n \n while counter >0:\n counter,x=divmod(counter,10)\n if x ==9:\n label=ones[index]+ones[index+1]+label\n elif x ==4:\n label=ones[index]+fives[index]+label\n else:\n if x >=5:\n s=fives[index]\n x=x -5\n else:\n s=''\n s=s+ones[index]*x\n label=s+label\n index=index+1\n if case =='I':\n return label.upper()\n return label\n \n def add_flowing_data(self,data):\n if not data:return\n prespace=data[:1].isspace()\n postspace=data[-1:].isspace()\n data=\" \".join(data.split())\n if self.nospace and not data:\n return\n elif prespace or self.softspace:\n if not data:\n if not self.nospace:\n self.softspace=1\n self.parskip=0\n return\n if not self.nospace:\n data=' '+data\n self.hard_break=self.nospace=self.para_end=\\\n self.parskip=self.have_label=0\n self.softspace=postspace\n self.writer.send_flowing_data(data)\n \n def add_literal_data(self,data):\n if not data:return\n if self.softspace:\n self.writer.send_flowing_data(\" \")\n self.hard_break=data[-1:]=='\\n'\n self.nospace=self.para_end=self.softspace=\\\n self.parskip=self.have_label=0\n self.writer.send_literal_data(data)\n \n def flush_softspace(self):\n if self.softspace:\n self.hard_break=self.para_end=self.parskip=\\\n self.have_label=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n \n def push_alignment(self,align):\n if align and align !=self.align:\n self.writer.new_alignment(align)\n self.align=align\n self.align_stack.append(align)\n else:\n self.align_stack.append(self.align)\n \n def pop_alignment(self):\n if self.align_stack:\n del self.align_stack[-1]\n if self.align_stack:\n self.align=align=self.align_stack[-1]\n self.writer.new_alignment(align)\n else:\n self.align=None\n self.writer.new_alignment(None)\n \n def push_font(self,font):\n size,i,b,tt=font\n if self.softspace:\n self.hard_break=self.para_end=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n if self.font_stack:\n csize,ci,cb,ctt=self.font_stack[-1]\n if size is AS_IS:size=csize\n if i is AS_IS:i=ci\n if b is AS_IS:b=cb\n if tt is AS_IS:tt=ctt\n font=(size,i,b,tt)\n self.font_stack.append(font)\n self.writer.new_font(font)\n \n def pop_font(self):\n if self.font_stack:\n del self.font_stack[-1]\n if self.font_stack:\n font=self.font_stack[-1]\n else:\n font=None\n self.writer.new_font(font)\n \n def push_margin(self,margin):\n self.margin_stack.append(margin)\n fstack=[m for m in self.margin_stack if m]\n if not margin and fstack:\n margin=fstack[-1]\n self.writer.new_margin(margin,len(fstack))\n \n def pop_margin(self):\n if self.margin_stack:\n del self.margin_stack[-1]\n fstack=[m for m in self.margin_stack if m]\n if 
fstack:\n margin=fstack[-1]\n else:\n margin=None\n self.writer.new_margin(margin,len(fstack))\n \n def set_spacing(self,spacing):\n self.spacing=spacing\n self.writer.new_spacing(spacing)\n \n def push_style(self,*styles):\n if self.softspace:\n self.hard_break=self.para_end=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n for style in styles:\n self.style_stack.append(style)\n self.writer.new_styles(tuple(self.style_stack))\n \n def pop_style(self,n=1):\n del self.style_stack[-n:]\n self.writer.new_styles(tuple(self.style_stack))\n \n def assert_line_data(self,flag=1):\n self.nospace=self.hard_break=not flag\n self.para_end=self.parskip=self.have_label=0\n \n \nclass NullWriter:\n ''\n\n\n\n\n\n \n def __init__(self):pass\n def flush(self):pass\n def new_alignment(self,align):pass\n def new_font(self,font):pass\n def new_margin(self,margin,level):pass\n def new_spacing(self,spacing):pass\n def new_styles(self,styles):pass\n def send_paragraph(self,blankline):pass\n def send_line_break(self):pass\n def send_hor_rule(self,*args,**kw):pass\n def send_label_data(self,data):pass\n def send_flowing_data(self,data):pass\n def send_literal_data(self,data):pass\n \n \nclass AbstractWriter(NullWriter):\n ''\n\n\n\n\n \n \n def new_alignment(self,align):\n print(\"new_alignment(%r)\"%(align,))\n \n def new_font(self,font):\n print(\"new_font(%r)\"%(font,))\n \n def new_margin(self,margin,level):\n print(\"new_margin(%r, %d)\"%(margin,level))\n \n def new_spacing(self,spacing):\n print(\"new_spacing(%r)\"%(spacing,))\n \n def new_styles(self,styles):\n print(\"new_styles(%r)\"%(styles,))\n \n def send_paragraph(self,blankline):\n print(\"send_paragraph(%r)\"%(blankline,))\n \n def send_line_break(self):\n print(\"send_line_break()\")\n \n def send_hor_rule(self,*args,**kw):\n print(\"send_hor_rule()\")\n \n def send_label_data(self,data):\n print(\"send_label_data(%r)\"%(data,))\n \n def send_flowing_data(self,data):\n print(\"send_flowing_data(%r)\"%(data,))\n \n def send_literal_data(self,data):\n print(\"send_literal_data(%r)\"%(data,))\n \n \nclass DumbWriter(NullWriter):\n ''\n\n\n\n\n\n \n \n def __init__(self,file=None,maxcol=72):\n self.file=file or sys.stdout\n self.maxcol=maxcol\n NullWriter.__init__(self)\n self.reset()\n \n def reset(self):\n self.col=0\n self.atbreak=0\n \n def send_paragraph(self,blankline):\n self.file.write('\\n'*blankline)\n self.col=0\n self.atbreak=0\n \n def send_line_break(self):\n self.file.write('\\n')\n self.col=0\n self.atbreak=0\n \n def send_hor_rule(self,*args,**kw):\n self.file.write('\\n')\n self.file.write('-'*self.maxcol)\n self.file.write('\\n')\n self.col=0\n self.atbreak=0\n \n def send_literal_data(self,data):\n self.file.write(data)\n i=data.rfind('\\n')\n if i >=0:\n self.col=0\n data=data[i+1:]\n data=data.expandtabs()\n self.col=self.col+len(data)\n self.atbreak=0\n \n def send_flowing_data(self,data):\n if not data:return\n atbreak=self.atbreak or data[0].isspace()\n col=self.col\n maxcol=self.maxcol\n write=self.file.write\n for word in data.split():\n if atbreak:\n if col+len(word)>=maxcol:\n write('\\n')\n col=0\n else:\n write(' ')\n col=col+1\n write(word)\n col=col+len(word)\n atbreak=1\n self.col=col\n self.atbreak=data[-1].isspace()\n \n \ndef test(file=None):\n w=DumbWriter()\n f=AbstractFormatter(w)\n if file is not None:\n fp=open(file)\n elif sys.argv[1:]:\n fp=open(sys.argv[1])\n else:\n fp=sys.stdin\n try:\n for line in fp:\n if line =='\\n':\n f.end_paragraph(1)\n else:\n f.add_flowing_data(line)\n 
finally:\n if fp is not sys.stdin:\n fp.close()\n f.end_paragraph(0)\n \n \nif __name__ =='__main__':\n test()\n", ["sys", "warnings"]], "bdb": [".py", "''\n\nimport fnmatch\nimport sys\nimport os\nfrom inspect import CO_GENERATOR,CO_COROUTINE,CO_ASYNC_GENERATOR\n\n__all__=[\"BdbQuit\",\"Bdb\",\"Breakpoint\"]\n\nGENERATOR_AND_COROUTINE_FLAGS=CO_GENERATOR |CO_COROUTINE |CO_ASYNC_GENERATOR\n\n\nclass BdbQuit(Exception):\n ''\n \n \nclass Bdb:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,skip=None):\n self.skip=set(skip)if skip else None\n self.breaks={}\n self.fncache={}\n self.frame_returning=None\n \n self._load_breaks()\n \n def canonic(self,filename):\n ''\n\n\n\n\n\n \n if filename ==\"<\"+filename[1:-1]+\">\":\n return filename\n canonic=self.fncache.get(filename)\n if not canonic:\n canonic=os.path.abspath(filename)\n canonic=os.path.normcase(canonic)\n self.fncache[filename]=canonic\n return canonic\n \n def reset(self):\n ''\n import linecache\n linecache.checkcache()\n self.botframe=None\n self._set_stopinfo(None,None)\n \n def trace_dispatch(self,frame,event,arg):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.quitting:\n return\n if event =='line':\n return self.dispatch_line(frame)\n if event =='call':\n return self.dispatch_call(frame,arg)\n if event =='return':\n return self.dispatch_return(frame,arg)\n if event =='exception':\n return self.dispatch_exception(frame,arg)\n if event =='c_call':\n return self.trace_dispatch\n if event =='c_exception':\n return self.trace_dispatch\n if event =='c_return':\n return self.trace_dispatch\n print('bdb.Bdb.dispatch: unknown debugging event:',repr(event))\n return self.trace_dispatch\n \n def dispatch_line(self,frame):\n ''\n\n\n\n\n \n if self.stop_here(frame)or self.break_here(frame):\n self.user_line(frame)\n if self.quitting:raise BdbQuit\n return self.trace_dispatch\n \n def dispatch_call(self,frame,arg):\n ''\n\n\n\n\n \n \n if self.botframe is None:\n \n self.botframe=frame.f_back\n return self.trace_dispatch\n if not(self.stop_here(frame)or self.break_anywhere(frame)):\n \n return\n \n if self.stopframe and frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS:\n return self.trace_dispatch\n self.user_call(frame,arg)\n if self.quitting:raise BdbQuit\n return self.trace_dispatch\n \n def dispatch_return(self,frame,arg):\n ''\n\n\n\n\n \n if self.stop_here(frame)or frame ==self.returnframe:\n \n if self.stopframe and frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS:\n return self.trace_dispatch\n try:\n self.frame_returning=frame\n self.user_return(frame,arg)\n finally:\n self.frame_returning=None\n if self.quitting:raise BdbQuit\n \n if self.stopframe is frame and self.stoplineno !=-1:\n self._set_stopinfo(None,None)\n return self.trace_dispatch\n \n def dispatch_exception(self,frame,arg):\n ''\n\n\n\n\n \n if self.stop_here(frame):\n \n \n \n if not(frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS\n and arg[0]is StopIteration and arg[2]is None):\n self.user_exception(frame,arg)\n if self.quitting:raise BdbQuit\n \n \n \n \n elif(self.stopframe and frame is not self.stopframe\n and self.stopframe.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS\n and arg[0]in(StopIteration,GeneratorExit)):\n self.user_exception(frame,arg)\n if self.quitting:raise BdbQuit\n \n return self.trace_dispatch\n \n \n \n \n \n def is_skipped_module(self,module_name):\n ''\n if module_name is None:\n return False\n for pattern in self.skip:\n if fnmatch.fnmatch(module_name,pattern):\n return True\n return False\n \n def 
stop_here(self,frame):\n ''\n \n \n if self.skip and\\\n self.is_skipped_module(frame.f_globals.get('__name__')):\n return False\n if frame is self.stopframe:\n if self.stoplineno ==-1:\n return False\n return frame.f_lineno >=self.stoplineno\n if not self.stopframe:\n return True\n return False\n \n def break_here(self,frame):\n ''\n\n\n\n \n filename=self.canonic(frame.f_code.co_filename)\n if filename not in self.breaks:\n return False\n lineno=frame.f_lineno\n if lineno not in self.breaks[filename]:\n \n \n lineno=frame.f_code.co_firstlineno\n if lineno not in self.breaks[filename]:\n return False\n \n \n (bp,flag)=effective(filename,lineno,frame)\n if bp:\n self.currentbp=bp.number\n if(flag and bp.temporary):\n self.do_clear(str(bp.number))\n return True\n else:\n return False\n \n def do_clear(self,arg):\n ''\n\n\n \n raise NotImplementedError(\"subclass of bdb must implement do_clear()\")\n \n def break_anywhere(self,frame):\n ''\n \n return self.canonic(frame.f_code.co_filename)in self.breaks\n \n \n \n \n def user_call(self,frame,argument_list):\n ''\n pass\n \n def user_line(self,frame):\n ''\n pass\n \n def user_return(self,frame,return_value):\n ''\n pass\n \n def user_exception(self,frame,exc_info):\n ''\n pass\n \n def _set_stopinfo(self,stopframe,returnframe,stoplineno=0):\n ''\n\n\n\n\n \n self.stopframe=stopframe\n self.returnframe=returnframe\n self.quitting=False\n \n \n self.stoplineno=stoplineno\n \n \n \n \n def set_until(self,frame,lineno=None):\n ''\n \n \n if lineno is None:\n lineno=frame.f_lineno+1\n self._set_stopinfo(frame,frame,lineno)\n \n def set_step(self):\n ''\n \n \n \n \n if self.frame_returning:\n caller_frame=self.frame_returning.f_back\n if caller_frame and not caller_frame.f_trace:\n caller_frame.f_trace=self.trace_dispatch\n self._set_stopinfo(None,None)\n \n def set_next(self,frame):\n ''\n self._set_stopinfo(frame,None)\n \n def set_return(self,frame):\n ''\n if frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS:\n self._set_stopinfo(frame,None,-1)\n else:\n self._set_stopinfo(frame.f_back,frame)\n \n def set_trace(self,frame=None):\n ''\n\n\n \n if frame is None:\n frame=sys._getframe().f_back\n self.reset()\n while frame:\n frame.f_trace=self.trace_dispatch\n self.botframe=frame\n frame=frame.f_back\n self.set_step()\n sys.settrace(self.trace_dispatch)\n \n def set_continue(self):\n ''\n\n\n \n \n self._set_stopinfo(self.botframe,None,-1)\n if not self.breaks:\n \n sys.settrace(None)\n frame=sys._getframe().f_back\n while frame and frame is not self.botframe:\n del frame.f_trace\n frame=frame.f_back\n \n def set_quit(self):\n ''\n\n\n \n self.stopframe=self.botframe\n self.returnframe=None\n self.quitting=True\n sys.settrace(None)\n \n \n \n \n \n \n \n \n def _add_to_breaks(self,filename,lineno):\n ''\n bp_linenos=self.breaks.setdefault(filename,[])\n if lineno not in bp_linenos:\n bp_linenos.append(lineno)\n \n def set_break(self,filename,lineno,temporary=False,cond=None,\n funcname=None):\n ''\n\n\n\n \n filename=self.canonic(filename)\n import linecache\n line=linecache.getline(filename,lineno)\n if not line:\n return 'Line %s:%d does not exist'%(filename,lineno)\n self._add_to_breaks(filename,lineno)\n bp=Breakpoint(filename,lineno,temporary,cond,funcname)\n return None\n \n def _load_breaks(self):\n ''\n\n\n\n\n\n \n for(filename,lineno)in Breakpoint.bplist.keys():\n self._add_to_breaks(filename,lineno)\n \n def _prune_breaks(self,filename,lineno):\n ''\n\n\n\n\n\n \n if(filename,lineno)not in Breakpoint.bplist:\n 
self.breaks[filename].remove(lineno)\n if not self.breaks[filename]:\n del self.breaks[filename]\n \n def clear_break(self,filename,lineno):\n ''\n\n\n \n filename=self.canonic(filename)\n if filename not in self.breaks:\n return 'There are no breakpoints in %s'%filename\n if lineno not in self.breaks[filename]:\n return 'There is no breakpoint at %s:%d'%(filename,lineno)\n \n \n for bp in Breakpoint.bplist[filename,lineno][:]:\n bp.deleteMe()\n self._prune_breaks(filename,lineno)\n return None\n \n def clear_bpbynumber(self,arg):\n ''\n\n\n \n try:\n bp=self.get_bpbynumber(arg)\n except ValueError as err:\n return str(err)\n bp.deleteMe()\n self._prune_breaks(bp.file,bp.line)\n return None\n \n def clear_all_file_breaks(self,filename):\n ''\n\n\n \n filename=self.canonic(filename)\n if filename not in self.breaks:\n return 'There are no breakpoints in %s'%filename\n for line in self.breaks[filename]:\n blist=Breakpoint.bplist[filename,line]\n for bp in blist:\n bp.deleteMe()\n del self.breaks[filename]\n return None\n \n def clear_all_breaks(self):\n ''\n\n\n \n if not self.breaks:\n return 'There are no breakpoints'\n for bp in Breakpoint.bpbynumber:\n if bp:\n bp.deleteMe()\n self.breaks={}\n return None\n \n def get_bpbynumber(self,arg):\n ''\n\n\n\n \n if not arg:\n raise ValueError('Breakpoint number expected')\n try:\n number=int(arg)\n except ValueError:\n raise ValueError('Non-numeric breakpoint number %s'%arg)from None\n try:\n bp=Breakpoint.bpbynumber[number]\n except IndexError:\n raise ValueError('Breakpoint number %d out of range'%number)from None\n if bp is None:\n raise ValueError('Breakpoint %d already deleted'%number)\n return bp\n \n def get_break(self,filename,lineno):\n ''\n filename=self.canonic(filename)\n return filename in self.breaks and\\\n lineno in self.breaks[filename]\n \n def get_breaks(self,filename,lineno):\n ''\n\n\n \n filename=self.canonic(filename)\n return filename in self.breaks and\\\n lineno in self.breaks[filename]and\\\n Breakpoint.bplist[filename,lineno]or[]\n \n def get_file_breaks(self,filename):\n ''\n\n\n \n filename=self.canonic(filename)\n if filename in self.breaks:\n return self.breaks[filename]\n else:\n return[]\n \n def get_all_breaks(self):\n ''\n return self.breaks\n \n \n \n \n def get_stack(self,f,t):\n ''\n\n\n\n \n stack=[]\n if t and t.tb_frame is f:\n t=t.tb_next\n while f is not None:\n stack.append((f,f.f_lineno))\n if f is self.botframe:\n break\n f=f.f_back\n stack.reverse()\n i=max(0,len(stack)-1)\n while t is not None:\n stack.append((t.tb_frame,t.tb_lineno))\n t=t.tb_next\n if f is None:\n i=max(0,len(stack)-1)\n return stack,i\n \n def format_stack_entry(self,frame_lineno,lprefix=': '):\n ''\n\n\n\n\n\n\n \n import linecache,reprlib\n frame,lineno=frame_lineno\n filename=self.canonic(frame.f_code.co_filename)\n s='%s(%r)'%(filename,lineno)\n if frame.f_code.co_name:\n s +=frame.f_code.co_name\n else:\n s +=\"\"\n s +='()'\n if '__return__'in frame.f_locals:\n rv=frame.f_locals['__return__']\n s +='->'\n s +=reprlib.repr(rv)\n if lineno is not None:\n line=linecache.getline(filename,lineno,frame.f_globals)\n if line:\n s +=lprefix+line.strip()\n else:\n s +=f'{lprefix}Warning: lineno is None'\n return s\n \n \n \n \n \n def run(self,cmd,globals=None,locals=None):\n ''\n\n\n \n if globals is None:\n import __main__\n globals=__main__.__dict__\n if locals is None:\n locals=globals\n self.reset()\n if isinstance(cmd,str):\n cmd=compile(cmd,\"\",\"exec\")\n sys.settrace(self.trace_dispatch)\n try:\n 
exec(cmd,globals,locals)\n except BdbQuit:\n pass\n finally:\n self.quitting=True\n sys.settrace(None)\n \n def runeval(self,expr,globals=None,locals=None):\n ''\n\n\n \n if globals is None:\n import __main__\n globals=__main__.__dict__\n if locals is None:\n locals=globals\n self.reset()\n sys.settrace(self.trace_dispatch)\n try:\n return eval(expr,globals,locals)\n except BdbQuit:\n pass\n finally:\n self.quitting=True\n sys.settrace(None)\n \n def runctx(self,cmd,globals,locals):\n ''\n \n self.run(cmd,globals,locals)\n \n \n \n def runcall(self,func,/,*args,**kwds):\n ''\n\n\n \n self.reset()\n sys.settrace(self.trace_dispatch)\n res=None\n try:\n res=func(*args,**kwds)\n except BdbQuit:\n pass\n finally:\n self.quitting=True\n sys.settrace(None)\n return res\n \n \ndef set_trace():\n ''\n Bdb().set_trace()\n \n \nclass Breakpoint:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n next=1\n bplist={}\n bpbynumber=[None]\n \n \n \n def __init__(self,file,line,temporary=False,cond=None,funcname=None):\n self.funcname=funcname\n \n self.func_first_executable_line=None\n self.file=file\n self.line=line\n self.temporary=temporary\n self.cond=cond\n self.enabled=True\n self.ignore=0\n self.hits=0\n self.number=Breakpoint.next\n Breakpoint.next +=1\n \n self.bpbynumber.append(self)\n if(file,line)in self.bplist:\n self.bplist[file,line].append(self)\n else:\n self.bplist[file,line]=[self]\n \n @staticmethod\n def clearBreakpoints():\n Breakpoint.next=1\n Breakpoint.bplist={}\n Breakpoint.bpbynumber=[None]\n \n def deleteMe(self):\n ''\n\n\n\n \n \n index=(self.file,self.line)\n self.bpbynumber[self.number]=None\n self.bplist[index].remove(self)\n if not self.bplist[index]:\n \n del self.bplist[index]\n \n def enable(self):\n ''\n self.enabled=True\n \n def disable(self):\n ''\n self.enabled=False\n \n def bpprint(self,out=None):\n ''\n\n\n\n \n if out is None:\n out=sys.stdout\n print(self.bpformat(),file=out)\n \n def bpformat(self):\n ''\n\n\n\n\n\n \n if self.temporary:\n disp='del '\n else:\n disp='keep '\n if self.enabled:\n disp=disp+'yes '\n else:\n disp=disp+'no '\n ret='%-4dbreakpoint %s at %s:%d'%(self.number,disp,\n self.file,self.line)\n if self.cond:\n ret +='\\n\\tstop only if %s'%(self.cond,)\n if self.ignore:\n ret +='\\n\\tignore next %d hits'%(self.ignore,)\n if self.hits:\n if self.hits >1:\n ss='s'\n else:\n ss=''\n ret +='\\n\\tbreakpoint already hit %d time%s'%(self.hits,ss)\n return ret\n \n def __str__(self):\n ''\n return 'breakpoint %s at %s:%s'%(self.number,self.file,self.line)\n \n \n \n \ndef checkfuncname(b,frame):\n ''\n\n\n\n\n\n \n if not b.funcname:\n \n if b.line !=frame.f_lineno:\n \n \n return False\n return True\n \n \n if frame.f_code.co_name !=b.funcname:\n \n return False\n \n \n if not b.func_first_executable_line:\n \n b.func_first_executable_line=frame.f_lineno\n \n if b.func_first_executable_line !=frame.f_lineno:\n \n return False\n return True\n \n \ndef effective(file,line,frame):\n ''\n\n\n\n\n\n\n\n\n\n\n \n possibles=Breakpoint.bplist[file,line]\n for b in possibles:\n if not b.enabled:\n continue\n if not checkfuncname(b,frame):\n continue\n \n b.hits +=1\n if not b.cond:\n \n if b.ignore >0:\n b.ignore -=1\n continue\n else:\n \n return(b,True)\n else:\n \n \n \n try:\n val=eval(b.cond,frame.f_globals,frame.f_locals)\n if val:\n if b.ignore >0:\n b.ignore -=1\n \n else:\n return(b,True)\n \n \n except:\n \n \n \n return(b,False)\n return(None,None)\n \n \n \n \nclass Tdb(Bdb):\n def user_call(self,frame,args):\n name=frame.f_code.co_name\n 
if not name:name='???'\n print('+++ call',name,args)\n def user_line(self,frame):\n import linecache\n name=frame.f_code.co_name\n if not name:name='???'\n fn=self.canonic(frame.f_code.co_filename)\n line=linecache.getline(fn,frame.f_lineno,frame.f_globals)\n print('+++',fn,frame.f_lineno,name,':',line.strip())\n def user_return(self,frame,retval):\n print('+++ return',retval)\n def user_exception(self,frame,exc_stuff):\n print('+++ exception',exc_stuff)\n self.set_continue()\n \ndef foo(n):\n print('foo(',n,')')\n x=bar(n *10)\n print('bar returned',x)\n \ndef bar(a):\n print('bar(',a,')')\n return a /2\n \ndef test():\n t=Tdb()\n t.run('import bdb; bdb.foo(10)')\n", ["__main__", "fnmatch", "inspect", "linecache", "os", "reprlib", "sys"]], "cmd": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport string,sys\n\n__all__=[\"Cmd\"]\n\nPROMPT='(Cmd) '\nIDENTCHARS=string.ascii_letters+string.digits+'_'\n\nclass Cmd:\n ''\n\n\n\n\n\n\n\n\n\n \n prompt=PROMPT\n identchars=IDENTCHARS\n ruler='='\n lastcmd=''\n intro=None\n doc_leader=\"\"\n doc_header=\"Documented commands (type help ):\"\n misc_header=\"Miscellaneous help topics:\"\n undoc_header=\"Undocumented commands:\"\n nohelp=\"*** No help on %s\"\n use_rawinput=1\n \n def __init__(self,completekey='tab',stdin=None,stdout=None):\n ''\n\n\n\n\n\n\n\n\n \n if stdin is not None:\n self.stdin=stdin\n else:\n self.stdin=sys.stdin\n if stdout is not None:\n self.stdout=stdout\n else:\n self.stdout=sys.stdout\n self.cmdqueue=[]\n self.completekey=completekey\n \n def cmdloop(self,intro=None):\n ''\n\n\n\n \n \n self.preloop()\n if self.use_rawinput and self.completekey:\n try:\n import readline\n self.old_completer=readline.get_completer()\n readline.set_completer(self.complete)\n readline.parse_and_bind(self.completekey+\": complete\")\n except ImportError:\n pass\n try:\n if intro is not None:\n self.intro=intro\n if self.intro:\n self.stdout.write(str(self.intro)+\"\\n\")\n stop=None\n while not stop:\n if self.cmdqueue:\n line=self.cmdqueue.pop(0)\n else:\n if self.use_rawinput:\n try:\n line=input(self.prompt)\n except EOFError:\n line='EOF'\n else:\n self.stdout.write(self.prompt)\n self.stdout.flush()\n line=self.stdin.readline()\n if not len(line):\n line='EOF'\n else:\n line=line.rstrip('\\r\\n')\n line=self.precmd(line)\n stop=self.onecmd(line)\n stop=self.postcmd(stop,line)\n self.postloop()\n finally:\n if self.use_rawinput and self.completekey:\n try:\n import readline\n readline.set_completer(self.old_completer)\n except ImportError:\n pass\n \n \n def precmd(self,line):\n ''\n\n\n \n return line\n \n def postcmd(self,stop,line):\n ''\n return stop\n \n def preloop(self):\n ''\n pass\n \n def postloop(self):\n ''\n\n\n \n pass\n \n def parseline(self,line):\n ''\n\n\n \n line=line.strip()\n if not line:\n return None,None,line\n elif line[0]=='?':\n line='help '+line[1:]\n elif line[0]=='!':\n if hasattr(self,'do_shell'):\n line='shell '+line[1:]\n else:\n return None,None,line\n i,n=0,len(line)\n while i 0:\n cmd,args,foo=self.parseline(line)\n if cmd =='':\n compfunc=self.completedefault\n else:\n try:\n compfunc=getattr(self,'complete_'+cmd)\n except AttributeError:\n compfunc=self.completedefault\n else:\n compfunc=self.completenames\n self.completion_matches=compfunc(text,line,begidx,endidx)\n try:\n return self.completion_matches[state]\n except IndexError:\n return None\n \n def get_names(self):\n \n \n return dir(self.__class__)\n \n def complete_help(self,*args):\n 
commands=set(self.completenames(*args))\n topics=set(a[5:]for a in self.get_names()\n if a.startswith('help_'+args[0]))\n return list(commands |topics)\n \n def do_help(self,arg):\n ''\n if arg:\n \n try:\n func=getattr(self,'help_'+arg)\n except AttributeError:\n try:\n doc=getattr(self,'do_'+arg).__doc__\n if doc:\n self.stdout.write(\"%s\\n\"%str(doc))\n return\n except AttributeError:\n pass\n self.stdout.write(\"%s\\n\"%str(self.nohelp %(arg,)))\n return\n func()\n else:\n names=self.get_names()\n cmds_doc=[]\n cmds_undoc=[]\n topics=set()\n for name in names:\n if name[:5]=='help_':\n topics.add(name[5:])\n names.sort()\n \n prevname=''\n for name in names:\n if name[:3]=='do_':\n if name ==prevname:\n continue\n prevname=name\n cmd=name[3:]\n if cmd in topics:\n cmds_doc.append(cmd)\n topics.remove(cmd)\n elif getattr(self,name).__doc__:\n cmds_doc.append(cmd)\n else:\n cmds_undoc.append(cmd)\n self.stdout.write(\"%s\\n\"%str(self.doc_leader))\n self.print_topics(self.doc_header,cmds_doc,15,80)\n self.print_topics(self.misc_header,sorted(topics),15,80)\n self.print_topics(self.undoc_header,cmds_undoc,15,80)\n \n def print_topics(self,header,cmds,cmdlen,maxcol):\n if cmds:\n self.stdout.write(\"%s\\n\"%str(header))\n if self.ruler:\n self.stdout.write(\"%s\\n\"%str(self.ruler *len(header)))\n self.columnize(cmds,maxcol -1)\n self.stdout.write(\"\\n\")\n \n def columnize(self,list,displaywidth=80):\n ''\n\n\n\n \n if not list:\n self.stdout.write(\"\\n\")\n return\n \n nonstrings=[i for i in range(len(list))\n if not isinstance(list[i],str)]\n if nonstrings:\n raise TypeError(\"list[i] not a string for i in %s\"\n %\", \".join(map(str,nonstrings)))\n size=len(list)\n if size ==1:\n self.stdout.write('%s\\n'%str(list[0]))\n return\n \n for nrows in range(1,len(list)):\n ncols=(size+nrows -1)//nrows\n colwidths=[]\n totwidth=-2\n for col in range(ncols):\n colwidth=0\n for row in range(nrows):\n i=row+nrows *col\n if i >=size:\n break\n x=list[i]\n colwidth=max(colwidth,len(x))\n colwidths.append(colwidth)\n totwidth +=colwidth+2\n if totwidth >displaywidth:\n break\n if totwidth <=displaywidth:\n break\n else:\n nrows=len(list)\n ncols=1\n colwidths=[0]\n for row in range(nrows):\n texts=[]\n for col in range(ncols):\n i=row+nrows *col\n if i >=size:\n x=\"\"\n else:\n x=list[i]\n texts.append(x)\n while texts and not texts[-1]:\n del texts[-1]\n for col in range(len(texts)):\n texts[col]=texts[col].ljust(colwidths[col])\n self.stdout.write(\"%s\\n\"%str(\" \".join(texts)))\n", ["readline", "string", "sys"]], "_socket": [".py", 
"''\n\n\n\n\nAF_APPLETALK=16\n\nAF_DECnet=12\n\nAF_INET=2\n\nAF_INET6=23\n\nAF_IPX=6\n\nAF_IRDA=26\n\nAF_SNA=11\n\nAF_UNSPEC=0\n\nAI_ADDRCONFIG=1024\n\nAI_ALL=256\n\nAI_CANONNAME=2\n\nAI_NUMERICHOST=4\n\nAI_NUMERICSERV=8\n\nAI_PASSIVE=1\n\nAI_V4MAPPED=2048\n\nCAPI=''\n\nEAI_AGAIN=11002\n\nEAI_BADFLAGS=10022\n\nEAI_FAIL=11003\n\nEAI_FAMILY=10047\n\nEAI_MEMORY=8\n\nEAI_NODATA=11001\n\nEAI_NONAME=11001\n\nEAI_SERVICE=10109\n\nEAI_SOCKTYPE=10044\n\nINADDR_ALLHOSTS_GROUP=-536870911\n\nINADDR_ANY=0\n\nINADDR_BROADCAST=-1\n\nINADDR_LOOPBACK=2130706433\n\nINADDR_MAX_LOCAL_GROUP=-536870657\n\nINADDR_NONE=-1\n\nINADDR_UNSPEC_GROUP=-536870912\n\nIPPORT_RESERVED=1024\n\nIPPORT_USERRESERVED=5000\n\nIPPROTO_ICMP=1\n\nIPPROTO_IP=0\n\nIPPROTO_RAW=255\n\nIPPROTO_TCP=6\n\nIPPROTO_UDP=17\n\nIPV6_CHECKSUM=26\n\nIPV6_DONTFRAG=14\n\nIPV6_HOPLIMIT=21\n\nIPV6_HOPOPTS=1\n\nIPV6_JOIN_GROUP=12\n\nIPV6_LEAVE_GROUP=13\n\nIPV6_MULTICAST_HOPS=10\n\nIPV6_MULTICAST_IF=9\n\nIPV6_MULTICAST_LOOP=11\n\nIPV6_PKTINFO=19\n\nIPV6_RECVRTHDR=38\n\nIPV6_RECVTCLASS=40\n\nIPV6_RTHDR=32\n\nIPV6_TCLASS=39\n\nIPV6_UNICAST_HOPS=4\n\nIPV6_V6ONLY=27\n\nIP_ADD_MEMBERSHIP=12\n\nIP_DROP_MEMBERSHIP=13\n\nIP_HDRINCL=2\n\nIP_MULTICAST_IF=9\n\nIP_MULTICAST_LOOP=11\n\nIP_MULTICAST_TTL=10\n\nIP_OPTIONS=1\n\nIP_RECVDSTADDR=25\n\nIP_TOS=3\n\nIP_TTL=4\n\nMSG_BCAST=1024\n\nMSG_CTRUNC=512\n\nMSG_DONTROUTE=4\n\nMSG_MCAST=2048\n\nMSG_OOB=1\n\nMSG_PEEK=2\n\nMSG_TRUNC=256\n\nNI_DGRAM=16\n\nNI_MAXHOST=1025\n\nNI_MAXSERV=32\n\nNI_NAMEREQD=4\n\nNI_NOFQDN=1\n\nNI_NUMERICHOST=2\n\nNI_NUMERICSERV=8\n\nRCVALL_MAX=3\n\nRCVALL_OFF=0\n\nRCVALL_ON=1\n\nRCVALL_SOCKETLEVELONLY=2\n\nSHUT_RD=0\n\nSHUT_RDWR=2\n\nSHUT_WR=1\n\nSIO_KEEPALIVE_VALS=2550136836\n\nSIO_RCVALL=2550136833\n\nSOCK_DGRAM=2\n\nSOCK_RAW=3\n\nSOCK_RDM=4\n\nSOCK_SEQPACKET=5\n\nSOCK_STREAM=1\n\nSOL_IP=0\n\nSOL_SOCKET=65535\n\nSOL_TCP=6\n\nSOL_UDP=17\n\nSOMAXCONN=2147483647\n\nSO_ACCEPTCONN=2\n\nSO_BROADCAST=32\n\nSO_DEBUG=1\n\nSO_DONTROUTE=16\n\nSO_ERROR=4103\n\nSO_EXCLUSIVEADDRUSE=-5\n\nSO_KEEPALIVE=8\n\nSO_LINGER=128\n\nSO_OOBINLINE=256\n\nSO_RCVBUF=4098\n\nSO_RCVLOWAT=4100\n\nSO_RCVTIMEO=4102\n\nSO_REUSEADDR=4\n\nSO_SNDBUF=4097\n\nSO_SNDLOWAT=4099\n\nSO_SNDTIMEO=4101\n\nSO_TYPE=4104\n\nSO_USELOOPBACK=64\n\nclass SocketType:\n pass\n \nTCP_MAXSEG=4\n\nTCP_NODELAY=1\n\n__loader__='<_frozen_importlib.ExtensionFileLoader object at 0x00CA2D90>'\n\ndef dup(*args,**kw):\n ''\n\n \n pass\n \nclass error:\n pass\n \nclass gaierror:\n pass\n \ndef getaddrinfo(*args,**kw):\n ''\n\n \n pass\n \ndef getdefaulttimeout(*args,**kw):\n ''\n\n\n \n pass\n \ndef gethostbyaddr(*args,**kw):\n ''\n\n \n pass\n \ndef gethostbyname(*args,**kw):\n ''\n \n pass\n \ndef gethostbyname_ex(*args,**kw):\n ''\n\n \n pass\n \ndef gethostname(*args,**kw):\n ''\n \n import browser\n return browser.window.navigator.userAgent\n \ndef getnameinfo(*args,**kw):\n ''\n \n pass\n \ndef getprotobyname(*args,**kw):\n ''\n \n pass\n \ndef getservbyname(*args,**kw):\n ''\n\n\n \n pass\n \ndef getservbyport(*args,**kw):\n ''\n\n\n \n pass\n \nhas_ipv6=True\n\nclass herror:\n pass\n \ndef htonl(*args,**kw):\n ''\n \n pass\n \ndef htons(*args,**kw):\n ''\n \n pass\n \ndef inet_aton(*args,**kw):\n ''\n\n \n pass\n \ndef inet_ntoa(*args,**kw):\n ''\n \n pass\n \ndef ntohl(*args,**kw):\n ''\n \n pass\n \ndef ntohs(*args,**kw):\n ''\n \n pass\n \ndef setdefaulttimeout(*args,**kw):\n ''\n\n\n \n pass\n \nclass socket:\n def __init__(self,*args,**kw):\n pass\n def bind(self,*args,**kw):\n pass\n def close(self):\n pass\n \nclass timeout:\n pass\n", 
["browser"]], "_codecs_jp": [".py", "from encoding_cp932 import encoding_table,decoding_table\n\n\n\nclass Codec:\n\n def encode(self,input,errors='strict'):\n b=[]\n for pos,car in enumerate(input):\n cp=ord(car)\n try:\n code=encoding_table[cp]\n high=((code >>8)&0xff)\n low=code&0xff\n if high:\n b.append(high)\n b.append(low)\n except IndexError:\n raise UnicodeEncodeError(pos)\n return[bytes(b),len(input)]\n \n def decode(self,input,errors='strict'):\n i=0\n string=''\n while i 1:\n print(\"checking %r ...\"%file)\n \n try:\n process_tokens(tokenize.generate_tokens(f.readline))\n \n except tokenize.TokenError as msg:\n errprint(\"%r: Token Error: %s\"%(file,msg))\n return\n \n except SyntaxError as msg:\n errprint(\"%r: Token Error: %s\"%(file,msg))\n return\n \n except IndentationError as msg:\n errprint(\"%r: Indentation Error: %s\"%(file,msg))\n return\n \n except NannyNag as nag:\n badline=nag.get_lineno()\n line=nag.get_line()\n if verbose:\n print(\"%r: *** Line %d: trouble in tab city! ***\"%(file,badline))\n print(\"offending line: %r\"%(line,))\n print(nag.get_msg())\n else:\n if ' 'in file:file='\"'+file+'\"'\n if filename_only:print(file)\n else:print(file,badline,repr(line))\n return\n \n finally:\n f.close()\n \n if verbose:\n print(\"%r: Clean bill of health.\"%(file,))\n \nclass Whitespace:\n\n S,T=' \\t'\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def __init__(self,ws):\n self.raw=ws\n S,T=Whitespace.S,Whitespace.T\n count=[]\n b=n=nt=0\n for ch in self.raw:\n if ch ==S:\n n=n+1\n b=b+1\n elif ch ==T:\n n=n+1\n nt=nt+1\n if b >=len(count):\n count=count+[0]*(b -len(count)+1)\n count[b]=count[b]+1\n b=0\n else:\n break\n self.n=n\n self.nt=nt\n self.norm=tuple(count),b\n self.is_simple=len(count)<=1\n \n \n \n def longest_run_of_spaces(self):\n count,trailing=self.norm\n return max(len(count)-1,trailing)\n \n def indent_level(self,tabsize):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n count,trailing=self.norm\n il=0\n for i in range(tabsize,len(count)):\n il=il+i //tabsize *count[i]\n return trailing+tabsize *(il+self.nt)\n \n \n \n def equal(self,other):\n return self.norm ==other.norm\n \n \n \n \n \n def not_equal_witness(self,other):\n n=max(self.longest_run_of_spaces(),\n other.longest_run_of_spaces())+1\n a=[]\n for ts in range(1,n+1):\n if self.indent_level(ts)!=other.indent_level(ts):\n a.append((ts,\n self.indent_level(ts),\n other.indent_level(ts)))\n return a\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def less(self,other):\n if self.n >=other.n:\n return False\n if self.is_simple and other.is_simple:\n return self.nt <=other.nt\n n=max(self.longest_run_of_spaces(),\n other.longest_run_of_spaces())+1\n \n for ts in range(2,n+1):\n if self.indent_level(ts)>=other.indent_level(ts):\n return False\n return True\n \n \n \n \n \n def not_less_witness(self,other):\n n=max(self.longest_run_of_spaces(),\n other.longest_run_of_spaces())+1\n a=[]\n for ts in range(1,n+1):\n if self.indent_level(ts)>=other.indent_level(ts):\n a.append((ts,\n self.indent_level(ts),\n other.indent_level(ts)))\n return a\n \ndef format_witnesses(w):\n firsts=(str(tup[0])for tup in w)\n prefix=\"at tab size\"\n if len(w)>1:\n prefix=prefix+\"s\"\n return prefix+\" \"+', '.join(firsts)\n \ndef process_tokens(tokens):\n try:\n _process_tokens(tokens)\n except TabError as e:\n raise NannyNag(e.lineno,e.msg,e.text)\n \ndef _process_tokens(tokens):\n INDENT=tokenize.INDENT\n DEDENT=tokenize.DEDENT\n NEWLINE=tokenize.NEWLINE\n JUNK=tokenize.COMMENT,tokenize.NL\n 
indents=[Whitespace(\"\")]\n check_equal=0\n \n for(type,token,start,end,line)in tokens:\n if type ==NEWLINE:\n \n \n \n \n \n check_equal=1\n \n elif type ==INDENT:\n check_equal=0\n thisguy=Whitespace(token)\n if not indents[-1].less(thisguy):\n witness=indents[-1].not_less_witness(thisguy)\n msg=\"indent not greater e.g. \"+format_witnesses(witness)\n raise NannyNag(start[0],msg,line)\n indents.append(thisguy)\n \n elif type ==DEDENT:\n \n \n \n \n \n \n \n \n \n check_equal=1\n \n del indents[-1]\n \n elif check_equal and type not in JUNK:\n \n \n \n \n \n \n check_equal=0\n thisguy=Whitespace(line)\n if not indents[-1].equal(thisguy):\n witness=indents[-1].not_equal_witness(thisguy)\n msg=\"indent not equal e.g. \"+format_witnesses(witness)\n raise NannyNag(start[0],msg,line)\n \n \nif __name__ =='__main__':\n main()\n", ["getopt", "os", "sys", "tokenize"]], "_py_abc": [".py", "from _weakrefset import WeakSet\n\n\ndef get_cache_token():\n ''\n\n\n\n\n \n return ABCMeta._abc_invalidation_counter\n \n \nclass ABCMeta(type):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n _abc_invalidation_counter=0\n \n def __new__(mcls,name,bases,namespace,/,**kwargs):\n cls=super().__new__(mcls,name,bases,namespace,**kwargs)\n \n abstracts={name\n for name,value in namespace.items()\n if getattr(value,\"__isabstractmethod__\",False)}\n for base in bases:\n for name in getattr(base,\"__abstractmethods__\",set()):\n value=getattr(cls,name,None)\n if getattr(value,\"__isabstractmethod__\",False):\n abstracts.add(name)\n cls.__abstractmethods__=frozenset(abstracts)\n \n cls._abc_registry=WeakSet()\n cls._abc_cache=WeakSet()\n cls._abc_negative_cache=WeakSet()\n cls._abc_negative_cache_version=ABCMeta._abc_invalidation_counter\n return cls\n \n def register(cls,subclass):\n ''\n\n\n \n if not isinstance(subclass,type):\n raise TypeError(\"Can only register classes\")\n if issubclass(subclass,cls):\n return subclass\n \n \n if issubclass(cls,subclass):\n \n raise RuntimeError(\"Refusing to create an inheritance cycle\")\n cls._abc_registry.add(subclass)\n ABCMeta._abc_invalidation_counter +=1\n return subclass\n \n def _dump_registry(cls,file=None):\n ''\n print(f\"Class: {cls.__module__}.{cls.__qualname__}\",file=file)\n print(f\"Inv. 
counter: {get_cache_token()}\",file=file)\n for name in cls.__dict__:\n if name.startswith(\"_abc_\"):\n value=getattr(cls,name)\n if isinstance(value,WeakSet):\n value=set(value)\n print(f\"{name}: {value !r}\",file=file)\n \n def _abc_registry_clear(cls):\n ''\n cls._abc_registry.clear()\n \n def _abc_caches_clear(cls):\n ''\n cls._abc_cache.clear()\n cls._abc_negative_cache.clear()\n \n def __instancecheck__(cls,instance):\n ''\n \n subclass=instance.__class__\n if subclass in cls._abc_cache:\n return True\n subtype=type(instance)\n if subtype is subclass:\n if(cls._abc_negative_cache_version ==\n ABCMeta._abc_invalidation_counter and\n subclass in cls._abc_negative_cache):\n return False\n \n return cls.__subclasscheck__(subclass)\n return any(cls.__subclasscheck__(c)for c in(subclass,subtype))\n \n def __subclasscheck__(cls,subclass):\n ''\n if not isinstance(subclass,type):\n raise TypeError('issubclass() arg 1 must be a class')\n \n if subclass in cls._abc_cache:\n return True\n \n if cls._abc_negative_cache_version '\n \n \n \n \n \n \n \n_zip_searchorder=(\n(path_sep+'__init__.pyc',True,True),\n(path_sep+'__init__.py',False,True),\n('.pyc',True,False),\n('.py',False,False),\n)\n\n\n\ndef _get_module_path(self,fullname):\n return self.prefix+fullname.rpartition('.')[2]\n \n \ndef _is_dir(self,path):\n\n\n\n dirpath=path+path_sep\n \n return dirpath in self._files\n \n \ndef _get_module_info(self,fullname):\n path=_get_module_path(self,fullname)\n for suffix,isbytecode,ispackage in _zip_searchorder:\n fullpath=path+suffix\n if fullpath in self._files:\n return ispackage\n return None\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef _read_directory(archive):\n try:\n fp=_io.open_code(archive)\n except OSError:\n raise ZipImportError(f\"can't open Zip file: {archive !r}\",path=archive)\n \n with fp:\n \n \n \n start_offset=fp.tell()\n try:\n try:\n fp.seek(-END_CENTRAL_DIR_SIZE,2)\n header_position=fp.tell()\n buffer=fp.read(END_CENTRAL_DIR_SIZE)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n if len(buffer)!=END_CENTRAL_DIR_SIZE:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n if buffer[:4]!=STRING_END_ARCHIVE:\n \n \n try:\n fp.seek(0,2)\n file_size=fp.tell()\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",\n path=archive)\n max_comment_start=max(file_size -MAX_COMMENT_LEN -\n END_CENTRAL_DIR_SIZE,0)\n try:\n fp.seek(max_comment_start)\n data=fp.read()\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",\n path=archive)\n pos=data.rfind(STRING_END_ARCHIVE)\n if pos <0:\n raise ZipImportError(f'not a Zip file: {archive !r}',\n path=archive)\n buffer=data[pos:pos+END_CENTRAL_DIR_SIZE]\n if len(buffer)!=END_CENTRAL_DIR_SIZE:\n raise ZipImportError(f\"corrupt Zip file: {archive !r}\",\n path=archive)\n header_position=file_size -len(data)+pos\n \n header_size=_unpack_uint32(buffer[12:16])\n header_offset=_unpack_uint32(buffer[16:20])\n if header_position header_offset:\n raise ZipImportError(f'bad local header offset: {archive !r}',path=archive)\n file_offset +=arc_offset\n \n try:\n name=fp.read(name_size)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n if len(name)!=name_size:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n \n \n \n try:\n if len(fp.read(header_size -name_size))!=header_size -name_size:\n raise ZipImportError(f\"can't read Zip 
file: {archive !r}\",path=archive)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n \n if flags&0x800:\n \n name=name.decode()\n else:\n \n try:\n name=name.decode('ascii')\n except UnicodeDecodeError:\n name=name.decode('latin1').translate(cp437_table)\n \n name=name.replace('/',path_sep)\n path=_bootstrap_external._path_join(archive,name)\n t=(path,compress,data_size,file_size,file_offset,time,date,crc)\n files[name]=t\n count +=1\n finally:\n fp.seek(start_offset)\n _bootstrap._verbose_message('zipimport: found {} names in {!r}',count,archive)\n return files\n \n \n \n \n \n \n \ncp437_table=(\n\n'\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f'\n'\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f'\n' !\"#$%&\\'()*+,-./'\n'0123456789:;<=>?'\n'@ABCDEFGHIJKLMNO'\n'PQRSTUVWXYZ[\\\\]^_'\n'`abcdefghijklmno'\n'pqrstuvwxyz{|}~\\x7f'\n\n'\\xc7\\xfc\\xe9\\xe2\\xe4\\xe0\\xe5\\xe7'\n'\\xea\\xeb\\xe8\\xef\\xee\\xec\\xc4\\xc5'\n'\\xc9\\xe6\\xc6\\xf4\\xf6\\xf2\\xfb\\xf9'\n'\\xff\\xd6\\xdc\\xa2\\xa3\\xa5\\u20a7\\u0192'\n'\\xe1\\xed\\xf3\\xfa\\xf1\\xd1\\xaa\\xba'\n'\\xbf\\u2310\\xac\\xbd\\xbc\\xa1\\xab\\xbb'\n'\\u2591\\u2592\\u2593\\u2502\\u2524\\u2561\\u2562\\u2556'\n'\\u2555\\u2563\\u2551\\u2557\\u255d\\u255c\\u255b\\u2510'\n'\\u2514\\u2534\\u252c\\u251c\\u2500\\u253c\\u255e\\u255f'\n'\\u255a\\u2554\\u2569\\u2566\\u2560\\u2550\\u256c\\u2567'\n'\\u2568\\u2564\\u2565\\u2559\\u2558\\u2552\\u2553\\u256b'\n'\\u256a\\u2518\\u250c\\u2588\\u2584\\u258c\\u2590\\u2580'\n'\\u03b1\\xdf\\u0393\\u03c0\\u03a3\\u03c3\\xb5\\u03c4'\n'\\u03a6\\u0398\\u03a9\\u03b4\\u221e\\u03c6\\u03b5\\u2229'\n'\\u2261\\xb1\\u2265\\u2264\\u2320\\u2321\\xf7\\u2248'\n'\\xb0\\u2219\\xb7\\u221a\\u207f\\xb2\\u25a0\\xa0'\n)\n\n_importing_zlib=False\n\n\n\n\ndef _get_decompress_func():\n global _importing_zlib\n if _importing_zlib:\n \n \n _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')\n raise ZipImportError(\"can't decompress data; zlib not available\")\n \n _importing_zlib=True\n try:\n from zlib import decompress\n except Exception:\n _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')\n raise ZipImportError(\"can't decompress data; zlib not available\")\n finally:\n _importing_zlib=False\n \n _bootstrap._verbose_message('zipimport: zlib available')\n return decompress\n \n \ndef _get_data(archive,toc_entry):\n datapath,compress,data_size,file_size,file_offset,time,date,crc=toc_entry\n if data_size <0:\n raise ZipImportError('negative data size')\n \n with _io.open_code(archive)as fp:\n \n try:\n fp.seek(file_offset)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n buffer=fp.read(30)\n if len(buffer)!=30:\n raise EOFError('EOF read where not expected')\n \n if buffer[:4]!=b'PK\\x03\\x04':\n \n raise ZipImportError(f'bad local file header: {archive !r}',path=archive)\n \n name_size=_unpack_uint16(buffer[26:28])\n extra_size=_unpack_uint16(buffer[28:30])\n header_size=30+name_size+extra_size\n file_offset +=header_size\n try:\n fp.seek(file_offset)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive !r}\",path=archive)\n raw_data=fp.read(data_size)\n if len(raw_data)!=data_size:\n raise OSError(\"zipimport: can't read data\")\n \n if compress ==0:\n \n return raw_data\n \n \n try:\n decompress=_get_decompress_func()\n except Exception:\n raise ZipImportError(\"can't decompress data; zlib not available\")\n return decompress(raw_data,-15)\n \n \n \n \n 
\ndef _eq_mtime(t1,t2):\n\n return abs(t1 -t2)<=1\n \n \n \n \n \ndef _unmarshal_code(self,pathname,fullpath,fullname,data):\n exc_details={\n 'name':fullname,\n 'path':fullpath,\n }\n \n flags=_bootstrap_external._classify_pyc(data,fullname,exc_details)\n \n hash_based=flags&0b1 !=0\n if hash_based:\n check_source=flags&0b10 !=0\n if(_imp.check_hash_based_pycs !='never'and\n (check_source or _imp.check_hash_based_pycs =='always')):\n source_bytes=_get_pyc_source(self,fullpath)\n if source_bytes is not None:\n source_hash=_imp.source_hash(\n _bootstrap_external._RAW_MAGIC_NUMBER,\n source_bytes,\n )\n \n _bootstrap_external._validate_hash_pyc(\n data,source_hash,fullname,exc_details)\n else:\n source_mtime,source_size=\\\n _get_mtime_and_size_of_source(self,fullpath)\n \n if source_mtime:\n \n \n if(not _eq_mtime(_unpack_uint32(data[8:12]),source_mtime)or\n _unpack_uint32(data[12:16])!=source_size):\n _bootstrap._verbose_message(\n f'bytecode is stale for {fullname !r}')\n return None\n \n code=marshal.loads(data[16:])\n if not isinstance(code,_code_type):\n raise TypeError(f'compiled module {pathname !r} is not a code object')\n return code\n \n_code_type=type(_unmarshal_code.__code__)\n\n\n\n\ndef _normalize_line_endings(source):\n source=source.replace(b'\\r\\n',b'\\n')\n source=source.replace(b'\\r',b'\\n')\n return source\n \n \n \ndef _compile_source(pathname,source):\n source=_normalize_line_endings(source)\n return compile(source,pathname,'exec',dont_inherit=True)\n \n \n \ndef _parse_dostime(d,t):\n return time.mktime((\n (d >>9)+1980,\n (d >>5)&0xF,\n d&0x1F,\n t >>11,\n (t >>5)&0x3F,\n (t&0x1F)*2,\n -1,-1,-1))\n \n \n \n \ndef _get_mtime_and_size_of_source(self,path):\n try:\n \n assert path[-1:]in('c','o')\n path=path[:-1]\n toc_entry=self._files[path]\n \n \n time=toc_entry[5]\n date=toc_entry[6]\n uncompressed_size=toc_entry[3]\n return _parse_dostime(date,time),uncompressed_size\n except(KeyError,IndexError,TypeError):\n return 0,0\n \n \n \n \n \ndef _get_pyc_source(self,path):\n\n assert path[-1:]in('c','o')\n path=path[:-1]\n \n try:\n toc_entry=self._files[path]\n except KeyError:\n return None\n else:\n return _get_data(self.archive,toc_entry)\n \n \n \n \ndef _get_module_code(self,fullname):\n path=_get_module_path(self,fullname)\n import_error=None\n for suffix,isbytecode,ispackage in _zip_searchorder:\n fullpath=path+suffix\n _bootstrap._verbose_message('trying {}{}{}',self.archive,path_sep,fullpath,verbosity=2)\n try:\n toc_entry=self._files[fullpath]\n except KeyError:\n pass\n else:\n modpath=toc_entry[0]\n data=_get_data(self.archive,toc_entry)\n code=None\n if isbytecode:\n try:\n code=_unmarshal_code(self,modpath,fullpath,fullname,data)\n except ImportError as exc:\n import_error=exc\n else:\n code=_compile_source(modpath,data)\n if code is None:\n \n \n continue\n modpath=toc_entry[0]\n return code,ispackage,modpath\n else:\n if import_error:\n msg=f\"module load failed: {import_error}\"\n raise ZipImportError(msg,name=fullname)from import_error\n else:\n raise ZipImportError(f\"can't find module {fullname !r}\",name=fullname)\n", ["_frozen_importlib", "_frozen_importlib_external", "_imp", "_io", "_warnings", "importlib.readers", "marshal", "sys", "time", "zlib"]], "token": [".py", 
"''\n\n\n__all__=['tok_name','ISTERMINAL','ISNONTERMINAL','ISEOF']\n\nENDMARKER=0\nNAME=1\nNUMBER=2\nSTRING=3\nNEWLINE=4\nINDENT=5\nDEDENT=6\nLPAR=7\nRPAR=8\nLSQB=9\nRSQB=10\nCOLON=11\nCOMMA=12\nSEMI=13\nPLUS=14\nMINUS=15\nSTAR=16\nSLASH=17\nVBAR=18\nAMPER=19\nLESS=20\nGREATER=21\nEQUAL=22\nDOT=23\nPERCENT=24\nLBRACE=25\nRBRACE=26\nEQEQUAL=27\nNOTEQUAL=28\nLESSEQUAL=29\nGREATEREQUAL=30\nTILDE=31\nCIRCUMFLEX=32\nLEFTSHIFT=33\nRIGHTSHIFT=34\nDOUBLESTAR=35\nPLUSEQUAL=36\nMINEQUAL=37\nSTAREQUAL=38\nSLASHEQUAL=39\nPERCENTEQUAL=40\nAMPEREQUAL=41\nVBAREQUAL=42\nCIRCUMFLEXEQUAL=43\nLEFTSHIFTEQUAL=44\nRIGHTSHIFTEQUAL=45\nDOUBLESTAREQUAL=46\nDOUBLESLASH=47\nDOUBLESLASHEQUAL=48\nAT=49\nATEQUAL=50\nRARROW=51\nELLIPSIS=52\nCOLONEQUAL=53\nEXCLAMATION=54\nOP=55\nAWAIT=56\nASYNC=57\nTYPE_IGNORE=58\nTYPE_COMMENT=59\nSOFT_KEYWORD=60\nFSTRING_START=61\nFSTRING_MIDDLE=62\nFSTRING_END=63\nCOMMENT=64\nNL=65\n\nERRORTOKEN=66\nENCODING=67\nN_TOKENS=68\n\nNT_OFFSET=256\n\ntok_name={value:name\nfor name,value in globals().items()\nif isinstance(value,int)and not name.startswith('_')}\n__all__.extend(tok_name.values())\n\nEXACT_TOKEN_TYPES={\n'!':EXCLAMATION,\n'!=':NOTEQUAL,\n'%':PERCENT,\n'%=':PERCENTEQUAL,\n'&':AMPER,\n'&=':AMPEREQUAL,\n'(':LPAR,\n')':RPAR,\n'*':STAR,\n'**':DOUBLESTAR,\n'**=':DOUBLESTAREQUAL,\n'*=':STAREQUAL,\n'+':PLUS,\n'+=':PLUSEQUAL,\n',':COMMA,\n'-':MINUS,\n'-=':MINEQUAL,\n'->':RARROW,\n'.':DOT,\n'...':ELLIPSIS,\n'/':SLASH,\n'//':DOUBLESLASH,\n'//=':DOUBLESLASHEQUAL,\n'/=':SLASHEQUAL,\n':':COLON,\n':=':COLONEQUAL,\n';':SEMI,\n'<':LESS,\n'<<':LEFTSHIFT,\n'<<=':LEFTSHIFTEQUAL,\n'<=':LESSEQUAL,\n'=':EQUAL,\n'==':EQEQUAL,\n'>':GREATER,\n'>=':GREATEREQUAL,\n'>>':RIGHTSHIFT,\n'>>=':RIGHTSHIFTEQUAL,\n'@':AT,\n'@=':ATEQUAL,\n'[':LSQB,\n']':RSQB,\n'^':CIRCUMFLEX,\n'^=':CIRCUMFLEXEQUAL,\n'{':LBRACE,\n'|':VBAR,\n'|=':VBAREQUAL,\n'}':RBRACE,\n'~':TILDE,\n}\n\ndef ISTERMINAL(x):\n return x =NT_OFFSET\n \ndef ISEOF(x):\n return x ==ENDMARKER\n", []], "textwrap": [".py", "''\n\n\n\n\n\n\nimport re\n\n__all__=['TextWrapper','wrap','fill','dedent','indent','shorten']\n\n\n\n\n_whitespace='\\t\\n\\x0b\\x0c\\r '\n\nclass TextWrapper:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n unicode_whitespace_trans=dict.fromkeys(map(ord,_whitespace),ord(' '))\n \n \n \n \n \n \n \n word_punct=r'[\\w!\"\\'&.,?]'\n letter=r'[^\\d\\W]'\n whitespace=r'[%s]'%re.escape(_whitespace)\n nowhitespace='[^'+whitespace[1:]\n wordsep_re=re.compile(r'''\n ( # any whitespace\n %(ws)s+\n | # em-dash between words\n (?<=%(wp)s) -{2,} (?=\\w)\n | # word, possibly hyphenated\n %(nws)s+? (?:\n # hyphenated word\n -(?: (?<=%(lt)s{2}-) | (?<=%(lt)s-%(lt)s-))\n (?= %(lt)s -? 
%(lt)s)\n | # end of word\n (?=%(ws)s|\\Z)\n | # em-dash\n (?<=%(wp)s) (?=-{2,}\\w)\n )\n )'''%{'wp':word_punct,'lt':letter,\n 'ws':whitespace,'nws':nowhitespace},\n re.VERBOSE)\n del word_punct,letter,nowhitespace\n \n \n \n \n \n wordsep_simple_re=re.compile(r'(%s+)'%whitespace)\n del whitespace\n \n \n \n sentence_end_re=re.compile(r'[a-z]'\n r'[\\.\\!\\?]'\n r'[\\\"\\']?'\n r'\\Z')\n \n def __init__(self,\n width=70,\n initial_indent=\"\",\n subsequent_indent=\"\",\n expand_tabs=True,\n replace_whitespace=True,\n fix_sentence_endings=False,\n break_long_words=True,\n drop_whitespace=True,\n break_on_hyphens=True,\n tabsize=8,\n *,\n max_lines=None,\n placeholder=' [...]'):\n self.width=width\n self.initial_indent=initial_indent\n self.subsequent_indent=subsequent_indent\n self.expand_tabs=expand_tabs\n self.replace_whitespace=replace_whitespace\n self.fix_sentence_endings=fix_sentence_endings\n self.break_long_words=break_long_words\n self.drop_whitespace=drop_whitespace\n self.break_on_hyphens=break_on_hyphens\n self.tabsize=tabsize\n self.max_lines=max_lines\n self.placeholder=placeholder\n \n \n \n \n \n def _munge_whitespace(self,text):\n ''\n\n\n\n\n \n if self.expand_tabs:\n text=text.expandtabs(self.tabsize)\n if self.replace_whitespace:\n text=text.translate(self.unicode_whitespace_trans)\n return text\n \n \n def _split(self,text):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.break_on_hyphens is True:\n chunks=self.wordsep_re.split(text)\n else:\n chunks=self.wordsep_simple_re.split(text)\n chunks=[c for c in chunks if c]\n return chunks\n \n def _fix_sentence_endings(self,chunks):\n ''\n\n\n\n\n\n\n \n i=0\n patsearch=self.sentence_end_re.search\n while i space_left:\n \n \n hyphen=chunk.rfind('-',0,space_left)\n if hyphen >0 and any(c !='-'for c in chunk[:hyphen]):\n end=hyphen+1\n cur_line.append(chunk[:end])\n reversed_chunks[-1]=chunk[end:]\n \n \n \n \n elif not cur_line:\n cur_line.append(reversed_chunks.pop())\n \n \n \n \n \n \n \n def _wrap_chunks(self,chunks):\n ''\n\n\n\n\n\n\n\n\n\n\n \n lines=[]\n if self.width <=0:\n raise ValueError(\"invalid width %r (must be > 0)\"%self.width)\n if self.max_lines is not None:\n if self.max_lines >1:\n indent=self.subsequent_indent\n else:\n indent=self.initial_indent\n if len(indent)+len(self.placeholder.lstrip())>self.width:\n raise ValueError(\"placeholder too large for max width\")\n \n \n \n chunks.reverse()\n \n while chunks:\n \n \n \n cur_line=[]\n cur_len=0\n \n \n if lines:\n indent=self.subsequent_indent\n else:\n indent=self.initial_indent\n \n \n width=self.width -len(indent)\n \n \n \n if self.drop_whitespace and chunks[-1].strip()==''and lines:\n del chunks[-1]\n \n while chunks:\n l=len(chunks[-1])\n \n \n if cur_len+l <=width:\n cur_line.append(chunks.pop())\n cur_len +=l\n \n \n else:\n break\n \n \n \n if chunks and len(chunks[-1])>width:\n self._handle_long_word(chunks,cur_line,cur_len,width)\n cur_len=sum(map(len,cur_line))\n \n \n if self.drop_whitespace and cur_line and cur_line[-1].strip()=='':\n cur_len -=len(cur_line[-1])\n del cur_line[-1]\n \n if cur_line:\n if(self.max_lines is None or\n len(lines)+1 >30]+\n b32tab2[(c >>20)&0x3ff]+\n b32tab2[(c >>10)&0x3ff]+\n b32tab2[c&0x3ff]\n )\n \n if leftover ==1:\n encoded[-6:]=b'======'\n elif leftover ==2:\n encoded[-4:]=b'===='\n elif leftover ==3:\n encoded[-3:]=b'==='\n elif leftover ==4:\n encoded[-1:]=b'='\n return bytes(encoded)\n \ndef _b32decode(alphabet,s,casefold=False,map01=None):\n global _b32rev\n \n \n if alphabet not in _b32rev:\n 
_b32rev[alphabet]={v:k for k,v in enumerate(alphabet)}\n s=_bytes_from_decode_data(s)\n if len(s)%8:\n raise binascii.Error('Incorrect padding')\n \n \n \n if map01 is not None:\n map01=_bytes_from_decode_data(map01)\n assert len(map01)==1,repr(map01)\n s=s.translate(bytes.maketrans(b'01',b'O'+map01))\n if casefold:\n s=s.upper()\n \n \n \n l=len(s)\n s=s.rstrip(b'=')\n padchars=l -len(s)\n \n decoded=bytearray()\n b32rev=_b32rev[alphabet]\n for i in range(0,len(s),8):\n quanta=s[i:i+8]\n acc=0\n try:\n for c in quanta:\n acc=(acc <<5)+b32rev[c]\n except KeyError:\n raise binascii.Error('Non-base32 digit found')from None\n decoded +=acc.to_bytes(5)\n \n if l %8 or padchars not in{0,1,3,4,6}:\n raise binascii.Error('Incorrect padding')\n if padchars and decoded:\n acc <<=5 *padchars\n last=acc.to_bytes(5)\n leftover=(43 -5 *padchars)//8\n decoded[-5:]=last[:leftover]\n return bytes(decoded)\n \n \ndef b32encode(s):\n return _b32encode(_b32alphabet,s)\nb32encode.__doc__=_B32_ENCODE_DOCSTRING.format(encoding='base32')\n\ndef b32decode(s,casefold=False,map01=None):\n return _b32decode(_b32alphabet,s,casefold,map01)\nb32decode.__doc__=_B32_DECODE_DOCSTRING.format(encoding='base32',\nextra_args=_B32_DECODE_MAP01_DOCSTRING)\n\ndef b32hexencode(s):\n return _b32encode(_b32hexalphabet,s)\nb32hexencode.__doc__=_B32_ENCODE_DOCSTRING.format(encoding='base32hex')\n\ndef b32hexdecode(s,casefold=False):\n\n return _b32decode(_b32hexalphabet,s,casefold)\nb32hexdecode.__doc__=_B32_DECODE_DOCSTRING.format(encoding='base32hex',\nextra_args='')\n\n\n\n\n\ndef b16encode(s):\n ''\n \n return binascii.hexlify(s).upper()\n \n \ndef b16decode(s,casefold=False):\n ''\n\n\n\n\n\n\n\n \n s=_bytes_from_decode_data(s)\n if casefold:\n s=s.upper()\n if re.search(b'[^0-9A-F]',s):\n raise binascii.Error('Non-base16 digit found')\n return binascii.unhexlify(s)\n \n \n \n \n \n_a85chars=None\n_a85chars2=None\n_A85START=b\"<~\"\n_A85END=b\"~>\"\n\ndef _85encode(b,chars,chars2,pad=False,foldnuls=False,foldspaces=False):\n\n if not isinstance(b,bytes_types):\n b=memoryview(b).tobytes()\n \n padding=(-len(b))%4\n if padding:\n b=b+b'\\0'*padding\n words=struct.Struct('!%dI'%(len(b)//4)).unpack(b)\n \n chunks=[b'z'if foldnuls and not word else\n b'y'if foldspaces and word ==0x20202020 else\n (chars2[word //614125]+\n chars2[word //85 %7225]+\n chars[word %85])\n for word in words]\n \n if padding and not pad:\n if chunks[-1]==b'z':\n chunks[-1]=chars[0]*5\n chunks[-1]=chunks[-1][:-padding]\n \n return b''.join(chunks)\n \ndef a85encode(b,*,foldspaces=False,wrapcol=0,pad=False,adobe=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global _a85chars,_a85chars2\n \n \n if _a85chars2 is None:\n _a85chars=[bytes((i,))for i in range(33,118)]\n _a85chars2=[(a+b)for a in _a85chars for b in _a85chars]\n \n result=_85encode(b,_a85chars,_a85chars2,pad,True,foldspaces)\n \n if adobe:\n result=_A85START+result\n if wrapcol:\n wrapcol=max(2 if adobe else 1,wrapcol)\n chunks=[result[i:i+wrapcol]\n for i in range(0,len(result),wrapcol)]\n if adobe:\n if len(chunks[-1])+2 >wrapcol:\n chunks.append(b'')\n result=b'\\n'.join(chunks)\n if adobe:\n result +=_A85END\n \n return result\n \ndef a85decode(b,*,foldspaces=False,adobe=False,ignorechars=b' \\t\\n\\r\\v'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n b=_bytes_from_decode_data(b)\n if adobe:\n if not b.endswith(_A85END):\n raise ValueError(\n \"Ascii85 encoded byte sequences must end \"\n \"with {!r}\".format(_A85END)\n )\n if b.startswith(_A85START):\n b=b[2:-2]\n else:\n b=b[:-2]\n \n \n \n \n 
packI=struct.Struct('!I').pack\n decoded=[]\n decoded_append=decoded.append\n curr=[]\n curr_append=curr.append\n curr_clear=curr.clear\n for x in b+b'u'*4:\n if b'!'[0]<=x <=b'u'[0]:\n curr_append(x)\n if len(curr)==5:\n acc=0\n for x in curr:\n acc=85 *acc+(x -33)\n try:\n decoded_append(packI(acc))\n except struct.error:\n raise ValueError('Ascii85 overflow')from None\n curr_clear()\n elif x ==b'z'[0]:\n if curr:\n raise ValueError('z inside Ascii85 5-tuple')\n decoded_append(b'\\0\\0\\0\\0')\n elif foldspaces and x ==b'y'[0]:\n if curr:\n raise ValueError('y inside Ascii85 5-tuple')\n decoded_append(b'\\x20\\x20\\x20\\x20')\n elif x in ignorechars:\n \n continue\n else:\n raise ValueError('Non-Ascii85 digit found: %c'%x)\n \n result=b''.join(decoded)\n padding=4 -len(curr)\n if padding:\n \n result=result[:-padding]\n return result\n \n \n \n_b85alphabet=(b\"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\nb\"abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~\")\n_b85chars=None\n_b85chars2=None\n_b85dec=None\n\ndef b85encode(b,pad=False):\n ''\n\n\n\n \n global _b85chars,_b85chars2\n \n \n if _b85chars2 is None:\n _b85chars=[bytes((i,))for i in _b85alphabet]\n _b85chars2=[(a+b)for a in _b85chars for b in _b85chars]\n return _85encode(b,_b85chars,_b85chars2,pad)\n \ndef b85decode(b):\n ''\n\n\n \n global _b85dec\n \n \n if _b85dec is None:\n _b85dec=[None]*256\n for i,c in enumerate(_b85alphabet):\n _b85dec[c]=i\n \n b=_bytes_from_decode_data(b)\n padding=(-len(b))%5\n b=b+b'~'*padding\n out=[]\n packI=struct.Struct('!I').pack\n for i in range(0,len(b),5):\n chunk=b[i:i+5]\n acc=0\n try:\n for c in chunk:\n acc=acc *85+_b85dec[c]\n except TypeError:\n for j,c in enumerate(chunk):\n if _b85dec[c]is None:\n raise ValueError('bad base85 character at position %d'\n %(i+j))from None\n raise\n try:\n out.append(packI(acc))\n except struct.error:\n raise ValueError('base85 overflow in hunk starting at byte %d'\n %i)from None\n \n result=b''.join(out)\n if padding:\n result=result[:-padding]\n return result\n \n \n \n \n \nMAXLINESIZE=76\nMAXBINSIZE=(MAXLINESIZE //4)*3\n\ndef encode(input,output):\n ''\n while s :=input.read(MAXBINSIZE):\n while len(s)')\n_markedsectionclose=re.compile(r']\\s*]\\s*>')\n\n\n\n\n_msmarkedsectionclose=re.compile(r']\\s*>')\n\ndel re\n\n\nclass ParserBase:\n ''\n \n \n def __init__(self):\n if self.__class__ is ParserBase:\n raise RuntimeError(\n \"_markupbase.ParserBase must be subclassed\")\n \n def reset(self):\n self.lineno=1\n self.offset=0\n \n def getpos(self):\n ''\n return self.lineno,self.offset\n \n \n \n \n \n def updatepos(self,i,j):\n if i >=j:\n return j\n rawdata=self.rawdata\n nlines=rawdata.count(\"\\n\",i,j)\n if nlines:\n self.lineno=self.lineno+nlines\n pos=rawdata.rindex(\"\\n\",i,j)\n self.offset=j -(pos+1)\n else:\n self.offset=self.offset+j -i\n return j\n \n _decl_otherchars=''\n \n \n def parse_declaration(self,i):\n \n \n \n \n \n \n \n \n \n \n rawdata=self.rawdata\n j=i+2\n assert rawdata[i:j]==\"\":\n \n return j+1\n if rawdata[j:j+1]in(\"-\",\"\"):\n \n \n return -1\n \n n=len(rawdata)\n if rawdata[j:j+2]=='--':\n \n return self.parse_comment(i)\n elif rawdata[j]=='[':\n \n \n \n \n return self.parse_marked_section(i)\n else:\n decltype,j=self._scan_name(j,i)\n if j <0:\n return j\n if decltype ==\"doctype\":\n self._decl_otherchars=''\n while j \":\n \n data=rawdata[i+2:j]\n if decltype ==\"doctype\":\n self.handle_decl(data)\n else:\n \n \n \n \n self.unknown_decl(data)\n return j+1\n if c in \"\\\"'\":\n m=_declstringlit_match(rawdata,j)\n 
if not m:\n return -1\n j=m.end()\n elif c in \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ\":\n name,j=self._scan_name(j,i)\n elif c in self._decl_otherchars:\n j=j+1\n elif c ==\"[\":\n \n if decltype ==\"doctype\":\n j=self._parse_doctype_subset(j+1,i)\n elif decltype in{\"attlist\",\"linktype\",\"link\",\"element\"}:\n \n \n \n \n raise AssertionError(\"unsupported '[' char in %s declaration\"%decltype)\n else:\n raise AssertionError(\"unexpected '[' char in declaration\")\n else:\n raise AssertionError(\"unexpected %r char in declaration\"%rawdata[j])\n if j <0:\n return j\n return -1\n \n \n \n def parse_marked_section(self,i,report=1):\n rawdata=self.rawdata\n assert rawdata[i:i+3]=='n:\n \n return -1\n if rawdata[j:j+4]==\" unknown values %r [%s]'\n %(cls.__name__,value,unknown,bin(unknown))\n )\n \n if cls._member_type_ is object:\n \n pseudo_member=object.__new__(cls)\n else:\n pseudo_member=cls._member_type_.__new__(cls,value)\n if not hasattr(pseudo_member,'_value_'):\n pseudo_member._value_=value\n if member_value or aliases:\n members=[]\n combined_value=0\n for m in cls._iter_member_(member_value):\n members.append(m)\n combined_value |=m._value_\n if aliases:\n value=member_value |aliases\n for n,pm in cls._member_map_.items():\n if pm not in members and pm._value_ and pm._value_&value ==pm._value_:\n members.append(pm)\n combined_value |=pm._value_\n unknown=value ^combined_value\n pseudo_member._name_='|'.join([m._name_ for m in members])\n if not combined_value:\n pseudo_member._name_=None\n elif unknown and cls._boundary_ is STRICT:\n raise ValueError('%r: no members with value %r'%(cls,unknown))\n elif unknown:\n pseudo_member._name_ +='|%s'%cls._numeric_repr_(unknown)\n else:\n pseudo_member._name_=None\n \n \n \n pseudo_member=cls._value2member_map_.setdefault(value,pseudo_member)\n if neg_value is not None:\n cls._value2member_map_[neg_value]=pseudo_member\n return pseudo_member\n \n def __contains__(self,other):\n ''\n\n \n if not isinstance(other,self.__class__):\n raise TypeError(\n \"unsupported operand type(s) for 'in': %r and %r\"%(\n type(other).__qualname__,self.__class__.__qualname__))\n return other._value_&self._value_ ==other._value_\n \n def __iter__(self):\n ''\n\n \n yield from self._iter_member_(self._value_)\n \n def __len__(self):\n return self._value_.bit_count()\n \n def __repr__(self):\n cls_name=self.__class__.__name__\n v_repr=self.__class__._value_repr_ or repr\n if self._name_ is None:\n return \"<%s: %s>\"%(cls_name,v_repr(self._value_))\n else:\n return \"<%s.%s: %s>\"%(cls_name,self._name_,v_repr(self._value_))\n \n def __str__(self):\n cls_name=self.__class__.__name__\n if self._name_ is None:\n return '%s(%r)'%(cls_name,self._value_)\n else:\n return \"%s.%s\"%(cls_name,self._name_)\n \n def __bool__(self):\n return bool(self._value_)\n \n def __or__(self,other):\n if isinstance(other,self.__class__):\n other=other._value_\n elif self._member_type_ is not object and isinstance(other,self._member_type_):\n other=other\n else:\n return NotImplemented\n value=self._value_\n return self.__class__(value |other)\n \n def __and__(self,other):\n if isinstance(other,self.__class__):\n other=other._value_\n elif self._member_type_ is not object and isinstance(other,self._member_type_):\n other=other\n else:\n return NotImplemented\n value=self._value_\n return self.__class__(value&other)\n \n def __xor__(self,other):\n if isinstance(other,self.__class__):\n other=other._value_\n elif self._member_type_ is not object and 
isinstance(other,self._member_type_):\n other=other\n else:\n return NotImplemented\n value=self._value_\n return self.__class__(value ^other)\n \n def __invert__(self):\n if self._inverted_ is None:\n if self._boundary_ in(EJECT,KEEP):\n self._inverted_=self.__class__(~self._value_)\n else:\n self._inverted_=self.__class__(self._singles_mask_&~self._value_)\n return self._inverted_\n \n __rand__=__and__\n __ror__=__or__\n __rxor__=__xor__\n \n \nclass IntFlag(int,ReprEnum,Flag,boundary=KEEP):\n ''\n\n \n \n \ndef _high_bit(value):\n ''\n\n \n return value.bit_length()-1\n \ndef unique(enumeration):\n ''\n\n \n duplicates=[]\n for name,member in enumeration.__members__.items():\n if name !=member.name:\n duplicates.append((name,member.name))\n if duplicates:\n alias_details=', '.join(\n [\"%s -> %s\"%(alias,name)for(alias,name)in duplicates])\n raise ValueError('duplicate values found in %r: %s'%\n (enumeration,alias_details))\n return enumeration\n \ndef _dataclass_repr(self):\n dcf=self.__dataclass_fields__\n return ', '.join(\n '%s=%r'%(k,getattr(self,k))\n for k in dcf.keys()\n if dcf[k].repr\n )\n \ndef global_enum_repr(self):\n ''\n\n\n\n \n module=self.__class__.__module__.split('.')[-1]\n return '%s.%s'%(module,self._name_)\n \ndef global_flag_repr(self):\n ''\n\n\n\n \n module=self.__class__.__module__.split('.')[-1]\n cls_name=self.__class__.__name__\n if self._name_ is None:\n return \"%s.%s(%r)\"%(module,cls_name,self._value_)\n if _is_single_bit(self):\n return '%s.%s'%(module,self._name_)\n if self._boundary_ is not FlagBoundary.KEEP:\n return '|'.join(['%s.%s'%(module,name)for name in self.name.split('|')])\n else:\n name=[]\n for n in self._name_.split('|'):\n if n[0].isdigit():\n name.append(n)\n else:\n name.append('%s.%s'%(module,n))\n return '|'.join(name)\n \ndef global_str(self):\n ''\n\n \n if self._name_ is None:\n cls_name=self.__class__.__name__\n return \"%s(%r)\"%(cls_name,self._value_)\n else:\n return self._name_\n \ndef global_enum(cls,update_str=False):\n ''\n\n\n\n \n if issubclass(cls,Flag):\n cls.__repr__=global_flag_repr\n else:\n cls.__repr__=global_enum_repr\n if not issubclass(cls,ReprEnum)or update_str:\n cls.__str__=global_str\n sys.modules[cls.__module__].__dict__.update(cls.__members__)\n return cls\n \ndef _simple_enum(etype=Enum,*,boundary=None,use_args=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def convert_class(cls):\n nonlocal use_args\n cls_name=cls.__name__\n if use_args is None:\n use_args=etype._use_args_\n __new__=cls.__dict__.get('__new__')\n if __new__ is not None:\n new_member=__new__.__func__\n else:\n new_member=etype._member_type_.__new__\n attrs={}\n body={}\n if __new__ is not None:\n body['__new_member__']=new_member\n body['_new_member_']=new_member\n body['_use_args_']=use_args\n body['_generate_next_value_']=gnv=etype._generate_next_value_\n body['_member_names_']=member_names=[]\n body['_member_map_']=member_map={}\n body['_value2member_map_']=value2member_map={}\n body['_unhashable_values_']=[]\n body['_member_type_']=member_type=etype._member_type_\n body['_value_repr_']=etype._value_repr_\n if issubclass(etype,Flag):\n body['_boundary_']=boundary or etype._boundary_\n body['_flag_mask_']=None\n body['_all_bits_']=None\n body['_singles_mask_']=None\n body['_inverted_']=None\n body['__or__']=Flag.__or__\n body['__xor__']=Flag.__xor__\n body['__and__']=Flag.__and__\n body['__ror__']=Flag.__ror__\n body['__rxor__']=Flag.__rxor__\n body['__rand__']=Flag.__rand__\n body['__invert__']=Flag.__invert__\n for name,obj in 
cls.__dict__.items():\n if name in('__dict__','__weakref__'):\n continue\n if _is_dunder(name)or _is_private(cls_name,name)or _is_sunder(name)or _is_descriptor(obj):\n body[name]=obj\n else:\n attrs[name]=obj\n if cls.__dict__.get('__doc__')is None:\n body['__doc__']='An enumeration.'\n \n \n \n \n \n enum_class=type(cls_name,(etype,),body,boundary=boundary,_simple=True)\n for name in('__repr__','__str__','__format__','__reduce_ex__'):\n if name not in body:\n \n enum_method=getattr(etype,name)\n found_method=getattr(enum_class,name)\n object_method=getattr(object,name)\n data_type_method=getattr(member_type,name)\n if found_method in(data_type_method,object_method):\n setattr(enum_class,name,enum_method)\n gnv_last_values=[]\n if issubclass(enum_class,Flag):\n \n single_bits=multi_bits=0\n for name,value in attrs.items():\n if isinstance(value,auto)and auto.value is _auto_null:\n value=gnv(name,1,len(member_names),gnv_last_values)\n if value in value2member_map:\n \n member=value2member_map[value]\n redirect=property()\n redirect.member=member\n redirect.__set_name__(enum_class,name)\n setattr(enum_class,name,redirect)\n member_map[name]=member\n else:\n \n if use_args:\n if not isinstance(value,tuple):\n value=(value,)\n member=new_member(enum_class,*value)\n value=value[0]\n else:\n member=new_member(enum_class)\n if __new__ is None:\n member._value_=value\n member._name_=name\n member.__objclass__=enum_class\n member.__init__(value)\n redirect=property()\n redirect.member=member\n redirect.__set_name__(enum_class,name)\n setattr(enum_class,name,redirect)\n member_map[name]=member\n member._sort_order_=len(member_names)\n value2member_map[value]=member\n if _is_single_bit(value):\n \n member_names.append(name)\n single_bits |=value\n else:\n multi_bits |=value\n gnv_last_values.append(value)\n enum_class._flag_mask_=single_bits |multi_bits\n enum_class._singles_mask_=single_bits\n enum_class._all_bits_=2 **((single_bits |multi_bits).bit_length())-1\n \n member_list=[m._value_ for m in enum_class]\n if member_list !=sorted(member_list):\n enum_class._iter_member_=enum_class._iter_member_by_def_\n else:\n \n for name,value in attrs.items():\n if isinstance(value,auto):\n if value.value is _auto_null:\n value.value=gnv(name,1,len(member_names),gnv_last_values)\n value=value.value\n if value in value2member_map:\n \n member=value2member_map[value]\n redirect=property()\n redirect.member=member\n redirect.__set_name__(enum_class,name)\n setattr(enum_class,name,redirect)\n member_map[name]=member\n else:\n \n if use_args:\n if not isinstance(value,tuple):\n value=(value,)\n member=new_member(enum_class,*value)\n value=value[0]\n else:\n member=new_member(enum_class)\n if __new__ is None:\n member._value_=value\n member._name_=name\n member.__objclass__=enum_class\n member.__init__(value)\n member._sort_order_=len(member_names)\n redirect=property()\n redirect.member=member\n redirect.__set_name__(enum_class,name)\n setattr(enum_class,name,redirect)\n member_map[name]=member\n value2member_map[value]=member\n member_names.append(name)\n gnv_last_values.append(value)\n if '__new__'in body:\n enum_class.__new_member__=enum_class.__new__\n enum_class.__new__=Enum.__new__\n return enum_class\n return convert_class\n \n@_simple_enum(StrEnum)\nclass EnumCheck:\n ''\n\n \n CONTINUOUS=\"no skipped integer values\"\n NAMED_FLAGS=\"multi-flag aliases may not contain unnamed flags\"\n UNIQUE=\"one name per value\"\nCONTINUOUS,NAMED_FLAGS,UNIQUE=EnumCheck\n\n\nclass verify:\n ''\n\n \n def 
__init__(self,*checks):\n self.checks=checks\n def __call__(self,enumeration):\n checks=self.checks\n cls_name=enumeration.__name__\n if Flag is not None and issubclass(enumeration,Flag):\n enum_type='flag'\n elif issubclass(enumeration,Enum):\n enum_type='enum'\n else:\n raise TypeError(\"the 'verify' decorator only works with Enum and Flag\")\n for check in checks:\n if check is UNIQUE:\n \n duplicates=[]\n for name,member in enumeration.__members__.items():\n if name !=member.name:\n duplicates.append((name,member.name))\n if duplicates:\n alias_details=', '.join(\n [\"%s -> %s\"%(alias,name)for(alias,name)in duplicates])\n raise ValueError('aliases found in %r: %s'%\n (enumeration,alias_details))\n elif check is CONTINUOUS:\n values=set(e.value for e in enumeration)\n if len(values)<2:\n continue\n low,high=min(values),max(values)\n missing=[]\n if enum_type =='flag':\n \n for i in range(_high_bit(low)+1,_high_bit(high)):\n if 2 **i not in values:\n missing.append(2 **i)\n elif enum_type =='enum':\n \n for i in range(low+1,high):\n if i not in values:\n missing.append(i)\n else:\n raise Exception('verify: unknown type %r'%enum_type)\n if missing:\n raise ValueError(('invalid %s %r: missing values %s'%(\n enum_type,cls_name,', '.join((str(m)for m in missing)))\n )[:256])\n \n elif check is NAMED_FLAGS:\n \n member_names=enumeration._member_names_\n member_values=[m.value for m in enumeration]\n missing_names=[]\n missing_value=0\n for name,alias in enumeration._member_map_.items():\n if name in member_names:\n \n continue\n if alias.value <0:\n \n continue\n values=list(_iter_bits_lsb(alias.value))\n missed=[v for v in values if v not in member_values]\n if missed:\n missing_names.append(name)\n missing_value |=reduce(_or_,missed)\n if missing_names:\n if len(missing_names)==1:\n alias='alias %s is missing'%missing_names[0]\n else:\n alias='aliases %s and %s are missing'%(\n ', '.join(missing_names[:-1]),missing_names[-1]\n )\n if _is_single_bit(missing_value):\n value='value 0x%x'%missing_value\n else:\n value='combined values of 0x%x'%missing_value\n raise ValueError(\n 'invalid Flag %r: %s %s [use enum.show_flag_values(value) for details]'\n %(cls_name,alias,value)\n )\n return enumeration\n \ndef _test_simple_enum(checked_enum,simple_enum):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n failed=[]\n if checked_enum.__dict__ !=simple_enum.__dict__:\n checked_dict=checked_enum.__dict__\n checked_keys=list(checked_dict.keys())\n simple_dict=simple_enum.__dict__\n simple_keys=list(simple_dict.keys())\n member_names=set(\n list(checked_enum._member_map_.keys())\n +list(simple_enum._member_map_.keys())\n )\n for key in set(checked_keys+simple_keys):\n if key in('__module__','_member_map_','_value2member_map_','__doc__'):\n \n continue\n elif key in member_names:\n \n continue\n elif key not in simple_keys:\n failed.append(\"missing key: %r\"%(key,))\n elif key not in checked_keys:\n failed.append(\"extra key: %r\"%(key,))\n else:\n checked_value=checked_dict[key]\n simple_value=simple_dict[key]\n if callable(checked_value)or isinstance(checked_value,bltns.property):\n continue\n if key =='__doc__':\n \n compressed_checked_value=checked_value.replace(' ','').replace('\\t','')\n compressed_simple_value=simple_value.replace(' ','').replace('\\t','')\n if compressed_checked_value !=compressed_simple_value:\n failed.append(\"%r:\\n %s\\n %s\"%(\n key,\n \"checked -> %r\"%(checked_value,),\n \"simple -> %r\"%(simple_value,),\n ))\n elif checked_value !=simple_value:\n failed.append(\"%r:\\n %s\\n 
%s\"%(\n key,\n \"checked -> %r\"%(checked_value,),\n \"simple -> %r\"%(simple_value,),\n ))\n failed.sort()\n for name in member_names:\n failed_member=[]\n if name not in simple_keys:\n failed.append('missing member from simple enum: %r'%name)\n elif name not in checked_keys:\n failed.append('extra member in simple enum: %r'%name)\n else:\n checked_member_dict=checked_enum[name].__dict__\n checked_member_keys=list(checked_member_dict.keys())\n simple_member_dict=simple_enum[name].__dict__\n simple_member_keys=list(simple_member_dict.keys())\n for key in set(checked_member_keys+simple_member_keys):\n if key in('__module__','__objclass__','_inverted_'):\n \n continue\n elif key not in simple_member_keys:\n failed_member.append(\"missing key %r not in the simple enum member %r\"%(key,name))\n elif key not in checked_member_keys:\n failed_member.append(\"extra key %r in simple enum member %r\"%(key,name))\n else:\n checked_value=checked_member_dict[key]\n simple_value=simple_member_dict[key]\n if checked_value !=simple_value:\n failed_member.append(\"%r:\\n %s\\n %s\"%(\n key,\n \"checked member -> %r\"%(checked_value,),\n \"simple member -> %r\"%(simple_value,),\n ))\n if failed_member:\n failed.append('%r member mismatch:\\n %s'%(\n name,'\\n '.join(failed_member),\n ))\n for method in(\n '__str__','__repr__','__reduce_ex__','__format__',\n '__getnewargs_ex__','__getnewargs__','__reduce_ex__','__reduce__'\n ):\n if method in simple_keys and method in checked_keys:\n \n continue\n elif method not in simple_keys and method not in checked_keys:\n \n checked_method=getattr(checked_enum,method,None)\n simple_method=getattr(simple_enum,method,None)\n if hasattr(checked_method,'__func__'):\n checked_method=checked_method.__func__\n simple_method=simple_method.__func__\n if checked_method !=simple_method:\n failed.append(\"%r: %-30s %s\"%(\n method,\n \"checked -> %r\"%(checked_method,),\n \"simple -> %r\"%(simple_method,),\n ))\n else:\n \n \n pass\n if failed:\n raise TypeError('enum mismatch:\\n %s'%'\\n '.join(failed))\n \ndef _old_convert_(etype,name,module,filter,source=None,*,boundary=None):\n ''\n\n \n \n \n \n \n \n module_globals=sys.modules[module].__dict__\n if source:\n source=source.__dict__\n else:\n source=module_globals\n \n \n \n members=[\n (name,value)\n for name,value in source.items()\n if filter(name)]\n try:\n \n members.sort(key=lambda t:(t[1],t[0]))\n except TypeError:\n \n members.sort(key=lambda t:t[0])\n cls=etype(name,members,module=module,boundary=boundary or KEEP)\n return cls\n \n_stdlib_enums=IntEnum,StrEnum,IntFlag\n", ["builtins", "functools", "operator", "sys", "types", "warnings"]], "timeit": [".py", "#! /usr/bin/env python3\n\n\"\"\"Tool for measuring execution time of small code snippets.\n\nThis module avoids a number of common traps for measuring execution\ntimes. 
See also Tim Peters' introduction to the Algorithms chapter in\nthe Python Cookbook, published by O'Reilly.\n\nLibrary usage: see the Timer class.\n\nCommand line usage:\n python timeit.py [-n N] [-r N] [-s S] [-p] [-h] [--] [statement]\n\nOptions:\n -n/--number N: how many times to execute 'statement' (default: see below)\n -r/--repeat N: how many times to repeat the timer (default 5)\n -s/--setup S: statement to be executed once initially (default 'pass').\n Execution time of this setup statement is NOT timed.\n -p/--process: use time.process_time() (default is time.perf_counter())\n -v/--verbose: print raw timing results; repeat for more digits precision\n -u/--unit: set the output time unit (nsec, usec, msec, or sec)\n -h/--help: print this usage message and exit\n --: separate options from statement, use when statement starts with -\n statement: statement to be timed (default 'pass')\n\nA multi-line statement may be given by specifying each line as a\nseparate argument; indented lines are possible by enclosing an\nargument in quotes and using leading spaces. Multiple -s options are\ntreated similarly.\n\nIf -n is not given, a suitable number of loops is calculated by trying\nincreasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the\ntotal time is at least 0.2 seconds.\n\nNote: there is a certain baseline overhead associated with executing a\npass statement. It differs between versions. The code here doesn't try\nto hide it, but you should be aware of it. The baseline overhead can be\nmeasured by invoking the program without arguments.\n\nClasses:\n\n Timer\n\nFunctions:\n\n timeit(string, string) -> float\n repeat(string, string) -> list\n default_timer() -> float\n\n\"\"\"\n\nimport gc\nimport itertools\nimport sys\nimport time\n\n__all__=[\"Timer\",\"timeit\",\"repeat\",\"default_timer\"]\n\ndummy_src_name=\"\"\ndefault_number=1000000\ndefault_repeat=5\ndefault_timer=time.perf_counter\n\n_globals=globals\n\n\n\n\ntemplate=\"\"\"\ndef inner(_it, _timer{init}):\n {setup}\n _t0 = _timer()\n for _i in _it:\n {stmt}\n pass\n _t1 = _timer()\n return _t1 - _t0\n\"\"\"\n\n\ndef reindent(src,indent):\n ''\n return src.replace(\"\\n\",\"\\n\"+\" \"*indent)\n \n \nclass Timer:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,stmt=\"pass\",setup=\"pass\",timer=default_timer,\n globals=None):\n ''\n self.timer=timer\n local_ns={}\n global_ns=_globals()if globals is None else globals\n init=''\n if isinstance(setup,str):\n \n compile(setup,dummy_src_name,\"exec\")\n stmtprefix=setup+'\\n'\n setup=reindent(setup,4)\n elif callable(setup):\n local_ns['_setup']=setup\n init +=', _setup=_setup'\n stmtprefix=''\n setup='_setup()'\n else:\n raise ValueError(\"setup is neither a string nor callable\")\n if isinstance(stmt,str):\n \n compile(stmtprefix+stmt,dummy_src_name,\"exec\")\n stmt=reindent(stmt,8)\n elif callable(stmt):\n local_ns['_stmt']=stmt\n init +=', _stmt=_stmt'\n stmt='_stmt()'\n else:\n raise ValueError(\"stmt is neither a string nor callable\")\n src=template.format(stmt=stmt,setup=setup,init=init)\n self.src=src\n code=compile(src,dummy_src_name,\"exec\")\n exec(code,global_ns,local_ns)\n self.inner=local_ns[\"inner\"]\n \n def print_exc(self,file=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n import linecache,traceback\n if self.src is not None:\n linecache.cache[dummy_src_name]=(len(self.src),\n None,\n self.src.split(\"\\n\"),\n dummy_src_name)\n \n \n traceback.print_exc(file=file)\n \n def timeit(self,number=default_number):\n ''\n\n\n\n\n\n\n\n \n 
it=itertools.repeat(None,number)\n gcold=gc.isenabled()\n gc.disable()\n try:\n timing=self.inner(it,self.timer)\n finally:\n if gcold:\n gc.enable()\n return timing\n \n def repeat(self,repeat=default_repeat,number=default_number):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n r=[]\n for i in range(repeat):\n t=self.timeit(number)\n r.append(t)\n return r\n \n def autorange(self,callback=None):\n ''\n\n\n\n\n\n\n\n \n i=1\n while True:\n for j in 1,2,5:\n number=i *j\n time_taken=self.timeit(number)\n if callback:\n callback(number,time_taken)\n if time_taken >=0.2:\n return(number,time_taken)\n i *=10\n \n \ndef timeit(stmt=\"pass\",setup=\"pass\",timer=default_timer,\nnumber=default_number,globals=None):\n ''\n return Timer(stmt,setup,timer,globals).timeit(number)\n \n \ndef repeat(stmt=\"pass\",setup=\"pass\",timer=default_timer,\nrepeat=default_repeat,number=default_number,globals=None):\n ''\n return Timer(stmt,setup,timer,globals).repeat(repeat,number)\n \n \ndef main(args=None,*,_wrap_timer=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if args is None:\n args=sys.argv[1:]\n import getopt\n try:\n opts,args=getopt.getopt(args,\"n:u:s:r:pvh\",\n [\"number=\",\"setup=\",\"repeat=\",\n \"process\",\"verbose\",\"unit=\",\"help\"])\n except getopt.error as err:\n print(err)\n print(\"use -h/--help for command line help\")\n return 2\n \n timer=default_timer\n stmt=\"\\n\".join(args)or \"pass\"\n number=0\n setup=[]\n repeat=default_repeat\n verbose=0\n time_unit=None\n units={\"nsec\":1e-9,\"usec\":1e-6,\"msec\":1e-3,\"sec\":1.0}\n precision=3\n for o,a in opts:\n if o in(\"-n\",\"--number\"):\n number=int(a)\n if o in(\"-s\",\"--setup\"):\n setup.append(a)\n if o in(\"-u\",\"--unit\"):\n if a in units:\n time_unit=a\n else:\n print(\"Unrecognized unit. Please select nsec, usec, msec, or sec.\",\n file=sys.stderr)\n return 2\n if o in(\"-r\",\"--repeat\"):\n repeat=int(a)\n if repeat <=0:\n repeat=1\n if o in(\"-p\",\"--process\"):\n timer=time.process_time\n if o in(\"-v\",\"--verbose\"):\n if verbose:\n precision +=1\n verbose +=1\n if o in(\"-h\",\"--help\"):\n print(__doc__,end=' ')\n return 0\n setup=\"\\n\".join(setup)or \"pass\"\n \n \n \n \n import os\n sys.path.insert(0,os.curdir)\n if _wrap_timer is not None:\n timer=_wrap_timer(timer)\n \n t=Timer(stmt,setup,timer)\n if number ==0:\n \n callback=None\n if verbose:\n def callback(number,time_taken):\n msg=\"{num} loop{s} -> {secs:.{prec}g} secs\"\n plural=(number !=1)\n print(msg.format(num=number,s='s'if plural else '',\n secs=time_taken,prec=precision))\n try:\n number,_=t.autorange(callback)\n except:\n t.print_exc()\n return 1\n \n if verbose:\n print()\n \n try:\n raw_timings=t.repeat(repeat,number)\n except:\n t.print_exc()\n return 1\n \n def format_time(dt):\n unit=time_unit\n \n if unit is not None:\n scale=units[unit]\n else:\n scales=[(scale,unit)for unit,scale in units.items()]\n scales.sort(reverse=True)\n for scale,unit in scales:\n if dt >=scale:\n break\n \n return \"%.*g %s\"%(precision,dt /scale,unit)\n \n if verbose:\n print(\"raw times: %s\"%\", \".join(map(format_time,raw_timings)))\n print()\n timings=[dt /number for dt in raw_timings]\n \n best=min(timings)\n print(\"%d loop%s, best of %d: %s per loop\"\n %(number,'s'if number !=1 else '',\n repeat,format_time(best)))\n \n best=min(timings)\n worst=max(timings)\n if worst >=best *4:\n import warnings\n warnings.warn_explicit(\"The test results are likely unreliable. 
\"\n \"The worst time (%s) was more than four times \"\n \"slower than the best time (%s).\"\n %(format_time(worst),format_time(best)),\n UserWarning,'',0)\n return None\n \n \nif __name__ ==\"__main__\":\n sys.exit(main())\n", ["gc", "getopt", "itertools", "linecache", "os", "sys", "time", "traceback", "warnings"]], "_signal": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nCTRL_BREAK_EVENT=1\n\nCTRL_C_EVENT=0\n\nNSIG=23\n\nSIGABRT=22\n\nSIGBREAK=21\n\nSIGFPE=8\n\nSIGILL=4\n\nSIGINT=2\n\nSIGSEGV=11\n\nSIGTERM=15\n\nSIG_DFL=0\n\nSIG_IGN=1\n\ndef default_int_handler(*args,**kw):\n ''\n \n pass\n \ndef getsignal(*args,**kw):\n ''\n\n\n\n\n \n pass\n \ndef raise_signal(*args,**kw):\n ''\n pass\n \ndef set_wakeup_fd(*args,**kw):\n ''\n\n\n\n\n \n pass\n \ndef signal(*args,**kw):\n ''\n\n\n\n\n\n \n pass\n \ndef strsignal(*args,**kw):\n ''\n\n \n pass\n \ndef valid_signals(*args,**kw):\n ''\n\n \n pass\n", []], "hmac": [".py", "''\n\n\n\n\nimport warnings as _warnings\ntry:\n import _hashlib as _hashopenssl\nexcept ImportError:\n _hashopenssl=None\n _functype=None\n from _operator import _compare_digest as compare_digest\nelse:\n compare_digest=_hashopenssl.compare_digest\n _functype=type(_hashopenssl.openssl_sha256)\n \nimport hashlib as _hashlib\n\ntrans_5C=bytes((x ^0x5C)for x in range(256))\ntrans_36=bytes((x ^0x36)for x in range(256))\n\n\n\ndigest_size=None\n\n\nclass HMAC:\n ''\n\n\n \n blocksize=64\n \n __slots__=(\n \"_hmac\",\"_inner\",\"_outer\",\"block_size\",\"digest_size\"\n )\n \n def __init__(self,key,msg=None,digestmod=''):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n if not isinstance(key,(bytes,bytearray)):\n raise TypeError(\"key: expected bytes or bytearray, but got %r\"%type(key).__name__)\n \n if not digestmod:\n raise TypeError(\"Missing required parameter 'digestmod'.\")\n \n if _hashopenssl and isinstance(digestmod,(str,_functype)):\n try:\n self._init_hmac(key,msg,digestmod)\n except _hashopenssl.UnsupportedDigestmodError:\n self._init_old(key,msg,digestmod)\n else:\n self._init_old(key,msg,digestmod)\n \n def _init_hmac(self,key,msg,digestmod):\n self._hmac=_hashopenssl.hmac_new(key,msg,digestmod=digestmod)\n self.digest_size=self._hmac.digest_size\n self.block_size=self._hmac.block_size\n \n def _init_old(self,key,msg,digestmod):\n if callable(digestmod):\n digest_cons=digestmod\n elif isinstance(digestmod,str):\n digest_cons=lambda d=b'':_hashlib.new(digestmod,d)\n else:\n digest_cons=lambda d=b'':digestmod.new(d)\n \n self._hmac=None\n self._outer=digest_cons()\n self._inner=digest_cons()\n self.digest_size=self._inner.digest_size\n \n if hasattr(self._inner,'block_size'):\n blocksize=self._inner.block_size\n if blocksize <16:\n _warnings.warn('block_size of %d seems too small; using our '\n 'default of %d.'%(blocksize,self.blocksize),\n RuntimeWarning,2)\n blocksize=self.blocksize\n else:\n _warnings.warn('No block_size attribute on given digest object; '\n 'Assuming %d.'%(self.blocksize),\n RuntimeWarning,2)\n blocksize=self.blocksize\n \n if len(key)>blocksize:\n key=digest_cons(key).digest()\n \n \n \n self.block_size=blocksize\n \n key=key.ljust(blocksize,b'\\0')\n self._outer.update(key.translate(trans_5C))\n self._inner.update(key.translate(trans_36))\n if msg is not None:\n self.update(msg)\n \n @property\n def name(self):\n if self._hmac:\n return self._hmac.name\n else:\n return f\"hmac-{self._inner.name}\"\n \n def update(self,msg):\n ''\n inst=self._hmac or self._inner\n inst.update(msg)\n \n def copy(self):\n ''\n\n\n \n \n 
other=self.__class__.__new__(self.__class__)\n other.digest_size=self.digest_size\n if self._hmac:\n other._hmac=self._hmac.copy()\n other._inner=other._outer=None\n else:\n other._hmac=None\n other._inner=self._inner.copy()\n other._outer=self._outer.copy()\n return other\n \n def _current(self):\n ''\n\n\n \n if self._hmac:\n return self._hmac\n else:\n h=self._outer.copy()\n h.update(self._inner.digest())\n return h\n \n def digest(self):\n ''\n\n\n\n\n \n h=self._current()\n return h.digest()\n \n def hexdigest(self):\n ''\n \n h=self._current()\n return h.hexdigest()\n \ndef new(key,msg=None,digestmod=''):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return HMAC(key,msg,digestmod)\n \n \ndef digest(key,msg,digest):\n ''\n\n\n\n\n\n\n \n if _hashopenssl is not None and isinstance(digest,(str,_functype)):\n try:\n return _hashopenssl.hmac_digest(key,msg,digest)\n except _hashopenssl.UnsupportedDigestmodError:\n pass\n \n if callable(digest):\n digest_cons=digest\n elif isinstance(digest,str):\n digest_cons=lambda d=b'':_hashlib.new(digest,d)\n else:\n digest_cons=lambda d=b'':digest.new(d)\n \n inner=digest_cons()\n outer=digest_cons()\n blocksize=getattr(inner,'block_size',64)\n if len(key)>blocksize:\n key=digest_cons(key).digest()\n key=key+b'\\x00'*(blocksize -len(key))\n inner.update(key.translate(trans_36))\n outer.update(key.translate(trans_5C))\n inner.update(msg)\n outer.update(inner.digest())\n return outer.digest()\n", ["_hashlib", "_operator", "hashlib", "warnings"]], "tarfile": [".py", "#!/usr/bin/env python3\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n''\n\n\nversion=\"0.9.0\"\n__author__=\"Lars Gust\\u00e4bel (lars@gustaebel.de)\"\n__credits__=\"Gustavo Niemeyer, Niels Gust\\u00e4bel, Richard Townsend.\"\n\n\n\n\nfrom builtins import open as bltn_open\nimport sys\nimport os\nimport io\nimport shutil\nimport stat\nimport time\nimport struct\nimport copy\nimport re\nimport warnings\n\ntry:\n import pwd\nexcept ImportError:\n pwd=None\ntry:\n import grp\nexcept ImportError:\n grp=None\n \n \n \n \nsymlink_exception=(AttributeError,NotImplementedError,OSError)\n\n\n__all__=[\"TarFile\",\"TarInfo\",\"is_tarfile\",\"TarError\",\"ReadError\",\n\"CompressionError\",\"StreamError\",\"ExtractError\",\"HeaderError\",\n\"ENCODING\",\"USTAR_FORMAT\",\"GNU_FORMAT\",\"PAX_FORMAT\",\n\"DEFAULT_FORMAT\",\"open\",\"fully_trusted_filter\",\"data_filter\",\n\"tar_filter\",\"FilterError\",\"AbsoluteLinkError\",\n\"OutsideDestinationError\",\"SpecialFileError\",\"AbsolutePathError\",\n\"LinkOutsideDestinationError\"]\n\n\n\n\n\nNUL=b\"\\0\"\nBLOCKSIZE=512\nRECORDSIZE=BLOCKSIZE *20\nGNU_MAGIC=b\"ustar 
\\0\"\nPOSIX_MAGIC=b\"ustar\\x0000\"\n\nLENGTH_NAME=100\nLENGTH_LINK=100\nLENGTH_PREFIX=155\n\nREGTYPE=b\"0\"\nAREGTYPE=b\"\\0\"\nLNKTYPE=b\"1\"\nSYMTYPE=b\"2\"\nCHRTYPE=b\"3\"\nBLKTYPE=b\"4\"\nDIRTYPE=b\"5\"\nFIFOTYPE=b\"6\"\nCONTTYPE=b\"7\"\n\nGNUTYPE_LONGNAME=b\"L\"\nGNUTYPE_LONGLINK=b\"K\"\nGNUTYPE_SPARSE=b\"S\"\n\nXHDTYPE=b\"x\"\nXGLTYPE=b\"g\"\nSOLARIS_XHDTYPE=b\"X\"\n\nUSTAR_FORMAT=0\nGNU_FORMAT=1\nPAX_FORMAT=2\nDEFAULT_FORMAT=PAX_FORMAT\n\n\n\n\n\nSUPPORTED_TYPES=(REGTYPE,AREGTYPE,LNKTYPE,\nSYMTYPE,DIRTYPE,FIFOTYPE,\nCONTTYPE,CHRTYPE,BLKTYPE,\nGNUTYPE_LONGNAME,GNUTYPE_LONGLINK,\nGNUTYPE_SPARSE)\n\n\nREGULAR_TYPES=(REGTYPE,AREGTYPE,\nCONTTYPE,GNUTYPE_SPARSE)\n\n\nGNU_TYPES=(GNUTYPE_LONGNAME,GNUTYPE_LONGLINK,\nGNUTYPE_SPARSE)\n\n\nPAX_FIELDS=(\"path\",\"linkpath\",\"size\",\"mtime\",\n\"uid\",\"gid\",\"uname\",\"gname\")\n\n\nPAX_NAME_FIELDS={\"path\",\"linkpath\",\"uname\",\"gname\"}\n\n\n\nPAX_NUMBER_FIELDS={\n\"atime\":float,\n\"ctime\":float,\n\"mtime\":float,\n\"uid\":int,\n\"gid\":int,\n\"size\":int\n}\n\n\n\n\nif os.name ==\"nt\":\n ENCODING=\"utf-8\"\nelse:\n ENCODING=sys.getfilesystemencoding()\n \n \n \n \n \ndef stn(s,length,encoding,errors):\n ''\n \n if s is None:\n raise ValueError(\"metadata cannot contain None\")\n s=s.encode(encoding,errors)\n return s[:length]+(length -len(s))*NUL\n \ndef nts(s,encoding,errors):\n ''\n \n p=s.find(b\"\\0\")\n if p !=-1:\n s=s[:p]\n return s.decode(encoding,errors)\n \ndef nti(s):\n ''\n \n \n \n if s[0]in(0o200,0o377):\n n=0\n for i in range(len(s)-1):\n n <<=8\n n +=s[i+1]\n if s[0]==0o377:\n n=-(256 **(len(s)-1)-n)\n else:\n try:\n s=nts(s,\"ascii\",\"strict\")\n n=int(s.strip()or \"0\",8)\n except ValueError:\n raise InvalidHeaderError(\"invalid header\")\n return n\n \ndef itn(n,digits=8,format=DEFAULT_FORMAT):\n ''\n \n \n \n \n \n \n \n \n \n original_n=n\n n=int(n)\n if 0 <=n <8 **(digits -1):\n s=bytes(\"%0*o\"%(digits -1,n),\"ascii\")+NUL\n elif format ==GNU_FORMAT and -256 **(digits -1)<=n <256 **(digits -1):\n if n >=0:\n s=bytearray([0o200])\n else:\n s=bytearray([0o377])\n n=256 **digits+n\n \n for i in range(digits -1):\n s.insert(1,n&0o377)\n n >>=8\n else:\n raise ValueError(\"overflow in number field\")\n \n return s\n \ndef calc_chksums(buf):\n ''\n\n\n\n\n\n\n \n unsigned_chksum=256+sum(struct.unpack_from(\"148B8x356B\",buf))\n signed_chksum=256+sum(struct.unpack_from(\"148b8x356b\",buf))\n return unsigned_chksum,signed_chksum\n \ndef copyfileobj(src,dst,length=None,exception=OSError,bufsize=None):\n ''\n\n \n bufsize=bufsize or 16 *1024\n if length ==0:\n return\n if length is None:\n shutil.copyfileobj(src,dst,bufsize)\n return\n \n blocks,remainder=divmod(length,bufsize)\n for b in range(blocks):\n buf=src.read(bufsize)\n if len(buf)self.bufsize:\n self.fileobj.write(self.buf[:self.bufsize])\n self.buf=self.buf[self.bufsize:]\n \n def close(self):\n ''\n\n \n if self.closed:\n return\n \n self.closed=True\n try:\n if self.mode ==\"w\"and self.comptype !=\"tar\":\n self.buf +=self.cmp.flush()\n \n if self.mode ==\"w\"and self.buf:\n self.fileobj.write(self.buf)\n self.buf=b\"\"\n if self.comptype ==\"gz\":\n self.fileobj.write(struct.pack(\"=0:\n blocks,remainder=divmod(pos -self.pos,self.bufsize)\n for i in range(blocks):\n self.read(self.bufsize)\n self.read(remainder)\n else:\n raise StreamError(\"seeking backwards is not allowed\")\n return self.pos\n \n def read(self,size):\n ''\n assert size is not None\n buf=self._read(size)\n self.pos +=len(buf)\n return buf\n \n def _read(self,size):\n ''\n \n if 
self.comptype ==\"tar\":\n return self.__read(size)\n \n c=len(self.dbuf)\n t=[self.dbuf]\n while c lastpos:\n self.map.append((False,lastpos,offset,None))\n self.map.append((True,offset,offset+size,realpos))\n realpos +=size\n lastpos=offset+size\n if lastpos 0:\n while True:\n data,start,stop,offset=self.map[self.map_index]\n if start <=self.position \"%(self.__class__.__name__,self.name,id(self))\n \n def replace(self,*,\n name=_KEEP,mtime=_KEEP,mode=_KEEP,linkname=_KEEP,\n uid=_KEEP,gid=_KEEP,uname=_KEEP,gname=_KEEP,\n deep=True,_KEEP=_KEEP):\n ''\n \n if deep:\n result=copy.deepcopy(self)\n else:\n result=copy.copy(self)\n if name is not _KEEP:\n result.name=name\n if mtime is not _KEEP:\n result.mtime=mtime\n if mode is not _KEEP:\n result.mode=mode\n if linkname is not _KEEP:\n result.linkname=linkname\n if uid is not _KEEP:\n result.uid=uid\n if gid is not _KEEP:\n result.gid=gid\n if uname is not _KEEP:\n result.uname=uname\n if gname is not _KEEP:\n result.gname=gname\n return result\n \n def get_info(self):\n ''\n \n if self.mode is None:\n mode=None\n else:\n mode=self.mode&0o7777\n info={\n \"name\":self.name,\n \"mode\":mode,\n \"uid\":self.uid,\n \"gid\":self.gid,\n \"size\":self.size,\n \"mtime\":self.mtime,\n \"chksum\":self.chksum,\n \"type\":self.type,\n \"linkname\":self.linkname,\n \"uname\":self.uname,\n \"gname\":self.gname,\n \"devmajor\":self.devmajor,\n \"devminor\":self.devminor\n }\n \n if info[\"type\"]==DIRTYPE and not info[\"name\"].endswith(\"/\"):\n info[\"name\"]+=\"/\"\n \n return info\n \n def tobuf(self,format=DEFAULT_FORMAT,encoding=ENCODING,errors=\"surrogateescape\"):\n ''\n \n info=self.get_info()\n for name,value in info.items():\n if value is None:\n raise ValueError(\"%s may not be None\"%name)\n \n if format ==USTAR_FORMAT:\n return self.create_ustar_header(info,encoding,errors)\n elif format ==GNU_FORMAT:\n return self.create_gnu_header(info,encoding,errors)\n elif format ==PAX_FORMAT:\n return self.create_pax_header(info,encoding)\n else:\n raise ValueError(\"invalid format\")\n \n def create_ustar_header(self,info,encoding,errors):\n ''\n \n info[\"magic\"]=POSIX_MAGIC\n \n if len(info[\"linkname\"].encode(encoding,errors))>LENGTH_LINK:\n raise ValueError(\"linkname is too long\")\n \n if len(info[\"name\"].encode(encoding,errors))>LENGTH_NAME:\n info[\"prefix\"],info[\"name\"]=self._posix_split_name(info[\"name\"],encoding,errors)\n \n return self._create_header(info,USTAR_FORMAT,encoding,errors)\n \n def create_gnu_header(self,info,encoding,errors):\n ''\n \n info[\"magic\"]=GNU_MAGIC\n \n buf=b\"\"\n if len(info[\"linkname\"].encode(encoding,errors))>LENGTH_LINK:\n buf +=self._create_gnu_long_header(info[\"linkname\"],GNUTYPE_LONGLINK,encoding,errors)\n \n if len(info[\"name\"].encode(encoding,errors))>LENGTH_NAME:\n buf +=self._create_gnu_long_header(info[\"name\"],GNUTYPE_LONGNAME,encoding,errors)\n \n return buf+self._create_header(info,GNU_FORMAT,encoding,errors)\n \n def create_pax_header(self,info,encoding):\n ''\n\n\n \n info[\"magic\"]=POSIX_MAGIC\n pax_headers=self.pax_headers.copy()\n \n \n \n for name,hname,length in(\n (\"name\",\"path\",LENGTH_NAME),(\"linkname\",\"linkpath\",LENGTH_LINK),\n (\"uname\",\"uname\",32),(\"gname\",\"gname\",32)):\n \n if hname in pax_headers:\n \n continue\n \n \n try:\n info[name].encode(\"ascii\",\"strict\")\n except UnicodeEncodeError:\n pax_headers[hname]=info[name]\n continue\n \n if len(info[name])>length:\n pax_headers[hname]=info[name]\n \n \n \n for name,digits 
in((\"uid\",8),(\"gid\",8),(\"size\",12),(\"mtime\",12)):\n needs_pax=False\n \n val=info[name]\n val_is_float=isinstance(val,float)\n val_int=round(val)if val_is_float else val\n if not 0 <=val_int <8 **(digits -1):\n \n info[name]=0\n needs_pax=True\n elif val_is_float:\n \n \n info[name]=val_int\n needs_pax=True\n \n \n if needs_pax and name not in pax_headers:\n pax_headers[name]=str(val)\n \n \n if pax_headers:\n buf=self._create_pax_generic_header(pax_headers,XHDTYPE,encoding)\n else:\n buf=b\"\"\n \n return buf+self._create_header(info,USTAR_FORMAT,\"ascii\",\"replace\")\n \n @classmethod\n def create_pax_global_header(cls,pax_headers):\n ''\n \n return cls._create_pax_generic_header(pax_headers,XGLTYPE,\"utf-8\")\n \n def _posix_split_name(self,name,encoding,errors):\n ''\n\n \n components=name.split(\"/\")\n for i in range(1,len(components)):\n prefix=\"/\".join(components[:i])\n name=\"/\".join(components[i:])\n if len(prefix.encode(encoding,errors))<=LENGTH_PREFIX and\\\n len(name.encode(encoding,errors))<=LENGTH_NAME:\n break\n else:\n raise ValueError(\"name is too long\")\n \n return prefix,name\n \n @staticmethod\n def _create_header(info,format,encoding,errors):\n ''\n\n \n has_device_fields=info.get(\"type\")in(CHRTYPE,BLKTYPE)\n if has_device_fields:\n devmajor=itn(info.get(\"devmajor\",0),8,format)\n devminor=itn(info.get(\"devminor\",0),8,format)\n else:\n devmajor=stn(\"\",8,encoding,errors)\n devminor=stn(\"\",8,encoding,errors)\n \n \n \n filetype=info.get(\"type\",REGTYPE)\n if filetype is None:\n raise ValueError(\"TarInfo.type must not be None\")\n \n parts=[\n stn(info.get(\"name\",\"\"),100,encoding,errors),\n itn(info.get(\"mode\",0)&0o7777,8,format),\n itn(info.get(\"uid\",0),8,format),\n itn(info.get(\"gid\",0),8,format),\n itn(info.get(\"size\",0),12,format),\n itn(info.get(\"mtime\",0),12,format),\n b\" \",\n filetype,\n stn(info.get(\"linkname\",\"\"),100,encoding,errors),\n info.get(\"magic\",POSIX_MAGIC),\n stn(info.get(\"uname\",\"\"),32,encoding,errors),\n stn(info.get(\"gname\",\"\"),32,encoding,errors),\n devmajor,\n devminor,\n stn(info.get(\"prefix\",\"\"),155,encoding,errors)\n ]\n \n buf=struct.pack(\"%ds\"%BLOCKSIZE,b\"\".join(parts))\n chksum=calc_chksums(buf[-BLOCKSIZE:])[0]\n buf=buf[:-364]+bytes(\"%06o\\0\"%chksum,\"ascii\")+buf[-357:]\n return buf\n \n @staticmethod\n def _create_payload(payload):\n ''\n\n \n blocks,remainder=divmod(len(payload),BLOCKSIZE)\n if remainder >0:\n payload +=(BLOCKSIZE -remainder)*NUL\n return payload\n \n @classmethod\n def _create_gnu_long_header(cls,name,type,encoding,errors):\n ''\n\n \n name=name.encode(encoding,errors)+NUL\n \n info={}\n info[\"name\"]=\"././@LongLink\"\n info[\"type\"]=type\n info[\"size\"]=len(name)\n info[\"magic\"]=GNU_MAGIC\n \n \n return cls._create_header(info,USTAR_FORMAT,encoding,errors)+\\\n cls._create_payload(name)\n \n @classmethod\n def _create_pax_generic_header(cls,pax_headers,type,encoding):\n ''\n\n\n \n \n \n binary=False\n for keyword,value in pax_headers.items():\n try:\n value.encode(\"utf-8\",\"strict\")\n except UnicodeEncodeError:\n binary=True\n break\n \n records=b\"\"\n if binary:\n \n records +=b\"21 hdrcharset=BINARY\\n\"\n \n for keyword,value in pax_headers.items():\n keyword=keyword.encode(\"utf-8\")\n if binary:\n \n \n value=value.encode(encoding,\"surrogateescape\")\n else:\n value=value.encode(\"utf-8\")\n \n l=len(keyword)+len(value)+3\n n=p=0\n while True:\n n=l+len(str(p))\n if n ==p:\n break\n p=n\n records +=bytes(str(p),\"ascii\")+b\" 
\"+keyword+b\"=\"+value+b\"\\n\"\n \n \n \n info={}\n info[\"name\"]=\"././@PaxHeader\"\n info[\"type\"]=type\n info[\"size\"]=len(records)\n info[\"magic\"]=POSIX_MAGIC\n \n \n return cls._create_header(info,USTAR_FORMAT,\"ascii\",\"replace\")+\\\n cls._create_payload(records)\n \n @classmethod\n def frombuf(cls,buf,encoding,errors):\n ''\n \n if len(buf)==0:\n raise EmptyHeaderError(\"empty header\")\n if len(buf)!=BLOCKSIZE:\n raise TruncatedHeaderError(\"truncated header\")\n if buf.count(NUL)==BLOCKSIZE:\n raise EOFHeaderError(\"end of file header\")\n \n chksum=nti(buf[148:156])\n if chksum not in calc_chksums(buf):\n raise InvalidHeaderError(\"bad checksum\")\n \n obj=cls()\n obj.name=nts(buf[0:100],encoding,errors)\n obj.mode=nti(buf[100:108])\n obj.uid=nti(buf[108:116])\n obj.gid=nti(buf[116:124])\n obj.size=nti(buf[124:136])\n obj.mtime=nti(buf[136:148])\n obj.chksum=chksum\n obj.type=buf[156:157]\n obj.linkname=nts(buf[157:257],encoding,errors)\n obj.uname=nts(buf[265:297],encoding,errors)\n obj.gname=nts(buf[297:329],encoding,errors)\n obj.devmajor=nti(buf[329:337])\n obj.devminor=nti(buf[337:345])\n prefix=nts(buf[345:500],encoding,errors)\n \n \n \n if obj.type ==AREGTYPE and obj.name.endswith(\"/\"):\n obj.type=DIRTYPE\n \n \n \n \n if obj.type ==GNUTYPE_SPARSE:\n pos=386\n structs=[]\n for i in range(4):\n try:\n offset=nti(buf[pos:pos+12])\n numbytes=nti(buf[pos+12:pos+24])\n except ValueError:\n break\n structs.append((offset,numbytes))\n pos +=24\n isextended=bool(buf[482])\n origsize=nti(buf[483:495])\n obj._sparse_structs=(structs,isextended,origsize)\n \n \n if obj.isdir():\n obj.name=obj.name.rstrip(\"/\")\n \n \n if prefix and obj.type not in GNU_TYPES:\n obj.name=prefix+\"/\"+obj.name\n return obj\n \n @classmethod\n def fromtarfile(cls,tarfile):\n ''\n\n \n buf=tarfile.fileobj.read(BLOCKSIZE)\n obj=cls.frombuf(buf,tarfile.encoding,tarfile.errors)\n obj.offset=tarfile.fileobj.tell()-BLOCKSIZE\n return obj._proc_member(tarfile)\n \n \n \n \n \n \n \n \n \n \n \n \n def _proc_member(self,tarfile):\n ''\n\n \n if self.type in(GNUTYPE_LONGNAME,GNUTYPE_LONGLINK):\n return self._proc_gnulong(tarfile)\n elif self.type ==GNUTYPE_SPARSE:\n return self._proc_sparse(tarfile)\n elif self.type in(XHDTYPE,XGLTYPE,SOLARIS_XHDTYPE):\n return self._proc_pax(tarfile)\n else:\n return self._proc_builtin(tarfile)\n \n def _proc_builtin(self,tarfile):\n ''\n\n \n self.offset_data=tarfile.fileobj.tell()\n offset=self.offset_data\n if self.isreg()or self.type not in SUPPORTED_TYPES:\n \n offset +=self._block(self.size)\n tarfile.offset=offset\n \n \n \n self._apply_pax_info(tarfile.pax_headers,tarfile.encoding,tarfile.errors)\n \n \n \n if self.isdir():\n self.name=self.name.rstrip(\"/\")\n \n return self\n \n def _proc_gnulong(self,tarfile):\n ''\n\n \n buf=tarfile.fileobj.read(self._block(self.size))\n \n \n try:\n next=self.fromtarfile(tarfile)\n except HeaderError as e:\n raise SubsequentHeaderError(str(e))from None\n \n \n \n next.offset=self.offset\n if self.type ==GNUTYPE_LONGNAME:\n next.name=nts(buf,tarfile.encoding,tarfile.errors)\n elif self.type ==GNUTYPE_LONGLINK:\n next.linkname=nts(buf,tarfile.encoding,tarfile.errors)\n \n \n \n if next.isdir():\n next.name=next.name.removesuffix(\"/\")\n \n return next\n \n def _proc_sparse(self,tarfile):\n ''\n \n \n structs,isextended,origsize=self._sparse_structs\n del self._sparse_structs\n \n \n while isextended:\n buf=tarfile.fileobj.read(BLOCKSIZE)\n pos=0\n for i in range(21):\n try:\n offset=nti(buf[pos:pos+12])\n 
numbytes=nti(buf[pos+12:pos+24])\n except ValueError:\n break\n if offset and numbytes:\n structs.append((offset,numbytes))\n pos +=24\n isextended=bool(buf[504])\n self.sparse=structs\n \n self.offset_data=tarfile.fileobj.tell()\n tarfile.offset=self.offset_data+self._block(self.size)\n self.size=origsize\n return self\n \n def _proc_pax(self,tarfile):\n ''\n\n \n \n buf=tarfile.fileobj.read(self._block(self.size))\n \n \n \n \n if self.type ==XGLTYPE:\n pax_headers=tarfile.pax_headers\n else:\n pax_headers=tarfile.pax_headers.copy()\n \n \n \n \n \n \n match=re.search(br\"\\d+ hdrcharset=([^\\n]+)\\n\",buf)\n if match is not None:\n pax_headers[\"hdrcharset\"]=match.group(1).decode(\"utf-8\")\n \n \n \n \n hdrcharset=pax_headers.get(\"hdrcharset\")\n if hdrcharset ==\"BINARY\":\n encoding=tarfile.encoding\n else:\n encoding=\"utf-8\"\n \n \n \n \n \n regex=re.compile(br\"(\\d+) ([^=]+)=\")\n pos=0\n while match :=regex.match(buf,pos):\n length,keyword=match.groups()\n length=int(length)\n if length ==0:\n raise InvalidHeaderError(\"invalid header\")\n value=buf[match.end(2)+1:match.start(1)+length -1]\n \n \n \n \n \n \n \n \n keyword=self._decode_pax_field(keyword,\"utf-8\",\"utf-8\",\n tarfile.errors)\n if keyword in PAX_NAME_FIELDS:\n value=self._decode_pax_field(value,encoding,tarfile.encoding,\n tarfile.errors)\n else:\n value=self._decode_pax_field(value,\"utf-8\",\"utf-8\",\n tarfile.errors)\n \n pax_headers[keyword]=value\n pos +=length\n \n \n try:\n next=self.fromtarfile(tarfile)\n except HeaderError as e:\n raise SubsequentHeaderError(str(e))from None\n \n \n if \"GNU.sparse.map\"in pax_headers:\n \n self._proc_gnusparse_01(next,pax_headers)\n \n elif \"GNU.sparse.size\"in pax_headers:\n \n self._proc_gnusparse_00(next,pax_headers,buf)\n \n elif pax_headers.get(\"GNU.sparse.major\")==\"1\"and pax_headers.get(\"GNU.sparse.minor\")==\"0\":\n \n self._proc_gnusparse_10(next,pax_headers,tarfile)\n \n if self.type in(XHDTYPE,SOLARIS_XHDTYPE):\n \n next._apply_pax_info(pax_headers,tarfile.encoding,tarfile.errors)\n next.offset=self.offset\n \n if \"size\"in pax_headers:\n \n \n \n offset=next.offset_data\n if next.isreg()or next.type not in SUPPORTED_TYPES:\n offset +=next._block(next.size)\n tarfile.offset=offset\n \n return next\n \n def _proc_gnusparse_00(self,next,pax_headers,buf):\n ''\n \n offsets=[]\n for match in re.finditer(br\"\\d+ GNU.sparse.offset=(\\d+)\\n\",buf):\n offsets.append(int(match.group(1)))\n numbytes=[]\n for match in re.finditer(br\"\\d+ GNU.sparse.numbytes=(\\d+)\\n\",buf):\n numbytes.append(int(match.group(1)))\n next.sparse=list(zip(offsets,numbytes))\n \n def _proc_gnusparse_01(self,next,pax_headers):\n ''\n \n sparse=[int(x)for x in pax_headers[\"GNU.sparse.map\"].split(\",\")]\n next.sparse=list(zip(sparse[::2],sparse[1::2]))\n \n def _proc_gnusparse_10(self,next,pax_headers,tarfile):\n ''\n \n fields=None\n sparse=[]\n buf=tarfile.fileobj.read(BLOCKSIZE)\n fields,buf=buf.split(b\"\\n\",1)\n fields=int(fields)\n while len(sparse)0:\n self.fileobj.write(NUL *(RECORDSIZE -remainder))\n finally:\n if not self._extfileobj:\n self.fileobj.close()\n \n def getmember(self,name):\n ''\n\n\n\n \n tarinfo=self._getmember(name.rstrip('/'))\n if tarinfo is None:\n raise KeyError(\"filename %r not found\"%name)\n return tarinfo\n \n def getmembers(self):\n ''\n\n \n self._check()\n if not self._loaded:\n self._load()\n \n return self.members\n \n def getnames(self):\n ''\n\n \n return[tarinfo.name for tarinfo in self.getmembers()]\n \n def 
gettarinfo(self,name=None,arcname=None,fileobj=None):\n ''\n\n\n\n\n\n\n \n self._check(\"awx\")\n \n \n \n if fileobj is not None:\n name=fileobj.name\n \n \n \n \n if arcname is None:\n arcname=name\n drv,arcname=os.path.splitdrive(arcname)\n arcname=arcname.replace(os.sep,\"/\")\n arcname=arcname.lstrip(\"/\")\n \n \n \n tarinfo=self.tarinfo()\n tarinfo.tarfile=self\n \n \n if fileobj is None:\n if not self.dereference:\n statres=os.lstat(name)\n else:\n statres=os.stat(name)\n else:\n statres=os.fstat(fileobj.fileno())\n linkname=\"\"\n \n stmd=statres.st_mode\n if stat.S_ISREG(stmd):\n inode=(statres.st_ino,statres.st_dev)\n if not self.dereference and statres.st_nlink >1 and\\\n inode in self.inodes and arcname !=self.inodes[inode]:\n \n \n type=LNKTYPE\n linkname=self.inodes[inode]\n else:\n \n \n type=REGTYPE\n if inode[0]:\n self.inodes[inode]=arcname\n elif stat.S_ISDIR(stmd):\n type=DIRTYPE\n elif stat.S_ISFIFO(stmd):\n type=FIFOTYPE\n elif stat.S_ISLNK(stmd):\n type=SYMTYPE\n linkname=os.readlink(name)\n elif stat.S_ISCHR(stmd):\n type=CHRTYPE\n elif stat.S_ISBLK(stmd):\n type=BLKTYPE\n else:\n return None\n \n \n \n tarinfo.name=arcname\n tarinfo.mode=stmd\n tarinfo.uid=statres.st_uid\n tarinfo.gid=statres.st_gid\n if type ==REGTYPE:\n tarinfo.size=statres.st_size\n else:\n tarinfo.size=0\n tarinfo.mtime=statres.st_mtime\n tarinfo.type=type\n tarinfo.linkname=linkname\n if pwd:\n try:\n tarinfo.uname=pwd.getpwuid(tarinfo.uid)[0]\n except KeyError:\n pass\n if grp:\n try:\n tarinfo.gname=grp.getgrgid(tarinfo.gid)[0]\n except KeyError:\n pass\n \n if type in(CHRTYPE,BLKTYPE):\n if hasattr(os,\"major\")and hasattr(os,\"minor\"):\n tarinfo.devmajor=os.major(statres.st_rdev)\n tarinfo.devminor=os.minor(statres.st_rdev)\n return tarinfo\n \n def list(self,verbose=True,*,members=None):\n ''\n\n\n\n \n self._check()\n \n if members is None:\n members=self\n for tarinfo in members:\n if verbose:\n if tarinfo.mode is None:\n _safe_print(\"??????????\")\n else:\n _safe_print(stat.filemode(tarinfo.mode))\n _safe_print(\"%s/%s\"%(tarinfo.uname or tarinfo.uid,\n tarinfo.gname or tarinfo.gid))\n if tarinfo.ischr()or tarinfo.isblk():\n _safe_print(\"%10s\"%\n (\"%d,%d\"%(tarinfo.devmajor,tarinfo.devminor)))\n else:\n _safe_print(\"%10d\"%tarinfo.size)\n if tarinfo.mtime is None:\n _safe_print(\"????-??-?? 
??:??:??\")\n else:\n _safe_print(\"%d-%02d-%02d %02d:%02d:%02d\"\\\n %time.localtime(tarinfo.mtime)[:6])\n \n _safe_print(tarinfo.name+(\"/\"if tarinfo.isdir()else \"\"))\n \n if verbose:\n if tarinfo.issym():\n _safe_print(\"-> \"+tarinfo.linkname)\n if tarinfo.islnk():\n _safe_print(\"link to \"+tarinfo.linkname)\n print()\n \n def add(self,name,arcname=None,recursive=True,*,filter=None):\n ''\n\n\n\n\n\n\n\n \n self._check(\"awx\")\n \n if arcname is None:\n arcname=name\n \n \n if self.name is not None and os.path.abspath(name)==self.name:\n self._dbg(2,\"tarfile: Skipped %r\"%name)\n return\n \n self._dbg(1,name)\n \n \n tarinfo=self.gettarinfo(name,arcname)\n \n if tarinfo is None:\n self._dbg(1,\"tarfile: Unsupported type %r\"%name)\n return\n \n \n if filter is not None:\n tarinfo=filter(tarinfo)\n if tarinfo is None:\n self._dbg(2,\"tarfile: Excluded %r\"%name)\n return\n \n \n if tarinfo.isreg():\n with bltn_open(name,\"rb\")as f:\n self.addfile(tarinfo,f)\n \n elif tarinfo.isdir():\n self.addfile(tarinfo)\n if recursive:\n for f in sorted(os.listdir(name)):\n self.add(os.path.join(name,f),os.path.join(arcname,f),\n recursive,filter=filter)\n \n else:\n self.addfile(tarinfo)\n \n def addfile(self,tarinfo,fileobj=None):\n ''\n\n\n\n \n self._check(\"awx\")\n \n tarinfo=copy.copy(tarinfo)\n \n buf=tarinfo.tobuf(self.format,self.encoding,self.errors)\n self.fileobj.write(buf)\n self.offset +=len(buf)\n bufsize=self.copybufsize\n \n if fileobj is not None:\n copyfileobj(fileobj,self.fileobj,tarinfo.size,bufsize=bufsize)\n blocks,remainder=divmod(tarinfo.size,BLOCKSIZE)\n if remainder >0:\n self.fileobj.write(NUL *(BLOCKSIZE -remainder))\n blocks +=1\n self.offset +=blocks *BLOCKSIZE\n \n self.members.append(tarinfo)\n \n def _get_filter_function(self,filter):\n if filter is None:\n filter=self.extraction_filter\n if filter is None:\n warnings.warn(\n 'Python 3.14 will, by default, filter extracted tar '\n +'archives and reject files or modify their metadata. '\n +'Use the filter argument to control this behavior.',\n DeprecationWarning)\n return fully_trusted_filter\n if isinstance(filter,str):\n raise TypeError(\n 'String names are not supported for '\n +'TarFile.extraction_filter. 
Use a function such as '\n +'tarfile.data_filter directly.')\n return filter\n if callable(filter):\n return filter\n try:\n return _NAMED_FILTERS[filter]\n except KeyError:\n raise ValueError(f\"filter {filter !r} not found\")from None\n \n def extractall(self,path=\".\",members=None,*,numeric_owner=False,\n filter=None):\n ''\n\n\n\n\n\n\n\n\n\n\n \n directories=[]\n \n filter_function=self._get_filter_function(filter)\n if members is None:\n members=self\n \n for member in members:\n tarinfo=self._get_extract_tarinfo(member,filter_function,path)\n if tarinfo is None:\n continue\n if tarinfo.isdir():\n \n \n \n directories.append(tarinfo)\n self._extract_one(tarinfo,path,set_attrs=not tarinfo.isdir(),\n numeric_owner=numeric_owner)\n \n \n directories.sort(key=lambda a:a.name,reverse=True)\n \n \n for tarinfo in directories:\n dirpath=os.path.join(path,tarinfo.name)\n try:\n self.chown(tarinfo,dirpath,numeric_owner=numeric_owner)\n self.utime(tarinfo,dirpath)\n self.chmod(tarinfo,dirpath)\n except ExtractError as e:\n self._handle_nonfatal_error(e)\n \n def extract(self,member,path=\"\",set_attrs=True,*,numeric_owner=False,\n filter=None):\n ''\n\n\n\n\n\n\n\n\n\n\n \n filter_function=self._get_filter_function(filter)\n tarinfo=self._get_extract_tarinfo(member,filter_function,path)\n if tarinfo is not None:\n self._extract_one(tarinfo,path,set_attrs,numeric_owner)\n \n def _get_extract_tarinfo(self,member,filter_function,path):\n ''\n if isinstance(member,str):\n tarinfo=self.getmember(member)\n else:\n tarinfo=member\n \n unfiltered=tarinfo\n try:\n tarinfo=filter_function(tarinfo,path)\n except(OSError,FilterError)as e:\n self._handle_fatal_error(e)\n except ExtractError as e:\n self._handle_nonfatal_error(e)\n if tarinfo is None:\n self._dbg(2,\"tarfile: Excluded %r\"%unfiltered.name)\n return None\n \n if tarinfo.islnk():\n tarinfo=copy.copy(tarinfo)\n tarinfo._link_target=os.path.join(path,tarinfo.linkname)\n return tarinfo\n \n def _extract_one(self,tarinfo,path,set_attrs,numeric_owner):\n ''\n self._check(\"r\")\n \n try:\n self._extract_member(tarinfo,os.path.join(path,tarinfo.name),\n set_attrs=set_attrs,\n numeric_owner=numeric_owner)\n except OSError as e:\n self._handle_fatal_error(e)\n except ExtractError as e:\n self._handle_nonfatal_error(e)\n \n def _handle_nonfatal_error(self,e):\n ''\n if self.errorlevel >1:\n raise\n else:\n self._dbg(1,\"tarfile: %s\"%e)\n \n def _handle_fatal_error(self,e):\n ''\n if self.errorlevel >0:\n raise\n elif isinstance(e,OSError):\n if e.filename is None:\n self._dbg(1,\"tarfile: %s\"%e.strerror)\n else:\n self._dbg(1,\"tarfile: %s %r\"%(e.strerror,e.filename))\n else:\n self._dbg(1,\"tarfile: %s %s\"%(type(e).__name__,e))\n \n def extractfile(self,member):\n ''\n\n\n\n\n \n self._check(\"r\")\n \n if isinstance(member,str):\n tarinfo=self.getmember(member)\n else:\n tarinfo=member\n \n if tarinfo.isreg()or tarinfo.type not in SUPPORTED_TYPES:\n \n return self.fileobject(self,tarinfo)\n \n elif tarinfo.islnk()or tarinfo.issym():\n if isinstance(self.fileobj,_Stream):\n \n \n \n raise StreamError(\"cannot extract (sym)link as file object\")\n else:\n \n return self.extractfile(self._find_link_target(tarinfo))\n else:\n \n \n return None\n \n def _extract_member(self,tarinfo,targetpath,set_attrs=True,\n numeric_owner=False):\n ''\n\n \n \n \n \n targetpath=targetpath.rstrip(\"/\")\n targetpath=targetpath.replace(\"/\",os.sep)\n \n \n upperdirs=os.path.dirname(targetpath)\n if upperdirs and not os.path.exists(upperdirs):\n \n \n 
os.makedirs(upperdirs)\n \n if tarinfo.islnk()or tarinfo.issym():\n self._dbg(1,\"%s -> %s\"%(tarinfo.name,tarinfo.linkname))\n else:\n self._dbg(1,tarinfo.name)\n \n if tarinfo.isreg():\n self.makefile(tarinfo,targetpath)\n elif tarinfo.isdir():\n self.makedir(tarinfo,targetpath)\n elif tarinfo.isfifo():\n self.makefifo(tarinfo,targetpath)\n elif tarinfo.ischr()or tarinfo.isblk():\n self.makedev(tarinfo,targetpath)\n elif tarinfo.islnk()or tarinfo.issym():\n self.makelink(tarinfo,targetpath)\n elif tarinfo.type not in SUPPORTED_TYPES:\n self.makeunknown(tarinfo,targetpath)\n else:\n self.makefile(tarinfo,targetpath)\n \n if set_attrs:\n self.chown(tarinfo,targetpath,numeric_owner)\n if not tarinfo.issym():\n self.chmod(tarinfo,targetpath)\n self.utime(tarinfo,targetpath)\n \n \n \n \n \n \n def makedir(self,tarinfo,targetpath):\n ''\n \n try:\n if tarinfo.mode is None:\n \n os.mkdir(targetpath)\n else:\n \n \n os.mkdir(targetpath,0o700)\n except FileExistsError:\n pass\n \n def makefile(self,tarinfo,targetpath):\n ''\n \n source=self.fileobj\n source.seek(tarinfo.offset_data)\n bufsize=self.copybufsize\n with bltn_open(targetpath,\"wb\")as target:\n if tarinfo.sparse is not None:\n for offset,size in tarinfo.sparse:\n target.seek(offset)\n copyfileobj(source,target,size,ReadError,bufsize)\n target.seek(tarinfo.size)\n target.truncate()\n else:\n copyfileobj(source,target,tarinfo.size,ReadError,bufsize)\n \n def makeunknown(self,tarinfo,targetpath):\n ''\n\n \n self.makefile(tarinfo,targetpath)\n self._dbg(1,\"tarfile: Unknown file type %r, \"\\\n \"extracted as regular file.\"%tarinfo.type)\n \n def makefifo(self,tarinfo,targetpath):\n ''\n \n if hasattr(os,\"mkfifo\"):\n os.mkfifo(targetpath)\n else:\n raise ExtractError(\"fifo not supported by system\")\n \n def makedev(self,tarinfo,targetpath):\n ''\n \n if not hasattr(os,\"mknod\")or not hasattr(os,\"makedev\"):\n raise ExtractError(\"special devices not supported by system\")\n \n mode=tarinfo.mode\n if mode is None:\n \n mode=0o600\n if tarinfo.isblk():\n mode |=stat.S_IFBLK\n else:\n mode |=stat.S_IFCHR\n \n os.mknod(targetpath,mode,\n os.makedev(tarinfo.devmajor,tarinfo.devminor))\n \n def makelink(self,tarinfo,targetpath):\n ''\n\n\n \n try:\n \n if tarinfo.issym():\n if os.path.lexists(targetpath):\n \n os.unlink(targetpath)\n os.symlink(tarinfo.linkname,targetpath)\n else:\n if os.path.exists(tarinfo._link_target):\n os.link(tarinfo._link_target,targetpath)\n else:\n self._extract_member(self._find_link_target(tarinfo),\n targetpath)\n except symlink_exception:\n try:\n self._extract_member(self._find_link_target(tarinfo),\n targetpath)\n except KeyError:\n raise ExtractError(\"unable to resolve link inside archive\")from None\n \n def chown(self,tarinfo,targetpath,numeric_owner):\n ''\n\n\n\n \n if hasattr(os,\"geteuid\")and os.geteuid()==0:\n \n g=tarinfo.gid\n u=tarinfo.uid\n if not numeric_owner:\n try:\n if grp and tarinfo.gname:\n g=grp.getgrnam(tarinfo.gname)[2]\n except KeyError:\n pass\n try:\n if pwd and tarinfo.uname:\n u=pwd.getpwnam(tarinfo.uname)[2]\n except KeyError:\n pass\n if g is None:\n g=-1\n if u is None:\n u=-1\n try:\n if tarinfo.issym()and hasattr(os,\"lchown\"):\n os.lchown(targetpath,u,g)\n else:\n os.chown(targetpath,u,g)\n except OSError as e:\n raise ExtractError(\"could not change owner\")from e\n \n def chmod(self,tarinfo,targetpath):\n ''\n \n if tarinfo.mode is None:\n return\n try:\n os.chmod(targetpath,tarinfo.mode)\n except OSError as e:\n raise ExtractError(\"could not change mode\")from 
e\n \n def utime(self,tarinfo,targetpath):\n ''\n \n mtime=tarinfo.mtime\n if mtime is None:\n return\n if not hasattr(os,'utime'):\n return\n try:\n os.utime(targetpath,(mtime,mtime))\n except OSError as e:\n raise ExtractError(\"could not change modification time\")from e\n \n \n def next(self):\n ''\n\n\n \n self._check(\"ra\")\n if self.firstmember is not None:\n m=self.firstmember\n self.firstmember=None\n return m\n \n \n if self.offset !=self.fileobj.tell():\n if self.offset ==0:\n return None\n self.fileobj.seek(self.offset -1)\n if not self.fileobj.read(1):\n raise ReadError(\"unexpected end of data\")\n \n \n tarinfo=None\n while True:\n try:\n tarinfo=self.tarinfo.fromtarfile(self)\n except EOFHeaderError as e:\n if self.ignore_zeros:\n self._dbg(2,\"0x%X: %s\"%(self.offset,e))\n self.offset +=BLOCKSIZE\n continue\n except InvalidHeaderError as e:\n if self.ignore_zeros:\n self._dbg(2,\"0x%X: %s\"%(self.offset,e))\n self.offset +=BLOCKSIZE\n continue\n elif self.offset ==0:\n raise ReadError(str(e))from None\n except EmptyHeaderError:\n if self.offset ==0:\n raise ReadError(\"empty file\")from None\n except TruncatedHeaderError as e:\n if self.offset ==0:\n raise ReadError(str(e))from None\n except SubsequentHeaderError as e:\n raise ReadError(str(e))from None\n except Exception as e:\n try:\n import zlib\n if isinstance(e,zlib.error):\n raise ReadError(f'zlib error: {e}')from None\n else:\n raise e\n except ImportError:\n raise e\n break\n \n if tarinfo is not None:\n self.members.append(tarinfo)\n else:\n self._loaded=True\n \n return tarinfo\n \n \n \n \n def _getmember(self,name,tarinfo=None,normalize=False):\n ''\n\n \n \n members=self.getmembers()\n \n \n skipping=False\n if tarinfo is not None:\n try:\n index=members.index(tarinfo)\n except ValueError:\n \n \n skipping=True\n else:\n \n members=members[:index]\n \n if normalize:\n name=os.path.normpath(name)\n \n for member in reversed(members):\n if skipping:\n if tarinfo.offset ==member.offset:\n skipping=False\n continue\n if normalize:\n member_name=os.path.normpath(member.name)\n else:\n member_name=member.name\n \n if name ==member_name:\n return member\n \n if skipping:\n \n raise ValueError(tarinfo)\n \n def _load(self):\n ''\n\n \n while self.next()is not None:\n pass\n self._loaded=True\n \n def _check(self,mode=None):\n ''\n\n \n if self.closed:\n raise OSError(\"%s is closed\"%self.__class__.__name__)\n if mode is not None and self.mode not in mode:\n raise OSError(\"bad operation for mode %r\"%self.mode)\n \n def _find_link_target(self,tarinfo):\n ''\n\n \n if tarinfo.issym():\n \n linkname=\"/\".join(filter(None,(os.path.dirname(tarinfo.name),tarinfo.linkname)))\n limit=None\n else:\n \n \n linkname=tarinfo.linkname\n limit=tarinfo\n \n member=self._getmember(linkname,tarinfo=limit,normalize=True)\n if member is None:\n raise KeyError(\"linkname %r not found\"%linkname)\n return member\n \n def __iter__(self):\n ''\n \n if self._loaded:\n yield from self.members\n return\n \n \n \n index=0\n \n \n \n if self.firstmember is not None:\n tarinfo=self.next()\n index +=1\n yield tarinfo\n \n while True:\n if index ',''),\n help='Extract tarfile into target dir')\n group.add_argument('-c','--create',nargs='+',\n metavar=('',''),\n help='Create tarfile from sources')\n group.add_argument('-t','--test',metavar='',\n help='Test if a tarfile is valid')\n \n args=parser.parse_args()\n \n if args.filter and args.extract is None:\n parser.exit(1,'--filter is only valid for extraction\\n')\n \n if args.test is not None:\n 
src=args.test\n if is_tarfile(src):\n with open(src,'r')as tar:\n tar.getmembers()\n print(tar.getmembers(),file=sys.stderr)\n if args.verbose:\n print('{!r} is a tar archive.'.format(src))\n else:\n parser.exit(1,'{!r} is not a tar archive.\\n'.format(src))\n \n elif args.list is not None:\n src=args.list\n if is_tarfile(src):\n with TarFile.open(src,'r:*')as tf:\n tf.list(verbose=args.verbose)\n else:\n parser.exit(1,'{!r} is not a tar archive.\\n'.format(src))\n \n elif args.extract is not None:\n if len(args.extract)==1:\n src=args.extract[0]\n curdir=os.curdir\n elif len(args.extract)==2:\n src,curdir=args.extract\n else:\n parser.exit(1,parser.format_help())\n \n if is_tarfile(src):\n with TarFile.open(src,'r:*')as tf:\n tf.extractall(path=curdir,filter=args.filter)\n if args.verbose:\n if curdir =='.':\n msg='{!r} file is extracted.'.format(src)\n else:\n msg=('{!r} file is extracted '\n 'into {!r} directory.').format(src,curdir)\n print(msg)\n else:\n parser.exit(1,'{!r} is not a tar archive.\\n'.format(src))\n \n elif args.create is not None:\n tar_name=args.create.pop(0)\n _,ext=os.path.splitext(tar_name)\n compressions={\n \n '.gz':'gz',\n '.tgz':'gz',\n \n '.xz':'xz',\n '.txz':'xz',\n \n '.bz2':'bz2',\n '.tbz':'bz2',\n '.tbz2':'bz2',\n '.tb2':'bz2',\n }\n tar_mode='w:'+compressions[ext]if ext in compressions else 'w'\n tar_files=args.create\n \n with TarFile.open(tar_name,tar_mode)as tf:\n for file_name in tar_files:\n tf.add(file_name)\n \n if args.verbose:\n print('{!r} file created.'.format(tar_name))\n \nif __name__ =='__main__':\n main()\n", ["argparse", "builtins", "bz2", "copy", "grp", "gzip", "io", "lzma", "os", "pwd", "re", "shutil", "stat", "struct", "sys", "time", "warnings", "zlib"]], "stringprep": [".py", "\n''\n\n\n\n\n\nfrom unicodedata import ucd_3_2_0 as unicodedata\n\nassert unicodedata.unidata_version =='3.2.0'\n\ndef in_table_a1(code):\n if unicodedata.category(code)!='Cn':return False\n c=ord(code)\n if 0xFDD0 <=c <0xFDF0:return False\n return(c&0xFFFF)not in(0xFFFE,0xFFFF)\n \n \nb1_set=set([173,847,6150,6155,6156,6157,8203,8204,8205,8288,65279]+list(range(65024,65040)))\ndef in_table_b1(code):\n return ord(code)in b1_set\n \n \nb3_exceptions={\n0xb5:'\\u03bc',0xdf:'ss',0x130:'i\\u0307',0x149:'\\u02bcn',\n0x17f:'s',0x1f0:'j\\u030c',0x345:'\\u03b9',0x37a:' 
\\u03b9',\n0x390:'\\u03b9\\u0308\\u0301',0x3b0:'\\u03c5\\u0308\\u0301',0x3c2:'\\u03c3',0x3d0:'\\u03b2',\n0x3d1:'\\u03b8',0x3d2:'\\u03c5',0x3d3:'\\u03cd',0x3d4:'\\u03cb',\n0x3d5:'\\u03c6',0x3d6:'\\u03c0',0x3f0:'\\u03ba',0x3f1:'\\u03c1',\n0x3f2:'\\u03c3',0x3f5:'\\u03b5',0x587:'\\u0565\\u0582',0x1e96:'h\\u0331',\n0x1e97:'t\\u0308',0x1e98:'w\\u030a',0x1e99:'y\\u030a',0x1e9a:'a\\u02be',\n0x1e9b:'\\u1e61',0x1f50:'\\u03c5\\u0313',0x1f52:'\\u03c5\\u0313\\u0300',0x1f54:'\\u03c5\\u0313\\u0301',\n0x1f56:'\\u03c5\\u0313\\u0342',0x1f80:'\\u1f00\\u03b9',0x1f81:'\\u1f01\\u03b9',0x1f82:'\\u1f02\\u03b9',\n0x1f83:'\\u1f03\\u03b9',0x1f84:'\\u1f04\\u03b9',0x1f85:'\\u1f05\\u03b9',0x1f86:'\\u1f06\\u03b9',\n0x1f87:'\\u1f07\\u03b9',0x1f88:'\\u1f00\\u03b9',0x1f89:'\\u1f01\\u03b9',0x1f8a:'\\u1f02\\u03b9',\n0x1f8b:'\\u1f03\\u03b9',0x1f8c:'\\u1f04\\u03b9',0x1f8d:'\\u1f05\\u03b9',0x1f8e:'\\u1f06\\u03b9',\n0x1f8f:'\\u1f07\\u03b9',0x1f90:'\\u1f20\\u03b9',0x1f91:'\\u1f21\\u03b9',0x1f92:'\\u1f22\\u03b9',\n0x1f93:'\\u1f23\\u03b9',0x1f94:'\\u1f24\\u03b9',0x1f95:'\\u1f25\\u03b9',0x1f96:'\\u1f26\\u03b9',\n0x1f97:'\\u1f27\\u03b9',0x1f98:'\\u1f20\\u03b9',0x1f99:'\\u1f21\\u03b9',0x1f9a:'\\u1f22\\u03b9',\n0x1f9b:'\\u1f23\\u03b9',0x1f9c:'\\u1f24\\u03b9',0x1f9d:'\\u1f25\\u03b9',0x1f9e:'\\u1f26\\u03b9',\n0x1f9f:'\\u1f27\\u03b9',0x1fa0:'\\u1f60\\u03b9',0x1fa1:'\\u1f61\\u03b9',0x1fa2:'\\u1f62\\u03b9',\n0x1fa3:'\\u1f63\\u03b9',0x1fa4:'\\u1f64\\u03b9',0x1fa5:'\\u1f65\\u03b9',0x1fa6:'\\u1f66\\u03b9',\n0x1fa7:'\\u1f67\\u03b9',0x1fa8:'\\u1f60\\u03b9',0x1fa9:'\\u1f61\\u03b9',0x1faa:'\\u1f62\\u03b9',\n0x1fab:'\\u1f63\\u03b9',0x1fac:'\\u1f64\\u03b9',0x1fad:'\\u1f65\\u03b9',0x1fae:'\\u1f66\\u03b9',\n0x1faf:'\\u1f67\\u03b9',0x1fb2:'\\u1f70\\u03b9',0x1fb3:'\\u03b1\\u03b9',0x1fb4:'\\u03ac\\u03b9',\n0x1fb6:'\\u03b1\\u0342',0x1fb7:'\\u03b1\\u0342\\u03b9',0x1fbc:'\\u03b1\\u03b9',0x1fbe:'\\u03b9',\n0x1fc2:'\\u1f74\\u03b9',0x1fc3:'\\u03b7\\u03b9',0x1fc4:'\\u03ae\\u03b9',0x1fc6:'\\u03b7\\u0342',\n0x1fc7:'\\u03b7\\u0342\\u03b9',0x1fcc:'\\u03b7\\u03b9',0x1fd2:'\\u03b9\\u0308\\u0300',0x1fd3:'\\u03b9\\u0308\\u0301',\n0x1fd6:'\\u03b9\\u0342',0x1fd7:'\\u03b9\\u0308\\u0342',0x1fe2:'\\u03c5\\u0308\\u0300',0x1fe3:'\\u03c5\\u0308\\u0301',\n0x1fe4:'\\u03c1\\u0313',0x1fe6:'\\u03c5\\u0342',0x1fe7:'\\u03c5\\u0308\\u0342',0x1ff2:'\\u1f7c\\u03b9',\n0x1ff3:'\\u03c9\\u03b9',0x1ff4:'\\u03ce\\u03b9',0x1ff6:'\\u03c9\\u0342',0x1ff7:'\\u03c9\\u0342\\u03b9',\n0x1ffc:'\\u03c9\\u03b9',0x20a8:'rs',0x2102:'c',0x2103:'\\xb0c',\n0x2107:'\\u025b',0x2109:'\\xb0f',0x210b:'h',0x210c:'h',\n0x210d:'h',0x2110:'i',0x2111:'i',0x2112:'l',\n0x2115:'n',0x2116:'no',0x2119:'p',0x211a:'q',\n0x211b:'r',0x211c:'r',0x211d:'r',0x2120:'sm',\n0x2121:'tel',0x2122:'tm',0x2124:'z',0x2128:'z',\n0x212c:'b',0x212d:'c',0x2130:'e',0x2131:'f',\n0x2133:'m',0x213e:'\\u03b3',0x213f:'\\u03c0',0x2145:'d',\n0x3371:'hpa',0x3373:'au',0x3375:'ov',0x3380:'pa',\n0x3381:'na',0x3382:'\\u03bca',0x3383:'ma',0x3384:'ka',\n0x3385:'kb',0x3386:'mb',0x3387:'gb',0x338a:'pf',\n0x338b:'nf',0x338c:'\\u03bcf',0x3390:'hz',0x3391:'khz',\n0x3392:'mhz',0x3393:'ghz',0x3394:'thz',0x33a9:'pa',\n0x33aa:'kpa',0x33ab:'mpa',0x33ac:'gpa',0x33b4:'pv',\n0x33b5:'nv',0x33b6:'\\u03bcv',0x33b7:'mv',0x33b8:'kv',\n0x33b9:'mv',0x33ba:'pw',0x33bb:'nw',0x33bc:'\\u03bcw',\n0x33bd:'mw',0x33be:'kw',0x33bf:'mw',0x33c0:'k\\u03c9',\n0x33c1:'m\\u03c9',0x33c3:'bq',0x33c6:'c\\u2215kg',0x33c7:'co.',\n0x33c8:'db',0x33c9:'gy',0x33cb:'hp',0x33cd:'kk',\n0x33ce:'km',0x33d7:'ph',0x33d9:'ppm',0x33da:'pr',\n0x33dc:'sv',0x33dd:'wb',0xfb00:'ff',0xfb01:'fi',\n0xfb02:'fl',0xfb03:'
ffi',0xfb04:'ffl',0xfb05:'st',\n0xfb06:'st',0xfb13:'\\u0574\\u0576',0xfb14:'\\u0574\\u0565',0xfb15:'\\u0574\\u056b',\n0xfb16:'\\u057e\\u0576',0xfb17:'\\u0574\\u056d',0x1d400:'a',0x1d401:'b',\n0x1d402:'c',0x1d403:'d',0x1d404:'e',0x1d405:'f',\n0x1d406:'g',0x1d407:'h',0x1d408:'i',0x1d409:'j',\n0x1d40a:'k',0x1d40b:'l',0x1d40c:'m',0x1d40d:'n',\n0x1d40e:'o',0x1d40f:'p',0x1d410:'q',0x1d411:'r',\n0x1d412:'s',0x1d413:'t',0x1d414:'u',0x1d415:'v',\n0x1d416:'w',0x1d417:'x',0x1d418:'y',0x1d419:'z',\n0x1d434:'a',0x1d435:'b',0x1d436:'c',0x1d437:'d',\n0x1d438:'e',0x1d439:'f',0x1d43a:'g',0x1d43b:'h',\n0x1d43c:'i',0x1d43d:'j',0x1d43e:'k',0x1d43f:'l',\n0x1d440:'m',0x1d441:'n',0x1d442:'o',0x1d443:'p',\n0x1d444:'q',0x1d445:'r',0x1d446:'s',0x1d447:'t',\n0x1d448:'u',0x1d449:'v',0x1d44a:'w',0x1d44b:'x',\n0x1d44c:'y',0x1d44d:'z',0x1d468:'a',0x1d469:'b',\n0x1d46a:'c',0x1d46b:'d',0x1d46c:'e',0x1d46d:'f',\n0x1d46e:'g',0x1d46f:'h',0x1d470:'i',0x1d471:'j',\n0x1d472:'k',0x1d473:'l',0x1d474:'m',0x1d475:'n',\n0x1d476:'o',0x1d477:'p',0x1d478:'q',0x1d479:'r',\n0x1d47a:'s',0x1d47b:'t',0x1d47c:'u',0x1d47d:'v',\n0x1d47e:'w',0x1d47f:'x',0x1d480:'y',0x1d481:'z',\n0x1d49c:'a',0x1d49e:'c',0x1d49f:'d',0x1d4a2:'g',\n0x1d4a5:'j',0x1d4a6:'k',0x1d4a9:'n',0x1d4aa:'o',\n0x1d4ab:'p',0x1d4ac:'q',0x1d4ae:'s',0x1d4af:'t',\n0x1d4b0:'u',0x1d4b1:'v',0x1d4b2:'w',0x1d4b3:'x',\n0x1d4b4:'y',0x1d4b5:'z',0x1d4d0:'a',0x1d4d1:'b',\n0x1d4d2:'c',0x1d4d3:'d',0x1d4d4:'e',0x1d4d5:'f',\n0x1d4d6:'g',0x1d4d7:'h',0x1d4d8:'i',0x1d4d9:'j',\n0x1d4da:'k',0x1d4db:'l',0x1d4dc:'m',0x1d4dd:'n',\n0x1d4de:'o',0x1d4df:'p',0x1d4e0:'q',0x1d4e1:'r',\n0x1d4e2:'s',0x1d4e3:'t',0x1d4e4:'u',0x1d4e5:'v',\n0x1d4e6:'w',0x1d4e7:'x',0x1d4e8:'y',0x1d4e9:'z',\n0x1d504:'a',0x1d505:'b',0x1d507:'d',0x1d508:'e',\n0x1d509:'f',0x1d50a:'g',0x1d50d:'j',0x1d50e:'k',\n0x1d50f:'l',0x1d510:'m',0x1d511:'n',0x1d512:'o',\n0x1d513:'p',0x1d514:'q',0x1d516:'s',0x1d517:'t',\n0x1d518:'u',0x1d519:'v',0x1d51a:'w',0x1d51b:'x',\n0x1d51c:'y',0x1d538:'a',0x1d539:'b',0x1d53b:'d',\n0x1d53c:'e',0x1d53d:'f',0x1d53e:'g',0x1d540:'i',\n0x1d541:'j',0x1d542:'k',0x1d543:'l',0x1d544:'m',\n0x1d546:'o',0x1d54a:'s',0x1d54b:'t',0x1d54c:'u',\n0x1d54d:'v',0x1d54e:'w',0x1d54f:'x',0x1d550:'y',\n0x1d56c:'a',0x1d56d:'b',0x1d56e:'c',0x1d56f:'d',\n0x1d570:'e',0x1d571:'f',0x1d572:'g',0x1d573:'h',\n0x1d574:'i',0x1d575:'j',0x1d576:'k',0x1d577:'l',\n0x1d578:'m',0x1d579:'n',0x1d57a:'o',0x1d57b:'p',\n0x1d57c:'q',0x1d57d:'r',0x1d57e:'s',0x1d57f:'t',\n0x1d580:'u',0x1d581:'v',0x1d582:'w',0x1d583:'x',\n0x1d584:'y',0x1d585:'z',0x1d5a0:'a',0x1d5a1:'b',\n0x1d5a2:'c',0x1d5a3:'d',0x1d5a4:'e',0x1d5a5:'f',\n0x1d5a6:'g',0x1d5a7:'h',0x1d5a8:'i',0x1d5a9:'j',\n0x1d5aa:'k',0x1d5ab:'l',0x1d5ac:'m',0x1d5ad:'n',\n0x1d5ae:'o',0x1d5af:'p',0x1d5b0:'q',0x1d5b1:'r',\n0x1d5b2:'s',0x1d5b3:'t',0x1d5b4:'u',0x1d5b5:'v',\n0x1d5b6:'w',0x1d5b7:'x',0x1d5b8:'y',0x1d5b9:'z',\n0x1d5d4:'a',0x1d5d5:'b',0x1d5d6:'c',0x1d5d7:'d',\n0x1d5d8:'e',0x1d5d9:'f',0x1d5da:'g',0x1d5db:'h',\n0x1d5dc:'i',0x1d5dd:'j',0x1d5de:'k',0x1d5df:'l',\n0x1d5e0:'m',0x1d5e1:'n',0x1d5e2:'o',0x1d5e3:'p',\n0x1d5e4:'q',0x1d5e5:'r',0x1d5e6:'s',0x1d5e7:'t',\n0x1d5e8:'u',0x1d5e9:'v',0x1d5ea:'w',0x1d5eb:'x',\n0x1d5ec:'y',0x1d5ed:'z',0x1d608:'a',0x1d609:'b',\n0x1d60a:'c',0x1d60b:'d',0x1d60c:'e',0x1d60d:'f',\n0x1d60e:'g',0x1d60f:'h',0x1d610:'i',0x1d611:'j',\n0x1d612:'k',0x1d613:'l',0x1d614:'m',0x1d615:'n',\n0x1d616:'o',0x1d617:'p',0x1d618:'q',0x1d619:'r',\n0x1d61a:'s',0x1d61b:'t',0x1d61c:'u',0x1d61d:'v',\n0x1d61e:'w',0x1d61f:'x',0x1d620:'y',0x1d621:'z',\n0x1d63c:'a',0x1d63d:'b',0x1d63e:'c',0x1d63f:'d',\n0x1d640:'e',0
x1d641:'f',0x1d642:'g',0x1d643:'h',\n0x1d644:'i',0x1d645:'j',0x1d646:'k',0x1d647:'l',\n0x1d648:'m',0x1d649:'n',0x1d64a:'o',0x1d64b:'p',\n0x1d64c:'q',0x1d64d:'r',0x1d64e:'s',0x1d64f:'t',\n0x1d650:'u',0x1d651:'v',0x1d652:'w',0x1d653:'x',\n0x1d654:'y',0x1d655:'z',0x1d670:'a',0x1d671:'b',\n0x1d672:'c',0x1d673:'d',0x1d674:'e',0x1d675:'f',\n0x1d676:'g',0x1d677:'h',0x1d678:'i',0x1d679:'j',\n0x1d67a:'k',0x1d67b:'l',0x1d67c:'m',0x1d67d:'n',\n0x1d67e:'o',0x1d67f:'p',0x1d680:'q',0x1d681:'r',\n0x1d682:'s',0x1d683:'t',0x1d684:'u',0x1d685:'v',\n0x1d686:'w',0x1d687:'x',0x1d688:'y',0x1d689:'z',\n0x1d6a8:'\\u03b1',0x1d6a9:'\\u03b2',0x1d6aa:'\\u03b3',0x1d6ab:'\\u03b4',\n0x1d6ac:'\\u03b5',0x1d6ad:'\\u03b6',0x1d6ae:'\\u03b7',0x1d6af:'\\u03b8',\n0x1d6b0:'\\u03b9',0x1d6b1:'\\u03ba',0x1d6b2:'\\u03bb',0x1d6b3:'\\u03bc',\n0x1d6b4:'\\u03bd',0x1d6b5:'\\u03be',0x1d6b6:'\\u03bf',0x1d6b7:'\\u03c0',\n0x1d6b8:'\\u03c1',0x1d6b9:'\\u03b8',0x1d6ba:'\\u03c3',0x1d6bb:'\\u03c4',\n0x1d6bc:'\\u03c5',0x1d6bd:'\\u03c6',0x1d6be:'\\u03c7',0x1d6bf:'\\u03c8',\n0x1d6c0:'\\u03c9',0x1d6d3:'\\u03c3',0x1d6e2:'\\u03b1',0x1d6e3:'\\u03b2',\n0x1d6e4:'\\u03b3',0x1d6e5:'\\u03b4',0x1d6e6:'\\u03b5',0x1d6e7:'\\u03b6',\n0x1d6e8:'\\u03b7',0x1d6e9:'\\u03b8',0x1d6ea:'\\u03b9',0x1d6eb:'\\u03ba',\n0x1d6ec:'\\u03bb',0x1d6ed:'\\u03bc',0x1d6ee:'\\u03bd',0x1d6ef:'\\u03be',\n0x1d6f0:'\\u03bf',0x1d6f1:'\\u03c0',0x1d6f2:'\\u03c1',0x1d6f3:'\\u03b8',\n0x1d6f4:'\\u03c3',0x1d6f5:'\\u03c4',0x1d6f6:'\\u03c5',0x1d6f7:'\\u03c6',\n0x1d6f8:'\\u03c7',0x1d6f9:'\\u03c8',0x1d6fa:'\\u03c9',0x1d70d:'\\u03c3',\n0x1d71c:'\\u03b1',0x1d71d:'\\u03b2',0x1d71e:'\\u03b3',0x1d71f:'\\u03b4',\n0x1d720:'\\u03b5',0x1d721:'\\u03b6',0x1d722:'\\u03b7',0x1d723:'\\u03b8',\n0x1d724:'\\u03b9',0x1d725:'\\u03ba',0x1d726:'\\u03bb',0x1d727:'\\u03bc',\n0x1d728:'\\u03bd',0x1d729:'\\u03be',0x1d72a:'\\u03bf',0x1d72b:'\\u03c0',\n0x1d72c:'\\u03c1',0x1d72d:'\\u03b8',0x1d72e:'\\u03c3',0x1d72f:'\\u03c4',\n0x1d730:'\\u03c5',0x1d731:'\\u03c6',0x1d732:'\\u03c7',0x1d733:'\\u03c8',\n0x1d734:'\\u03c9',0x1d747:'\\u03c3',0x1d756:'\\u03b1',0x1d757:'\\u03b2',\n0x1d758:'\\u03b3',0x1d759:'\\u03b4',0x1d75a:'\\u03b5',0x1d75b:'\\u03b6',\n0x1d75c:'\\u03b7',0x1d75d:'\\u03b8',0x1d75e:'\\u03b9',0x1d75f:'\\u03ba',\n0x1d760:'\\u03bb',0x1d761:'\\u03bc',0x1d762:'\\u03bd',0x1d763:'\\u03be',\n0x1d764:'\\u03bf',0x1d765:'\\u03c0',0x1d766:'\\u03c1',0x1d767:'\\u03b8',\n0x1d768:'\\u03c3',0x1d769:'\\u03c4',0x1d76a:'\\u03c5',0x1d76b:'\\u03c6',\n0x1d76c:'\\u03c7',0x1d76d:'\\u03c8',0x1d76e:'\\u03c9',0x1d781:'\\u03c3',\n0x1d790:'\\u03b1',0x1d791:'\\u03b2',0x1d792:'\\u03b3',0x1d793:'\\u03b4',\n0x1d794:'\\u03b5',0x1d795:'\\u03b6',0x1d796:'\\u03b7',0x1d797:'\\u03b8',\n0x1d798:'\\u03b9',0x1d799:'\\u03ba',0x1d79a:'\\u03bb',0x1d79b:'\\u03bc',\n0x1d79c:'\\u03bd',0x1d79d:'\\u03be',0x1d79e:'\\u03bf',0x1d79f:'\\u03c0',\n0x1d7a0:'\\u03c1',0x1d7a1:'\\u03b8',0x1d7a2:'\\u03c3',0x1d7a3:'\\u03c4',\n0x1d7a4:'\\u03c5',0x1d7a5:'\\u03c6',0x1d7a6:'\\u03c7',0x1d7a7:'\\u03c8',\n0x1d7a8:'\\u03c9',0x1d7bb:'\\u03c3',}\n\ndef map_table_b3(code):\n r=b3_exceptions.get(ord(code))\n if r is not None:return r\n return code.lower()\n \n \ndef map_table_b2(a):\n al=map_table_b3(a)\n b=unicodedata.normalize(\"NFKC\",al)\n bl=\"\".join([map_table_b3(ch)for ch in b])\n c=unicodedata.normalize(\"NFKC\",bl)\n if b !=c:\n return c\n else:\n return al\n \n \ndef in_table_c11(code):\n return code ==\" \"\n \n \ndef in_table_c12(code):\n return unicodedata.category(code)==\"Zs\"and code !=\" \"\n \ndef in_table_c11_c12(code):\n return unicodedata.category(code)==\"Zs\"\n \n \ndef 
in_table_c21(code):\n return ord(code)<128 and unicodedata.category(code)==\"Cc\"\n \nc22_specials=set([1757,1807,6158,8204,8205,8232,8233,65279]+list(range(8288,8292))+list(range(8298,8304))+list(range(65529,65533))+list(range(119155,119163)))\ndef in_table_c22(code):\n c=ord(code)\n if c <128:return False\n if unicodedata.category(code)==\"Cc\":return True\n return c in c22_specials\n \ndef in_table_c21_c22(code):\n return unicodedata.category(code)==\"Cc\"or\\\n ord(code)in c22_specials\n \n \ndef in_table_c3(code):\n return unicodedata.category(code)==\"Co\"\n \n \ndef in_table_c4(code):\n c=ord(code)\n if c <0xFDD0:return False\n if c <0xFDF0:return True\n return(ord(code)&0xFFFF)in(0xFFFE,0xFFFF)\n \n \ndef in_table_c5(code):\n return unicodedata.category(code)==\"Cs\"\n \n \nc6_set=set(range(65529,65534))\ndef in_table_c6(code):\n return ord(code)in c6_set\n \n \nc7_set=set(range(12272,12284))\ndef in_table_c7(code):\n return ord(code)in c7_set\n \n \nc8_set=set([832,833,8206,8207]+list(range(8234,8239))+list(range(8298,8304)))\ndef in_table_c8(code):\n return ord(code)in c8_set\n \n \nc9_set=set([917505]+list(range(917536,917632)))\ndef in_table_c9(code):\n return ord(code)in c9_set\n \n \ndef in_table_d1(code):\n return unicodedata.bidirectional(code)in(\"R\",\"AL\")\n \n \ndef in_table_d2(code):\n return unicodedata.bidirectional(code)==\"L\"\n", ["unicodedata"]], "typing": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nfrom abc import abstractmethod,ABCMeta\nimport collections\nfrom collections import defaultdict\nimport collections.abc\nimport copyreg\nimport contextlib\nimport functools\nimport operator\nimport re as stdlib_re\nimport sys\nimport types\nimport warnings\nfrom types import WrapperDescriptorType,MethodWrapperType,MethodDescriptorType,GenericAlias\n\nfrom _typing import(\n_idfunc,\nTypeVar,\nParamSpec,\nTypeVarTuple,\nParamSpecArgs,\nParamSpecKwargs,\nTypeAliasType,\nGeneric,\n)\n\n\n__all__=[\n\n'Annotated',\n'Any',\n'Callable',\n'ClassVar',\n'Concatenate',\n'Final',\n'ForwardRef',\n'Generic',\n'Literal',\n'Optional',\n'ParamSpec',\n'Protocol',\n'Tuple',\n'Type',\n'TypeVar',\n'TypeVarTuple',\n'Union',\n\n\n'AbstractSet',\n'ByteString',\n'Container',\n'ContextManager',\n'Hashable',\n'ItemsView',\n'Iterable',\n'Iterator',\n'KeysView',\n'Mapping',\n'MappingView',\n'MutableMapping',\n'MutableSequence',\n'MutableSet',\n'Sequence',\n'Sized',\n'ValuesView',\n'Awaitable',\n'AsyncIterator',\n'AsyncIterable',\n'Coroutine',\n'Collection',\n'AsyncGenerator',\n'AsyncContextManager',\n\n\n'Reversible',\n'SupportsAbs',\n'SupportsBytes',\n'SupportsComplex',\n'SupportsFloat',\n'SupportsIndex',\n'SupportsInt',\n'SupportsRound',\n\n\n'ChainMap',\n'Counter',\n'Deque',\n'Dict',\n'DefaultDict',\n'List',\n'OrderedDict',\n'Set',\n'FrozenSet',\n'NamedTuple',\n'TypedDict',\n'Generator',\n\n\n'BinaryIO',\n'IO',\n'Match',\n'Pattern',\n'TextIO',\n\n\n'AnyStr',\n'assert_type',\n'assert_never',\n'cast',\n'clear_overloads',\n'dataclass_transform',\n'final',\n'get_args',\n'get_origin',\n'get_overloads',\n'get_type_hints',\n'is_typeddict',\n'LiteralString',\n'Never',\n'NewType',\n'no_type_check',\n'no_type_check_decorator',\n'NoReturn',\n'NotRequired',\n'overload',\n'override',\n'ParamSpecArgs',\n'ParamSpecKwargs',\n'Required',\n'reveal_type',\n'runtime_checkable',\n'Self',\n'Text',\n'TYPE_CHECKING',\n'TypeAlias',\n'TypeGuard',\n'TypeAliasType',\n'Unpack',\n]\n\n\n\n\n\n\ndef _type_convert(arg,module=None,*,allow_special_forms=False):\n ''\n if arg is None:\n return type(None)\n if 
isinstance(arg,str):\n return ForwardRef(arg,module=module,is_class=allow_special_forms)\n return arg\n \n \ndef _type_check(arg,msg,is_argument=True,module=None,*,allow_special_forms=False):\n ''\n\n\n\n\n\n\n\n\n\n \n invalid_generic_forms=(Generic,Protocol)\n if not allow_special_forms:\n invalid_generic_forms +=(ClassVar,)\n if is_argument:\n invalid_generic_forms +=(Final,)\n \n arg=_type_convert(arg,module=module,allow_special_forms=allow_special_forms)\n if(isinstance(arg,_GenericAlias)and\n arg.__origin__ in invalid_generic_forms):\n raise TypeError(f\"{arg} is not valid as type argument\")\n if arg in(Any,LiteralString,NoReturn,Never,Self,TypeAlias):\n return arg\n if allow_special_forms and arg in(ClassVar,Final):\n return arg\n if isinstance(arg,_SpecialForm)or arg in(Generic,Protocol):\n raise TypeError(f\"Plain {arg} is not valid as type argument\")\n if type(arg)is tuple:\n raise TypeError(f\"{msg} Got {arg !r:.100}.\")\n return arg\n \n \ndef _is_param_expr(arg):\n return arg is ...or isinstance(arg,\n (tuple,list,ParamSpec,_ConcatenateGenericAlias))\n \n \ndef _should_unflatten_callable_args(typ,args):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n return(\n typ.__origin__ is collections.abc.Callable\n and not(len(args)==2 and _is_param_expr(args[0]))\n )\n \n \ndef _type_repr(obj):\n ''\n\n\n\n\n\n \n \n \n \n if isinstance(obj,type):\n if obj.__module__ =='builtins':\n return obj.__qualname__\n return f'{obj.__module__}.{obj.__qualname__}'\n if obj is ...:\n return '...'\n if isinstance(obj,types.FunctionType):\n return obj.__name__\n if isinstance(obj,tuple):\n \n return '['+', '.join(_type_repr(t)for t in obj)+']'\n return repr(obj)\n \n \ndef _collect_parameters(args):\n ''\n\n\n\n\n\n \n parameters=[]\n for t in args:\n if isinstance(t,type):\n \n pass\n elif isinstance(t,tuple):\n \n \n for x in t:\n for collected in _collect_parameters([x]):\n if collected not in parameters:\n parameters.append(collected)\n elif hasattr(t,'__typing_subst__'):\n if t not in parameters:\n parameters.append(t)\n else:\n for x in getattr(t,'__parameters__',()):\n if x not in parameters:\n parameters.append(x)\n return tuple(parameters)\n \n \ndef _check_generic(cls,parameters,elen):\n ''\n\n\n \n if not elen:\n raise TypeError(f\"{cls} is not a generic class\")\n alen=len(parameters)\n if alen !=elen:\n raise TypeError(f\"Too {'many'if alen >elen else 'few'} arguments for {cls};\"\n f\" actual {alen}, expected {elen}\")\n \ndef _unpack_args(args):\n newargs=[]\n for arg in args:\n subargs=getattr(arg,'__typing_unpacked_tuple_args__',None)\n if subargs is not None and not(subargs and subargs[-1]is ...):\n newargs.extend(subargs)\n else:\n newargs.append(arg)\n return newargs\n \ndef _deduplicate(params):\n\n all_params=set(params)\n if len(all_params)','eval')\n except SyntaxError:\n raise SyntaxError(f\"Forward reference must be an expression -- got {arg !r}\")\n \n self.__forward_arg__=arg\n self.__forward_code__=code\n self.__forward_evaluated__=False\n self.__forward_value__=None\n self.__forward_is_argument__=is_argument\n self.__forward_is_class__=is_class\n self.__forward_module__=module\n \n def _evaluate(self,globalns,localns,recursive_guard):\n if self.__forward_arg__ in recursive_guard:\n return self\n if not self.__forward_evaluated__ or localns is not globalns:\n if globalns is None and localns is None:\n globalns=localns={}\n elif globalns is None:\n globalns=localns\n elif localns is None:\n localns=globalns\n if self.__forward_module__ is not None:\n globalns=getattr(\n 
sys.modules.get(self.__forward_module__,None),'__dict__',globalns\n )\n type_=_type_check(\n eval(self.__forward_code__,globalns,localns),\n \"Forward references must evaluate to types.\",\n is_argument=self.__forward_is_argument__,\n allow_special_forms=self.__forward_is_class__,\n )\n self.__forward_value__=_eval_type(\n type_,globalns,localns,recursive_guard |{self.__forward_arg__}\n )\n self.__forward_evaluated__=True\n return self.__forward_value__\n \n def __eq__(self,other):\n if not isinstance(other,ForwardRef):\n return NotImplemented\n if self.__forward_evaluated__ and other.__forward_evaluated__:\n return(self.__forward_arg__ ==other.__forward_arg__ and\n self.__forward_value__ ==other.__forward_value__)\n return(self.__forward_arg__ ==other.__forward_arg__ and\n self.__forward_module__ ==other.__forward_module__)\n \n def __hash__(self):\n return hash((self.__forward_arg__,self.__forward_module__))\n \n def __or__(self,other):\n return Union[self,other]\n \n def __ror__(self,other):\n return Union[other,self]\n \n def __repr__(self):\n if self.__forward_module__ is None:\n module_repr=''\n else:\n module_repr=f', module={self.__forward_module__ !r}'\n return f'ForwardRef({self.__forward_arg__ !r}{module_repr})'\n \n \ndef _is_unpacked_typevartuple(x:Any)->bool:\n return((not isinstance(x,type))and\n getattr(x,'__typing_is_unpacked_typevartuple__',False))\n \n \ndef _is_typevar_like(x:Any)->bool:\n return isinstance(x,(TypeVar,ParamSpec))or _is_unpacked_typevartuple(x)\n \n \nclass _PickleUsingNameMixin:\n ''\n \n def __reduce__(self):\n return self.__name__\n \n \ndef _typevar_subst(self,arg):\n msg=\"Parameters to generic types must be types.\"\n arg=_type_check(arg,msg,is_argument=True)\n if((isinstance(arg,_GenericAlias)and arg.__origin__ is Unpack)or\n (isinstance(arg,GenericAlias)and getattr(arg,'__unpacked__',False))):\n raise TypeError(f\"{arg} is not valid as type argument\")\n return arg\n \n \ndef _typevartuple_prepare_subst(self,alias,args):\n params=alias.__parameters__\n typevartuple_index=params.index(self)\n for param in params[typevartuple_index+1:]:\n if isinstance(param,TypeVarTuple):\n raise TypeError(f\"More than one TypeVarTuple parameter in {alias}\")\n \n alen=len(args)\n plen=len(params)\n left=typevartuple_index\n right=plen -typevartuple_index -1\n var_tuple_index=None\n fillarg=None\n for k,arg in enumerate(args):\n if not isinstance(arg,type):\n subargs=getattr(arg,'__typing_unpacked_tuple_args__',None)\n if subargs and len(subargs)==2 and subargs[-1]is ...:\n if var_tuple_index is not None:\n raise TypeError(\"More than one unpacked arbitrary-length tuple argument\")\n var_tuple_index=k\n fillarg=subargs[0]\n if var_tuple_index is not None:\n left=min(left,var_tuple_index)\n right=min(right,alen -var_tuple_index -1)\n elif left+right >alen:\n raise TypeError(f\"Too few arguments for {alias};\"\n f\" actual {alen}, expected at least {plen -1}\")\n \n return(\n *args[:left],\n *([fillarg]*(typevartuple_index -left)),\n tuple(args[left:alen -right]),\n *([fillarg]*(plen -right -left -typevartuple_index -1)),\n *args[alen -right:],\n )\n \n \ndef _paramspec_subst(self,arg):\n if isinstance(arg,(list,tuple)):\n arg=tuple(_type_check(a,\"Expected a type.\")for a in arg)\n elif not _is_param_expr(arg):\n raise TypeError(f\"Expected a list of types, an ellipsis, \"\n f\"ParamSpec, or Concatenate. 
Got {arg}\")\n return arg\n \n \ndef _paramspec_prepare_subst(self,alias,args):\n params=alias.__parameters__\n i=params.index(self)\n if i >=len(args):\n raise TypeError(f\"Too few arguments for {alias}\")\n \n if len(params)==1 and not _is_param_expr(args[0]):\n assert i ==0\n args=(args,)\n \n elif isinstance(args[i],list):\n args=(*args[:i],tuple(args[i]),*args[i+1:])\n return args\n \n \n@_tp_cache\ndef _generic_class_getitem(cls,params):\n ''\n\n\n\n\n\n\n\n \n if not isinstance(params,tuple):\n params=(params,)\n \n params=tuple(_type_convert(p)for p in params)\n is_generic_or_protocol=cls in(Generic,Protocol)\n \n if is_generic_or_protocol:\n \n if not params:\n raise TypeError(\n f\"Parameter list to {cls.__qualname__}[...] cannot be empty\"\n )\n if not all(_is_typevar_like(p)for p in params):\n raise TypeError(\n f\"Parameters to {cls.__name__}[...] must all be type variables \"\n f\"or parameter specification variables.\")\n if len(set(params))!=len(params):\n raise TypeError(\n f\"Parameters to {cls.__name__}[...] must all be unique\")\n else:\n \n for param in cls.__parameters__:\n prepare=getattr(param,'__typing_prepare_subst__',None)\n if prepare is not None:\n params=prepare(cls,params)\n _check_generic(cls,params,len(cls.__parameters__))\n \n new_args=[]\n for param,new_arg in zip(cls.__parameters__,params):\n if isinstance(param,TypeVarTuple):\n new_args.extend(new_arg)\n else:\n new_args.append(new_arg)\n params=tuple(new_args)\n \n return _GenericAlias(cls,params)\n \n \ndef _generic_init_subclass(cls,*args,**kwargs):\n super(Generic,cls).__init_subclass__(*args,**kwargs)\n tvars=[]\n if '__orig_bases__'in cls.__dict__:\n error=Generic in cls.__orig_bases__\n else:\n error=(Generic in cls.__bases__ and\n cls.__name__ !='Protocol'and\n type(cls)!=_TypedDictMeta)\n if error:\n raise TypeError(\"Cannot inherit from plain Generic\")\n if '__orig_bases__'in cls.__dict__:\n tvars=_collect_parameters(cls.__orig_bases__)\n \n \n \n \n \n gvars=None\n for base in cls.__orig_bases__:\n if(isinstance(base,_GenericAlias)and\n base.__origin__ is Generic):\n if gvars is not None:\n raise TypeError(\n \"Cannot inherit from Generic[...] 
multiple times.\")\n gvars=base.__parameters__\n if gvars is not None:\n tvarset=set(tvars)\n gvarset=set(gvars)\n if not tvarset <=gvarset:\n s_vars=', '.join(str(t)for t in tvars if t not in gvarset)\n s_args=', '.join(str(g)for g in gvars)\n raise TypeError(f\"Some type variables ({s_vars}) are\"\n f\" not listed in Generic[{s_args}]\")\n tvars=gvars\n cls.__parameters__=tuple(tvars)\n \n \ndef _is_dunder(attr):\n return attr.startswith('__')and attr.endswith('__')\n \nclass _BaseGenericAlias(_Final,_root=True):\n ''\n\n\n\n\n\n\n \n \n def __init__(self,origin,*,inst=True,name=None):\n self._inst=inst\n self._name=name\n self.__origin__=origin\n self.__slots__=None\n \n def __call__(self,*args,**kwargs):\n if not self._inst:\n raise TypeError(f\"Type {self._name} cannot be instantiated; \"\n f\"use {self.__origin__.__name__}() instead\")\n result=self.__origin__(*args,**kwargs)\n try:\n result.__orig_class__=self\n except AttributeError:\n pass\n return result\n \n def __mro_entries__(self,bases):\n res=[]\n if self.__origin__ not in bases:\n res.append(self.__origin__)\n i=bases.index(self)\n for b in bases[i+1:]:\n if isinstance(b,_BaseGenericAlias)or issubclass(b,Generic):\n break\n else:\n res.append(Generic)\n return tuple(res)\n \n def __getattr__(self,attr):\n if attr in{'__name__','__qualname__'}:\n return self._name or self.__origin__.__name__\n \n \n \n if '__origin__'in self.__dict__ and not _is_dunder(attr):\n return getattr(self.__origin__,attr)\n raise AttributeError(attr)\n \n def __setattr__(self,attr,val):\n if _is_dunder(attr)or attr in{'_name','_inst','_nparams'}:\n super().__setattr__(attr,val)\n else:\n setattr(self.__origin__,attr,val)\n \n def __instancecheck__(self,obj):\n return self.__subclasscheck__(type(obj))\n \n def __subclasscheck__(self,cls):\n raise TypeError(\"Subscripted generics cannot be used with\"\n \" class and instance checks\")\n \n def __dir__(self):\n return list(set(super().__dir__()\n +[attr for attr in dir(self.__origin__)if not _is_dunder(attr)]))\n \n \n \n \n \n \n \n \n \n \n \n \n \nclass _GenericAlias(_BaseGenericAlias,_root=True):\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n def __init__(self,origin,args,*,inst=True,name=None):\n super().__init__(origin,inst=inst,name=name)\n if not isinstance(args,tuple):\n args=(args,)\n self.__args__=tuple(...if a is _TypingEllipsis else\n a for a in args)\n self.__parameters__=_collect_parameters(args)\n if not name:\n self.__module__=origin.__module__\n \n def __eq__(self,other):\n if not isinstance(other,_GenericAlias):\n return NotImplemented\n return(self.__origin__ ==other.__origin__\n and self.__args__ ==other.__args__)\n \n def __hash__(self):\n return hash((self.__origin__,self.__args__))\n \n def __or__(self,right):\n return Union[self,right]\n \n def __ror__(self,left):\n return Union[left,self]\n \n @_tp_cache\n def __getitem__(self,args):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.__origin__ in(Generic,Protocol):\n \n raise TypeError(f\"Cannot subscript already-subscripted {self}\")\n if not self.__parameters__:\n raise TypeError(f\"{self} is not a generic class\")\n \n \n if not isinstance(args,tuple):\n args=(args,)\n args=tuple(_type_convert(p)for p in args)\n args=_unpack_args(args)\n new_args=self._determine_new_args(args)\n r=self.copy_with(new_args)\n return r\n \n def _determine_new_args(self,args):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n params=self.__parameters__\n \n for param in params:\n prepare=getattr(param,'__typing_prepare_subst__',None)\n if 
prepare is not None:\n args=prepare(self,args)\n alen=len(args)\n plen=len(params)\n if alen !=plen:\n raise TypeError(f\"Too {'many'if alen >plen else 'few'} arguments for {self};\"\n f\" actual {alen}, expected {plen}\")\n new_arg_by_param=dict(zip(params,args))\n return tuple(self._make_substitution(self.__args__,new_arg_by_param))\n \n def _make_substitution(self,args,new_arg_by_param):\n ''\n new_args=[]\n for old_arg in args:\n if isinstance(old_arg,type):\n new_args.append(old_arg)\n continue\n \n substfunc=getattr(old_arg,'__typing_subst__',None)\n if substfunc:\n new_arg=substfunc(new_arg_by_param[old_arg])\n else:\n subparams=getattr(old_arg,'__parameters__',())\n if not subparams:\n new_arg=old_arg\n else:\n subargs=[]\n for x in subparams:\n if isinstance(x,TypeVarTuple):\n subargs.extend(new_arg_by_param[x])\n else:\n subargs.append(new_arg_by_param[x])\n new_arg=old_arg[tuple(subargs)]\n \n if self.__origin__ ==collections.abc.Callable and isinstance(new_arg,tuple):\n \n \n \n \n \n \n \n \n \n \n new_args.extend(new_arg)\n elif _is_unpacked_typevartuple(old_arg):\n \n \n \n \n \n \n \n \n \n new_args.extend(new_arg)\n elif isinstance(old_arg,tuple):\n \n \n \n \n \n \n \n new_args.append(\n tuple(self._make_substitution(old_arg,new_arg_by_param)),\n )\n else:\n new_args.append(new_arg)\n return new_args\n \n def copy_with(self,args):\n return self.__class__(self.__origin__,args,name=self._name,inst=self._inst)\n \n def __repr__(self):\n if self._name:\n name='typing.'+self._name\n else:\n name=_type_repr(self.__origin__)\n if self.__args__:\n args=\", \".join([_type_repr(a)for a in self.__args__])\n else:\n \n args=\"()\"\n return f'{name}[{args}]'\n \n def __reduce__(self):\n if self._name:\n origin=globals()[self._name]\n else:\n origin=self.__origin__\n args=tuple(self.__args__)\n if len(args)==1 and not isinstance(args[0],tuple):\n args,=args\n return operator.getitem,(origin,args)\n \n def __mro_entries__(self,bases):\n if isinstance(self.__origin__,_SpecialForm):\n raise TypeError(f\"Cannot subclass {self !r}\")\n \n if self._name:\n return super().__mro_entries__(bases)\n if self.__origin__ is Generic:\n if Protocol in bases:\n return()\n i=bases.index(self)\n for b in bases[i+1:]:\n if isinstance(b,_BaseGenericAlias)and b is not self:\n return()\n return(self.__origin__,)\n \n def __iter__(self):\n yield Unpack[self]\n \n \n \n \n \n \nclass _SpecialGenericAlias(_NotIterable,_BaseGenericAlias,_root=True):\n def __init__(self,origin,nparams,*,inst=True,name=None):\n if name is None:\n name=origin.__name__\n super().__init__(origin,inst=inst,name=name)\n self._nparams=nparams\n if origin.__module__ =='builtins':\n self.__doc__=f'A generic version of {origin.__qualname__}.'\n else:\n self.__doc__=f'A generic version of {origin.__module__}.{origin.__qualname__}.'\n \n @_tp_cache\n def __getitem__(self,params):\n if not isinstance(params,tuple):\n params=(params,)\n msg=\"Parameters to generic types must be types.\"\n params=tuple(_type_check(p,msg)for p in params)\n _check_generic(self,params,self._nparams)\n return self.copy_with(params)\n \n def copy_with(self,params):\n return _GenericAlias(self.__origin__,params,\n name=self._name,inst=self._inst)\n \n def __repr__(self):\n return 'typing.'+self._name\n \n def __subclasscheck__(self,cls):\n if isinstance(cls,_SpecialGenericAlias):\n return issubclass(cls.__origin__,self.__origin__)\n if not isinstance(cls,_GenericAlias):\n return issubclass(cls,self.__origin__)\n return super().__subclasscheck__(cls)\n \n def 
__reduce__(self):\n return self._name\n \n def __or__(self,right):\n return Union[self,right]\n \n def __ror__(self,left):\n return Union[left,self]\n \n \nclass _DeprecatedGenericAlias(_SpecialGenericAlias,_root=True):\n def __init__(\n self,origin,nparams,*,removal_version,inst=True,name=None\n ):\n super().__init__(origin,nparams,inst=inst,name=name)\n self._removal_version=removal_version\n \n def __instancecheck__(self,inst):\n import warnings\n warnings._deprecated(\n f\"{self.__module__}.{self._name}\",remove=self._removal_version\n )\n return super().__instancecheck__(inst)\n \n \nclass _CallableGenericAlias(_NotIterable,_GenericAlias,_root=True):\n def __repr__(self):\n assert self._name =='Callable'\n args=self.__args__\n if len(args)==2 and _is_param_expr(args[0]):\n return super().__repr__()\n return(f'typing.Callable'\n f'[[{\", \".join([_type_repr(a)for a in args[:-1]])}], '\n f'{_type_repr(args[-1])}]')\n \n def __reduce__(self):\n args=self.__args__\n if not(len(args)==2 and _is_param_expr(args[0])):\n args=list(args[:-1]),args[-1]\n return operator.getitem,(Callable,args)\n \n \nclass _CallableType(_SpecialGenericAlias,_root=True):\n def copy_with(self,params):\n return _CallableGenericAlias(self.__origin__,params,\n name=self._name,inst=self._inst)\n \n def __getitem__(self,params):\n if not isinstance(params,tuple)or len(params)!=2:\n raise TypeError(\"Callable must be used as \"\n \"Callable[[arg, ...], result].\")\n args,result=params\n \n \n \n if isinstance(args,list):\n params=(tuple(args),result)\n else:\n params=(args,result)\n return self.__getitem_inner__(params)\n \n @_tp_cache\n def __getitem_inner__(self,params):\n args,result=params\n msg=\"Callable[args, result]: result must be a type.\"\n result=_type_check(result,msg)\n if args is Ellipsis:\n return self.copy_with((_TypingEllipsis,result))\n if not isinstance(args,tuple):\n args=(args,)\n args=tuple(_type_convert(arg)for arg in args)\n params=args+(result,)\n return self.copy_with(params)\n \n \nclass _TupleType(_SpecialGenericAlias,_root=True):\n @_tp_cache\n def __getitem__(self,params):\n if not isinstance(params,tuple):\n params=(params,)\n if len(params)>=2 and params[-1]is ...:\n msg=\"Tuple[t, ...]: t must be a type.\"\n params=tuple(_type_check(p,msg)for p in params[:-1])\n return self.copy_with((*params,_TypingEllipsis))\n msg=\"Tuple[t0, t1, ...]: each t must be a type.\"\n params=tuple(_type_check(p,msg)for p in params)\n return self.copy_with(params)\n \n \nclass _UnionGenericAlias(_NotIterable,_GenericAlias,_root=True):\n def copy_with(self,params):\n return Union[params]\n \n def __eq__(self,other):\n if not isinstance(other,(_UnionGenericAlias,types.UnionType)):\n return NotImplemented\n return set(self.__args__)==set(other.__args__)\n \n def __hash__(self):\n return hash(frozenset(self.__args__))\n \n def __repr__(self):\n args=self.__args__\n if len(args)==2:\n if args[0]is type(None):\n return f'typing.Optional[{_type_repr(args[1])}]'\n elif args[1]is type(None):\n return f'typing.Optional[{_type_repr(args[0])}]'\n return super().__repr__()\n \n def __instancecheck__(self,obj):\n return self.__subclasscheck__(type(obj))\n \n def __subclasscheck__(self,cls):\n for arg in self.__args__:\n if issubclass(cls,arg):\n return True\n \n def __reduce__(self):\n func,(origin,args)=super().__reduce__()\n return func,(Union,args)\n \n \ndef _value_and_type_iter(parameters):\n return((p,type(p))for p in parameters)\n \n \nclass _LiteralGenericAlias(_GenericAlias,_root=True):\n def 
__eq__(self,other):\n if not isinstance(other,_LiteralGenericAlias):\n return NotImplemented\n \n return set(_value_and_type_iter(self.__args__))==set(_value_and_type_iter(other.__args__))\n \n def __hash__(self):\n return hash(frozenset(_value_and_type_iter(self.__args__)))\n \n \nclass _ConcatenateGenericAlias(_GenericAlias,_root=True):\n def copy_with(self,params):\n if isinstance(params[-1],(list,tuple)):\n return(*params[:-1],*params[-1])\n if isinstance(params[-1],_ConcatenateGenericAlias):\n params=(*params[:-1],*params[-1].__args__)\n return super().copy_with(params)\n \n \n@_SpecialForm\ndef Unpack(self,parameters):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n item=_type_check(parameters,f'{self} accepts only single type.')\n return _UnpackGenericAlias(origin=self,args=(item,))\n \n \nclass _UnpackGenericAlias(_GenericAlias,_root=True):\n def __repr__(self):\n \n \n return f'typing.Unpack[{_type_repr(self.__args__[0])}]'\n \n def __getitem__(self,args):\n if self.__typing_is_unpacked_typevartuple__:\n return args\n return super().__getitem__(args)\n \n @property\n def __typing_unpacked_tuple_args__(self):\n assert self.__origin__ is Unpack\n assert len(self.__args__)==1\n arg,=self.__args__\n if isinstance(arg,_GenericAlias):\n assert arg.__origin__ is tuple\n return arg.__args__\n return None\n \n @property\n def __typing_is_unpacked_typevartuple__(self):\n assert self.__origin__ is Unpack\n assert len(self.__args__)==1\n return isinstance(self.__args__[0],TypeVarTuple)\n \n \nclass _TypingEllipsis:\n ''\n \n \n_TYPING_INTERNALS=frozenset({\n'__parameters__','__orig_bases__','__orig_class__',\n'_is_protocol','_is_runtime_protocol','__protocol_attrs__',\n'__callable_proto_members_only__','__type_params__',\n})\n\n_SPECIAL_NAMES=frozenset({\n'__abstractmethods__','__annotations__','__dict__','__doc__',\n'__init__','__module__','__new__','__slots__',\n'__subclasshook__','__weakref__','__class_getitem__'\n})\n\n\nEXCLUDED_ATTRIBUTES=_TYPING_INTERNALS |_SPECIAL_NAMES |{'_MutableMapping__marker'}\n\n\ndef _get_protocol_attrs(cls):\n ''\n\n\n\n \n attrs=set()\n for base in cls.__mro__[:-1]:\n if base.__name__ in{'Protocol','Generic'}:\n continue\n annotations=getattr(base,'__annotations__',{})\n for attr in(*base.__dict__,*annotations):\n if not attr.startswith('_abc_')and attr not in EXCLUDED_ATTRIBUTES:\n attrs.add(attr)\n return attrs\n \n \ndef _no_init_or_replace_init(self,*args,**kwargs):\n cls=type(self)\n \n if cls._is_protocol:\n raise TypeError('Protocols cannot be instantiated')\n \n \n \n if cls.__init__ is not _no_init_or_replace_init:\n return\n \n \n \n \n \n \n \n for base in cls.__mro__:\n init=base.__dict__.get('__init__',_no_init_or_replace_init)\n if init is not _no_init_or_replace_init:\n cls.__init__=init\n break\n else:\n \n cls.__init__=object.__init__\n \n cls.__init__(self,*args,**kwargs)\n \n \ndef _caller(depth=1,default='__main__'):\n try:\n return sys._getframemodulename(depth+1)or default\n except AttributeError:\n pass\n try:\n return sys._getframe(depth+1).f_globals.get('__name__',default)\n except(AttributeError,ValueError):\n pass\n return None\n \ndef _allow_reckless_class_checks(depth=2):\n ''\n\n\n\n \n return _caller(depth)in{'abc','functools',None}\n \n 
\n_PROTO_ALLOWLIST={\n'collections.abc':[\n'Callable','Awaitable','Iterable','Iterator','AsyncIterable',\n'Hashable','Sized','Container','Collection','Reversible','Buffer',\n],\n'contextlib':['AbstractContextManager','AbstractAsyncContextManager'],\n}\n\n\n@functools.cache\ndef _lazy_load_getattr_static():\n\n\n from inspect import getattr_static\n return getattr_static\n \n \n_cleanups.append(_lazy_load_getattr_static.cache_clear)\n\ndef _pickle_psargs(psargs):\n return ParamSpecArgs,(psargs.__origin__,)\n \ncopyreg.pickle(ParamSpecArgs,_pickle_psargs)\n\ndef _pickle_pskwargs(pskwargs):\n return ParamSpecKwargs,(pskwargs.__origin__,)\n \ncopyreg.pickle(ParamSpecKwargs,_pickle_pskwargs)\n\ndel _pickle_psargs,_pickle_pskwargs\n\n\nclass _ProtocolMeta(ABCMeta):\n\n\n def __new__(mcls,name,bases,namespace,/,**kwargs):\n if name ==\"Protocol\"and bases ==(Generic,):\n pass\n elif Protocol in bases:\n for base in bases:\n if not(\n base in{object,Generic}\n or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__,[])\n or(\n issubclass(base,Generic)\n and getattr(base,\"_is_protocol\",False)\n )\n ):\n raise TypeError(\n f\"Protocols can only inherit from other protocols, \"\n f\"got {base !r}\"\n )\n return super().__new__(mcls,name,bases,namespace,**kwargs)\n \n def __init__(cls,*args,**kwargs):\n super().__init__(*args,**kwargs)\n if getattr(cls,\"_is_protocol\",False):\n cls.__protocol_attrs__=_get_protocol_attrs(cls)\n \n \n cls.__callable_proto_members_only__=all(\n callable(getattr(cls,attr,None))for attr in cls.__protocol_attrs__\n )\n \n def __subclasscheck__(cls,other):\n if cls is Protocol:\n return type.__subclasscheck__(cls,other)\n if(\n getattr(cls,'_is_protocol',False)\n and not _allow_reckless_class_checks()\n ):\n if not isinstance(other,type):\n \n raise TypeError('issubclass() arg 1 must be a class')\n if(\n not cls.__callable_proto_members_only__\n and cls.__dict__.get(\"__subclasshook__\")is _proto_hook\n ):\n raise TypeError(\n \"Protocols with non-method members don't support issubclass()\"\n )\n if not getattr(cls,'_is_runtime_protocol',False):\n raise TypeError(\n \"Instance and class checks can only be used with \"\n \"@runtime_checkable protocols\"\n )\n return super().__subclasscheck__(other)\n \n def __instancecheck__(cls,instance):\n \n \n if cls is Protocol:\n return type.__instancecheck__(cls,instance)\n if not getattr(cls,\"_is_protocol\",False):\n \n return super().__instancecheck__(instance)\n \n if(\n not getattr(cls,'_is_runtime_protocol',False)and\n not _allow_reckless_class_checks()\n ):\n raise TypeError(\"Instance and class checks can only be used with\"\n \" @runtime_checkable protocols\")\n \n if super().__instancecheck__(instance):\n return True\n \n getattr_static=_lazy_load_getattr_static()\n for attr in cls.__protocol_attrs__:\n try:\n val=getattr_static(instance,attr)\n except AttributeError:\n break\n if val is None and callable(getattr(cls,attr,None)):\n break\n else:\n return True\n \n return False\n \n \n@classmethod\ndef _proto_hook(cls,other):\n if not cls.__dict__.get('_is_protocol',False):\n return NotImplemented\n \n for attr in cls.__protocol_attrs__:\n for base in other.__mro__:\n \n if attr in base.__dict__:\n if base.__dict__[attr]is None:\n return NotImplemented\n break\n \n \n annotations=getattr(base,'__annotations__',{})\n if(isinstance(annotations,collections.abc.Mapping)and\n attr in annotations and\n issubclass(other,Generic)and getattr(other,'_is_protocol',False)):\n break\n else:\n return NotImplemented\n return True\n \n 
\nclass Protocol(Generic,metaclass=_ProtocolMeta):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=()\n _is_protocol=True\n _is_runtime_protocol=False\n \n def __init_subclass__(cls,*args,**kwargs):\n super().__init_subclass__(*args,**kwargs)\n \n \n if not cls.__dict__.get('_is_protocol',False):\n cls._is_protocol=any(b is Protocol for b in cls.__bases__)\n \n \n if '__subclasshook__'not in cls.__dict__:\n cls.__subclasshook__=_proto_hook\n \n \n if cls._is_protocol and cls.__init__ is Protocol.__init__:\n cls.__init__=_no_init_or_replace_init\n \n \nclass _AnnotatedAlias(_NotIterable,_GenericAlias,_root=True):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,origin,metadata):\n if isinstance(origin,_AnnotatedAlias):\n metadata=origin.__metadata__+metadata\n origin=origin.__origin__\n super().__init__(origin,origin,name='Annotated')\n self.__metadata__=metadata\n \n def copy_with(self,params):\n assert len(params)==1\n new_type=params[0]\n return _AnnotatedAlias(new_type,self.__metadata__)\n \n def __repr__(self):\n return \"typing.Annotated[{}, {}]\".format(\n _type_repr(self.__origin__),\n \", \".join(repr(a)for a in self.__metadata__)\n )\n \n def __reduce__(self):\n return operator.getitem,(\n Annotated,(self.__origin__,)+self.__metadata__\n )\n \n def __eq__(self,other):\n if not isinstance(other,_AnnotatedAlias):\n return NotImplemented\n return(self.__origin__ ==other.__origin__\n and self.__metadata__ ==other.__metadata__)\n \n def __hash__(self):\n return hash((self.__origin__,self.__metadata__))\n \n def __getattr__(self,attr):\n if attr in{'__name__','__qualname__'}:\n return 'Annotated'\n return super().__getattr__(attr)\n \n def __mro_entries__(self,bases):\n return(self.__origin__,)\n \n \nclass Annotated:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n def __new__(cls,*args,**kwargs):\n raise TypeError(\"Type Annotated cannot be instantiated.\")\n \n @_tp_cache\n def __class_getitem__(cls,params):\n if not isinstance(params,tuple)or len(params)<2:\n raise TypeError(\"Annotated[...] should be used \"\n \"with at least two arguments (a type and an \"\n \"annotation).\")\n if _is_unpacked_typevartuple(params[0]):\n raise TypeError(\"Annotated[...] 
should not be used with an \"\n \"unpacked TypeVarTuple\")\n msg=\"Annotated[t, ...]: t must be a type.\"\n origin=_type_check(params[0],msg,allow_special_forms=True)\n metadata=tuple(params[1:])\n return _AnnotatedAlias(origin,metadata)\n \n def __init_subclass__(cls,*args,**kwargs):\n raise TypeError(\n \"Cannot subclass {}.Annotated\".format(cls.__module__)\n )\n \n \ndef runtime_checkable(cls):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not issubclass(cls,Generic)or not getattr(cls,'_is_protocol',False):\n raise TypeError('@runtime_checkable can be only applied to protocol classes,'\n ' got %r'%cls)\n cls._is_runtime_protocol=True\n return cls\n \n \ndef cast(typ,val):\n ''\n\n\n\n\n\n \n return val\n \n \ndef assert_type(val,typ,/):\n ''\n\n\n\n\n\n\n\n\n\n\n \n return val\n \n \n_allowed_types=(types.FunctionType,types.BuiltinFunctionType,\ntypes.MethodType,types.ModuleType,\nWrapperDescriptorType,MethodWrapperType,MethodDescriptorType)\n\n\ndef get_type_hints(obj,globalns=None,localns=None,include_extras=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if getattr(obj,'__no_type_check__',None):\n return{}\n \n if isinstance(obj,type):\n hints={}\n for base in reversed(obj.__mro__):\n if globalns is None:\n base_globals=getattr(sys.modules.get(base.__module__,None),'__dict__',{})\n else:\n base_globals=globalns\n ann=base.__dict__.get('__annotations__',{})\n if isinstance(ann,types.GetSetDescriptorType):\n ann={}\n base_locals=dict(vars(base))if localns is None else localns\n if localns is None and globalns is None:\n \n \n \n \n \n \n base_globals,base_locals=base_locals,base_globals\n for name,value in ann.items():\n if value is None:\n value=type(None)\n if isinstance(value,str):\n value=ForwardRef(value,is_argument=False,is_class=True)\n value=_eval_type(value,base_globals,base_locals)\n hints[name]=value\n return hints if include_extras else{k:_strip_annotations(t)for k,t in hints.items()}\n \n if globalns is None:\n if isinstance(obj,types.ModuleType):\n globalns=obj.__dict__\n else:\n nsobj=obj\n \n while hasattr(nsobj,'__wrapped__'):\n nsobj=nsobj.__wrapped__\n globalns=getattr(nsobj,'__globals__',{})\n if localns is None:\n localns=globalns\n elif localns is None:\n localns=globalns\n hints=getattr(obj,'__annotations__',None)\n if hints is None:\n \n if isinstance(obj,_allowed_types):\n return{}\n else:\n raise TypeError('{!r} is not a module, class, method, '\n 'or function.'.format(obj))\n hints=dict(hints)\n for name,value in hints.items():\n if value is None:\n value=type(None)\n if isinstance(value,str):\n \n \n value=ForwardRef(\n value,\n is_argument=not isinstance(obj,types.ModuleType),\n is_class=False,\n )\n hints[name]=_eval_type(value,globalns,localns)\n return hints if include_extras else{k:_strip_annotations(t)for k,t in hints.items()}\n \n \ndef _strip_annotations(t):\n ''\n if isinstance(t,_AnnotatedAlias):\n return _strip_annotations(t.__origin__)\n if hasattr(t,\"__origin__\")and t.__origin__ in(Required,NotRequired):\n return _strip_annotations(t.__args__[0])\n if isinstance(t,_GenericAlias):\n stripped_args=tuple(_strip_annotations(a)for a in t.__args__)\n if stripped_args ==t.__args__:\n return t\n return t.copy_with(stripped_args)\n if isinstance(t,GenericAlias):\n stripped_args=tuple(_strip_annotations(a)for a in t.__args__)\n if stripped_args ==t.__args__:\n return t\n return GenericAlias(t.__origin__,stripped_args)\n if isinstance(t,types.UnionType):\n stripped_args=tuple(_strip_annotations(a)for a in t.__args__)\n if 
stripped_args ==t.__args__:\n return t\n return functools.reduce(operator.or_,stripped_args)\n \n return t\n \n \ndef get_origin(tp):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(tp,_AnnotatedAlias):\n return Annotated\n if isinstance(tp,(_BaseGenericAlias,GenericAlias,\n ParamSpecArgs,ParamSpecKwargs)):\n return tp.__origin__\n if tp is Generic:\n return Generic\n if isinstance(tp,types.UnionType):\n return types.UnionType\n return None\n \n \ndef get_args(tp):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(tp,_AnnotatedAlias):\n return(tp.__origin__,)+tp.__metadata__\n if isinstance(tp,(_GenericAlias,GenericAlias)):\n res=tp.__args__\n if _should_unflatten_callable_args(tp,res):\n res=(list(res[:-1]),res[-1])\n return res\n if isinstance(tp,types.UnionType):\n return tp.__args__\n return()\n \n \ndef is_typeddict(tp):\n ''\n\n\n\n\n\n\n\n\n\n \n return isinstance(tp,_TypedDictMeta)\n \n \n_ASSERT_NEVER_REPR_MAX_LENGTH=100\n\n\ndef assert_never(arg:Never,/)->Never:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n value=repr(arg)\n if len(value)>_ASSERT_NEVER_REPR_MAX_LENGTH:\n value=value[:_ASSERT_NEVER_REPR_MAX_LENGTH]+'...'\n raise AssertionError(f\"Expected code to be unreachable, but got: {value}\")\n \n \ndef no_type_check(arg):\n ''\n\n\n\n\n\n\n \n if isinstance(arg,type):\n for key in dir(arg):\n obj=getattr(arg,key)\n if(\n not hasattr(obj,'__qualname__')\n or obj.__qualname__ !=f'{arg.__qualname__}.{obj.__name__}'\n or getattr(obj,'__module__',None)!=arg.__module__\n ):\n \n \n \n continue\n \n if isinstance(obj,types.FunctionType):\n obj.__no_type_check__=True\n if isinstance(obj,types.MethodType):\n obj.__func__.__no_type_check__=True\n \n if isinstance(obj,type):\n no_type_check(obj)\n try:\n arg.__no_type_check__=True\n except TypeError:\n pass\n return arg\n \n \ndef no_type_check_decorator(decorator):\n ''\n\n\n\n \n @functools.wraps(decorator)\n def wrapped_decorator(*args,**kwds):\n func=decorator(*args,**kwds)\n func=no_type_check(func)\n return func\n \n return wrapped_decorator\n \n \ndef _overload_dummy(*args,**kwds):\n ''\n raise NotImplementedError(\n \"You should not call an overloaded function. 
\"\n \"A series of @overload-decorated functions \"\n \"outside a stub module should always be followed \"\n \"by an implementation that is not @overload-ed.\")\n \n \n \n_overload_registry=defaultdict(functools.partial(defaultdict,dict))\n\n\ndef overload(func):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n f=getattr(func,\"__func__\",func)\n try:\n _overload_registry[f.__module__][f.__qualname__][f.__code__.co_firstlineno]=func\n except AttributeError:\n \n pass\n return _overload_dummy\n \n \ndef get_overloads(func):\n ''\n \n f=getattr(func,\"__func__\",func)\n if f.__module__ not in _overload_registry:\n return[]\n mod_dict=_overload_registry[f.__module__]\n if f.__qualname__ not in mod_dict:\n return[]\n return list(mod_dict[f.__qualname__].values())\n \n \ndef clear_overloads():\n ''\n _overload_registry.clear()\n \n \ndef final(f):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try:\n f.__final__=True\n except(AttributeError,TypeError):\n \n \n \n pass\n return f\n \n \n \n \n \nT=TypeVar('T')\nKT=TypeVar('KT')\nVT=TypeVar('VT')\nT_co=TypeVar('T_co',covariant=True)\nV_co=TypeVar('V_co',covariant=True)\nVT_co=TypeVar('VT_co',covariant=True)\nT_contra=TypeVar('T_contra',contravariant=True)\n\nCT_co=TypeVar('CT_co',covariant=True,bound=type)\n\n\n\n\nAnyStr=TypeVar('AnyStr',bytes,str)\n\n\n\n_alias=_SpecialGenericAlias\n\nHashable=_alias(collections.abc.Hashable,0)\nAwaitable=_alias(collections.abc.Awaitable,1)\nCoroutine=_alias(collections.abc.Coroutine,3)\nAsyncIterable=_alias(collections.abc.AsyncIterable,1)\nAsyncIterator=_alias(collections.abc.AsyncIterator,1)\nIterable=_alias(collections.abc.Iterable,1)\nIterator=_alias(collections.abc.Iterator,1)\nReversible=_alias(collections.abc.Reversible,1)\nSized=_alias(collections.abc.Sized,0)\nContainer=_alias(collections.abc.Container,1)\nCollection=_alias(collections.abc.Collection,1)\nCallable=_CallableType(collections.abc.Callable,2)\nCallable.__doc__=\\\n\"\"\"Deprecated alias to collections.abc.Callable.\n\n Callable[[int], str] signifies a function that takes a single\n parameter of type int and returns a str.\n\n The subscription syntax must always be used with exactly two\n values: the argument list and the return type.\n The argument list must be a list of types, a ParamSpec,\n Concatenate or ellipsis. The return type must be a single type.\n\n There is no syntax to indicate optional or keyword arguments;\n such function types are rarely used as callback types.\n \"\"\"\nAbstractSet=_alias(collections.abc.Set,1,name='AbstractSet')\nMutableSet=_alias(collections.abc.MutableSet,1)\n\nMapping=_alias(collections.abc.Mapping,2)\nMutableMapping=_alias(collections.abc.MutableMapping,2)\nSequence=_alias(collections.abc.Sequence,1)\nMutableSequence=_alias(collections.abc.MutableSequence,1)\nByteString=_DeprecatedGenericAlias(\ncollections.abc.ByteString,0,removal_version=(3,14)\n)\n\nTuple=_TupleType(tuple,-1,inst=False,name='Tuple')\nTuple.__doc__=\\\n\"\"\"Deprecated alias to builtins.tuple.\n\n Tuple[X, Y] is the cross-product type of X and Y.\n\n Example: Tuple[T1, T2] is a tuple of two elements corresponding\n to type variables T1 and T2. 
Tuple[int, float, str] is a tuple\n of an int, a float and a string.\n\n To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].\n \"\"\"\nList=_alias(list,1,inst=False,name='List')\nDeque=_alias(collections.deque,1,name='Deque')\nSet=_alias(set,1,inst=False,name='Set')\nFrozenSet=_alias(frozenset,1,inst=False,name='FrozenSet')\nMappingView=_alias(collections.abc.MappingView,1)\nKeysView=_alias(collections.abc.KeysView,1)\nItemsView=_alias(collections.abc.ItemsView,2)\nValuesView=_alias(collections.abc.ValuesView,1)\nContextManager=_alias(contextlib.AbstractContextManager,1,name='ContextManager')\nAsyncContextManager=_alias(contextlib.AbstractAsyncContextManager,1,name='AsyncContextManager')\nDict=_alias(dict,2,inst=False,name='Dict')\nDefaultDict=_alias(collections.defaultdict,2,name='DefaultDict')\nOrderedDict=_alias(collections.OrderedDict,2)\nCounter=_alias(collections.Counter,1)\nChainMap=_alias(collections.ChainMap,2)\nGenerator=_alias(collections.abc.Generator,3)\nAsyncGenerator=_alias(collections.abc.AsyncGenerator,2)\nType=_alias(type,1,inst=False,name='Type')\nType.__doc__=\\\n\"\"\"Deprecated alias to builtins.type.\n\n builtins.type or typing.Type can be used to annotate class objects.\n For example, suppose we have the following classes::\n\n class User: ... # Abstract base for User classes\n class BasicUser(User): ...\n class ProUser(User): ...\n class TeamUser(User): ...\n\n And a function that takes a class argument that's a subclass of\n User and returns an instance of the corresponding class::\n\n def new_user[U](user_class: Type[U]) -> U:\n user = user_class()\n # (Here we could write the user object to a database)\n return user\n\n joe = new_user(BasicUser)\n\n At this point the type checker knows that joe has type BasicUser.\n \"\"\"\n\n\n@runtime_checkable\nclass SupportsInt(Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __int__(self)->int:\n pass\n \n \n@runtime_checkable\nclass SupportsFloat(Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __float__(self)->float:\n pass\n \n \n@runtime_checkable\nclass SupportsComplex(Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __complex__(self)->complex:\n pass\n \n \n@runtime_checkable\nclass SupportsBytes(Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __bytes__(self)->bytes:\n pass\n \n \n@runtime_checkable\nclass SupportsIndex(Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __index__(self)->int:\n pass\n \n \n@runtime_checkable\nclass SupportsAbs[T](Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __abs__(self)->T:\n pass\n \n \n@runtime_checkable\nclass SupportsRound[T](Protocol):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __round__(self,ndigits:int=0)->T:\n pass\n \n \ndef _make_nmtuple(name,types,module,defaults=()):\n fields=[n for n,t in types]\n types={n:_type_check(t,f\"field {n} annotation must be a type\")\n for n,t in types}\n nm_tpl=collections.namedtuple(name,fields,\n defaults=defaults,module=module)\n nm_tpl.__annotations__=nm_tpl.__new__.__annotations__=types\n return nm_tpl\n \n \n \n_prohibited=frozenset({'__new__','__init__','__slots__','__getnewargs__',\n'_fields','_field_defaults',\n'_make','_replace','_asdict','_source'})\n\n_special=frozenset({'__module__','__name__','__annotations__'})\n\n\nclass NamedTupleMeta(type):\n def __new__(cls,typename,bases,ns):\n assert _NamedTuple in bases\n for base in bases:\n if base is not _NamedTuple and base is not Generic:\n raise TypeError(\n 'can only inherit 
from a NamedTuple type and Generic')\n bases=tuple(tuple if base is _NamedTuple else base for base in bases)\n types=ns.get('__annotations__',{})\n default_names=[]\n for field_name in types:\n if field_name in ns:\n default_names.append(field_name)\n elif default_names:\n raise TypeError(f\"Non-default namedtuple field {field_name} \"\n f\"cannot follow default field\"\n f\"{'s'if len(default_names)>1 else ''} \"\n f\"{', '.join(default_names)}\")\n nm_tpl=_make_nmtuple(typename,types.items(),\n defaults=[ns[n]for n in default_names],\n module=ns['__module__'])\n nm_tpl.__bases__=bases\n if Generic in bases:\n class_getitem=_generic_class_getitem\n nm_tpl.__class_getitem__=classmethod(class_getitem)\n \n for key in ns:\n if key in _prohibited:\n raise AttributeError(\"Cannot overwrite NamedTuple attribute \"+key)\n elif key not in _special and key not in nm_tpl._fields:\n setattr(nm_tpl,key,ns[key])\n if Generic in bases:\n nm_tpl.__init_subclass__()\n return nm_tpl\n \n \ndef NamedTuple(typename,fields=None,/,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if fields is None:\n fields=kwargs.items()\n elif kwargs:\n raise TypeError(\"Either list of fields or keywords\"\n \" can be provided to NamedTuple, not both\")\n nt=_make_nmtuple(typename,fields,module=_caller())\n nt.__orig_bases__=(NamedTuple,)\n return nt\n \n_NamedTuple=type.__new__(NamedTupleMeta,'NamedTuple',(),{})\n\ndef _namedtuple_mro_entries(bases):\n assert NamedTuple in bases\n return(_NamedTuple,)\n \nNamedTuple.__mro_entries__=_namedtuple_mro_entries\n\n\nclass _TypedDictMeta(type):\n def __new__(cls,name,bases,ns,total=True):\n ''\n\n\n\n\n\n \n for base in bases:\n if type(base)is not _TypedDictMeta and base is not Generic:\n raise TypeError('cannot inherit from both a TypedDict type '\n 'and a non-TypedDict base class')\n \n if any(issubclass(b,Generic)for b in bases):\n generic_base=(Generic,)\n else:\n generic_base=()\n \n tp_dict=type.__new__(_TypedDictMeta,name,(*generic_base,dict),ns)\n \n if not hasattr(tp_dict,'__orig_bases__'):\n tp_dict.__orig_bases__=bases\n \n annotations={}\n own_annotations=ns.get('__annotations__',{})\n msg=\"TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type\"\n own_annotations={\n n:_type_check(tp,msg,module=tp_dict.__module__)\n for n,tp in own_annotations.items()\n }\n required_keys=set()\n optional_keys=set()\n \n for base in bases:\n annotations.update(base.__dict__.get('__annotations__',{}))\n required_keys.update(base.__dict__.get('__required_keys__',()))\n optional_keys.update(base.__dict__.get('__optional_keys__',()))\n \n annotations.update(own_annotations)\n for annotation_key,annotation_type in own_annotations.items():\n annotation_origin=get_origin(annotation_type)\n if annotation_origin is Annotated:\n annotation_args=get_args(annotation_type)\n if annotation_args:\n annotation_type=annotation_args[0]\n annotation_origin=get_origin(annotation_type)\n \n if annotation_origin is Required:\n required_keys.add(annotation_key)\n elif annotation_origin is NotRequired:\n optional_keys.add(annotation_key)\n elif total:\n required_keys.add(annotation_key)\n else:\n optional_keys.add(annotation_key)\n \n tp_dict.__annotations__=annotations\n tp_dict.__required_keys__=frozenset(required_keys)\n tp_dict.__optional_keys__=frozenset(optional_keys)\n if not hasattr(tp_dict,'__total__'):\n tp_dict.__total__=total\n return tp_dict\n \n __call__=dict\n \n def __subclasscheck__(cls,other):\n \n raise TypeError('TypedDict does not support instance and class checks')\n \n 
__instancecheck__=__subclasscheck__\n \n \ndef TypedDict(typename,fields=None,/,*,total=True,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if fields is None:\n fields=kwargs\n elif kwargs:\n raise TypeError(\"TypedDict takes either a dict or keyword arguments,\"\n \" but not both\")\n if kwargs:\n warnings.warn(\n \"The kwargs-based syntax for TypedDict definitions is deprecated \"\n \"in Python 3.11, will be removed in Python 3.13, and may not be \"\n \"understood by third-party type checkers.\",\n DeprecationWarning,\n stacklevel=2,\n )\n \n ns={'__annotations__':dict(fields)}\n module=_caller()\n if module is not None:\n \n ns['__module__']=module\n \n td=_TypedDictMeta(typename,(),ns,total=total)\n td.__orig_bases__=(TypedDict,)\n return td\n \n_TypedDict=type.__new__(_TypedDictMeta,'TypedDict',(),{})\nTypedDict.__mro_entries__=lambda bases:(_TypedDict,)\n\n\n@_SpecialForm\ndef Required(self,parameters):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n item=_type_check(parameters,f'{self._name} accepts only a single type.')\n return _GenericAlias(self,(item,))\n \n \n@_SpecialForm\ndef NotRequired(self,parameters):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n item=_type_check(parameters,f'{self._name} accepts only a single type.')\n return _GenericAlias(self,(item,))\n \n \nclass NewType:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __call__=_idfunc\n \n def __init__(self,name,tp):\n self.__qualname__=name\n if '.'in name:\n name=name.rpartition('.')[-1]\n self.__name__=name\n self.__supertype__=tp\n def_mod=_caller()\n if def_mod !='typing':\n self.__module__=def_mod\n \n def __mro_entries__(self,bases):\n \n \n superclass_name=self.__name__\n \n class Dummy:\n def __init_subclass__(cls):\n subclass_name=cls.__name__\n raise TypeError(\n f\"Cannot subclass an instance of NewType. 
Perhaps you were looking for: \"\n f\"`{subclass_name} = NewType({subclass_name !r}, {superclass_name})`\"\n )\n \n return(Dummy,)\n \n def __repr__(self):\n return f'{self.__module__}.{self.__qualname__}'\n \n def __reduce__(self):\n return self.__qualname__\n \n def __or__(self,other):\n return Union[self,other]\n \n def __ror__(self,other):\n return Union[other,self]\n \n \n \nText=str\n\n\n\nTYPE_CHECKING=False\n\n\nclass IO(Generic[AnyStr]):\n ''\n\n\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n @property\n @abstractmethod\n def mode(self)->str:\n pass\n \n @property\n @abstractmethod\n def name(self)->str:\n pass\n \n @abstractmethod\n def close(self)->None:\n pass\n \n @property\n @abstractmethod\n def closed(self)->bool:\n pass\n \n @abstractmethod\n def fileno(self)->int:\n pass\n \n @abstractmethod\n def flush(self)->None:\n pass\n \n @abstractmethod\n def isatty(self)->bool:\n pass\n \n @abstractmethod\n def read(self,n:int=-1)->AnyStr:\n pass\n \n @abstractmethod\n def readable(self)->bool:\n pass\n \n @abstractmethod\n def readline(self,limit:int=-1)->AnyStr:\n pass\n \n @abstractmethod\n def readlines(self,hint:int=-1)->List[AnyStr]:\n pass\n \n @abstractmethod\n def seek(self,offset:int,whence:int=0)->int:\n pass\n \n @abstractmethod\n def seekable(self)->bool:\n pass\n \n @abstractmethod\n def tell(self)->int:\n pass\n \n @abstractmethod\n def truncate(self,size:int=None)->int:\n pass\n \n @abstractmethod\n def writable(self)->bool:\n pass\n \n @abstractmethod\n def write(self,s:AnyStr)->int:\n pass\n \n @abstractmethod\n def writelines(self,lines:List[AnyStr])->None:\n pass\n \n @abstractmethod\n def __enter__(self)->'IO[AnyStr]':\n pass\n \n @abstractmethod\n def __exit__(self,type,value,traceback)->None:\n pass\n \n \nclass BinaryIO(IO[bytes]):\n ''\n \n __slots__=()\n \n @abstractmethod\n def write(self,s:Union[bytes,bytearray])->int:\n pass\n \n @abstractmethod\n def __enter__(self)->'BinaryIO':\n pass\n \n \nclass TextIO(IO[str]):\n ''\n \n __slots__=()\n \n @property\n @abstractmethod\n def buffer(self)->BinaryIO:\n pass\n \n @property\n @abstractmethod\n def encoding(self)->str:\n pass\n \n @property\n @abstractmethod\n def errors(self)->Optional[str]:\n pass\n \n @property\n @abstractmethod\n def line_buffering(self)->bool:\n pass\n \n @property\n @abstractmethod\n def newlines(self)->Any:\n pass\n \n @abstractmethod\n def __enter__(self)->'TextIO':\n pass\n \n \nclass _DeprecatedType(type):\n def __getattribute__(cls,name):\n if name not in(\"__dict__\",\"__module__\")and name in cls.__dict__:\n warnings.warn(\n f\"{cls.__name__} is deprecated, import directly \"\n f\"from typing instead. 
{cls.__name__} will be removed \"\n \"in Python 3.12.\",\n DeprecationWarning,\n stacklevel=2,\n )\n return super().__getattribute__(name)\n \n \nclass io(metaclass=_DeprecatedType):\n ''\n \n __all__=['IO','TextIO','BinaryIO']\n IO=IO\n TextIO=TextIO\n BinaryIO=BinaryIO\n \n \nio.__name__=__name__+'.io'\nsys.modules[io.__name__]=io\n\nPattern=_alias(stdlib_re.Pattern,1)\nMatch=_alias(stdlib_re.Match,1)\n\nclass re(metaclass=_DeprecatedType):\n ''\n \n __all__=['Pattern','Match']\n Pattern=Pattern\n Match=Match\n \n \nre.__name__=__name__+'.re'\nsys.modules[re.__name__]=re\n\n\ndef reveal_type[T](obj:T,/)->T:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n print(f\"Runtime type is {type(obj).__name__ !r}\",file=sys.stderr)\n return obj\n \n \nclass _IdentityCallable(Protocol):\n def __call__[T](self,arg:T,/)->T:\n ...\n \n \ndef dataclass_transform(\n*,\neq_default:bool=True,\norder_default:bool=False,\nkw_only_default:bool=False,\nfrozen_default:bool=False,\nfield_specifiers:tuple[type[Any]|Callable[...,Any],...]=(),\n**kwargs:Any,\n)->_IdentityCallable:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def decorator(cls_or_fn):\n cls_or_fn.__dataclass_transform__={\n \"eq_default\":eq_default,\n \"order_default\":order_default,\n \"kw_only_default\":kw_only_default,\n \"frozen_default\":frozen_default,\n \"field_specifiers\":field_specifiers,\n \"kwargs\":kwargs,\n }\n return cls_or_fn\n return decorator\n \n \ntype _Func=Callable[...,Any]\n\n\ndef override[F:_Func](method:F,/)->F:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try:\n method.__override__=True\n except(AttributeError,TypeError):\n \n \n \n pass\n return method\n", ["_typing", "abc", "collections", "collections.abc", "contextlib", "copyreg", "functools", "inspect", "operator", "re", "sys", "types", "warnings"]], "socket": [".py", "\n\n\n\"\"\"\\\nThis module provides socket operations and some related functions.\nOn Unix, it supports IP (Internet Protocol) and Unix domain sockets.\nOn other systems, it only supports IP. Functions specific for a\nsocket are available as methods of the socket object.\n\nFunctions:\n\nsocket() -- create a new socket object\nsocketpair() -- create a pair of new socket objects [*]\nfromfd() -- create a socket object from an open file descriptor [*]\nsend_fds() -- Send file descriptor to the socket.\nrecv_fds() -- Receive file descriptors from the socket.\nfromshare() -- create a socket object from data received from socket.share() [*]\ngethostname() -- return the current hostname\ngethostbyname() -- map a hostname to its IP number\ngethostbyaddr() -- map an IP number or hostname to DNS info\ngetservbyname() -- map a service name and a protocol name to a port number\ngetprotobyname() -- map a protocol name (e.g. 
'tcp') to a number\nntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order\nhtons(), htonl() -- convert 16, 32 bit int from host to network byte order\ninet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format\ninet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89)\nsocket.getdefaulttimeout() -- get the default timeout value\nsocket.setdefaulttimeout() -- set the default timeout value\ncreate_connection() -- connects to an address, with an optional timeout and\n optional source address.\n\n [*] not available on all platforms!\n\nSpecial objects:\n\nSocketType -- type object for socket objects\nerror -- exception raised for I/O errors\nhas_ipv6 -- boolean value indicating if IPv6 is supported\n\nIntEnum constants:\n\nAF_INET, AF_UNIX -- socket domains (first argument to socket() call)\nSOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument)\n\nInteger constants:\n\nMany other constants may be defined; these may be used in calls to\nthe setsockopt() and getsockopt() methods.\n\"\"\"\n\nimport _socket\nfrom _socket import *\n\nimport os,sys,io,selectors\nfrom enum import IntEnum,IntFlag\n\ntry:\n import errno\nexcept ImportError:\n errno=None\nEBADF=getattr(errno,'EBADF',9)\nEAGAIN=getattr(errno,'EAGAIN',11)\nEWOULDBLOCK=getattr(errno,'EWOULDBLOCK',11)\n\n__all__=[\"fromfd\",\"getfqdn\",\"create_connection\",\"create_server\",\n\"has_dualstack_ipv6\",\"AddressFamily\",\"SocketKind\"]\n__all__.extend(os._get_exports_list(_socket))\n\n\n\n\n\n\n\nIntEnum._convert_(\n'AddressFamily',\n__name__,\nlambda C:C.isupper()and C.startswith('AF_'))\n\nIntEnum._convert_(\n'SocketKind',\n__name__,\nlambda C:C.isupper()and C.startswith('SOCK_'))\n\nIntFlag._convert_(\n'MsgFlag',\n__name__,\nlambda C:C.isupper()and C.startswith('MSG_'))\n\nIntFlag._convert_(\n'AddressInfo',\n__name__,\nlambda C:C.isupper()and C.startswith('AI_'))\n\n_LOCALHOST='127.0.0.1'\n_LOCALHOST_V6='::1'\n\n\ndef _intenum_converter(value,enum_klass):\n ''\n\n\n \n try:\n return enum_klass(value)\n except ValueError:\n return value\n \n \n \nif sys.platform.lower().startswith(\"win\"):\n errorTab={}\n errorTab[6]=\"Specified event object handle is invalid.\"\n errorTab[8]=\"Insufficient memory available.\"\n errorTab[87]=\"One or more parameters are invalid.\"\n errorTab[995]=\"Overlapped operation aborted.\"\n errorTab[996]=\"Overlapped I/O event object not in signaled state.\"\n errorTab[997]=\"Overlapped operation will complete later.\"\n errorTab[10004]=\"The operation was interrupted.\"\n errorTab[10009]=\"A bad file handle was passed.\"\n errorTab[10013]=\"Permission denied.\"\n errorTab[10014]=\"A fault occurred on the network??\"\n errorTab[10022]=\"An invalid operation was attempted.\"\n errorTab[10024]=\"Too many open files.\"\n errorTab[10035]=\"The socket operation would block.\"\n errorTab[10036]=\"A blocking operation is already in progress.\"\n errorTab[10037]=\"Operation already in progress.\"\n errorTab[10038]=\"Socket operation on nonsocket.\"\n errorTab[10039]=\"Destination address required.\"\n errorTab[10040]=\"Message too long.\"\n errorTab[10041]=\"Protocol wrong type for socket.\"\n errorTab[10042]=\"Bad protocol option.\"\n errorTab[10043]=\"Protocol not supported.\"\n errorTab[10044]=\"Socket type not supported.\"\n errorTab[10045]=\"Operation not supported.\"\n errorTab[10046]=\"Protocol family not supported.\"\n errorTab[10047]=\"Address family not supported by protocol family.\"\n errorTab[10048]=\"The network address is in use.\"\n 
errorTab[10049]=\"Cannot assign requested address.\"\n errorTab[10050]=\"Network is down.\"\n errorTab[10051]=\"Network is unreachable.\"\n errorTab[10052]=\"Network dropped connection on reset.\"\n errorTab[10053]=\"Software caused connection abort.\"\n errorTab[10054]=\"The connection has been reset.\"\n errorTab[10055]=\"No buffer space available.\"\n errorTab[10056]=\"Socket is already connected.\"\n errorTab[10057]=\"Socket is not connected.\"\n errorTab[10058]=\"The network has been shut down.\"\n errorTab[10059]=\"Too many references.\"\n errorTab[10060]=\"The operation timed out.\"\n errorTab[10061]=\"Connection refused.\"\n errorTab[10062]=\"Cannot translate name.\"\n errorTab[10063]=\"The name is too long.\"\n errorTab[10064]=\"The host is down.\"\n errorTab[10065]=\"The host is unreachable.\"\n errorTab[10066]=\"Directory not empty.\"\n errorTab[10067]=\"Too many processes.\"\n errorTab[10068]=\"User quota exceeded.\"\n errorTab[10069]=\"Disk quota exceeded.\"\n errorTab[10070]=\"Stale file handle reference.\"\n errorTab[10071]=\"Item is remote.\"\n errorTab[10091]=\"Network subsystem is unavailable.\"\n errorTab[10092]=\"Winsock.dll version out of range.\"\n errorTab[10093]=\"Successful WSAStartup not yet performed.\"\n errorTab[10101]=\"Graceful shutdown in progress.\"\n errorTab[10102]=\"No more results from WSALookupServiceNext.\"\n errorTab[10103]=\"Call has been canceled.\"\n errorTab[10104]=\"Procedure call table is invalid.\"\n errorTab[10105]=\"Service provider is invalid.\"\n errorTab[10106]=\"Service provider failed to initialize.\"\n errorTab[10107]=\"System call failure.\"\n errorTab[10108]=\"Service not found.\"\n errorTab[10109]=\"Class type not found.\"\n errorTab[10110]=\"No more results from WSALookupServiceNext.\"\n errorTab[10111]=\"Call was canceled.\"\n errorTab[10112]=\"Database query was refused.\"\n errorTab[11001]=\"Host not found.\"\n errorTab[11002]=\"Nonauthoritative host not found.\"\n errorTab[11003]=\"This is a nonrecoverable error.\"\n errorTab[11004]=\"Valid name, no data record requested type.\"\n errorTab[11005]=\"QoS receivers.\"\n errorTab[11006]=\"QoS senders.\"\n errorTab[11007]=\"No QoS senders.\"\n errorTab[11008]=\"QoS no receivers.\"\n errorTab[11009]=\"QoS request confirmed.\"\n errorTab[11010]=\"QoS admission error.\"\n errorTab[11011]=\"QoS policy failure.\"\n errorTab[11012]=\"QoS bad style.\"\n errorTab[11013]=\"QoS bad object.\"\n errorTab[11014]=\"QoS traffic control error.\"\n errorTab[11015]=\"QoS generic error.\"\n errorTab[11016]=\"QoS service type error.\"\n errorTab[11017]=\"QoS flowspec error.\"\n errorTab[11018]=\"Invalid QoS provider buffer.\"\n errorTab[11019]=\"Invalid QoS filter style.\"\n errorTab[11020]=\"Invalid QoS filter style.\"\n errorTab[11021]=\"Incorrect QoS filter count.\"\n errorTab[11022]=\"Invalid QoS object length.\"\n errorTab[11023]=\"Incorrect QoS flow count.\"\n errorTab[11024]=\"Unrecognized QoS object.\"\n errorTab[11025]=\"Invalid QoS policy object.\"\n errorTab[11026]=\"Invalid QoS flow descriptor.\"\n errorTab[11027]=\"Invalid QoS provider-specific flowspec.\"\n errorTab[11028]=\"Invalid QoS provider-specific filterspec.\"\n errorTab[11029]=\"Invalid QoS shape discard mode object.\"\n errorTab[11030]=\"Invalid QoS shaping rate object.\"\n errorTab[11031]=\"Reserved policy QoS element type.\"\n __all__.append(\"errorTab\")\n \n \nclass _GiveupOnSendfile(Exception):pass\n\n\nclass socket(_socket.socket):\n\n ''\n \n __slots__=[\"__weakref__\",\"_io_refs\",\"_closed\"]\n \n def 
__init__(self,family=-1,type=-1,proto=-1,fileno=None):\n \n \n \n \n if fileno is None:\n if family ==-1:\n family=AF_INET\n if type ==-1:\n type=SOCK_STREAM\n if proto ==-1:\n proto=0\n _socket.socket.__init__(self,family,type,proto,fileno)\n self._io_refs=0\n self._closed=False\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n if not self._closed:\n self.close()\n \n def __repr__(self):\n ''\n\n \n closed=getattr(self,'_closed',False)\n s=\"<%s.%s%s fd=%i, family=%s, type=%s, proto=%i\"\\\n %(self.__class__.__module__,\n self.__class__.__qualname__,\n \" [closed]\"if closed else \"\",\n self.fileno(),\n self.family,\n self.type,\n self.proto)\n if not closed:\n try:\n laddr=self.getsockname()\n if laddr:\n s +=\", laddr=%s\"%str(laddr)\n except error:\n pass\n try:\n raddr=self.getpeername()\n if raddr:\n s +=\", raddr=%s\"%str(raddr)\n except error:\n pass\n s +='>'\n return s\n \n def __getstate__(self):\n raise TypeError(f\"cannot pickle {self.__class__.__name__ !r} object\")\n \n def dup(self):\n ''\n\n\n\n \n fd=dup(self.fileno())\n sock=self.__class__(self.family,self.type,self.proto,fileno=fd)\n sock.settimeout(self.gettimeout())\n return sock\n \n def accept(self):\n ''\n\n\n\n\n \n fd,addr=self._accept()\n sock=socket(self.family,self.type,self.proto,fileno=fd)\n \n \n \n if getdefaulttimeout()is None and self.gettimeout():\n sock.setblocking(True)\n return sock,addr\n \n def makefile(self,mode=\"r\",buffering=None,*,\n encoding=None,errors=None,newline=None):\n ''\n\n\n\n \n \n if not set(mode)<={\"r\",\"w\",\"b\"}:\n raise ValueError(\"invalid mode %r (only r, w, b allowed)\"%(mode,))\n writing=\"w\"in mode\n reading=\"r\"in mode or not writing\n assert reading or writing\n binary=\"b\"in mode\n rawmode=\"\"\n if reading:\n rawmode +=\"r\"\n if writing:\n rawmode +=\"w\"\n raw=SocketIO(self,rawmode)\n self._io_refs +=1\n if buffering is None:\n buffering=-1\n if buffering <0:\n buffering=io.DEFAULT_BUFFER_SIZE\n if buffering ==0:\n if not binary:\n raise ValueError(\"unbuffered streams must be binary\")\n return raw\n if reading and writing:\n buffer=io.BufferedRWPair(raw,raw,buffering)\n elif reading:\n buffer=io.BufferedReader(raw,buffering)\n else:\n assert writing\n buffer=io.BufferedWriter(raw,buffering)\n if binary:\n return buffer\n encoding=io.text_encoding(encoding)\n text=io.TextIOWrapper(buffer,encoding,errors,newline)\n text.mode=mode\n return text\n \n if hasattr(os,'sendfile'):\n \n def _sendfile_use_sendfile(self,file,offset=0,count=None):\n self._check_sendfile_params(file,offset,count)\n sockno=self.fileno()\n try:\n fileno=file.fileno()\n except(AttributeError,io.UnsupportedOperation)as err:\n raise _GiveupOnSendfile(err)\n try:\n fsize=os.fstat(fileno).st_size\n except OSError as err:\n raise _GiveupOnSendfile(err)\n if not fsize:\n return 0\n \n blocksize=min(count or fsize,2 **30)\n timeout=self.gettimeout()\n if timeout ==0:\n raise ValueError(\"non-blocking sockets are not supported\")\n \n \n \n if hasattr(selectors,'PollSelector'):\n selector=selectors.PollSelector()\n else:\n selector=selectors.SelectSelector()\n selector.register(sockno,selectors.EVENT_WRITE)\n \n total_sent=0\n \n selector_select=selector.select\n os_sendfile=os.sendfile\n try:\n while True:\n if timeout and not selector_select(timeout):\n raise TimeoutError('timed out')\n if count:\n blocksize=count -total_sent\n if blocksize <=0:\n break\n try:\n sent=os_sendfile(sockno,fileno,offset,blocksize)\n except BlockingIOError:\n if not timeout:\n \n \n 
selector_select()\n continue\n except OSError as err:\n if total_sent ==0:\n \n \n \n \n raise _GiveupOnSendfile(err)\n raise err from None\n else:\n if sent ==0:\n break\n offset +=sent\n total_sent +=sent\n return total_sent\n finally:\n if total_sent >0 and hasattr(file,'seek'):\n file.seek(offset)\n else:\n def _sendfile_use_sendfile(self,file,offset=0,count=None):\n raise _GiveupOnSendfile(\n \"os.sendfile() not available on this platform\")\n \n def _sendfile_use_send(self,file,offset=0,count=None):\n self._check_sendfile_params(file,offset,count)\n if self.gettimeout()==0:\n raise ValueError(\"non-blocking sockets are not supported\")\n if offset:\n file.seek(offset)\n blocksize=min(count,8192)if count else 8192\n total_sent=0\n \n file_read=file.read\n sock_send=self.send\n try:\n while True:\n if count:\n blocksize=min(count -total_sent,blocksize)\n if blocksize <=0:\n break\n data=memoryview(file_read(blocksize))\n if not data:\n break\n while True:\n try:\n sent=sock_send(data)\n except BlockingIOError:\n continue\n else:\n total_sent +=sent\n if sent 0 and hasattr(file,'seek'):\n file.seek(offset+total_sent)\n \n def _check_sendfile_params(self,file,offset,count):\n if 'b'not in getattr(file,'mode','b'):\n raise ValueError(\"file should be opened in binary mode\")\n if not self.type&SOCK_STREAM:\n raise ValueError(\"only SOCK_STREAM type sockets are supported\")\n if count is not None:\n if not isinstance(count,int):\n raise TypeError(\n \"count must be a positive integer (got {!r})\".format(count))\n if count <=0:\n raise ValueError(\n \"count must be a positive integer (got {!r})\".format(count))\n \n def sendfile(self,file,offset=0,count=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try:\n return self._sendfile_use_sendfile(file,offset,count)\n except _GiveupOnSendfile:\n return self._sendfile_use_send(file,offset,count)\n \n def _decref_socketios(self):\n if self._io_refs >0:\n self._io_refs -=1\n if self._closed:\n self.close()\n \n def _real_close(self,_ss=_socket.socket):\n \n _ss.close(self)\n \n def close(self):\n \n self._closed=True\n if self._io_refs <=0:\n self._real_close()\n \n def detach(self):\n ''\n\n\n\n\n \n self._closed=True\n return super().detach()\n \n @property\n def family(self):\n ''\n \n return _intenum_converter(super().family,AddressFamily)\n \n @property\n def type(self):\n ''\n \n return _intenum_converter(super().type,SocketKind)\n \n if os.name =='nt':\n def get_inheritable(self):\n return os.get_handle_inheritable(self.fileno())\n def set_inheritable(self,inheritable):\n os.set_handle_inheritable(self.fileno(),inheritable)\n else:\n def get_inheritable(self):\n return os.get_inheritable(self.fileno())\n def set_inheritable(self,inheritable):\n os.set_inheritable(self.fileno(),inheritable)\n get_inheritable.__doc__=\"Get the inheritable flag of the socket\"\n set_inheritable.__doc__=\"Set the inheritable flag of the socket\"\n \ndef fromfd(fd,family,type,proto=0):\n ''\n\n\n\n \n nfd=dup(fd)\n return socket(family,type,proto,nfd)\n \nif hasattr(_socket.socket,\"sendmsg\"):\n import array\n \n def send_fds(sock,buffers,fds,flags=0,address=None):\n ''\n\n\n \n return sock.sendmsg(buffers,[(_socket.SOL_SOCKET,\n _socket.SCM_RIGHTS,array.array(\"i\",fds))])\n __all__.append(\"send_fds\")\n \nif hasattr(_socket.socket,\"recvmsg\"):\n import array\n \n def recv_fds(sock,bufsize,maxfds,flags=0):\n ''\n\n\n\n\n \n \n fds=array.array(\"i\")\n msg,ancdata,flags,addr=sock.recvmsg(bufsize,\n _socket.CMSG_LEN(maxfds *fds.itemsize))\n for 
cmsg_level,cmsg_type,cmsg_data in ancdata:\n if(cmsg_level ==_socket.SOL_SOCKET and cmsg_type ==_socket.SCM_RIGHTS):\n fds.frombytes(cmsg_data[:\n len(cmsg_data)-(len(cmsg_data)%fds.itemsize)])\n \n return msg,list(fds),flags,addr\n __all__.append(\"recv_fds\")\n \nif hasattr(_socket.socket,\"share\"):\n def fromshare(info):\n ''\n\n\n\n \n return socket(0,0,0,info)\n __all__.append(\"fromshare\")\n \nif hasattr(_socket,\"socketpair\"):\n\n def socketpair(family=None,type=SOCK_STREAM,proto=0):\n ''\n\n\n\n\n\n \n if family is None:\n try:\n family=AF_UNIX\n except NameError:\n family=AF_INET\n a,b=_socket.socketpair(family,type,proto)\n a=socket(family,type,proto,a.detach())\n b=socket(family,type,proto,b.detach())\n return a,b\n \nelse:\n\n\n def socketpair(family=AF_INET,type=SOCK_STREAM,proto=0):\n if family ==AF_INET:\n host=_LOCALHOST\n elif family ==AF_INET6:\n host=_LOCALHOST_V6\n else:\n raise ValueError(\"Only AF_INET and AF_INET6 socket address families \"\n \"are supported\")\n if type !=SOCK_STREAM:\n raise ValueError(\"Only SOCK_STREAM socket type is supported\")\n if proto !=0:\n raise ValueError(\"Only protocol zero is supported\")\n \n \n \n lsock=socket(family,type,proto)\n try:\n lsock.bind((host,0))\n lsock.listen()\n \n addr,port=lsock.getsockname()[:2]\n csock=socket(family,type,proto)\n try:\n csock.setblocking(False)\n try:\n csock.connect((addr,port))\n except(BlockingIOError,InterruptedError):\n pass\n csock.setblocking(True)\n ssock,_=lsock.accept()\n except:\n csock.close()\n raise\n finally:\n lsock.close()\n return(ssock,csock)\n __all__.append(\"socketpair\")\n \nsocketpair.__doc__=\"\"\"socketpair([family[, type[, proto]]]) -> (socket object, socket object)\nCreate a pair of socket objects from the sockets returned by the platform\nsocketpair() function.\nThe arguments are the same as for socket() except the default family is AF_UNIX\nif defined on the platform; otherwise, the default is AF_INET.\n\"\"\"\n\n_blocking_errnos={EAGAIN,EWOULDBLOCK}\n\nclass SocketIO(io.RawIOBase):\n\n ''\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n def __init__(self,sock,mode):\n if mode not in(\"r\",\"w\",\"rw\",\"rb\",\"wb\",\"rwb\"):\n raise ValueError(\"invalid mode: %r\"%mode)\n io.RawIOBase.__init__(self)\n self._sock=sock\n if \"b\"not in mode:\n mode +=\"b\"\n self._mode=mode\n self._reading=\"r\"in mode\n self._writing=\"w\"in mode\n self._timeout_occurred=False\n \n def readinto(self,b):\n ''\n\n\n\n\n\n \n self._checkClosed()\n self._checkReadable()\n if self._timeout_occurred:\n raise OSError(\"cannot read from timed out object\")\n while True:\n try:\n return self._sock.recv_into(b)\n except timeout:\n self._timeout_occurred=True\n raise\n except error as e:\n if e.errno in _blocking_errnos:\n return None\n raise\n \n def write(self,b):\n ''\n\n\n\n \n self._checkClosed()\n self._checkWritable()\n try:\n return self._sock.send(b)\n except error as e:\n \n if e.errno in _blocking_errnos:\n return None\n raise\n \n def readable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return self._reading\n \n def writable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return self._writing\n \n def seekable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return super().seekable()\n \n def fileno(self):\n ''\n \n self._checkClosed()\n return self._sock.fileno()\n \n @property\n def name(self):\n if not self.closed:\n return self.fileno()\n else:\n return 
-1\n \n @property\n def mode(self):\n return self._mode\n \n def close(self):\n ''\n\n \n if self.closed:\n return\n io.RawIOBase.close(self)\n self._sock._decref_socketios()\n self._sock=None\n \n \ndef getfqdn(name=''):\n ''\n\n\n\n\n\n\n\n \n name=name.strip()\n if not name or name =='0.0.0.0':\n name=gethostname()\n try:\n hostname,aliases,ipaddrs=gethostbyaddr(name)\n except error:\n pass\n else:\n aliases.insert(0,hostname)\n for name in aliases:\n if '.'in name:\n break\n else:\n name=hostname\n return name\n \n \n_GLOBAL_DEFAULT_TIMEOUT=object()\n\ndef create_connection(address,timeout=_GLOBAL_DEFAULT_TIMEOUT,\nsource_address=None,*,all_errors=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n host,port=address\n exceptions=[]\n for res in getaddrinfo(host,port,0,SOCK_STREAM):\n af,socktype,proto,canonname,sa=res\n sock=None\n try:\n sock=socket(af,socktype,proto)\n if timeout is not _GLOBAL_DEFAULT_TIMEOUT:\n sock.settimeout(timeout)\n if source_address:\n sock.bind(source_address)\n sock.connect(sa)\n \n exceptions.clear()\n return sock\n \n except error as exc:\n if not all_errors:\n exceptions.clear()\n exceptions.append(exc)\n if sock is not None:\n sock.close()\n \n if len(exceptions):\n try:\n if not all_errors:\n raise exceptions[0]\n raise ExceptionGroup(\"create_connection failed\",exceptions)\n finally:\n \n exceptions.clear()\n else:\n raise error(\"getaddrinfo returns an empty list\")\n \n \ndef has_dualstack_ipv6():\n ''\n\n \n if not has_ipv6\\\n or not hasattr(_socket,'IPPROTO_IPV6')\\\n or not hasattr(_socket,'IPV6_V6ONLY'):\n return False\n try:\n with socket(AF_INET6,SOCK_STREAM)as sock:\n sock.setsockopt(IPPROTO_IPV6,IPV6_V6ONLY,0)\n return True\n except error:\n return False\n \n \ndef create_server(address,*,family=AF_INET,backlog=None,reuse_port=False,\ndualstack_ipv6=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if reuse_port and not hasattr(_socket,\"SO_REUSEPORT\"):\n raise ValueError(\"SO_REUSEPORT not supported on this platform\")\n if dualstack_ipv6:\n if not has_dualstack_ipv6():\n raise ValueError(\"dualstack_ipv6 not supported on this platform\")\n if family !=AF_INET6:\n raise ValueError(\"dualstack_ipv6 requires AF_INET6 family\")\n sock=socket(family,SOCK_STREAM)\n try:\n \n \n \n \n \n \n \n \n \n if os.name not in('nt','cygwin')and\\\n hasattr(_socket,'SO_REUSEADDR'):\n try:\n sock.setsockopt(SOL_SOCKET,SO_REUSEADDR,1)\n except error:\n \n \n pass\n if reuse_port:\n sock.setsockopt(SOL_SOCKET,SO_REUSEPORT,1)\n if has_ipv6 and family ==AF_INET6:\n if dualstack_ipv6:\n sock.setsockopt(IPPROTO_IPV6,IPV6_V6ONLY,0)\n elif hasattr(_socket,\"IPV6_V6ONLY\")and\\\n hasattr(_socket,\"IPPROTO_IPV6\"):\n sock.setsockopt(IPPROTO_IPV6,IPV6_V6ONLY,1)\n try:\n sock.bind(address)\n except error as err:\n msg='%s (while attempting to bind on address %r)'%\\\n (err.strerror,address)\n raise error(err.errno,msg)from None\n if backlog is None:\n sock.listen()\n else:\n sock.listen(backlog)\n return sock\n except error:\n sock.close()\n raise\n \n \ndef getaddrinfo(host,port,family=0,type=0,proto=0,flags=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n addrlist=[]\n for res in _socket.getaddrinfo(host,port,family,type,proto,flags):\n af,socktype,proto,canonname,sa=res\n addrlist.append((_intenum_converter(af,AddressFamily),\n _intenum_converter(socktype,SocketKind),\n proto,canonname,sa))\n return addrlist\n", ["_socket", "array", "enum", "errno", "io", "os", "selectors", "sys"]], "datetime": [".py", "try:\n from _datetime import *\n from _datetime import __doc__\nexcept 
ImportError:\n from _pydatetime import *\n from _pydatetime import __doc__\n \n__all__=(\"date\",\"datetime\",\"time\",\"timedelta\",\"timezone\",\"tzinfo\",\n\"MINYEAR\",\"MAXYEAR\",\"UTC\")\n", ["_datetime", "_pydatetime"]], "_thread": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['error','start_new_thread','exit','get_ident','allocate_lock',\n'interrupt_main','LockType']\n\n\nTIMEOUT_MAX=2 **31\n\n\n\n\n\n\nerror=RuntimeError\n\ndef daemon_threads_allowed():\n return False\n \ndef _set_sentinel(*args,**kw):\n return LockType()\n \ndef start_new_thread(function,args,kwargs={}):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if type(args)!=type(tuple()):\n raise TypeError(\"2nd arg must be a tuple\")\n if type(kwargs)!=type(dict()):\n raise TypeError(\"3rd arg must be a dict\")\n global _main\n _main=False\n try:\n function(*args,**kwargs)\n except SystemExit:\n pass\n except:\n import traceback\n traceback.print_exc()\n _main=True\n global _interrupt\n if _interrupt:\n _interrupt=False\n raise KeyboardInterrupt\n \ndef exit():\n ''\n raise SystemExit\n \ndef get_ident():\n ''\n\n\n\n\n \n return -1\n \ndef allocate_lock():\n ''\n return LockType()\n \ndef stack_size(size=None):\n ''\n if size is not None:\n raise error(\"setting thread stack size not supported\")\n return 0\n \nclass LockType(object):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self):\n self.locked_status=False\n \n def acquire(self,waitflag=None,timeout=-1):\n ''\n\n\n\n\n\n\n\n\n \n if waitflag is None or waitflag:\n self.locked_status=True\n return True\n else:\n if not self.locked_status:\n self.locked_status=True\n return True\n else:\n if timeout >0:\n import time\n time.sleep(timeout)\n return False\n \n __enter__=acquire\n \n def __exit__(self,typ,val,tb):\n self.release()\n \n def release(self):\n ''\n \n \n \n \n self.locked_status=False\n return True\n \n def locked(self):\n return self.locked_status\n \n \n_interrupt=False\n\n_main=True\n\ndef interrupt_main():\n ''\n \n if _main:\n raise KeyboardInterrupt\n else:\n global _interrupt\n _interrupt=True\n \n \nclass _local:\n pass\n \nRLock=LockType\n", ["time", "traceback"]], "sysconfig": [".py", "''\n\nimport os\nimport sys\nimport threading\nfrom os.path import 
realpath\n\n__all__=[\n'get_config_h_filename',\n'get_config_var',\n'get_config_vars',\n'get_makefile_filename',\n'get_path',\n'get_path_names',\n'get_paths',\n'get_platform',\n'get_python_version',\n'get_scheme_names',\n'parse_config_h',\n]\n\n\n_ALWAYS_STR={\n'MACOSX_DEPLOYMENT_TARGET',\n}\n\n_INSTALL_SCHEMES={\n'posix_prefix':{\n'stdlib':'{installed_base}/{platlibdir}/python{py_version_short}',\n'platstdlib':'{platbase}/{platlibdir}/python{py_version_short}',\n'purelib':'{base}/lib/python{py_version_short}/site-packages',\n'platlib':'{platbase}/{platlibdir}/python{py_version_short}/site-packages',\n'include':\n'{installed_base}/include/python{py_version_short}{abiflags}',\n'platinclude':\n'{installed_platbase}/include/python{py_version_short}{abiflags}',\n'scripts':'{base}/bin',\n'data':'{base}',\n},\n'posix_home':{\n'stdlib':'{installed_base}/lib/python',\n'platstdlib':'{base}/lib/python',\n'purelib':'{base}/lib/python',\n'platlib':'{base}/lib/python',\n'include':'{installed_base}/include/python',\n'platinclude':'{installed_base}/include/python',\n'scripts':'{base}/bin',\n'data':'{base}',\n},\n'nt':{\n'stdlib':'{installed_base}/Lib',\n'platstdlib':'{base}/Lib',\n'purelib':'{base}/Lib/site-packages',\n'platlib':'{base}/Lib/site-packages',\n'include':'{installed_base}/Include',\n'platinclude':'{installed_base}/Include',\n'scripts':'{base}/Scripts',\n'data':'{base}',\n},\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n'posix_venv':{\n'stdlib':'{installed_base}/{platlibdir}/python{py_version_short}',\n'platstdlib':'{platbase}/{platlibdir}/python{py_version_short}',\n'purelib':'{base}/lib/python{py_version_short}/site-packages',\n'platlib':'{platbase}/{platlibdir}/python{py_version_short}/site-packages',\n'include':\n'{installed_base}/include/python{py_version_short}{abiflags}',\n'platinclude':\n'{installed_platbase}/include/python{py_version_short}{abiflags}',\n'scripts':'{base}/bin',\n'data':'{base}',\n},\n'nt_venv':{\n'stdlib':'{installed_base}/Lib',\n'platstdlib':'{base}/Lib',\n'purelib':'{base}/Lib/site-packages',\n'platlib':'{base}/Lib/site-packages',\n'include':'{installed_base}/Include',\n'platinclude':'{installed_base}/Include',\n'scripts':'{base}/Scripts',\n'data':'{base}',\n},\n}\n\n\nif os.name =='nt':\n _INSTALL_SCHEMES['venv']=_INSTALL_SCHEMES['nt_venv']\nelse:\n _INSTALL_SCHEMES['venv']=_INSTALL_SCHEMES['posix_venv']\n \n \n \n \ndef _getuserbase():\n env_base=os.environ.get(\"PYTHONUSERBASE\",None)\n if env_base:\n return env_base\n \n \n if sys.platform in{\"emscripten\",\"vxworks\",\"wasi\"}:\n return None\n \n def joinuser(*args):\n return os.path.expanduser(os.path.join(*args))\n \n if os.name ==\"nt\":\n base=os.environ.get(\"APPDATA\")or \"~\"\n return joinuser(base,\"Python\")\n \n if sys.platform ==\"darwin\"and sys._framework:\n return joinuser(\"~\",\"Library\",sys._framework,\n f\"{sys.version_info[0]}.{sys.version_info[1]}\")\n \n return joinuser(\"~\",\".local\")\n \n_HAS_USER_BASE=(_getuserbase()is not None)\n\nif _HAS_USER_BASE:\n _INSTALL_SCHEMES |={\n \n 'nt_user':{\n 'stdlib':'{userbase}/Python{py_version_nodot_plat}',\n 'platstdlib':'{userbase}/Python{py_version_nodot_plat}',\n 'purelib':'{userbase}/Python{py_version_nodot_plat}/site-packages',\n 'platlib':'{userbase}/Python{py_version_nodot_plat}/site-packages',\n 'include':'{userbase}/Python{py_version_nodot_plat}/Include',\n 'scripts':'{userbase}/Python{py_version_nodot_plat}/Scripts',\n 'data':'{userbase}',\n },\n 'posix_user':{\n 'stdlib':'{userbase}/{platlibdir}/python{py_version_short}',\n 
'platstdlib':'{userbase}/{platlibdir}/python{py_version_short}',\n 'purelib':'{userbase}/lib/python{py_version_short}/site-packages',\n 'platlib':'{userbase}/lib/python{py_version_short}/site-packages',\n 'include':'{userbase}/include/python{py_version_short}',\n 'scripts':'{userbase}/bin',\n 'data':'{userbase}',\n },\n 'osx_framework_user':{\n 'stdlib':'{userbase}/lib/python',\n 'platstdlib':'{userbase}/lib/python',\n 'purelib':'{userbase}/lib/python/site-packages',\n 'platlib':'{userbase}/lib/python/site-packages',\n 'include':'{userbase}/include/python{py_version_short}',\n 'scripts':'{userbase}/bin',\n 'data':'{userbase}',\n },\n }\n \n_SCHEME_KEYS=('stdlib','platstdlib','purelib','platlib','include',\n'scripts','data')\n\n_PY_VERSION=sys.version.split()[0]\n_PY_VERSION_SHORT=f'{sys.version_info[0]}.{sys.version_info[1]}'\n_PY_VERSION_SHORT_NO_DOT=f'{sys.version_info[0]}{sys.version_info[1]}'\n_PREFIX=os.path.normpath(sys.prefix)\n_BASE_PREFIX=os.path.normpath(sys.base_prefix)\n_EXEC_PREFIX=os.path.normpath(sys.exec_prefix)\n_BASE_EXEC_PREFIX=os.path.normpath(sys.base_exec_prefix)\n\n_CONFIG_VARS_LOCK=threading.RLock()\n_CONFIG_VARS=None\n\n_CONFIG_VARS_INITIALIZED=False\n_USER_BASE=None\n\n\n\n_variable_rx=r\"([a-zA-Z][a-zA-Z0-9_]+)\\s*=\\s*(.*)\"\n_findvar1_rx=r\"\\$\\(([A-Za-z][A-Za-z0-9_]*)\\)\"\n_findvar2_rx=r\"\\${([A-Za-z][A-Za-z0-9_]*)}\"\n\n\ndef _safe_realpath(path):\n try:\n return realpath(path)\n except OSError:\n return path\n \nif sys.executable:\n _PROJECT_BASE=os.path.dirname(_safe_realpath(sys.executable))\nelse:\n\n\n _PROJECT_BASE=_safe_realpath(os.getcwd())\n \n \n \n \n_sys_home=getattr(sys,'_home',None)\nif _sys_home:\n _PROJECT_BASE=_sys_home\n \nif os.name =='nt':\n\n\n\n\n\n if _safe_realpath(_PROJECT_BASE).startswith(\n _safe_realpath(f'{_BASE_PREFIX}\\\\PCbuild')):\n _PROJECT_BASE=_BASE_PREFIX\n \n \nif \"_PYTHON_PROJECT_BASE\"in os.environ:\n _PROJECT_BASE=_safe_realpath(os.environ[\"_PYTHON_PROJECT_BASE\"])\n \ndef is_python_build(check_home=None):\n if check_home is not None:\n import warnings\n warnings.warn(\"check_home argument is deprecated and ignored.\",\n DeprecationWarning,stacklevel=2)\n for fn in(\"Setup\",\"Setup.local\"):\n if os.path.isfile(os.path.join(_PROJECT_BASE,\"Modules\",fn)):\n return True\n return False\n \n_PYTHON_BUILD=is_python_build()\n\nif _PYTHON_BUILD:\n for scheme in('posix_prefix','posix_home'):\n \n \n \n \n scheme=_INSTALL_SCHEMES[scheme]\n scheme['headers']=scheme['include']\n scheme['include']='{srcdir}/Include'\n scheme['platinclude']='{projectbase}/.'\n del scheme\n \n \ndef _subst_vars(s,local_vars):\n try:\n return s.format(**local_vars)\n except KeyError as var:\n try:\n return s.format(**os.environ)\n except KeyError:\n raise AttributeError(f'{var}')from None\n \ndef _extend_dict(target_dict,other_dict):\n target_keys=target_dict.keys()\n for key,value in other_dict.items():\n if key in target_keys:\n continue\n target_dict[key]=value\n \n \ndef _expand_vars(scheme,vars):\n res={}\n if vars is None:\n vars={}\n _extend_dict(vars,get_config_vars())\n if os.name =='nt':\n \n \n \n vars=vars |{'platlibdir':'lib'}\n \n for key,value in _INSTALL_SCHEMES[scheme].items():\n if os.name in('posix','nt'):\n value=os.path.expanduser(value)\n res[key]=os.path.normpath(_subst_vars(value,vars))\n return res\n \n \ndef _get_preferred_schemes():\n if os.name =='nt':\n return{\n 'prefix':'nt',\n 'home':'posix_home',\n 'user':'nt_user',\n }\n if sys.platform =='darwin'and sys._framework:\n return{\n 'prefix':'posix_prefix',\n 
'home':'posix_home',\n 'user':'osx_framework_user',\n }\n return{\n 'prefix':'posix_prefix',\n 'home':'posix_home',\n 'user':'posix_user',\n }\n \n \ndef get_preferred_scheme(key):\n if key =='prefix'and sys.prefix !=sys.base_prefix:\n return 'venv'\n scheme=_get_preferred_schemes()[key]\n if scheme not in _INSTALL_SCHEMES:\n raise ValueError(\n f\"{key !r} returned {scheme !r}, which is not a valid scheme \"\n f\"on this platform\"\n )\n return scheme\n \n \ndef get_default_scheme():\n return get_preferred_scheme('prefix')\n \n \ndef _parse_makefile(filename,vars=None,keep_unresolved=True):\n ''\n\n\n\n\n \n import re\n \n if vars is None:\n vars={}\n done={}\n notdone={}\n \n with open(filename,encoding=sys.getfilesystemencoding(),\n errors=\"surrogateescape\")as f:\n lines=f.readlines()\n \n for line in lines:\n if line.startswith('#')or line.strip()=='':\n continue\n m=re.match(_variable_rx,line)\n if m:\n n,v=m.group(1,2)\n v=v.strip()\n \n tmpv=v.replace('$$','')\n \n if \"$\"in tmpv:\n notdone[n]=v\n else:\n try:\n if n in _ALWAYS_STR:\n raise ValueError\n \n v=int(v)\n except ValueError:\n \n done[n]=v.replace('$$','$')\n else:\n done[n]=v\n \n \n variables=list(notdone.keys())\n \n \n \n \n \n renamed_variables=('CFLAGS','LDFLAGS','CPPFLAGS')\n \n while len(variables)>0:\n for name in tuple(variables):\n value=notdone[name]\n m1=re.search(_findvar1_rx,value)\n m2=re.search(_findvar2_rx,value)\n if m1 and m2:\n m=m1 if m1.start()=\"5\":\n osname=\"solaris\"\n release=f\"{int(release[0])-3}.{release[2:]}\"\n \n \n \n bitness={2147483647:\"32bit\",9223372036854775807:\"64bit\"}\n machine +=f\".{bitness[sys.maxsize]}\"\n \n elif osname[:3]==\"aix\":\n from _aix_support import aix_platform\n return aix_platform()\n elif osname[:6]==\"cygwin\":\n osname=\"cygwin\"\n import re\n rel_re=re.compile(r'[\\d.]+')\n m=rel_re.match(release)\n if m:\n release=m.group()\n elif osname[:6]==\"darwin\":\n import _osx_support\n osname,release,machine=_osx_support.get_platform_osx(\n get_config_vars(),\n osname,release,machine)\n \n return f\"{osname}-{release}-{machine}\"\n \n \ndef get_python_version():\n return _PY_VERSION_SHORT\n \n \ndef expand_makefile_vars(s,vars):\n ''\n\n\n\n\n\n \n import re\n \n \n \n \n \n \n \n while True:\n m=re.search(_findvar1_rx,s)or re.search(_findvar2_rx,s)\n if m:\n (beg,end)=m.span()\n s=s[0:beg]+vars.get(m.group(1))+s[end:]\n else:\n break\n return s\n \n \ndef _print_dict(title,data):\n for index,(key,value)in enumerate(sorted(data.items())):\n if index ==0:\n print(f'{title}: ')\n print(f'\\t{key} = \"{value}\"')\n \n \ndef _main():\n ''\n if '--generate-posix-vars'in sys.argv:\n _generate_posix_vars()\n return\n print(f'Platform: \"{get_platform()}\"')\n print(f'Python version: \"{get_python_version()}\"')\n print(f'Current installation scheme: \"{get_default_scheme()}\"')\n print()\n _print_dict('Paths',get_paths())\n print()\n _print_dict('Variables',get_config_vars())\n \n \nif __name__ =='__main__':\n _main()\n", ["_aix_support", "_imp", "_osx_support", "os", "os.path", "pprint", "re", "sys", "threading", "types", "warnings"]], "pathlib": [".py", "''\n\n\n\n\n\n\nimport fnmatch\nimport functools\nimport io\nimport ntpath\nimport os\nimport posixpath\nimport re\nimport sys\nimport warnings\nfrom _collections_abc import Sequence\nfrom errno import ENOENT,ENOTDIR,EBADF,ELOOP\nfrom stat import S_ISDIR,S_ISLNK,S_ISREG,S_ISSOCK,S_ISBLK,S_ISCHR,S_ISFIFO\nfrom urllib.parse import quote_from_bytes as 
urlquote_from_bytes\n\n\n__all__=[\n\"PurePath\",\"PurePosixPath\",\"PureWindowsPath\",\n\"Path\",\"PosixPath\",\"WindowsPath\",\n]\n\n\n\n\n\n\n\n_WIN_RESERVED_NAMES=frozenset(\n{'CON','PRN','AUX','NUL','CONIN$','CONOUT$'}|\n{f'COM{c}'for c in '123456789\\xb9\\xb2\\xb3'}|\n{f'LPT{c}'for c in '123456789\\xb9\\xb2\\xb3'}\n)\n\n_WINERROR_NOT_READY=21\n_WINERROR_INVALID_NAME=123\n_WINERROR_CANT_RESOLVE_FILENAME=1921\n\n\n_IGNORED_ERRNOS=(ENOENT,ENOTDIR,EBADF,ELOOP)\n\n_IGNORED_WINERRORS=(\n_WINERROR_NOT_READY,\n_WINERROR_INVALID_NAME,\n_WINERROR_CANT_RESOLVE_FILENAME)\n\ndef _ignore_error(exception):\n return(getattr(exception,'errno',None)in _IGNORED_ERRNOS or\n getattr(exception,'winerror',None)in _IGNORED_WINERRORS)\n \n \n@functools.cache\ndef _is_case_sensitive(flavour):\n return flavour.normcase('Aa')=='Aa'\n \n \n \n \n \n \n \n \n \n \n \n \n \n_FNMATCH_PREFIX,_FNMATCH_SUFFIX=fnmatch.translate('_').split('_')\n_FNMATCH_SLICE=slice(len(_FNMATCH_PREFIX),-len(_FNMATCH_SUFFIX))\n_SWAP_SEP_AND_NEWLINE={\n'/':str.maketrans({'/':'\\n','\\n':'/'}),\n'\\\\':str.maketrans({'\\\\':'\\n','\\n':'\\\\'}),\n}\n\n\n@functools.lru_cache()\ndef _make_selector(pattern_parts,flavour,case_sensitive):\n pat=pattern_parts[0]\n if not pat:\n return _TerminatingSelector()\n if pat =='**':\n child_parts_idx=1\n while child_parts_idx =len(self)or idx <-len(self):\n raise IndexError(idx)\n if idx <0:\n idx +=len(self)\n return self._path._from_parsed_parts(self._drv,self._root,\n self._tail[:-idx -1])\n \n def __repr__(self):\n return \"<{}.parents>\".format(type(self._path).__name__)\n \n \nclass PurePath(object):\n ''\n\n\n\n\n\n\n \n \n __slots__=(\n \n \n '_raw_paths',\n \n \n \n \n \n \n \n \n '_drv','_root','_tail_cached',\n \n \n \n \n '_str',\n \n \n \n \n \n '_str_normcase_cached',\n \n \n \n \n \n '_parts_normcase_cached',\n \n \n \n '_lines_cached',\n \n \n \n '_hash',\n )\n _flavour=os.path\n \n def __new__(cls,*args,**kwargs):\n ''\n\n\n\n \n if cls is PurePath:\n cls=PureWindowsPath if os.name =='nt'else PurePosixPath\n return object.__new__(cls)\n \n def __reduce__(self):\n \n \n return(self.__class__,self.parts)\n \n def __init__(self,*args):\n paths=[]\n for arg in args:\n if isinstance(arg,PurePath):\n if arg._flavour is ntpath and self._flavour is posixpath:\n \n paths.extend(path.replace('\\\\','/')for path in arg._raw_paths)\n else:\n paths.extend(arg._raw_paths)\n else:\n try:\n path=os.fspath(arg)\n except TypeError:\n path=arg\n if not isinstance(path,str):\n raise TypeError(\n \"argument should be a str or an os.PathLike \"\n \"object where __fspath__ returns a str, \"\n f\"not {type(path).__name__ !r}\")\n paths.append(path)\n self._raw_paths=paths\n \n def with_segments(self,*pathsegments):\n ''\n\n\n \n return type(self)(*pathsegments)\n \n @classmethod\n def _parse_path(cls,path):\n if not path:\n return '','',[]\n sep=cls._flavour.sep\n altsep=cls._flavour.altsep\n if altsep:\n path=path.replace(altsep,sep)\n drv,root,rel=cls._flavour.splitroot(path)\n if not root and drv.startswith(sep)and not drv.endswith(sep):\n drv_parts=drv.split(sep)\n if len(drv_parts)==4 and drv_parts[2]not in '?.':\n \n root=sep\n elif len(drv_parts)==6:\n \n root=sep\n parsed=[sys.intern(str(x))for x in rel.split(sep)if x and x !='.']\n return drv,root,parsed\n \n def _load_parts(self):\n paths=self._raw_paths\n if len(paths)==0:\n path=''\n elif len(paths)==1:\n path=paths[0]\n else:\n path=self._flavour.join(*paths)\n drv,root,tail=self._parse_path(path)\n self._drv=drv\n self._root=root\n 
self._tail_cached=tail\n \n def _from_parsed_parts(self,drv,root,tail):\n path_str=self._format_parsed_parts(drv,root,tail)\n path=self.with_segments(path_str)\n path._str=path_str or '.'\n path._drv=drv\n path._root=root\n path._tail_cached=tail\n return path\n \n @classmethod\n def _format_parsed_parts(cls,drv,root,tail):\n if drv or root:\n return drv+root+cls._flavour.sep.join(tail)\n elif tail and cls._flavour.splitdrive(tail[0])[0]:\n tail=['.']+tail\n return cls._flavour.sep.join(tail)\n \n def __str__(self):\n ''\n \n try:\n return self._str\n except AttributeError:\n self._str=self._format_parsed_parts(self.drive,self.root,\n self._tail)or '.'\n return self._str\n \n def __fspath__(self):\n return str(self)\n \n def as_posix(self):\n ''\n \n f=self._flavour\n return str(self).replace(f.sep,'/')\n \n def __bytes__(self):\n ''\n \n return os.fsencode(self)\n \n def __repr__(self):\n return \"{}({!r})\".format(self.__class__.__name__,self.as_posix())\n \n def as_uri(self):\n ''\n if not self.is_absolute():\n raise ValueError(\"relative path can't be expressed as a file URI\")\n \n drive=self.drive\n if len(drive)==2 and drive[1]==':':\n \n prefix='file:///'+drive\n path=self.as_posix()[2:]\n elif drive:\n \n prefix='file:'\n path=self.as_posix()\n else:\n \n prefix='file://'\n path=str(self)\n return prefix+urlquote_from_bytes(os.fsencode(path))\n \n @property\n def _str_normcase(self):\n \n try:\n return self._str_normcase_cached\n except AttributeError:\n if _is_case_sensitive(self._flavour):\n self._str_normcase_cached=str(self)\n else:\n self._str_normcase_cached=str(self).lower()\n return self._str_normcase_cached\n \n @property\n def _parts_normcase(self):\n \n try:\n return self._parts_normcase_cached\n except AttributeError:\n self._parts_normcase_cached=self._str_normcase.split(self._flavour.sep)\n return self._parts_normcase_cached\n \n @property\n def _lines(self):\n \n try:\n return self._lines_cached\n except AttributeError:\n path_str=str(self)\n if path_str =='.':\n self._lines_cached=''\n else:\n trans=_SWAP_SEP_AND_NEWLINE[self._flavour.sep]\n self._lines_cached=path_str.translate(trans)\n return self._lines_cached\n \n def __eq__(self,other):\n if not isinstance(other,PurePath):\n return NotImplemented\n return self._str_normcase ==other._str_normcase and self._flavour is other._flavour\n \n def __hash__(self):\n try:\n return self._hash\n except AttributeError:\n self._hash=hash(self._str_normcase)\n return self._hash\n \n def __lt__(self,other):\n if not isinstance(other,PurePath)or self._flavour is not other._flavour:\n return NotImplemented\n return self._parts_normcase other._parts_normcase\n \n def __ge__(self,other):\n if not isinstance(other,PurePath)or self._flavour is not other._flavour:\n return NotImplemented\n return self._parts_normcase >=other._parts_normcase\n \n @property\n def drive(self):\n ''\n try:\n return self._drv\n except AttributeError:\n self._load_parts()\n return self._drv\n \n @property\n def root(self):\n ''\n try:\n return self._root\n except AttributeError:\n self._load_parts()\n return self._root\n \n @property\n def _tail(self):\n try:\n return self._tail_cached\n except AttributeError:\n self._load_parts()\n return self._tail_cached\n \n @property\n def anchor(self):\n ''\n anchor=self.drive+self.root\n return anchor\n \n @property\n def name(self):\n ''\n tail=self._tail\n if not tail:\n return ''\n return tail[-1]\n \n @property\n def suffix(self):\n ''\n\n\n\n \n name=self.name\n i=name.rfind('.')\n if 0 >> from decimal import 
*\n>>> setcontext(ExtendedContext)\n>>> Decimal(0)\nDecimal('0')\n>>> Decimal('1')\nDecimal('1')\n>>> Decimal('-.0123')\nDecimal('-0.0123')\n>>> Decimal(123456)\nDecimal('123456')\n>>> Decimal('123.45e12345678')\nDecimal('1.2345E+12345680')\n>>> Decimal('1.33') + Decimal('1.27')\nDecimal('2.60')\n>>> Decimal('12.34') + Decimal('3.87') - Decimal('18.41')\nDecimal('-2.20')\n>>> dig = Decimal(1)\n>>> print(dig / Decimal(3))\n0.333333333\n>>> getcontext().prec = 18\n>>> print(dig / Decimal(3))\n0.333333333333333333\n>>> print(dig.sqrt())\n1\n>>> print(Decimal(3).sqrt())\n1.73205080756887729\n>>> print(Decimal(3) ** 123)\n4.85192780976896427E+58\n>>> inf = Decimal(1) / Decimal(0)\n>>> print(inf)\nInfinity\n>>> neginf = Decimal(-1) / Decimal(0)\n>>> print(neginf)\n-Infinity\n>>> print(neginf + inf)\nNaN\n>>> print(neginf * inf)\n-Infinity\n>>> print(dig / 0)\nInfinity\n>>> getcontext().traps[DivisionByZero] = 1\n>>> print(dig / 0)\nTraceback (most recent call last):\n ...\n ...\n ...\ndecimal.DivisionByZero: x / 0\n>>> c = Context()\n>>> c.traps[InvalidOperation] = 0\n>>> print(c.flags[InvalidOperation])\n0\n>>> c.divide(Decimal(0), Decimal(0))\nDecimal('NaN')\n>>> c.traps[InvalidOperation] = 1\n>>> print(c.flags[InvalidOperation])\n1\n>>> c.flags[InvalidOperation] = 0\n>>> print(c.flags[InvalidOperation])\n0\n>>> print(c.divide(Decimal(0), Decimal(0)))\nTraceback (most recent call last):\n ...\n ...\n ...\ndecimal.InvalidOperation: 0 / 0\n>>> print(c.flags[InvalidOperation])\n1\n>>> c.flags[InvalidOperation] = 0\n>>> c.traps[InvalidOperation] = 0\n>>> print(c.divide(Decimal(0), Decimal(0)))\nNaN\n>>> print(c.flags[InvalidOperation])\n1\n>>>\n\"\"\"\n\n__all__=[\n\n'Decimal','Context',\n\n\n'DecimalTuple',\n\n\n'DefaultContext','BasicContext','ExtendedContext',\n\n\n'DecimalException','Clamped','InvalidOperation','DivisionByZero',\n'Inexact','Rounded','Subnormal','Overflow','Underflow',\n'FloatOperation',\n\n\n'DivisionImpossible','InvalidContext','ConversionSyntax','DivisionUndefined',\n\n\n'ROUND_DOWN','ROUND_HALF_UP','ROUND_HALF_EVEN','ROUND_CEILING',\n'ROUND_FLOOR','ROUND_UP','ROUND_HALF_DOWN','ROUND_05UP',\n\n\n'setcontext','getcontext','localcontext',\n\n\n'MAX_PREC','MAX_EMAX','MIN_EMIN','MIN_ETINY',\n\n\n'HAVE_THREADS',\n\n\n'HAVE_CONTEXTVAR'\n]\n\n__xname__=__name__\n__name__='decimal'\n__version__='1.70'\n\n__libmpdec_version__=\"2.4.2\"\n\nimport math as _math\nimport numbers as _numbers\nimport sys\n\ntry:\n from collections import namedtuple as _namedtuple\n DecimalTuple=_namedtuple('DecimalTuple','sign digits exponent',module='decimal')\nexcept ImportError:\n DecimalTuple=lambda *args:args\n \n \nROUND_DOWN='ROUND_DOWN'\nROUND_HALF_UP='ROUND_HALF_UP'\nROUND_HALF_EVEN='ROUND_HALF_EVEN'\nROUND_CEILING='ROUND_CEILING'\nROUND_FLOOR='ROUND_FLOOR'\nROUND_UP='ROUND_UP'\nROUND_HALF_DOWN='ROUND_HALF_DOWN'\nROUND_05UP='ROUND_05UP'\n\n\nHAVE_THREADS=True\nHAVE_CONTEXTVAR=True\nif sys.maxsize ==2 **63 -1:\n MAX_PREC=999999999999999999\n MAX_EMAX=999999999999999999\n MIN_EMIN=-999999999999999999\nelse:\n MAX_PREC=425000000\n MAX_EMAX=425000000\n MIN_EMIN=-425000000\n \nMIN_ETINY=MIN_EMIN -(MAX_PREC -1)\n\n\n\nclass DecimalException(ArithmeticError):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def handle(self,context,*args):\n pass\n \n \nclass Clamped(DecimalException):\n ''\n\n\n\n\n\n\n\n\n \n \nclass InvalidOperation(DecimalException):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def handle(self,context,*args):\n if args:\n ans=_dec_from_triple(args[0]._sign,args[0]._int,'n',True)\n 
return ans._fix_nan(context)\n return _NaN\n \nclass ConversionSyntax(InvalidOperation):\n ''\n\n\n\n\n \n def handle(self,context,*args):\n return _NaN\n \nclass DivisionByZero(DecimalException,ZeroDivisionError):\n ''\n\n\n\n\n\n\n\n\n\n \n \n def handle(self,context,sign,*args):\n return _SignedInfinity[sign]\n \nclass DivisionImpossible(InvalidOperation):\n ''\n\n\n\n\n \n \n def handle(self,context,*args):\n return _NaN\n \nclass DivisionUndefined(InvalidOperation,ZeroDivisionError):\n ''\n\n\n\n\n \n \n def handle(self,context,*args):\n return _NaN\n \nclass Inexact(DecimalException):\n ''\n\n\n\n\n\n\n\n\n \n \nclass InvalidContext(InvalidOperation):\n ''\n\n\n\n\n\n\n\n \n \n def handle(self,context,*args):\n return _NaN\n \nclass Rounded(DecimalException):\n ''\n\n\n\n\n\n\n\n\n \n \nclass Subnormal(DecimalException):\n ''\n\n\n\n\n\n\n\n \n \nclass Overflow(Inexact,Rounded):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def handle(self,context,sign,*args):\n if context.rounding in(ROUND_HALF_UP,ROUND_HALF_EVEN,\n ROUND_HALF_DOWN,ROUND_UP):\n return _SignedInfinity[sign]\n if sign ==0:\n if context.rounding ==ROUND_CEILING:\n return _SignedInfinity[sign]\n return _dec_from_triple(sign,'9'*context.prec,\n context.Emax -context.prec+1)\n if sign ==1:\n if context.rounding ==ROUND_FLOOR:\n return _SignedInfinity[sign]\n return _dec_from_triple(sign,'9'*context.prec,\n context.Emax -context.prec+1)\n \n \nclass Underflow(Inexact,Rounded,Subnormal):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \nclass FloatOperation(DecimalException,TypeError):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n_signals=[Clamped,DivisionByZero,Inexact,Overflow,Rounded,\nUnderflow,InvalidOperation,Subnormal,FloatOperation]\n\n\n_condition_map={ConversionSyntax:InvalidOperation,\nDivisionImpossible:InvalidOperation,\nDivisionUndefined:InvalidOperation,\nInvalidContext:InvalidOperation}\n\n\n_rounding_modes=(ROUND_DOWN,ROUND_HALF_UP,ROUND_HALF_EVEN,ROUND_CEILING,\nROUND_FLOOR,ROUND_UP,ROUND_HALF_DOWN,ROUND_05UP)\n\n\n\n\n\n\nimport contextvars\n\n_current_context_var=contextvars.ContextVar('decimal_context')\n\n_context_attributes=frozenset(\n['prec','Emin','Emax','capitals','clamp','rounding','flags','traps']\n)\n\ndef getcontext():\n ''\n\n\n\n\n \n try:\n return _current_context_var.get()\n except LookupError:\n context=Context()\n _current_context_var.set(context)\n return context\n \ndef setcontext(context):\n ''\n if context in(DefaultContext,BasicContext,ExtendedContext):\n context=context.copy()\n context.clear_flags()\n _current_context_var.set(context)\n \ndel contextvars\n\ndef localcontext(ctx=None,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if ctx is None:\n ctx=getcontext()\n ctx_manager=_ContextManager(ctx)\n for key,value in kwargs.items():\n if key not in _context_attributes:\n raise TypeError(f\"'{key}' is an invalid keyword argument for this function\")\n setattr(ctx_manager.new_context,key,value)\n return ctx_manager\n \n \n \n \n \n \n \n \nclass Decimal(object):\n ''\n \n __slots__=('_exp','_int','_sign','_is_special')\n \n \n \n \n \n def __new__(cls,value=\"0\",context=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n self=object.__new__(cls)\n \n \n \n if isinstance(value,str):\n m=_parser(value.strip().replace(\"_\",\"\"))\n if m is None:\n if context is None:\n context=getcontext()\n return context._raise_error(ConversionSyntax,\n \"Invalid literal for Decimal: %r\"%value)\n \n if m.group('sign')==\"-\":\n self._sign=1\n else:\n self._sign=0\n 
intpart=m.group('int')\n if intpart is not None:\n \n fracpart=m.group('frac')or ''\n exp=int(m.group('exp')or '0')\n self._int=str(int(intpart+fracpart))\n self._exp=exp -len(fracpart)\n self._is_special=False\n else:\n diag=m.group('diag')\n if diag is not None:\n \n self._int=str(int(diag or '0')).lstrip('0')\n if m.group('signal'):\n self._exp='N'\n else:\n self._exp='n'\n else:\n \n self._int='0'\n self._exp='F'\n self._is_special=True\n return self\n \n \n if isinstance(value,int):\n if value >=0:\n self._sign=0\n else:\n self._sign=1\n self._exp=0\n self._int=str(abs(value))\n self._is_special=False\n return self\n \n \n if isinstance(value,Decimal):\n self._exp=value._exp\n self._sign=value._sign\n self._int=value._int\n self._is_special=value._is_special\n return self\n \n \n if isinstance(value,_WorkRep):\n self._sign=value.sign\n self._int=str(value.int)\n self._exp=int(value.exp)\n self._is_special=False\n return self\n \n \n if isinstance(value,(list,tuple)):\n if len(value)!=3:\n raise ValueError('Invalid tuple size in creation of Decimal '\n 'from list or tuple. The list or tuple '\n 'should have exactly three elements.')\n \n if not(isinstance(value[0],int)and value[0]in(0,1)):\n raise ValueError(\"Invalid sign. The first value in the tuple \"\n \"should be an integer; either 0 for a \"\n \"positive number or 1 for a negative number.\")\n self._sign=value[0]\n if value[2]=='F':\n \n self._int='0'\n self._exp=value[2]\n self._is_special=True\n else:\n \n digits=[]\n for digit in value[1]:\n if isinstance(digit,int)and 0 <=digit <=9:\n \n if digits or digit !=0:\n digits.append(digit)\n else:\n raise ValueError(\"The second value in the tuple must \"\n \"be composed of integers in the range \"\n \"0 through 9.\")\n if value[2]in('n','N'):\n \n self._int=''.join(map(str,digits))\n self._exp=value[2]\n self._is_special=True\n elif isinstance(value[2],int):\n \n self._int=''.join(map(str,digits or[0]))\n self._exp=value[2]\n self._is_special=False\n else:\n raise ValueError(\"The third value in the tuple must \"\n \"be an integer, or one of the \"\n \"strings 'F', 'n', 'N'.\")\n return self\n \n if isinstance(value,float):\n if context is None:\n context=getcontext()\n context._raise_error(FloatOperation,\n \"strict semantics for mixing floats and Decimals are \"\n \"enabled\")\n value=Decimal.from_float(value)\n self._exp=value._exp\n self._sign=value._sign\n self._int=value._int\n self._is_special=value._is_special\n return self\n \n raise TypeError(\"Cannot convert %r to Decimal\"%value)\n \n @classmethod\n def from_float(cls,f):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(f,int):\n sign=0 if f >=0 else 1\n k=0\n coeff=str(abs(f))\n elif isinstance(f,float):\n if _math.isinf(f)or _math.isnan(f):\n return cls(repr(f))\n if _math.copysign(1.0,f)==1.0:\n sign=0\n else:\n sign=1\n n,d=abs(f).as_integer_ratio()\n k=d.bit_length()-1\n coeff=str(n *5 **k)\n else:\n raise TypeError(\"argument must be int or float.\")\n \n result=_dec_from_triple(sign,coeff,-k)\n if cls is Decimal:\n return result\n else:\n return cls(result)\n \n def _isnan(self):\n ''\n\n\n\n\n \n if self._is_special:\n exp=self._exp\n if exp =='n':\n return 1\n elif exp =='N':\n return 2\n return 0\n \n def _isinfinity(self):\n ''\n\n\n\n\n \n if self._exp =='F':\n if self._sign:\n return -1\n return 1\n return 0\n \n def _check_nans(self,other=None,context=None):\n ''\n\n\n\n\n\n\n \n \n self_is_nan=self._isnan()\n if other is None:\n other_is_nan=False\n else:\n other_is_nan=other._isnan()\n \n if 
self_is_nan or other_is_nan:\n if context is None:\n context=getcontext()\n \n if self_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n self)\n if other_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n other)\n if self_is_nan:\n return self._fix_nan(context)\n \n return other._fix_nan(context)\n return 0\n \n def _compare_check_nans(self,other,context):\n ''\n\n\n\n\n\n\n\n\n \n if context is None:\n context=getcontext()\n \n if self._is_special or other._is_special:\n if self.is_snan():\n return context._raise_error(InvalidOperation,\n 'comparison involving sNaN',\n self)\n elif other.is_snan():\n return context._raise_error(InvalidOperation,\n 'comparison involving sNaN',\n other)\n elif self.is_qnan():\n return context._raise_error(InvalidOperation,\n 'comparison involving NaN',\n self)\n elif other.is_qnan():\n return context._raise_error(InvalidOperation,\n 'comparison involving NaN',\n other)\n return 0\n \n def __bool__(self):\n ''\n\n\n \n return self._is_special or self._int !='0'\n \n def _cmp(self,other):\n ''\n\n\n \n \n if self._is_special or other._is_special:\n self_inf=self._isinfinity()\n other_inf=other._isinfinity()\n if self_inf ==other_inf:\n return 0\n elif self_inf other_adjusted:\n return(-1)**self._sign\n else:\n return -((-1)**self._sign)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def __eq__(self,other,context=None):\n self,other=_convert_for_comparison(self,other,equality_op=True)\n if other is NotImplemented:\n return other\n if self._check_nans(other,context):\n return False\n return self._cmp(other)==0\n \n def __lt__(self,other,context=None):\n self,other=_convert_for_comparison(self,other)\n if other is NotImplemented:\n return other\n ans=self._compare_check_nans(other,context)\n if ans:\n return False\n return self._cmp(other)<0\n \n def __le__(self,other,context=None):\n self,other=_convert_for_comparison(self,other)\n if other is NotImplemented:\n return other\n ans=self._compare_check_nans(other,context)\n if ans:\n return False\n return self._cmp(other)<=0\n \n def __gt__(self,other,context=None):\n self,other=_convert_for_comparison(self,other)\n if other is NotImplemented:\n return other\n ans=self._compare_check_nans(other,context)\n if ans:\n return False\n return self._cmp(other)>0\n \n def __ge__(self,other,context=None):\n self,other=_convert_for_comparison(self,other)\n if other is NotImplemented:\n return other\n ans=self._compare_check_nans(other,context)\n if ans:\n return False\n return self._cmp(other)>=0\n \n def compare(self,other,context=None):\n ''\n\n\n\n\n\n \n other=_convert_other(other,raiseit=True)\n \n \n if(self._is_special or other and other._is_special):\n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n return Decimal(self._cmp(other))\n \n def __hash__(self):\n ''\n \n \n \n \n \n if self._is_special:\n if self.is_snan():\n raise TypeError('Cannot hash a signaling NaN value.')\n elif self.is_nan():\n return object.__hash__(self)\n else:\n if self._sign:\n return -_PyHASH_INF\n else:\n return _PyHASH_INF\n \n if self._exp >=0:\n exp_hash=pow(10,self._exp,_PyHASH_MODULUS)\n else:\n exp_hash=pow(_PyHASH_10INV,-self._exp,_PyHASH_MODULUS)\n hash_=int(self._int)*exp_hash %_PyHASH_MODULUS\n ans=hash_ if self >=0 else -hash_\n return -2 if ans ==-1 else ans\n \n def as_tuple(self):\n ''\n\n\n \n return DecimalTuple(self._sign,tuple(map(int,self._int)),self._exp)\n \n def as_integer_ratio(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if self._is_special:\n if 
self.is_nan():\n raise ValueError(\"cannot convert NaN to integer ratio\")\n else:\n raise OverflowError(\"cannot convert Infinity to integer ratio\")\n \n if not self:\n return 0,1\n \n \n \n n=int(self._int)\n if self._exp >=0:\n \n n,d=n *10 **self._exp,1\n else:\n \n d5=-self._exp\n while d5 >0 and n %5 ==0:\n n //=5\n d5 -=1\n \n \n \n d2=-self._exp\n shift2=min((n&-n).bit_length()-1,d2)\n if shift2:\n n >>=shift2\n d2 -=shift2\n \n d=5 **d5 <-6:\n \n dotplace=leftdigits\n elif not eng:\n \n dotplace=1\n elif self._int =='0':\n \n dotplace=(leftdigits+1)%3 -1\n else:\n \n dotplace=(leftdigits -1)%3+1\n \n if dotplace <=0:\n intpart='0'\n fracpart='.'+'0'*(-dotplace)+self._int\n elif dotplace >=len(self._int):\n intpart=self._int+'0'*(dotplace -len(self._int))\n fracpart=''\n else:\n intpart=self._int[:dotplace]\n fracpart='.'+self._int[dotplace:]\n if leftdigits ==dotplace:\n exp=''\n else:\n if context is None:\n context=getcontext()\n exp=['e','E'][context.capitals]+\"%+d\"%(leftdigits -dotplace)\n \n return sign+intpart+fracpart+exp\n \n def to_eng_string(self,context=None):\n ''\n\n\n\n\n \n return self.__str__(eng=True,context=context)\n \n def __neg__(self,context=None):\n ''\n\n\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if context is None:\n context=getcontext()\n \n if not self and context.rounding !=ROUND_FLOOR:\n \n \n ans=self.copy_abs()\n else:\n ans=self.copy_negate()\n \n return ans._fix(context)\n \n def __pos__(self,context=None):\n ''\n\n\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if context is None:\n context=getcontext()\n \n if not self and context.rounding !=ROUND_FLOOR:\n \n ans=self.copy_abs()\n else:\n ans=Decimal(self)\n \n return ans._fix(context)\n \n def __abs__(self,round=True,context=None):\n ''\n\n\n\n\n \n if not round:\n return self.copy_abs()\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if self._sign:\n ans=self.__neg__(context=context)\n else:\n ans=self.__pos__(context=context)\n \n return ans\n \n def __add__(self,other,context=None):\n ''\n\n\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None:\n context=getcontext()\n \n if self._is_special or other._is_special:\n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if self._isinfinity():\n \n if self._sign !=other._sign and other._isinfinity():\n return context._raise_error(InvalidOperation,'-INF + INF')\n return Decimal(self)\n if other._isinfinity():\n return Decimal(other)\n \n exp=min(self._exp,other._exp)\n negativezero=0\n if context.rounding ==ROUND_FLOOR and self._sign !=other._sign:\n \n negativezero=1\n \n if not self and not other:\n sign=min(self._sign,other._sign)\n if negativezero:\n sign=1\n ans=_dec_from_triple(sign,'0',exp)\n ans=ans._fix(context)\n return ans\n if not self:\n exp=max(exp,other._exp -context.prec -1)\n ans=other._rescale(exp,context.rounding)\n ans=ans._fix(context)\n return ans\n if not other:\n exp=max(exp,self._exp -context.prec -1)\n ans=self._rescale(exp,context.rounding)\n ans=ans._fix(context)\n return ans\n \n op1=_WorkRep(self)\n op2=_WorkRep(other)\n op1,op2=_normalize(op1,op2,context.prec)\n \n result=_WorkRep()\n if op1.sign !=op2.sign:\n \n if op1.int ==op2.int:\n ans=_dec_from_triple(negativezero,'0',exp)\n ans=ans._fix(context)\n return ans\n if op1.int =0:\n coeff,remainder=divmod(op1.int *10 **shift,op2.int)\n else:\n 
coeff,remainder=divmod(op1.int,op2.int *10 **-shift)\n if remainder:\n \n if coeff %5 ==0:\n coeff +=1\n else:\n \n ideal_exp=self._exp -other._exp\n while exp =op2.exp:\n op1.int *=10 **(op1.exp -op2.exp)\n else:\n op2.int *=10 **(op2.exp -op1.exp)\n q,r=divmod(op1.int,op2.int)\n if q <10 **context.prec:\n return(_dec_from_triple(sign,str(q),0),\n _dec_from_triple(self._sign,str(r),ideal_exp))\n \n \n ans=context._raise_error(DivisionImpossible,\n 'quotient too large in //, % or divmod')\n return ans,ans\n \n def __rtruediv__(self,other,context=None):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__truediv__(self,context=context)\n \n def __divmod__(self,other,context=None):\n ''\n\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None:\n context=getcontext()\n \n ans=self._check_nans(other,context)\n if ans:\n return(ans,ans)\n \n sign=self._sign ^other._sign\n if self._isinfinity():\n if other._isinfinity():\n ans=context._raise_error(InvalidOperation,'divmod(INF, INF)')\n return ans,ans\n else:\n return(_SignedInfinity[sign],\n context._raise_error(InvalidOperation,'INF % x'))\n \n if not other:\n if not self:\n ans=context._raise_error(DivisionUndefined,'divmod(0, 0)')\n return ans,ans\n else:\n return(context._raise_error(DivisionByZero,'x // 0',sign),\n context._raise_error(InvalidOperation,'x % 0'))\n \n quotient,remainder=self._divide(other,context)\n remainder=remainder._fix(context)\n return quotient,remainder\n \n def __rdivmod__(self,other,context=None):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__divmod__(self,context=context)\n \n def __mod__(self,other,context=None):\n ''\n\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None:\n context=getcontext()\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if self._isinfinity():\n return context._raise_error(InvalidOperation,'INF % x')\n elif not other:\n if self:\n return context._raise_error(InvalidOperation,'x % 0')\n else:\n return context._raise_error(DivisionUndefined,'0 % 0')\n \n remainder=self._divide(other,context)[1]\n remainder=remainder._fix(context)\n return remainder\n \n def __rmod__(self,other,context=None):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__mod__(self,context=context)\n \n def remainder_near(self,other,context=None):\n ''\n\n \n if context is None:\n context=getcontext()\n \n other=_convert_other(other,raiseit=True)\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n \n if self._isinfinity():\n return context._raise_error(InvalidOperation,\n 'remainder_near(infinity, x)')\n \n \n if not other:\n if self:\n return context._raise_error(InvalidOperation,\n 'remainder_near(x, 0)')\n else:\n return context._raise_error(DivisionUndefined,\n 'remainder_near(0, 0)')\n \n \n if other._isinfinity():\n ans=Decimal(self)\n return ans._fix(context)\n \n \n ideal_exponent=min(self._exp,other._exp)\n if not self:\n ans=_dec_from_triple(self._sign,'0',ideal_exponent)\n return ans._fix(context)\n \n \n expdiff=self.adjusted()-other.adjusted()\n if expdiff >=context.prec+1:\n \n return context._raise_error(DivisionImpossible)\n if expdiff <=-2:\n \n ans=self._rescale(ideal_exponent,context.rounding)\n return ans._fix(context)\n \n \n op1=_WorkRep(self)\n op2=_WorkRep(other)\n if op1.exp >=op2.exp:\n op1.int *=10 **(op1.exp -op2.exp)\n 
else:\n op2.int *=10 **(op2.exp -op1.exp)\n q,r=divmod(op1.int,op2.int)\n \n \n \n if 2 *r+(q&1)>op2.int:\n r -=op2.int\n q +=1\n \n if q >=10 **context.prec:\n return context._raise_error(DivisionImpossible)\n \n \n sign=self._sign\n if r <0:\n sign=1 -sign\n r=-r\n \n ans=_dec_from_triple(sign,str(r),ideal_exponent)\n return ans._fix(context)\n \n def __floordiv__(self,other,context=None):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None:\n context=getcontext()\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if self._isinfinity():\n if other._isinfinity():\n return context._raise_error(InvalidOperation,'INF // INF')\n else:\n return _SignedInfinity[self._sign ^other._sign]\n \n if not other:\n if self:\n return context._raise_error(DivisionByZero,'x // 0',\n self._sign ^other._sign)\n else:\n return context._raise_error(DivisionUndefined,'0 // 0')\n \n return self._divide(other,context)[0]\n \n def __rfloordiv__(self,other,context=None):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__floordiv__(self,context=context)\n \n def __float__(self):\n ''\n if self._isnan():\n if self.is_snan():\n raise ValueError(\"Cannot convert signaling NaN to float\")\n s=\"-nan\"if self._sign else \"nan\"\n else:\n s=str(self)\n return float(s)\n \n def __int__(self):\n ''\n if self._is_special:\n if self._isnan():\n raise ValueError(\"Cannot convert NaN to integer\")\n elif self._isinfinity():\n raise OverflowError(\"Cannot convert infinity to integer\")\n s=(-1)**self._sign\n if self._exp >=0:\n return s *int(self._int)*10 **self._exp\n else:\n return s *int(self._int[:self._exp]or '0')\n \n __trunc__=__int__\n \n @property\n def real(self):\n return self\n \n @property\n def imag(self):\n return Decimal(0)\n \n def conjugate(self):\n return self\n \n def __complex__(self):\n return complex(float(self))\n \n def _fix_nan(self,context):\n ''\n payload=self._int\n \n \n \n max_payload_len=context.prec -context.clamp\n if len(payload)>max_payload_len:\n payload=payload[len(payload)-max_payload_len:].lstrip('0')\n return _dec_from_triple(self._sign,payload,self._exp,True)\n return Decimal(self)\n \n def _fix(self,context):\n ''\n\n\n\n\n\n\n \n \n if self._is_special:\n if self._isnan():\n \n return self._fix_nan(context)\n else:\n \n return Decimal(self)\n \n \n \n Etiny=context.Etiny()\n Etop=context.Etop()\n if not self:\n exp_max=[context.Emax,Etop][context.clamp]\n new_exp=min(max(self._exp,Etiny),exp_max)\n if new_exp !=self._exp:\n context._raise_error(Clamped)\n return _dec_from_triple(self._sign,'0',new_exp)\n else:\n return Decimal(self)\n \n \n \n exp_min=len(self._int)+self._exp -context.prec\n if exp_min >Etop:\n \n ans=context._raise_error(Overflow,'above Emax',self._sign)\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n return ans\n \n self_is_subnormal=exp_min 0:\n coeff=str(int(coeff)+1)\n if len(coeff)>context.prec:\n coeff=coeff[:-1]\n exp_min +=1\n \n \n if exp_min >Etop:\n ans=context._raise_error(Overflow,'above Emax',self._sign)\n else:\n ans=_dec_from_triple(self._sign,coeff,exp_min)\n \n \n \n if changed and self_is_subnormal:\n context._raise_error(Underflow)\n if self_is_subnormal:\n context._raise_error(Subnormal)\n if changed:\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n if not ans:\n \n context._raise_error(Clamped)\n return ans\n \n if self_is_subnormal:\n context._raise_error(Subnormal)\n \n \n if context.clamp ==1 
and self._exp >Etop:\n context._raise_error(Clamped)\n self_padded=self._int+'0'*(self._exp -Etop)\n return _dec_from_triple(self._sign,self_padded,Etop)\n \n \n return Decimal(self)\n \n \n \n \n \n \n \n \n \n \n \n def _round_down(self,prec):\n ''\n if _all_zeros(self._int,prec):\n return 0\n else:\n return -1\n \n def _round_up(self,prec):\n ''\n return -self._round_down(prec)\n \n def _round_half_up(self,prec):\n ''\n if self._int[prec]in '56789':\n return 1\n elif _all_zeros(self._int,prec):\n return 0\n else:\n return -1\n \n def _round_half_down(self,prec):\n ''\n if _exact_half(self._int,prec):\n return -1\n else:\n return self._round_half_up(prec)\n \n def _round_half_even(self,prec):\n ''\n if _exact_half(self._int,prec)and\\\n (prec ==0 or self._int[prec -1]in '02468'):\n return -1\n else:\n return self._round_half_up(prec)\n \n def _round_ceiling(self,prec):\n ''\n if self._sign:\n return self._round_down(prec)\n else:\n return -self._round_down(prec)\n \n def _round_floor(self,prec):\n ''\n if not self._sign:\n return self._round_down(prec)\n else:\n return -self._round_down(prec)\n \n def _round_05up(self,prec):\n ''\n if prec and self._int[prec -1]not in '05':\n return self._round_down(prec)\n else:\n return -self._round_down(prec)\n \n _pick_rounding_function=dict(\n ROUND_DOWN=_round_down,\n ROUND_UP=_round_up,\n ROUND_HALF_UP=_round_half_up,\n ROUND_HALF_DOWN=_round_half_down,\n ROUND_HALF_EVEN=_round_half_even,\n ROUND_CEILING=_round_ceiling,\n ROUND_FLOOR=_round_floor,\n ROUND_05UP=_round_05up,\n )\n \n def __round__(self,n=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if n is not None:\n \n if not isinstance(n,int):\n raise TypeError('Second argument to round should be integral')\n exp=_dec_from_triple(0,'1',-n)\n return self.quantize(exp)\n \n \n if self._is_special:\n if self.is_nan():\n raise ValueError(\"cannot round a NaN\")\n else:\n raise OverflowError(\"cannot round an infinity\")\n return int(self._rescale(0,ROUND_HALF_EVEN))\n \n def __floor__(self):\n ''\n\n\n\n\n\n \n if self._is_special:\n if self.is_nan():\n raise ValueError(\"cannot round a NaN\")\n else:\n raise OverflowError(\"cannot round an infinity\")\n return int(self._rescale(0,ROUND_FLOOR))\n \n def __ceil__(self):\n ''\n\n\n\n\n\n \n if self._is_special:\n if self.is_nan():\n raise ValueError(\"cannot round a NaN\")\n else:\n raise OverflowError(\"cannot round an infinity\")\n return int(self._rescale(0,ROUND_CEILING))\n \n def fma(self,other,third,context=None):\n ''\n\n\n\n\n\n\n\n \n \n other=_convert_other(other,raiseit=True)\n third=_convert_other(third,raiseit=True)\n \n \n \n if self._is_special or other._is_special:\n if context is None:\n context=getcontext()\n if self._exp =='N':\n return context._raise_error(InvalidOperation,'sNaN',self)\n if other._exp =='N':\n return context._raise_error(InvalidOperation,'sNaN',other)\n if self._exp =='n':\n product=self\n elif other._exp =='n':\n product=other\n elif self._exp =='F':\n if not other:\n return context._raise_error(InvalidOperation,\n 'INF * 0 in fma')\n product=_SignedInfinity[self._sign ^other._sign]\n elif other._exp =='F':\n if not self:\n return context._raise_error(InvalidOperation,\n '0 * INF in fma')\n product=_SignedInfinity[self._sign ^other._sign]\n else:\n product=_dec_from_triple(self._sign ^other._sign,\n str(int(self._int)*int(other._int)),\n self._exp+other._exp)\n \n return product.__add__(third,context)\n \n def 
_power_modulo(self,other,modulo,context=None):\n ''\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n modulo=_convert_other(modulo)\n if modulo is NotImplemented:\n return modulo\n \n if context is None:\n context=getcontext()\n \n \n \n self_is_nan=self._isnan()\n other_is_nan=other._isnan()\n modulo_is_nan=modulo._isnan()\n if self_is_nan or other_is_nan or modulo_is_nan:\n if self_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n self)\n if other_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n other)\n if modulo_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n modulo)\n if self_is_nan:\n return self._fix_nan(context)\n if other_is_nan:\n return other._fix_nan(context)\n return modulo._fix_nan(context)\n \n \n if not(self._isinteger()and\n other._isinteger()and\n modulo._isinteger()):\n return context._raise_error(InvalidOperation,\n 'pow() 3rd argument not allowed '\n 'unless all arguments are integers')\n if other <0:\n return context._raise_error(InvalidOperation,\n 'pow() 2nd argument cannot be '\n 'negative when 3rd argument specified')\n if not modulo:\n return context._raise_error(InvalidOperation,\n 'pow() 3rd argument cannot be 0')\n \n \n \n if modulo.adjusted()>=context.prec:\n return context._raise_error(InvalidOperation,\n 'insufficient precision: pow() 3rd '\n 'argument must not have more than '\n 'precision digits')\n \n \n \n if not other and not self:\n return context._raise_error(InvalidOperation,\n 'at least one of pow() 1st argument '\n 'and 2nd argument must be nonzero; '\n '0**0 is not defined')\n \n \n if other._iseven():\n sign=0\n else:\n sign=self._sign\n \n \n \n modulo=abs(int(modulo))\n base=_WorkRep(self.to_integral_value())\n exponent=_WorkRep(other.to_integral_value())\n \n \n base=(base.int %modulo *pow(10,base.exp,modulo))%modulo\n for i in range(exponent.exp):\n base=pow(base,10,modulo)\n base=pow(base,exponent.int,modulo)\n \n return _dec_from_triple(sign,str(base),0)\n \n def _power_exact(self,other,p):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n x=_WorkRep(self)\n xc,xe=x.int,x.exp\n while xc %10 ==0:\n xc //=10\n xe +=1\n \n y=_WorkRep(other)\n yc,ye=y.int,y.exp\n while yc %10 ==0:\n yc //=10\n ye +=1\n \n \n \n if xc ==1:\n xe *=yc\n \n while xe %10 ==0:\n xe //=10\n ye +=1\n if ye <0:\n return None\n exponent=xe *10 **ye\n if y.sign ==1:\n exponent=-exponent\n \n if other._isinteger()and other._sign ==0:\n ideal_exponent=self._exp *int(other)\n zeros=min(exponent -ideal_exponent,p -1)\n else:\n zeros=0\n return _dec_from_triple(0,'1'+'0'*zeros,exponent -zeros)\n \n \n \n if y.sign ==1:\n last_digit=xc %10\n if last_digit in(2,4,6,8):\n \n if xc&-xc !=xc:\n return None\n \n e=_nbits(xc)-1\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n emax=p *93 //65\n if ye >=len(str(emax)):\n return None\n \n \n e=_decimal_lshift_exact(e *yc,ye)\n xe=_decimal_lshift_exact(xe *yc,ye)\n if e is None or xe is None:\n return None\n \n if e >emax:\n return None\n xc=5 **e\n \n elif last_digit ==5:\n \n \n e=_nbits(xc)*28 //65\n xc,remainder=divmod(5 **e,xc)\n if remainder:\n return None\n while xc %5 ==0:\n xc //=5\n e -=1\n \n \n \n \n emax=p *10 //3\n if ye >=len(str(emax)):\n return None\n \n e=_decimal_lshift_exact(e *yc,ye)\n xe=_decimal_lshift_exact(xe *yc,ye)\n if e is None or xe is None:\n return None\n \n if e >emax:\n 
return None\n xc=2 **e\n else:\n return None\n \n if xc >=10 **p:\n return None\n xe=-e -xe\n return _dec_from_triple(0,str(xc),xe)\n \n \n if ye >=0:\n m,n=yc *10 **ye,1\n else:\n if xe !=0 and len(str(abs(yc *xe)))<=-ye:\n return None\n xc_bits=_nbits(xc)\n if len(str(abs(yc)*xc_bits))<=-ye:\n return None\n m,n=yc,10 **(-ye)\n while m %2 ==n %2 ==0:\n m //=2\n n //=2\n while m %5 ==n %5 ==0:\n m //=5\n n //=5\n \n \n if n >1:\n \n if xc_bits <=n:\n return None\n \n xe,rem=divmod(xe,n)\n if rem !=0:\n return None\n \n \n a=1 <<-(-_nbits(xc)//n)\n while True:\n q,r=divmod(xc,a **(n -1))\n if a <=q:\n break\n else:\n a=(a *(n -1)+q)//n\n if not(a ==q and r ==0):\n return None\n xc=a\n \n \n \n \n \n \n if xc >1 and m >p *100 //_log10_lb(xc):\n return None\n xc=xc **m\n xe *=m\n if xc >10 **p:\n return None\n \n \n \n \n str_xc=str(xc)\n if other._isinteger()and other._sign ==0:\n ideal_exponent=self._exp *int(other)\n zeros=min(xe -ideal_exponent,p -len(str_xc))\n else:\n zeros=0\n return _dec_from_triple(0,str_xc+'0'*zeros,xe -zeros)\n \n def __pow__(self,other,modulo=None,context=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if modulo is not None:\n return self._power_modulo(other,modulo,context)\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None:\n context=getcontext()\n \n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n \n if not other:\n if not self:\n return context._raise_error(InvalidOperation,'0 ** 0')\n else:\n return _One\n \n \n result_sign=0\n if self._sign ==1:\n if other._isinteger():\n if not other._iseven():\n result_sign=1\n else:\n \n \n if self:\n return context._raise_error(InvalidOperation,\n 'x ** y with x negative and y not an integer')\n \n self=self.copy_negate()\n \n \n if not self:\n if other._sign ==0:\n return _dec_from_triple(result_sign,'0',0)\n else:\n return _SignedInfinity[result_sign]\n \n \n if self._isinfinity():\n if other._sign ==0:\n return _SignedInfinity[result_sign]\n else:\n return _dec_from_triple(result_sign,'0',0)\n \n \n \n \n if self ==_One:\n if other._isinteger():\n \n \n \n \n if other._sign ==1:\n multiplier=0\n elif other >context.prec:\n multiplier=context.prec\n else:\n multiplier=int(other)\n \n exp=self._exp *multiplier\n if exp <1 -context.prec:\n exp=1 -context.prec\n context._raise_error(Rounded)\n else:\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n exp=1 -context.prec\n \n return _dec_from_triple(result_sign,'1'+'0'*-exp,exp)\n \n \n self_adj=self.adjusted()\n \n \n \n if other._isinfinity():\n if(other._sign ==0)==(self_adj <0):\n return _dec_from_triple(result_sign,'0',0)\n else:\n return _SignedInfinity[result_sign]\n \n \n \n ans=None\n exact=False\n \n \n \n \n \n \n bound=self._log10_exp_bound()+other.adjusted()\n if(self_adj >=0)==(other._sign ==0):\n \n \n if bound >=len(str(context.Emax)):\n ans=_dec_from_triple(result_sign,'1',context.Emax+1)\n else:\n \n \n Etiny=context.Etiny()\n if bound >=len(str(-Etiny)):\n ans=_dec_from_triple(result_sign,'1',Etiny -1)\n \n \n if ans is None:\n ans=self._power_exact(other,context.prec+1)\n if ans is not None:\n if result_sign ==1:\n ans=_dec_from_triple(1,ans._int,ans._exp)\n exact=True\n \n \n if ans is None:\n p=context.prec\n x=_WorkRep(self)\n xc,xe=x.int,x.exp\n y=_WorkRep(other)\n yc,ye=y.int,y.exp\n if y.sign ==1:\n yc=-yc\n \n \n \n extra=3\n while True:\n coeff,exp=_dpower(xc,xe,yc,ye,p+extra)\n if coeff %(5 *10 **(len(str(coeff))-p -1)):\n break\n extra +=3\n \n 
ans=_dec_from_triple(result_sign,str(coeff),exp)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if exact and not other._isinteger():\n \n \n if len(ans._int)<=context.prec:\n expdiff=context.prec+1 -len(ans._int)\n ans=_dec_from_triple(ans._sign,ans._int+'0'*expdiff,\n ans._exp -expdiff)\n \n \n newcontext=context.copy()\n newcontext.clear_flags()\n for exception in _signals:\n newcontext.traps[exception]=0\n \n \n ans=ans._fix(newcontext)\n \n \n newcontext._raise_error(Inexact)\n if newcontext.flags[Subnormal]:\n newcontext._raise_error(Underflow)\n \n \n \n \n \n \n if newcontext.flags[Overflow]:\n context._raise_error(Overflow,'above Emax',ans._sign)\n for exception in Underflow,Subnormal,Inexact,Rounded,Clamped:\n if newcontext.flags[exception]:\n context._raise_error(exception)\n \n else:\n ans=ans._fix(context)\n \n return ans\n \n def __rpow__(self,other,context=None):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__pow__(self,context=context)\n \n def normalize(self,context=None):\n ''\n \n if context is None:\n context=getcontext()\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n dup=self._fix(context)\n if dup._isinfinity():\n return dup\n \n if not dup:\n return _dec_from_triple(dup._sign,'0',0)\n exp_max=[context.Emax,context.Etop()][context.clamp]\n end=len(dup._int)\n exp=dup._exp\n while dup._int[end -1]=='0'and exp context.Emax:\n return context._raise_error(InvalidOperation,\n 'exponent of quantize result too large for current context')\n if self_adjusted -exp._exp+1 >context.prec:\n return context._raise_error(InvalidOperation,\n 'quantize result has too many digits for current context')\n \n ans=self._rescale(exp._exp,rounding)\n if ans.adjusted()>context.Emax:\n return context._raise_error(InvalidOperation,\n 'exponent of quantize result too large for current context')\n if len(ans._int)>context.prec:\n return context._raise_error(InvalidOperation,\n 'quantize result has too many digits for current context')\n \n \n if ans and ans.adjusted()self._exp:\n if ans !=self:\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n \n \n \n ans=ans._fix(context)\n return ans\n \n def same_quantum(self,other,context=None):\n ''\n\n\n\n\n\n\n \n other=_convert_other(other,raiseit=True)\n if self._is_special or other._is_special:\n return(self.is_nan()and other.is_nan()or\n self.is_infinite()and other.is_infinite())\n return self._exp ==other._exp\n \n def _rescale(self,exp,rounding):\n ''\n\n\n\n\n\n\n\n\n \n if self._is_special:\n return Decimal(self)\n if not self:\n return _dec_from_triple(self._sign,'0',exp)\n \n if self._exp >=exp:\n \n return _dec_from_triple(self._sign,\n self._int+'0'*(self._exp -exp),exp)\n \n \n \n digits=len(self._int)+self._exp -exp\n if digits <0:\n self=_dec_from_triple(self._sign,'1',exp -1)\n digits=0\n this_function=self._pick_rounding_function[rounding]\n changed=this_function(self,digits)\n coeff=self._int[:digits]or '0'\n if changed ==1:\n coeff=str(int(coeff)+1)\n return _dec_from_triple(self._sign,coeff,exp)\n \n def _round(self,places,rounding):\n ''\n\n\n\n\n\n\n\n \n if places <=0:\n raise ValueError(\"argument should be at least 1 in _round\")\n if self._is_special or not self:\n return Decimal(self)\n ans=self._rescale(self.adjusted()+1 -places,rounding)\n \n \n \n \n if ans.adjusted()!=self.adjusted():\n ans=ans._rescale(ans.adjusted()+1 -places,rounding)\n return ans\n \n def to_integral_exact(self,rounding=None,context=None):\n 
''\n\n\n\n\n\n\n\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n return Decimal(self)\n if self._exp >=0:\n return Decimal(self)\n if not self:\n return _dec_from_triple(self._sign,'0',0)\n if context is None:\n context=getcontext()\n if rounding is None:\n rounding=context.rounding\n ans=self._rescale(0,rounding)\n if ans !=self:\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n return ans\n \n def to_integral_value(self,rounding=None,context=None):\n ''\n if context is None:\n context=getcontext()\n if rounding is None:\n rounding=context.rounding\n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n return Decimal(self)\n if self._exp >=0:\n return Decimal(self)\n else:\n return self._rescale(0,rounding)\n \n \n to_integral=to_integral_value\n \n def sqrt(self,context=None):\n ''\n if context is None:\n context=getcontext()\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if self._isinfinity()and self._sign ==0:\n return Decimal(self)\n \n if not self:\n \n ans=_dec_from_triple(self._sign,'0',self._exp //2)\n return ans._fix(context)\n \n if self._sign ==1:\n return context._raise_error(InvalidOperation,'sqrt(-x), x > 0')\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n prec=context.prec+1\n \n \n \n \n \n op=_WorkRep(self)\n e=op.exp >>1\n if op.exp&1:\n c=op.int *10\n l=(len(self._int)>>1)+1\n else:\n c=op.int\n l=len(self._int)+1 >>1\n \n \n shift=prec -l\n if shift >=0:\n c *=100 **shift\n exact=True\n else:\n c,remainder=divmod(c,100 **-shift)\n exact=not remainder\n e -=shift\n \n \n n=10 **prec\n while True:\n q=c //n\n if n <=q:\n break\n else:\n n=n+q >>1\n exact=exact and n *n ==c\n \n if exact:\n \n if shift >=0:\n \n n //=10 **shift\n else:\n n *=10 **-shift\n e +=shift\n else:\n \n if n %5 ==0:\n n +=1\n \n ans=_dec_from_triple(0,str(n),e)\n \n \n context=context._shallow_copy()\n rounding=context._set_rounding(ROUND_HALF_EVEN)\n ans=ans._fix(context)\n context.rounding=rounding\n \n return ans\n \n def max(self,other,context=None):\n ''\n\n\n\n \n other=_convert_other(other,raiseit=True)\n \n if context is None:\n context=getcontext()\n \n if self._is_special or other._is_special:\n \n \n sn=self._isnan()\n on=other._isnan()\n if sn or on:\n if on ==1 and sn ==0:\n return self._fix(context)\n if sn ==1 and on ==0:\n return other._fix(context)\n return self._check_nans(other,context)\n \n c=self._cmp(other)\n if c ==0:\n \n \n \n \n \n \n \n \n c=self.compare_total(other)\n \n if c ==-1:\n ans=other\n else:\n ans=self\n \n return ans._fix(context)\n \n def min(self,other,context=None):\n ''\n\n\n\n \n other=_convert_other(other,raiseit=True)\n \n if context is None:\n context=getcontext()\n \n if self._is_special or other._is_special:\n \n \n sn=self._isnan()\n on=other._isnan()\n if sn or on:\n if on ==1 and sn ==0:\n return self._fix(context)\n if sn ==1 and on ==0:\n return other._fix(context)\n return self._check_nans(other,context)\n \n c=self._cmp(other)\n if c ==0:\n c=self.compare_total(other)\n \n if c ==-1:\n ans=self\n else:\n ans=other\n \n return ans._fix(context)\n \n def _isinteger(self):\n ''\n if self._is_special:\n return False\n if self._exp >=0:\n return True\n rest=self._int[self._exp:]\n return rest =='0'*len(rest)\n \n def _iseven(self):\n ''\n if not self or self._exp >0:\n return True\n return self._int[-1+self._exp]in '02468'\n \n def adjusted(self):\n ''\n try:\n return 
self._exp+len(self._int)-1\n \n except TypeError:\n return 0\n \n def canonical(self):\n ''\n\n\n\n \n return self\n \n def compare_signal(self,other,context=None):\n ''\n\n\n\n \n other=_convert_other(other,raiseit=True)\n ans=self._compare_check_nans(other,context)\n if ans:\n return ans\n return self.compare(other,context=context)\n \n def compare_total(self,other,context=None):\n ''\n\n\n\n\n \n other=_convert_other(other,raiseit=True)\n \n \n if self._sign and not other._sign:\n return _NegativeOne\n if not self._sign and other._sign:\n return _One\n sign=self._sign\n \n \n self_nan=self._isnan()\n other_nan=other._isnan()\n if self_nan or other_nan:\n if self_nan ==other_nan:\n \n self_key=len(self._int),self._int\n other_key=len(other._int),other._int\n if self_key other_key:\n if sign:\n return _NegativeOne\n else:\n return _One\n return _Zero\n \n if sign:\n if self_nan ==1:\n return _NegativeOne\n if other_nan ==1:\n return _One\n if self_nan ==2:\n return _NegativeOne\n if other_nan ==2:\n return _One\n else:\n if self_nan ==1:\n return _One\n if other_nan ==1:\n return _NegativeOne\n if self_nan ==2:\n return _One\n if other_nan ==2:\n return _NegativeOne\n \n if self other:\n return _One\n \n if self._exp other._exp:\n if sign:\n return _NegativeOne\n else:\n return _One\n return _Zero\n \n \n def compare_total_mag(self,other,context=None):\n ''\n\n\n \n other=_convert_other(other,raiseit=True)\n \n s=self.copy_abs()\n o=other.copy_abs()\n return s.compare_total(o)\n \n def copy_abs(self):\n ''\n return _dec_from_triple(0,self._int,self._exp,self._is_special)\n \n def copy_negate(self):\n ''\n if self._sign:\n return _dec_from_triple(0,self._int,self._exp,self._is_special)\n else:\n return _dec_from_triple(1,self._int,self._exp,self._is_special)\n \n def copy_sign(self,other,context=None):\n ''\n other=_convert_other(other,raiseit=True)\n return _dec_from_triple(other._sign,self._int,\n self._exp,self._is_special)\n \n def exp(self,context=None):\n ''\n \n if context is None:\n context=getcontext()\n \n \n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n \n if self._isinfinity()==-1:\n return _Zero\n \n \n if not self:\n return _One\n \n \n if self._isinfinity()==1:\n return Decimal(self)\n \n \n \n \n \n p=context.prec\n adj=self.adjusted()\n \n \n \n \n \n \n if self._sign ==0 and adj >len(str((context.Emax+1)*3)):\n \n ans=_dec_from_triple(0,'1',context.Emax+1)\n elif self._sign ==1 and adj >len(str((-context.Etiny()+1)*3)):\n \n ans=_dec_from_triple(0,'1',context.Etiny()-1)\n elif self._sign ==0 and adj <-p:\n \n ans=_dec_from_triple(0,'1'+'0'*(p -1)+'1',-p)\n elif self._sign ==1 and adj <-p -1:\n \n ans=_dec_from_triple(0,'9'*(p+1),-p -1)\n \n else:\n op=_WorkRep(self)\n c,e=op.int,op.exp\n if op.sign ==1:\n c=-c\n \n \n \n \n extra=3\n while True:\n coeff,exp=_dexp(c,e,p+extra)\n if coeff %(5 *10 **(len(str(coeff))-p -1)):\n break\n extra +=3\n \n ans=_dec_from_triple(0,str(coeff),exp)\n \n \n \n context=context._shallow_copy()\n rounding=context._set_rounding(ROUND_HALF_EVEN)\n ans=ans._fix(context)\n context.rounding=rounding\n \n return ans\n \n def is_canonical(self):\n ''\n\n\n\n \n return True\n \n def is_finite(self):\n ''\n\n\n\n \n return not self._is_special\n \n def is_infinite(self):\n ''\n return self._exp =='F'\n \n def is_nan(self):\n ''\n return self._exp in('n','N')\n \n def is_normal(self,context=None):\n ''\n if self._is_special or not self:\n return False\n if context is None:\n context=getcontext()\n return context.Emin 
<=self.adjusted()\n \n def is_qnan(self):\n ''\n return self._exp =='n'\n \n def is_signed(self):\n ''\n return self._sign ==1\n \n def is_snan(self):\n ''\n return self._exp =='N'\n \n def is_subnormal(self,context=None):\n ''\n if self._is_special or not self:\n return False\n if context is None:\n context=getcontext()\n return self.adjusted()=1:\n \n return len(str(adj *23 //10))-1\n if adj <=-2:\n \n return len(str((-1 -adj)*23 //10))-1\n op=_WorkRep(self)\n c,e=op.int,op.exp\n if adj ==0:\n \n num=str(c -10 **-e)\n den=str(c)\n return len(num)-len(den)-(num =1:\n \n return len(str(adj))-1\n if adj <=-2:\n \n return len(str(-1 -adj))-1\n op=_WorkRep(self)\n c,e=op.int,op.exp\n if adj ==0:\n \n num=str(c -10 **-e)\n den=str(231 *c)\n return len(num)-len(den)-(num 0:\n opa='0'*dif+opa\n elif dif <0:\n opa=opa[-context.prec:]\n dif=context.prec -len(opb)\n if dif >0:\n opb='0'*dif+opb\n elif dif <0:\n opb=opb[-context.prec:]\n return opa,opb\n \n def logical_and(self,other,context=None):\n ''\n if context is None:\n context=getcontext()\n \n other=_convert_other(other,raiseit=True)\n \n if not self._islogical()or not other._islogical():\n return context._raise_error(InvalidOperation)\n \n \n (opa,opb)=self._fill_logical(context,self._int,other._int)\n \n \n result=\"\".join([str(int(a)&int(b))for a,b in zip(opa,opb)])\n return _dec_from_triple(0,result.lstrip('0')or '0',0)\n \n def logical_invert(self,context=None):\n ''\n if context is None:\n context=getcontext()\n return self.logical_xor(_dec_from_triple(0,'1'*context.prec,0),\n context)\n \n def logical_or(self,other,context=None):\n ''\n if context is None:\n context=getcontext()\n \n other=_convert_other(other,raiseit=True)\n \n if not self._islogical()or not other._islogical():\n return context._raise_error(InvalidOperation)\n \n \n (opa,opb)=self._fill_logical(context,self._int,other._int)\n \n \n result=\"\".join([str(int(a)|int(b))for a,b in zip(opa,opb)])\n return _dec_from_triple(0,result.lstrip('0')or '0',0)\n \n def logical_xor(self,other,context=None):\n ''\n if context is None:\n context=getcontext()\n \n other=_convert_other(other,raiseit=True)\n \n if not self._islogical()or not other._islogical():\n return context._raise_error(InvalidOperation)\n \n \n (opa,opb)=self._fill_logical(context,self._int,other._int)\n \n \n result=\"\".join([str(int(a)^int(b))for a,b in zip(opa,opb)])\n return _dec_from_triple(0,result.lstrip('0')or '0',0)\n \n def max_mag(self,other,context=None):\n ''\n other=_convert_other(other,raiseit=True)\n \n if context is None:\n context=getcontext()\n \n if self._is_special or other._is_special:\n \n \n sn=self._isnan()\n on=other._isnan()\n if sn or on:\n if on ==1 and sn ==0:\n return self._fix(context)\n if sn ==1 and on ==0:\n return other._fix(context)\n return self._check_nans(other,context)\n \n c=self.copy_abs()._cmp(other.copy_abs())\n if c ==0:\n c=self.compare_total(other)\n \n if c ==-1:\n ans=other\n else:\n ans=self\n \n return ans._fix(context)\n \n def min_mag(self,other,context=None):\n ''\n other=_convert_other(other,raiseit=True)\n \n if context is None:\n context=getcontext()\n \n if self._is_special or other._is_special:\n \n \n sn=self._isnan()\n on=other._isnan()\n if sn or on:\n if on ==1 and sn ==0:\n return self._fix(context)\n if sn ==1 and on ==0:\n return other._fix(context)\n return self._check_nans(other,context)\n \n c=self.copy_abs()._cmp(other.copy_abs())\n if c ==0:\n c=self.compare_total(other)\n \n if c ==-1:\n ans=self\n else:\n ans=other\n \n return 
ans._fix(context)\n \n def next_minus(self,context=None):\n ''\n if context is None:\n context=getcontext()\n \n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if self._isinfinity()==-1:\n return _NegativeInfinity\n if self._isinfinity()==1:\n return _dec_from_triple(0,'9'*context.prec,context.Etop())\n \n context=context.copy()\n context._set_rounding(ROUND_FLOOR)\n context._ignore_all_flags()\n new_self=self._fix(context)\n if new_self !=self:\n return new_self\n return self.__sub__(_dec_from_triple(0,'1',context.Etiny()-1),\n context)\n \n def next_plus(self,context=None):\n ''\n if context is None:\n context=getcontext()\n \n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if self._isinfinity()==1:\n return _Infinity\n if self._isinfinity()==-1:\n return _dec_from_triple(1,'9'*context.prec,context.Etop())\n \n context=context.copy()\n context._set_rounding(ROUND_CEILING)\n context._ignore_all_flags()\n new_self=self._fix(context)\n if new_self !=self:\n return new_self\n return self.__add__(_dec_from_triple(0,'1',context.Etiny()-1),\n context)\n \n def next_toward(self,other,context=None):\n ''\n\n\n\n\n\n\n \n other=_convert_other(other,raiseit=True)\n \n if context is None:\n context=getcontext()\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n comparison=self._cmp(other)\n if comparison ==0:\n return self.copy_sign(other)\n \n if comparison ==-1:\n ans=self.next_plus(context)\n else:\n ans=self.next_minus(context)\n \n \n if ans._isinfinity():\n context._raise_error(Overflow,\n 'Infinite result from next_toward',\n ans._sign)\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n elif ans.adjusted()0:\n rotdig='0'*topad+rotdig\n elif topad <0:\n rotdig=rotdig[-topad:]\n \n \n rotated=rotdig[torot:]+rotdig[:torot]\n return _dec_from_triple(self._sign,\n rotated.lstrip('0')or '0',self._exp)\n \n def scaleb(self,other,context=None):\n ''\n if context is None:\n context=getcontext()\n \n other=_convert_other(other,raiseit=True)\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if other._exp !=0:\n return context._raise_error(InvalidOperation)\n liminf=-2 *(context.Emax+context.prec)\n limsup=2 *(context.Emax+context.prec)\n if not(liminf <=int(other)<=limsup):\n return context._raise_error(InvalidOperation)\n \n if self._isinfinity():\n return Decimal(self)\n \n d=_dec_from_triple(self._sign,self._int,self._exp+int(other))\n d=d._fix(context)\n return d\n \n def shift(self,other,context=None):\n ''\n if context is None:\n context=getcontext()\n \n other=_convert_other(other,raiseit=True)\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if other._exp !=0:\n return context._raise_error(InvalidOperation)\n if not(-context.prec <=int(other)<=context.prec):\n return context._raise_error(InvalidOperation)\n \n if self._isinfinity():\n return Decimal(self)\n \n \n torot=int(other)\n rotdig=self._int\n topad=context.prec -len(rotdig)\n if topad >0:\n rotdig='0'*topad+rotdig\n elif topad <0:\n rotdig=rotdig[-topad:]\n \n \n if torot <0:\n shifted=rotdig[:torot]\n else:\n shifted=rotdig+'0'*torot\n shifted=shifted[-context.prec:]\n \n return _dec_from_triple(self._sign,\n shifted.lstrip('0')or '0',self._exp)\n \n \n def __reduce__(self):\n return(self.__class__,(str(self),))\n \n def __copy__(self):\n if type(self)is Decimal:\n return self\n return self.__class__(str(self))\n \n def __deepcopy__(self,memo):\n if type(self)is Decimal:\n return self\n return self.__class__(str(self))\n \n \n 
\n def __format__(self,specifier,context=None,_localeconv=None):\n ''\n\n\n\n\n\n\n \n \n \n \n \n \n \n if context is None:\n context=getcontext()\n \n spec=_parse_format_specifier(specifier,_localeconv=_localeconv)\n \n \n if self._is_special:\n sign=_format_sign(self._sign,spec)\n body=str(self.copy_abs())\n if spec['type']=='%':\n body +='%'\n return _format_align(sign,body,spec)\n \n \n if spec['type']is None:\n spec['type']=['g','G'][context.capitals]\n \n \n if spec['type']=='%':\n self=_dec_from_triple(self._sign,self._int,self._exp+2)\n \n \n rounding=context.rounding\n precision=spec['precision']\n if precision is not None:\n if spec['type']in 'eE':\n self=self._round(precision+1,rounding)\n elif spec['type']in 'fF%':\n self=self._rescale(-precision,rounding)\n elif spec['type']in 'gG'and len(self._int)>precision:\n self=self._round(precision,rounding)\n \n \n if not self and self._exp >0 and spec['type']in 'fF%':\n self=self._rescale(0,rounding)\n if not self and spec['no_neg_0']and self._sign:\n adjusted_sign=0\n else:\n adjusted_sign=self._sign\n \n \n leftdigits=self._exp+len(self._int)\n if spec['type']in 'eE':\n if not self and precision is not None:\n dotplace=1 -precision\n else:\n dotplace=1\n elif spec['type']in 'fF%':\n dotplace=leftdigits\n elif spec['type']in 'gG':\n if self._exp <=0 and leftdigits >-6:\n dotplace=leftdigits\n else:\n dotplace=1\n \n \n if dotplace <0:\n intpart='0'\n fracpart='0'*(-dotplace)+self._int\n elif dotplace >len(self._int):\n intpart=self._int+'0'*(dotplace -len(self._int))\n fracpart=''\n else:\n intpart=self._int[:dotplace]or '0'\n fracpart=self._int[dotplace:]\n exp=leftdigits -dotplace\n \n \n \n return _format_number(adjusted_sign,intpart,fracpart,exp,spec)\n \ndef _dec_from_triple(sign,coefficient,exponent,special=False):\n ''\n\n\n\n\n \n \n self=object.__new__(Decimal)\n self._sign=sign\n self._int=coefficient\n self._exp=exponent\n self._is_special=special\n \n return self\n \n \n \n \n_numbers.Number.register(Decimal)\n\n\n\n\nclass _ContextManager(object):\n ''\n\n\n\n \n def __init__(self,new_context):\n self.new_context=new_context.copy()\n def __enter__(self):\n self.saved_context=getcontext()\n setcontext(self.new_context)\n return self.new_context\n def __exit__(self,t,v,tb):\n setcontext(self.saved_context)\n \nclass Context(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,prec=None,rounding=None,Emin=None,Emax=None,\n capitals=None,clamp=None,flags=None,traps=None,\n _ignored_flags=None):\n \n \n try:\n dc=DefaultContext\n except NameError:\n pass\n \n self.prec=prec if prec is not None else dc.prec\n self.rounding=rounding if rounding is not None else dc.rounding\n self.Emin=Emin if Emin is not None else dc.Emin\n self.Emax=Emax if Emax is not None else dc.Emax\n self.capitals=capitals if capitals is not None else dc.capitals\n self.clamp=clamp if clamp is not None else dc.clamp\n \n if _ignored_flags is None:\n self._ignored_flags=[]\n else:\n self._ignored_flags=_ignored_flags\n \n if traps is None:\n self.traps=dc.traps.copy()\n elif not isinstance(traps,dict):\n self.traps=dict((s,int(s in traps))for s in _signals+traps)\n else:\n self.traps=traps\n \n if flags is None:\n self.flags=dict.fromkeys(_signals,0)\n elif not isinstance(flags,dict):\n self.flags=dict((s,int(s in flags))for s in _signals+flags)\n else:\n self.flags=flags\n \n def _set_integer_check(self,name,value,vmin,vmax):\n if not isinstance(value,int):\n raise TypeError(\"%s must be an integer\"%name)\n if vmin =='-inf':\n if value 
>vmax:\n raise ValueError(\"%s must be in [%s, %d]. got: %s\"%(name,vmin,vmax,value))\n elif vmax =='inf':\n if value vmax:\n raise ValueError(\"%s must be in [%d, %d]. got %s\"%(name,vmin,vmax,value))\n return object.__setattr__(self,name,value)\n \n def _set_signal_dict(self,name,d):\n if not isinstance(d,dict):\n raise TypeError(\"%s must be a signal dict\"%d)\n for key in d:\n if not key in _signals:\n raise KeyError(\"%s is not a valid signal dict\"%d)\n for key in _signals:\n if not key in d:\n raise KeyError(\"%s is not a valid signal dict\"%d)\n return object.__setattr__(self,name,d)\n \n def __setattr__(self,name,value):\n if name =='prec':\n return self._set_integer_check(name,value,1,'inf')\n elif name =='Emin':\n return self._set_integer_check(name,value,'-inf',0)\n elif name =='Emax':\n return self._set_integer_check(name,value,0,'inf')\n elif name =='capitals':\n return self._set_integer_check(name,value,0,1)\n elif name =='clamp':\n return self._set_integer_check(name,value,0,1)\n elif name =='rounding':\n if not value in _rounding_modes:\n \n \n raise TypeError(\"%s: invalid rounding mode\"%value)\n return object.__setattr__(self,name,value)\n elif name =='flags'or name =='traps':\n return self._set_signal_dict(name,value)\n elif name =='_ignored_flags':\n return object.__setattr__(self,name,value)\n else:\n raise AttributeError(\n \"'decimal.Context' object has no attribute '%s'\"%name)\n \n def __delattr__(self,name):\n raise AttributeError(\"%s cannot be deleted\"%name)\n \n \n def __reduce__(self):\n flags=[sig for sig,v in self.flags.items()if v]\n traps=[sig for sig,v in self.traps.items()if v]\n return(self.__class__,\n (self.prec,self.rounding,self.Emin,self.Emax,\n self.capitals,self.clamp,flags,traps))\n \n def __repr__(self):\n ''\n s=[]\n s.append('Context(prec=%(prec)d, rounding=%(rounding)s, '\n 'Emin=%(Emin)d, Emax=%(Emax)d, capitals=%(capitals)d, '\n 'clamp=%(clamp)d'\n %vars(self))\n names=[f.__name__ for f,v in self.flags.items()if v]\n s.append('flags=['+', '.join(names)+']')\n names=[t.__name__ for t,v in self.traps.items()if v]\n s.append('traps=['+', '.join(names)+']')\n return ', '.join(s)+')'\n \n def clear_flags(self):\n ''\n for flag in self.flags:\n self.flags[flag]=0\n \n def clear_traps(self):\n ''\n for flag in self.traps:\n self.traps[flag]=0\n \n def _shallow_copy(self):\n ''\n nc=Context(self.prec,self.rounding,self.Emin,self.Emax,\n self.capitals,self.clamp,self.flags,self.traps,\n self._ignored_flags)\n return nc\n \n def copy(self):\n ''\n nc=Context(self.prec,self.rounding,self.Emin,self.Emax,\n self.capitals,self.clamp,\n self.flags.copy(),self.traps.copy(),\n self._ignored_flags)\n return nc\n __copy__=copy\n \n def _raise_error(self,condition,explanation=None,*args):\n ''\n\n\n\n\n\n \n error=_condition_map.get(condition,condition)\n if error in self._ignored_flags:\n \n return error().handle(self,*args)\n \n self.flags[error]=1\n if not self.traps[error]:\n \n return condition().handle(self,*args)\n \n \n \n raise error(explanation)\n \n def _ignore_all_flags(self):\n ''\n return self._ignore_flags(*_signals)\n \n def _ignore_flags(self,*flags):\n ''\n \n \n self._ignored_flags=(self._ignored_flags+list(flags))\n return list(flags)\n \n def _regard_flags(self,*flags):\n ''\n if flags and isinstance(flags[0],(tuple,list)):\n flags=flags[0]\n for flag in flags:\n self._ignored_flags.remove(flag)\n \n \n __hash__=None\n \n def Etiny(self):\n ''\n return int(self.Emin -self.prec+1)\n \n def Etop(self):\n ''\n return int(self.Emax 
-self.prec+1)\n \n def _set_rounding(self,type):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n rounding=self.rounding\n self.rounding=type\n return rounding\n \n def create_decimal(self,num='0'):\n ''\n\n\n \n \n if isinstance(num,str)and(num !=num.strip()or '_'in num):\n return self._raise_error(ConversionSyntax,\n \"trailing or leading whitespace and \"\n \"underscores are not permitted.\")\n \n d=Decimal(num,context=self)\n if d._isnan()and len(d._int)>self.prec -self.clamp:\n return self._raise_error(ConversionSyntax,\n \"diagnostic info too long in NaN\")\n return d._fix(self)\n \n def create_decimal_from_float(self,f):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n d=Decimal.from_float(f)\n return d._fix(self)\n \n \n def abs(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.__abs__(context=self)\n \n def add(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__add__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def _apply(self,a):\n return str(a._fix(self))\n \n def canonical(self,a):\n ''\n\n\n\n\n\n\n \n if not isinstance(a,Decimal):\n raise TypeError(\"canonical requires a Decimal as an argument.\")\n return a.canonical()\n \n def compare(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.compare(b,context=self)\n \n def compare_signal(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.compare_signal(b,context=self)\n \n def compare_total(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.compare_total(b)\n \n def compare_total_mag(self,a,b):\n ''\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.compare_total_mag(b)\n \n def copy_abs(self,a):\n ''\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.copy_abs()\n \n def copy_decimal(self,a):\n ''\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return Decimal(a)\n \n def copy_negate(self,a):\n ''\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.copy_negate()\n \n def copy_sign(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.copy_sign(b)\n \n def divide(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__truediv__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def divide_int(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__floordiv__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def divmod(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__divmod__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def exp(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.exp(context=self)\n \n def fma(self,a,b,c):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.fma(b,c,context=self)\n \n def is_canonical(self,a):\n ''\n\n\n\n\n\n\n \n if not isinstance(a,Decimal):\n raise TypeError(\"is_canonical requires a Decimal as an argument.\")\n return a.is_canonical()\n \n def is_finite(self,a):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_finite()\n \n def is_infinite(self,a):\n ''\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_infinite()\n \n def is_nan(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_nan()\n \n def is_normal(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_normal(context=self)\n \n def is_qnan(self,a):\n ''\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_qnan()\n \n def is_signed(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_signed()\n \n def is_snan(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_snan()\n \n def is_subnormal(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_subnormal(context=self)\n \n def is_zero(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.is_zero()\n \n def ln(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.ln(context=self)\n \n def log10(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.log10(context=self)\n \n def logb(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.logb(context=self)\n \n def logical_and(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.logical_and(b,context=self)\n \n def logical_invert(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.logical_invert(context=self)\n \n def logical_or(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.logical_or(b,context=self)\n \n def logical_xor(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.logical_xor(b,context=self)\n \n def max(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.max(b,context=self)\n \n def max_mag(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.max_mag(b,context=self)\n \n def min(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.min(b,context=self)\n \n def min_mag(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.min_mag(b,context=self)\n \n def minus(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.__neg__(context=self)\n \n def multiply(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__mul__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def next_minus(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.next_minus(context=self)\n \n def next_plus(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.next_plus(context=self)\n \n def next_toward(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.next_toward(b,context=self)\n \n def normalize(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.normalize(context=self)\n \n 
def number_class(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.number_class(context=self)\n \n def plus(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.__pos__(context=self)\n \n def power(self,a,b,modulo=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__pow__(b,modulo,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def quantize(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.quantize(b,context=self)\n \n def radix(self):\n ''\n\n\n\n \n return Decimal(10)\n \n def remainder(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__mod__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def remainder_near(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.remainder_near(b,context=self)\n \n def rotate(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.rotate(b,context=self)\n \n def same_quantum(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.same_quantum(b)\n \n def scaleb(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.scaleb(b,context=self)\n \n def shift(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.shift(b,context=self)\n \n def sqrt(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.sqrt(context=self)\n \n def subtract(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n r=a.__sub__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else:\n return r\n \n def to_eng_string(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.to_eng_string(context=self)\n \n def to_sci_string(self,a):\n ''\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.__str__(context=self)\n \n def to_integral_exact(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.to_integral_exact(context=self)\n \n def to_integral_value(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True)\n return a.to_integral_value(context=self)\n \n \n to_integral=to_integral_value\n \nclass _WorkRep(object):\n __slots__=('sign','int','exp')\n \n \n \n \n def __init__(self,value=None):\n if value is None:\n self.sign=None\n self.int=0\n self.exp=None\n elif isinstance(value,Decimal):\n self.sign=value._sign\n self.int=int(value._int)\n self.exp=value._exp\n else:\n \n self.sign=value[0]\n self.int=value[1]\n self.exp=value[2]\n \n def __repr__(self):\n return \"(%r, %r, %r)\"%(self.sign,self.int,self.exp)\n \n \n \ndef _normalize(op1,op2,prec=0):\n ''\n\n\n \n if op1.exp =0:\n return n *10 **e\n else:\n \n str_n=str(abs(n))\n 
val_n=len(str_n)-len(str_n.rstrip('0'))\n return None if val_n <-e else n //10 **-e\n \ndef _sqrt_nearest(n,a):\n ''\n\n\n\n\n \n if n <=0 or a <=0:\n raise ValueError(\"Both arguments to _sqrt_nearest should be positive.\")\n \n b=0\n while a !=b:\n b,a=a,a --n //a >>1\n return a\n \ndef _rshift_nearest(x,shift):\n ''\n\n\n \n b,q=1 <>shift\n return q+(2 *(x&(b -1))+(q&1)>b)\n \ndef _div_nearest(a,b):\n ''\n\n\n \n q,r=divmod(a,b)\n return q+(2 *r+(q&1)>b)\n \ndef _ilog(x,M,L=8):\n ''\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n y=x -M\n \n R=0\n while(R <=L and abs(y)<=M or\n R >L and abs(y)>>R -L >=M):\n y=_div_nearest((M *y)<<1,\n M+_sqrt_nearest(M *(M+_rshift_nearest(y,R)),M))\n R +=1\n \n \n T=-int(-10 *len(str(M))//(3 *L))\n yshift=_rshift_nearest(y,R)\n w=_div_nearest(M,T)\n for k in range(T -1,0,-1):\n w=_div_nearest(M,k)-_div_nearest(yshift *w,M)\n \n return _div_nearest(w *y,M)\n \ndef _dlog10(c,e,p):\n ''\n\n \n \n \n \n p +=2\n \n \n \n \n \n l=len(str(c))\n f=e+l -(e+l >=1)\n \n if p >0:\n M=10 **p\n k=e+p -f\n if k >=0:\n c *=10 **k\n else:\n c=_div_nearest(c,10 **-k)\n \n log_d=_ilog(c,M)\n log_10=_log10_digits(p)\n log_d=_div_nearest(log_d *M,log_10)\n log_tenpower=f *M\n else:\n log_d=0\n log_tenpower=_div_nearest(f,10 **-p)\n \n return _div_nearest(log_tenpower+log_d,100)\n \ndef _dlog(c,e,p):\n ''\n\n \n \n \n \n p +=2\n \n \n \n \n l=len(str(c))\n f=e+l -(e+l >=1)\n \n \n if p >0:\n k=e+p -f\n if k >=0:\n c *=10 **k\n else:\n c=_div_nearest(c,10 **-k)\n \n \n log_d=_ilog(c,10 **p)\n else:\n \n log_d=0\n \n \n if f:\n extra=len(str(abs(f)))-1\n if p+extra >=0:\n \n \n f_log_ten=_div_nearest(f *_log10_digits(p+extra),10 **extra)\n else:\n f_log_ten=0\n else:\n f_log_ten=0\n \n \n return _div_nearest(f_log_ten+log_d,100)\n \nclass _Log10Memoize(object):\n ''\n\n \n def __init__(self):\n self.digits=\"23025850929940456840179914546843642076011014886\"\n \n def getdigits(self,p):\n ''\n\n\n \n \n \n \n \n if p <0:\n raise ValueError(\"p should be nonnegative\")\n \n if p >=len(self.digits):\n \n \n extra=3\n while True:\n \n M=10 **(p+extra+2)\n digits=str(_div_nearest(_ilog(10 *M,M),100))\n if digits[-extra:]!='0'*extra:\n break\n extra +=3\n \n \n self.digits=digits.rstrip('0')[:-1]\n return int(self.digits[:p+1])\n \n_log10_digits=_Log10Memoize().getdigits\n\ndef _iexp(x,M,L=8):\n ''\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n R=_nbits((x <=0:\n cshift=c *10 **shift\n else:\n cshift=c //10 **-shift\n quot,rem=divmod(cshift,_log10_digits(q))\n \n \n rem=_div_nearest(rem,10 **extra)\n \n \n return _div_nearest(_iexp(rem,10 **p),1000),quot -p+3\n \ndef _dpower(xc,xe,yc,ye,p):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n b=len(str(abs(yc)))+ye\n \n \n lxc=_dlog(xc,xe,p+b+1)\n \n \n shift=ye -b\n if shift >=0:\n pc=lxc *yc *10 **shift\n else:\n pc=_div_nearest(lxc *yc,10 **-shift)\n \n if pc ==0:\n \n \n if((len(str(xc))+xe >=1)==(yc >0)):\n coeff,exp=10 **(p -1)+1,1 -p\n else:\n coeff,exp=10 **p -1,-p\n else:\n coeff,exp=_dexp(pc,-(p+1),p+1)\n coeff=_div_nearest(coeff,10)\n exp +=1\n \n return coeff,exp\n \ndef _log10_lb(c,correction={\n'1':100,'2':70,'3':53,'4':40,'5':31,\n'6':23,'7':16,'8':10,'9':5}):\n ''\n if c <=0:\n raise ValueError(\"The argument to _log10_lb should be nonnegative.\")\n str_c=str(c)\n return 100 *len(str_c)-correction[str_c[0]]\n \n \n \ndef _convert_other(other,raiseit=False,allow_float=False):\n ''\n\n\n\n\n\n \n if isinstance(other,Decimal):\n return other\n if isinstance(other,int):\n return Decimal(other)\n 
if allow_float and isinstance(other,float):\n return Decimal.from_float(other)\n \n if raiseit:\n raise TypeError(\"Unable to convert %s to Decimal\"%other)\n return NotImplemented\n \ndef _convert_for_comparison(self,other,equality_op=False):\n ''\n\n\n\n\n \n if isinstance(other,Decimal):\n return self,other\n \n \n \n \n \n if isinstance(other,_numbers.Rational):\n if not self._is_special:\n self=_dec_from_triple(self._sign,\n str(int(self._int)*other.denominator),\n self._exp)\n return self,Decimal(other.numerator)\n \n \n \n \n if equality_op and isinstance(other,_numbers.Complex)and other.imag ==0:\n other=other.real\n if isinstance(other,float):\n context=getcontext()\n if equality_op:\n context.flags[FloatOperation]=1\n else:\n context._raise_error(FloatOperation,\n \"strict semantics for mixing floats and Decimals are enabled\")\n return self,Decimal.from_float(other)\n return NotImplemented,NotImplemented\n \n \n \n \n \n \n \nDefaultContext=Context(\nprec=28,rounding=ROUND_HALF_EVEN,\ntraps=[DivisionByZero,Overflow,InvalidOperation],\nflags=[],\nEmax=999999,\nEmin=-999999,\ncapitals=1,\nclamp=0\n)\n\n\n\n\n\n\nBasicContext=Context(\nprec=9,rounding=ROUND_HALF_UP,\ntraps=[DivisionByZero,Overflow,InvalidOperation,Clamped,Underflow],\nflags=[],\n)\n\nExtendedContext=Context(\nprec=9,rounding=ROUND_HALF_EVEN,\ntraps=[],\nflags=[],\n)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport re\n_parser=re.compile(r\"\"\" # A numeric string consists of:\n# \\s*\n (?P[-+])? # an optional sign, followed by either...\n (\n (?=\\d|\\.\\d) # ...a number (with at least one digit)\n (?P\\d*) # having a (possibly empty) integer part\n (\\.(?P\\d*))? # followed by an optional fractional part\n (E(?P[-+]?\\d+))? # followed by an optional exponent, or...\n |\n Inf(inity)? # ...an infinity, or...\n |\n (?Ps)? 
# ...an (optionally signaling)\n NaN # NaN\n (?P\\d*) # with (possibly empty) diagnostic info.\n )\n# \\s*\n \\Z\n\"\"\",re.VERBOSE |re.IGNORECASE).match\n\n_all_zeros=re.compile('0*$').match\n_exact_half=re.compile('50*$').match\n\n\n\n\n\n\n\n\n\n\n_parse_format_specifier_regex=re.compile(r\"\"\"\\A\n(?:\n (?P.)?\n (?P[<>=^])\n)?\n(?P[-+ ])?\n(?Pz)?\n(?P\\#)?\n(?P0)?\n(?P(?!0)\\d+)?\n(?P,)?\n(?:\\.(?P0|(?!0)\\d+))?\n(?P[eEfFgGn%])?\n\\Z\n\"\"\",re.VERBOSE |re.DOTALL)\n\ndel re\n\n\n\n\ntry:\n import locale as _locale\nexcept ImportError:\n pass\n \ndef _parse_format_specifier(format_spec,_localeconv=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n m=_parse_format_specifier_regex.match(format_spec)\n if m is None:\n raise ValueError(\"Invalid format specifier: \"+format_spec)\n \n \n format_dict=m.groupdict()\n \n \n \n fill=format_dict['fill']\n align=format_dict['align']\n format_dict['zeropad']=(format_dict['zeropad']is not None)\n if format_dict['zeropad']:\n if fill is not None:\n raise ValueError(\"Fill character conflicts with '0'\"\n \" in format specifier: \"+format_spec)\n if align is not None:\n raise ValueError(\"Alignment conflicts with '0' in \"\n \"format specifier: \"+format_spec)\n format_dict['fill']=fill or ' '\n \n \n \n format_dict['align']=align or '>'\n \n \n if format_dict['sign']is None:\n format_dict['sign']='-'\n \n \n format_dict['minimumwidth']=int(format_dict['minimumwidth']or '0')\n if format_dict['precision']is not None:\n format_dict['precision']=int(format_dict['precision'])\n \n \n \n if format_dict['precision']==0:\n if format_dict['type']is None or format_dict['type']in 'gGn':\n format_dict['precision']=1\n \n \n \n if format_dict['type']=='n':\n \n format_dict['type']='g'\n if _localeconv is None:\n _localeconv=_locale.localeconv()\n if format_dict['thousands_sep']is not None:\n raise ValueError(\"Explicit thousands separator conflicts with \"\n \"'n' type in format specifier: \"+format_spec)\n format_dict['thousands_sep']=_localeconv['thousands_sep']\n format_dict['grouping']=_localeconv['grouping']\n format_dict['decimal_point']=_localeconv['decimal_point']\n else:\n if format_dict['thousands_sep']is None:\n format_dict['thousands_sep']=''\n format_dict['grouping']=[3,0]\n format_dict['decimal_point']='.'\n \n return format_dict\n \ndef _format_align(sign,body,spec):\n ''\n\n\n\n\n \n \n minimumwidth=spec['minimumwidth']\n fill=spec['fill']\n padding=fill *(minimumwidth -len(sign)-len(body))\n \n align=spec['align']\n if align =='<':\n result=sign+body+padding\n elif align =='>':\n result=padding+sign+body\n elif align =='=':\n result=sign+padding+body\n elif align =='^':\n half=len(padding)//2\n result=padding[:half]+sign+body+padding[half:]\n else:\n raise ValueError('Unrecognised alignment field')\n \n return result\n \ndef _group_lengths(grouping):\n ''\n\n\n \n \n \n \n \n \n \n \n \n from itertools import chain,repeat\n if not grouping:\n return[]\n elif grouping[-1]==0 and len(grouping)>=2:\n return chain(grouping[:-1],repeat(grouping[-2]))\n elif grouping[-1]==_locale.CHAR_MAX:\n return grouping[:-1]\n else:\n raise ValueError('unrecognised format for grouping')\n \ndef _insert_thousands_sep(digits,spec,min_width=1):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n sep=spec['thousands_sep']\n grouping=spec['grouping']\n \n groups=[]\n for l in _group_lengths(grouping):\n if l <=0:\n raise ValueError(\"group length should be positive\")\n \n l=min(max(len(digits),min_width,1),l)\n groups.append('0'*(l -len(digits))+digits[-l:])\n digits=digits[:-l]\n 
min_width -=l\n if not digits and min_width <=0:\n break\n min_width -=len(sep)\n else:\n l=max(len(digits),min_width,1)\n groups.append('0'*(l -len(digits))+digits[-l:])\n return sep.join(reversed(groups))\n \ndef _format_sign(is_negative,spec):\n ''\n \n if is_negative:\n return '-'\n elif spec['sign']in ' +':\n return spec['sign']\n else:\n return ''\n \ndef _format_number(is_negative,intpart,fracpart,exp,spec):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n sign=_format_sign(is_negative,spec)\n \n if fracpart or spec['alt']:\n fracpart=spec['decimal_point']+fracpart\n \n if exp !=0 or spec['type']in 'eE':\n echar={'E':'E','e':'e','G':'E','g':'e'}[spec['type']]\n fracpart +=\"{0}{1:+}\".format(echar,exp)\n if spec['type']=='%':\n fracpart +='%'\n \n if spec['zeropad']:\n min_width=spec['minimumwidth']-len(fracpart)-len(sign)\n else:\n min_width=0\n intpart=_insert_thousands_sep(intpart,spec,min_width)\n \n return _format_align(sign,intpart+fracpart,spec)\n \n \n \n \n \n_Infinity=Decimal('Inf')\n_NegativeInfinity=Decimal('-Inf')\n_NaN=Decimal('NaN')\n_Zero=Decimal(0)\n_One=Decimal(1)\n_NegativeOne=Decimal(-1)\n\n\n_SignedInfinity=(_Infinity,_NegativeInfinity)\n\n\n\n_PyHASH_MODULUS=sys.hash_info.modulus\n\n_PyHASH_INF=sys.hash_info.inf\n_PyHASH_NAN=sys.hash_info.nan\n\n\n_PyHASH_10INV=pow(10,_PyHASH_MODULUS -2,_PyHASH_MODULUS)\ndel sys\n", ["collections", "contextvars", "itertools", "locale", "math", "numbers", "re", "sys"]], "ntpath": [".py", "\n''\n\n\n\n\n\n\n\n\ncurdir='.'\npardir='..'\nextsep='.'\nsep='\\\\'\npathsep=';'\naltsep='/'\ndefpath='.;C:\\\\bin'\ndevnull='nul'\n\nimport os\nimport sys\nimport stat\nimport genericpath\nfrom genericpath import *\n\n\n__all__=[\"normcase\",\"isabs\",\"join\",\"splitdrive\",\"splitroot\",\"split\",\"splitext\",\n\"basename\",\"dirname\",\"commonprefix\",\"getsize\",\"getmtime\",\n\"getatime\",\"getctime\",\"islink\",\"exists\",\"lexists\",\"isdir\",\"isfile\",\n\"ismount\",\"expanduser\",\"expandvars\",\"normpath\",\"abspath\",\n\"curdir\",\"pardir\",\"sep\",\"pathsep\",\"defpath\",\"altsep\",\n\"extsep\",\"devnull\",\"realpath\",\"supports_unicode_filenames\",\"relpath\",\n\"samefile\",\"sameopenfile\",\"samestat\",\"commonpath\",\"isjunction\"]\n\ndef _get_bothseps(path):\n if isinstance(path,bytes):\n return b'\\\\/'\n else:\n return '\\\\/'\n \n \n \n \n \ntry:\n from _winapi import(\n LCMapStringEx as _LCMapStringEx,\n LOCALE_NAME_INVARIANT as _LOCALE_NAME_INVARIANT,\n LCMAP_LOWERCASE as _LCMAP_LOWERCASE)\n \n def normcase(s):\n ''\n\n\n \n s=os.fspath(s)\n if not s:\n return s\n if isinstance(s,bytes):\n encoding=sys.getfilesystemencoding()\n s=s.decode(encoding,'surrogateescape').replace('/','\\\\')\n s=_LCMapStringEx(_LOCALE_NAME_INVARIANT,\n _LCMAP_LOWERCASE,s)\n return s.encode(encoding,'surrogateescape')\n else:\n return _LCMapStringEx(_LOCALE_NAME_INVARIANT,\n _LCMAP_LOWERCASE,\n s.replace('/','\\\\'))\nexcept ImportError:\n def normcase(s):\n ''\n\n\n \n s=os.fspath(s)\n if isinstance(s,bytes):\n return os.fsencode(os.fsdecode(s).replace('/','\\\\').lower())\n return s.replace('/','\\\\').lower()\n \n \n \n \n \n \n \n \ndef isabs(s):\n ''\n s=os.fspath(s)\n if isinstance(s,bytes):\n sep=b'\\\\'\n altsep=b'/'\n colon_sep=b':\\\\'\n else:\n sep='\\\\'\n altsep='/'\n colon_sep=':\\\\'\n s=s[:3].replace(altsep,sep)\n \n \n if s.startswith(sep)or s.startswith(colon_sep,1):\n return True\n return False\n \n \n \ndef join(path,*paths):\n path=os.fspath(path)\n if isinstance(path,bytes):\n sep=b'\\\\'\n seps=b'\\\\/'\n colon=b':'\n else:\n 
sep='\\\\'\n seps='\\\\/'\n colon=':'\n try:\n if not paths:\n path[:0]+sep\n result_drive,result_root,result_path=splitroot(path)\n for p in map(os.fspath,paths):\n p_drive,p_root,p_path=splitroot(p)\n if p_root:\n \n if p_drive or not result_drive:\n result_drive=p_drive\n result_root=p_root\n result_path=p_path\n continue\n elif p_drive and p_drive !=result_drive:\n if p_drive.lower()!=result_drive.lower():\n \n result_drive=p_drive\n result_root=p_root\n result_path=p_path\n continue\n \n result_drive=p_drive\n \n if result_path and result_path[-1]not in seps:\n result_path=result_path+sep\n result_path=result_path+p_path\n \n if(result_path and not result_root and\n result_drive and result_drive[-1:]not in colon+seps):\n return result_drive+sep+result_path\n return result_drive+result_root+result_path\n except(TypeError,AttributeError,BytesWarning):\n genericpath._check_arg_types('join',path,*paths)\n raise\n \n \n \n \n \ndef splitdrive(p):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n drive,root,tail=splitroot(p)\n return drive,root+tail\n \n \ndef splitroot(p):\n ''\n\n\n\n\n\n\n\n\n \n p=os.fspath(p)\n if isinstance(p,bytes):\n sep=b'\\\\'\n altsep=b'/'\n colon=b':'\n unc_prefix=b'\\\\\\\\?\\\\UNC\\\\'\n empty=b''\n else:\n sep='\\\\'\n altsep='/'\n colon=':'\n unc_prefix='\\\\\\\\?\\\\UNC\\\\'\n empty=''\n normp=p.replace(altsep,sep)\n if normp[:1]==sep:\n if normp[1:2]==sep:\n \n \n start=8 if normp[:8].upper()==unc_prefix else 2\n index=normp.find(sep,start)\n if index ==-1:\n return p,empty,empty\n index2=normp.find(sep,index+1)\n if index2 ==-1:\n return p,empty,empty\n return p[:index2],p[index2:index2+1],p[index2+1:]\n else:\n \n return empty,p[:1],p[1:]\n elif normp[1:2]==colon:\n if normp[2:3]==sep:\n \n return p[:2],p[2:3],p[3:]\n else:\n \n return p[:2],empty,p[2:]\n else:\n \n return empty,empty,p\n \n \n \n \n \n \n \ndef split(p):\n ''\n\n\n \n p=os.fspath(p)\n seps=_get_bothseps(p)\n d,r,p=splitroot(p)\n \n i=len(p)\n while i and p[i -1]not in seps:\n i -=1\n head,tail=p[:i],p[i:]\n return d+r+head.rstrip(seps),tail\n \n \n \n \n \n \n \ndef splitext(p):\n p=os.fspath(p)\n if isinstance(p,bytes):\n return genericpath._splitext(p,b'\\\\',b'/',b'.')\n else:\n return genericpath._splitext(p,'\\\\','/','.')\nsplitext.__doc__=genericpath._splitext.__doc__\n\n\n\n\ndef basename(p):\n ''\n return split(p)[1]\n \n \n \n \ndef dirname(p):\n ''\n return split(p)[0]\n \n \n \n \nif hasattr(os.stat_result,'st_reparse_tag'):\n def isjunction(path):\n ''\n try:\n st=os.lstat(path)\n except(OSError,ValueError,AttributeError):\n return False\n return bool(st.st_reparse_tag ==stat.IO_REPARSE_TAG_MOUNT_POINT)\nelse:\n def isjunction(path):\n ''\n os.fspath(path)\n return False\n \n \n \n \ndef lexists(path):\n ''\n try:\n st=os.lstat(path)\n except(OSError,ValueError):\n return False\n return True\n \n \n \n \n \n \n \n \n \n \n \ntry:\n from nt import _getvolumepathname\nexcept ImportError:\n _getvolumepathname=None\ndef ismount(path):\n ''\n \n path=os.fspath(path)\n seps=_get_bothseps(path)\n path=abspath(path)\n drive,root,rest=splitroot(path)\n if drive and drive[0]in seps:\n return not rest\n if root and not rest:\n return True\n \n if _getvolumepathname:\n x=path.rstrip(seps)\n y=_getvolumepathname(path).rstrip(seps)\n return x.casefold()==y.casefold()\n else:\n return False\n \n \n \n \n \n \n \n \n \n \n \ndef expanduser(path):\n ''\n\n \n path=os.fspath(path)\n if isinstance(path,bytes):\n tilde=b'~'\n else:\n tilde='~'\n if not path.startswith(tilde):\n return path\n 
i,n=1,len(path)\n while i 0 and comps[i -1]!=pardir:\n del comps[i -1:i+1]\n i -=1\n elif i ==0 and root:\n del comps[i]\n else:\n i +=1\n else:\n i +=1\n \n if not prefix and not comps:\n comps.append(curdir)\n return prefix+sep.join(comps)\n \nelse:\n def normpath(path):\n ''\n path=os.fspath(path)\n if isinstance(path,bytes):\n return os.fsencode(_path_normpath(os.fsdecode(path)))or b\".\"\n return _path_normpath(path)or \".\"\n \n \ndef _abspath_fallback(path):\n ''\n\n\n\n \n \n path=os.fspath(path)\n if not isabs(path):\n if isinstance(path,bytes):\n cwd=os.getcwdb()\n else:\n cwd=os.getcwd()\n path=join(cwd,path)\n return normpath(path)\n \n \ntry:\n from nt import _getfullpathname\n \nexcept ImportError:\n abspath=_abspath_fallback\n \nelse:\n def abspath(path):\n ''\n try:\n return _getfullpathname(normpath(path))\n except(OSError,ValueError):\n return _abspath_fallback(path)\n \ntry:\n from nt import _getfinalpathname,readlink as _nt_readlink\nexcept ImportError:\n\n realpath=abspath\nelse:\n def _readlink_deep(path):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n allowed_winerror=1,2,3,5,21,32,50,67,87,4390,4392,4393\n \n seen=set()\n while normcase(path)not in seen:\n seen.add(normcase(path))\n try:\n old_path=path\n path=_nt_readlink(path)\n \n \n if not isabs(path):\n \n \n \n if not islink(old_path):\n path=old_path\n break\n path=normpath(join(dirname(old_path),path))\n except OSError as ex:\n if ex.winerror in allowed_winerror:\n break\n raise\n except ValueError:\n \n break\n return path\n \n def _getfinalpathname_nonstrict(path):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n allowed_winerror=1,2,3,5,21,32,50,53,65,67,87,123,161,1920,1921\n \n \n \n tail=path[:0]\n while path:\n try:\n path=_getfinalpathname(path)\n return join(path,tail)if tail else path\n except OSError as ex:\n if ex.winerror not in allowed_winerror:\n raise\n try:\n \n \n \n new_path=_readlink_deep(path)\n if new_path !=path:\n return join(new_path,tail)if tail else new_path\n except OSError:\n \n pass\n path,name=split(path)\n \n \n \n if path and not name:\n return path+tail\n tail=join(name,tail)if tail else name\n return tail\n \n def realpath(path,*,strict=False):\n path=normpath(path)\n if isinstance(path,bytes):\n prefix=b'\\\\\\\\?\\\\'\n unc_prefix=b'\\\\\\\\?\\\\UNC\\\\'\n new_unc_prefix=b'\\\\\\\\'\n cwd=os.getcwdb()\n \n if normcase(path)==normcase(os.fsencode(devnull)):\n return b'\\\\\\\\.\\\\NUL'\n else:\n prefix='\\\\\\\\?\\\\'\n unc_prefix='\\\\\\\\?\\\\UNC\\\\'\n new_unc_prefix='\\\\\\\\'\n cwd=os.getcwd()\n \n if normcase(path)==normcase(devnull):\n return '\\\\\\\\.\\\\NUL'\n had_prefix=path.startswith(prefix)\n if not had_prefix and not isabs(path):\n path=join(cwd,path)\n try:\n path=_getfinalpathname(path)\n initial_winerror=0\n except OSError as ex:\n if strict:\n raise\n initial_winerror=ex.winerror\n path=_getfinalpathname_nonstrict(path)\n \n \n \n if not had_prefix and path.startswith(prefix):\n \n \n if path.startswith(unc_prefix):\n spath=new_unc_prefix+path[len(unc_prefix):]\n else:\n spath=path[len(prefix):]\n \n try:\n if _getfinalpathname(spath)==path:\n path=spath\n except OSError as ex:\n \n \n if ex.winerror ==initial_winerror:\n path=spath\n return path\n \n \n \nsupports_unicode_filenames=True\n\ndef relpath(path,start=None):\n ''\n path=os.fspath(path)\n if isinstance(path,bytes):\n sep=b'\\\\'\n curdir=b'.'\n pardir=b'..'\n else:\n sep='\\\\'\n curdir='.'\n pardir='..'\n \n if start is None:\n start=curdir\n \n if not path:\n raise ValueError(\"no path 
specified\")\n \n start=os.fspath(start)\n try:\n start_abs=abspath(normpath(start))\n path_abs=abspath(normpath(path))\n start_drive,_,start_rest=splitroot(start_abs)\n path_drive,_,path_rest=splitroot(path_abs)\n if normcase(start_drive)!=normcase(path_drive):\n raise ValueError(\"path is on mount %r, start on mount %r\"%(\n path_drive,start_drive))\n \n start_list=[x for x in start_rest.split(sep)if x]\n path_list=[x for x in path_rest.split(sep)if x]\n \n i=0\n for e1,e2 in zip(start_list,path_list):\n if normcase(e1)!=normcase(e2):\n break\n i +=1\n \n rel_list=[pardir]*(len(start_list)-i)+path_list[i:]\n if not rel_list:\n return curdir\n return join(*rel_list)\n except(TypeError,ValueError,AttributeError,BytesWarning,DeprecationWarning):\n genericpath._check_arg_types('relpath',path,start)\n raise\n \n \n \n \n \n \n \n \n \n \n \n \ndef commonpath(paths):\n ''\n \n if not paths:\n raise ValueError('commonpath() arg is an empty sequence')\n \n paths=tuple(map(os.fspath,paths))\n if isinstance(paths[0],bytes):\n sep=b'\\\\'\n altsep=b'/'\n curdir=b'.'\n else:\n sep='\\\\'\n altsep='/'\n curdir='.'\n \n try:\n drivesplits=[splitroot(p.replace(altsep,sep).lower())for p in paths]\n split_paths=[p.split(sep)for d,r,p in drivesplits]\n \n if len({r for d,r,p in drivesplits})!=1:\n raise ValueError(\"Can't mix absolute and relative paths\")\n \n \n \n \n if len({d for d,r,p in drivesplits})!=1:\n raise ValueError(\"Paths don't have the same drive\")\n \n drive,root,path=splitroot(paths[0].replace(altsep,sep))\n common=path.split(sep)\n common=[c for c in common if c and c !=curdir]\n \n split_paths=[[c for c in s if c and c !=curdir]for s in split_paths]\n s1=min(split_paths)\n s2=max(split_paths)\n for i,c in enumerate(s1):\n if c !=s2[i]:\n common=common[:i]\n break\n else:\n common=common[:len(s1)]\n \n return drive+root+sep.join(common)\n except(TypeError,AttributeError):\n genericpath._check_arg_types('commonpath',*paths)\n raise\n \n \ntry:\n\n\n\n from nt import _path_isdir as isdir\n from nt import _path_isfile as isfile\n from nt import _path_islink as islink\n from nt import _path_exists as exists\nexcept ImportError:\n\n pass\n \n \ntry:\n from nt import _path_isdevdrive\nexcept ImportError:\n def isdevdrive(path):\n ''\n \n return False\nelse:\n def isdevdrive(path):\n ''\n try:\n return _path_isdevdrive(abspath(path))\n except OSError:\n return False\n", ["_winapi", "genericpath", "nt", "os", "stat", "string", "sys"]], "tokenize": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__author__='Ka-Ping Yee '\n__credits__=('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '\n'Skip Montanaro, Raymond Hettinger, Trent Nelson, '\n'Michael Foord')\nfrom builtins import open as _builtin_open\nfrom codecs import lookup,BOM_UTF8\nimport collections\nimport functools\nfrom io import TextIOWrapper\nimport itertools as _itertools\nimport re\nimport sys\nfrom token import *\nfrom token import EXACT_TOKEN_TYPES\nimport _tokenize\n\ncookie_re=re.compile(r'^[ \\t\\f]*#.*?coding[:=][ \\t]*([-\\w.]+)',re.ASCII)\nblank_re=re.compile(br'^[ \\t\\f]*(?:[#\\r\\n]|$)',re.ASCII)\n\nimport token\n__all__=token.__all__+[\"tokenize\",\"generate_tokens\",\"detect_encoding\",\n\"untokenize\",\"TokenInfo\"]\ndel token\n\nclass TokenInfo(collections.namedtuple('TokenInfo','type string start end line')):\n def __repr__(self):\n annotated_type='%d (%s)'%(self.type,tok_name[self.type])\n return('TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)'%\n self._replace(type=annotated_type))\n \n @property\n 
def exact_type(self):\n if self.type ==OP and self.string in EXACT_TOKEN_TYPES:\n return EXACT_TOKEN_TYPES[self.string]\n else:\n return self.type\n \ndef group(*choices):return '('+'|'.join(choices)+')'\ndef any(*choices):return group(*choices)+'*'\ndef maybe(*choices):return group(*choices)+'?'\n\n\n\nWhitespace=r'[ \\f\\t]*'\nComment=r'#[^\\r\\n]*'\nIgnore=Whitespace+any(r'\\\\\\r?\\n'+Whitespace)+maybe(Comment)\nName=r'\\w+'\n\nHexnumber=r'0[xX](?:_?[0-9a-fA-F])+'\nBinnumber=r'0[bB](?:_?[01])+'\nOctnumber=r'0[oO](?:_?[0-7])+'\nDecnumber=r'(?:0(?:_?0)*|[1-9](?:_?[0-9])*)'\nIntnumber=group(Hexnumber,Binnumber,Octnumber,Decnumber)\nExponent=r'[eE][-+]?[0-9](?:_?[0-9])*'\nPointfloat=group(r'[0-9](?:_?[0-9])*\\.(?:[0-9](?:_?[0-9])*)?',\nr'\\.[0-9](?:_?[0-9])*')+maybe(Exponent)\nExpfloat=r'[0-9](?:_?[0-9])*'+Exponent\nFloatnumber=group(Pointfloat,Expfloat)\nImagnumber=group(r'[0-9](?:_?[0-9])*[jJ]',Floatnumber+r'[jJ]')\nNumber=group(Imagnumber,Floatnumber,Intnumber)\n\n\ndef _all_string_prefixes():\n\n\n\n _valid_string_prefixes=['b','r','u','f','br','fr']\n \n result={''}\n for prefix in _valid_string_prefixes:\n for t in _itertools.permutations(prefix):\n \n \n for u in _itertools.product(*[(c,c.upper())for c in t]):\n result.add(''.join(u))\n return result\n \n@functools.lru_cache\ndef _compile(expr):\n return re.compile(expr,re.UNICODE)\n \n \n \nStringPrefix=group(*_all_string_prefixes())\n\n\nSingle=r\"[^'\\\\]*(?:\\\\.[^'\\\\]*)*'\"\n\nDouble=r'[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*\"'\n\nSingle3=r\"[^'\\\\]*(?:(?:\\\\.|'(?!''))[^'\\\\]*)*'''\"\n\nDouble3=r'[^\"\\\\]*(?:(?:\\\\.|\"(?!\"\"))[^\"\\\\]*)*\"\"\"'\nTriple=group(StringPrefix+\"'''\",StringPrefix+'\"\"\"')\n\nString=group(StringPrefix+r\"'[^\\n'\\\\]*(?:\\\\.[^\\n'\\\\]*)*'\",\nStringPrefix+r'\"[^\\n\"\\\\]*(?:\\\\.[^\\n\"\\\\]*)*\"')\n\n\n\n\nSpecial=group(*map(re.escape,sorted(EXACT_TOKEN_TYPES,reverse=True)))\nFunny=group(r'\\r?\\n',Special)\n\nPlainToken=group(Number,Funny,String,Name)\nToken=Ignore+PlainToken\n\n\nContStr=group(StringPrefix+r\"'[^\\n'\\\\]*(?:\\\\.[^\\n'\\\\]*)*\"+\ngroup(\"'\",r'\\\\\\r?\\n'),\nStringPrefix+r'\"[^\\n\"\\\\]*(?:\\\\.[^\\n\"\\\\]*)*'+\ngroup('\"',r'\\\\\\r?\\n'))\nPseudoExtras=group(r'\\\\\\r?\\n|\\Z',Comment,Triple)\nPseudoToken=Whitespace+group(PseudoExtras,Number,Funny,ContStr,Name)\n\n\n\n\nendpats={}\nfor _prefix in _all_string_prefixes():\n endpats[_prefix+\"'\"]=Single\n endpats[_prefix+'\"']=Double\n endpats[_prefix+\"'''\"]=Single3\n endpats[_prefix+'\"\"\"']=Double3\ndel _prefix\n\n\n\nsingle_quoted=set()\ntriple_quoted=set()\nfor t in _all_string_prefixes():\n for u in(t+'\"',t+\"'\"):\n single_quoted.add(u)\n for u in(t+'\"\"\"',t+\"'''\"):\n triple_quoted.add(u)\ndel t,u\n\ntabsize=8\n\nclass TokenError(Exception):pass\n\n\nclass StopTokenizing(Exception):pass\n\nclass Untokenizer:\n\n def __init__(self):\n self.tokens=[]\n self.prev_row=1\n self.prev_col=0\n self.encoding=None\n \n def add_whitespace(self,start):\n row,col=start\n if row =len(indent):\n self.tokens.append(indent)\n self.prev_col=len(indent)\n startline=False\n elif tok_type ==FSTRING_MIDDLE:\n if '{'in token or '}'in token:\n end_line,end_col=end\n end=(end_line,end_col+token.count('{')+token.count('}'))\n token=re.sub('{','{{',token)\n token=re.sub('}','}}',token)\n \n \n self.add_whitespace(start)\n self.tokens.append(token)\n self.prev_row,self.prev_col=end\n if tok_type in(NEWLINE,NL):\n self.prev_row +=1\n self.prev_col=0\n return \"\".join(self.tokens)\n \n def compat(self,token,iterable):\n indents=[]\n 
toks_append=self.tokens.append\n startline=token[0]in(NEWLINE,NL)\n prevstring=False\n \n for tok in _itertools.chain([token],iterable):\n toknum,tokval=tok[:2]\n if toknum ==ENCODING:\n self.encoding=tokval\n continue\n \n if toknum in(NAME,NUMBER):\n tokval +=' '\n \n \n if toknum ==STRING:\n if prevstring:\n tokval=' '+tokval\n prevstring=True\n else:\n prevstring=False\n \n if toknum ==INDENT:\n indents.append(tokval)\n continue\n elif toknum ==DEDENT:\n indents.pop()\n continue\n elif toknum in(NEWLINE,NL):\n startline=True\n elif startline and indents:\n toks_append(indents[-1])\n startline=False\n elif toknum ==FSTRING_MIDDLE:\n if '{'in tokval or '}'in tokval:\n tokval=re.sub('{','{{',tokval)\n tokval=re.sub('}','}}',tokval)\n \n toks_append(tokval)\n \n \ndef untokenize(iterable):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n ut=Untokenizer()\n out=ut.untokenize(iterable)\n if ut.encoding is not None:\n out=out.encode(ut.encoding)\n return out\n \n \ndef _get_normal_name(orig_enc):\n ''\n \n enc=orig_enc[:12].lower().replace(\"_\",\"-\")\n if enc ==\"utf-8\"or enc.startswith(\"utf-8-\"):\n return \"utf-8\"\n if enc in(\"latin-1\",\"iso-8859-1\",\"iso-latin-1\")or\\\n enc.startswith((\"latin-1-\",\"iso-8859-1-\",\"iso-latin-1-\")):\n return \"iso-8859-1\"\n return orig_enc\n \ndef detect_encoding(readline):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try:\n filename=readline.__self__.name\n except AttributeError:\n filename=None\n bom_found=False\n encoding=None\n default='utf-8'\n def read_or_stop():\n try:\n return readline()\n except StopIteration:\n return b''\n \n def find_cookie(line):\n try:\n \n \n \n line_string=line.decode('utf-8')\n except UnicodeDecodeError:\n msg=\"invalid or missing encoding declaration\"\n if filename is not None:\n msg='{} for {!r}'.format(msg,filename)\n raise SyntaxError(msg)\n \n match=cookie_re.match(line_string)\n if not match:\n return None\n encoding=_get_normal_name(match.group(1))\n try:\n codec=lookup(encoding)\n except LookupError:\n \n if filename is None:\n msg=\"unknown encoding: \"+encoding\n else:\n msg=\"unknown encoding for {!r}: {}\".format(filename,\n encoding)\n raise SyntaxError(msg)\n \n if bom_found:\n if encoding !='utf-8':\n \n if filename is None:\n msg='encoding problem: utf-8'\n else:\n msg='encoding problem for {!r}: utf-8'.format(filename)\n raise SyntaxError(msg)\n encoding +='-sig'\n return encoding\n \n first=read_or_stop()\n if first.startswith(BOM_UTF8):\n bom_found=True\n first=first[3:]\n default='utf-8-sig'\n if not first:\n return default,[]\n \n encoding=find_cookie(first)\n if encoding:\n return encoding,[first]\n if not blank_re.match(first):\n return default,[first]\n \n second=read_or_stop()\n if not second:\n return default,[first]\n \n encoding=find_cookie(second)\n if encoding:\n return encoding,[first,second]\n \n return default,[first,second]\n \n \ndef open(filename):\n ''\n\n \n buffer=_builtin_open(filename,'rb')\n try:\n encoding,lines=detect_encoding(buffer.readline)\n buffer.seek(0)\n text=TextIOWrapper(buffer,encoding,line_buffering=True)\n text.mode='r'\n return text\n except:\n buffer.close()\n raise\n \ndef tokenize(readline):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n encoding,consumed=detect_encoding(readline)\n rl_gen=_itertools.chain(consumed,iter(readline,b\"\"))\n if encoding is not None:\n if encoding ==\"utf-8-sig\":\n \n encoding=\"utf-8\"\n yield TokenInfo(ENCODING,encoding,(0,0),(0,0),'')\n yield from _generate_tokens_from_c_tokenizer(rl_gen.__next__,encoding,extra_tokens=True)\n \ndef 
generate_tokens(readline):\n ''\n\n\n\n \n return _generate_tokens_from_c_tokenizer(readline,extra_tokens=True)\n \ndef main():\n import argparse\n \n \n def perror(message):\n sys.stderr.write(message)\n sys.stderr.write('\\n')\n \n def error(message,filename=None,location=None):\n if location:\n args=(filename,)+location+(message,)\n perror(\"%s:%d:%d: error: %s\"%args)\n elif filename:\n perror(\"%s: error: %s\"%(filename,message))\n else:\n perror(\"error: %s\"%message)\n sys.exit(1)\n \n \n parser=argparse.ArgumentParser(prog='python -m tokenize')\n parser.add_argument(dest='filename',nargs='?',\n metavar='filename.py',\n help='the file to tokenize; defaults to stdin')\n parser.add_argument('-e','--exact',dest='exact',action='store_true',\n help='display token names using the exact type')\n args=parser.parse_args()\n \n try:\n \n if args.filename:\n filename=args.filename\n with _builtin_open(filename,'rb')as f:\n tokens=list(tokenize(f.readline))\n else:\n filename=\"\"\n tokens=_generate_tokens_from_c_tokenizer(\n sys.stdin.readline,extra_tokens=True)\n \n \n \n for token in tokens:\n token_type=token.type\n if args.exact:\n token_type=token.exact_type\n token_range=\"%d,%d-%d,%d:\"%(token.start+token.end)\n print(\"%-20s%-15s%-15r\"%\n (token_range,tok_name[token_type],token.string))\n except IndentationError as err:\n line,column=err.args[1][1:3]\n error(err.args[0],filename,(line,column))\n except TokenError as err:\n line,column=err.args[1]\n error(err.args[0],filename,(line,column))\n except SyntaxError as err:\n error(err,filename)\n except OSError as err:\n error(err)\n except KeyboardInterrupt:\n print(\"interrupted\\n\")\n except Exception as err:\n perror(\"unexpected error: %s\"%err)\n raise\n \ndef _transform_msg(msg):\n ''\n\n\n\n \n if \"unterminated triple-quoted string literal\"in msg:\n return \"EOF in multi-line string\"\n return msg\n \ndef _generate_tokens_from_c_tokenizer(source,encoding=None,extra_tokens=False):\n ''\n if encoding is None:\n it=_tokenize.TokenizerIter(source,extra_tokens=extra_tokens)\n else:\n it=_tokenize.TokenizerIter(source,encoding=encoding,extra_tokens=extra_tokens)\n try:\n for info in it:\n yield TokenInfo._make(info)\n except SyntaxError as e:\n if type(e)!=SyntaxError:\n raise e from None\n msg=_transform_msg(e.msg)\n raise TokenError(msg,(e.lineno,e.offset))from None\n \n \nif __name__ ==\"__main__\":\n main()\n", ["_tokenize", "argparse", "builtins", "codecs", "collections", "functools", "io", "itertools", "re", "sys", "token"]], "uuid": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport os\nimport sys\n\nfrom enum import Enum,_simple_enum\n\n\n__author__='Ka-Ping Yee '\n\n\nif sys.platform in('win32','darwin','emscripten','wasi'):\n _AIX=_LINUX=False\nelse:\n import platform\n _platform_system=platform.system()\n _AIX=_platform_system =='AIX'\n _LINUX=_platform_system =='Linux'\n \n_MAC_DELIM=b':'\n_MAC_OMITS_LEADING_ZEROES=False\nif _AIX:\n _MAC_DELIM=b'.'\n _MAC_OMITS_LEADING_ZEROES=True\n \nRESERVED_NCS,RFC_4122,RESERVED_MICROSOFT,RESERVED_FUTURE=[\n'reserved for NCS compatibility','specified in RFC 4122',\n'reserved for Microsoft compatibility','reserved for future definition']\n\nint_=int\nbytes_=bytes\n\n\n@_simple_enum(Enum)\nclass SafeUUID:\n safe=0\n unsafe=-1\n unknown=None\n \n \nclass UUID:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('int','is_safe','__weakref__')\n \n def 
__init__(self,hex=None,bytes=None,bytes_le=None,fields=None,\n int=None,version=None,\n *,is_safe=SafeUUID.unknown):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if[hex,bytes,bytes_le,fields,int].count(None)!=4:\n raise TypeError('one of the hex, bytes, bytes_le, fields, '\n 'or int arguments must be given')\n if hex is not None:\n hex=hex.replace('urn:','').replace('uuid:','')\n hex=hex.strip('{}').replace('-','')\n if len(hex)!=32:\n raise ValueError('badly formed hexadecimal UUID string')\n int=int_(hex,16)\n if bytes_le is not None:\n if len(bytes_le)!=16:\n raise ValueError('bytes_le is not a 16-char string')\n bytes=(bytes_le[4 -1::-1]+bytes_le[6 -1:4 -1:-1]+\n bytes_le[8 -1:6 -1:-1]+bytes_le[8:])\n if bytes is not None:\n if len(bytes)!=16:\n raise ValueError('bytes is not a 16-char string')\n assert isinstance(bytes,bytes_),repr(bytes)\n int=int_.from_bytes(bytes)\n if fields is not None:\n if len(fields)!=6:\n raise ValueError('fields is not a 6-tuple')\n (time_low,time_mid,time_hi_version,\n clock_seq_hi_variant,clock_seq_low,node)=fields\n if not 0 <=time_low <1 <<32:\n raise ValueError('field 1 out of range (need a 32-bit value)')\n if not 0 <=time_mid <1 <<16:\n raise ValueError('field 2 out of range (need a 16-bit value)')\n if not 0 <=time_hi_version <1 <<16:\n raise ValueError('field 3 out of range (need a 16-bit value)')\n if not 0 <=clock_seq_hi_variant <1 <<8:\n raise ValueError('field 4 out of range (need an 8-bit value)')\n if not 0 <=clock_seq_low <1 <<8:\n raise ValueError('field 5 out of range (need an 8-bit value)')\n if not 0 <=node <1 <<48:\n raise ValueError('field 6 out of range (need a 48-bit value)')\n clock_seq=(clock_seq_hi_variant <<8)|clock_seq_low\n int=((time_low <<96)|(time_mid <<80)|\n (time_hi_version <<64)|(clock_seq <<48)|node)\n if int is not None:\n if not 0 <=int <1 <<128:\n raise ValueError('int is out of range (need a 128-bit value)')\n if version is not None:\n if not 1 <=version <=5:\n raise ValueError('illegal version number')\n \n int &=~(0xc000 <<48)\n int |=0x8000 <<48\n \n int &=~(0xf000 <<64)\n int |=version <<76\n object.__setattr__(self,'int',int)\n object.__setattr__(self,'is_safe',is_safe)\n \n def __getstate__(self):\n d={'int':self.int}\n if self.is_safe !=SafeUUID.unknown:\n \n \n d['is_safe']=self.is_safe.value\n return d\n \n def __setstate__(self,state):\n object.__setattr__(self,'int',state['int'])\n \n object.__setattr__(self,'is_safe',\n SafeUUID(state['is_safe'])\n if 'is_safe'in state else SafeUUID.unknown)\n \n def __eq__(self,other):\n if isinstance(other,UUID):\n return self.int ==other.int\n return NotImplemented\n \n \n \n \n def __lt__(self,other):\n if isinstance(other,UUID):\n return self.int other.int\n return NotImplemented\n \n def __le__(self,other):\n if isinstance(other,UUID):\n return self.int <=other.int\n return NotImplemented\n \n def __ge__(self,other):\n if isinstance(other,UUID):\n return self.int >=other.int\n return NotImplemented\n \n def __hash__(self):\n return hash(self.int)\n \n def __int__(self):\n return self.int\n \n def __repr__(self):\n return '%s(%r)'%(self.__class__.__name__,str(self))\n \n def __setattr__(self,name,value):\n raise TypeError('UUID objects are immutable')\n \n def __str__(self):\n hex='%032x'%self.int\n return '%s-%s-%s-%s-%s'%(\n hex[:8],hex[8:12],hex[12:16],hex[16:20],hex[20:])\n \n @property\n def bytes(self):\n return self.int.to_bytes(16)\n \n @property\n def bytes_le(self):\n bytes=self.bytes\n return(bytes[4 -1::-1]+bytes[6 -1:4 -1:-1]+bytes[8 
-1:6 -1:-1]+\n bytes[8:])\n \n @property\n def fields(self):\n return(self.time_low,self.time_mid,self.time_hi_version,\n self.clock_seq_hi_variant,self.clock_seq_low,self.node)\n \n @property\n def time_low(self):\n return self.int >>96\n \n @property\n def time_mid(self):\n return(self.int >>80)&0xffff\n \n @property\n def time_hi_version(self):\n return(self.int >>64)&0xffff\n \n @property\n def clock_seq_hi_variant(self):\n return(self.int >>56)&0xff\n \n @property\n def clock_seq_low(self):\n return(self.int >>48)&0xff\n \n @property\n def time(self):\n return(((self.time_hi_version&0x0fff)<<48)|\n (self.time_mid <<32)|self.time_low)\n \n @property\n def clock_seq(self):\n return(((self.clock_seq_hi_variant&0x3f)<<8)|\n self.clock_seq_low)\n \n @property\n def node(self):\n return self.int&0xffffffffffff\n \n @property\n def hex(self):\n return '%032x'%self.int\n \n @property\n def urn(self):\n return 'urn:uuid:'+str(self)\n \n @property\n def variant(self):\n if not self.int&(0x8000 <<48):\n return RESERVED_NCS\n elif not self.int&(0x4000 <<48):\n return RFC_4122\n elif not self.int&(0x2000 <<48):\n return RESERVED_MICROSOFT\n else:\n return RESERVED_FUTURE\n \n @property\n def version(self):\n \n if self.variant ==RFC_4122:\n return int((self.int >>76)&0xf)\n \n \ndef _get_command_stdout(command,*args):\n import io,os,shutil,subprocess\n \n try:\n path_dirs=os.environ.get('PATH',os.defpath).split(os.pathsep)\n path_dirs.extend(['/sbin','/usr/sbin'])\n executable=shutil.which(command,path=os.pathsep.join(path_dirs))\n if executable is None:\n return None\n \n \n \n env=dict(os.environ)\n env['LC_ALL']='C'\n \n if args !=('',):\n command=(executable,*args)\n else:\n command=(executable,)\n proc=subprocess.Popen(command,\n stdout=subprocess.PIPE,\n stderr=subprocess.DEVNULL,\n env=env)\n if not proc:\n return None\n stdout,stderr=proc.communicate()\n return io.BytesIO(stdout)\n except(OSError,subprocess.SubprocessError):\n return None\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef _is_universal(mac):\n return not(mac&(1 <<41))\n \n \ndef _find_mac_near_keyword(command,args,keywords,get_word_index):\n ''\n\n\n\n\n\n\n \n stdout=_get_command_stdout(command,args)\n if stdout is None:\n return None\n \n first_local_mac=None\n for line in stdout:\n words=line.lower().rstrip().split()\n for i in range(len(words)):\n if words[i]in keywords:\n try:\n word=words[get_word_index(i)]\n mac=int(word.replace(_MAC_DELIM,b''),16)\n except(ValueError,IndexError):\n \n \n \n \n \n pass\n else:\n if _is_universal(mac):\n return mac\n first_local_mac=first_local_mac or mac\n return first_local_mac or None\n \n \ndef _parse_mac(word):\n\n\n\n\n\n\n parts=word.split(_MAC_DELIM)\n if len(parts)!=6:\n return\n if _MAC_OMITS_LEADING_ZEROES:\n \n \n \n \n if not all(1 <=len(part)<=2 for part in parts):\n return\n hexstr=b''.join(part.rjust(2,b'0')for part in parts)\n else:\n if not all(len(part)==2 for part in parts):\n return\n hexstr=b''.join(parts)\n try:\n return int(hexstr,16)\n except ValueError:\n return\n \n \ndef _find_mac_under_heading(command,args,heading):\n ''\n\n\n\n\n \n stdout=_get_command_stdout(command,args)\n if stdout is None:\n return None\n \n keywords=stdout.readline().rstrip().split()\n try:\n column_index=keywords.index(heading)\n except ValueError:\n return None\n \n first_local_mac=None\n for line in stdout:\n words=line.rstrip().split()\n try:\n word=words[column_index]\n except IndexError:\n continue\n \n mac=_parse_mac(word)\n if mac is None:\n continue\n if 
_is_universal(mac):\n return mac\n if first_local_mac is None:\n first_local_mac=mac\n \n return first_local_mac\n \n \n \n \ndef _ifconfig_getnode():\n ''\n \n keywords=(b'hwaddr',b'ether',b'address:',b'lladdr')\n for args in('','-a','-av'):\n mac=_find_mac_near_keyword('ifconfig',args,keywords,lambda i:i+1)\n if mac:\n return mac\n return None\n \ndef _ip_getnode():\n ''\n \n mac=_find_mac_near_keyword('ip','link',[b'link/ether'],lambda i:i+1)\n if mac:\n return mac\n return None\n \ndef _arp_getnode():\n ''\n import os,socket\n if not hasattr(socket,\"gethostbyname\"):\n return None\n try:\n ip_addr=socket.gethostbyname(socket.gethostname())\n except OSError:\n return None\n \n \n mac=_find_mac_near_keyword('arp','-an',[os.fsencode(ip_addr)],lambda i:-1)\n if mac:\n return mac\n \n \n mac=_find_mac_near_keyword('arp','-an',[os.fsencode(ip_addr)],lambda i:i+1)\n if mac:\n return mac\n \n \n mac=_find_mac_near_keyword('arp','-an',[os.fsencode('(%s)'%ip_addr)],\n lambda i:i+2)\n \n if mac:\n return mac\n return None\n \ndef _lanscan_getnode():\n ''\n \n return _find_mac_near_keyword('lanscan','-ai',[b'lan0'],lambda i:0)\n \ndef _netstat_getnode():\n ''\n \n return _find_mac_under_heading('netstat','-ian',b'Address')\n \ndef _ipconfig_getnode():\n ''\n \n return _windll_getnode()\n \ndef _netbios_getnode():\n ''\n \n return _windll_getnode()\n \n \n \ntry:\n import _uuid\n _generate_time_safe=getattr(_uuid,\"generate_time_safe\",None)\n _UuidCreate=getattr(_uuid,\"UuidCreate\",None)\n _has_uuid_generate_time_safe=_uuid.has_uuid_generate_time_safe\nexcept ImportError:\n _uuid=None\n _generate_time_safe=None\n _UuidCreate=None\n _has_uuid_generate_time_safe=None\n \n \ndef _load_system_functions():\n ''\n \n \ndef _unix_getnode():\n ''\n if _generate_time_safe:\n uuid_time,_=_generate_time_safe()\n return UUID(bytes=uuid_time).node\n \ndef _windll_getnode():\n ''\n if _UuidCreate:\n uuid_bytes=_UuidCreate()\n return UUID(bytes_le=uuid_bytes).node\n \ndef _random_getnode():\n ''\n \n \n \n \n \n \n \n \n \n \n import random\n return random.getrandbits(48)|(1 <<40)\n \n \n \n \n \n \n \n \nif _LINUX:\n _OS_GETTERS=[_ip_getnode,_ifconfig_getnode]\nelif sys.platform =='darwin':\n _OS_GETTERS=[_ifconfig_getnode,_arp_getnode,_netstat_getnode]\nelif sys.platform =='win32':\n\n _OS_GETTERS=[]\nelif _AIX:\n _OS_GETTERS=[_netstat_getnode]\nelse:\n _OS_GETTERS=[_ifconfig_getnode,_ip_getnode,_arp_getnode,\n _netstat_getnode,_lanscan_getnode]\nif os.name =='posix':\n _GETTERS=[_unix_getnode]+_OS_GETTERS\nelif os.name =='nt':\n _GETTERS=[_windll_getnode]+_OS_GETTERS\nelse:\n _GETTERS=_OS_GETTERS\n \n_node=None\n\ndef getnode():\n ''\n\n\n\n\n\n \n global _node\n if _node is not None:\n return _node\n \n for getter in _GETTERS+[_random_getnode]:\n try:\n _node=getter()\n except:\n continue\n if(_node is not None)and(0 <=_node <(1 <<48)):\n return _node\n assert False,'_random_getnode() returned invalid value: {}'.format(_node)\n \n \n_last_timestamp=None\n\ndef uuid1(node=None,clock_seq=None):\n ''\n\n\n \n \n \n \n if _generate_time_safe is not None and node is clock_seq is None:\n uuid_time,safely_generated=_generate_time_safe()\n try:\n is_safe=SafeUUID(safely_generated)\n except ValueError:\n is_safe=SafeUUID.unknown\n return UUID(bytes=uuid_time,is_safe=is_safe)\n \n global _last_timestamp\n import time\n nanoseconds=time.time_ns()\n \n \n timestamp=nanoseconds //100+0x01b21dd213814000\n if _last_timestamp is not None and timestamp <=_last_timestamp:\n timestamp=_last_timestamp+1\n 
_last_timestamp=timestamp\n if clock_seq is None:\n import random\n clock_seq=random.getrandbits(14)\n time_low=timestamp&0xffffffff\n time_mid=(timestamp >>32)&0xffff\n time_hi_version=(timestamp >>48)&0x0fff\n clock_seq_low=clock_seq&0xff\n clock_seq_hi_variant=(clock_seq >>8)&0x3f\n if node is None:\n node=getnode()\n return UUID(fields=(time_low,time_mid,time_hi_version,\n clock_seq_hi_variant,clock_seq_low,node),version=1)\n \ndef uuid3(namespace,name):\n ''\n if isinstance(name,str):\n name=bytes(name,\"utf-8\")\n from hashlib import md5\n digest=md5(\n namespace.bytes+name,\n usedforsecurity=False\n ).digest()\n return UUID(bytes=digest[:16],version=3)\n \ndef uuid4():\n ''\n return UUID(bytes=os.urandom(16),version=4)\n \ndef uuid5(namespace,name):\n ''\n if isinstance(name,str):\n name=bytes(name,\"utf-8\")\n from hashlib import sha1\n hash=sha1(namespace.bytes+name).digest()\n return UUID(bytes=hash[:16],version=5)\n \n \ndef main():\n ''\n uuid_funcs={\n \"uuid1\":uuid1,\n \"uuid3\":uuid3,\n \"uuid4\":uuid4,\n \"uuid5\":uuid5\n }\n uuid_namespace_funcs=(\"uuid3\",\"uuid5\")\n namespaces={\n \"@dns\":NAMESPACE_DNS,\n \"@url\":NAMESPACE_URL,\n \"@oid\":NAMESPACE_OID,\n \"@x500\":NAMESPACE_X500\n }\n \n import argparse\n parser=argparse.ArgumentParser(\n description=\"Generates a uuid using the selected uuid function.\")\n parser.add_argument(\"-u\",\"--uuid\",choices=uuid_funcs.keys(),default=\"uuid4\",\n help=\"The function to use to generate the uuid. \"\n \"By default uuid4 function is used.\")\n parser.add_argument(\"-n\",\"--namespace\",\n help=\"The namespace is a UUID, or '@ns' where 'ns' is a \"\n \"well-known predefined UUID addressed by namespace name. \"\n \"Such as @dns, @url, @oid, and @x500. \"\n \"Only required for uuid3/uuid5 functions.\")\n parser.add_argument(\"-N\",\"--name\",\n help=\"The name used as part of generating the uuid. \"\n \"Only required for uuid3/uuid5 functions.\")\n \n args=parser.parse_args()\n uuid_func=uuid_funcs[args.uuid]\n namespace=args.namespace\n name=args.name\n \n if args.uuid in uuid_namespace_funcs:\n if not namespace or not name:\n parser.error(\n \"Incorrect number of arguments. \"\n f\"{args.uuid} requires a namespace and a name. 
\"\n \"Run 'python -m uuid -h' for more information.\"\n )\n namespace=namespaces[namespace]if namespace in namespaces else UUID(namespace)\n print(uuid_func(namespace,name))\n else:\n print(uuid_func())\n \n \n \n \nNAMESPACE_DNS=UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_URL=UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_OID=UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_X500=UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8')\n\nif __name__ ==\"__main__\":\n main()\n", ["_uuid", "argparse", "enum", "hashlib", "io", "os", "platform", "random", "shutil", "socket", "subprocess", "sys", "time"]], "imp": [".py", "''\n\n\n\n\n\n\n\nfrom _imp import(lock_held,acquire_lock,release_lock,\nget_frozen_object,is_frozen_package,\ninit_frozen,is_builtin,is_frozen,\n_fix_co_filename,_frozen_module_names)\ntry:\n from _imp import create_dynamic\nexcept ImportError:\n\n create_dynamic=None\n \nfrom importlib._bootstrap import _ERR_MSG,_exec,_load,_builtin_from_name\nfrom importlib._bootstrap_external import SourcelessFileLoader\n\nfrom importlib import machinery\nfrom importlib import util\nimport importlib\nimport os\nimport sys\nimport tokenize\nimport types\nimport warnings\n\nwarnings.warn(\"the imp module is deprecated in favour of importlib and slated \"\n\"for removal in Python 3.12; \"\n\"see the module's documentation for alternative uses\",\nDeprecationWarning,stacklevel=2)\n\n\nSEARCH_ERROR=0\nPY_SOURCE=1\nPY_COMPILED=2\nC_EXTENSION=3\nPY_RESOURCE=4\nPKG_DIRECTORY=5\nC_BUILTIN=6\nPY_FROZEN=7\nPY_CODERESOURCE=8\nIMP_HOOK=9\n\n\ndef new_module(name):\n ''\n\n\n\n\n\n \n return types.ModuleType(name)\n \n \ndef get_magic():\n ''\n\n\n \n return util.MAGIC_NUMBER\n \n \ndef get_tag():\n ''\n return sys.implementation.cache_tag\n \n \ndef cache_from_source(path,debug_override=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n return util.cache_from_source(path,debug_override)\n \n \ndef source_from_cache(path):\n ''\n\n\n\n\n\n\n\n\n \n return util.source_from_cache(path)\n \n \ndef get_suffixes():\n ''\n extensions=[(s,'rb',C_EXTENSION)for s in machinery.EXTENSION_SUFFIXES]\n source=[(s,'r',PY_SOURCE)for s in machinery.SOURCE_SUFFIXES]\n bytecode=[(s,'rb',PY_COMPILED)for s in machinery.BYTECODE_SUFFIXES]\n \n return extensions+source+bytecode\n \n \nclass NullImporter:\n\n ''\n\n\n\n \n \n def __init__(self,path):\n if path =='':\n raise ImportError('empty pathname',path='')\n elif os.path.isdir(path):\n raise ImportError('existing directory',path=path)\n \n def find_module(self,fullname):\n ''\n return None\n \n \nclass _HackedGetData:\n\n ''\n \n \n def __init__(self,fullname,path,file=None):\n super().__init__(fullname,path)\n self.file=file\n \n def get_data(self,path):\n ''\n if self.file and path ==self.path:\n \n \n if not self.file.closed:\n file=self.file\n if 'b'not in file.mode:\n file.close()\n if self.file.closed:\n self.file=file=open(self.path,'rb')\n \n with file:\n return file.read()\n else:\n return super().get_data(path)\n \n \nclass _LoadSourceCompatibility(_HackedGetData,machinery.SourceFileLoader):\n\n ''\n \n \ndef load_source(name,pathname,file=None):\n loader=_LoadSourceCompatibility(name,pathname,file)\n spec=util.spec_from_file_location(name,pathname,loader=loader)\n if name in sys.modules:\n module=_exec(spec,sys.modules[name])\n else:\n module=_load(spec)\n \n \n module.__loader__=machinery.SourceFileLoader(name,pathname)\n module.__spec__.loader=module.__loader__\n return module\n \n 
\nclass _LoadCompiledCompatibility(_HackedGetData,SourcelessFileLoader):\n\n ''\n \n \ndef load_compiled(name,pathname,file=None):\n ''\n loader=_LoadCompiledCompatibility(name,pathname,file)\n spec=util.spec_from_file_location(name,pathname,loader=loader)\n if name in sys.modules:\n module=_exec(spec,sys.modules[name])\n else:\n module=_load(spec)\n \n \n module.__loader__=SourcelessFileLoader(name,pathname)\n module.__spec__.loader=module.__loader__\n return module\n \n \ndef load_package(name,path):\n ''\n if os.path.isdir(path):\n extensions=(machinery.SOURCE_SUFFIXES[:]+\n machinery.BYTECODE_SUFFIXES[:])\n for extension in extensions:\n init_path=os.path.join(path,'__init__'+extension)\n if os.path.exists(init_path):\n path=init_path\n break\n else:\n raise ValueError('{!r} is not a package'.format(path))\n spec=util.spec_from_file_location(name,path,\n submodule_search_locations=[])\n if name in sys.modules:\n return _exec(spec,sys.modules[name])\n else:\n return _load(spec)\n \n \ndef load_module(name,file,filename,details):\n ''\n\n\n\n\n\n \n suffix,mode,type_=details\n if mode and(not mode.startswith('r')or '+'in mode):\n raise ValueError('invalid file open mode {!r}'.format(mode))\n elif file is None and type_ in{PY_SOURCE,PY_COMPILED}:\n msg='file object required for import (type code {})'.format(type_)\n raise ValueError(msg)\n elif type_ ==PY_SOURCE:\n return load_source(name,filename,file)\n elif type_ ==PY_COMPILED:\n return load_compiled(name,filename,file)\n elif type_ ==C_EXTENSION and load_dynamic is not None:\n if file is None:\n with open(filename,'rb')as opened_file:\n return load_dynamic(name,filename,opened_file)\n else:\n return load_dynamic(name,filename,file)\n elif type_ ==PKG_DIRECTORY:\n return load_package(name,filename)\n elif type_ ==C_BUILTIN:\n return init_builtin(name)\n elif type_ ==PY_FROZEN:\n return init_frozen(name)\n else:\n msg=\"Don't know how to import {} (type code {})\".format(name,type_)\n raise ImportError(msg,name=name)\n \n \ndef find_module(name,path=None):\n ''\n\n\n\n\n\n\n\n\n \n if not isinstance(name,str):\n raise TypeError(\"'name' must be a str, not {}\".format(type(name)))\n elif not isinstance(path,(type(None),list)):\n \n raise RuntimeError(\"'path' must be None or a list, \"\n \"not {}\".format(type(path)))\n \n if path is None:\n if is_builtin(name):\n return None,None,('','',C_BUILTIN)\n elif is_frozen(name):\n return None,None,('','',PY_FROZEN)\n else:\n path=sys.path\n \n for entry in path:\n package_directory=os.path.join(entry,name)\n for suffix in['.py',machinery.BYTECODE_SUFFIXES[0]]:\n package_file_name='__init__'+suffix\n file_path=os.path.join(package_directory,package_file_name)\n if os.path.isfile(file_path):\n return None,package_directory,('','',PKG_DIRECTORY)\n for suffix,mode,type_ in get_suffixes():\n file_name=name+suffix\n file_path=os.path.join(entry,file_name)\n if os.path.isfile(file_path):\n break\n else:\n continue\n break\n else:\n raise ImportError(_ERR_MSG.format(name),name=name)\n \n encoding=None\n if 'b'not in mode:\n with open(file_path,'rb')as file:\n encoding=tokenize.detect_encoding(file.readline)[0]\n file=open(file_path,mode,encoding=encoding)\n return file,file_path,(suffix,mode,type_)\n \n \ndef reload(module):\n ''\n\n\n\n\n\n \n return importlib.reload(module)\n \n \ndef init_builtin(name):\n ''\n\n\n\n \n try:\n return _builtin_from_name(name)\n except ImportError:\n return None\n \n \nif create_dynamic:\n def load_dynamic(name,path,file=None):\n ''\n\n\n \n import 
importlib.machinery\n loader=importlib.machinery.ExtensionFileLoader(name,path)\n \n \n \n spec=importlib.machinery.ModuleSpec(\n name=name,loader=loader,origin=path)\n return _load(spec)\n \nelse:\n load_dynamic=None\n", ["_imp", "importlib", "importlib._bootstrap", "importlib._bootstrap_external", "importlib.machinery", "importlib.util", "os", "sys", "tokenize", "types", "warnings"]], "re": [".py", "from python_re import *\n\nimport python_re\n_compile=python_re._compile\n_reconstructor=python_re._reconstructor\n\npython_re._reconstructor.__module__='re'\n", ["python_re"]], "_sysconfigdata": [".py", "build_time_vars={'HAVE_SYS_WAIT_H':1,'HAVE_UTIL_H':0,'HAVE_SYMLINKAT':1,'HAVE_LIBSENDFILE':0,'SRCDIRS':'Parser Grammar Objects Python Modules Mac','SIZEOF_OFF_T':8,'BASECFLAGS':'-Wno-unused-result','HAVE_UTIME_H':1,'EXTRAMACHDEPPATH':'','HAVE_SYS_TIME_H':1,'CFLAGSFORSHARED':'-fPIC','HAVE_HYPOT':1,'PGSRCS':'\\\\','HAVE_LIBUTIL_H':0,'HAVE_COMPUTED_GOTOS':1,'HAVE_LUTIMES':1,'HAVE_MAKEDEV':1,'HAVE_REALPATH':1,'HAVE_LINUX_TIPC_H':1,'MULTIARCH':'i386-linux-gnu','HAVE_GETWD':1,'HAVE_GCC_ASM_FOR_X64':0,'HAVE_INET_PTON':1,'HAVE_GETHOSTBYNAME_R_6_ARG':1,'SIZEOF__BOOL':1,'HAVE_ZLIB_COPY':1,'ASDLGEN':'python3.3 ../Parser/asdl_c.py','GRAMMAR_INPUT':'../Grammar/Grammar','HOST_GNU_TYPE':'i686-pc-linux-gnu','HAVE_SCHED_RR_GET_INTERVAL':1,'HAVE_BLUETOOTH_H':0,'HAVE_MKFIFO':1,'TIMEMODULE_LIB':0,'LIBM':'-lm','PGENOBJS':'\\\\ \\\\','PYTHONFRAMEWORK':'','GETPGRP_HAVE_ARG':0,'HAVE_MMAP':1,'SHLIB_SUFFIX':'.so','SIZEOF_FLOAT':4,'HAVE_RENAMEAT':1,'HAVE_LANGINFO_H':1,'HAVE_STDLIB_H':1,'PY_CORE_CFLAGS':'-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security -I. -IInclude -I../Include -D_FORTIFY_SOURCE=2 -fPIC -DPy_BUILD_CORE','HAVE_BROKEN_PIPE_BUF':0,'HAVE_CONFSTR':1,'HAVE_SIGTIMEDWAIT':1,'HAVE_FTELLO':1,'READELF':'readelf','HAVE_SIGALTSTACK':1,'TESTTIMEOUT':3600,'PYTHONPATH':':plat-i386-linux-gnu','SIZEOF_WCHAR_T':4,'LIBOBJS':'','HAVE_SYSCONF':1,'MAKESETUP':'../Modules/makesetup','HAVE_UTIMENSAT':1,'HAVE_FCHOWNAT':1,'HAVE_WORKING_TZSET':1,'HAVE_FINITE':1,'HAVE_ASINH':1,'HAVE_SETEUID':1,'CONFIGFILES':'configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in','HAVE_SETGROUPS':1,'PARSER_OBJS':'\\\\ Parser/myreadline.o Parser/parsetok.o Parser/tokenizer.o','HAVE_MBRTOWC':1,'SIZEOF_INT':4,'HAVE_STDARG_PROTOTYPES':1,'TM_IN_SYS_TIME':0,'HAVE_SYS_TIMES_H':1,'HAVE_LCHOWN':1,'HAVE_SSIZE_T':1,'HAVE_PAUSE':1,'SYSLIBS':'-lm','POSIX_SEMAPHORES_NOT_ENABLED':0,'HAVE_DEVICE_MACROS':1,'BLDSHARED':'i686-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-Bsymbolic-functions -Wl,-z,relro -Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ','LIBSUBDIRS':'tkinter tkinter/test tkinter/test/test_tkinter \\\\','HAVE_SYS_UN_H':1,'HAVE_SYS_STAT_H':1,'VPATH':'..','INCLDIRSTOMAKE':'/usr/include /usr/include /usr/include/python3.3m /usr/include/python3.3m','HAVE_BROKEN_SEM_GETVALUE':0,'HAVE_TIMEGM':1,'PACKAGE_VERSION':0,'MAJOR_IN_SYSMACROS':0,'HAVE_ATANH':1,'HAVE_GAI_STRERROR':1,'HAVE_SYS_POLL_H':1,'SIZEOF_PTHREAD_T':4,'SIZEOF_FPOS_T':16,'HAVE_CTERMID':1,'HAVE_TMPFILE':1,'HAVE_SETUID':1,'CXX':'i686-linux-gnu-g++ 
-pthread','srcdir':'..','HAVE_UINT32_T':1,'HAVE_ADDRINFO':1,'HAVE_GETSPENT':1,'SIZEOF_DOUBLE':8,'HAVE_INT32_T':1,'LIBRARY_OBJS_OMIT_FROZEN':'\\\\','HAVE_FUTIMES':1,'CONFINCLUDEPY':'/usr/include/python3.3m','HAVE_RL_COMPLETION_APPEND_CHARACTER':1,'LIBFFI_INCLUDEDIR':'','HAVE_SETGID':1,'HAVE_UINT64_T':1,'EXEMODE':755,'UNIVERSALSDK':'','HAVE_LIBDL':1,'HAVE_GETNAMEINFO':1,'HAVE_STDINT_H':1,'COREPYTHONPATH':':plat-i386-linux-gnu','HAVE_SOCKADDR_STORAGE':1,'HAVE_WAITID':1,'EXTRAPLATDIR':'@EXTRAPLATDIR@','HAVE_ACCEPT4':1,'RUNSHARED':'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared:','EXE':'','HAVE_SIGACTION':1,'HAVE_CHOWN':1,'HAVE_GETLOGIN':1,'HAVE_TZNAME':0,'PACKAGE_NAME':0,'HAVE_GETPGID':1,'HAVE_GLIBC_MEMMOVE_BUG':0,'BUILD_GNU_TYPE':'i686-pc-linux-gnu','HAVE_LINUX_CAN_H':1,'DYNLOADFILE':'dynload_shlib.o','HAVE_PWRITE':1,'BUILDEXE':'','HAVE_OPENPTY':1,'HAVE_LOCKF':1,'HAVE_COPYSIGN':1,'HAVE_PREAD':1,'HAVE_DLOPEN':1,'HAVE_SYS_KERN_CONTROL_H':0,'PY_FORMAT_LONG_LONG':'\"ll\"','HAVE_TCSETPGRP':1,'HAVE_SETSID':1,'HAVE_STRUCT_STAT_ST_BIRTHTIME':0,'HAVE_STRING_H':1,'LDLIBRARY':'libpython3.3m.so','INSTALL_SCRIPT':'/usr/bin/install -c','HAVE_SYS_XATTR_H':1,'HAVE_CURSES_IS_TERM_RESIZED':1,'HAVE_TMPNAM_R':1,'STRICT_SYSV_CURSES':\"/* Don't use ncurses extensions */\",'WANT_SIGFPE_HANDLER':1,'HAVE_INT64_T':1,'HAVE_STAT_TV_NSEC':1,'HAVE_SYS_MKDEV_H':0,'HAVE_BROKEN_POLL':0,'HAVE_IF_NAMEINDEX':1,'HAVE_GETPWENT':1,'PSRCS':'\\\\','RANLIB':'ranlib','HAVE_WCSCOLL':1,'WITH_NEXT_FRAMEWORK':0,'ASDLGEN_FILES':'../Parser/asdl.py ../Parser/asdl_c.py','HAVE_RL_PRE_INPUT_HOOK':1,'PACKAGE_URL':0,'SHLIB_EXT':0,'HAVE_SYS_LOADAVG_H':0,'HAVE_LIBIEEE':0,'HAVE_SEM_OPEN':1,'HAVE_TERM_H':1,'IO_OBJS':'\\\\','IO_H':'Modules/_io/_iomodule.h','HAVE_STATVFS':1,'VERSION':'3.3','HAVE_GETC_UNLOCKED':1,'MACHDEPS':'plat-i386-linux-gnu @EXTRAPLATDIR@','SUBDIRSTOO':'Include Lib Misc','HAVE_SETREUID':1,'HAVE_ERFC':1,'HAVE_SETRESUID':1,'LINKFORSHARED':'-Xlinker -export-dynamic -Wl,-O1 -Wl,-Bsymbolic-functions','HAVE_SYS_TYPES_H':1,'HAVE_GETPAGESIZE':1,'HAVE_SETEGID':1,'HAVE_PTY_H':1,'HAVE_STRUCT_STAT_ST_FLAGS':0,'HAVE_WCHAR_H':1,'HAVE_FSEEKO':1,'Py_ENABLE_SHARED':1,'HAVE_SIGRELSE':1,'HAVE_PTHREAD_INIT':0,'FILEMODE':644,'HAVE_SYS_RESOURCE_H':1,'HAVE_READLINKAT':1,'PYLONG_BITS_IN_DIGIT':0,'LINKCC':'i686-linux-gnu-gcc -pthread','HAVE_SETLOCALE':1,'HAVE_CHROOT':1,'HAVE_OPENAT':1,'HAVE_FEXECVE':1,'LDCXXSHARED':'i686-linux-gnu-g++ -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions','DIST':'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in Include Lib Misc Ext-dummy','HAVE_MKNOD':1,'PY_LDFLAGS':'-Wl,-Bsymbolic-functions -Wl,-z,relro','HAVE_BROKEN_MBSTOWCS':0,'LIBRARY_OBJS':'\\\\','HAVE_LOG1P':1,'SIZEOF_VOID_P':4,'HAVE_FCHOWN':1,'PYTHONFRAMEWORKPREFIX':'','HAVE_LIBDLD':0,'HAVE_TGAMMA':1,'HAVE_ERRNO_H':1,'HAVE_IO_H':0,'OTHER_LIBTOOL_OPT':'','HAVE_POLL_H':1,'PY_CPPFLAGS':'-I. 
-IInclude -I../Include -D_FORTIFY_SOURCE=2','XMLLIBSUBDIRS':'xml xml/dom xml/etree xml/parsers xml/sax','GRAMMAR_H':'Include/graminit.h','TANH_PRESERVES_ZERO_SIGN':1,'HAVE_GETLOADAVG':1,'UNICODE_DEPS':'\\\\ \\\\','HAVE_GETCWD':1,'MANDIR':'/usr/share/man','MACHDESTLIB':'/usr/lib/python3.3','GRAMMAR_C':'Python/graminit.c','PGOBJS':'\\\\','HAVE_DEV_PTMX':1,'HAVE_UINTPTR_T':1,'HAVE_SCHED_SETAFFINITY':1,'PURIFY':'','HAVE_DECL_ISINF':1,'HAVE_RL_CALLBACK':1,'HAVE_WRITEV':1,'HAVE_GETHOSTBYNAME_R_5_ARG':0,'HAVE_SYS_AUDIOIO_H':0,'EXT_SUFFIX':'.cpython-33m.so','SIZEOF_LONG_LONG':8,'DLINCLDIR':'.','HAVE_PATHCONF':1,'HAVE_UNLINKAT':1,'MKDIR_P':'/bin/mkdir -p','HAVE_ALTZONE':0,'SCRIPTDIR':'/usr/lib','OPCODETARGETGEN_FILES':'\\\\','HAVE_GETSPNAM':1,'HAVE_SYS_TERMIO_H':0,'HAVE_ATTRIBUTE_FORMAT_PARSETUPLE':0,'HAVE_PTHREAD_H':1,'Py_DEBUG':0,'HAVE_STRUCT_STAT_ST_BLOCKS':1,'X87_DOUBLE_ROUNDING':1,'SIZEOF_TIME_T':4,'HAVE_DYNAMIC_LOADING':1,'HAVE_DIRECT_H':0,'SRC_GDB_HOOKS':'../Tools/gdb/libpython.py','HAVE_GETADDRINFO':1,'HAVE_BROKEN_NICE':0,'HAVE_DIRENT_H':1,'HAVE_WCSXFRM':1,'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK':1,'HAVE_FSTATVFS':1,'PYTHON':'python','HAVE_OSX105_SDK':0,'BINDIR':'/usr/bin','TESTPYTHON':'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared: ./python','ARFLAGS':'rc','PLATDIR':'plat-i386-linux-gnu','HAVE_ASM_TYPES_H':1,'PY3LIBRARY':'libpython3.so','HAVE_PLOCK':0,'FLOCK_NEEDS_LIBBSD':0,'WITH_TSC':0,'HAVE_LIBREADLINE':1,'MACHDEP':'linux','HAVE_SELECT':1,'LDFLAGS':'-Wl,-Bsymbolic-functions -Wl,-z,relro','HAVE_HSTRERROR':1,'SOABI':'cpython-33m','HAVE_GETTIMEOFDAY':1,'HAVE_LIBRESOLV':0,'HAVE_UNSETENV':1,'HAVE_TM_ZONE':1,'HAVE_GETPGRP':1,'HAVE_FLOCK':1,'HAVE_SYS_BSDTTY_H':0,'SUBDIRS':'','PYTHONFRAMEWORKINSTALLDIR':'','PACKAGE_BUGREPORT':0,'HAVE_CLOCK':1,'HAVE_GETPEERNAME':1,'SIZEOF_PID_T':4,'HAVE_CONIO_H':0,'HAVE_FSTATAT':1,'HAVE_NETPACKET_PACKET_H':1,'HAVE_WAIT3':1,'DESTPATH':'','HAVE_STAT_TV_NSEC2':0,'HAVE_GETRESGID':1,'HAVE_UCS4_TCL':0,'SIGNED_RIGHT_SHIFT_ZERO_FILLS':0,'HAVE_TIMES':1,'HAVE_UNAME':1,'HAVE_ERF':1,'SIZEOF_SHORT':2,'HAVE_NCURSES_H':1,'HAVE_SYS_SENDFILE_H':1,'HAVE_CTERMID_R':0,'HAVE_TMPNAM':1,'prefix':'/usr','HAVE_NICE':1,'WITH_THREAD':1,'LN':'ln','TESTRUNNER':'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared: ./python ../Tools/scripts/run_tests.py','HAVE_SIGINTERRUPT':1,'HAVE_SETPGID':1,'RETSIGTYPE':'void','HAVE_SCHED_GET_PRIORITY_MAX':1,'HAVE_SYS_SYS_DOMAIN_H':0,'HAVE_SYS_DIR_H':0,'HAVE__GETPTY':0,'HAVE_BLUETOOTH_BLUETOOTH_H':1,'HAVE_BIND_TEXTDOMAIN_CODESET':1,'HAVE_POLL':1,'PYTHON_OBJS':'\\\\','HAVE_WAITPID':1,'USE_INLINE':1,'HAVE_FUTIMENS':1,'USE_COMPUTED_GOTOS':1,'MAINCC':'i686-linux-gnu-gcc -pthread','HAVE_SOCKETPAIR':1,'HAVE_PROCESS_H':0,'HAVE_SETVBUF':1,'HAVE_FDOPENDIR':1,'CONFINCLUDEDIR':'/usr/include','BINLIBDEST':'/usr/lib/python3.3','HAVE_SYS_IOCTL_H':1,'HAVE_SYSEXITS_H':1,'LDLAST':'','HAVE_SYS_FILE_H':1,'HAVE_RL_COMPLETION_SUPPRESS_APPEND':1,'HAVE_RL_COMPLETION_MATCHES':1,'HAVE_TCGETPGRP':1,'SIZEOF_SIZE_T':4,'HAVE_EPOLL_CREATE1':1,'HAVE_SYS_SELECT_H':1,'HAVE_CLOCK_GETTIME':1,'CFLAGS':'-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ','HAVE_SNPRINTF':1,'BLDLIBRARY':'-lpython3.3m','PARSER_HEADERS':'\\\\','SO':'.so','LIBRARY':'libpython3.3m.a','HAVE_FPATHCONF':1,'HAVE_TERMIOS_H':1,'HAVE_BROKEN_PTHREAD_SIGMASK':0,'AST_H':'Include/Python-ast.h','HAVE_GCC_UINT128_T':0,'HAVE_ACOSH':1,'MODOBJS':'Modules/_threadmodule.o Modules/signalmodule.o Modules/arraymodule.o 
Modules/mathmodule.o Modules/_math.o Modules/_struct.o Modules/timemodule.o Modules/_randommodule.o Modules/atexitmodule.o Modules/_elementtree.o Modules/_pickle.o Modules/_datetimemodule.o Modules/_bisectmodule.o Modules/_heapqmodule.o Modules/unicodedata.o Modules/fcntlmodule.o Modules/spwdmodule.o Modules/grpmodule.o Modules/selectmodule.o Modules/socketmodule.o Modules/_posixsubprocess.o Modules/md5module.o Modules/sha1module.o Modules/sha256module.o Modules/sha512module.o Modules/syslogmodule.o Modules/binascii.o Modules/zlibmodule.o Modules/pyexpat.o Modules/posixmodule.o Modules/errnomodule.o Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o Modules/_weakref.o Modules/_functoolsmodule.o Modules/operator.o Modules/_collectionsmodule.o Modules/itertoolsmodule.o Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o Modules/textio.o Modules/stringio.o Modules/zipimport.o Modules/faulthandler.o Modules/symtablemodule.o Modules/xxsubtype.o','AST_C':'Python/Python-ast.c','HAVE_SYS_NDIR_H':0,'DESTDIRS':'/usr /usr/lib /usr/lib/python3.3 /usr/lib/python3.3/lib-dynload','HAVE_SIGNAL_H':1,'PACKAGE_TARNAME':0,'HAVE_GETPRIORITY':1,'INCLUDEDIR':'/usr/include','HAVE_INTTYPES_H':1,'SIGNAL_OBJS':'','HAVE_READV':1,'HAVE_SETHOSTNAME':1,'MODLIBS':'-lrt -lexpat -L/usr/lib -lz -lexpat','CC':'i686-linux-gnu-gcc -pthread','HAVE_LCHMOD':0,'SIZEOF_UINTPTR_T':4,'LIBPC':'/usr/lib/i386-linux-gnu/pkgconfig','BYTESTR_DEPS':'\\\\','HAVE_MKDIRAT':1,'LIBPL':'/usr/lib/python3.3/config-3.3m-i386-linux-gnu','HAVE_SHADOW_H':1,'HAVE_SYS_EVENT_H':0,'INSTALL':'/usr/bin/install -c','HAVE_GCC_ASM_FOR_X87':1,'HAVE_BROKEN_UNSETENV':0,'BASECPPFLAGS':'','DOUBLE_IS_BIG_ENDIAN_IEEE754':0,'HAVE_STRUCT_STAT_ST_RDEV':1,'HAVE_SEM_UNLINK':1,'BUILDPYTHON':'python','HAVE_RL_CATCH_SIGNAL':1,'HAVE_DECL_TZNAME':0,'RESSRCDIR':'Mac/Resources/framework','HAVE_PTHREAD_SIGMASK':1,'HAVE_UTIMES':1,'DISTDIRS':'Include Lib Misc Ext-dummy','HAVE_FDATASYNC':1,'HAVE_USABLE_WCHAR_T':0,'PY_FORMAT_SIZE_T':'\"z\"','HAVE_SCHED_SETSCHEDULER':1,'VA_LIST_IS_ARRAY':0,'HAVE_LINUX_NETLINK_H':1,'HAVE_SETREGID':1,'HAVE_STROPTS_H':1,'LDVERSION':'3.3m','abs_builddir':'/build/buildd/python3.3-3.3.1/build-shared','SITEPATH':'','HAVE_GETHOSTBYNAME':0,'HAVE_SIGPENDING':1,'HAVE_KQUEUE':0,'HAVE_SYNC':1,'HAVE_GETSID':1,'HAVE_ROUND':1,'HAVE_STRFTIME':1,'AST_H_DIR':'Include','HAVE_PIPE2':1,'AST_C_DIR':'Python','TESTPYTHONOPTS':'','HAVE_DEV_PTC':0,'GETTIMEOFDAY_NO_TZ':0,'HAVE_NET_IF_H':1,'HAVE_SENDFILE':1,'HAVE_SETPGRP':1,'HAVE_SEM_GETVALUE':1,'CONFIGURE_LDFLAGS':'-Wl,-Bsymbolic-functions -Wl,-z,relro','DLLLIBRARY':'','PYTHON_FOR_BUILD':'./python -E','SETPGRP_HAVE_ARG':0,'HAVE_INET_ATON':1,'INSTALL_SHARED':'/usr/bin/install -c -m 555','WITH_DOC_STRINGS':1,'OPCODETARGETS_H':'\\\\','HAVE_INITGROUPS':1,'HAVE_LINKAT':1,'BASEMODLIBS':'','SGI_ABI':'','HAVE_SCHED_SETPARAM':1,'OPT':'-DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes','HAVE_POSIX_FADVISE':1,'datarootdir':'/usr/share','HAVE_MEMRCHR':1,'HGTAG':'','HAVE_MEMMOVE':1,'HAVE_GETRESUID':1,'DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754':0,'HAVE_LSTAT':1,'AR':'ar','HAVE_WAIT4':1,'HAVE_SYS_MODEM_H':0,'INSTSONAME':'libpython3.3m.so.1.0','HAVE_SYS_STATVFS_H':1,'HAVE_LGAMMA':1,'HAVE_PROTOTYPES':1,'HAVE_SYS_UIO_H':1,'MAJOR_IN_MKDEV':0,'QUICKTESTOPTS':'-x test_subprocess test_io test_lib2to3 
\\\\','HAVE_SYS_DEVPOLL_H':0,'HAVE_CHFLAGS':0,'HAVE_FSYNC':1,'HAVE_FCHMOD':1,'INCLUDEPY':'/usr/include/python3.3m','HAVE_SEM_TIMEDWAIT':1,'LDLIBRARYDIR':'','HAVE_STRUCT_TM_TM_ZONE':1,'HAVE_CURSES_H':1,'TIME_WITH_SYS_TIME':1,'HAVE_DUP2':1,'ENABLE_IPV6':1,'WITH_VALGRIND':0,'HAVE_SETITIMER':1,'THREADOBJ':'Python/thread.o','LOCALMODLIBS':'-lrt -lexpat -L/usr/lib -lz -lexpat','HAVE_MEMORY_H':1,'HAVE_GETITIMER':1,'HAVE_C99_BOOL':1,'INSTALL_DATA':'/usr/bin/install -c -m 644','PGEN':'Parser/pgen','HAVE_GRP_H':1,'HAVE_WCSFTIME':1,'AIX_GENUINE_CPLUSPLUS':0,'HAVE_LIBINTL_H':1,'SHELL':'/bin/sh','HAVE_UNISTD_H':1,'EXTRATESTOPTS':'','HAVE_EXECV':1,'HAVE_FSEEK64':0,'MVWDELCH_IS_EXPRESSION':1,'DESTSHARED':'/usr/lib/python3.3/lib-dynload','OPCODETARGETGEN':'\\\\','LIBDEST':'/usr/lib/python3.3','CCSHARED':'-fPIC','HAVE_EXPM1':1,'HAVE_DLFCN_H':1,'exec_prefix':'/usr','HAVE_READLINK':1,'WINDOW_HAS_FLAGS':1,'HAVE_FTELL64':0,'HAVE_STRLCPY':0,'MACOSX_DEPLOYMENT_TARGET':'','HAVE_SYS_SYSCALL_H':1,'DESTLIB':'/usr/lib/python3.3','LDSHARED':'i686-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-Bsymbolic-functions -Wl,-z,relro -Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ','HGVERSION':'','PYTHON_HEADERS':'\\\\','HAVE_STRINGS_H':1,'DOUBLE_IS_LITTLE_ENDIAN_IEEE754':1,'HAVE_POSIX_FALLOCATE':1,'HAVE_DIRFD':1,'HAVE_LOG2':1,'HAVE_GETPID':1,'HAVE_ALARM':1,'MACHDEP_OBJS':'','HAVE_SPAWN_H':1,'HAVE_FORK':1,'HAVE_SETRESGID':1,'HAVE_FCHMODAT':1,'HAVE_CLOCK_GETRES':1,'MACHDEPPATH':':plat-i386-linux-gnu','STDC_HEADERS':1,'HAVE_SETPRIORITY':1,'LIBC':'','HAVE_SYS_EPOLL_H':1,'HAVE_SYS_UTSNAME_H':1,'HAVE_PUTENV':1,'HAVE_CURSES_RESIZE_TERM':1,'HAVE_FUTIMESAT':1,'WITH_DYLD':0,'INSTALL_PROGRAM':'/usr/bin/install -c','LIBS':'-lpthread -ldl -lutil','HAVE_TRUNCATE':1,'TESTOPTS':'','PROFILE_TASK':'../Tools/pybench/pybench.py -n 2 --with-gc --with-syscheck','HAVE_CURSES_RESIZETERM':1,'ABIFLAGS':'m','HAVE_GETGROUPLIST':1,'OBJECT_OBJS':'\\\\','HAVE_MKNODAT':1,'HAVE_ST_BLOCKS':1,'HAVE_STRUCT_STAT_ST_GEN':0,'SYS_SELECT_WITH_SYS_TIME':1,'SHLIBS':'-lpthread -ldl -lutil','HAVE_GETGROUPS':1,'MODULE_OBJS':'\\\\','PYTHONFRAMEWORKDIR':'no-framework','HAVE_FCNTL_H':1,'HAVE_LINK':1,'HAVE_SIGWAIT':1,'HAVE_GAMMA':1,'HAVE_SYS_LOCK_H':0,'HAVE_FORKPTY':1,'HAVE_SOCKADDR_SA_LEN':0,'HAVE_TEMPNAM':1,'HAVE_STRUCT_STAT_ST_BLKSIZE':1,'HAVE_MKFIFOAT':1,'HAVE_SIGWAITINFO':1,'HAVE_FTIME':1,'HAVE_EPOLL':1,'HAVE_SYS_SOCKET_H':1,'HAVE_LARGEFILE_SUPPORT':1,'CONFIGURE_CFLAGS':'-g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security','HAVE_PTHREAD_DESTRUCTOR':0,'CONFIGURE_CPPFLAGS':'-D_FORTIFY_SOURCE=2','HAVE_SYMLINK':1,'HAVE_LONG_LONG':1,'HAVE_IEEEFP_H':0,'LIBDIR':'/usr/lib','HAVE_PTHREAD_KILL':1,'TESTPATH':'','HAVE_STRDUP':1,'POBJS':'\\\\','NO_AS_NEEDED':'-Wl,--no-as-needed','HAVE_LONG_DOUBLE':1,'HGBRANCH':'','DISTFILES':'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in','PTHREAD_SYSTEM_SCHED_SUPPORTED':1,'HAVE_FACCESSAT':1,'AST_ASDL':'../Parser/Python.asdl','CPPFLAGS':'-I. 
-IInclude -I../Include -D_FORTIFY_SOURCE=2','HAVE_MKTIME':1,'HAVE_NDIR_H':0,'PY_CFLAGS':'-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ','LIBOBJDIR':'Python/','HAVE_LINUX_CAN_RAW_H':1,'HAVE_GETHOSTBYNAME_R_3_ARG':0,'PACKAGE_STRING':0,'GNULD':'yes','LOG1P_DROPS_ZERO_SIGN':0,'HAVE_FTRUNCATE':1,'WITH_LIBINTL':0,'HAVE_MREMAP':1,'HAVE_DECL_ISNAN':1,'HAVE_KILLPG':1,'SIZEOF_LONG':4,'HAVE_DECL_ISFINITE':1,'HAVE_IPA_PURE_CONST_BUG':0,'WITH_PYMALLOC':1,'abs_srcdir':'/build/buildd/python3.3-3.3.1/build-shared/..','HAVE_FCHDIR':1,'HAVE_BROKEN_POSIX_SEMAPHORES':0,'AC_APPLE_UNIVERSAL_BUILD':0,'PGENSRCS':'\\\\ \\\\','DIRMODE':755,'HAVE_GETHOSTBYNAME_R':1,'HAVE_LCHFLAGS':0,'HAVE_SYS_PARAM_H':1,'SIZEOF_LONG_DOUBLE':12,'CONFIG_ARGS':\"'--enable-shared' '--prefix=/usr' '--enable-ipv6' '--enable-loadable-sqlite-extensions' '--with-dbmliborder=bdb:gdbm' '--with-computed-gotos' '--with-system-expat' '--with-system-ffi' '--with-fpectl' 'CC=i686-linux-gnu-gcc' 'CFLAGS=-g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ' 'LDFLAGS=-Wl,-Bsymbolic-functions -Wl,-z,relro' 'CPPFLAGS=-D_FORTIFY_SOURCE=2'\",'HAVE_SCHED_H':1,'HAVE_KILL':1}\n\n", []], "struct": [".py", "__all__=[\n\n'calcsize','pack','pack_into','unpack','unpack_from',\n'iter_unpack',\n\n\n'Struct',\n\n\n'error'\n]\n\nfrom _struct import *\nfrom _struct import _clearcache\nfrom _struct import __doc__\n", ["_struct"]], "sre_parse": [".py", "import warnings\nwarnings.warn(f\"module {__name__ !r} is deprecated\",\nDeprecationWarning,\nstacklevel=2)\n\nfrom re import _parser as _\nglobals().update({k:v for k,v in vars(_).items()if k[:2]!='__'})\n", ["re", "warnings"]], "sys": [".py", "\nfrom _sys import *\nimport _sys\n\n_getframe=_sys._getframe\n\nclass _dataclass(tuple):\n\n def __init__(self,**kwargs):\n self.keys=list(kwargs)\n self.__dict__.update(kwargs)\n \n def __getitem__(self,key):\n if isinstance(key,int)and 0 <=key <=len(self.keys):\n return self.__dict__[self.keys[key]]\n raise KeyError(key)\n \n def __iter__(self):\n return(self.__dict__[key]for key in self.keys)\n \n def __len__(self):\n return len(self.keys)\n \n def __repr__(self):\n s=', '.join(f'{k}={self.__dict__[k]!r}'for k in self.keys)\n return f'sys.{self.__class__.__name__}({s})'\n \n \ndef make_dataclass(name,bases=None):\n bases=[_dataclass]if bases is None else[*bases,_dataclass]\n cls=type(name,bases,{})\n return cls\n \n \n__breakpointhook__=breakpointhook\n\nabiflags=0\n\ndef audit(event,*args):\n ''\n pass\n \nbrython_debug_mode=__BRYTHON__.get_option('debug')\n\nbase_exec_prefix=__BRYTHON__.brython_path\n\nbase_prefix=__BRYTHON__.brython_path\n\nbuiltin_module_names=__BRYTHON__.builtin_module_names\n\nbyteorder='little'\n\ncopyright=\"\"\"Copyright (c) 2001-2023 Python Software Foundation.\nAll Rights Reserved.\n\nCopyright (c) 2000 BeOpen.com.\nAll Rights Reserved.\n\nCopyright (c) 1995-2001 Corporation for National Research Initiatives.\nAll Rights Reserved.\n\nCopyright (c) 1991-1995 Stichting Mathematisch Centrum, Amsterdam.\nAll Rights Reserved.\"\"\"\n\ndont_write_bytecode=True\n\nexec_prefix=__BRYTHON__.brython_path\n\nargv=orig_argv=[__BRYTHON__.script_path]+list(__BRYTHON__.get_option('args'))\n\ndef displayhook(value):\n if value is not None:\n stdout.write(repr(value))\n \n__displayhook__=displayhook\n\ndef exit(i=None):\n raise SystemExit('')\n 
\nflags=make_dataclass('flags')(\ndebug=0,\ninspect=0,\ninteractive=0,\noptimize=0,\ndont_write_bytecode=0,\nno_user_site=0,\nno_site=0,\nignore_environment=0,\nverbose=0,\nbytes_warning=0,\nquiet=0,\nhash_randomization=1,\nisolated=0,\ndev_mode=False,\nutf8_mode=0,\nwarn_default_encoding=0\n)\n\ndef getfilesystemencoding(*args,**kw):\n ''\n\n \n return 'utf-8'\n \ndef getfilesystemencodeerrors():\n return \"utf-8\"\n \ndef intern(string):\n return string\n \nint_info=make_dataclass('int_info')(\nbits_per_digit=30,\nsizeof_digit=4,\ndefault_max_str_digits=__BRYTHON__.int_max_str_digits,\nstr_digits_check_threshold=__BRYTHON__.str_digits_check_threshold)\n\ndef get_int_max_str_digits():\n return __BRYTHON__.int_max_str_digits\n \ndef set_int_max_str_digits(value):\n try:\n value=int(value)\n except:\n raise ValueError(f\"'{value.__class__.__name__}' object \"\n \"cannot be interpreted as an integer\")\n if value !=0 and value =other\n \n return NotImplemented\n \n def __gt__(self,other):\n if isinstance(other,tuple):\n return(self.major,self.minor,self.micro)>other\n \n return NotImplemented\n \n def __le__(self,other):\n if isinstance(other,tuple):\n return(self.major,self.minor,self.micro)<=other\n \n return NotImplemented\n \n def __lt__(self,other):\n if isinstance(other,tuple):\n return(self.major,self.minor,self.micro)self.n:\n raise StopIteration\n if not self.zero:\n self.zero=True\n return tuple(self.pool[i]for i in self.indices)\n else:\n try:\n for i in reversed(range(self.r)):\n if self.indices[i]!=i+self.n -self.r:\n break\n self.indices[i]+=1\n for j in range(i+1,self.r):\n self.indices[j]=self.indices[j -1]+1\n return tuple(self.pool[i]for i in self.indices)\n except:\n raise StopIteration\n \nclass combinations_with_replacement:\n def __init__(self,iterable,r):\n self.pool=tuple(iterable)\n self.n=len(self.pool)\n self.r=r\n self.indices=[0]*self.r\n self.zero=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n if not self.n and self.r:\n raise StopIteration\n if not self.zero:\n self.zero=True\n return tuple(self.pool[i]for i in self.indices)\n else:\n try:\n for i in reversed(range(self.r)):\n if self.indices[i]!=self.n -1:\n break\n self.indices[i:]=[self.indices[i]+1]*(self.r -i)\n return tuple(self.pool[i]for i in self.indices)\n except:\n raise StopIteration\n \n \n \nclass compress:\n def __init__(self,data,selectors):\n self.data=iter(data)\n self.selectors=iter(selectors)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n while True:\n next_item=next(self.data)\n next_selector=next(self.selectors)\n if bool(next_selector):\n return next_item\n \n \n \n \nclass count:\n ''\n\n\n\n \n def __init__(self,start=0,step=1):\n if not isinstance(start,(int,float)):\n raise TypeError('a number is required')\n self.times=start -step\n self.step=step\n \n def __iter__(self):\n return self\n \n def __next__(self):\n self.times +=self.step\n return self.times\n \n def __repr__(self):\n return 'count(%d)'%(self.times+self.step)\n \n \n \nclass cycle:\n def __init__(self,iterable):\n self._cur_iter=iter(iterable)\n self._saved=[]\n self._must_save=True\n \n def __iter__(self):\n return self\n \n def __next__(self):\n try:\n next_elt=next(self._cur_iter)\n if self._must_save:\n self._saved.append(next_elt)\n except StopIteration:\n self._cur_iter=iter(self._saved)\n next_elt=next(self._cur_iter)\n self._must_save=False\n return next_elt\n \n \n \nclass dropwhile:\n def __init__(self,predicate,iterable):\n self._predicate=predicate\n 
self._iter=iter(iterable)\n self._dropped=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n value=next(self._iter)\n if self._dropped:\n return value\n while self._predicate(value):\n value=next(self._iter)\n self._dropped=True\n return value\n \n \n \nclass filterfalse:\n def __init__(self,predicate,iterable):\n \n self._iter=iter(iterable)\n if predicate is None:\n self._predicate=bool\n else:\n self._predicate=predicate\n \n def __iter__(self):\n return self\n def __next__(self):\n next_elt=next(self._iter)\n while True:\n if not self._predicate(next_elt):\n return next_elt\n next_elt=next(self._iter)\n \nclass groupby:\n\n\n def __init__(self,iterable,key=None):\n if key is None:\n key=lambda x:x\n self.keyfunc=key\n self.it=iter(iterable)\n self.tgtkey=self.currkey=self.currvalue=object()\n def __iter__(self):\n return self\n def __next__(self):\n while self.currkey ==self.tgtkey:\n self.currvalue=next(self.it)\n self.currkey=self.keyfunc(self.currvalue)\n self.tgtkey=self.currkey\n return(self.currkey,self._grouper(self.tgtkey))\n def _grouper(self,tgtkey):\n while self.currkey ==tgtkey:\n yield self.currvalue\n self.currvalue=next(self.it)\n self.currkey=self.keyfunc(self.currvalue)\n \n \n \nclass islice:\n def __init__(self,iterable,*args):\n s=slice(*args)\n self.start,self.stop,self.step=s.start or 0,s.stop,s.step\n if not isinstance(self.start,int):\n raise ValueError(\"Start argument must be an integer\")\n if self.stop !=None and not isinstance(self.stop,int):\n raise ValueError(\"Stop argument must be an integer or None\")\n if self.step is None:\n self.step=1\n if self.start <0 or(self.stop !=None and self.stop <0\n )or self.step <=0:\n raise ValueError(\"indices for islice() must be positive\")\n self.it=iter(iterable)\n self.donext=None\n self.cnt=0\n \n def __iter__(self):\n return self\n \n def __next__(self):\n nextindex=self.start\n if self.stop !=None and nextindex >=self.stop:\n raise StopIteration\n while self.cnt <=nextindex:\n nextitem=next(self.it)\n self.cnt +=1\n self.start +=self.step\n return nextitem\n \nclass permutations:\n def __init__(self,iterable,r=None):\n self.pool=tuple(iterable)\n self.n=len(self.pool)\n self.r=self.n if r is None else r\n self.indices=list(range(self.n))\n self.cycles=list(range(self.n,self.n -self.r,-1))\n self.zero=False\n self.stop=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n indices=self.indices\n if self.r >self.n:\n raise StopIteration\n if not self.zero:\n self.zero=True\n return tuple(self.pool[i]for i in indices[:self.r])\n \n i=self.r -1\n while i >=0:\n j=self.cycles[i]-1\n if j >0:\n self.cycles[i]=j\n indices[i],indices[-j]=indices[-j],indices[i]\n return tuple(self.pool[i]for i in indices[:self.r])\n self.cycles[i]=len(indices)-i\n n1=len(indices)-1\n assert n1 >=0\n num=indices[i]\n for k in range(i,n1):\n indices[k]=indices[k+1]\n indices[n1]=num\n i -=1\n raise StopIteration\n \n \ndef product(*args,repeat=1):\n\n\n pools=[tuple(pool)for pool in args]*repeat\n result=[[]]\n for pool in pools:\n result=[x+[y]for x in result for y in pool]\n for prod in result:\n yield tuple(prod)\n \n \n \n \n \n \n \n \nclass _product:\n def __init__(self,*args,**kw):\n if len(kw)>1:\n raise TypeError(\"product() takes at most 1 argument (%d given)\"%\n len(kw))\n self.repeat=kw.get('repeat',1)\n if not isinstance(self.repeat,int):\n raise TypeError(\"integer argument expected, got %s\"%\n type(self.repeat))\n self.gears=[x for x in args]*self.repeat\n 
self.num_gears=len(self.gears)\n \n self.indicies=[(0,len(self.gears[x]))\n for x in range(0,self.num_gears)]\n self.cont=True\n self.zero=False\n \n def roll_gears(self):\n \n \n \n should_carry=True\n for n in range(0,self.num_gears):\n nth_gear=self.num_gears -n -1\n if should_carry:\n count,lim=self.indicies[nth_gear]\n count +=1\n if count ==lim and nth_gear ==0:\n self.cont=False\n if count ==lim:\n should_carry=True\n count=0\n else:\n should_carry=False\n self.indicies[nth_gear]=(count,lim)\n else:\n break\n \n def __iter__(self):\n return self\n \n def __next__(self):\n if self.zero:\n raise StopIteration\n if self.repeat >0:\n if not self.cont:\n raise StopIteration\n l=[]\n for x in range(0,self.num_gears):\n index,limit=self.indicies[x]\n print('itertools 353',self.gears,x,index)\n l.append(self.gears[x][index])\n self.roll_gears()\n return tuple(l)\n elif self.repeat ==0:\n self.zero=True\n return()\n else:\n raise ValueError(\"repeat argument cannot be negative\")\n \n \n \nclass repeat:\n def __init__(self,obj,times=None):\n self._obj=obj\n if times is not None:\n range(times)\n if times <0:\n times=0\n self._times=times\n \n def __iter__(self):\n return self\n \n def __next__(self):\n \n if self._times is not None:\n if self._times <=0:\n raise StopIteration()\n self._times -=1\n return self._obj\n \n def __repr__(self):\n if self._times is not None:\n return 'repeat(%r, %r)'%(self._obj,self._times)\n else:\n return 'repeat(%r)'%(self._obj,)\n \n def __len__(self):\n if self._times ==-1 or self._times is None:\n raise TypeError(\"len() of uniszed object\")\n return self._times\n \n \n \nclass starmap(object):\n def __init__(self,function,iterable):\n self._func=function\n self._iter=iter(iterable)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n t=next(self._iter)\n return self._func(*t)\n \n \n \nclass takewhile(object):\n def __init__(self,predicate,iterable):\n self._predicate=predicate\n self._iter=iter(iterable)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n value=next(self._iter)\n if not self._predicate(value):\n raise StopIteration()\n return value\n \n \n \nclass TeeData(object):\n def __init__(self,iterator):\n self.data=[]\n self._iter=iterator\n \n def __getitem__(self,i):\n \n while i >=len(self.data):\n self.data.append(next(self._iter))\n return self.data[i]\n \n \nclass TeeObject(object):\n def __init__(self,iterable=None,tee_data=None):\n if tee_data:\n self.tee_data=tee_data\n self.pos=0\n \n elif isinstance(iterable,TeeObject):\n self.tee_data=iterable.tee_data\n self.pos=iterable.pos\n else:\n self.tee_data=TeeData(iter(iterable))\n self.pos=0\n \n def __next__(self):\n data=self.tee_data[self.pos]\n self.pos +=1\n return data\n \n def __iter__(self):\n return self\n \n \ndef tee(iterable,n=2):\n if isinstance(iterable,TeeObject):\n return tuple([iterable]+\n [TeeObject(tee_data=iterable.tee_data)for i in range(n -1)])\n tee_data=TeeData(iter(iterable))\n return tuple([TeeObject(tee_data=tee_data)for i in range(n)])\n \nclass zip_longest:\n def __init__(self,*args,fillvalue=None):\n self.args=[iter(arg)for arg in args]\n self.fillvalue=fillvalue\n self.units=len(args)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n temp=[]\n nb=0\n for i in range(self.units):\n try:\n temp.append(next(self.args[i]))\n nb +=1\n except StopIteration:\n temp.append(self.fillvalue)\n if nb ==0:\n raise StopIteration\n return tuple(temp)\n", ["operator"]], "encodings.mac_romanian": [".py", "''\n\n\n\nimport 
codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-romanian',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\u0102'\n'\\u0218'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u03c0'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\u0103'\n'\\u0219'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\xff'\n'\\u0178'\n'\\u2044'\n'\\u20ac'\n'\\u2039'\n'\\u203a'\n'\\u021a'\n'\\u021b'\n'\\u2021'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\xca'\n'\\xc1'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\uf8ff'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\u0131'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u02d8'\n'\\u02d9'\n'\\u02da'\n'\\xb8'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.mac_farsi": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return 
codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-farsi',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xa0'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\u06ba'\n'\\xab'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\u2026'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xbb'\n'\\xf4'\n'\\xf6'\n'\\xf7'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n' '\n'!'\n'\"'\n'#'\n'$'\n'\\u066a'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n'\\u060c'\n'-'\n'.'\n'/'\n'\\u06f0'\n'\\u06f1'\n'\\u06f2'\n'\\u06f3'\n'\\u06f4'\n'\\u06f5'\n'\\u06f6'\n'\\u06f7'\n'\\u06f8'\n'\\u06f9'\n':'\n'\\u061b'\n'<'\n'='\n'>'\n'\\u061f'\n'\\u274a'\n'\\u0621'\n'\\u0622'\n'\\u0623'\n'\\u0624'\n'\\u0625'\n'\\u0626'\n'\\u0627'\n'\\u0628'\n'\\u0629'\n'\\u062a'\n'\\u062b'\n'\\u062c'\n'\\u062d'\n'\\u062e'\n'\\u062f'\n'\\u0630'\n'\\u0631'\n'\\u0632'\n'\\u0633'\n'\\u0634'\n'\\u0635'\n'\\u0636'\n'\\u0637'\n'\\u0638'\n'\\u0639'\n'\\u063a'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'\\u0640'\n'\\u0641'\n'\\u0642'\n'\\u0643'\n'\\u0644'\n'\\u0645'\n'\\u0646'\n'\\u0647'\n'\\u0648'\n'\\u0649'\n'\\u064a'\n'\\u064b'\n'\\u064c'\n'\\u064d'\n'\\u064e'\n'\\u064f'\n'\\u0650'\n'\\u0651'\n'\\u0652'\n'\\u067e'\n'\\u0679'\n'\\u0686'\n'\\u06d5'\n'\\u06a4'\n'\\u06af'\n'\\u0688'\n'\\u0691'\n'{'\n'|'\n'}'\n'\\u0698'\n'\\u06d2'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.idna": [".py", "\n\nimport stringprep,re,codecs\nfrom unicodedata import ucd_3_2_0 as unicodedata\n\n\ndots=re.compile(\"[\\u002E\\u3002\\uFF0E\\uFF61]\")\n\n\nace_prefix=b\"xn--\"\nsace_prefix=\"xn--\"\n\n\ndef nameprep(label):\n\n newlabel=[]\n for c in label:\n if stringprep.in_table_b1(c):\n \n continue\n newlabel.append(stringprep.map_table_b2(c))\n label=\"\".join(newlabel)\n \n \n label=unicodedata.normalize(\"NFKC\",label)\n \n \n for c in label:\n if stringprep.in_table_c12(c)or\\\n stringprep.in_table_c22(c)or\\\n stringprep.in_table_c3(c)or\\\n stringprep.in_table_c4(c)or\\\n stringprep.in_table_c5(c)or\\\n stringprep.in_table_c6(c)or\\\n stringprep.in_table_c7(c)or\\\n stringprep.in_table_c8(c)or\\\n stringprep.in_table_c9(c):\n raise UnicodeError(\"Invalid character %r\"%c)\n \n \n RandAL=[stringprep.in_table_d1(x)for x in label]\n 
for c in RandAL:\n if c:\n \n \n \n \n \n \n if any(stringprep.in_table_d2(x)for x in label):\n raise UnicodeError(\"Violation of BIDI requirement 2\")\n \n \n \n \n \n if not RandAL[0]or not RandAL[-1]:\n raise UnicodeError(\"Violation of BIDI requirement 3\")\n \n return label\n \ndef ToASCII(label):\n try:\n \n label=label.encode(\"ascii\")\n except UnicodeError:\n pass\n else:\n \n \n if 0 =64:\n raise UnicodeError(\"label too long\")\n return result,len(input)\n \n result=bytearray()\n labels=dots.split(input)\n if labels and not labels[-1]:\n trailing_dot=b'.'\n del labels[-1]\n else:\n trailing_dot=b''\n for label in labels:\n if result:\n \n result.extend(b'.')\n result.extend(ToASCII(label))\n return bytes(result+trailing_dot),len(input)\n \n def decode(self,input,errors='strict'):\n \n if errors !='strict':\n raise UnicodeError(\"Unsupported error handling \"+errors)\n \n if not input:\n return \"\",0\n \n \n if not isinstance(input,bytes):\n \n input=bytes(input)\n \n if ace_prefix not in input:\n \n try:\n return input.decode('ascii'),len(input)\n except UnicodeDecodeError:\n pass\n \n labels=input.split(b\".\")\n \n if labels and len(labels[-1])==0:\n trailing_dot='.'\n del labels[-1]\n else:\n trailing_dot=''\n \n result=[]\n for label in labels:\n result.append(ToUnicode(label))\n \n return \".\".join(result)+trailing_dot,len(input)\n \nclass IncrementalEncoder(codecs.BufferedIncrementalEncoder):\n def _buffer_encode(self,input,errors,final):\n if errors !='strict':\n \n raise UnicodeError(\"unsupported error handling \"+errors)\n \n if not input:\n return(b'',0)\n \n labels=dots.split(input)\n trailing_dot=b''\n if labels:\n if not labels[-1]:\n trailing_dot=b'.'\n del labels[-1]\n elif not final:\n \n del labels[-1]\n if labels:\n trailing_dot=b'.'\n \n result=bytearray()\n size=0\n for label in labels:\n if size:\n \n result.extend(b'.')\n size +=1\n result.extend(ToASCII(label))\n size +=len(label)\n \n result +=trailing_dot\n size +=len(trailing_dot)\n return(bytes(result),size)\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n def _buffer_decode(self,input,errors,final):\n if errors !='strict':\n raise UnicodeError(\"Unsupported error handling \"+errors)\n \n if not input:\n return(\"\",0)\n \n \n if isinstance(input,str):\n labels=dots.split(input)\n else:\n \n input=str(input,\"ascii\")\n labels=input.split(\".\")\n \n trailing_dot=''\n if labels:\n if not labels[-1]:\n trailing_dot='.'\n del labels[-1]\n elif not final:\n \n del labels[-1]\n if labels:\n trailing_dot='.'\n \n result=[]\n size=0\n for label in labels:\n result.append(ToUnicode(label))\n if size:\n size +=1\n size +=len(label)\n \n result=\".\".join(result)+trailing_dot\n size +=len(trailing_dot)\n return(result,size)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='idna',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs", "re", "stringprep", "unicodedata"]], "encodings.cp273": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass 
IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp273',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\xa0'\n'\\xe2'\n'{'\n'\\xe0'\n'\\xe1'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xf1'\n'\\xc4'\n'.'\n'<'\n'('\n'+'\n'!'\n'&'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xec'\n'~'\n'\\xdc'\n'$'\n'*'\n')'\n';'\n'^'\n'-'\n'/'\n'\\xc2'\n'['\n'\\xc0'\n'\\xc1'\n'\\xc3'\n'\\xc5'\n'\\xc7'\n'\\xd1'\n'\\xf6'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'`'\n':'\n'#'\n'\\xa7'\n\"'\"\n'='\n'\"'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\xf0'\n'\\xfd'\n'\\xfe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\xa4'\n'\\xb5'\n'\\xdf'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n'\\xd0'\n'\\xdd'\n'\\xde'\n'\\xae'\n'\\xa2'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'@'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xac'\n'|'\n'\\u203e'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'\\xe4'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'\\xa6'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'\\xfc'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'}'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\xd6'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'\\\\'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n']'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.punycode": [".py", "''\n\n\n\n\nimport codecs\n\n\n\ndef segregate(str):\n ''\n base=bytearray()\n extended=set()\n for c in str:\n if ord(c)<128:\n base.append(ord(c))\n else:\n extended.add(c)\n extended=sorted(extended)\n return bytes(base),extended\n \ndef selective_len(str,max):\n ''\n res=0\n for c in str:\n if ord(c)26:return 26\n return res\n \ndigits=b\"abcdefghijklmnopqrstuvwxyz0123456789\"\ndef generate_generalized_integer(N,bias):\n ''\n result=bytearray()\n j=0\n while 1:\n t=T(j,bias)\n if N 455:\n delta=delta //35\n divisions +=36\n bias=divisions+(36 *delta //(delta+38))\n return bias\n \n \ndef generate_integers(baselen,deltas):\n ''\n \n result=bytearray()\n bias=72\n for points,delta in enumerate(deltas):\n s=generate_generalized_integer(delta,bias)\n result.extend(s)\n 
bias=adapt(delta,points ==0,baselen+points+1)\n return bytes(result)\n \ndef punycode_encode(text):\n base,extended=segregate(text)\n deltas=insertion_unsort(text,extended)\n extended=generate_integers(len(base),deltas)\n if base:\n return base+b\"-\"+extended\n return extended\n \n \n \ndef decode_generalized_number(extended,extpos,bias,errors):\n ''\n result=0\n w=1\n j=0\n while 1:\n try:\n char=ord(extended[extpos])\n except IndexError:\n if errors ==\"strict\":\n raise UnicodeError(\"incomplete punicode string\")\n return extpos+1,None\n extpos +=1\n if 0x41 <=char <=0x5A:\n digit=char -0x41\n elif 0x30 <=char <=0x39:\n digit=char -22\n elif errors ==\"strict\":\n raise UnicodeError(\"Invalid extended code point '%s'\"\n %extended[extpos -1])\n else:\n return extpos,None\n t=T(j,bias)\n result +=digit *w\n if digit 0x10FFFF:\n if errors ==\"strict\":\n raise UnicodeError(\"Invalid character U+%x\"%char)\n char=ord('?')\n pos=pos %(len(base)+1)\n base=base[:pos]+chr(char)+base[pos:]\n bias=adapt(delta,(extpos ==0),len(base))\n extpos=newpos\n return base\n \ndef punycode_decode(text,errors):\n if isinstance(text,str):\n text=text.encode(\"ascii\")\n if isinstance(text,memoryview):\n text=bytes(text)\n pos=text.rfind(b\"-\")\n if pos ==-1:\n base=\"\"\n extended=str(text,\"ascii\").upper()\n else:\n base=str(text[:pos],\"ascii\",errors)\n extended=str(text[pos+1:],\"ascii\").upper()\n return insertion_sort(base,extended,errors)\n \n \n \nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n res=punycode_encode(input)\n return res,len(input)\n \n def decode(self,input,errors='strict'):\n if errors not in('strict','replace','ignore'):\n raise UnicodeError(\"Unsupported error handling \"+errors)\n res=punycode_decode(input,errors)\n return res,len(input)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return punycode_encode(input)\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n if self.errors not in('strict','replace','ignore'):\n raise UnicodeError(\"Unsupported error handling \"+self.errors)\n return punycode_decode(input,self.errors)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='punycode',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs"]], "encodings.raw_unicode_escape": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n\n\n encode=codecs.raw_unicode_escape_encode\n decode=codecs.raw_unicode_escape_decode\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.raw_unicode_escape_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n def _buffer_decode(self,input,errors,final):\n return codecs.raw_unicode_escape_decode(input,errors,final)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n def decode(self,input,errors='strict'):\n return codecs.raw_unicode_escape_decode(input,errors,False)\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='raw-unicode-escape',\n encode=Codec.encode,\n decode=Codec.decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n 
streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs"]], "encodings.utf_8": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nencode=codecs.utf_8_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_8_decode(input,errors,True)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.utf_8_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=codecs.utf_8_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=codecs.utf_8_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=codecs.utf_8_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-8',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.cp1252": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1252',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\u0160'\n'\\u2039'\n'\\u0152'\n'\\ufffe'\n'\\u017d'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\u0161'\n'\\u203a'\n'\\u0153'\n'\\ufffe'\n'\\u017e'\n'\\u0178'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xd0'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\xde'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xf0'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\xfe'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp869": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp869',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:None,\n0x0081:None,\n0x0082:None,\n0x0083:None,\n0x0084:None,\n0x0085:None,\n0x0086:0x0386,\n0x0087:None,\n0x0088:0x00b7,\n0x0089:0x00ac,\n0x008a:0x00a6,\n0x008b:0x2018,\n0x008c:0x2019,\n0x008d:0x0388,\n0x008e:0x2015,\n0x008f:0x0389,\n0x0090:0x038a,\n0x0091:0x03aa,\n0x0092:0x038c,\n0x0093:None,\n0x0094:None,\n0x0095:0x038e,\n0x0096:0x03ab,\n0x0097:0x00a9,\n0x0098:0x038f,\n0x0099:0x00b2,\n0x009a:0x00b3,\n0x009b:0x03ac,\n0x009c:0x00a3,\n0x009d:0x03ad,\n0x009e:0x03ae,\n0x009f:0x03af,\n0x00a0:0x03ca,\n0x00a1:0x0390,\n0x00a2:0x03cc,\n0x00a3:0x03cd,\n0x00a4:0x0391,\n0x00a5:0x0392,\n0x00a6:0x0393,\n0x00a7:0x0394,\n0x00a8:0x0395,\n0x00a9:0x0396,\n0x00aa:0x0397,\n0x00ab:0x00bd,\n0x00ac:0x0398,\n0x00ad:0x0399,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x039a,\n0x00b6:0x039b,\n0x00b7:0x039c,\n0x00b8:0x039d,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x039e,\n0x00be:0x039f,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x03a0,\n0x00c7:0x03a1,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x03a3,\n0x00d0:0x03a4,\n0x00d1:0x03a5,\n0x00d2:0x03a6,\n0x00d3:0x03a7,\n0x00d4:0x03a8,\n0x00d5:0x03a9,\n0x00d6:0x03b1,\n0x00d7:0x03b2,\n0x00d8:0x03b3,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x03b4,\n0x00de:0x03b5,\n0x00df:0x2580,\n0x00e0:0x03b6,\n0x00e1:0x03b7,\n0x00e2:0x03b8,\n0x00e3:0x03b9,\n0x00e4:0x03ba,\n0x00e5:0x03bb,\n0x00e6:0x03bc,\n0x00e7:0x03bd,\n0x00e8:0x03be,\n0x00e9:0x03bf,\n0x00ea:0x03c0,\n0x00eb:0x03c1,\n0x00ec:0x03c3,\n0x00ed:0x03c2,\n0x00ee:0x03c4,\n0x00ef:0x0384,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:0x03c5,\n0x00f3:0x03c6,\n0x00f4:0x03c7,\n0x00f5:0x00a7,\n0x00f6:0x03c8,\n0x00f7:0x0385,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x03c9,\n0x00fb:0x03cb,\n0x00fc:0x03b0,\n0x00fd:0x03ce,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0386'\n'\\ufffe'\n'\\xb7'\n'\\xac'\n'\\xa6'\n'\\u2018'\n'\\u2019'\n'\\u0388'\n'\\u2015'\n'\\u0389'\n'\\u038a'\n'\\u03aa'\n'\\u038c'\n'\\ufffe'\n'\\ufffe'\n'\\u038e'\n'\\u03ab'\n'\\xa9'\n'\\u038f'\n'\\xb2'\n'\\xb3'\n'\\u03ac'\n'\\xa3'\n'\\u03ad'\n'\\u03ae'\n'\\u03af'\n'\\u03ca'\n'\\u0390'\n'\\u03cc'\n'\\u03cd'\n'\\u0391'\n'\\u0392'\n'\\u0393'\n'\\u0394'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\xbd'\n'\\u0398'\n'\\u0399'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u039a'\n'\\u039b'\n'\\u039c'\n'\\u039d'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u039e'\n'\\u039f'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u03a0'\n'\\u03a1'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u03a3'\n'\\u03a4'\n'\\u03a5'\n'\\u03a6'\n'\\u03a7'\n'\\u03a8'\n'\\u03a9'\n'\\u03b1'\n'\\u03b2'\n'\\u03b3'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u03b4'\n'\\u03b5'\n'\\u2580'\n'\\u03b6'\n'\\u03b7'\n'\\u03b8'\n'\\u03b9'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03be'\n'\\u03bf'\n'\\u03c0'\n'\\u03c1'\n'\\u03c3'\n'\\u03c2'\n'\\u03c4'\n'\\u0384'\n'\\xad'\n'\\xb1'\n'\\u03c5'\n'\\u03c6'\n'\\u03c7'\n'\\xa7'\n'\\u03c8'\n'\\u0385'\n'\\xb0'\n'\\xa8'\n'\\u03c9'\n'\\u03cb'\n'\\u03b0'\n'\\u03ce'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x
0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a3:0x009c,\n0x00a6:0x008a,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00a9:0x0097,\n0x00ab:0x00ae,\n0x00ac:0x0089,\n0x00ad:0x00f0,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x0099,\n0x00b3:0x009a,\n0x00b7:0x0088,\n0x00bb:0x00af,\n0x00bd:0x00ab,\n0x0384:0x00ef,\n0x0385:0x00f7,\n0x0386:0x0086,\n0x0388:0x008d,\n0x0389:0x008f,\n0x038a:0x0090,\n0x038c:0x0092,\n0x038e:0x0095,\n0x038f:0x0098,\n0x0390:0x00a1,\n0x0391:0x00a4,\n0x0392:0x00a5,\n0x0393:0x00a6,\n0x0394:0x00a7,\n0x0395:0x00a8,\n0x0396:0x00a9,\n0x0397:0x00aa,\n0x0398:0x00ac,\n0x0399:0x00ad,\n0x039a:0x00b5,\n0x039b:0x00b6,\n0x039c:0x00b7,\n0x039d:0x00b8,\n0x039e:0x00bd,\n0x039f:0x00be,\n0x03a0:0x00c6,\n0x03a1:0x00c7,\n0x03a3:0x00cf,\n0x03a4:0x00d0,\n0x03a5:0x00d1,\n0x03a6:0x00d2,\n0x03a7:0x00d3,\n0x03a8:0x00d4,\n0x03a9:0x00d5,\n0x03aa:0x0091,\n0x03ab:0x0096,\n0x03ac:0x009b,\n0x03ad:0x009d,\n0x03ae:0x009e,\n0x03af:0x009f,\n0x03b0:0x00fc,\n0x03b1:0x00d6,\n0x03b2:0x00d7,\n0x03b3:0x00d8,\n0x03b4:0x00dd,\n0x03b5:0x00de,\n0x03b6:0x00e0,\n0x03b7:0x00e1,\n0x03b8:0x00e2,\n0x03b9:0x00e3,\n0x03ba:0x00e4,\n0x03bb:0x00e5,\n0x03bc:0x00e6,\n0x03bd:0x00e7,\n0x03be:0x00e8,\n0x03bf:0x00e9,\n0x03c0:0x00ea,\n0x03c1:0x00eb,\n0x03c2:0x00ed,\n0x03c3:0x00ec,\n0x03c4:0x00ee,\n0x03c5:0x00f2,\n0x03c6:0x00f3,\n0x03c7:0x00f4,\n0x03c8:0x00f6,\n0x03c9:0x00fa,\n0x03ca:0x00a0,\n0x03cb:0x00fb,\n0x03cc:0x00a2,\n0x03cd:0x00a3,\n0x03ce:0x00fd,\n0x2015:0x008e,\n0x2018:0x008b,\n0x2019:0x008c,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.iso8859_14": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-14',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u1e02'\n'\\u1e03'\n'\\xa3'\n'\\u010a'\n'\\u010b'\n'\\u1e0a'\n'\\xa7'\n'\\u1e80'\n'\\xa9'\n'\\u1e82'\n'\\u1e0b'\n'\\u1ef2'\n'\\xad'\n'\\xae'\n'\\u0178'\n'\\u1e1e'\n'\\u1e1f'\n'\\u0120'\n'\\u0121'\n'\\u1e40'\n'\\u1e41'\n'\\xb6'\n'\\u1e56'\n'\\u1e81'\n'\\u1e57'\n'\\u1e83'\n'\\u1e60'\n'\\u1ef3'\n'\\u1e84'\n'\\u1e85'\n'\\u1e61'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u0174'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\u1e6a'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\u0176'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\u0175'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\u1e6b'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\u0177'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_2": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-2',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0104'\n'\\u02d8'\n'\\u0141'\n'\\xa4'\n'\\u013d'\n'\\u015a'\n'\\xa7'\n'\\xa8'\n'\\u0160'\n'\\u015e'\n'\\u0164'\n'\\u0179'\n'\\xad'\n'\\u017d'\n'\\u017b'\n'\\xb0'\n'\\u0105'\n'\\u02db'\n'\\u0142'\n'\\xb4'\n'\\u013e'\n'\\u015b'\n'\\u02c7'\n'\\xb8'\n'\\u0161'\n'\\u015f'\n'\\u0165'\n'\\u017a'\n'\\u02dd'\n'\\u017e'\n'\\u017c'\n'\\u0154'\n'\\xc1'\n'\\xc2'\n'\\u0102'\n'\\xc4'\n'\\u0139'\n'\\u0106'\n'\\xc7'\n'\\u010c'\n'\\xc9'\n'\\u0118'\n'\\xcb'\n'\\u011a'\n'\\xcd'\n'\\xce'\n'\\u010e'\n'\\u0110'\n'\\u0143'\n'\\u0147'\n'\\xd3'\n'\\xd4'\n'\\u0150'\n'\\xd6'\n'\\xd7'\n'\\u0158'\n'\\u016e'\n'\\xda'\n'\\u0170'\n'\\xdc'\n'\\xdd'\n'\\u0162'\n'\\xdf'\n'\\u0155'\n'\\xe1'\n'\\xe2'\n'\\u0103'\n'\\xe4'\n'\\u013a'\n'\\u0107'\n'\\xe7'\n'\\u010d'\n'\\xe9'\n'\\u0119'\n'\\xeb'\n'\\u011b'\n'\\xed'\n'\\xee'\n'\\u010f'\n'\\u0111'\n'\\u0144'\n'\\u0148'\n'\\xf3'\n'\\xf4'\n'\\u0151'\n'\\xf6'\n'\\xf7'\n'\\u0159'\n'\\u016f'\n'\\xfa'\n'\\u0171'\n'\\xfc'\n'\\xfd'\n'\\u0163'\n'\\u02d9'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.mac_arabic": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-arabic',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c4,\n0x0081:0x00a0,\n0x0082:0x00c7,\n0x0083:0x00c9,\n0x0084:0x00d1,\n0x0085:0x00d6,\n0x0086:0x00dc,\n0x0087:0x00e1,\n0x0088:0x00e0,\n0x0089:0x00e2,\n0x008a:0x00e4,\n0x008b:0x06ba,\n0x008c:0x00ab,\n0x008d:0x00e7,\n0x008e:0x00e9,\n0x008f:0x00e8,\n0x0090:0x00ea,\n0x0091:0x00eb,\n0x0092:0x00ed,\n0x0093:0x2026,\n0x0094:0x00ee,\n0x0095:0x00ef,\n0x0096:0x00f1,\n0x0097:0x00f3,\n0x0098:0x00bb,\n0x0099:0x00f4,\n0x009a:0x00f6,\n0x009b:0x00f7,\n0x009c:0x00fa,\n0x009d:0x00f9,\n0x009e:0x00fb,\n0x009f:0x00fc,\n0x00a0:0x0020,\n0x00a1:0x0021,\n0x00a2:0x0022,\n0x00a3:0x0023,\n0x00a4:0x0024,\n0x00a5:0x066a,\n0x00a6:0x0026,\n0x00a7:0x0027,\n0x00a8:0x0028,\n0x00a9:0x0029,\n0x00aa:0x002a,\n0x00ab:0x002b,\n0x00ac:0x060c,\n0x00ad:0x002d,\n0x00ae:0x002e,\n0x00af:0x002f,\n0x00b0:0x0660,\n0x00b1:0x0661,\n0x00b2:0x0662,\n0x00b3:0x0663,\n0x00b4:0x0664,\n0x00b5:0x0665,\n0x00b6:0x0666,\n0x00b7:0x0667,\n0x00b8:0x0668,\n0x00b9:0x0669,\n0x00ba:0x003a,\n0x00bb:0x061b,\n0x00bc:0x003c,\n0x00bd:0x003d,\n0x00be:0x003e,\n0x00bf:0x061f,\n0x00c0:0x274a,\n0x00c1:0x0621,\n0x00c2:0x0622,\n0x00c3:0x0623,\n0x00c4:0x0624,\n0x00c5:0x0625,\n0x00c6:0x0626,\n0x00c7:0x0627,\n0x00c8:0x0628,\n0x00c9:0x0629,\n0x00ca:0x062a,\n0x00cb:0x062b,\n0x00cc:0x062c,\n0x00cd:0x062d,\n0x00ce:0x062e,\n0x00cf:0x062f,\n0x00d0:0x0630,\n0x00d1:0x0631,\n0x00d2:0x0632,\n0x00d3:0x0633,\n0x00d4:0x0634,\n0x00d5:0x0635,\n0x00d6:0x0636,\n0x00d7:0x0637,\n0x00d8:0x0638,\n0x00d9:0x0639,\n0x00da:0x063a,\n0x00db:0x005b,\n0x00dc:0x005c,\n0x00dd:0x005d,\n0x00de:0x005e,\n0x00df:0x005f,\n0x00e0:0x0640,\n0x00e1:0x0641,\n0x00e2:0x0642,\n0x00e3:0x0643,\n0x00e4:0x0644,\n0x00e5:0x0645,\n0x00e6:0x0646,\n0x00e7:0x0647,\n0x00e8:0x0648,\n0x00e9:0x0649,\n0x00ea:0x064a,\n0x00eb:0x064b,\n0x00ec:0x064c,\n0x00ed:0x064d,\n0x00ee:0x064e,\n0x00ef:0x064f,\n0x00f0:0x0650,\n0x00f1:0x0651,\n0x00f2:0x0652,\n0x00f3:0x067e,\n0x00f4:0x0679,\n0x00f5:0x0686,\n0x00f6:0x06d5,\n0x00f7:0x06a4,\n0x00f8:0x06af,\n0x00f9:0x0688,\n0x00fa:0x0691,\n0x00fb:0x007b,\n0x00fc:0x007c,\n0x00fd:0x007d,\n0x00fe:0x0698,\n0x00ff:0x06d2,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xa0'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\u06ba'\n'\\xab'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\u2026'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xbb'\n'\\xf4'\n'\\xf6'\n'\\xf7'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'\\u066a'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n'\\u060c'\n'-'\n'.'\n'/'\n'\\u0660'\n'\\u0661'\n'\\u0662'\n'\\u0663'\n'\\u0664'\n'\\u0665'\n'\\u0666'\n'\\u0667'\n'\\u0668'\n'\\u0669'\n':'\n'\\u061b'\n'<'\n'='\n'>'\n'\\u061f'\n'\\u274a'\n'\\u0621'\n'\\u0622'\n'\\u0623'\n'\\u0624'\n'\\u0625'\n'\\u0626'\n'\\u0627'\n'\\u0628'\n'\\u0629'\n'\\u062a'\n'\\u062b'\n'\\u062c'\n'\\u062d'\n'\\u062e'\n'\\u062f'\n'\\u0630'\n'\\u0631'\n'\\u0632'\n'\\u0633'\n'\\u0634'\n'\\u0635'\n'\\u0636'\n'\\u0637'\n'\\u0638'\n'\\u0639'\n'\\u063a'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'\\u0640'\n'\\u0641'\n'\\u0642'\n'\\u0643'\n'\\u0644'\n'\\u0645'\n'\\u0646'\n'\\u0647'\n'\\u0648'\n'\\u0649'\n'\\u064a'\n'\\u064b'\n'\\u064c'\n'\\u064d'\n'\\u064e'\n'\\u064f'\n'\\u0650'\n'\\u0651'\n'\\u0652'\n'\\u067e'\n'\\u0679'\n'\\u0686'\n'\\u06d5'\n'\\u06a4'\n'\\u06af'\n'\\u0688'\n'\\u0691'\n'{'\n'|'\n'}'\n'\\u0698'\n'\\u06d2'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0020:0x00a0,\n0x0021:0x0021,\n0x0021:0x00a1,\n0x0022:0x0022,\n0x0022:0x00a2,\n0x0023:0x0023,\n0x0023:0x00a3,\n0x0024:0x0024,\n0x0024:0x00a4,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0026:0x00a6,\n0x0027:0x0027,\n0x0027:0x00a7,\n0x0028:0x0028,\n0x0028:0x00a8,\n0x0029:0x0029,\n0x0029:0x00a9,\n0x002a:0x002a,\n0x002a:0x00aa,\n0x002b:0x002b,\n0x002b:0x00ab,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002d:0x00ad,\n0x002e:0x002e,\n0x002e:0x00ae,\n0x002f:0x002f,\n0x002f:0x00af,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003a:0x00ba,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003c:0x00bc,\n0x003d:0x003d,\n0x003d:0x00bd,\n0x003e:0x003e,\n0x003e:0x00be,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005b:0x00db,\n0x005c:0x005c,\n0x005c:0x00dc,\n0x005d:0x005d,\n0x005d:0x00dd,\n0x005e:0x005e,\n0x005e:0x00de,\n0x005f:0x005f,\n0x005f:0x00df,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007b:0x00fb,\n0x007c:0x007c,\n0x007c:0x00fc,\n0x007d:0x007d,\n0x007d:0x00fd,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x0081,\n0x00ab:0x008c,\n0x00bb:0x0098,\n0x00c4:0x0080,\n0x00c7:0x0082,\n0x00c9:0x0083,\n0x00d1:0x0084,\n0x00d6:0x0085,\n0x00dc:0x0086,\n0x00e0:0x0088,\n0x
00e1:0x0087,\n0x00e2:0x0089,\n0x00e4:0x008a,\n0x00e7:0x008d,\n0x00e8:0x008f,\n0x00e9:0x008e,\n0x00ea:0x0090,\n0x00eb:0x0091,\n0x00ed:0x0092,\n0x00ee:0x0094,\n0x00ef:0x0095,\n0x00f1:0x0096,\n0x00f3:0x0097,\n0x00f4:0x0099,\n0x00f6:0x009a,\n0x00f7:0x009b,\n0x00f9:0x009d,\n0x00fa:0x009c,\n0x00fb:0x009e,\n0x00fc:0x009f,\n0x060c:0x00ac,\n0x061b:0x00bb,\n0x061f:0x00bf,\n0x0621:0x00c1,\n0x0622:0x00c2,\n0x0623:0x00c3,\n0x0624:0x00c4,\n0x0625:0x00c5,\n0x0626:0x00c6,\n0x0627:0x00c7,\n0x0628:0x00c8,\n0x0629:0x00c9,\n0x062a:0x00ca,\n0x062b:0x00cb,\n0x062c:0x00cc,\n0x062d:0x00cd,\n0x062e:0x00ce,\n0x062f:0x00cf,\n0x0630:0x00d0,\n0x0631:0x00d1,\n0x0632:0x00d2,\n0x0633:0x00d3,\n0x0634:0x00d4,\n0x0635:0x00d5,\n0x0636:0x00d6,\n0x0637:0x00d7,\n0x0638:0x00d8,\n0x0639:0x00d9,\n0x063a:0x00da,\n0x0640:0x00e0,\n0x0641:0x00e1,\n0x0642:0x00e2,\n0x0643:0x00e3,\n0x0644:0x00e4,\n0x0645:0x00e5,\n0x0646:0x00e6,\n0x0647:0x00e7,\n0x0648:0x00e8,\n0x0649:0x00e9,\n0x064a:0x00ea,\n0x064b:0x00eb,\n0x064c:0x00ec,\n0x064d:0x00ed,\n0x064e:0x00ee,\n0x064f:0x00ef,\n0x0650:0x00f0,\n0x0651:0x00f1,\n0x0652:0x00f2,\n0x0660:0x00b0,\n0x0661:0x00b1,\n0x0662:0x00b2,\n0x0663:0x00b3,\n0x0664:0x00b4,\n0x0665:0x00b5,\n0x0666:0x00b6,\n0x0667:0x00b7,\n0x0668:0x00b8,\n0x0669:0x00b9,\n0x066a:0x00a5,\n0x0679:0x00f4,\n0x067e:0x00f3,\n0x0686:0x00f5,\n0x0688:0x00f9,\n0x0691:0x00fa,\n0x0698:0x00fe,\n0x06a4:0x00f7,\n0x06af:0x00f8,\n0x06ba:0x008b,\n0x06d2:0x00ff,\n0x06d5:0x00f6,\n0x2026:0x0093,\n0x274a:0x00c0,\n}\n", ["codecs"]], "encodings.mac_croatian": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-croatian',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\u0160'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\u017d'\n'\\xd8'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\u2206'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u0161'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\u017e'\n'\\xf8'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u0106'\n'\\xab'\n'\\u010c'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u0110'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\uf8ff'\n'\\xa9'\n'\\u2044'\n'\\u20ac'\n'\\u2039'\n'\\u203a'\n'\\xc6'\n'\\xbb'\n'\\u2013'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\u0107'\n'\\xc1'\n'\\u010d'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\u0111'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\u0131'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u03c0'\n'\\xcb'\n'\\u02da'\n'\\xb8'\n'\\xca'\n'\\xe6'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.big5hkscs": [".py", "\n\n\n\n\n\nimport _codecs_hk,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_hk.getcodec('big5hkscs')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='big5hkscs',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_hk", "_multibytecodec", "codecs"]], "encodings.cp1256": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1256',\n 
encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\u067e'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\u0679'\n'\\u2039'\n'\\u0152'\n'\\u0686'\n'\\u0698'\n'\\u0688'\n'\\u06af'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u06a9'\n'\\u2122'\n'\\u0691'\n'\\u203a'\n'\\u0153'\n'\\u200c'\n'\\u200d'\n'\\u06ba'\n'\\xa0'\n'\\u060c'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\u06be'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\u061b'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\u061f'\n'\\u06c1'\n'\\u0621'\n'\\u0622'\n'\\u0623'\n'\\u0624'\n'\\u0625'\n'\\u0626'\n'\\u0627'\n'\\u0628'\n'\\u0629'\n'\\u062a'\n'\\u062b'\n'\\u062c'\n'\\u062d'\n'\\u062e'\n'\\u062f'\n'\\u0630'\n'\\u0631'\n'\\u0632'\n'\\u0633'\n'\\u0634'\n'\\u0635'\n'\\u0636'\n'\\xd7'\n'\\u0637'\n'\\u0638'\n'\\u0639'\n'\\u063a'\n'\\u0640'\n'\\u0641'\n'\\u0642'\n'\\u0643'\n'\\xe0'\n'\\u0644'\n'\\xe2'\n'\\u0645'\n'\\u0646'\n'\\u0647'\n'\\u0648'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\u0649'\n'\\u064a'\n'\\xee'\n'\\xef'\n'\\u064b'\n'\\u064c'\n'\\u064d'\n'\\u064e'\n'\\xf4'\n'\\u064f'\n'\\u0650'\n'\\xf7'\n'\\u0651'\n'\\xf9'\n'\\u0652'\n'\\xfb'\n'\\xfc'\n'\\u200e'\n'\\u200f'\n'\\u06d2'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_6": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-6',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa4'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u060c'\n'\\xad'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u061b'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u061f'\n'\\ufffe'\n'\\u0621'\n'\\u0622'\n'\\u0623'\n'\\u0624'\n'\\u0625'\n'\\u0626'\n'\\u0627'\n'\\u0628'\n'\\u0629'\n'\\u062a'\n'\\u062b'\n'\\u062c'\n'\\u062d'\n'\\u062e'\n'\\u062f'\n'\\u0630'\n'\\u0631'\n'\\u0632'\n'\\u0633'\n'\\u0634'\n'\\u0635'\n'\\u0636'\n'\\u0637'\n'\\u0638'\n'\\u0639'\n'\\u063a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0640'\n'\\u0641'\n'\\u0642'\n'\\u0643'\n'\\u0644'\n'\\u0645'\n'\\u0646'\n'\\u0647'\n'\\u0648'\n'\\u0649'\n'\\u064a'\n'\\u064b'\n'\\u064c'\n'\\u064d'\n'\\u064e'\n'\\u064f'\n'\\u0650'\n'\\u0651'\n'\\u0652'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_10": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-10',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0104'\n'\\u0112'\n'\\u0122'\n'\\u012a'\n'\\u0128'\n'\\u0136'\n'\\xa7'\n'\\u013b'\n'\\u0110'\n'\\u0160'\n'\\u0166'\n'\\u017d'\n'\\xad'\n'\\u016a'\n'\\u014a'\n'\\xb0'\n'\\u0105'\n'\\u0113'\n'\\u0123'\n'\\u012b'\n'\\u0129'\n'\\u0137'\n'\\xb7'\n'\\u013c'\n'\\u0111'\n'\\u0161'\n'\\u0167'\n'\\u017e'\n'\\u2015'\n'\\u016b'\n'\\u014b'\n'\\u0100'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\u012e'\n'\\u010c'\n'\\xc9'\n'\\u0118'\n'\\xcb'\n'\\u0116'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xd0'\n'\\u0145'\n'\\u014c'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\u0168'\n'\\xd8'\n'\\u0172'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\xde'\n'\\xdf'\n'\\u0101'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\u012f'\n'\\u010d'\n'\\xe9'\n'\\u0119'\n'\\xeb'\n'\\u0117'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xf0'\n'\\u0146'\n'\\u014d'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\u0169'\n'\\xf8'\n'\\u0173'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\xfe'\n'\\u0138'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso2022_kr": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_kr')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_kr',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "encodings.cp1140": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n 
name='cp1140',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\xa0'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe1'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xf1'\n'\\xa2'\n'.'\n'<'\n'('\n'+'\n'|'\n'&'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xec'\n'\\xdf'\n'!'\n'$'\n'*'\n')'\n';'\n'\\xac'\n'-'\n'/'\n'\\xc2'\n'\\xc4'\n'\\xc0'\n'\\xc1'\n'\\xc3'\n'\\xc5'\n'\\xc7'\n'\\xd1'\n'\\xa6'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\xf0'\n'\\xfd'\n'\\xfe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\u20ac'\n'\\xb5'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n'\\xd0'\n'\\xdd'\n'\\xde'\n'\\xae'\n'^'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'['\n']'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'\\xfc'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\\\'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'\\xd6'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n'\\xdc'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp1125": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1125',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0410,\n0x0081:0x0411,\n0x0082:0x0412,\n0x0083:0x0413,\n0x0084:0x0414,\n0x0085:0x0415,\n0x0086:0x0416,\n0x0087:0x0417,\n0x0088:0x0418,\n0x0089:0x0419,\n0x008a:0x041a,\n0x008b:0x041b,\n0x008c:0x041c,\n0x008d:0x041d,\n0x008e:0x041e,\n0x008f:0x041f,\n0x0090:0x0420,\n0x0091:0x0421,\n0x0092:0x0422,\n0x0093:0x0423,\n0x0094:0x0424,\n0x0095:0x0425,\n0x0096:0x0426,\n0x0097:0x0427,\n0x0098:0x0428,\n0x0099:0x0429,\n0x009a:0x042a,\n0x009b:0x042b,\n0x009c:0x042c,\n0x009d:0x042d,\n0x009e:0x042e,\n0x009f:0x042f,\n0x00a0:0x0430,\n0x00a1:0x0431,\n0x00a2:0x0432,\n0x00a3:0x0433,\n0x00a4:0x0434,\n0x00a5:0x0435,\n0x00a6:0x0436,\n0x00a7:0x0437,\n0x00a8:0x0438,\n0x00a9:0x0439,\n0x00aa:0x043a,\n0x00ab:0x043b,\n0x00ac:0x043c,\n0x00ad:0x043d,\n0x00ae:0x043e,\n0x00af:0x043f,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x0440,\n0x00e1:0x0441,\n0x00e2:0x0442,\n0x00e3:0x0443,\n0x00e4:0x0444,\n0x00e5:0x0445,\n0x00e6:0x0446,\n0x00e7:0x0447,\n0x00e8:0x0448,\n0x00e9:0x0449,\n0x00ea:0x044a,\n0x00eb:0x044b,\n0x00ec:0x044c,\n0x00ed:0x044d,\n0x00ee:0x044e,\n0x00ef:0x044f,\n0x00f0:0x0401,\n0x00f1:0x0451,\n0x00f2:0x0490,\n0x00f3:0x0491,\n0x00f4:0x0404,\n0x00f5:0x0454,\n0x00f6:0x0406,\n0x00f7:0x0456,\n0x00f8:0x0407,\n0x00f9:0x0457,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x2116,\n0x00fd:0x00a4,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n'\\u0401'\n'\\u0451'\n'\\u0490'\n'\\u0491'\n'\\u0404'\n'\\u0454'\n'\\u0406'\n'\\u0456'\n'\\u0407'\n'\\u0457'\n'\\xb7'\n'\\u221a'\n'\\u2116'\n'\\xa4'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0
x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a4:0x00fd,\n0x00b7:0x00fa,\n0x0401:0x00f0,\n0x0404:0x00f4,\n0x0406:0x00f6,\n0x0407:0x00f8,\n0x0410:0x0080,\n0x0411:0x0081,\n0x0412:0x0082,\n0x0413:0x0083,\n0x0414:0x0084,\n0x0415:0x0085,\n0x0416:0x0086,\n0x0417:0x0087,\n0x0418:0x0088,\n0x0419:0x0089,\n0x041a:0x008a,\n0x041b:0x008b,\n0x041c:0x008c,\n0x041d:0x008d,\n0x041e:0x008e,\n0x041f:0x008f,\n0x0420:0x0090,\n0x0421:0x0091,\n0x0422:0x0092,\n0x0423:0x0093,\n0x0424:0x0094,\n0x0425:0x0095,\n0x0426:0x0096,\n0x0427:0x0097,\n0x0428:0x0098,\n0x0429:0x0099,\n0x042a:0x009a,\n0x042b:0x009b,\n0x042c:0x009c,\n0x042d:0x009d,\n0x042e:0x009e,\n0x042f:0x009f,\n0x0430:0x00a0,\n0x0431:0x00a1,\n0x0432:0x00a2,\n0x0433:0x00a3,\n0x0434:0x00a4,\n0x0435:0x00a5,\n0x0436:0x00a6,\n0x0437:0x00a7,\n0x0438:0x00a8,\n0x0439:0x00a9,\n0x043a:0x00aa,\n0x043b:0x00ab,\n0x043c:0x00ac,\n0x043d:0x00ad,\n0x043e:0x00ae,\n0x043f:0x00af,\n0x0440:0x00e0,\n0x0441:0x00e1,\n0x0442:0x00e2,\n0x0443:0x00e3,\n0x0444:0x00e4,\n0x0445:0x00e5,\n0x0446:0x00e6,\n0x0447:0x00e7,\n0x0448:0x00e8,\n0x0449:0x00e9,\n0x044a:0x00ea,\n0x044b:0x00eb,\n0x044c:0x00ec,\n0x044d:0x00ed,\n0x044e:0x00ee,\n0x044f:0x00ef,\n0x0451:0x00f1,\n0x0454:0x00f5,\n0x0456:0x00f7,\n0x0457:0x00f9,\n0x0490:0x00f2,\n0x0491:0x00f3,\n0x2116:0x00fc,\n0x221a:0x00fb,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.iso2022_jp_1": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_jp_1')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_jp_1',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "encodings.cp1257": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n 
return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1257',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\ufffe'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\ufffe'\n'\\u2030'\n'\\ufffe'\n'\\u2039'\n'\\ufffe'\n'\\xa8'\n'\\u02c7'\n'\\xb8'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\ufffe'\n'\\u203a'\n'\\ufffe'\n'\\xaf'\n'\\u02db'\n'\\ufffe'\n'\\xa0'\n'\\ufffe'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\ufffe'\n'\\xa6'\n'\\xa7'\n'\\xd8'\n'\\xa9'\n'\\u0156'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xc6'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xf8'\n'\\xb9'\n'\\u0157'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xe6'\n'\\u0104'\n'\\u012e'\n'\\u0100'\n'\\u0106'\n'\\xc4'\n'\\xc5'\n'\\u0118'\n'\\u0112'\n'\\u010c'\n'\\xc9'\n'\\u0179'\n'\\u0116'\n'\\u0122'\n'\\u0136'\n'\\u012a'\n'\\u013b'\n'\\u0160'\n'\\u0143'\n'\\u0145'\n'\\xd3'\n'\\u014c'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\u0172'\n'\\u0141'\n'\\u015a'\n'\\u016a'\n'\\xdc'\n'\\u017b'\n'\\u017d'\n'\\xdf'\n'\\u0105'\n'\\u012f'\n'\\u0101'\n'\\u0107'\n'\\xe4'\n'\\xe5'\n'\\u0119'\n'\\u0113'\n'\\u010d'\n'\\xe9'\n'\\u017a'\n'\\u0117'\n'\\u0123'\n'\\u0137'\n'\\u012b'\n'\\u013c'\n'\\u0161'\n'\\u0144'\n'\\u0146'\n'\\xf3'\n'\\u014d'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\u0173'\n'\\u0142'\n'\\u015b'\n'\\u016b'\n'\\xfc'\n'\\u017c'\n'\\u017e'\n'\\u02d9'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp949": [".py", "\n\n\n\n\n\nimport _codecs_kr,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_kr.getcodec('cp949')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass 
StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp949',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_kr", "_multibytecodec", "codecs"]], "encodings.cp858": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp858',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x00ec,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00f2,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x00ff,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x00d7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x00ae,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x00c1,\n0x00b6:0x00c2,\n0x00b7:0x00c0,\n0x00b8:0x00a9,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x00a2,\n0x00be:0x00a5,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x00e3,\n0x00c7:0x00c3,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x00a4,\n0x00d0:0x00f0,\n0x00d1:0x00d0,\n0x00d2:0x00ca,\n0x00d3:0x00cb,\n0x00d4:0x00c8,\n0x00d5:0x20ac,\n0x00d6:0x00cd,\n0x00d7:0x00ce,\n0x00d8:0x00cf,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x00a6,\n0x00de:0x00cc,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x00d4,\n0x00e3:0x00d2,\n0x00e4:0x00f5,\n0x00e5:0x00d5,\n0x00e6:0x00b5,\n0x00e7:0x00fe,\n0x00e8:0x00de,\n0x00e9:0x00da,\n0x00ea:0x00db,\n0x00eb:0x00d9,\n0x00ec:0x00fd,\n0x00ed:0x00dd,\n0x00ee:0x00af,\n0x00ef:0x00b4,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:0x2017,\n0x00f3:0x00be,\n0x00f4:0x00b6,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x00b8,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x00b7,\n0x00fb:0x00b9,\n0x00f
c:0x00b3,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\xec'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xfb'\n'\\xf9'\n'\\xff'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\xd7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\xae'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\xc1'\n'\\xc2'\n'\\xc0'\n'\\xa9'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\xa2'\n'\\xa5'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\xe3'\n'\\xc3'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\xf0'\n'\\xd0'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\u20ac'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\xa6'\n'\\xcc'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\xd4'\n'\\xd2'\n'\\xf5'\n'\\xd5'\n'\\xb5'\n'\\xfe'\n'\\xde'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\xfd'\n'\\xdd'\n'\\xaf'\n'\\xb4'\n'\\xad'\n'\\xb1'\n'\\u2017'\n'\\xbe'\n'\\xb6'\n'\\xa7'\n'\\xf7'\n'\\xb8'\n'\\xb0'\n'\\xa8'\n'\\xb7'\n'\\xb9'\n'\\xb3'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\
n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x00bd,\n0x00a3:0x009c,\n0x00a4:0x00cf,\n0x00a5:0x00be,\n0x00a6:0x00dd,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00a9:0x00b8,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00ae:0x00a9,\n0x00af:0x00ee,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00fc,\n0x00b4:0x00ef,\n0x00b5:0x00e6,\n0x00b6:0x00f4,\n0x00b7:0x00fa,\n0x00b8:0x00f7,\n0x00b9:0x00fb,\n0x00ba:0x00a7,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00f3,\n0x00bf:0x00a8,\n0x00c0:0x00b7,\n0x00c1:0x00b5,\n0x00c2:0x00b6,\n0x00c3:0x00c7,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c8:0x00d4,\n0x00c9:0x0090,\n0x00ca:0x00d2,\n0x00cb:0x00d3,\n0x00cc:0x00de,\n0x00cd:0x00d6,\n0x00ce:0x00d7,\n0x00cf:0x00d8,\n0x00d0:0x00d1,\n0x00d1:0x00a5,\n0x00d2:0x00e3,\n0x00d3:0x00e0,\n0x00d4:0x00e2,\n0x00d5:0x00e5,\n0x00d6:0x0099,\n0x00d7:0x009e,\n0x00d8:0x009d,\n0x00d9:0x00eb,\n0x00da:0x00e9,\n0x00db:0x00ea,\n0x00dc:0x009a,\n0x00dd:0x00ed,\n0x00de:0x00e8,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e3:0x00c6,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ec:0x008d,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f0:0x00d0,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f5:0x00e4,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00fd:0x00ec,\n0x00fe:0x00e7,\n0x00ff:0x0098,\n0x20ac:0x00d5,\n0x0192:0x009f,\n0x2017:0x00f2,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.iso8859_7": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-7',\n encode=Codec().encode,\n decode=Codec().decode,\n 
incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u2018'\n'\\u2019'\n'\\xa3'\n'\\u20ac'\n'\\u20af'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\u037a'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\ufffe'\n'\\u2015'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\u0384'\n'\\u0385'\n'\\u0386'\n'\\xb7'\n'\\u0388'\n'\\u0389'\n'\\u038a'\n'\\xbb'\n'\\u038c'\n'\\xbd'\n'\\u038e'\n'\\u038f'\n'\\u0390'\n'\\u0391'\n'\\u0392'\n'\\u0393'\n'\\u0394'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\u0398'\n'\\u0399'\n'\\u039a'\n'\\u039b'\n'\\u039c'\n'\\u039d'\n'\\u039e'\n'\\u039f'\n'\\u03a0'\n'\\u03a1'\n'\\ufffe'\n'\\u03a3'\n'\\u03a4'\n'\\u03a5'\n'\\u03a6'\n'\\u03a7'\n'\\u03a8'\n'\\u03a9'\n'\\u03aa'\n'\\u03ab'\n'\\u03ac'\n'\\u03ad'\n'\\u03ae'\n'\\u03af'\n'\\u03b0'\n'\\u03b1'\n'\\u03b2'\n'\\u03b3'\n'\\u03b4'\n'\\u03b5'\n'\\u03b6'\n'\\u03b7'\n'\\u03b8'\n'\\u03b9'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03be'\n'\\u03bf'\n'\\u03c0'\n'\\u03c1'\n'\\u03c2'\n'\\u03c3'\n'\\u03c4'\n'\\u03c5'\n'\\u03c6'\n'\\u03c7'\n'\\u03c8'\n'\\u03c9'\n'\\u03ca'\n'\\u03cb'\n'\\u03cc'\n'\\u03cd'\n'\\u03ce'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_11": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-11',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0e01'\n'\\u0e02'\n'\\u0e03'\n'\\u0e04'\n'\\u0e05'\n'\\u0e06'\n'\\u0e07'\n'\\u0e08'\n'\\u0e09'\n'\\u0e0a'\n'\\u0e0b'\n'\\u0e0c'\n'\\u0e0d'\n'\\u0e0e'\n'\\u0e0f'\n'\\u0e10'\n'\\u0e11'\n'\\u0e12'\n'\\u0e13'\n'\\u0e14'\n'\\u0e15'\n'\\u0e16'\n'\\u0e17'\n'\\u0e18'\n'\\u0e19'\n'\\u0e1a'\n'\\u0e1b'\n'\\u0e1c'\n'\\u0e1d'\n'\\u0e1e'\n'\\u0e1f'\n'\\u0e20'\n'\\u0e21'\n'\\u0e22'\n'\\u0e23'\n'\\u0e24'\n'\\u0e25'\n'\\u0e26'\n'\\u0e27'\n'\\u0e28'\n'\\u0e29'\n'\\u0e2a'\n'\\u0e2b'\n'\\u0e2c'\n'\\u0e2d'\n'\\u0e2e'\n'\\u0e2f'\n'\\u0e30'\n'\\u0e31'\n'\\u0e32'\n'\\u0e33'\n'\\u0e34'\n'\\u0e35'\n'\\u0e36'\n'\\u0e37'\n'\\u0e38'\n'\\u0e39'\n'\\u0e3a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0e3f'\n'\\u0e40'\n'\\u0e41'\n'\\u0e42'\n'\\u0e43'\n'\\u0e44'\n'\\u0e45'\n'\\u0e46'\n'\\u0e47'\n'\\u0e48'\n'\\u0e49'\n'\\u0e4a'\n'\\u0e4b'\n'\\u0e4c'\n'\\u0e4d'\n'\\u0e4e'\n'\\u0e4f'\n'\\u0e50'\n'\\u0e51'\n'\\u0e52'\n'\\u0e53'\n'\\u0e54'\n'\\u0e55'\n'\\u0e56'\n'\\u0e57'\n'\\u0e58'\n'\\u0e59'\n'\\u0e5a'\n'\\u0e5b'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.hp_roman8": [".py", "''\n\n\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='hp-roman8',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\xc0'\n'\\xc2'\n'\\xc8'\n'\\xca'\n'\\xcb'\n'\\xce'\n'\\xcf'\n'\\xb4'\n'\\u02cb'\n'\\u02c6'\n'\\xa8'\n'\\u02dc'\n'\\xd9'\n'\\xdb'\n'\\u20a4'\n'\\xaf'\n'\\xdd'\n'\\xfd'\n'\\xb0'\n'\\xc7'\n'\\xe7'\n'\\xd1'\n'\\xf1'\n'\\xa1'\n'\\xbf'\n'\\xa4'\n'\\xa3'\n'\\xa5'\n'\\xa7'\n'\\u0192'\n'\\xa2'\n'\\xe2'\n'\\xea'\n'\\xf4'\n'\\xfb'\n'\\xe1'\n'\\xe9'\n'\\xf3'\n'\\xfa'\n'\\xe0'\n'\\xe8'\n'\\xf2'\n'\\xf9'\n'\\xe4'\n'\\xeb'\n'\\xf6'\n'\\xfc'\n'\\xc5'\n'\\xee'\n'\\xd8'\n'\\xc6'\n'\\xe5'\n'\\xed'\n'\\xf8'\n'\\xe6'\n'\\xc4'\n'\\xec'\n'\\xd6'\n'\\xdc'\n'\\xc9'\n'\\xef'\n'\\xdf'\n'\\xd4'\n'\\xc1'\n'\\xc3'\n'\\xe3'\n'\\xd0'\n'\\xf0'\n'\\xcd'\n'\\xcc'\n'\\xd3'\n'\\xd2'\n'\\xd5'\n'\\xf5'\n'\\u0160'\n'\\u0161'\n'\\xda'\n'\\u0178'\n'\\xff'\n'\\xde'\n'\\xfe'\n'\\xb7'\n'\\xb5'\n'\\xb6'\n'\\xbe'\n'\\u2014'\n'\\xbc'\n'\\xbd'\n'\\xaa'\n'\\xba'\n'\\xab'\n'\\u25a0'\n'\\xbb'\n'\\xb1'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.koi8_r": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='koi8-r',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u2500'\n'\\u2502'\n'\\u250c'\n'\\u2510'\n'\\u2514'\n'\\u2518'\n'\\u251c'\n'\\u2524'\n'\\u252c'\n'\\u2534'\n'\\u253c'\n'\\u2580'\n'\\u2584'\n'\\u2588'\n'\\u258c'\n'\\u2590'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2320'\n'\\u25a0'\n'\\u2219'\n'\\u221a'\n'\\u2248'\n'\\u2264'\n'\\u2265'\n'\\xa0'\n'\\u2321'\n'\\xb0'\n'\\xb2'\n'\\xb7'\n'\\xf7'\n'\\u2550'\n'\\u2551'\n'\\u2552'\n'\\u0451'\n'\\u2553'\n'\\u2554'\n'\\u2555'\n'\\u2556'\n'\\u2557'\n'\\u2558'\n'\\u2559'\n'\\u255a'\n'\\u255b'\n'\\u255c'\n'\\u255d'\n'\\u255e'\n'\\u255f'\n'\\u2560'\n'\\u2561'\n'\\u0401'\n'\\u2562'\n'\\u2563'\n'\\u2564'\n'\\u2565'\n'\\u2566'\n'\\u2567'\n'\\u2568'\n'\\u2569'\n'\\u256a'\n'\\u256b'\n'\\u256c'\n'\\xa9'\n'\\u044e'\n'\\u0430'\n'\\u0431'\n'\\u0446'\n'\\u0434'\n'\\u0435'\n'\\u0444'\n'\\u0433'\n'\\u0445'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u044f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0436'\n'\\u0432'\n'\\u044c'\n'\\u044b'\n'\\u0437'\n'\\u0448'\n'\\u044d'\n'\\u0449'\n'\\u0447'\n'\\u044a'\n'\\u042e'\n'\\u0410'\n'\\u0411'\n'\\u0426'\n'\\u0414'\n'\\u0415'\n'\\u0424'\n'\\u0413'\n'\\u0425'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u042f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0416'\n'\\u0412'\n'\\u042c'\n'\\u042b'\n'\\u0417'\n'\\u0428'\n'\\u042d'\n'\\u0429'\n'\\u0427'\n'\\u042a'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.zlib_codec": [".py", "''\n\n\n\n\n\n\nimport codecs\nimport zlib\n\n\n\ndef zlib_encode(input,errors='strict'):\n assert errors =='strict'\n return(zlib.compress(input),len(input))\n \ndef zlib_decode(input,errors='strict'):\n assert errors =='strict'\n return(zlib.decompress(input),len(input))\n \nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return zlib_encode(input,errors)\n def decode(self,input,errors='strict'):\n return zlib_decode(input,errors)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def __init__(self,errors='strict'):\n assert errors =='strict'\n self.errors=errors\n self.compressobj=zlib.compressobj()\n \n def encode(self,input,final=False):\n if final:\n c=self.compressobj.compress(input)\n return c+self.compressobj.flush()\n else:\n return self.compressobj.compress(input)\n \n def reset(self):\n self.compressobj=zlib.compressobj()\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def __init__(self,errors='strict'):\n assert errors =='strict'\n self.errors=errors\n self.decompressobj=zlib.decompressobj()\n \n def decode(self,input,final=False):\n if final:\n c=self.decompressobj.decompress(input)\n return c+self.decompressobj.flush()\n else:\n return self.decompressobj.decompress(input)\n \n def reset(self):\n self.decompressobj=zlib.decompressobj()\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n charbuffertype=bytes\n \nclass StreamReader(Codec,codecs.StreamReader):\n charbuffertype=bytes\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='zlib',\n encode=zlib_encode,\n 
decode=zlib_decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n _is_text_encoding=False,\n )\n", ["codecs", "zlib"]], "encodings.gbk": [".py", "\n\n\n\n\n\nimport _codecs_cn,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_cn.getcodec('gbk')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='gbk',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_cn", "_multibytecodec", "codecs"]], "encodings.johab": [".py", "\n\n\n\n\n\nimport _codecs_kr,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_kr.getcodec('johab')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='johab',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_kr", "_multibytecodec", "codecs"]], "encodings.cp1253": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1253',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\ufffe'\n'\\u2030'\n'\\ufffe'\n'\\u2039'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\ufffe'\n'\\u203a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa0'\n'\\u0385'\n'\\u0386'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\ufffe'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\u2015'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\u0384'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\u0388'\n'\\u0389'\n'\\u038a'\n'\\xbb'\n'\\u038c'\n'\\xbd'\n'\\u038e'\n'\\u038f'\n'\\u0390'\n'\\u0391'\n'\\u0392'\n'\\u0393'\n'\\u0394'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\u0398'\n'\\u0399'\n'\\u039a'\n'\\u039b'\n'\\u039c'\n'\\u039d'\n'\\u039e'\n'\\u039f'\n'\\u03a0'\n'\\u03a1'\n'\\ufffe'\n'\\u03a3'\n'\\u03a4'\n'\\u03a5'\n'\\u03a6'\n'\\u03a7'\n'\\u03a8'\n'\\u03a9'\n'\\u03aa'\n'\\u03ab'\n'\\u03ac'\n'\\u03ad'\n'\\u03ae'\n'\\u03af'\n'\\u03b0'\n'\\u03b1'\n'\\u03b2'\n'\\u03b3'\n'\\u03b4'\n'\\u03b5'\n'\\u03b6'\n'\\u03b7'\n'\\u03b8'\n'\\u03b9'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03be'\n'\\u03bf'\n'\\u03c0'\n'\\u03c1'\n'\\u03c2'\n'\\u03c3'\n'\\u03c4'\n'\\u03c5'\n'\\u03c6'\n'\\u03c7'\n'\\u03c8'\n'\\u03c9'\n'\\u03ca'\n'\\u03cb'\n'\\u03cc'\n'\\u03cd'\n'\\u03ce'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_15": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-15',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\u20ac'\n'\\xa5'\n'\\u0160'\n'\\xa7'\n'\\u0161'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\u017d'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\u017e'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\u0152'\n'\\u0153'\n'\\u0178'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xd0'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\xde'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xf0'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\xfe'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso2022_jp_2004": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_jp_2004')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_jp_2004',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "encodings.mac_iceland": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-iceland',\n encode=Codec().encode,\n 
decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\xdd'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\xc6'\n'\\xd8'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u03c0'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\xe6'\n'\\xf8'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\xff'\n'\\u0178'\n'\\u2044'\n'\\u20ac'\n'\\xd0'\n'\\xf0'\n'\\xde'\n'\\xfe'\n'\\xfd'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\xca'\n'\\xc1'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\uf8ff'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\u0131'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u02d8'\n'\\u02d9'\n'\\u02da'\n'\\xb8'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_3": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-3',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0126'\n'\\u02d8'\n'\\xa3'\n'\\xa4'\n'\\ufffe'\n'\\u0124'\n'\\xa7'\n'\\xa8'\n'\\u0130'\n'\\u015e'\n'\\u011e'\n'\\u0134'\n'\\xad'\n'\\ufffe'\n'\\u017b'\n'\\xb0'\n'\\u0127'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\u0125'\n'\\xb7'\n'\\xb8'\n'\\u0131'\n'\\u015f'\n'\\u011f'\n'\\u0135'\n'\\xbd'\n'\\ufffe'\n'\\u017c'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\ufffe'\n'\\xc4'\n'\\u010a'\n'\\u0108'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\ufffe'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\u0120'\n'\\xd6'\n'\\xd7'\n'\\u011c'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u016c'\n'\\u015c'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\ufffe'\n'\\xe4'\n'\\u010b'\n'\\u0109'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\ufffe'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\u0121'\n'\\xf6'\n'\\xf7'\n'\\u011d'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u016d'\n'\\u015d'\n'\\u02d9'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.mac_greek": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-greek',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xb9'\n'\\xb2'\n'\\xc9'\n'\\xb3'\n'\\xd6'\n'\\xdc'\n'\\u0385'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\u0384'\n'\\xa8'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xa3'\n'\\u2122'\n'\\xee'\n'\\xef'\n'\\u2022'\n'\\xbd'\n'\\u2030'\n'\\xf4'\n'\\xf6'\n'\\xa6'\n'\\u20ac'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\u0393'\n'\\u0394'\n'\\u0398'\n'\\u039b'\n'\\u039e'\n'\\u03a0'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u03a3'\n'\\u03aa'\n'\\xa7'\n'\\u2260'\n'\\xb0'\n'\\xb7'\n'\\u0391'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\u0392'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\u0399'\n'\\u039a'\n'\\u039c'\n'\\u03a6'\n'\\u03ab'\n'\\u03a8'\n'\\u03a9'\n'\\u03ac'\n'\\u039d'\n'\\xac'\n'\\u039f'\n'\\u03a1'\n'\\u2248'\n'\\u03a4'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\u03a5'\n'\\u03a7'\n'\\u0386'\n'\\u0388'\n'\\u0153'\n'\\u2013'\n'\\u2015'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u0389'\n'\\u038a'\n'\\u038c'\n'\\u038e'\n'\\u03ad'\n'\\u03ae'\n'\\u03af'\n'\\u03cc'\n'\\u038f'\n'\\u03cd'\n'\\u03b1'\n'\\u03b2'\n'\\u03c8'\n'\\u03b4'\n'\\u03b5'\n'\\u03c6'\n'\\u03b3'\n'\\u03b7'\n'\\u03b9'\n'\\u03be'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03bf'\n'\\u03c0'\n'\\u03ce'\n'\\u03c1'\n'\\u03c3'\n'\\u03c4'\n'\\u03b8'\n'\\u03c9'\n'\\u03c2'\n'\\u03c7'\n'\\u03c5'\n'\\u03b6'\n'\\u03ca'\n'\\u03cb'\n'\\u0390'\n'\\u03b0'\n'\\xad'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.rot_13": [".py", "#!/usr/bin/env python\n''\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return(str.translate(input,rot13_map),len(input))\n \n def decode(self,input,errors='strict'):\n return(str.translate(input,rot13_map),len(input))\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return str.translate(input,rot13_map)\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return str.translate(input,rot13_map)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='rot-13',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n _is_text_encoding=False,\n )\n \n \n 
\nrot13_map=codecs.make_identity_dict(range(256))\nrot13_map.update({\n0x0041:0x004e,\n0x0042:0x004f,\n0x0043:0x0050,\n0x0044:0x0051,\n0x0045:0x0052,\n0x0046:0x0053,\n0x0047:0x0054,\n0x0048:0x0055,\n0x0049:0x0056,\n0x004a:0x0057,\n0x004b:0x0058,\n0x004c:0x0059,\n0x004d:0x005a,\n0x004e:0x0041,\n0x004f:0x0042,\n0x0050:0x0043,\n0x0051:0x0044,\n0x0052:0x0045,\n0x0053:0x0046,\n0x0054:0x0047,\n0x0055:0x0048,\n0x0056:0x0049,\n0x0057:0x004a,\n0x0058:0x004b,\n0x0059:0x004c,\n0x005a:0x004d,\n0x0061:0x006e,\n0x0062:0x006f,\n0x0063:0x0070,\n0x0064:0x0071,\n0x0065:0x0072,\n0x0066:0x0073,\n0x0067:0x0074,\n0x0068:0x0075,\n0x0069:0x0076,\n0x006a:0x0077,\n0x006b:0x0078,\n0x006c:0x0079,\n0x006d:0x007a,\n0x006e:0x0061,\n0x006f:0x0062,\n0x0070:0x0063,\n0x0071:0x0064,\n0x0072:0x0065,\n0x0073:0x0066,\n0x0074:0x0067,\n0x0075:0x0068,\n0x0076:0x0069,\n0x0077:0x006a,\n0x0078:0x006b,\n0x0079:0x006c,\n0x007a:0x006d,\n})\n\n\n\ndef rot13(infile,outfile):\n outfile.write(codecs.encode(infile.read(),'rot-13'))\n \nif __name__ =='__main__':\n import sys\n rot13(sys.stdin,sys.stdout)\n", ["codecs", "sys"]], "encodings.utf_16_be": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nencode=codecs.utf_16_be_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_16_be_decode(input,errors,True)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.utf_16_be_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=codecs.utf_16_be_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=codecs.utf_16_be_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=codecs.utf_16_be_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-16-be',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.euc_kr": [".py", "\n\n\n\n\n\nimport _codecs_kr,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_kr.getcodec('euc_kr')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='euc_kr',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_kr", "_multibytecodec", "codecs"]], "encodings.mac_centeuro": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass 
StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-centeuro',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\u0100'\n'\\u0101'\n'\\xc9'\n'\\u0104'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\u0105'\n'\\u010c'\n'\\xe4'\n'\\u010d'\n'\\u0106'\n'\\u0107'\n'\\xe9'\n'\\u0179'\n'\\u017a'\n'\\u010e'\n'\\xed'\n'\\u010f'\n'\\u0112'\n'\\u0113'\n'\\u0116'\n'\\xf3'\n'\\u0117'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\u011a'\n'\\u011b'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\u0118'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\u0119'\n'\\xa8'\n'\\u2260'\n'\\u0123'\n'\\u012e'\n'\\u012f'\n'\\u012a'\n'\\u2264'\n'\\u2265'\n'\\u012b'\n'\\u0136'\n'\\u2202'\n'\\u2211'\n'\\u0142'\n'\\u013b'\n'\\u013c'\n'\\u013d'\n'\\u013e'\n'\\u0139'\n'\\u013a'\n'\\u0145'\n'\\u0146'\n'\\u0143'\n'\\xac'\n'\\u221a'\n'\\u0144'\n'\\u0147'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\u0148'\n'\\u0150'\n'\\xd5'\n'\\u0151'\n'\\u014c'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\u014d'\n'\\u0154'\n'\\u0155'\n'\\u0158'\n'\\u2039'\n'\\u203a'\n'\\u0159'\n'\\u0156'\n'\\u0157'\n'\\u0160'\n'\\u201a'\n'\\u201e'\n'\\u0161'\n'\\u015a'\n'\\u015b'\n'\\xc1'\n'\\u0164'\n'\\u0165'\n'\\xcd'\n'\\u017d'\n'\\u017e'\n'\\u016a'\n'\\xd3'\n'\\xd4'\n'\\u016b'\n'\\u016e'\n'\\xda'\n'\\u016f'\n'\\u0170'\n'\\u0171'\n'\\u0172'\n'\\u0173'\n'\\xdd'\n'\\xfd'\n'\\u0137'\n'\\u017b'\n'\\u0141'\n'\\u017c'\n'\\u0122'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.euc_jisx0213": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('euc_jisx0213')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='euc_jisx0213',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.cp863": [".py", 
"''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp863',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00c2,\n0x0085:0x00e0,\n0x0086:0x00b6,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x2017,\n0x008e:0x00c0,\n0x008f:0x00a7,\n0x0090:0x00c9,\n0x0091:0x00c8,\n0x0092:0x00ca,\n0x0093:0x00f4,\n0x0094:0x00cb,\n0x0095:0x00cf,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x00a4,\n0x0099:0x00d4,\n0x009a:0x00dc,\n0x009b:0x00a2,\n0x009c:0x00a3,\n0x009d:0x00d9,\n0x009e:0x00db,\n0x009f:0x0192,\n0x00a0:0x00a6,\n0x00a1:0x00b4,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00a8,\n0x00a5:0x00b8,\n0x00a6:0x00b3,\n0x00a7:0x00af,\n0x00a8:0x00ce,\n0x00a9:0x2310,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00be,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xc2'\n'\\xe0'\n'\\xb6'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\u2017'\n'\\xc0'\n'\\xa7'\n'\\xc9'\n'\\xc8'\n'\\xca'\n'\\xf4'\n'\\xcb'\n'\\xcf'\n'\\xfb'\n'\\xf9'\n'\\xa4'\n'\\xd4'\n'\\xdc'\n'\\xa2'\n'\\xa3'\n'\\xd9'\n'\\xdb'\n'\\u0192'\n'\\xa6'\n'\\xb4'\n'\\xf3'\n'\\xfa'\n'\\xa8'\n'\\xb8'\n'\\xb3'\n'\\xaf'\n'\\xce'\n'\\u2310'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xbe'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006
c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a2:0x009b,\n0x00a3:0x009c,\n0x00a4:0x0098,\n0x00a6:0x00a0,\n0x00a7:0x008f,\n0x00a8:0x00a4,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00af:0x00a7,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00a6,\n0x00b4:0x00a1,\n0x00b5:0x00e6,\n0x00b6:0x0086,\n0x00b7:0x00fa,\n0x00b8:0x00a5,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00ad,\n0x00c0:0x008e,\n0x00c2:0x0084,\n0x00c7:0x0080,\n0x00c8:0x0091,\n0x00c9:0x0090,\n0x00ca:0x0092,\n0x00cb:0x0094,\n0x00ce:0x00a8,\n0x00cf:0x0095,\n0x00d4:0x0099,\n0x00d9:0x009d,\n0x00db:0x009e,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e2:0x0083,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f7:0x00f6,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x0192:0x009f,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x2017:0x008d,\n0x207f:0x00fc,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2310:0x00a9,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.ascii": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n\n\n encode=codecs.ascii_encode\n decode=codecs.ascii_decode\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.ascii_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.ascii_decode(input,self.errors)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \nclass StreamConverter(StreamWriter,StreamReader):\n\n encode=codecs.ascii_decode\n decode=codecs.ascii_encode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='ascii',\n encode=Codec.encode,\n decode=Codec.decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs"]], "encodings.iso8859_8": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def 
decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-8',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\ufffe'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xd7'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xf7'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2017'\n'\\u05d0'\n'\\u05d1'\n'\\u05d2'\n'\\u05d3'\n'\\u05d4'\n'\\u05d5'\n'\\u05d6'\n'\\u05d7'\n'\\u05d8'\n'\\u05d9'\n'\\u05da'\n'\\u05db'\n'\\u05dc'\n'\\u05dd'\n'\\u05de'\n'\\u05df'\n'\\u05e0'\n'\\u05e1'\n'\\u05e2'\n'\\u05e3'\n'\\u05e4'\n'\\u05e5'\n'\\u05e6'\n'\\u05e7'\n'\\u05e8'\n'\\u05e9'\n'\\u05ea'\n'\\ufffe'\n'\\ufffe'\n'\\u200e'\n'\\u200f'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp857": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return 
codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp857',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x0131,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00f2,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x0130,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x015e,\n0x009f:0x015f,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x011e,\n0x00a7:0x011f,\n0x00a8:0x00bf,\n0x00a9:0x00ae,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x00c1,\n0x00b6:0x00c2,\n0x00b7:0x00c0,\n0x00b8:0x00a9,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x00a2,\n0x00be:0x00a5,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x00e3,\n0x00c7:0x00c3,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x00a4,\n0x00d0:0x00ba,\n0x00d1:0x00aa,\n0x00d2:0x00ca,\n0x00d3:0x00cb,\n0x00d4:0x00c8,\n0x00d5:None,\n0x00d6:0x00cd,\n0x00d7:0x00ce,\n0x00d8:0x00cf,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x00a6,\n0x00de:0x00cc,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x00d4,\n0x00e3:0x00d2,\n0x00e4:0x00f5,\n0x00e5:0x00d5,\n0x00e6:0x00b5,\n0x00e7:None,\n0x00e8:0x00d7,\n0x00e9:0x00da,\n0x00ea:0x00db,\n0x00eb:0x00d9,\n0x00ed:0x00ff,\n0x00ee:0x00af,\n0x00ef:0x00b4,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:None,\n0x00f3:0x00be,\n0x00f4:0x00b6,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x00b8,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x00b7,\n0x00fb:0x00b9,\n0x00fc:0x00b3,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\u0131'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xfb'\n'\\xf9'\n'\\u0130'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\u015e'\n'\\u015f'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\u011e'\n'\\u011f'\n'\\xbf'\n'\\xae'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\xc1'\n'\\xc2'\n'\\xc0'\n'\\xa9'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\xa2'\n'\\xa5'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\xe3'\n'\\xc3'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\xba'\n'\\xaa'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\ufffe'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\xa6'\n'\\xcc'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\xd4'\n'\\xd2'\n'\\xf5'\n'\\xd5'\n'\\xb5'\n'\\ufffe'\n'\\xd7'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\xec'\n'\\xff'\n'\\xaf'\n'\\xb4'\n'\\xad'\n'\\xb1'\n'\\ufffe'\n'\\xbe'\n'\\xb6'\n'\\xa7'\n'\\xf7'\n'\\xb8'\n'\\xb0'\n'\\xa8'\n'\\xb7'\n'\\xb9'\n'\\xb3'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071
:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x00bd,\n0x00a3:0x009c,\n0x00a4:0x00cf,\n0x00a5:0x00be,\n0x00a6:0x00dd,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00a9:0x00b8,\n0x00aa:0x00d1,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00ae:0x00a9,\n0x00af:0x00ee,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00fc,\n0x00b4:0x00ef,\n0x00b5:0x00e6,\n0x00b6:0x00f4,\n0x00b7:0x00fa,\n0x00b8:0x00f7,\n0x00b9:0x00fb,\n0x00ba:0x00d0,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00f3,\n0x00bf:0x00a8,\n0x00c0:0x00b7,\n0x00c1:0x00b5,\n0x00c2:0x00b6,\n0x00c3:0x00c7,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c8:0x00d4,\n0x00c9:0x0090,\n0x00ca:0x00d2,\n0x00cb:0x00d3,\n0x00cc:0x00de,\n0x00cd:0x00d6,\n0x00ce:0x00d7,\n0x00cf:0x00d8,\n0x00d1:0x00a5,\n0x00d2:0x00e3,\n0x00d3:0x00e0,\n0x00d4:0x00e2,\n0x00d5:0x00e5,\n0x00d6:0x0099,\n0x00d7:0x00e8,\n0x00d8:0x009d,\n0x00d9:0x00eb,\n0x00da:0x00e9,\n0x00db:0x00ea,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e3:0x00c6,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ec:0x00ec,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f5:0x00e4,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00ff:0x00ed,\n0x011e:0x00a6,\n0x011f:0x00a7,\n0x0130:0x0098,\n0x0131:0x008d,\n0x015e:0x009e,\n0x015f:0x009f,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.utf_32_be": [".py", "''\n\n\nimport codecs\n\n\n\nencode=codecs.utf_32_be_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_32_be_decode(input,errors,True)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.utf_32_be_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=codecs.utf_32_be_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=codecs.utf_32_be_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=codecs.utf_32_be_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-32-be',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.cp1258": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return 
codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1258',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\ufffe'\n'\\u2039'\n'\\u0152'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\ufffe'\n'\\u203a'\n'\\u0153'\n'\\ufffe'\n'\\ufffe'\n'\\u0178'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\u0102'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\u0300'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u0110'\n'\\xd1'\n'\\u0309'\n'\\xd3'\n'\\xd4'\n'\\u01a0'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u01af'\n'\\u0303'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\u0103'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\u0301'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\u0111'\n'\\xf1'\n'\\u0323'\n'\\xf3'\n'\\xf4'\n'\\u01a1'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u01b0'\n'\\u20ab'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.oem": [".py", "''\n\n\n\n\nfrom codecs import oem_encode,oem_decode\n\nimport codecs\n\n\n\nencode=oem_encode\n\ndef decode(input,errors='strict'):\n return oem_decode(input,errors,True)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return oem_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=oem_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=oem_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=oem_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='oem',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], 
"encodings.mac_latin2": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-latin2',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\u0100'\n'\\u0101'\n'\\xc9'\n'\\u0104'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\u0105'\n'\\u010c'\n'\\xe4'\n'\\u010d'\n'\\u0106'\n'\\u0107'\n'\\xe9'\n'\\u0179'\n'\\u017a'\n'\\u010e'\n'\\xed'\n'\\u010f'\n'\\u0112'\n'\\u0113'\n'\\u0116'\n'\\xf3'\n'\\u0117'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\u011a'\n'\\u011b'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\u0118'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\u0119'\n'\\xa8'\n'\\u2260'\n'\\u0123'\n'\\u012e'\n'\\u012f'\n'\\u012a'\n'\\u2264'\n'\\u2265'\n'\\u012b'\n'\\u0136'\n'\\u2202'\n'\\u2211'\n'\\u0142'\n'\\u013b'\n'\\u013c'\n'\\u013d'\n'\\u013e'\n'\\u0139'\n'\\u013a'\n'\\u0145'\n'\\u0146'\n'\\u0143'\n'\\xac'\n'\\u221a'\n'\\u0144'\n'\\u0147'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\u0148'\n'\\u0150'\n'\\xd5'\n'\\u0151'\n'\\u014c'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\u014d'\n'\\u0154'\n'\\u0155'\n'\\u0158'\n'\\u2039'\n'\\u203a'\n'\\u0159'\n'\\u0156'\n'\\u0157'\n'\\u0160'\n'\\u201a'\n'\\u201e'\n'\\u0161'\n'\\u015a'\n'\\u015b'\n'\\xc1'\n'\\u0164'\n'\\u0165'\n'\\xcd'\n'\\u017d'\n'\\u017e'\n'\\u016a'\n'\\xd3'\n'\\xd4'\n'\\u016b'\n'\\u016e'\n'\\xda'\n'\\u016f'\n'\\u0170'\n'\\u0171'\n'\\u0172'\n'\\u0173'\n'\\xdd'\n'\\xfd'\n'\\u0137'\n'\\u017b'\n'\\u0141'\n'\\u017c'\n'\\u0122'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp775": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass 
IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp775',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0106,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x0101,\n0x0084:0x00e4,\n0x0085:0x0123,\n0x0086:0x00e5,\n0x0087:0x0107,\n0x0088:0x0142,\n0x0089:0x0113,\n0x008a:0x0156,\n0x008b:0x0157,\n0x008c:0x012b,\n0x008d:0x0179,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x014d,\n0x0094:0x00f6,\n0x0095:0x0122,\n0x0096:0x00a2,\n0x0097:0x015a,\n0x0098:0x015b,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x00d7,\n0x009f:0x00a4,\n0x00a0:0x0100,\n0x00a1:0x012a,\n0x00a2:0x00f3,\n0x00a3:0x017b,\n0x00a4:0x017c,\n0x00a5:0x017a,\n0x00a6:0x201d,\n0x00a7:0x00a6,\n0x00a8:0x00a9,\n0x00a9:0x00ae,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x0141,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x0104,\n0x00b6:0x010c,\n0x00b7:0x0118,\n0x00b8:0x0116,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x012e,\n0x00be:0x0160,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x0172,\n0x00c7:0x016a,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x017d,\n0x00d0:0x0105,\n0x00d1:0x010d,\n0x00d2:0x0119,\n0x00d3:0x0117,\n0x00d4:0x012f,\n0x00d5:0x0161,\n0x00d6:0x0173,\n0x00d7:0x016b,\n0x00d8:0x017e,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x014c,\n0x00e3:0x0143,\n0x00e4:0x00f5,\n0x00e5:0x00d5,\n0x00e6:0x00b5,\n0x00e7:0x0144,\n0x00e8:0x0136,\n0x00e9:0x0137,\n0x00ea:0x013b,\n0x00eb:0x013c,\n0x00ec:0x0146,\n0x00ed:0x0112,\n0x00ee:0x0145,\n0x00ef:0x2019,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:0x201c,\n0x00f3:0x00be,\n0x00f4:0x00b6,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x201e,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x00b9,\n0x00fc:0x00b3,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0106'\n'\\xfc'\n'\\xe9'\n'\\u0101'\n'\\xe4'\n'\\u0123'\n'\\xe5'\n'\\u0107'\n'\\u0142'\n'\\u0113'\n'\\u0156'\n'\\u0157'\n'\\u012b'\n'\\u0179'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\u014d'\n'\\xf6'\n'\\u0122'\n'\\xa2'\n'\\u015a'\n'\\u015b'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\xd7'\n'\\xa4'\n'\\u0100'\n'\\u012a'\n'\\xf3'\n'\\u017b'\n'\\u017c'\n'\\u017a'\n'\\u201d'\n'\\xa6'\n'\\xa9'\n'\\xae'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\u0141'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u0104'\n'\\u010c'\n'\\u0118'\n'\\u0116'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u012e'\n'\\u0160'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u0172'\n'\\u016a'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u017d'\n'\\u0105'\n'\\u010d'\n'\\u0119'\n'\\u0117'\n'\\u012f'\n'\\u0161'\n'\\u0173'\n'\\u016b'\n'\\u017e'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\u014c'\n'\\u0143'\n'\\xf5'\n'\\xd5'\n'\\xb5'\n'\\u0144'\n'\\u0136'\n'\\u0137'\n'\\u013b'\n'\\u013c'\n'\\u0146'\n'\\u0112'\n'\\u0145'\n'\\u2019'\n'\\xad'\n'\\xb1'\n'\\u201c'\n'\\xbe'\n'\\xb6'\n'\\xa7'\n'\\xf7'\n'\\u201e'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\xb9'\n'\\xb3'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x0
06b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a2:0x0096,\n0x00a3:0x009c,\n0x00a4:0x009f,\n0x00a6:0x00a7,\n0x00a7:0x00f5,\n0x00a9:0x00a8,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00ae:0x00a9,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00fc,\n0x00b5:0x00e6,\n0x00b6:0x00f4,\n0x00b7:0x00fa,\n0x00b9:0x00fb,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00f3,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c9:0x0090,\n0x00d3:0x00e0,\n0x00d5:0x00e5,\n0x00d6:0x0099,\n0x00d7:0x009e,\n0x00d8:0x009d,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e9:0x0082,\n0x00f3:0x00a2,\n0x00f5:0x00e4,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00fc:0x0081,\n0x0100:0x00a0,\n0x0101:0x0083,\n0x0104:0x00b5,\n0x0105:0x00d0,\n0x0106:0x0080,\n0x0107:0x0087,\n0x010c:0x00b6,\n0x010d:0x00d1,\n0x0112:0x00ed,\n0x0113:0x0089,\n0x0116:0x00b8,\n0x0117:0x00d3,\n0x0118:0x00b7,\n0x0119:0x00d2,\n0x0122:0x0095,\n0x0123:0x0085,\n0x012a:0x00a1,\n0x012b:0x008c,\n0x012e:0x00bd,\n0x012f:0x00d4,\n0x0136:0x00e8,\n0x0137:0x00e9,\n0x013b:0x00ea,\n0x013c:0x00eb,\n0x0141:0x00ad,\n0x0142:0x0088,\n0x0143:0x00e3,\n0x0144:0x00e7,\n0x0145:0x00ee,\n0x0146:0x00ec,\n0x014c:0x00e2,\n0x014d:0x0093,\n0x0156:0x008a,\n0x0157:0x008b,\n0x015a:0x0097,\n0x015b:0x0098,\n0x0160:0x00be,\n0x0161:0x00d5,\n0x016a:0x00c7,\n0x016b:0x00d7,\n0x0172:0x00c6,\n0x0173:0x00d6,\n0x0179:0x008d,\n0x017a:0x00a5,\n0x017b:0x00a3,\n0x017c:0x00a4,\n0x017d:0x00cf,\n0x017e:0x00d8,\n0x2019:0x00ef,\n0x201c:0x00f2,\n0x201d:0x00a6,\n0x201e:0x00f7,\n0x2219:0x00f9,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.mac_roman": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-roman',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\xc6'\n'\\xd8'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u03c0'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\xe6'\n'\\xf8'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\xff'\n'\\u0178'\n'\\u2044'\n'\\u20ac'\n'\\u2039'\n'\\u203a'\n'\\ufb01'\n'\\ufb02'\n'\\u2021'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\xca'\n'\\xc1'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\uf8ff'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\u0131'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u02d8'\n'\\u02d9'\n'\\u02da'\n'\\xb8'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport codecs\nimport sys\nfrom. 
import aliases\n\n_cache={}\n_unknown='--unknown--'\n_import_tail=['*']\n_aliases=aliases.aliases\n\nclass CodecRegistryError(LookupError,SystemError):\n pass\n \ndef normalize_encoding(encoding):\n\n ''\n\n\n\n\n\n\n\n\n \n if isinstance(encoding,bytes):\n encoding=str(encoding,\"ascii\")\n \n chars=[]\n punct=False\n for c in encoding:\n if c.isalnum()or c =='.':\n if punct and chars:\n chars.append('_')\n if c.isascii():\n chars.append(c)\n punct=False\n else:\n punct=True\n return ''.join(chars)\n \ndef search_function(encoding):\n\n\n entry=_cache.get(encoding,_unknown)\n if entry is not _unknown:\n return entry\n \n \n \n \n \n \n \n \n norm_encoding=normalize_encoding(encoding)\n aliased_encoding=_aliases.get(norm_encoding)or\\\n _aliases.get(norm_encoding.replace('.','_'))\n if aliased_encoding is not None:\n modnames=[aliased_encoding,\n norm_encoding]\n else:\n modnames=[norm_encoding]\n for modname in modnames:\n if not modname or '.'in modname:\n continue\n try:\n \n \n mod=__import__('encodings.'+modname,fromlist=_import_tail,\n level=0)\n except ImportError:\n \n \n pass\n else:\n break\n else:\n mod=None\n \n try:\n getregentry=mod.getregentry\n except AttributeError:\n \n mod=None\n \n if mod is None:\n \n _cache[encoding]=None\n return None\n \n \n entry=getregentry()\n if not isinstance(entry,codecs.CodecInfo):\n if not 4 <=len(entry)<=7:\n raise CodecRegistryError('module \"%s\" (%s) failed to register'\n %(mod.__name__,mod.__file__))\n if not callable(entry[0])or not callable(entry[1])or\\\n (entry[2]is not None and not callable(entry[2]))or\\\n (entry[3]is not None and not callable(entry[3]))or\\\n (len(entry)>4 and entry[4]is not None and not callable(entry[4]))or\\\n (len(entry)>5 and entry[5]is not None and not callable(entry[5])):\n raise CodecRegistryError('incompatible codecs in module \"%s\" (%s)'\n %(mod.__name__,mod.__file__))\n if len(entry)<7 or entry[6]is None:\n entry +=(None,)*(6 -len(entry))+(mod.__name__.split(\".\",1)[1],)\n entry=codecs.CodecInfo(*entry)\n \n \n _cache[encoding]=entry\n \n \n \n try:\n codecaliases=mod.getaliases()\n except AttributeError:\n pass\n else:\n for alias in codecaliases:\n if alias not in _aliases:\n _aliases[alias]=modname\n \n \n return entry\n \n \ncodecs.register(search_function)\n\nif sys.platform =='win32':\n\n\n\n\n def _alias_mbcs(encoding):\n try:\n import _winapi\n ansi_code_page=\"cp%s\"%_winapi.GetACP()\n if encoding ==ansi_code_page:\n import encodings.mbcs\n return encodings.mbcs.getregentry()\n except ImportError:\n \n pass\n \n codecs.register(_alias_mbcs)\n", ["_winapi", "codecs", "encodings", "encodings.aliases", "encodings.mbcs", "sys"], 1], "encodings.cp852": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp852',\n encode=Codec().encode,\n decode=Codec().decode,\n 
incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x016f,\n0x0086:0x0107,\n0x0087:0x00e7,\n0x0088:0x0142,\n0x0089:0x00eb,\n0x008a:0x0150,\n0x008b:0x0151,\n0x008c:0x00ee,\n0x008d:0x0179,\n0x008e:0x00c4,\n0x008f:0x0106,\n0x0090:0x00c9,\n0x0091:0x0139,\n0x0092:0x013a,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x013d,\n0x0096:0x013e,\n0x0097:0x015a,\n0x0098:0x015b,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x0164,\n0x009c:0x0165,\n0x009d:0x0141,\n0x009e:0x00d7,\n0x009f:0x010d,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x0104,\n0x00a5:0x0105,\n0x00a6:0x017d,\n0x00a7:0x017e,\n0x00a8:0x0118,\n0x00a9:0x0119,\n0x00aa:0x00ac,\n0x00ab:0x017a,\n0x00ac:0x010c,\n0x00ad:0x015f,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x00c1,\n0x00b6:0x00c2,\n0x00b7:0x011a,\n0x00b8:0x015e,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x017b,\n0x00be:0x017c,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x0102,\n0x00c7:0x0103,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x00a4,\n0x00d0:0x0111,\n0x00d1:0x0110,\n0x00d2:0x010e,\n0x00d3:0x00cb,\n0x00d4:0x010f,\n0x00d5:0x0147,\n0x00d6:0x00cd,\n0x00d7:0x00ce,\n0x00d8:0x011b,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x0162,\n0x00de:0x016e,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x00d4,\n0x00e3:0x0143,\n0x00e4:0x0144,\n0x00e5:0x0148,\n0x00e6:0x0160,\n0x00e7:0x0161,\n0x00e8:0x0154,\n0x00e9:0x00da,\n0x00ea:0x0155,\n0x00eb:0x0170,\n0x00ec:0x00fd,\n0x00ed:0x00dd,\n0x00ee:0x0163,\n0x00ef:0x00b4,\n0x00f0:0x00ad,\n0x00f1:0x02dd,\n0x00f2:0x02db,\n0x00f3:0x02c7,\n0x00f4:0x02d8,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x00b8,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x02d9,\n0x00fb:0x0171,\n0x00fc:0x0158,\n0x00fd:0x0159,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\u016f'\n'\\u0107'\n'\\xe7'\n'\\u0142'\n'\\xeb'\n'\\u0150'\n'\\u0151'\n'\\xee'\n'\\u0179'\n'\\xc4'\n'\\u0106'\n'\\xc9'\n'\\u0139'\n'\\u013a'\n'\\xf4'\n'\\xf6'\n'\\u013d'\n'\\u013e'\n'\\u015a'\n'\\u015b'\n'\\xd6'\n'\\xdc'\n'\\u0164'\n'\\u0165'\n'\\u0141'\n'\\xd7'\n'\\u010d'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\u0104'\n'\\u0105'\n'\\u017d'\n'\\u017e'\n'\\u0118'\n'\\u0119'\n'\\xac'\n'\\u017a'\n'\\u010c'\n'\\u015f'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\xc1'\n'\\xc2'\n'\\u011a'\n'\\u015e'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u017b'\n'\\u017c'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u0102'\n'\\u0103'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\u0111'\n'\\u0110'\n'\\u010e'\n'\\xcb'\n'\\u010f'\n'\\u0147'\n'\\xcd'\n'\\xce'\n'\\u011b'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u0162'\n'\\u016e'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\xd4'\n'\\u0143'\n'\\u0144'\n'\\u0148'\n'\\u0160'\n'\\u0161'\n'\\u0154'\n'\\xda'\n'\\u0155'\n'\\u0170'\n'\\xfd'\n'\\xdd'\n'\\u0163'\n'\\xb4'\n'\\xad'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n'\\u02d8'\n'\\xa7'\n'\\xf7'\n'\\xb8'\n'\\xb0'\n'\\xa8'\n'\\u02d9'\n'\\u0171'\n'\\u0158'\n'\\u0159'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b
:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a4:0x00cf,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00b0:0x00f8,\n0x00b4:0x00ef,\n0x00b8:0x00f7,\n0x00bb:0x00af,\n0x00c1:0x00b5,\n0x00c2:0x00b6,\n0x00c4:0x008e,\n0x00c7:0x0080,\n0x00c9:0x0090,\n0x00cb:0x00d3,\n0x00cd:0x00d6,\n0x00ce:0x00d7,\n0x00d3:0x00e0,\n0x00d4:0x00e2,\n0x00d6:0x0099,\n0x00d7:0x009e,\n0x00da:0x00e9,\n0x00dc:0x009a,\n0x00dd:0x00ed,\n0x00df:0x00e1,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e4:0x0084,\n0x00e7:0x0087,\n0x00e9:0x0082,\n0x00eb:0x0089,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00fa:0x00a3,\n0x00fc:0x0081,\n0x00fd:0x00ec,\n0x0102:0x00c6,\n0x0103:0x00c7,\n0x0104:0x00a4,\n0x0105:0x00a5,\n0x0106:0x008f,\n0x0107:0x0086,\n0x010c:0x00ac,\n0x010d:0x009f,\n0x010e:0x00d2,\n0x010f:0x00d4,\n0x0110:0x00d1,\n0x0111:0x00d0,\n0x0118:0x00a8,\n0x0119:0x00a9,\n0x011a:0x00b7,\n0x011b:0x00d8,\n0x0139:0x0091,\n0x013a:0x0092,\n0x013d:0x0095,\n0x013e:0x0096,\n0x0141:0x009d,\n0x0142:0x0088,\n0x0143:0x00e3,\n0x0144:0x00e4,\n0x0147:0x00d5,\n0x0148:0x00e5,\n0x0150:0x008a,\n0x0151:0x008b,\n0x0154:0x00e8,\n0x0155:0x00ea,\n0x0158:0x00fc,\n0x0159:0x00fd,\n0x015a:0x0097,\n0x015b:0x0098,\n0x015e:0x00b8,\n0x015f:0x00ad,\n0x0160:0x00e6,\n0x0161:0x00e7,\n0x0162:0x00dd,\n0x0163:0x00ee,\n0x0164:0x009b,\n0x0165:0x009c,\n0x016e:0x00de,\n0x016f:0x0085,\n0x0170:0x00eb,\n0x0171:0x00fb,\n0x0179:0x008d,\n0x017a:0x00ab,\n0x017b:0x00bd,\n0x017c:0x00be,\n0x017d:0x00a6,\n0x017e:0x00a7,\n0x02c7:0x00f3,\n0x02d8:0x00f4,\n0x02d9:0x00fa,\n0x02db:0x00f2,\n0x02dd:0x00f1,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.shift_jisx0213": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('shift_jisx0213')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='shift_jisx0213',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.cp866": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def 
decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp866',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0410,\n0x0081:0x0411,\n0x0082:0x0412,\n0x0083:0x0413,\n0x0084:0x0414,\n0x0085:0x0415,\n0x0086:0x0416,\n0x0087:0x0417,\n0x0088:0x0418,\n0x0089:0x0419,\n0x008a:0x041a,\n0x008b:0x041b,\n0x008c:0x041c,\n0x008d:0x041d,\n0x008e:0x041e,\n0x008f:0x041f,\n0x0090:0x0420,\n0x0091:0x0421,\n0x0092:0x0422,\n0x0093:0x0423,\n0x0094:0x0424,\n0x0095:0x0425,\n0x0096:0x0426,\n0x0097:0x0427,\n0x0098:0x0428,\n0x0099:0x0429,\n0x009a:0x042a,\n0x009b:0x042b,\n0x009c:0x042c,\n0x009d:0x042d,\n0x009e:0x042e,\n0x009f:0x042f,\n0x00a0:0x0430,\n0x00a1:0x0431,\n0x00a2:0x0432,\n0x00a3:0x0433,\n0x00a4:0x0434,\n0x00a5:0x0435,\n0x00a6:0x0436,\n0x00a7:0x0437,\n0x00a8:0x0438,\n0x00a9:0x0439,\n0x00aa:0x043a,\n0x00ab:0x043b,\n0x00ac:0x043c,\n0x00ad:0x043d,\n0x00ae:0x043e,\n0x00af:0x043f,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x0440,\n0x00e1:0x0441,\n0x00e2:0x0442,\n0x00e3:0x0443,\n0x00e4:0x0444,\n0x00e5:0x0445,\n0x00e6:0x0446,\n0x00e7:0x0447,\n0x00e8:0x0448,\n0x00e9:0x0449,\n0x00ea:0x044a,\n0x00eb:0x044b,\n0x00ec:0x044c,\n0x00ed:0x044d,\n0x00ee:0x044e,\n0x00ef:0x044f,\n0x00f0:0x0401,\n0x00f1:0x0451,\n0x00f2:0x0404,\n0x00f3:0x0454,\n0x00f4:0x0407,\n0x00f5:0x0457,\n0x00f6:0x040e,\n0x00f7:0x045e,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x2116,\n0x00fd:0x00a4,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n'\\u0401'\n'\\u0451'\n'\\u0404'\n'\\u0454'\n'\\u0407'\n'\\u0457'\n'\\u040e'\n'\\u045e'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u2116'\n'\\xa4'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0
066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a4:0x00fd,\n0x00b0:0x00f8,\n0x00b7:0x00fa,\n0x0401:0x00f0,\n0x0404:0x00f2,\n0x0407:0x00f4,\n0x040e:0x00f6,\n0x0410:0x0080,\n0x0411:0x0081,\n0x0412:0x0082,\n0x0413:0x0083,\n0x0414:0x0084,\n0x0415:0x0085,\n0x0416:0x0086,\n0x0417:0x0087,\n0x0418:0x0088,\n0x0419:0x0089,\n0x041a:0x008a,\n0x041b:0x008b,\n0x041c:0x008c,\n0x041d:0x008d,\n0x041e:0x008e,\n0x041f:0x008f,\n0x0420:0x0090,\n0x0421:0x0091,\n0x0422:0x0092,\n0x0423:0x0093,\n0x0424:0x0094,\n0x0425:0x0095,\n0x0426:0x0096,\n0x0427:0x0097,\n0x0428:0x0098,\n0x0429:0x0099,\n0x042a:0x009a,\n0x042b:0x009b,\n0x042c:0x009c,\n0x042d:0x009d,\n0x042e:0x009e,\n0x042f:0x009f,\n0x0430:0x00a0,\n0x0431:0x00a1,\n0x0432:0x00a2,\n0x0433:0x00a3,\n0x0434:0x00a4,\n0x0435:0x00a5,\n0x0436:0x00a6,\n0x0437:0x00a7,\n0x0438:0x00a8,\n0x0439:0x00a9,\n0x043a:0x00aa,\n0x043b:0x00ab,\n0x043c:0x00ac,\n0x043d:0x00ad,\n0x043e:0x00ae,\n0x043f:0x00af,\n0x0440:0x00e0,\n0x0441:0x00e1,\n0x0442:0x00e2,\n0x0443:0x00e3,\n0x0444:0x00e4,\n0x0445:0x00e5,\n0x0446:0x00e6,\n0x0447:0x00e7,\n0x0448:0x00e8,\n0x0449:0x00e9,\n0x044a:0x00ea,\n0x044b:0x00eb,\n0x044c:0x00ec,\n0x044d:0x00ed,\n0x044e:0x00ee,\n0x044f:0x00ef,\n0x0451:0x00f1,\n0x0454:0x00f3,\n0x0457:0x00f5,\n0x045e:0x00f7,\n0x2116:0x00fc,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.utf_7": [".py", "''\n\n\n\nimport codecs\n\n\n\nencode=codecs.utf_7_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_7_decode(input,errors,True)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.utf_7_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=codecs.utf_7_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=codecs.utf_7_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=codecs.utf_7_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-7',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.base64_codec": [".py", "''\n\n\n\n\n\n\nimport codecs\nimport base64\n\n\n\ndef base64_encode(input,errors='strict'):\n assert errors =='strict'\n return(base64.encodebytes(input),len(input))\n \ndef 
base64_decode(input,errors='strict'):\n assert errors =='strict'\n return(base64.decodebytes(input),len(input))\n \nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return base64_encode(input,errors)\n def decode(self,input,errors='strict'):\n return base64_decode(input,errors)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n assert self.errors =='strict'\n return base64.encodebytes(input)\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n assert self.errors =='strict'\n return base64.decodebytes(input)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n charbuffertype=bytes\n \nclass StreamReader(Codec,codecs.StreamReader):\n charbuffertype=bytes\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='base64',\n encode=base64_encode,\n decode=base64_decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n _is_text_encoding=False,\n )\n", ["base64", "codecs"]], "encodings.cp932": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('cp932')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp932',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.cp720": [".py", "''\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp720',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\xe9'\n'\\xe2'\n'\\x84'\n'\\xe0'\n'\\x86'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\u0651'\n'\\u0652'\n'\\xf4'\n'\\xa4'\n'\\u0640'\n'\\xfb'\n'\\xf9'\n'\\u0621'\n'\\u0622'\n'\\u0623'\n'\\u0624'\n'\\xa3'\n'\\u0625'\n'\\u0626'\n'\\u0627'\n'\\u0628'\n'\\u0629'\n'\\u062a'\n'\\u062b'\n'\\u062c'\n'\\u062d'\n'\\u062e'\n'\\u062f'\n'\\u0630'\n'\\u0631'\n'\\u0632'\n'\\u0633'\n'\\u0634'\n'\\u0635'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u0636'\n'\\u0637'\n'\\u0638'\n'\\u0639'\n'\\u063a'\n'\\u0641'\n'\\xb5'\n'\\u0642'\n'\\u0643'\n'\\u0644'\n'\\u0645'\n'\\u0646'\n'\\u0647'\n'\\u0648'\n'\\u0649'\n'\\u064a'\n'\\u2261'\n'\\u064b'\n'\\u064c'\n'\\u064d'\n'\\u064e'\n'\\u064f'\n'\\u0650'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp862": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp862',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x05d0,\n0x0081:0x05d1,\n0x0082:0x05d2,\n0x0083:0x05d3,\n0x0084:0x05d4,\n0x0085:0x05d5,\n0x0086:0x05d6,\n0x0087:0x05d7,\n0x0088:0x05d8,\n0x0089:0x05d9,\n0x008a:0x05da,\n0x008b:0x05db,\n0x008c:0x05dc,\n0x008d:0x05dd,\n0x008e:0x05de,\n0x008f:0x05df,\n0x0090:0x05e0,\n0x0091:0x05e1,\n0x0092:0x05e2,\n0x0093:0x05e3,\n0x0094:0x05e4,\n0x0095:0x05e5,\n0x0096:0x05e6,\n0x0097:0x05e7,\n0x0098:0x05e8,\n0x0099:0x05e9,\n0x009a:0x05ea,\n0x009b:0x00a2,\n0x009c:0x00a3,\n0x009d:0x00a5,\n0x009e:0x20a7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x2310,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u05d0'\n'\\u05d1'\n'\\u05d2'\n'\\u05d3'\n'\\u05d4'\n'\\u05d5'\n'\\u05d6'\n'\\u05d7'\n'\\u05d8'\n'\\u05d9'\n'\\u05da'\n'\\u05db'\n'\\u05dc'\n'\\u05dd'\n'\\u05de'\n'\\u05df'\n'\\u05e0'\n'\\u05e1'\n'\\u05e2'\n'\\u05e3'\n'\\u05e4'\n'\\u05e5'\n'\\u05e6'\n'\\u05e7'\n'\\u05e8'\n'\\u05e9'\n'\\u05ea'\n'\\xa2'\n'\\xa3'\n'\\xa5'\n'\\u20a7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\u2310'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069
:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x009b,\n0x00a3:0x009c,\n0x00a5:0x009d,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b5:0x00e6,\n0x00b7:0x00fa,\n0x00ba:0x00a7,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00bf:0x00a8,\n0x00d1:0x00a5,\n0x00df:0x00e1,\n0x00e1:0x00a0,\n0x00ed:0x00a1,\n0x00f1:0x00a4,\n0x00f3:0x00a2,\n0x00f7:0x00f6,\n0x00fa:0x00a3,\n0x0192:0x009f,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x05d0:0x0080,\n0x05d1:0x0081,\n0x05d2:0x0082,\n0x05d3:0x0083,\n0x05d4:0x0084,\n0x05d5:0x0085,\n0x05d6:0x0086,\n0x05d7:0x0087,\n0x05d8:0x0088,\n0x05d9:0x0089,\n0x05da:0x008a,\n0x05db:0x008b,\n0x05dc:0x008c,\n0x05dd:0x008d,\n0x05de:0x008e,\n0x05df:0x008f,\n0x05e0:0x0090,\n0x05e1:0x0091,\n0x05e2:0x0092,\n0x05e3:0x0093,\n0x05e4:0x0094,\n0x05e5:0x0095,\n0x05e6:0x0096,\n0x05e7:0x0097,\n0x05e8:0x0098,\n0x05e9:0x0099,\n0x05ea:0x009a,\n0x207f:0x00fc,\n0x20a7:0x009e,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2310:0x00a9,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.cp437": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp437',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x00ec,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00f2,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x00ff,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00a2,\n0x009c:0x00a3,\n0x009d:0x00a5,\n0x009e:0x20a7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x2310,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\xec'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xfb'\n'\\xf9'\n'\\xff'\n'\\xd6'\n'\\xdc'\n'\\xa2'\n'\\xa3'\n'\\xa5'\n'\\u20a7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\u2310'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006
c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x009b,\n0x00a3:0x009c,\n0x00a5:0x009d,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b5:0x00e6,\n0x00b7:0x00fa,\n0x00ba:0x00a7,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00bf:0x00a8,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c9:0x0090,\n0x00d1:0x00a5,\n0x00d6:0x0099,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ec:0x008d,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00ff:0x0098,\n0x0192:0x009f,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x207f:0x00fc,\n0x20a7:0x009e,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2310:0x00a9,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.palmos": [".py", "''\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='palmos',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\x81'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\u0160'\n'\\u2039'\n'\\u0152'\n'\\u2666'\n'\\u2663'\n'\\u2665'\n'\\u2660'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\u0161'\n'\\x9b'\n'\\u0153'\n'\\x9d'\n'\\x9e'\n'\\u0178'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xd0'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\xde'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xf0'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\xfe'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_9": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-9',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u011e'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u0130'\n'\\u015e'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\u011f'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u0131'\n'\\u015f'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp856": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp856',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u05d0'\n'\\u05d1'\n'\\u05d2'\n'\\u05d3'\n'\\u05d4'\n'\\u05d5'\n'\\u05d6'\n'\\u05d7'\n'\\u05d8'\n'\\u05d9'\n'\\u05da'\n'\\u05db'\n'\\u05dc'\n'\\u05dd'\n'\\u05de'\n'\\u05df'\n'\\u05e0'\n'\\u05e1'\n'\\u05e2'\n'\\u05e3'\n'\\u05e4'\n'\\u05e5'\n'\\u05e6'\n'\\u05e7'\n'\\u05e8'\n'\\u05e9'\n'\\u05ea'\n'\\ufffe'\n'\\xa3'\n'\\ufffe'\n'\\xd7'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xae'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\ufffe'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa9'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\xa2'\n'\\xa5'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\ufffe'\n'\\ufffe'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\xa6'\n'\\ufffe'\n'\\u2580'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xb5'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xaf'\n'\\xb4'\n'\\xad'\n'\\xb1'\n'\\u2017'\n'\\xbe'\n'\\xb6'\n'\\xa7'\n'\\xf7'\n'\\xb8'\n'\\xb0'\n'\\xa8'\n'\\xb7'\n'\\xb9'\n'\\xb3'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.aliases": [".py", 
"''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\naliases={\n\n\n\n\n'646':'ascii',\n'ansi_x3.4_1968':'ascii',\n'ansi_x3_4_1968':'ascii',\n'ansi_x3.4_1986':'ascii',\n'cp367':'ascii',\n'csascii':'ascii',\n'ibm367':'ascii',\n'iso646_us':'ascii',\n'iso_646.irv_1991':'ascii',\n'iso_ir_6':'ascii',\n'us':'ascii',\n'us_ascii':'ascii',\n\n\n'base64':'base64_codec',\n'base_64':'base64_codec',\n\n\n'big5_tw':'big5',\n'csbig5':'big5',\n\n\n'big5_hkscs':'big5hkscs',\n'hkscs':'big5hkscs',\n\n\n'bz2':'bz2_codec',\n\n\n'037':'cp037',\n'csibm037':'cp037',\n'ebcdic_cp_ca':'cp037',\n'ebcdic_cp_nl':'cp037',\n'ebcdic_cp_us':'cp037',\n'ebcdic_cp_wt':'cp037',\n'ibm037':'cp037',\n'ibm039':'cp037',\n\n\n'1026':'cp1026',\n'csibm1026':'cp1026',\n'ibm1026':'cp1026',\n\n\n'1125':'cp1125',\n'ibm1125':'cp1125',\n'cp866u':'cp1125',\n'ruscii':'cp1125',\n\n\n'1140':'cp1140',\n'ibm1140':'cp1140',\n\n\n'1250':'cp1250',\n'windows_1250':'cp1250',\n\n\n'1251':'cp1251',\n'windows_1251':'cp1251',\n\n\n'1252':'cp1252',\n'windows_1252':'cp1252',\n\n\n'1253':'cp1253',\n'windows_1253':'cp1253',\n\n\n'1254':'cp1254',\n'windows_1254':'cp1254',\n\n\n'1255':'cp1255',\n'windows_1255':'cp1255',\n\n\n'1256':'cp1256',\n'windows_1256':'cp1256',\n\n\n'1257':'cp1257',\n'windows_1257':'cp1257',\n\n\n'1258':'cp1258',\n'windows_1258':'cp1258',\n\n\n'273':'cp273',\n'ibm273':'cp273',\n'csibm273':'cp273',\n\n\n'424':'cp424',\n'csibm424':'cp424',\n'ebcdic_cp_he':'cp424',\n'ibm424':'cp424',\n\n\n'437':'cp437',\n'cspc8codepage437':'cp437',\n'ibm437':'cp437',\n\n\n'500':'cp500',\n'csibm500':'cp500',\n'ebcdic_cp_be':'cp500',\n'ebcdic_cp_ch':'cp500',\n'ibm500':'cp500',\n\n\n'775':'cp775',\n'cspc775baltic':'cp775',\n'ibm775':'cp775',\n\n\n'850':'cp850',\n'cspc850multilingual':'cp850',\n'ibm850':'cp850',\n\n\n'852':'cp852',\n'cspcp852':'cp852',\n'ibm852':'cp852',\n\n\n'855':'cp855',\n'csibm855':'cp855',\n'ibm855':'cp855',\n\n\n'857':'cp857',\n'csibm857':'cp857',\n'ibm857':'cp857',\n\n\n'858':'cp858',\n'csibm858':'cp858',\n'ibm858':'cp858',\n\n\n'860':'cp860',\n'csibm860':'cp860',\n'ibm860':'cp860',\n\n\n'861':'cp861',\n'cp_is':'cp861',\n'csibm861':'cp861',\n'ibm861':'cp861',\n\n\n'862':'cp862',\n'cspc862latinhebrew':'cp862',\n'ibm862':'cp862',\n\n\n'863':'cp863',\n'csibm863':'cp863',\n'ibm863':'cp863',\n\n\n'864':'cp864',\n'csibm864':'cp864',\n'ibm864':'cp864',\n\n\n'865':'cp865',\n'csibm865':'cp865',\n'ibm865':'cp865',\n\n\n'866':'cp866',\n'csibm866':'cp866',\n'ibm866':'cp866',\n\n\n'869':'cp869',\n'cp_gr':'cp869',\n'csibm869':'cp869',\n'ibm869':'cp869',\n\n\n'932':'cp932',\n'ms932':'cp932',\n'mskanji':'cp932',\n'ms_kanji':'cp932',\n\n\n'949':'cp949',\n'ms949':'cp949',\n'uhc':'cp949',\n\n\n'950':'cp950',\n'ms950':'cp950',\n\n\n'jisx0213':'euc_jis_2004',\n'eucjis2004':'euc_jis_2004',\n'euc_jis2004':'euc_jis_2004',\n\n\n'eucjisx0213':'euc_jisx0213',\n\n\n'eucjp':'euc_jp',\n'ujis':'euc_jp',\n'u_jis':'euc_jp',\n\n\n'euckr':'euc_kr',\n'korean':'euc_kr',\n'ksc5601':'euc_kr',\n'ks_c_5601':'euc_kr',\n'ks_c_5601_1987':'euc_kr',\n'ksx1001':'euc_kr',\n'ks_x_1001':'euc_kr',\n\n\n'gb18030_2000':'gb18030',\n\n\n'chinese':'gb2312',\n'csiso58gb231280':'gb2312',\n'euc_cn':'gb2312',\n'euccn':'gb2312',\n'eucgb2312_cn':'gb2312',\n'gb2312_1980':'gb2312',\n'gb2312_80':'gb2312',\n'iso_ir_58':'gb2312',\n\n\n'936':'gbk',\n'cp936':'gbk',\n'ms936':'gbk',\n\n\n'hex':'hex_codec',\n\n\n'roman8':'hp_roman8',\n'r8':'hp_roman8',\n'csHPRoman8':'hp_roman8',\n'cp1051':'hp_roman8',\n'ibm1051':'hp_roman8',\n\n\n'hzgb':'hz',\n'hz_gb':'hz',\n'hz_gb_2312':'hz',\n\n\n'csiso2022jp':'iso2022_jp',\n'iso2022jp':'is
o2022_jp',\n'iso_2022_jp':'iso2022_jp',\n\n\n'iso2022jp_1':'iso2022_jp_1',\n'iso_2022_jp_1':'iso2022_jp_1',\n\n\n'iso2022jp_2':'iso2022_jp_2',\n'iso_2022_jp_2':'iso2022_jp_2',\n\n\n'iso_2022_jp_2004':'iso2022_jp_2004',\n'iso2022jp_2004':'iso2022_jp_2004',\n\n\n'iso2022jp_3':'iso2022_jp_3',\n'iso_2022_jp_3':'iso2022_jp_3',\n\n\n'iso2022jp_ext':'iso2022_jp_ext',\n'iso_2022_jp_ext':'iso2022_jp_ext',\n\n\n'csiso2022kr':'iso2022_kr',\n'iso2022kr':'iso2022_kr',\n'iso_2022_kr':'iso2022_kr',\n\n\n'csisolatin6':'iso8859_10',\n'iso_8859_10':'iso8859_10',\n'iso_8859_10_1992':'iso8859_10',\n'iso_ir_157':'iso8859_10',\n'l6':'iso8859_10',\n'latin6':'iso8859_10',\n\n\n'thai':'iso8859_11',\n'iso_8859_11':'iso8859_11',\n'iso_8859_11_2001':'iso8859_11',\n\n\n'iso_8859_13':'iso8859_13',\n'l7':'iso8859_13',\n'latin7':'iso8859_13',\n\n\n'iso_8859_14':'iso8859_14',\n'iso_8859_14_1998':'iso8859_14',\n'iso_celtic':'iso8859_14',\n'iso_ir_199':'iso8859_14',\n'l8':'iso8859_14',\n'latin8':'iso8859_14',\n\n\n'iso_8859_15':'iso8859_15',\n'l9':'iso8859_15',\n'latin9':'iso8859_15',\n\n\n'iso_8859_16':'iso8859_16',\n'iso_8859_16_2001':'iso8859_16',\n'iso_ir_226':'iso8859_16',\n'l10':'iso8859_16',\n'latin10':'iso8859_16',\n\n\n'csisolatin2':'iso8859_2',\n'iso_8859_2':'iso8859_2',\n'iso_8859_2_1987':'iso8859_2',\n'iso_ir_101':'iso8859_2',\n'l2':'iso8859_2',\n'latin2':'iso8859_2',\n\n\n'csisolatin3':'iso8859_3',\n'iso_8859_3':'iso8859_3',\n'iso_8859_3_1988':'iso8859_3',\n'iso_ir_109':'iso8859_3',\n'l3':'iso8859_3',\n'latin3':'iso8859_3',\n\n\n'csisolatin4':'iso8859_4',\n'iso_8859_4':'iso8859_4',\n'iso_8859_4_1988':'iso8859_4',\n'iso_ir_110':'iso8859_4',\n'l4':'iso8859_4',\n'latin4':'iso8859_4',\n\n\n'csisolatincyrillic':'iso8859_5',\n'cyrillic':'iso8859_5',\n'iso_8859_5':'iso8859_5',\n'iso_8859_5_1988':'iso8859_5',\n'iso_ir_144':'iso8859_5',\n\n\n'arabic':'iso8859_6',\n'asmo_708':'iso8859_6',\n'csisolatinarabic':'iso8859_6',\n'ecma_114':'iso8859_6',\n'iso_8859_6':'iso8859_6',\n'iso_8859_6_1987':'iso8859_6',\n'iso_ir_127':'iso8859_6',\n\n\n'csisolatingreek':'iso8859_7',\n'ecma_118':'iso8859_7',\n'elot_928':'iso8859_7',\n'greek':'iso8859_7',\n'greek8':'iso8859_7',\n'iso_8859_7':'iso8859_7',\n'iso_8859_7_1987':'iso8859_7',\n'iso_ir_126':'iso8859_7',\n\n\n'csisolatinhebrew':'iso8859_8',\n'hebrew':'iso8859_8',\n'iso_8859_8':'iso8859_8',\n'iso_8859_8_1988':'iso8859_8',\n'iso_ir_138':'iso8859_8',\n\n\n'csisolatin5':'iso8859_9',\n'iso_8859_9':'iso8859_9',\n'iso_8859_9_1989':'iso8859_9',\n'iso_ir_148':'iso8859_9',\n'l5':'iso8859_9',\n'latin5':'iso8859_9',\n\n\n'cp1361':'johab',\n'ms1361':'johab',\n\n\n'cskoi8r':'koi8_r',\n\n\n'kz_1048':'kz1048',\n'rk1048':'kz1048',\n'strk1048_2002':'kz1048',\n\n\n\n\n\n\n\n\n'8859':'latin_1',\n'cp819':'latin_1',\n'csisolatin1':'latin_1',\n'ibm819':'latin_1',\n'iso8859':'latin_1',\n'iso8859_1':'latin_1',\n'iso_8859_1':'latin_1',\n'iso_8859_1_1987':'latin_1',\n'iso_ir_100':'latin_1',\n'l1':'latin_1',\n'latin':'latin_1',\n'latin1':'latin_1',\n\n\n'maccyrillic':'mac_cyrillic',\n\n\n'macgreek':'mac_greek',\n\n\n'maciceland':'mac_iceland',\n\n\n'maccentraleurope':'mac_latin2',\n'mac_centeuro':'mac_latin2',\n'maclatin2':'mac_latin2',\n\n\n'macintosh':'mac_roman',\n'macroman':'mac_roman',\n\n\n'macturkish':'mac_turkish',\n\n\n'ansi':'mbcs',\n'dbcs':'mbcs',\n\n\n'csptcp154':'ptcp154',\n'pt154':'ptcp154',\n'cp154':'ptcp154',\n'cyrillic_asian':'ptcp154',\n\n\n'quopri':'quopri_codec',\n'quoted_printable':'quopri_codec',\n'quotedprintable':'quopri_codec',\n\n\n'rot13':'rot_13',\n\n\n'csshiftjis':'shift_jis',\n'shi
ftjis':'shift_jis',\n'sjis':'shift_jis',\n's_jis':'shift_jis',\n\n\n'shiftjis2004':'shift_jis_2004',\n'sjis_2004':'shift_jis_2004',\n's_jis_2004':'shift_jis_2004',\n\n\n'shiftjisx0213':'shift_jisx0213',\n'sjisx0213':'shift_jisx0213',\n's_jisx0213':'shift_jisx0213',\n\n\n'tis620':'tis_620',\n'tis_620_0':'tis_620',\n'tis_620_2529_0':'tis_620',\n'tis_620_2529_1':'tis_620',\n'iso_ir_166':'tis_620',\n\n\n'u16':'utf_16',\n'utf16':'utf_16',\n\n\n'unicodebigunmarked':'utf_16_be',\n'utf_16be':'utf_16_be',\n\n\n'unicodelittleunmarked':'utf_16_le',\n'utf_16le':'utf_16_le',\n\n\n'u32':'utf_32',\n'utf32':'utf_32',\n\n\n'utf_32be':'utf_32_be',\n\n\n'utf_32le':'utf_32_le',\n\n\n'u7':'utf_7',\n'utf7':'utf_7',\n'unicode_1_1_utf_7':'utf_7',\n\n\n'u8':'utf_8',\n'utf':'utf_8',\n'utf8':'utf_8',\n'utf8_ucs2':'utf_8',\n'utf8_ucs4':'utf_8',\n'cp65001':'utf_8',\n\n\n'uu':'uu_codec',\n\n\n'zip':'zlib_codec',\n'zlib':'zlib_codec',\n\n\n'x_mac_japanese':'shift_jis',\n'x_mac_korean':'euc_kr',\n'x_mac_simp_chinese':'gb2312',\n'x_mac_trad_chinese':'big5',\n}\n", []], "encodings.latin_1": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n\n\n encode=codecs.latin_1_encode\n decode=codecs.latin_1_decode\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.latin_1_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.latin_1_decode(input,self.errors)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \nclass StreamConverter(StreamWriter,StreamReader):\n\n encode=codecs.latin_1_decode\n decode=codecs.latin_1_encode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-1',\n encode=Codec.encode,\n decode=Codec.decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.cp875": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp875',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\u0391'\n'\\u0392'\n'\\u0393'\n'\\u0394'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\u0398'\n'\\u0399'\n'['\n'.'\n'<'\n'('\n'+'\n'!'\n'&'\n'\\u039a'\n'\\u039b'\n'\\u039c'\n'\\u039d'\n'\\u039e'\n'\\u039f'\n'\\u03a0'\n'\\u03a1'\n'\\u03a3'\n']'\n'$'\n'*'\n')'\n';'\n'^'\n'-'\n'/'\n'\\u03a4'\n'\\u03a5'\n'\\u03a6'\n'\\u03a7'\n'\\u03a8'\n'\\u03a9'\n'\\u03aa'\n'\\u03ab'\n'|'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xa8'\n'\\u0386'\n'\\u0388'\n'\\u0389'\n'\\xa0'\n'\\u038a'\n'\\u038c'\n'\\u038e'\n'\\u038f'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\u0385'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\u03b1'\n'\\u03b2'\n'\\u03b3'\n'\\u03b4'\n'\\u03b5'\n'\\u03b6'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\u03b7'\n'\\u03b8'\n'\\u03b9'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\xb4'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\u03bd'\n'\\u03be'\n'\\u03bf'\n'\\u03c0'\n'\\u03c1'\n'\\u03c3'\n'\\xa3'\n'\\u03ac'\n'\\u03ad'\n'\\u03ae'\n'\\u03ca'\n'\\u03af'\n'\\u03cc'\n'\\u03cd'\n'\\u03cb'\n'\\u03ce'\n'\\u03c2'\n'\\u03c4'\n'\\u03c5'\n'\\u03c6'\n'\\u03c7'\n'\\u03c8'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\u03c9'\n'\\u0390'\n'\\u03b0'\n'\\u2018'\n'\\u2015'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb1'\n'\\xbd'\n'\\x1a'\n'\\u0387'\n'\\u2019'\n'\\xa6'\n'\\\\'\n'\\x1a'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xa7'\n'\\x1a'\n'\\x1a'\n'\\xab'\n'\\xac'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xa9'\n'\\x1a'\n'\\x1a'\n'\\xbb'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp950": [".py", "\n\n\n\n\n\nimport _codecs_tw,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_tw.getcodec('cp950')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp950',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_tw", "_multibytecodec", "codecs"]], "encodings.unicode_escape": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n\n\n encode=codecs.unicode_escape_encode\n decode=codecs.unicode_escape_decode\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.unicode_escape_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n def _buffer_decode(self,input,errors,final):\n return 
codecs.unicode_escape_decode(input,errors,final)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n def decode(self,input,errors='strict'):\n return codecs.unicode_escape_decode(input,errors,False)\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='unicode-escape',\n encode=Codec.encode,\n decode=Codec.decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs"]], "encodings.cp737": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp737',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0391,\n0x0081:0x0392,\n0x0082:0x0393,\n0x0083:0x0394,\n0x0084:0x0395,\n0x0085:0x0396,\n0x0086:0x0397,\n0x0087:0x0398,\n0x0088:0x0399,\n0x0089:0x039a,\n0x008a:0x039b,\n0x008b:0x039c,\n0x008c:0x039d,\n0x008d:0x039e,\n0x008e:0x039f,\n0x008f:0x03a0,\n0x0090:0x03a1,\n0x0091:0x03a3,\n0x0092:0x03a4,\n0x0093:0x03a5,\n0x0094:0x03a6,\n0x0095:0x03a7,\n0x0096:0x03a8,\n0x0097:0x03a9,\n0x0098:0x03b1,\n0x0099:0x03b2,\n0x009a:0x03b3,\n0x009b:0x03b4,\n0x009c:0x03b5,\n0x009d:0x03b6,\n0x009e:0x03b7,\n0x009f:0x03b8,\n0x00a0:0x03b9,\n0x00a1:0x03ba,\n0x00a2:0x03bb,\n0x00a3:0x03bc,\n0x00a4:0x03bd,\n0x00a5:0x03be,\n0x00a6:0x03bf,\n0x00a7:0x03c0,\n0x00a8:0x03c1,\n0x00a9:0x03c3,\n0x00aa:0x03c2,\n0x00ab:0x03c4,\n0x00ac:0x03c5,\n0x00ad:0x03c6,\n0x00ae:0x03c7,\n0x00af:0x03c8,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03c9,\n0x00e1:0x03ac,\n0x00e2:0x03ad,\n0x00e3:0x03ae,\n0x00e4:0x03ca,\n0x00e5:0x03af,\n0x00e6:0x03cc,\n0x00e7:0x03cd,\n0x00e8:0x03cb,\n0x00e9:0x03ce,\n0x00ea:0x0386,\n0x00eb:0x0388,\n0x00ec:0x0389,\n0x00ed:0x038a,\n0x00ee:0x038c,\n0x00ef:0x038e,\n0x00f0:0x038f,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x03aa,\n0x00f5:0x03ab,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f
8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0391'\n'\\u0392'\n'\\u0393'\n'\\u0394'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\u0398'\n'\\u0399'\n'\\u039a'\n'\\u039b'\n'\\u039c'\n'\\u039d'\n'\\u039e'\n'\\u039f'\n'\\u03a0'\n'\\u03a1'\n'\\u03a3'\n'\\u03a4'\n'\\u03a5'\n'\\u03a6'\n'\\u03a7'\n'\\u03a8'\n'\\u03a9'\n'\\u03b1'\n'\\u03b2'\n'\\u03b3'\n'\\u03b4'\n'\\u03b5'\n'\\u03b6'\n'\\u03b7'\n'\\u03b8'\n'\\u03b9'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03be'\n'\\u03bf'\n'\\u03c0'\n'\\u03c1'\n'\\u03c3'\n'\\u03c2'\n'\\u03c4'\n'\\u03c5'\n'\\u03c6'\n'\\u03c7'\n'\\u03c8'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03c9'\n'\\u03ac'\n'\\u03ad'\n'\\u03ae'\n'\\u03ca'\n'\\u03af'\n'\\u03cc'\n'\\u03cd'\n'\\u03cb'\n'\\u03ce'\n'\\u0386'\n'\\u0388'\n'\\u0389'\n'\\u038a'\n'\\u038c'\n'\\u038e'\n'\\u038f'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u03aa'\n'\\u03ab'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x00
4b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b7:0x00fa,\n0x00f7:0x00f6,\n0x0386:0x00ea,\n0x0388:0x00eb,\n0x0389:0x00ec,\n0x038a:0x00ed,\n0x038c:0x00ee,\n0x038e:0x00ef,\n0x038f:0x00f0,\n0x0391:0x0080,\n0x0392:0x0081,\n0x0393:0x0082,\n0x0394:0x0083,\n0x0395:0x0084,\n0x0396:0x0085,\n0x0397:0x0086,\n0x0398:0x0087,\n0x0399:0x0088,\n0x039a:0x0089,\n0x039b:0x008a,\n0x039c:0x008b,\n0x039d:0x008c,\n0x039e:0x008d,\n0x039f:0x008e,\n0x03a0:0x008f,\n0x03a1:0x0090,\n0x03a3:0x0091,\n0x03a4:0x0092,\n0x03a5:0x0093,\n0x03a6:0x0094,\n0x03a7:0x0095,\n0x03a8:0x0096,\n0x03a9:0x0097,\n0x03aa:0x00f4,\n0x03ab:0x00f5,\n0x03ac:0x00e1,\n0x03ad:0x00e2,\n0x03ae:0x00e3,\n0x03af:0x00e5,\n0x03b1:0x0098,\n0x03b2:0x0099,\n0x03b3:0x009a,\n0x03b4:0x009b,\n0x03b5:0x009c,\n0x03b6:0x009d,\n0x03b7:0x009e,\n0x03b8:0x009f,\n0x03b9:0x00a0,\n0x03ba:0x00a1,\n0x03bb:0x00a2,\n0x03bc:0x00a3,\n0x03bd:0x00a4,\n0x03be:0x00a5,\n0x03bf:0x00a6,\n0x03c0:0x00a7,\n0x03c1:0x00a8,\n0x03c2:0x00aa,\n0x03c3:0x00a9,\n0x03c4:0x00ab,\n0x03c5:0x00ac,\n0x03c6:0x00ad,\n0x03c7:0x00ae,\n0x03c8:0x00af,\n0x03c9:0x00e0,\n0x03ca:0x00e4,\n0x03cb:0x00e8,\n0x03cc:0x00e6,\n0x03cd:0x00e7,\n0x03ce:0x00e9,\n0x207f:0x00fc,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x2248:0x00f7,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.cp865": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass 
StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp865',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x00ec,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00f2,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x00ff,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x20a7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x2310,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00a4,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\xec'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xfb'\n'\\xf9'\n'\\xff'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\u20a7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\u2310'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xa4'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006
c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a3:0x009c,\n0x00a4:0x00af,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b5:0x00e6,\n0x00b7:0x00fa,\n0x00ba:0x00a7,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00bf:0x00a8,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c9:0x0090,\n0x00d1:0x00a5,\n0x00d6:0x0099,\n0x00d8:0x009d,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ec:0x008d,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00ff:0x0098,\n0x0192:0x009f,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x207f:0x00fc,\n0x20a7:0x009e,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2310:0x00a9,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.ptcp154": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='ptcp154',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0496'\n'\\u0492'\n'\\u04ee'\n'\\u0493'\n'\\u201e'\n'\\u2026'\n'\\u04b6'\n'\\u04ae'\n'\\u04b2'\n'\\u04af'\n'\\u04a0'\n'\\u04e2'\n'\\u04a2'\n'\\u049a'\n'\\u04ba'\n'\\u04b8'\n'\\u0497'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u04b3'\n'\\u04b7'\n'\\u04a1'\n'\\u04e3'\n'\\u04a3'\n'\\u049b'\n'\\u04bb'\n'\\u04b9'\n'\\xa0'\n'\\u040e'\n'\\u045e'\n'\\u0408'\n'\\u04e8'\n'\\u0498'\n'\\u04b0'\n'\\xa7'\n'\\u0401'\n'\\xa9'\n'\\u04d8'\n'\\xab'\n'\\xac'\n'\\u04ef'\n'\\xae'\n'\\u049c'\n'\\xb0'\n'\\u04b1'\n'\\u0406'\n'\\u0456'\n'\\u0499'\n'\\u04e9'\n'\\xb6'\n'\\xb7'\n'\\u0451'\n'\\u2116'\n'\\u04d9'\n'\\xbb'\n'\\u0458'\n'\\u04aa'\n'\\u04ab'\n'\\u049d'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.big5": [".py", "\n\n\n\n\n\nimport _codecs_tw,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_tw.getcodec('big5')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='big5',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_tw", "_multibytecodec", "codecs"]], "encodings.cp424": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n 
return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp424',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\u05d0'\n'\\u05d1'\n'\\u05d2'\n'\\u05d3'\n'\\u05d4'\n'\\u05d5'\n'\\u05d6'\n'\\u05d7'\n'\\u05d8'\n'\\xa2'\n'.'\n'<'\n'('\n'+'\n'|'\n'&'\n'\\u05d9'\n'\\u05da'\n'\\u05db'\n'\\u05dc'\n'\\u05dd'\n'\\u05de'\n'\\u05df'\n'\\u05e0'\n'\\u05e1'\n'!'\n'$'\n'*'\n')'\n';'\n'\\xac'\n'-'\n'/'\n'\\u05e2'\n'\\u05e3'\n'\\u05e4'\n'\\u05e5'\n'\\u05e6'\n'\\u05e7'\n'\\u05e8'\n'\\u05e9'\n'\\xa6'\n','\n'%'\n'_'\n'>'\n'?'\n'\\ufffe'\n'\\u05ea'\n'\\ufffe'\n'\\ufffe'\n'\\xa0'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2017'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\ufffe'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xb8'\n'\\ufffe'\n'\\xa4'\n'\\xb5'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xae'\n'^'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'['\n']'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\\\'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp861": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return 
codecs.CodecInfo(\n name='cp861',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00d0,\n0x008c:0x00f0,\n0x008d:0x00de,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00fe,\n0x0096:0x00fb,\n0x0097:0x00dd,\n0x0098:0x00fd,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x20a7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00c1,\n0x00a5:0x00cd,\n0x00a6:0x00d3,\n0x00a7:0x00da,\n0x00a8:0x00bf,\n0x00a9:0x2310,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xd0'\n'\\xf0'\n'\\xde'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xfe'\n'\\xfb'\n'\\xdd'\n'\\xfd'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\u20a7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xc1'\n'\\xcd'\n'\\xd3'\n'\\xda'\n'\\xbf'\n'\\u2310'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006
c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a3:0x009c,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b5:0x00e6,\n0x00b7:0x00fa,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00bf:0x00a8,\n0x00c1:0x00a4,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c9:0x0090,\n0x00cd:0x00a5,\n0x00d0:0x008b,\n0x00d3:0x00a6,\n0x00d6:0x0099,\n0x00d8:0x009d,\n0x00da:0x00a7,\n0x00dc:0x009a,\n0x00dd:0x0097,\n0x00de:0x008d,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ed:0x00a1,\n0x00f0:0x008c,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00fd:0x0098,\n0x00fe:0x0095,\n0x0192:0x009f,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x207f:0x00fc,\n0x20a7:0x009e,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2310:0x00a9,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.euc_jp": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('euc_jp')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='euc_jp',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.cp855": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return 
codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp855',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0452,\n0x0081:0x0402,\n0x0082:0x0453,\n0x0083:0x0403,\n0x0084:0x0451,\n0x0085:0x0401,\n0x0086:0x0454,\n0x0087:0x0404,\n0x0088:0x0455,\n0x0089:0x0405,\n0x008a:0x0456,\n0x008b:0x0406,\n0x008c:0x0457,\n0x008d:0x0407,\n0x008e:0x0458,\n0x008f:0x0408,\n0x0090:0x0459,\n0x0091:0x0409,\n0x0092:0x045a,\n0x0093:0x040a,\n0x0094:0x045b,\n0x0095:0x040b,\n0x0096:0x045c,\n0x0097:0x040c,\n0x0098:0x045e,\n0x0099:0x040e,\n0x009a:0x045f,\n0x009b:0x040f,\n0x009c:0x044e,\n0x009d:0x042e,\n0x009e:0x044a,\n0x009f:0x042a,\n0x00a0:0x0430,\n0x00a1:0x0410,\n0x00a2:0x0431,\n0x00a3:0x0411,\n0x00a4:0x0446,\n0x00a5:0x0426,\n0x00a6:0x0434,\n0x00a7:0x0414,\n0x00a8:0x0435,\n0x00a9:0x0415,\n0x00aa:0x0444,\n0x00ab:0x0424,\n0x00ac:0x0433,\n0x00ad:0x0413,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x0445,\n0x00b6:0x0425,\n0x00b7:0x0438,\n0x00b8:0x0418,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x0439,\n0x00be:0x0419,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x043a,\n0x00c7:0x041a,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x00a4,\n0x00d0:0x043b,\n0x00d1:0x041b,\n0x00d2:0x043c,\n0x00d3:0x041c,\n0x00d4:0x043d,\n0x00d5:0x041d,\n0x00d6:0x043e,\n0x00d7:0x041e,\n0x00d8:0x043f,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x041f,\n0x00de:0x044f,\n0x00df:0x2580,\n0x00e0:0x042f,\n0x00e1:0x0440,\n0x00e2:0x0420,\n0x00e3:0x0441,\n0x00e4:0x0421,\n0x00e5:0x0442,\n0x00e6:0x0422,\n0x00e7:0x0443,\n0x00e8:0x0423,\n0x00e9:0x0436,\n0x00ea:0x0416,\n0x00eb:0x0432,\n0x00ec:0x0412,\n0x00ed:0x044c,\n0x00ee:0x042c,\n0x00ef:0x2116,\n0x00f0:0x00ad,\n0x00f1:0x044b,\n0x00f2:0x042b,\n0x00f3:0x0437,\n0x00f4:0x0417,\n0x00f5:0x0448,\n0x00f6:0x0428,\n0x00f7:0x044d,\n0x00f8:0x042d,\n0x00f9:0x0449,\n0x00fa:0x0429,\n0x00fb:0x0447,\n0x00fc:0x0427,\n0x00fd:0x00a7,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0452'\n'\\u0402'\n'\\u0453'\n'\\u0403'\n'\\u0451'\n'\\u0401'\n'\\u0454'\n'\\u0404'\n'\\u0455'\n'\\u0405'\n'\\u0456'\n'\\u0406'\n'\\u0457'\n'\\u0407'\n'\\u0458'\n'\\u0408'\n'\\u0459'\n'\\u0409'\n'\\u045a'\n'\\u040a'\n'\\u045b'\n'\\u040b'\n'\\u045c'\n'\\u040c'\n'\\u045e'\n'\\u040e'\n'\\u045f'\n'\\u040f'\n'\\u044e'\n'\\u042e'\n'\\u044a'\n'\\u042a'\n'\\u0430'\n'\\u0410'\n'\\u0431'\n'\\u0411'\n'\\u0446'\n'\\u0426'\n'\\u0434'\n'\\u0414'\n'\\u0435'\n'\\u0415'\n'\\u0444'\n'\\u0424'\n'\\u0433'\n'\\u0413'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u0445'\n'\\u0425'\n'\\u0438'\n'\\u0418'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u0439'\n'\\u0419'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u043a'\n'\\u041a'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\u043b'\n'\\u041b'\n'\\u043c'\n'\\u041c'\n'\\u043d'\n'\\u041d'\n'\\u043e'\n'\\u041e'\n'\\u043f'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u041f'\n'\\u044f'\n'\\u2580'\n'\\u042f'\n'\\u0440'\n'\\u0420'\n'\\u0441'\n'\\u0421'\n'\\u0442'\n'\\u0422'\n'\\u0443'\n'\\u0423'\n'\\u0436'\n'\\u0416'\n'\\u0432'\n'\\u0412'\n'\\u044c'\n'\\u042c'\n'\\u2116'\n'\\xad'\n'\\u044b'\n'\\u042b'\n'\\u0437'\n'\\u0417'\n'\\u0448'\n'\\u0428'\n'\\u044d'\n'\\u042d'\n'\\u0449'\n'\\u0429'\n'\\u0447'\n'\\u0427'\n'\\xa7'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,
\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a4:0x00cf,\n0x00a7:0x00fd,\n0x00ab:0x00ae,\n0x00ad:0x00f0,\n0x00bb:0x00af,\n0x0401:0x0085,\n0x0402:0x0081,\n0x0403:0x0083,\n0x0404:0x0087,\n0x0405:0x0089,\n0x0406:0x008b,\n0x0407:0x008d,\n0x0408:0x008f,\n0x0409:0x0091,\n0x040a:0x0093,\n0x040b:0x0095,\n0x040c:0x0097,\n0x040e:0x0099,\n0x040f:0x009b,\n0x0410:0x00a1,\n0x0411:0x00a3,\n0x0412:0x00ec,\n0x0413:0x00ad,\n0x0414:0x00a7,\n0x0415:0x00a9,\n0x0416:0x00ea,\n0x0417:0x00f4,\n0x0418:0x00b8,\n0x0419:0x00be,\n0x041a:0x00c7,\n0x041b:0x00d1,\n0x041c:0x00d3,\n0x041d:0x00d5,\n0x041e:0x00d7,\n0x041f:0x00dd,\n0x0420:0x00e2,\n0x0421:0x00e4,\n0x0422:0x00e6,\n0x0423:0x00e8,\n0x0424:0x00ab,\n0x0425:0x00b6,\n0x0426:0x00a5,\n0x0427:0x00fc,\n0x0428:0x00f6,\n0x0429:0x00fa,\n0x042a:0x009f,\n0x042b:0x00f2,\n0x042c:0x00ee,\n0x042d:0x00f8,\n0x042e:0x009d,\n0x042f:0x00e0,\n0x0430:0x00a0,\n0x0431:0x00a2,\n0x0432:0x00eb,\n0x0433:0x00ac,\n0x0434:0x00a6,\n0x0435:0x00a8,\n0x0436:0x00e9,\n0x0437:0x00f3,\n0x0438:0x00b7,\n0x0439:0x00bd,\n0x043a:0x00c6,\n0x043b:0x00d0,\n0x043c:0x00d2,\n0x043d:0x00d4,\n0x043e:0x00d6,\n0x043f:0x00d8,\n0x0440:0x00e1,\n0x0441:0x00e3,\n0x0442:0x00e5,\n0x0443:0x00e7,\n0x0444:0x00aa,\n0x0445:0x00b5,\n0x0446:0x00a4,\n0x0447:0x00fb,\n0x0448:0x00f5,\n0x0449:0x00f9,\n0x044a:0x009e,\n0x044b:0x00f1,\n0x044c:0x00ed,\n0x044d:0x00f7,\n0x044e:0x009c,\n0x044f:0x00de,\n0x0451:0x0084,\n0x0452:0x0080,\n0x0453:0x0082,\n0x0454:0x0086,\n0x0455:0x0088,\n0x0456:0x008a,\n0x0457:0x008c,\n0x0458:0x008e,\n0x0459:0x0090,\n0x045a:0x0092,\n0x045b:0x0094,\n0x045c:0x0096,\n0x045e:0x0098,\n0x045f:0x009a,\n0x2116:0x00ef,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.shift_jis": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('shift_jis')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='shift_jis',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.utf_32_le": [".py", "''\n\n\nimport codecs\n\n\n\nencode=codecs.utf_32_le_encode\n\ndef decode(input,errors='strict'):\n return 
codecs.utf_32_le_decode(input,errors,True)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.utf_32_le_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=codecs.utf_32_le_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=codecs.utf_32_le_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=codecs.utf_32_le_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-32-le',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.cp500": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp500',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' 
'\n'\\xa0'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe1'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xf1'\n'['\n'.'\n'<'\n'('\n'+'\n'!'\n'&'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xec'\n'\\xdf'\n']'\n'$'\n'*'\n')'\n';'\n'^'\n'-'\n'/'\n'\\xc2'\n'\\xc4'\n'\\xc0'\n'\\xc1'\n'\\xc3'\n'\\xc5'\n'\\xc7'\n'\\xd1'\n'\\xa6'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\xf0'\n'\\xfd'\n'\\xfe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\xa4'\n'\\xb5'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n'\\xd0'\n'\\xdd'\n'\\xde'\n'\\xae'\n'\\xa2'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xac'\n'|'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'\\xfc'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\\\'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'\\xd6'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n'\\xdc'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.undefined": [".py", "''\n\n\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n raise UnicodeError(\"undefined encoding\")\n \n def decode(self,input,errors='strict'):\n raise UnicodeError(\"undefined encoding\")\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n raise UnicodeError(\"undefined encoding\")\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n raise UnicodeError(\"undefined encoding\")\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='undefined',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs"]], "encodings.cp860": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp860',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e3,\n0x0085:0x00e0,\n0x0086:0x00c1,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00ca,\n0x008a:0x00e8,\n0x008b:0x00cd,\n0x008c:0x00d4,\n0x008d:0x00ec,\n0x008e:0x00c3,\n0x008f:0x00c2,\n0x0090:0x00c9,\n0x0091:0x00c0,\n0x0092:0x00c8,\n0x0093:0x00f4,\n0x0094:0x00f5,\n0x0095:0x00f2,\n0x0096:0x00da,\n0x0097:0x00f9,\n0x0098:0x00cc,\n0x0099:0x00d5,\n0x009a:0x00dc,\n0x009b:0x00a2,\n0x009c:0x00a3,\n0x009d:0x00d9,\n0x009e:0x20a7,\n0x009f:0x00d3,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x00d2,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe3'\n'\\xe0'\n'\\xc1'\n'\\xe7'\n'\\xea'\n'\\xca'\n'\\xe8'\n'\\xcd'\n'\\xd4'\n'\\xec'\n'\\xc3'\n'\\xc2'\n'\\xc9'\n'\\xc0'\n'\\xc8'\n'\\xf4'\n'\\xf5'\n'\\xf2'\n'\\xda'\n'\\xf9'\n'\\xcc'\n'\\xd5'\n'\\xdc'\n'\\xa2'\n'\\xa3'\n'\\xd9'\n'\\u20a7'\n'\\xd3'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\xd2'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n
0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x009b,\n0x00a3:0x009c,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b5:0x00e6,\n0x00b7:0x00fa,\n0x00ba:0x00a7,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00bf:0x00a8,\n0x00c0:0x0091,\n0x00c1:0x0086,\n0x00c2:0x008f,\n0x00c3:0x008e,\n0x00c7:0x0080,\n0x00c8:0x0092,\n0x00c9:0x0090,\n0x00ca:0x0089,\n0x00cc:0x0098,\n0x00cd:0x008b,\n0x00d1:0x00a5,\n0x00d2:0x00a9,\n0x00d3:0x009f,\n0x00d4:0x008c,\n0x00d5:0x0099,\n0x00d9:0x009d,\n0x00da:0x0096,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e3:0x0084,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00ec:0x008d,\n0x00ed:0x00a1,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f5:0x0094,\n0x00f7:0x00f6,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fc:0x0081,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x207f:0x00fc,\n0x20a7:0x009e,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.uu_codec": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\nimport binascii\nfrom io import BytesIO\n\n\n\ndef uu_encode(input,errors='strict',filename='',mode=0o666):\n assert errors =='strict'\n infile=BytesIO(input)\n outfile=BytesIO()\n read=infile.read\n write=outfile.write\n \n \n filename=filename.replace('\\n','\\\\n')\n filename=filename.replace('\\r','\\\\r')\n \n \n write(('begin %o %s\\n'%(mode&0o777,filename)).encode('ascii'))\n chunk=read(45)\n while chunk:\n write(binascii.b2a_uu(chunk))\n chunk=read(45)\n write(b' \\nend\\n')\n \n return(outfile.getvalue(),len(input))\n \ndef uu_decode(input,errors='strict'):\n assert errors =='strict'\n infile=BytesIO(input)\n outfile=BytesIO()\n readline=infile.readline\n write=outfile.write\n \n \n while 1:\n s=readline()\n if not s:\n raise ValueError('Missing \"begin\" line in input data')\n if s[:5]==b'begin':\n break\n \n \n while True:\n s=readline()\n if not s or s ==b'end\\n':\n break\n try:\n data=binascii.a2b_uu(s)\n except binascii.Error as v:\n \n nbytes=(((s[0]-32)&63)*4+5)//3\n data=binascii.a2b_uu(s[:nbytes])\n \n write(data)\n if not s:\n raise ValueError('Truncated input data')\n \n 
return(outfile.getvalue(),len(input))\n \nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return uu_encode(input,errors)\n \n def decode(self,input,errors='strict'):\n return uu_decode(input,errors)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return uu_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return uu_decode(input,self.errors)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n charbuffertype=bytes\n \nclass StreamReader(Codec,codecs.StreamReader):\n charbuffertype=bytes\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='uu',\n encode=uu_encode,\n decode=uu_decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n _is_text_encoding=False,\n )\n", ["binascii", "codecs", "io"]], "encodings.utf_16_le": [".py", "''\n\n\n\n\n\n\n\nimport codecs\n\n\n\nencode=codecs.utf_16_le_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_16_le_decode(input,errors,True)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.utf_16_le_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=codecs.utf_16_le_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=codecs.utf_16_le_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=codecs.utf_16_le_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-16-le',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.gb18030": [".py", "\n\n\n\n\n\nimport _codecs_cn,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_cn.getcodec('gb18030')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='gb18030',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_cn", "_multibytecodec", "codecs"]], "encodings.cp874": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n 
name='cp874',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2026'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa0'\n'\\u0e01'\n'\\u0e02'\n'\\u0e03'\n'\\u0e04'\n'\\u0e05'\n'\\u0e06'\n'\\u0e07'\n'\\u0e08'\n'\\u0e09'\n'\\u0e0a'\n'\\u0e0b'\n'\\u0e0c'\n'\\u0e0d'\n'\\u0e0e'\n'\\u0e0f'\n'\\u0e10'\n'\\u0e11'\n'\\u0e12'\n'\\u0e13'\n'\\u0e14'\n'\\u0e15'\n'\\u0e16'\n'\\u0e17'\n'\\u0e18'\n'\\u0e19'\n'\\u0e1a'\n'\\u0e1b'\n'\\u0e1c'\n'\\u0e1d'\n'\\u0e1e'\n'\\u0e1f'\n'\\u0e20'\n'\\u0e21'\n'\\u0e22'\n'\\u0e23'\n'\\u0e24'\n'\\u0e25'\n'\\u0e26'\n'\\u0e27'\n'\\u0e28'\n'\\u0e29'\n'\\u0e2a'\n'\\u0e2b'\n'\\u0e2c'\n'\\u0e2d'\n'\\u0e2e'\n'\\u0e2f'\n'\\u0e30'\n'\\u0e31'\n'\\u0e32'\n'\\u0e33'\n'\\u0e34'\n'\\u0e35'\n'\\u0e36'\n'\\u0e37'\n'\\u0e38'\n'\\u0e39'\n'\\u0e3a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0e3f'\n'\\u0e40'\n'\\u0e41'\n'\\u0e42'\n'\\u0e43'\n'\\u0e44'\n'\\u0e45'\n'\\u0e46'\n'\\u0e47'\n'\\u0e48'\n'\\u0e49'\n'\\u0e4a'\n'\\u0e4b'\n'\\u0e4c'\n'\\u0e4d'\n'\\u0e4e'\n'\\u0e4f'\n'\\u0e50'\n'\\u0e51'\n'\\u0e52'\n'\\u0e53'\n'\\u0e54'\n'\\u0e55'\n'\\u0e56'\n'\\u0e57'\n'\\u0e58'\n'\\u0e59'\n'\\u0e5a'\n'\\u0e5b'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp850": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp850',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x00ec,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00f2,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x00ff,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x00d7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x00ae,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x00c1,\n0x00b6:0x00c2,\n0x00b7:0x00c0,\n0x00b8:0x00a9,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x00a2,\n0x00be:0x00a5,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x00e3,\n0x00c7:0x00c3,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x00a4,\n0x00d0:0x00f0,\n0x00d1:0x00d0,\n0x00d2:0x00ca,\n0x00d3:0x00cb,\n0x00d4:0x00c8,\n0x00d5:0x0131,\n0x00d6:0x00cd,\n0x00d7:0x00ce,\n0x00d8:0x00cf,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x00a6,\n0x00de:0x00cc,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x00d4,\n0x00e3:0x00d2,\n0x00e4:0x00f5,\n0x00e5:0x00d5,\n0x00e6:0x00b5,\n0x00e7:0x00fe,\n0x00e8:0x00de,\n0x00e9:0x00da,\n0x00ea:0x00db,\n0x00eb:0x00d9,\n0x00ec:0x00fd,\n0x00ed:0x00dd,\n0x00ee:0x00af,\n0x00ef:0x00b4,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:0x2017,\n0x00f3:0x00be,\n0x00f4:0x00b6,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x00b8,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x00b7,\n0x00fb:0x00b9,\n0x00fc:0x00b3,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\xec'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xfb'\n'\\xf9'\n'\\xff'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\xd7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\xae'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\xc1'\n'\\xc2'\n'\\xc0'\n'\\xa9'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\xa2'\n'\\xa5'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\xe3'\n'\\xc3'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\xf0'\n'\\xd0'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\u0131'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\xa6'\n'\\xcc'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\xd4'\n'\\xd2'\n'\\xf5'\n'\\xd5'\n'\\xb5'\n'\\xfe'\n'\\xde'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\xfd'\n'\\xdd'\n'\\xaf'\n'\\xb4'\n'\\xad'\n'\\xb1'\n'\\u2017'\n'\\xbe'\n'\\xb6'\n'\\xa7'\n'\\xf7'\n'\\xb8'\n'\\xb0'\n'\\xa8'\n'\\xb7'\n'\\xb9'\n'\\xb3'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x
0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x00bd,\n0x00a3:0x009c,\n0x00a4:0x00cf,\n0x00a5:0x00be,\n0x00a6:0x00dd,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00a9:0x00b8,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00ae:0x00a9,\n0x00af:0x00ee,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00fc,\n0x00b4:0x00ef,\n0x00b5:0x00e6,\n0x00b6:0x00f4,\n0x00b7:0x00fa,\n0x00b8:0x00f7,\n0x00b9:0x00fb,\n0x00ba:0x00a7,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00f3,\n0x00bf:0x00a8,\n0x00c0:0x00b7,\n0x00c1:0x00b5,\n0x00c2:0x00b6,\n0x00c3:0x00c7,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c8:0x00d4,\n0x00c9:0x0090,\n0x00ca:0x00d2,\n0x00cb:0x00d3,\n0x00cc:0x00de,\n0x00cd:0x00d6,\n0x00ce:0x00d7,\n0x00cf:0x00d8,\n0x00d0:0x00d1,\n0x00d1:0x00a5,\n0x00d2:0x00e3,\n0x00d3:0x00e0,\n0x00d4:0x00e2,\n0x00d5:0x00e5,\n0x00d6:0x0099,\n0x00d7:0x009e,\n0x00d8:0x009d,\n0x00d9:0x00eb,\n0x00da:0x00e9,\n0x00db:0x00ea,\n0x00dc:0x009a,\n0x00dd:0x00ed,\n0x00de:0x00e8,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e3:0x00c6,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ec:0x008d,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f0:0x00d0,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f5:0x00e4,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00fd:0x00ec,\n0x00fe:0x00e7,\n0x00ff:0x0098,\n0x0131:0x00d5,\n0x0192:0x009f,\n0x2017:0x00f2,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n", ["codecs"]], "encodings.cp864": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp864',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0025:0x066a,\n0x0080:0x00b0,\n0x0081:0x00b7,\n0x0082:0x2219,\n0x0083:0x221a,\n0x0084:0x2592,\n0x0085:0x2500,\n0x0086:0x2502,\n0x0087:0x253c,\n0x0088:0x2524,\n0x0089:0x252c,\n0x008a:0x251c,\n0x008b:0x2534,\n0x008c:0x2510,\n0x008d:0x250c,\n0x008e:0x2514,\n0x008f:0x2518,\n0x0090:0x03b2,\n0x0091:0x221e,\n0x0092:0x03c6,\n0x0093:0x00b1,\n0x0094:0x00bd,\n0x0095:0x00bc,\n0x0096:0x2248,\n0x0097:0x00ab,\n0x0098:0x00bb,\n0x0099:0xfef7,\n0x009a:0xfef8,\n0x009b:None,\n0x009c:None,\n0x009d:0xfefb,\n0x009e:0xfefc,\n0x009f:None,\n0x00a1:0x00ad,\n0x00a2:0xfe82,\n0x00a5:0xfe84,\n0x00a6:None,\n0x00a7:None,\n0x00a8:0xfe8e,\n0x00a9:0xfe8f,\n0x00aa:0xfe95,\n0x00ab:0xfe99,\n0x00ac:0x060c,\n0x00ad:0xfe9d,\n0x00ae:0xfea1,\n0x00af:0xfea5,\n0x00b0:0x0660,\n0x00b1:0x0661,\n0x00b2:0x0662,\n0x00b3:0x0663,\n0x00b4:0x0664,\n0x00b5:0x0665,\n0x00b6:0x0666,\n0x00b7:0x0667,\n0x00b8:0x0668,\n0x00b9:0x0669,\n0x00ba:0xfed1,\n0x00bb:0x061b,\n0x00bc:0xfeb1,\n0x00bd:0xfeb5,\n0x00be:0xfeb9,\n0x00bf:0x061f,\n0x00c0:0x00a2,\n0x00c1:0xfe80,\n0x00c2:0xfe81,\n0x00c3:0xfe83,\n0x00c4:0xfe85,\n0x00c5:0xfeca,\n0x00c6:0xfe8b,\n0x00c7:0xfe8d,\n0x00c8:0xfe91,\n0x00c9:0xfe93,\n0x00ca:0xfe97,\n0x00cb:0xfe9b,\n0x00cc:0xfe9f,\n0x00cd:0xfea3,\n0x00ce:0xfea7,\n0x00cf:0xfea9,\n0x00d0:0xfeab,\n0x00d1:0xfead,\n0x00d2:0xfeaf,\n0x00d3:0xfeb3,\n0x00d4:0xfeb7,\n0x00d5:0xfebb,\n0x00d6:0xfebf,\n0x00d7:0xfec1,\n0x00d8:0xfec5,\n0x00d9:0xfecb,\n0x00da:0xfecf,\n0x00db:0x00a6,\n0x00dc:0x00ac,\n0x00dd:0x00f7,\n0x00de:0x00d7,\n0x00df:0xfec9,\n0x00e0:0x0640,\n0x00e1:0xfed3,\n0x00e2:0xfed7,\n0x00e3:0xfedb,\n0x00e4:0xfedf,\n0x00e5:0xfee3,\n0x00e6:0xfee7,\n0x00e7:0xfeeb,\n0x00e8:0xfeed,\n0x00e9:0xfeef,\n0x00ea:0xfef3,\n0x00eb:0xfebd,\n0x00ec:0xfecc,\n0x00ed:0xfece,\n0x00ee:0xfecd,\n0x00ef:0xfee1,\n0x00f0:0xfe7d,\n0x00f1:0x0651,\n0x00f2:0xfee5,\n0x00f3:0xfee9,\n0x00f4:0xfeec,\n0x00f5:0xfef0,\n0x00f6:0xfef2,\n0x00f7:0xfed0,\n0x00f8:0xfed5,\n0x00f9:0xfef5,\n0x00fa:0xfef6,\n0x00fb:0xfedd,\n0x00fc:0xfed9,\n0x00fd:0xfef1,\n0x00fe:0x25a0,\n0x00ff:None,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'\\u066a'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xb0'\n'\\xb7'\n'\\u2219'\n'\\u221a'\n'\\u2592'\n'\\u2500'\n'\\u2502'\n'\\u253c'\n'\\u2524'\n'\\u252c'\n'\\u251c'\n'\\u2534'\n'\\u2510'\n'\\u250c'\n'\\u2514'\n'\\u2518'\n'\\u03b2'\n'\\u221e'\n'\\u03c6'\n'\\xb1'\n'\\xbd'\n'\\xbc'\n'\\u2248'\n'\\xab'\n'\\xbb'\n'\\ufef7'\n'\\ufef8'\n'\\ufffe'\n'\\ufffe'\n'\\ufefb'\n'\\ufefc'\n'\\ufffe'\n'\\xa0'\n'\\xad'\n'\\ufe82'\n'\\xa3'\n'\\xa4'\n'\\ufe84'\n'\\ufffe'\n'\\ufffe'\n'\\ufe8e'\n'\\ufe8f'\n'\\ufe95'\n'\\ufe99'\n'\\u060c'\n'\\ufe9d'\n'\\ufea1'\n'\\ufea5'\n'\\u0660'\n'\\u0661'\n'\\u0662'\n'\\u0663'\n'\\u0664'\n'\\u0665'\n'\\u0666'\n'\\u0667'\n'\\u0668'\n'\\u0669'\n'\\ufed1'\n'\\u061b'\n'\\ufeb1'\n'\\ufeb5'\n'\\ufeb9'\n'\\u061f'\n'\\xa2'\n'\\ufe80'\n'\\ufe81'\n'\\ufe83'\n'\\ufe85'\n'\\ufeca'\n'\\ufe8b'\n'\\ufe8d'\n'\\ufe91'\n'\\ufe93'\n'\\ufe97'\n'\\ufe9b'\n'\\ufe9f'\n'\\ufea3'\n'\\ufea7'\n'\\ufea9'\n'\\ufeab'\n'\\ufead'\n'\\ufeaf'\n'\\ufeb3'\n'\\ufeb7'\n'\\ufebb'\n'\\ufebf'\n'\\ufec1'\n'\\ufec5'\n'\\ufecb'\n'\\ufecf'\n'\\xa6'\n'\\xac'\n'\\xf7'\n'\\xd7'\n'\\ufec9'\n'\\u0640'\n'\\ufed3'\n'\\ufed7'\n'\\ufedb'\n'\\ufedf'\n'\\ufee3'\n'\\ufee7'\n'\\ufeeb'\n'\\ufeed'\n'\\ufeef'\n'\\ufef3'\n'\\ufebd'\n'\\ufecc'\n'\\ufece'\n'\\ufecd'\n'\\ufee1'\n'\\ufe7d'\n'\\u0651'\n'\\ufee5'\n'\\ufee9'\n'\\ufeec'\n'\\ufef0'\n'\\ufef2'\n'\\ufed0'\n'\\ufed5'\n'\\ufef5'\n'\\ufef6'\n'\\ufedd'\n'\\ufed9'\n'\\ufef1'\n'\\u25a0'\n'\\ufffe'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x006
8,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00a0,\n0x00a2:0x00c0,\n0x00a3:0x00a3,\n0x00a4:0x00a4,\n0x00a6:0x00db,\n0x00ab:0x0097,\n0x00ac:0x00dc,\n0x00ad:0x00a1,\n0x00b0:0x0080,\n0x00b1:0x0093,\n0x00b7:0x0081,\n0x00bb:0x0098,\n0x00bc:0x0095,\n0x00bd:0x0094,\n0x00d7:0x00de,\n0x00f7:0x00dd,\n0x03b2:0x0090,\n0x03c6:0x0092,\n0x060c:0x00ac,\n0x061b:0x00bb,\n0x061f:0x00bf,\n0x0640:0x00e0,\n0x0651:0x00f1,\n0x0660:0x00b0,\n0x0661:0x00b1,\n0x0662:0x00b2,\n0x0663:0x00b3,\n0x0664:0x00b4,\n0x0665:0x00b5,\n0x0666:0x00b6,\n0x0667:0x00b7,\n0x0668:0x00b8,\n0x0669:0x00b9,\n0x066a:0x0025,\n0x2219:0x0082,\n0x221a:0x0083,\n0x221e:0x0091,\n0x2248:0x0096,\n0x2500:0x0085,\n0x2502:0x0086,\n0x250c:0x008d,\n0x2510:0x008c,\n0x2514:0x008e,\n0x2518:0x008f,\n0x251c:0x008a,\n0x2524:0x0088,\n0x252c:0x0089,\n0x2534:0x008b,\n0x253c:0x0087,\n0x2592:0x0084,\n0x25a0:0x00fe,\n0xfe7d:0x00f0,\n0xfe80:0x00c1,\n0xfe81:0x00c2,\n0xfe82:0x00a2,\n0xfe83:0x00c3,\n0xfe84:0x00a5,\n0xfe85:0x00c4,\n0xfe8b:0x00c6,\n0xfe8d:0x00c7,\n0xfe8e:0x00a8,\n0xfe8f:0x00a9,\n0xfe91:0x00c8,\n0xfe93:0x00c9,\n0xfe95:0x00aa,\n0xfe97:0x00ca,\n0xfe99:0x00ab,\n0xfe9b:0x00cb,\n0xfe9d:0x00ad,\n0xfe9f:0x00cc,\n0xfea1:0x00ae,\n0xfea3:0x00cd,\n0xfea5:0x00af,\n0xfea7:0x00ce,\n0xfea9:0x00cf,\n0xfeab:0x00d0,\n0xfead:0x00d1,\n0xfeaf:0x00d2,\n0xfeb1:0x00bc,\n0xfeb3:0x00d3,\n0xfeb5:0x00bd,\n0xfeb7:0x00d4,\n0xfeb9:0x00be,\n0xfebb:0x00d5,\n0xfebd:0x00eb,\n0xfebf:0x00d6,\n0xfec1:0x00d7,\n0xfec5:0x00d8,\n0xfec9:0x00df,\n0xfeca:0x00c5,\n0xfecb:0x00d9,\n0xfecc:0x00ec,\n0xfecd:0x00ee,\n0xfece:0x00ed,\n0xfecf:0x00da,\n0xfed0:0x00f7,\n0xfed1:0x00ba,\n0xfed3:0x00e1,\n0xfed5:0x00f8,\n0xfed7:0x00e2,\n0xfed9:0x00fc,\n0xfedb:0x00e3,\n0xfedd:0x00fb,\n0xfedf:0x00e4,\n0xfee1:0x00ef,\n0xfee3:0x00e5,\n0xfee5:0x00f2,\n0xfee7:0x00e6,\n0xfee9:0x00f3,\n0xfeeb:0x00e7,\n0xfeec:0x00f4,\n0xfeed:0x00e8,\n0xfeef:0x00e9,\n0xfef0:0x00f5,\n0xfef1:0x00fd,\n0xfef2:0x00f6,\n0xfef3:0x00ea,\n0xfef5:0x00f9,\n0xfef6:0x00fa,\n0xfef7:0x0099,\n0xfef8:0x009a,\n0xfefb:0x009d,\n0xfefc:0x009e,\n}\n", ["codecs"]], "encodings.utf_32": [".py", "''\n\n\nimport codecs,sys\n\n\n\nencode=codecs.utf_32_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_32_decode(input,errors,True)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def __init__(self,errors='strict'):\n codecs.IncrementalEncoder.__init__(self,errors)\n self.encoder=None\n \n def encode(self,input,final=False):\n if self.encoder is None:\n result=codecs.utf_32_encode(input,self.errors)[0]\n if sys.byteorder =='little':\n self.encoder=codecs.utf_32_le_encode\n else:\n self.encoder=codecs.utf_32_be_encode\n return result\n return self.encoder(input,self.errors)[0]\n \n def reset(self):\n codecs.IncrementalEncoder.reset(self)\n self.encoder=None\n \n def getstate(self):\n \n \n \n \n return(2 if self.encoder is None else 0)\n \n def setstate(self,state):\n if state:\n self.encoder=None\n else:\n if sys.byteorder =='little':\n self.encoder=codecs.utf_32_le_encode\n else:\n self.encoder=codecs.utf_32_be_encode\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n def __init__(self,errors='strict'):\n codecs.BufferedIncrementalDecoder.__init__(self,errors)\n self.decoder=None\n \n def 
_buffer_decode(self,input,errors,final):\n if self.decoder is None:\n (output,consumed,byteorder)=\\\n codecs.utf_32_ex_decode(input,errors,0,final)\n if byteorder ==-1:\n self.decoder=codecs.utf_32_le_decode\n elif byteorder ==1:\n self.decoder=codecs.utf_32_be_decode\n elif consumed >=4:\n raise UnicodeError(\"UTF-32 stream does not start with BOM\")\n return(output,consumed)\n return self.decoder(input,self.errors,final)\n \n def reset(self):\n codecs.BufferedIncrementalDecoder.reset(self)\n self.decoder=None\n \n def getstate(self):\n \n \n state=codecs.BufferedIncrementalDecoder.getstate(self)[0]\n \n \n \n \n if self.decoder is None:\n return(state,2)\n addstate=int((sys.byteorder ==\"big\")!=\n (self.decoder is codecs.utf_32_be_decode))\n return(state,addstate)\n \n def setstate(self,state):\n \n codecs.BufferedIncrementalDecoder.setstate(self,state)\n state=state[1]\n if state ==0:\n self.decoder=(codecs.utf_32_be_decode\n if sys.byteorder ==\"big\"\n else codecs.utf_32_le_decode)\n elif state ==1:\n self.decoder=(codecs.utf_32_le_decode\n if sys.byteorder ==\"big\"\n else codecs.utf_32_be_decode)\n else:\n self.decoder=None\n \nclass StreamWriter(codecs.StreamWriter):\n def __init__(self,stream,errors='strict'):\n self.encoder=None\n codecs.StreamWriter.__init__(self,stream,errors)\n \n def reset(self):\n codecs.StreamWriter.reset(self)\n self.encoder=None\n \n def encode(self,input,errors='strict'):\n if self.encoder is None:\n result=codecs.utf_32_encode(input,errors)\n if sys.byteorder =='little':\n self.encoder=codecs.utf_32_le_encode\n else:\n self.encoder=codecs.utf_32_be_encode\n return result\n else:\n return self.encoder(input,errors)\n \nclass StreamReader(codecs.StreamReader):\n\n def reset(self):\n codecs.StreamReader.reset(self)\n try:\n del self.decode\n except AttributeError:\n pass\n \n def decode(self,input,errors='strict'):\n (object,consumed,byteorder)=\\\n codecs.utf_32_ex_decode(input,errors,0,False)\n if byteorder ==-1:\n self.decode=codecs.utf_32_le_decode\n elif byteorder ==1:\n self.decode=codecs.utf_32_be_decode\n elif consumed >=4:\n raise UnicodeError(\"UTF-32 stream does not start with BOM\")\n return(object,consumed)\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-32',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs", "sys"]], "encodings.koi8_u": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='koi8-u',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u2500'\n'\\u2502'\n'\\u250c'\n'\\u2510'\n'\\u2514'\n'\\u2518'\n'\\u251c'\n'\\u2524'\n'\\u252c'\n'\\u2534'\n'\\u253c'\n'\\u2580'\n'\\u2584'\n'\\u2588'\n'\\u258c'\n'\\u2590'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2320'\n'\\u25a0'\n'\\u2219'\n'\\u221a'\n'\\u2248'\n'\\u2264'\n'\\u2265'\n'\\xa0'\n'\\u2321'\n'\\xb0'\n'\\xb2'\n'\\xb7'\n'\\xf7'\n'\\u2550'\n'\\u2551'\n'\\u2552'\n'\\u0451'\n'\\u0454'\n'\\u2554'\n'\\u0456'\n'\\u0457'\n'\\u2557'\n'\\u2558'\n'\\u2559'\n'\\u255a'\n'\\u255b'\n'\\u0491'\n'\\u255d'\n'\\u255e'\n'\\u255f'\n'\\u2560'\n'\\u2561'\n'\\u0401'\n'\\u0404'\n'\\u2563'\n'\\u0406'\n'\\u0407'\n'\\u2566'\n'\\u2567'\n'\\u2568'\n'\\u2569'\n'\\u256a'\n'\\u0490'\n'\\u256c'\n'\\xa9'\n'\\u044e'\n'\\u0430'\n'\\u0431'\n'\\u0446'\n'\\u0434'\n'\\u0435'\n'\\u0444'\n'\\u0433'\n'\\u0445'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u044f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0436'\n'\\u0432'\n'\\u044c'\n'\\u044b'\n'\\u0437'\n'\\u0448'\n'\\u044d'\n'\\u0449'\n'\\u0447'\n'\\u044a'\n'\\u042e'\n'\\u0410'\n'\\u0411'\n'\\u0426'\n'\\u0414'\n'\\u0415'\n'\\u0424'\n'\\u0413'\n'\\u0425'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u042f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0416'\n'\\u0412'\n'\\u042c'\n'\\u042b'\n'\\u0417'\n'\\u0428'\n'\\u042d'\n'\\u0429'\n'\\u0427'\n'\\u042a'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp1254": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1254',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\u0160'\n'\\u2039'\n'\\u0152'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\u0161'\n'\\u203a'\n'\\u0153'\n'\\ufffe'\n'\\ufffe'\n'\\u0178'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u011e'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u0130'\n'\\u015e'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\u011f'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u0131'\n'\\u015f'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso2022_jp_2": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_jp_2')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_jp_2',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "encodings.utf_16": [".py", "''\n\n\n\n\n\n\n\nimport codecs,sys\n\n\n\nencode=codecs.utf_16_encode\n\ndef decode(input,errors='strict'):\n return codecs.utf_16_decode(input,errors,True)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def __init__(self,errors='strict'):\n codecs.IncrementalEncoder.__init__(self,errors)\n self.encoder=None\n \n def encode(self,input,final=False):\n if self.encoder is None:\n result=codecs.utf_16_encode(input,self.errors)[0]\n if sys.byteorder =='little':\n 
self.encoder=codecs.utf_16_le_encode\n else:\n self.encoder=codecs.utf_16_be_encode\n return result\n return self.encoder(input,self.errors)[0]\n \n def reset(self):\n codecs.IncrementalEncoder.reset(self)\n self.encoder=None\n \n def getstate(self):\n \n \n \n \n return(2 if self.encoder is None else 0)\n \n def setstate(self,state):\n if state:\n self.encoder=None\n else:\n if sys.byteorder =='little':\n self.encoder=codecs.utf_16_le_encode\n else:\n self.encoder=codecs.utf_16_be_encode\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n def __init__(self,errors='strict'):\n codecs.BufferedIncrementalDecoder.__init__(self,errors)\n self.decoder=None\n \n def _buffer_decode(self,input,errors,final):\n if self.decoder is None:\n (output,consumed,byteorder)=\\\n codecs.utf_16_ex_decode(input,errors,0,final)\n if byteorder ==-1:\n self.decoder=codecs.utf_16_le_decode\n elif byteorder ==1:\n self.decoder=codecs.utf_16_be_decode\n elif consumed >=2:\n raise UnicodeError(\"UTF-16 stream does not start with BOM\")\n return(output,consumed)\n return self.decoder(input,self.errors,final)\n \n def reset(self):\n codecs.BufferedIncrementalDecoder.reset(self)\n self.decoder=None\n \n def getstate(self):\n \n \n state=codecs.BufferedIncrementalDecoder.getstate(self)[0]\n \n \n \n \n if self.decoder is None:\n return(state,2)\n addstate=int((sys.byteorder ==\"big\")!=\n (self.decoder is codecs.utf_16_be_decode))\n return(state,addstate)\n \n def setstate(self,state):\n \n codecs.BufferedIncrementalDecoder.setstate(self,state)\n state=state[1]\n if state ==0:\n self.decoder=(codecs.utf_16_be_decode\n if sys.byteorder ==\"big\"\n else codecs.utf_16_le_decode)\n elif state ==1:\n self.decoder=(codecs.utf_16_le_decode\n if sys.byteorder ==\"big\"\n else codecs.utf_16_be_decode)\n else:\n self.decoder=None\n \nclass StreamWriter(codecs.StreamWriter):\n def __init__(self,stream,errors='strict'):\n codecs.StreamWriter.__init__(self,stream,errors)\n self.encoder=None\n \n def reset(self):\n codecs.StreamWriter.reset(self)\n self.encoder=None\n \n def encode(self,input,errors='strict'):\n if self.encoder is None:\n result=codecs.utf_16_encode(input,errors)\n if sys.byteorder =='little':\n self.encoder=codecs.utf_16_le_encode\n else:\n self.encoder=codecs.utf_16_be_encode\n return result\n else:\n return self.encoder(input,errors)\n \nclass StreamReader(codecs.StreamReader):\n\n def reset(self):\n codecs.StreamReader.reset(self)\n try:\n del self.decode\n except AttributeError:\n pass\n \n def decode(self,input,errors='strict'):\n (object,consumed,byteorder)=\\\n codecs.utf_16_ex_decode(input,errors,0,False)\n if byteorder ==-1:\n self.decode=codecs.utf_16_le_decode\n elif byteorder ==1:\n self.decode=codecs.utf_16_be_decode\n elif consumed >=2:\n raise UnicodeError(\"UTF-16 stream does not start with BOM\")\n return(object,consumed)\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-16',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs", "sys"]], "encodings.iso8859_4": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n 
return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-4',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0104'\n'\\u0138'\n'\\u0156'\n'\\xa4'\n'\\u0128'\n'\\u013b'\n'\\xa7'\n'\\xa8'\n'\\u0160'\n'\\u0112'\n'\\u0122'\n'\\u0166'\n'\\xad'\n'\\u017d'\n'\\xaf'\n'\\xb0'\n'\\u0105'\n'\\u02db'\n'\\u0157'\n'\\xb4'\n'\\u0129'\n'\\u013c'\n'\\u02c7'\n'\\xb8'\n'\\u0161'\n'\\u0113'\n'\\u0123'\n'\\u0167'\n'\\u014a'\n'\\u017e'\n'\\u014b'\n'\\u0100'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\u012e'\n'\\u010c'\n'\\xc9'\n'\\u0118'\n'\\xcb'\n'\\u0116'\n'\\xcd'\n'\\xce'\n'\\u012a'\n'\\u0110'\n'\\u0145'\n'\\u014c'\n'\\u0136'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\u0172'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u0168'\n'\\u016a'\n'\\xdf'\n'\\u0101'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\u012f'\n'\\u010d'\n'\\xe9'\n'\\u0119'\n'\\xeb'\n'\\u0117'\n'\\xed'\n'\\xee'\n'\\u012b'\n'\\u0111'\n'\\u0146'\n'\\u014d'\n'\\u0137'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\u0173'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u0169'\n'\\u016b'\n'\\u02d9'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.euc_jis_2004": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('euc_jis_2004')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='euc_jis_2004',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n 
incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.mbcs": [".py", "''\n\n\n\n\n\n\n\n\n\n\nfrom codecs import mbcs_encode,mbcs_decode\n\nimport codecs\n\n\n\nencode=mbcs_encode\n\ndef decode(input,errors='strict'):\n return mbcs_decode(input,errors,True)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return mbcs_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n _buffer_decode=mbcs_decode\n \nclass StreamWriter(codecs.StreamWriter):\n encode=mbcs_encode\n \nclass StreamReader(codecs.StreamReader):\n decode=mbcs_decode\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mbcs',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.cp1250": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1250',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\ufffe'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\ufffe'\n'\\u2030'\n'\\u0160'\n'\\u2039'\n'\\u015a'\n'\\u0164'\n'\\u017d'\n'\\u0179'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\u0161'\n'\\u203a'\n'\\u015b'\n'\\u0165'\n'\\u017e'\n'\\u017a'\n'\\xa0'\n'\\u02c7'\n'\\u02d8'\n'\\u0141'\n'\\xa4'\n'\\u0104'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\u015e'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\u017b'\n'\\xb0'\n'\\xb1'\n'\\u02db'\n'\\u0142'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\u0105'\n'\\u015f'\n'\\xbb'\n'\\u013d'\n'\\u02dd'\n'\\u013e'\n'\\u017c'\n'\\u0154'\n'\\xc1'\n'\\xc2'\n'\\u0102'\n'\\xc4'\n'\\u0139'\n'\\u0106'\n'\\xc7'\n'\\u010c'\n'\\xc9'\n'\\u0118'\n'\\xcb'\n'\\u011a'\n'\\xcd'\n'\\xce'\n'\\u010e'\n'\\u0110'\n'\\u0143'\n'\\u0147'\n'\\xd3'\n'\\xd4'\n'\\u0150'\n'\\xd6'\n'\\xd7'\n'\\u0158'\n'\\u016e'\n'\\xda'\n'\\u0170'\n'\\xdc'\n'\\xdd'\n'\\u0162'\n'\\xdf'\n'\\u0155'\n'\\xe1'\n'\\xe2'\n'\\u0103'\n'\\xe4'\n'\\u013a'\n'\\u0107'\n'\\xe7'\n'\\u010d'\n'\\xe9'\n'\\u0119'\n'\\xeb'\n'\\u011b'\n'\\xed'\n'\\xee'\n'\\u010f'\n'\\u0111'\n'\\u0144'\n'\\u0148'\n'\\xf3'\n'\\xf4'\n'\\u0151'\n'\\xf6'\n'\\xf7'\n'\\u0159'\n'\\u016f'\n'\\xfa'\n'\\u0171'\n'\\xfc'\n'\\xfd'\n'\\u0163'\n'\\u02d9'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.gb2312": [".py", "\n\n\n\n\n\nimport _codecs_cn,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_cn.getcodec('gb2312')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='gb2312',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_cn", "_multibytecodec", "codecs"]], "encodings.iso8859_16": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef 
getregentry():\n return codecs.CodecInfo(\n name='iso8859-16',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0104'\n'\\u0105'\n'\\u0141'\n'\\u20ac'\n'\\u201e'\n'\\u0160'\n'\\xa7'\n'\\u0161'\n'\\xa9'\n'\\u0218'\n'\\xab'\n'\\u0179'\n'\\xad'\n'\\u017a'\n'\\u017b'\n'\\xb0'\n'\\xb1'\n'\\u010c'\n'\\u0142'\n'\\u017d'\n'\\u201d'\n'\\xb6'\n'\\xb7'\n'\\u017e'\n'\\u010d'\n'\\u0219'\n'\\xbb'\n'\\u0152'\n'\\u0153'\n'\\u0178'\n'\\u017c'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\u0102'\n'\\xc4'\n'\\u0106'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u0110'\n'\\u0143'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\u0150'\n'\\xd6'\n'\\u015a'\n'\\u0170'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u0118'\n'\\u021a'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\u0103'\n'\\xe4'\n'\\u0107'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\u0111'\n'\\u0144'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\u0151'\n'\\xf6'\n'\\u015b'\n'\\u0171'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u0119'\n'\\u021b'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.mac_cyrillic": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-cyrillic',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u2020'\n'\\xb0'\n'\\u0490'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\u0406'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\u0402'\n'\\u0452'\n'\\u2260'\n'\\u0403'\n'\\u0453'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\u0456'\n'\\xb5'\n'\\u0491'\n'\\u0408'\n'\\u0404'\n'\\u0454'\n'\\u0407'\n'\\u0457'\n'\\u0409'\n'\\u0459'\n'\\u040a'\n'\\u045a'\n'\\u0458'\n'\\u0405'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\u040b'\n'\\u045b'\n'\\u040c'\n'\\u045c'\n'\\u0455'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u201e'\n'\\u040e'\n'\\u045e'\n'\\u040f'\n'\\u045f'\n'\\u2116'\n'\\u0401'\n'\\u0451'\n'\\u044f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u20ac'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.hex_codec": [".py", "''\n\n\n\n\n\n\nimport codecs\nimport binascii\n\n\n\ndef hex_encode(input,errors='strict'):\n assert errors =='strict'\n return(binascii.b2a_hex(input),len(input))\n \ndef hex_decode(input,errors='strict'):\n assert errors =='strict'\n return(binascii.a2b_hex(input),len(input))\n \nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return hex_encode(input,errors)\n def decode(self,input,errors='strict'):\n return hex_decode(input,errors)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n assert self.errors =='strict'\n return binascii.b2a_hex(input)\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n assert self.errors =='strict'\n return binascii.a2b_hex(input)\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n charbuffertype=bytes\n \nclass StreamReader(Codec,codecs.StreamReader):\n charbuffertype=bytes\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='hex',\n encode=hex_encode,\n decode=hex_decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n _is_text_encoding=False,\n )\n", ["binascii", "codecs"]], "encodings.tis_620": [".py", 
"''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='tis-620',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\ufffe'\n'\\u0e01'\n'\\u0e02'\n'\\u0e03'\n'\\u0e04'\n'\\u0e05'\n'\\u0e06'\n'\\u0e07'\n'\\u0e08'\n'\\u0e09'\n'\\u0e0a'\n'\\u0e0b'\n'\\u0e0c'\n'\\u0e0d'\n'\\u0e0e'\n'\\u0e0f'\n'\\u0e10'\n'\\u0e11'\n'\\u0e12'\n'\\u0e13'\n'\\u0e14'\n'\\u0e15'\n'\\u0e16'\n'\\u0e17'\n'\\u0e18'\n'\\u0e19'\n'\\u0e1a'\n'\\u0e1b'\n'\\u0e1c'\n'\\u0e1d'\n'\\u0e1e'\n'\\u0e1f'\n'\\u0e20'\n'\\u0e21'\n'\\u0e22'\n'\\u0e23'\n'\\u0e24'\n'\\u0e25'\n'\\u0e26'\n'\\u0e27'\n'\\u0e28'\n'\\u0e29'\n'\\u0e2a'\n'\\u0e2b'\n'\\u0e2c'\n'\\u0e2d'\n'\\u0e2e'\n'\\u0e2f'\n'\\u0e30'\n'\\u0e31'\n'\\u0e32'\n'\\u0e33'\n'\\u0e34'\n'\\u0e35'\n'\\u0e36'\n'\\u0e37'\n'\\u0e38'\n'\\u0e39'\n'\\u0e3a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0e3f'\n'\\u0e40'\n'\\u0e41'\n'\\u0e42'\n'\\u0e43'\n'\\u0e44'\n'\\u0e45'\n'\\u0e46'\n'\\u0e47'\n'\\u0e48'\n'\\u0e49'\n'\\u0e4a'\n'\\u0e4b'\n'\\u0e4c'\n'\\u0e4d'\n'\\u0e4e'\n'\\u0e4f'\n'\\u0e50'\n'\\u0e51'\n'\\u0e52'\n'\\u0e53'\n'\\u0e54'\n'\\u0e55'\n'\\u0e56'\n'\\u0e57'\n'\\u0e58'\n'\\u0e59'\n'\\u0e5a'\n'\\u0e5b'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp037": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n 
def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp037',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\xa0'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe1'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xf1'\n'\\xa2'\n'.'\n'<'\n'('\n'+'\n'|'\n'&'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xec'\n'\\xdf'\n'!'\n'$'\n'*'\n')'\n';'\n'\\xac'\n'-'\n'/'\n'\\xc2'\n'\\xc4'\n'\\xc0'\n'\\xc1'\n'\\xc3'\n'\\xc5'\n'\\xc7'\n'\\xd1'\n'\\xa6'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\xf0'\n'\\xfd'\n'\\xfe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\xa4'\n'\\xb5'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n'\\xd0'\n'\\xdd'\n'\\xde'\n'\\xae'\n'^'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'['\n']'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'\\xfc'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\\\'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'\\xd6'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n'\\xdc'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp1006": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1006',\n encode=Codec().encode,\n decode=Codec().decode,\n 
incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u06f0'\n'\\u06f1'\n'\\u06f2'\n'\\u06f3'\n'\\u06f4'\n'\\u06f5'\n'\\u06f6'\n'\\u06f7'\n'\\u06f8'\n'\\u06f9'\n'\\u060c'\n'\\u061b'\n'\\xad'\n'\\u061f'\n'\\ufe81'\n'\\ufe8d'\n'\\ufe8e'\n'\\ufe8e'\n'\\ufe8f'\n'\\ufe91'\n'\\ufb56'\n'\\ufb58'\n'\\ufe93'\n'\\ufe95'\n'\\ufe97'\n'\\ufb66'\n'\\ufb68'\n'\\ufe99'\n'\\ufe9b'\n'\\ufe9d'\n'\\ufe9f'\n'\\ufb7a'\n'\\ufb7c'\n'\\ufea1'\n'\\ufea3'\n'\\ufea5'\n'\\ufea7'\n'\\ufea9'\n'\\ufb84'\n'\\ufeab'\n'\\ufead'\n'\\ufb8c'\n'\\ufeaf'\n'\\ufb8a'\n'\\ufeb1'\n'\\ufeb3'\n'\\ufeb5'\n'\\ufeb7'\n'\\ufeb9'\n'\\ufebb'\n'\\ufebd'\n'\\ufebf'\n'\\ufec1'\n'\\ufec5'\n'\\ufec9'\n'\\ufeca'\n'\\ufecb'\n'\\ufecc'\n'\\ufecd'\n'\\ufece'\n'\\ufecf'\n'\\ufed0'\n'\\ufed1'\n'\\ufed3'\n'\\ufed5'\n'\\ufed7'\n'\\ufed9'\n'\\ufedb'\n'\\ufb92'\n'\\ufb94'\n'\\ufedd'\n'\\ufedf'\n'\\ufee0'\n'\\ufee1'\n'\\ufee3'\n'\\ufb9e'\n'\\ufee5'\n'\\ufee7'\n'\\ufe85'\n'\\ufeed'\n'\\ufba6'\n'\\ufba8'\n'\\ufba9'\n'\\ufbaa'\n'\\ufe80'\n'\\ufe89'\n'\\ufe8a'\n'\\ufe8b'\n'\\ufef1'\n'\\ufef2'\n'\\ufef3'\n'\\ufbb0'\n'\\ufbae'\n'\\ufe7c'\n'\\ufe7d'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp1251": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1251',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0402'\n'\\u0403'\n'\\u201a'\n'\\u0453'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u20ac'\n'\\u2030'\n'\\u0409'\n'\\u2039'\n'\\u040a'\n'\\u040c'\n'\\u040b'\n'\\u040f'\n'\\u0452'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\u0459'\n'\\u203a'\n'\\u045a'\n'\\u045c'\n'\\u045b'\n'\\u045f'\n'\\xa0'\n'\\u040e'\n'\\u045e'\n'\\u0408'\n'\\xa4'\n'\\u0490'\n'\\xa6'\n'\\xa7'\n'\\u0401'\n'\\xa9'\n'\\u0404'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\u0407'\n'\\xb0'\n'\\xb1'\n'\\u0406'\n'\\u0456'\n'\\u0491'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\u0451'\n'\\u2116'\n'\\u0454'\n'\\xbb'\n'\\u0458'\n'\\u0405'\n'\\u0455'\n'\\u0457'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.mac_turkish": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-turkish',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\xc6'\n'\\xd8'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u03c0'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\xe6'\n'\\xf8'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\xff'\n'\\u0178'\n'\\u011e'\n'\\u011f'\n'\\u0130'\n'\\u0131'\n'\\u015e'\n'\\u015f'\n'\\u2021'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\xca'\n'\\xc1'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\uf8ff'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\uf8a0'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u02d8'\n'\\u02d9'\n'\\u02da'\n'\\xb8'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso2022_jp_ext": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_jp_ext')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_jp_ext',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "encodings.iso8859_1": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass 
IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-1',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xd0'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\xde'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xf0'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\xfe'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.hz": [".py", "\n\n\n\n\n\nimport _codecs_cn,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_cn.getcodec('hz')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='hz',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_cn", "_multibytecodec", "codecs"]], "encodings.bz2_codec": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\nimport 
bz2\n\n\n\ndef bz2_encode(input,errors='strict'):\n assert errors =='strict'\n return(bz2.compress(input),len(input))\n \ndef bz2_decode(input,errors='strict'):\n assert errors =='strict'\n return(bz2.decompress(input),len(input))\n \nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return bz2_encode(input,errors)\n def decode(self,input,errors='strict'):\n return bz2_decode(input,errors)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def __init__(self,errors='strict'):\n assert errors =='strict'\n self.errors=errors\n self.compressobj=bz2.BZ2Compressor()\n \n def encode(self,input,final=False):\n if final:\n c=self.compressobj.compress(input)\n return c+self.compressobj.flush()\n else:\n return self.compressobj.compress(input)\n \n def reset(self):\n self.compressobj=bz2.BZ2Compressor()\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def __init__(self,errors='strict'):\n assert errors =='strict'\n self.errors=errors\n self.decompressobj=bz2.BZ2Decompressor()\n \n def decode(self,input,final=False):\n try:\n return self.decompressobj.decompress(input)\n except EOFError:\n return ''\n \n def reset(self):\n self.decompressobj=bz2.BZ2Decompressor()\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n charbuffertype=bytes\n \nclass StreamReader(Codec,codecs.StreamReader):\n charbuffertype=bytes\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name=\"bz2\",\n encode=bz2_encode,\n decode=bz2_decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n _is_text_encoding=False,\n )\n", ["bz2", "codecs"]], "encodings.quopri_codec": [".py", "''\n\n\n\n\nimport codecs\nimport quopri\nfrom io import BytesIO\n\ndef quopri_encode(input,errors='strict'):\n assert errors =='strict'\n f=BytesIO(input)\n g=BytesIO()\n quopri.encode(f,g,quotetabs=True)\n return(g.getvalue(),len(input))\n \ndef quopri_decode(input,errors='strict'):\n assert errors =='strict'\n f=BytesIO(input)\n g=BytesIO()\n quopri.decode(f,g)\n return(g.getvalue(),len(input))\n \nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return quopri_encode(input,errors)\n def decode(self,input,errors='strict'):\n return quopri_decode(input,errors)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return quopri_encode(input,self.errors)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return quopri_decode(input,self.errors)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n charbuffertype=bytes\n \nclass StreamReader(Codec,codecs.StreamReader):\n charbuffertype=bytes\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='quopri',\n encode=quopri_encode,\n decode=quopri_decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n _is_text_encoding=False,\n )\n", ["codecs", "io", "quopri"]], "encodings.kz1048": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass 
IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='kz1048',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0402'\n'\\u0403'\n'\\u201a'\n'\\u0453'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u20ac'\n'\\u2030'\n'\\u0409'\n'\\u2039'\n'\\u040a'\n'\\u049a'\n'\\u04ba'\n'\\u040f'\n'\\u0452'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\u0459'\n'\\u203a'\n'\\u045a'\n'\\u049b'\n'\\u04bb'\n'\\u045f'\n'\\xa0'\n'\\u04b0'\n'\\u04b1'\n'\\u04d8'\n'\\xa4'\n'\\u04e8'\n'\\xa6'\n'\\xa7'\n'\\u0401'\n'\\xa9'\n'\\u0492'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\u04ae'\n'\\xb0'\n'\\xb1'\n'\\u0406'\n'\\u0456'\n'\\u04e9'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\u0451'\n'\\u2116'\n'\\u0493'\n'\\xbb'\n'\\u04d9'\n'\\u04a2'\n'\\u04a3'\n'\\u04af'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.utf_8_sig": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\ndef encode(input,errors='strict'):\n return(codecs.BOM_UTF8+codecs.utf_8_encode(input,errors)[0],\n len(input))\n \ndef decode(input,errors='strict'):\n prefix=0\n if input[:3]==codecs.BOM_UTF8:\n input=input[3:]\n prefix=3\n (output,consumed)=codecs.utf_8_decode(input,errors,True)\n return(output,consumed+prefix)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def __init__(self,errors='strict'):\n codecs.IncrementalEncoder.__init__(self,errors)\n self.first=1\n \n def encode(self,input,final=False):\n if self.first:\n self.first=0\n return codecs.BOM_UTF8+\\\n codecs.utf_8_encode(input,self.errors)[0]\n else:\n return codecs.utf_8_encode(input,self.errors)[0]\n \n def 
reset(self):\n codecs.IncrementalEncoder.reset(self)\n self.first=1\n \n def getstate(self):\n return self.first\n \n def setstate(self,state):\n self.first=state\n \nclass IncrementalDecoder(codecs.BufferedIncrementalDecoder):\n def __init__(self,errors='strict'):\n codecs.BufferedIncrementalDecoder.__init__(self,errors)\n self.first=1\n \n def _buffer_decode(self,input,errors,final):\n if self.first:\n if len(input)<3:\n if codecs.BOM_UTF8.startswith(input):\n \n \n return(\"\",0)\n else:\n self.first=0\n else:\n self.first=0\n if input[:3]==codecs.BOM_UTF8:\n (output,consumed)=\\\n codecs.utf_8_decode(input[3:],errors,final)\n return(output,consumed+3)\n return codecs.utf_8_decode(input,errors,final)\n \n def reset(self):\n codecs.BufferedIncrementalDecoder.reset(self)\n self.first=1\n \n def getstate(self):\n state=codecs.BufferedIncrementalDecoder.getstate(self)\n \n return(state[0],self.first)\n \n def setstate(self,state):\n \n codecs.BufferedIncrementalDecoder.setstate(self,state)\n self.first=state[1]\n \nclass StreamWriter(codecs.StreamWriter):\n def reset(self):\n codecs.StreamWriter.reset(self)\n try:\n del self.encode\n except AttributeError:\n pass\n \n def encode(self,input,errors='strict'):\n self.encode=codecs.utf_8_encode\n return encode(input,errors)\n \nclass StreamReader(codecs.StreamReader):\n def reset(self):\n codecs.StreamReader.reset(self)\n try:\n del self.decode\n except AttributeError:\n pass\n \n def decode(self,input,errors='strict'):\n if len(input)<3:\n if codecs.BOM_UTF8.startswith(input):\n \n \n return(\"\",0)\n elif input[:3]==codecs.BOM_UTF8:\n self.decode=codecs.utf_8_decode\n (output,consumed)=codecs.utf_8_decode(input[3:],errors)\n return(output,consumed+3)\n \n self.decode=codecs.utf_8_decode\n return codecs.utf_8_decode(input,errors)\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='utf-8-sig',\n encode=encode,\n decode=decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["codecs"]], "encodings.koi8_t": [".py", "''\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='koi8-t',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u049b'\n'\\u0493'\n'\\u201a'\n'\\u0492'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\ufffe'\n'\\u2030'\n'\\u04b3'\n'\\u2039'\n'\\u04b2'\n'\\u04b7'\n'\\u04b6'\n'\\ufffe'\n'\\u049a'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\ufffe'\n'\\u203a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u04ef'\n'\\u04ee'\n'\\u0451'\n'\\xa4'\n'\\u04e3'\n'\\xa6'\n'\\xa7'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\ufffe'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\u0401'\n'\\ufffe'\n'\\u04e2'\n'\\xb6'\n'\\xb7'\n'\\ufffe'\n'\\u2116'\n'\\ufffe'\n'\\xbb'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa9'\n'\\u044e'\n'\\u0430'\n'\\u0431'\n'\\u0446'\n'\\u0434'\n'\\u0435'\n'\\u0444'\n'\\u0433'\n'\\u0445'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u044f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0436'\n'\\u0432'\n'\\u044c'\n'\\u044b'\n'\\u0437'\n'\\u0448'\n'\\u044d'\n'\\u0449'\n'\\u0447'\n'\\u044a'\n'\\u042e'\n'\\u0410'\n'\\u0411'\n'\\u0426'\n'\\u0414'\n'\\u0415'\n'\\u0424'\n'\\u0413'\n'\\u0425'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u042f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0416'\n'\\u0412'\n'\\u042c'\n'\\u042b'\n'\\u0417'\n'\\u0428'\n'\\u042d'\n'\\u0429'\n'\\u0427'\n'\\u042a'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.cp1255": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1255',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\ufffe'\n'\\u2039'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\ufffe'\n'\\u203a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\u20aa'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xd7'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xf7'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\u05b0'\n'\\u05b1'\n'\\u05b2'\n'\\u05b3'\n'\\u05b4'\n'\\u05b5'\n'\\u05b6'\n'\\u05b7'\n'\\u05b8'\n'\\u05b9'\n'\\ufffe'\n'\\u05bb'\n'\\u05bc'\n'\\u05bd'\n'\\u05be'\n'\\u05bf'\n'\\u05c0'\n'\\u05c1'\n'\\u05c2'\n'\\u05c3'\n'\\u05f0'\n'\\u05f1'\n'\\u05f2'\n'\\u05f3'\n'\\u05f4'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u05d0'\n'\\u05d1'\n'\\u05d2'\n'\\u05d3'\n'\\u05d4'\n'\\u05d5'\n'\\u05d6'\n'\\u05d7'\n'\\u05d8'\n'\\u05d9'\n'\\u05da'\n'\\u05db'\n'\\u05dc'\n'\\u05dd'\n'\\u05de'\n'\\u05df'\n'\\u05e0'\n'\\u05e1'\n'\\u05e2'\n'\\u05e3'\n'\\u05e4'\n'\\u05e5'\n'\\u05e6'\n'\\u05e7'\n'\\u05e8'\n'\\u05e9'\n'\\u05ea'\n'\\ufffe'\n'\\ufffe'\n'\\u200e'\n'\\u200f'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso2022_jp_3": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_jp_3')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_jp_3',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "encodings.shift_jis_2004": [".py", "\n\n\n\n\n\nimport _codecs_jp,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_jp.getcodec('shift_jis_2004')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='shift_jis_2004',\n 
encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_jp", "_multibytecodec", "codecs"]], "encodings.cp1026": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1026',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\xa0'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe1'\n'\\xe3'\n'\\xe5'\n'{'\n'\\xf1'\n'\\xc7'\n'.'\n'<'\n'('\n'+'\n'!'\n'&'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xec'\n'\\xdf'\n'\\u011e'\n'\\u0130'\n'*'\n')'\n';'\n'^'\n'-'\n'/'\n'\\xc2'\n'\\xc4'\n'\\xc0'\n'\\xc1'\n'\\xc3'\n'\\xc5'\n'['\n'\\xd1'\n'\\u015f'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\u0131'\n':'\n'\\xd6'\n'\\u015e'\n\"'\"\n'='\n'\\xdc'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'}'\n'`'\n'\\xa6'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\xa4'\n'\\xb5'\n'\\xf6'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n']'\n'$'\n'@'\n'\\xae'\n'\\xa2'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xac'\n'|'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'\\xe7'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'~'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'\\u011f'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'\\\\'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\xfc'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'#'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n'\"'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.charmap": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n\n\n encode=codecs.charmap_encode\n decode=codecs.charmap_decode\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def 
__init__(self,errors='strict',mapping=None):\n codecs.IncrementalEncoder.__init__(self,errors)\n self.mapping=mapping\n \n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,self.mapping)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def __init__(self,errors='strict',mapping=None):\n codecs.IncrementalDecoder.__init__(self,errors)\n self.mapping=mapping\n \n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,self.mapping)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n\n def __init__(self,stream,errors='strict',mapping=None):\n codecs.StreamWriter.__init__(self,stream,errors)\n self.mapping=mapping\n \n def encode(self,input,errors='strict'):\n return Codec.encode(input,errors,self.mapping)\n \nclass StreamReader(Codec,codecs.StreamReader):\n\n def __init__(self,stream,errors='strict',mapping=None):\n codecs.StreamReader.__init__(self,stream,errors)\n self.mapping=mapping\n \n def decode(self,input,errors='strict'):\n return Codec.decode(input,errors,self.mapping)\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='charmap',\n encode=Codec.encode,\n decode=Codec.decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n streamreader=StreamReader,\n )\n", ["codecs"]], "encodings.iso8859_5": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-5',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0401'\n'\\u0402'\n'\\u0403'\n'\\u0404'\n'\\u0405'\n'\\u0406'\n'\\u0407'\n'\\u0408'\n'\\u0409'\n'\\u040a'\n'\\u040b'\n'\\u040c'\n'\\xad'\n'\\u040e'\n'\\u040f'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n'\\u2116'\n'\\u0451'\n'\\u0452'\n'\\u0453'\n'\\u0454'\n'\\u0455'\n'\\u0456'\n'\\u0457'\n'\\u0458'\n'\\u0459'\n'\\u045a'\n'\\u045b'\n'\\u045c'\n'\\xa7'\n'\\u045e'\n'\\u045f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso8859_13": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-13',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u201d'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\u201e'\n'\\xa6'\n'\\xa7'\n'\\xd8'\n'\\xa9'\n'\\u0156'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xc6'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\u201c'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xf8'\n'\\xb9'\n'\\u0157'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xe6'\n'\\u0104'\n'\\u012e'\n'\\u0100'\n'\\u0106'\n'\\xc4'\n'\\xc5'\n'\\u0118'\n'\\u0112'\n'\\u010c'\n'\\xc9'\n'\\u0179'\n'\\u0116'\n'\\u0122'\n'\\u0136'\n'\\u012a'\n'\\u013b'\n'\\u0160'\n'\\u0143'\n'\\u0145'\n'\\xd3'\n'\\u014c'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\u0172'\n'\\u0141'\n'\\u015a'\n'\\u016a'\n'\\xdc'\n'\\u017b'\n'\\u017d'\n'\\xdf'\n'\\u0105'\n'\\u012f'\n'\\u0101'\n'\\u0107'\n'\\xe4'\n'\\xe5'\n'\\u0119'\n'\\u0113'\n'\\u010d'\n'\\xe9'\n'\\u017a'\n'\\u0117'\n'\\u0123'\n'\\u0137'\n'\\u012b'\n'\\u013c'\n'\\u0161'\n'\\u0144'\n'\\u0146'\n'\\xf3'\n'\\u014d'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\u0173'\n'\\u0142'\n'\\u015b'\n'\\u016b'\n'\\xfc'\n'\\u017c'\n'\\u017e'\n'\\u2019'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n", ["codecs"]], "encodings.iso2022_jp": [".py", "\n\n\n\n\n\nimport _codecs_iso2022,codecs\nimport _multibytecodec as mbc\n\ncodec=_codecs_iso2022.getcodec('iso2022_jp')\n\nclass Codec(codecs.Codec):\n encode=codec.encode\n decode=codec.decode\n \nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\ncodecs.IncrementalEncoder):\n codec=codec\n \nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\ncodecs.IncrementalDecoder):\n codec=codec\n \nclass StreamReader(Codec,mbc.MultibyteStreamReader,codecs.StreamReader):\n codec=codec\n \nclass StreamWriter(Codec,mbc.MultibyteStreamWriter,codecs.StreamWriter):\n codec=codec\n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso2022_jp',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n", ["_codecs_iso2022", "_multibytecodec", "codecs"]], "unittest.signals": [".py", "import signal\nimport weakref\n\nfrom functools import wraps\n\n__unittest=True\n\n\nclass _InterruptHandler(object):\n def __init__(self,default_handler):\n self.called=False\n self.original_handler=default_handler\n if isinstance(default_handler,int):\n if default_handler ==signal.SIG_DFL:\n \n default_handler=signal.default_int_handler\n elif default_handler ==signal.SIG_IGN:\n \n \n def default_handler(unused_signum,unused_frame):\n pass\n else:\n raise TypeError(\"expected SIGINT signal handler to be \"\n \"signal.SIG_IGN, signal.SIG_DFL, or a \"\n \"callable object\")\n self.default_handler=default_handler\n \n def __call__(self,signum,frame):\n installed_handler=signal.getsignal(signal.SIGINT)\n if installed_handler is not self:\n \n \n 
self.default_handler(signum,frame)\n \n if self.called:\n self.default_handler(signum,frame)\n self.called=True\n for result in _results.keys():\n result.stop()\n \n_results=weakref.WeakKeyDictionary()\ndef registerResult(result):\n _results[result]=1\n \ndef removeResult(result):\n return bool(_results.pop(result,None))\n \n_interrupt_handler=None\ndef installHandler():\n global _interrupt_handler\n if _interrupt_handler is None:\n default_handler=signal.getsignal(signal.SIGINT)\n _interrupt_handler=_InterruptHandler(default_handler)\n signal.signal(signal.SIGINT,_interrupt_handler)\n \n \ndef removeHandler(method=None):\n if method is not None:\n @wraps(method)\n def inner(*args,**kwargs):\n initial=signal.getsignal(signal.SIGINT)\n removeHandler()\n try:\n return method(*args,**kwargs)\n finally:\n signal.signal(signal.SIGINT,initial)\n return inner\n \n global _interrupt_handler\n if _interrupt_handler is not None:\n signal.signal(signal.SIGINT,_interrupt_handler.original_handler)\n", ["functools", "signal", "weakref"]], "unittest.runner": [".py", "''\n\nimport sys\nimport time\nimport warnings\n\nfrom. import result\nfrom.case import _SubTest\nfrom.signals import registerResult\n\n__unittest=True\n\n\nclass _WritelnDecorator(object):\n ''\n def __init__(self,stream):\n self.stream=stream\n \n def __getattr__(self,attr):\n if attr in('stream','__getstate__'):\n raise AttributeError(attr)\n return getattr(self.stream,attr)\n \n def writeln(self,arg=None):\n if arg:\n self.write(arg)\n self.write('\\n')\n \n \nclass TextTestResult(result.TestResult):\n ''\n\n\n \n separator1='='*70\n separator2='-'*70\n \n def __init__(self,stream,descriptions,verbosity,*,durations=None):\n ''\n \n super(TextTestResult,self).__init__(stream,descriptions,verbosity)\n self.stream=stream\n self.showAll=verbosity >1\n self.dots=verbosity ==1\n self.descriptions=descriptions\n self._newline=True\n self.durations=durations\n \n def getDescription(self,test):\n doc_first_line=test.shortDescription()\n if self.descriptions and doc_first_line:\n return '\\n'.join((str(test),doc_first_line))\n else:\n return str(test)\n \n def startTest(self,test):\n super(TextTestResult,self).startTest(test)\n if self.showAll:\n self.stream.write(self.getDescription(test))\n self.stream.write(\" ... \")\n self.stream.flush()\n self._newline=False\n \n def _write_status(self,test,status):\n is_subtest=isinstance(test,_SubTest)\n if is_subtest or self._newline:\n if not self._newline:\n self.stream.writeln()\n if is_subtest:\n self.stream.write(\" \")\n self.stream.write(self.getDescription(test))\n self.stream.write(\" ... 
\")\n self.stream.writeln(status)\n self.stream.flush()\n self._newline=True\n \n def addSubTest(self,test,subtest,err):\n if err is not None:\n if self.showAll:\n if issubclass(err[0],subtest.failureException):\n self._write_status(subtest,\"FAIL\")\n else:\n self._write_status(subtest,\"ERROR\")\n elif self.dots:\n if issubclass(err[0],subtest.failureException):\n self.stream.write('F')\n else:\n self.stream.write('E')\n self.stream.flush()\n super(TextTestResult,self).addSubTest(test,subtest,err)\n \n def addSuccess(self,test):\n super(TextTestResult,self).addSuccess(test)\n if self.showAll:\n self._write_status(test,\"ok\")\n elif self.dots:\n self.stream.write('.')\n self.stream.flush()\n \n def addError(self,test,err):\n super(TextTestResult,self).addError(test,err)\n if self.showAll:\n self._write_status(test,\"ERROR\")\n elif self.dots:\n self.stream.write('E')\n self.stream.flush()\n \n def addFailure(self,test,err):\n super(TextTestResult,self).addFailure(test,err)\n if self.showAll:\n self._write_status(test,\"FAIL\")\n elif self.dots:\n self.stream.write('F')\n self.stream.flush()\n \n def addSkip(self,test,reason):\n super(TextTestResult,self).addSkip(test,reason)\n if self.showAll:\n self._write_status(test,\"skipped {0!r}\".format(reason))\n elif self.dots:\n self.stream.write(\"s\")\n self.stream.flush()\n \n def addExpectedFailure(self,test,err):\n super(TextTestResult,self).addExpectedFailure(test,err)\n if self.showAll:\n self.stream.writeln(\"expected failure\")\n self.stream.flush()\n elif self.dots:\n self.stream.write(\"x\")\n self.stream.flush()\n \n def addUnexpectedSuccess(self,test):\n super(TextTestResult,self).addUnexpectedSuccess(test)\n if self.showAll:\n self.stream.writeln(\"unexpected success\")\n self.stream.flush()\n elif self.dots:\n self.stream.write(\"u\")\n self.stream.flush()\n \n def printErrors(self):\n if self.dots or self.showAll:\n self.stream.writeln()\n self.stream.flush()\n self.printErrorList('ERROR',self.errors)\n self.printErrorList('FAIL',self.failures)\n unexpectedSuccesses=getattr(self,'unexpectedSuccesses',())\n if unexpectedSuccesses:\n self.stream.writeln(self.separator1)\n for test in unexpectedSuccesses:\n self.stream.writeln(f\"UNEXPECTED SUCCESS: {self.getDescription(test)}\")\n self.stream.flush()\n \n def printErrorList(self,flavour,errors):\n for test,err in errors:\n self.stream.writeln(self.separator1)\n self.stream.writeln(\"%s: %s\"%(flavour,self.getDescription(test)))\n self.stream.writeln(self.separator2)\n self.stream.writeln(\"%s\"%err)\n self.stream.flush()\n \n \nclass TextTestRunner(object):\n ''\n\n\n\n \n resultclass=TextTestResult\n \n def __init__(self,stream=None,descriptions=True,verbosity=1,\n failfast=False,buffer=False,resultclass=None,warnings=None,\n *,tb_locals=False,durations=None):\n ''\n\n\n\n \n if stream is None:\n stream=sys.stderr\n self.stream=_WritelnDecorator(stream)\n self.descriptions=descriptions\n self.verbosity=verbosity\n self.failfast=failfast\n self.buffer=buffer\n self.tb_locals=tb_locals\n self.durations=durations\n self.warnings=warnings\n if resultclass is not None:\n self.resultclass=resultclass\n \n def _makeResult(self):\n try:\n return self.resultclass(self.stream,self.descriptions,\n self.verbosity,durations=self.durations)\n except TypeError:\n \n return self.resultclass(self.stream,self.descriptions,\n self.verbosity)\n \n def _printDurations(self,result):\n if not result.collectedDurations:\n return\n ls=sorted(result.collectedDurations,key=lambda x:x[1],\n reverse=True)\n 
if self.durations >0:\n ls=ls[:self.durations]\n self.stream.writeln(\"Slowest test durations\")\n if hasattr(result,'separator2'):\n self.stream.writeln(result.separator2)\n hidden=False\n for test,elapsed in ls:\n if self.verbosity <2 and elapsed <0.001:\n hidden=True\n continue\n self.stream.writeln(\"%-10s %s\"%(\"%.3fs\"%elapsed,test))\n if hidden:\n self.stream.writeln(\"\\n(durations < 0.001s were hidden; \"\n \"use -v to show these durations)\")\n else:\n self.stream.writeln(\"\")\n \n def run(self,test):\n ''\n result=self._makeResult()\n registerResult(result)\n result.failfast=self.failfast\n result.buffer=self.buffer\n result.tb_locals=self.tb_locals\n with warnings.catch_warnings():\n if self.warnings:\n \n warnings.simplefilter(self.warnings)\n startTime=time.perf_counter()\n startTestRun=getattr(result,'startTestRun',None)\n if startTestRun is not None:\n startTestRun()\n try:\n test(result)\n finally:\n stopTestRun=getattr(result,'stopTestRun',None)\n if stopTestRun is not None:\n stopTestRun()\n stopTime=time.perf_counter()\n timeTaken=stopTime -startTime\n result.printErrors()\n if self.durations is not None:\n self._printDurations(result)\n \n if hasattr(result,'separator2'):\n self.stream.writeln(result.separator2)\n \n run=result.testsRun\n self.stream.writeln(\"Ran %d test%s in %.3fs\"%\n (run,run !=1 and \"s\"or \"\",timeTaken))\n self.stream.writeln()\n \n expectedFails=unexpectedSuccesses=skipped=0\n try:\n results=map(len,(result.expectedFailures,\n result.unexpectedSuccesses,\n result.skipped))\n except AttributeError:\n pass\n else:\n expectedFails,unexpectedSuccesses,skipped=results\n \n infos=[]\n if not result.wasSuccessful():\n self.stream.write(\"FAILED\")\n failed,errored=len(result.failures),len(result.errors)\n if failed:\n infos.append(\"failures=%d\"%failed)\n if errored:\n infos.append(\"errors=%d\"%errored)\n elif run ==0:\n self.stream.write(\"NO TESTS RAN\")\n else:\n self.stream.write(\"OK\")\n if skipped:\n infos.append(\"skipped=%d\"%skipped)\n if expectedFails:\n infos.append(\"expected failures=%d\"%expectedFails)\n if unexpectedSuccesses:\n infos.append(\"unexpected successes=%d\"%unexpectedSuccesses)\n if infos:\n self.stream.writeln(\" (%s)\"%(\", \".join(infos),))\n else:\n self.stream.write(\"\\n\")\n self.stream.flush()\n return result\n", ["sys", "time", "unittest", "unittest.case", "unittest.result", "unittest.signals", "warnings"]], "unittest.suite": [".py", "''\n\nimport sys\n\nfrom. import case\nfrom. 
import util\n\n__unittest=True\n\n\ndef _call_if_exists(parent,attr):\n func=getattr(parent,attr,lambda:None)\n func()\n \n \nclass BaseTestSuite(object):\n ''\n \n _cleanup=True\n \n def __init__(self,tests=()):\n self._tests=[]\n self._removed_tests=0\n self.addTests(tests)\n \n def __repr__(self):\n return \"<%s tests=%s>\"%(util.strclass(self.__class__),list(self))\n \n def __eq__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n return list(self)==list(other)\n \n def __iter__(self):\n return iter(self._tests)\n \n def countTestCases(self):\n cases=self._removed_tests\n for test in self:\n if test:\n cases +=test.countTestCases()\n return cases\n \n def addTest(self,test):\n \n if not callable(test):\n raise TypeError(\"{} is not callable\".format(repr(test)))\n if isinstance(test,type)and issubclass(test,\n (case.TestCase,TestSuite)):\n raise TypeError(\"TestCases and TestSuites must be instantiated \"\n \"before passing them to addTest()\")\n self._tests.append(test)\n \n def addTests(self,tests):\n if isinstance(tests,str):\n raise TypeError(\"tests must be an iterable of tests, not a string\")\n for test in tests:\n self.addTest(test)\n \n def run(self,result):\n for index,test in enumerate(self):\n if result.shouldStop:\n break\n test(result)\n if self._cleanup:\n self._removeTestAtIndex(index)\n return result\n \n def _removeTestAtIndex(self,index):\n ''\n try:\n test=self._tests[index]\n except TypeError:\n \n pass\n else:\n \n \n if hasattr(test,'countTestCases'):\n self._removed_tests +=test.countTestCases()\n self._tests[index]=None\n \n def __call__(self,*args,**kwds):\n return self.run(*args,**kwds)\n \n def debug(self):\n ''\n for test in self:\n test.debug()\n \n \nclass TestSuite(BaseTestSuite):\n ''\n\n\n\n\n\n\n \n \n def run(self,result,debug=False):\n topLevel=False\n if getattr(result,'_testRunEntered',False)is False:\n result._testRunEntered=topLevel=True\n \n for index,test in enumerate(self):\n if result.shouldStop:\n break\n \n if _isnotsuite(test):\n self._tearDownPreviousClass(test,result)\n self._handleModuleFixture(test,result)\n self._handleClassSetUp(test,result)\n result._previousTestClass=test.__class__\n \n if(getattr(test.__class__,'_classSetupFailed',False)or\n getattr(result,'_moduleSetUpFailed',False)):\n continue\n \n if not debug:\n test(result)\n else:\n test.debug()\n \n if self._cleanup:\n self._removeTestAtIndex(index)\n \n if topLevel:\n self._tearDownPreviousClass(None,result)\n self._handleModuleTearDown(result)\n result._testRunEntered=False\n return result\n \n def debug(self):\n ''\n debug=_DebugResult()\n self.run(debug,True)\n \n \n \n def _handleClassSetUp(self,test,result):\n previousClass=getattr(result,'_previousTestClass',None)\n currentClass=test.__class__\n if currentClass ==previousClass:\n return\n if result._moduleSetUpFailed:\n return\n if getattr(currentClass,\"__unittest_skip__\",False):\n return\n \n failed=False\n try:\n currentClass._classSetupFailed=False\n except TypeError:\n \n \n pass\n \n setUpClass=getattr(currentClass,'setUpClass',None)\n doClassCleanups=getattr(currentClass,'doClassCleanups',None)\n if setUpClass is not None:\n _call_if_exists(result,'_setupStdout')\n try:\n try:\n setUpClass()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n failed=True\n try:\n currentClass._classSetupFailed=True\n except TypeError:\n pass\n className=util.strclass(currentClass)\n self._createClassOrModuleLevelException(result,e,\n 'setUpClass',\n className)\n if failed 
and doClassCleanups is not None:\n doClassCleanups()\n for exc_info in currentClass.tearDown_exceptions:\n self._createClassOrModuleLevelException(\n result,exc_info[1],'setUpClass',className,\n info=exc_info)\n finally:\n _call_if_exists(result,'_restoreStdout')\n \n def _get_previous_module(self,result):\n previousModule=None\n previousClass=getattr(result,'_previousTestClass',None)\n if previousClass is not None:\n previousModule=previousClass.__module__\n return previousModule\n \n \n def _handleModuleFixture(self,test,result):\n previousModule=self._get_previous_module(result)\n currentModule=test.__class__.__module__\n if currentModule ==previousModule:\n return\n \n self._handleModuleTearDown(result)\n \n \n result._moduleSetUpFailed=False\n try:\n module=sys.modules[currentModule]\n except KeyError:\n return\n setUpModule=getattr(module,'setUpModule',None)\n if setUpModule is not None:\n _call_if_exists(result,'_setupStdout')\n try:\n try:\n setUpModule()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n result._moduleSetUpFailed=True\n self._createClassOrModuleLevelException(result,e,\n 'setUpModule',\n currentModule)\n if result._moduleSetUpFailed:\n try:\n case.doModuleCleanups()\n except Exception as e:\n self._createClassOrModuleLevelException(result,e,\n 'setUpModule',\n currentModule)\n finally:\n _call_if_exists(result,'_restoreStdout')\n \n def _createClassOrModuleLevelException(self,result,exc,method_name,\n parent,info=None):\n errorName=f'{method_name} ({parent})'\n self._addClassOrModuleLevelException(result,exc,errorName,info)\n \n def _addClassOrModuleLevelException(self,result,exception,errorName,\n info=None):\n error=_ErrorHolder(errorName)\n addSkip=getattr(result,'addSkip',None)\n if addSkip is not None and isinstance(exception,case.SkipTest):\n addSkip(error,str(exception))\n else:\n if not info:\n result.addError(error,sys.exc_info())\n else:\n result.addError(error,info)\n \n def _handleModuleTearDown(self,result):\n previousModule=self._get_previous_module(result)\n if previousModule is None:\n return\n if result._moduleSetUpFailed:\n return\n \n try:\n module=sys.modules[previousModule]\n except KeyError:\n return\n \n _call_if_exists(result,'_setupStdout')\n try:\n tearDownModule=getattr(module,'tearDownModule',None)\n if tearDownModule is not None:\n try:\n tearDownModule()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n self._createClassOrModuleLevelException(result,e,\n 'tearDownModule',\n previousModule)\n try:\n case.doModuleCleanups()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n self._createClassOrModuleLevelException(result,e,\n 'tearDownModule',\n previousModule)\n finally:\n _call_if_exists(result,'_restoreStdout')\n \n def _tearDownPreviousClass(self,test,result):\n previousClass=getattr(result,'_previousTestClass',None)\n currentClass=test.__class__\n if currentClass ==previousClass or previousClass is None:\n return\n if getattr(previousClass,'_classSetupFailed',False):\n return\n if getattr(result,'_moduleSetUpFailed',False):\n return\n if getattr(previousClass,\"__unittest_skip__\",False):\n return\n \n tearDownClass=getattr(previousClass,'tearDownClass',None)\n doClassCleanups=getattr(previousClass,'doClassCleanups',None)\n if tearDownClass is None and doClassCleanups is None:\n return\n \n _call_if_exists(result,'_setupStdout')\n try:\n if tearDownClass is not None:\n try:\n tearDownClass()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n 
className=util.strclass(previousClass)\n self._createClassOrModuleLevelException(result,e,\n 'tearDownClass',\n className)\n if doClassCleanups is not None:\n doClassCleanups()\n for exc_info in previousClass.tearDown_exceptions:\n if isinstance(result,_DebugResult):\n raise exc_info[1]\n className=util.strclass(previousClass)\n self._createClassOrModuleLevelException(result,exc_info[1],\n 'tearDownClass',\n className,\n info=exc_info)\n finally:\n _call_if_exists(result,'_restoreStdout')\n \n \nclass _ErrorHolder(object):\n ''\n\n\n\n \n \n \n \n \n failureException=None\n \n def __init__(self,description):\n self.description=description\n \n def id(self):\n return self.description\n \n def shortDescription(self):\n return None\n \n def __repr__(self):\n return \"\"%(self.description,)\n \n def __str__(self):\n return self.id()\n \n def run(self,result):\n \n \n pass\n \n def __call__(self,result):\n return self.run(result)\n \n def countTestCases(self):\n return 0\n \ndef _isnotsuite(test):\n ''\n try:\n iter(test)\n except TypeError:\n return True\n return False\n \n \nclass _DebugResult(object):\n ''\n _previousTestClass=None\n _moduleSetUpFailed=False\n shouldStop=False\n", ["sys", "unittest", "unittest.case", "unittest.util"]], "unittest.util": [".py", "''\n\nfrom collections import namedtuple,Counter\nfrom os.path import commonprefix\n\n__unittest=True\n\n_MAX_LENGTH=80\n_PLACEHOLDER_LEN=12\n_MIN_BEGIN_LEN=5\n_MIN_END_LEN=5\n_MIN_COMMON_LEN=5\n_MIN_DIFF_LEN=_MAX_LENGTH -\\\n(_MIN_BEGIN_LEN+_PLACEHOLDER_LEN+_MIN_COMMON_LEN+\n_PLACEHOLDER_LEN+_MIN_END_LEN)\nassert _MIN_DIFF_LEN >=0\n\ndef _shorten(s,prefixlen,suffixlen):\n skip=len(s)-prefixlen -suffixlen\n if skip >_PLACEHOLDER_LEN:\n s='%s[%d chars]%s'%(s[:prefixlen],skip,s[len(s)-suffixlen:])\n return s\n \ndef _common_shorten_repr(*args):\n args=tuple(map(safe_repr,args))\n maxlen=max(map(len,args))\n if maxlen <=_MAX_LENGTH:\n return args\n \n prefix=commonprefix(args)\n prefixlen=len(prefix)\n \n common_len=_MAX_LENGTH -\\\n (maxlen -prefixlen+_MIN_BEGIN_LEN+_PLACEHOLDER_LEN)\n if common_len >_MIN_COMMON_LEN:\n assert _MIN_BEGIN_LEN+_PLACEHOLDER_LEN+_MIN_COMMON_LEN+\\\n (maxlen -prefixlen)<_MAX_LENGTH\n prefix=_shorten(prefix,_MIN_BEGIN_LEN,common_len)\n return tuple(prefix+s[prefixlen:]for s in args)\n \n prefix=_shorten(prefix,_MIN_BEGIN_LEN,_MIN_COMMON_LEN)\n return tuple(prefix+_shorten(s[prefixlen:],_MIN_DIFF_LEN,_MIN_END_LEN)\n for s in args)\n \ndef safe_repr(obj,short=False):\n try:\n result=repr(obj)\n except Exception:\n result=object.__repr__(obj)\n if not short or len(result)<_MAX_LENGTH:\n return result\n return result[:_MAX_LENGTH]+' [truncated]...'\n \ndef strclass(cls):\n return \"%s.%s\"%(cls.__module__,cls.__qualname__)\n \ndef sorted_list_difference(expected,actual):\n ''\n\n\n\n\n\n \n i=j=0\n missing=[]\n unexpected=[]\n while True:\n try:\n e=expected[i]\n a=actual[j]\n if e a:\n unexpected.append(a)\n j +=1\n while actual[j]==a:\n j +=1\n else:\n i +=1\n try:\n while expected[i]==e:\n i +=1\n finally:\n j +=1\n while actual[j]==a:\n j +=1\n except IndexError:\n missing.extend(expected[i:])\n unexpected.extend(actual[j:])\n break\n return missing,unexpected\n \n \ndef unorderable_list_difference(expected,actual):\n ''\n\n\n\n \n missing=[]\n while expected:\n item=expected.pop()\n try:\n actual.remove(item)\n except ValueError:\n missing.append(item)\n \n \n return missing,actual\n \ndef three_way_cmp(x,y):\n ''\n return(x >y)-(x \"%\n (util.strclass(self.__class__),self.testsRun,len(self.errors),\n 
len(self.failures)))\n", ["functools", "io", "sys", "traceback", "unittest", "unittest.util"]], "unittest.loader": [".py", "''\n\nimport os\nimport re\nimport sys\nimport traceback\nimport types\nimport functools\n\nfrom fnmatch import fnmatch,fnmatchcase\n\nfrom. import case,suite,util\n\n__unittest=True\n\n\n\n\nVALID_MODULE_NAME=re.compile(r'[_a-z]\\w*\\.py$',re.IGNORECASE)\n\n\nclass _FailedTest(case.TestCase):\n _testMethodName=None\n \n def __init__(self,method_name,exception):\n self._exception=exception\n super(_FailedTest,self).__init__(method_name)\n \n def __getattr__(self,name):\n if name !=self._testMethodName:\n return super(_FailedTest,self).__getattr__(name)\n def testFailure():\n raise self._exception\n return testFailure\n \n \ndef _make_failed_import_test(name,suiteClass):\n message='Failed to import test module: %s\\n%s'%(\n name,traceback.format_exc())\n return _make_failed_test(name,ImportError(message),suiteClass,message)\n \ndef _make_failed_load_tests(name,exception,suiteClass):\n message='Failed to call load_tests:\\n%s'%(traceback.format_exc(),)\n return _make_failed_test(\n name,exception,suiteClass,message)\n \ndef _make_failed_test(methodname,exception,suiteClass,message):\n test=_FailedTest(methodname,exception)\n return suiteClass((test,)),message\n \ndef _make_skipped_test(methodname,exception,suiteClass):\n @case.skip(str(exception))\n def testSkipped(self):\n pass\n attrs={methodname:testSkipped}\n TestClass=type(\"ModuleSkipped\",(case.TestCase,),attrs)\n return suiteClass((TestClass(methodname),))\n \ndef _splitext(path):\n return os.path.splitext(path)[0]\n \n \nclass TestLoader(object):\n ''\n\n\n \n testMethodPrefix='test'\n sortTestMethodsUsing=staticmethod(util.three_way_cmp)\n testNamePatterns=None\n suiteClass=suite.TestSuite\n _top_level_dir=None\n \n def __init__(self):\n super(TestLoader,self).__init__()\n self.errors=[]\n \n \n self._loading_packages=set()\n \n def loadTestsFromTestCase(self,testCaseClass):\n ''\n if issubclass(testCaseClass,suite.TestSuite):\n raise TypeError(\"Test cases should not be derived from \"\n \"TestSuite. 
Maybe you meant to derive from \"\n \"TestCase?\")\n testCaseNames=self.getTestCaseNames(testCaseClass)\n if not testCaseNames and hasattr(testCaseClass,'runTest'):\n testCaseNames=['runTest']\n loaded_suite=self.suiteClass(map(testCaseClass,testCaseNames))\n return loaded_suite\n \n def loadTestsFromModule(self,module,*,pattern=None):\n ''\n tests=[]\n for name in dir(module):\n obj=getattr(module,name)\n if isinstance(obj,type)and issubclass(obj,case.TestCase):\n tests.append(self.loadTestsFromTestCase(obj))\n \n load_tests=getattr(module,'load_tests',None)\n tests=self.suiteClass(tests)\n if load_tests is not None:\n try:\n return load_tests(self,tests,pattern)\n except Exception as e:\n error_case,error_message=_make_failed_load_tests(\n module.__name__,e,self.suiteClass)\n self.errors.append(error_message)\n return error_case\n return tests\n \n def loadTestsFromName(self,name,module=None):\n ''\n\n\n\n\n\n\n \n parts=name.split('.')\n error_case,error_message=None,None\n if module is None:\n parts_copy=parts[:]\n while parts_copy:\n try:\n module_name='.'.join(parts_copy)\n module=__import__(module_name)\n break\n except ImportError:\n next_attribute=parts_copy.pop()\n \n error_case,error_message=_make_failed_import_test(\n next_attribute,self.suiteClass)\n if not parts_copy:\n \n self.errors.append(error_message)\n return error_case\n parts=parts[1:]\n obj=module\n for part in parts:\n try:\n parent,obj=obj,getattr(obj,part)\n except AttributeError as e:\n \n if(getattr(obj,'__path__',None)is not None\n and error_case is not None):\n \n \n \n \n \n self.errors.append(error_message)\n return error_case\n else:\n \n error_case,error_message=_make_failed_test(\n part,e,self.suiteClass,\n 'Failed to access attribute:\\n%s'%(\n traceback.format_exc(),))\n self.errors.append(error_message)\n return error_case\n \n if isinstance(obj,types.ModuleType):\n return self.loadTestsFromModule(obj)\n elif isinstance(obj,type)and issubclass(obj,case.TestCase):\n return self.loadTestsFromTestCase(obj)\n elif(isinstance(obj,types.FunctionType)and\n isinstance(parent,type)and\n issubclass(parent,case.TestCase)):\n name=parts[-1]\n inst=parent(name)\n \n if not isinstance(getattr(inst,name),types.FunctionType):\n return self.suiteClass([inst])\n elif isinstance(obj,suite.TestSuite):\n return obj\n if callable(obj):\n test=obj()\n if isinstance(test,suite.TestSuite):\n return test\n elif isinstance(test,case.TestCase):\n return self.suiteClass([test])\n else:\n raise TypeError(\"calling %s returned %s, not a test\"%\n (obj,test))\n else:\n raise TypeError(\"don't know how to make test from: %s\"%obj)\n \n def loadTestsFromNames(self,names,module=None):\n ''\n\n \n suites=[self.loadTestsFromName(name,module)for name in names]\n return self.suiteClass(suites)\n \n def getTestCaseNames(self,testCaseClass):\n ''\n \n def shouldIncludeMethod(attrname):\n if not attrname.startswith(self.testMethodPrefix):\n return False\n testFunc=getattr(testCaseClass,attrname)\n if not callable(testFunc):\n return False\n fullName=f'%s.%s.%s'%(\n testCaseClass.__module__,testCaseClass.__qualname__,attrname\n )\n return self.testNamePatterns is None or\\\n any(fnmatchcase(fullName,pattern)for pattern in self.testNamePatterns)\n testFnNames=list(filter(shouldIncludeMethod,dir(testCaseClass)))\n if self.sortTestMethodsUsing:\n testFnNames.sort(key=functools.cmp_to_key(self.sortTestMethodsUsing))\n return testFnNames\n \n def discover(self,start_dir,pattern='test*.py',top_level_dir=None):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n set_implicit_top=False\n if top_level_dir is None and self._top_level_dir is not None:\n \n top_level_dir=self._top_level_dir\n elif top_level_dir is None:\n set_implicit_top=True\n top_level_dir=start_dir\n \n top_level_dir=os.path.abspath(top_level_dir)\n \n if not top_level_dir in sys.path:\n \n \n \n \n sys.path.insert(0,top_level_dir)\n self._top_level_dir=top_level_dir\n \n is_not_importable=False\n if os.path.isdir(os.path.abspath(start_dir)):\n start_dir=os.path.abspath(start_dir)\n if start_dir !=top_level_dir:\n is_not_importable=not os.path.isfile(os.path.join(start_dir,'__init__.py'))\n else:\n \n try:\n __import__(start_dir)\n except ImportError:\n is_not_importable=True\n else:\n the_module=sys.modules[start_dir]\n top_part=start_dir.split('.')[0]\n try:\n start_dir=os.path.abspath(\n os.path.dirname((the_module.__file__)))\n except AttributeError:\n if the_module.__name__ in sys.builtin_module_names:\n \n raise TypeError('Can not use builtin modules '\n 'as dotted module names')from None\n else:\n raise TypeError(\n f\"don't know how to discover from {the_module !r}\"\n )from None\n \n if set_implicit_top:\n self._top_level_dir=self._get_directory_containing_module(top_part)\n sys.path.remove(top_level_dir)\n \n if is_not_importable:\n raise ImportError('Start directory is not importable: %r'%start_dir)\n \n tests=list(self._find_tests(start_dir,pattern))\n return self.suiteClass(tests)\n \n def _get_directory_containing_module(self,module_name):\n module=sys.modules[module_name]\n full_path=os.path.abspath(module.__file__)\n \n if os.path.basename(full_path).lower().startswith('__init__.py'):\n return os.path.dirname(os.path.dirname(full_path))\n else:\n \n \n \n return os.path.dirname(full_path)\n \n def _get_name_from_path(self,path):\n if path ==self._top_level_dir:\n return '.'\n path=_splitext(os.path.normpath(path))\n \n _relpath=os.path.relpath(path,self._top_level_dir)\n assert not os.path.isabs(_relpath),\"Path must be within the project\"\n assert not _relpath.startswith('..'),\"Path must be within the project\"\n \n name=_relpath.replace(os.path.sep,'.')\n return name\n \n def _get_module_from_name(self,name):\n __import__(name)\n return sys.modules[name]\n \n def _match_path(self,path,full_path,pattern):\n \n return fnmatch(path,pattern)\n \n def _find_tests(self,start_dir,pattern):\n ''\n \n name=self._get_name_from_path(start_dir)\n \n \n if name !='.'and name not in self._loading_packages:\n \n \n tests,should_recurse=self._find_test_path(start_dir,pattern)\n if tests is not None:\n yield tests\n if not should_recurse:\n \n \n return\n \n paths=sorted(os.listdir(start_dir))\n for path in paths:\n full_path=os.path.join(start_dir,path)\n tests,should_recurse=self._find_test_path(full_path,pattern)\n if tests is not None:\n yield tests\n if should_recurse:\n \n name=self._get_name_from_path(full_path)\n self._loading_packages.add(name)\n try:\n yield from self._find_tests(full_path,pattern)\n finally:\n self._loading_packages.discard(name)\n \n def _find_test_path(self,full_path,pattern):\n ''\n\n\n\n\n\n \n basename=os.path.basename(full_path)\n if os.path.isfile(full_path):\n if not VALID_MODULE_NAME.match(basename):\n \n return None,False\n if not self._match_path(basename,full_path,pattern):\n return None,False\n \n name=self._get_name_from_path(full_path)\n try:\n module=self._get_module_from_name(name)\n except case.SkipTest as e:\n return _make_skipped_test(name,e,self.suiteClass),False\n 
except:\n error_case,error_message=\\\n _make_failed_import_test(name,self.suiteClass)\n self.errors.append(error_message)\n return error_case,False\n else:\n mod_file=os.path.abspath(\n getattr(module,'__file__',full_path))\n realpath=_splitext(\n os.path.realpath(mod_file))\n fullpath_noext=_splitext(\n os.path.realpath(full_path))\n if realpath.lower()!=fullpath_noext.lower():\n module_dir=os.path.dirname(realpath)\n mod_name=_splitext(\n os.path.basename(full_path))\n expected_dir=os.path.dirname(full_path)\n msg=(\"%r module incorrectly imported from %r. Expected \"\n \"%r. Is this module globally installed?\")\n raise ImportError(\n msg %(mod_name,module_dir,expected_dir))\n return self.loadTestsFromModule(module,pattern=pattern),False\n elif os.path.isdir(full_path):\n if not os.path.isfile(os.path.join(full_path,'__init__.py')):\n return None,False\n \n load_tests=None\n tests=None\n name=self._get_name_from_path(full_path)\n try:\n package=self._get_module_from_name(name)\n except case.SkipTest as e:\n return _make_skipped_test(name,e,self.suiteClass),False\n except:\n error_case,error_message=\\\n _make_failed_import_test(name,self.suiteClass)\n self.errors.append(error_message)\n return error_case,False\n else:\n load_tests=getattr(package,'load_tests',None)\n \n self._loading_packages.add(name)\n try:\n tests=self.loadTestsFromModule(package,pattern=pattern)\n if load_tests is not None:\n \n return tests,False\n return tests,True\n finally:\n self._loading_packages.discard(name)\n else:\n return None,False\n \n \ndefaultTestLoader=TestLoader()\n\n\n\n\n\ndef _makeLoader(prefix,sortUsing,suiteClass=None,testNamePatterns=None):\n loader=TestLoader()\n loader.sortTestMethodsUsing=sortUsing\n loader.testMethodPrefix=prefix\n loader.testNamePatterns=testNamePatterns\n if suiteClass:\n loader.suiteClass=suiteClass\n return loader\n \ndef getTestCaseNames(testCaseClass,prefix,sortUsing=util.three_way_cmp,testNamePatterns=None):\n import warnings\n warnings.warn(\n \"unittest.getTestCaseNames() is deprecated and will be removed in Python 3.13. \"\n \"Please use unittest.TestLoader.getTestCaseNames() instead.\",\n DeprecationWarning,stacklevel=2\n )\n return _makeLoader(prefix,sortUsing,testNamePatterns=testNamePatterns).getTestCaseNames(testCaseClass)\n \ndef makeSuite(testCaseClass,prefix='test',sortUsing=util.three_way_cmp,\nsuiteClass=suite.TestSuite):\n import warnings\n warnings.warn(\n \"unittest.makeSuite() is deprecated and will be removed in Python 3.13. \"\n \"Please use unittest.TestLoader.loadTestsFromTestCase() instead.\",\n DeprecationWarning,stacklevel=2\n )\n return _makeLoader(prefix,sortUsing,suiteClass).loadTestsFromTestCase(\n testCaseClass)\n \ndef findTestCases(module,prefix='test',sortUsing=util.three_way_cmp,\nsuiteClass=suite.TestSuite):\n import warnings\n warnings.warn(\n \"unittest.findTestCases() is deprecated and will be removed in Python 3.13. \"\n \"Please use unittest.TestLoader.loadTestsFromModule() instead.\",\n DeprecationWarning,stacklevel=2\n )\n return _makeLoader(prefix,sortUsing,suiteClass).loadTestsFromModule(\\\n module)\n", ["fnmatch", "functools", "os", "re", "sys", "traceback", "types", "unittest", "unittest.case", "unittest.suite", "unittest.util", "warnings"]], "unittest.case": [".py", "''\n\nimport sys\nimport functools\nimport difflib\nimport pprint\nimport re\nimport warnings\nimport collections\nimport contextlib\nimport traceback\nimport time\nimport types\n\nfrom. 
import result\nfrom.util import(strclass,safe_repr,_count_diff_all_purpose,\n_count_diff_hashable,_common_shorten_repr)\n\n__unittest=True\n\n_subtest_msg_sentinel=object()\n\nDIFF_OMITTED=('\\nDiff is %s characters long. '\n'Set self.maxDiff to None to see it.')\n\nclass SkipTest(Exception):\n ''\n\n\n\n\n \n \nclass _ShouldStop(Exception):\n ''\n\n \n \nclass _UnexpectedSuccess(Exception):\n ''\n\n \n \n \nclass _Outcome(object):\n def __init__(self,result=None):\n self.expecting_failure=False\n self.result=result\n self.result_supports_subtests=hasattr(result,\"addSubTest\")\n self.success=True\n self.expectedFailure=None\n \n @contextlib.contextmanager\n def testPartExecutor(self,test_case,subTest=False):\n old_success=self.success\n self.success=True\n try:\n yield\n except KeyboardInterrupt:\n raise\n except SkipTest as e:\n self.success=False\n _addSkip(self.result,test_case,str(e))\n except _ShouldStop:\n pass\n except:\n exc_info=sys.exc_info()\n if self.expecting_failure:\n self.expectedFailure=exc_info\n else:\n self.success=False\n if subTest:\n self.result.addSubTest(test_case.test_case,test_case,exc_info)\n else:\n _addError(self.result,test_case,exc_info)\n \n \n exc_info=None\n else:\n if subTest and self.success:\n self.result.addSubTest(test_case.test_case,test_case,None)\n finally:\n self.success=self.success and old_success\n \n \ndef _addSkip(result,test_case,reason):\n addSkip=getattr(result,'addSkip',None)\n if addSkip is not None:\n addSkip(test_case,reason)\n else:\n warnings.warn(\"TestResult has no addSkip method, skips not reported\",\n RuntimeWarning,2)\n result.addSuccess(test_case)\n \ndef _addError(result,test,exc_info):\n if result is not None and exc_info is not None:\n if issubclass(exc_info[0],test.failureException):\n result.addFailure(test,exc_info)\n else:\n result.addError(test,exc_info)\n \ndef _id(obj):\n return obj\n \n \ndef _enter_context(cm,addcleanup):\n\n\n cls=type(cm)\n try:\n enter=cls.__enter__\n exit=cls.__exit__\n except AttributeError:\n raise TypeError(f\"'{cls.__module__}.{cls.__qualname__}' object does \"\n f\"not support the context manager protocol\")from None\n result=enter(cm)\n addcleanup(exit,cm,None,None,None)\n return result\n \n \n_module_cleanups=[]\ndef addModuleCleanup(function,/,*args,**kwargs):\n ''\n \n _module_cleanups.append((function,args,kwargs))\n \ndef enterModuleContext(cm):\n ''\n return _enter_context(cm,addModuleCleanup)\n \n \ndef doModuleCleanups():\n ''\n \n exceptions=[]\n while _module_cleanups:\n function,args,kwargs=_module_cleanups.pop()\n try:\n function(*args,**kwargs)\n except Exception as exc:\n exceptions.append(exc)\n if exceptions:\n \n \n raise exceptions[0]\n \n \ndef skip(reason):\n ''\n\n \n def decorator(test_item):\n if not isinstance(test_item,type):\n @functools.wraps(test_item)\n def skip_wrapper(*args,**kwargs):\n raise SkipTest(reason)\n test_item=skip_wrapper\n \n test_item.__unittest_skip__=True\n test_item.__unittest_skip_why__=reason\n return test_item\n if isinstance(reason,types.FunctionType):\n test_item=reason\n reason=''\n return decorator(test_item)\n return decorator\n \ndef skipIf(condition,reason):\n ''\n\n \n if condition:\n return skip(reason)\n return _id\n \ndef skipUnless(condition,reason):\n ''\n\n \n if not condition:\n return skip(reason)\n return _id\n \ndef expectedFailure(test_item):\n test_item.__unittest_expecting_failure__=True\n return test_item\n \ndef _is_subtype(expected,basetype):\n if isinstance(expected,tuple):\n return 
all(_is_subtype(e,basetype)for e in expected)\n return isinstance(expected,type)and issubclass(expected,basetype)\n \nclass _BaseTestCaseContext:\n\n def __init__(self,test_case):\n self.test_case=test_case\n \n def _raiseFailure(self,standardMsg):\n msg=self.test_case._formatMessage(self.msg,standardMsg)\n raise self.test_case.failureException(msg)\n \nclass _AssertRaisesBaseContext(_BaseTestCaseContext):\n\n def __init__(self,expected,test_case,expected_regex=None):\n _BaseTestCaseContext.__init__(self,test_case)\n self.expected=expected\n self.test_case=test_case\n if expected_regex is not None:\n expected_regex=re.compile(expected_regex)\n self.expected_regex=expected_regex\n self.obj_name=None\n self.msg=None\n \n def handle(self,name,args,kwargs):\n ''\n\n\n\n\n \n try:\n if not _is_subtype(self.expected,self._base_type):\n raise TypeError('%s() arg 1 must be %s'%\n (name,self._base_type_str))\n if not args:\n self.msg=kwargs.pop('msg',None)\n if kwargs:\n raise TypeError('%r is an invalid keyword argument for '\n 'this function'%(next(iter(kwargs)),))\n return self\n \n callable_obj,*args=args\n try:\n self.obj_name=callable_obj.__name__\n except AttributeError:\n self.obj_name=str(callable_obj)\n with self:\n callable_obj(*args,**kwargs)\n finally:\n \n self=None\n \n \nclass _AssertRaisesContext(_AssertRaisesBaseContext):\n ''\n \n _base_type=BaseException\n _base_type_str='an exception type or tuple of exception types'\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,tb):\n if exc_type is None:\n try:\n exc_name=self.expected.__name__\n except AttributeError:\n exc_name=str(self.expected)\n if self.obj_name:\n self._raiseFailure(\"{} not raised by {}\".format(exc_name,\n self.obj_name))\n else:\n self._raiseFailure(\"{} not raised\".format(exc_name))\n else:\n traceback.clear_frames(tb)\n if not issubclass(exc_type,self.expected):\n \n return False\n \n self.exception=exc_value.with_traceback(None)\n if self.expected_regex is None:\n return True\n \n expected_regex=self.expected_regex\n if not expected_regex.search(str(exc_value)):\n self._raiseFailure('\"{}\" does not match \"{}\"'.format(\n expected_regex.pattern,str(exc_value)))\n return True\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \nclass _AssertWarnsContext(_AssertRaisesBaseContext):\n ''\n \n _base_type=Warning\n _base_type_str='a warning type or tuple of warning types'\n \n def __enter__(self):\n \n \n for v in list(sys.modules.values()):\n if getattr(v,'__warningregistry__',None):\n v.__warningregistry__={}\n self.warnings_manager=warnings.catch_warnings(record=True)\n self.warnings=self.warnings_manager.__enter__()\n warnings.simplefilter(\"always\",self.expected)\n return self\n \n def __exit__(self,exc_type,exc_value,tb):\n self.warnings_manager.__exit__(exc_type,exc_value,tb)\n if exc_type is not None:\n \n return\n try:\n exc_name=self.expected.__name__\n except AttributeError:\n exc_name=str(self.expected)\n first_matching=None\n for m in self.warnings:\n w=m.message\n if not isinstance(w,self.expected):\n continue\n if first_matching is None:\n first_matching=w\n if(self.expected_regex is not None and\n not self.expected_regex.search(str(w))):\n continue\n \n self.warning=w\n self.filename=m.filename\n self.lineno=m.lineno\n return\n \n if first_matching is not None:\n self._raiseFailure('\"{}\" does not match \"{}\"'.format(\n self.expected_regex.pattern,str(first_matching)))\n if self.obj_name:\n self._raiseFailure(\"{} not triggered by {}\".format(exc_name,\n 
self.obj_name))\n else:\n self._raiseFailure(\"{} not triggered\".format(exc_name))\n \n \nclass _OrderedChainMap(collections.ChainMap):\n def __iter__(self):\n seen=set()\n for mapping in self.maps:\n for k in mapping:\n if k not in seen:\n seen.add(k)\n yield k\n \n \nclass TestCase(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n failureException=AssertionError\n \n longMessage=True\n \n maxDiff=80 *8\n \n \n \n _diffThreshold=2 **16\n \n def __init_subclass__(cls,*args,**kwargs):\n \n cls._classSetupFailed=False\n cls._class_cleanups=[]\n super().__init_subclass__(*args,**kwargs)\n \n def __init__(self,methodName='runTest'):\n ''\n\n\n \n self._testMethodName=methodName\n self._outcome=None\n self._testMethodDoc='No test'\n try:\n testMethod=getattr(self,methodName)\n except AttributeError:\n if methodName !='runTest':\n \n \n raise ValueError(\"no such test method in %s: %s\"%\n (self.__class__,methodName))\n else:\n self._testMethodDoc=testMethod.__doc__\n self._cleanups=[]\n self._subtest=None\n \n \n \n \n self._type_equality_funcs={}\n self.addTypeEqualityFunc(dict,'assertDictEqual')\n self.addTypeEqualityFunc(list,'assertListEqual')\n self.addTypeEqualityFunc(tuple,'assertTupleEqual')\n self.addTypeEqualityFunc(set,'assertSetEqual')\n self.addTypeEqualityFunc(frozenset,'assertSetEqual')\n self.addTypeEqualityFunc(str,'assertMultiLineEqual')\n \n def addTypeEqualityFunc(self,typeobj,function):\n ''\n\n\n\n\n\n\n\n\n\n\n \n self._type_equality_funcs[typeobj]=function\n \n def addCleanup(self,function,/,*args,**kwargs):\n ''\n\n\n\n \n self._cleanups.append((function,args,kwargs))\n \n def enterContext(self,cm):\n ''\n\n\n\n \n return _enter_context(cm,self.addCleanup)\n \n @classmethod\n def addClassCleanup(cls,function,/,*args,**kwargs):\n ''\n \n cls._class_cleanups.append((function,args,kwargs))\n \n @classmethod\n def enterClassContext(cls,cm):\n ''\n return _enter_context(cm,cls.addClassCleanup)\n \n def setUp(self):\n ''\n pass\n \n def tearDown(self):\n ''\n pass\n \n @classmethod\n def setUpClass(cls):\n ''\n \n @classmethod\n def tearDownClass(cls):\n ''\n \n def countTestCases(self):\n return 1\n \n def defaultTestResult(self):\n return result.TestResult()\n \n def shortDescription(self):\n ''\n\n\n\n\n \n doc=self._testMethodDoc\n return doc.strip().split(\"\\n\")[0].strip()if doc else None\n \n \n def id(self):\n return \"%s.%s\"%(strclass(self.__class__),self._testMethodName)\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self._testMethodName ==other._testMethodName\n \n def __hash__(self):\n return hash((type(self),self._testMethodName))\n \n def __str__(self):\n return \"%s (%s.%s)\"%(self._testMethodName,strclass(self.__class__),self._testMethodName)\n \n def __repr__(self):\n return \"<%s testMethod=%s>\"%\\\n (strclass(self.__class__),self._testMethodName)\n \n @contextlib.contextmanager\n def subTest(self,msg=_subtest_msg_sentinel,**params):\n ''\n\n\n\n\n \n if self._outcome is None or not self._outcome.result_supports_subtests:\n yield\n return\n parent=self._subtest\n if parent is None:\n params_map=_OrderedChainMap(params)\n else:\n params_map=parent.params.new_child(params)\n self._subtest=_SubTest(self,msg,params_map)\n try:\n with self._outcome.testPartExecutor(self._subtest,subTest=True):\n yield\n if not self._outcome.success:\n result=self._outcome.result\n if result is not None and result.failfast:\n raise _ShouldStop\n elif self._outcome.expectedFailure:\n \n \n raise 
_ShouldStop\n finally:\n self._subtest=parent\n \n def _addExpectedFailure(self,result,exc_info):\n try:\n addExpectedFailure=result.addExpectedFailure\n except AttributeError:\n warnings.warn(\"TestResult has no addExpectedFailure method, reporting as passes\",\n RuntimeWarning)\n result.addSuccess(self)\n else:\n addExpectedFailure(self,exc_info)\n \n def _addUnexpectedSuccess(self,result):\n try:\n addUnexpectedSuccess=result.addUnexpectedSuccess\n except AttributeError:\n warnings.warn(\"TestResult has no addUnexpectedSuccess method, reporting as failure\",\n RuntimeWarning)\n \n \n try:\n raise _UnexpectedSuccess from None\n except _UnexpectedSuccess:\n result.addFailure(self,sys.exc_info())\n else:\n addUnexpectedSuccess(self)\n \n def _addDuration(self,result,elapsed):\n try:\n addDuration=result.addDuration\n except AttributeError:\n warnings.warn(\"TestResult has no addDuration method\",\n RuntimeWarning)\n else:\n addDuration(self,elapsed)\n \n def _callSetUp(self):\n self.setUp()\n \n def _callTestMethod(self,method):\n if method()is not None:\n warnings.warn(f'It is deprecated to return a value that is not None from a '\n f'test case ({method})',DeprecationWarning,stacklevel=3)\n \n def _callTearDown(self):\n self.tearDown()\n \n def _callCleanup(self,function,/,*args,**kwargs):\n function(*args,**kwargs)\n \n def run(self,result=None):\n if result is None:\n result=self.defaultTestResult()\n startTestRun=getattr(result,'startTestRun',None)\n stopTestRun=getattr(result,'stopTestRun',None)\n if startTestRun is not None:\n startTestRun()\n else:\n stopTestRun=None\n \n result.startTest(self)\n try:\n testMethod=getattr(self,self._testMethodName)\n if(getattr(self.__class__,\"__unittest_skip__\",False)or\n getattr(testMethod,\"__unittest_skip__\",False)):\n \n skip_why=(getattr(self.__class__,'__unittest_skip_why__','')\n or getattr(testMethod,'__unittest_skip_why__',''))\n _addSkip(result,self,skip_why)\n return result\n \n expecting_failure=(\n getattr(self,\"__unittest_expecting_failure__\",False)or\n getattr(testMethod,\"__unittest_expecting_failure__\",False)\n )\n outcome=_Outcome(result)\n start_time=time.perf_counter()\n try:\n self._outcome=outcome\n \n with outcome.testPartExecutor(self):\n self._callSetUp()\n if outcome.success:\n outcome.expecting_failure=expecting_failure\n with outcome.testPartExecutor(self):\n self._callTestMethod(testMethod)\n outcome.expecting_failure=False\n with outcome.testPartExecutor(self):\n self._callTearDown()\n self.doCleanups()\n self._addDuration(result,(time.perf_counter()-start_time))\n \n if outcome.success:\n if expecting_failure:\n if outcome.expectedFailure:\n self._addExpectedFailure(result,outcome.expectedFailure)\n else:\n self._addUnexpectedSuccess(result)\n else:\n result.addSuccess(self)\n return result\n finally:\n \n \n outcome.expectedFailure=None\n outcome=None\n \n \n self._outcome=None\n \n finally:\n result.stopTest(self)\n if stopTestRun is not None:\n stopTestRun()\n \n def doCleanups(self):\n ''\n \n outcome=self._outcome or _Outcome()\n while self._cleanups:\n function,args,kwargs=self._cleanups.pop()\n with outcome.testPartExecutor(self):\n self._callCleanup(function,*args,**kwargs)\n \n \n \n return outcome.success\n \n @classmethod\n def doClassCleanups(cls):\n ''\n \n cls.tearDown_exceptions=[]\n while cls._class_cleanups:\n function,args,kwargs=cls._class_cleanups.pop()\n try:\n function(*args,**kwargs)\n except Exception:\n cls.tearDown_exceptions.append(sys.exc_info())\n \n def 
__call__(self,*args,**kwds):\n return self.run(*args,**kwds)\n \n def debug(self):\n ''\n testMethod=getattr(self,self._testMethodName)\n if(getattr(self.__class__,\"__unittest_skip__\",False)or\n getattr(testMethod,\"__unittest_skip__\",False)):\n \n skip_why=(getattr(self.__class__,'__unittest_skip_why__','')\n or getattr(testMethod,'__unittest_skip_why__',''))\n raise SkipTest(skip_why)\n \n self._callSetUp()\n self._callTestMethod(testMethod)\n self._callTearDown()\n while self._cleanups:\n function,args,kwargs=self._cleanups.pop()\n self._callCleanup(function,*args,**kwargs)\n \n def skipTest(self,reason):\n ''\n raise SkipTest(reason)\n \n def fail(self,msg=None):\n ''\n raise self.failureException(msg)\n \n def assertFalse(self,expr,msg=None):\n ''\n if expr:\n msg=self._formatMessage(msg,\"%s is not false\"%safe_repr(expr))\n raise self.failureException(msg)\n \n def assertTrue(self,expr,msg=None):\n ''\n if not expr:\n msg=self._formatMessage(msg,\"%s is not true\"%safe_repr(expr))\n raise self.failureException(msg)\n \n def _formatMessage(self,msg,standardMsg):\n ''\n\n\n\n\n\n\n\n \n if not self.longMessage:\n return msg or standardMsg\n if msg is None:\n return standardMsg\n try:\n \n \n return '%s : %s'%(standardMsg,msg)\n except UnicodeDecodeError:\n return '%s : %s'%(safe_repr(standardMsg),safe_repr(msg))\n \n def assertRaises(self,expected_exception,*args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n context=_AssertRaisesContext(expected_exception,self)\n try:\n return context.handle('assertRaises',args,kwargs)\n finally:\n \n context=None\n \n def assertWarns(self,expected_warning,*args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n context=_AssertWarnsContext(expected_warning,self)\n return context.handle('assertWarns',args,kwargs)\n \n def assertLogs(self,logger=None,level=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n from._log import _AssertLogsContext\n return _AssertLogsContext(self,logger,level,no_logs=False)\n \n def assertNoLogs(self,logger=None,level=None):\n ''\n\n\n\n \n from._log import _AssertLogsContext\n return _AssertLogsContext(self,logger,level,no_logs=True)\n \n def _getAssertEqualityFunc(self,first,second):\n ''\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n if type(first)is type(second):\n asserter=self._type_equality_funcs.get(type(first))\n if asserter is not None:\n if isinstance(asserter,str):\n asserter=getattr(self,asserter)\n return asserter\n \n return self._baseAssertEqual\n \n def _baseAssertEqual(self,first,second,msg=None):\n ''\n if not first ==second:\n standardMsg='%s != %s'%_common_shorten_repr(first,second)\n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertEqual(self,first,second,msg=None):\n ''\n\n \n assertion_func=self._getAssertEqualityFunc(first,second)\n assertion_func(first,second,msg=msg)\n \n def assertNotEqual(self,first,second,msg=None):\n ''\n\n \n if not first !=second:\n msg=self._formatMessage(msg,'%s == %s'%(safe_repr(first),\n safe_repr(second)))\n raise self.failureException(msg)\n \n def assertAlmostEqual(self,first,second,places=None,msg=None,\n delta=None):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if first ==second:\n \n return\n if delta is not None and places is not None:\n raise TypeError(\"specify delta or places not both\")\n \n diff=abs(first -second)\n if delta is not None:\n if diff <=delta:\n return\n \n standardMsg='%s != %s within %s delta (%s difference)'%(\n safe_repr(first),\n safe_repr(second),\n safe_repr(delta),\n 
safe_repr(diff))\n else:\n if places is None:\n places=7\n \n if round(diff,places)==0:\n return\n \n standardMsg='%s != %s within %r places (%s difference)'%(\n safe_repr(first),\n safe_repr(second),\n places,\n safe_repr(diff))\n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertNotAlmostEqual(self,first,second,places=None,msg=None,\n delta=None):\n ''\n\n\n\n\n\n\n\n\n \n if delta is not None and places is not None:\n raise TypeError(\"specify delta or places not both\")\n diff=abs(first -second)\n if delta is not None:\n if not(first ==second)and diff >delta:\n return\n standardMsg='%s == %s within %s delta (%s difference)'%(\n safe_repr(first),\n safe_repr(second),\n safe_repr(delta),\n safe_repr(diff))\n else:\n if places is None:\n places=7\n if not(first ==second)and round(diff,places)!=0:\n return\n standardMsg='%s == %s within %r places'%(safe_repr(first),\n safe_repr(second),\n places)\n \n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertSequenceEqual(self,seq1,seq2,msg=None,seq_type=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if seq_type is not None:\n seq_type_name=seq_type.__name__\n if not isinstance(seq1,seq_type):\n raise self.failureException('First sequence is not a %s: %s'\n %(seq_type_name,safe_repr(seq1)))\n if not isinstance(seq2,seq_type):\n raise self.failureException('Second sequence is not a %s: %s'\n %(seq_type_name,safe_repr(seq2)))\n else:\n seq_type_name=\"sequence\"\n \n differing=None\n try:\n len1=len(seq1)\n except(TypeError,NotImplementedError):\n differing='First %s has no length. Non-sequence?'%(\n seq_type_name)\n \n if differing is None:\n try:\n len2=len(seq2)\n except(TypeError,NotImplementedError):\n differing='Second %s has no length. 
Non-sequence?'%(\n seq_type_name)\n \n if differing is None:\n if seq1 ==seq2:\n return\n \n differing='%ss differ: %s != %s\\n'%(\n (seq_type_name.capitalize(),)+\n _common_shorten_repr(seq1,seq2))\n \n for i in range(min(len1,len2)):\n try:\n item1=seq1[i]\n except(TypeError,IndexError,NotImplementedError):\n differing +=('\\nUnable to index element %d of first %s\\n'%\n (i,seq_type_name))\n break\n \n try:\n item2=seq2[i]\n except(TypeError,IndexError,NotImplementedError):\n differing +=('\\nUnable to index element %d of second %s\\n'%\n (i,seq_type_name))\n break\n \n if item1 !=item2:\n differing +=('\\nFirst differing element %d:\\n%s\\n%s\\n'%\n ((i,)+_common_shorten_repr(item1,item2)))\n break\n else:\n if(len1 ==len2 and seq_type is None and\n type(seq1)!=type(seq2)):\n \n return\n \n if len1 >len2:\n differing +=('\\nFirst %s contains %d additional '\n 'elements.\\n'%(seq_type_name,len1 -len2))\n try:\n differing +=('First extra element %d:\\n%s\\n'%\n (len2,safe_repr(seq1[len2])))\n except(TypeError,IndexError,NotImplementedError):\n differing +=('Unable to index element %d '\n 'of first %s\\n'%(len2,seq_type_name))\n elif len1 self._diffThreshold or\n len(second)>self._diffThreshold):\n self._baseAssertEqual(first,second,msg)\n \n \n \n \n \n first_presplit=first\n second_presplit=second\n if first and second:\n if first[-1]!='\\n'or second[-1]!='\\n':\n first_presplit +='\\n'\n second_presplit +='\\n'\n elif second and second[-1]!='\\n':\n second_presplit +='\\n'\n elif first and first[-1]!='\\n':\n first_presplit +='\\n'\n \n firstlines=first_presplit.splitlines(keepends=True)\n secondlines=second_presplit.splitlines(keepends=True)\n \n \n standardMsg='%s != %s'%_common_shorten_repr(first,second)\n diff='\\n'+''.join(difflib.ndiff(firstlines,secondlines))\n standardMsg=self._truncateMessage(standardMsg,diff)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertLess(self,a,b,msg=None):\n ''\n if not a b:\n standardMsg='%s not greater than %s'%(safe_repr(a),safe_repr(b))\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertGreaterEqual(self,a,b,msg=None):\n ''\n if not a >=b:\n standardMsg='%s not greater than or equal to %s'%(safe_repr(a),safe_repr(b))\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertIsNone(self,obj,msg=None):\n ''\n if obj is not None:\n standardMsg='%s is not None'%(safe_repr(obj),)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertIsNotNone(self,obj,msg=None):\n ''\n if obj is None:\n standardMsg='unexpectedly None'\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertIsInstance(self,obj,cls,msg=None):\n ''\n \n if not isinstance(obj,cls):\n standardMsg='%s is not an instance of %r'%(safe_repr(obj),cls)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertNotIsInstance(self,obj,cls,msg=None):\n ''\n if isinstance(obj,cls):\n standardMsg='%s is an instance of %r'%(safe_repr(obj),cls)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertRaisesRegex(self,expected_exception,expected_regex,\n *args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n \n context=_AssertRaisesContext(expected_exception,self,expected_regex)\n return context.handle('assertRaisesRegex',args,kwargs)\n \n def assertWarnsRegex(self,expected_warning,expected_regex,\n *args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n context=_AssertWarnsContext(expected_warning,self,expected_regex)\n return context.handle('assertWarnsRegex',args,kwargs)\n \n def assertRegex(self,text,expected_regex,msg=None):\n ''\n if 
isinstance(expected_regex,(str,bytes)):\n assert expected_regex,\"expected_regex must not be empty.\"\n expected_regex=re.compile(expected_regex)\n if not expected_regex.search(text):\n standardMsg=\"Regex didn't match: %r not found in %r\"%(\n expected_regex.pattern,text)\n \n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertNotRegex(self,text,unexpected_regex,msg=None):\n ''\n if isinstance(unexpected_regex,(str,bytes)):\n unexpected_regex=re.compile(unexpected_regex)\n match=unexpected_regex.search(text)\n if match:\n standardMsg='Regex matched: %r matches %r in %r'%(\n text[match.start():match.end()],\n unexpected_regex.pattern,\n text)\n \n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n \n \nclass FunctionTestCase(TestCase):\n ''\n\n\n\n\n\n \n \n def __init__(self,testFunc,setUp=None,tearDown=None,description=None):\n super(FunctionTestCase,self).__init__()\n self._setUpFunc=setUp\n self._tearDownFunc=tearDown\n self._testFunc=testFunc\n self._description=description\n \n def setUp(self):\n if self._setUpFunc is not None:\n self._setUpFunc()\n \n def tearDown(self):\n if self._tearDownFunc is not None:\n self._tearDownFunc()\n \n def runTest(self):\n self._testFunc()\n \n def id(self):\n return self._testFunc.__name__\n \n def __eq__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n \n return self._setUpFunc ==other._setUpFunc and\\\n self._tearDownFunc ==other._tearDownFunc and\\\n self._testFunc ==other._testFunc and\\\n self._description ==other._description\n \n def __hash__(self):\n return hash((type(self),self._setUpFunc,self._tearDownFunc,\n self._testFunc,self._description))\n \n def __str__(self):\n return \"%s (%s)\"%(strclass(self.__class__),\n self._testFunc.__name__)\n \n def __repr__(self):\n return \"<%s tec=%s>\"%(strclass(self.__class__),\n self._testFunc)\n \n def shortDescription(self):\n if self._description is not None:\n return self._description\n doc=self._testFunc.__doc__\n return doc and doc.split(\"\\n\")[0].strip()or None\n \n \nclass _SubTest(TestCase):\n\n def __init__(self,test_case,message,params):\n super().__init__()\n self._message=message\n self.test_case=test_case\n self.params=params\n self.failureException=test_case.failureException\n \n def runTest(self):\n raise NotImplementedError(\"subtests cannot be run directly\")\n \n def _subDescription(self):\n parts=[]\n if self._message is not _subtest_msg_sentinel:\n parts.append(\"[{}]\".format(self._message))\n if self.params:\n params_desc=', '.join(\n \"{}={!r}\".format(k,v)\n for(k,v)in self.params.items())\n parts.append(\"({})\".format(params_desc))\n return \" \".join(parts)or '()'\n \n def id(self):\n return \"{} {}\".format(self.test_case.id(),self._subDescription())\n \n def shortDescription(self):\n ''\n\n \n return self.test_case.shortDescription()\n \n def __str__(self):\n return \"{} {}\".format(self.test_case,self._subDescription())\n", ["collections", "contextlib", "difflib", "functools", "pprint", "re", "sys", "time", "traceback", "types", "unittest", "unittest._log", "unittest.result", "unittest.util", "warnings"]], "unittest.main": [".py", "''\n\nimport sys\nimport argparse\nimport os\nimport warnings\n\nfrom. 
import loader,runner\nfrom.signals import installHandler\n\n__unittest=True\n_NO_TESTS_EXITCODE=5\n\nMAIN_EXAMPLES=\"\"\"\\\nExamples:\n %(prog)s test_module - run tests from test_module\n %(prog)s module.TestClass - run tests from module.TestClass\n %(prog)s module.Class.test_method - run specified test method\n %(prog)s path/to/test_file.py - run tests from test_file.py\n\"\"\"\n\nMODULE_EXAMPLES=\"\"\"\\\nExamples:\n %(prog)s - run default set of tests\n %(prog)s MyTestSuite - run suite 'MyTestSuite'\n %(prog)s MyTestCase.testSomething - run MyTestCase.testSomething\n %(prog)s MyTestCase - run all 'test*' test methods\n in MyTestCase\n\"\"\"\n\ndef _convert_name(name):\n\n\n\n\n if os.path.isfile(name)and name.lower().endswith('.py'):\n if os.path.isabs(name):\n rel_path=os.path.relpath(name,os.getcwd())\n if os.path.isabs(rel_path)or rel_path.startswith(os.pardir):\n return name\n name=rel_path\n \n \n return os.path.normpath(name)[:-3].replace('\\\\','.').replace('/','.')\n return name\n \ndef _convert_names(names):\n return[_convert_name(name)for name in names]\n \n \ndef _convert_select_pattern(pattern):\n if not '*'in pattern:\n pattern='*%s*'%pattern\n return pattern\n \n \nclass TestProgram(object):\n ''\n\n \n \n module=None\n verbosity=1\n failfast=catchbreak=buffer=progName=warnings=testNamePatterns=None\n _discovery_parser=None\n \n def __init__(self,module='__main__',defaultTest=None,argv=None,\n testRunner=None,testLoader=loader.defaultTestLoader,\n exit=True,verbosity=1,failfast=None,catchbreak=None,\n buffer=None,warnings=None,*,tb_locals=False,\n durations=None):\n if isinstance(module,str):\n self.module=__import__(module)\n for part in module.split('.')[1:]:\n self.module=getattr(self.module,part)\n else:\n self.module=module\n if argv is None:\n argv=sys.argv\n \n self.exit=exit\n self.failfast=failfast\n self.catchbreak=catchbreak\n self.verbosity=verbosity\n self.buffer=buffer\n self.tb_locals=tb_locals\n self.durations=durations\n if warnings is None and not sys.warnoptions:\n \n \n \n self.warnings='default'\n else:\n \n \n \n \n \n self.warnings=warnings\n self.defaultTest=defaultTest\n self.testRunner=testRunner\n self.testLoader=testLoader\n self.progName=os.path.basename(argv[0])\n self.parseArgs(argv)\n self.runTests()\n \n def usageExit(self,msg=None):\n warnings.warn(\"TestProgram.usageExit() is deprecated and will be\"\n \" removed in Python 3.13\",DeprecationWarning)\n if msg:\n print(msg)\n if self._discovery_parser is None:\n self._initArgParsers()\n self._print_help()\n sys.exit(2)\n \n def _print_help(self,*args,**kwargs):\n if self.module is None:\n print(self._main_parser.format_help())\n print(MAIN_EXAMPLES %{'prog':self.progName})\n self._discovery_parser.print_help()\n else:\n print(self._main_parser.format_help())\n print(MODULE_EXAMPLES %{'prog':self.progName})\n \n def parseArgs(self,argv):\n self._initArgParsers()\n if self.module is None:\n if len(argv)>1 and argv[1].lower()=='discover':\n self._do_discovery(argv[2:])\n return\n self._main_parser.parse_args(argv[1:],self)\n if not self.tests:\n \n \n self._do_discovery([])\n return\n else:\n self._main_parser.parse_args(argv[1:],self)\n \n if self.tests:\n self.testNames=_convert_names(self.tests)\n if __name__ =='__main__':\n \n self.module=None\n elif self.defaultTest is None:\n \n self.testNames=None\n elif isinstance(self.defaultTest,str):\n self.testNames=(self.defaultTest,)\n else:\n self.testNames=list(self.defaultTest)\n self.createTests()\n \n def 
createTests(self,from_discovery=False,Loader=None):\n if self.testNamePatterns:\n self.testLoader.testNamePatterns=self.testNamePatterns\n if from_discovery:\n loader=self.testLoader if Loader is None else Loader()\n self.test=loader.discover(self.start,self.pattern,self.top)\n elif self.testNames is None:\n self.test=self.testLoader.loadTestsFromModule(self.module)\n else:\n self.test=self.testLoader.loadTestsFromNames(self.testNames,\n self.module)\n \n def _initArgParsers(self):\n parent_parser=self._getParentArgParser()\n self._main_parser=self._getMainArgParser(parent_parser)\n self._discovery_parser=self._getDiscoveryArgParser(parent_parser)\n \n def _getParentArgParser(self):\n parser=argparse.ArgumentParser(add_help=False)\n \n parser.add_argument('-v','--verbose',dest='verbosity',\n action='store_const',const=2,\n help='Verbose output')\n parser.add_argument('-q','--quiet',dest='verbosity',\n action='store_const',const=0,\n help='Quiet output')\n parser.add_argument('--locals',dest='tb_locals',\n action='store_true',\n help='Show local variables in tracebacks')\n parser.add_argument('--durations',dest='durations',type=int,\n default=None,metavar=\"N\",\n help='Show the N slowest test cases (N=0 for all)')\n if self.failfast is None:\n parser.add_argument('-f','--failfast',dest='failfast',\n action='store_true',\n help='Stop on first fail or error')\n self.failfast=False\n if self.catchbreak is None:\n parser.add_argument('-c','--catch',dest='catchbreak',\n action='store_true',\n help='Catch Ctrl-C and display results so far')\n self.catchbreak=False\n if self.buffer is None:\n parser.add_argument('-b','--buffer',dest='buffer',\n action='store_true',\n help='Buffer stdout and stderr during tests')\n self.buffer=False\n if self.testNamePatterns is None:\n parser.add_argument('-k',dest='testNamePatterns',\n action='append',type=_convert_select_pattern,\n help='Only run tests which match the given substring')\n self.testNamePatterns=[]\n \n return parser\n \n def _getMainArgParser(self,parent):\n parser=argparse.ArgumentParser(parents=[parent])\n parser.prog=self.progName\n parser.print_help=self._print_help\n \n parser.add_argument('tests',nargs='*',\n help='a list of any number of test modules, '\n 'classes and test methods.')\n \n return parser\n \n def _getDiscoveryArgParser(self,parent):\n parser=argparse.ArgumentParser(parents=[parent])\n parser.prog='%s discover'%self.progName\n parser.epilog=('For test discovery all test modules must be '\n 'importable from the top level directory of the '\n 'project.')\n \n parser.add_argument('-s','--start-directory',dest='start',\n help=\"Directory to start discovery ('.' 
default)\")\n parser.add_argument('-p','--pattern',dest='pattern',\n help=\"Pattern to match tests ('test*.py' default)\")\n parser.add_argument('-t','--top-level-directory',dest='top',\n help='Top level directory of project (defaults to '\n 'start directory)')\n for arg in('start','pattern','top'):\n parser.add_argument(arg,nargs='?',\n default=argparse.SUPPRESS,\n help=argparse.SUPPRESS)\n \n return parser\n \n def _do_discovery(self,argv,Loader=None):\n self.start='.'\n self.pattern='test*.py'\n self.top=None\n if argv is not None:\n \n if self._discovery_parser is None:\n \n self._initArgParsers()\n self._discovery_parser.parse_args(argv,self)\n \n self.createTests(from_discovery=True,Loader=Loader)\n \n def runTests(self):\n if self.catchbreak:\n installHandler()\n if self.testRunner is None:\n self.testRunner=runner.TextTestRunner\n if isinstance(self.testRunner,type):\n try:\n try:\n testRunner=self.testRunner(verbosity=self.verbosity,\n failfast=self.failfast,\n buffer=self.buffer,\n warnings=self.warnings,\n tb_locals=self.tb_locals,\n durations=self.durations)\n except TypeError:\n \n testRunner=self.testRunner(verbosity=self.verbosity,\n failfast=self.failfast,\n buffer=self.buffer,\n warnings=self.warnings)\n except TypeError:\n \n testRunner=self.testRunner()\n else:\n \n testRunner=self.testRunner\n self.result=testRunner.run(self.test)\n if self.exit:\n if self.result.testsRun ==0:\n sys.exit(_NO_TESTS_EXITCODE)\n elif self.result.wasSuccessful():\n sys.exit(0)\n else:\n sys.exit(1)\n \n \nmain=TestProgram\n", ["argparse", "os", "sys", "unittest", "unittest.loader", "unittest.runner", "unittest.signals", "warnings"]], "unittest.async_case": [".py", "import asyncio\nimport contextvars\nimport inspect\nimport warnings\n\nfrom.case import TestCase\n\n\nclass IsolatedAsyncioTestCase(TestCase):\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n def __init__(self,methodName='runTest'):\n super().__init__(methodName)\n self._asyncioRunner=None\n self._asyncioTestContext=contextvars.copy_context()\n \n async def asyncSetUp(self):\n pass\n \n async def asyncTearDown(self):\n pass\n \n def addAsyncCleanup(self,func,/,*args,**kwargs):\n \n \n \n \n \n \n \n \n \n \n \n \n self.addCleanup(*(func,*args),**kwargs)\n \n async def enterAsyncContext(self,cm):\n ''\n\n\n\n \n \n \n cls=type(cm)\n try:\n enter=cls.__aenter__\n exit=cls.__aexit__\n except AttributeError:\n raise TypeError(f\"'{cls.__module__}.{cls.__qualname__}' object does \"\n f\"not support the asynchronous context manager protocol\"\n )from None\n result=await enter(cm)\n self.addAsyncCleanup(exit,cm,None,None,None)\n return result\n \n def _callSetUp(self):\n \n \n \n self._asyncioRunner.get_loop()\n self._asyncioTestContext.run(self.setUp)\n self._callAsync(self.asyncSetUp)\n \n def _callTestMethod(self,method):\n if self._callMaybeAsync(method)is not None:\n warnings.warn(f'It is deprecated to return a value that is not None from a '\n f'test case ({method})',DeprecationWarning,stacklevel=4)\n \n def _callTearDown(self):\n self._callAsync(self.asyncTearDown)\n self._asyncioTestContext.run(self.tearDown)\n \n def _callCleanup(self,function,*args,**kwargs):\n self._callMaybeAsync(function,*args,**kwargs)\n \n def _callAsync(self,func,/,*args,**kwargs):\n assert self._asyncioRunner is not None,'asyncio runner is not initialized'\n assert inspect.iscoroutinefunction(func),f'{func !r} is not an async function'\n return self._asyncioRunner.run(\n func(*args,**kwargs),\n context=self._asyncioTestContext\n )\n \n def 
_callMaybeAsync(self,func,/,*args,**kwargs):\n assert self._asyncioRunner is not None,'asyncio runner is not initialized'\n if inspect.iscoroutinefunction(func):\n return self._asyncioRunner.run(\n func(*args,**kwargs),\n context=self._asyncioTestContext,\n )\n else:\n return self._asyncioTestContext.run(func,*args,**kwargs)\n \n def _setupAsyncioRunner(self):\n assert self._asyncioRunner is None,'asyncio runner is already initialized'\n runner=asyncio.Runner(debug=True)\n self._asyncioRunner=runner\n \n def _tearDownAsyncioRunner(self):\n runner=self._asyncioRunner\n runner.close()\n \n def run(self,result=None):\n self._setupAsyncioRunner()\n try:\n return super().run(result)\n finally:\n self._tearDownAsyncioRunner()\n \n def debug(self):\n self._setupAsyncioRunner()\n super().debug()\n self._tearDownAsyncioRunner()\n \n def __del__(self):\n if self._asyncioRunner is not None:\n self._tearDownAsyncioRunner()\n", ["asyncio", "contextvars", "inspect", "unittest.case", "warnings"]], "unittest.__main__": [".py", "''\n\nimport sys\nif sys.argv[0].endswith(\"__main__.py\"):\n import os.path\n \n \n \n \n executable=os.path.basename(sys.executable)\n sys.argv[0]=executable+\" -m unittest\"\n del os\n \n__unittest=True\n\nfrom.main import main\n\nmain(module=None)\n", ["os.path", "sys", "unittest.main"]], "unittest._log": [".py", "import logging\nimport collections\n\nfrom.case import _BaseTestCaseContext\n\n\n_LoggingWatcher=collections.namedtuple(\"_LoggingWatcher\",\n[\"records\",\"output\"])\n\nclass _CapturingHandler(logging.Handler):\n ''\n\n \n \n def __init__(self):\n logging.Handler.__init__(self)\n self.watcher=_LoggingWatcher([],[])\n \n def flush(self):\n pass\n \n def emit(self,record):\n self.watcher.records.append(record)\n msg=self.format(record)\n self.watcher.output.append(msg)\n \n \nclass _AssertLogsContext(_BaseTestCaseContext):\n ''\n \n LOGGING_FORMAT=\"%(levelname)s:%(name)s:%(message)s\"\n \n def __init__(self,test_case,logger_name,level,no_logs):\n _BaseTestCaseContext.__init__(self,test_case)\n self.logger_name=logger_name\n if level:\n self.level=logging._nameToLevel.get(level,level)\n else:\n self.level=logging.INFO\n self.msg=None\n self.no_logs=no_logs\n \n def __enter__(self):\n if isinstance(self.logger_name,logging.Logger):\n logger=self.logger=self.logger_name\n else:\n logger=self.logger=logging.getLogger(self.logger_name)\n formatter=logging.Formatter(self.LOGGING_FORMAT)\n handler=_CapturingHandler()\n handler.setLevel(self.level)\n handler.setFormatter(formatter)\n self.watcher=handler.watcher\n self.old_handlers=logger.handlers[:]\n self.old_level=logger.level\n self.old_propagate=logger.propagate\n logger.handlers=[handler]\n logger.setLevel(self.level)\n logger.propagate=False\n if self.no_logs:\n return\n return handler.watcher\n \n def __exit__(self,exc_type,exc_value,tb):\n self.logger.handlers=self.old_handlers\n self.logger.propagate=self.old_propagate\n self.logger.setLevel(self.old_level)\n \n if exc_type is not None:\n \n return False\n \n if self.no_logs:\n \n if len(self.watcher.records)>0:\n self._raiseFailure(\n \"Unexpected logs found: {!r}\".format(\n self.watcher.output\n )\n )\n \n else:\n \n if len(self.watcher.records)==0:\n self._raiseFailure(\n \"no logs of level {} or higher triggered on {}\"\n .format(logging.getLevelName(self.level),self.logger.name))\n", ["collections", "logging", "unittest.case"]], "unittest.mock": [".py", 
"\n\n\n\n\n\n__all__=(\n'Mock',\n'MagicMock',\n'patch',\n'sentinel',\n'DEFAULT',\n'ANY',\n'call',\n'create_autospec',\n'AsyncMock',\n'FILTER_DIR',\n'NonCallableMock',\n'NonCallableMagicMock',\n'mock_open',\n'PropertyMock',\n'seal',\n)\n\n\nimport asyncio\nimport contextlib\nimport io\nimport inspect\nimport pprint\nimport sys\nimport builtins\nimport pkgutil\nfrom asyncio import iscoroutinefunction\nfrom types import CodeType,ModuleType,MethodType\nfrom unittest.util import safe_repr\nfrom functools import wraps,partial\nfrom threading import RLock\n\n\nclass InvalidSpecError(Exception):\n ''\n \n \n_builtins={name for name in dir(builtins)if not name.startswith('_')}\n\nFILTER_DIR=True\n\n\n\n_safe_super=super\n\ndef _is_async_obj(obj):\n if _is_instance_mock(obj)and not isinstance(obj,AsyncMock):\n return False\n if hasattr(obj,'__func__'):\n obj=getattr(obj,'__func__')\n return iscoroutinefunction(obj)or inspect.isawaitable(obj)\n \n \ndef _is_async_func(func):\n if getattr(func,'__code__',None):\n return iscoroutinefunction(func)\n else:\n return False\n \n \ndef _is_instance_mock(obj):\n\n\n return issubclass(type(obj),NonCallableMock)\n \n \ndef _is_exception(obj):\n return(\n isinstance(obj,BaseException)or\n isinstance(obj,type)and issubclass(obj,BaseException)\n )\n \n \ndef _extract_mock(obj):\n\n\n if isinstance(obj,FunctionTypes)and hasattr(obj,'mock'):\n return obj.mock\n else:\n return obj\n \n \ndef _get_signature_object(func,as_instance,eat_self):\n ''\n\n\n\n \n if isinstance(func,type)and not as_instance:\n \n func=func.__init__\n \n eat_self=True\n elif isinstance(func,(classmethod,staticmethod)):\n if isinstance(func,classmethod):\n \n eat_self=True\n \n func=func.__func__\n elif not isinstance(func,FunctionTypes):\n \n \n try:\n func=func.__call__\n except AttributeError:\n return None\n if eat_self:\n sig_func=partial(func,None)\n else:\n sig_func=func\n try:\n return func,inspect.signature(sig_func)\n except ValueError:\n \n return None\n \n \ndef _check_signature(func,mock,skipfirst,instance=False):\n sig=_get_signature_object(func,instance,skipfirst)\n if sig is None:\n return\n func,sig=sig\n def checksig(self,/,*args,**kwargs):\n sig.bind(*args,**kwargs)\n _copy_func_details(func,checksig)\n type(mock)._mock_check_sig=checksig\n type(mock).__signature__=sig\n \n \ndef _copy_func_details(func,funcopy):\n\n\n for attribute in(\n '__name__','__doc__','__text_signature__',\n '__module__','__defaults__','__kwdefaults__',\n ):\n try:\n setattr(funcopy,attribute,getattr(func,attribute))\n except AttributeError:\n pass\n \n \ndef _callable(obj):\n if isinstance(obj,type):\n return True\n if isinstance(obj,(staticmethod,classmethod,MethodType)):\n return _callable(obj.__func__)\n if getattr(obj,'__call__',None)is not None:\n return True\n return False\n \n \ndef _is_list(obj):\n\n\n return type(obj)in(list,tuple)\n \n \ndef _instance_callable(obj):\n ''\n \n if not isinstance(obj,type):\n \n return getattr(obj,'__call__',None)is not None\n \n \n \n for base in(obj,)+obj.__mro__:\n if base.__dict__.get('__call__')is not None:\n return True\n return False\n \n \ndef _set_signature(mock,original,instance=False):\n\n\n\n\n skipfirst=isinstance(original,type)\n result=_get_signature_object(original,instance,skipfirst)\n if result is None:\n return mock\n func,sig=result\n def checksig(*args,**kwargs):\n sig.bind(*args,**kwargs)\n _copy_func_details(func,checksig)\n \n name=original.__name__\n if not name.isidentifier():\n name='funcopy'\n 
context={'_checksig_':checksig,'mock':mock}\n src=\"\"\"def %s(*args, **kwargs):\n _checksig_(*args, **kwargs)\n return mock(*args, **kwargs)\"\"\"%name\n exec(src,context)\n funcopy=context[name]\n _setup_func(funcopy,mock,sig)\n return funcopy\n \n \ndef _setup_func(funcopy,mock,sig):\n funcopy.mock=mock\n \n def assert_called_with(*args,**kwargs):\n return mock.assert_called_with(*args,**kwargs)\n def assert_called(*args,**kwargs):\n return mock.assert_called(*args,**kwargs)\n def assert_not_called(*args,**kwargs):\n return mock.assert_not_called(*args,**kwargs)\n def assert_called_once(*args,**kwargs):\n return mock.assert_called_once(*args,**kwargs)\n def assert_called_once_with(*args,**kwargs):\n return mock.assert_called_once_with(*args,**kwargs)\n def assert_has_calls(*args,**kwargs):\n return mock.assert_has_calls(*args,**kwargs)\n def assert_any_call(*args,**kwargs):\n return mock.assert_any_call(*args,**kwargs)\n def reset_mock():\n funcopy.method_calls=_CallList()\n funcopy.mock_calls=_CallList()\n mock.reset_mock()\n ret=funcopy.return_value\n if _is_instance_mock(ret)and not ret is mock:\n ret.reset_mock()\n \n funcopy.called=False\n funcopy.call_count=0\n funcopy.call_args=None\n funcopy.call_args_list=_CallList()\n funcopy.method_calls=_CallList()\n funcopy.mock_calls=_CallList()\n \n funcopy.return_value=mock.return_value\n funcopy.side_effect=mock.side_effect\n funcopy._mock_children=mock._mock_children\n \n funcopy.assert_called_with=assert_called_with\n funcopy.assert_called_once_with=assert_called_once_with\n funcopy.assert_has_calls=assert_has_calls\n funcopy.assert_any_call=assert_any_call\n funcopy.reset_mock=reset_mock\n funcopy.assert_called=assert_called\n funcopy.assert_not_called=assert_not_called\n funcopy.assert_called_once=assert_called_once\n funcopy.__signature__=sig\n \n mock._mock_delegate=funcopy\n \n \ndef _setup_async_mock(mock):\n mock._is_coroutine=asyncio.coroutines._is_coroutine\n mock.await_count=0\n mock.await_args=None\n mock.await_args_list=_CallList()\n \n \n \n \n def wrapper(attr,/,*args,**kwargs):\n return getattr(mock.mock,attr)(*args,**kwargs)\n \n for attribute in('assert_awaited',\n 'assert_awaited_once',\n 'assert_awaited_with',\n 'assert_awaited_once_with',\n 'assert_any_await',\n 'assert_has_awaits',\n 'assert_not_awaited'):\n \n \n \n \n \n setattr(mock,attribute,partial(wrapper,attribute))\n \n \ndef _is_magic(name):\n return '__%s__'%name[2:-2]==name\n \n \nclass _SentinelObject(object):\n ''\n def __init__(self,name):\n self.name=name\n \n def __repr__(self):\n return 'sentinel.%s'%self.name\n \n def __reduce__(self):\n return 'sentinel.%s'%self.name\n \n \nclass _Sentinel(object):\n ''\n def __init__(self):\n self._sentinels={}\n \n def __getattr__(self,name):\n if name =='__bases__':\n \n raise AttributeError\n return self._sentinels.setdefault(name,_SentinelObject(name))\n \n def __reduce__(self):\n return 'sentinel'\n \n \nsentinel=_Sentinel()\n\nDEFAULT=sentinel.DEFAULT\n_missing=sentinel.MISSING\n_deleted=sentinel.DELETED\n\n\n_allowed_names={\n'return_value','_mock_return_value','side_effect',\n'_mock_side_effect','_mock_parent','_mock_new_parent',\n'_mock_name','_mock_new_name'\n}\n\n\ndef _delegating_property(name):\n _allowed_names.add(name)\n _the_name='_mock_'+name\n def _get(self,name=name,_the_name=_the_name):\n sig=self._mock_delegate\n if sig is None:\n return getattr(self,_the_name)\n return getattr(sig,name)\n def _set(self,value,name=name,_the_name=_the_name):\n sig=self._mock_delegate\n if sig is None:\n 
self.__dict__[_the_name]=value\n else:\n setattr(sig,name,value)\n \n return property(_get,_set)\n \n \n \nclass _CallList(list):\n\n def __contains__(self,value):\n if not isinstance(value,list):\n return list.__contains__(self,value)\n len_value=len(value)\n len_self=len(self)\n if len_value >len_self:\n return False\n \n for i in range(0,len_self -len_value+1):\n sub_list=self[i:i+len_value]\n if sub_list ==value:\n return True\n return False\n \n def __repr__(self):\n return pprint.pformat(list(self))\n \n \ndef _check_and_set_parent(parent,value,name,new_name):\n value=_extract_mock(value)\n \n if not _is_instance_mock(value):\n return False\n if((value._mock_name or value._mock_new_name)or\n (value._mock_parent is not None)or\n (value._mock_new_parent is not None)):\n return False\n \n _parent=parent\n while _parent is not None:\n \n \n if _parent is value:\n return False\n _parent=_parent._mock_new_parent\n \n if new_name:\n value._mock_new_parent=parent\n value._mock_new_name=new_name\n if name:\n value._mock_parent=parent\n value._mock_name=name\n return True\n \n \nclass _MockIter(object):\n def __init__(self,obj):\n self.obj=iter(obj)\n def __next__(self):\n return next(self.obj)\n \nclass Base(object):\n _mock_return_value=DEFAULT\n _mock_side_effect=None\n def __init__(self,/,*args,**kwargs):\n pass\n \n \n \nclass NonCallableMock(Base):\n ''\n \n \n \n \n \n \n \n _lock=RLock()\n \n def __new__(\n cls,spec=None,wraps=None,name=None,spec_set=None,\n parent=None,_spec_state=None,_new_name='',_new_parent=None,\n _spec_as_instance=False,_eat_self=None,unsafe=False,**kwargs\n ):\n \n \n \n bases=(cls,)\n if not issubclass(cls,AsyncMockMixin):\n \n spec_arg=spec_set or spec\n if spec_arg is not None and _is_async_obj(spec_arg):\n bases=(AsyncMockMixin,cls)\n new=type(cls.__name__,bases,{'__doc__':cls.__doc__})\n instance=_safe_super(NonCallableMock,cls).__new__(new)\n return instance\n \n \n def __init__(\n self,spec=None,wraps=None,name=None,spec_set=None,\n parent=None,_spec_state=None,_new_name='',_new_parent=None,\n _spec_as_instance=False,_eat_self=None,unsafe=False,**kwargs\n ):\n if _new_parent is None:\n _new_parent=parent\n \n __dict__=self.__dict__\n __dict__['_mock_parent']=parent\n __dict__['_mock_name']=name\n __dict__['_mock_new_name']=_new_name\n __dict__['_mock_new_parent']=_new_parent\n __dict__['_mock_sealed']=False\n \n if spec_set is not None:\n spec=spec_set\n spec_set=True\n if _eat_self is None:\n _eat_self=parent is not None\n \n self._mock_add_spec(spec,spec_set,_spec_as_instance,_eat_self)\n \n __dict__['_mock_children']={}\n __dict__['_mock_wraps']=wraps\n __dict__['_mock_delegate']=None\n \n __dict__['_mock_called']=False\n __dict__['_mock_call_args']=None\n __dict__['_mock_call_count']=0\n __dict__['_mock_call_args_list']=_CallList()\n __dict__['_mock_mock_calls']=_CallList()\n \n __dict__['method_calls']=_CallList()\n __dict__['_mock_unsafe']=unsafe\n \n if kwargs:\n self.configure_mock(**kwargs)\n \n _safe_super(NonCallableMock,self).__init__(\n spec,wraps,name,spec_set,parent,\n _spec_state\n )\n \n \n def attach_mock(self,mock,attribute):\n ''\n\n\n \n inner_mock=_extract_mock(mock)\n \n inner_mock._mock_parent=None\n inner_mock._mock_new_parent=None\n inner_mock._mock_name=''\n inner_mock._mock_new_name=None\n \n setattr(self,attribute,mock)\n \n \n def mock_add_spec(self,spec,spec_set=False):\n ''\n\n\n\n \n self._mock_add_spec(spec,spec_set)\n \n \n def _mock_add_spec(self,spec,spec_set,_spec_as_instance=False,\n _eat_self=False):\n if 
_is_instance_mock(spec):\n raise InvalidSpecError(f'Cannot spec a Mock object. [object={spec !r}]')\n \n _spec_class=None\n _spec_signature=None\n _spec_asyncs=[]\n \n if spec is not None and not _is_list(spec):\n if isinstance(spec,type):\n _spec_class=spec\n else:\n _spec_class=type(spec)\n res=_get_signature_object(spec,\n _spec_as_instance,_eat_self)\n _spec_signature=res and res[1]\n \n spec_list=dir(spec)\n \n for attr in spec_list:\n if iscoroutinefunction(getattr(spec,attr,None)):\n _spec_asyncs.append(attr)\n \n spec=spec_list\n \n __dict__=self.__dict__\n __dict__['_spec_class']=_spec_class\n __dict__['_spec_set']=spec_set\n __dict__['_spec_signature']=_spec_signature\n __dict__['_mock_methods']=spec\n __dict__['_spec_asyncs']=_spec_asyncs\n \n def __get_return_value(self):\n ret=self._mock_return_value\n if self._mock_delegate is not None:\n ret=self._mock_delegate.return_value\n \n if ret is DEFAULT:\n ret=self._get_child_mock(\n _new_parent=self,_new_name='()'\n )\n self.return_value=ret\n return ret\n \n \n def __set_return_value(self,value):\n if self._mock_delegate is not None:\n self._mock_delegate.return_value=value\n else:\n self._mock_return_value=value\n _check_and_set_parent(self,value,None,'()')\n \n __return_value_doc=\"The value to be returned when the mock is called.\"\n return_value=property(__get_return_value,__set_return_value,\n __return_value_doc)\n \n \n @property\n def __class__(self):\n if self._spec_class is None:\n return type(self)\n return self._spec_class\n \n called=_delegating_property('called')\n call_count=_delegating_property('call_count')\n call_args=_delegating_property('call_args')\n call_args_list=_delegating_property('call_args_list')\n mock_calls=_delegating_property('mock_calls')\n \n \n def __get_side_effect(self):\n delegated=self._mock_delegate\n if delegated is None:\n return self._mock_side_effect\n sf=delegated.side_effect\n if(sf is not None and not callable(sf)\n and not isinstance(sf,_MockIter)and not _is_exception(sf)):\n sf=_MockIter(sf)\n delegated.side_effect=sf\n return sf\n \n def __set_side_effect(self,value):\n value=_try_iter(value)\n delegated=self._mock_delegate\n if delegated is None:\n self._mock_side_effect=value\n else:\n delegated.side_effect=value\n \n side_effect=property(__get_side_effect,__set_side_effect)\n \n \n def reset_mock(self,visited=None,*,return_value=False,side_effect=False):\n ''\n if visited is None:\n visited=[]\n if id(self)in visited:\n return\n visited.append(id(self))\n \n self.called=False\n self.call_args=None\n self.call_count=0\n self.mock_calls=_CallList()\n self.call_args_list=_CallList()\n self.method_calls=_CallList()\n \n if return_value:\n self._mock_return_value=DEFAULT\n if side_effect:\n self._mock_side_effect=None\n \n for child in self._mock_children.values():\n if isinstance(child,_SpecState)or child is _deleted:\n continue\n child.reset_mock(visited,return_value=return_value,side_effect=side_effect)\n \n ret=self._mock_return_value\n if _is_instance_mock(ret)and ret is not self:\n ret.reset_mock(visited)\n \n \n def configure_mock(self,/,**kwargs):\n ''\n\n\n\n\n\n\n \n for arg,val in sorted(kwargs.items(),\n \n \n \n key=lambda entry:entry[0].count('.')):\n args=arg.split('.')\n final=args.pop()\n obj=self\n for entry in args:\n obj=getattr(obj,entry)\n setattr(obj,final,val)\n \n \n def __getattr__(self,name):\n if name in{'_mock_methods','_mock_unsafe'}:\n raise AttributeError(name)\n elif self._mock_methods is not None:\n if name not in self._mock_methods or name in 
_all_magics:\n raise AttributeError(\"Mock object has no attribute %r\"%name)\n elif _is_magic(name):\n raise AttributeError(name)\n if not self._mock_unsafe and(not self._mock_methods or name not in self._mock_methods):\n if name.startswith(('assert','assret','asert','aseert','assrt'))or name in _ATTRIB_DENY_LIST:\n raise AttributeError(\n f\"{name !r} is not a valid assertion. Use a spec \"\n f\"for the mock if {name !r} is meant to be an attribute.\")\n \n with NonCallableMock._lock:\n result=self._mock_children.get(name)\n if result is _deleted:\n raise AttributeError(name)\n elif result is None:\n wraps=None\n if self._mock_wraps is not None:\n \n \n wraps=getattr(self._mock_wraps,name)\n \n result=self._get_child_mock(\n parent=self,name=name,wraps=wraps,_new_name=name,\n _new_parent=self\n )\n self._mock_children[name]=result\n \n elif isinstance(result,_SpecState):\n try:\n result=create_autospec(\n result.spec,result.spec_set,result.instance,\n result.parent,result.name\n )\n except InvalidSpecError:\n target_name=self.__dict__['_mock_name']or self\n raise InvalidSpecError(\n f'Cannot autospec attr {name !r} from target '\n f'{target_name !r} as it has already been mocked out. '\n f'[target={self !r}, attr={result.spec !r}]')\n self._mock_children[name]=result\n \n return result\n \n \n def _extract_mock_name(self):\n _name_list=[self._mock_new_name]\n _parent=self._mock_new_parent\n last=self\n \n dot='.'\n if _name_list ==['()']:\n dot=''\n \n while _parent is not None:\n last=_parent\n \n _name_list.append(_parent._mock_new_name+dot)\n dot='.'\n if _parent._mock_new_name =='()':\n dot=''\n \n _parent=_parent._mock_new_parent\n \n _name_list=list(reversed(_name_list))\n _first=last._mock_name or 'mock'\n if len(_name_list)>1:\n if _name_list[1]not in('()','().'):\n _first +='.'\n _name_list[0]=_first\n return ''.join(_name_list)\n \n def __repr__(self):\n name=self._extract_mock_name()\n \n name_string=''\n if name not in('mock','mock.'):\n name_string=' name=%r'%name\n \n spec_string=''\n if self._spec_class is not None:\n spec_string=' spec=%r'\n if self._spec_set:\n spec_string=' spec_set=%r'\n spec_string=spec_string %self._spec_class.__name__\n return \"<%s%s%s id='%s'>\"%(\n type(self).__name__,\n name_string,\n spec_string,\n id(self)\n )\n \n \n def __dir__(self):\n ''\n if not FILTER_DIR:\n return object.__dir__(self)\n \n extras=self._mock_methods or[]\n from_type=dir(type(self))\n from_dict=list(self.__dict__)\n from_child_mocks=[\n m_name for m_name,m_value in self._mock_children.items()\n if m_value is not _deleted]\n \n from_type=[e for e in from_type if not e.startswith('_')]\n from_dict=[e for e in from_dict if not e.startswith('_')or\n _is_magic(e)]\n return sorted(set(extras+from_type+from_dict+from_child_mocks))\n \n \n def __setattr__(self,name,value):\n if name in _allowed_names:\n \n return object.__setattr__(self,name,value)\n elif(self._spec_set and self._mock_methods is not None and\n name not in self._mock_methods and\n name not in self.__dict__):\n raise AttributeError(\"Mock object has no attribute '%s'\"%name)\n elif name in _unsupported_magics:\n msg='Attempting to set unsupported magic method %r.'%name\n raise AttributeError(msg)\n elif name in _all_magics:\n if self._mock_methods is not None and name not in self._mock_methods:\n raise AttributeError(\"Mock object has no attribute '%s'\"%name)\n \n if not _is_instance_mock(value):\n setattr(type(self),name,_get_method(name,value))\n original=value\n value=lambda 
*args,**kw:original(self,*args,**kw)\n else:\n \n \n _check_and_set_parent(self,value,None,name)\n setattr(type(self),name,value)\n self._mock_children[name]=value\n elif name =='__class__':\n self._spec_class=value\n return\n else:\n if _check_and_set_parent(self,value,name,name):\n self._mock_children[name]=value\n \n if self._mock_sealed and not hasattr(self,name):\n mock_name=f'{self._extract_mock_name()}.{name}'\n raise AttributeError(f'Cannot set {mock_name}')\n \n return object.__setattr__(self,name,value)\n \n \n def __delattr__(self,name):\n if name in _all_magics and name in type(self).__dict__:\n delattr(type(self),name)\n if name not in self.__dict__:\n \n \n return\n \n obj=self._mock_children.get(name,_missing)\n if name in self.__dict__:\n _safe_super(NonCallableMock,self).__delattr__(name)\n elif obj is _deleted:\n raise AttributeError(name)\n if obj is not _missing:\n del self._mock_children[name]\n self._mock_children[name]=_deleted\n \n \n def _format_mock_call_signature(self,args,kwargs):\n name=self._mock_name or 'mock'\n return _format_call_signature(name,args,kwargs)\n \n \n def _format_mock_failure_message(self,args,kwargs,action='call'):\n message='expected %s not found.\\nExpected: %s\\nActual: %s'\n expected_string=self._format_mock_call_signature(args,kwargs)\n call_args=self.call_args\n actual_string=self._format_mock_call_signature(*call_args)\n return message %(action,expected_string,actual_string)\n \n \n def _get_call_signature_from_name(self,name):\n ''\n\n\n\n\n\n\n\n\n \n if not name:\n return self._spec_signature\n \n sig=None\n names=name.replace('()','').split('.')\n children=self._mock_children\n \n for name in names:\n child=children.get(name)\n if child is None or isinstance(child,_SpecState):\n break\n else:\n \n \n \n child=_extract_mock(child)\n children=child._mock_children\n sig=child._spec_signature\n \n return sig\n \n \n def _call_matcher(self,_call):\n ''\n\n\n\n\n \n \n if isinstance(_call,tuple)and len(_call)>2:\n sig=self._get_call_signature_from_name(_call[0])\n else:\n sig=self._spec_signature\n \n if sig is not None:\n if len(_call)==2:\n name=''\n args,kwargs=_call\n else:\n name,args,kwargs=_call\n try:\n bound_call=sig.bind(*args,**kwargs)\n return call(name,bound_call.args,bound_call.kwargs)\n except TypeError as e:\n return e.with_traceback(None)\n else:\n return _call\n \n def assert_not_called(self):\n ''\n \n if self.call_count !=0:\n msg=(\"Expected '%s' to not have been called. Called %s times.%s\"\n %(self._mock_name or 'mock',\n self.call_count,\n self._calls_repr()))\n raise AssertionError(msg)\n \n def assert_called(self):\n ''\n \n if self.call_count ==0:\n msg=(\"Expected '%s' to have been called.\"%\n (self._mock_name or 'mock'))\n raise AssertionError(msg)\n \n def assert_called_once(self):\n ''\n \n if not self.call_count ==1:\n msg=(\"Expected '%s' to have been called once. 
Called %s times.%s\"\n %(self._mock_name or 'mock',\n self.call_count,\n self._calls_repr()))\n raise AssertionError(msg)\n \n def assert_called_with(self,/,*args,**kwargs):\n ''\n\n\n \n if self.call_args is None:\n expected=self._format_mock_call_signature(args,kwargs)\n actual='not called.'\n error_message=('expected call not found.\\nExpected: %s\\nActual: %s'\n %(expected,actual))\n raise AssertionError(error_message)\n \n def _error_message():\n msg=self._format_mock_failure_message(args,kwargs)\n return msg\n expected=self._call_matcher(_Call((args,kwargs),two=True))\n actual=self._call_matcher(self.call_args)\n if actual !=expected:\n cause=expected if isinstance(expected,Exception)else None\n raise AssertionError(_error_message())from cause\n \n \n def assert_called_once_with(self,/,*args,**kwargs):\n ''\n \n if not self.call_count ==1:\n msg=(\"Expected '%s' to be called once. Called %s times.%s\"\n %(self._mock_name or 'mock',\n self.call_count,\n self._calls_repr()))\n raise AssertionError(msg)\n return self.assert_called_with(*args,**kwargs)\n \n \n def assert_has_calls(self,calls,any_order=False):\n ''\n\n\n\n\n\n\n\n \n expected=[self._call_matcher(c)for c in calls]\n cause=next((e for e in expected if isinstance(e,Exception)),None)\n all_calls=_CallList(self._call_matcher(c)for c in self.mock_calls)\n if not any_order:\n if expected not in all_calls:\n if cause is None:\n problem='Calls not found.'\n else:\n problem=('Error processing expected calls.\\n'\n 'Errors: {}').format(\n [e if isinstance(e,Exception)else None\n for e in expected])\n raise AssertionError(\n f'{problem}\\n'\n f'Expected: {_CallList(calls)}'\n f'{self._calls_repr(prefix=\"Actual\").rstrip(\".\")}'\n )from cause\n return\n \n all_calls=list(all_calls)\n \n not_found=[]\n for kall in expected:\n try:\n all_calls.remove(kall)\n except ValueError:\n not_found.append(kall)\n if not_found:\n raise AssertionError(\n '%r does not contain all of %r in its call list, '\n 'found %r instead'%(self._mock_name or 'mock',\n tuple(not_found),all_calls)\n )from cause\n \n \n def assert_any_call(self,/,*args,**kwargs):\n ''\n\n\n\n \n expected=self._call_matcher(_Call((args,kwargs),two=True))\n cause=expected if isinstance(expected,Exception)else None\n actual=[self._call_matcher(c)for c in self.call_args_list]\n if cause or expected not in _AnyComparer(actual):\n expected_string=self._format_mock_call_signature(args,kwargs)\n raise AssertionError(\n '%s call not found'%expected_string\n )from cause\n \n \n def _get_child_mock(self,/,**kw):\n ''\n\n\n\n\n\n \n if self._mock_sealed:\n attribute=f\".{kw['name']}\"if \"name\"in kw else \"()\"\n mock_name=self._extract_mock_name()+attribute\n raise AttributeError(mock_name)\n \n _new_name=kw.get(\"_new_name\")\n if _new_name in self.__dict__['_spec_asyncs']:\n return AsyncMock(**kw)\n \n _type=type(self)\n if issubclass(_type,MagicMock)and _new_name in _async_method_magics:\n \n klass=AsyncMock\n elif issubclass(_type,AsyncMockMixin):\n if(_new_name in _all_sync_magics or\n self._mock_methods and _new_name in self._mock_methods):\n \n klass=MagicMock\n else:\n klass=AsyncMock\n elif not issubclass(_type,CallableMixin):\n if issubclass(_type,NonCallableMagicMock):\n klass=MagicMock\n elif issubclass(_type,NonCallableMock):\n klass=Mock\n else:\n klass=_type.__mro__[1]\n return klass(**kw)\n \n \n def _calls_repr(self,prefix=\"Calls\"):\n ''\n\n\n\n\n\n \n if not self.mock_calls:\n return \"\"\n return f\"\\n{prefix}: {safe_repr(self.mock_calls)}.\"\n \n \n 
\n_ATTRIB_DENY_LIST=frozenset({\nname.removeprefix(\"assert_\")\nfor name in dir(NonCallableMock)\nif name.startswith(\"assert_\")\n})\n\n\nclass _AnyComparer(list):\n ''\n\n\n \n def __contains__(self,item):\n for _call in self:\n assert len(item)==len(_call)\n if all([\n expected ==actual\n for expected,actual in zip(item,_call)\n ]):\n return True\n return False\n \n \ndef _try_iter(obj):\n if obj is None:\n return obj\n if _is_exception(obj):\n return obj\n if _callable(obj):\n return obj\n try:\n return iter(obj)\n except TypeError:\n \n \n return obj\n \n \nclass CallableMixin(Base):\n\n def __init__(self,spec=None,side_effect=None,return_value=DEFAULT,\n wraps=None,name=None,spec_set=None,parent=None,\n _spec_state=None,_new_name='',_new_parent=None,**kwargs):\n self.__dict__['_mock_return_value']=return_value\n _safe_super(CallableMixin,self).__init__(\n spec,wraps,name,spec_set,parent,\n _spec_state,_new_name,_new_parent,**kwargs\n )\n \n self.side_effect=side_effect\n \n \n def _mock_check_sig(self,/,*args,**kwargs):\n \n pass\n \n \n def __call__(self,/,*args,**kwargs):\n \n \n self._mock_check_sig(*args,**kwargs)\n self._increment_mock_call(*args,**kwargs)\n return self._mock_call(*args,**kwargs)\n \n \n def _mock_call(self,/,*args,**kwargs):\n return self._execute_mock_call(*args,**kwargs)\n \n def _increment_mock_call(self,/,*args,**kwargs):\n self.called=True\n self.call_count +=1\n \n \n \n \n _call=_Call((args,kwargs),two=True)\n self.call_args=_call\n self.call_args_list.append(_call)\n \n \n do_method_calls=self._mock_parent is not None\n method_call_name=self._mock_name\n \n \n mock_call_name=self._mock_new_name\n is_a_call=mock_call_name =='()'\n self.mock_calls.append(_Call(('',args,kwargs)))\n \n \n _new_parent=self._mock_new_parent\n while _new_parent is not None:\n \n \n if do_method_calls:\n _new_parent.method_calls.append(_Call((method_call_name,args,kwargs)))\n do_method_calls=_new_parent._mock_parent is not None\n if do_method_calls:\n method_call_name=_new_parent._mock_name+'.'+method_call_name\n \n \n this_mock_call=_Call((mock_call_name,args,kwargs))\n _new_parent.mock_calls.append(this_mock_call)\n \n if _new_parent._mock_new_name:\n if is_a_call:\n dot=''\n else:\n dot='.'\n is_a_call=_new_parent._mock_new_name =='()'\n mock_call_name=_new_parent._mock_new_name+dot+mock_call_name\n \n \n _new_parent=_new_parent._mock_new_parent\n \n def _execute_mock_call(self,/,*args,**kwargs):\n \n \n \n effect=self.side_effect\n if effect is not None:\n if _is_exception(effect):\n raise effect\n elif not _callable(effect):\n result=next(effect)\n if _is_exception(result):\n raise result\n else:\n result=effect(*args,**kwargs)\n \n if result is not DEFAULT:\n return result\n \n if self._mock_return_value is not DEFAULT:\n return self.return_value\n \n if self._mock_wraps is not None:\n return self._mock_wraps(*args,**kwargs)\n \n return self.return_value\n \n \n \nclass Mock(CallableMixin,NonCallableMock):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \ndef _check_spec_arg_typos(kwargs_to_check):\n typos=(\"autospect\",\"auto_spec\",\"set_spec\")\n for typo in typos:\n if typo in kwargs_to_check:\n raise RuntimeError(\n f\"{typo !r} might be a typo; use unsafe=True if this is intended\"\n )\n \n \nclass _patch(object):\n\n attribute_name=None\n _active_patches=[]\n \n def __init__(\n self,getter,attribute,new,spec,create,\n spec_set,autospec,new_callable,kwargs,*,unsafe=False\n ):\n if 
new_callable is not None:\n if new is not DEFAULT:\n raise ValueError(\n \"Cannot use 'new' and 'new_callable' together\"\n )\n if autospec is not None:\n raise ValueError(\n \"Cannot use 'autospec' and 'new_callable' together\"\n )\n if not unsafe:\n _check_spec_arg_typos(kwargs)\n if _is_instance_mock(spec):\n raise InvalidSpecError(\n f'Cannot spec attr {attribute !r} as the spec '\n f'has already been mocked out. [spec={spec !r}]')\n if _is_instance_mock(spec_set):\n raise InvalidSpecError(\n f'Cannot spec attr {attribute !r} as the spec_set '\n f'target has already been mocked out. [spec_set={spec_set !r}]')\n \n self.getter=getter\n self.attribute=attribute\n self.new=new\n self.new_callable=new_callable\n self.spec=spec\n self.create=create\n self.has_local=False\n self.spec_set=spec_set\n self.autospec=autospec\n self.kwargs=kwargs\n self.additional_patchers=[]\n \n \n def copy(self):\n patcher=_patch(\n self.getter,self.attribute,self.new,self.spec,\n self.create,self.spec_set,\n self.autospec,self.new_callable,self.kwargs\n )\n patcher.attribute_name=self.attribute_name\n patcher.additional_patchers=[\n p.copy()for p in self.additional_patchers\n ]\n return patcher\n \n \n def __call__(self,func):\n if isinstance(func,type):\n return self.decorate_class(func)\n if inspect.iscoroutinefunction(func):\n return self.decorate_async_callable(func)\n return self.decorate_callable(func)\n \n \n def decorate_class(self,klass):\n for attr in dir(klass):\n if not attr.startswith(patch.TEST_PREFIX):\n continue\n \n attr_value=getattr(klass,attr)\n if not hasattr(attr_value,\"__call__\"):\n continue\n \n patcher=self.copy()\n setattr(klass,attr,patcher(attr_value))\n return klass\n \n \n @contextlib.contextmanager\n def decoration_helper(self,patched,args,keywargs):\n extra_args=[]\n with contextlib.ExitStack()as exit_stack:\n for patching in patched.patchings:\n arg=exit_stack.enter_context(patching)\n if patching.attribute_name is not None:\n keywargs.update(arg)\n elif patching.new is DEFAULT:\n extra_args.append(arg)\n \n args +=tuple(extra_args)\n yield(args,keywargs)\n \n \n def decorate_callable(self,func):\n \n if hasattr(func,'patchings'):\n func.patchings.append(self)\n return func\n \n @wraps(func)\n def patched(*args,**keywargs):\n with self.decoration_helper(patched,\n args,\n keywargs)as(newargs,newkeywargs):\n return func(*newargs,**newkeywargs)\n \n patched.patchings=[self]\n return patched\n \n \n def decorate_async_callable(self,func):\n \n if hasattr(func,'patchings'):\n func.patchings.append(self)\n return func\n \n @wraps(func)\n async def patched(*args,**keywargs):\n with self.decoration_helper(patched,\n args,\n keywargs)as(newargs,newkeywargs):\n return await func(*newargs,**newkeywargs)\n \n patched.patchings=[self]\n return patched\n \n \n def get_original(self):\n target=self.getter()\n name=self.attribute\n \n original=DEFAULT\n local=False\n \n try:\n original=target.__dict__[name]\n except(AttributeError,KeyError):\n original=getattr(target,name,DEFAULT)\n else:\n local=True\n \n if name in _builtins and isinstance(target,ModuleType):\n self.create=True\n \n if not self.create and original is DEFAULT:\n raise AttributeError(\n \"%s does not have the attribute %r\"%(target,name)\n )\n return original,local\n \n \n def __enter__(self):\n ''\n new,spec,spec_set=self.new,self.spec,self.spec_set\n autospec,kwargs=self.autospec,self.kwargs\n new_callable=self.new_callable\n self.target=self.getter()\n \n \n if spec is False:\n spec=None\n if spec_set is False:\n 
spec_set=None\n if autospec is False:\n autospec=None\n \n if spec is not None and autospec is not None:\n raise TypeError(\"Can't specify spec and autospec\")\n if((spec is not None or autospec is not None)and\n spec_set not in(True,None)):\n raise TypeError(\"Can't provide explicit spec_set *and* spec or autospec\")\n \n original,local=self.get_original()\n \n if new is DEFAULT and autospec is None:\n inherit=False\n if spec is True:\n \n spec=original\n if spec_set is True:\n spec_set=original\n spec=None\n elif spec is not None:\n if spec_set is True:\n spec_set=spec\n spec=None\n elif spec_set is True:\n spec_set=original\n \n if spec is not None or spec_set is not None:\n if original is DEFAULT:\n raise TypeError(\"Can't use 'spec' with create=True\")\n if isinstance(original,type):\n \n inherit=True\n if spec is None and _is_async_obj(original):\n Klass=AsyncMock\n else:\n Klass=MagicMock\n _kwargs={}\n if new_callable is not None:\n Klass=new_callable\n elif spec is not None or spec_set is not None:\n this_spec=spec\n if spec_set is not None:\n this_spec=spec_set\n if _is_list(this_spec):\n not_callable='__call__'not in this_spec\n else:\n not_callable=not callable(this_spec)\n if _is_async_obj(this_spec):\n Klass=AsyncMock\n elif not_callable:\n Klass=NonCallableMagicMock\n \n if spec is not None:\n _kwargs['spec']=spec\n if spec_set is not None:\n _kwargs['spec_set']=spec_set\n \n \n if(isinstance(Klass,type)and\n issubclass(Klass,NonCallableMock)and self.attribute):\n _kwargs['name']=self.attribute\n \n _kwargs.update(kwargs)\n new=Klass(**_kwargs)\n \n if inherit and _is_instance_mock(new):\n \n \n this_spec=spec\n if spec_set is not None:\n this_spec=spec_set\n if(not _is_list(this_spec)and not\n _instance_callable(this_spec)):\n Klass=NonCallableMagicMock\n \n _kwargs.pop('name')\n new.return_value=Klass(_new_parent=new,_new_name='()',\n **_kwargs)\n elif autospec is not None:\n \n \n \n if new is not DEFAULT:\n raise TypeError(\n \"autospec creates the mock for you. Can't specify \"\n \"autospec and new.\"\n )\n if original is DEFAULT:\n raise TypeError(\"Can't use 'autospec' with create=True\")\n spec_set=bool(spec_set)\n if autospec is True:\n autospec=original\n \n if _is_instance_mock(self.target):\n raise InvalidSpecError(\n f'Cannot autospec attr {self.attribute !r} as the patch '\n f'target has already been mocked out. '\n f'[target={self.target !r}, attr={autospec !r}]')\n if _is_instance_mock(autospec):\n target_name=getattr(self.target,'__name__',self.target)\n raise InvalidSpecError(\n f'Cannot autospec attr {self.attribute !r} from target '\n f'{target_name !r} as it has already been mocked out. 
'\n f'[target={self.target !r}, attr={autospec !r}]')\n \n new=create_autospec(autospec,spec_set=spec_set,\n _name=self.attribute,**kwargs)\n elif kwargs:\n \n \n raise TypeError(\"Can't pass kwargs to a mock we aren't creating\")\n \n new_attr=new\n \n self.temp_original=original\n self.is_local=local\n self._exit_stack=contextlib.ExitStack()\n try:\n setattr(self.target,self.attribute,new_attr)\n if self.attribute_name is not None:\n extra_args={}\n if self.new is DEFAULT:\n extra_args[self.attribute_name]=new\n for patching in self.additional_patchers:\n arg=self._exit_stack.enter_context(patching)\n if patching.new is DEFAULT:\n extra_args.update(arg)\n return extra_args\n \n return new\n except:\n if not self.__exit__(*sys.exc_info()):\n raise\n \n def __exit__(self,*exc_info):\n ''\n if self.is_local and self.temp_original is not DEFAULT:\n setattr(self.target,self.attribute,self.temp_original)\n else:\n delattr(self.target,self.attribute)\n if not self.create and(not hasattr(self.target,self.attribute)or\n self.attribute in('__doc__','__module__',\n '__defaults__','__annotations__',\n '__kwdefaults__')):\n \n setattr(self.target,self.attribute,self.temp_original)\n \n del self.temp_original\n del self.is_local\n del self.target\n exit_stack=self._exit_stack\n del self._exit_stack\n return exit_stack.__exit__(*exc_info)\n \n \n def start(self):\n ''\n result=self.__enter__()\n self._active_patches.append(self)\n return result\n \n \n def stop(self):\n ''\n try:\n self._active_patches.remove(self)\n except ValueError:\n \n return None\n \n return self.__exit__(None,None,None)\n \n \n \ndef _get_target(target):\n try:\n target,attribute=target.rsplit('.',1)\n except(TypeError,ValueError,AttributeError):\n raise TypeError(\n f\"Need a valid target to patch. 
You supplied: {target !r}\")\n return partial(pkgutil.resolve_name,target),attribute\n \n \ndef _patch_object(\ntarget,attribute,new=DEFAULT,spec=None,\ncreate=False,spec_set=None,autospec=None,\nnew_callable=None,*,unsafe=False,**kwargs\n):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if type(target)is str:\n raise TypeError(\n f\"{target !r} must be the actual object to be patched, not a str\"\n )\n getter=lambda:target\n return _patch(\n getter,attribute,new,spec,create,\n spec_set,autospec,new_callable,kwargs,unsafe=unsafe\n )\n \n \ndef _patch_multiple(target,spec=None,create=False,spec_set=None,\nautospec=None,new_callable=None,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if type(target)is str:\n getter=partial(pkgutil.resolve_name,target)\n else:\n getter=lambda:target\n \n if not kwargs:\n raise ValueError(\n 'Must supply at least one keyword argument with patch.multiple'\n )\n \n items=list(kwargs.items())\n attribute,new=items[0]\n patcher=_patch(\n getter,attribute,new,spec,create,spec_set,\n autospec,new_callable,{}\n )\n patcher.attribute_name=attribute\n for attribute,new in items[1:]:\n this_patcher=_patch(\n getter,attribute,new,spec,create,spec_set,\n autospec,new_callable,{}\n )\n this_patcher.attribute_name=attribute\n patcher.additional_patchers.append(this_patcher)\n return patcher\n \n \ndef patch(\ntarget,new=DEFAULT,spec=None,create=False,\nspec_set=None,autospec=None,new_callable=None,*,unsafe=False,**kwargs\n):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n getter,attribute=_get_target(target)\n return _patch(\n getter,attribute,new,spec,create,\n spec_set,autospec,new_callable,kwargs,unsafe=unsafe\n )\n \n \nclass _patch_dict(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,in_dict,values=(),clear=False,**kwargs):\n self.in_dict=in_dict\n \n self.values=dict(values)\n self.values.update(kwargs)\n self.clear=clear\n self._original=None\n \n \n def __call__(self,f):\n if isinstance(f,type):\n return self.decorate_class(f)\n if inspect.iscoroutinefunction(f):\n return self.decorate_async_callable(f)\n return self.decorate_callable(f)\n \n \n def decorate_callable(self,f):\n @wraps(f)\n def _inner(*args,**kw):\n self._patch_dict()\n try:\n return f(*args,**kw)\n finally:\n self._unpatch_dict()\n \n return _inner\n \n \n def decorate_async_callable(self,f):\n @wraps(f)\n async def _inner(*args,**kw):\n self._patch_dict()\n try:\n return await f(*args,**kw)\n finally:\n self._unpatch_dict()\n \n return _inner\n \n \n def decorate_class(self,klass):\n for attr in dir(klass):\n attr_value=getattr(klass,attr)\n if(attr.startswith(patch.TEST_PREFIX)and\n hasattr(attr_value,\"__call__\")):\n decorator=_patch_dict(self.in_dict,self.values,self.clear)\n decorated=decorator(attr_value)\n setattr(klass,attr,decorated)\n return klass\n \n \n def __enter__(self):\n ''\n self._patch_dict()\n return self.in_dict\n \n \n def _patch_dict(self):\n values=self.values\n if isinstance(self.in_dict,str):\n self.in_dict=pkgutil.resolve_name(self.in_dict)\n in_dict=self.in_dict\n clear=self.clear\n \n try:\n original=in_dict.copy()\n except AttributeError:\n \n \n original={}\n for key in in_dict:\n original[key]=in_dict[key]\n self._original=original\n \n if clear:\n _clear_dict(in_dict)\n \n try:\n in_dict.update(values)\n except AttributeError:\n \n for key in values:\n in_dict[key]=values[key]\n \n \n def _unpatch_dict(self):\n 
in_dict=self.in_dict\n original=self._original\n \n _clear_dict(in_dict)\n \n try:\n in_dict.update(original)\n except AttributeError:\n for key in original:\n in_dict[key]=original[key]\n \n \n def __exit__(self,*args):\n ''\n if self._original is not None:\n self._unpatch_dict()\n return False\n \n \n def start(self):\n ''\n result=self.__enter__()\n _patch._active_patches.append(self)\n return result\n \n \n def stop(self):\n ''\n try:\n _patch._active_patches.remove(self)\n except ValueError:\n \n return None\n \n return self.__exit__(None,None,None)\n \n \ndef _clear_dict(in_dict):\n try:\n in_dict.clear()\n except AttributeError:\n keys=list(in_dict)\n for key in keys:\n del in_dict[key]\n \n \ndef _patch_stopall():\n ''\n for patch in reversed(_patch._active_patches):\n patch.stop()\n \n \npatch.object=_patch_object\npatch.dict=_patch_dict\npatch.multiple=_patch_multiple\npatch.stopall=_patch_stopall\npatch.TEST_PREFIX='test'\n\nmagic_methods=(\n\"lt le gt ge eq ne \"\n\"getitem setitem delitem \"\n\"len contains iter \"\n\"hash str sizeof \"\n\"enter exit \"\n\n\n\"divmod rdivmod neg pos abs invert \"\n\"complex int float index \"\n\"round trunc floor ceil \"\n\"bool next \"\n\"fspath \"\n\"aiter \"\n)\n\nnumerics=(\n\"add sub mul matmul truediv floordiv mod lshift rshift and xor or pow\"\n)\ninplace=' '.join('i%s'%n for n in numerics.split())\nright=' '.join('r%s'%n for n in numerics.split())\n\n\n\n\n\n_non_defaults={\n'__get__','__set__','__delete__','__reversed__','__missing__',\n'__reduce__','__reduce_ex__','__getinitargs__','__getnewargs__',\n'__getstate__','__setstate__','__getformat__',\n'__repr__','__dir__','__subclasses__','__format__',\n'__getnewargs_ex__',\n}\n\n\ndef _get_method(name,func):\n ''\n def method(self,/,*args,**kw):\n return func(self,*args,**kw)\n method.__name__=name\n return method\n \n \n_magics={\n'__%s__'%method for method in\n' '.join([magic_methods,numerics,inplace,right]).split()\n}\n\n\n_async_method_magics={\"__aenter__\",\"__aexit__\",\"__anext__\"}\n\n_sync_async_magics={\"__aiter__\"}\n_async_magics=_async_method_magics |_sync_async_magics\n\n_all_sync_magics=_magics |_non_defaults\n_all_magics=_all_sync_magics |_async_magics\n\n_unsupported_magics={\n'__getattr__','__setattr__',\n'__init__','__new__','__prepare__',\n'__instancecheck__','__subclasscheck__',\n'__del__'\n}\n\n_calculate_return_value={\n'__hash__':lambda self:object.__hash__(self),\n'__str__':lambda self:object.__str__(self),\n'__sizeof__':lambda self:object.__sizeof__(self),\n'__fspath__':lambda self:f\"{type(self).__name__}/{self._extract_mock_name()}/{id(self)}\",\n}\n\n_return_values={\n'__lt__':NotImplemented,\n'__gt__':NotImplemented,\n'__le__':NotImplemented,\n'__ge__':NotImplemented,\n'__int__':1,\n'__contains__':False,\n'__len__':0,\n'__exit__':False,\n'__complex__':1j,\n'__float__':1.0,\n'__bool__':True,\n'__index__':1,\n'__aexit__':False,\n}\n\n\ndef _get_eq(self):\n def __eq__(other):\n ret_val=self.__eq__._mock_return_value\n if ret_val is not DEFAULT:\n return ret_val\n if self is other:\n return True\n return NotImplemented\n return __eq__\n \ndef _get_ne(self):\n def __ne__(other):\n if self.__ne__._mock_return_value is not DEFAULT:\n return DEFAULT\n if self is other:\n return False\n return NotImplemented\n return __ne__\n \ndef _get_iter(self):\n def __iter__():\n ret_val=self.__iter__._mock_return_value\n if ret_val is DEFAULT:\n return iter([])\n \n \n return iter(ret_val)\n return __iter__\n \ndef _get_async_iter(self):\n def __aiter__():\n 
ret_val=self.__aiter__._mock_return_value\n if ret_val is DEFAULT:\n return _AsyncIterator(iter([]))\n return _AsyncIterator(iter(ret_val))\n return __aiter__\n \n_side_effect_methods={\n'__eq__':_get_eq,\n'__ne__':_get_ne,\n'__iter__':_get_iter,\n'__aiter__':_get_async_iter\n}\n\n\n\ndef _set_return_value(mock,method,name):\n fixed=_return_values.get(name,DEFAULT)\n if fixed is not DEFAULT:\n method.return_value=fixed\n return\n \n return_calculator=_calculate_return_value.get(name)\n if return_calculator is not None:\n return_value=return_calculator(mock)\n method.return_value=return_value\n return\n \n side_effector=_side_effect_methods.get(name)\n if side_effector is not None:\n method.side_effect=side_effector(mock)\n \n \n \nclass MagicMixin(Base):\n def __init__(self,/,*args,**kw):\n self._mock_set_magics()\n _safe_super(MagicMixin,self).__init__(*args,**kw)\n self._mock_set_magics()\n \n \n def _mock_set_magics(self):\n orig_magics=_magics |_async_method_magics\n these_magics=orig_magics\n \n if getattr(self,\"_mock_methods\",None)is not None:\n these_magics=orig_magics.intersection(self._mock_methods)\n \n remove_magics=set()\n remove_magics=orig_magics -these_magics\n \n for entry in remove_magics:\n if entry in type(self).__dict__:\n \n delattr(self,entry)\n \n \n these_magics=these_magics -set(type(self).__dict__)\n \n _type=type(self)\n for entry in these_magics:\n setattr(_type,entry,MagicProxy(entry,self))\n \n \n \nclass NonCallableMagicMock(MagicMixin,NonCallableMock):\n ''\n def mock_add_spec(self,spec,spec_set=False):\n ''\n\n\n\n \n self._mock_add_spec(spec,spec_set)\n self._mock_set_magics()\n \n \nclass AsyncMagicMixin(MagicMixin):\n pass\n \n \nclass MagicMock(MagicMixin,Mock):\n ''\n\n\n\n\n\n\n\n\n \n def mock_add_spec(self,spec,spec_set=False):\n ''\n\n\n\n \n self._mock_add_spec(spec,spec_set)\n self._mock_set_magics()\n \n \n \nclass MagicProxy(Base):\n def __init__(self,name,parent):\n self.name=name\n self.parent=parent\n \n def create_mock(self):\n entry=self.name\n parent=self.parent\n m=parent._get_child_mock(name=entry,_new_name=entry,\n _new_parent=parent)\n setattr(parent,entry,m)\n _set_return_value(parent,m,entry)\n return m\n \n def __get__(self,obj,_type=None):\n return self.create_mock()\n \n \n_CODE_ATTRS=dir(CodeType)\n_CODE_SIG=inspect.signature(partial(CodeType.__init__,None))\n\n\nclass AsyncMockMixin(Base):\n await_count=_delegating_property('await_count')\n await_args=_delegating_property('await_args')\n await_args_list=_delegating_property('await_args_list')\n \n def __init__(self,/,*args,**kwargs):\n super().__init__(*args,**kwargs)\n \n \n \n \n \n \n self.__dict__['_is_coroutine']=asyncio.coroutines._is_coroutine\n self.__dict__['_mock_await_count']=0\n self.__dict__['_mock_await_args']=None\n self.__dict__['_mock_await_args_list']=_CallList()\n code_mock=NonCallableMock(spec_set=_CODE_ATTRS)\n code_mock.__dict__[\"_spec_class\"]=CodeType\n code_mock.__dict__[\"_spec_signature\"]=_CODE_SIG\n code_mock.co_flags=(\n inspect.CO_COROUTINE\n +inspect.CO_VARARGS\n +inspect.CO_VARKEYWORDS\n )\n code_mock.co_argcount=0\n code_mock.co_varnames=('args','kwargs')\n code_mock.co_posonlyargcount=0\n code_mock.co_kwonlyargcount=0\n self.__dict__['__code__']=code_mock\n self.__dict__['__name__']='AsyncMock'\n self.__dict__['__defaults__']=tuple()\n self.__dict__['__kwdefaults__']={}\n self.__dict__['__annotations__']=None\n \n async def _execute_mock_call(self,/,*args,**kwargs):\n \n \n \n _call=_Call((args,kwargs),two=True)\n self.await_count +=1\n 
self.await_args=_call\n self.await_args_list.append(_call)\n \n effect=self.side_effect\n if effect is not None:\n if _is_exception(effect):\n raise effect\n elif not _callable(effect):\n try:\n result=next(effect)\n except StopIteration:\n \n \n raise StopAsyncIteration\n if _is_exception(result):\n raise result\n elif iscoroutinefunction(effect):\n result=await effect(*args,**kwargs)\n else:\n result=effect(*args,**kwargs)\n \n if result is not DEFAULT:\n return result\n \n if self._mock_return_value is not DEFAULT:\n return self.return_value\n \n if self._mock_wraps is not None:\n if iscoroutinefunction(self._mock_wraps):\n return await self._mock_wraps(*args,**kwargs)\n return self._mock_wraps(*args,**kwargs)\n \n return self.return_value\n \n def assert_awaited(self):\n ''\n\n \n if self.await_count ==0:\n msg=f\"Expected {self._mock_name or 'mock'} to have been awaited.\"\n raise AssertionError(msg)\n \n def assert_awaited_once(self):\n ''\n\n \n if not self.await_count ==1:\n msg=(f\"Expected {self._mock_name or 'mock'} to have been awaited once.\"\n f\" Awaited {self.await_count} times.\")\n raise AssertionError(msg)\n \n def assert_awaited_with(self,/,*args,**kwargs):\n ''\n\n \n if self.await_args is None:\n expected=self._format_mock_call_signature(args,kwargs)\n raise AssertionError(f'Expected await: {expected}\\nNot awaited')\n \n def _error_message():\n msg=self._format_mock_failure_message(args,kwargs,action='await')\n return msg\n \n expected=self._call_matcher(_Call((args,kwargs),two=True))\n actual=self._call_matcher(self.await_args)\n if actual !=expected:\n cause=expected if isinstance(expected,Exception)else None\n raise AssertionError(_error_message())from cause\n \n def assert_awaited_once_with(self,/,*args,**kwargs):\n ''\n\n\n \n if not self.await_count ==1:\n msg=(f\"Expected {self._mock_name or 'mock'} to have been awaited once.\"\n f\" Awaited {self.await_count} times.\")\n raise AssertionError(msg)\n return self.assert_awaited_with(*args,**kwargs)\n \n def assert_any_await(self,/,*args,**kwargs):\n ''\n\n \n expected=self._call_matcher(_Call((args,kwargs),two=True))\n cause=expected if isinstance(expected,Exception)else None\n actual=[self._call_matcher(c)for c in self.await_args_list]\n if cause or expected not in _AnyComparer(actual):\n expected_string=self._format_mock_call_signature(args,kwargs)\n raise AssertionError(\n '%s await not found'%expected_string\n )from cause\n \n def assert_has_awaits(self,calls,any_order=False):\n ''\n\n\n\n\n\n\n\n\n\n \n expected=[self._call_matcher(c)for c in calls]\n cause=next((e for e in expected if isinstance(e,Exception)),None)\n all_awaits=_CallList(self._call_matcher(c)for c in self.await_args_list)\n if not any_order:\n if expected not in all_awaits:\n if cause is None:\n problem='Awaits not found.'\n else:\n problem=('Error processing expected awaits.\\n'\n 'Errors: {}').format(\n [e if isinstance(e,Exception)else None\n for e in expected])\n raise AssertionError(\n f'{problem}\\n'\n f'Expected: {_CallList(calls)}\\n'\n f'Actual: {self.await_args_list}'\n )from cause\n return\n \n all_awaits=list(all_awaits)\n \n not_found=[]\n for kall in expected:\n try:\n all_awaits.remove(kall)\n except ValueError:\n not_found.append(kall)\n if not_found:\n raise AssertionError(\n '%r not all found in await list'%(tuple(not_found),)\n )from cause\n \n def assert_not_awaited(self):\n ''\n\n \n if self.await_count !=0:\n msg=(f\"Expected {self._mock_name or 'mock'} to not have been awaited.\"\n f\" Awaited {self.await_count} 
times.\")\n raise AssertionError(msg)\n \n def reset_mock(self,/,*args,**kwargs):\n ''\n\n \n super().reset_mock(*args,**kwargs)\n self.await_count=0\n self.await_args=None\n self.await_args_list=_CallList()\n \n \nclass AsyncMock(AsyncMockMixin,AsyncMagicMixin,Mock):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \nclass _ANY(object):\n ''\n \n def __eq__(self,other):\n return True\n \n def __ne__(self,other):\n return False\n \n def __repr__(self):\n return ''\n \nANY=_ANY()\n\n\n\ndef _format_call_signature(name,args,kwargs):\n message='%s(%%s)'%name\n formatted_args=''\n args_string=', '.join([repr(arg)for arg in args])\n kwargs_string=', '.join([\n '%s=%r'%(key,value)for key,value in kwargs.items()\n ])\n if args_string:\n formatted_args=args_string\n if kwargs_string:\n if formatted_args:\n formatted_args +=', '\n formatted_args +=kwargs_string\n \n return message %formatted_args\n \n \n \nclass _Call(tuple):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __new__(cls,value=(),name='',parent=None,two=False,\n from_kall=True):\n args=()\n kwargs={}\n _len=len(value)\n if _len ==3:\n name,args,kwargs=value\n elif _len ==2:\n first,second=value\n if isinstance(first,str):\n name=first\n if isinstance(second,tuple):\n args=second\n else:\n kwargs=second\n else:\n args,kwargs=first,second\n elif _len ==1:\n value,=value\n if isinstance(value,str):\n name=value\n elif isinstance(value,tuple):\n args=value\n else:\n kwargs=value\n \n if two:\n return tuple.__new__(cls,(args,kwargs))\n \n return tuple.__new__(cls,(name,args,kwargs))\n \n \n def __init__(self,value=(),name=None,parent=None,two=False,\n from_kall=True):\n self._mock_name=name\n self._mock_parent=parent\n self._mock_from_kall=from_kall\n \n \n def __eq__(self,other):\n try:\n len_other=len(other)\n except TypeError:\n return NotImplemented\n \n self_name=''\n if len(self)==2:\n self_args,self_kwargs=self\n else:\n self_name,self_args,self_kwargs=self\n \n if(getattr(self,'_mock_parent',None)and getattr(other,'_mock_parent',None)\n and self._mock_parent !=other._mock_parent):\n return False\n \n other_name=''\n if len_other ==0:\n other_args,other_kwargs=(),{}\n elif len_other ==3:\n other_name,other_args,other_kwargs=other\n elif len_other ==1:\n value,=other\n if isinstance(value,tuple):\n other_args=value\n other_kwargs={}\n elif isinstance(value,str):\n other_name=value\n other_args,other_kwargs=(),{}\n else:\n other_args=()\n other_kwargs=value\n elif len_other ==2:\n \n first,second=other\n if isinstance(first,str):\n other_name=first\n if isinstance(second,tuple):\n other_args,other_kwargs=second,{}\n else:\n other_args,other_kwargs=(),second\n else:\n other_args,other_kwargs=first,second\n else:\n return False\n \n if self_name and other_name !=self_name:\n return False\n \n \n return(other_args,other_kwargs)==(self_args,self_kwargs)\n \n \n __ne__=object.__ne__\n \n \n def __call__(self,/,*args,**kwargs):\n if self._mock_name is None:\n return _Call(('',args,kwargs),name='()')\n \n name=self._mock_name+'()'\n return _Call((self._mock_name,args,kwargs),name=name,parent=self)\n \n \n def __getattr__(self,attr):\n if self._mock_name is None:\n return _Call(name=attr,from_kall=False)\n name='%s.%s'%(self._mock_name,attr)\n return _Call(name=name,parent=self,from_kall=False)\n \n \n def __getattribute__(self,attr):\n if attr in tuple.__dict__:\n raise AttributeError\n return tuple.__getattribute__(self,attr)\n \n \n def _get_call_arguments(self):\n if len(self)==2:\n args,kwargs=self\n 
else:\n name,args,kwargs=self\n \n return args,kwargs\n \n @property\n def args(self):\n return self._get_call_arguments()[0]\n \n @property\n def kwargs(self):\n return self._get_call_arguments()[1]\n \n def __repr__(self):\n if not self._mock_from_kall:\n name=self._mock_name or 'call'\n if name.startswith('()'):\n name='call%s'%name\n return name\n \n if len(self)==2:\n name='call'\n args,kwargs=self\n else:\n name,args,kwargs=self\n if not name:\n name='call'\n elif not name.startswith('()'):\n name='call.%s'%name\n else:\n name='call%s'%name\n return _format_call_signature(name,args,kwargs)\n \n \n def call_list(self):\n ''\n\n \n vals=[]\n thing=self\n while thing is not None:\n if thing._mock_from_kall:\n vals.append(thing)\n thing=thing._mock_parent\n return _CallList(reversed(vals))\n \n \ncall=_Call(from_kall=False)\n\n\ndef create_autospec(spec,spec_set=False,instance=False,_parent=None,\n_name=None,*,unsafe=False,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _is_list(spec):\n \n \n spec=type(spec)\n \n is_type=isinstance(spec,type)\n if _is_instance_mock(spec):\n raise InvalidSpecError(f'Cannot autospec a Mock object. '\n f'[object={spec !r}]')\n is_async_func=_is_async_func(spec)\n _kwargs={'spec':spec}\n if spec_set:\n _kwargs={'spec_set':spec}\n elif spec is None:\n \n _kwargs={}\n if _kwargs and instance:\n _kwargs['_spec_as_instance']=True\n if not unsafe:\n _check_spec_arg_typos(kwargs)\n \n _kwargs.update(kwargs)\n \n Klass=MagicMock\n if inspect.isdatadescriptor(spec):\n \n \n _kwargs={}\n elif is_async_func:\n if instance:\n raise RuntimeError(\"Instance can not be True when create_autospec \"\n \"is mocking an async function\")\n Klass=AsyncMock\n elif not _callable(spec):\n Klass=NonCallableMagicMock\n elif is_type and instance and not _instance_callable(spec):\n Klass=NonCallableMagicMock\n \n _name=_kwargs.pop('name',_name)\n \n _new_name=_name\n if _parent is None:\n \n _new_name=''\n \n mock=Klass(parent=_parent,_new_parent=_parent,_new_name=_new_name,\n name=_name,**_kwargs)\n \n if isinstance(spec,FunctionTypes):\n \n \n mock=_set_signature(mock,spec)\n if is_async_func:\n _setup_async_mock(mock)\n else:\n _check_signature(spec,mock,is_type,instance)\n \n if _parent is not None and not instance:\n _parent._mock_children[_name]=mock\n \n if is_type and not instance and 'return_value'not in kwargs:\n mock.return_value=create_autospec(spec,spec_set,instance=True,\n _name='()',_parent=mock)\n \n for entry in dir(spec):\n if _is_magic(entry):\n \n continue\n \n \n \n \n \n \n \n \n \n \n try:\n original=getattr(spec,entry)\n except AttributeError:\n continue\n \n kwargs={'spec':original}\n if spec_set:\n kwargs={'spec_set':original}\n \n if not isinstance(original,FunctionTypes):\n new=_SpecState(original,spec_set,mock,entry,instance)\n mock._mock_children[entry]=new\n else:\n parent=mock\n if isinstance(spec,FunctionTypes):\n parent=mock.mock\n \n skipfirst=_must_skip(spec,entry,is_type)\n kwargs['_eat_self']=skipfirst\n if iscoroutinefunction(original):\n child_klass=AsyncMock\n else:\n child_klass=MagicMock\n new=child_klass(parent=parent,name=entry,_new_name=entry,\n _new_parent=parent,\n **kwargs)\n mock._mock_children[entry]=new\n new.return_value=child_klass()\n _check_signature(original,new,skipfirst=skipfirst)\n \n \n \n \n \n if isinstance(new,FunctionTypes):\n setattr(mock,entry,new)\n \n return mock\n \n \ndef _must_skip(spec,entry,is_type):\n ''\n\n\n \n if not isinstance(spec,type):\n if entry in getattr(spec,'__dict__',{}):\n \n return 
False\n spec=spec.__class__\n \n for klass in spec.__mro__:\n result=klass.__dict__.get(entry,DEFAULT)\n if result is DEFAULT:\n continue\n if isinstance(result,(staticmethod,classmethod)):\n return False\n elif isinstance(result,FunctionTypes):\n \n \n return is_type\n else:\n return False\n \n \n return is_type\n \n \nclass _SpecState(object):\n\n def __init__(self,spec,spec_set=False,parent=None,\n name=None,ids=None,instance=False):\n self.spec=spec\n self.ids=ids\n self.spec_set=spec_set\n self.parent=parent\n self.instance=instance\n self.name=name\n \n \nFunctionTypes=(\n\ntype(create_autospec),\n\ntype(ANY.__eq__),\n)\n\n\nfile_spec=None\nopen_spec=None\n\n\ndef _to_stream(read_data):\n if isinstance(read_data,bytes):\n return io.BytesIO(read_data)\n else:\n return io.StringIO(read_data)\n \n \ndef mock_open(mock=None,read_data=''):\n ''\n\n\n\n\n\n\n\n\n\n \n _read_data=_to_stream(read_data)\n _state=[_read_data,None]\n \n def _readlines_side_effect(*args,**kwargs):\n if handle.readlines.return_value is not None:\n return handle.readlines.return_value\n return _state[0].readlines(*args,**kwargs)\n \n def _read_side_effect(*args,**kwargs):\n if handle.read.return_value is not None:\n return handle.read.return_value\n return _state[0].read(*args,**kwargs)\n \n def _readline_side_effect(*args,**kwargs):\n yield from _iter_side_effect()\n while True:\n yield _state[0].readline(*args,**kwargs)\n \n def _iter_side_effect():\n if handle.readline.return_value is not None:\n while True:\n yield handle.readline.return_value\n for line in _state[0]:\n yield line\n \n def _next_side_effect():\n if handle.readline.return_value is not None:\n return handle.readline.return_value\n return next(_state[0])\n \n global file_spec\n if file_spec is None:\n import _io\n file_spec=list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))\n \n global open_spec\n if open_spec is None:\n import _io\n open_spec=list(set(dir(_io.open)))\n if mock is None:\n mock=MagicMock(name='open',spec=open_spec)\n \n handle=MagicMock(spec=file_spec)\n handle.__enter__.return_value=handle\n \n handle.write.return_value=None\n handle.read.return_value=None\n handle.readline.return_value=None\n handle.readlines.return_value=None\n \n handle.read.side_effect=_read_side_effect\n _state[1]=_readline_side_effect()\n handle.readline.side_effect=_state[1]\n handle.readlines.side_effect=_readlines_side_effect\n handle.__iter__.side_effect=_iter_side_effect\n handle.__next__.side_effect=_next_side_effect\n \n def reset_data(*args,**kwargs):\n _state[0]=_to_stream(read_data)\n if handle.readline.side_effect ==_state[1]:\n \n _state[1]=_readline_side_effect()\n handle.readline.side_effect=_state[1]\n return DEFAULT\n \n mock.side_effect=reset_data\n mock.return_value=handle\n return mock\n \n \nclass PropertyMock(Mock):\n ''\n\n\n\n\n\n\n \n def _get_child_mock(self,/,**kwargs):\n return MagicMock(**kwargs)\n \n def __get__(self,obj,obj_type=None):\n return self()\n def __set__(self,obj,val):\n self(val)\n \n \ndef seal(mock):\n ''\n\n\n\n\n\n\n\n \n mock._mock_sealed=True\n for attr in dir(mock):\n try:\n m=getattr(mock,attr)\n except AttributeError:\n continue\n if not isinstance(m,NonCallableMock):\n continue\n if isinstance(m._mock_children.get(attr),_SpecState):\n continue\n if m._mock_new_parent is mock:\n seal(m)\n \n \nclass _AsyncIterator:\n ''\n\n \n def __init__(self,iterator):\n self.iterator=iterator\n code_mock=NonCallableMock(spec_set=CodeType)\n code_mock.co_flags=inspect.CO_ITERABLE_COROUTINE\n 
self.__dict__['__code__']=code_mock\n \n async def __anext__(self):\n try:\n return next(self.iterator)\n except StopIteration:\n pass\n raise StopAsyncIteration\n", ["_io", "asyncio", "builtins", "contextlib", "functools", "inspect", "io", "pkgutil", "pprint", "sys", "threading", "types", "unittest.util"]], "multiprocessing.util": [".py", "\n\n\n\n\n\n\n\n\nimport sys\nimport functools\nimport os\nimport itertools\nimport weakref\nimport atexit\nimport threading\n\nfrom subprocess import _args_from_interpreter_flags\n\nfrom multiprocessing.process import current_process,active_children\n\n__all__=[\n'sub_debug','debug','info','sub_warning','get_logger',\n'log_to_stderr','get_temp_dir','register_after_fork',\n'is_exiting','Finalize','ForkAwareThreadLock','ForkAwareLocal',\n'SUBDEBUG','SUBWARNING',\n]\n\n\n\n\n\nNOTSET=0\nSUBDEBUG=5\nDEBUG=10\nINFO=20\nSUBWARNING=25\n\nLOGGER_NAME='multiprocessing'\nDEFAULT_LOGGING_FORMAT='[%(levelname)s/%(processName)s] %(message)s'\n\n_logger=None\n_log_to_stderr=False\n\ndef sub_debug(msg,*args):\n if _logger:\n _logger.log(SUBDEBUG,msg,*args)\n \ndef debug(msg,*args):\n if _logger:\n _logger.log(DEBUG,msg,*args)\n \ndef info(msg,*args):\n if _logger:\n _logger.log(INFO,msg,*args)\n \ndef sub_warning(msg,*args):\n if _logger:\n _logger.log(SUBWARNING,msg,*args)\n \ndef get_logger():\n ''\n\n \n global _logger\n import logging\n \n logging._acquireLock()\n try:\n if not _logger:\n \n _logger=logging.getLogger(LOGGER_NAME)\n _logger.propagate=0\n logging.addLevelName(SUBDEBUG,'SUBDEBUG')\n logging.addLevelName(SUBWARNING,'SUBWARNING')\n \n \n if hasattr(atexit,'unregister'):\n atexit.unregister(_exit_function)\n atexit.register(_exit_function)\n else:\n atexit._exithandlers.remove((_exit_function,(),{}))\n atexit._exithandlers.append((_exit_function,(),{}))\n \n finally:\n logging._releaseLock()\n \n return _logger\n \ndef log_to_stderr(level=None):\n ''\n\n \n global _log_to_stderr\n import logging\n \n logger=get_logger()\n formatter=logging.Formatter(DEFAULT_LOGGING_FORMAT)\n handler=logging.StreamHandler()\n handler.setFormatter(formatter)\n logger.addHandler(handler)\n \n if level:\n logger.setLevel(level)\n _log_to_stderr=True\n return _logger\n \n \n \n \n \ndef get_temp_dir():\n\n if current_process()._tempdir is None:\n import shutil,tempfile\n tempdir=tempfile.mkdtemp(prefix='pymp-')\n info('created temp directory %s',tempdir)\n Finalize(None,shutil.rmtree,args=[tempdir],exitpriority=-100)\n current_process()._tempdir=tempdir\n return current_process()._tempdir\n \n \n \n \n \n_afterfork_registry=weakref.WeakValueDictionary()\n_afterfork_counter=itertools.count()\n\ndef _run_after_forkers():\n items=list(_afterfork_registry.items())\n items.sort()\n for(index,ident,func),obj in items:\n try:\n func(obj)\n except Exception as e:\n info('after forker raised exception %s',e)\n \ndef register_after_fork(obj,func):\n _afterfork_registry[(next(_afterfork_counter),id(obj),func)]=obj\n \n \n \n \n \n_finalizer_registry={}\n_finalizer_counter=itertools.count()\n\n\nclass Finalize(object):\n ''\n\n \n def __init__(self,obj,callback,args=(),kwargs=None,exitpriority=None):\n assert exitpriority is None or type(exitpriority)is int\n \n if obj is not None:\n self._weakref=weakref.ref(obj,self)\n else:\n assert exitpriority is not None\n \n self._callback=callback\n self._args=args\n self._kwargs=kwargs or{}\n self._key=(exitpriority,next(_finalizer_counter))\n self._pid=os.getpid()\n \n _finalizer_registry[self._key]=self\n \n def __call__(self,wr=None,\n \n 
\n _finalizer_registry=_finalizer_registry,\n sub_debug=sub_debug,getpid=os.getpid):\n ''\n\n \n try:\n del _finalizer_registry[self._key]\n except KeyError:\n sub_debug('finalizer no longer registered')\n else:\n if self._pid !=getpid():\n sub_debug('finalizer ignored because different process')\n res=None\n else:\n sub_debug('finalizer calling %s with args %s and kwargs %s',\n self._callback,self._args,self._kwargs)\n res=self._callback(*self._args,**self._kwargs)\n self._weakref=self._callback=self._args=\\\n self._kwargs=self._key=None\n return res\n \n def cancel(self):\n ''\n\n \n try:\n del _finalizer_registry[self._key]\n except KeyError:\n pass\n else:\n self._weakref=self._callback=self._args=\\\n self._kwargs=self._key=None\n \n def still_active(self):\n ''\n\n \n return self._key in _finalizer_registry\n \n def __repr__(self):\n try:\n obj=self._weakref()\n except(AttributeError,TypeError):\n obj=None\n \n if obj is None:\n return ''\n \n x=''\n \n \ndef _run_finalizers(minpriority=None):\n ''\n\n\n\n\n \n if _finalizer_registry is None:\n \n \n \n return\n \n if minpriority is None:\n f=lambda p:p[0][0]is not None\n else:\n f=lambda p:p[0][0]is not None and p[0][0]>=minpriority\n \n items=[x for x in list(_finalizer_registry.items())if f(x)]\n items.sort(reverse=True)\n \n for key,finalizer in items:\n sub_debug('calling %s',finalizer)\n try:\n finalizer()\n except Exception:\n import traceback\n traceback.print_exc()\n \n if minpriority is None:\n _finalizer_registry.clear()\n \n \n \n \n \ndef is_exiting():\n ''\n\n \n return _exiting or _exiting is None\n \n_exiting=False\n\ndef _exit_function(info=info,debug=debug,_run_finalizers=_run_finalizers,\nactive_children=active_children,\ncurrent_process=current_process):\n\n\n\n\n global _exiting\n \n if not _exiting:\n _exiting=True\n \n info('process shutting down')\n debug('running all \"atexit\" finalizers with priority >= 0')\n _run_finalizers(0)\n \n if current_process()is not None:\n \n \n \n \n \n \n \n \n \n \n \n \n \n for p in active_children():\n if p._daemonic:\n info('calling terminate() for daemon %s',p.name)\n p._popen.terminate()\n \n for p in active_children():\n info('calling join() for process %s',p.name)\n p.join()\n \n debug('running the remaining \"atexit\" finalizers')\n _run_finalizers()\n \natexit.register(_exit_function)\n\n\n\n\n\nclass ForkAwareThreadLock(object):\n def __init__(self):\n self._reset()\n register_after_fork(self,ForkAwareThreadLock._reset)\n \n def _reset(self):\n self._lock=threading.Lock()\n self.acquire=self._lock.acquire\n self.release=self._lock.release\n \nclass ForkAwareLocal(threading.local):\n def __init__(self):\n register_after_fork(self,lambda obj:obj.__dict__.clear())\n def __reduce__(self):\n return type(self),()\n", ["atexit", "functools", "itertools", "logging", "multiprocessing.process", "os", "shutil", "subprocess", "sys", "tempfile", "threading", "traceback", "weakref"]], "multiprocessing": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__='0.70a1'\n\n__all__=[\n'Process','current_process','active_children','freeze_support',\n'Manager','Pipe','cpu_count','log_to_stderr','get_logger',\n'allow_connection_pickling','BufferTooShort','TimeoutError',\n'Lock','RLock','Semaphore','BoundedSemaphore','Condition',\n'Event','Barrier','Queue','SimpleQueue','JoinableQueue','Pool',\n'Value','Array','RawValue','RawArray','SUBDEBUG','SUBWARNING',\n]\n\n__author__='R. 
Oudkerk (r.m.oudkerk@gmail.com)'\n\n\n\n\n\nimport os\nimport sys\n\nfrom multiprocessing.process import Process,current_process,active_children\nfrom multiprocessing.util import SUBDEBUG,SUBWARNING\n\n\n\n\n\nclass ProcessError(Exception):\n pass\n \nclass BufferTooShort(ProcessError):\n pass\n \nclass TimeoutError(ProcessError):\n pass\n \nclass AuthenticationError(ProcessError):\n pass\n \nimport _multiprocessing\n\n\n\n\n\ndef Manager():\n ''\n\n\n\n\n \n from multiprocessing.managers import SyncManager\n m=SyncManager()\n m.start()\n return m\n \n \n \n \n \n \n \n \n \ndef cpu_count():\n ''\n\n \n if sys.platform =='win32':\n try:\n num=int(os.environ['NUMBER_OF_PROCESSORS'])\n except(ValueError,KeyError):\n num=0\n elif 'bsd'in sys.platform or sys.platform =='darwin':\n comm='/sbin/sysctl -n hw.ncpu'\n if sys.platform =='darwin':\n comm='/usr'+comm\n try:\n with os.popen(comm)as p:\n num=int(p.read())\n except ValueError:\n num=0\n else:\n try:\n num=os.sysconf('SC_NPROCESSORS_ONLN')\n except(ValueError,OSError,AttributeError):\n num=0\n \n if num >=1:\n return num\n else:\n raise NotImplementedError('cannot determine number of cpus')\n \ndef freeze_support():\n ''\n\n\n \n if sys.platform =='win32'and getattr(sys,'frozen',False):\n from multiprocessing.forking import freeze_support\n freeze_support()\n \ndef get_logger():\n ''\n\n \n from multiprocessing.util import get_logger\n return get_logger()\n \ndef log_to_stderr(level=None):\n ''\n\n \n from multiprocessing.util import log_to_stderr\n return log_to_stderr(level)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef Lock():\n ''\n\n \n from multiprocessing.synchronize import Lock\n return Lock()\n \ndef RLock():\n ''\n\n \n from multiprocessing.synchronize import RLock\n return RLock()\n \ndef Condition(lock=None):\n ''\n\n \n from multiprocessing.synchronize import Condition\n return Condition(lock)\n \ndef Semaphore(value=1):\n ''\n\n \n from multiprocessing.synchronize import Semaphore\n return Semaphore(value)\n \ndef BoundedSemaphore(value=1):\n ''\n\n \n from multiprocessing.synchronize import BoundedSemaphore\n return BoundedSemaphore(value)\n \ndef Event():\n ''\n\n \n from multiprocessing.synchronize import Event\n return Event()\n \ndef Barrier(parties,action=None,timeout=None):\n ''\n\n \n from multiprocessing.synchronize import Barrier\n return Barrier(parties,action,timeout)\n \ndef Queue(maxsize=0):\n ''\n\n \n from multiprocessing.queues import Queue\n return Queue(maxsize)\n \ndef JoinableQueue(maxsize=0):\n ''\n\n \n from multiprocessing.queues import JoinableQueue\n return JoinableQueue(maxsize)\n \ndef SimpleQueue():\n ''\n\n \n from multiprocessing.queues import SimpleQueue\n return SimpleQueue()\n \ndef Pool(processes=None,initializer=None,initargs=(),maxtasksperchild=None):\n ''\n\n \n from multiprocessing.pool import Pool\n return Pool(processes,initializer,initargs,maxtasksperchild)\n \ndef RawValue(typecode_or_type,*args):\n ''\n\n \n from multiprocessing.sharedctypes import RawValue\n return RawValue(typecode_or_type,*args)\n \ndef RawArray(typecode_or_type,size_or_initializer):\n ''\n\n \n from multiprocessing.sharedctypes import RawArray\n return RawArray(typecode_or_type,size_or_initializer)\n \ndef Value(typecode_or_type,*args,lock=True):\n ''\n\n \n from multiprocessing.sharedctypes import Value\n return Value(typecode_or_type,*args,lock=lock)\n \ndef Array(typecode_or_type,size_or_initializer,*,lock=True):\n ''\n\n \n from multiprocessing.sharedctypes import Array\n return 
Array(typecode_or_type,size_or_initializer,lock=lock)\n \n \n \n \n \nif sys.platform =='win32':\n\n def set_executable(executable):\n ''\n\n\n\n \n from multiprocessing.forking import set_executable\n set_executable(executable)\n \n __all__ +=['set_executable']\n", ["_multiprocessing", "multiprocessing.forking", "multiprocessing.managers", "multiprocessing.pool", "multiprocessing.process", "multiprocessing.queues", "multiprocessing.sharedctypes", "multiprocessing.synchronize", "multiprocessing.util", "os", "sys"], 1], "multiprocessing.connection": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['Client','Listener','Pipe']\n\nfrom queue import Queue\n\n\nfamilies=[None]\n\n\nclass Listener(object):\n\n def __init__(self,address=None,family=None,backlog=1):\n self._backlog_queue=Queue(backlog)\n \n def accept(self):\n return Connection(*self._backlog_queue.get())\n \n def close(self):\n self._backlog_queue=None\n \n address=property(lambda self:self._backlog_queue)\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n \n \ndef Client(address):\n _in,_out=Queue(),Queue()\n address.put((_out,_in))\n return Connection(_in,_out)\n \n \ndef Pipe(duplex=True):\n a,b=Queue(),Queue()\n return Connection(a,b),Connection(b,a)\n \n \nclass Connection(object):\n\n def __init__(self,_in,_out):\n self._out=_out\n self._in=_in\n self.send=self.send_bytes=_out.put\n self.recv=self.recv_bytes=_in.get\n \n def poll(self,timeout=0.0):\n if self._in.qsize()>0:\n return True\n if timeout <=0.0:\n return False\n self._in.not_empty.acquire()\n self._in.not_empty.wait(timeout)\n self._in.not_empty.release()\n return self._in.qsize()>0\n \n def close(self):\n pass\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n", ["queue"]], "multiprocessing.process": [".py", "\n\n\n\n\n\n\n\n\n__all__=['Process','current_process','active_children']\n\n\n\n\n\nimport os\nimport sys\nimport signal\nimport itertools\nfrom _weakrefset import WeakSet\n\n\nfrom _multiprocessing import Process\n\n\n\n\ntry:\n ORIGINAL_DIR=os.path.abspath(os.getcwd())\nexcept OSError:\n ORIGINAL_DIR=None\n \n \n \n \n \ndef current_process():\n ''\n\n \n return _current_process\n \ndef active_children():\n ''\n\n \n _cleanup()\n return list(_current_process._children)\n \n \n \n \n \ndef _cleanup():\n\n for p in list(_current_process._children):\n if p._popen.poll()is not None:\n _current_process._children.discard(p)\n \n \n \n \n \n \n \n \n \n \n \n \nclass AuthenticationString(bytes):\n def __reduce__(self):\n from.forking import Popen\n if not Popen.thread_is_spawning():\n raise TypeError(\n 'Pickling an AuthenticationString object is '\n 'disallowed for security reasons'\n )\n return AuthenticationString,(bytes(self),)\n \n \n \n \n \nclass _MainProcess(Process):\n\n def __init__(self):\n self._identity=()\n self._daemonic=False\n self._name='MainProcess'\n self._parent_pid=None\n self._popen=None\n self._counter=itertools.count(1)\n self._children=set()\n self._authkey=AuthenticationString(os.urandom(32))\n self._tempdir=None\n \n_current_process=_MainProcess()\ndel _MainProcess\n\n\n\n\n\n_exitcode_to_name={}\n\nfor name,signum in list(signal.__dict__.items()):\n if name[:3]=='SIG'and '_'not in name:\n _exitcode_to_name[-signum]=name\n \n \n_dangling=WeakSet()\n", ["_multiprocessing", "_weakrefset", "itertools", "multiprocessing.forking", "os", "signal", "sys"]], "multiprocessing.pool": [".py", 
"\n\n\n\n\n\n\n\n\n__all__=['Pool']\n\n\n\n\n\nimport threading\nimport queue\nimport itertools\nimport collections\nimport time\n\nfrom multiprocessing import Process,cpu_count,TimeoutError\nfrom multiprocessing.util import Finalize,debug\n\n\n\n\n\nRUN=0\nCLOSE=1\nTERMINATE=2\n\n\n\n\n\njob_counter=itertools.count()\n\ndef mapstar(args):\n return list(map(*args))\n \ndef starmapstar(args):\n return list(itertools.starmap(args[0],args[1]))\n \n \n \n \n \nclass MaybeEncodingError(Exception):\n ''\n \n \n def __init__(self,exc,value):\n self.exc=repr(exc)\n self.value=repr(value)\n super(MaybeEncodingError,self).__init__(self.exc,self.value)\n \n def __str__(self):\n return \"Error sending result: '%s'. Reason: '%s'\"%(self.value,\n self.exc)\n \n def __repr__(self):\n return \"\"%str(self)\n \n \ndef worker(inqueue,outqueue,initializer=None,initargs=(),maxtasks=None):\n assert maxtasks is None or(type(maxtasks)==int and maxtasks >0)\n put=outqueue.put\n get=inqueue.get\n if hasattr(inqueue,'_writer'):\n inqueue._writer.close()\n outqueue._reader.close()\n \n if initializer is not None:\n initializer(*initargs)\n \n completed=0\n while maxtasks is None or(maxtasks and completed 1\n task_batches=Pool._get_tasks(func,iterable,chunksize)\n result=IMapIterator(self._cache)\n self._taskqueue.put((((result._job,i,mapstar,(x,),{})\n for i,x in enumerate(task_batches)),result._set_length))\n return(item for chunk in result for item in chunk)\n \n def imap_unordered(self,func,iterable,chunksize=1):\n ''\n\n \n if self._state !=RUN:\n raise ValueError(\"Pool not running\")\n if chunksize ==1:\n result=IMapUnorderedIterator(self._cache)\n self._taskqueue.put((((result._job,i,func,(x,),{})\n for i,x in enumerate(iterable)),result._set_length))\n return result\n else:\n assert chunksize >1\n task_batches=Pool._get_tasks(func,iterable,chunksize)\n result=IMapUnorderedIterator(self._cache)\n self._taskqueue.put((((result._job,i,mapstar,(x,),{})\n for i,x in enumerate(task_batches)),result._set_length))\n return(item for chunk in result for item in chunk)\n \n def apply_async(self,func,args=(),kwds={},callback=None,\n error_callback=None):\n ''\n\n \n if self._state !=RUN:\n raise ValueError(\"Pool not running\")\n result=ApplyResult(self._cache,callback,error_callback)\n self._taskqueue.put(([(result._job,None,func,args,kwds)],None))\n return result\n \n def map_async(self,func,iterable,chunksize=None,callback=None,\n error_callback=None):\n ''\n\n \n return self._map_async(func,iterable,mapstar,chunksize,callback,\n error_callback)\n \n def _map_async(self,func,iterable,mapper,chunksize=None,callback=None,\n error_callback=None):\n ''\n\n \n if self._state !=RUN:\n raise ValueError(\"Pool not running\")\n if not hasattr(iterable,'__len__'):\n iterable=list(iterable)\n \n if chunksize is None:\n chunksize,extra=divmod(len(iterable),len(self._pool)*4)\n if extra:\n chunksize +=1\n if len(iterable)==0:\n chunksize=0\n \n task_batches=Pool._get_tasks(func,iterable,chunksize)\n result=MapResult(self._cache,chunksize,len(iterable),callback,\n error_callback=error_callback)\n self._taskqueue.put((((result._job,i,mapper,(x,),{})\n for i,x in enumerate(task_batches)),None))\n return result\n \n @staticmethod\n def _handle_workers(pool):\n thread=threading.current_thread()\n \n \n \n while thread._state ==RUN or(pool._cache and thread._state !=TERMINATE):\n pool._maintain_pool()\n time.sleep(0.1)\n \n pool._taskqueue.put(None)\n debug('worker handler exiting')\n \n @staticmethod\n def 
_handle_tasks(taskqueue,put,outqueue,pool):\n thread=threading.current_thread()\n \n for taskseq,set_length in iter(taskqueue.get,None):\n i=-1\n for i,task in enumerate(taskseq):\n if thread._state:\n debug('task handler found thread._state != RUN')\n break\n try:\n put(task)\n except IOError:\n debug('could not put task on queue')\n break\n else:\n if set_length:\n debug('doing set_length()')\n set_length(i+1)\n continue\n break\n else:\n debug('task handler got sentinel')\n \n \n try:\n \n debug('task handler sending sentinel to result handler')\n outqueue.put(None)\n \n \n debug('task handler sending sentinel to workers')\n for p in pool:\n put(None)\n except IOError:\n debug('task handler got IOError when sending sentinels')\n \n debug('task handler exiting')\n \n @staticmethod\n def _handle_results(outqueue,get,cache):\n thread=threading.current_thread()\n \n while 1:\n try:\n task=get()\n except(IOError,EOFError):\n debug('result handler got EOFError/IOError -- exiting')\n return\n \n if thread._state:\n assert thread._state ==TERMINATE\n debug('result handler found thread._state=TERMINATE')\n break\n \n if task is None:\n debug('result handler got sentinel')\n break\n \n job,i,obj=task\n try:\n cache[job]._set(i,obj)\n except KeyError:\n pass\n \n while cache and thread._state !=TERMINATE:\n try:\n task=get()\n except(IOError,EOFError):\n debug('result handler got EOFError/IOError -- exiting')\n return\n \n if task is None:\n debug('result handler ignoring extra sentinel')\n continue\n job,i,obj=task\n try:\n cache[job]._set(i,obj)\n except KeyError:\n pass\n \n if hasattr(outqueue,'_reader'):\n debug('ensuring that outqueue is not full')\n \n \n \n try:\n for i in range(10):\n if not outqueue._reader.poll():\n break\n get()\n except(IOError,EOFError):\n pass\n \n debug('result handler exiting: len(cache)=%s, thread._state=%s',\n len(cache),thread._state)\n \n @staticmethod\n def _get_tasks(func,it,size):\n it=iter(it)\n while 1:\n x=tuple(itertools.islice(it,size))\n if not x:\n return\n yield(func,x)\n \n def __reduce__(self):\n raise NotImplementedError(\n 'pool objects cannot be passed between processes or pickled'\n )\n \n def close(self):\n debug('closing pool')\n if self._state ==RUN:\n self._state=CLOSE\n self._worker_handler._state=CLOSE\n \n def terminate(self):\n debug('terminating pool')\n self._state=TERMINATE\n self._worker_handler._state=TERMINATE\n self._terminate()\n \n def join(self):\n debug('joining pool')\n assert self._state in(CLOSE,TERMINATE)\n self._worker_handler.join()\n self._task_handler.join()\n self._result_handler.join()\n for p in self._pool:\n p.join()\n \n @staticmethod\n def _help_stuff_finish(inqueue,task_handler,size):\n \n debug('removing tasks from inqueue until task handler finished')\n inqueue._rlock.acquire()\n while task_handler.is_alive()and inqueue._reader.poll():\n inqueue._reader.recv()\n time.sleep(0)\n \n @classmethod\n def _terminate_pool(cls,taskqueue,inqueue,outqueue,pool,\n worker_handler,task_handler,result_handler,cache):\n \n debug('finalizing pool')\n \n worker_handler._state=TERMINATE\n task_handler._state=TERMINATE\n \n debug('helping task handler/workers to finish')\n cls._help_stuff_finish(inqueue,task_handler,len(pool))\n \n assert result_handler.is_alive()or len(cache)==0\n \n result_handler._state=TERMINATE\n outqueue.put(None)\n \n \n \n debug('joining worker handler')\n if threading.current_thread()is not worker_handler:\n worker_handler.join()\n \n \n if pool and hasattr(pool[0],'terminate'):\n debug('terminating 
workers')\n for p in pool:\n if p.exitcode is None:\n p.terminate()\n \n debug('joining task handler')\n if threading.current_thread()is not task_handler:\n task_handler.join()\n \n debug('joining result handler')\n if threading.current_thread()is not result_handler:\n result_handler.join()\n \n if pool and hasattr(pool[0],'terminate'):\n debug('joining pool workers')\n for p in pool:\n if p.is_alive():\n \n debug('cleaning up worker %d'%p.pid)\n p.join()\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_val,exc_tb):\n self.terminate()\n \n \n \n \n \nclass ApplyResult(object):\n\n def __init__(self,cache,callback,error_callback):\n self._event=threading.Event()\n self._job=next(job_counter)\n self._cache=cache\n self._callback=callback\n self._error_callback=error_callback\n cache[self._job]=self\n \n def ready(self):\n return self._event.is_set()\n \n def successful(self):\n assert self.ready()\n return self._success\n \n def wait(self,timeout=None):\n self._event.wait(timeout)\n \n def get(self,timeout=None):\n self.wait(timeout)\n if not self.ready():\n raise TimeoutError\n if self._success:\n return self._value\n else:\n raise self._value\n \n def _set(self,i,obj):\n self._success,self._value=obj\n if self._callback and self._success:\n self._callback(self._value)\n if self._error_callback and not self._success:\n self._error_callback(self._value)\n self._event.set()\n del self._cache[self._job]\n \nAsyncResult=ApplyResult\n\n\n\n\n\nclass MapResult(ApplyResult):\n\n def __init__(self,cache,chunksize,length,callback,error_callback):\n ApplyResult.__init__(self,cache,callback,\n error_callback=error_callback)\n self._success=True\n self._value=[None]*length\n self._chunksize=chunksize\n if chunksize <=0:\n self._number_left=0\n self._event.set()\n del cache[self._job]\n else:\n self._number_left=length //chunksize+bool(length %chunksize)\n \n def _set(self,i,success_result):\n success,result=success_result\n if success:\n self._value[i *self._chunksize:(i+1)*self._chunksize]=result\n self._number_left -=1\n if self._number_left ==0:\n if self._callback:\n self._callback(self._value)\n del self._cache[self._job]\n self._event.set()\n else:\n self._success=False\n self._value=result\n if self._error_callback:\n self._error_callback(self._value)\n del self._cache[self._job]\n self._event.set()\n \n \n \n \n \nclass IMapIterator(object):\n\n def __init__(self,cache):\n self._cond=threading.Condition(threading.Lock())\n self._job=next(job_counter)\n self._cache=cache\n self._items=collections.deque()\n self._index=0\n self._length=None\n self._unsorted={}\n cache[self._job]=self\n \n def __iter__(self):\n return self\n \n def next(self,timeout=None):\n self._cond.acquire()\n try:\n try:\n item=self._items.popleft()\n except IndexError:\n if self._index ==self._length:\n raise StopIteration\n self._cond.wait(timeout)\n try:\n item=self._items.popleft()\n except IndexError:\n if self._index ==self._length:\n raise StopIteration\n raise TimeoutError\n finally:\n self._cond.release()\n \n success,value=item\n if success:\n return value\n raise value\n \n __next__=next\n \n def _set(self,i,obj):\n self._cond.acquire()\n try:\n if self._index ==i:\n self._items.append(obj)\n self._index +=1\n while self._index in self._unsorted:\n obj=self._unsorted.pop(self._index)\n self._items.append(obj)\n self._index +=1\n self._cond.notify()\n else:\n self._unsorted[i]=obj\n \n if self._index ==self._length:\n del self._cache[self._job]\n finally:\n self._cond.release()\n \n def 
_set_length(self,length):\n self._cond.acquire()\n try:\n self._length=length\n if self._index ==self._length:\n self._cond.notify()\n del self._cache[self._job]\n finally:\n self._cond.release()\n \n \n \n \n \nclass IMapUnorderedIterator(IMapIterator):\n\n def _set(self,i,obj):\n self._cond.acquire()\n try:\n self._items.append(obj)\n self._index +=1\n self._cond.notify()\n if self._index ==self._length:\n del self._cache[self._job]\n finally:\n self._cond.release()\n \n \n \n \n \nclass ThreadPool(Pool):\n\n from.dummy import Process\n \n def __init__(self,processes=None,initializer=None,initargs=()):\n Pool.__init__(self,processes,initializer,initargs)\n \n def _setup_queues(self):\n self._inqueue=queue.Queue()\n self._outqueue=queue.Queue()\n self._quick_put=self._inqueue.put\n self._quick_get=self._outqueue.get\n \n @staticmethod\n def _help_stuff_finish(inqueue,task_handler,size):\n \n inqueue.not_empty.acquire()\n try:\n inqueue.queue.clear()\n inqueue.queue.extend([None]*size)\n inqueue.not_empty.notify_all()\n finally:\n inqueue.not_empty.release()\n", ["collections", "itertools", "multiprocessing", "multiprocessing.Process", "multiprocessing.dummy", "multiprocessing.queues", "multiprocessing.util", "queue", "threading", "time"]], "multiprocessing.dummy": [".py", "\n\n\n\n\n\n\n\n\n__all__=[\n'Process','current_process','active_children','freeze_support',\n'Lock','RLock','Semaphore','BoundedSemaphore','Condition',\n'Event','Barrier','Queue','Manager','Pipe','Pool','JoinableQueue'\n]\n\n\n\n\n\nimport threading\nimport sys\nimport weakref\nimport array\n\nfrom.connection import Pipe\nfrom threading import Lock,RLock,Semaphore,BoundedSemaphore\nfrom threading import Event,Condition,Barrier\nfrom queue import Queue\n\n\n\n\n\nclass DummyProcess(threading.Thread):\n\n def __init__(self,group=None,target=None,name=None,args=(),kwargs={}):\n threading.Thread.__init__(self,group,target,name,args,kwargs)\n self._pid=None\n self._children=weakref.WeakKeyDictionary()\n self._start_called=False\n self._parent=current_process()\n \n def start(self):\n if self._parent is not current_process():\n raise RuntimeError(\n \"Parent is {0!r} but current_process is {1!r}\".format(\n self._parent,current_process()))\n self._start_called=True\n if hasattr(self._parent,'_children'):\n self._parent._children[self]=None\n threading.Thread.start(self)\n \n @property\n def exitcode(self):\n if self._start_called and not self.is_alive():\n return 0\n else:\n return None\n \n \n \n \n \nProcess=DummyProcess\ncurrent_process=threading.current_thread\ncurrent_process()._children=weakref.WeakKeyDictionary()\n\ndef active_children():\n children=current_process()._children\n for p in list(children):\n if not p.is_alive():\n children.pop(p,None)\n return list(children)\n \ndef freeze_support():\n pass\n \n \n \n \n \nclass Namespace(object):\n def __init__(self,/,**kwds):\n self.__dict__.update(kwds)\n def __repr__(self):\n items=list(self.__dict__.items())\n temp=[]\n for name,value in items:\n if not name.startswith('_'):\n temp.append('%s=%r'%(name,value))\n temp.sort()\n return '%s(%s)'%(self.__class__.__name__,', '.join(temp))\n \ndict=dict\nlist=list\n\ndef Array(typecode,sequence,lock=True):\n return array.array(typecode,sequence)\n \nclass Value(object):\n def __init__(self,typecode,value,lock=True):\n self._typecode=typecode\n self._value=value\n \n @property\n def value(self):\n return self._value\n \n @value.setter\n def value(self,value):\n self._value=value\n \n def __repr__(self):\n return '<%s(%r, 
%r)>'%(type(self).__name__,self._typecode,self._value)\n \ndef Manager():\n return sys.modules[__name__]\n \ndef shutdown():\n pass\n \ndef Pool(processes=None,initializer=None,initargs=()):\n from..pool import ThreadPool\n return ThreadPool(processes,initializer,initargs)\n \nJoinableQueue=Queue\n", ["array", "multiprocessing.dummy.connection", "multiprocessing.pool", "queue", "sys", "threading", "weakref"], 1], "multiprocessing.dummy.connection": [".py", "\n\n\n\n\n\n\n\n\n__all__=['Client','Listener','Pipe']\n\nfrom queue import Queue\n\n\nfamilies=[None]\n\n\nclass Listener(object):\n\n def __init__(self,address=None,family=None,backlog=1):\n self._backlog_queue=Queue(backlog)\n \n def accept(self):\n return Connection(*self._backlog_queue.get())\n \n def close(self):\n self._backlog_queue=None\n \n @property\n def address(self):\n return self._backlog_queue\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n \n \ndef Client(address):\n _in,_out=Queue(),Queue()\n address.put((_out,_in))\n return Connection(_in,_out)\n \n \ndef Pipe(duplex=True):\n a,b=Queue(),Queue()\n return Connection(a,b),Connection(b,a)\n \n \nclass Connection(object):\n\n def __init__(self,_in,_out):\n self._out=_out\n self._in=_in\n self.send=self.send_bytes=_out.put\n self.recv=self.recv_bytes=_in.get\n \n def poll(self,timeout=0.0):\n if self._in.qsize()>0:\n return True\n if timeout <=0.0:\n return False\n with self._in.not_empty:\n self._in.not_empty.wait(timeout)\n return self._in.qsize()>0\n \n def close(self):\n pass\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n", ["queue"]], "urllib.error": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\nimport io\nimport urllib.response\n\n__all__=['URLError','HTTPError','ContentTooShortError']\n\n\nclass URLError(OSError):\n\n\n\n\n\n def __init__(self,reason,filename=None):\n self.args=reason,\n self.reason=reason\n if filename is not None:\n self.filename=filename\n \n def __str__(self):\n return ''%self.reason\n \n \nclass HTTPError(URLError,urllib.response.addinfourl):\n ''\n __super_init=urllib.response.addinfourl.__init__\n \n def __init__(self,url,code,msg,hdrs,fp):\n self.code=code\n self.msg=msg\n self.hdrs=hdrs\n self.fp=fp\n self.filename=url\n if fp is None:\n fp=io.BytesIO()\n self.__super_init(fp,hdrs,url,code)\n \n def __str__(self):\n return 'HTTP Error %s: %s'%(self.code,self.msg)\n \n def __repr__(self):\n return ''%(self.code,self.msg)\n \n \n \n @property\n def reason(self):\n return self.msg\n \n @property\n def headers(self):\n return self.hdrs\n \n @headers.setter\n def headers(self,headers):\n self.hdrs=headers\n \n \nclass ContentTooShortError(URLError):\n ''\n def __init__(self,message,content):\n URLError.__init__(self,message)\n self.content=content\n", ["io", "urllib.response"]], "urllib.request": [".py", "from browser import ajax\nfrom. 
import error\n\nclass FileIO:\n\n def __init__(self,data):\n self._data=data\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n pass\n \n def read(self):\n return self._data\n \ndef urlopen(url,data=None,timeout=None):\n global result\n result=None\n \n def on_complete(req):\n global result\n if req.status ==200:\n result=req\n \n _ajax=ajax.ajax()\n _ajax.bind('complete',on_complete)\n if timeout is not None:\n _ajax.set_timeout(timeout)\n \n if data is None:\n _ajax.open('GET',url,False)\n _ajax.send()\n else:\n _ajax.open('POST',url,False)\n _ajax.send(data)\n \n if result is not None:\n if isinstance(result.text,str):\n return FileIO(result.text)\n \n return FileIO(result.text())\n raise error.HTTPError('file not found')\n", ["browser", "browser.ajax", "urllib", "urllib.error"]], "urllib": [".py", "", [], 1], "urllib.response": [".py", "''\n\n\n\n\n\n\n\nimport tempfile\n\n__all__=['addbase','addclosehook','addinfo','addinfourl']\n\n\nclass addbase(tempfile._TemporaryFileWrapper):\n ''\n \n \n \n def __init__(self,fp):\n super(addbase,self).__init__(fp,'',delete=False)\n \n self.fp=fp\n \n def __repr__(self):\n return '<%s at %r whose fp = %r>'%(self.__class__.__name__,\n id(self),self.file)\n \n def __enter__(self):\n if self.fp.closed:\n raise ValueError(\"I/O operation on closed file\")\n return self\n \n def __exit__(self,type,value,traceback):\n self.close()\n \n \nclass addclosehook(addbase):\n ''\n \n def __init__(self,fp,closehook,*hookargs):\n super(addclosehook,self).__init__(fp)\n self.closehook=closehook\n self.hookargs=hookargs\n \n def close(self):\n try:\n closehook=self.closehook\n hookargs=self.hookargs\n if closehook:\n self.closehook=None\n self.hookargs=None\n closehook(*hookargs)\n finally:\n super(addclosehook,self).close()\n \n \nclass addinfo(addbase):\n ''\n \n def __init__(self,fp,headers):\n super(addinfo,self).__init__(fp)\n self.headers=headers\n \n def info(self):\n return self.headers\n \n \nclass addinfourl(addinfo):\n ''\n \n def __init__(self,fp,headers,url,code=None):\n super(addinfourl,self).__init__(fp,headers)\n self.url=url\n self.code=code\n \n @property\n def status(self):\n return self.code\n \n def getcode(self):\n return self.code\n \n def geturl(self):\n return self.url\n", ["tempfile"]], "urllib.parse": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nfrom collections import namedtuple\nimport functools\nimport math\nimport re\nimport types\nimport warnings\nimport 
ipaddress\n\n__all__=[\"urlparse\",\"urlunparse\",\"urljoin\",\"urldefrag\",\n\"urlsplit\",\"urlunsplit\",\"urlencode\",\"parse_qs\",\n\"parse_qsl\",\"quote\",\"quote_plus\",\"quote_from_bytes\",\n\"unquote\",\"unquote_plus\",\"unquote_to_bytes\",\n\"DefragResult\",\"ParseResult\",\"SplitResult\",\n\"DefragResultBytes\",\"ParseResultBytes\",\"SplitResultBytes\"]\n\n\n\n\n\nuses_relative=['','ftp','http','gopher','nntp','imap',\n'wais','file','https','shttp','mms',\n'prospero','rtsp','rtsps','rtspu','sftp',\n'svn','svn+ssh','ws','wss']\n\nuses_netloc=['','ftp','http','gopher','nntp','telnet',\n'imap','wais','file','mms','https','shttp',\n'snews','prospero','rtsp','rtsps','rtspu','rsync',\n'svn','svn+ssh','sftp','nfs','git','git+ssh',\n'ws','wss','itms-services']\n\nuses_params=['','ftp','hdl','prospero','http','imap',\n'https','shttp','rtsp','rtsps','rtspu','sip',\n'sips','mms','sftp','tel']\n\n\n\n\nnon_hierarchical=['gopher','hdl','mailto','news',\n'telnet','wais','imap','snews','sip','sips']\n\nuses_query=['','http','wais','imap','https','shttp','mms',\n'gopher','rtsp','rtsps','rtspu','sip','sips']\n\nuses_fragment=['','ftp','hdl','http','gopher','news',\n'nntp','wais','https','shttp','snews',\n'file','prospero']\n\n\nscheme_chars=('abcdefghijklmnopqrstuvwxyz'\n'ABCDEFGHIJKLMNOPQRSTUVWXYZ'\n'0123456789'\n'+-.')\n\n\n\n_WHATWG_C0_CONTROL_OR_SPACE='\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f '\n\n\n_UNSAFE_URL_BYTES_TO_REMOVE=['\\t','\\r','\\n']\n\ndef clear_cache():\n ''\n urlsplit.cache_clear()\n _byte_quoter_factory.cache_clear()\n \n \n \n \n \n \n \n_implicit_encoding='ascii'\n_implicit_errors='strict'\n\ndef _noop(obj):\n return obj\n \ndef _encode_result(obj,encoding=_implicit_encoding,\nerrors=_implicit_errors):\n return obj.encode(encoding,errors)\n \ndef _decode_args(args,encoding=_implicit_encoding,\nerrors=_implicit_errors):\n return tuple(x.decode(encoding,errors)if x else ''for x in args)\n \ndef _coerce_args(*args):\n\n\n\n\n\n str_input=isinstance(args[0],str)\n for arg in args[1:]:\n \n \n if arg and isinstance(arg,str)!=str_input:\n raise TypeError(\"Cannot mix str and non-str arguments\")\n if str_input:\n return args+(_noop,)\n return _decode_args(args)+(_encode_result,)\n \n \nclass _ResultMixinStr(object):\n ''\n __slots__=()\n \n def encode(self,encoding='ascii',errors='strict'):\n return self._encoded_counterpart(*(x.encode(encoding,errors)for x in self))\n \n \nclass _ResultMixinBytes(object):\n ''\n __slots__=()\n \n def decode(self,encoding='ascii',errors='strict'):\n return self._decoded_counterpart(*(x.decode(encoding,errors)for x in self))\n \n \nclass _NetlocResultMixinBase(object):\n ''\n __slots__=()\n \n @property\n def username(self):\n return self._userinfo[0]\n \n @property\n def password(self):\n return self._userinfo[1]\n \n @property\n def hostname(self):\n hostname=self._hostinfo[0]\n if not hostname:\n return None\n \n \n separator='%'if isinstance(hostname,str)else b'%'\n hostname,percent,zone=hostname.partition(separator)\n return hostname.lower()+percent+zone\n \n @property\n def port(self):\n port=self._hostinfo[1]\n if port is not None:\n if port.isdigit()and port.isascii():\n port=int(port)\n else:\n raise ValueError(f\"Port could not be cast to integer value as {port !r}\")\n if not(0 <=port <=65535):\n raise ValueError(\"Port out of range 0-65535\")\n return port\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \nclass 
_NetlocResultMixinStr(_NetlocResultMixinBase,_ResultMixinStr):\n __slots__=()\n \n @property\n def _userinfo(self):\n netloc=self.netloc\n userinfo,have_info,hostinfo=netloc.rpartition('@')\n if have_info:\n username,have_password,password=userinfo.partition(':')\n if not have_password:\n password=None\n else:\n username=password=None\n return username,password\n \n @property\n def _hostinfo(self):\n netloc=self.netloc\n _,_,hostinfo=netloc.rpartition('@')\n _,have_open_br,bracketed=hostinfo.partition('[')\n if have_open_br:\n hostname,_,port=bracketed.partition(']')\n _,_,port=port.partition(':')\n else:\n hostname,_,port=hostinfo.partition(':')\n if not port:\n port=None\n return hostname,port\n \n \nclass _NetlocResultMixinBytes(_NetlocResultMixinBase,_ResultMixinBytes):\n __slots__=()\n \n @property\n def _userinfo(self):\n netloc=self.netloc\n userinfo,have_info,hostinfo=netloc.rpartition(b'@')\n if have_info:\n username,have_password,password=userinfo.partition(b':')\n if not have_password:\n password=None\n else:\n username=password=None\n return username,password\n \n @property\n def _hostinfo(self):\n netloc=self.netloc\n _,_,hostinfo=netloc.rpartition(b'@')\n _,have_open_br,bracketed=hostinfo.partition(b'[')\n if have_open_br:\n hostname,_,port=bracketed.partition(b']')\n _,_,port=port.partition(b':')\n else:\n hostname,_,port=hostinfo.partition(b':')\n if not port:\n port=None\n return hostname,port\n \n \n_DefragResultBase=namedtuple('DefragResult','url fragment')\n_SplitResultBase=namedtuple(\n'SplitResult','scheme netloc path query fragment')\n_ParseResultBase=namedtuple(\n'ParseResult','scheme netloc path params query fragment')\n\n_DefragResultBase.__doc__=\"\"\"\nDefragResult(url, fragment)\n\nA 2-tuple that contains the url without fragment identifier and the fragment\nidentifier as a separate argument.\n\"\"\"\n\n_DefragResultBase.url.__doc__=\"\"\"The URL with no fragment identifier.\"\"\"\n\n_DefragResultBase.fragment.__doc__=\"\"\"\nFragment identifier separated from URL, that allows indirect identification of a\nsecondary resource by reference to a primary resource and additional identifying\ninformation.\n\"\"\"\n\n_SplitResultBase.__doc__=\"\"\"\nSplitResult(scheme, netloc, path, query, fragment)\n\nA 5-tuple that contains the different components of a URL. 
Similar to\nParseResult, but does not split params.\n\"\"\"\n\n_SplitResultBase.scheme.__doc__=\"\"\"Specifies URL scheme for the request.\"\"\"\n\n_SplitResultBase.netloc.__doc__=\"\"\"\nNetwork location where the request is made to.\n\"\"\"\n\n_SplitResultBase.path.__doc__=\"\"\"\nThe hierarchical path, such as the path to a file to download.\n\"\"\"\n\n_SplitResultBase.query.__doc__=\"\"\"\nThe query component, that contains non-hierarchical data, that along with data\nin path component, identifies a resource in the scope of URI's scheme and\nnetwork location.\n\"\"\"\n\n_SplitResultBase.fragment.__doc__=\"\"\"\nFragment identifier, that allows indirect identification of a secondary resource\nby reference to a primary resource and additional identifying information.\n\"\"\"\n\n_ParseResultBase.__doc__=\"\"\"\nParseResult(scheme, netloc, path, params, query, fragment)\n\nA 6-tuple that contains components of a parsed URL.\n\"\"\"\n\n_ParseResultBase.scheme.__doc__=_SplitResultBase.scheme.__doc__\n_ParseResultBase.netloc.__doc__=_SplitResultBase.netloc.__doc__\n_ParseResultBase.path.__doc__=_SplitResultBase.path.__doc__\n_ParseResultBase.params.__doc__=\"\"\"\nParameters for last path element used to dereference the URI in order to provide\naccess to perform some operation on the resource.\n\"\"\"\n\n_ParseResultBase.query.__doc__=_SplitResultBase.query.__doc__\n_ParseResultBase.fragment.__doc__=_SplitResultBase.fragment.__doc__\n\n\n\n\n\nResultBase=_NetlocResultMixinStr\n\n\nclass DefragResult(_DefragResultBase,_ResultMixinStr):\n __slots__=()\n def geturl(self):\n if self.fragment:\n return self.url+'#'+self.fragment\n else:\n return self.url\n \nclass SplitResult(_SplitResultBase,_NetlocResultMixinStr):\n __slots__=()\n def geturl(self):\n return urlunsplit(self)\n \nclass ParseResult(_ParseResultBase,_NetlocResultMixinStr):\n __slots__=()\n def geturl(self):\n return urlunparse(self)\n \n \nclass DefragResultBytes(_DefragResultBase,_ResultMixinBytes):\n __slots__=()\n def geturl(self):\n if self.fragment:\n return self.url+b'#'+self.fragment\n else:\n return self.url\n \nclass SplitResultBytes(_SplitResultBase,_NetlocResultMixinBytes):\n __slots__=()\n def geturl(self):\n return urlunsplit(self)\n \nclass ParseResultBytes(_ParseResultBase,_NetlocResultMixinBytes):\n __slots__=()\n def geturl(self):\n return urlunparse(self)\n \n \ndef _fix_result_transcoding():\n _result_pairs=(\n (DefragResult,DefragResultBytes),\n (SplitResult,SplitResultBytes),\n (ParseResult,ParseResultBytes),\n )\n for _decoded,_encoded in _result_pairs:\n _decoded._encoded_counterpart=_encoded\n _encoded._decoded_counterpart=_decoded\n \n_fix_result_transcoding()\ndel _fix_result_transcoding\n\ndef urlparse(url,scheme='',allow_fragments=True):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n url,scheme,_coerce_result=_coerce_args(url,scheme)\n splitresult=urlsplit(url,scheme,allow_fragments)\n scheme,netloc,url,query,fragment=splitresult\n if scheme in uses_params and ';'in url:\n url,params=_splitparams(url)\n else:\n params=''\n result=ParseResult(scheme,netloc,url,params,query,fragment)\n return _coerce_result(result)\n \ndef _splitparams(url):\n if '/'in url:\n i=url.find(';',url.rfind('/'))\n if i <0:\n return url,''\n else:\n i=url.find(';')\n return url[:i],url[i+1:]\n \ndef _splitnetloc(url,start=0):\n delim=len(url)\n for c in '/?#':\n wdelim=url.find(c,start)\n if wdelim >=0:\n delim=min(delim,wdelim)\n return url[start:delim],url[delim:]\n \ndef _checknetloc(netloc):\n if not netloc or netloc.isascii():\n 
return\n \n \n import unicodedata\n n=netloc.replace('@','')\n n=n.replace(':','')\n n=n.replace('#','')\n n=n.replace('?','')\n netloc2=unicodedata.normalize('NFKC',n)\n if n ==netloc2:\n return\n for c in '/?#@:':\n if c in netloc2:\n raise ValueError(\"netloc '\"+netloc+\"' contains invalid \"+\n \"characters under NFKC normalization\")\n \n \n \ndef _check_bracketed_host(hostname):\n if hostname.startswith('v'):\n if not re.match(r\"\\Av[a-fA-F0-9]+\\..+\\Z\",hostname):\n raise ValueError(f\"IPvFuture address is invalid\")\n else:\n ip=ipaddress.ip_address(hostname)\n if isinstance(ip,ipaddress.IPv4Address):\n raise ValueError(f\"An IPv4 address cannot be in brackets\")\n \n \n \n@functools.lru_cache(typed=True)\ndef urlsplit(url,scheme='',allow_fragments=True):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n url,scheme,_coerce_result=_coerce_args(url,scheme)\n \n \n url=url.lstrip(_WHATWG_C0_CONTROL_OR_SPACE)\n scheme=scheme.strip(_WHATWG_C0_CONTROL_OR_SPACE)\n \n for b in _UNSAFE_URL_BYTES_TO_REMOVE:\n url=url.replace(b,\"\")\n scheme=scheme.replace(b,\"\")\n \n allow_fragments=bool(allow_fragments)\n netloc=query=fragment=''\n i=url.find(':')\n if i >0 and url[0].isascii()and url[0].isalpha():\n for c in url[:i]:\n if c not in scheme_chars:\n break\n else:\n scheme,url=url[:i].lower(),url[i+1:]\n if url[:2]=='//':\n netloc,url=_splitnetloc(url,2)\n if(('['in netloc and ']'not in netloc)or\n (']'in netloc and '['not in netloc)):\n raise ValueError(\"Invalid IPv6 URL\")\n if '['in netloc and ']'in netloc:\n bracketed_host=netloc.partition('[')[2].partition(']')[0]\n _check_bracketed_host(bracketed_host)\n if allow_fragments and '#'in url:\n url,fragment=url.split('#',1)\n if '?'in url:\n url,query=url.split('?',1)\n _checknetloc(netloc)\n v=SplitResult(scheme,netloc,url,query,fragment)\n return _coerce_result(v)\n \ndef urlunparse(components):\n ''\n\n\n \n scheme,netloc,url,params,query,fragment,_coerce_result=(\n _coerce_args(*components))\n if params:\n url=\"%s;%s\"%(url,params)\n return _coerce_result(urlunsplit((scheme,netloc,url,query,fragment)))\n \ndef urlunsplit(components):\n ''\n\n\n\n \n scheme,netloc,url,query,fragment,_coerce_result=(\n _coerce_args(*components))\n if netloc or(scheme and scheme in uses_netloc and url[:2]!='//'):\n if url and url[:1]!='/':url='/'+url\n url='//'+(netloc or '')+url\n if scheme:\n url=scheme+':'+url\n if query:\n url=url+'?'+query\n if fragment:\n url=url+'#'+fragment\n return _coerce_result(url)\n \ndef urljoin(base,url,allow_fragments=True):\n ''\n \n if not base:\n return url\n if not url:\n return base\n \n base,url,_coerce_result=_coerce_args(base,url)\n bscheme,bnetloc,bpath,bparams,bquery,bfragment=\\\n urlparse(base,'',allow_fragments)\n scheme,netloc,path,params,query,fragment=\\\n urlparse(url,bscheme,allow_fragments)\n \n if scheme !=bscheme or scheme not in uses_relative:\n return _coerce_result(url)\n if scheme in uses_netloc:\n if netloc:\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n netloc=bnetloc\n \n if not path and not params:\n path=bpath\n params=bparams\n if not query:\n query=bquery\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n \n base_parts=bpath.split('/')\n if base_parts[-1]!='':\n \n \n del base_parts[-1]\n \n \n if path[:1]=='/':\n segments=path.split('/')\n else:\n segments=base_parts+path.split('/')\n \n \n segments[1:-1]=filter(None,segments[1:-1])\n \n resolved_path=[]\n \n for seg in segments:\n if seg =='..':\n try:\n 
resolved_path.pop()\n except IndexError:\n \n \n pass\n elif seg =='.':\n continue\n else:\n resolved_path.append(seg)\n \n if segments[-1]in('.','..'):\n \n \n resolved_path.append('')\n \n return _coerce_result(urlunparse((scheme,netloc,'/'.join(\n resolved_path)or '/',params,query,fragment)))\n \n \ndef urldefrag(url):\n ''\n\n\n\n\n \n url,_coerce_result=_coerce_args(url)\n if '#'in url:\n s,n,p,a,q,frag=urlparse(url)\n defrag=urlunparse((s,n,p,a,q,''))\n else:\n frag=''\n defrag=url\n return _coerce_result(DefragResult(defrag,frag))\n \n_hexdig='0123456789ABCDEFabcdef'\n_hextobyte=None\n\ndef unquote_to_bytes(string):\n ''\n return bytes(_unquote_impl(string))\n \ndef _unquote_impl(string:bytes |bytearray |str)->bytes |bytearray:\n\n\n if not string:\n \n string.split\n return b''\n if isinstance(string,str):\n string=string.encode('utf-8')\n bits=string.split(b'%')\n if len(bits)==1:\n return string\n res=bytearray(bits[0])\n append=res.extend\n \n \n global _hextobyte\n if _hextobyte is None:\n _hextobyte={(a+b).encode():bytes.fromhex(a+b)\n for a in _hexdig for b in _hexdig}\n for item in bits[1:]:\n try:\n append(_hextobyte[item[:2]])\n append(item[2:])\n except KeyError:\n append(b'%')\n append(item)\n return res\n \n_asciire=re.compile('([\\x00-\\x7f]+)')\n\ndef _generate_unquoted_parts(string,encoding,errors):\n previous_match_end=0\n for ascii_match in _asciire.finditer(string):\n start,end=ascii_match.span()\n yield string[previous_match_end:start]\n \n yield _unquote_impl(ascii_match[1]).decode(encoding,errors)\n previous_match_end=end\n yield string[previous_match_end:]\n \ndef unquote(string,encoding='utf-8',errors='replace'):\n ''\n\n\n\n\n\n\n\n \n if isinstance(string,bytes):\n return _unquote_impl(string).decode(encoding,errors)\n if '%'not in string:\n \n string.split\n return string\n if encoding is None:\n encoding='utf-8'\n if errors is None:\n errors='replace'\n return ''.join(_generate_unquoted_parts(string,encoding,errors))\n \n \ndef parse_qs(qs,keep_blank_values=False,strict_parsing=False,\nencoding='utf-8',errors='replace',max_num_fields=None,separator='&'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n parsed_result={}\n pairs=parse_qsl(qs,keep_blank_values,strict_parsing,\n encoding=encoding,errors=errors,\n max_num_fields=max_num_fields,separator=separator)\n for name,value in pairs:\n if name in parsed_result:\n parsed_result[name].append(value)\n else:\n parsed_result[name]=[value]\n return parsed_result\n \n \ndef parse_qsl(qs,keep_blank_values=False,strict_parsing=False,\nencoding='utf-8',errors='replace',max_num_fields=None,separator='&'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n qs,_coerce_result=_coerce_args(qs)\n separator,_=_coerce_args(separator)\n \n if not separator or(not isinstance(separator,(str,bytes))):\n raise ValueError(\"Separator must be of type string or bytes.\")\n \n \n \n \n if max_num_fields is not None:\n num_fields=1+qs.count(separator)if qs else 0\n if max_num_fields \"\n \n def __missing__(self,b):\n \n res=chr(b)if b in self.safe else '%{:02X}'.format(b)\n self[b]=res\n return res\n \ndef quote(string,safe='/',encoding=None,errors=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(string,str):\n if not string:\n return string\n if encoding is None:\n encoding='utf-8'\n if errors is None:\n errors='strict'\n string=string.encode(encoding,errors)\n else:\n if encoding is not None:\n raise TypeError(\"quote() doesn't support 'encoding' for 
bytes\")\n if errors is not None:\n raise TypeError(\"quote() doesn't support 'errors' for bytes\")\n return quote_from_bytes(string,safe)\n \ndef quote_plus(string,safe='',encoding=None,errors=None):\n ''\n\n\n \n \n \n if((isinstance(string,str)and ' 'not in string)or\n (isinstance(string,bytes)and b' 'not in string)):\n return quote(string,safe,encoding,errors)\n if isinstance(safe,str):\n space=' '\n else:\n space=b' '\n string=quote(string,safe+space,encoding,errors)\n return string.replace(' ','+')\n \n \n@functools.lru_cache\ndef _byte_quoter_factory(safe):\n return _Quoter(safe).__getitem__\n \ndef quote_from_bytes(bs,safe='/'):\n ''\n\n\n \n if not isinstance(bs,(bytes,bytearray)):\n raise TypeError(\"quote_from_bytes() expected bytes\")\n if not bs:\n return ''\n if isinstance(safe,str):\n \n safe=safe.encode('ascii','ignore')\n else:\n \n safe=bytes([c for c in safe if c <128])\n if not bs.rstrip(_ALWAYS_SAFE_BYTES+safe):\n return bs.decode()\n quoter=_byte_quoter_factory(safe)\n if(bs_len :=len(bs))<200_000:\n return ''.join(map(quoter,bs))\n else:\n \n chunk_size=math.isqrt(bs_len)\n chunks=[''.join(map(quoter,bs[i:i+chunk_size]))\n for i in range(0,bs_len,chunk_size)]\n return ''.join(chunks)\n \ndef urlencode(query,doseq=False,safe='',encoding=None,errors=None,\nquote_via=quote_plus):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if hasattr(query,\"items\"):\n query=query.items()\n else:\n \n \n try:\n \n \n if len(query)and not isinstance(query[0],tuple):\n raise TypeError\n \n \n \n \n except TypeError as err:\n raise TypeError(\"not a valid non-string sequence \"\n \"or mapping object\")from err\n \n l=[]\n if not doseq:\n for k,v in query:\n if isinstance(k,bytes):\n k=quote_via(k,safe)\n else:\n k=quote_via(str(k),safe,encoding,errors)\n \n if isinstance(v,bytes):\n v=quote_via(v,safe)\n else:\n v=quote_via(str(v),safe,encoding,errors)\n l.append(k+'='+v)\n else:\n for k,v in query:\n if isinstance(k,bytes):\n k=quote_via(k,safe)\n else:\n k=quote_via(str(k),safe,encoding,errors)\n \n if isinstance(v,bytes):\n v=quote_via(v,safe)\n l.append(k+'='+v)\n elif isinstance(v,str):\n v=quote_via(v,safe,encoding,errors)\n l.append(k+'='+v)\n else:\n try:\n \n x=len(v)\n except TypeError:\n \n v=quote_via(str(v),safe,encoding,errors)\n l.append(k+'='+v)\n else:\n \n for elt in v:\n if isinstance(elt,bytes):\n elt=quote_via(elt,safe)\n else:\n elt=quote_via(str(elt),safe,encoding,errors)\n l.append(k+'='+elt)\n return '&'.join(l)\n \n \ndef to_bytes(url):\n warnings.warn(\"urllib.parse.to_bytes() is deprecated as of 3.8\",\n DeprecationWarning,stacklevel=2)\n return _to_bytes(url)\n \n \ndef _to_bytes(url):\n ''\n \n \n \n if isinstance(url,str):\n try:\n url=url.encode(\"ASCII\").decode()\n except UnicodeError:\n raise UnicodeError(\"URL \"+repr(url)+\n \" contains non-ASCII characters\")\n return url\n \n \ndef unwrap(url):\n ''\n\n\n \n url=str(url).strip()\n if url[:1]=='<'and url[-1:]=='>':\n url=url[1:-1].strip()\n if url[:4]=='URL:':\n url=url[4:].strip()\n return url\n \n \ndef splittype(url):\n warnings.warn(\"urllib.parse.splittype() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splittype(url)\n \n \n_typeprog=None\ndef _splittype(url):\n ''\n global _typeprog\n if _typeprog is None:\n _typeprog=re.compile('([^/:]+):(.*)',re.DOTALL)\n \n match=_typeprog.match(url)\n if match:\n scheme,data=match.groups()\n return scheme.lower(),data\n return None,url\n \n \ndef splithost(url):\n 
warnings.warn(\"urllib.parse.splithost() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splithost(url)\n \n \n_hostprog=None\ndef _splithost(url):\n ''\n global _hostprog\n if _hostprog is None:\n _hostprog=re.compile('//([^/#?]*)(.*)',re.DOTALL)\n \n match=_hostprog.match(url)\n if match:\n host_port,path=match.groups()\n if path and path[0]!='/':\n path='/'+path\n return host_port,path\n return None,url\n \n \ndef splituser(host):\n warnings.warn(\"urllib.parse.splituser() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splituser(host)\n \n \ndef _splituser(host):\n ''\n user,delim,host=host.rpartition('@')\n return(user if delim else None),host\n \n \ndef splitpasswd(user):\n warnings.warn(\"urllib.parse.splitpasswd() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splitpasswd(user)\n \n \ndef _splitpasswd(user):\n ''\n user,delim,passwd=user.partition(':')\n return user,(passwd if delim else None)\n \n \ndef splitport(host):\n warnings.warn(\"urllib.parse.splitport() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splitport(host)\n \n \n \n_portprog=None\ndef _splitport(host):\n ''\n global _portprog\n if _portprog is None:\n _portprog=re.compile('(.*):([0-9]*)',re.DOTALL)\n \n match=_portprog.fullmatch(host)\n if match:\n host,port=match.groups()\n if port:\n return host,port\n return host,None\n \n \ndef splitnport(host,defport=-1):\n warnings.warn(\"urllib.parse.splitnport() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splitnport(host,defport)\n \n \ndef _splitnport(host,defport=-1):\n ''\n\n\n \n host,delim,port=host.rpartition(':')\n if not delim:\n host=port\n elif port:\n if port.isdigit()and port.isascii():\n nport=int(port)\n else:\n nport=None\n return host,nport\n return host,defport\n \n \ndef splitquery(url):\n warnings.warn(\"urllib.parse.splitquery() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splitquery(url)\n \n \ndef _splitquery(url):\n ''\n path,delim,query=url.rpartition('?')\n if delim:\n return path,query\n return url,None\n \n \ndef splittag(url):\n warnings.warn(\"urllib.parse.splittag() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splittag(url)\n \n \ndef _splittag(url):\n ''\n path,delim,tag=url.rpartition('#')\n if delim:\n return path,tag\n return url,None\n \n \ndef splitattr(url):\n warnings.warn(\"urllib.parse.splitattr() is deprecated as of 3.8, \"\n \"use urllib.parse.urlparse() instead\",\n DeprecationWarning,stacklevel=2)\n return _splitattr(url)\n \n \ndef _splitattr(url):\n ''\n \n words=url.split(';')\n return words[0],words[1:]\n \n \ndef splitvalue(attr):\n warnings.warn(\"urllib.parse.splitvalue() is deprecated as of 3.8, \"\n \"use urllib.parse.parse_qsl() instead\",\n DeprecationWarning,stacklevel=2)\n return _splitvalue(attr)\n \n \ndef _splitvalue(attr):\n ''\n attr,delim,value=attr.partition('=')\n return attr,(value if delim else None)\n", ["collections", "functools", "ipaddress", "math", "re", "types", "unicodedata", "warnings"]], "html": [".py", "''\n\n\n\nimport re as _re\nfrom html.entities import html5 as _html5\n\n\n__all__=['escape','unescape']\n\n\ndef 
escape(s,quote=True):\n ''\n\n\n\n\n \n s=s.replace(\"&\",\"&\")\n s=s.replace(\"<\",\"<\")\n s=s.replace(\">\",\">\")\n if quote:\n s=s.replace('\"',\""\")\n s=s.replace('\\'',\"'\")\n return s\n \n \n \n \n_invalid_charrefs={\n0x00:'\\ufffd',\n0x0d:'\\r',\n0x80:'\\u20ac',\n0x81:'\\x81',\n0x82:'\\u201a',\n0x83:'\\u0192',\n0x84:'\\u201e',\n0x85:'\\u2026',\n0x86:'\\u2020',\n0x87:'\\u2021',\n0x88:'\\u02c6',\n0x89:'\\u2030',\n0x8a:'\\u0160',\n0x8b:'\\u2039',\n0x8c:'\\u0152',\n0x8d:'\\x8d',\n0x8e:'\\u017d',\n0x8f:'\\x8f',\n0x90:'\\x90',\n0x91:'\\u2018',\n0x92:'\\u2019',\n0x93:'\\u201c',\n0x94:'\\u201d',\n0x95:'\\u2022',\n0x96:'\\u2013',\n0x97:'\\u2014',\n0x98:'\\u02dc',\n0x99:'\\u2122',\n0x9a:'\\u0161',\n0x9b:'\\u203a',\n0x9c:'\\u0153',\n0x9d:'\\x9d',\n0x9e:'\\u017e',\n0x9f:'\\u0178',\n}\n\n_invalid_codepoints={\n\n0x1,0x2,0x3,0x4,0x5,0x6,0x7,0x8,\n\n0xe,0xf,0x10,0x11,0x12,0x13,0x14,0x15,0x16,0x17,0x18,0x19,\n0x1a,0x1b,0x1c,0x1d,0x1e,0x1f,\n\n0x7f,0x80,0x81,0x82,0x83,0x84,0x85,0x86,0x87,0x88,0x89,0x8a,\n0x8b,0x8c,0x8d,0x8e,0x8f,0x90,0x91,0x92,0x93,0x94,0x95,0x96,\n0x97,0x98,0x99,0x9a,0x9b,0x9c,0x9d,0x9e,0x9f,\n\n0xfdd0,0xfdd1,0xfdd2,0xfdd3,0xfdd4,0xfdd5,0xfdd6,0xfdd7,0xfdd8,\n0xfdd9,0xfdda,0xfddb,0xfddc,0xfddd,0xfdde,0xfddf,0xfde0,0xfde1,\n0xfde2,0xfde3,0xfde4,0xfde5,0xfde6,0xfde7,0xfde8,0xfde9,0xfdea,\n0xfdeb,0xfdec,0xfded,0xfdee,0xfdef,\n\n0xb,0xfffe,0xffff,0x1fffe,0x1ffff,0x2fffe,0x2ffff,0x3fffe,0x3ffff,\n0x4fffe,0x4ffff,0x5fffe,0x5ffff,0x6fffe,0x6ffff,0x7fffe,0x7ffff,\n0x8fffe,0x8ffff,0x9fffe,0x9ffff,0xafffe,0xaffff,0xbfffe,0xbffff,\n0xcfffe,0xcffff,0xdfffe,0xdffff,0xefffe,0xeffff,0xffffe,0xfffff,\n0x10fffe,0x10ffff\n}\n\n\ndef _replace_charref(s):\n s=s.group(1)\n if s[0]=='#':\n \n if s[1]in 'xX':\n num=int(s[2:].rstrip(';'),16)\n else:\n num=int(s[1:].rstrip(';'))\n if num in _invalid_charrefs:\n return _invalid_charrefs[num]\n if 0xD800 <=num <=0xDFFF or num >0x10FFFF:\n return '\\uFFFD'\n if num in _invalid_codepoints:\n return ''\n return chr(num)\n else:\n \n if s in _html5:\n return _html5[s]\n \n for x in range(len(s)-1,1,-1):\n if s[:x]in _html5:\n return _html5[s[:x]]+s[x:]\n else:\n return '&'+s\n \n \n_charref=_re.compile(r'&(#[0-9]+;?'\nr'|#[xX][0-9a-fA-F]+;?'\nr'|[^\\t\\n\\f <&#;]{1,32};?)')\n\ndef unescape(s):\n ''\n\n\n\n\n\n \n if '&'not in s:\n return s\n return _charref.sub(_replace_charref,s)\n", ["html.entities", "re"], 1], "html.parser": [".py", "''\n\n\n\n\n\n\n\n\n\nimport re\nimport _markupbase\n\nfrom html import unescape\n\n\n__all__=['HTMLParser']\n\n\n\ninteresting_normal=re.compile('[&<]')\nincomplete=re.compile('&[a-zA-Z#]')\n\nentityref=re.compile('&([a-zA-Z][-.a-zA-Z0-9]*)[^a-zA-Z0-9]')\ncharref=re.compile('&#(?:[0-9]+|[xX][0-9a-fA-F]+)[^0-9a-fA-F]')\n\nstarttagopen=re.compile('<[a-zA-Z]')\npiclose=re.compile('>')\ncommentclose=re.compile(r'--\\s*>')\n\n\n\n\n\n\ntagfind_tolerant=re.compile(r'([a-zA-Z][^\\t\\n\\r\\f />\\x00]*)(?:\\s|/(?!>))*')\nattrfind_tolerant=re.compile(\nr'((?<=[\\'\"\\s/])[^\\s/>][^\\s/=>]*)(\\s*=+\\s*'\nr'(\\'[^\\']*\\'|\"[^\"]*\"|(?![\\'\"])[^>\\s]*))?(?:\\s|/(?!>))*')\nlocatestarttagend_tolerant=re.compile(r\"\"\"\n <[a-zA-Z][^\\t\\n\\r\\f />\\x00]* # tag name\n (?:[\\s/]* # optional whitespace before attribute name\n (?:(?<=['\"\\s/])[^\\s/>][^\\s/=>]* # attribute name\n (?:\\s*=+\\s* # value indicator\n (?:'[^']*' # LITA-enclosed value\n |\"[^\"]*\" # LIT-enclosed value\n |(?!['\"])[^>\\s]* # bare value\n )\n \\s* # possibly followed by a space\n )?(?:\\s|/(?!>))*\n )*\n )?\n \\s* # trailing 
whitespace\n\"\"\",re.VERBOSE)\nendendtag=re.compile('>')\n\n\nendtagfind=re.compile(r'')\n\n\n\nclass HTMLParser(_markupbase.ParserBase):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n CDATA_CONTENT_ELEMENTS=(\"script\",\"style\")\n \n def __init__(self,*,convert_charrefs=True):\n ''\n\n\n\n \n super().__init__()\n self.convert_charrefs=convert_charrefs\n self.reset()\n \n def reset(self):\n ''\n self.rawdata=''\n self.lasttag='???'\n self.interesting=interesting_normal\n self.cdata_elem=None\n super().reset()\n \n def feed(self,data):\n ''\n\n\n\n \n self.rawdata=self.rawdata+data\n self.goahead(0)\n \n def close(self):\n ''\n self.goahead(1)\n \n __starttag_text=None\n \n def get_starttag_text(self):\n ''\n return self.__starttag_text\n \n def set_cdata_mode(self,elem):\n self.cdata_elem=elem.lower()\n self.interesting=re.compile(r''%self.cdata_elem,re.I)\n \n def clear_cdata_mode(self):\n self.interesting=interesting_normal\n self.cdata_elem=None\n \n \n \n \n def goahead(self,end):\n rawdata=self.rawdata\n i=0\n n=len(rawdata)\n while i =0 and\n not re.compile(r'[\\s;]').search(rawdata,amppos)):\n break\n j=n\n else:\n match=self.interesting.search(rawdata,i)\n if match:\n j=match.start()\n else:\n if self.cdata_elem:\n break\n j=n\n if i ',i+1)\n if k <0:\n k=rawdata.find('<',i+1)\n if k <0:\n k=i+1\n else:\n k +=1\n if self.convert_charrefs and not self.cdata_elem:\n self.handle_data(unescape(rawdata[i:k]))\n else:\n self.handle_data(rawdata[i:k])\n i=self.updatepos(i,k)\n elif startswith(\"&#\",i):\n match=charref.match(rawdata,i)\n if match:\n name=match.group()[2:-1]\n self.handle_charref(name)\n k=match.end()\n if not startswith(';',k -1):\n k=k -1\n i=self.updatepos(i,k)\n continue\n else:\n if \";\"in rawdata[i:]:\n self.handle_data(rawdata[i:i+2])\n i=self.updatepos(i,i+2)\n break\n elif startswith('&',i):\n match=entityref.match(rawdata,i)\n if match:\n name=match.group(1)\n self.handle_entityref(name)\n k=match.end()\n if not startswith(';',k -1):\n k=k -1\n i=self.updatepos(i,k)\n continue\n match=incomplete.match(rawdata,i)\n if match:\n \n if end and match.group()==rawdata[i:]:\n k=match.end()\n if k <=i:\n k=n\n i=self.updatepos(i,i+1)\n \n break\n elif(i+1)',i+9)\n if gtpos ==-1:\n return -1\n self.handle_decl(rawdata[i+2:gtpos])\n return gtpos+1\n else:\n return self.parse_bogus_comment(i)\n \n \n \n def parse_bogus_comment(self,i,report=1):\n rawdata=self.rawdata\n assert rawdata[i:i+2]in('',i+2)\n if pos ==-1:\n return -1\n if report:\n self.handle_comment(rawdata[i+2:pos])\n return pos+1\n \n \n def parse_pi(self,i):\n rawdata=self.rawdata\n assert rawdata[i:i+2]=='\",\"/>\"):\n self.handle_data(rawdata[i:endpos])\n return endpos\n if end.endswith('/>'):\n \n self.handle_startendtag(tag,attrs)\n else:\n self.handle_starttag(tag,attrs)\n if tag in self.CDATA_CONTENT_ELEMENTS:\n self.set_cdata_mode(tag)\n return endpos\n \n \n \n def check_for_whole_start_tag(self,i):\n rawdata=self.rawdata\n m=locatestarttagend_tolerant.match(rawdata,i)\n if m:\n j=m.end()\n next=rawdata[j:j+1]\n if next ==\">\":\n return j+1\n if next ==\"/\":\n if rawdata.startswith(\"/>\",j):\n return j+2\n if rawdata.startswith(\"/\",j):\n \n return -1\n \n if j >i:\n return j\n else:\n return i+1\n if next ==\"\":\n \n return -1\n if next in(\"abcdefghijklmnopqrstuvwxyz=/\"\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\"):\n \n \n return -1\n if j >i:\n return j\n else:\n return i+1\n raise AssertionError(\"we should not get here!\")\n \n \n def parse_endtag(self,i):\n rawdata=self.rawdata\n assert 
rawdata[i:i+2]==\"':\n return i+3\n else:\n return self.parse_bogus_comment(i)\n tagname=namematch.group(1).lower()\n \n \n \n \n gtpos=rawdata.find('>',namematch.end())\n self.handle_endtag(tagname)\n return gtpos+1\n \n elem=match.group(1).lower()\n if self.cdata_elem is not None:\n if elem !=self.cdata_elem:\n self.handle_data(rawdata[i:gtpos])\n return gtpos\n \n self.handle_endtag(elem)\n self.clear_cdata_mode()\n return gtpos\n \n \n def handle_startendtag(self,tag,attrs):\n self.handle_starttag(tag,attrs)\n self.handle_endtag(tag)\n \n \n def handle_starttag(self,tag,attrs):\n pass\n \n \n def handle_endtag(self,tag):\n pass\n \n \n def handle_charref(self,name):\n pass\n \n \n def handle_entityref(self,name):\n pass\n \n \n def handle_data(self,data):\n pass\n \n \n def handle_comment(self,data):\n pass\n \n \n def handle_decl(self,decl):\n pass\n \n \n def handle_pi(self,data):\n pass\n \n def unknown_decl(self,data):\n pass\n", ["_markupbase", "html", "re"]], "html.entities": [".py", "''\n\n__all__=['html5','name2codepoint','codepoint2name','entitydefs']\n\n\n\nname2codepoint={\n'AElig':0x00c6,\n'Aacute':0x00c1,\n'Acirc':0x00c2,\n'Agrave':0x00c0,\n'Alpha':0x0391,\n'Aring':0x00c5,\n'Atilde':0x00c3,\n'Auml':0x00c4,\n'Beta':0x0392,\n'Ccedil':0x00c7,\n'Chi':0x03a7,\n'Dagger':0x2021,\n'Delta':0x0394,\n'ETH':0x00d0,\n'Eacute':0x00c9,\n'Ecirc':0x00ca,\n'Egrave':0x00c8,\n'Epsilon':0x0395,\n'Eta':0x0397,\n'Euml':0x00cb,\n'Gamma':0x0393,\n'Iacute':0x00cd,\n'Icirc':0x00ce,\n'Igrave':0x00cc,\n'Iota':0x0399,\n'Iuml':0x00cf,\n'Kappa':0x039a,\n'Lambda':0x039b,\n'Mu':0x039c,\n'Ntilde':0x00d1,\n'Nu':0x039d,\n'OElig':0x0152,\n'Oacute':0x00d3,\n'Ocirc':0x00d4,\n'Ograve':0x00d2,\n'Omega':0x03a9,\n'Omicron':0x039f,\n'Oslash':0x00d8,\n'Otilde':0x00d5,\n'Ouml':0x00d6,\n'Phi':0x03a6,\n'Pi':0x03a0,\n'Prime':0x2033,\n'Psi':0x03a8,\n'Rho':0x03a1,\n'Scaron':0x0160,\n'Sigma':0x03a3,\n'THORN':0x00de,\n'Tau':0x03a4,\n'Theta':0x0398,\n'Uacute':0x00da,\n'Ucirc':0x00db,\n'Ugrave':0x00d9,\n'Upsilon':0x03a5,\n'Uuml':0x00dc,\n'Xi':0x039e,\n'Yacute':0x00dd,\n'Yuml':0x0178,\n'Zeta':0x0396,\n'aacute':0x00e1,\n'acirc':0x00e2,\n'acute':0x00b4,\n'aelig':0x00e6,\n'agrave':0x00e0,\n'alefsym':0x2135,\n'alpha':0x03b1,\n'amp':0x0026,\n'and':0x2227,\n'ang':0x2220,\n'aring':0x00e5,\n'asymp':0x2248,\n'atilde':0x00e3,\n'auml':0x00e4,\n'bdquo':0x201e,\n'beta':0x03b2,\n'brvbar':0x00a6,\n'bull':0x2022,\n'cap':0x2229,\n'ccedil':0x00e7,\n'cedil':0x00b8,\n'cent':0x00a2,\n'chi':0x03c7,\n'circ':0x02c6,\n'clubs':0x2663,\n'cong':0x2245,\n'copy':0x00a9,\n'crarr':0x21b5,\n'cup':0x222a,\n'curren':0x00a4,\n'dArr':0x21d3,\n'dagger':0x2020,\n'darr':0x2193,\n'deg':0x00b0,\n'delta':0x03b4,\n'diams':0x2666,\n'divide':0x00f7,\n'eacute':0x00e9,\n'ecirc':0x00ea,\n'egrave':0x00e8,\n'empty':0x2205,\n'emsp':0x2003,\n'ensp':0x2002,\n'epsilon':0x03b5,\n'equiv':0x2261,\n'eta':0x03b7,\n'eth':0x00f0,\n'euml':0x00eb,\n'euro':0x20ac,\n'exist':0x2203,\n'fnof':0x0192,\n'forall':0x2200,\n'frac12':0x00bd,\n'frac14':0x00bc,\n'frac34':0x00be,\n'frasl':0x2044,\n'gamma':0x03b3,\n'ge':0x2265,\n'gt':0x003e,\n'hArr':0x21d4,\n'harr':0x2194,\n'hearts':0x2665,\n'hellip':0x2026,\n'iacute':0x00ed,\n'icirc':0x00ee,\n'iexcl':0x00a1,\n'igrave':0x00ec,\n'image':0x2111,\n'infin':0x221e,\n'int':0x222b,\n'iota':0x03b9,\n'iquest':0x00bf,\n'isin':0x2208,\n'iuml':0x00ef,\n'kappa':0x03ba,\n'lArr':0x21d0,\n'lambda':0x03bb,\n'lang':0x2329,\n'laquo':0x00ab,\n'larr':0x2190,\n'lceil':0x2308,\n'ldquo':0x201c,\n'le':0x2264,\n'lfloor':0x230a,\n'lowast':0x2217,\n'loz':0x25ca,\n'lrm':0x200e,\n'lsaqu
o':0x2039,\n'lsquo':0x2018,\n'lt':0x003c,\n'macr':0x00af,\n'mdash':0x2014,\n'micro':0x00b5,\n'middot':0x00b7,\n'minus':0x2212,\n'mu':0x03bc,\n'nabla':0x2207,\n'nbsp':0x00a0,\n'ndash':0x2013,\n'ne':0x2260,\n'ni':0x220b,\n'not':0x00ac,\n'notin':0x2209,\n'nsub':0x2284,\n'ntilde':0x00f1,\n'nu':0x03bd,\n'oacute':0x00f3,\n'ocirc':0x00f4,\n'oelig':0x0153,\n'ograve':0x00f2,\n'oline':0x203e,\n'omega':0x03c9,\n'omicron':0x03bf,\n'oplus':0x2295,\n'or':0x2228,\n'ordf':0x00aa,\n'ordm':0x00ba,\n'oslash':0x00f8,\n'otilde':0x00f5,\n'otimes':0x2297,\n'ouml':0x00f6,\n'para':0x00b6,\n'part':0x2202,\n'permil':0x2030,\n'perp':0x22a5,\n'phi':0x03c6,\n'pi':0x03c0,\n'piv':0x03d6,\n'plusmn':0x00b1,\n'pound':0x00a3,\n'prime':0x2032,\n'prod':0x220f,\n'prop':0x221d,\n'psi':0x03c8,\n'quot':0x0022,\n'rArr':0x21d2,\n'radic':0x221a,\n'rang':0x232a,\n'raquo':0x00bb,\n'rarr':0x2192,\n'rceil':0x2309,\n'rdquo':0x201d,\n'real':0x211c,\n'reg':0x00ae,\n'rfloor':0x230b,\n'rho':0x03c1,\n'rlm':0x200f,\n'rsaquo':0x203a,\n'rsquo':0x2019,\n'sbquo':0x201a,\n'scaron':0x0161,\n'sdot':0x22c5,\n'sect':0x00a7,\n'shy':0x00ad,\n'sigma':0x03c3,\n'sigmaf':0x03c2,\n'sim':0x223c,\n'spades':0x2660,\n'sub':0x2282,\n'sube':0x2286,\n'sum':0x2211,\n'sup':0x2283,\n'sup1':0x00b9,\n'sup2':0x00b2,\n'sup3':0x00b3,\n'supe':0x2287,\n'szlig':0x00df,\n'tau':0x03c4,\n'there4':0x2234,\n'theta':0x03b8,\n'thetasym':0x03d1,\n'thinsp':0x2009,\n'thorn':0x00fe,\n'tilde':0x02dc,\n'times':0x00d7,\n'trade':0x2122,\n'uArr':0x21d1,\n'uacute':0x00fa,\n'uarr':0x2191,\n'ucirc':0x00fb,\n'ugrave':0x00f9,\n'uml':0x00a8,\n'upsih':0x03d2,\n'upsilon':0x03c5,\n'uuml':0x00fc,\n'weierp':0x2118,\n'xi':0x03be,\n'yacute':0x00fd,\n'yen':0x00a5,\n'yuml':0x00ff,\n'zeta':0x03b6,\n'zwj':0x200d,\n'zwnj':0x200c,\n}\n\n\n\n\n\n\n\nhtml5={\n'Aacute':'\\xc1',\n'aacute':'\\xe1',\n'Aacute;':'\\xc1',\n'aacute;':'\\xe1',\n'Abreve;':'\\u0102',\n'abreve;':'\\u0103',\n'ac;':'\\u223e',\n'acd;':'\\u223f',\n'acE;':'\\u223e\\u0333',\n'Acirc':'\\xc2',\n'acirc':'\\xe2',\n'Acirc;':'\\xc2',\n'acirc;':'\\xe2',\n'acute':'\\xb4',\n'acute;':'\\xb4',\n'Acy;':'\\u0410',\n'acy;':'\\u0430',\n'AElig':'\\xc6',\n'aelig':'\\xe6',\n'AElig;':'\\xc6',\n'aelig;':'\\xe6',\n'af;':'\\u2061',\n'Afr;':'\\U0001d504',\n'afr;':'\\U0001d51e',\n'Agrave':'\\xc0',\n'agrave':'\\xe0',\n'Agrave;':'\\xc0',\n'agrave;':'\\xe0',\n'alefsym;':'\\u2135',\n'aleph;':'\\u2135',\n'Alpha;':'\\u0391',\n'alpha;':'\\u03b1',\n'Amacr;':'\\u0100',\n'amacr;':'\\u0101',\n'amalg;':'\\u2a3f',\n'AMP':'&',\n'amp':'&',\n'AMP;':'&',\n'amp;':'&',\n'And;':'\\u2a53',\n'and;':'\\u2227',\n'andand;':'\\u2a55',\n'andd;':'\\u2a5c',\n'andslope;':'\\u2a58',\n'andv;':'\\u2a5a',\n'ang;':'\\u2220',\n'ange;':'\\u29a4',\n'angle;':'\\u2220',\n'angmsd;':'\\u2221',\n'angmsdaa;':'\\u29a8',\n'angmsdab;':'\\u29a9',\n'angmsdac;':'\\u29aa',\n'angmsdad;':'\\u29ab',\n'angmsdae;':'\\u29ac',\n'angmsdaf;':'\\u29ad',\n'angmsdag;':'\\u29ae',\n'angmsdah;':'\\u29af',\n'angrt;':'\\u221f',\n'angrtvb;':'\\u22be',\n'angrtvbd;':'\\u299d',\n'angsph;':'\\u2222',\n'angst;':'\\xc5',\n'angzarr;':'\\u237c',\n'Aogon;':'\\u0104',\n'aogon;':'\\u0105',\n'Aopf;':'\\U0001d538',\n'aopf;':'\\U0001d552',\n'ap;':'\\u2248',\n'apacir;':'\\u2a6f',\n'apE;':'\\u2a70',\n'ape;':'\\u224a',\n'apid;':'\\u224b',\n'apos;':\"'\",\n'ApplyFunction;':'\\u2061',\n'approx;':'\\u2248',\n'approxeq;':'\\u224a',\n'Aring':'\\xc5',\n'aring':'\\xe5',\n'Aring;':'\\xc5',\n'aring;':'\\xe5',\n'Ascr;':'\\U0001d49c',\n'ascr;':'\\U0001d4b6',\n'Assign;':'\\u2254',\n'ast;':'*',\n'asymp;':'\\u2248',\n'asympeq;':'\\u224d',\n'Atilde':'\\xc3',\n'atilde':'\\
xe3',\n'Atilde;':'\\xc3',\n'atilde;':'\\xe3',\n'Auml':'\\xc4',\n'auml':'\\xe4',\n'Auml;':'\\xc4',\n'auml;':'\\xe4',\n'awconint;':'\\u2233',\n'awint;':'\\u2a11',\n'backcong;':'\\u224c',\n'backepsilon;':'\\u03f6',\n'backprime;':'\\u2035',\n'backsim;':'\\u223d',\n'backsimeq;':'\\u22cd',\n'Backslash;':'\\u2216',\n'Barv;':'\\u2ae7',\n'barvee;':'\\u22bd',\n'Barwed;':'\\u2306',\n'barwed;':'\\u2305',\n'barwedge;':'\\u2305',\n'bbrk;':'\\u23b5',\n'bbrktbrk;':'\\u23b6',\n'bcong;':'\\u224c',\n'Bcy;':'\\u0411',\n'bcy;':'\\u0431',\n'bdquo;':'\\u201e',\n'becaus;':'\\u2235',\n'Because;':'\\u2235',\n'because;':'\\u2235',\n'bemptyv;':'\\u29b0',\n'bepsi;':'\\u03f6',\n'bernou;':'\\u212c',\n'Bernoullis;':'\\u212c',\n'Beta;':'\\u0392',\n'beta;':'\\u03b2',\n'beth;':'\\u2136',\n'between;':'\\u226c',\n'Bfr;':'\\U0001d505',\n'bfr;':'\\U0001d51f',\n'bigcap;':'\\u22c2',\n'bigcirc;':'\\u25ef',\n'bigcup;':'\\u22c3',\n'bigodot;':'\\u2a00',\n'bigoplus;':'\\u2a01',\n'bigotimes;':'\\u2a02',\n'bigsqcup;':'\\u2a06',\n'bigstar;':'\\u2605',\n'bigtriangledown;':'\\u25bd',\n'bigtriangleup;':'\\u25b3',\n'biguplus;':'\\u2a04',\n'bigvee;':'\\u22c1',\n'bigwedge;':'\\u22c0',\n'bkarow;':'\\u290d',\n'blacklozenge;':'\\u29eb',\n'blacksquare;':'\\u25aa',\n'blacktriangle;':'\\u25b4',\n'blacktriangledown;':'\\u25be',\n'blacktriangleleft;':'\\u25c2',\n'blacktriangleright;':'\\u25b8',\n'blank;':'\\u2423',\n'blk12;':'\\u2592',\n'blk14;':'\\u2591',\n'blk34;':'\\u2593',\n'block;':'\\u2588',\n'bne;':'=\\u20e5',\n'bnequiv;':'\\u2261\\u20e5',\n'bNot;':'\\u2aed',\n'bnot;':'\\u2310',\n'Bopf;':'\\U0001d539',\n'bopf;':'\\U0001d553',\n'bot;':'\\u22a5',\n'bottom;':'\\u22a5',\n'bowtie;':'\\u22c8',\n'boxbox;':'\\u29c9',\n'boxDL;':'\\u2557',\n'boxDl;':'\\u2556',\n'boxdL;':'\\u2555',\n'boxdl;':'\\u2510',\n'boxDR;':'\\u2554',\n'boxDr;':'\\u2553',\n'boxdR;':'\\u2552',\n'boxdr;':'\\u250c',\n'boxH;':'\\u2550',\n'boxh;':'\\u2500',\n'boxHD;':'\\u2566',\n'boxHd;':'\\u2564',\n'boxhD;':'\\u2565',\n'boxhd;':'\\u252c',\n'boxHU;':'\\u2569',\n'boxHu;':'\\u2567',\n'boxhU;':'\\u2568',\n'boxhu;':'\\u2534',\n'boxminus;':'\\u229f',\n'boxplus;':'\\u229e',\n'boxtimes;':'\\u22a0',\n'boxUL;':'\\u255d',\n'boxUl;':'\\u255c',\n'boxuL;':'\\u255b',\n'boxul;':'\\u2518',\n'boxUR;':'\\u255a',\n'boxUr;':'\\u2559',\n'boxuR;':'\\u2558',\n'boxur;':'\\u2514',\n'boxV;':'\\u2551',\n'boxv;':'\\u2502',\n'boxVH;':'\\u256c',\n'boxVh;':'\\u256b',\n'boxvH;':'\\u256a',\n'boxvh;':'\\u253c',\n'boxVL;':'\\u2563',\n'boxVl;':'\\u2562',\n'boxvL;':'\\u2561',\n'boxvl;':'\\u2524',\n'boxVR;':'\\u2560',\n'boxVr;':'\\u255f',\n'boxvR;':'\\u255e',\n'boxvr;':'\\u251c',\n'bprime;':'\\u2035',\n'Breve;':'\\u02d8',\n'breve;':'\\u02d8',\n'brvbar':'\\xa6',\n'brvbar;':'\\xa6',\n'Bscr;':'\\u212c',\n'bscr;':'\\U0001d4b7',\n'bsemi;':'\\u204f',\n'bsim;':'\\u223d',\n'bsime;':'\\u22cd',\n'bsol;':'\\\\',\n'bsolb;':'\\u29c5',\n'bsolhsub;':'\\u27c8',\n'bull;':'\\u2022',\n'bullet;':'\\u2022',\n'bump;':'\\u224e',\n'bumpE;':'\\u2aae',\n'bumpe;':'\\u224f',\n'Bumpeq;':'\\u224e',\n'bumpeq;':'\\u224f',\n'Cacute;':'\\u0106',\n'cacute;':'\\u0107',\n'Cap;':'\\u22d2',\n'cap;':'\\u2229',\n'capand;':'\\u2a44',\n'capbrcup;':'\\u2a49',\n'capcap;':'\\u2a4b',\n'capcup;':'\\u2a47',\n'capdot;':'\\u2a40',\n'CapitalDifferentialD;':'\\u2145',\n'caps;':'\\u2229\\ufe00',\n'caret;':'\\u2041',\n'caron;':'\\u02c7',\n'Cayleys;':'\\u212d',\n'ccaps;':'\\u2a4d',\n'Ccaron;':'\\u010c',\n'ccaron;':'\\u010d',\n'Ccedil':'\\xc7',\n'ccedil':'\\xe7',\n'Ccedil;':'\\xc7',\n'ccedil;':'\\xe7',\n'Ccirc;':'\\u0108',\n'ccirc;':'\\u0109',\n'Cconint;':'\\u2230',\n'ccups;':'\\u2a
4c',\n'ccupssm;':'\\u2a50',\n'Cdot;':'\\u010a',\n'cdot;':'\\u010b',\n'cedil':'\\xb8',\n'cedil;':'\\xb8',\n'Cedilla;':'\\xb8',\n'cemptyv;':'\\u29b2',\n'cent':'\\xa2',\n'cent;':'\\xa2',\n'CenterDot;':'\\xb7',\n'centerdot;':'\\xb7',\n'Cfr;':'\\u212d',\n'cfr;':'\\U0001d520',\n'CHcy;':'\\u0427',\n'chcy;':'\\u0447',\n'check;':'\\u2713',\n'checkmark;':'\\u2713',\n'Chi;':'\\u03a7',\n'chi;':'\\u03c7',\n'cir;':'\\u25cb',\n'circ;':'\\u02c6',\n'circeq;':'\\u2257',\n'circlearrowleft;':'\\u21ba',\n'circlearrowright;':'\\u21bb',\n'circledast;':'\\u229b',\n'circledcirc;':'\\u229a',\n'circleddash;':'\\u229d',\n'CircleDot;':'\\u2299',\n'circledR;':'\\xae',\n'circledS;':'\\u24c8',\n'CircleMinus;':'\\u2296',\n'CirclePlus;':'\\u2295',\n'CircleTimes;':'\\u2297',\n'cirE;':'\\u29c3',\n'cire;':'\\u2257',\n'cirfnint;':'\\u2a10',\n'cirmid;':'\\u2aef',\n'cirscir;':'\\u29c2',\n'ClockwiseContourIntegral;':'\\u2232',\n'CloseCurlyDoubleQuote;':'\\u201d',\n'CloseCurlyQuote;':'\\u2019',\n'clubs;':'\\u2663',\n'clubsuit;':'\\u2663',\n'Colon;':'\\u2237',\n'colon;':':',\n'Colone;':'\\u2a74',\n'colone;':'\\u2254',\n'coloneq;':'\\u2254',\n'comma;':',',\n'commat;':'@',\n'comp;':'\\u2201',\n'compfn;':'\\u2218',\n'complement;':'\\u2201',\n'complexes;':'\\u2102',\n'cong;':'\\u2245',\n'congdot;':'\\u2a6d',\n'Congruent;':'\\u2261',\n'Conint;':'\\u222f',\n'conint;':'\\u222e',\n'ContourIntegral;':'\\u222e',\n'Copf;':'\\u2102',\n'copf;':'\\U0001d554',\n'coprod;':'\\u2210',\n'Coproduct;':'\\u2210',\n'COPY':'\\xa9',\n'copy':'\\xa9',\n'COPY;':'\\xa9',\n'copy;':'\\xa9',\n'copysr;':'\\u2117',\n'CounterClockwiseContourIntegral;':'\\u2233',\n'crarr;':'\\u21b5',\n'Cross;':'\\u2a2f',\n'cross;':'\\u2717',\n'Cscr;':'\\U0001d49e',\n'cscr;':'\\U0001d4b8',\n'csub;':'\\u2acf',\n'csube;':'\\u2ad1',\n'csup;':'\\u2ad0',\n'csupe;':'\\u2ad2',\n'ctdot;':'\\u22ef',\n'cudarrl;':'\\u2938',\n'cudarrr;':'\\u2935',\n'cuepr;':'\\u22de',\n'cuesc;':'\\u22df',\n'cularr;':'\\u21b6',\n'cularrp;':'\\u293d',\n'Cup;':'\\u22d3',\n'cup;':'\\u222a',\n'cupbrcap;':'\\u2a48',\n'CupCap;':'\\u224d',\n'cupcap;':'\\u2a46',\n'cupcup;':'\\u2a4a',\n'cupdot;':'\\u228d',\n'cupor;':'\\u2a45',\n'cups;':'\\u222a\\ufe00',\n'curarr;':'\\u21b7',\n'curarrm;':'\\u293c',\n'curlyeqprec;':'\\u22de',\n'curlyeqsucc;':'\\u22df',\n'curlyvee;':'\\u22ce',\n'curlywedge;':'\\u22cf',\n'curren':'\\xa4',\n'curren;':'\\xa4',\n'curvearrowleft;':'\\u21b6',\n'curvearrowright;':'\\u21b7',\n'cuvee;':'\\u22ce',\n'cuwed;':'\\u22cf',\n'cwconint;':'\\u2232',\n'cwint;':'\\u2231',\n'cylcty;':'\\u232d',\n'Dagger;':'\\u2021',\n'dagger;':'\\u2020',\n'daleth;':'\\u2138',\n'Darr;':'\\u21a1',\n'dArr;':'\\u21d3',\n'darr;':'\\u2193',\n'dash;':'\\u2010',\n'Dashv;':'\\u2ae4',\n'dashv;':'\\u22a3',\n'dbkarow;':'\\u290f',\n'dblac;':'\\u02dd',\n'Dcaron;':'\\u010e',\n'dcaron;':'\\u010f',\n'Dcy;':'\\u0414',\n'dcy;':'\\u0434',\n'DD;':'\\u2145',\n'dd;':'\\u2146',\n'ddagger;':'\\u2021',\n'ddarr;':'\\u21ca',\n'DDotrahd;':'\\u2911',\n'ddotseq;':'\\u2a77',\n'deg':'\\xb0',\n'deg;':'\\xb0',\n'Del;':'\\u2207',\n'Delta;':'\\u0394',\n'delta;':'\\u03b4',\n'demptyv;':'\\u29b1',\n'dfisht;':'\\u297f',\n'Dfr;':'\\U0001d507',\n'dfr;':'\\U0001d521',\n'dHar;':'\\u2965',\n'dharl;':'\\u21c3',\n'dharr;':'\\u21c2',\n'DiacriticalAcute;':'\\xb4',\n'DiacriticalDot;':'\\u02d9',\n'DiacriticalDoubleAcute;':'\\u02dd',\n'DiacriticalGrave;':'`',\n'DiacriticalTilde;':'\\u02dc',\n'diam;':'\\u22c4',\n'Diamond;':'\\u22c4',\n'diamond;':'\\u22c4',\n'diamondsuit;':'\\u2666',\n'diams;':'\\u2666',\n'die;':'\\xa8',\n'DifferentialD;':'\\u2146',\n'digamma;':'\\u03dd',\n'disin;':'\
\u22f2',\n'div;':'\\xf7',\n'divide':'\\xf7',\n'divide;':'\\xf7',\n'divideontimes;':'\\u22c7',\n'divonx;':'\\u22c7',\n'DJcy;':'\\u0402',\n'djcy;':'\\u0452',\n'dlcorn;':'\\u231e',\n'dlcrop;':'\\u230d',\n'dollar;':'$',\n'Dopf;':'\\U0001d53b',\n'dopf;':'\\U0001d555',\n'Dot;':'\\xa8',\n'dot;':'\\u02d9',\n'DotDot;':'\\u20dc',\n'doteq;':'\\u2250',\n'doteqdot;':'\\u2251',\n'DotEqual;':'\\u2250',\n'dotminus;':'\\u2238',\n'dotplus;':'\\u2214',\n'dotsquare;':'\\u22a1',\n'doublebarwedge;':'\\u2306',\n'DoubleContourIntegral;':'\\u222f',\n'DoubleDot;':'\\xa8',\n'DoubleDownArrow;':'\\u21d3',\n'DoubleLeftArrow;':'\\u21d0',\n'DoubleLeftRightArrow;':'\\u21d4',\n'DoubleLeftTee;':'\\u2ae4',\n'DoubleLongLeftArrow;':'\\u27f8',\n'DoubleLongLeftRightArrow;':'\\u27fa',\n'DoubleLongRightArrow;':'\\u27f9',\n'DoubleRightArrow;':'\\u21d2',\n'DoubleRightTee;':'\\u22a8',\n'DoubleUpArrow;':'\\u21d1',\n'DoubleUpDownArrow;':'\\u21d5',\n'DoubleVerticalBar;':'\\u2225',\n'DownArrow;':'\\u2193',\n'Downarrow;':'\\u21d3',\n'downarrow;':'\\u2193',\n'DownArrowBar;':'\\u2913',\n'DownArrowUpArrow;':'\\u21f5',\n'DownBreve;':'\\u0311',\n'downdownarrows;':'\\u21ca',\n'downharpoonleft;':'\\u21c3',\n'downharpoonright;':'\\u21c2',\n'DownLeftRightVector;':'\\u2950',\n'DownLeftTeeVector;':'\\u295e',\n'DownLeftVector;':'\\u21bd',\n'DownLeftVectorBar;':'\\u2956',\n'DownRightTeeVector;':'\\u295f',\n'DownRightVector;':'\\u21c1',\n'DownRightVectorBar;':'\\u2957',\n'DownTee;':'\\u22a4',\n'DownTeeArrow;':'\\u21a7',\n'drbkarow;':'\\u2910',\n'drcorn;':'\\u231f',\n'drcrop;':'\\u230c',\n'Dscr;':'\\U0001d49f',\n'dscr;':'\\U0001d4b9',\n'DScy;':'\\u0405',\n'dscy;':'\\u0455',\n'dsol;':'\\u29f6',\n'Dstrok;':'\\u0110',\n'dstrok;':'\\u0111',\n'dtdot;':'\\u22f1',\n'dtri;':'\\u25bf',\n'dtrif;':'\\u25be',\n'duarr;':'\\u21f5',\n'duhar;':'\\u296f',\n'dwangle;':'\\u29a6',\n'DZcy;':'\\u040f',\n'dzcy;':'\\u045f',\n'dzigrarr;':'\\u27ff',\n'Eacute':'\\xc9',\n'eacute':'\\xe9',\n'Eacute;':'\\xc9',\n'eacute;':'\\xe9',\n'easter;':'\\u2a6e',\n'Ecaron;':'\\u011a',\n'ecaron;':'\\u011b',\n'ecir;':'\\u2256',\n'Ecirc':'\\xca',\n'ecirc':'\\xea',\n'Ecirc;':'\\xca',\n'ecirc;':'\\xea',\n'ecolon;':'\\u2255',\n'Ecy;':'\\u042d',\n'ecy;':'\\u044d',\n'eDDot;':'\\u2a77',\n'Edot;':'\\u0116',\n'eDot;':'\\u2251',\n'edot;':'\\u0117',\n'ee;':'\\u2147',\n'efDot;':'\\u2252',\n'Efr;':'\\U0001d508',\n'efr;':'\\U0001d522',\n'eg;':'\\u2a9a',\n'Egrave':'\\xc8',\n'egrave':'\\xe8',\n'Egrave;':'\\xc8',\n'egrave;':'\\xe8',\n'egs;':'\\u2a96',\n'egsdot;':'\\u2a98',\n'el;':'\\u2a99',\n'Element;':'\\u2208',\n'elinters;':'\\u23e7',\n'ell;':'\\u2113',\n'els;':'\\u2a95',\n'elsdot;':'\\u2a97',\n'Emacr;':'\\u0112',\n'emacr;':'\\u0113',\n'empty;':'\\u2205',\n'emptyset;':'\\u2205',\n'EmptySmallSquare;':'\\u25fb',\n'emptyv;':'\\u2205',\n'EmptyVerySmallSquare;':'\\u25ab',\n'emsp13;':'\\u2004',\n'emsp14;':'\\u2005',\n'emsp;':'\\u2003',\n'ENG;':'\\u014a',\n'eng;':'\\u014b',\n'ensp;':'\\u2002',\n'Eogon;':'\\u0118',\n'eogon;':'\\u0119',\n'Eopf;':'\\U0001d53c',\n'eopf;':'\\U0001d556',\n'epar;':'\\u22d5',\n'eparsl;':'\\u29e3',\n'eplus;':'\\u2a71',\n'epsi;':'\\u03b5',\n'Epsilon;':'\\u0395',\n'epsilon;':'\\u03b5',\n'epsiv;':'\\u03f5',\n'eqcirc;':'\\u2256',\n'eqcolon;':'\\u2255',\n'eqsim;':'\\u2242',\n'eqslantgtr;':'\\u2a96',\n'eqslantless;':'\\u2a95',\n'Equal;':'\\u2a75',\n'equals;':'=',\n'EqualTilde;':'\\u2242',\n'equest;':'\\u225f',\n'Equilibrium;':'\\u21cc',\n'equiv;':'\\u2261',\n'equivDD;':'\\u2a78',\n'eqvparsl;':'\\u29e5',\n'erarr;':'\\u2971',\n'erDot;':'\\u2253',\n'Escr;':'\\u2130',\n'escr;':'\\u212f',\n'esdot;':'\\u22
50',\n'Esim;':'\\u2a73',\n'esim;':'\\u2242',\n'Eta;':'\\u0397',\n'eta;':'\\u03b7',\n'ETH':'\\xd0',\n'eth':'\\xf0',\n'ETH;':'\\xd0',\n'eth;':'\\xf0',\n'Euml':'\\xcb',\n'euml':'\\xeb',\n'Euml;':'\\xcb',\n'euml;':'\\xeb',\n'euro;':'\\u20ac',\n'excl;':'!',\n'exist;':'\\u2203',\n'Exists;':'\\u2203',\n'expectation;':'\\u2130',\n'ExponentialE;':'\\u2147',\n'exponentiale;':'\\u2147',\n'fallingdotseq;':'\\u2252',\n'Fcy;':'\\u0424',\n'fcy;':'\\u0444',\n'female;':'\\u2640',\n'ffilig;':'\\ufb03',\n'fflig;':'\\ufb00',\n'ffllig;':'\\ufb04',\n'Ffr;':'\\U0001d509',\n'ffr;':'\\U0001d523',\n'filig;':'\\ufb01',\n'FilledSmallSquare;':'\\u25fc',\n'FilledVerySmallSquare;':'\\u25aa',\n'fjlig;':'fj',\n'flat;':'\\u266d',\n'fllig;':'\\ufb02',\n'fltns;':'\\u25b1',\n'fnof;':'\\u0192',\n'Fopf;':'\\U0001d53d',\n'fopf;':'\\U0001d557',\n'ForAll;':'\\u2200',\n'forall;':'\\u2200',\n'fork;':'\\u22d4',\n'forkv;':'\\u2ad9',\n'Fouriertrf;':'\\u2131',\n'fpartint;':'\\u2a0d',\n'frac12':'\\xbd',\n'frac12;':'\\xbd',\n'frac13;':'\\u2153',\n'frac14':'\\xbc',\n'frac14;':'\\xbc',\n'frac15;':'\\u2155',\n'frac16;':'\\u2159',\n'frac18;':'\\u215b',\n'frac23;':'\\u2154',\n'frac25;':'\\u2156',\n'frac34':'\\xbe',\n'frac34;':'\\xbe',\n'frac35;':'\\u2157',\n'frac38;':'\\u215c',\n'frac45;':'\\u2158',\n'frac56;':'\\u215a',\n'frac58;':'\\u215d',\n'frac78;':'\\u215e',\n'frasl;':'\\u2044',\n'frown;':'\\u2322',\n'Fscr;':'\\u2131',\n'fscr;':'\\U0001d4bb',\n'gacute;':'\\u01f5',\n'Gamma;':'\\u0393',\n'gamma;':'\\u03b3',\n'Gammad;':'\\u03dc',\n'gammad;':'\\u03dd',\n'gap;':'\\u2a86',\n'Gbreve;':'\\u011e',\n'gbreve;':'\\u011f',\n'Gcedil;':'\\u0122',\n'Gcirc;':'\\u011c',\n'gcirc;':'\\u011d',\n'Gcy;':'\\u0413',\n'gcy;':'\\u0433',\n'Gdot;':'\\u0120',\n'gdot;':'\\u0121',\n'gE;':'\\u2267',\n'ge;':'\\u2265',\n'gEl;':'\\u2a8c',\n'gel;':'\\u22db',\n'geq;':'\\u2265',\n'geqq;':'\\u2267',\n'geqslant;':'\\u2a7e',\n'ges;':'\\u2a7e',\n'gescc;':'\\u2aa9',\n'gesdot;':'\\u2a80',\n'gesdoto;':'\\u2a82',\n'gesdotol;':'\\u2a84',\n'gesl;':'\\u22db\\ufe00',\n'gesles;':'\\u2a94',\n'Gfr;':'\\U0001d50a',\n'gfr;':'\\U0001d524',\n'Gg;':'\\u22d9',\n'gg;':'\\u226b',\n'ggg;':'\\u22d9',\n'gimel;':'\\u2137',\n'GJcy;':'\\u0403',\n'gjcy;':'\\u0453',\n'gl;':'\\u2277',\n'gla;':'\\u2aa5',\n'glE;':'\\u2a92',\n'glj;':'\\u2aa4',\n'gnap;':'\\u2a8a',\n'gnapprox;':'\\u2a8a',\n'gnE;':'\\u2269',\n'gne;':'\\u2a88',\n'gneq;':'\\u2a88',\n'gneqq;':'\\u2269',\n'gnsim;':'\\u22e7',\n'Gopf;':'\\U0001d53e',\n'gopf;':'\\U0001d558',\n'grave;':'`',\n'GreaterEqual;':'\\u2265',\n'GreaterEqualLess;':'\\u22db',\n'GreaterFullEqual;':'\\u2267',\n'GreaterGreater;':'\\u2aa2',\n'GreaterLess;':'\\u2277',\n'GreaterSlantEqual;':'\\u2a7e',\n'GreaterTilde;':'\\u2273',\n'Gscr;':'\\U0001d4a2',\n'gscr;':'\\u210a',\n'gsim;':'\\u2273',\n'gsime;':'\\u2a8e',\n'gsiml;':'\\u2a90',\n'GT':'>',\n'gt':'>',\n'GT;':'>',\n'Gt;':'\\u226b',\n'gt;':'>',\n'gtcc;':'\\u2aa7',\n'gtcir;':'\\u2a7a',\n'gtdot;':'\\u22d7',\n'gtlPar;':'\\u2995',\n'gtquest;':'\\u2a7c',\n'gtrapprox;':'\\u2a86',\n'gtrarr;':'\\u2978',\n'gtrdot;':'\\u22d7',\n'gtreqless;':'\\u22db',\n'gtreqqless;':'\\u2a8c',\n'gtrless;':'\\u2277',\n'gtrsim;':'\\u2273',\n'gvertneqq;':'\\u2269\\ufe00',\n'gvnE;':'\\u2269\\ufe00',\n'Hacek;':'\\u02c7',\n'hairsp;':'\\u200a',\n'half;':'\\xbd',\n'hamilt;':'\\u210b',\n'HARDcy;':'\\u042a',\n'hardcy;':'\\u044a',\n'hArr;':'\\u21d4',\n'harr;':'\\u2194',\n'harrcir;':'\\u2948',\n'harrw;':'\\u21ad',\n'Hat;':'^',\n'hbar;':'\\u210f',\n'Hcirc;':'\\u0124',\n'hcirc;':'\\u0125',\n'hearts;':'\\u2665',\n'heartsuit;':'\\u2665',\n'hellip;':'\\u2026',\n'hercon;':'\\u22b9
',\n'Hfr;':'\\u210c',\n'hfr;':'\\U0001d525',\n'HilbertSpace;':'\\u210b',\n'hksearow;':'\\u2925',\n'hkswarow;':'\\u2926',\n'hoarr;':'\\u21ff',\n'homtht;':'\\u223b',\n'hookleftarrow;':'\\u21a9',\n'hookrightarrow;':'\\u21aa',\n'Hopf;':'\\u210d',\n'hopf;':'\\U0001d559',\n'horbar;':'\\u2015',\n'HorizontalLine;':'\\u2500',\n'Hscr;':'\\u210b',\n'hscr;':'\\U0001d4bd',\n'hslash;':'\\u210f',\n'Hstrok;':'\\u0126',\n'hstrok;':'\\u0127',\n'HumpDownHump;':'\\u224e',\n'HumpEqual;':'\\u224f',\n'hybull;':'\\u2043',\n'hyphen;':'\\u2010',\n'Iacute':'\\xcd',\n'iacute':'\\xed',\n'Iacute;':'\\xcd',\n'iacute;':'\\xed',\n'ic;':'\\u2063',\n'Icirc':'\\xce',\n'icirc':'\\xee',\n'Icirc;':'\\xce',\n'icirc;':'\\xee',\n'Icy;':'\\u0418',\n'icy;':'\\u0438',\n'Idot;':'\\u0130',\n'IEcy;':'\\u0415',\n'iecy;':'\\u0435',\n'iexcl':'\\xa1',\n'iexcl;':'\\xa1',\n'iff;':'\\u21d4',\n'Ifr;':'\\u2111',\n'ifr;':'\\U0001d526',\n'Igrave':'\\xcc',\n'igrave':'\\xec',\n'Igrave;':'\\xcc',\n'igrave;':'\\xec',\n'ii;':'\\u2148',\n'iiiint;':'\\u2a0c',\n'iiint;':'\\u222d',\n'iinfin;':'\\u29dc',\n'iiota;':'\\u2129',\n'IJlig;':'\\u0132',\n'ijlig;':'\\u0133',\n'Im;':'\\u2111',\n'Imacr;':'\\u012a',\n'imacr;':'\\u012b',\n'image;':'\\u2111',\n'ImaginaryI;':'\\u2148',\n'imagline;':'\\u2110',\n'imagpart;':'\\u2111',\n'imath;':'\\u0131',\n'imof;':'\\u22b7',\n'imped;':'\\u01b5',\n'Implies;':'\\u21d2',\n'in;':'\\u2208',\n'incare;':'\\u2105',\n'infin;':'\\u221e',\n'infintie;':'\\u29dd',\n'inodot;':'\\u0131',\n'Int;':'\\u222c',\n'int;':'\\u222b',\n'intcal;':'\\u22ba',\n'integers;':'\\u2124',\n'Integral;':'\\u222b',\n'intercal;':'\\u22ba',\n'Intersection;':'\\u22c2',\n'intlarhk;':'\\u2a17',\n'intprod;':'\\u2a3c',\n'InvisibleComma;':'\\u2063',\n'InvisibleTimes;':'\\u2062',\n'IOcy;':'\\u0401',\n'iocy;':'\\u0451',\n'Iogon;':'\\u012e',\n'iogon;':'\\u012f',\n'Iopf;':'\\U0001d540',\n'iopf;':'\\U0001d55a',\n'Iota;':'\\u0399',\n'iota;':'\\u03b9',\n'iprod;':'\\u2a3c',\n'iquest':'\\xbf',\n'iquest;':'\\xbf',\n'Iscr;':'\\u2110',\n'iscr;':'\\U0001d4be',\n'isin;':'\\u2208',\n'isindot;':'\\u22f5',\n'isinE;':'\\u22f9',\n'isins;':'\\u22f4',\n'isinsv;':'\\u22f3',\n'isinv;':'\\u2208',\n'it;':'\\u2062',\n'Itilde;':'\\u0128',\n'itilde;':'\\u0129',\n'Iukcy;':'\\u0406',\n'iukcy;':'\\u0456',\n'Iuml':'\\xcf',\n'iuml':'\\xef',\n'Iuml;':'\\xcf',\n'iuml;':'\\xef',\n'Jcirc;':'\\u0134',\n'jcirc;':'\\u0135',\n'Jcy;':'\\u0419',\n'jcy;':'\\u0439',\n'Jfr;':'\\U0001d50d',\n'jfr;':'\\U0001d527',\n'jmath;':'\\u0237',\n'Jopf;':'\\U0001d541',\n'jopf;':'\\U0001d55b',\n'Jscr;':'\\U0001d4a5',\n'jscr;':'\\U0001d4bf',\n'Jsercy;':'\\u0408',\n'jsercy;':'\\u0458',\n'Jukcy;':'\\u0404',\n'jukcy;':'\\u0454',\n'Kappa;':'\\u039a',\n'kappa;':'\\u03ba',\n'kappav;':'\\u03f0',\n'Kcedil;':'\\u0136',\n'kcedil;':'\\u0137',\n'Kcy;':'\\u041a',\n'kcy;':'\\u043a',\n'Kfr;':'\\U0001d50e',\n'kfr;':'\\U0001d528',\n'kgreen;':'\\u0138',\n'KHcy;':'\\u0425',\n'khcy;':'\\u0445',\n'KJcy;':'\\u040c',\n'kjcy;':'\\u045c',\n'Kopf;':'\\U0001d542',\n'kopf;':'\\U0001d55c',\n'Kscr;':'\\U0001d4a6',\n'kscr;':'\\U0001d4c0',\n'lAarr;':'\\u21da',\n'Lacute;':'\\u0139',\n'lacute;':'\\u013a',\n'laemptyv;':'\\u29b4',\n'lagran;':'\\u2112',\n'Lambda;':'\\u039b',\n'lambda;':'\\u03bb',\n'Lang;':'\\u27ea',\n'lang;':'\\u27e8',\n'langd;':'\\u2991',\n'langle;':'\\u27e8',\n'lap;':'\\u2a85',\n'Laplacetrf;':'\\u2112',\n'laquo':'\\xab',\n'laquo;':'\\xab',\n'Larr;':'\\u219e',\n'lArr;':'\\u21d0',\n'larr;':'\\u2190',\n'larrb;':'\\u21e4',\n'larrbfs;':'\\u291f',\n'larrfs;':'\\u291d',\n'larrhk;':'\\u21a9',\n'larrlp;':'\\u21ab',\n'larrpl;':'\\u2939',\n'larrsim;':'\\u29
73',\n'larrtl;':'\\u21a2',\n'lat;':'\\u2aab',\n'lAtail;':'\\u291b',\n'latail;':'\\u2919',\n'late;':'\\u2aad',\n'lates;':'\\u2aad\\ufe00',\n'lBarr;':'\\u290e',\n'lbarr;':'\\u290c',\n'lbbrk;':'\\u2772',\n'lbrace;':'{',\n'lbrack;':'[',\n'lbrke;':'\\u298b',\n'lbrksld;':'\\u298f',\n'lbrkslu;':'\\u298d',\n'Lcaron;':'\\u013d',\n'lcaron;':'\\u013e',\n'Lcedil;':'\\u013b',\n'lcedil;':'\\u013c',\n'lceil;':'\\u2308',\n'lcub;':'{',\n'Lcy;':'\\u041b',\n'lcy;':'\\u043b',\n'ldca;':'\\u2936',\n'ldquo;':'\\u201c',\n'ldquor;':'\\u201e',\n'ldrdhar;':'\\u2967',\n'ldrushar;':'\\u294b',\n'ldsh;':'\\u21b2',\n'lE;':'\\u2266',\n'le;':'\\u2264',\n'LeftAngleBracket;':'\\u27e8',\n'LeftArrow;':'\\u2190',\n'Leftarrow;':'\\u21d0',\n'leftarrow;':'\\u2190',\n'LeftArrowBar;':'\\u21e4',\n'LeftArrowRightArrow;':'\\u21c6',\n'leftarrowtail;':'\\u21a2',\n'LeftCeiling;':'\\u2308',\n'LeftDoubleBracket;':'\\u27e6',\n'LeftDownTeeVector;':'\\u2961',\n'LeftDownVector;':'\\u21c3',\n'LeftDownVectorBar;':'\\u2959',\n'LeftFloor;':'\\u230a',\n'leftharpoondown;':'\\u21bd',\n'leftharpoonup;':'\\u21bc',\n'leftleftarrows;':'\\u21c7',\n'LeftRightArrow;':'\\u2194',\n'Leftrightarrow;':'\\u21d4',\n'leftrightarrow;':'\\u2194',\n'leftrightarrows;':'\\u21c6',\n'leftrightharpoons;':'\\u21cb',\n'leftrightsquigarrow;':'\\u21ad',\n'LeftRightVector;':'\\u294e',\n'LeftTee;':'\\u22a3',\n'LeftTeeArrow;':'\\u21a4',\n'LeftTeeVector;':'\\u295a',\n'leftthreetimes;':'\\u22cb',\n'LeftTriangle;':'\\u22b2',\n'LeftTriangleBar;':'\\u29cf',\n'LeftTriangleEqual;':'\\u22b4',\n'LeftUpDownVector;':'\\u2951',\n'LeftUpTeeVector;':'\\u2960',\n'LeftUpVector;':'\\u21bf',\n'LeftUpVectorBar;':'\\u2958',\n'LeftVector;':'\\u21bc',\n'LeftVectorBar;':'\\u2952',\n'lEg;':'\\u2a8b',\n'leg;':'\\u22da',\n'leq;':'\\u2264',\n'leqq;':'\\u2266',\n'leqslant;':'\\u2a7d',\n'les;':'\\u2a7d',\n'lescc;':'\\u2aa8',\n'lesdot;':'\\u2a7f',\n'lesdoto;':'\\u2a81',\n'lesdotor;':'\\u2a83',\n'lesg;':'\\u22da\\ufe00',\n'lesges;':'\\u2a93',\n'lessapprox;':'\\u2a85',\n'lessdot;':'\\u22d6',\n'lesseqgtr;':'\\u22da',\n'lesseqqgtr;':'\\u2a8b',\n'LessEqualGreater;':'\\u22da',\n'LessFullEqual;':'\\u2266',\n'LessGreater;':'\\u2276',\n'lessgtr;':'\\u2276',\n'LessLess;':'\\u2aa1',\n'lesssim;':'\\u2272',\n'LessSlantEqual;':'\\u2a7d',\n'LessTilde;':'\\u2272',\n'lfisht;':'\\u297c',\n'lfloor;':'\\u230a',\n'Lfr;':'\\U0001d50f',\n'lfr;':'\\U0001d529',\n'lg;':'\\u2276',\n'lgE;':'\\u2a91',\n'lHar;':'\\u2962',\n'lhard;':'\\u21bd',\n'lharu;':'\\u21bc',\n'lharul;':'\\u296a',\n'lhblk;':'\\u2584',\n'LJcy;':'\\u0409',\n'ljcy;':'\\u0459',\n'Ll;':'\\u22d8',\n'll;':'\\u226a',\n'llarr;':'\\u21c7',\n'llcorner;':'\\u231e',\n'Lleftarrow;':'\\u21da',\n'llhard;':'\\u296b',\n'lltri;':'\\u25fa',\n'Lmidot;':'\\u013f',\n'lmidot;':'\\u0140',\n'lmoust;':'\\u23b0',\n'lmoustache;':'\\u23b0',\n'lnap;':'\\u2a89',\n'lnapprox;':'\\u2a89',\n'lnE;':'\\u2268',\n'lne;':'\\u2a87',\n'lneq;':'\\u2a87',\n'lneqq;':'\\u2268',\n'lnsim;':'\\u22e6',\n'loang;':'\\u27ec',\n'loarr;':'\\u21fd',\n'lobrk;':'\\u27e6',\n'LongLeftArrow;':'\\u27f5',\n'Longleftarrow;':'\\u27f8',\n'longleftarrow;':'\\u27f5',\n'LongLeftRightArrow;':'\\u27f7',\n'Longleftrightarrow;':'\\u27fa',\n'longleftrightarrow;':'\\u27f7',\n'longmapsto;':'\\u27fc',\n'LongRightArrow;':'\\u27f6',\n'Longrightarrow;':'\\u27f9',\n'longrightarrow;':'\\u27f6',\n'looparrowleft;':'\\u21ab',\n'looparrowright;':'\\u21ac',\n'lopar;':'\\u2985',\n'Lopf;':'\\U0001d543',\n'lopf;':'\\U0001d55d',\n'loplus;':'\\u2a2d',\n'lotimes;':'\\u2a34',\n'lowast;':'\\u2217',\n'lowbar;':'_',\n'LowerLeftArrow;':'\\u2199',\n'LowerRightArrow;':
'\\u2198',\n'loz;':'\\u25ca',\n'lozenge;':'\\u25ca',\n'lozf;':'\\u29eb',\n'lpar;':'(',\n'lparlt;':'\\u2993',\n'lrarr;':'\\u21c6',\n'lrcorner;':'\\u231f',\n'lrhar;':'\\u21cb',\n'lrhard;':'\\u296d',\n'lrm;':'\\u200e',\n'lrtri;':'\\u22bf',\n'lsaquo;':'\\u2039',\n'Lscr;':'\\u2112',\n'lscr;':'\\U0001d4c1',\n'Lsh;':'\\u21b0',\n'lsh;':'\\u21b0',\n'lsim;':'\\u2272',\n'lsime;':'\\u2a8d',\n'lsimg;':'\\u2a8f',\n'lsqb;':'[',\n'lsquo;':'\\u2018',\n'lsquor;':'\\u201a',\n'Lstrok;':'\\u0141',\n'lstrok;':'\\u0142',\n'LT':'<',\n'lt':'<',\n'LT;':'<',\n'Lt;':'\\u226a',\n'lt;':'<',\n'ltcc;':'\\u2aa6',\n'ltcir;':'\\u2a79',\n'ltdot;':'\\u22d6',\n'lthree;':'\\u22cb',\n'ltimes;':'\\u22c9',\n'ltlarr;':'\\u2976',\n'ltquest;':'\\u2a7b',\n'ltri;':'\\u25c3',\n'ltrie;':'\\u22b4',\n'ltrif;':'\\u25c2',\n'ltrPar;':'\\u2996',\n'lurdshar;':'\\u294a',\n'luruhar;':'\\u2966',\n'lvertneqq;':'\\u2268\\ufe00',\n'lvnE;':'\\u2268\\ufe00',\n'macr':'\\xaf',\n'macr;':'\\xaf',\n'male;':'\\u2642',\n'malt;':'\\u2720',\n'maltese;':'\\u2720',\n'Map;':'\\u2905',\n'map;':'\\u21a6',\n'mapsto;':'\\u21a6',\n'mapstodown;':'\\u21a7',\n'mapstoleft;':'\\u21a4',\n'mapstoup;':'\\u21a5',\n'marker;':'\\u25ae',\n'mcomma;':'\\u2a29',\n'Mcy;':'\\u041c',\n'mcy;':'\\u043c',\n'mdash;':'\\u2014',\n'mDDot;':'\\u223a',\n'measuredangle;':'\\u2221',\n'MediumSpace;':'\\u205f',\n'Mellintrf;':'\\u2133',\n'Mfr;':'\\U0001d510',\n'mfr;':'\\U0001d52a',\n'mho;':'\\u2127',\n'micro':'\\xb5',\n'micro;':'\\xb5',\n'mid;':'\\u2223',\n'midast;':'*',\n'midcir;':'\\u2af0',\n'middot':'\\xb7',\n'middot;':'\\xb7',\n'minus;':'\\u2212',\n'minusb;':'\\u229f',\n'minusd;':'\\u2238',\n'minusdu;':'\\u2a2a',\n'MinusPlus;':'\\u2213',\n'mlcp;':'\\u2adb',\n'mldr;':'\\u2026',\n'mnplus;':'\\u2213',\n'models;':'\\u22a7',\n'Mopf;':'\\U0001d544',\n'mopf;':'\\U0001d55e',\n'mp;':'\\u2213',\n'Mscr;':'\\u2133',\n'mscr;':'\\U0001d4c2',\n'mstpos;':'\\u223e',\n'Mu;':'\\u039c',\n'mu;':'\\u03bc',\n'multimap;':'\\u22b8',\n'mumap;':'\\u22b8',\n'nabla;':'\\u2207',\n'Nacute;':'\\u0143',\n'nacute;':'\\u0144',\n'nang;':'\\u2220\\u20d2',\n'nap;':'\\u2249',\n'napE;':'\\u2a70\\u0338',\n'napid;':'\\u224b\\u0338',\n'napos;':'\\u0149',\n'napprox;':'\\u2249',\n'natur;':'\\u266e',\n'natural;':'\\u266e',\n'naturals;':'\\u2115',\n'nbsp':'\\xa0',\n'nbsp;':'\\xa0',\n'nbump;':'\\u224e\\u0338',\n'nbumpe;':'\\u224f\\u0338',\n'ncap;':'\\u2a43',\n'Ncaron;':'\\u0147',\n'ncaron;':'\\u0148',\n'Ncedil;':'\\u0145',\n'ncedil;':'\\u0146',\n'ncong;':'\\u2247',\n'ncongdot;':'\\u2a6d\\u0338',\n'ncup;':'\\u2a42',\n'Ncy;':'\\u041d',\n'ncy;':'\\u043d',\n'ndash;':'\\u2013',\n'ne;':'\\u2260',\n'nearhk;':'\\u2924',\n'neArr;':'\\u21d7',\n'nearr;':'\\u2197',\n'nearrow;':'\\u2197',\n'nedot;':'\\u2250\\u0338',\n'NegativeMediumSpace;':'\\u200b',\n'NegativeThickSpace;':'\\u200b',\n'NegativeThinSpace;':'\\u200b',\n'NegativeVeryThinSpace;':'\\u200b',\n'nequiv;':'\\u2262',\n'nesear;':'\\u2928',\n'nesim;':'\\u2242\\u0338',\n'NestedGreaterGreater;':'\\u226b',\n'NestedLessLess;':'\\u226a',\n'NewLine;':'\\n',\n'nexist;':'\\u2204',\n'nexists;':'\\u2204',\n'Nfr;':'\\U0001d511',\n'nfr;':'\\U0001d52b',\n'ngE;':'\\u2267\\u0338',\n'nge;':'\\u2271',\n'ngeq;':'\\u2271',\n'ngeqq;':'\\u2267\\u0338',\n'ngeqslant;':'\\u2a7e\\u0338',\n'nges;':'\\u2a7e\\u0338',\n'nGg;':'\\u22d9\\u0338',\n'ngsim;':'\\u2275',\n'nGt;':'\\u226b\\u20d2',\n'ngt;':'\\u226f',\n'ngtr;':'\\u226f',\n'nGtv;':'\\u226b\\u0338',\n'nhArr;':'\\u21ce',\n'nharr;':'\\u21ae',\n'nhpar;':'\\u2af2',\n'ni;':'\\u220b',\n'nis;':'\\u22fc',\n'nisd;':'\\u22fa',\n'niv;':'\\u220b',\n'NJcy;':'\\u040a',\n'njcy;':'\\u045a',\
n'nlArr;':'\\u21cd',\n'nlarr;':'\\u219a',\n'nldr;':'\\u2025',\n'nlE;':'\\u2266\\u0338',\n'nle;':'\\u2270',\n'nLeftarrow;':'\\u21cd',\n'nleftarrow;':'\\u219a',\n'nLeftrightarrow;':'\\u21ce',\n'nleftrightarrow;':'\\u21ae',\n'nleq;':'\\u2270',\n'nleqq;':'\\u2266\\u0338',\n'nleqslant;':'\\u2a7d\\u0338',\n'nles;':'\\u2a7d\\u0338',\n'nless;':'\\u226e',\n'nLl;':'\\u22d8\\u0338',\n'nlsim;':'\\u2274',\n'nLt;':'\\u226a\\u20d2',\n'nlt;':'\\u226e',\n'nltri;':'\\u22ea',\n'nltrie;':'\\u22ec',\n'nLtv;':'\\u226a\\u0338',\n'nmid;':'\\u2224',\n'NoBreak;':'\\u2060',\n'NonBreakingSpace;':'\\xa0',\n'Nopf;':'\\u2115',\n'nopf;':'\\U0001d55f',\n'not':'\\xac',\n'Not;':'\\u2aec',\n'not;':'\\xac',\n'NotCongruent;':'\\u2262',\n'NotCupCap;':'\\u226d',\n'NotDoubleVerticalBar;':'\\u2226',\n'NotElement;':'\\u2209',\n'NotEqual;':'\\u2260',\n'NotEqualTilde;':'\\u2242\\u0338',\n'NotExists;':'\\u2204',\n'NotGreater;':'\\u226f',\n'NotGreaterEqual;':'\\u2271',\n'NotGreaterFullEqual;':'\\u2267\\u0338',\n'NotGreaterGreater;':'\\u226b\\u0338',\n'NotGreaterLess;':'\\u2279',\n'NotGreaterSlantEqual;':'\\u2a7e\\u0338',\n'NotGreaterTilde;':'\\u2275',\n'NotHumpDownHump;':'\\u224e\\u0338',\n'NotHumpEqual;':'\\u224f\\u0338',\n'notin;':'\\u2209',\n'notindot;':'\\u22f5\\u0338',\n'notinE;':'\\u22f9\\u0338',\n'notinva;':'\\u2209',\n'notinvb;':'\\u22f7',\n'notinvc;':'\\u22f6',\n'NotLeftTriangle;':'\\u22ea',\n'NotLeftTriangleBar;':'\\u29cf\\u0338',\n'NotLeftTriangleEqual;':'\\u22ec',\n'NotLess;':'\\u226e',\n'NotLessEqual;':'\\u2270',\n'NotLessGreater;':'\\u2278',\n'NotLessLess;':'\\u226a\\u0338',\n'NotLessSlantEqual;':'\\u2a7d\\u0338',\n'NotLessTilde;':'\\u2274',\n'NotNestedGreaterGreater;':'\\u2aa2\\u0338',\n'NotNestedLessLess;':'\\u2aa1\\u0338',\n'notni;':'\\u220c',\n'notniva;':'\\u220c',\n'notnivb;':'\\u22fe',\n'notnivc;':'\\u22fd',\n'NotPrecedes;':'\\u2280',\n'NotPrecedesEqual;':'\\u2aaf\\u0338',\n'NotPrecedesSlantEqual;':'\\u22e0',\n'NotReverseElement;':'\\u220c',\n'NotRightTriangle;':'\\u22eb',\n'NotRightTriangleBar;':'\\u29d0\\u0338',\n'NotRightTriangleEqual;':'\\u22ed',\n'NotSquareSubset;':'\\u228f\\u0338',\n'NotSquareSubsetEqual;':'\\u22e2',\n'NotSquareSuperset;':'\\u2290\\u0338',\n'NotSquareSupersetEqual;':'\\u22e3',\n'NotSubset;':'\\u2282\\u20d2',\n'NotSubsetEqual;':'\\u2288',\n'NotSucceeds;':'\\u2281',\n'NotSucceedsEqual;':'\\u2ab0\\u0338',\n'NotSucceedsSlantEqual;':'\\u22e1',\n'NotSucceedsTilde;':'\\u227f\\u0338',\n'NotSuperset;':'\\u2283\\u20d2',\n'NotSupersetEqual;':'\\u2289',\n'NotTilde;':'\\u2241',\n'NotTildeEqual;':'\\u2244',\n'NotTildeFullEqual;':'\\u2247',\n'NotTildeTilde;':'\\u2249',\n'NotVerticalBar;':'\\u2224',\n'npar;':'\\u2226',\n'nparallel;':'\\u2226',\n'nparsl;':'\\u2afd\\u20e5',\n'npart;':'\\u2202\\u0338',\n'npolint;':'\\u2a14',\n'npr;':'\\u2280',\n'nprcue;':'\\u22e0',\n'npre;':'\\u2aaf\\u0338',\n'nprec;':'\\u2280',\n'npreceq;':'\\u2aaf\\u0338',\n'nrArr;':'\\u21cf',\n'nrarr;':'\\u219b',\n'nrarrc;':'\\u2933\\u0338',\n'nrarrw;':'\\u219d\\u0338',\n'nRightarrow;':'\\u21cf',\n'nrightarrow;':'\\u219b',\n'nrtri;':'\\u22eb',\n'nrtrie;':'\\u22ed',\n'nsc;':'\\u2281',\n'nsccue;':'\\u22e1',\n'nsce;':'\\u2ab0\\u0338',\n'Nscr;':'\\U0001d4a9',\n'nscr;':'\\U0001d4c3',\n'nshortmid;':'\\u2224',\n'nshortparallel;':'\\u2226',\n'nsim;':'\\u2241',\n'nsime;':'\\u2244',\n'nsimeq;':'\\u2244',\n'nsmid;':'\\u2224',\n'nspar;':'\\u2226',\n'nsqsube;':'\\u22e2',\n'nsqsupe;':'\\u22e3',\n'nsub;':'\\u2284',\n'nsubE;':'\\u2ac5\\u0338',\n'nsube;':'\\u2288',\n'nsubset;':'\\u2282\\u20d2',\n'nsubseteq;':'\\u2288',\n'nsubseteqq;':'\\u2ac5\\u0338',\n'nsucc;
':'\\u2281',\n'nsucceq;':'\\u2ab0\\u0338',\n'nsup;':'\\u2285',\n'nsupE;':'\\u2ac6\\u0338',\n'nsupe;':'\\u2289',\n'nsupset;':'\\u2283\\u20d2',\n'nsupseteq;':'\\u2289',\n'nsupseteqq;':'\\u2ac6\\u0338',\n'ntgl;':'\\u2279',\n'Ntilde':'\\xd1',\n'ntilde':'\\xf1',\n'Ntilde;':'\\xd1',\n'ntilde;':'\\xf1',\n'ntlg;':'\\u2278',\n'ntriangleleft;':'\\u22ea',\n'ntrianglelefteq;':'\\u22ec',\n'ntriangleright;':'\\u22eb',\n'ntrianglerighteq;':'\\u22ed',\n'Nu;':'\\u039d',\n'nu;':'\\u03bd',\n'num;':'#',\n'numero;':'\\u2116',\n'numsp;':'\\u2007',\n'nvap;':'\\u224d\\u20d2',\n'nVDash;':'\\u22af',\n'nVdash;':'\\u22ae',\n'nvDash;':'\\u22ad',\n'nvdash;':'\\u22ac',\n'nvge;':'\\u2265\\u20d2',\n'nvgt;':'>\\u20d2',\n'nvHarr;':'\\u2904',\n'nvinfin;':'\\u29de',\n'nvlArr;':'\\u2902',\n'nvle;':'\\u2264\\u20d2',\n'nvlt;':'<\\u20d2',\n'nvltrie;':'\\u22b4\\u20d2',\n'nvrArr;':'\\u2903',\n'nvrtrie;':'\\u22b5\\u20d2',\n'nvsim;':'\\u223c\\u20d2',\n'nwarhk;':'\\u2923',\n'nwArr;':'\\u21d6',\n'nwarr;':'\\u2196',\n'nwarrow;':'\\u2196',\n'nwnear;':'\\u2927',\n'Oacute':'\\xd3',\n'oacute':'\\xf3',\n'Oacute;':'\\xd3',\n'oacute;':'\\xf3',\n'oast;':'\\u229b',\n'ocir;':'\\u229a',\n'Ocirc':'\\xd4',\n'ocirc':'\\xf4',\n'Ocirc;':'\\xd4',\n'ocirc;':'\\xf4',\n'Ocy;':'\\u041e',\n'ocy;':'\\u043e',\n'odash;':'\\u229d',\n'Odblac;':'\\u0150',\n'odblac;':'\\u0151',\n'odiv;':'\\u2a38',\n'odot;':'\\u2299',\n'odsold;':'\\u29bc',\n'OElig;':'\\u0152',\n'oelig;':'\\u0153',\n'ofcir;':'\\u29bf',\n'Ofr;':'\\U0001d512',\n'ofr;':'\\U0001d52c',\n'ogon;':'\\u02db',\n'Ograve':'\\xd2',\n'ograve':'\\xf2',\n'Ograve;':'\\xd2',\n'ograve;':'\\xf2',\n'ogt;':'\\u29c1',\n'ohbar;':'\\u29b5',\n'ohm;':'\\u03a9',\n'oint;':'\\u222e',\n'olarr;':'\\u21ba',\n'olcir;':'\\u29be',\n'olcross;':'\\u29bb',\n'oline;':'\\u203e',\n'olt;':'\\u29c0',\n'Omacr;':'\\u014c',\n'omacr;':'\\u014d',\n'Omega;':'\\u03a9',\n'omega;':'\\u03c9',\n'Omicron;':'\\u039f',\n'omicron;':'\\u03bf',\n'omid;':'\\u29b6',\n'ominus;':'\\u2296',\n'Oopf;':'\\U0001d546',\n'oopf;':'\\U0001d560',\n'opar;':'\\u29b7',\n'OpenCurlyDoubleQuote;':'\\u201c',\n'OpenCurlyQuote;':'\\u2018',\n'operp;':'\\u29b9',\n'oplus;':'\\u2295',\n'Or;':'\\u2a54',\n'or;':'\\u2228',\n'orarr;':'\\u21bb',\n'ord;':'\\u2a5d',\n'order;':'\\u2134',\n'orderof;':'\\u2134',\n'ordf':'\\xaa',\n'ordf;':'\\xaa',\n'ordm':'\\xba',\n'ordm;':'\\xba',\n'origof;':'\\u22b6',\n'oror;':'\\u2a56',\n'orslope;':'\\u2a57',\n'orv;':'\\u2a5b',\n'oS;':'\\u24c8',\n'Oscr;':'\\U0001d4aa',\n'oscr;':'\\u2134',\n'Oslash':'\\xd8',\n'oslash':'\\xf8',\n'Oslash;':'\\xd8',\n'oslash;':'\\xf8',\n'osol;':'\\u2298',\n'Otilde':'\\xd5',\n'otilde':'\\xf5',\n'Otilde;':'\\xd5',\n'otilde;':'\\xf5',\n'Otimes;':'\\u2a37',\n'otimes;':'\\u2297',\n'otimesas;':'\\u2a36',\n'Ouml':'\\xd6',\n'ouml':'\\xf6',\n'Ouml;':'\\xd6',\n'ouml;':'\\xf6',\n'ovbar;':'\\u233d',\n'OverBar;':'\\u203e',\n'OverBrace;':'\\u23de',\n'OverBracket;':'\\u23b4',\n'OverParenthesis;':'\\u23dc',\n'par;':'\\u2225',\n'para':'\\xb6',\n'para;':'\\xb6',\n'parallel;':'\\u2225',\n'parsim;':'\\u2af3',\n'parsl;':'\\u2afd',\n'part;':'\\u2202',\n'PartialD;':'\\u2202',\n'Pcy;':'\\u041f',\n'pcy;':'\\u043f',\n'percnt;':'%',\n'period;':'.',\n'permil;':'\\u2030',\n'perp;':'\\u22a5',\n'pertenk;':'\\u2031',\n'Pfr;':'\\U0001d513',\n'pfr;':'\\U0001d52d',\n'Phi;':'\\u03a6',\n'phi;':'\\u03c6',\n'phiv;':'\\u03d5',\n'phmmat;':'\\u2133',\n'phone;':'\\u260e',\n'Pi;':'\\u03a0',\n'pi;':'\\u03c0',\n'pitchfork;':'\\u22d4',\n'piv;':'\\u03d6',\n'planck;':'\\u210f',\n'planckh;':'\\u210e',\n'plankv;':'\\u210f',\n'plus;':'+',\n'plusacir;':'\\u2a23',\n'plusb;':'\\u229e',\
n'pluscir;':'\\u2a22',\n'plusdo;':'\\u2214',\n'plusdu;':'\\u2a25',\n'pluse;':'\\u2a72',\n'PlusMinus;':'\\xb1',\n'plusmn':'\\xb1',\n'plusmn;':'\\xb1',\n'plussim;':'\\u2a26',\n'plustwo;':'\\u2a27',\n'pm;':'\\xb1',\n'Poincareplane;':'\\u210c',\n'pointint;':'\\u2a15',\n'Popf;':'\\u2119',\n'popf;':'\\U0001d561',\n'pound':'\\xa3',\n'pound;':'\\xa3',\n'Pr;':'\\u2abb',\n'pr;':'\\u227a',\n'prap;':'\\u2ab7',\n'prcue;':'\\u227c',\n'prE;':'\\u2ab3',\n'pre;':'\\u2aaf',\n'prec;':'\\u227a',\n'precapprox;':'\\u2ab7',\n'preccurlyeq;':'\\u227c',\n'Precedes;':'\\u227a',\n'PrecedesEqual;':'\\u2aaf',\n'PrecedesSlantEqual;':'\\u227c',\n'PrecedesTilde;':'\\u227e',\n'preceq;':'\\u2aaf',\n'precnapprox;':'\\u2ab9',\n'precneqq;':'\\u2ab5',\n'precnsim;':'\\u22e8',\n'precsim;':'\\u227e',\n'Prime;':'\\u2033',\n'prime;':'\\u2032',\n'primes;':'\\u2119',\n'prnap;':'\\u2ab9',\n'prnE;':'\\u2ab5',\n'prnsim;':'\\u22e8',\n'prod;':'\\u220f',\n'Product;':'\\u220f',\n'profalar;':'\\u232e',\n'profline;':'\\u2312',\n'profsurf;':'\\u2313',\n'prop;':'\\u221d',\n'Proportion;':'\\u2237',\n'Proportional;':'\\u221d',\n'propto;':'\\u221d',\n'prsim;':'\\u227e',\n'prurel;':'\\u22b0',\n'Pscr;':'\\U0001d4ab',\n'pscr;':'\\U0001d4c5',\n'Psi;':'\\u03a8',\n'psi;':'\\u03c8',\n'puncsp;':'\\u2008',\n'Qfr;':'\\U0001d514',\n'qfr;':'\\U0001d52e',\n'qint;':'\\u2a0c',\n'Qopf;':'\\u211a',\n'qopf;':'\\U0001d562',\n'qprime;':'\\u2057',\n'Qscr;':'\\U0001d4ac',\n'qscr;':'\\U0001d4c6',\n'quaternions;':'\\u210d',\n'quatint;':'\\u2a16',\n'quest;':'?',\n'questeq;':'\\u225f',\n'QUOT':'\"',\n'quot':'\"',\n'QUOT;':'\"',\n'quot;':'\"',\n'rAarr;':'\\u21db',\n'race;':'\\u223d\\u0331',\n'Racute;':'\\u0154',\n'racute;':'\\u0155',\n'radic;':'\\u221a',\n'raemptyv;':'\\u29b3',\n'Rang;':'\\u27eb',\n'rang;':'\\u27e9',\n'rangd;':'\\u2992',\n'range;':'\\u29a5',\n'rangle;':'\\u27e9',\n'raquo':'\\xbb',\n'raquo;':'\\xbb',\n'Rarr;':'\\u21a0',\n'rArr;':'\\u21d2',\n'rarr;':'\\u2192',\n'rarrap;':'\\u2975',\n'rarrb;':'\\u21e5',\n'rarrbfs;':'\\u2920',\n'rarrc;':'\\u2933',\n'rarrfs;':'\\u291e',\n'rarrhk;':'\\u21aa',\n'rarrlp;':'\\u21ac',\n'rarrpl;':'\\u2945',\n'rarrsim;':'\\u2974',\n'Rarrtl;':'\\u2916',\n'rarrtl;':'\\u21a3',\n'rarrw;':'\\u219d',\n'rAtail;':'\\u291c',\n'ratail;':'\\u291a',\n'ratio;':'\\u2236',\n'rationals;':'\\u211a',\n'RBarr;':'\\u2910',\n'rBarr;':'\\u290f',\n'rbarr;':'\\u290d',\n'rbbrk;':'\\u2773',\n'rbrace;':'}',\n'rbrack;':']',\n'rbrke;':'\\u298c',\n'rbrksld;':'\\u298e',\n'rbrkslu;':'\\u2990',\n'Rcaron;':'\\u0158',\n'rcaron;':'\\u0159',\n'Rcedil;':'\\u0156',\n'rcedil;':'\\u0157',\n'rceil;':'\\u2309',\n'rcub;':'}',\n'Rcy;':'\\u0420',\n'rcy;':'\\u0440',\n'rdca;':'\\u2937',\n'rdldhar;':'\\u2969',\n'rdquo;':'\\u201d',\n'rdquor;':'\\u201d',\n'rdsh;':'\\u21b3',\n'Re;':'\\u211c',\n'real;':'\\u211c',\n'realine;':'\\u211b',\n'realpart;':'\\u211c',\n'reals;':'\\u211d',\n'rect;':'\\u25ad',\n'REG':'\\xae',\n'reg':'\\xae',\n'REG;':'\\xae',\n'reg;':'\\xae',\n'ReverseElement;':'\\u220b',\n'ReverseEquilibrium;':'\\u21cb',\n'ReverseUpEquilibrium;':'\\u296f',\n'rfisht;':'\\u297d',\n'rfloor;':'\\u230b',\n'Rfr;':'\\u211c',\n'rfr;':'\\U0001d52f',\n'rHar;':'\\u2964',\n'rhard;':'\\u21c1',\n'rharu;':'\\u21c0',\n'rharul;':'\\u296c',\n'Rho;':'\\u03a1',\n'rho;':'\\u03c1',\n'rhov;':'\\u03f1',\n'RightAngleBracket;':'\\u27e9',\n'RightArrow;':'\\u2192',\n'Rightarrow;':'\\u21d2',\n'rightarrow;':'\\u2192',\n'RightArrowBar;':'\\u21e5',\n'RightArrowLeftArrow;':'\\u21c4',\n'rightarrowtail;':'\\u21a3',\n'RightCeiling;':'\\u2309',\n'RightDoubleBracket;':'\\u27e7',\n'RightDownTeeVector;':'\\u295d',\n'RightD
ownVector;':'\\u21c2',\n'RightDownVectorBar;':'\\u2955',\n'RightFloor;':'\\u230b',\n'rightharpoondown;':'\\u21c1',\n'rightharpoonup;':'\\u21c0',\n'rightleftarrows;':'\\u21c4',\n'rightleftharpoons;':'\\u21cc',\n'rightrightarrows;':'\\u21c9',\n'rightsquigarrow;':'\\u219d',\n'RightTee;':'\\u22a2',\n'RightTeeArrow;':'\\u21a6',\n'RightTeeVector;':'\\u295b',\n'rightthreetimes;':'\\u22cc',\n'RightTriangle;':'\\u22b3',\n'RightTriangleBar;':'\\u29d0',\n'RightTriangleEqual;':'\\u22b5',\n'RightUpDownVector;':'\\u294f',\n'RightUpTeeVector;':'\\u295c',\n'RightUpVector;':'\\u21be',\n'RightUpVectorBar;':'\\u2954',\n'RightVector;':'\\u21c0',\n'RightVectorBar;':'\\u2953',\n'ring;':'\\u02da',\n'risingdotseq;':'\\u2253',\n'rlarr;':'\\u21c4',\n'rlhar;':'\\u21cc',\n'rlm;':'\\u200f',\n'rmoust;':'\\u23b1',\n'rmoustache;':'\\u23b1',\n'rnmid;':'\\u2aee',\n'roang;':'\\u27ed',\n'roarr;':'\\u21fe',\n'robrk;':'\\u27e7',\n'ropar;':'\\u2986',\n'Ropf;':'\\u211d',\n'ropf;':'\\U0001d563',\n'roplus;':'\\u2a2e',\n'rotimes;':'\\u2a35',\n'RoundImplies;':'\\u2970',\n'rpar;':')',\n'rpargt;':'\\u2994',\n'rppolint;':'\\u2a12',\n'rrarr;':'\\u21c9',\n'Rrightarrow;':'\\u21db',\n'rsaquo;':'\\u203a',\n'Rscr;':'\\u211b',\n'rscr;':'\\U0001d4c7',\n'Rsh;':'\\u21b1',\n'rsh;':'\\u21b1',\n'rsqb;':']',\n'rsquo;':'\\u2019',\n'rsquor;':'\\u2019',\n'rthree;':'\\u22cc',\n'rtimes;':'\\u22ca',\n'rtri;':'\\u25b9',\n'rtrie;':'\\u22b5',\n'rtrif;':'\\u25b8',\n'rtriltri;':'\\u29ce',\n'RuleDelayed;':'\\u29f4',\n'ruluhar;':'\\u2968',\n'rx;':'\\u211e',\n'Sacute;':'\\u015a',\n'sacute;':'\\u015b',\n'sbquo;':'\\u201a',\n'Sc;':'\\u2abc',\n'sc;':'\\u227b',\n'scap;':'\\u2ab8',\n'Scaron;':'\\u0160',\n'scaron;':'\\u0161',\n'sccue;':'\\u227d',\n'scE;':'\\u2ab4',\n'sce;':'\\u2ab0',\n'Scedil;':'\\u015e',\n'scedil;':'\\u015f',\n'Scirc;':'\\u015c',\n'scirc;':'\\u015d',\n'scnap;':'\\u2aba',\n'scnE;':'\\u2ab6',\n'scnsim;':'\\u22e9',\n'scpolint;':'\\u2a13',\n'scsim;':'\\u227f',\n'Scy;':'\\u0421',\n'scy;':'\\u0441',\n'sdot;':'\\u22c5',\n'sdotb;':'\\u22a1',\n'sdote;':'\\u2a66',\n'searhk;':'\\u2925',\n'seArr;':'\\u21d8',\n'searr;':'\\u2198',\n'searrow;':'\\u2198',\n'sect':'\\xa7',\n'sect;':'\\xa7',\n'semi;':';',\n'seswar;':'\\u2929',\n'setminus;':'\\u2216',\n'setmn;':'\\u2216',\n'sext;':'\\u2736',\n'Sfr;':'\\U0001d516',\n'sfr;':'\\U0001d530',\n'sfrown;':'\\u2322',\n'sharp;':'\\u266f',\n'SHCHcy;':'\\u0429',\n'shchcy;':'\\u0449',\n'SHcy;':'\\u0428',\n'shcy;':'\\u0448',\n'ShortDownArrow;':'\\u2193',\n'ShortLeftArrow;':'\\u2190',\n'shortmid;':'\\u2223',\n'shortparallel;':'\\u2225',\n'ShortRightArrow;':'\\u2192',\n'ShortUpArrow;':'\\u2191',\n'shy':'\\xad',\n'shy;':'\\xad',\n'Sigma;':'\\u03a3',\n'sigma;':'\\u03c3',\n'sigmaf;':'\\u03c2',\n'sigmav;':'\\u03c2',\n'sim;':'\\u223c',\n'simdot;':'\\u2a6a',\n'sime;':'\\u2243',\n'simeq;':'\\u2243',\n'simg;':'\\u2a9e',\n'simgE;':'\\u2aa0',\n'siml;':'\\u2a9d',\n'simlE;':'\\u2a9f',\n'simne;':'\\u2246',\n'simplus;':'\\u2a24',\n'simrarr;':'\\u2972',\n'slarr;':'\\u2190',\n'SmallCircle;':'\\u2218',\n'smallsetminus;':'\\u2216',\n'smashp;':'\\u2a33',\n'smeparsl;':'\\u29e4',\n'smid;':'\\u2223',\n'smile;':'\\u2323',\n'smt;':'\\u2aaa',\n'smte;':'\\u2aac',\n'smtes;':'\\u2aac\\ufe00',\n'SOFTcy;':'\\u042c',\n'softcy;':'\\u044c',\n'sol;':'/',\n'solb;':'\\u29c4',\n'solbar;':'\\u233f',\n'Sopf;':'\\U0001d54a',\n'sopf;':'\\U0001d564',\n'spades;':'\\u2660',\n'spadesuit;':'\\u2660',\n'spar;':'\\u2225',\n'sqcap;':'\\u2293',\n'sqcaps;':'\\u2293\\ufe00',\n'sqcup;':'\\u2294',\n'sqcups;':'\\u2294\\ufe00',\n'Sqrt;':'\\u221a',\n'sqsub;':'\\u228f',\n'sqsube;':'\\u2291',\n
'sqsubset;':'\\u228f',\n'sqsubseteq;':'\\u2291',\n'sqsup;':'\\u2290',\n'sqsupe;':'\\u2292',\n'sqsupset;':'\\u2290',\n'sqsupseteq;':'\\u2292',\n'squ;':'\\u25a1',\n'Square;':'\\u25a1',\n'square;':'\\u25a1',\n'SquareIntersection;':'\\u2293',\n'SquareSubset;':'\\u228f',\n'SquareSubsetEqual;':'\\u2291',\n'SquareSuperset;':'\\u2290',\n'SquareSupersetEqual;':'\\u2292',\n'SquareUnion;':'\\u2294',\n'squarf;':'\\u25aa',\n'squf;':'\\u25aa',\n'srarr;':'\\u2192',\n'Sscr;':'\\U0001d4ae',\n'sscr;':'\\U0001d4c8',\n'ssetmn;':'\\u2216',\n'ssmile;':'\\u2323',\n'sstarf;':'\\u22c6',\n'Star;':'\\u22c6',\n'star;':'\\u2606',\n'starf;':'\\u2605',\n'straightepsilon;':'\\u03f5',\n'straightphi;':'\\u03d5',\n'strns;':'\\xaf',\n'Sub;':'\\u22d0',\n'sub;':'\\u2282',\n'subdot;':'\\u2abd',\n'subE;':'\\u2ac5',\n'sube;':'\\u2286',\n'subedot;':'\\u2ac3',\n'submult;':'\\u2ac1',\n'subnE;':'\\u2acb',\n'subne;':'\\u228a',\n'subplus;':'\\u2abf',\n'subrarr;':'\\u2979',\n'Subset;':'\\u22d0',\n'subset;':'\\u2282',\n'subseteq;':'\\u2286',\n'subseteqq;':'\\u2ac5',\n'SubsetEqual;':'\\u2286',\n'subsetneq;':'\\u228a',\n'subsetneqq;':'\\u2acb',\n'subsim;':'\\u2ac7',\n'subsub;':'\\u2ad5',\n'subsup;':'\\u2ad3',\n'succ;':'\\u227b',\n'succapprox;':'\\u2ab8',\n'succcurlyeq;':'\\u227d',\n'Succeeds;':'\\u227b',\n'SucceedsEqual;':'\\u2ab0',\n'SucceedsSlantEqual;':'\\u227d',\n'SucceedsTilde;':'\\u227f',\n'succeq;':'\\u2ab0',\n'succnapprox;':'\\u2aba',\n'succneqq;':'\\u2ab6',\n'succnsim;':'\\u22e9',\n'succsim;':'\\u227f',\n'SuchThat;':'\\u220b',\n'Sum;':'\\u2211',\n'sum;':'\\u2211',\n'sung;':'\\u266a',\n'sup1':'\\xb9',\n'sup1;':'\\xb9',\n'sup2':'\\xb2',\n'sup2;':'\\xb2',\n'sup3':'\\xb3',\n'sup3;':'\\xb3',\n'Sup;':'\\u22d1',\n'sup;':'\\u2283',\n'supdot;':'\\u2abe',\n'supdsub;':'\\u2ad8',\n'supE;':'\\u2ac6',\n'supe;':'\\u2287',\n'supedot;':'\\u2ac4',\n'Superset;':'\\u2283',\n'SupersetEqual;':'\\u2287',\n'suphsol;':'\\u27c9',\n'suphsub;':'\\u2ad7',\n'suplarr;':'\\u297b',\n'supmult;':'\\u2ac2',\n'supnE;':'\\u2acc',\n'supne;':'\\u228b',\n'supplus;':'\\u2ac0',\n'Supset;':'\\u22d1',\n'supset;':'\\u2283',\n'supseteq;':'\\u2287',\n'supseteqq;':'\\u2ac6',\n'supsetneq;':'\\u228b',\n'supsetneqq;':'\\u2acc',\n'supsim;':'\\u2ac8',\n'supsub;':'\\u2ad4',\n'supsup;':'\\u2ad6',\n'swarhk;':'\\u2926',\n'swArr;':'\\u21d9',\n'swarr;':'\\u2199',\n'swarrow;':'\\u2199',\n'swnwar;':'\\u292a',\n'szlig':'\\xdf',\n'szlig;':'\\xdf',\n'Tab;':'\\t',\n'target;':'\\u2316',\n'Tau;':'\\u03a4',\n'tau;':'\\u03c4',\n'tbrk;':'\\u23b4',\n'Tcaron;':'\\u0164',\n'tcaron;':'\\u0165',\n'Tcedil;':'\\u0162',\n'tcedil;':'\\u0163',\n'Tcy;':'\\u0422',\n'tcy;':'\\u0442',\n'tdot;':'\\u20db',\n'telrec;':'\\u2315',\n'Tfr;':'\\U0001d517',\n'tfr;':'\\U0001d531',\n'there4;':'\\u2234',\n'Therefore;':'\\u2234',\n'therefore;':'\\u2234',\n'Theta;':'\\u0398',\n'theta;':'\\u03b8',\n'thetasym;':'\\u03d1',\n'thetav;':'\\u03d1',\n'thickapprox;':'\\u2248',\n'thicksim;':'\\u223c',\n'ThickSpace;':'\\u205f\\u200a',\n'thinsp;':'\\u2009',\n'ThinSpace;':'\\u2009',\n'thkap;':'\\u2248',\n'thksim;':'\\u223c',\n'THORN':'\\xde',\n'thorn':'\\xfe',\n'THORN;':'\\xde',\n'thorn;':'\\xfe',\n'Tilde;':'\\u223c',\n'tilde;':'\\u02dc',\n'TildeEqual;':'\\u2243',\n'TildeFullEqual;':'\\u2245',\n'TildeTilde;':'\\u2248',\n'times':'\\xd7',\n'times;':'\\xd7',\n'timesb;':'\\u22a0',\n'timesbar;':'\\u2a31',\n'timesd;':'\\u2a30',\n'tint;':'\\u222d',\n'toea;':'\\u2928',\n'top;':'\\u22a4',\n'topbot;':'\\u2336',\n'topcir;':'\\u2af1',\n'Topf;':'\\U0001d54b',\n'topf;':'\\U0001d565',\n'topfork;':'\\u2ada',\n'tosa;':'\\u2929',\n'tprime;':'\\u2034',\n'TRADE
;':'\\u2122',\n'trade;':'\\u2122',\n'triangle;':'\\u25b5',\n'triangledown;':'\\u25bf',\n'triangleleft;':'\\u25c3',\n'trianglelefteq;':'\\u22b4',\n'triangleq;':'\\u225c',\n'triangleright;':'\\u25b9',\n'trianglerighteq;':'\\u22b5',\n'tridot;':'\\u25ec',\n'trie;':'\\u225c',\n'triminus;':'\\u2a3a',\n'TripleDot;':'\\u20db',\n'triplus;':'\\u2a39',\n'trisb;':'\\u29cd',\n'tritime;':'\\u2a3b',\n'trpezium;':'\\u23e2',\n'Tscr;':'\\U0001d4af',\n'tscr;':'\\U0001d4c9',\n'TScy;':'\\u0426',\n'tscy;':'\\u0446',\n'TSHcy;':'\\u040b',\n'tshcy;':'\\u045b',\n'Tstrok;':'\\u0166',\n'tstrok;':'\\u0167',\n'twixt;':'\\u226c',\n'twoheadleftarrow;':'\\u219e',\n'twoheadrightarrow;':'\\u21a0',\n'Uacute':'\\xda',\n'uacute':'\\xfa',\n'Uacute;':'\\xda',\n'uacute;':'\\xfa',\n'Uarr;':'\\u219f',\n'uArr;':'\\u21d1',\n'uarr;':'\\u2191',\n'Uarrocir;':'\\u2949',\n'Ubrcy;':'\\u040e',\n'ubrcy;':'\\u045e',\n'Ubreve;':'\\u016c',\n'ubreve;':'\\u016d',\n'Ucirc':'\\xdb',\n'ucirc':'\\xfb',\n'Ucirc;':'\\xdb',\n'ucirc;':'\\xfb',\n'Ucy;':'\\u0423',\n'ucy;':'\\u0443',\n'udarr;':'\\u21c5',\n'Udblac;':'\\u0170',\n'udblac;':'\\u0171',\n'udhar;':'\\u296e',\n'ufisht;':'\\u297e',\n'Ufr;':'\\U0001d518',\n'ufr;':'\\U0001d532',\n'Ugrave':'\\xd9',\n'ugrave':'\\xf9',\n'Ugrave;':'\\xd9',\n'ugrave;':'\\xf9',\n'uHar;':'\\u2963',\n'uharl;':'\\u21bf',\n'uharr;':'\\u21be',\n'uhblk;':'\\u2580',\n'ulcorn;':'\\u231c',\n'ulcorner;':'\\u231c',\n'ulcrop;':'\\u230f',\n'ultri;':'\\u25f8',\n'Umacr;':'\\u016a',\n'umacr;':'\\u016b',\n'uml':'\\xa8',\n'uml;':'\\xa8',\n'UnderBar;':'_',\n'UnderBrace;':'\\u23df',\n'UnderBracket;':'\\u23b5',\n'UnderParenthesis;':'\\u23dd',\n'Union;':'\\u22c3',\n'UnionPlus;':'\\u228e',\n'Uogon;':'\\u0172',\n'uogon;':'\\u0173',\n'Uopf;':'\\U0001d54c',\n'uopf;':'\\U0001d566',\n'UpArrow;':'\\u2191',\n'Uparrow;':'\\u21d1',\n'uparrow;':'\\u2191',\n'UpArrowBar;':'\\u2912',\n'UpArrowDownArrow;':'\\u21c5',\n'UpDownArrow;':'\\u2195',\n'Updownarrow;':'\\u21d5',\n'updownarrow;':'\\u2195',\n'UpEquilibrium;':'\\u296e',\n'upharpoonleft;':'\\u21bf',\n'upharpoonright;':'\\u21be',\n'uplus;':'\\u228e',\n'UpperLeftArrow;':'\\u2196',\n'UpperRightArrow;':'\\u2197',\n'Upsi;':'\\u03d2',\n'upsi;':'\\u03c5',\n'upsih;':'\\u03d2',\n'Upsilon;':'\\u03a5',\n'upsilon;':'\\u03c5',\n'UpTee;':'\\u22a5',\n'UpTeeArrow;':'\\u21a5',\n'upuparrows;':'\\u21c8',\n'urcorn;':'\\u231d',\n'urcorner;':'\\u231d',\n'urcrop;':'\\u230e',\n'Uring;':'\\u016e',\n'uring;':'\\u016f',\n'urtri;':'\\u25f9',\n'Uscr;':'\\U0001d4b0',\n'uscr;':'\\U0001d4ca',\n'utdot;':'\\u22f0',\n'Utilde;':'\\u0168',\n'utilde;':'\\u0169',\n'utri;':'\\u25b5',\n'utrif;':'\\u25b4',\n'uuarr;':'\\u21c8',\n'Uuml':'\\xdc',\n'uuml':'\\xfc',\n'Uuml;':'\\xdc',\n'uuml;':'\\xfc',\n'uwangle;':'\\u29a7',\n'vangrt;':'\\u299c',\n'varepsilon;':'\\u03f5',\n'varkappa;':'\\u03f0',\n'varnothing;':'\\u2205',\n'varphi;':'\\u03d5',\n'varpi;':'\\u03d6',\n'varpropto;':'\\u221d',\n'vArr;':'\\u21d5',\n'varr;':'\\u2195',\n'varrho;':'\\u03f1',\n'varsigma;':'\\u03c2',\n'varsubsetneq;':'\\u228a\\ufe00',\n'varsubsetneqq;':'\\u2acb\\ufe00',\n'varsupsetneq;':'\\u228b\\ufe00',\n'varsupsetneqq;':'\\u2acc\\ufe00',\n'vartheta;':'\\u03d1',\n'vartriangleleft;':'\\u22b2',\n'vartriangleright;':'\\u22b3',\n'Vbar;':'\\u2aeb',\n'vBar;':'\\u2ae8',\n'vBarv;':'\\u2ae9',\n'Vcy;':'\\u0412',\n'vcy;':'\\u0432',\n'VDash;':'\\u22ab',\n'Vdash;':'\\u22a9',\n'vDash;':'\\u22a8',\n'vdash;':'\\u22a2',\n'Vdashl;':'\\u2ae6',\n'Vee;':'\\u22c1',\n'vee;':'\\u2228',\n'veebar;':'\\u22bb',\n'veeeq;':'\\u225a',\n'vellip;':'\\u22ee',\n'Verbar;':'\\u2016',\n'verbar;':'|',\n'Vert;':'\\u2016',\
n'vert;':'|',\n'VerticalBar;':'\\u2223',\n'VerticalLine;':'|',\n'VerticalSeparator;':'\\u2758',\n'VerticalTilde;':'\\u2240',\n'VeryThinSpace;':'\\u200a',\n'Vfr;':'\\U0001d519',\n'vfr;':'\\U0001d533',\n'vltri;':'\\u22b2',\n'vnsub;':'\\u2282\\u20d2',\n'vnsup;':'\\u2283\\u20d2',\n'Vopf;':'\\U0001d54d',\n'vopf;':'\\U0001d567',\n'vprop;':'\\u221d',\n'vrtri;':'\\u22b3',\n'Vscr;':'\\U0001d4b1',\n'vscr;':'\\U0001d4cb',\n'vsubnE;':'\\u2acb\\ufe00',\n'vsubne;':'\\u228a\\ufe00',\n'vsupnE;':'\\u2acc\\ufe00',\n'vsupne;':'\\u228b\\ufe00',\n'Vvdash;':'\\u22aa',\n'vzigzag;':'\\u299a',\n'Wcirc;':'\\u0174',\n'wcirc;':'\\u0175',\n'wedbar;':'\\u2a5f',\n'Wedge;':'\\u22c0',\n'wedge;':'\\u2227',\n'wedgeq;':'\\u2259',\n'weierp;':'\\u2118',\n'Wfr;':'\\U0001d51a',\n'wfr;':'\\U0001d534',\n'Wopf;':'\\U0001d54e',\n'wopf;':'\\U0001d568',\n'wp;':'\\u2118',\n'wr;':'\\u2240',\n'wreath;':'\\u2240',\n'Wscr;':'\\U0001d4b2',\n'wscr;':'\\U0001d4cc',\n'xcap;':'\\u22c2',\n'xcirc;':'\\u25ef',\n'xcup;':'\\u22c3',\n'xdtri;':'\\u25bd',\n'Xfr;':'\\U0001d51b',\n'xfr;':'\\U0001d535',\n'xhArr;':'\\u27fa',\n'xharr;':'\\u27f7',\n'Xi;':'\\u039e',\n'xi;':'\\u03be',\n'xlArr;':'\\u27f8',\n'xlarr;':'\\u27f5',\n'xmap;':'\\u27fc',\n'xnis;':'\\u22fb',\n'xodot;':'\\u2a00',\n'Xopf;':'\\U0001d54f',\n'xopf;':'\\U0001d569',\n'xoplus;':'\\u2a01',\n'xotime;':'\\u2a02',\n'xrArr;':'\\u27f9',\n'xrarr;':'\\u27f6',\n'Xscr;':'\\U0001d4b3',\n'xscr;':'\\U0001d4cd',\n'xsqcup;':'\\u2a06',\n'xuplus;':'\\u2a04',\n'xutri;':'\\u25b3',\n'xvee;':'\\u22c1',\n'xwedge;':'\\u22c0',\n'Yacute':'\\xdd',\n'yacute':'\\xfd',\n'Yacute;':'\\xdd',\n'yacute;':'\\xfd',\n'YAcy;':'\\u042f',\n'yacy;':'\\u044f',\n'Ycirc;':'\\u0176',\n'ycirc;':'\\u0177',\n'Ycy;':'\\u042b',\n'ycy;':'\\u044b',\n'yen':'\\xa5',\n'yen;':'\\xa5',\n'Yfr;':'\\U0001d51c',\n'yfr;':'\\U0001d536',\n'YIcy;':'\\u0407',\n'yicy;':'\\u0457',\n'Yopf;':'\\U0001d550',\n'yopf;':'\\U0001d56a',\n'Yscr;':'\\U0001d4b4',\n'yscr;':'\\U0001d4ce',\n'YUcy;':'\\u042e',\n'yucy;':'\\u044e',\n'yuml':'\\xff',\n'Yuml;':'\\u0178',\n'yuml;':'\\xff',\n'Zacute;':'\\u0179',\n'zacute;':'\\u017a',\n'Zcaron;':'\\u017d',\n'zcaron;':'\\u017e',\n'Zcy;':'\\u0417',\n'zcy;':'\\u0437',\n'Zdot;':'\\u017b',\n'zdot;':'\\u017c',\n'zeetrf;':'\\u2128',\n'ZeroWidthSpace;':'\\u200b',\n'Zeta;':'\\u0396',\n'zeta;':'\\u03b6',\n'Zfr;':'\\u2128',\n'zfr;':'\\U0001d537',\n'ZHcy;':'\\u0416',\n'zhcy;':'\\u0436',\n'zigrarr;':'\\u21dd',\n'Zopf;':'\\u2124',\n'zopf;':'\\U0001d56b',\n'Zscr;':'\\U0001d4b5',\n'zscr;':'\\U0001d4cf',\n'zwj;':'\\u200d',\n'zwnj;':'\\u200c',\n}\n\n\ncodepoint2name={}\n\n\n\nentitydefs={}\n\nfor(name,codepoint)in name2codepoint.items():\n codepoint2name[codepoint]=name\n entitydefs[name]=chr(codepoint)\n \ndel name,codepoint\n", []], "browser": [".py", "", [], 1], "browser.object_storage": [".py", "import json\n\nclass _UnProvided():\n pass\n \n \nclass ObjectStorage():\n\n def __init__(self,storage):\n self.storage=storage\n \n def __delitem__(self,key):\n del self.storage[json.dumps(key)]\n \n def __getitem__(self,key):\n return json.loads(self.storage[json.dumps(key)])\n \n def __setitem__(self,key,value):\n self.storage[json.dumps(key)]=json.dumps(value)\n \n def __contains__(self,key):\n return json.dumps(key)in self.storage\n \n def get(self,key,default=None):\n if json.dumps(key)in self.storage:\n return self.storage[json.dumps(key)]\n return default\n \n def pop(self,key,default=_UnProvided()):\n if type(default)is _UnProvided or json.dumps(key)in self.storage:\n return json.loads(self.storage.pop(json.dumps(key)))\n return default\n \n def 
__iter__(self):\n keys=self.keys()\n return keys.__iter__()\n \n def keys(self):\n return[json.loads(key)for key in self.storage.keys()]\n \n def values(self):\n return[json.loads(val)for val in self.storage.values()]\n \n def items(self):\n return list(zip(self.keys(),self.values()))\n \n def clear(self):\n self.storage.clear()\n \n def __len__(self):\n return len(self.storage)\n", ["json"]], "browser.worker": [".py", "from _webworker import *\n", ["_webworker"]], "browser.session_storage": [".py", "\nimport sys\nfrom browser import window\nfrom.local_storage import LocalStorage\n\nhas_session_storage=hasattr(window,'sessionStorage')\n\nclass SessionStorage(LocalStorage):\n\n storage_type=\"session_storage\"\n \n def __init__(self):\n if not has_session_storage:\n raise EnvironmentError(\"SessionStorage not available\")\n self.store=window.sessionStorage\n \nif has_session_storage:\n storage=SessionStorage()\n", ["browser", "browser.local_storage", "sys"]], "browser.ui": [".py", "from. import html,window,console,document\n\n\nclass UIError(Exception):\n pass\n \n \nclass Border:\n\n def __init__(self,width=1,style='solid',color='#000',radius=None):\n self.width=width\n self.style=style\n self.color=color\n self.radius=radius\n \n \nclass Font:\n\n def __init__(self,family='Arial',size=None,weight='normal',\n style='normal'):\n self.family=family\n self.size=size\n self.weight=weight\n self.style=style\n \n \nclass _Directions:\n\n def __init__(self,*args,**kw):\n if len(args)==0:\n values=[0]*4\n elif len(args)==1:\n values=[args[0]]*4\n elif len(args)==2:\n values=[args[0],args[1]]*2\n elif len(args)==3:\n values=args+[0]\n elif len(args)==4:\n values=args\n else:\n raise ValueError('Padding expects at most 4 arguments, got '+\n f'{len(args)}')\n self.top,self.right,self.bottom,self.left=values\n if(x :=kw.get('x'))is not None:\n self.left=self.right=x\n if(y :=kw.get('y'))is not None:\n self.top=self.bottom=y\n if(top :=kw.get('top'))is not None:\n self.top=top\n if(right :=kw.get('right'))is not None:\n self.right=right\n if(bottom :=kw.get('bottom'))is not None:\n self.bottom=bottom\n if(left :=kw.get('left'))is not None:\n self.left=left\n \n \nclass _Coords:\n\n def __init__(self,left,top,width,height):\n self.left=left\n self.top=top\n self.width=width\n self.height=height\n \n \nclass Padding(_Directions):\n pass\n \n \nclass Mouse:\n\n def __str__(self):\n return f''\n \nmouse=Mouse()\n\nclass Rows:\n\n def __init__(self,widget):\n self.widget=widget\n self._rows=[]\n if hasattr(widget,'_table'):\n console.log('_table',widget._table)\n for row in self._widget.rows:\n cells=[]\n for cell in row.cells:\n cells.append(cell.firstChild)\n self._rows.append(cells)\n return self._rows\n \n \nclass Widget:\n\n def __init_subclass__(cls):\n cls.__str__=Widget.__str__\n \n def __str__(self):\n return f''\n \n def add(self,widget,row='same',column=None,**kw):\n widget.master=self\n widget.config(**widget._options)\n widget.grid(row=row,column=column,**kw)\n widget.kw=kw\n \n def add_row(self,widgets,row='next',column_start=0,**kw):\n ''\n for i,widget in enumerate(widgets):\n if i ==0:\n self.add(widget,row=row,column=column_start,**kw)\n else:\n self.add(widget,**kw)\n \n def add_from_table(self,table,**kw):\n ''\n \n for line in table:\n self.add(Label(line[0]),row='next')\n for cell in line[1:]:\n if isinstance(cell,str):\n self.add(Label(cell),align='left',**kw)\n else:\n self.add(Label(cell),align='right',**kw)\n \n def apply_default_style(self):\n if hasattr(self,'default_style'):\n 
for key,value in self.default_style.items():\n self.style[key]=value\n \n def config(self,**kw):\n element=self\n \n if(value :=kw.get('value')):\n if not isinstance(self,(Label,Entry)):\n raise TypeError(\"invalid keyword 'value' for \"+\n self.__class__.__name__)\n element._value=value\n element.text=value\n \n for attr in['type','name','checked']:\n if(value :=kw.get(attr))is not None:\n setattr(element,attr,value)\n \n if(title :=kw.get('title'))and isinstance(self,Box):\n element.title_bar.text=title\n \n for attr in['width','height','top','left']:\n if(value :=kw.get(attr)):\n \n match value:\n case str():\n setattr(element.style,attr,value)\n case int()|float():\n setattr(element.style,attr,f'{round(value)}px')\n case _:\n raise ValueError(f\"{attr} should be str or number, \"+\n f\"not '{value.__class__.__name__}'\")\n \n if(cursor :=kw.get('cursor')):\n element.style.cursor=cursor\n \n if(command :=kw.get('command')):\n element.bind('click',\n lambda ev,command=command:command(ev.target))\n element.style.cursor='default'\n \n if(font :=kw.get('font')):\n element.style.fontFamily=font.family\n element.style.fontWeight=font.weight\n element.style.fontStyle=font.style\n if font.size:\n if isinstance(font.size,str):\n element.style.fontSize=font.size\n else:\n element.style.fontSize=f'{font.size}px'\n \n if(background :=kw.get('background')):\n element.style.backgroundColor=background\n if(color :=kw.get('color')):\n element.style.color=color\n \n if(border :=kw.get('border')):\n if isinstance(border,str):\n element.style.borderWidth=border\n element.style.borderStyle='solid'\n elif isinstance(border,int):\n element.style.borderWidth=f'{border}px'\n element.style.borderStyle='solid'\n elif isinstance(border,Border):\n element.style.borderStyle=border.style\n element.style.borderWidth=f'{border.width}px'\n element.style.borderColor=border.color\n element.style.borderRadius=f'{border.radius}px'\n else:\n raise TypeError('invalid type for border: '+\n border.__class__.__name__)\n \n if(padding :=kw.get('padding')):\n if isinstance(padding,str):\n element.style.padding=padding\n elif isinstance(padding,int):\n element.style.padding=f'{padding}px'\n elif isinstance(padding,Padding):\n for key in['top','right','bottom','left']:\n value=getattr(padding,key)\n attr='padding'+key.capitalize()\n if isinstance(value,str):\n setattr(element.style,attr,value)\n else:\n setattr(element.style,attr,f'{value}px')\n else:\n raise TypeError('invalid type for padding: '+\n padding.__class__.__name__)\n \n if(menu :=kw.get('menu'))is not None:\n if isinstance(self,Box):\n menu._build()\n self.insertBefore(menu.element,\n self.title_bar.nextSibling)\n self.menu=menu\n \n if(callbacks :=kw.get('callbacks'))is not None:\n for event,func in callbacks.items():\n element.bind(event,self._wrap_callback(func))\n \n self._config=getattr(self,'_config',{})\n self._config |=kw\n \n def _wrap_callback(self,func):\n def f(event):\n res=func(event)\n if res is False:\n event.stopPropagation()\n event.preventDefault()\n return res\n return f\n \n def coords(self):\n if not hasattr(self,'master'):\n raise TypeError(\"attribute 'coords' not set until widget is added\")\n parent=self.parentNode\n return _Coords(parent.offsetLeft,parent.offsetTop,parent.offsetWidth,\n parent.offsetHeight)\n \n def grid(self,column=None,columnspan=1,row=None,rowspan=1,align='',\n **options):\n master=self.master\n if not hasattr(master,'_table'):\n master._table=html.TABLE(\n cellpadding=0,\n cellspacing=0,\n style='width:100%;')\n master 
<=master._table\n if row =='same':\n row=0\n \n master.table=_Wrapper(master._table)\n \n if not hasattr(master,'cells'):\n master.cells=set()\n \n valid=[None,'same','next']\n if not isinstance(row,int)and row not in valid:\n raise ValueError(f'invalid value for row: {row !r}')\n if not isinstance(column,int)and column not in valid:\n raise ValueError(f'invalid value for column: {column !r}')\n \n \n \n \n nb_rows=len(master._table.rows)\n if row is None or row =='next':\n \n row=nb_rows\n if column is None:\n column=0\n elif row =='same':\n row=max(0,nb_rows -1)\n \n if column is None:\n column='next'\n \n for i in range(row -nb_rows+1):\n master._table <=html.TR()\n \n tr=master._table.rows[row]\n \n nb_cols=len(tr.cells)\n if column =='next':\n column=nb_cols\n elif column =='same':\n column=nb_cols -1\n \n \n cols_from_span=[c for(r,c)in master.cells\n if r ==row and c 1:\n td.attrs['colspan']=columnspan\n if rowspan >1:\n td.attrs['rowspan']=rowspan\n \n aligns=align.split()\n if 'left'in aligns:\n td.style.textAlign='left'\n if 'right'in aligns:\n td.style.textAlign='right'\n if 'center'in aligns:\n td.style.textAlign='center'\n if 'top'in aligns:\n td.style.verticalAlign='top'\n if 'bottom'in aligns:\n td.style.verticalAlign='bottom'\n if 'middle'in aligns:\n td.style.verticalAlign='middle'\n \n has_child=len(td.childNodes)>0\n if has_child:\n if hasattr(td.firstChild,'is_inner'):\n inner=td.firstChild\n else:\n inner=html.DIV(style=\"position:relative\")\n inner.is_inner=True\n inner <=td.firstChild\n td <=inner\n self.style.position=\"absolute\"\n self.style.top='0px'\n inner <=self\n else:\n td <=self\n \n self.row=row\n self.column=column\n self.cell=_Wrapper(td)\n \n self.cell.config(**options)\n \n self.row=_Wrapper(tr)\n \n return self\n \n @property\n def rows(self):\n return Rows(self)\n \n def sort_by_row(self,*columns,has_title=False):\n ''\n \n rows=list(self._table.rows)\n if has_title:\n head=rows[0]\n rows=rows[1:]\n \n def first_values(row,rank):\n values=[]\n for i in range(rank):\n col_num,_=columns[i]\n values.append(row.cells[col_num].firstChild._value)\n return values\n \n for i,(col_num,ascending)in enumerate(columns):\n if i ==0:\n rows.sort(key=lambda row:row.cells[col_num].firstChild._value,\n reverse=not ascending)\n else:\n new_rows=[]\n j=0\n while True:\n same_start=[row for row in rows if\n first_values(row,i)==first_values(rows[j],i)]\n same_start.sort(key=lambda r:r.cells[col_num].firstChild._value,\n reverse=not ascending)\n new_rows +=same_start\n j +=len(same_start)\n if j ==len(rows):\n rows=new_rows\n break\n \n if has_title:\n rows.insert(0,head)\n self._table <=rows\n \n \n \nborderColor='#008'\nbackgroundColor='#fff'\ncolor='#000'\n\n\nclass Frame(html.DIV,Widget):\n\n def __init__(self,*args,**options):\n self._options=options\n \n \nclass Bar(Frame):\n\n def __init__(self,**options):\n super().__init__(**options)\n self <=Label(\" \")\n \n \nclass Box(html.DIV,Widget):\n\n default_config={\n 'width':'inherit',\n 'background':backgroundColor,\n 'color':color,\n 'cursor':'default',\n 'menu':None,\n 'font':Font(family='sans-serif',size=12)\n }\n \n def __init__(self,container=document,title=\"\",titlebar=False,**options):\n html.DIV.__init__(self,\n style=\"position:absolute;box-sizing:border-box\")\n \n container <=self\n self._options=self.default_config |options\n self.config(**self._options)\n \n if titlebar:\n self.title_bar=TitleBar(title)\n self.add(self.title_bar)\n \n panel=Frame()\n self.add(panel,row=\"next\",align=\"left\")\n 
self.panel=panel\n \n self.title_bar.close_button.bind(\"click\",self.close)\n \n self.title_bar.bind(\"mousedown\",self._grab_widget)\n self.title_bar.bind(\"touchstart\",self._grab_widget)\n self.title_bar.bind(\"mouseup\",self._stop_moving)\n self.title_bar.bind(\"touchend\",self._stop_moving)\n self.bind(\"leave\",self._stop_moving)\n self.is_moving=False\n \n elif title:\n raise UIError('cannot set title if titlebar is not set')\n \n def add(self,widget,**kw):\n if hasattr(self,'panel'):\n self.panel.add(widget,**kw)\n else:\n Widget.add(self,widget,**kw)\n \n def add_menu(self,menu):\n ''\n if not hasattr(self,\"_table\"):\n self.add(menu)\n else:\n self.insertBefore(menu,self._table)\n menu._toplevel=True\n \n def close(self,*args):\n self.remove()\n \n def keys(self):\n return[\n 'left','top','width','height'\n 'background','color',\n 'cursor',\n 'menu',\n 'border',\n 'font',\n 'padding']\n \n def _grab_widget(self,event):\n self._remove_menus()\n document.bind(\"mousemove\",self._move_widget)\n document.bind(\"touchmove\",self._move_widget)\n self.is_moving=True\n self.initial=[self.left -event.x,self.top -event.y]\n \n event.preventDefault()\n \n def _move_widget(self,event):\n if not self.is_moving:\n return\n \n \n self.left=self.initial[0]+event.x\n self.top=self.initial[1]+event.y\n \n def _stop_moving(self,event):\n self.is_moving=False\n document.unbind(\"mousemove\")\n document.unbind(\"touchmove\")\n \n def title(self,text):\n self.title_bar.text=text\n \n def _remove_menus(self):\n menu=self._options['menu']\n if menu and menu.open_submenu:\n menu.open_on_mouseenter=False\n menu.open_submenu.element.remove()\n \n \nclass _Wrapper:\n\n def __init__(self,element):\n self.element=element\n \n def config(self,**options):\n Widget.config(self.element,**options)\n \n \nclass Checkbuttons(Frame):\n\n COUNTER=0\n \n def __init__(self,**options):\n Frame.__init__(self,**options)\n self.name=f'checkbutton{self.COUNTER}'\n self.COUNTER +=1\n \n def add_option(self,label,value=None,checked=False):\n self.add(Entry(type=\"checkbox\",name=self.name,\n value=value if value is not None else label,\n checked=checked))\n self.add(Label(label))\n \n \nclass Button(html.BUTTON,Widget):\n\n def __init__(self,*args,**options):\n super().__init__(*args)\n self._options=options\n \n \nclass Entry(html.INPUT,Widget):\n\n def __init__(self,*args,**options):\n self._options=options\n super().__init__(*args)\n \n \nclass Image(html.IMG,Widget):\n\n def __init__(self,src,**options):\n super().__init__(src=src)\n self._options=options\n \n \nclass Label(html.DIV,Widget):\n\n default_style={\n 'whiteSpace':'pre',\n 'padding':'0.3em'\n }\n \n def __init__(self,value,*args,**options):\n self._options=options\n self._value=value\n super().__init__(value,*args)\n if not value:\n self.style.minHeight='1em'\n self.apply_default_style()\n \n \nclass Link(html.A,Widget):\n\n def __init__(self,text,href,**options):\n super().__init__(text,href=href)\n self._options=options\n \n \nclass Listbox(Frame):\n\n def __init__(self,**options):\n self.size=options.pop('size',None)\n self.multiple=options.pop('multiple',False)\n if self.size is not None and not isinstance(self.size,int):\n raise ValueError('size must be an integer')\n Frame.__init__(self,**options)\n self._selected=[]\n \n def add_option(self,name):\n option=Label(name,\n callbacks=dict(mouseenter=self.enter_option,\n mouseleave=self.leave_option,\n click=self.select_option))\n self.add(option,row='next')\n if self.size is not None and option.row 
==self.size -1:\n self.style.height=f'{self.offsetHeight}px'\n self.style.overflowY=\"scroll\"\n \n def enter_option(self,widget):\n if widget not in self._selected:\n widget.config(background='lightblue')\n \n def leave_option(self,widget):\n if widget not in self._selected:\n widget.config(background='inherit')\n \n def select_option(self,widget):\n if self.multiple:\n if widget in self._selected:\n self.unselect(widget)\n self.enter_option(widget)\n else:\n self.select(widget)\n else:\n if self._selected:\n self.unselect(self._selected[0])\n self.select(widget)\n \n def select(self,widget):\n widget.config(background='blue',color='white')\n self._selected.append(widget)\n \n def unselect(self,widget):\n widget.config(background='inherit',color='inherit')\n self._selected.remove(widget)\n \n \nclass Menu(Frame):\n\n default_config={\n 'background':'#eee'\n }\n \n toplevel_options={\n 'background':'inherit',\n 'color':'inherit',\n 'highlight-background':'LightBlue',\n 'highlight-color':'inherit'\n }\n \n submenu_options={\n 'background':'inherit',\n 'color':'inherit',\n 'highlight-background':'blue',\n 'highlight-color':'white'\n }\n \n def __init__(self,master,label=None,**options):\n self.master=master\n self._toplevel_options=(self.toplevel_options |\n options.pop('toplevel_options',{}))\n self._submenu_options=(self.submenu_options |\n options.pop('submenu_options',{}))\n \n self._toplevel=not isinstance(master,Menu)\n self._options=self.default_config |options\n Frame.__init__(self,**self._options)\n \n if not self._toplevel:\n if label is None:\n raise ValueError('missing submenu label')\n master.add_submenu(label,self)\n elif not hasattr(master,\"_table\"):\n master.add(self)\n else:\n master.insertBefore(self,master._table)\n \n \n def add_option(self,label,command=None):\n callbacks=dict(mouseenter=self.enter,\n mouseleave=self.leave)\n if command:\n callbacks['click']=command\n name=Label(label,padding=5,callbacks=callbacks)\n self.add(name,row=\"next\")\n \n def add_submenu(self,label,submenu=None):\n menu_options={\n 'callbacks':dict(click=self.submenu,\n mouseenter=self.enter,\n mouseleave=self.leave),\n 'padding':5\n }\n frame=Frame(**menu_options)\n frame.submenu=submenu\n \n frame.add(Label(label))\n if not self._toplevel:\n frame.add(Label('▶',padding=Padding(left=\"1em\")))\n if self._toplevel:\n self.add(frame)\n else:\n self.add(frame,row=\"next\")\n \n def enter(self,widget):\n if self._toplevel:\n options=self._toplevel_options\n else:\n options=self._submenu_options\n widget.config(background=options['highlight-background'],\n color=options['highlight-color'])\n if hasattr(widget.master,'open_submenu'):\n self.submenu(widget)\n \n def leave(self,widget):\n if self._toplevel:\n options=self._toplevel_options\n else:\n options=self._submenu_options\n widget.config(background=options['background'],\n color=options['color'])\n \n def submenu(self,widget):\n master=widget.master\n if hasattr(master,'open_submenu'):\n master.open_submenu.remove()\n if not hasattr(widget,\"submenu\"):\n return\n coords=widget.coords()\n if self._toplevel:\n top=coords.top+coords.height\n left=coords.left\n else:\n top=coords.top+widget.closest('TABLE').offsetTop\n left=coords.left+master.master.clientWidth\n \n box=Box(container=widget,titlebar=None,\n top=f'{top}px',left=f'{left}px')\n box.add(widget.submenu)\n master.open_submenu=box\n \nclass Radiobuttons(Frame):\n\n COUNTER=0\n \n def __init__(self,**options):\n Frame.__init__(self,**options)\n self.name=\"radiobutton{self.COUNTER}\"\n 
self.COUNTER +=1\n \n def add_option(self,label,value=None,checked=False):\n self.add(Entry(type=\"radio\",\n name=self.name,\n value=value if value is not None else label,\n checked=checked))\n self.add(Label(label))\n \n \nclass Slider(Frame):\n\n default_config={\n 'background':\"#bbb\"\n }\n \n def __init__(self,ratio=0,width=300,height=20,**options):\n background=options.pop('background',self.default_config['background'])\n Frame.__init__(self,width=width,height=height,**options)\n self.style.display='flex'\n self.style.alignItems='center'\n self.bar=html.DIV(style=\"width:100%;height:25%;border-radius:3px;\")\n self.bar.style.backgroundColor=background\n self <=self.bar\n self.slider=html.DIV(style=\"position:absolute;\"+\n \"cursor:grab;\")\n self.slider.style.backgroundColor=background\n self <=self.slider\n self.slider.bind('mousedown',self.grab_slider)\n self.ratio=ratio\n self.moving=False\n \n def grid(self,**kw):\n Widget.grid(self,**kw)\n ray=round(self.offsetWidth *0.03)\n self.min_x=-ray\n self.max_x=round(self.width -self.slider.width -ray)\n self.interval=self.width -self.slider.width\n self.slider.left=self.min_x+round(self.interval *self.ratio)\n self.slider.style.height=self.slider.style.width=f'{2 *ray}px'\n self.slider.style.borderRadius=\"50%\"\n print(self.slider.style.width)\n \n def grab_slider(self,event):\n self.x0=self.slider.left\n self.mouse0=event.clientX\n document.bind('mousemove',self.move_slider)\n document.bind('mouseup',self.release_slider)\n self.moving=True\n event.preventDefault()\n \n def move_slider(self,event):\n event.preventDefault()\n if self.moving:\n dx=event.clientX -self.mouse0\n x=self.x0+dx\n if x <self.min_x:\n x=self.min_x\n elif x >self.max_x:\n x=self.max_x\n self.slider.left=x\n self.ratio=(x -self.min_x)/self.interval\n evt=window.CustomEvent.new('move')\n evt.clientX=event.clientX\n evt.clientY=event.clientY\n self.dispatchEvent(evt)\n return False\n \n def release_slider(self,event):\n self.moving=False\n document.unbind('mousemove',self.move_slider)\n document.unbind('mouseup',self.release_slider)\n \n \nclass Text(html.DIV,Widget):\n\n default_style={\n 'borderWidth':'1px',\n 'borderStyle':'solid',\n 'borderColor':'#999',\n 'boxSizing':'border-box'\n }\n \n def __init__(self,*args,**options):\n self.apply_default_style()\n self._options=options\n super().__init__(*args)\n self.attrs['contenteditable']=True\n \n \nclass TitleBar(html.DIV,Widget):\n\n default_config={\n 'background':'#f0f0f0',\n 'cursor':'default'\n }\n \n def __init__(self,title='',*args,**options):\n self._options=self.default_config |options\n super().__init__('',*args)\n \n self.add(Label(title))\n self.close_button=Button(\"╳\",\n padding=Padding(bottom=10),\n background=\"inherit\",\n border=Border(width=0))\n \n self.add(self.close_button,align=\"right top\")\n \n self.config(**self._options)\n \n", ["browser"]], "browser.timer": [".py", "from browser import self as window\n\n\nclear_interval=window.clearInterval\n\nclear_timeout=window.clearTimeout\n\ndef set_interval(func,interval,*args):\n return window.setInterval(func,interval,*args)\n \ndef set_timeout(func,interval,*args):\n return int(window.setTimeout(func,interval,*args))\n \ndef request_animation_frame(func):\n if func.__code__.co_argcount ==0:\n raise TypeError(f'function {func.__code__.co_name}() '+\n 'should take a single argument')\n return int(window.requestAnimationFrame(func))\n \ndef cancel_animation_frame(int_id):\n window.cancelAnimationFrame(int_id)\n \ndef set_loop_timeout(x):\n\n assert isinstance(x,int)\n 
__BRYTHON__.loop_timeout=x\n", ["browser"]], "browser.idbcache": [".py", "\n\nfrom datetime import datetime\n\nfrom browser.widgets import dialog\n\nfrom browser import bind,window,document\nfrom browser.html import *\n\nidb_name=\"brython-cache\"\nidb_cx=window.indexedDB.open(idb_name)\n\ninfos={\"nb_modules\":0,\"size\":0}\n\n@bind(idb_cx,\"success\")\ndef open_success(evt):\n db=evt.target.result\n if not db.objectStoreNames.contains(\"modules\"):\n dialog.InfoDialog('indexedDB cache','db has no store \"modules\"')\n return\n \n table=TABLE(border=1)\n table <=TR(TH(col)for col in\n ['Name','Package','Size','Brython timestamp',\n 'Stdlib timestamp'])\n tx=db.transaction(\"modules\",\"readwrite\")\n store=tx.objectStore(\"modules\")\n outdated=[]\n \n openCursor=store.openCursor()\n \n @bind(openCursor,\"error\")\n def cursor_error(evt):\n print(\"open cursor error\",evt)\n \n @bind(openCursor,\"success\")\n def cursor_success(evt):\n infos['nb_modules']+=1\n cursor=evt.target.result\n if cursor:\n record=cursor.value\n timestamp=datetime.fromtimestamp(record.timestamp /1000)\n source_ts=datetime.fromtimestamp(record.source_ts /1000)\n table <=TR(TD(record.name)+\n TD(bool(record.is_package))+\n TD(len(record.content),align=\"right\")+\n TD(timestamp.strftime('%Y-%m-%d %H:%M'))+\n TD(source_ts.strftime('%Y-%m-%d %H:%M'))\n )\n infos['size']+=len(record.content)\n getattr(cursor,\"continue\")()\n else:\n panel=dialog.Dialog('indexedDB cache',top=0,left=0).panel\n panel <=H1(\"Brython indexedDB cache\")\n size='{:,d}'.format(infos['size'])\n panel <=H3(f\"{infos['nb_modules']} modules, size {size} bytes\")\n panel <=table\n", ["browser", "browser.html", "browser.widgets", "browser.widgets.dialog", "datetime"]], "browser.local_storage": [".py", "\nimport sys\nfrom browser import window,console\nimport javascript\n\nhas_local_storage=hasattr(window,'localStorage')\n\nclass _UnProvided():\n pass\n \nclass LocalStorage():\n storage_type=\"local_storage\"\n \n def __init__(self):\n if not has_local_storage:\n raise EnvironmentError(\"LocalStorage not available\")\n self.store=window.localStorage\n \n def __delitem__(self,key):\n if not isinstance(key,str):\n raise TypeError(\"key must be string\")\n if key not in self:\n raise KeyError(key)\n self.store.removeItem(key)\n \n def __getitem__(self,key):\n if not isinstance(key,str):\n raise TypeError(\"key must be string\")\n res=self.store.getItem(key)\n if res is not javascript.NULL:\n return res\n raise KeyError(key)\n \n def __setitem__(self,key,value):\n if not isinstance(key,str):\n raise TypeError(\"key must be string\")\n if not isinstance(value,str):\n raise TypeError(\"value must be string\")\n self.store.setItem(key,value)\n \n \n def __contains__(self,key):\n if not isinstance(key,str):\n raise TypeError(\"key must be string\")\n res=self.store.getItem(key)\n if res is javascript.NULL:\n return False\n return True\n \n def __iter__(self):\n keys=self.keys()\n return keys.__iter__()\n \n def get(self,key,default=None):\n if not isinstance(key,str):\n raise TypeError(\"key must be string\")\n return self.store.getItem(key)or default\n \n def pop(self,key,default=_UnProvided()):\n if not isinstance(key,str):\n raise TypeError(\"key must be string\")\n if type(default)is _UnProvided:\n ret=self.get(key)\n del self[key]\n return ret\n else:\n if key in self:\n ret=self.get(key)\n del self[key]\n return ret\n else:\n return default\n \n \n \n def keys(self):\n return[self.store.key(i)for i in range(self.store.length)]\n \n def 
values(self):\n return[self.__getitem__(k)for k in self.keys()]\n \n def items(self):\n return list(zip(self.keys(),self.values()))\n \n def clear(self):\n self.store.clear()\n \n def __len__(self):\n return self.store.length\n \nif has_local_storage:\n storage=LocalStorage()\n", ["browser", "javascript", "sys"]], "browser.indexed_db": [".py", "class EventListener:\n def __init__(self,events=[]):\n self._events=events\n \n def append(self,event):\n self._events.append(event)\n \n def fire(self,e):\n for _event in self._events:\n _event(e)\n \nclass IndexedDB:\n def __init__(self):\n if not __BRYTHON__.has_indexedDB:\n raise NotImplementedError(\"Your browser doesn't support indexedDB\")\n return\n \n self._indexedDB=__BRYTHON__.indexedDB()\n self._db=None\n self._version=None\n \n def _onsuccess(self,event):\n self._db=event.target.result\n \n def open(self,name,onsuccess,version=1.0,onerror=None,\n onupgradeneeded=None):\n self._version=version\n _result=self._indexedDB.open(name,version)\n _success=EventListener([self._onsuccess,onsuccess])\n _result.onsuccess=_success.fire\n _result.onupgradeneeded=onupgradeneeded\n \n \n def onerror(e):\n print(\"onerror: %s:%s\"%(e.type,e.target.result))\n \n def onblocked(e):\n print(\"blocked: %s:%s\"%(e.type,e.result))\n \n _result.onerror=onerror\n _result.onblocked=onblocked\n \n def transaction(self,entities,mode='read'):\n return Transaction(self._db.transaction(entities,mode))\n \nclass Transaction:\n\n def __init__(self,transaction):\n self._transaction=transaction\n \n def objectStore(self,name):\n return ObjectStore(self._transaction.objectStore(name))\n \nclass ObjectStore:\n\n def __init__(self,objectStore):\n self._objectStore=objectStore\n self._data=[]\n \n def clear(self,onsuccess=None,onerror=None):\n _result=self._objectStore.clear()\n \n if onsuccess is not None:\n _result.onsuccess=onsuccess\n \n if onerror is not None:\n _result.onerror=onerror\n \n def _helper(self,func,object,onsuccess=None,onerror=None):\n _result=func(object)\n \n if onsuccess is not None:\n _result.onsuccess=onsuccess\n \n if onerror is not None:\n _result.onerror=onerror\n \n def put(self,obj,key=None,onsuccess=None,onerror=None):\n _r=self._objectStore.put(obj,key)\n _r.onsuccess=onsuccess\n _r.onerror=onerror\n \n def add(self,obj,key,onsuccess=None,onerror=None):\n _r=self._objectStore.add(obj,key)\n _r.onsuccess=onsuccess\n _r.onerror=onerror\n \n \n def delete(self,index,onsuccess=None,onerror=None):\n self._helper(self._objectStore.delete,index,onsuccess,onerror)\n \n def query(self,*args):\n self._data=[]\n def onsuccess(event):\n cursor=event.target.result\n if cursor is not None:\n self._data.append(cursor.value)\n getattr(cursor,\"continue\")()\n \n self._objectStore.openCursor(args).onsuccess=onsuccess\n \n def fetchall(self):\n yield self._data\n \n def get(self,key,onsuccess=None,onerror=None):\n self._helper(self._objectStore.get,key,onsuccess,onerror)\n", []], "browser.webcomponent": [".py", "from _webcomponent import *\n", ["_webcomponent"]], "browser.svg": [".py", "from _svg import *\n", ["_svg"]], "browser.markdown": [".py", "\n\nimport re\n\nimport random\n\nletters='abcdefghijklmnopqrstuvwxyz'\nletters +=letters.upper()+'0123456789'\n\nclass URL:\n\n def __init__(self,src):\n elts=src.split(maxsplit=1)\n self.href=elts[0]\n self.alt=''\n if len(elts)==2:\n alt=elts[1]\n if alt[0]=='\"'and alt[-1]=='\"':\n self.alt=alt[1:-1]\n elif alt[0]==\"'\"and alt[-1]==\"'\":\n self.alt=alt[1:-1]\n elif alt[0]==\"(\"and alt[-1]==\")\":\n 
self.alt=alt[1:-1]\n \n \nclass CodeBlock:\n\n def __init__(self,line):\n self.lines=[line]\n if line.startswith(\"```\"):\n if len(line)>3:\n self.info=line[3:]\n else:\n self.info=\"block\"\n elif line.startswith(\"`\")and len(line)>1:\n self.info=line[1:]\n elif line.startswith(\">>>\"):\n self.info=\"python-console\"\n else:\n self.info=None\n \n def to_html(self):\n if self.lines[0].startswith(\"`\"):\n self.lines.pop(0)\n res=escape('\\n'.join(self.lines))\n res=unmark(res)\n _class=self.info or \"marked\"\n res='

    %s
    \\n'%(_class,res)\n return res,[]\n \n \nclass Marked:\n\n def __init__(self,line=''):\n self.line=line\n self.children=[]\n \n def to_html(self):\n return apply_markdown(self.line)\n \n \nclass Script:\n\n def __init__(self,src):\n self.src=src\n \n def to_html(self):\n return self.src,[]\n \n \n \nrefs={}\nref_pattern=r\"^\\[(.*)\\]:\\s+(.*)\"\n\ndef mark(src):\n\n global refs\n refs={}\n \n \n \n \n \n \n \n \n src=src.replace('\\r\\n','\\n')\n \n \n src=re.sub(r'(.*?)\\n=+\\n','\\n# \\\\1\\n',src)\n src=re.sub(r'(.*?)\\n-+\\n','\\n## \\\\1\\n',src)\n \n lines=src.split('\\n')+['']\n \n i=bq=0\n ul=ol=0\n \n while i '):\n nb=1\n while nb ':\n nb +=1\n lines[i]=lines[i][nb:]\n if nb >bq:\n lines.insert(i,'
    '*(nb -bq))\n i +=1\n bq=nb\n elif nb '*(bq -nb))\n i +=1\n bq=nb\n elif bq >0:\n lines.insert(i,'
    '*bq)\n i +=1\n bq=0\n \n \n if(lines[i].strip()and lines[i].lstrip()[0]in '-+*'\n and len(lines[i].lstrip())>1\n and lines[i].lstrip()[1]==' '\n and(i ==0 or ul or not lines[i -1].strip())):\n \n nb=1+len(lines[i])-len(lines[i].lstrip())\n lines[i]='
  • '+lines[i][nb:]\n if nb >ul:\n lines.insert(i,'
      '*(nb -ul))\n i +=1\n elif nb '*(ul -nb))\n i +=1\n ul=nb\n elif ul and not lines[i].strip():\n if(i 1 and nline[1]==' ':\n pass\n else:\n lines.insert(i,'
    '*ul)\n i +=1\n ul=0\n \n \n mo=re.search(r'^(\\d+\\.)',lines[i])\n if mo:\n if not ol:\n lines.insert(i,'
      ')\n i +=1\n lines[i]='
    1. '+lines[i][len(mo.groups()[0]):]\n ol=1\n elif(ol and not lines[i].strip()and i ')\n i +=1\n ol=0\n \n i +=1\n \n if ul:\n lines.append(''*ul)\n if ol:\n lines.append('
    '*ol)\n if bq:\n lines.append(''*bq)\n \n sections=[]\n scripts=[]\n section=Marked()\n \n i=0\n while i '):\n sections.append(Script('\\n'.join(lines[i:j+1])))\n for k in range(i,j+1):\n lines[k]=''\n section=Marked()\n break\n j +=1\n i=j\n continue\n \n \n elif line.startswith('#'):\n level=1\n line=lines[i]\n while level ','>')\n czone=czone.replace('_','_')\n czone=czone.replace('*','*')\n return czone\n \ndef s_escape(mo):\n\n czone=mo.string[mo.start():mo.end()]\n return escape(czone)\n \ndef unmark(code_zone):\n\n code_zone=code_zone.replace('_','_')\n return code_zone\n \ndef s_unmark(mo):\n\n code_zone=mo.string[mo.start():mo.end()]\n code_zone=code_zone.replace('_','_')\n return code_zone\n \ndef apply_markdown(src):\n\n scripts=[]\n key=None\n \n i=0\n while i 0 and src[i -1]=='!'\n start_a=i+1\n nb=1\n while True:\n end_a=src.find(']',i)\n if end_a ==-1:\n break\n nb +=src[i+1:end_a].count('[')-1\n i=end_a+1\n if nb ==0:\n break\n if end_a >-1 and src[start_a:end_a].find('\\n')==-1:\n link=src[start_a:end_a]\n rest=src[end_a+1:].lstrip()\n if rest and rest[0]=='(':\n j=0\n while True:\n end_href=rest.find(')',j)\n if end_href ==-1:\n break\n if rest[end_href -1]=='\\\\':\n j=end_href+1\n else:\n break\n if end_href >-1 and rest[:end_href].find('\\n')==-1:\n if img_link:\n tag=('\"'+link+'\"')\n src=src[:start_a -2]+tag+rest[end_href+1:]\n else:\n tag=('
    '+link\n +'')\n src=src[:start_a -1]+tag+rest[end_href+1:]\n i=start_a+len(tag)\n elif rest and rest[0]=='[':\n j=0\n while True:\n end_key=rest.find(']',j)\n if end_key ==-1:\n break\n if rest[end_key -1]=='\\\\':\n j=end_key+1\n else:\n break\n if end_key >-1 and rest[:end_key].find('\\n')==-1:\n if not key:\n key=link\n if key.lower()not in refs:\n raise KeyError('unknown reference %s'%key)\n url=refs[key.lower()]\n tag=''+link+''\n src=src[:start_a -1]+tag+rest[end_key+1:]\n i=start_a+len(tag)\n \n i +=1\n \n \n \n \n \n \n \n \n \n rstr=' '+''.join(random.choice(letters)for i in range(16))+' '\n \n i=0\n state=None\n start=-1\n data=''\n tags=[]\n while i 'and state is None:\n tags.append(src[i:j+1])\n src=src[:i]+rstr+src[j+1:]\n i +=len(rstr)\n break\n elif state =='\"'or state ==\"'\":\n data +=src[j]\n elif src[j]=='\\n':\n \n \n src=src[:i]+'<'+src[i+1:]\n j=i+4\n break\n j +=1\n elif src[i]=='`'and i >0:\n if src[i -1]!='\\\\':\n \n j=i+1\n while j \\1'%(tag,tag),src)\n \n \n src=re.sub(r'\\*(.+?)\\*',r'<%s>\\1'%('EM','EM'),src)\n \n \n \n src=re.sub(r'\\b_(.*?)_\\b',r'\\1',src,\n flags=re.M)\n \n \n code_pattern=r'\\`(.*?)\\`'\n src=re.sub(code_pattern,r'\\1',src)\n \n \n while True:\n pos=src.rfind(rstr)\n if pos ==-1:\n break\n repl=tags.pop()\n src=src[:pos]+repl+src[pos+len(rstr):]\n \n src='

    '+src+'

    '\n \n return src,scripts\n", ["random", "re"]], "browser.template": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport tb as traceback\nfrom browser import document,html\n\n\n\nvoid_elements=[\"AREA\",\"BASE\",\"BR\",\"COL\",\"EMBED\",\"HR\",\"IMG\",\"INPUT\",\n\"LINK\",\"META\",\"PARAM\",\"SOURCE\",\"TRACK\",\"WBR\"]\n\ndef copy(obj):\n if isinstance(obj,dict):\n res={}\n for key,value in obj.items():\n res[key]=copy(value)\n return res\n elif isinstance(obj,(list,tuple)):\n return obj[:]\n elif isinstance(obj,set):\n return{x for x in obj}\n else:\n return obj\n \n \nclass ElementData:\n ''\n \n \n def __init__(self,**kw):\n ''\n\n\n \n self.__keys__=set()\n for key,value in kw.items():\n object.__setattr__(self,key,value)\n self.__keys__.add(key)\n \n def __setattr__(self,attr,value):\n ''\n\n\n \n object.__setattr__(self,attr,value)\n if attr !=\"__keys__\":\n self.__keys__.add(attr)\n \n def to_dict(self):\n ''\n return{k:getattr(self,k)for k in self.__keys__}\n \n def clone(self):\n ''\n\n\n \n return copy(self.to_dict())\n \n \nclass TemplateError(Exception):\n pass\n \n \nclass Template:\n\n def __init__(self,element,callbacks=[]):\n if isinstance(element,str):\n element=document[element]\n self.element=element\n self.line_mapping={}\n self.line_num=1\n self.indent=0\n self.python=\"\"\n self.parse(element)\n self.callbacks=callbacks\n \n def add(self,content,elt):\n self.python +=content\n self.line_mapping[self.line_num]=elt\n if content.endswith(\"\\n\"):\n self.line_num +=1\n \n def add_indent(self,content,elt):\n self.add(\" \"*self.indent+content,elt)\n \n def write(self,content):\n self.html +=str(content)+\"\\n\"\n \n def parse(self,elt):\n ''\n\n \n \n \n is_block=False\n \n if elt.nodeType ==3:\n \n if elt.text.strip():\n text=elt.text.replace('\"',\""\")\n text=text.replace(\"\\n\",\"\\\\n\")\n text='\"'+text+'\"'\n \n nb_braces=elt.text.count(\"{\")\n if nb_braces:\n nb_double_braces=elt.text.count(\"{{\")\n if nb_double_braces !=nb_braces:\n lines=[line for line in elt.text.split(\"\\n\")\n if line.strip()]\n text='f\"\"\"'+\" \".join(lines)+'\"\"\"'\n self.add_indent(\"__write__(\"+text+\")\\n\",elt)\n \n elif hasattr(elt,\"tagName\"):\n start_tag=\"__write__('<\"+elt.tagName\n block=None\n \n \n static_attrs=[]\n dynamic_attrs=[]\n for item in elt.attributes:\n if item.name ==\"b-code\":\n \n block=item.value.rstrip(\":\")+\":\"\n elif item.name ==\"b-include\":\n \n elt.html=open(item.value).read()\n else:\n value=item.value.replace(\"\\n\",\"\")\n if \"{\"in value:\n dynamic_attrs.append(\"'\"+item.name+\"', f'\"+\n value+\"'\")\n else:\n static_attrs.append(item.name+'=\"'+value+'\"')\n \n if block:\n self.add_indent(block+\"\\n\",elt)\n self.indent +=1\n is_block=True\n \n self.add_indent(start_tag,elt)\n \n if static_attrs or dynamic_attrs:\n self.add(\" \",elt)\n \n for attr in static_attrs:\n self.add_indent(attr+\" \",elt)\n \n if dynamic_attrs:\n self.add(\"')\\n\",elt)\n for attr in dynamic_attrs:\n self.add_indent(\"__render_attr__(\"+attr+\")\\n\",elt)\n self.add_indent(\"__write__('>')\\n\",elt)\n else:\n self.add_indent(\">')\\n\",elt)\n \n for child in elt.childNodes:\n self.parse(child)\n \n if hasattr(elt,\"tagName\")and elt.tagName not in void_elements:\n self.add_indent(\"__write__('')\\n\",elt)\n \n if is_block:\n self.indent -=1\n \n def on(self,element,event,callback):\n def func(evt):\n cache=self.data.clone()\n callback(evt,self)\n new_data=self.data.to_dict()\n 
if new_data !=cache:\n self.render(**new_data)\n element.bind(event,func)\n \n def render_attr(self,name,value):\n ''\n\n\n\n\n\n \n if value ==\"False\":\n return\n elif value ==\"True\":\n self.html +=\" \"+name\n else:\n self.html +=\" \"+name+'=\"'+str(value)+'\"'\n \n def render(self,**ns):\n ''\n\n \n \n self.data=ElementData(**ns)\n \n \n ns.update({\"__write__\":self.write,\n \"__render_attr__\":self.render_attr})\n \n self.html=\"\"\n \n \n try:\n exec(self.python,ns)\n except Exception as exc:\n msg=traceback.format_exc()\n if isinstance(exc,SyntaxError):\n line_no=exc.args[2]\n else:\n tb=exc.__traceback__\n while tb is not None:\n print('template 265, tb',tb,tb.tb_frame,tb.tb_lineno)\n line_no=tb.tb_lineno\n tb=tb.tb_next\n elt=self.line_mapping[line_no]\n print(\"Error rendering the element:\",elt.nodeType)\n if elt.nodeType ==3:\n print(elt.textContent)\n else:\n try:\n print(elt.outerHTML)\n except AttributeError:\n print('no outerHTML for',elt)\n print(elt.html)\n print(f\"{exc.__class__.__name__}: {exc}\")\n return\n \n \n \n \n \n \n \n if self.element.nodeType !=9:\n rank=self.element.index()\n parent=self.element.parent\n self.element.outerHTML=self.html\n self.element=parent.childNodes[rank]\n \n else:\n \n self.element.html=self.html\n \n \n self.element.unbind()\n callbacks={}\n for callback in self.callbacks:\n callbacks[callback.__name__]=callback\n \n \n \n for element in self.element.select(\"*[b-on]\"):\n bindings=element.getAttribute(\"b-on\")\n bindings=bindings.split(\";\")\n for binding in bindings:\n parts=binding.split(\":\")\n if not len(parts)==2:\n raise TemplateError(f\"wrong binding: {binding}\")\n event,func_name=[x.strip()for x in parts]\n if not func_name in callbacks:\n print(element.outerHTML)\n raise TemplateError(f\"unknown callback: {func_name}\")\n self.on(element,event,callbacks[func_name])\n", ["browser", "tb"]], "browser.ajax": [".py", "from _ajax import *\n", ["_ajax"]], "browser.highlight": [".py", "import re\n\nfrom browser import html\n\nletters='abcdefghijklmnopqrstuvwxyz'\nletters +=letters.upper()+'_'\ndigits='0123456789'\n\nbuiltin_funcs=\"\"\"abs|dict|help|min|setattr|\nall|dir|hex|next|slice|\nany|divmod|id|object|sorted|\nascii|enumerate|input|oct|staticmethod|\nbin|eval|int|open|str|\nbool|exec|isinstance|ord|sum|\nbytearray|filter|issubclass|pow|super|\nbytes|float|iter|print|tuple|\ncallable|format|len|property|type|\nchr|frozenset|list|range|vars|\nclassmethod|getattr|locals|repr|zip|\ncompile|globals|map|reversed|__import__|\ncomplex|hasattr|max|round|\ndelattr|hash|memoryview|set|\n\"\"\"\n\nkeywords=[\n'False',\n'None',\n'True',\n'and',\n'as',\n'assert',\n'async',\n'await',\n'break',\n'class',\n'continue',\n'def',\n'del',\n'elif',\n'else',\n'except',\n'finally',\n'for',\n'from',\n'global',\n'if',\n'import',\n'in',\n'is',\n'lambda',\n'nonlocal',\n'not',\n'or',\n'pass',\n'raise',\n'return',\n'try',\n'while',\n'with',\n'yield',\n]\nkw_pattern='^('+'|'.join(keywords)+')$'\nbf_pattern='^('+builtin_funcs.replace(\"\\n\",\"\")+')$'\n\ndef escape(txt):\n txt=txt.replace('<','<')\n txt=txt.replace('>','>')\n return txt\n \ndef highlight(txt):\n res=html.PRE()\n i=0\n name=''\n while i if the item has a submenu\n*/\n.brython-menu-submenu-item {\n font-family: var(--brython-menu-font-family);\n padding: 0.3em 0.3em 0.3em 1em;\n cursor: default;\n}\n\n/* end of browser.widgets.menu classes */\n\n\"\"\"\n\n\nclass Menu:\n\n def __init__(self,container=document.body,parent=None,default_css=True):\n ''\n\n \n self.container=container\n 
self.parent=parent\n \n if default_css:\n \n for stylesheet in document.styleSheets:\n if stylesheet.ownerNode.id ==\"brython-menu\":\n break\n else:\n document <=html.STYLE(style_sheet,id=\"brython-menu\")\n \n self.default_css=default_css\n \n if parent:\n parent.submenu=html.TABLE(Class=\"brython-menu-submenu\")\n parent.submenu.style.position=\"absolute\"\n parent.submenu.style.display=\"none\"\n self.container <=parent.submenu\n \n parent.bind(\"click\",self.unfold)\n \n if not hasattr(self.container,\"bind_document\"):\n \n document.bind(\"click\",self.hide_menus)\n self.container.bind_document=True\n \n def add_item(self,label,callback=None,menu=False):\n if self.parent is None:\n \n item=html.SPAN(label,Class=\"brython-menu-navbar-item\")\n self.container <=item\n item.bind(\"click\",self.hide_menus)\n else:\n \n item=html.TR(Class=\"brython-menu-submenu-row\")\n self.parent.submenu <=item\n item <=html.TD(label,Class=\"brython-menu-submenu-item\")\n item <=html.TD(\">\"if menu else \" \",\n Class=\"brython-menu-submenu-item\",\n paddingLeft=\"2em\")\n \n if callback is not None:\n item.bind(\"click\",callback)\n \n return item\n \n def add_link(self,label,href):\n ''\n if self.parent is None:\n \n item=html.A(label,Class=\"brython-menu-navbar-link\",href=href)\n self.container <=item\n else:\n \n item=html.TR(Class=\"brython-menu-submenu-row\")\n self.parent.submenu <=item\n item <=html.TD(html.A(label,Class=\"brython-menu-submenu-link\",\n href=href))\n \n return item\n \n def add_menu(self,label):\n ''\n \n item=self.add_item(label,menu=True)\n \n if self.parent is None:\n \n span=html.SPAN(Class=\"brython-menu-submenu\")\n span.style.position=\"absolute\"\n \n return Menu(self.container,item,default_css=self.default_css)\n \n def hide_menus(self,*args):\n ''\n for css in[\".brython-menu-navbar-item-selected\",\n \".brython-menu-submenu-row-selected\"]:\n for item in document.select(css):\n item.classList.remove(css[1:])\n for div in document.select(\".brython-menu-submenu\"):\n if div.style.display !=\"none\":\n div.style.display=\"none\"\n \n def hide_submenus(self,table):\n ''\n for row in table.select(\"TR\"):\n if hasattr(row,\"submenu\"):\n row.submenu.style.display=\"none\"\n self.hide_submenus(row.submenu)\n \n def unfold(self,ev):\n ''\n target=ev.target\n if target.nodeName ==\"SPAN\":\n \n selected=document.select(\".brython-menu-navbar-item-selected\")\n \n if selected:\n self.hide_menus()\n \n for item in selected:\n item.classList.remove(\"brython-menu-navbar-item-selected\")\n \n submenu=target.submenu\n \n target.classList.add(\"brython-menu-navbar-item-selected\")\n submenu.style.left=f\"{target.abs_left}px\"\n submenu.style.top=f\"{target.abs_top+target.offsetHeight}px\"\n \n \n \n if not selected:\n for item in document.select(\".brython-menu-navbar-item\"):\n item.bind(\"mouseenter\",self.unfold)\n \n \n submenu.style.display=\"block\"\n \n else:\n target=target.closest(\"TR\")\n \n \n table=target.closest(\"TABLE\")\n self.hide_submenus(table)\n \n \n selected=table.select(\".brython-menu-submenu-row-selected\")\n for row in selected:\n row.classList.remove(\"brython-menu-submenu-row-selected\")\n \n \n target.classList.add(\"brython-menu-submenu-row-selected\")\n \n if hasattr(target,\"submenu\"):\n \n target.submenu.style.top=f\"{target.abs_top}px\"\n target.submenu.style.left=\\\n f\"{target.abs_left+target.offsetWidth}px\"\n target.submenu.style.display=\"block\"\n \n if not selected:\n \n \n for row in table.select(\"TR\"):\n 
row.bind(\"mouseenter\",self.unfold)\n \n \n \n \n ev.stopPropagation()\n", ["browser"]], "json": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__='2.0.9'\n__all__=[\n'dump','dumps','load','loads',\n'JSONDecoder','JSONDecodeError','JSONEncoder',\n]\n\n__author__='Bob Ippolito '\n\n\n\n\n\nclass codecs:\n\n BOM_UTF8=b'\\xef\\xbb\\xbf'\n BOM_LE=BOM_UTF16_LE=b'\\xff\\xfe'\n BOM_BE=BOM_UTF16_BE=b'\\xfe\\xff'\n BOM_UTF32_LE=b'\\xff\\xfe\\x00\\x00'\n BOM_UTF32_BE=b'\\x00\\x00\\xfe\\xff'\n \n \nimport _json\nfrom.encoder import JSONEncoder\n\nJSONDecoder=_json.JSONDecoder\n\nclass decoder:\n JSONDecoder=_json.JSONDecoder\n \nclass JSONDecodeError(ValueError):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,msg,doc,pos):\n lineno=doc.count('\\n',0,pos)+1\n colno=pos -doc.rfind('\\n',0,pos)\n errmsg='%s: line %d column %d (char %d)'%(msg,lineno,colno,pos)\n ValueError.__init__(self,errmsg)\n self.msg=msg\n self.doc=doc\n self.pos=pos\n self.lineno=lineno\n self.colno=colno\n \n def __reduce__(self):\n return self.__class__,(self.msg,self.doc,self.pos)\n \ndef dump(obj,fp,**kw):\n fp.write(dumps(obj,**kw))\n \ndef dumps(obj,*,skipkeys=False,ensure_ascii=True,check_circular=True,\nallow_nan=True,cls=None,indent=None,separators=None,\ndefault=None,sort_keys=False,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if cls is None:\n return _json.dumps(obj,1,\n skipkeys=skipkeys,ensure_ascii=ensure_ascii,\n check_circular=check_circular,allow_nan=allow_nan,indent=indent,\n separators=separators,default=default,sort_keys=sort_keys,\n **kw)\n return cls(\n skipkeys=skipkeys,ensure_ascii=ensure_ascii,\n check_circular=check_circular,allow_nan=allow_nan,indent=indent,\n separators=separators,default=default,sort_keys=sort_keys,\n **kw).encode(obj)\n \ndef detect_encoding(b):\n bstartswith=b.startswith\n if bstartswith((codecs.BOM_UTF32_BE,codecs.BOM_UTF32_LE)):\n return 'utf-32'\n if bstartswith((codecs.BOM_UTF16_BE,codecs.BOM_UTF16_LE)):\n return 'utf-16'\n if bstartswith(codecs.BOM_UTF8):\n return 'utf-8-sig'\n \n if len(b)>=4:\n if not b[0]:\n \n \n return 'utf-16-be'if b[1]else 'utf-32-be'\n if not b[1]:\n \n \n \n return 'utf-16-le'if b[2]or b[3]else 'utf-32-le'\n elif len(b)==2:\n if not b[0]:\n \n return 'utf-16-be'\n if not b[1]:\n \n return 'utf-16-le'\n \n return 'utf-8'\n \n \ndef load(fp,*,cls=None,object_hook=None,parse_float=None,\nparse_int=None,parse_constant=None,object_pairs_hook=None,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return loads(fp.read(),\n cls=cls,object_hook=object_hook,\n parse_float=parse_float,parse_int=parse_int,\n parse_constant=parse_constant,object_pairs_hook=object_pairs_hook,**kw)\n \n \ndef loads(s,*,cls=None,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(s,str):\n if s.startswith('\\ufeff'):\n raise JSONDecodeError(\"Unexpected UTF-8 BOM (decode using utf-8-sig)\",\n s,0)\n else:\n if not isinstance(s,(bytes,bytearray)):\n raise TypeError(f'the JSON object must be str, bytes or bytearray, '\n f'not {s.__class__.__name__}')\n s=s.decode(detect_encoding(s),'surrogatepass')\n \n \n if \"encoding\"in kw:\n import warnings\n warnings.warn(\n \"'encoding' is ignored and deprecated. 
It will be removed in Python 3.9\",\n DeprecationWarning,\n stacklevel=2\n )\n del kw['encoding']\n \n if cls is None:\n \n \n return _json.loads(s,**kw)\n if object_hook is not None:\n kw['object_hook']=object_hook\n if object_pairs_hook is not None:\n kw['object_pairs_hook']=object_pairs_hook\n if parse_float is not None:\n kw['parse_float']=parse_float\n if parse_int is not None:\n kw['parse_int']=parse_int\n if parse_constant is not None:\n kw['parse_constant']=parse_constant\n return cls(**kw).decode(s)\n", ["_json", "json.encoder", "warnings"], 1], "json.encoder": [".py", "''\n\nimport re\n\ntry:\n from _json import encode_basestring_ascii as c_encode_basestring_ascii\nexcept ImportError:\n c_encode_basestring_ascii=None\ntry:\n from _json import encode_basestring as c_encode_basestring\nexcept ImportError:\n c_encode_basestring=None\ntry:\n from _json import make_encoder as c_make_encoder\nexcept ImportError:\n c_make_encoder=None\n \nESCAPE=re.compile(r'[\\x00-\\x1f\\\\\"\\b\\f\\n\\r\\t]')\nESCAPE_ASCII=re.compile(r'([\\\\\"]|[^\\ -~])')\nHAS_UTF8=re.compile(b'[\\x80-\\xff]')\nESCAPE_DCT={\n'\\\\':'\\\\\\\\',\n'\"':'\\\\\"',\n'\\b':'\\\\b',\n'\\f':'\\\\f',\n'\\n':'\\\\n',\n'\\r':'\\\\r',\n'\\t':'\\\\t',\n}\nfor i in range(0x20):\n ESCAPE_DCT.setdefault(chr(i),'\\\\u{0:04x}'.format(i))\n \ndel i\n\nINFINITY=float('inf')\n\ndef py_encode_basestring(s):\n ''\n\n \n def replace(match):\n return ESCAPE_DCT[match.group(0)]\n return '\"'+ESCAPE.sub(replace,s)+'\"'\n \n \nencode_basestring=(c_encode_basestring or py_encode_basestring)\n\n\ndef py_encode_basestring_ascii(s):\n ''\n\n \n def replace(match):\n s=match.group(0)\n try:\n return ESCAPE_DCT[s]\n except KeyError:\n n=ord(s)\n if n <0x10000:\n return '\\\\u{0:04x}'.format(n)\n \n else:\n \n n -=0x10000\n s1=0xd800 |((n >>10)&0x3ff)\n s2=0xdc00 |(n&0x3ff)\n return '\\\\u{0:04x}\\\\u{1:04x}'.format(s1,s2)\n return '\"'+ESCAPE_ASCII.sub(replace,s)+'\"'\n \n \nencode_basestring_ascii=(\nc_encode_basestring_ascii or py_encode_basestring_ascii)\n\nclass JSONEncoder(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n item_separator=', '\n key_separator=': '\n def __init__(self,*,skipkeys=False,ensure_ascii=True,\n check_circular=True,allow_nan=True,sort_keys=False,\n indent=None,separators=None,default=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n self.skipkeys=skipkeys\n self.ensure_ascii=ensure_ascii\n self.check_circular=check_circular\n self.allow_nan=allow_nan\n self.sort_keys=sort_keys\n self.indent=indent\n if separators is not None:\n self.item_separator,self.key_separator=separators\n elif indent is not None:\n self.item_separator=','\n if default is not None:\n self.default=default\n \n def default(self,o):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise TypeError(f'Object of type {o.__class__.__name__} '\n f'is not JSON serializable')\n \n def encode(self,o):\n ''\n\n\n\n\n\n \n \n if isinstance(o,str):\n if self.ensure_ascii:\n return encode_basestring_ascii(o)\n else:\n return encode_basestring(o)\n \n \n \n chunks=self.iterencode(o,_one_shot=True)\n if not isinstance(chunks,(list,tuple)):\n chunks=list(chunks)\n return ''.join(chunks)\n \n def iterencode(self,o,_one_shot=False):\n ''\n\n\n\n\n\n\n\n \n if self.check_circular:\n markers={}\n else:\n markers=None\n if self.ensure_ascii:\n _encoder=encode_basestring_ascii\n else:\n _encoder=encode_basestring\n \n def floatstr(o,allow_nan=self.allow_nan,\n _repr=float.__repr__,_inf=INFINITY,_neginf=-INFINITY):\n 
\n \n \n \n if o !=o:\n text='NaN'\n elif o ==_inf:\n text='Infinity'\n elif o ==_neginf:\n text='-Infinity'\n else:\n return _repr(o)\n \n if not allow_nan:\n raise ValueError(\n \"Out of range float values are not JSON compliant: \"+\n repr(o))\n \n return text\n \n \n if(_one_shot and c_make_encoder is not None\n and self.indent is None):\n _iterencode=c_make_encoder(\n markers,self.default,_encoder,self.indent,\n self.key_separator,self.item_separator,self.sort_keys,\n self.skipkeys,self.allow_nan)\n else:\n _iterencode=_make_iterencode(\n markers,self.default,_encoder,self.indent,floatstr,\n self.key_separator,self.item_separator,self.sort_keys,\n self.skipkeys,_one_shot)\n return _iterencode(o,0)\n \ndef _make_iterencode(markers,_default,_encoder,_indent,_floatstr,\n_key_separator,_item_separator,_sort_keys,_skipkeys,_one_shot,\n\nValueError=ValueError,\ndict=dict,\nfloat=float,\nid=id,\nint=int,\nisinstance=isinstance,\nlist=list,\nstr=str,\ntuple=tuple,\n_intstr=int.__repr__,\n):\n\n if _indent is not None and not isinstance(_indent,str):\n _indent=' '*_indent\n \n def _iterencode_list(lst,_current_indent_level):\n if not lst:\n yield '[]'\n return\n if markers is not None:\n markerid=id(lst)\n if markerid in markers:\n raise ValueError(\"Circular reference detected\")\n markers[markerid]=lst\n buf='['\n if _indent is not None:\n _current_indent_level +=1\n newline_indent='\\n'+_indent *_current_indent_level\n separator=_item_separator+newline_indent\n buf +=newline_indent\n else:\n newline_indent=None\n separator=_item_separator\n first=True\n for value in lst:\n if first:\n first=False\n else:\n buf=separator\n if isinstance(value,str):\n yield buf+_encoder(value)\n elif value is None:\n yield buf+'null'\n elif value is True:\n yield buf+'true'\n elif value is False:\n yield buf+'false'\n elif isinstance(value,int):\n \n \n \n yield buf+_intstr(value)\n elif isinstance(value,float):\n \n yield buf+_floatstr(value)\n else:\n yield buf\n if isinstance(value,(list,tuple)):\n chunks=_iterencode_list(value,_current_indent_level)\n elif isinstance(value,dict):\n chunks=_iterencode_dict(value,_current_indent_level)\n else:\n chunks=_iterencode(value,_current_indent_level)\n yield from chunks\n if newline_indent is not None:\n _current_indent_level -=1\n yield '\\n'+_indent *_current_indent_level\n yield ']'\n if markers is not None:\n del markers[markerid]\n \n def _iterencode_dict(dct,_current_indent_level):\n if not dct:\n yield '{}'\n return\n if markers is not None:\n markerid=id(dct)\n if markerid in markers:\n raise ValueError(\"Circular reference detected\")\n markers[markerid]=dct\n yield '{'\n if _indent is not None:\n _current_indent_level +=1\n newline_indent='\\n'+_indent *_current_indent_level\n item_separator=_item_separator+newline_indent\n yield newline_indent\n else:\n newline_indent=None\n item_separator=_item_separator\n first=True\n if _sort_keys:\n items=sorted(dct.items())\n else:\n items=dct.items()\n for key,value in items:\n if isinstance(key,str):\n pass\n \n \n elif isinstance(key,float):\n \n key=_floatstr(key)\n elif key is True:\n key='true'\n elif key is False:\n key='false'\n elif key is None:\n key='null'\n elif isinstance(key,int):\n \n key=_intstr(key)\n elif _skipkeys:\n continue\n else:\n raise TypeError(f'keys must be str, int, float, bool or None, '\n f'not {key.__class__.__name__}')\n if first:\n first=False\n else:\n yield item_separator\n yield _encoder(key)\n yield _key_separator\n if isinstance(value,str):\n yield _encoder(value)\n elif value 
is None:\n yield 'null'\n elif value is True:\n yield 'true'\n elif value is False:\n yield 'false'\n elif isinstance(value,int):\n \n yield _intstr(value)\n elif isinstance(value,float):\n \n yield _floatstr(value)\n else:\n if isinstance(value,(list,tuple)):\n chunks=_iterencode_list(value,_current_indent_level)\n elif isinstance(value,dict):\n chunks=_iterencode_dict(value,_current_indent_level)\n else:\n chunks=_iterencode(value,_current_indent_level)\n yield from chunks\n if newline_indent is not None:\n _current_indent_level -=1\n yield '\\n'+_indent *_current_indent_level\n yield '}'\n if markers is not None:\n del markers[markerid]\n \n def _iterencode(o,_current_indent_level):\n if isinstance(o,str):\n yield _encoder(o)\n elif o is None:\n yield 'null'\n elif o is True:\n yield 'true'\n elif o is False:\n yield 'false'\n elif isinstance(o,int):\n \n yield _intstr(o)\n elif isinstance(o,float):\n \n yield _floatstr(o)\n elif isinstance(o,(list,tuple)):\n yield from _iterencode_list(o,_current_indent_level)\n elif isinstance(o,dict):\n yield from _iterencode_dict(o,_current_indent_level)\n else:\n if markers is not None:\n markerid=id(o)\n if markerid in markers:\n raise ValueError(\"Circular reference detected\")\n markers[markerid]=o\n o=_default(o)\n yield from _iterencode(o,_current_indent_level)\n if markers is not None:\n del markers[markerid]\n return _iterencode\n", ["_json", "re"]], "http.cookies": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nr\"\"\"\nHere's a sample session to show how to use this module.\nAt the moment, this is the only documentation.\n\nThe Basics\n----------\n\nImporting is easy...\n\n >>> from http import cookies\n\nMost of the time you start by creating a cookie.\n\n >>> C = cookies.SimpleCookie()\n\nOnce you've created your Cookie, you can add values just as if it were\na dictionary.\n\n >>> C = cookies.SimpleCookie()\n >>> C[\"fig\"] = \"newton\"\n >>> C[\"sugar\"] = \"wafer\"\n >>> C.output()\n 'Set-Cookie: fig=newton\\r\\nSet-Cookie: sugar=wafer'\n\nNotice that the printable representation of a Cookie is the\nappropriate format for a Set-Cookie: header. This is the\ndefault behavior. You can change the header and printed\nattributes by using the .output() function\n\n >>> C = cookies.SimpleCookie()\n >>> C[\"rocky\"] = \"road\"\n >>> C[\"rocky\"][\"path\"] = \"/cookie\"\n >>> print(C.output(header=\"Cookie:\"))\n Cookie: rocky=road; Path=/cookie\n >>> print(C.output(attrs=[], header=\"Cookie:\"))\n Cookie: rocky=road\n\nThe load() method of a Cookie extracts cookies from a string. In a\nCGI script, you would use this method to extract the cookies from the\nHTTP_COOKIE environment variable.\n\n >>> C = cookies.SimpleCookie()\n >>> C.load(\"chips=ahoy; vienna=finger\")\n >>> C.output()\n 'Set-Cookie: chips=ahoy\\r\\nSet-Cookie: vienna=finger'\n\nThe load() method is darn-tootin smart about identifying cookies\nwithin a string. Escaped quotation marks, nested semicolons, and other\nsuch trickeries do not confuse it.\n\n >>> C = cookies.SimpleCookie()\n >>> C.load('keebler=\"E=everybody; L=\\\\\"Loves\\\\\"; fudge=\\\\012;\";')\n >>> print(C)\n Set-Cookie: keebler=\"E=everybody; L=\\\"Loves\\\"; fudge=\\012;\"\n\nEach element of the Cookie also supports all of the RFC 2109\nCookie attributes. 
Here's an example which sets the Path\nattribute.\n\n >>> C = cookies.SimpleCookie()\n >>> C[\"oreo\"] = \"doublestuff\"\n >>> C[\"oreo\"][\"path\"] = \"/\"\n >>> print(C)\n Set-Cookie: oreo=doublestuff; Path=/\n\nEach dictionary element has a 'value' attribute, which gives you\nback the value associated with the key.\n\n >>> C = cookies.SimpleCookie()\n >>> C[\"twix\"] = \"none for you\"\n >>> C[\"twix\"].value\n 'none for you'\n\nThe SimpleCookie expects that all values should be standard strings.\nJust to be sure, SimpleCookie invokes the str() builtin to convert\nthe value to a string, when the values are set dictionary-style.\n\n >>> C = cookies.SimpleCookie()\n >>> C[\"number\"] = 7\n >>> C[\"string\"] = \"seven\"\n >>> C[\"number\"].value\n '7'\n >>> C[\"string\"].value\n 'seven'\n >>> C.output()\n 'Set-Cookie: number=7\\r\\nSet-Cookie: string=seven'\n\nFinis.\n\"\"\"\n\n\n\n\nimport re\nimport string\nimport types\n\n__all__=[\"CookieError\",\"BaseCookie\",\"SimpleCookie\"]\n\n_nulljoin=''.join\n_semispacejoin='; '.join\n_spacejoin=' '.join\n\n\n\n\nclass CookieError(Exception):\n pass\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n_LegalChars=string.ascii_letters+string.digits+\"!#$%&'*+-.^_`|~:\"\n_UnescapedChars=_LegalChars+' ()/<=>?@[]{}'\n\n_Translator={n:'\\\\%03o'%n\nfor n in set(range(256))-set(map(ord,_UnescapedChars))}\n_Translator.update({\nord('\"'):'\\\\\"',\nord('\\\\'):'\\\\\\\\',\n})\n\n_is_legal_key=re.compile('[%s]+'%re.escape(_LegalChars)).fullmatch\n\ndef _quote(str):\n ''\n\n\n\n\n \n if str is None or _is_legal_key(str):\n return str\n else:\n return '\"'+str.translate(_Translator)+'\"'\n \n \n_OctalPatt=re.compile(r\"\\\\[0-3][0-7][0-7]\")\n_QuotePatt=re.compile(r\"[\\\\].\")\n\ndef _unquote(str):\n\n\n if str is None or len(str)<2:\n return str\n if str[0]!='\"'or str[-1]!='\"':\n return str\n \n \n \n \n \n str=str[1:-1]\n \n \n \n \n \n i=0\n n=len(str)\n res=[]\n while 0 <=i '%(self.__class__.__name__,self.OutputString())\n \n def js_output(self,attrs=None):\n \n return \"\"\"\n \n \"\"\"%(self.OutputString(attrs).replace('\"',r'\\\"'))\n \n def OutputString(self,attrs=None):\n \n \n result=[]\n append=result.append\n \n \n append(\"%s=%s\"%(self.key,self.coded_value))\n \n \n if attrs is None:\n attrs=self._reserved\n items=sorted(self.items())\n for key,value in items:\n if value ==\"\":\n continue\n if key not in attrs:\n continue\n if key ==\"expires\"and isinstance(value,int):\n append(\"%s=%s\"%(self._reserved[key],_getdate(value)))\n elif key ==\"max-age\"and isinstance(value,int):\n append(\"%s=%d\"%(self._reserved[key],value))\n elif key ==\"comment\"and isinstance(value,str):\n append(\"%s=%s\"%(self._reserved[key],_quote(value)))\n elif key in self._flags:\n if value:\n append(str(self._reserved[key]))\n else:\n append(\"%s=%s\"%(self._reserved[key],value))\n \n \n return _semispacejoin(result)\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \n \n \n \n \n \n \n \n \n \n_LegalKeyChars=r\"\\w\\d!#%&'~_`><@,:/\\$\\*\\+\\-\\.\\^\\|\\)\\(\\?\\}\\{\\=\"\n_LegalValueChars=_LegalKeyChars+r'\\[\\]'\n_CookiePattern=re.compile(r\"\"\"\n \\s* # Optional whitespace at start of cookie\n (?P # Start of group 'key'\n [\"\"\"+_LegalKeyChars+r\"\"\"]+? 
# Any word of at least one letter\n ) # End of group 'key'\n ( # Optional group: there may not be a value.\n \\s*=\\s* # Equal Sign\n (?P # Start of group 'val'\n \"(?:[^\\\\\"]|\\\\.)*\" # Any doublequoted string\n | # or\n \\w{3},\\s[\\w\\d\\s-]{9,11}\\s[\\d:]{8}\\sGMT # Special case for \"expires\" attr\n | # or\n [\"\"\"+_LegalValueChars+r\"\"\"]* # Any word or empty string\n ) # End of group 'val'\n )? # End of optional value group\n \\s* # Any number of spaces.\n (\\s+|;|$) # Ending either at space, semicolon, or EOS.\n \"\"\",re.ASCII |re.VERBOSE)\n\n\n\n\n\nclass BaseCookie(dict):\n ''\n \n def value_decode(self,val):\n ''\n\n\n\n\n \n return val,val\n \n def value_encode(self,val):\n ''\n\n\n\n \n strval=str(val)\n return strval,strval\n \n def __init__(self,input=None):\n if input:\n self.load(input)\n \n def __set(self,key,real_value,coded_value):\n ''\n M=self.get(key,Morsel())\n M.set(key,real_value,coded_value)\n dict.__setitem__(self,key,M)\n \n def __setitem__(self,key,value):\n ''\n if isinstance(value,Morsel):\n \n dict.__setitem__(self,key,value)\n else:\n rval,cval=self.value_encode(value)\n self.__set(key,rval,cval)\n \n def output(self,attrs=None,header=\"Set-Cookie:\",sep=\"\\015\\012\"):\n ''\n result=[]\n items=sorted(self.items())\n for key,value in items:\n result.append(value.output(attrs,header))\n return sep.join(result)\n \n __str__=output\n \n def __repr__(self):\n l=[]\n items=sorted(self.items())\n for key,value in items:\n l.append('%s=%s'%(key,repr(value.value)))\n return '<%s: %s>'%(self.__class__.__name__,_spacejoin(l))\n \n def js_output(self,attrs=None):\n ''\n result=[]\n items=sorted(self.items())\n for key,value in items:\n result.append(value.js_output(attrs))\n return _nulljoin(result)\n \n def load(self,rawdata):\n ''\n\n\n\n \n if isinstance(rawdata,str):\n self.__parse_string(rawdata)\n else:\n \n for key,value in rawdata.items():\n self[key]=value\n return\n \n def __parse_string(self,str,patt=_CookiePattern):\n i=0\n n=len(str)\n parsed_items=[]\n morsel_seen=False\n \n TYPE_ATTRIBUTE=1\n TYPE_KEYVALUE=2\n \n \n \n \n while 0 <=i _MAXLINE:\n raise LineTooLong(\"header line\")\n headers.append(line)\n if len(headers)>_MAXHEADERS:\n raise HTTPException(\"got more than %d headers\"%_MAXHEADERS)\n if line in(b'\\r\\n',b'\\n',b''):\n break\n return headers\n \ndef _parse_header_lines(header_lines,_class=HTTPMessage):\n ''\n\n\n\n\n\n\n\n\n \n hstring=b''.join(header_lines).decode('iso-8859-1')\n return email.parser.Parser(_class=_class).parsestr(hstring)\n \ndef parse_headers(fp,_class=HTTPMessage):\n ''\n \n headers=_read_headers(fp)\n return _parse_header_lines(headers,_class)\n \n \nclass HTTPResponse(io.BufferedIOBase):\n\n\n\n\n\n\n\n\n def __init__(self,sock,debuglevel=0,method=None,url=None):\n \n \n \n \n \n \n \n self.fp=sock.makefile(\"rb\")\n self.debuglevel=debuglevel\n self._method=method\n \n \n \n \n \n \n \n self.headers=self.msg=None\n \n \n self.version=_UNKNOWN\n self.status=_UNKNOWN\n self.reason=_UNKNOWN\n \n self.chunked=_UNKNOWN\n self.chunk_left=_UNKNOWN\n self.length=_UNKNOWN\n self.will_close=_UNKNOWN\n \n def _read_status(self):\n line=str(self.fp.readline(_MAXLINE+1),\"iso-8859-1\")\n if len(line)>_MAXLINE:\n raise LineTooLong(\"status line\")\n if self.debuglevel >0:\n print(\"reply:\",repr(line))\n if not line:\n \n \n raise RemoteDisconnected(\"Remote end closed connection without\"\n \" response\")\n try:\n version,status,reason=line.split(None,2)\n except ValueError:\n try:\n version,status=line.split(None,1)\n 
reason=\"\"\n except ValueError:\n \n version=\"\"\n if not version.startswith(\"HTTP/\"):\n self._close_conn()\n raise BadStatusLine(line)\n \n \n try:\n status=int(status)\n if status <100 or status >999:\n raise BadStatusLine(line)\n except ValueError:\n raise BadStatusLine(line)\n return version,status,reason\n \n def begin(self):\n if self.headers is not None:\n \n return\n \n \n while True:\n version,status,reason=self._read_status()\n if status !=CONTINUE:\n break\n \n skipped_headers=_read_headers(self.fp)\n if self.debuglevel >0:\n print(\"headers:\",skipped_headers)\n del skipped_headers\n \n self.code=self.status=status\n self.reason=reason.strip()\n if version in(\"HTTP/1.0\",\"HTTP/0.9\"):\n \n self.version=10\n elif version.startswith(\"HTTP/1.\"):\n self.version=11\n else:\n raise UnknownProtocol(version)\n \n self.headers=self.msg=parse_headers(self.fp)\n \n if self.debuglevel >0:\n for hdr,val in self.headers.items():\n print(\"header:\",hdr+\":\",val)\n \n \n tr_enc=self.headers.get(\"transfer-encoding\")\n if tr_enc and tr_enc.lower()==\"chunked\":\n self.chunked=True\n self.chunk_left=None\n else:\n self.chunked=False\n \n \n self.will_close=self._check_close()\n \n \n \n self.length=None\n length=self.headers.get(\"content-length\")\n if length and not self.chunked:\n try:\n self.length=int(length)\n except ValueError:\n self.length=None\n else:\n if self.length <0:\n self.length=None\n else:\n self.length=None\n \n \n if(status ==NO_CONTENT or status ==NOT_MODIFIED or\n 100 <=status <200 or\n self._method ==\"HEAD\"):\n self.length=0\n \n \n \n \n if(not self.will_close and\n not self.chunked and\n self.length is None):\n self.will_close=True\n \n def _check_close(self):\n conn=self.headers.get(\"connection\")\n if self.version ==11:\n \n \n if conn and \"close\"in conn.lower():\n return True\n return False\n \n \n \n \n \n if self.headers.get(\"keep-alive\"):\n return False\n \n \n \n if conn and \"keep-alive\"in conn.lower():\n return False\n \n \n pconn=self.headers.get(\"proxy-connection\")\n if pconn and \"keep-alive\"in pconn.lower():\n return False\n \n \n return True\n \n def _close_conn(self):\n fp=self.fp\n self.fp=None\n fp.close()\n \n def close(self):\n try:\n super().close()\n finally:\n if self.fp:\n self._close_conn()\n \n \n \n \n \n \n def flush(self):\n super().flush()\n if self.fp:\n self.fp.flush()\n \n def readable(self):\n ''\n return True\n \n \n \n def isclosed(self):\n ''\n \n \n \n \n \n \n return self.fp is None\n \n def read(self,amt=None):\n ''\n if self.fp is None:\n return b\"\"\n \n if self._method ==\"HEAD\":\n self._close_conn()\n return b\"\"\n \n if self.chunked:\n return self._read_chunked(amt)\n \n if amt is not None:\n if self.length is not None and amt >self.length:\n \n amt=self.length\n s=self.fp.read(amt)\n if not s and amt:\n \n \n self._close_conn()\n elif self.length is not None:\n self.length -=len(s)\n if not self.length:\n self._close_conn()\n return s\n else:\n \n if self.length is None:\n s=self.fp.read()\n else:\n try:\n s=self._safe_read(self.length)\n except IncompleteRead:\n self._close_conn()\n raise\n self.length=0\n self._close_conn()\n return s\n \n def readinto(self,b):\n ''\n\n \n \n if self.fp is None:\n return 0\n \n if self._method ==\"HEAD\":\n self._close_conn()\n return 0\n \n if self.chunked:\n return self._readinto_chunked(b)\n \n if self.length is not None:\n if len(b)>self.length:\n \n b=memoryview(b)[0:self.length]\n \n \n \n \n n=self.fp.readinto(b)\n if not n and b:\n \n \n self._close_conn()\n 
elif self.length is not None:\n self.length -=n\n if not self.length:\n self._close_conn()\n return n\n \n def _read_next_chunk_size(self):\n \n line=self.fp.readline(_MAXLINE+1)\n if len(line)>_MAXLINE:\n raise LineTooLong(\"chunk size\")\n i=line.find(b\";\")\n if i >=0:\n line=line[:i]\n try:\n return int(line,16)\n except ValueError:\n \n \n self._close_conn()\n raise\n \n def _read_and_discard_trailer(self):\n \n \n while True:\n line=self.fp.readline(_MAXLINE+1)\n if len(line)>_MAXLINE:\n raise LineTooLong(\"trailer line\")\n if not line:\n \n \n break\n if line in(b'\\r\\n',b'\\n',b''):\n break\n \n def _get_chunk_left(self):\n \n \n \n \n \n chunk_left=self.chunk_left\n if not chunk_left:\n if chunk_left is not None:\n \n self._safe_read(2)\n try:\n chunk_left=self._read_next_chunk_size()\n except ValueError:\n raise IncompleteRead(b'')\n if chunk_left ==0:\n \n self._read_and_discard_trailer()\n \n self._close_conn()\n chunk_left=None\n self.chunk_left=chunk_left\n return chunk_left\n \n def _read_chunked(self,amt=None):\n assert self.chunked !=_UNKNOWN\n value=[]\n try:\n while(chunk_left :=self._get_chunk_left())is not None:\n if amt is not None and amt <=chunk_left:\n value.append(self._safe_read(amt))\n self.chunk_left=chunk_left -amt\n break\n \n value.append(self._safe_read(chunk_left))\n if amt is not None:\n amt -=chunk_left\n self.chunk_left=0\n return b''.join(value)\n except IncompleteRead as exc:\n raise IncompleteRead(b''.join(value))from exc\n \n def _readinto_chunked(self,b):\n assert self.chunked !=_UNKNOWN\n total_bytes=0\n mvb=memoryview(b)\n try:\n while True:\n chunk_left=self._get_chunk_left()\n if chunk_left is None:\n return total_bytes\n \n if len(mvb)<=chunk_left:\n n=self._safe_readinto(mvb)\n self.chunk_left=chunk_left -n\n return total_bytes+n\n \n temp_mvb=mvb[:chunk_left]\n n=self._safe_readinto(temp_mvb)\n mvb=mvb[n:]\n total_bytes +=n\n self.chunk_left=0\n \n except IncompleteRead:\n raise IncompleteRead(bytes(b[0:total_bytes]))\n \n def _safe_read(self,amt):\n ''\n\n\n\n\n \n data=self.fp.read(amt)\n if len(data)self.length):\n n=self.length\n result=self.fp.read1(n)\n if not result and n:\n self._close_conn()\n elif self.length is not None:\n self.length -=len(result)\n return result\n \n def peek(self,n=-1):\n \n \n if self.fp is None or self._method ==\"HEAD\":\n return b\"\"\n if self.chunked:\n return self._peek_chunked(n)\n return self.fp.peek(n)\n \n def readline(self,limit=-1):\n if self.fp is None or self._method ==\"HEAD\":\n return b\"\"\n if self.chunked:\n \n return super().readline(limit)\n if self.length is not None and(limit <0 or limit >self.length):\n limit=self.length\n result=self.fp.readline(limit)\n if not result and limit:\n self._close_conn()\n elif self.length is not None:\n self.length -=len(result)\n return result\n \n def _read1_chunked(self,n):\n \n \n chunk_left=self._get_chunk_left()\n if chunk_left is None or n ==0:\n return b''\n if not(0 <=n <=chunk_left):\n n=chunk_left\n read=self.fp.read1(n)\n self.chunk_left -=len(read)\n if not read:\n raise IncompleteRead(b\"\")\n return read\n \n def _peek_chunked(self,n):\n \n \n try:\n chunk_left=self._get_chunk_left()\n except IncompleteRead:\n return b''\n if chunk_left is None:\n return b''\n \n \n return self.fp.peek(chunk_left)[:chunk_left]\n \n def fileno(self):\n return self.fp.fileno()\n \n def getheader(self,name,default=None):\n ''\n\n\n\n\n\n\n\n\n\n \n if self.headers is None:\n raise ResponseNotReady()\n headers=self.headers.get_all(name)or default\n if 
isinstance(headers,str)or not hasattr(headers,'__iter__'):\n return headers\n else:\n return ', '.join(headers)\n \n def getheaders(self):\n ''\n if self.headers is None:\n raise ResponseNotReady()\n return list(self.headers.items())\n \n \n \n def __iter__(self):\n return self\n \n \n \n def info(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return self.headers\n \n def geturl(self):\n ''\n\n\n\n\n\n\n\n \n return self.url\n \n def getcode(self):\n ''\n\n\n \n return self.status\n \n \ndef _create_https_context(http_version):\n\n\n context=ssl._create_default_https_context()\n \n if http_version ==11:\n context.set_alpn_protocols(['http/1.1'])\n \n if context.post_handshake_auth is not None:\n context.post_handshake_auth=True\n return context\n \n \nclass HTTPConnection:\n\n _http_vsn=11\n _http_vsn_str='HTTP/1.1'\n \n response_class=HTTPResponse\n default_port=HTTP_PORT\n auto_open=1\n debuglevel=0\n \n @staticmethod\n def _is_textIO(stream):\n ''\n \n return isinstance(stream,io.TextIOBase)\n \n @staticmethod\n def _get_content_length(body,method):\n ''\n\n\n\n\n \n if body is None:\n \n \n if method.upper()in _METHODS_EXPECTING_BODY:\n return 0\n else:\n return None\n \n if hasattr(body,'read'):\n \n return None\n \n try:\n \n mv=memoryview(body)\n return mv.nbytes\n except TypeError:\n pass\n \n if isinstance(body,str):\n return len(body)\n \n return None\n \n def __init__(self,host,port=None,timeout=socket._GLOBAL_DEFAULT_TIMEOUT,\n source_address=None,blocksize=8192):\n self.timeout=timeout\n self.source_address=source_address\n self.blocksize=blocksize\n self.sock=None\n self._buffer=[]\n self.__response=None\n self.__state=_CS_IDLE\n self._method=None\n self._tunnel_host=None\n self._tunnel_port=None\n self._tunnel_headers={}\n self._raw_proxy_headers=None\n \n (self.host,self.port)=self._get_hostport(host,port)\n \n self._validate_host(self.host)\n \n \n \n self._create_connection=socket.create_connection\n \n def set_tunnel(self,host,port=None,headers=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if self.sock:\n raise RuntimeError(\"Can't set up tunnel for established connection\")\n \n self._tunnel_host,self._tunnel_port=self._get_hostport(host,port)\n if headers:\n self._tunnel_headers=headers.copy()\n else:\n self._tunnel_headers.clear()\n \n if not any(header.lower()==\"host\"for header in self._tunnel_headers):\n encoded_host=self._tunnel_host.encode(\"idna\").decode(\"ascii\")\n self._tunnel_headers[\"Host\"]=\"%s:%d\"%(\n encoded_host,self._tunnel_port)\n \n def _get_hostport(self,host,port):\n if port is None:\n i=host.rfind(':')\n j=host.rfind(']')\n if i >j:\n try:\n port=int(host[i+1:])\n except ValueError:\n if host[i+1:]==\"\":\n port=self.default_port\n else:\n raise InvalidURL(\"nonnumeric port: '%s'\"%host[i+1:])\n host=host[:i]\n else:\n port=self.default_port\n if host and host[0]=='['and host[-1]==']':\n host=host[1:-1]\n \n return(host,port)\n \n def set_debuglevel(self,level):\n self.debuglevel=level\n \n def _tunnel(self):\n connect=b\"CONNECT %s:%d %s\\r\\n\"%(\n self._tunnel_host.encode(\"idna\"),self._tunnel_port,\n self._http_vsn_str.encode(\"ascii\"))\n headers=[connect]\n for header,value in self._tunnel_headers.items():\n headers.append(f\"{header}: {value}\\r\\n\".encode(\"latin-1\"))\n headers.append(b\"\\r\\n\")\n \n \n \n self.send(b\"\".join(headers))\n del headers\n \n response=self.response_class(self.sock,method=self._method)\n try:\n (version,code,message)=response._read_status()\n \n 
self._raw_proxy_headers=_read_headers(response.fp)\n \n if self.debuglevel >0:\n for header in self._raw_proxy_headers:\n print('header:',header.decode())\n \n if code !=http.HTTPStatus.OK:\n self.close()\n raise OSError(f\"Tunnel connection failed: {code} {message.strip()}\")\n \n finally:\n response.close()\n \n def get_proxy_response_headers(self):\n ''\n\n\n\n\n\n \n return(\n _parse_header_lines(self._raw_proxy_headers)\n if self._raw_proxy_headers is not None\n else None\n )\n \n def connect(self):\n ''\n sys.audit(\"http.client.connect\",self,self.host,self.port)\n self.sock=self._create_connection(\n (self.host,self.port),self.timeout,self.source_address)\n \n try:\n self.sock.setsockopt(socket.IPPROTO_TCP,socket.TCP_NODELAY,1)\n except OSError as e:\n if e.errno !=errno.ENOPROTOOPT:\n raise\n \n if self._tunnel_host:\n self._tunnel()\n \n def close(self):\n ''\n self.__state=_CS_IDLE\n try:\n sock=self.sock\n if sock:\n self.sock=None\n sock.close()\n finally:\n response=self.__response\n if response:\n self.__response=None\n response.close()\n \n def send(self,data):\n ''\n\n\n \n \n if self.sock is None:\n if self.auto_open:\n self.connect()\n else:\n raise NotConnected()\n \n if self.debuglevel >0:\n print(\"send:\",repr(data))\n if hasattr(data,\"read\"):\n if self.debuglevel >0:\n print(\"sending a readable\")\n encode=self._is_textIO(data)\n if encode and self.debuglevel >0:\n print(\"encoding file using iso-8859-1\")\n while datablock :=data.read(self.blocksize):\n if encode:\n datablock=datablock.encode(\"iso-8859-1\")\n sys.audit(\"http.client.send\",self,datablock)\n self.sock.sendall(datablock)\n return\n sys.audit(\"http.client.send\",self,data)\n try:\n self.sock.sendall(data)\n except TypeError:\n if isinstance(data,collections.abc.Iterable):\n for d in data:\n self.sock.sendall(d)\n else:\n raise TypeError(\"data should be a bytes-like object \"\n \"or an iterable, got %r\"%type(data))\n \n def _output(self,s):\n ''\n\n\n \n self._buffer.append(s)\n \n def _read_readable(self,readable):\n if self.debuglevel >0:\n print(\"reading a readable\")\n encode=self._is_textIO(readable)\n if encode and self.debuglevel >0:\n print(\"encoding file using iso-8859-1\")\n while datablock :=readable.read(self.blocksize):\n if encode:\n datablock=datablock.encode(\"iso-8859-1\")\n yield datablock\n \n def _send_output(self,message_body=None,encode_chunked=False):\n ''\n\n\n\n \n self._buffer.extend((b\"\",b\"\"))\n msg=b\"\\r\\n\".join(self._buffer)\n del self._buffer[:]\n self.send(msg)\n \n if message_body is not None:\n \n \n if hasattr(message_body,'read'):\n \n \n \n chunks=self._read_readable(message_body)\n else:\n try:\n \n \n \n \n memoryview(message_body)\n except TypeError:\n try:\n chunks=iter(message_body)\n except TypeError:\n raise TypeError(\"message_body should be a bytes-like \"\n \"object or an iterable, got %r\"\n %type(message_body))\n else:\n \n \n chunks=(message_body,)\n \n for chunk in chunks:\n if not chunk:\n if self.debuglevel >0:\n print('Zero length chunk ignored')\n continue\n \n if encode_chunked and self._http_vsn ==11:\n \n chunk=f'{len(chunk):X}\\r\\n'.encode('ascii')+chunk\\\n +b'\\r\\n'\n self.send(chunk)\n \n if encode_chunked and self._http_vsn ==11:\n \n self.send(b'0\\r\\n\\r\\n')\n \n def putrequest(self,method,url,skip_host=False,\n skip_accept_encoding=False):\n ''\n\n\n\n\n\n\n \n \n \n if self.__response and self.__response.isclosed():\n self.__response=None\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.__state 
==_CS_IDLE:\n self.__state=_CS_REQ_STARTED\n else:\n raise CannotSendRequest(self.__state)\n \n self._validate_method(method)\n \n \n self._method=method\n \n url=url or '/'\n self._validate_path(url)\n \n request='%s %s %s'%(method,url,self._http_vsn_str)\n \n self._output(self._encode_request(request))\n \n if self._http_vsn ==11:\n \n \n if not skip_host:\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n netloc=''\n if url.startswith('http'):\n nil,netloc,nil,nil,nil=urlsplit(url)\n \n if netloc:\n try:\n netloc_enc=netloc.encode(\"ascii\")\n except UnicodeEncodeError:\n netloc_enc=netloc.encode(\"idna\")\n self.putheader('Host',netloc_enc)\n else:\n if self._tunnel_host:\n host=self._tunnel_host\n port=self._tunnel_port\n else:\n host=self.host\n port=self.port\n \n try:\n host_enc=host.encode(\"ascii\")\n except UnicodeEncodeError:\n host_enc=host.encode(\"idna\")\n \n \n \n \n if host.find(':')>=0:\n host_enc=b'['+host_enc+b']'\n \n if port ==self.default_port:\n self.putheader('Host',host_enc)\n else:\n host_enc=host_enc.decode(\"ascii\")\n self.putheader('Host',\"%s:%s\"%(host_enc,port))\n \n \n \n \n \n \n \n \n \n if not skip_accept_encoding:\n self.putheader('Accept-Encoding','identity')\n \n \n \n \n \n \n \n \n \n else:\n \n pass\n \n def _encode_request(self,request):\n \n return request.encode('ascii')\n \n def _validate_method(self,method):\n ''\n \n match=_contains_disallowed_method_pchar_re.search(method)\n if match:\n raise ValueError(\n f\"method can't contain control characters. {method !r} \"\n f\"(found at least {match.group()!r})\")\n \n def _validate_path(self,url):\n ''\n \n match=_contains_disallowed_url_pchar_re.search(url)\n if match:\n raise InvalidURL(f\"URL can't contain control characters. {url !r} \"\n f\"(found at least {match.group()!r})\")\n \n def _validate_host(self,host):\n ''\n \n match=_contains_disallowed_url_pchar_re.search(host)\n if match:\n raise InvalidURL(f\"URL can't contain control characters. 
{host !r} \"\n f\"(found at least {match.group()!r})\")\n \n def putheader(self,header,*values):\n ''\n\n\n \n if self.__state !=_CS_REQ_STARTED:\n raise CannotSendHeader()\n \n if hasattr(header,'encode'):\n header=header.encode('ascii')\n \n if not _is_legal_header_name(header):\n raise ValueError('Invalid header name %r'%(header,))\n \n values=list(values)\n for i,one_value in enumerate(values):\n if hasattr(one_value,'encode'):\n values[i]=one_value.encode('latin-1')\n elif isinstance(one_value,int):\n values[i]=str(one_value).encode('ascii')\n \n if _is_illegal_header_value(values[i]):\n raise ValueError('Invalid header value %r'%(values[i],))\n \n value=b'\\r\\n\\t'.join(values)\n header=header+b': '+value\n self._output(header)\n \n def endheaders(self,message_body=None,*,encode_chunked=False):\n ''\n\n\n\n\n \n if self.__state ==_CS_REQ_STARTED:\n self.__state=_CS_REQ_SENT\n else:\n raise CannotSendHeader()\n self._send_output(message_body,encode_chunked=encode_chunked)\n \n def request(self,method,url,body=None,headers={},*,\n encode_chunked=False):\n ''\n self._send_request(method,url,body,headers,encode_chunked)\n \n def _send_request(self,method,url,body,headers,encode_chunked):\n \n header_names=frozenset(k.lower()for k in headers)\n skips={}\n if 'host'in header_names:\n skips['skip_host']=1\n if 'accept-encoding'in header_names:\n skips['skip_accept_encoding']=1\n \n self.putrequest(method,url,**skips)\n \n \n \n \n \n \n \n \n if 'content-length'not in header_names:\n \n \n \n if 'transfer-encoding'not in header_names:\n \n \n encode_chunked=False\n content_length=self._get_content_length(body,method)\n if content_length is None:\n if body is not None:\n if self.debuglevel >0:\n print('Unable to determine size of %r'%body)\n encode_chunked=True\n self.putheader('Transfer-Encoding','chunked')\n else:\n self.putheader('Content-Length',str(content_length))\n else:\n encode_chunked=False\n \n for hdr,value in headers.items():\n self.putheader(hdr,value)\n if isinstance(body,str):\n \n \n body=_encode(body,'body')\n self.endheaders(body,encode_chunked=encode_chunked)\n \n def getresponse(self):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n if self.__response and self.__response.isclosed():\n self.__response=None\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.__state !=_CS_REQ_SENT or self.__response:\n raise ResponseNotReady(self.__state)\n \n if self.debuglevel >0:\n response=self.response_class(self.sock,self.debuglevel,\n method=self._method)\n else:\n response=self.response_class(self.sock,method=self._method)\n \n try:\n try:\n response.begin()\n except ConnectionError:\n self.close()\n raise\n assert response.will_close !=_UNKNOWN\n self.__state=_CS_IDLE\n \n if response.will_close:\n \n self.close()\n else:\n \n self.__response=response\n \n return response\n except:\n response.close()\n raise\n \ntry:\n import ssl\nexcept ImportError:\n pass\nelse:\n class HTTPSConnection(HTTPConnection):\n ''\n \n default_port=HTTPS_PORT\n \n def __init__(self,host,port=None,\n *,timeout=socket._GLOBAL_DEFAULT_TIMEOUT,\n source_address=None,context=None,blocksize=8192):\n super(HTTPSConnection,self).__init__(host,port,timeout,\n source_address,\n blocksize=blocksize)\n if context is None:\n context=_create_https_context(self._http_vsn)\n self._context=context\n \n def connect(self):\n ''\n \n super().connect()\n \n if self._tunnel_host:\n server_hostname=self._tunnel_host\n else:\n server_hostname=self.host\n \n self.sock=self._context.wrap_socket(self.sock,\n 
server_hostname=server_hostname)\n \n __all__.append(\"HTTPSConnection\")\n \nclass HTTPException(Exception):\n\n\n pass\n \nclass NotConnected(HTTPException):\n pass\n \nclass InvalidURL(HTTPException):\n pass\n \nclass UnknownProtocol(HTTPException):\n def __init__(self,version):\n self.args=version,\n self.version=version\n \nclass UnknownTransferEncoding(HTTPException):\n pass\n \nclass UnimplementedFileMode(HTTPException):\n pass\n \nclass IncompleteRead(HTTPException):\n def __init__(self,partial,expected=None):\n self.args=partial,\n self.partial=partial\n self.expected=expected\n def __repr__(self):\n if self.expected is not None:\n e=', %i more expected'%self.expected\n else:\n e=''\n return '%s(%i bytes read%s)'%(self.__class__.__name__,\n len(self.partial),e)\n __str__=object.__str__\n \nclass ImproperConnectionState(HTTPException):\n pass\n \nclass CannotSendRequest(ImproperConnectionState):\n pass\n \nclass CannotSendHeader(ImproperConnectionState):\n pass\n \nclass ResponseNotReady(ImproperConnectionState):\n pass\n \nclass BadStatusLine(HTTPException):\n def __init__(self,line):\n if not line:\n line=repr(line)\n self.args=line,\n self.line=line\n \nclass LineTooLong(HTTPException):\n def __init__(self,line_type):\n HTTPException.__init__(self,\"got more than %d bytes when reading %s\"\n %(_MAXLINE,line_type))\n \nclass RemoteDisconnected(ConnectionResetError,BadStatusLine):\n def __init__(self,*pos,**kw):\n BadStatusLine.__init__(self,\"\")\n ConnectionResetError.__init__(self,*pos,**kw)\n \n \nerror=HTTPException\n", ["collections.abc", "email.message", "email.parser", "errno", "http", "io", "re", "socket", "ssl", "sys", "urllib.parse"]], "http": [".py", "from enum import StrEnum,IntEnum,_simple_enum\n\n__all__=['HTTPStatus','HTTPMethod']\n\n\n@_simple_enum(IntEnum)\nclass HTTPStatus:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __new__(cls,value,phrase,description=''):\n obj=int.__new__(cls,value)\n obj._value_=value\n \n obj.phrase=phrase\n obj.description=description\n return obj\n \n @property\n def is_informational(self):\n return 100 <=self <=199\n \n @property\n def is_success(self):\n return 200 <=self <=299\n \n @property\n def is_redirection(self):\n return 300 <=self <=399\n \n @property\n def is_client_error(self):\n return 400 <=self <=499\n \n @property\n def is_server_error(self):\n return 500 <=self <=599\n \n \n CONTINUE=100,'Continue','Request received, please continue'\n SWITCHING_PROTOCOLS=(101,'Switching Protocols',\n 'Switching to new protocol; obey Upgrade header')\n PROCESSING=102,'Processing'\n EARLY_HINTS=103,'Early Hints'\n \n \n OK=200,'OK','Request fulfilled, document follows'\n CREATED=201,'Created','Document created, URL follows'\n ACCEPTED=(202,'Accepted',\n 'Request accepted, processing continues off-line')\n NON_AUTHORITATIVE_INFORMATION=(203,\n 'Non-Authoritative Information','Request fulfilled from cache')\n NO_CONTENT=204,'No Content','Request fulfilled, nothing follows'\n RESET_CONTENT=205,'Reset Content','Clear input form for further input'\n PARTIAL_CONTENT=206,'Partial Content','Partial content follows'\n MULTI_STATUS=207,'Multi-Status'\n ALREADY_REPORTED=208,'Already Reported'\n IM_USED=226,'IM Used'\n \n \n MULTIPLE_CHOICES=(300,'Multiple Choices',\n 'Object has several resources -- see URI list')\n MOVED_PERMANENTLY=(301,'Moved Permanently',\n 'Object moved permanently -- see URI list')\n FOUND=302,'Found','Object moved temporarily -- see URI list'\n SEE_OTHER=303,'See Other','Object moved -- see Method and URL list'\n 
NOT_MODIFIED=(304,'Not Modified',\n 'Document has not changed since given time')\n USE_PROXY=(305,'Use Proxy',\n 'You must use proxy specified in Location to access this resource')\n TEMPORARY_REDIRECT=(307,'Temporary Redirect',\n 'Object moved temporarily -- see URI list')\n PERMANENT_REDIRECT=(308,'Permanent Redirect',\n 'Object moved permanently -- see URI list')\n \n \n BAD_REQUEST=(400,'Bad Request',\n 'Bad request syntax or unsupported method')\n UNAUTHORIZED=(401,'Unauthorized',\n 'No permission -- see authorization schemes')\n PAYMENT_REQUIRED=(402,'Payment Required',\n 'No payment -- see charging schemes')\n FORBIDDEN=(403,'Forbidden',\n 'Request forbidden -- authorization will not help')\n NOT_FOUND=(404,'Not Found',\n 'Nothing matches the given URI')\n METHOD_NOT_ALLOWED=(405,'Method Not Allowed',\n 'Specified method is invalid for this resource')\n NOT_ACCEPTABLE=(406,'Not Acceptable',\n 'URI not available in preferred format')\n PROXY_AUTHENTICATION_REQUIRED=(407,\n 'Proxy Authentication Required',\n 'You must authenticate with this proxy before proceeding')\n REQUEST_TIMEOUT=(408,'Request Timeout',\n 'Request timed out; try again later')\n CONFLICT=409,'Conflict','Request conflict'\n GONE=(410,'Gone',\n 'URI no longer exists and has been permanently removed')\n LENGTH_REQUIRED=(411,'Length Required',\n 'Client must specify Content-Length')\n PRECONDITION_FAILED=(412,'Precondition Failed',\n 'Precondition in headers is false')\n REQUEST_ENTITY_TOO_LARGE=(413,'Request Entity Too Large',\n 'Entity is too large')\n REQUEST_URI_TOO_LONG=(414,'Request-URI Too Long',\n 'URI is too long')\n UNSUPPORTED_MEDIA_TYPE=(415,'Unsupported Media Type',\n 'Entity body in unsupported format')\n REQUESTED_RANGE_NOT_SATISFIABLE=(416,\n 'Requested Range Not Satisfiable',\n 'Cannot satisfy request range')\n EXPECTATION_FAILED=(417,'Expectation Failed',\n 'Expect condition could not be satisfied')\n IM_A_TEAPOT=(418,'I\\'m a Teapot',\n 'Server refuses to brew coffee because it is a teapot.')\n MISDIRECTED_REQUEST=(421,'Misdirected Request',\n 'Server is not able to produce a response')\n UNPROCESSABLE_ENTITY=422,'Unprocessable Entity'\n LOCKED=423,'Locked'\n FAILED_DEPENDENCY=424,'Failed Dependency'\n TOO_EARLY=425,'Too Early'\n UPGRADE_REQUIRED=426,'Upgrade Required'\n PRECONDITION_REQUIRED=(428,'Precondition Required',\n 'The origin server requires the request to be conditional')\n TOO_MANY_REQUESTS=(429,'Too Many Requests',\n 'The user has sent too many requests in '\n 'a given amount of time (\"rate limiting\")')\n REQUEST_HEADER_FIELDS_TOO_LARGE=(431,\n 'Request Header Fields Too Large',\n 'The server is unwilling to process the request because its header '\n 'fields are too large')\n UNAVAILABLE_FOR_LEGAL_REASONS=(451,\n 'Unavailable For Legal Reasons',\n 'The server is denying access to the '\n 'resource as a consequence of a legal demand')\n \n \n INTERNAL_SERVER_ERROR=(500,'Internal Server Error',\n 'Server got itself in trouble')\n NOT_IMPLEMENTED=(501,'Not Implemented',\n 'Server does not support this operation')\n BAD_GATEWAY=(502,'Bad Gateway',\n 'Invalid responses from another server/proxy')\n SERVICE_UNAVAILABLE=(503,'Service Unavailable',\n 'The server cannot process the request due to a high load')\n GATEWAY_TIMEOUT=(504,'Gateway Timeout',\n 'The gateway server did not receive a timely response')\n HTTP_VERSION_NOT_SUPPORTED=(505,'HTTP Version Not Supported',\n 'Cannot fulfill request')\n VARIANT_ALSO_NEGOTIATES=506,'Variant Also Negotiates'\n INSUFFICIENT_STORAGE=507,'Insufficient 
Storage'\n LOOP_DETECTED=508,'Loop Detected'\n NOT_EXTENDED=510,'Not Extended'\n NETWORK_AUTHENTICATION_REQUIRED=(511,\n 'Network Authentication Required',\n 'The client needs to authenticate to gain network access')\n \n \n@_simple_enum(StrEnum)\nclass HTTPMethod:\n ''\n\n\n\n\n\n \n def __new__(cls,value,description):\n obj=str.__new__(cls,value)\n obj._value_=value\n obj.description=description\n return obj\n \n def __repr__(self):\n return \"<%s.%s>\"%(self.__class__.__name__,self._name_)\n \n CONNECT='CONNECT','Establish a connection to the server.'\n DELETE='DELETE','Remove the target.'\n GET='GET','Retrieve the target.'\n HEAD='HEAD','Same as GET, but only retrieve the status line and header section.'\n OPTIONS='OPTIONS','Describe the communication options for the target.'\n PATCH='PATCH','Apply partial modifications to a target.'\n POST='POST','Perform target-specific processing with the request payload.'\n PUT='PUT','Replace the target with the request payload.'\n TRACE='TRACE','Perform a message loop-back test along the path to the target.'\n", ["enum"], 1], "concurrent": [".py", "", [], 1], "concurrent.futures._base": [".py", "\n\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nimport collections\nimport logging\nimport threading\nimport time\nimport types\n\nFIRST_COMPLETED='FIRST_COMPLETED'\nFIRST_EXCEPTION='FIRST_EXCEPTION'\nALL_COMPLETED='ALL_COMPLETED'\n_AS_COMPLETED='_AS_COMPLETED'\n\n\nPENDING='PENDING'\nRUNNING='RUNNING'\n\nCANCELLED='CANCELLED'\n\nCANCELLED_AND_NOTIFIED='CANCELLED_AND_NOTIFIED'\nFINISHED='FINISHED'\n\n_FUTURE_STATES=[\nPENDING,\nRUNNING,\nCANCELLED,\nCANCELLED_AND_NOTIFIED,\nFINISHED\n]\n\n_STATE_TO_DESCRIPTION_MAP={\nPENDING:\"pending\",\nRUNNING:\"running\",\nCANCELLED:\"cancelled\",\nCANCELLED_AND_NOTIFIED:\"cancelled\",\nFINISHED:\"finished\"\n}\n\n\nLOGGER=logging.getLogger(\"concurrent.futures\")\n\nclass Error(Exception):\n ''\n pass\n \nclass CancelledError(Error):\n ''\n pass\n \nTimeoutError=TimeoutError\n\nclass InvalidStateError(Error):\n ''\n pass\n \nclass _Waiter(object):\n ''\n def __init__(self):\n self.event=threading.Event()\n self.finished_futures=[]\n \n def add_result(self,future):\n self.finished_futures.append(future)\n \n def add_exception(self,future):\n self.finished_futures.append(future)\n \n def add_cancelled(self,future):\n self.finished_futures.append(future)\n \nclass _AsCompletedWaiter(_Waiter):\n ''\n \n def __init__(self):\n super(_AsCompletedWaiter,self).__init__()\n self.lock=threading.Lock()\n \n def add_result(self,future):\n with self.lock:\n super(_AsCompletedWaiter,self).add_result(future)\n self.event.set()\n \n def add_exception(self,future):\n with self.lock:\n super(_AsCompletedWaiter,self).add_exception(future)\n self.event.set()\n \n def add_cancelled(self,future):\n with self.lock:\n super(_AsCompletedWaiter,self).add_cancelled(future)\n self.event.set()\n \nclass _FirstCompletedWaiter(_Waiter):\n ''\n \n def add_result(self,future):\n super().add_result(future)\n self.event.set()\n \n def add_exception(self,future):\n super().add_exception(future)\n self.event.set()\n \n def add_cancelled(self,future):\n super().add_cancelled(future)\n self.event.set()\n \nclass _AllCompletedWaiter(_Waiter):\n ''\n \n def __init__(self,num_pending_calls,stop_on_exception):\n self.num_pending_calls=num_pending_calls\n self.stop_on_exception=stop_on_exception\n self.lock=threading.Lock()\n super().__init__()\n \n def _decrement_pending_calls(self):\n with self.lock:\n self.num_pending_calls -=1\n if not 
self.num_pending_calls:\n self.event.set()\n \n def add_result(self,future):\n super().add_result(future)\n self._decrement_pending_calls()\n \n def add_exception(self,future):\n super().add_exception(future)\n if self.stop_on_exception:\n self.event.set()\n else:\n self._decrement_pending_calls()\n \n def add_cancelled(self,future):\n super().add_cancelled(future)\n self._decrement_pending_calls()\n \nclass _AcquireFutures(object):\n ''\n \n def __init__(self,futures):\n self.futures=sorted(futures,key=id)\n \n def __enter__(self):\n for future in self.futures:\n future._condition.acquire()\n \n def __exit__(self,*args):\n for future in self.futures:\n future._condition.release()\n \ndef _create_and_install_waiters(fs,return_when):\n if return_when ==_AS_COMPLETED:\n waiter=_AsCompletedWaiter()\n elif return_when ==FIRST_COMPLETED:\n waiter=_FirstCompletedWaiter()\n else:\n pending_count=sum(\n f._state not in[CANCELLED_AND_NOTIFIED,FINISHED]for f in fs)\n \n if return_when ==FIRST_EXCEPTION:\n waiter=_AllCompletedWaiter(pending_count,stop_on_exception=True)\n elif return_when ==ALL_COMPLETED:\n waiter=_AllCompletedWaiter(pending_count,stop_on_exception=False)\n else:\n raise ValueError(\"Invalid return condition: %r\"%return_when)\n \n for f in fs:\n f._waiters.append(waiter)\n \n return waiter\n \n \ndef _yield_finished_futures(fs,waiter,ref_collect):\n ''\n\n\n\n\n\n\n\n\n \n while fs:\n f=fs[-1]\n for futures_set in ref_collect:\n futures_set.remove(f)\n with f._condition:\n f._waiters.remove(waiter)\n del f\n \n yield fs.pop()\n \n \ndef as_completed(fs,timeout=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if timeout is not None:\n end_time=timeout+time.monotonic()\n \n fs=set(fs)\n total_futures=len(fs)\n with _AcquireFutures(fs):\n finished=set(\n f for f in fs\n if f._state in[CANCELLED_AND_NOTIFIED,FINISHED])\n pending=fs -finished\n waiter=_create_and_install_waiters(fs,_AS_COMPLETED)\n finished=list(finished)\n try:\n yield from _yield_finished_futures(finished,waiter,\n ref_collect=(fs,))\n \n while pending:\n if timeout is None:\n wait_timeout=None\n else:\n wait_timeout=end_time -time.monotonic()\n if wait_timeout <0:\n raise TimeoutError(\n '%d (of %d) futures unfinished'%(\n len(pending),total_futures))\n \n waiter.event.wait(wait_timeout)\n \n with waiter.lock:\n finished=waiter.finished_futures\n waiter.finished_futures=[]\n waiter.event.clear()\n \n \n finished.reverse()\n yield from _yield_finished_futures(finished,waiter,\n ref_collect=(fs,pending))\n \n finally:\n \n for f in fs:\n with f._condition:\n f._waiters.remove(waiter)\n \nDoneAndNotDoneFutures=collections.namedtuple(\n'DoneAndNotDoneFutures','done not_done')\ndef wait(fs,timeout=None,return_when=ALL_COMPLETED):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n fs=set(fs)\n with _AcquireFutures(fs):\n done={f for f in fs\n if f._state in[CANCELLED_AND_NOTIFIED,FINISHED]}\n not_done=fs -done\n if(return_when ==FIRST_COMPLETED)and done:\n return DoneAndNotDoneFutures(done,not_done)\n elif(return_when ==FIRST_EXCEPTION)and done:\n if any(f for f in done\n if not f.cancelled()and f.exception()is not None):\n return DoneAndNotDoneFutures(done,not_done)\n \n if len(done)==len(fs):\n return DoneAndNotDoneFutures(done,not_done)\n \n waiter=_create_and_install_waiters(fs,return_when)\n \n waiter.event.wait(timeout)\n for f in fs:\n with f._condition:\n f._waiters.remove(waiter)\n \n done.update(waiter.finished_futures)\n return DoneAndNotDoneFutures(done,fs -done)\n \n \ndef 
_result_or_cancel(fut,timeout=None):\n try:\n try:\n return fut.result(timeout)\n finally:\n fut.cancel()\n finally:\n \n del fut\n \n \nclass Future(object):\n ''\n \n def __init__(self):\n ''\n self._condition=threading.Condition()\n self._state=PENDING\n self._result=None\n self._exception=None\n self._waiters=[]\n self._done_callbacks=[]\n \n def _invoke_callbacks(self):\n for callback in self._done_callbacks:\n try:\n callback(self)\n except Exception:\n LOGGER.exception('exception calling callback for %r',self)\n \n def __repr__(self):\n with self._condition:\n if self._state ==FINISHED:\n if self._exception:\n return '<%s at %#x state=%s raised %s>'%(\n self.__class__.__name__,\n id(self),\n _STATE_TO_DESCRIPTION_MAP[self._state],\n self._exception.__class__.__name__)\n else:\n return '<%s at %#x state=%s returned %s>'%(\n self.__class__.__name__,\n id(self),\n _STATE_TO_DESCRIPTION_MAP[self._state],\n self._result.__class__.__name__)\n return '<%s at %#x state=%s>'%(\n self.__class__.__name__,\n id(self),\n _STATE_TO_DESCRIPTION_MAP[self._state])\n \n def cancel(self):\n ''\n\n\n\n \n with self._condition:\n if self._state in[RUNNING,FINISHED]:\n return False\n \n if self._state in[CANCELLED,CANCELLED_AND_NOTIFIED]:\n return True\n \n self._state=CANCELLED\n self._condition.notify_all()\n \n self._invoke_callbacks()\n return True\n \n def cancelled(self):\n ''\n with self._condition:\n return self._state in[CANCELLED,CANCELLED_AND_NOTIFIED]\n \n def running(self):\n ''\n with self._condition:\n return self._state ==RUNNING\n \n def done(self):\n ''\n with self._condition:\n return self._state in[CANCELLED,CANCELLED_AND_NOTIFIED,FINISHED]\n \n def __get_result(self):\n if self._exception:\n try:\n raise self._exception\n finally:\n \n self=None\n else:\n return self._result\n \n def add_done_callback(self,fn):\n ''\n\n\n\n\n\n\n\n\n \n with self._condition:\n if self._state not in[CANCELLED,CANCELLED_AND_NOTIFIED,FINISHED]:\n self._done_callbacks.append(fn)\n return\n try:\n fn(self)\n except Exception:\n LOGGER.exception('exception calling callback for %r',self)\n \n def result(self,timeout=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try:\n with self._condition:\n if self._state in[CANCELLED,CANCELLED_AND_NOTIFIED]:\n raise CancelledError()\n elif self._state ==FINISHED:\n return self.__get_result()\n \n self._condition.wait(timeout)\n \n if self._state in[CANCELLED,CANCELLED_AND_NOTIFIED]:\n raise CancelledError()\n elif self._state ==FINISHED:\n return self.__get_result()\n else:\n raise TimeoutError()\n finally:\n \n self=None\n \n def exception(self,timeout=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n with self._condition:\n if self._state in[CANCELLED,CANCELLED_AND_NOTIFIED]:\n raise CancelledError()\n elif self._state ==FINISHED:\n return self._exception\n \n self._condition.wait(timeout)\n \n if self._state in[CANCELLED,CANCELLED_AND_NOTIFIED]:\n raise CancelledError()\n elif self._state ==FINISHED:\n return self._exception\n else:\n raise TimeoutError()\n \n \n def set_running_or_notify_cancel(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n with self._condition:\n if self._state ==CANCELLED:\n self._state=CANCELLED_AND_NOTIFIED\n for waiter in self._waiters:\n waiter.add_cancelled(self)\n \n \n return False\n elif self._state ==PENDING:\n self._state=RUNNING\n return True\n else:\n LOGGER.critical('Future %s in unexpected state: %s',\n id(self),\n self._state)\n raise RuntimeError('Future in unexpected state')\n \n def set_result(self,result):\n ''\n\n\n \n with 
self._condition:\n if self._state in{CANCELLED,CANCELLED_AND_NOTIFIED,FINISHED}:\n raise InvalidStateError('{}: {!r}'.format(self._state,self))\n self._result=result\n self._state=FINISHED\n for waiter in self._waiters:\n waiter.add_result(self)\n self._condition.notify_all()\n self._invoke_callbacks()\n \n def set_exception(self,exception):\n ''\n\n\n \n with self._condition:\n if self._state in{CANCELLED,CANCELLED_AND_NOTIFIED,FINISHED}:\n raise InvalidStateError('{}: {!r}'.format(self._state,self))\n self._exception=exception\n self._state=FINISHED\n for waiter in self._waiters:\n waiter.add_exception(self)\n self._condition.notify_all()\n self._invoke_callbacks()\n \n __class_getitem__=classmethod(types.GenericAlias)\n \nclass Executor(object):\n ''\n \n def submit(self,fn,/,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n raise NotImplementedError()\n \n def map(self,fn,*iterables,timeout=None,chunksize=1):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if timeout is not None:\n end_time=timeout+time.monotonic()\n \n fs=[self.submit(fn,*args)for args in zip(*iterables)]\n \n \n \n def result_iterator():\n try:\n \n fs.reverse()\n while fs:\n \n if timeout is None:\n yield _result_or_cancel(fs.pop())\n else:\n yield _result_or_cancel(fs.pop(),end_time -time.monotonic())\n finally:\n for future in fs:\n future.cancel()\n return result_iterator()\n \n def shutdown(self,wait=True,*,cancel_futures=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n pass\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_val,exc_tb):\n self.shutdown(wait=True)\n return False\n \n \nclass BrokenExecutor(RuntimeError):\n ''\n\n \n", ["collections", "logging", "threading", "time", "types"]], "concurrent.futures.thread": [".py", "\n\n\n\"\"\"Implements ThreadPoolExecutor.\"\"\"\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nfrom concurrent.futures import _base\nimport itertools\nimport queue\nimport threading\nimport types\nimport weakref\nimport os\n\n\n_threads_queues=weakref.WeakKeyDictionary()\n_shutdown=False\n\n\n_global_shutdown_lock=threading.Lock()\n\ndef _python_exit():\n global _shutdown\n with _global_shutdown_lock:\n _shutdown=True\n items=list(_threads_queues.items())\n for t,q in items:\n q.put(None)\n for t,q in items:\n t.join()\n \n \n \n \n \nthreading._register_atexit(_python_exit)\n\n\nif hasattr(os,'register_at_fork'):\n os.register_at_fork(before=_global_shutdown_lock.acquire,\n after_in_child=_global_shutdown_lock._at_fork_reinit,\n after_in_parent=_global_shutdown_lock.release)\n \n \nclass _WorkItem:\n def __init__(self,future,fn,args,kwargs):\n self.future=future\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \n def run(self):\n if not self.future.set_running_or_notify_cancel():\n return\n \n try:\n result=self.fn(*self.args,**self.kwargs)\n except BaseException as exc:\n self.future.set_exception(exc)\n \n self=None\n else:\n self.future.set_result(result)\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \ndef _worker(executor_reference,work_queue,initializer,initargs):\n if initializer is not None:\n try:\n initializer(*initargs)\n except BaseException:\n _base.LOGGER.critical('Exception in initializer:',exc_info=True)\n executor=executor_reference()\n if executor is not None:\n executor._initializer_failed()\n return\n try:\n while True:\n try:\n work_item=work_queue.get_nowait()\n except queue.Empty:\n \n executor=executor_reference()\n if executor is not None:\n executor._idle_semaphore.release()\n del executor\n work_item=work_queue.get(block=True)\n 
\n if work_item is not None:\n work_item.run()\n \n del work_item\n continue\n \n executor=executor_reference()\n \n \n \n \n if _shutdown or executor is None or executor._shutdown:\n \n \n if executor is not None:\n executor._shutdown=True\n \n work_queue.put(None)\n return\n del executor\n except BaseException:\n _base.LOGGER.critical('Exception in worker',exc_info=True)\n \n \nclass BrokenThreadPool(_base.BrokenExecutor):\n ''\n\n \n \n \nclass ThreadPoolExecutor(_base.Executor):\n\n\n _counter=itertools.count().__next__\n \n def __init__(self,max_workers=None,thread_name_prefix='',\n initializer=None,initargs=()):\n ''\n\n\n\n\n\n\n\n \n if max_workers is None:\n \n \n \n \n \n \n \n max_workers=min(32,(os.cpu_count()or 1)+4)\n if max_workers <=0:\n raise ValueError(\"max_workers must be greater than 0\")\n \n if initializer is not None and not callable(initializer):\n raise TypeError(\"initializer must be a callable\")\n \n self._max_workers=max_workers\n self._work_queue=queue.SimpleQueue()\n self._idle_semaphore=threading.Semaphore(0)\n self._threads=set()\n self._broken=False\n self._shutdown=False\n self._shutdown_lock=threading.Lock()\n self._thread_name_prefix=(thread_name_prefix or\n (\"ThreadPoolExecutor-%d\"%self._counter()))\n self._initializer=initializer\n self._initargs=initargs\n \n def submit(self,fn,/,*args,**kwargs):\n with self._shutdown_lock,_global_shutdown_lock:\n if self._broken:\n raise BrokenThreadPool(self._broken)\n \n if self._shutdown:\n raise RuntimeError('cannot schedule new futures after shutdown')\n if _shutdown:\n raise RuntimeError('cannot schedule new futures after '\n 'interpreter shutdown')\n \n f=_base.Future()\n w=_WorkItem(f,fn,args,kwargs)\n \n self._work_queue.put(w)\n self._adjust_thread_count()\n return f\n submit.__doc__=_base.Executor.submit.__doc__\n \n def _adjust_thread_count(self):\n \n if self._idle_semaphore.acquire(timeout=0):\n return\n \n \n \n def weakref_cb(_,q=self._work_queue):\n q.put(None)\n \n num_threads=len(self._threads)\n if num_threads | Work Ids | => | | => | Call Q | => | |\n| | +----------+ | | +-----------+ | |\n| | | ... | | | | ... | | |\n| | | 6 | | | | 5, call() | | |\n| | | 7 | | | | ... | | |\n| Process | | ... | | Local | +-----------+ | Process |\n| Pool | +----------+ | Worker | | #1..n |\n| Executor | | Thread | | |\n| | +----------- + | | +-----------+ | |\n| | <=> | Work Items | <=> | | <= | Result Q | <= | |\n| | +------------+ | | +-----------+ | |\n| | | 6: call() | | | | ... | | |\n| | | future | | | | 4, result | | |\n| | | ... | | | | 3, except | | |\n+----------+ +------------+ +--------+ +-----------+ +---------+\n\nExecutor.submit() called:\n- creates a uniquely numbered _WorkItem and adds it to the \"Work Items\" dict\n- adds the id of the _WorkItem to the \"Work Ids\" queue\n\nLocal worker thread:\n- reads work ids from the \"Work Ids\" queue and looks up the corresponding\n WorkItem from the \"Work Items\" dict: if the work item has been cancelled then\n it is simply removed from the dict, otherwise it is repackaged as a\n _CallItem and put in the \"Call Q\". New _CallItems are put in the \"Call Q\"\n until \"Call Q\" is full. 
NOTE: the size of the \"Call Q\" is kept small because\n calls placed in the \"Call Q\" can no longer be cancelled with Future.cancel().\n- reads _ResultItems from \"Result Q\", updates the future stored in the\n \"Work Items\" dict and deletes the dict entry\n\nProcess #1..n:\n- reads _CallItems from \"Call Q\", executes the calls, and puts the resulting\n _ResultItems in \"Result Q\"\n\"\"\"\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nimport atexit\nimport os\nfrom concurrent.futures import _base\nimport queue\nfrom queue import Full\nimport multiprocessing\nfrom multiprocessing import SimpleQueue\nimport threading\nimport weakref\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_threads_queues=weakref.WeakKeyDictionary()\n_shutdown=False\n\ndef _python_exit():\n global _shutdown\n _shutdown=True\n items=list(_threads_queues.items())\n for t,q in items:\n q.put(None)\n for t,q in items:\n t.join()\n \n \n \n \n \nEXTRA_QUEUED_CALLS=1\n\nclass _WorkItem(object):\n def __init__(self,future,fn,args,kwargs):\n self.future=future\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \nclass _ResultItem(object):\n def __init__(self,work_id,exception=None,result=None):\n self.work_id=work_id\n self.exception=exception\n self.result=result\n \nclass _CallItem(object):\n def __init__(self,work_id,fn,args,kwargs):\n self.work_id=work_id\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \ndef _web_worker(call_queue,result_queue):\n ''\n\n\n\n\n\n\n\n\n\n\n \n while True:\n call_item=call_queue.get(block=True)\n if call_item is None:\n \n result_queue.put(os.getpid())\n return\n try:\n r=call_item.fn(*call_item.args,**call_item.kwargs)\n except BaseException as e:\n result_queue.put(_ResultItem(call_item.work_id,\n exception=e))\n else:\n result_queue.put(_ResultItem(call_item.work_id,\n result=r))\n \ndef _add_call_item_to_queue(pending_work_items,\nwork_ids,\ncall_queue):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n while True:\n if call_queue.full():\n return\n try:\n work_id=work_ids.get(block=False)\n except queue.Empty:\n return\n else:\n work_item=pending_work_items[work_id]\n \n if work_item.future.set_running_or_notify_cancel():\n call_queue.put(_CallItem(work_id,\n work_item.fn,\n work_item.args,\n work_item.kwargs),\n block=True)\n else:\n del pending_work_items[work_id]\n continue\n \ndef _queue_management_worker(executor_reference,\nprocesses,\npending_work_items,\nwork_ids_queue,\ncall_queue,\nresult_queue):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n executor=None\n \n def shutting_down():\n return _shutdown or executor is None or executor._shutdown_thread\n \n def shutdown_worker():\n \n nb_children_alive=sum(p.is_alive()for p in processes.values())\n for i in range(0,nb_children_alive):\n call_queue.put_nowait(None)\n \n call_queue.close()\n \n \n for p in processes.values():\n p.join()\n \n reader=result_queue._reader\n \n while True:\n _add_call_item_to_queue(pending_work_items,\n work_ids_queue,\n call_queue)\n \n sentinels=[p.sentinel for p in processes.values()]\n assert sentinels\n \n ready=[reader]+sentinels\n if reader in ready:\n result_item=reader.recv()\n else:\n \n executor=executor_reference()\n if executor is not None:\n executor._broken=True\n executor._shutdown_thread=True\n executor=None\n \n for work_id,work_item in pending_work_items.items():\n work_item.future.set_exception(\n BrokenProcessPool(\n \"A process in the process pool was \"\n \"terminated abruptly while the future was \"\n \"running or pending.\"\n ))\n \n del work_item\n pending_work_items.clear()\n \n \n for p in 
processes.values():\n p.terminate()\n shutdown_worker()\n return\n if isinstance(result_item,int):\n \n \n assert shutting_down()\n p=processes.pop(result_item)\n p.join()\n if not processes:\n shutdown_worker()\n return\n elif result_item is not None:\n work_item=pending_work_items.pop(result_item.work_id,None)\n \n if work_item is not None:\n if result_item.exception:\n work_item.future.set_exception(result_item.exception)\n else:\n work_item.future.set_result(result_item.result)\n \n del work_item\n \n executor=executor_reference()\n \n \n \n \n if shutting_down():\n try:\n \n \n if not pending_work_items:\n shutdown_worker()\n return\n except Full:\n \n \n pass\n executor=None\n \n_system_limits_checked=False\n_system_limited=None\ndef _check_system_limits():\n global _system_limits_checked,_system_limited\n if _system_limits_checked:\n if _system_limited:\n raise NotImplementedError(_system_limited)\n _system_limits_checked=True\n try:\n nsems_max=os.sysconf(\"SC_SEM_NSEMS_MAX\")\n except(AttributeError,ValueError):\n \n return\n if nsems_max ==-1:\n \n \n return\n if nsems_max >=256:\n \n \n return\n _system_limited=\"system provides too few semaphores (%d available, 256 necessary)\"%nsems_max\n raise NotImplementedError(_system_limited)\n \n \nclass BrokenProcessPool(RuntimeError):\n ''\n\n\n \n \n \nclass WebWorkerExecutor(_base.Executor):\n def __init__(self,max_workers=None):\n ''\n\n\n\n\n\n \n _check_system_limits()\n \n if max_workers is None:\n self._max_workers=os.cpu_count()or 1\n else:\n self._max_workers=max_workers\n \n \n \n \n self._call_queue=multiprocessing.Queue(self._max_workers+\n EXTRA_QUEUED_CALLS)\n \n \n \n self._call_queue._ignore_epipe=True\n self._result_queue=SimpleQueue()\n self._work_ids=queue.Queue()\n self._queue_management_thread=None\n \n self._webworkers={}\n \n \n self._shutdown_thread=False\n self._shutdown_lock=threading.Lock()\n self._broken=False\n self._queue_count=0\n self._pending_work_items={}\n \n def _start_queue_management_thread(self):\n \n \n def weakref_cb(_,q=self._result_queue):\n q.put(None)\n if self._queue_management_thread is None:\n \n self._adjust_process_count()\n self._queue_management_thread=threading.Thread(\n target=_queue_management_worker,\n args=(weakref.ref(self,weakref_cb),\n self._webworkers,\n self._pending_work_items,\n self._work_ids,\n self._call_queue,\n self._result_queue))\n self._queue_management_thread.daemon=True\n self._queue_management_thread.start()\n _threads_queues[self._queue_management_thread]=self._result_queue\n \n def _adjust_process_count(self):\n for _ in range(len(self._webworkers),self._max_workers):\n p=multiprocessing.Process(\n target=_web_worker,\n args=(self._call_queue,\n self._result_queue))\n p.start()\n self._webworkers[p.pid]=p\n \n def submit(self,fn,*args,**kwargs):\n with self._shutdown_lock:\n if self._broken:\n raise BrokenProcessPool('A child process terminated '\n 'abruptly, the process pool is not usable anymore')\n if self._shutdown_thread:\n raise RuntimeError('cannot schedule new futures after shutdown')\n \n f=_base.Future()\n w=_WorkItem(f,fn,args,kwargs)\n \n self._pending_work_items[self._queue_count]=w\n self._work_ids.put(self._queue_count)\n self._queue_count +=1\n \n self._result_queue.put(None)\n \n self._start_queue_management_thread()\n return f\n submit.__doc__=_base.Executor.submit.__doc__\n \n def shutdown(self,wait=True):\n with self._shutdown_lock:\n self._shutdown_thread=True\n if self._queue_management_thread:\n \n self._result_queue.put(None)\n if 
wait:\n self._queue_management_thread.join()\n \n \n self._queue_management_thread=None\n self._call_queue=None\n self._result_queue=None\n self._webworkers=None\n shutdown.__doc__=_base.Executor.shutdown.__doc__\n \natexit.register(_python_exit)\n", ["atexit", "concurrent.futures", "concurrent.futures._base", "multiprocessing", "os", "queue", "threading", "weakref"]], "concurrent.futures": [".py", "\n\n\n\"\"\"Execute computations asynchronously using threads or processes.\"\"\"\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nfrom concurrent.futures._base import(FIRST_COMPLETED,\nFIRST_EXCEPTION,\nALL_COMPLETED,\nCancelledError,\nTimeoutError,\nInvalidStateError,\nBrokenExecutor,\nFuture,\nExecutor,\nwait,\nas_completed)\n\n__all__=(\n'FIRST_COMPLETED',\n'FIRST_EXCEPTION',\n'ALL_COMPLETED',\n'CancelledError',\n'TimeoutError',\n'BrokenExecutor',\n'Future',\n'Executor',\n'wait',\n'as_completed',\n'ProcessPoolExecutor',\n'ThreadPoolExecutor',\n)\n\n\ndef __dir__():\n return __all__+('__author__','__doc__')\n \n \ndef __getattr__(name):\n global ProcessPoolExecutor,ThreadPoolExecutor\n \n if name =='ProcessPoolExecutor':\n from.process import ProcessPoolExecutor as pe\n ProcessPoolExecutor=pe\n return pe\n \n if name =='ThreadPoolExecutor':\n from.thread import ThreadPoolExecutor as te\n ThreadPoolExecutor=te\n return te\n \n raise AttributeError(f\"module {__name__ !r} has no attribute {name !r}\")\n", ["concurrent.futures._base", "concurrent.futures.process", "concurrent.futures.thread"], 1], "concurrent.futures.process": [".py", "\n\n\n\"\"\"Implements ProcessPoolExecutor.\n\nThe following diagram and text describe the data-flow through the system:\n\n|======================= In-process =====================|== Out-of-process ==|\n\n+----------+ +----------+ +--------+ +-----------+ +---------+\n| | => | Work Ids | | | | Call Q | | Process |\n| | +----------+ | | +-----------+ | Pool |\n| | | ... | | | | ... | +---------+\n| | | 6 | => | | => | 5, call() | => | |\n| | | 7 | | | | ... | | |\n| Process | | ... | | Local | +-----------+ | Process |\n| Pool | +----------+ | Worker | | #1..n |\n| Executor | | Thread | | |\n| | +----------- + | | +-----------+ | |\n| | <=> | Work Items | <=> | | <= | Result Q | <= | |\n| | +------------+ | | +-----------+ | |\n| | | 6: call() | | | | ... | | |\n| | | future | | | | 4, result | | |\n| | | ... | | | | 3, except | | |\n+----------+ +------------+ +--------+ +-----------+ +---------+\n\nExecutor.submit() called:\n- creates a uniquely numbered _WorkItem and adds it to the \"Work Items\" dict\n- adds the id of the _WorkItem to the \"Work Ids\" queue\n\nLocal worker thread:\n- reads work ids from the \"Work Ids\" queue and looks up the corresponding\n WorkItem from the \"Work Items\" dict: if the work item has been cancelled then\n it is simply removed from the dict, otherwise it is repackaged as a\n _CallItem and put in the \"Call Q\". New _CallItems are put in the \"Call Q\"\n until \"Call Q\" is full. 
NOTE: the size of the \"Call Q\" is kept small because\n calls placed in the \"Call Q\" can no longer be cancelled with Future.cancel().\n- reads _ResultItems from \"Result Q\", updates the future stored in the\n \"Work Items\" dict and deletes the dict entry\n\nProcess #1..n:\n- reads _CallItems from \"Call Q\", executes the calls, and puts the resulting\n _ResultItems in \"Result Q\"\n\"\"\"\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nimport os\nfrom concurrent.futures import _base\nimport queue\nimport multiprocessing as mp\n\n\nimport multiprocessing.connection\nfrom multiprocessing.queues import Queue\nimport threading\nimport weakref\nfrom functools import partial\nimport itertools\nimport sys\nfrom traceback import format_exception\n\n\n_threads_wakeups=weakref.WeakKeyDictionary()\n_global_shutdown=False\n\n\nclass _ThreadWakeup:\n def __init__(self):\n self._closed=False\n self._reader,self._writer=mp.Pipe(duplex=False)\n \n def close(self):\n if not self._closed:\n self._closed=True\n self._writer.close()\n self._reader.close()\n \n def wakeup(self):\n if not self._closed:\n self._writer.send_bytes(b\"\")\n \n def clear(self):\n if not self._closed:\n while self._reader.poll():\n self._reader.recv_bytes()\n \n \ndef _python_exit():\n global _global_shutdown\n _global_shutdown=True\n items=list(_threads_wakeups.items())\n for _,thread_wakeup in items:\n \n thread_wakeup.wakeup()\n for t,_ in items:\n t.join()\n \n \n \n \n \nthreading._register_atexit(_python_exit)\n\n\n\n\n\nEXTRA_QUEUED_CALLS=1\n\n\n\n\n\n\n_MAX_WINDOWS_WORKERS=63 -2\n\n\n\nclass _RemoteTraceback(Exception):\n def __init__(self,tb):\n self.tb=tb\n def __str__(self):\n return self.tb\n \nclass _ExceptionWithTraceback:\n def __init__(self,exc,tb):\n tb=''.join(format_exception(type(exc),exc,tb))\n self.exc=exc\n \n \n self.exc.__traceback__=None\n self.tb='\\n\"\"\"\\n%s\"\"\"'%tb\n def __reduce__(self):\n return _rebuild_exc,(self.exc,self.tb)\n \ndef _rebuild_exc(exc,tb):\n exc.__cause__=_RemoteTraceback(tb)\n return exc\n \nclass _WorkItem(object):\n def __init__(self,future,fn,args,kwargs):\n self.future=future\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \nclass _ResultItem(object):\n def __init__(self,work_id,exception=None,result=None,exit_pid=None):\n self.work_id=work_id\n self.exception=exception\n self.result=result\n self.exit_pid=exit_pid\n \nclass _CallItem(object):\n def __init__(self,work_id,fn,args,kwargs):\n self.work_id=work_id\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \n \nclass _SafeQueue(Queue):\n ''\n def __init__(self,max_size=0,*,ctx,pending_work_items,shutdown_lock,\n thread_wakeup):\n self.pending_work_items=pending_work_items\n self.shutdown_lock=shutdown_lock\n self.thread_wakeup=thread_wakeup\n super().__init__(max_size,ctx=ctx)\n \n def _on_queue_feeder_error(self,e,obj):\n if isinstance(obj,_CallItem):\n tb=format_exception(type(e),e,e.__traceback__)\n e.__cause__=_RemoteTraceback('\\n\"\"\"\\n{}\"\"\"'.format(''.join(tb)))\n work_item=self.pending_work_items.pop(obj.work_id,None)\n with self.shutdown_lock:\n self.thread_wakeup.wakeup()\n \n \n \n if work_item is not None:\n work_item.future.set_exception(e)\n else:\n super()._on_queue_feeder_error(e,obj)\n \n \ndef _get_chunks(*iterables,chunksize):\n ''\n it=zip(*iterables)\n while True:\n chunk=tuple(itertools.islice(it,chunksize))\n if not chunk:\n return\n yield chunk\n \n \ndef _process_chunk(fn,chunk):\n ''\n\n\n\n\n\n\n \n return[fn(*args)for args in chunk]\n \n \ndef 
_sendback_result(result_queue,work_id,result=None,exception=None,\nexit_pid=None):\n ''\n try:\n result_queue.put(_ResultItem(work_id,result=result,\n exception=exception,exit_pid=exit_pid))\n except BaseException as e:\n exc=_ExceptionWithTraceback(e,e.__traceback__)\n result_queue.put(_ResultItem(work_id,exception=exc,\n exit_pid=exit_pid))\n \n \ndef _process_worker(call_queue,result_queue,initializer,initargs,max_tasks=None):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if initializer is not None:\n try:\n initializer(*initargs)\n except BaseException:\n _base.LOGGER.critical('Exception in initializer:',exc_info=True)\n \n \n return\n num_tasks=0\n exit_pid=None\n while True:\n call_item=call_queue.get(block=True)\n if call_item is None:\n \n result_queue.put(os.getpid())\n return\n \n if max_tasks is not None:\n num_tasks +=1\n if num_tasks >=max_tasks:\n exit_pid=os.getpid()\n \n try:\n r=call_item.fn(*call_item.args,**call_item.kwargs)\n except BaseException as e:\n exc=_ExceptionWithTraceback(e,e.__traceback__)\n _sendback_result(result_queue,call_item.work_id,exception=exc,\n exit_pid=exit_pid)\n else:\n _sendback_result(result_queue,call_item.work_id,result=r,\n exit_pid=exit_pid)\n del r\n \n \n \n del call_item\n \n if exit_pid is not None:\n return\n \n \nclass _ExecutorManagerThread(threading.Thread):\n ''\n\n\n\n\n\n\n\n\n \n \n def __init__(self,executor):\n \n \n \n \n self.thread_wakeup=executor._executor_manager_thread_wakeup\n self.shutdown_lock=executor._shutdown_lock\n \n \n \n \n \n \n \n def weakref_cb(_,\n thread_wakeup=self.thread_wakeup,\n shutdown_lock=self.shutdown_lock):\n mp.util.debug('Executor collected: triggering callback for'\n ' QueueManager wakeup')\n with shutdown_lock:\n thread_wakeup.wakeup()\n \n self.executor_reference=weakref.ref(executor,weakref_cb)\n \n \n self.processes=executor._processes\n \n \n \n self.call_queue=executor._call_queue\n \n \n self.result_queue=executor._result_queue\n \n \n self.work_ids_queue=executor._work_ids\n \n \n \n self.max_tasks_per_child=executor._max_tasks_per_child\n \n \n \n self.pending_work_items=executor._pending_work_items\n \n super().__init__()\n \n def run(self):\n \n \n while True:\n self.add_call_item_to_queue()\n \n result_item,is_broken,cause=self.wait_result_broken_or_wakeup()\n \n if is_broken:\n self.terminate_broken(cause)\n return\n if result_item is not None:\n self.process_result_item(result_item)\n \n process_exited=result_item.exit_pid is not None\n if process_exited:\n p=self.processes.pop(result_item.exit_pid)\n p.join()\n \n \n \n del result_item\n \n if executor :=self.executor_reference():\n if process_exited:\n with self.shutdown_lock:\n executor._adjust_process_count()\n else:\n executor._idle_worker_semaphore.release()\n del executor\n \n if self.is_shutting_down():\n self.flag_executor_shutting_down()\n \n \n \n \n self.add_call_item_to_queue()\n \n \n \n if not self.pending_work_items:\n self.join_executor_internals()\n return\n \n def add_call_item_to_queue(self):\n \n \n while True:\n if self.call_queue.full():\n return\n try:\n work_id=self.work_ids_queue.get(block=False)\n except queue.Empty:\n return\n else:\n work_item=self.pending_work_items[work_id]\n \n if work_item.future.set_running_or_notify_cancel():\n self.call_queue.put(_CallItem(work_id,\n work_item.fn,\n work_item.args,\n work_item.kwargs),\n block=True)\n else:\n del self.pending_work_items[work_id]\n continue\n \n def wait_result_broken_or_wakeup(self):\n \n \n \n \n \n result_reader=self.result_queue._reader\n assert not 
self.thread_wakeup._closed\n wakeup_reader=self.thread_wakeup._reader\n readers=[result_reader,wakeup_reader]\n worker_sentinels=[p.sentinel for p in list(self.processes.values())]\n ready=mp.connection.wait(readers+worker_sentinels)\n \n cause=None\n is_broken=True\n result_item=None\n if result_reader in ready:\n try:\n result_item=result_reader.recv()\n is_broken=False\n except BaseException as e:\n cause=format_exception(type(e),e,e.__traceback__)\n \n elif wakeup_reader in ready:\n is_broken=False\n \n with self.shutdown_lock:\n self.thread_wakeup.clear()\n \n return result_item,is_broken,cause\n \n def process_result_item(self,result_item):\n \n \n \n if isinstance(result_item,int):\n \n \n assert self.is_shutting_down()\n p=self.processes.pop(result_item)\n p.join()\n if not self.processes:\n self.join_executor_internals()\n return\n else:\n \n work_item=self.pending_work_items.pop(result_item.work_id,None)\n \n if work_item is not None:\n if result_item.exception:\n work_item.future.set_exception(result_item.exception)\n else:\n work_item.future.set_result(result_item.result)\n \n def is_shutting_down(self):\n \n executor=self.executor_reference()\n \n \n \n \n return(_global_shutdown or executor is None\n or executor._shutdown_thread)\n \n def terminate_broken(self,cause):\n \n \n \n \n \n executor=self.executor_reference()\n if executor is not None:\n executor._broken=('A child process terminated '\n 'abruptly, the process pool is not '\n 'usable anymore')\n executor._shutdown_thread=True\n executor=None\n \n \n \n bpe=BrokenProcessPool(\"A process in the process pool was \"\n \"terminated abruptly while the future was \"\n \"running or pending.\")\n if cause is not None:\n bpe.__cause__=_RemoteTraceback(\n f\"\\n'''\\n{''.join(cause)}'''\")\n \n \n for work_id,work_item in self.pending_work_items.items():\n work_item.future.set_exception(bpe)\n \n del work_item\n self.pending_work_items.clear()\n \n \n \n for p in self.processes.values():\n p.terminate()\n \n \n self.join_executor_internals()\n \n def flag_executor_shutting_down(self):\n \n \n executor=self.executor_reference()\n if executor is not None:\n executor._shutdown_thread=True\n \n if executor._cancel_pending_futures:\n \n \n new_pending_work_items={}\n for work_id,work_item in self.pending_work_items.items():\n if not work_item.future.cancel():\n new_pending_work_items[work_id]=work_item\n self.pending_work_items=new_pending_work_items\n \n \n while True:\n try:\n self.work_ids_queue.get_nowait()\n except queue.Empty:\n break\n \n \n executor._cancel_pending_futures=False\n \n def shutdown_workers(self):\n n_children_to_stop=self.get_n_children_alive()\n n_sentinels_sent=0\n \n \n while(n_sentinels_sent 0):\n for i in range(n_children_to_stop -n_sentinels_sent):\n try:\n self.call_queue.put_nowait(None)\n n_sentinels_sent +=1\n except queue.Full:\n break\n \n def join_executor_internals(self):\n self.shutdown_workers()\n \n self.call_queue.close()\n self.call_queue.join_thread()\n with self.shutdown_lock:\n self.thread_wakeup.close()\n \n \n for p in self.processes.values():\n p.join()\n \n def get_n_children_alive(self):\n \n return sum(p.is_alive()for p in self.processes.values())\n \n \n_system_limits_checked=False\n_system_limited=None\n\n\ndef _check_system_limits():\n global _system_limits_checked,_system_limited\n if _system_limits_checked:\n if _system_limited:\n raise NotImplementedError(_system_limited)\n _system_limits_checked=True\n try:\n import multiprocessing.synchronize\n except ImportError:\n 
_system_limited=(\n \"This Python build lacks multiprocessing.synchronize, usually due \"\n \"to named semaphores being unavailable on this platform.\"\n )\n raise NotImplementedError(_system_limited)\n try:\n nsems_max=os.sysconf(\"SC_SEM_NSEMS_MAX\")\n except(AttributeError,ValueError):\n \n return\n if nsems_max ==-1:\n \n \n return\n if nsems_max >=256:\n \n \n return\n _system_limited=(\"system provides too few semaphores (%d\"\n \" available, 256 necessary)\"%nsems_max)\n raise NotImplementedError(_system_limited)\n \n \ndef _chain_from_iterable_of_lists(iterable):\n ''\n\n\n\n \n for element in iterable:\n element.reverse()\n while element:\n yield element.pop()\n \n \nclass BrokenProcessPool(_base.BrokenExecutor):\n ''\n\n\n \n \n \nclass ProcessPoolExecutor(_base.Executor):\n def __init__(self,max_workers=None,mp_context=None,\n initializer=None,initargs=(),*,max_tasks_per_child=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n _check_system_limits()\n \n if max_workers is None:\n self._max_workers=os.cpu_count()or 1\n if sys.platform =='win32':\n self._max_workers=min(_MAX_WINDOWS_WORKERS,\n self._max_workers)\n else:\n if max_workers <=0:\n raise ValueError(\"max_workers must be greater than 0\")\n elif(sys.platform =='win32'and\n max_workers >_MAX_WINDOWS_WORKERS):\n raise ValueError(\n f\"max_workers must be <= {_MAX_WINDOWS_WORKERS}\")\n \n self._max_workers=max_workers\n \n if mp_context is None:\n if max_tasks_per_child is not None:\n mp_context=mp.get_context(\"spawn\")\n else:\n mp_context=mp.get_context()\n self._mp_context=mp_context\n \n \n self._safe_to_dynamically_spawn_children=(\n self._mp_context.get_start_method(allow_none=False)!=\"fork\")\n \n if initializer is not None and not callable(initializer):\n raise TypeError(\"initializer must be a callable\")\n self._initializer=initializer\n self._initargs=initargs\n \n if max_tasks_per_child is not None:\n if not isinstance(max_tasks_per_child,int):\n raise TypeError(\"max_tasks_per_child must be an integer\")\n elif max_tasks_per_child <=0:\n raise ValueError(\"max_tasks_per_child must be >= 1\")\n if self._mp_context.get_start_method(allow_none=False)==\"fork\":\n \n raise ValueError(\"max_tasks_per_child is incompatible with\"\n \" the 'fork' multiprocessing start method;\"\n \" supply a different mp_context.\")\n self._max_tasks_per_child=max_tasks_per_child\n \n \n self._executor_manager_thread=None\n \n \n self._processes={}\n \n \n self._shutdown_thread=False\n self._shutdown_lock=threading.Lock()\n self._idle_worker_semaphore=threading.Semaphore(0)\n self._broken=False\n self._queue_count=0\n self._pending_work_items={}\n self._cancel_pending_futures=False\n \n \n \n \n \n \n \n \n \n self._executor_manager_thread_wakeup=_ThreadWakeup()\n \n \n \n \n \n queue_size=self._max_workers+EXTRA_QUEUED_CALLS\n self._call_queue=_SafeQueue(\n max_size=queue_size,ctx=self._mp_context,\n pending_work_items=self._pending_work_items,\n shutdown_lock=self._shutdown_lock,\n thread_wakeup=self._executor_manager_thread_wakeup)\n \n \n \n self._call_queue._ignore_epipe=True\n self._result_queue=mp_context.SimpleQueue()\n self._work_ids=queue.Queue()\n \n def _start_executor_manager_thread(self):\n if self._executor_manager_thread is None:\n \n if not self._safe_to_dynamically_spawn_children:\n self._launch_processes()\n self._executor_manager_thread=_ExecutorManagerThread(self)\n self._executor_manager_thread.start()\n _threads_wakeups[self._executor_manager_thread]=\\\n self._executor_manager_thread_wakeup\n \n def 
_adjust_process_count(self):\n \n if self._idle_worker_semaphore.acquire(blocking=False):\n return\n \n process_count=len(self._processes)\n if process_count = 1.\")\n \n results=super().map(partial(_process_chunk,fn),\n _get_chunks(*iterables,chunksize=chunksize),\n timeout=timeout)\n return _chain_from_iterable_of_lists(results)\n \n def shutdown(self,wait=True,*,cancel_futures=False):\n with self._shutdown_lock:\n self._cancel_pending_futures=cancel_futures\n self._shutdown_thread=True\n if self._executor_manager_thread_wakeup is not None:\n \n self._executor_manager_thread_wakeup.wakeup()\n \n if self._executor_manager_thread is not None and wait:\n self._executor_manager_thread.join()\n \n \n self._executor_manager_thread=None\n self._call_queue=None\n if self._result_queue is not None and wait:\n self._result_queue.close()\n self._result_queue=None\n self._processes=None\n self._executor_manager_thread_wakeup=None\n \n shutdown.__doc__=_base.Executor.shutdown.__doc__\n", ["concurrent.futures", "concurrent.futures._base", "functools", "itertools", "multiprocessing", "multiprocessing.connection", "multiprocessing.queues", "multiprocessing.synchronize", "os", "queue", "sys", "threading", "traceback", "weakref"]], "importlib.readers": [".py", "''\n\n\n\n\n\n\nfrom.resources.readers import(\nFileReader,ZipReader,MultiplexedPath,NamespaceReader,\n)\n\n__all__=['FileReader','ZipReader','MultiplexedPath','NamespaceReader']\n", ["importlib.resources.readers"]], "importlib._abc": [".py", "''\nfrom. import _bootstrap\nimport abc\n\n\nclass Loader(metaclass=abc.ABCMeta):\n\n ''\n \n def create_module(self,spec):\n ''\n\n\n\n\n \n \n return None\n \n \n \n \n def load_module(self,fullname):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not hasattr(self,'exec_module'):\n raise ImportError\n \n return _bootstrap._load_module_shim(self,fullname)\n", ["abc", "importlib", "importlib._bootstrap"]], "importlib.util": [".py", "''\nfrom._abc import Loader\nfrom._bootstrap import module_from_spec\nfrom._bootstrap import _resolve_name\nfrom._bootstrap import spec_from_loader\nfrom._bootstrap import _find_spec\nfrom._bootstrap_external import MAGIC_NUMBER\nfrom._bootstrap_external import _RAW_MAGIC_NUMBER\nfrom._bootstrap_external import cache_from_source\nfrom._bootstrap_external import decode_source\nfrom._bootstrap_external import source_from_cache\nfrom._bootstrap_external import spec_from_file_location\n\nfrom contextlib import contextmanager\nimport _imp\nimport functools\nimport sys\nimport types\nimport warnings\n\n\ndef source_hash(source_bytes):\n ''\n return _imp.source_hash(_RAW_MAGIC_NUMBER,source_bytes)\n \n \ndef resolve_name(name,package):\n ''\n if not name.startswith('.'):\n return name\n elif not package:\n raise ImportError(f'no package specified for {repr(name)} '\n '(required for relative module names)')\n level=0\n for character in name:\n if character !='.':\n break\n level +=1\n return _resolve_name(name[level:],package,level)\n \n \ndef _find_spec_from_path(name,path=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if name not in sys.modules:\n return _find_spec(name,path)\n else:\n module=sys.modules[name]\n if module is None:\n return None\n try:\n spec=module.__spec__\n except AttributeError:\n raise ValueError('{}.__spec__ is not set'.format(name))from None\n else:\n if spec is None:\n raise ValueError('{}.__spec__ is None'.format(name))\n return spec\n \n \ndef find_spec(name,package=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n fullname=resolve_name(name,package)if name.startswith('.')else name\n 
if fullname not in sys.modules:\n parent_name=fullname.rpartition('.')[0]\n if parent_name:\n parent=__import__(parent_name,fromlist=['__path__'])\n try:\n parent_path=parent.__path__\n except AttributeError as e:\n raise ModuleNotFoundError(\n f\"__path__ attribute not found on {parent_name !r} \"\n f\"while trying to find {fullname !r}\",name=fullname)from e\n else:\n parent_path=None\n return _find_spec(fullname,parent_path)\n else:\n module=sys.modules[fullname]\n if module is None:\n return None\n try:\n spec=module.__spec__\n except AttributeError:\n raise ValueError('{}.__spec__ is not set'.format(name))from None\n else:\n if spec is None:\n raise ValueError('{}.__spec__ is None'.format(name))\n return spec\n \n \n@contextmanager\ndef _module_to_load(name):\n is_reload=name in sys.modules\n \n module=sys.modules.get(name)\n if not is_reload:\n \n \n \n module=type(sys)(name)\n \n \n module.__initializing__=True\n sys.modules[name]=module\n try:\n yield module\n except Exception:\n if not is_reload:\n try:\n del sys.modules[name]\n except KeyError:\n pass\n finally:\n module.__initializing__=False\n \n \ndef set_package(fxn):\n ''\n\n\n\n \n @functools.wraps(fxn)\n def set_package_wrapper(*args,**kwargs):\n warnings.warn('The import system now takes care of this automatically; '\n 'this decorator is slated for removal in Python 3.12',\n DeprecationWarning,stacklevel=2)\n module=fxn(*args,**kwargs)\n if getattr(module,'__package__',None)is None:\n module.__package__=module.__name__\n if not hasattr(module,'__path__'):\n module.__package__=module.__package__.rpartition('.')[0]\n return module\n return set_package_wrapper\n \n \ndef set_loader(fxn):\n ''\n\n\n\n \n @functools.wraps(fxn)\n def set_loader_wrapper(self,*args,**kwargs):\n warnings.warn('The import system now takes care of this automatically; '\n 'this decorator is slated for removal in Python 3.12',\n DeprecationWarning,stacklevel=2)\n module=fxn(self,*args,**kwargs)\n if getattr(module,'__loader__',None)is None:\n module.__loader__=self\n return module\n return set_loader_wrapper\n \n \ndef module_for_loader(fxn):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n warnings.warn('The import system now takes care of this automatically; '\n 'this decorator is slated for removal in Python 3.12',\n DeprecationWarning,stacklevel=2)\n @functools.wraps(fxn)\n def module_for_loader_wrapper(self,fullname,*args,**kwargs):\n with _module_to_load(fullname)as module:\n module.__loader__=self\n try:\n is_package=self.is_package(fullname)\n except(ImportError,AttributeError):\n pass\n else:\n if is_package:\n module.__package__=fullname\n else:\n module.__package__=fullname.rpartition('.')[0]\n \n return fxn(self,module,*args,**kwargs)\n \n return module_for_loader_wrapper\n \n \nclass _LazyModule(types.ModuleType):\n\n ''\n \n def __getattribute__(self,attr):\n ''\n \n \n \n self.__class__=types.ModuleType\n \n \n original_name=self.__spec__.name\n \n \n attrs_then=self.__spec__.loader_state['__dict__']\n attrs_now=self.__dict__\n attrs_updated={}\n for key,value in attrs_now.items():\n \n \n if key not in attrs_then:\n attrs_updated[key]=value\n elif id(attrs_now[key])!=id(attrs_then[key]):\n attrs_updated[key]=value\n self.__spec__.loader.exec_module(self)\n \n \n if original_name in sys.modules:\n if id(self)!=id(sys.modules[original_name]):\n raise ValueError(f\"module object for {original_name !r} \"\n \"substituted in sys.modules during a lazy \"\n \"load\")\n \n \n self.__dict__.update(attrs_updated)\n return getattr(self,attr)\n \n def 
__delattr__(self,attr):\n ''\n \n \n self.__getattribute__(attr)\n delattr(self,attr)\n \n \nclass LazyLoader(Loader):\n\n ''\n \n @staticmethod\n def __check_eager_loader(loader):\n if not hasattr(loader,'exec_module'):\n raise TypeError('loader must define exec_module()')\n \n @classmethod\n def factory(cls,loader):\n ''\n cls.__check_eager_loader(loader)\n return lambda *args,**kwargs:cls(loader(*args,**kwargs))\n \n def __init__(self,loader):\n self.__check_eager_loader(loader)\n self.loader=loader\n \n def create_module(self,spec):\n return self.loader.create_module(spec)\n \n def exec_module(self,module):\n ''\n module.__spec__.loader=self.loader\n module.__loader__=self.loader\n \n \n \n \n loader_state={}\n loader_state['__dict__']=module.__dict__.copy()\n loader_state['__class__']=module.__class__\n module.__spec__.loader_state=loader_state\n module.__class__=_LazyModule\n", ["_imp", "contextlib", "functools", "importlib._abc", "importlib._bootstrap", "importlib._bootstrap_external", "sys", "types", "warnings"]], "importlib._bootstrap": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_bootstrap_external=None\n_thread=None\nimport _weakref\n\ndef _wrap(new,old):\n ''\n for replace in['__module__','__name__','__qualname__','__doc__']:\n if hasattr(old,replace):\n setattr(new,replace,getattr(old,replace))\n new.__dict__.update(old.__dict__)\n \n \ndef _new_module(name):\n return type(sys)(name)\n \n \n \n \n \n \n_module_locks={}\n\n_blocking_on={}\n\n\nclass _DeadlockError(RuntimeError):\n pass\n \n \nclass _ModuleLock:\n ''\n\n\n \n \n def __init__(self,name):\n self.lock=_thread.allocate_lock()\n self.wakeup=_thread.allocate_lock()\n self.name=name\n self.owner=None\n self.count=0\n self.waiters=0\n \n def has_deadlock(self):\n \n me=_thread.get_ident()\n tid=self.owner\n while True:\n lock=_blocking_on.get(tid)\n if lock is None:\n return False\n tid=lock.owner\n if tid ==me:\n return True\n \n def acquire(self):\n ''\n\n\n\n \n tid=_thread.get_ident()\n _blocking_on[tid]=self\n try:\n while True:\n with self.lock:\n if self.count ==0 or self.owner ==tid:\n self.owner=tid\n self.count +=1\n return True\n if self.has_deadlock():\n raise _DeadlockError('deadlock detected by %r'%self)\n if self.wakeup.acquire(False):\n self.waiters +=1\n \n self.wakeup.acquire()\n self.wakeup.release()\n finally:\n del _blocking_on[tid]\n \n def release(self):\n tid=_thread.get_ident()\n with self.lock:\n if self.owner !=tid:\n raise RuntimeError('cannot release un-acquired lock')\n assert self.count >0\n self.count -=1\n if self.count ==0:\n self.owner=None\n if self.waiters:\n self.waiters -=1\n self.wakeup.release()\n \n def __repr__(self):\n return '_ModuleLock({!r}) at {}'.format(self.name,id(self))\n \n \nclass _DummyModuleLock:\n ''\n \n \n def __init__(self,name):\n self.name=name\n self.count=0\n \n def acquire(self):\n self.count +=1\n return True\n \n def release(self):\n if self.count ==0:\n raise RuntimeError('cannot release un-acquired lock')\n self.count -=1\n \n def __repr__(self):\n return '_DummyModuleLock({!r}) at {}'.format(self.name,id(self))\n \n \nclass _ModuleLockManager:\n\n def __init__(self,name):\n self._name=name\n self._lock=None\n \n def __enter__(self):\n self._lock=_get_module_lock(self._name)\n self._lock.acquire()\n \n def __exit__(self,*args,**kwargs):\n self._lock.release()\n \n \n \n \ndef _get_module_lock(name):\n ''\n\n\n \n \n _imp.acquire_lock()\n try:\n try:\n lock=_module_locks[name]()\n except KeyError:\n lock=None\n \n if lock is None:\n if 
_thread is None:\n lock=_DummyModuleLock(name)\n else:\n lock=_ModuleLock(name)\n \n def cb(ref,name=name):\n _imp.acquire_lock()\n try:\n \n \n \n if _module_locks.get(name)is ref:\n del _module_locks[name]\n finally:\n _imp.release_lock()\n \n _module_locks[name]=_weakref.ref(lock,cb)\n finally:\n _imp.release_lock()\n \n return lock\n \n \ndef _lock_unlock_module(name):\n ''\n\n\n\n \n lock=_get_module_lock(name)\n try:\n lock.acquire()\n except _DeadlockError:\n \n \n pass\n else:\n lock.release()\n \n \ndef _call_with_frames_removed(f,*args,**kwds):\n ''\n\n\n\n\n\n \n return f(*args,**kwds)\n \n \ndef _verbose_message(message,*args,verbosity=1):\n ''\n if sys.flags.verbose >=verbosity:\n if not message.startswith(('#','import ')):\n message='# '+message\n print(message.format(*args),file=sys.stderr)\n \n \ndef _requires_builtin(fxn):\n ''\n def _requires_builtin_wrapper(self,fullname):\n if fullname not in sys.builtin_module_names:\n raise ImportError('{!r} is not a built-in module'.format(fullname),\n name=fullname)\n return fxn(self,fullname)\n _wrap(_requires_builtin_wrapper,fxn)\n return _requires_builtin_wrapper\n \n \ndef _requires_frozen(fxn):\n ''\n def _requires_frozen_wrapper(self,fullname):\n if not _imp.is_frozen(fullname):\n raise ImportError('{!r} is not a frozen module'.format(fullname),\n name=fullname)\n return fxn(self,fullname)\n _wrap(_requires_frozen_wrapper,fxn)\n return _requires_frozen_wrapper\n \n \n \ndef _load_module_shim(self,fullname):\n ''\n\n\n\n \n spec=spec_from_loader(fullname,self)\n if fullname in sys.modules:\n module=sys.modules[fullname]\n _exec(spec,module)\n return sys.modules[fullname]\n else:\n return _load(spec)\n \n \n \ndef _module_repr(module):\n\n loader=getattr(module,'__loader__',None)\n if hasattr(loader,'module_repr'):\n \n \n \n try:\n return loader.module_repr(module)\n except Exception:\n pass\n try:\n spec=module.__spec__\n except AttributeError:\n pass\n else:\n if spec is not None:\n return _module_repr_from_spec(spec)\n \n \n \n try:\n name=module.__name__\n except AttributeError:\n name='?'\n try:\n filename=module.__file__\n except AttributeError:\n if loader is None:\n return ''.format(name)\n else:\n return ''.format(name,loader)\n else:\n return ''.format(name,filename)\n \n \nclass _installed_safely:\n\n def __init__(self,module):\n self._module=module\n self._spec=module.__spec__\n \n def __enter__(self):\n \n \n \n self._spec._initializing=True\n sys.modules[self._spec.name]=self._module\n \n def __exit__(self,*args):\n try:\n spec=self._spec\n if any(arg is not None for arg in args):\n try:\n del sys.modules[spec.name]\n except KeyError:\n pass\n else:\n _verbose_message('import {!r} # {!r}',spec.name,spec.loader)\n finally:\n self._spec._initializing=False\n \n \nclass ModuleSpec:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,name,loader,*,origin=None,loader_state=None,\n is_package=None):\n self.name=name\n self.loader=loader\n self.origin=origin\n self.loader_state=loader_state\n self.submodule_search_locations=[]if is_package else None\n \n \n self._set_fileattr=False\n self._cached=None\n \n def __repr__(self):\n args=['name={!r}'.format(self.name),\n 'loader={!r}'.format(self.loader)]\n if self.origin is not None:\n args.append('origin={!r}'.format(self.origin))\n if self.submodule_search_locations is not None:\n args.append('submodule_search_locations={}'\n .format(self.submodule_search_locations))\n return '{}({})'.format(self.__class__.__name__,', 
'.join(args))\n \n def __eq__(self,other):\n smsl=self.submodule_search_locations\n try:\n return(self.name ==other.name and\n self.loader ==other.loader and\n self.origin ==other.origin and\n smsl ==other.submodule_search_locations and\n self.cached ==other.cached and\n self.has_location ==other.has_location)\n except AttributeError:\n return False\n \n @property\n def cached(self):\n if self._cached is None:\n if self.origin is not None and self._set_fileattr:\n if _bootstrap_external is None:\n raise NotImplementedError\n self._cached=_bootstrap_external._get_cached(self.origin)\n return self._cached\n \n @cached.setter\n def cached(self,cached):\n self._cached=cached\n \n @property\n def parent(self):\n ''\n if self.submodule_search_locations is None:\n return self.name.rpartition('.')[0]\n else:\n return self.name\n \n @property\n def has_location(self):\n return self._set_fileattr\n \n @has_location.setter\n def has_location(self,value):\n self._set_fileattr=bool(value)\n \n \ndef spec_from_loader(name,loader,*,origin=None,is_package=None):\n ''\n if hasattr(loader,'get_filename'):\n if _bootstrap_external is None:\n raise NotImplementedError\n spec_from_file_location=_bootstrap_external.spec_from_file_location\n \n if is_package is None:\n return spec_from_file_location(name,loader=loader)\n search=[]if is_package else None\n return spec_from_file_location(name,loader=loader,\n submodule_search_locations=search)\n \n if is_package is None:\n if hasattr(loader,'is_package'):\n try:\n is_package=loader.is_package(name)\n except ImportError:\n is_package=None\n else:\n \n is_package=False\n \n return ModuleSpec(name,loader,origin=origin,is_package=is_package)\n \n \ndef _spec_from_module(module,loader=None,origin=None):\n\n try:\n spec=module.__spec__\n except AttributeError:\n pass\n else:\n if spec is not None:\n return spec\n \n name=module.__name__\n if loader is None:\n try:\n loader=module.__loader__\n except AttributeError:\n \n pass\n try:\n location=module.__file__\n except AttributeError:\n location=None\n if origin is None:\n if location is None:\n try:\n origin=loader._ORIGIN\n except AttributeError:\n origin=None\n else:\n origin=location\n try:\n cached=module.__cached__\n except AttributeError:\n cached=None\n try:\n submodule_search_locations=list(module.__path__)\n except AttributeError:\n submodule_search_locations=None\n \n spec=ModuleSpec(name,loader,origin=origin)\n spec._set_fileattr=False if location is None else True\n spec.cached=cached\n spec.submodule_search_locations=submodule_search_locations\n return spec\n \n \ndef _init_module_attrs(spec,module,*,override=False):\n\n\n\n if(override or getattr(module,'__name__',None)is None):\n try:\n module.__name__=spec.name\n except AttributeError:\n pass\n \n if override or getattr(module,'__loader__',None)is None:\n loader=spec.loader\n if loader is None:\n \n if spec.submodule_search_locations is not None:\n if _bootstrap_external is None:\n raise NotImplementedError\n _NamespaceLoader=_bootstrap_external._NamespaceLoader\n \n loader=_NamespaceLoader.__new__(_NamespaceLoader)\n loader._path=spec.submodule_search_locations\n spec.loader=loader\n \n \n \n \n \n \n \n \n \n \n module.__file__=None\n try:\n module.__loader__=loader\n except AttributeError:\n pass\n \n if override or getattr(module,'__package__',None)is None:\n try:\n module.__package__=spec.parent\n except AttributeError:\n pass\n \n try:\n module.__spec__=spec\n except AttributeError:\n pass\n \n if override or getattr(module,'__path__',None)is 
None:\n if spec.submodule_search_locations is not None:\n try:\n module.__path__=spec.submodule_search_locations\n except AttributeError:\n pass\n \n if spec.has_location:\n if override or getattr(module,'__file__',None)is None:\n try:\n module.__file__=spec.origin\n except AttributeError:\n pass\n \n if override or getattr(module,'__cached__',None)is None:\n if spec.cached is not None:\n try:\n module.__cached__=spec.cached\n except AttributeError:\n pass\n return module\n \n \ndef module_from_spec(spec):\n ''\n \n module=None\n if hasattr(spec.loader,'create_module'):\n \n \n module=spec.loader.create_module(spec)\n elif hasattr(spec.loader,'exec_module'):\n raise ImportError('loaders that define exec_module() '\n 'must also define create_module()')\n if module is None:\n module=_new_module(spec.name)\n _init_module_attrs(spec,module)\n return module\n \n \ndef _module_repr_from_spec(spec):\n ''\n \n name='?'if spec.name is None else spec.name\n if spec.origin is None:\n if spec.loader is None:\n return ''.format(name)\n else:\n return ''.format(name,spec.loader)\n else:\n if spec.has_location:\n return ''.format(name,spec.origin)\n else:\n return ''.format(spec.name,spec.origin)\n \n \n \ndef _exec(spec,module):\n ''\n name=spec.name\n with _ModuleLockManager(name):\n if sys.modules.get(name)is not module:\n msg='module {!r} not in sys.modules'.format(name)\n raise ImportError(msg,name=name)\n if spec.loader is None:\n if spec.submodule_search_locations is None:\n raise ImportError('missing loader',name=spec.name)\n \n _init_module_attrs(spec,module,override=True)\n return module\n _init_module_attrs(spec,module,override=True)\n if not hasattr(spec.loader,'exec_module'):\n \n \n \n spec.loader.load_module(name)\n else:\n spec.loader.exec_module(module)\n return sys.modules[name]\n \n \ndef _load_backward_compatible(spec):\n\n\n\n spec.loader.load_module(spec.name)\n \n module=sys.modules[spec.name]\n if getattr(module,'__loader__',None)is None:\n try:\n module.__loader__=spec.loader\n except AttributeError:\n pass\n if getattr(module,'__package__',None)is None:\n try:\n \n \n \n module.__package__=module.__name__\n if not hasattr(module,'__path__'):\n module.__package__=spec.name.rpartition('.')[0]\n except AttributeError:\n pass\n if getattr(module,'__spec__',None)is None:\n try:\n module.__spec__=spec\n except AttributeError:\n pass\n return module\n \ndef _load_unlocked(spec):\n\n if spec.loader is not None:\n \n if not hasattr(spec.loader,'exec_module'):\n return _load_backward_compatible(spec)\n \n module=module_from_spec(spec)\n with _installed_safely(module):\n if spec.loader is None:\n if spec.submodule_search_locations is None:\n raise ImportError('missing loader',name=spec.name)\n \n else:\n spec.loader.exec_module(module)\n \n \n \n \n return sys.modules[spec.name]\n \n \n \ndef _load(spec):\n ''\n\n\n\n\n\n\n \n with _ModuleLockManager(spec.name):\n return _load_unlocked(spec)\n \n \n \n \nclass BuiltinImporter:\n\n ''\n\n\n\n\n \n \n @staticmethod\n def module_repr(module):\n ''\n\n\n\n \n return ''.format(module.__name__)\n \n @classmethod\n def find_spec(cls,fullname,path=None,target=None):\n if path is not None:\n return None\n if _imp.is_builtin(fullname):\n return spec_from_loader(fullname,cls,origin='built-in')\n else:\n return None\n \n @classmethod\n def find_module(cls,fullname,path=None):\n ''\n\n\n\n\n\n \n spec=cls.find_spec(fullname,path)\n return spec.loader if spec is not None else None\n \n @classmethod\n def create_module(self,spec):\n ''\n if spec.name not 
in sys.builtin_module_names:\n raise ImportError('{!r} is not a built-in module'.format(spec.name),\n name=spec.name)\n return _call_with_frames_removed(_imp.create_builtin,spec)\n \n @classmethod\n def exec_module(self,module):\n ''\n _call_with_frames_removed(_imp.exec_builtin,module)\n \n @classmethod\n @_requires_builtin\n def get_code(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_builtin\n def get_source(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_builtin\n def is_package(cls,fullname):\n ''\n return False\n \n load_module=classmethod(_load_module_shim)\n \n \nclass FrozenImporter:\n\n ''\n\n\n\n\n \n \n @staticmethod\n def module_repr(m):\n ''\n\n\n\n \n return ''.format(m.__name__)\n \n @classmethod\n def find_spec(cls,fullname,path=None,target=None):\n if _imp.is_frozen(fullname):\n return spec_from_loader(fullname,cls,origin='frozen')\n else:\n return None\n \n @classmethod\n def find_module(cls,fullname,path=None):\n ''\n\n\n\n \n return cls if _imp.is_frozen(fullname)else None\n \n @classmethod\n def create_module(cls,spec):\n ''\n \n @staticmethod\n def exec_module(module):\n name=module.__spec__.name\n if not _imp.is_frozen(name):\n raise ImportError('{!r} is not a frozen module'.format(name),\n name=name)\n code=_call_with_frames_removed(_imp.get_frozen_object,name)\n exec(code,module.__dict__)\n \n @classmethod\n def load_module(cls,fullname):\n ''\n\n\n\n \n return _load_module_shim(cls,fullname)\n \n @classmethod\n @_requires_frozen\n def get_code(cls,fullname):\n ''\n return _imp.get_frozen_object(fullname)\n \n @classmethod\n @_requires_frozen\n def get_source(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_frozen\n def is_package(cls,fullname):\n ''\n return _imp.is_frozen_package(fullname)\n \n \n \n \nclass _ImportLockContext:\n\n ''\n \n def __enter__(self):\n ''\n _imp.acquire_lock()\n \n def __exit__(self,exc_type,exc_value,exc_traceback):\n ''\n _imp.release_lock()\n \n \ndef _resolve_name(name,package,level):\n ''\n bits=package.rsplit('.',level -1)\n if len(bits)= 0')\n if level >0:\n if not isinstance(package,str):\n raise TypeError('__package__ not set to a string')\n elif not package:\n raise ImportError('attempted relative import with no known parent '\n 'package')\n if not name and level ==0:\n raise ValueError('Empty module name')\n \n \n_ERR_MSG_PREFIX='No module named '\n_ERR_MSG=_ERR_MSG_PREFIX+'{!r}'\n\ndef _find_and_load_unlocked(name,import_):\n path=None\n parent=name.rpartition('.')[0]\n if parent:\n if parent not in sys.modules:\n _call_with_frames_removed(import_,parent)\n \n if name in sys.modules:\n return sys.modules[name]\n parent_module=sys.modules[parent]\n try:\n path=parent_module.__path__\n except AttributeError:\n msg=(_ERR_MSG+'; {!r} is not a package').format(name,parent)\n raise ModuleNotFoundError(msg,name=name)from None\n spec=_find_spec(name,path)\n if spec is None:\n raise ModuleNotFoundError(_ERR_MSG.format(name),name=name)\n else:\n module=_load_unlocked(spec)\n if parent:\n \n parent_module=sys.modules[parent]\n setattr(parent_module,name.rpartition('.')[2],module)\n return module\n \n \n_NEEDS_LOADING=object()\n\n\ndef _find_and_load(name,import_):\n ''\n with _ModuleLockManager(name):\n module=sys.modules.get(name,_NEEDS_LOADING)\n if module is _NEEDS_LOADING:\n return _find_and_load_unlocked(name,import_)\n \n if module is None:\n message=('import of {} halted; '\n 'None in sys.modules'.format(name))\n raise ModuleNotFoundError(message,name=name)\n \n 
_lock_unlock_module(name)\n return module\n \n \ndef _gcd_import(name,package=None,level=0):\n ''\n\n\n\n\n\n\n \n _sanity_check(name,package,level)\n if level >0:\n name=_resolve_name(name,package,level)\n return _find_and_load(name,_gcd_import)\n \n \ndef _handle_fromlist(module,fromlist,import_,*,recursive=False):\n ''\n\n\n\n\n\n \n \n \n if hasattr(module,'__path__'):\n for x in fromlist:\n if not isinstance(x,str):\n if recursive:\n where=module.__name__+'.__all__'\n else:\n where=\"``from list''\"\n raise TypeError(f\"Item in {where} must be str, \"\n f\"not {type(x).__name__}\")\n elif x =='*':\n if not recursive and hasattr(module,'__all__'):\n _handle_fromlist(module,module.__all__,import_,\n recursive=True)\n elif not hasattr(module,x):\n from_name='{}.{}'.format(module.__name__,x)\n try:\n _call_with_frames_removed(import_,from_name)\n except ModuleNotFoundError as exc:\n \n \n \n if(exc.name ==from_name and\n sys.modules.get(from_name,_NEEDS_LOADING)is not None):\n continue\n raise\n return module\n \n \ndef _calc___package__(globals):\n ''\n\n\n\n\n \n package=globals.get('__package__')\n spec=globals.get('__spec__')\n if package is not None:\n if spec is not None and package !=spec.parent:\n _warnings.warn(\"__package__ != __spec__.parent \"\n f\"({package !r} != {spec.parent !r})\",\n ImportWarning,stacklevel=3)\n return package\n elif spec is not None:\n return spec.parent\n else:\n _warnings.warn(\"can't resolve package from __spec__ or __package__, \"\n \"falling back on __name__ and __path__\",\n ImportWarning,stacklevel=3)\n package=globals['__name__']\n if '__path__'not in globals:\n package=package.rpartition('.')[0]\n return package\n \n \ndef __import__(name,globals=None,locals=None,fromlist=(),level=0):\n ''\n\n\n\n\n\n\n\n\n \n if level ==0:\n module=_gcd_import(name)\n else:\n globals_=globals if globals is not None else{}\n package=_calc___package__(globals_)\n module=_gcd_import(name,package,level)\n if not fromlist:\n \n \n if level ==0:\n return _gcd_import(name.partition('.')[0])\n elif not name:\n return module\n else:\n \n \n cut_off=len(name)-len(name.partition('.')[0])\n \n \n return sys.modules[module.__name__[:len(module.__name__)-cut_off]]\n else:\n return _handle_fromlist(module,fromlist,_gcd_import)\n \n \ndef _builtin_from_name(name):\n spec=BuiltinImporter.find_spec(name)\n if spec is None:\n raise ImportError('no built-in module named '+name)\n return _load_unlocked(spec)\n \n \ndef _setup(sys_module,_imp_module):\n ''\n\n\n\n\n\n \n global _imp,sys\n _imp=_imp_module\n sys=sys_module\n \n \n module_type=type(sys)\n for name,module in sys.modules.items():\n if isinstance(module,module_type):\n if name in sys.builtin_module_names:\n loader=BuiltinImporter\n elif _imp.is_frozen(name):\n loader=FrozenImporter\n else:\n continue\n spec=_spec_from_module(module,loader)\n _init_module_attrs(spec,module)\n \n \n self_module=sys.modules[__name__]\n \n \n for builtin_name in('_warnings',):\n if builtin_name not in sys.modules:\n builtin_module=_builtin_from_name(builtin_name)\n else:\n builtin_module=sys.modules[builtin_name]\n setattr(self_module,builtin_name,builtin_module)\n \n \ndef _install(sys_module,_imp_module):\n ''\n _setup(sys_module,_imp_module)\n \n sys.meta_path.append(BuiltinImporter)\n sys.meta_path.append(FrozenImporter)\n \n \ndef _install_external_importers():\n ''\n global _bootstrap_external\n import _frozen_importlib_external\n _bootstrap_external=_frozen_importlib_external\n 
_frozen_importlib_external._install(sys.modules[__name__])\n", ["_frozen_importlib_external", "_weakref"]], "importlib": [".py", "''\n__all__=['__import__','import_module','invalidate_caches','reload']\n\n\n\n\n\n\n\n\n\nimport _imp\nimport sys\n\ntry:\n import _frozen_importlib as _bootstrap\nexcept ImportError:\n from. import _bootstrap\n _bootstrap._setup(sys,_imp)\nelse:\n\n\n _bootstrap.__name__='importlib._bootstrap'\n _bootstrap.__package__='importlib'\n try:\n _bootstrap.__file__=__file__.replace('__init__.py','_bootstrap.py')\n except NameError:\n \n \n pass\n sys.modules['importlib._bootstrap']=_bootstrap\n \ntry:\n import _frozen_importlib_external as _bootstrap_external\nexcept ImportError:\n from. import _bootstrap_external\n _bootstrap_external._set_bootstrap_module(_bootstrap)\n _bootstrap._bootstrap_external=_bootstrap_external\nelse:\n _bootstrap_external.__name__='importlib._bootstrap_external'\n _bootstrap_external.__package__='importlib'\n try:\n _bootstrap_external.__file__=__file__.replace('__init__.py','_bootstrap_external.py')\n except NameError:\n \n \n pass\n sys.modules['importlib._bootstrap_external']=_bootstrap_external\n \n \n_pack_uint32=_bootstrap_external._pack_uint32\n_unpack_uint32=_bootstrap_external._unpack_uint32\n\n\n\n\nimport warnings\n\n\n\n\nfrom._bootstrap import __import__\n\n\ndef invalidate_caches():\n ''\n \n for finder in sys.meta_path:\n if hasattr(finder,'invalidate_caches'):\n finder.invalidate_caches()\n \n \ndef import_module(name,package=None):\n ''\n\n\n\n\n\n \n level=0\n if name.startswith('.'):\n if not package:\n raise TypeError(\"the 'package' argument is required to perform a \"\n f\"relative import for {name !r}\")\n for character in name:\n if character !='.':\n break\n level +=1\n return _bootstrap._gcd_import(name[level:],package,level)\n \n \n_RELOADING={}\n\n\ndef reload(module):\n ''\n\n\n\n \n try:\n name=module.__spec__.name\n except AttributeError:\n try:\n name=module.__name__\n except AttributeError:\n raise TypeError(\"reload() argument must be a module\")\n \n if sys.modules.get(name)is not module:\n raise ImportError(f\"module {name} not in sys.modules\",name=name)\n if name in _RELOADING:\n return _RELOADING[name]\n _RELOADING[name]=module\n try:\n parent_name=name.rpartition('.')[0]\n if parent_name:\n try:\n parent=sys.modules[parent_name]\n except KeyError:\n raise ImportError(f\"parent {parent_name !r} not in sys.modules\",\n name=parent_name)from None\n else:\n pkgpath=parent.__path__\n else:\n pkgpath=None\n target=module\n spec=module.__spec__=_bootstrap._find_spec(name,pkgpath,target)\n if spec is None:\n raise ModuleNotFoundError(f\"spec not found for the module {name !r}\",name=name)\n _bootstrap._exec(spec,module)\n \n return sys.modules[name]\n finally:\n try:\n del _RELOADING[name]\n except KeyError:\n pass\n", ["_frozen_importlib", "_frozen_importlib_external", "_imp", "importlib", "importlib._bootstrap", "importlib._bootstrap_external", "sys", "warnings"], 1], "importlib._bootstrap_external": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_bootstrap=None\n\n\nimport _imp\nimport _io\nimport sys\nimport _warnings\nimport marshal\n\n\n_MS_WINDOWS=(sys.platform =='win32')\nif _MS_WINDOWS:\n import nt as _os\n import winreg\nelse:\n import posix as _os\n \n \nif _MS_WINDOWS:\n path_separators=['\\\\','/']\nelse:\n path_separators=['/']\n \nassert all(len(sep)==1 for sep in 
path_separators)\npath_sep=path_separators[0]\npath_sep_tuple=tuple(path_separators)\npath_separators=''.join(path_separators)\n_pathseps_with_colon={f':{s}'for s in path_separators}\n\n\n\n_CASE_INSENSITIVE_PLATFORMS_STR_KEY='win',\n_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY='cygwin','darwin'\n_CASE_INSENSITIVE_PLATFORMS=(_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY\n+_CASE_INSENSITIVE_PLATFORMS_STR_KEY)\n\n\ndef _make_relax_case():\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS_STR_KEY):\n key='PYTHONCASEOK'\n else:\n key=b'PYTHONCASEOK'\n \n def _relax_case():\n ''\n return not sys.flags.ignore_environment and key in _os.environ\n else:\n def _relax_case():\n ''\n return False\n return _relax_case\n \n_relax_case=_make_relax_case()\n\n\ndef _pack_uint32(x):\n ''\n return(int(x)&0xFFFFFFFF).to_bytes(4,'little')\n \n \ndef _unpack_uint32(data):\n ''\n assert len(data)==4\n return int.from_bytes(data,'little')\n \ndef _unpack_uint16(data):\n ''\n assert len(data)==2\n return int.from_bytes(data,'little')\n \n \nif _MS_WINDOWS:\n def _path_join(*path_parts):\n ''\n if not path_parts:\n return \"\"\n if len(path_parts)==1:\n return path_parts[0]\n root=\"\"\n path=[]\n for new_root,tail in map(_os._path_splitroot,path_parts):\n if new_root.startswith(path_sep_tuple)or new_root.endswith(path_sep_tuple):\n root=new_root.rstrip(path_separators)or root\n path=[path_sep+tail]\n elif new_root.endswith(':'):\n if root.casefold()!=new_root.casefold():\n \n \n root=new_root\n path=[tail]\n else:\n path.append(tail)\n else:\n root=new_root or root\n path.append(tail)\n path=[p.rstrip(path_separators)for p in path if p]\n if len(path)==1 and not path[0]:\n \n return root+path_sep\n return root+path_sep.join(path)\n \nelse:\n def _path_join(*path_parts):\n ''\n return path_sep.join([part.rstrip(path_separators)\n for part in path_parts if part])\n \n \ndef _path_split(path):\n ''\n i=max(path.rfind(p)for p in path_separators)\n if i <0:\n return '',path\n return path[:i],path[i+1:]\n \n \ndef _path_stat(path):\n ''\n\n\n\n\n \n return _os.stat(path)\n \n \ndef _path_is_mode_type(path,mode):\n ''\n try:\n stat_info=_path_stat(path)\n except OSError:\n return False\n return(stat_info.st_mode&0o170000)==mode\n \n \ndef _path_isfile(path):\n ''\n return _path_is_mode_type(path,0o100000)\n \n \ndef _path_isdir(path):\n ''\n if not path:\n path=_os.getcwd()\n return _path_is_mode_type(path,0o040000)\n \n \nif _MS_WINDOWS:\n def _path_isabs(path):\n ''\n if not path:\n return False\n root=_os._path_splitroot(path)[0].replace('/','\\\\')\n return len(root)>1 and(root.startswith('\\\\\\\\')or root.endswith('\\\\'))\n \nelse:\n def _path_isabs(path):\n ''\n return path.startswith(path_separators)\n \n \ndef _path_abspath(path):\n ''\n if not _path_isabs(path):\n for sep in path_separators:\n path=path.removeprefix(f\".{sep}\")\n return _path_join(_os.getcwd(),path)\n else:\n return path\n \n \ndef _write_atomic(path,data,mode=0o666):\n ''\n\n \n \n path_tmp=f'{path}.{id(path)}'\n fd=_os.open(path_tmp,\n _os.O_EXCL |_os.O_CREAT |_os.O_WRONLY,mode&0o666)\n try:\n \n \n with _io.FileIO(fd,'wb')as file:\n file.write(data)\n _os.replace(path_tmp,path)\n except OSError:\n try:\n _os.unlink(path_tmp)\n except OSError:\n pass\n raise\n \n 
\n_code_type=type(_write_atomic.__code__)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nMAGIC_NUMBER=(3531).to_bytes(2,'little')+b'\\r\\n'\n\n_RAW_MAGIC_NUMBER=int.from_bytes(MAGIC_NUMBER,'little')\n\n_PYCACHE='__pycache__'\n_OPT='opt-'\n\nSOURCE_SUFFIXES=['.py']\nif _MS_WINDOWS:\n SOURCE_SUFFIXES.append('.pyw')\n \nEXTENSION_SUFFIXES=_imp.extension_suffixes()\n\nBYTECODE_SUFFIXES=['.pyc']\n\nDEBUG_BYTECODE_SUFFIXES=OPTIMIZED_BYTECODE_SUFFIXES=BYTECODE_SUFFIXES\n\ndef cache_from_source(path,debug_override=None,*,optimization=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if debug_override is not None:\n _warnings.warn('the debug_override parameter is deprecated; use '\n \"'optimization' instead\",DeprecationWarning)\n if optimization is not None:\n message='debug_override or optimization must be set to None'\n raise TypeError(message)\n optimization=''if debug_override else 1\n path=_os.fspath(path)\n head,tail=_path_split(path)\n base,sep,rest=tail.rpartition('.')\n tag=sys.implementation.cache_tag\n if tag is None:\n raise NotImplementedError('sys.implementation.cache_tag is None')\n almost_filename=''.join([(base if base else rest),sep,tag])\n if optimization is None:\n if sys.flags.optimize ==0:\n optimization=''\n else:\n optimization=sys.flags.optimize\n optimization=str(optimization)\n if optimization !='':\n if not optimization.isalnum():\n raise ValueError(f'{optimization !r} is not alphanumeric')\n almost_filename=f'{almost_filename}.{_OPT}{optimization}'\n filename=almost_filename+BYTECODE_SUFFIXES[0]\n if sys.pycache_prefix is not None:\n \n \n \n \n \n \n \n \n head=_path_abspath(head)\n \n \n \n \n if head[1]==':'and head[0]not in path_separators:\n head=head[2:]\n \n \n \n return _path_join(\n sys.pycache_prefix,\n head.lstrip(path_separators),\n filename,\n )\n return _path_join(head,_PYCACHE,filename)\n \n \ndef source_from_cache(path):\n ''\n\n\n\n\n\n\n \n if sys.implementation.cache_tag is None:\n raise NotImplementedError('sys.implementation.cache_tag is None')\n path=_os.fspath(path)\n head,pycache_filename=_path_split(path)\n found_in_pycache_prefix=False\n if sys.pycache_prefix is not None:\n stripped_path=sys.pycache_prefix.rstrip(path_separators)\n if head.startswith(stripped_path+path_sep):\n head=head[len(stripped_path):]\n found_in_pycache_prefix=True\n if not found_in_pycache_prefix:\n head,pycache=_path_split(head)\n if pycache !=_PYCACHE:\n raise ValueError(f'{_PYCACHE} not bottom-level directory in '\n f'{path !r}')\n dot_count=pycache_filename.count('.')\n if dot_count not in{2,3}:\n raise ValueError(f'expected only 2 or 3 dots in {pycache_filename !r}')\n elif dot_count ==3:\n optimization=pycache_filename.rsplit('.',2)[-2]\n if not optimization.startswith(_OPT):\n raise ValueError(\"optimization portion of filename does not start \"\n f\"with {_OPT !r}\")\n opt_level=optimization[len(_OPT):]\n if not opt_level.isalnum():\n raise ValueError(f\"optimization level {optimization !r} is not an \"\n \"alphanumeric value\")\n base_filename=pycache_filename.partition('.')[0]\n return 
_path_join(head,base_filename+SOURCE_SUFFIXES[0])\n \n \ndef _get_sourcefile(bytecode_path):\n ''\n\n\n\n\n \n if len(bytecode_path)==0:\n return None\n rest,_,extension=bytecode_path.rpartition('.')\n if not rest or extension.lower()[-3:-1]!='py':\n return bytecode_path\n try:\n source_path=source_from_cache(bytecode_path)\n except(NotImplementedError,ValueError):\n source_path=bytecode_path[:-1]\n return source_path if _path_isfile(source_path)else bytecode_path\n \n \ndef _get_cached(filename):\n if filename.endswith(tuple(SOURCE_SUFFIXES)):\n try:\n return cache_from_source(filename)\n except NotImplementedError:\n pass\n elif filename.endswith(tuple(BYTECODE_SUFFIXES)):\n return filename\n else:\n return None\n \n \ndef _calc_mode(path):\n ''\n try:\n mode=_path_stat(path).st_mode\n except OSError:\n mode=0o666\n \n \n mode |=0o200\n return mode\n \n \ndef _check_name(method):\n ''\n\n\n\n\n\n \n def _check_name_wrapper(self,name=None,*args,**kwargs):\n if name is None:\n name=self.name\n elif self.name !=name:\n raise ImportError('loader for %s cannot handle %s'%\n (self.name,name),name=name)\n return method(self,name,*args,**kwargs)\n \n \n \n if _bootstrap is not None:\n _wrap=_bootstrap._wrap\n else:\n def _wrap(new,old):\n for replace in['__module__','__name__','__qualname__','__doc__']:\n if hasattr(old,replace):\n setattr(new,replace,getattr(old,replace))\n new.__dict__.update(old.__dict__)\n \n _wrap(_check_name_wrapper,method)\n return _check_name_wrapper\n \n \ndef _classify_pyc(data,name,exc_details):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n magic=data[:4]\n if magic !=MAGIC_NUMBER:\n message=f'bad magic number in {name !r}: {magic !r}'\n _bootstrap._verbose_message('{}',message)\n raise ImportError(message,**exc_details)\n if len(data)<16:\n message=f'reached EOF while reading pyc header of {name !r}'\n _bootstrap._verbose_message('{}',message)\n raise EOFError(message)\n flags=_unpack_uint32(data[4:8])\n \n if flags&~0b11:\n message=f'invalid flags {flags !r} in {name !r}'\n raise ImportError(message,**exc_details)\n return flags\n \n \ndef _validate_timestamp_pyc(data,source_mtime,source_size,name,\nexc_details):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _unpack_uint32(data[8:12])!=(source_mtime&0xFFFFFFFF):\n message=f'bytecode is stale for {name !r}'\n _bootstrap._verbose_message('{}',message)\n raise ImportError(message,**exc_details)\n if(source_size is not None and\n _unpack_uint32(data[12:16])!=(source_size&0xFFFFFFFF)):\n raise ImportError(f'bytecode is stale for {name !r}',**exc_details)\n \n \ndef _validate_hash_pyc(data,source_hash,name,exc_details):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if data[8:16]!=source_hash:\n raise ImportError(\n f'hash in bytecode doesn\\'t match hash of source {name !r}',\n **exc_details,\n )\n \n \ndef _compile_bytecode(data,name=None,bytecode_path=None,source_path=None):\n ''\n code=marshal.loads(data)\n if isinstance(code,_code_type):\n _bootstrap._verbose_message('code object from {!r}',bytecode_path)\n if source_path is not None:\n _imp._fix_co_filename(code,source_path)\n return code\n else:\n raise ImportError(f'Non-code object in {bytecode_path !r}',\n name=name,path=bytecode_path)\n \n \ndef _code_to_timestamp_pyc(code,mtime=0,source_size=0):\n ''\n data=bytearray(MAGIC_NUMBER)\n data.extend(_pack_uint32(0))\n data.extend(_pack_uint32(mtime))\n data.extend(_pack_uint32(source_size))\n data.extend(marshal.dumps(code))\n return data\n \n \ndef _code_to_hash_pyc(code,source_hash,checked=True):\n ''\n data=bytearray(MAGIC_NUMBER)\n 
flags=0b1 |checked <<1\n data.extend(_pack_uint32(flags))\n assert len(source_hash)==8\n data.extend(source_hash)\n data.extend(marshal.dumps(code))\n return data\n \n \ndef decode_source(source_bytes):\n ''\n\n\n \n import tokenize\n source_bytes_readline=_io.BytesIO(source_bytes).readline\n encoding=tokenize.detect_encoding(source_bytes_readline)\n newline_decoder=_io.IncrementalNewlineDecoder(None,True)\n return newline_decoder.decode(source_bytes.decode(encoding[0]))\n \n \n \n \n_POPULATE=object()\n\n\ndef spec_from_file_location(name,location=None,*,loader=None,\nsubmodule_search_locations=_POPULATE):\n ''\n\n\n\n\n\n\n\n\n \n if location is None:\n \n \n \n location=''\n if hasattr(loader,'get_filename'):\n \n try:\n location=loader.get_filename(name)\n except ImportError:\n pass\n else:\n location=_os.fspath(location)\n try:\n location=_path_abspath(location)\n except OSError:\n pass\n \n \n \n \n \n \n \n spec=_bootstrap.ModuleSpec(name,loader,origin=location)\n spec._set_fileattr=True\n \n \n if loader is None:\n for loader_class,suffixes in _get_supported_file_loaders():\n if location.endswith(tuple(suffixes)):\n loader=loader_class(name,location)\n spec.loader=loader\n break\n else:\n return None\n \n \n if submodule_search_locations is _POPULATE:\n \n if hasattr(loader,'is_package'):\n try:\n is_package=loader.is_package(name)\n except ImportError:\n pass\n else:\n if is_package:\n spec.submodule_search_locations=[]\n else:\n spec.submodule_search_locations=submodule_search_locations\n if spec.submodule_search_locations ==[]:\n if location:\n dirname=_path_split(location)[0]\n spec.submodule_search_locations.append(dirname)\n \n return spec\n \n \ndef _bless_my_loader(module_globals):\n ''\n\n\n \n \n \n \n \n \n \n \n if not isinstance(module_globals,dict):\n return None\n \n missing=object()\n loader=module_globals.get('__loader__',None)\n spec=module_globals.get('__spec__',missing)\n \n if loader is None:\n if spec is missing:\n \n \n return None\n elif spec is None:\n raise ValueError('Module globals is missing a __spec__.loader')\n \n spec_loader=getattr(spec,'loader',missing)\n \n if spec_loader in(missing,None):\n if loader is None:\n exc=AttributeError if spec_loader is missing else ValueError\n raise exc('Module globals is missing a __spec__.loader')\n _warnings.warn(\n 'Module globals is missing a __spec__.loader',\n DeprecationWarning)\n spec_loader=loader\n \n assert spec_loader is not None\n if loader is not None and loader !=spec_loader:\n _warnings.warn(\n 'Module globals; __loader__ != __spec__.loader',\n DeprecationWarning)\n return loader\n \n return spec_loader\n \n \n \n \nclass WindowsRegistryFinder:\n\n ''\n \n REGISTRY_KEY=(\n 'Software\\\\Python\\\\PythonCore\\\\{sys_version}'\n '\\\\Modules\\\\{fullname}')\n REGISTRY_KEY_DEBUG=(\n 'Software\\\\Python\\\\PythonCore\\\\{sys_version}'\n '\\\\Modules\\\\{fullname}\\\\Debug')\n DEBUG_BUILD=(_MS_WINDOWS and '_d.pyd'in EXTENSION_SUFFIXES)\n \n @staticmethod\n def _open_registry(key):\n try:\n return winreg.OpenKey(winreg.HKEY_CURRENT_USER,key)\n except OSError:\n return winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,key)\n \n @classmethod\n def _search_registry(cls,fullname):\n if cls.DEBUG_BUILD:\n registry_key=cls.REGISTRY_KEY_DEBUG\n else:\n registry_key=cls.REGISTRY_KEY\n key=registry_key.format(fullname=fullname,\n sys_version='%d.%d'%sys.version_info[:2])\n try:\n with cls._open_registry(key)as hkey:\n filepath=winreg.QueryValue(hkey,'')\n except OSError:\n return None\n return filepath\n \n @classmethod\n 
def find_spec(cls,fullname,path=None,target=None):\n filepath=cls._search_registry(fullname)\n if filepath is None:\n return None\n try:\n _path_stat(filepath)\n except OSError:\n return None\n for loader,suffixes in _get_supported_file_loaders():\n if filepath.endswith(tuple(suffixes)):\n spec=_bootstrap.spec_from_loader(fullname,\n loader(fullname,filepath),\n origin=filepath)\n return spec\n \n \nclass _LoaderBasics:\n\n ''\n \n \n def is_package(self,fullname):\n ''\n \n filename=_path_split(self.get_filename(fullname))[1]\n filename_base=filename.rsplit('.',1)[0]\n tail_name=fullname.rpartition('.')[2]\n return filename_base =='__init__'and tail_name !='__init__'\n \n def create_module(self,spec):\n ''\n \n def exec_module(self,module):\n ''\n code=self.get_code(module.__name__)\n if code is None:\n raise ImportError(f'cannot load module {module.__name__ !r} when '\n 'get_code() returns None')\n _bootstrap._call_with_frames_removed(exec,code,module.__dict__)\n \n def load_module(self,fullname):\n ''\n \n return _bootstrap._load_module_shim(self,fullname)\n \n \nclass SourceLoader(_LoaderBasics):\n\n def path_mtime(self,path):\n ''\n\n\n\n \n raise OSError\n \n def path_stats(self,path):\n ''\n\n\n\n\n\n\n\n\n\n \n return{'mtime':self.path_mtime(path)}\n \n def _cache_bytecode(self,source_path,cache_path,data):\n ''\n\n\n\n\n \n \n return self.set_data(cache_path,data)\n \n def set_data(self,path,data):\n ''\n\n\n \n \n \n def get_source(self,fullname):\n ''\n path=self.get_filename(fullname)\n try:\n source_bytes=self.get_data(path)\n except OSError as exc:\n raise ImportError('source not available through get_data()',\n name=fullname)from exc\n return decode_source(source_bytes)\n \n def source_to_code(self,data,path,*,_optimize=-1):\n ''\n\n\n \n return _bootstrap._call_with_frames_removed(compile,data,path,'exec',\n dont_inherit=True,optimize=_optimize)\n \n def get_code(self,fullname):\n ''\n\n\n\n\n \n source_path=self.get_filename(fullname)\n source_mtime=None\n source_bytes=None\n source_hash=None\n hash_based=False\n check_source=True\n try:\n bytecode_path=cache_from_source(source_path)\n except NotImplementedError:\n bytecode_path=None\n else:\n try:\n st=self.path_stats(source_path)\n except OSError:\n pass\n else:\n source_mtime=int(st['mtime'])\n try:\n data=self.get_data(bytecode_path)\n except OSError:\n pass\n else:\n exc_details={\n 'name':fullname,\n 'path':bytecode_path,\n }\n try:\n flags=_classify_pyc(data,fullname,exc_details)\n bytes_data=memoryview(data)[16:]\n hash_based=flags&0b1 !=0\n if hash_based:\n check_source=flags&0b10 !=0\n if(_imp.check_hash_based_pycs !='never'and\n (check_source or\n _imp.check_hash_based_pycs =='always')):\n source_bytes=self.get_data(source_path)\n source_hash=_imp.source_hash(\n _RAW_MAGIC_NUMBER,\n source_bytes,\n )\n _validate_hash_pyc(data,source_hash,fullname,\n exc_details)\n else:\n _validate_timestamp_pyc(\n data,\n source_mtime,\n st['size'],\n fullname,\n exc_details,\n )\n except(ImportError,EOFError):\n pass\n else:\n _bootstrap._verbose_message('{} matches {}',bytecode_path,\n source_path)\n return _compile_bytecode(bytes_data,name=fullname,\n bytecode_path=bytecode_path,\n source_path=source_path)\n if source_bytes is None:\n source_bytes=self.get_data(source_path)\n code_object=self.source_to_code(source_bytes,source_path)\n _bootstrap._verbose_message('code object from {}',source_path)\n if(not sys.dont_write_bytecode and bytecode_path is not None and\n source_mtime is not None):\n if hash_based:\n if source_hash 
is None:\n source_hash=_imp.source_hash(_RAW_MAGIC_NUMBER,\n source_bytes)\n data=_code_to_hash_pyc(code_object,source_hash,check_source)\n else:\n data=_code_to_timestamp_pyc(code_object,source_mtime,\n len(source_bytes))\n try:\n self._cache_bytecode(source_path,bytecode_path,data)\n except NotImplementedError:\n pass\n return code_object\n \n \nclass FileLoader:\n\n ''\n \n \n def __init__(self,fullname,path):\n ''\n \n self.name=fullname\n self.path=path\n \n def __eq__(self,other):\n return(self.__class__ ==other.__class__ and\n self.__dict__ ==other.__dict__)\n \n def __hash__(self):\n return hash(self.name)^hash(self.path)\n \n @_check_name\n def load_module(self,fullname):\n ''\n\n\n\n \n \n \n \n return super(FileLoader,self).load_module(fullname)\n \n @_check_name\n def get_filename(self,fullname):\n ''\n return self.path\n \n def get_data(self,path):\n ''\n if isinstance(self,(SourceLoader,ExtensionFileLoader)):\n with _io.open_code(str(path))as file:\n return file.read()\n else:\n with _io.FileIO(path,'r')as file:\n return file.read()\n \n @_check_name\n def get_resource_reader(self,module):\n from importlib.readers import FileReader\n return FileReader(self)\n \n \nclass SourceFileLoader(FileLoader,SourceLoader):\n\n ''\n \n def path_stats(self,path):\n ''\n st=_path_stat(path)\n return{'mtime':st.st_mtime,'size':st.st_size}\n \n def _cache_bytecode(self,source_path,bytecode_path,data):\n \n mode=_calc_mode(source_path)\n return self.set_data(bytecode_path,data,_mode=mode)\n \n def set_data(self,path,data,*,_mode=0o666):\n ''\n parent,filename=_path_split(path)\n path_parts=[]\n \n while parent and not _path_isdir(parent):\n parent,part=_path_split(parent)\n path_parts.append(part)\n \n for part in reversed(path_parts):\n parent=_path_join(parent,part)\n try:\n _os.mkdir(parent)\n except FileExistsError:\n \n continue\n except OSError as exc:\n \n \n _bootstrap._verbose_message('could not create {!r}: {!r}',\n parent,exc)\n return\n try:\n _write_atomic(path,data,_mode)\n _bootstrap._verbose_message('created {!r}',path)\n except OSError as exc:\n \n _bootstrap._verbose_message('could not create {!r}: {!r}',path,\n exc)\n \n \nclass SourcelessFileLoader(FileLoader,_LoaderBasics):\n\n ''\n \n def get_code(self,fullname):\n path=self.get_filename(fullname)\n data=self.get_data(path)\n \n \n exc_details={\n 'name':fullname,\n 'path':path,\n }\n _classify_pyc(data,fullname,exc_details)\n return _compile_bytecode(\n memoryview(data)[16:],\n name=fullname,\n bytecode_path=path,\n )\n \n def get_source(self,fullname):\n ''\n return None\n \n \nclass ExtensionFileLoader(FileLoader,_LoaderBasics):\n\n ''\n\n\n\n \n \n def __init__(self,name,path):\n self.name=name\n self.path=path\n \n def __eq__(self,other):\n return(self.__class__ ==other.__class__ and\n self.__dict__ ==other.__dict__)\n \n def __hash__(self):\n return hash(self.name)^hash(self.path)\n \n def create_module(self,spec):\n ''\n module=_bootstrap._call_with_frames_removed(\n _imp.create_dynamic,spec)\n _bootstrap._verbose_message('extension module {!r} loaded from {!r}',\n spec.name,self.path)\n return module\n \n def exec_module(self,module):\n ''\n _bootstrap._call_with_frames_removed(_imp.exec_dynamic,module)\n _bootstrap._verbose_message('extension module {!r} executed from {!r}',\n self.name,self.path)\n \n def is_package(self,fullname):\n ''\n file_name=_path_split(self.path)[1]\n return any(file_name =='__init__'+suffix\n for suffix in EXTENSION_SUFFIXES)\n \n def get_code(self,fullname):\n ''\n return None\n \n 
def get_source(self,fullname):\n ''\n return None\n \n @_check_name\n def get_filename(self,fullname):\n ''\n return self.path\n \n \nclass _NamespacePath:\n ''\n\n\n\n \n \n \n \n _epoch=0\n \n def __init__(self,name,path,path_finder):\n self._name=name\n self._path=path\n self._last_parent_path=tuple(self._get_parent_path())\n self._last_epoch=self._epoch\n self._path_finder=path_finder\n \n def _find_parent_path_names(self):\n ''\n parent,dot,me=self._name.rpartition('.')\n if dot =='':\n \n return 'sys','path'\n \n \n return parent,'__path__'\n \n def _get_parent_path(self):\n parent_module_name,path_attr_name=self._find_parent_path_names()\n return getattr(sys.modules[parent_module_name],path_attr_name)\n \n def _recalculate(self):\n \n parent_path=tuple(self._get_parent_path())\n if parent_path !=self._last_parent_path or self._epoch !=self._last_epoch:\n spec=self._path_finder(self._name,parent_path)\n \n \n if spec is not None and spec.loader is None:\n if spec.submodule_search_locations:\n self._path=spec.submodule_search_locations\n self._last_parent_path=parent_path\n self._last_epoch=self._epoch\n return self._path\n \n def __iter__(self):\n return iter(self._recalculate())\n \n def __getitem__(self,index):\n return self._recalculate()[index]\n \n def __setitem__(self,index,path):\n self._path[index]=path\n \n def __len__(self):\n return len(self._recalculate())\n \n def __repr__(self):\n return f'_NamespacePath({self._path !r})'\n \n def __contains__(self,item):\n return item in self._recalculate()\n \n def append(self,item):\n self._path.append(item)\n \n \n \n \n \nclass NamespaceLoader:\n def __init__(self,name,path,path_finder):\n self._path=_NamespacePath(name,path,path_finder)\n \n def is_package(self,fullname):\n return True\n \n def get_source(self,fullname):\n return ''\n \n def get_code(self,fullname):\n return compile('','','exec',dont_inherit=True)\n \n def create_module(self,spec):\n ''\n \n def exec_module(self,module):\n pass\n \n def load_module(self,fullname):\n ''\n\n\n\n \n \n _bootstrap._verbose_message('namespace module loaded with path {!r}',\n self._path)\n \n return _bootstrap._load_module_shim(self,fullname)\n \n def get_resource_reader(self,module):\n from importlib.readers import NamespaceReader\n return NamespaceReader(self._path)\n \n \n \n_NamespaceLoader=NamespaceLoader\n\n\n\n\nclass PathFinder:\n\n ''\n \n @staticmethod\n def invalidate_caches():\n ''\n \n for name,finder in list(sys.path_importer_cache.items()):\n \n \n if finder is None or not _path_isabs(name):\n del sys.path_importer_cache[name]\n elif hasattr(finder,'invalidate_caches'):\n finder.invalidate_caches()\n \n \n _NamespacePath._epoch +=1\n \n @staticmethod\n def _path_hooks(path):\n ''\n if sys.path_hooks is not None and not sys.path_hooks:\n _warnings.warn('sys.path_hooks is empty',ImportWarning)\n for hook in sys.path_hooks:\n try:\n return hook(path)\n except ImportError:\n continue\n else:\n return None\n \n @classmethod\n def _path_importer_cache(cls,path):\n ''\n\n\n\n\n \n if path =='':\n try:\n path=_os.getcwd()\n except FileNotFoundError:\n \n \n return None\n try:\n finder=sys.path_importer_cache[path]\n except KeyError:\n finder=cls._path_hooks(path)\n sys.path_importer_cache[path]=finder\n return finder\n \n @classmethod\n def _get_spec(cls,fullname,path,target=None):\n ''\n \n \n namespace_path=[]\n for entry in path:\n if not isinstance(entry,str):\n continue\n finder=cls._path_importer_cache(entry)\n if finder is not None:\n 
spec=finder.find_spec(fullname,target)\n if spec is None:\n continue\n if spec.loader is not None:\n return spec\n portions=spec.submodule_search_locations\n if portions is None:\n raise ImportError('spec missing loader')\n \n \n \n \n namespace_path.extend(portions)\n else:\n spec=_bootstrap.ModuleSpec(fullname,None)\n spec.submodule_search_locations=namespace_path\n return spec\n \n @classmethod\n def find_spec(cls,fullname,path=None,target=None):\n ''\n\n\n \n if path is None:\n path=sys.path\n spec=cls._get_spec(fullname,path,target)\n if spec is None:\n return None\n elif spec.loader is None:\n namespace_path=spec.submodule_search_locations\n if namespace_path:\n \n \n spec.origin=None\n spec.submodule_search_locations=_NamespacePath(fullname,namespace_path,cls._get_spec)\n return spec\n else:\n return None\n else:\n return spec\n \n @staticmethod\n def find_distributions(*args,**kwargs):\n ''\n\n\n\n\n\n\n \n from importlib.metadata import MetadataPathFinder\n return MetadataPathFinder.find_distributions(*args,**kwargs)\n \n \nclass FileFinder:\n\n ''\n\n\n\n\n \n \n def __init__(self,path,*loader_details):\n ''\n\n \n loaders=[]\n for loader,suffixes in loader_details:\n loaders.extend((suffix,loader)for suffix in suffixes)\n self._loaders=loaders\n \n if not path or path =='.':\n self.path=_os.getcwd()\n else:\n self.path=_path_abspath(path)\n self._path_mtime=-1\n self._path_cache=set()\n self._relaxed_path_cache=set()\n \n def invalidate_caches(self):\n ''\n self._path_mtime=-1\n \n def _get_spec(self,loader_class,fullname,path,smsl,target):\n loader=loader_class(fullname,path)\n return spec_from_file_location(fullname,path,loader=loader,\n submodule_search_locations=smsl)\n \n def find_spec(self,fullname,target=None):\n ''\n\n\n \n is_namespace=False\n tail_module=fullname.rpartition('.')[2]\n try:\n mtime=_path_stat(self.path or _os.getcwd()).st_mtime\n except OSError:\n mtime=-1\n if mtime !=self._path_mtime:\n self._fill_cache()\n self._path_mtime=mtime\n \n if _relax_case():\n cache=self._relaxed_path_cache\n cache_module=tail_module.lower()\n else:\n cache=self._path_cache\n cache_module=tail_module\n \n if cache_module in cache:\n base_path=_path_join(self.path,tail_module)\n for suffix,loader_class in self._loaders:\n init_filename='__init__'+suffix\n full_path=_path_join(base_path,init_filename)\n if _path_isfile(full_path):\n return self._get_spec(loader_class,fullname,full_path,[base_path],target)\n else:\n \n \n is_namespace=_path_isdir(base_path)\n \n for suffix,loader_class in self._loaders:\n try:\n full_path=_path_join(self.path,tail_module+suffix)\n except ValueError:\n return None\n _bootstrap._verbose_message('trying {}',full_path,verbosity=2)\n if cache_module+suffix in cache:\n if _path_isfile(full_path):\n return self._get_spec(loader_class,fullname,full_path,\n None,target)\n if is_namespace:\n _bootstrap._verbose_message('possible namespace for {}',base_path)\n spec=_bootstrap.ModuleSpec(fullname,None)\n spec.submodule_search_locations=[base_path]\n return spec\n return None\n \n def _fill_cache(self):\n ''\n path=self.path\n try:\n contents=_os.listdir(path or _os.getcwd())\n except(FileNotFoundError,PermissionError,NotADirectoryError):\n \n \n contents=[]\n \n \n if not sys.platform.startswith('win'):\n self._path_cache=set(contents)\n else:\n \n \n \n \n \n lower_suffix_contents=set()\n for item in contents:\n name,dot,suffix=item.partition('.')\n if dot:\n new_name=f'{name}.{suffix.lower()}'\n else:\n new_name=name\n 
lower_suffix_contents.add(new_name)\n self._path_cache=lower_suffix_contents\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):\n self._relaxed_path_cache={fn.lower()for fn in contents}\n \n @classmethod\n def path_hook(cls,*loader_details):\n ''\n\n\n\n\n\n\n \n def path_hook_for_FileFinder(path):\n ''\n if not _path_isdir(path):\n raise ImportError('only directories are supported',path=path)\n return cls(path,*loader_details)\n \n return path_hook_for_FileFinder\n \n def __repr__(self):\n return f'FileFinder({self.path !r})'\n \n \n \n \ndef _fix_up_module(ns,name,pathname,cpathname=None):\n\n loader=ns.get('__loader__')\n spec=ns.get('__spec__')\n if not loader:\n if spec:\n loader=spec.loader\n elif pathname ==cpathname:\n loader=SourcelessFileLoader(name,pathname)\n else:\n loader=SourceFileLoader(name,pathname)\n if not spec:\n spec=spec_from_file_location(name,pathname,loader=loader)\n if cpathname:\n spec.cached=_path_abspath(cpathname)\n try:\n ns['__spec__']=spec\n ns['__loader__']=loader\n ns['__file__']=pathname\n ns['__cached__']=cpathname\n except Exception:\n \n pass\n \n \ndef _get_supported_file_loaders():\n ''\n\n\n \n extensions=ExtensionFileLoader,_imp.extension_suffixes()\n source=SourceFileLoader,SOURCE_SUFFIXES\n bytecode=SourcelessFileLoader,BYTECODE_SUFFIXES\n return[extensions,source,bytecode]\n \n \ndef _set_bootstrap_module(_bootstrap_module):\n global _bootstrap\n _bootstrap=_bootstrap_module\n \n \ndef _install(_bootstrap_module):\n ''\n _set_bootstrap_module(_bootstrap_module)\n supported_loaders=_get_supported_file_loaders()\n sys.path_hooks.extend([FileFinder.path_hook(*supported_loaders)])\n sys.meta_path.append(PathFinder)\n", ["_imp", "_io", "_warnings", "importlib.metadata", "importlib.readers", "marshal", "nt", "posix", "sys", "tokenize", "winreg"]], "importlib.machinery": [".py", "''\n\nfrom._bootstrap import ModuleSpec\nfrom._bootstrap import BuiltinImporter\nfrom._bootstrap import FrozenImporter\nfrom._bootstrap_external import(SOURCE_SUFFIXES,DEBUG_BYTECODE_SUFFIXES,\nOPTIMIZED_BYTECODE_SUFFIXES,BYTECODE_SUFFIXES,\nEXTENSION_SUFFIXES)\nfrom._bootstrap_external import WindowsRegistryFinder\nfrom._bootstrap_external import PathFinder\nfrom._bootstrap_external import FileFinder\nfrom._bootstrap_external import SourceFileLoader\nfrom._bootstrap_external import SourcelessFileLoader\nfrom._bootstrap_external import ExtensionFileLoader\nfrom._bootstrap_external import NamespaceLoader\n\n\ndef all_suffixes():\n ''\n return SOURCE_SUFFIXES+BYTECODE_SUFFIXES+EXTENSION_SUFFIXES\n", ["importlib._bootstrap", "importlib._bootstrap_external"]], "importlib.simple": [".py", "''\n\n\n\n\n\n\nfrom.resources.simple import(\nSimpleReader,ResourceHandle,ResourceContainer,TraversableReader,\n)\n\n__all__=[\n'SimpleReader','ResourceHandle','ResourceContainer','TraversableReader',\n]\n", ["importlib.resources.simple"]], "importlib.abc": [".py", "''\nfrom. import _bootstrap_external\nfrom. 
import machinery\ntry:\n import _frozen_importlib\nexcept ImportError as exc:\n if exc.name !='_frozen_importlib':\n raise\n _frozen_importlib=None\ntry:\n import _frozen_importlib_external\nexcept ImportError:\n _frozen_importlib_external=_bootstrap_external\nfrom._abc import Loader\nimport abc\nimport warnings\n\nfrom.resources import abc as _resources_abc\n\n\n__all__=[\n'Loader','MetaPathFinder','PathEntryFinder',\n'ResourceLoader','InspectLoader','ExecutionLoader',\n'FileLoader','SourceLoader',\n]\n\n\ndef __getattr__(name):\n ''\n\n\n \n if name in _resources_abc.__all__:\n obj=getattr(_resources_abc,name)\n warnings._deprecated(f\"{__name__}.{name}\",remove=(3,14))\n globals()[name]=obj\n return obj\n raise AttributeError(f'module {__name__ !r} has no attribute {name !r}')\n \n \ndef _register(abstract_cls,*classes):\n for cls in classes:\n abstract_cls.register(cls)\n if _frozen_importlib is not None:\n try:\n frozen_cls=getattr(_frozen_importlib,cls.__name__)\n except AttributeError:\n frozen_cls=getattr(_frozen_importlib_external,cls.__name__)\n abstract_cls.register(frozen_cls)\n \n \nclass MetaPathFinder(metaclass=abc.ABCMeta):\n\n ''\n \n \n \n \n def invalidate_caches(self):\n ''\n\n \n \n_register(MetaPathFinder,machinery.BuiltinImporter,machinery.FrozenImporter,\nmachinery.PathFinder,machinery.WindowsRegistryFinder)\n\n\nclass PathEntryFinder(metaclass=abc.ABCMeta):\n\n ''\n \n def invalidate_caches(self):\n ''\n\n \n \n_register(PathEntryFinder,machinery.FileFinder)\n\n\nclass ResourceLoader(Loader):\n\n ''\n\n\n\n\n \n \n @abc.abstractmethod\n def get_data(self,path):\n ''\n \n raise OSError\n \n \nclass InspectLoader(Loader):\n\n ''\n\n\n\n\n \n \n def is_package(self,fullname):\n ''\n\n\n\n \n raise ImportError\n \n def get_code(self,fullname):\n ''\n\n\n\n\n\n \n source=self.get_source(fullname)\n if source is None:\n return None\n return self.source_to_code(source)\n \n @abc.abstractmethod\n def get_source(self,fullname):\n ''\n\n\n\n \n raise ImportError\n \n @staticmethod\n def source_to_code(data,path=''):\n ''\n\n\n \n return compile(data,path,'exec',dont_inherit=True)\n \n exec_module=_bootstrap_external._LoaderBasics.exec_module\n load_module=_bootstrap_external._LoaderBasics.load_module\n \n_register(InspectLoader,machinery.BuiltinImporter,machinery.FrozenImporter,machinery.NamespaceLoader)\n\n\nclass ExecutionLoader(InspectLoader):\n\n ''\n\n\n\n\n \n \n @abc.abstractmethod\n def get_filename(self,fullname):\n ''\n\n\n\n \n raise ImportError\n \n def get_code(self,fullname):\n ''\n\n\n\n \n source=self.get_source(fullname)\n if source is None:\n return None\n try:\n path=self.get_filename(fullname)\n except ImportError:\n return self.source_to_code(source)\n else:\n return self.source_to_code(source,path)\n \n_register(ExecutionLoader,machinery.ExtensionFileLoader)\n\n\nclass FileLoader(_bootstrap_external.FileLoader,ResourceLoader,ExecutionLoader):\n\n ''\n \n \n_register(FileLoader,machinery.SourceFileLoader,\nmachinery.SourcelessFileLoader)\n\n\nclass SourceLoader(_bootstrap_external.SourceLoader,ResourceLoader,ExecutionLoader):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def path_mtime(self,path):\n ''\n if self.path_stats.__func__ is SourceLoader.path_stats:\n raise OSError\n return int(self.path_stats(path)['mtime'])\n \n def path_stats(self,path):\n ''\n\n\n\n\n \n if self.path_mtime.__func__ is SourceLoader.path_mtime:\n raise OSError\n return{'mtime':self.path_mtime(path)}\n \n def set_data(self,path,data):\n ''\n\n\n\n\n\n\n \n 
\n_register(SourceLoader,machinery.SourceFileLoader)\n", ["_frozen_importlib", "_frozen_importlib_external", "abc", "importlib", "importlib._abc", "importlib._bootstrap_external", "importlib.machinery", "importlib.resources", "importlib.resources.abc", "warnings"]], "importlib.resources.readers": [".py", "import collections\nimport itertools\nimport pathlib\nimport operator\nimport zipfile\n\nfrom. import abc\n\nfrom._itertools import only\n\n\ndef remove_duplicates(items):\n return iter(collections.OrderedDict.fromkeys(items))\n \n \nclass FileReader(abc.TraversableResources):\n def __init__(self,loader):\n self.path=pathlib.Path(loader.path).parent\n \n def resource_path(self,resource):\n ''\n\n\n\n \n return str(self.path.joinpath(resource))\n \n def files(self):\n return self.path\n \n \nclass ZipReader(abc.TraversableResources):\n def __init__(self,loader,module):\n _,_,name=module.rpartition('.')\n self.prefix=loader.prefix.replace('\\\\','/')+name+'/'\n self.archive=loader.archive\n \n def open_resource(self,resource):\n try:\n return super().open_resource(resource)\n except KeyError as exc:\n raise FileNotFoundError(exc.args[0])\n \n def is_resource(self,path):\n ''\n\n\n \n target=self.files().joinpath(path)\n return target.is_file()and target.exists()\n \n def files(self):\n return zipfile.Path(self.archive,self.prefix)\n \n \nclass MultiplexedPath(abc.Traversable):\n ''\n\n\n\n\n \n \n def __init__(self,*paths):\n self._paths=list(map(pathlib.Path,remove_duplicates(paths)))\n if not self._paths:\n message='MultiplexedPath must contain at least one path'\n raise FileNotFoundError(message)\n if not all(path.is_dir()for path in self._paths):\n raise NotADirectoryError('MultiplexedPath only supports directories')\n \n def iterdir(self):\n children=(child for path in self._paths for child in path.iterdir())\n by_name=operator.attrgetter('name')\n groups=itertools.groupby(sorted(children,key=by_name),key=by_name)\n return map(self._follow,(locs for name,locs in groups))\n \n def read_bytes(self):\n raise FileNotFoundError(f'{self} is not a file')\n \n def read_text(self,*args,**kwargs):\n raise FileNotFoundError(f'{self} is not a file')\n \n def is_dir(self):\n return True\n \n def is_file(self):\n return False\n \n def joinpath(self,*descendants):\n try:\n return super().joinpath(*descendants)\n except abc.TraversalError:\n \n \n return self._paths[0].joinpath(*descendants)\n \n @classmethod\n def _follow(cls,children):\n ''\n\n\n\n\n\n \n subdirs,one_dir,one_file=itertools.tee(children,3)\n \n try:\n return only(one_dir)\n except ValueError:\n try:\n return cls(*subdirs)\n except NotADirectoryError:\n return next(one_file)\n \n def open(self,*args,**kwargs):\n raise FileNotFoundError(f'{self} is not a file')\n \n @property\n def name(self):\n return self._paths[0].name\n \n def __repr__(self):\n paths=', '.join(f\"'{path}'\"for path in self._paths)\n return f'MultiplexedPath({paths})'\n \n \nclass NamespaceReader(abc.TraversableResources):\n def __init__(self,namespace_path):\n if 'NamespacePath'not in str(namespace_path):\n raise ValueError('Invalid path')\n self.path=MultiplexedPath(*list(namespace_path))\n \n def resource_path(self,resource):\n ''\n\n\n\n \n return str(self.path.joinpath(resource))\n \n def files(self):\n return self.path\n", ["collections", "importlib.resources", "importlib.resources._itertools", "importlib.resources.abc", "itertools", "operator", "pathlib", "zipfile"]], "importlib.resources._common": [".py", "import os\nimport pathlib\nimport tempfile\nimport 
functools\nimport contextlib\nimport types\nimport importlib\nimport inspect\nimport warnings\nimport itertools\n\nfrom typing import Union,Optional,cast\nfrom.abc import ResourceReader,Traversable\n\nfrom._adapters import wrap_spec\n\nPackage=Union[types.ModuleType,str]\nAnchor=Package\n\n\ndef package_to_anchor(func):\n ''\n\n\n\n\n\n\n\n \n undefined=object()\n \n @functools.wraps(func)\n def wrapper(anchor=undefined,package=undefined):\n if package is not undefined:\n if anchor is not undefined:\n return func(anchor,package)\n warnings.warn(\n \"First parameter to files is renamed to 'anchor'\",\n DeprecationWarning,\n stacklevel=2,\n )\n return func(package)\n elif anchor is undefined:\n return func()\n return func(anchor)\n \n return wrapper\n \n \n@package_to_anchor\ndef files(anchor:Optional[Anchor]=None)->Traversable:\n ''\n\n \n return from_package(resolve(anchor))\n \n \ndef get_resource_reader(package:types.ModuleType)->Optional[ResourceReader]:\n ''\n\n \n \n \n \n \n \n spec=package.__spec__\n reader=getattr(spec.loader,'get_resource_reader',None)\n if reader is None:\n return None\n return reader(spec.name)\n \n \n@functools.singledispatch\ndef resolve(cand:Optional[Anchor])->types.ModuleType:\n return cast(types.ModuleType,cand)\n \n \n@resolve.register\ndef _(cand:str)->types.ModuleType:\n return importlib.import_module(cand)\n \n \n@resolve.register\ndef _(cand:None)->types.ModuleType:\n return resolve(_infer_caller().f_globals['__name__'])\n \n \ndef _infer_caller():\n ''\n\n \n \n def is_this_file(frame_info):\n return frame_info.filename ==__file__\n \n def is_wrapper(frame_info):\n return frame_info.function =='wrapper'\n \n not_this_file=itertools.filterfalse(is_this_file,inspect.stack())\n \n callers=itertools.filterfalse(is_wrapper,not_this_file)\n return next(callers).frame\n \n \ndef from_package(package:types.ModuleType):\n ''\n\n\n \n spec=wrap_spec(package)\n reader=spec.loader.get_resource_reader(spec.name)\n return reader.files()\n \n \n@contextlib.contextmanager\ndef _tempfile(\nreader,\nsuffix='',\n\n\n*,\n_os_remove=os.remove,\n):\n\n\n\n fd,raw_path=tempfile.mkstemp(suffix=suffix)\n try:\n try:\n os.write(fd,reader())\n finally:\n os.close(fd)\n del reader\n yield pathlib.Path(raw_path)\n finally:\n try:\n _os_remove(raw_path)\n except FileNotFoundError:\n pass\n \n \ndef _temp_file(path):\n return _tempfile(path.read_bytes,suffix=path.name)\n \n \ndef _is_present_dir(path:Traversable)->bool:\n ''\n\n\n\n\n\n \n with contextlib.suppress(FileNotFoundError):\n return path.is_dir()\n return False\n \n \n@functools.singledispatch\ndef as_file(path):\n ''\n\n\n \n return _temp_dir(path)if _is_present_dir(path)else _temp_file(path)\n \n \n@as_file.register(pathlib.Path)\n@contextlib.contextmanager\ndef _(path):\n ''\n\n \n yield path\n \n \n@contextlib.contextmanager\ndef _temp_path(dir:tempfile.TemporaryDirectory):\n ''\n\n \n with dir as result:\n yield pathlib.Path(result)\n \n \n@contextlib.contextmanager\ndef _temp_dir(path):\n ''\n\n\n \n assert path.is_dir()\n with _temp_path(tempfile.TemporaryDirectory())as temp_dir:\n yield _write_contents(temp_dir,path)\n \n \ndef _write_contents(target,source):\n child=target.joinpath(source.name)\n if source.is_dir():\n child.mkdir()\n for item in source.iterdir():\n _write_contents(child,item)\n else:\n child.write_bytes(source.read_bytes())\n return child\n", ["contextlib", "functools", "importlib", "importlib.resources._adapters", "importlib.resources.abc", "inspect", "itertools", "os", "pathlib", "tempfile", 
"types", "typing", "warnings"]], "importlib.resources": [".py", "''\n\nfrom._common import(\nas_file,\nfiles,\nPackage,\n)\n\nfrom._legacy import(\ncontents,\nopen_binary,\nread_binary,\nopen_text,\nread_text,\nis_resource,\npath,\nResource,\n)\n\nfrom.abc import ResourceReader\n\n\n__all__=[\n'Package',\n'Resource',\n'ResourceReader',\n'as_file',\n'contents',\n'files',\n'is_resource',\n'open_binary',\n'open_text',\n'path',\n'read_binary',\n'read_text',\n]\n", ["importlib.resources._common", "importlib.resources._legacy", "importlib.resources.abc"], 1], "importlib.resources._itertools": [".py", "\ndef only(iterable,default=None,too_long=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n it=iter(iterable)\n first_value=next(it,default)\n \n try:\n second_value=next(it)\n except StopIteration:\n pass\n else:\n msg=(\n 'Expected exactly one item in iterable, but got {!r}, {!r}, '\n 'and perhaps more.'.format(first_value,second_value)\n )\n raise too_long or ValueError(msg)\n \n return first_value\n", []], "importlib.resources._adapters": [".py", "from contextlib import suppress\nfrom io import TextIOWrapper\n\nfrom. import abc\n\n\nclass SpecLoaderAdapter:\n ''\n\n \n \n def __init__(self,spec,adapter=lambda spec:spec.loader):\n self.spec=spec\n self.loader=adapter(spec)\n \n def __getattr__(self,name):\n return getattr(self.spec,name)\n \n \nclass TraversableResourcesLoader:\n ''\n\n \n \n def __init__(self,spec):\n self.spec=spec\n \n def get_resource_reader(self,name):\n return CompatibilityFiles(self.spec)._native()\n \n \ndef _io_wrapper(file,mode='r',*args,**kwargs):\n if mode =='r':\n return TextIOWrapper(file,*args,**kwargs)\n elif mode =='rb':\n return file\n raise ValueError(f\"Invalid mode value '{mode}', only 'r' and 'rb' are supported\")\n \n \nclass CompatibilityFiles:\n ''\n\n\n \n \n class SpecPath(abc.Traversable):\n ''\n\n\n \n \n def __init__(self,spec,reader):\n self._spec=spec\n self._reader=reader\n \n def iterdir(self):\n if not self._reader:\n return iter(())\n return iter(\n CompatibilityFiles.ChildPath(self._reader,path)\n for path in self._reader.contents()\n )\n \n def is_file(self):\n return False\n \n is_dir=is_file\n \n def joinpath(self,other):\n if not self._reader:\n return CompatibilityFiles.OrphanPath(other)\n return CompatibilityFiles.ChildPath(self._reader,other)\n \n @property\n def name(self):\n return self._spec.name\n \n def open(self,mode='r',*args,**kwargs):\n return _io_wrapper(self._reader.open_resource(None),mode,*args,**kwargs)\n \n class ChildPath(abc.Traversable):\n ''\n\n\n \n \n def __init__(self,reader,name):\n self._reader=reader\n self._name=name\n \n def iterdir(self):\n return iter(())\n \n def is_file(self):\n return self._reader.is_resource(self.name)\n \n def is_dir(self):\n return not self.is_file()\n \n def joinpath(self,other):\n return CompatibilityFiles.OrphanPath(self.name,other)\n \n @property\n def name(self):\n return self._name\n \n def open(self,mode='r',*args,**kwargs):\n return _io_wrapper(\n self._reader.open_resource(self.name),mode,*args,**kwargs\n )\n \n class OrphanPath(abc.Traversable):\n ''\n\n\n \n \n def __init__(self,*path_parts):\n if len(path_parts)<1:\n raise ValueError('Need at least one path part to construct a path')\n self._path=path_parts\n \n def iterdir(self):\n return iter(())\n \n def is_file(self):\n return False\n \n is_dir=is_file\n \n def joinpath(self,other):\n return CompatibilityFiles.OrphanPath(*self._path,other)\n \n @property\n def name(self):\n return self._path[-1]\n \n def 
open(self,mode='r',*args,**kwargs):\n raise FileNotFoundError(\"Can't open orphan path\")\n \n def __init__(self,spec):\n self.spec=spec\n \n @property\n def _reader(self):\n with suppress(AttributeError):\n return self.spec.loader.get_resource_reader(self.spec.name)\n \n def _native(self):\n ''\n\n \n reader=self._reader\n return reader if hasattr(reader,'files')else self\n \n def __getattr__(self,attr):\n return getattr(self._reader,attr)\n \n def files(self):\n return CompatibilityFiles.SpecPath(self.spec,self._reader)\n \n \ndef wrap_spec(package):\n ''\n\n\n \n return SpecLoaderAdapter(package.__spec__,TraversableResourcesLoader)\n", ["contextlib", "importlib.resources", "importlib.resources.abc", "io"]], "importlib.resources._legacy": [".py", "import functools\nimport os\nimport pathlib\nimport types\nimport warnings\n\nfrom typing import Union,Iterable,ContextManager,BinaryIO,TextIO,Any\n\nfrom. import _common\n\nPackage=Union[types.ModuleType,str]\nResource=str\n\n\ndef deprecated(func):\n @functools.wraps(func)\n def wrapper(*args,**kwargs):\n warnings.warn(\n f\"{func.__name__} is deprecated. Use files() instead. \"\n \"Refer to https://importlib-resources.readthedocs.io\"\n \"/en/latest/using.html#migrating-from-legacy for migration advice.\",\n DeprecationWarning,\n stacklevel=2,\n )\n return func(*args,**kwargs)\n \n return wrapper\n \n \ndef normalize_path(path:Any)->str:\n ''\n\n\n \n str_path=str(path)\n parent,file_name=os.path.split(str_path)\n if parent:\n raise ValueError(f'{path !r} must be only a file name')\n return file_name\n \n \n@deprecated\ndef open_binary(package:Package,resource:Resource)->BinaryIO:\n ''\n return(_common.files(package)/normalize_path(resource)).open('rb')\n \n \n@deprecated\ndef read_binary(package:Package,resource:Resource)->bytes:\n ''\n return(_common.files(package)/normalize_path(resource)).read_bytes()\n \n \n@deprecated\ndef open_text(\npackage:Package,\nresource:Resource,\nencoding:str='utf-8',\nerrors:str='strict',\n)->TextIO:\n ''\n return(_common.files(package)/normalize_path(resource)).open(\n 'r',encoding=encoding,errors=errors\n )\n \n \n@deprecated\ndef read_text(\npackage:Package,\nresource:Resource,\nencoding:str='utf-8',\nerrors:str='strict',\n)->str:\n ''\n\n\n\n \n with open_text(package,resource,encoding,errors)as fp:\n return fp.read()\n \n \n@deprecated\ndef contents(package:Package)->Iterable[str]:\n ''\n\n\n\n\n \n return[path.name for path in _common.files(package).iterdir()]\n \n \n@deprecated\ndef is_resource(package:Package,name:str)->bool:\n ''\n\n\n \n resource=normalize_path(name)\n return any(\n traversable.name ==resource and traversable.is_file()\n for traversable in _common.files(package).iterdir()\n )\n \n \n@deprecated\ndef path(\npackage:Package,\nresource:Resource,\n)->ContextManager[pathlib.Path]:\n ''\n\n\n\n\n\n\n \n return _common.as_file(_common.files(package)/normalize_path(resource))\n", ["functools", "importlib.resources", "importlib.resources._common", "os", "pathlib", "types", "typing", "warnings"]], "importlib.resources.simple": [".py", "''\n\n\n\nimport abc\nimport io\nimport itertools\nfrom typing import BinaryIO,List\n\nfrom.abc import Traversable,TraversableResources\n\n\nclass SimpleReader(abc.ABC):\n ''\n\n\n \n \n @property\n @abc.abstractmethod\n def package(self)->str:\n ''\n\n \n \n @abc.abstractmethod\n def children(self)->List['SimpleReader']:\n ''\n\n\n \n \n @abc.abstractmethod\n def resources(self)->List[str]:\n ''\n\n \n \n @abc.abstractmethod\n def 
open_binary(self,resource:str)->BinaryIO:\n ''\n\n \n \n @property\n def name(self):\n return self.package.split('.')[-1]\n \n \nclass ResourceContainer(Traversable):\n ''\n\n \n \n def __init__(self,reader:SimpleReader):\n self.reader=reader\n \n def is_dir(self):\n return True\n \n def is_file(self):\n return False\n \n def iterdir(self):\n files=(ResourceHandle(self,name)for name in self.reader.resources)\n dirs=map(ResourceContainer,self.reader.children())\n return itertools.chain(files,dirs)\n \n def open(self,*args,**kwargs):\n raise IsADirectoryError()\n \n \nclass ResourceHandle(Traversable):\n ''\n\n \n \n def __init__(self,parent:ResourceContainer,name:str):\n self.parent=parent\n self.name=name\n \n def is_file(self):\n return True\n \n def is_dir(self):\n return False\n \n def open(self,mode='r',*args,**kwargs):\n stream=self.parent.reader.open_binary(self.name)\n if 'b'not in mode:\n stream=io.TextIOWrapper(*args,**kwargs)\n return stream\n \n def joinpath(self,name):\n raise RuntimeError(\"Cannot traverse into a resource\")\n \n \nclass TraversableReader(TraversableResources,SimpleReader):\n ''\n\n\n\n \n \n def files(self):\n return ResourceContainer(self)\n", ["abc", "importlib.resources.abc", "io", "itertools", "typing"]], "importlib.resources.abc": [".py", "import abc\nimport io\nimport itertools\nimport os\nimport pathlib\nfrom typing import Any,BinaryIO,Iterable,Iterator,NoReturn,Text,Optional\nfrom typing import runtime_checkable,Protocol\nfrom typing import Union\n\n\nStrPath=Union[str,os.PathLike[str]]\n\n__all__=[\"ResourceReader\",\"Traversable\",\"TraversableResources\"]\n\n\nclass ResourceReader(metaclass=abc.ABCMeta):\n ''\n \n @abc.abstractmethod\n def open_resource(self,resource:Text)->BinaryIO:\n ''\n\n\n\n \n \n \n \n raise FileNotFoundError\n \n @abc.abstractmethod\n def resource_path(self,resource:Text)->Text:\n ''\n\n\n\n\n \n \n \n \n raise FileNotFoundError\n \n @abc.abstractmethod\n def is_resource(self,path:Text)->bool:\n ''\n\n\n \n raise FileNotFoundError\n \n @abc.abstractmethod\n def contents(self)->Iterable[str]:\n ''\n raise FileNotFoundError\n \n \nclass TraversalError(Exception):\n pass\n \n \n@runtime_checkable\nclass Traversable(Protocol):\n ''\n\n\n\n\n\n \n \n @abc.abstractmethod\n def iterdir(self)->Iterator[\"Traversable\"]:\n ''\n\n \n \n def read_bytes(self)->bytes:\n ''\n\n \n with self.open('rb')as strm:\n return strm.read()\n \n def read_text(self,encoding:Optional[str]=None)->str:\n ''\n\n \n with self.open(encoding=encoding)as strm:\n return strm.read()\n \n @abc.abstractmethod\n def is_dir(self)->bool:\n ''\n\n \n \n @abc.abstractmethod\n def is_file(self)->bool:\n ''\n\n \n \n def joinpath(self,*descendants:StrPath)->\"Traversable\":\n ''\n\n\n\n\n\n \n if not descendants:\n return self\n names=itertools.chain.from_iterable(\n path.parts for path in map(pathlib.PurePosixPath,descendants)\n )\n target=next(names)\n matches=(\n traversable for traversable in self.iterdir()if traversable.name ==target\n )\n try:\n match=next(matches)\n except StopIteration:\n raise TraversalError(\n \"Target not found during traversal.\",target,list(names)\n )\n return match.joinpath(*names)\n \n def __truediv__(self,child:StrPath)->\"Traversable\":\n ''\n\n \n return self.joinpath(child)\n \n @abc.abstractmethod\n def open(self,mode='r',*args,**kwargs):\n ''\n\n\n\n\n\n \n \n @property\n @abc.abstractmethod\n def name(self)->str:\n ''\n\n \n \n \nclass TraversableResources(ResourceReader):\n ''\n\n\n \n \n @abc.abstractmethod\n def 
files(self)->\"Traversable\":\n ''\n \n def open_resource(self,resource:StrPath)->io.BufferedReader:\n return self.files().joinpath(resource).open('rb')\n \n def resource_path(self,resource:Any)->NoReturn:\n raise FileNotFoundError(resource)\n \n def is_resource(self,path:StrPath)->bool:\n return self.files().joinpath(path).is_file()\n \n def contents(self)->Iterator[str]:\n return(item.name for item in self.files().iterdir())\n", ["abc", "io", "itertools", "os", "pathlib", "typing"]], "importlib.metadata._meta": [".py", "from typing import Protocol\nfrom typing import Any,Dict,Iterator,List,Optional,TypeVar,Union,overload\n\n\n_T=TypeVar(\"_T\")\n\n\nclass PackageMetadata(Protocol):\n def __len__(self)->int:\n ...\n \n def __contains__(self,item:str)->bool:\n ...\n \n def __getitem__(self,key:str)->str:\n ...\n \n def __iter__(self)->Iterator[str]:\n ...\n \n @overload\n def get(self,name:str,failobj:None=None)->Optional[str]:\n ...\n \n @overload\n def get(self,name:str,failobj:_T)->Union[str,_T]:\n ...\n \n \n @overload\n def get_all(self,name:str,failobj:None=None)->Optional[List[Any]]:\n ...\n \n @overload\n def get_all(self,name:str,failobj:_T)->Union[List[Any],_T]:\n ''\n\n \n \n @property\n def json(self)->Dict[str,Union[str,List[str]]]:\n ''\n\n \n \n \nclass SimplePath(Protocol[_T]):\n ''\n\n \n \n def joinpath(self)->_T:\n ...\n \n def __truediv__(self,other:Union[str,_T])->_T:\n ...\n \n @property\n def parent(self)->_T:\n ...\n \n def read_text(self)->str:\n ...\n", ["typing"]], "importlib.metadata._text": [".py", "import re\n\nfrom._functools import method_cache\n\n\n\nclass FoldedCase(str):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __lt__(self,other):\n return self.lower()other.lower()\n \n def __eq__(self,other):\n return self.lower()==other.lower()\n \n def __ne__(self,other):\n return self.lower()!=other.lower()\n \n def __hash__(self):\n return hash(self.lower())\n \n def __contains__(self,other):\n return super().lower().__contains__(other.lower())\n \n def in_(self,other):\n ''\n return self in FoldedCase(other)\n \n \n @method_cache\n def lower(self):\n return super().lower()\n \n def index(self,sub):\n return self.lower().index(sub.lower())\n \n def split(self,splitter=' ',maxsplit=0):\n pattern=re.compile(re.escape(splitter),re.I)\n return pattern.split(self,maxsplit)\n", ["importlib.metadata._functools", "re"]], "importlib.metadata": [".py", "import os\nimport re\nimport abc\nimport csv\nimport sys\nimport email\nimport pathlib\nimport zipfile\nimport operator\nimport textwrap\nimport warnings\nimport functools\nimport itertools\nimport posixpath\nimport contextlib\nimport collections\nimport inspect\n\nfrom. 
import _adapters,_meta\nfrom._collections import FreezableDefaultDict,Pair\nfrom._functools import method_cache,pass_none\nfrom._itertools import always_iterable,unique_everseen\nfrom._meta import PackageMetadata,SimplePath\n\nfrom contextlib import suppress\nfrom importlib import import_module\nfrom importlib.abc import MetaPathFinder\nfrom itertools import starmap\nfrom typing import List,Mapping,Optional,cast\n\n\n__all__=[\n'Distribution',\n'DistributionFinder',\n'PackageMetadata',\n'PackageNotFoundError',\n'distribution',\n'distributions',\n'entry_points',\n'files',\n'metadata',\n'packages_distributions',\n'requires',\n'version',\n]\n\n\nclass PackageNotFoundError(ModuleNotFoundError):\n ''\n \n def __str__(self):\n return f\"No package metadata was found for {self.name}\"\n \n @property\n def name(self):\n (name,)=self.args\n return name\n \n \nclass Sectioned:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n _sample=textwrap.dedent(\n \"\"\"\n [sec1]\n # comments ignored\n a = 1\n b = 2\n\n [sec2]\n a = 2\n \"\"\"\n ).lstrip()\n \n @classmethod\n def section_pairs(cls,text):\n return(\n section._replace(value=Pair.parse(section.value))\n for section in cls.read(text,filter_=cls.valid)\n if section.name is not None\n )\n \n @staticmethod\n def read(text,filter_=None):\n lines=filter(filter_,map(str.strip,text.splitlines()))\n name=None\n for value in lines:\n section_match=value.startswith('[')and value.endswith(']')\n if section_match:\n name=value.strip('[]')\n continue\n yield Pair(name,value)\n \n @staticmethod\n def valid(line):\n return line and not line.startswith('#')\n \n \nclass DeprecatedTuple:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n _warn=functools.partial(\n warnings.warn,\n \"EntryPoint tuple interface is deprecated. Access members by name.\",\n DeprecationWarning,\n stacklevel=2,\n )\n \n def __getitem__(self,item):\n self._warn()\n return self._key()[item]\n \n \nclass EntryPoint(DeprecatedTuple):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n pattern=re.compile(\n r'(?P[\\w.]+)\\s*'\n r'(:\\s*(?P[\\w.]+)\\s*)?'\n r'((?P\\[.*\\])\\s*)?$'\n )\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n name:str\n value:str\n group:str\n \n dist:Optional['Distribution']=None\n \n def __init__(self,name,value,group):\n vars(self).update(name=name,value=value,group=group)\n \n def load(self):\n ''\n\n\n \n match=self.pattern.match(self.value)\n module=import_module(match.group('module'))\n attrs=filter(None,(match.group('attr')or '').split('.'))\n return functools.reduce(getattr,attrs,module)\n \n @property\n def module(self):\n match=self.pattern.match(self.value)\n return match.group('module')\n \n @property\n def attr(self):\n match=self.pattern.match(self.value)\n return match.group('attr')\n \n @property\n def extras(self):\n match=self.pattern.match(self.value)\n return re.findall(r'\\w+',match.group('extras')or '')\n \n def _for(self,dist):\n vars(self).update(dist=dist)\n return self\n \n def matches(self,**params):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n attrs=(getattr(self,param)for param in params)\n return all(map(operator.eq,params.values(),attrs))\n \n def _key(self):\n return self.name,self.value,self.group\n \n def __lt__(self,other):\n return self._key()'\n \n \nclass DeprecatedNonAbstract:\n def __new__(cls,*args,**kwargs):\n all_names={\n name for subclass in inspect.getmro(cls)for name in vars(subclass)\n }\n abstract={\n name\n for name in all_names\n if getattr(getattr(cls,name),'__isabstractmethod__',False)\n }\n if abstract:\n warnings.warn(\n f\"Unimplemented 
abstract methods {abstract}\",\n DeprecationWarning,\n stacklevel=2,\n )\n return super().__new__(cls)\n \n \nclass Distribution(DeprecatedNonAbstract):\n ''\n \n @abc.abstractmethod\n def read_text(self,filename)->Optional[str]:\n ''\n\n\n\n \n \n @abc.abstractmethod\n def locate_file(self,path):\n ''\n\n\n \n \n @classmethod\n def from_name(cls,name:str):\n ''\n\n\n\n\n\n\n\n \n if not name:\n raise ValueError(\"A distribution name is required.\")\n try:\n return next(cls.discover(name=name))\n except StopIteration:\n raise PackageNotFoundError(name)\n \n @classmethod\n def discover(cls,**kwargs):\n ''\n\n\n\n\n\n\n \n context=kwargs.pop('context',None)\n if context and kwargs:\n raise ValueError(\"cannot accept context and kwargs\")\n context=context or DistributionFinder.Context(**kwargs)\n return itertools.chain.from_iterable(\n resolver(context)for resolver in cls._discover_resolvers()\n )\n \n @staticmethod\n def at(path):\n ''\n\n\n\n \n return PathDistribution(pathlib.Path(path))\n \n @staticmethod\n def _discover_resolvers():\n ''\n declared=(\n getattr(finder,'find_distributions',None)for finder in sys.meta_path\n )\n return filter(None,declared)\n \n @property\n def metadata(self)->_meta.PackageMetadata:\n ''\n\n\n\n \n opt_text=(\n self.read_text('METADATA')\n or self.read_text('PKG-INFO')\n \n \n \n or self.read_text('')\n )\n text=cast(str,opt_text)\n return _adapters.Message(email.message_from_string(text))\n \n @property\n def name(self):\n ''\n return self.metadata['Name']\n \n @property\n def _normalized_name(self):\n ''\n return Prepared.normalize(self.name)\n \n @property\n def version(self):\n ''\n return self.metadata['Version']\n \n @property\n def entry_points(self):\n return EntryPoints._from_text_for(self.read_text('entry_points.txt'),self)\n \n @property\n def files(self):\n ''\n\n\n\n\n\n\n\n \n \n def make_file(name,hash=None,size_str=None):\n result=PackagePath(name)\n result.hash=FileHash(hash)if hash else None\n result.size=int(size_str)if size_str else None\n result.dist=self\n return result\n \n @pass_none\n def make_files(lines):\n return starmap(make_file,csv.reader(lines))\n \n @pass_none\n def skip_missing_files(package_paths):\n return list(filter(lambda path:path.locate().exists(),package_paths))\n \n return skip_missing_files(\n make_files(\n self._read_files_distinfo()\n or self._read_files_egginfo_installed()\n or self._read_files_egginfo_sources()\n )\n )\n \n def _read_files_distinfo(self):\n ''\n\n \n text=self.read_text('RECORD')\n return text and text.splitlines()\n \n def _read_files_egginfo_installed(self):\n ''\n\n\n\n\n\n\n\n\n \n text=self.read_text('installed-files.txt')\n \n \n \n subdir=getattr(self,'_path',None)\n if not text or not subdir:\n return\n \n paths=(\n (subdir /name)\n .resolve()\n .relative_to(self.locate_file('').resolve())\n .as_posix()\n for name in text.splitlines()\n )\n return map('\"{}\"'.format,paths)\n \n def _read_files_egginfo_sources(self):\n ''\n\n\n\n\n\n\n\n\n\n \n text=self.read_text('SOURCES.txt')\n return text and map('\"{}\"'.format,text.splitlines())\n \n @property\n def requires(self):\n ''\n reqs=self._read_dist_info_reqs()or self._read_egg_info_reqs()\n return reqs and list(reqs)\n \n def _read_dist_info_reqs(self):\n return self.metadata.get_all('Requires-Dist')\n \n def _read_egg_info_reqs(self):\n source=self.read_text('requires.txt')\n return pass_none(self._deps_from_requires_text)(source)\n \n @classmethod\n def _deps_from_requires_text(cls,source):\n return 
cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source))\n \n @staticmethod\n def _convert_egg_info_reqs_to_simple_reqs(sections):\n ''\n\n\n\n\n\n\n\n \n \n def make_condition(name):\n return name and f'extra == \"{name}\"'\n \n def quoted_marker(section):\n section=section or ''\n extra,sep,markers=section.partition(':')\n if extra and markers:\n markers=f'({markers})'\n conditions=list(filter(None,[markers,make_condition(extra)]))\n return '; '+' and '.join(conditions)if conditions else ''\n \n def url_req_space(req):\n ''\n\n\n \n \n return ' '*('@'in req)\n \n for section in sections:\n space=url_req_space(section.value)\n yield section.value+space+quoted_marker(section.name)\n \n \nclass DistributionFinder(MetaPathFinder):\n ''\n\n \n \n class Context:\n ''\n\n\n\n\n\n\n\n\n \n \n name=None\n ''\n\n\n \n \n def __init__(self,**kwargs):\n vars(self).update(kwargs)\n \n @property\n def path(self):\n ''\n\n\n\n\n\n \n return vars(self).get('path',sys.path)\n \n @abc.abstractmethod\n def find_distributions(self,context=Context()):\n ''\n\n\n\n\n\n \n \n \nclass FastPath:\n ''\n\n\n\n\n\n \n \n @functools.lru_cache()\n def __new__(cls,root):\n return super().__new__(cls)\n \n def __init__(self,root):\n self.root=root\n \n def joinpath(self,child):\n return pathlib.Path(self.root,child)\n \n def children(self):\n with suppress(Exception):\n return os.listdir(self.root or '.')\n with suppress(Exception):\n return self.zip_children()\n return[]\n \n def zip_children(self):\n zip_path=zipfile.Path(self.root)\n names=zip_path.root.namelist()\n self.joinpath=zip_path.joinpath\n \n return dict.fromkeys(child.split(posixpath.sep,1)[0]for child in names)\n \n def search(self,name):\n return self.lookup(self.mtime).search(name)\n \n @property\n def mtime(self):\n with suppress(OSError):\n return os.stat(self.root).st_mtime\n self.lookup.cache_clear()\n \n @method_cache\n def lookup(self,mtime):\n return Lookup(self)\n \n \nclass Lookup:\n def __init__(self,path:FastPath):\n base=os.path.basename(path.root).lower()\n base_is_egg=base.endswith(\".egg\")\n self.infos=FreezableDefaultDict(list)\n self.eggs=FreezableDefaultDict(list)\n \n for child in path.children():\n low=child.lower()\n if low.endswith((\".dist-info\",\".egg-info\")):\n \n name=low.rpartition(\".\")[0].partition(\"-\")[0]\n normalized=Prepared.normalize(name)\n self.infos[normalized].append(path.joinpath(child))\n elif base_is_egg and low ==\"egg-info\":\n name=base.rpartition(\".\")[0].partition(\"-\")[0]\n legacy_normalized=Prepared.legacy_normalize(name)\n self.eggs[legacy_normalized].append(path.joinpath(child))\n \n self.infos.freeze()\n self.eggs.freeze()\n \n def search(self,prepared):\n infos=(\n self.infos[prepared.normalized]\n if prepared\n else itertools.chain.from_iterable(self.infos.values())\n )\n eggs=(\n self.eggs[prepared.legacy_normalized]\n if prepared\n else itertools.chain.from_iterable(self.eggs.values())\n )\n return itertools.chain(infos,eggs)\n \n \nclass Prepared:\n ''\n\n \n \n normalized=None\n legacy_normalized=None\n \n def __init__(self,name):\n self.name=name\n if name is None:\n return\n self.normalized=self.normalize(name)\n self.legacy_normalized=self.legacy_normalize(name)\n \n @staticmethod\n def normalize(name):\n ''\n\n \n return re.sub(r\"[-_.]+\",\"-\",name).lower().replace('-','_')\n \n @staticmethod\n def legacy_normalize(name):\n ''\n\n\n \n return name.lower().replace('-','_')\n \n def __bool__(self):\n return bool(self.name)\n \n \nclass MetadataPathFinder(DistributionFinder):\n 
@classmethod\n def find_distributions(cls,context=DistributionFinder.Context()):\n ''\n\n\n\n\n\n\n \n found=cls._search_paths(context.name,context.path)\n return map(PathDistribution,found)\n \n @classmethod\n def _search_paths(cls,name,paths):\n ''\n prepared=Prepared(name)\n return itertools.chain.from_iterable(\n path.search(prepared)for path in map(FastPath,paths)\n )\n \n def invalidate_caches(cls):\n FastPath.__new__.cache_clear()\n \n \nclass PathDistribution(Distribution):\n def __init__(self,path:SimplePath):\n ''\n\n\n \n self._path=path\n \n def read_text(self,filename):\n with suppress(\n FileNotFoundError,\n IsADirectoryError,\n KeyError,\n NotADirectoryError,\n PermissionError,\n ):\n return self._path.joinpath(filename).read_text(encoding='utf-8')\n \n read_text.__doc__=Distribution.read_text.__doc__\n \n def locate_file(self,path):\n return self._path.parent /path\n \n @property\n def _normalized_name(self):\n ''\n\n\n \n stem=os.path.basename(str(self._path))\n return(\n pass_none(Prepared.normalize)(self._name_from_stem(stem))\n or super()._normalized_name\n )\n \n @staticmethod\n def _name_from_stem(stem):\n ''\n\n\n\n\n\n\n\n \n filename,ext=os.path.splitext(stem)\n if ext not in('.dist-info','.egg-info'):\n return\n name,sep,rest=filename.partition('-')\n return name\n \n \ndef distribution(distribution_name):\n ''\n\n\n\n \n return Distribution.from_name(distribution_name)\n \n \ndef distributions(**kwargs):\n ''\n\n\n \n return Distribution.discover(**kwargs)\n \n \ndef metadata(distribution_name)->_meta.PackageMetadata:\n ''\n\n\n\n \n return Distribution.from_name(distribution_name).metadata\n \n \ndef version(distribution_name):\n ''\n\n\n\n\n \n return distribution(distribution_name).version\n \n \n_unique=functools.partial(\nunique_everseen,\nkey=operator.attrgetter('_normalized_name'),\n)\n''\n\n\n\n\ndef entry_points(**params)->EntryPoints:\n ''\n\n\n\n\n\n\n \n eps=itertools.chain.from_iterable(\n dist.entry_points for dist in _unique(distributions())\n )\n return EntryPoints(eps).select(**params)\n \n \ndef files(distribution_name):\n ''\n\n\n\n \n return distribution(distribution_name).files\n \n \ndef requires(distribution_name):\n ''\n\n\n\n\n \n return distribution(distribution_name).requires\n \n \ndef packages_distributions()->Mapping[str,List[str]]:\n ''\n\n\n\n\n\n\n\n \n pkg_to_dist=collections.defaultdict(list)\n for dist in distributions():\n for pkg in _top_level_declared(dist)or _top_level_inferred(dist):\n pkg_to_dist[pkg].append(dist.metadata['Name'])\n return dict(pkg_to_dist)\n \n \ndef _top_level_declared(dist):\n return(dist.read_text('top_level.txt')or '').split()\n \n \ndef _top_level_inferred(dist):\n opt_names={\n f.parts[0]if len(f.parts)>1 else inspect.getmodulename(f)\n for f in always_iterable(dist.files)\n }\n \n @pass_none\n def importable_name(name):\n return '.'not in name\n \n return filter(importable_name,opt_names)\n", ["abc", "collections", "contextlib", "csv", "email", "functools", "importlib", "importlib.abc", "importlib.metadata", "importlib.metadata._adapters", "importlib.metadata._collections", "importlib.metadata._functools", "importlib.metadata._itertools", "importlib.metadata._meta", "inspect", "itertools", "operator", "os", "pathlib", "posixpath", "re", "sys", "textwrap", "typing", "warnings", "zipfile"], 1], "importlib.metadata._functools": [".py", "import types\nimport functools\n\n\n\ndef method_cache(method,cache_wrapper=None):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n cache_wrapper=cache_wrapper or functools.lru_cache()\n \n def wrapper(self,*args,**kwargs):\n \n bound_method=types.MethodType(method,self)\n cached_method=cache_wrapper(bound_method)\n setattr(self,method.__name__,cached_method)\n return cached_method(*args,**kwargs)\n \n \n wrapper.cache_clear=lambda:None\n \n return wrapper\n \n \n \ndef pass_none(func):\n ''\n\n\n\n\n\n\n \n \n @functools.wraps(func)\n def wrapper(param,*args,**kwargs):\n if param is not None:\n return func(param,*args,**kwargs)\n \n return wrapper\n", ["functools", "types"]], "importlib.metadata._collections": [".py", "import collections\n\n\n\nclass FreezableDefaultDict(collections.defaultdict):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __missing__(self,key):\n return getattr(self,'_frozen',super().__missing__)(key)\n \n def freeze(self):\n self._frozen=lambda key:self.default_factory()\n \n \nclass Pair(collections.namedtuple('Pair','name value')):\n @classmethod\n def parse(cls,text):\n return cls(*map(str.strip,text.split(\"=\",1)))\n", ["collections"]], "importlib.metadata._itertools": [".py", "from itertools import filterfalse\n\n\ndef unique_everseen(iterable,key=None):\n ''\n \n \n seen=set()\n seen_add=seen.add\n if key is None:\n for element in filterfalse(seen.__contains__,iterable):\n seen_add(element)\n yield element\n else:\n for element in iterable:\n k=key(element)\n if k not in seen:\n seen_add(k)\n yield element\n \n \n \ndef always_iterable(obj,base_type=(str,bytes)):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if obj is None:\n return iter(())\n \n if(base_type is not None)and isinstance(obj,base_type):\n return iter((obj,))\n \n try:\n return iter(obj)\n except TypeError:\n return iter((obj,))\n", ["itertools"]], "importlib.metadata._adapters": [".py", "import functools\nimport warnings\nimport re\nimport textwrap\nimport email.message\n\nfrom._text import FoldedCase\n\n\n\n_warn=functools.partial(\nwarnings.warn,\n\"Implicit None on return values is deprecated and will raise KeyErrors.\",\nDeprecationWarning,\nstacklevel=2,\n)\n\n\nclass Message(email.message.Message):\n multiple_use_keys=set(\n map(\n FoldedCase,\n [\n 'Classifier',\n 'Obsoletes-Dist',\n 'Platform',\n 'Project-URL',\n 'Provides-Dist',\n 'Provides-Extra',\n 'Requires-Dist',\n 'Requires-External',\n 'Supported-Platform',\n 'Dynamic',\n ],\n )\n )\n ''\n\n \n \n def __new__(cls,orig:email.message.Message):\n res=super().__new__(cls)\n vars(res).update(vars(orig))\n return res\n \n def __init__(self,*args,**kwargs):\n self._headers=self._repair_headers()\n \n \n def __iter__(self):\n return super().__iter__()\n \n def __getitem__(self,item):\n ''\n\n\n \n res=super().__getitem__(item)\n if res is None:\n _warn()\n return res\n \n def _repair_headers(self):\n def redent(value):\n ''\n if not value or '\\n'not in value:\n return value\n return textwrap.dedent(' '*8+value)\n \n headers=[(key,redent(value))for key,value in vars(self)['_headers']]\n if self._payload:\n headers.append(('Description',self.get_payload()))\n return headers\n \n @property\n def json(self):\n ''\n\n\n \n \n def transform(key):\n value=self.get_all(key)if key in self.multiple_use_keys else self[key]\n if key =='Keywords':\n value=re.split(r'\\s+',value)\n tk=key.lower().replace('-','_')\n return tk,value\n \n return dict(map(transform,map(FoldedCase,self)))\n", ["email.message", 
"functools", "importlib.metadata._text", "re", "textwrap", "warnings"]], "collections": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\n'ChainMap',\n'Counter',\n'OrderedDict',\n'UserDict',\n'UserList',\n'UserString',\n'defaultdict',\n'deque',\n'namedtuple',\n]\n\nimport _collections_abc\nimport sys as _sys\n\nfrom itertools import chain as _chain\nfrom itertools import repeat as _repeat\nfrom itertools import starmap as _starmap\nfrom keyword import iskeyword as _iskeyword\nfrom operator import eq as _eq\nfrom operator import itemgetter as _itemgetter\nfrom reprlib import recursive_repr as _recursive_repr\nfrom _weakref import proxy as _proxy\n\ntry:\n from _collections import deque\nexcept ImportError:\n pass\nelse:\n _collections_abc.MutableSequence.register(deque)\n \ntry:\n from _collections import _deque_iterator\nexcept ImportError:\n pass\n \ntry:\n from _collections import defaultdict\nexcept ImportError:\n pass\n \n \n \n \n \n \nclass _OrderedDictKeysView(_collections_abc.KeysView):\n\n def __reversed__(self):\n yield from reversed(self._mapping)\n \nclass _OrderedDictItemsView(_collections_abc.ItemsView):\n\n def __reversed__(self):\n for key in reversed(self._mapping):\n yield(key,self._mapping[key])\n \nclass _OrderedDictValuesView(_collections_abc.ValuesView):\n\n def __reversed__(self):\n for key in reversed(self._mapping):\n yield self._mapping[key]\n \nclass _Link(object):\n __slots__='prev','next','key','__weakref__'\n \nclass OrderedDict(dict):\n ''\n \n \n \n \n \n \n \n \n \n \n \n \n \n def __new__(cls,/,*args,**kwds):\n ''\n self=dict.__new__(cls)\n self.__hardroot=_Link()\n self.__root=root=_proxy(self.__hardroot)\n root.prev=root.next=root\n self.__map={}\n return self\n \n def __init__(self,other=(),/,**kwds):\n ''\n\n \n self.__update(other,**kwds)\n \n def __setitem__(self,key,value,\n dict_setitem=dict.__setitem__,proxy=_proxy,Link=_Link):\n ''\n \n \n if key not in self:\n self.__map[key]=link=Link()\n root=self.__root\n last=root.prev\n link.prev,link.next,link.key=last,root,key\n last.next=link\n root.prev=proxy(link)\n dict_setitem(self,key,value)\n \n def __delitem__(self,key,dict_delitem=dict.__delitem__):\n ''\n \n \n dict_delitem(self,key)\n link=self.__map.pop(key)\n link_prev=link.prev\n link_next=link.next\n link_prev.next=link_next\n link_next.prev=link_prev\n link.prev=None\n link.next=None\n \n def __iter__(self):\n ''\n \n root=self.__root\n curr=root.next\n while curr is not root:\n yield curr.key\n curr=curr.next\n \n def __reversed__(self):\n ''\n \n root=self.__root\n curr=root.prev\n while curr is not root:\n yield curr.key\n curr=curr.prev\n \n def clear(self):\n ''\n root=self.__root\n root.prev=root.next=root\n self.__map.clear()\n dict.clear(self)\n \n def popitem(self,last=True):\n ''\n\n\n \n if not self:\n raise KeyError('dictionary is empty')\n root=self.__root\n if last:\n link=root.prev\n link_prev=link.prev\n link_prev.next=root\n root.prev=link_prev\n else:\n link=root.next\n link_next=link.next\n root.next=link_next\n link_next.prev=root\n key=link.key\n del self.__map[key]\n value=dict.pop(self,key)\n return key,value\n \n def move_to_end(self,key,last=True):\n ''\n\n\n \n link=self.__map[key]\n link_prev=link.prev\n link_next=link.next\n soft_link=link_next.prev\n link_prev.next=link_next\n link_next.prev=link_prev\n root=self.__root\n if last:\n last=root.prev\n link.prev=last\n link.next=root\n root.prev=soft_link\n last.next=link\n else:\n first=root.next\n link.prev=root\n link.next=first\n first.prev=soft_link\n 
root.next=link\n \n def __sizeof__(self):\n sizeof=_sys.getsizeof\n n=len(self)+1\n size=sizeof(self.__dict__)\n size +=sizeof(self.__map)*2\n size +=sizeof(self.__hardroot)*n\n size +=sizeof(self.__root)*n\n return size\n \n update=__update=_collections_abc.MutableMapping.update\n \n def keys(self):\n ''\n return _OrderedDictKeysView(self)\n \n def items(self):\n ''\n return _OrderedDictItemsView(self)\n \n def values(self):\n ''\n return _OrderedDictValuesView(self)\n \n __ne__=_collections_abc.MutableMapping.__ne__\n \n __marker=object()\n \n def pop(self,key,default=__marker):\n ''\n\n\n\n \n marker=self.__marker\n result=dict.pop(self,key,marker)\n if result is not marker:\n \n link=self.__map.pop(key)\n link_prev=link.prev\n link_next=link.next\n link_prev.next=link_next\n link_next.prev=link_prev\n link.prev=None\n link.next=None\n return result\n if default is marker:\n raise KeyError(key)\n return default\n \n def setdefault(self,key,default=None):\n ''\n\n\n \n if key in self:\n return self[key]\n self[key]=default\n return default\n \n @_recursive_repr()\n def __repr__(self):\n ''\n if not self:\n return '%s()'%(self.__class__.__name__,)\n return '%s(%r)'%(self.__class__.__name__,dict(self.items()))\n \n def __reduce__(self):\n ''\n state=self.__getstate__()\n if state:\n if isinstance(state,tuple):\n state,slots=state\n else:\n slots={}\n state=state.copy()\n slots=slots.copy()\n for k in vars(OrderedDict()):\n state.pop(k,None)\n slots.pop(k,None)\n if slots:\n state=state,slots\n else:\n state=state or None\n return self.__class__,(),state,None,iter(self.items())\n \n def copy(self):\n ''\n return self.__class__(self)\n \n @classmethod\n def fromkeys(cls,iterable,value=None):\n ''\n \n self=cls()\n for key in iterable:\n self[key]=value\n return self\n \n def __eq__(self,other):\n ''\n\n\n \n if isinstance(other,OrderedDict):\n return dict.__eq__(self,other)and all(map(_eq,self,other))\n return dict.__eq__(self,other)\n \n def __ior__(self,other):\n self.update(other)\n return self\n \n def __or__(self,other):\n if not isinstance(other,dict):\n return NotImplemented\n new=self.__class__(self)\n new.update(other)\n return new\n \n def __ror__(self,other):\n if not isinstance(other,dict):\n return NotImplemented\n new=self.__class__(other)\n new.update(self)\n return new\n \n \ntry:\n from _collections import OrderedDict\nexcept ImportError:\n\n pass\n \n \n \n \n \n \ntry:\n from _collections import _tuplegetter\nexcept ImportError:\n _tuplegetter=lambda index,doc:property(_itemgetter(index),doc=doc)\n \ndef namedtuple(typename,field_names,*,rename=False,defaults=None,module=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n if isinstance(field_names,str):\n field_names=field_names.replace(',',' ').split()\n field_names=list(map(str,field_names))\n typename=_sys.intern(str(typename))\n \n if rename:\n seen=set()\n for index,name in enumerate(field_names):\n if(not name.isidentifier()\n or _iskeyword(name)\n or name.startswith('_')\n or name in seen):\n field_names[index]=f'_{index}'\n seen.add(name)\n \n for name in[typename]+field_names:\n if type(name)is not str:\n raise TypeError('Type names and field names must be strings')\n if not name.isidentifier():\n raise ValueError('Type names and field names must be valid '\n f'identifiers: {name !r}')\n if _iskeyword(name):\n raise ValueError('Type names and field names cannot be a '\n f'keyword: {name !r}')\n \n seen=set()\n for name in field_names:\n if name.startswith('_')and not rename:\n raise ValueError('Field 
names cannot start with an underscore: '\n f'{name !r}')\n if name in seen:\n raise ValueError(f'Encountered duplicate field name: {name !r}')\n seen.add(name)\n \n field_defaults={}\n if defaults is not None:\n defaults=tuple(defaults)\n if len(defaults)>len(field_names):\n raise TypeError('Got more default values than field names')\n field_defaults=dict(reversed(list(zip(reversed(field_names),\n reversed(defaults)))))\n \n \n field_names=tuple(map(_sys.intern,field_names))\n num_fields=len(field_names)\n arg_list=', '.join(field_names)\n if num_fields ==1:\n arg_list +=','\n repr_fmt='('+', '.join(f'{name}=%r'for name in field_names)+')'\n tuple_new=tuple.__new__\n _dict,_tuple,_len,_map,_zip=dict,tuple,len,map,zip\n \n \n \n namespace={\n '_tuple_new':tuple_new,\n '__builtins__':{},\n '__name__':f'namedtuple_{typename}',\n }\n code=f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'\n __new__=eval(code,namespace)\n __new__.__name__='__new__'\n __new__.__doc__=f'Create new instance of {typename}({arg_list})'\n if defaults is not None:\n __new__.__defaults__=defaults\n \n @classmethod\n def _make(cls,iterable):\n result=tuple_new(cls,iterable)\n if _len(result)!=num_fields:\n raise TypeError(f'Expected {num_fields} arguments, got {len(result)}')\n return result\n \n _make.__func__.__doc__=(f'Make a new {typename} object from a sequence '\n 'or iterable')\n \n def _replace(self,/,**kwds):\n result=self._make(_map(kwds.pop,field_names,self))\n if kwds:\n raise ValueError(f'Got unexpected field names: {list(kwds)!r}')\n return result\n \n _replace.__doc__=(f'Return a new {typename} object replacing specified '\n 'fields with new values')\n \n def __repr__(self):\n ''\n return self.__class__.__name__+repr_fmt %self\n \n def _asdict(self):\n ''\n return _dict(_zip(self._fields,self))\n \n def __getnewargs__(self):\n ''\n return _tuple(self)\n \n \n for method in(\n __new__,\n _make.__func__,\n _replace,\n __repr__,\n _asdict,\n __getnewargs__,\n ):\n method.__qualname__=f'{typename}.{method.__name__}'\n \n \n \n class_namespace={\n '__doc__':f'{typename}({arg_list})',\n '__slots__':(),\n '_fields':field_names,\n '_field_defaults':field_defaults,\n '__new__':__new__,\n '_make':_make,\n '_replace':_replace,\n '__repr__':__repr__,\n '_asdict':_asdict,\n '__getnewargs__':__getnewargs__,\n '__match_args__':field_names,\n }\n for index,name in enumerate(field_names):\n doc=_sys.intern(f'Alias for field number {index}')\n class_namespace[name]=_tuplegetter(index,doc)\n \n result=type(typename,(tuple,),class_namespace)\n \n \n \n \n \n \n if module is None:\n try:\n module=_sys._getframemodulename(1)or '__main__'\n except AttributeError:\n try:\n module=_sys._getframe(1).f_globals.get('__name__','__main__')\n except(AttributeError,ValueError):\n pass\n if module is not None:\n result.__module__=module\n \n return result\n \n \n \n \n \n \ndef _count_elements(mapping,iterable):\n ''\n mapping_get=mapping.get\n for elem in iterable:\n mapping[elem]=mapping_get(elem,0)+1\n \ntry:\n from _collections import _count_elements\nexcept ImportError:\n pass\n \nclass Counter(dict):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n def __init__(self,iterable=None,/,**kwds):\n ''\n\n\n\n\n\n\n\n\n \n super().__init__()\n self.update(iterable,**kwds)\n \n def __missing__(self,key):\n ''\n \n return 0\n \n def total(self):\n ''\n return sum(self.values())\n \n def most_common(self,n=None):\n ''\n\n\n\n\n\n \n \n if n is None:\n return 
sorted(self.items(),key=_itemgetter(1),reverse=True)\n \n \n import heapq\n return heapq.nlargest(n,self.items(),key=_itemgetter(1))\n \n def elements(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n return _chain.from_iterable(_starmap(_repeat,self.items()))\n \n \n \n @classmethod\n def fromkeys(cls,iterable,v=None):\n \n \n \n \n \n \n \n raise NotImplementedError(\n 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.')\n \n def update(self,iterable=None,/,**kwds):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n if iterable is not None:\n if isinstance(iterable,_collections_abc.Mapping):\n if self:\n self_get=self.get\n for elem,count in iterable.items():\n self[elem]=count+self_get(elem,0)\n else:\n \n super().update(iterable)\n else:\n _count_elements(self,iterable)\n if kwds:\n self.update(kwds)\n \n def subtract(self,iterable=None,/,**kwds):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if iterable is not None:\n self_get=self.get\n if isinstance(iterable,_collections_abc.Mapping):\n for elem,count in iterable.items():\n self[elem]=self_get(elem,0)-count\n else:\n for elem in iterable:\n self[elem]=self_get(elem,0)-1\n if kwds:\n self.subtract(kwds)\n \n def copy(self):\n ''\n return self.__class__(self)\n \n def __reduce__(self):\n return self.__class__,(dict(self),)\n \n def __delitem__(self,elem):\n ''\n if elem in self:\n super().__delitem__(elem)\n \n def __repr__(self):\n if not self:\n return f'{self.__class__.__name__}()'\n try:\n \n d=dict(self.most_common())\n except TypeError:\n \n d=dict(self)\n return f'{self.__class__.__name__}({d !r})'\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def __eq__(self,other):\n ''\n if not isinstance(other,Counter):\n return NotImplemented\n return all(self[e]==other[e]for c in(self,other)for e in c)\n \n def __ne__(self,other):\n ''\n if not isinstance(other,Counter):\n return NotImplemented\n return not self ==other\n \n def __le__(self,other):\n ''\n if not isinstance(other,Counter):\n return NotImplemented\n return all(self[e]<=other[e]for c in(self,other)for e in c)\n \n def __lt__(self,other):\n ''\n if not isinstance(other,Counter):\n return NotImplemented\n return self <=other and self !=other\n \n def __ge__(self,other):\n ''\n if not isinstance(other,Counter):\n return NotImplemented\n return all(self[e]>=other[e]for c in(self,other)for e in c)\n \n def __gt__(self,other):\n ''\n if not isinstance(other,Counter):\n return NotImplemented\n return self >=other and self !=other\n \n def __add__(self,other):\n ''\n\n\n\n\n \n if not isinstance(other,Counter):\n return NotImplemented\n result=Counter()\n for elem,count in self.items():\n newcount=count+other[elem]\n if newcount >0:\n result[elem]=newcount\n for elem,count in other.items():\n if elem not in self and count >0:\n result[elem]=count\n return result\n \n def __sub__(self,other):\n ''\n\n\n\n\n \n if not isinstance(other,Counter):\n return NotImplemented\n result=Counter()\n for elem,count in self.items():\n newcount=count -other[elem]\n if newcount >0:\n result[elem]=newcount\n for elem,count in other.items():\n if elem not in self and count <0:\n result[elem]=0 -count\n return result\n \n def __or__(self,other):\n ''\n\n\n\n\n \n if not isinstance(other,Counter):\n return NotImplemented\n result=Counter()\n for elem,count in self.items():\n other_count=other[elem]\n newcount=other_count if count 0:\n result[elem]=newcount\n for elem,count in other.items():\n if elem not in self and count >0:\n 
result[elem]=count\n return result\n \n def __and__(self,other):\n ''\n\n\n\n\n \n if not isinstance(other,Counter):\n return NotImplemented\n result=Counter()\n for elem,count in self.items():\n other_count=other[elem]\n newcount=count if count 0:\n result[elem]=newcount\n return result\n \n def __pos__(self):\n ''\n result=Counter()\n for elem,count in self.items():\n if count >0:\n result[elem]=count\n return result\n \n def __neg__(self):\n ''\n\n\n \n result=Counter()\n for elem,count in self.items():\n if count <0:\n result[elem]=0 -count\n return result\n \n def _keep_positive(self):\n ''\n nonpositive=[elem for elem,count in self.items()if not count >0]\n for elem in nonpositive:\n del self[elem]\n return self\n \n def __iadd__(self,other):\n ''\n\n\n\n\n\n\n \n for elem,count in other.items():\n self[elem]+=count\n return self._keep_positive()\n \n def __isub__(self,other):\n ''\n\n\n\n\n\n\n \n for elem,count in other.items():\n self[elem]-=count\n return self._keep_positive()\n \n def __ior__(self,other):\n ''\n\n\n\n\n\n\n \n for elem,other_count in other.items():\n count=self[elem]\n if other_count >count:\n self[elem]=other_count\n return self._keep_positive()\n \n def __iand__(self,other):\n ''\n\n\n\n\n\n\n \n for elem,count in self.items():\n other_count=other[elem]\n if other_count self.__cast(other)\n \n def __ge__(self,other):\n return self.data >=self.__cast(other)\n \n def __cast(self,other):\n return other.data if isinstance(other,UserList)else other\n \n def __contains__(self,item):\n return item in self.data\n \n def __len__(self):\n return len(self.data)\n \n def __getitem__(self,i):\n if isinstance(i,slice):\n return self.__class__(self.data[i])\n else:\n return self.data[i]\n \n def __setitem__(self,i,item):\n self.data[i]=item\n \n def __delitem__(self,i):\n del self.data[i]\n \n def __add__(self,other):\n if isinstance(other,UserList):\n return self.__class__(self.data+other.data)\n elif isinstance(other,type(self.data)):\n return self.__class__(self.data+other)\n return self.__class__(self.data+list(other))\n \n def __radd__(self,other):\n if isinstance(other,UserList):\n return self.__class__(other.data+self.data)\n elif isinstance(other,type(self.data)):\n return self.__class__(other+self.data)\n return self.__class__(list(other)+self.data)\n \n def __iadd__(self,other):\n if isinstance(other,UserList):\n self.data +=other.data\n elif isinstance(other,type(self.data)):\n self.data +=other\n else:\n self.data +=list(other)\n return self\n \n def __mul__(self,n):\n return self.__class__(self.data *n)\n \n __rmul__=__mul__\n \n def __imul__(self,n):\n self.data *=n\n return self\n \n def __copy__(self):\n inst=self.__class__.__new__(self.__class__)\n inst.__dict__.update(self.__dict__)\n \n inst.__dict__[\"data\"]=self.__dict__[\"data\"][:]\n return inst\n \n def append(self,item):\n self.data.append(item)\n \n def insert(self,i,item):\n self.data.insert(i,item)\n \n def pop(self,i=-1):\n return self.data.pop(i)\n \n def remove(self,item):\n self.data.remove(item)\n \n def clear(self):\n self.data.clear()\n \n def copy(self):\n return self.__class__(self)\n \n def count(self,item):\n return self.data.count(item)\n \n def index(self,item,*args):\n return self.data.index(item,*args)\n \n def reverse(self):\n self.data.reverse()\n \n def sort(self,/,*args,**kwds):\n self.data.sort(*args,**kwds)\n \n def extend(self,other):\n if isinstance(other,UserList):\n self.data.extend(other.data)\n else:\n self.data.extend(other)\n \n \n \n \n \n \nclass 
UserString(_collections_abc.Sequence):\n\n def __init__(self,seq):\n if isinstance(seq,str):\n self.data=seq\n elif isinstance(seq,UserString):\n self.data=seq.data[:]\n else:\n self.data=str(seq)\n \n def __str__(self):\n return str(self.data)\n \n def __repr__(self):\n return repr(self.data)\n \n def __int__(self):\n return int(self.data)\n \n def __float__(self):\n return float(self.data)\n \n def __complex__(self):\n return complex(self.data)\n \n def __hash__(self):\n return hash(self.data)\n \n def __getnewargs__(self):\n return(self.data[:],)\n \n def __eq__(self,string):\n if isinstance(string,UserString):\n return self.data ==string.data\n return self.data ==string\n \n def __lt__(self,string):\n if isinstance(string,UserString):\n return self.data string.data\n return self.data >string\n \n def __ge__(self,string):\n if isinstance(string,UserString):\n return self.data >=string.data\n return self.data >=string\n \n def __contains__(self,char):\n if isinstance(char,UserString):\n char=char.data\n return char in self.data\n \n def __len__(self):\n return len(self.data)\n \n def __getitem__(self,index):\n return self.__class__(self.data[index])\n \n def __add__(self,other):\n if isinstance(other,UserString):\n return self.__class__(self.data+other.data)\n elif isinstance(other,str):\n return self.__class__(self.data+other)\n return self.__class__(self.data+str(other))\n \n def __radd__(self,other):\n if isinstance(other,str):\n return self.__class__(other+self.data)\n return self.__class__(str(other)+self.data)\n \n def __mul__(self,n):\n return self.__class__(self.data *n)\n \n __rmul__=__mul__\n \n def __mod__(self,args):\n return self.__class__(self.data %args)\n \n def __rmod__(self,template):\n return self.__class__(str(template)%self)\n \n \n def capitalize(self):\n return self.__class__(self.data.capitalize())\n \n def casefold(self):\n return self.__class__(self.data.casefold())\n \n def center(self,width,*args):\n return self.__class__(self.data.center(width,*args))\n \n def count(self,sub,start=0,end=_sys.maxsize):\n if isinstance(sub,UserString):\n sub=sub.data\n return self.data.count(sub,start,end)\n \n def removeprefix(self,prefix,/):\n if isinstance(prefix,UserString):\n prefix=prefix.data\n return self.__class__(self.data.removeprefix(prefix))\n \n def removesuffix(self,suffix,/):\n if isinstance(suffix,UserString):\n suffix=suffix.data\n return self.__class__(self.data.removesuffix(suffix))\n \n def encode(self,encoding='utf-8',errors='strict'):\n encoding='utf-8'if encoding is None else encoding\n errors='strict'if errors is None else errors\n return self.data.encode(encoding,errors)\n \n def endswith(self,suffix,start=0,end=_sys.maxsize):\n return self.data.endswith(suffix,start,end)\n \n def expandtabs(self,tabsize=8):\n return self.__class__(self.data.expandtabs(tabsize))\n \n def find(self,sub,start=0,end=_sys.maxsize):\n if isinstance(sub,UserString):\n sub=sub.data\n return self.data.find(sub,start,end)\n \n def format(self,/,*args,**kwds):\n return self.data.format(*args,**kwds)\n \n def format_map(self,mapping):\n return self.data.format_map(mapping)\n \n def index(self,sub,start=0,end=_sys.maxsize):\n return self.data.index(sub,start,end)\n \n def isalpha(self):\n return self.data.isalpha()\n \n def isalnum(self):\n return self.data.isalnum()\n \n def isascii(self):\n return self.data.isascii()\n \n def isdecimal(self):\n return self.data.isdecimal()\n \n def isdigit(self):\n return self.data.isdigit()\n \n def isidentifier(self):\n return 
self.data.isidentifier()\n \n def islower(self):\n return self.data.islower()\n \n def isnumeric(self):\n return self.data.isnumeric()\n \n def isprintable(self):\n return self.data.isprintable()\n \n def isspace(self):\n return self.data.isspace()\n \n def istitle(self):\n return self.data.istitle()\n \n def isupper(self):\n return self.data.isupper()\n \n def join(self,seq):\n return self.data.join(seq)\n \n def ljust(self,width,*args):\n return self.__class__(self.data.ljust(width,*args))\n \n def lower(self):\n return self.__class__(self.data.lower())\n \n def lstrip(self,chars=None):\n return self.__class__(self.data.lstrip(chars))\n \n maketrans=str.maketrans\n \n def partition(self,sep):\n return self.data.partition(sep)\n \n def replace(self,old,new,maxsplit=-1):\n if isinstance(old,UserString):\n old=old.data\n if isinstance(new,UserString):\n new=new.data\n return self.__class__(self.data.replace(old,new,maxsplit))\n \n def rfind(self,sub,start=0,end=_sys.maxsize):\n if isinstance(sub,UserString):\n sub=sub.data\n return self.data.rfind(sub,start,end)\n \n def rindex(self,sub,start=0,end=_sys.maxsize):\n return self.data.rindex(sub,start,end)\n \n def rjust(self,width,*args):\n return self.__class__(self.data.rjust(width,*args))\n \n def rpartition(self,sep):\n return self.data.rpartition(sep)\n \n def rstrip(self,chars=None):\n return self.__class__(self.data.rstrip(chars))\n \n def split(self,sep=None,maxsplit=-1):\n return self.data.split(sep,maxsplit)\n \n def rsplit(self,sep=None,maxsplit=-1):\n return self.data.rsplit(sep,maxsplit)\n \n def splitlines(self,keepends=False):\n return self.data.splitlines(keepends)\n \n def startswith(self,prefix,start=0,end=_sys.maxsize):\n return self.data.startswith(prefix,start,end)\n \n def strip(self,chars=None):\n return self.__class__(self.data.strip(chars))\n \n def swapcase(self):\n return self.__class__(self.data.swapcase())\n \n def title(self):\n return self.__class__(self.data.title())\n \n def translate(self,*args):\n return self.__class__(self.data.translate(*args))\n \n def upper(self):\n return self.__class__(self.data.upper())\n \n def zfill(self,width):\n return self.__class__(self.data.zfill(width))\n", ["_collections", "_collections_abc", "_weakref", "copy", "heapq", "itertools", "keyword", "operator", "reprlib", "sys"], 1], "collections.abc": [".py", "from _collections_abc import *\nfrom _collections_abc import __all__\nfrom _collections_abc import _CallableGenericAlias\n", ["_collections_abc"]], "logging.config": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\"\"\"\nConfiguration functions for the logging package for Python. The core package\nis based on PEP 282 and comments thereto in comp.lang.python, and influenced\nby Apache's log4j system.\n\nCopyright (C) 2001-2022 Vinay Sajip. 
All Rights Reserved.\n\nTo use, simply 'import logging' and log away!\n\"\"\"\n\nimport errno\nimport functools\nimport io\nimport logging\nimport logging.handlers\nimport os\nimport queue\nimport re\nimport struct\nimport threading\nimport traceback\n\nfrom socketserver import ThreadingTCPServer,StreamRequestHandler\n\n\nDEFAULT_LOGGING_CONFIG_PORT=9030\n\nRESET_ERROR=errno.ECONNRESET\n\n\n\n\n\n\n_listener=None\n\ndef fileConfig(fname,defaults=None,disable_existing_loggers=True,encoding=None):\n ''\n\n\n\n\n\n\n \n import configparser\n \n if isinstance(fname,str):\n if not os.path.exists(fname):\n raise FileNotFoundError(f\"{fname} doesn't exist\")\n elif not os.path.getsize(fname):\n raise RuntimeError(f'{fname} is an empty file')\n \n if isinstance(fname,configparser.RawConfigParser):\n cp=fname\n else:\n try:\n cp=configparser.ConfigParser(defaults)\n if hasattr(fname,'readline'):\n cp.read_file(fname)\n else:\n encoding=io.text_encoding(encoding)\n cp.read(fname,encoding=encoding)\n except configparser.ParsingError as e:\n raise RuntimeError(f'{fname} is invalid: {e}')\n \n formatters=_create_formatters(cp)\n \n \n logging._acquireLock()\n try:\n _clearExistingHandlers()\n \n \n handlers=_install_handlers(cp,formatters)\n _install_loggers(cp,handlers,disable_existing_loggers)\n finally:\n logging._releaseLock()\n \n \ndef _resolve(name):\n ''\n name=name.split('.')\n used=name.pop(0)\n found=__import__(used)\n for n in name:\n used=used+'.'+n\n try:\n found=getattr(found,n)\n except AttributeError:\n __import__(used)\n found=getattr(found,n)\n return found\n \ndef _strip_spaces(alist):\n return map(str.strip,alist)\n \ndef _create_formatters(cp):\n ''\n flist=cp[\"formatters\"][\"keys\"]\n if not len(flist):\n return{}\n flist=flist.split(\",\")\n flist=_strip_spaces(flist)\n formatters={}\n for form in flist:\n sectname=\"formatter_%s\"%form\n fs=cp.get(sectname,\"format\",raw=True,fallback=None)\n dfs=cp.get(sectname,\"datefmt\",raw=True,fallback=None)\n stl=cp.get(sectname,\"style\",raw=True,fallback='%')\n defaults=cp.get(sectname,\"defaults\",raw=True,fallback=None)\n \n c=logging.Formatter\n class_name=cp[sectname].get(\"class\")\n if class_name:\n c=_resolve(class_name)\n \n if defaults is not None:\n defaults=eval(defaults,vars(logging))\n f=c(fs,dfs,stl,defaults=defaults)\n else:\n f=c(fs,dfs,stl)\n formatters[form]=f\n return formatters\n \n \ndef _install_handlers(cp,formatters):\n ''\n hlist=cp[\"handlers\"][\"keys\"]\n if not len(hlist):\n return{}\n hlist=hlist.split(\",\")\n hlist=_strip_spaces(hlist)\n handlers={}\n fixups=[]\n for hand in hlist:\n section=cp[\"handler_%s\"%hand]\n klass=section[\"class\"]\n fmt=section.get(\"formatter\",\"\")\n try:\n klass=eval(klass,vars(logging))\n except(AttributeError,NameError):\n klass=_resolve(klass)\n args=section.get(\"args\",'()')\n args=eval(args,vars(logging))\n kwargs=section.get(\"kwargs\",'{}')\n kwargs=eval(kwargs,vars(logging))\n h=klass(*args,**kwargs)\n h.name=hand\n if \"level\"in section:\n level=section[\"level\"]\n h.setLevel(level)\n if len(fmt):\n h.setFormatter(formatters[fmt])\n if issubclass(klass,logging.handlers.MemoryHandler):\n target=section.get(\"target\",\"\")\n if len(target):\n fixups.append((h,target))\n handlers[hand]=h\n \n for h,t in fixups:\n h.setTarget(handlers[t])\n return handlers\n \ndef _handle_existing_loggers(existing,child_loggers,disable_existing):\n ''\n\n\n\n\n\n\n\n\n \n root=logging.root\n for log in existing:\n logger=root.manager.loggerDict[log]\n if log in child_loggers:\n 
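A minimal sketch of the INI-style configuration consumed by `fileConfig` above, built in memory with a `RawConfigParser` (which `fileConfig` accepts directly); the section contents are assumptions for the example:

```python
import configparser
import logging
import logging.config

INI = """
[loggers]
keys=root

[handlers]
keys=console

[formatters]
keys=simple

[logger_root]
level=INFO
handlers=console

[handler_console]
class=StreamHandler
args=(sys.stderr,)
formatter=simple

[formatter_simple]
format=%(levelname)s:%(name)s:%(message)s
"""

cp = configparser.RawConfigParser()
cp.read_string(INI)
logging.config.fileConfig(cp, disable_existing_loggers=False)
logging.getLogger("app").info("configured from an in-memory INI")
```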
if not isinstance(logger,logging.PlaceHolder):\n logger.setLevel(logging.NOTSET)\n logger.handlers=[]\n logger.propagate=True\n else:\n logger.disabled=disable_existing\n \ndef _install_loggers(cp,handlers,disable_existing):\n ''\n \n \n llist=cp[\"loggers\"][\"keys\"]\n llist=llist.split(\",\")\n llist=list(_strip_spaces(llist))\n llist.remove(\"root\")\n section=cp[\"logger_root\"]\n root=logging.root\n log=root\n if \"level\"in section:\n level=section[\"level\"]\n log.setLevel(level)\n for h in root.handlers[:]:\n root.removeHandler(h)\n hlist=section[\"handlers\"]\n if len(hlist):\n hlist=hlist.split(\",\")\n hlist=_strip_spaces(hlist)\n for hand in hlist:\n log.addHandler(handlers[hand])\n \n \n \n \n \n \n \n \n \n \n existing=list(root.manager.loggerDict.keys())\n \n \n \n \n existing.sort()\n \n \n child_loggers=[]\n \n for log in llist:\n section=cp[\"logger_%s\"%log]\n qn=section[\"qualname\"]\n propagate=section.getint(\"propagate\",fallback=1)\n logger=logging.getLogger(qn)\n if qn in existing:\n i=existing.index(qn)+1\n prefixed=qn+\".\"\n pflen=len(prefixed)\n num_existing=len(existing)\n while i [a-z]+)://(?P.*)$')\n \n WORD_PATTERN=re.compile(r'^\\s*(\\w+)\\s*')\n DOT_PATTERN=re.compile(r'^\\.\\s*(\\w+)\\s*')\n INDEX_PATTERN=re.compile(r'^\\[\\s*(\\w+)\\s*\\]\\s*')\n DIGIT_PATTERN=re.compile(r'^\\d+$')\n \n value_converters={\n 'ext':'ext_convert',\n 'cfg':'cfg_convert',\n }\n \n \n importer=staticmethod(__import__)\n \n def __init__(self,config):\n self.config=ConvertingDict(config)\n self.config.configurator=self\n \n def resolve(self,s):\n ''\n\n\n \n name=s.split('.')\n used=name.pop(0)\n try:\n found=self.importer(used)\n for frag in name:\n used +='.'+frag\n try:\n found=getattr(found,frag)\n except AttributeError:\n self.importer(used)\n found=getattr(found,frag)\n return found\n except ImportError as e:\n v=ValueError('Cannot resolve %r: %s'%(s,e))\n raise v from e\n \n def ext_convert(self,value):\n ''\n return self.resolve(value)\n \n def cfg_convert(self,value):\n ''\n rest=value\n m=self.WORD_PATTERN.match(rest)\n if m is None:\n raise ValueError(\"Unable to convert %r\"%value)\n else:\n rest=rest[m.end():]\n d=self.config[m.groups()[0]]\n \n while rest:\n m=self.DOT_PATTERN.match(rest)\n if m:\n d=d[m.groups()[0]]\n else:\n m=self.INDEX_PATTERN.match(rest)\n if m:\n idx=m.groups()[0]\n if not self.DIGIT_PATTERN.match(idx):\n d=d[idx]\n else:\n try:\n n=int(idx)\n d=d[n]\n except TypeError:\n d=d[idx]\n if m:\n rest=rest[m.end():]\n else:\n raise ValueError('Unable to convert '\n '%r at %r'%(value,rest))\n \n return d\n \n def convert(self,value):\n ''\n\n\n\n \n if not isinstance(value,ConvertingDict)and isinstance(value,dict):\n value=ConvertingDict(value)\n value.configurator=self\n elif not isinstance(value,ConvertingList)and isinstance(value,list):\n value=ConvertingList(value)\n value.configurator=self\n elif not isinstance(value,ConvertingTuple)and\\\n isinstance(value,tuple)and not hasattr(value,'_fields'):\n value=ConvertingTuple(value)\n value.configurator=self\n elif isinstance(value,str):\n m=self.CONVERT_PATTERN.match(value)\n if m:\n d=m.groupdict()\n prefix=d['prefix']\n converter=self.value_converters.get(prefix,None)\n if converter:\n suffix=d['suffix']\n converter=getattr(self,converter)\n value=converter(suffix)\n return value\n \n def configure_custom(self,config):\n ''\n c=config.pop('()')\n if not callable(c):\n c=self.resolve(c)\n props=config.pop('.',None)\n \n kwargs={k:config[k]for k in config if valid_ident(k)}\n result=c(**kwargs)\n 
if props:\n for name,value in props.items():\n setattr(result,name,value)\n return result\n \n def as_tuple(self,value):\n ''\n if isinstance(value,list):\n value=tuple(value)\n return value\n \nclass DictConfigurator(BaseConfigurator):\n ''\n\n\n \n \n def configure(self):\n ''\n \n config=self.config\n if 'version'not in config:\n raise ValueError(\"dictionary doesn't specify a version\")\n if config['version']!=1:\n raise ValueError(\"Unsupported version: %s\"%config['version'])\n incremental=config.pop('incremental',False)\n EMPTY_DICT={}\n logging._acquireLock()\n try:\n if incremental:\n handlers=config.get('handlers',EMPTY_DICT)\n for name in handlers:\n if name not in logging._handlers:\n raise ValueError('No handler found with '\n 'name %r'%name)\n else:\n try:\n handler=logging._handlers[name]\n handler_config=handlers[name]\n level=handler_config.get('level',None)\n if level:\n handler.setLevel(logging._checkLevel(level))\n except Exception as e:\n raise ValueError('Unable to configure handler '\n '%r'%name)from e\n loggers=config.get('loggers',EMPTY_DICT)\n for name in loggers:\n try:\n self.configure_logger(name,loggers[name],True)\n except Exception as e:\n raise ValueError('Unable to configure logger '\n '%r'%name)from e\n root=config.get('root',None)\n if root:\n try:\n self.configure_root(root,True)\n except Exception as e:\n raise ValueError('Unable to configure root '\n 'logger')from e\n else:\n disable_existing=config.pop('disable_existing_loggers',True)\n \n _clearExistingHandlers()\n \n \n formatters=config.get('formatters',EMPTY_DICT)\n for name in formatters:\n try:\n formatters[name]=self.configure_formatter(\n formatters[name])\n except Exception as e:\n raise ValueError('Unable to configure '\n 'formatter %r'%name)from e\n \n filters=config.get('filters',EMPTY_DICT)\n for name in filters:\n try:\n filters[name]=self.configure_filter(filters[name])\n except Exception as e:\n raise ValueError('Unable to configure '\n 'filter %r'%name)from e\n \n \n \n \n handlers=config.get('handlers',EMPTY_DICT)\n deferred=[]\n for name in sorted(handlers):\n try:\n handler=self.configure_handler(handlers[name])\n handler.name=name\n handlers[name]=handler\n except Exception as e:\n if ' not configured yet'in str(e.__cause__):\n deferred.append(name)\n else:\n raise ValueError('Unable to configure handler '\n '%r'%name)from e\n \n \n for name in deferred:\n try:\n handler=self.configure_handler(handlers[name])\n handler.name=name\n handlers[name]=handler\n except Exception as e:\n raise ValueError('Unable to configure handler '\n '%r'%name)from e\n \n \n \n \n \n \n \n \n \n \n \n root=logging.root\n existing=list(root.manager.loggerDict.keys())\n \n \n \n \n existing.sort()\n \n \n child_loggers=[]\n \n loggers=config.get('loggers',EMPTY_DICT)\n for name in loggers:\n if name in existing:\n i=existing.index(name)+1\n prefixed=name+\".\"\n pflen=len(prefixed)\n num_existing=len(existing)\n while i L\",chunk)[0]\n chunk=self.connection.recv(slen)\n while len(chunk)0:\n mode='a'\n if \"b\"not in mode:\n encoding=io.text_encoding(encoding)\n BaseRotatingHandler.__init__(self,filename,mode,encoding=encoding,\n delay=delay,errors=errors)\n self.maxBytes=maxBytes\n self.backupCount=backupCount\n \n def doRollover(self):\n ''\n\n \n if self.stream:\n self.stream.close()\n self.stream=None\n if self.backupCount >0:\n for i in range(self.backupCount -1,0,-1):\n sfn=self.rotation_filename(\"%s.%d\"%(self.baseFilename,i))\n dfn=self.rotation_filename(\"%s.%d\"%(self.baseFilename,\n i+1))\n 
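For comparison, a minimal sketch of the dictionary schema that `DictConfigurator.configure` above processes via `logging.config.dictConfig`; the names `plain` and `console` are arbitrary:

```python
import logging
import logging.config

logging.config.dictConfig({
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "plain": {"format": "%(asctime)s %(levelname)s %(name)s: %(message)s"},
    },
    "handlers": {
        "console": {"class": "logging.StreamHandler", "formatter": "plain"},
    },
    "root": {"level": "INFO", "handlers": ["console"]},
})

logging.getLogger("app").info("configured via dictConfig")
```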
if os.path.exists(sfn):\n if os.path.exists(dfn):\n os.remove(dfn)\n os.rename(sfn,dfn)\n dfn=self.rotation_filename(self.baseFilename+\".1\")\n if os.path.exists(dfn):\n os.remove(dfn)\n self.rotate(self.baseFilename,dfn)\n if not self.delay:\n self.stream=self._open()\n \n def shouldRollover(self,record):\n ''\n\n\n\n\n \n \n if os.path.exists(self.baseFilename)and not os.path.isfile(self.baseFilename):\n return False\n if self.stream is None:\n self.stream=self._open()\n if self.maxBytes >0:\n msg=\"%s\\n\"%self.format(record)\n self.stream.seek(0,2)\n if self.stream.tell()+len(msg)>=self.maxBytes:\n return True\n return False\n \nclass TimedRotatingFileHandler(BaseRotatingHandler):\n ''\n\n\n\n\n\n \n def __init__(self,filename,when='h',interval=1,backupCount=0,\n encoding=None,delay=False,utc=False,atTime=None,\n errors=None):\n encoding=io.text_encoding(encoding)\n BaseRotatingHandler.__init__(self,filename,'a',encoding=encoding,\n delay=delay,errors=errors)\n self.when=when.upper()\n self.backupCount=backupCount\n self.utc=utc\n self.atTime=atTime\n \n \n \n \n \n \n \n \n \n \n \n \n if self.when =='S':\n self.interval=1\n self.suffix=\"%Y-%m-%d_%H-%M-%S\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when =='M':\n self.interval=60\n self.suffix=\"%Y-%m-%d_%H-%M\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when =='H':\n self.interval=60 *60\n self.suffix=\"%Y-%m-%d_%H\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}(\\.\\w+)?$\"\n elif self.when =='D'or self.when =='MIDNIGHT':\n self.interval=60 *60 *24\n self.suffix=\"%Y-%m-%d\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when.startswith('W'):\n self.interval=60 *60 *24 *7\n if len(self.when)!=2:\n raise ValueError(\"You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s\"%self.when)\n if self.when[1]<'0'or self.when[1]>'6':\n raise ValueError(\"Invalid day specified for weekly rollover: %s\"%self.when)\n self.dayOfWeek=int(self.when[1])\n self.suffix=\"%Y-%m-%d\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n else:\n raise ValueError(\"Invalid rollover interval specified: %s\"%self.when)\n \n self.extMatch=re.compile(self.extMatch,re.ASCII)\n self.interval=self.interval *interval\n \n \n filename=self.baseFilename\n if os.path.exists(filename):\n t=os.stat(filename)[ST_MTIME]\n else:\n t=int(time.time())\n self.rolloverAt=self.computeRollover(t)\n \n def computeRollover(self,currentTime):\n ''\n\n \n result=currentTime+self.interval\n \n \n \n \n \n \n \n if self.when =='MIDNIGHT'or self.when.startswith('W'):\n \n if self.utc:\n t=time.gmtime(currentTime)\n else:\n t=time.localtime(currentTime)\n currentHour=t[3]\n currentMinute=t[4]\n currentSecond=t[5]\n currentDay=t[6]\n \n if self.atTime is None:\n rotate_ts=_MIDNIGHT\n else:\n rotate_ts=((self.atTime.hour *60+self.atTime.minute)*60+\n self.atTime.second)\n \n r=rotate_ts -((currentHour *60+currentMinute)*60+\n currentSecond)\n if r <0:\n \n \n \n r +=_MIDNIGHT\n currentDay=(currentDay+1)%7\n result=currentTime+r\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.when.startswith('W'):\n day=currentDay\n if day !=self.dayOfWeek:\n if day =self.rolloverAt:\n \n if os.path.exists(self.baseFilename)and not os.path.isfile(self.baseFilename):\n \n \n self.rolloverAt=self.computeRollover(t)\n return False\n \n return True\n return False\n \n def getFilesToDelete(self):\n ''\n\n\n\n \n dirName,baseName=os.path.split(self.baseFilename)\n 
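A short sketch of the size-based rotation implemented by `RotatingFileHandler.doRollover`/`shouldRollover` above; the file name `app.log` and the limits are assumptions:

```python
import logging
from logging.handlers import RotatingFileHandler

# Rotate app.log -> app.log.1 ... app.log.3 once it exceeds ~1 MB.
handler = RotatingFileHandler("app.log", maxBytes=1_000_000, backupCount=3)
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))

log = logging.getLogger("rotating")
log.setLevel(logging.INFO)
log.addHandler(handler)
log.info("written to app.log, rotated by size")
```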
fileNames=os.listdir(dirName)\n result=[]\n \n n,e=os.path.splitext(baseName)\n prefix=n+'.'\n plen=len(prefix)\n for fileName in fileNames:\n if self.namer is None:\n \n if not fileName.startswith(baseName):\n continue\n else:\n \n \n \n if(not fileName.startswith(baseName)and fileName.endswith(e)and\n len(fileName)>(plen+1)and not fileName[plen+1].isdigit()):\n continue\n \n if fileName[:plen]==prefix:\n suffix=fileName[plen:]\n \n \n parts=suffix.split('.')\n for part in parts:\n if self.extMatch.match(part):\n result.append(os.path.join(dirName,fileName))\n break\n if len(result)0:\n for s in self.getFilesToDelete():\n os.remove(s)\n if not self.delay:\n self.stream=self._open()\n newRolloverAt=self.computeRollover(currentTime)\n while newRolloverAt <=currentTime:\n newRolloverAt=newRolloverAt+self.interval\n \n if(self.when =='MIDNIGHT'or self.when.startswith('W'))and not self.utc:\n dstAtRollover=time.localtime(newRolloverAt)[-1]\n if dstNow !=dstAtRollover:\n if not dstNow:\n addend=-3600\n else:\n addend=3600\n newRolloverAt +=addend\n self.rolloverAt=newRolloverAt\n \nclass WatchedFileHandler(logging.FileHandler):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,filename,mode='a',encoding=None,delay=False,\n errors=None):\n if \"b\"not in mode:\n encoding=io.text_encoding(encoding)\n logging.FileHandler.__init__(self,filename,mode=mode,\n encoding=encoding,delay=delay,\n errors=errors)\n self.dev,self.ino=-1,-1\n self._statstream()\n \n def _statstream(self):\n if self.stream:\n sres=os.fstat(self.stream.fileno())\n self.dev,self.ino=sres[ST_DEV],sres[ST_INO]\n \n def reopenIfNeeded(self):\n ''\n\n\n\n\n\n \n \n \n \n \n try:\n \n sres=os.stat(self.baseFilename)\n except FileNotFoundError:\n sres=None\n \n if not sres or sres[ST_DEV]!=self.dev or sres[ST_INO]!=self.ino:\n if self.stream is not None:\n \n self.stream.flush()\n self.stream.close()\n self.stream=None\n \n self.stream=self._open()\n self._statstream()\n \n def emit(self,record):\n ''\n\n\n\n\n \n self.reopenIfNeeded()\n logging.FileHandler.emit(self,record)\n \n \nclass SocketHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,host,port):\n ''\n\n\n\n\n\n \n logging.Handler.__init__(self)\n self.host=host\n self.port=port\n if port is None:\n self.address=host\n else:\n self.address=(host,port)\n self.sock=None\n self.closeOnError=False\n self.retryTime=None\n \n \n \n self.retryStart=1.0\n self.retryMax=30.0\n self.retryFactor=2.0\n \n def makeSocket(self,timeout=1):\n ''\n\n\n \n if self.port is not None:\n result=socket.create_connection(self.address,timeout=timeout)\n else:\n result=socket.socket(socket.AF_UNIX,socket.SOCK_STREAM)\n result.settimeout(timeout)\n try:\n result.connect(self.address)\n except OSError:\n result.close()\n raise\n return result\n \n def createSocket(self):\n ''\n\n\n\n \n now=time.time()\n \n \n \n if self.retryTime is None:\n attempt=True\n else:\n attempt=(now >=self.retryTime)\n if attempt:\n try:\n self.sock=self.makeSocket()\n self.retryTime=None\n except OSError:\n \n if self.retryTime is None:\n self.retryPeriod=self.retryStart\n else:\n self.retryPeriod=self.retryPeriod *self.retryFactor\n if self.retryPeriod >self.retryMax:\n self.retryPeriod=self.retryMax\n self.retryTime=now+self.retryPeriod\n \n def send(self,s):\n ''\n\n\n\n\n \n if self.sock is None:\n self.createSocket()\n \n \n \n if self.sock:\n try:\n self.sock.sendall(s)\n except OSError:\n self.sock.close()\n self.sock=None\n \n def makePickle(self,record):\n ''\n\n\n \n 
ei=record.exc_info\n if ei:\n \n dummy=self.format(record)\n \n \n \n d=dict(record.__dict__)\n d['msg']=record.getMessage()\n d['args']=None\n d['exc_info']=None\n \n d.pop('message',None)\n s=pickle.dumps(d,1)\n slen=struct.pack(\">L\",len(s))\n return slen+s\n \n def handleError(self,record):\n ''\n\n\n\n\n\n \n if self.closeOnError and self.sock:\n self.sock.close()\n self.sock=None\n else:\n logging.Handler.handleError(self,record)\n \n def emit(self,record):\n ''\n\n\n\n\n\n\n \n try:\n s=self.makePickle(record)\n self.send(s)\n except Exception:\n self.handleError(record)\n \n def close(self):\n ''\n\n \n self.acquire()\n try:\n sock=self.sock\n if sock:\n self.sock=None\n sock.close()\n logging.Handler.close(self)\n finally:\n self.release()\n \nclass DatagramHandler(SocketHandler):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,host,port):\n ''\n\n \n SocketHandler.__init__(self,host,port)\n self.closeOnError=False\n \n def makeSocket(self):\n ''\n\n\n \n if self.port is None:\n family=socket.AF_UNIX\n else:\n family=socket.AF_INET\n s=socket.socket(family,socket.SOCK_DGRAM)\n return s\n \n def send(self,s):\n ''\n\n\n\n\n\n \n if self.sock is None:\n self.createSocket()\n self.sock.sendto(s,self.address)\n \nclass SysLogHandler(logging.Handler):\n ''\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n LOG_EMERG=0\n LOG_ALERT=1\n LOG_CRIT=2\n LOG_ERR=3\n LOG_WARNING=4\n LOG_NOTICE=5\n LOG_INFO=6\n LOG_DEBUG=7\n \n \n LOG_KERN=0\n LOG_USER=1\n LOG_MAIL=2\n LOG_DAEMON=3\n LOG_AUTH=4\n LOG_SYSLOG=5\n LOG_LPR=6\n LOG_NEWS=7\n LOG_UUCP=8\n LOG_CRON=9\n LOG_AUTHPRIV=10\n LOG_FTP=11\n LOG_NTP=12\n LOG_SECURITY=13\n LOG_CONSOLE=14\n LOG_SOLCRON=15\n \n \n LOG_LOCAL0=16\n LOG_LOCAL1=17\n LOG_LOCAL2=18\n LOG_LOCAL3=19\n LOG_LOCAL4=20\n LOG_LOCAL5=21\n LOG_LOCAL6=22\n LOG_LOCAL7=23\n \n priority_names={\n \"alert\":LOG_ALERT,\n \"crit\":LOG_CRIT,\n \"critical\":LOG_CRIT,\n \"debug\":LOG_DEBUG,\n \"emerg\":LOG_EMERG,\n \"err\":LOG_ERR,\n \"error\":LOG_ERR,\n \"info\":LOG_INFO,\n \"notice\":LOG_NOTICE,\n \"panic\":LOG_EMERG,\n \"warn\":LOG_WARNING,\n \"warning\":LOG_WARNING,\n }\n \n facility_names={\n \"auth\":LOG_AUTH,\n \"authpriv\":LOG_AUTHPRIV,\n \"console\":LOG_CONSOLE,\n \"cron\":LOG_CRON,\n \"daemon\":LOG_DAEMON,\n \"ftp\":LOG_FTP,\n \"kern\":LOG_KERN,\n \"lpr\":LOG_LPR,\n \"mail\":LOG_MAIL,\n \"news\":LOG_NEWS,\n \"ntp\":LOG_NTP,\n \"security\":LOG_SECURITY,\n \"solaris-cron\":LOG_SOLCRON,\n \"syslog\":LOG_SYSLOG,\n \"user\":LOG_USER,\n \"uucp\":LOG_UUCP,\n \"local0\":LOG_LOCAL0,\n \"local1\":LOG_LOCAL1,\n \"local2\":LOG_LOCAL2,\n \"local3\":LOG_LOCAL3,\n \"local4\":LOG_LOCAL4,\n \"local5\":LOG_LOCAL5,\n \"local6\":LOG_LOCAL6,\n \"local7\":LOG_LOCAL7,\n }\n \n \n \n \n \n priority_map={\n \"DEBUG\":\"debug\",\n \"INFO\":\"info\",\n \"WARNING\":\"warning\",\n \"ERROR\":\"error\",\n \"CRITICAL\":\"critical\"\n }\n \n def __init__(self,address=('localhost',SYSLOG_UDP_PORT),\n facility=LOG_USER,socktype=None):\n ''\n\n\n\n\n\n\n\n\n\n \n logging.Handler.__init__(self)\n \n self.address=address\n self.facility=facility\n self.socktype=socktype\n self.socket=None\n self.createSocket()\n \n def _connect_unixsocket(self,address):\n use_socktype=self.socktype\n if use_socktype is None:\n use_socktype=socket.SOCK_DGRAM\n self.socket=socket.socket(socket.AF_UNIX,use_socktype)\n try:\n self.socket.connect(address)\n \n self.socktype=use_socktype\n except OSError:\n self.socket.close()\n if self.socktype is not None:\n \n raise\n use_socktype=socket.SOCK_STREAM\n 
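A sketch of `SysLogHandler` usage, whose facility and priority tables appear above; the Unix socket path `/dev/log` is a common Linux default and an assumption here:

```python
import logging
from logging.handlers import SysLogHandler

# "/dev/log" is the usual datagram socket of the local syslog daemon.
handler = SysLogHandler(address="/dev/log", facility=SysLogHandler.LOG_USER)
log = logging.getLogger("syslog-demo")
log.addHandler(handler)
log.warning("forwarded to the local syslog daemon")
```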
self.socket=socket.socket(socket.AF_UNIX,use_socktype)\n try:\n self.socket.connect(address)\n \n self.socktype=use_socktype\n except OSError:\n self.socket.close()\n raise\n \n def createSocket(self):\n ''\n\n\n\n\n\n \n address=self.address\n socktype=self.socktype\n \n if isinstance(address,str):\n self.unixsocket=True\n \n \n \n \n try:\n self._connect_unixsocket(address)\n except OSError:\n pass\n else:\n self.unixsocket=False\n if socktype is None:\n socktype=socket.SOCK_DGRAM\n host,port=address\n ress=socket.getaddrinfo(host,port,0,socktype)\n if not ress:\n raise OSError(\"getaddrinfo returns an empty list\")\n for res in ress:\n af,socktype,proto,_,sa=res\n err=sock=None\n try:\n sock=socket.socket(af,socktype,proto)\n if socktype ==socket.SOCK_STREAM:\n sock.connect(sa)\n break\n except OSError as exc:\n err=exc\n if sock is not None:\n sock.close()\n if err is not None:\n raise err\n self.socket=sock\n self.socktype=socktype\n \n def encodePriority(self,facility,priority):\n ''\n\n\n\n\n \n if isinstance(facility,str):\n facility=self.facility_names[facility]\n if isinstance(priority,str):\n priority=self.priority_names[priority]\n return(facility <<3)|priority\n \n def close(self):\n ''\n\n \n self.acquire()\n try:\n sock=self.socket\n if sock:\n self.socket=None\n sock.close()\n logging.Handler.close(self)\n finally:\n self.release()\n \n def mapPriority(self,levelName):\n ''\n\n\n\n\n\n \n return self.priority_map.get(levelName,\"warning\")\n \n ident=''\n append_nul=True\n \n def emit(self,record):\n ''\n\n\n\n\n \n try:\n msg=self.format(record)\n if self.ident:\n msg=self.ident+msg\n if self.append_nul:\n msg +='\\000'\n \n \n \n prio='<%d>'%self.encodePriority(self.facility,\n self.mapPriority(record.levelname))\n prio=prio.encode('utf-8')\n \n msg=msg.encode('utf-8')\n msg=prio+msg\n \n if not self.socket:\n self.createSocket()\n \n if self.unixsocket:\n try:\n self.socket.send(msg)\n except OSError:\n self.socket.close()\n self._connect_unixsocket(self.address)\n self.socket.send(msg)\n elif self.socktype ==socket.SOCK_DGRAM:\n self.socket.sendto(msg,self.address)\n else:\n self.socket.sendall(msg)\n except Exception:\n self.handleError(record)\n \nclass SMTPHandler(logging.Handler):\n ''\n\n \n def __init__(self,mailhost,fromaddr,toaddrs,subject,\n credentials=None,secure=None,timeout=5.0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n logging.Handler.__init__(self)\n if isinstance(mailhost,(list,tuple)):\n self.mailhost,self.mailport=mailhost\n else:\n self.mailhost,self.mailport=mailhost,None\n if isinstance(credentials,(list,tuple)):\n self.username,self.password=credentials\n else:\n self.username=None\n self.fromaddr=fromaddr\n if isinstance(toaddrs,str):\n toaddrs=[toaddrs]\n self.toaddrs=toaddrs\n self.subject=subject\n self.secure=secure\n self.timeout=timeout\n \n def getSubject(self,record):\n ''\n\n\n\n\n \n return self.subject\n \n def emit(self,record):\n ''\n\n\n\n \n try:\n import smtplib\n from email.message import EmailMessage\n import email.utils\n \n port=self.mailport\n if not port:\n port=smtplib.SMTP_PORT\n smtp=smtplib.SMTP(self.mailhost,port,timeout=self.timeout)\n msg=EmailMessage()\n msg['From']=self.fromaddr\n msg['To']=','.join(self.toaddrs)\n msg['Subject']=self.getSubject(record)\n msg['Date']=email.utils.localtime()\n msg.set_content(self.format(record))\n if self.username:\n if self.secure is not None:\n smtp.ehlo()\n smtp.starttls(*self.secure)\n smtp.ehlo()\n smtp.login(self.username,self.password)\n smtp.send_message(msg)\n smtp.quit()\n 
except Exception:\n self.handleError(record)\n \nclass NTEventLogHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n \n def __init__(self,appname,dllname=None,logtype=\"Application\"):\n logging.Handler.__init__(self)\n try:\n import win32evtlogutil,win32evtlog\n self.appname=appname\n self._welu=win32evtlogutil\n if not dllname:\n dllname=os.path.split(self._welu.__file__)\n dllname=os.path.split(dllname[0])\n dllname=os.path.join(dllname[0],r'win32service.pyd')\n self.dllname=dllname\n self.logtype=logtype\n \n \n \n try:\n self._welu.AddSourceToRegistry(appname,dllname,logtype)\n except Exception as e:\n \n \n if getattr(e,'winerror',None)!=5:\n raise\n self.deftype=win32evtlog.EVENTLOG_ERROR_TYPE\n self.typemap={\n logging.DEBUG:win32evtlog.EVENTLOG_INFORMATION_TYPE,\n logging.INFO:win32evtlog.EVENTLOG_INFORMATION_TYPE,\n logging.WARNING:win32evtlog.EVENTLOG_WARNING_TYPE,\n logging.ERROR:win32evtlog.EVENTLOG_ERROR_TYPE,\n logging.CRITICAL:win32evtlog.EVENTLOG_ERROR_TYPE,\n }\n except ImportError:\n print(\"The Python Win32 extensions for NT (service, event \"\\\n \"logging) appear not to be available.\")\n self._welu=None\n \n def getMessageID(self,record):\n ''\n\n\n\n\n\n \n return 1\n \n def getEventCategory(self,record):\n ''\n\n\n\n\n \n return 0\n \n def getEventType(self,record):\n ''\n\n\n\n\n\n\n\n\n \n return self.typemap.get(record.levelno,self.deftype)\n \n def emit(self,record):\n ''\n\n\n\n\n \n if self._welu:\n try:\n id=self.getMessageID(record)\n cat=self.getEventCategory(record)\n type=self.getEventType(record)\n msg=self.format(record)\n self._welu.ReportEvent(self.appname,id,cat,type,[msg])\n except Exception:\n self.handleError(record)\n \n def close(self):\n ''\n\n\n\n\n\n\n\n \n \n logging.Handler.close(self)\n \nclass HTTPHandler(logging.Handler):\n ''\n\n\n \n def __init__(self,host,url,method=\"GET\",secure=False,credentials=None,\n context=None):\n ''\n\n\n \n logging.Handler.__init__(self)\n method=method.upper()\n if method not in[\"GET\",\"POST\"]:\n raise ValueError(\"method must be GET or POST\")\n if not secure and context is not None:\n raise ValueError(\"context parameter only makes sense \"\n \"with secure=True\")\n self.host=host\n self.url=url\n self.method=method\n self.secure=secure\n self.credentials=credentials\n self.context=context\n \n def mapLogRecord(self,record):\n ''\n\n\n\n \n return record.__dict__\n \n def getConnection(self,host,secure):\n ''\n\n\n\n\n \n import http.client\n if secure:\n connection=http.client.HTTPSConnection(host,context=self.context)\n else:\n connection=http.client.HTTPConnection(host)\n return connection\n \n def emit(self,record):\n ''\n\n\n\n \n try:\n import urllib.parse\n host=self.host\n h=self.getConnection(host,self.secure)\n url=self.url\n data=urllib.parse.urlencode(self.mapLogRecord(record))\n if self.method ==\"GET\":\n if(url.find('?')>=0):\n sep='&'\n else:\n sep='?'\n url=url+\"%c%s\"%(sep,data)\n h.putrequest(self.method,url)\n \n \n i=host.find(\":\")\n if i >=0:\n host=host[:i]\n \n \n \n if self.method ==\"POST\":\n h.putheader(\"Content-type\",\n \"application/x-www-form-urlencoded\")\n h.putheader(\"Content-length\",str(len(data)))\n if self.credentials:\n import base64\n s=('%s:%s'%self.credentials).encode('utf-8')\n s='Basic '+base64.b64encode(s).strip().decode('ascii')\n h.putheader('Authorization',s)\n h.endheaders()\n if self.method ==\"POST\":\n h.send(data.encode('utf-8'))\n h.getresponse()\n except Exception:\n self.handleError(record)\n \nclass BufferingHandler(logging.Handler):\n 
''\n\n\n\n \n def __init__(self,capacity):\n ''\n\n \n logging.Handler.__init__(self)\n self.capacity=capacity\n self.buffer=[]\n \n def shouldFlush(self,record):\n ''\n\n\n\n\n \n return(len(self.buffer)>=self.capacity)\n \n def emit(self,record):\n ''\n\n\n\n\n \n self.buffer.append(record)\n if self.shouldFlush(record):\n self.flush()\n \n def flush(self):\n ''\n\n\n\n \n self.acquire()\n try:\n self.buffer.clear()\n finally:\n self.release()\n \n def close(self):\n ''\n\n\n\n \n try:\n self.flush()\n finally:\n logging.Handler.close(self)\n \nclass MemoryHandler(BufferingHandler):\n ''\n\n\n\n \n def __init__(self,capacity,flushLevel=logging.ERROR,target=None,\n flushOnClose=True):\n ''\n\n\n\n\n\n\n\n\n\n\n \n BufferingHandler.__init__(self,capacity)\n self.flushLevel=flushLevel\n self.target=target\n \n self.flushOnClose=flushOnClose\n \n def shouldFlush(self,record):\n ''\n\n \n return(len(self.buffer)>=self.capacity)or\\\n (record.levelno >=self.flushLevel)\n \n def setTarget(self,target):\n ''\n\n \n self.acquire()\n try:\n self.target=target\n finally:\n self.release()\n \n def flush(self):\n ''\n\n\n\n\n\n \n self.acquire()\n try:\n if self.target:\n for record in self.buffer:\n self.target.handle(record)\n self.buffer.clear()\n finally:\n self.release()\n \n def close(self):\n ''\n\n\n \n try:\n if self.flushOnClose:\n self.flush()\n finally:\n self.acquire()\n try:\n self.target=None\n BufferingHandler.close(self)\n finally:\n self.release()\n \n \nclass QueueHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,queue):\n ''\n\n \n logging.Handler.__init__(self)\n self.queue=queue\n self.listener=None\n \n def enqueue(self,record):\n ''\n\n\n\n\n\n \n self.queue.put_nowait(record)\n \n def prepare(self,record):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n msg=self.format(record)\n \n record=copy.copy(record)\n record.message=msg\n record.msg=msg\n record.args=None\n record.exc_info=None\n record.exc_text=None\n record.stack_info=None\n return record\n \n def emit(self,record):\n ''\n\n\n\n \n try:\n self.enqueue(self.prepare(record))\n except Exception:\n self.handleError(record)\n \n \nclass QueueListener(object):\n ''\n\n\n\n \n _sentinel=None\n \n def __init__(self,queue,*handlers,respect_handler_level=False):\n ''\n\n\n \n self.queue=queue\n self.handlers=handlers\n self._thread=None\n self.respect_handler_level=respect_handler_level\n \n def dequeue(self,block):\n ''\n\n\n\n\n \n return self.queue.get(block)\n \n def start(self):\n ''\n\n\n\n\n \n self._thread=t=threading.Thread(target=self._monitor)\n t.daemon=True\n t.start()\n \n def prepare(self,record):\n ''\n\n\n\n\n\n \n return record\n \n def handle(self,record):\n ''\n\n\n\n\n \n record=self.prepare(record)\n for handler in self.handlers:\n if not self.respect_handler_level:\n process=True\n else:\n process=record.levelno >=handler.level\n if process:\n handler.handle(record)\n \n def _monitor(self):\n ''\n\n\n\n\n\n \n q=self.queue\n has_task_done=hasattr(q,'task_done')\n while True:\n try:\n record=self.dequeue(True)\n if record is self._sentinel:\n if has_task_done:\n q.task_done()\n break\n self.handle(record)\n if has_task_done:\n q.task_done()\n except queue.Empty:\n break\n \n def enqueue_sentinel(self):\n ''\n\n\n\n\n\n \n self.queue.put_nowait(self._sentinel)\n \n def stop(self):\n ''\n\n\n\n\n\n \n self.enqueue_sentinel()\n self._thread.join()\n self._thread=None\n", ["base64", "copy", "email.message", "email.utils", "http.client", "io", "logging", "os", "pickle", "queue", 
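The `QueueHandler`/`QueueListener` pair defined above is typically wired together as in this sketch (the logger name is arbitrary):

```python
import logging
import queue
from logging.handlers import QueueHandler, QueueListener

q = queue.Queue()
console = logging.StreamHandler()
# The listener thread dequeues records and passes them to its handlers;
# respect_handler_level=True honours each handler's own level.
listener = QueueListener(q, console, respect_handler_level=True)
listener.start()

log = logging.getLogger("queued")
log.setLevel(logging.INFO)
log.addHandler(QueueHandler(q))
log.info("enqueued here, emitted by the listener thread")

listener.stop()  # enqueues the sentinel and joins the worker thread
```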
"re", "smtplib", "socket", "stat", "struct", "threading", "time", "urllib.parse", "win32evtlog", "win32evtlogutil"]], "logging": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\"\"\"\nLogging package for Python. Based on PEP 282 and comments thereto in\ncomp.lang.python.\n\nCopyright (C) 2001-2022 Vinay Sajip. All Rights Reserved.\n\nTo use, simply 'import logging' and log away!\n\"\"\"\n\nimport sys,os,time,io,re,traceback,warnings,weakref,collections.abc\n\nfrom types import GenericAlias\nfrom string import Template\nfrom string import Formatter as StrFormatter\n\n\n__all__=['BASIC_FORMAT','BufferingFormatter','CRITICAL','DEBUG','ERROR',\n'FATAL','FileHandler','Filter','Formatter','Handler','INFO',\n'LogRecord','Logger','LoggerAdapter','NOTSET','NullHandler',\n'StreamHandler','WARN','WARNING','addLevelName','basicConfig',\n'captureWarnings','critical','debug','disable','error',\n'exception','fatal','getLevelName','getLogger','getLoggerClass',\n'info','log','makeLogRecord','setLoggerClass','shutdown',\n'warn','warning','getLogRecordFactory','setLogRecordFactory',\n'lastResort','raiseExceptions','getLevelNamesMapping',\n'getHandlerByName','getHandlerNames']\n\nimport threading\n\n__author__=\"Vinay Sajip \"\n__status__=\"production\"\n\n__version__=\"0.5.1.2\"\n__date__=\"07 February 2010\"\n\n\n\n\n\n\n\n\n_startTime=time.time()\n\n\n\n\n\nraiseExceptions=True\n\n\n\n\nlogThreads=True\n\n\n\n\nlogMultiprocessing=True\n\n\n\n\nlogProcesses=True\n\n\n\n\nlogAsyncioTasks=True\n\n\n\n\n\n\n\n\n\n\n\n\nCRITICAL=50\nFATAL=CRITICAL\nERROR=40\nWARNING=30\nWARN=WARNING\nINFO=20\nDEBUG=10\nNOTSET=0\n\n_levelToName={\nCRITICAL:'CRITICAL',\nERROR:'ERROR',\nWARNING:'WARNING',\nINFO:'INFO',\nDEBUG:'DEBUG',\nNOTSET:'NOTSET',\n}\n_nameToLevel={\n'CRITICAL':CRITICAL,\n'FATAL':FATAL,\n'ERROR':ERROR,\n'WARN':WARNING,\n'WARNING':WARNING,\n'INFO':INFO,\n'DEBUG':DEBUG,\n'NOTSET':NOTSET,\n}\n\ndef getLevelNamesMapping():\n return _nameToLevel.copy()\n \ndef getLevelName(level):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n result=_levelToName.get(level)\n if result is not None:\n return result\n result=_nameToLevel.get(level)\n if result is not None:\n return result\n return \"Level %s\"%level\n \ndef addLevelName(level,levelName):\n ''\n\n\n\n \n _acquireLock()\n try:\n _levelToName[level]=levelName\n _nameToLevel[levelName]=level\n finally:\n _releaseLock()\n \nif hasattr(sys,\"_getframe\"):\n currentframe=lambda:sys._getframe(1)\nelse:\n def currentframe():\n ''\n try:\n raise Exception\n except Exception as exc:\n return exc.__traceback__.tb_frame.f_back\n \n \n \n \n \n \n \n \n \n \n \n \n \n_srcfile=os.path.normcase(addLevelName.__code__.co_filename)\n\n\n\n\n\n\n\n\ndef _is_internal_frame(frame):\n ''\n filename=os.path.normcase(frame.f_code.co_filename)\n return filename ==_srcfile or(\n \"importlib\"in filename and \"_bootstrap\"in filename\n )\n \n \ndef _checkLevel(level):\n if isinstance(level,int):\n rv=level\n elif str(level)==level:\n if level not in _nameToLevel:\n raise ValueError(\"Unknown level: %r\"%level)\n rv=_nameToLevel[level]\n else:\n raise TypeError(\"Level not an integer or a valid string: %r\"\n %(level,))\n return rv\n \n \n \n \n \n \n \n \n \n \n \n \n \n_lock=threading.RLock()\n\ndef _acquireLock():\n ''\n\n\n\n \n if _lock:\n _lock.acquire()\n \ndef _releaseLock():\n ''\n\n \n if _lock:\n _lock.release()\n \n \n \n \nif not hasattr(os,'register_at_fork'):\n def _register_at_fork_reinit_lock(instance):\n pass\nelse:\n\n\n\n _at_fork_reinit_lock_weakset=weakref.WeakSet()\n \n def 
_register_at_fork_reinit_lock(instance):\n _acquireLock()\n try:\n _at_fork_reinit_lock_weakset.add(instance)\n finally:\n _releaseLock()\n \n def _after_at_fork_child_reinit_locks():\n for handler in _at_fork_reinit_lock_weakset:\n handler._at_fork_reinit()\n \n \n \n _lock._at_fork_reinit()\n \n os.register_at_fork(before=_acquireLock,\n after_in_child=_after_at_fork_child_reinit_locks,\n after_in_parent=_releaseLock)\n \n \n \n \n \n \nclass LogRecord(object):\n ''\n\n\n\n\n\n\n\n\n\n \n def __init__(self,name,level,pathname,lineno,\n msg,args,exc_info,func=None,sinfo=None,**kwargs):\n ''\n\n \n ct=time.time()\n self.name=name\n self.msg=msg\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if(args and len(args)==1 and isinstance(args[0],collections.abc.Mapping)\n and args[0]):\n args=args[0]\n self.args=args\n self.levelname=getLevelName(level)\n self.levelno=level\n self.pathname=pathname\n try:\n self.filename=os.path.basename(pathname)\n self.module=os.path.splitext(self.filename)[0]\n except(TypeError,ValueError,AttributeError):\n self.filename=pathname\n self.module=\"Unknown module\"\n self.exc_info=exc_info\n self.exc_text=None\n self.stack_info=sinfo\n self.lineno=lineno\n self.funcName=func\n self.created=ct\n self.msecs=int((ct -int(ct))*1000)+0.0\n self.relativeCreated=(self.created -_startTime)*1000\n if logThreads:\n self.thread=threading.get_ident()\n self.threadName=threading.current_thread().name\n else:\n self.thread=None\n self.threadName=None\n if not logMultiprocessing:\n self.processName=None\n else:\n self.processName='MainProcess'\n mp=sys.modules.get('multiprocessing')\n if mp is not None:\n \n \n \n \n try:\n self.processName=mp.current_process().name\n except Exception:\n pass\n if logProcesses and hasattr(os,'getpid'):\n self.process=os.getpid()\n else:\n self.process=None\n \n self.taskName=None\n if logAsyncioTasks:\n asyncio=sys.modules.get('asyncio')\n if asyncio:\n try:\n self.taskName=asyncio.current_task().get_name()\n except Exception:\n pass\n \n def __repr__(self):\n return ''%(self.name,self.levelno,\n self.pathname,self.lineno,self.msg)\n \n def getMessage(self):\n ''\n\n\n\n\n \n msg=str(self.msg)\n if self.args:\n msg=msg %self.args\n return msg\n \n \n \n \n_logRecordFactory=LogRecord\n\ndef setLogRecordFactory(factory):\n ''\n\n\n\n\n \n global _logRecordFactory\n _logRecordFactory=factory\n \ndef getLogRecordFactory():\n ''\n\n \n \n return _logRecordFactory\n \ndef makeLogRecord(dict):\n ''\n\n\n\n\n \n rv=_logRecordFactory(None,None,\"\",0,\"\",(),None,None)\n rv.__dict__.update(dict)\n return rv\n \n \n \n \n \n_str_formatter=StrFormatter()\ndel StrFormatter\n\n\nclass PercentStyle(object):\n\n default_format='%(message)s'\n asctime_format='%(asctime)s'\n asctime_search='%(asctime)'\n validation_pattern=re.compile(r'%\\(\\w+\\)[#0+ -]*(\\*|\\d+)?(\\.(\\*|\\d+))?[diouxefgcrsa%]',re.I)\n \n def __init__(self,fmt,*,defaults=None):\n self._fmt=fmt or self.default_format\n self._defaults=defaults\n \n def usesTime(self):\n return self._fmt.find(self.asctime_search)>=0\n \n def validate(self):\n ''\n if not self.validation_pattern.search(self._fmt):\n raise ValueError(\"Invalid format '%s' for '%s' style\"%(self._fmt,self.default_format[0]))\n \n def _format(self,record):\n if defaults :=self._defaults:\n values=defaults |record.__dict__\n else:\n values=record.__dict__\n return self._fmt %values\n \n def format(self,record):\n try:\n return self._format(record)\n except KeyError as e:\n raise ValueError('Formatting field not found in 
record: %s'%e)\n \n \nclass StrFormatStyle(PercentStyle):\n default_format='{message}'\n asctime_format='{asctime}'\n asctime_search='{asctime'\n \n fmt_spec=re.compile(r'^(.?[<>=^])?[+ -]?#?0?(\\d+|{\\w+})?[,_]?(\\.(\\d+|{\\w+}))?[bcdefgnosx%]?$',re.I)\n field_spec=re.compile(r'^(\\d+|\\w+)(\\.\\w+|\\[[^]]+\\])*$')\n \n def _format(self,record):\n if defaults :=self._defaults:\n values=defaults |record.__dict__\n else:\n values=record.__dict__\n return self._fmt.format(**values)\n \n def validate(self):\n ''\n fields=set()\n try:\n for _,fieldname,spec,conversion in _str_formatter.parse(self._fmt):\n if fieldname:\n if not self.field_spec.match(fieldname):\n raise ValueError('invalid field name/expression: %r'%fieldname)\n fields.add(fieldname)\n if conversion and conversion not in 'rsa':\n raise ValueError('invalid conversion: %r'%conversion)\n if spec and not self.fmt_spec.match(spec):\n raise ValueError('bad specifier: %r'%spec)\n except ValueError as e:\n raise ValueError('invalid format: %s'%e)\n if not fields:\n raise ValueError('invalid format: no fields')\n \n \nclass StringTemplateStyle(PercentStyle):\n default_format='${message}'\n asctime_format='${asctime}'\n asctime_search='${asctime}'\n \n def __init__(self,*args,**kwargs):\n super().__init__(*args,**kwargs)\n self._tpl=Template(self._fmt)\n \n def usesTime(self):\n fmt=self._fmt\n return fmt.find('$asctime')>=0 or fmt.find(self.asctime_search)>=0\n \n def validate(self):\n pattern=Template.pattern\n fields=set()\n for m in pattern.finditer(self._fmt):\n d=m.groupdict()\n if d['named']:\n fields.add(d['named'])\n elif d['braced']:\n fields.add(d['braced'])\n elif m.group(0)=='$':\n raise ValueError('invalid format: bare \\'$\\' not allowed')\n if not fields:\n raise ValueError('invalid format: no fields')\n \n def _format(self,record):\n if defaults :=self._defaults:\n values=defaults |record.__dict__\n else:\n values=record.__dict__\n return self._tpl.substitute(**values)\n \n \nBASIC_FORMAT=\"%(levelname)s:%(name)s:%(message)s\"\n\n_STYLES={\n'%':(PercentStyle,BASIC_FORMAT),\n'{':(StrFormatStyle,'{levelname}:{name}:{message}'),\n'$':(StringTemplateStyle,'${levelname}:${name}:${message}'),\n}\n\nclass Formatter(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n converter=time.localtime\n \n def __init__(self,fmt=None,datefmt=None,style='%',validate=True,*,\n defaults=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if style not in _STYLES:\n raise ValueError('Style must be one of: %s'%','.join(\n _STYLES.keys()))\n self._style=_STYLES[style][0](fmt,defaults=defaults)\n if validate:\n self._style.validate()\n \n self._fmt=self._style._fmt\n self.datefmt=datefmt\n \n default_time_format='%Y-%m-%d %H:%M:%S'\n default_msec_format='%s,%03d'\n \n def formatTime(self,record,datefmt=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n ct=self.converter(record.created)\n if datefmt:\n s=time.strftime(datefmt,ct)\n else:\n s=time.strftime(self.default_time_format,ct)\n if self.default_msec_format:\n s=self.default_msec_format %(s,record.msecs)\n return s\n \n def formatException(self,ei):\n ''\n\n\n\n\n \n sio=io.StringIO()\n tb=ei[2]\n \n \n \n traceback.print_exception(ei[0],ei[1],tb,None,sio)\n s=sio.getvalue()\n sio.close()\n if s[-1:]==\"\\n\":\n s=s[:-1]\n return s\n \n def usesTime(self):\n ''\n\n \n return self._style.usesTime()\n \n def formatMessage(self,record):\n return self._style.format(record)\n \n def formatStack(self,stack_info):\n ''\n\n\n\n\n\n\n\n\n \n return stack_info\n \n 
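A sketch of the three format styles registered in `_STYLES` above, selected through `Formatter(style=...)`:

```python
import logging

percent = logging.Formatter("%(levelname)s:%(name)s:%(message)s")        # style='%'
braces  = logging.Formatter("{levelname}:{name}:{message}", style="{")
dollar  = logging.Formatter("${levelname}:${name}:${message}", style="$")

handler = logging.StreamHandler()
handler.setFormatter(braces)
log = logging.getLogger("styles")
log.addHandler(handler)
log.warning("rendered with str.format-style fields")
```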
def format(self,record):\n ''\n\n\n\n\n\n\n\n\n\n\n \n record.message=record.getMessage()\n if self.usesTime():\n record.asctime=self.formatTime(record,self.datefmt)\n s=self.formatMessage(record)\n if record.exc_info:\n \n \n if not record.exc_text:\n record.exc_text=self.formatException(record.exc_info)\n if record.exc_text:\n if s[-1:]!=\"\\n\":\n s=s+\"\\n\"\n s=s+record.exc_text\n if record.stack_info:\n if s[-1:]!=\"\\n\":\n s=s+\"\\n\"\n s=s+self.formatStack(record.stack_info)\n return s\n \n \n \n \n_defaultFormatter=Formatter()\n\nclass BufferingFormatter(object):\n ''\n\n \n def __init__(self,linefmt=None):\n ''\n\n\n \n if linefmt:\n self.linefmt=linefmt\n else:\n self.linefmt=_defaultFormatter\n \n def formatHeader(self,records):\n ''\n\n \n return \"\"\n \n def formatFooter(self,records):\n ''\n\n \n return \"\"\n \n def format(self,records):\n ''\n\n \n rv=\"\"\n if len(records)>0:\n rv=rv+self.formatHeader(records)\n for record in records:\n rv=rv+self.linefmt.format(record)\n rv=rv+self.formatFooter(records)\n return rv\n \n \n \n \n \nclass Filter(object):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,name=''):\n ''\n\n\n\n\n\n \n self.name=name\n self.nlen=len(name)\n \n def filter(self,record):\n ''\n\n\n\n\n \n if self.nlen ==0:\n return True\n elif self.name ==record.name:\n return True\n elif record.name.find(self.name,0,self.nlen)!=0:\n return False\n return(record.name[self.nlen]==\".\")\n \nclass Filterer(object):\n ''\n\n\n \n def __init__(self):\n ''\n\n \n self.filters=[]\n \n def addFilter(self,filter):\n ''\n\n \n if not(filter in self.filters):\n self.filters.append(filter)\n \n def removeFilter(self,filter):\n ''\n\n \n if filter in self.filters:\n self.filters.remove(filter)\n \n def filter(self,record):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n for f in self.filters:\n if hasattr(f,'filter'):\n result=f.filter(record)\n else:\n result=f(record)\n if not result:\n return False\n if isinstance(result,LogRecord):\n record=result\n return record\n \n \n \n \n \n_handlers=weakref.WeakValueDictionary()\n_handlerList=[]\n\ndef _removeHandlerRef(wr):\n ''\n\n \n \n \n \n \n acquire,release,handlers=_acquireLock,_releaseLock,_handlerList\n if acquire and release and handlers:\n acquire()\n try:\n handlers.remove(wr)\n except ValueError:\n pass\n finally:\n release()\n \ndef _addHandlerRef(handler):\n ''\n\n \n _acquireLock()\n try:\n _handlerList.append(weakref.ref(handler,_removeHandlerRef))\n finally:\n _releaseLock()\n \n \ndef getHandlerByName(name):\n ''\n\n\n \n return _handlers.get(name)\n \n \ndef getHandlerNames():\n ''\n\n \n result=set(_handlers.keys())\n return frozenset(result)\n \n \nclass Handler(Filterer):\n ''\n\n\n\n\n\n\n \n def __init__(self,level=NOTSET):\n ''\n\n\n \n Filterer.__init__(self)\n self._name=None\n self.level=_checkLevel(level)\n self.formatter=None\n self._closed=False\n \n _addHandlerRef(self)\n self.createLock()\n \n def get_name(self):\n return self._name\n \n def set_name(self,name):\n _acquireLock()\n try:\n if self._name in _handlers:\n del _handlers[self._name]\n self._name=name\n if name:\n _handlers[name]=self\n finally:\n _releaseLock()\n \n name=property(get_name,set_name)\n \n def createLock(self):\n ''\n\n \n self.lock=threading.RLock()\n _register_at_fork_reinit_lock(self)\n \n def _at_fork_reinit(self):\n self.lock._at_fork_reinit()\n \n def acquire(self):\n ''\n\n \n if self.lock:\n self.lock.acquire()\n \n def release(self):\n ''\n\n \n if self.lock:\n self.lock.release()\n \n def 
setLevel(self,level):\n ''\n\n \n self.level=_checkLevel(level)\n \n def format(self,record):\n ''\n\n\n\n\n \n if self.formatter:\n fmt=self.formatter\n else:\n fmt=_defaultFormatter\n return fmt.format(record)\n \n def emit(self,record):\n ''\n\n\n\n\n \n raise NotImplementedError('emit must be implemented '\n 'by Handler subclasses')\n \n def handle(self,record):\n ''\n\n\n\n\n\n\n\n\n \n rv=self.filter(record)\n if isinstance(rv,LogRecord):\n record=rv\n if rv:\n self.acquire()\n try:\n self.emit(record)\n finally:\n self.release()\n return rv\n \n def setFormatter(self,fmt):\n ''\n\n \n self.formatter=fmt\n \n def flush(self):\n ''\n\n\n\n\n \n pass\n \n def close(self):\n ''\n\n\n\n\n\n\n \n \n _acquireLock()\n try:\n self._closed=True\n if self._name and self._name in _handlers:\n del _handlers[self._name]\n finally:\n _releaseLock()\n \n def handleError(self,record):\n ''\n\n\n\n\n\n\n\n\n\n \n if raiseExceptions and sys.stderr:\n t,v,tb=sys.exc_info()\n try:\n sys.stderr.write('--- Logging error ---\\n')\n traceback.print_exception(t,v,tb,None,sys.stderr)\n sys.stderr.write('Call stack:\\n')\n \n \n frame=tb.tb_frame\n while(frame and os.path.dirname(frame.f_code.co_filename)==\n __path__[0]):\n frame=frame.f_back\n if frame:\n traceback.print_stack(frame,file=sys.stderr)\n else:\n \n sys.stderr.write('Logged from file %s, line %s\\n'%(\n record.filename,record.lineno))\n \n try:\n sys.stderr.write('Message: %r\\n'\n 'Arguments: %s\\n'%(record.msg,\n record.args))\n except RecursionError:\n raise\n except Exception:\n sys.stderr.write('Unable to print the message and arguments'\n ' - possible formatting error.\\nUse the'\n ' traceback above to help find the error.\\n'\n )\n except OSError:\n pass\n finally:\n del t,v,tb\n \n def __repr__(self):\n level=getLevelName(self.level)\n return '<%s (%s)>'%(self.__class__.__name__,level)\n \nclass StreamHandler(Handler):\n ''\n\n\n\n \n \n terminator='\\n'\n \n def __init__(self,stream=None):\n ''\n\n\n\n \n Handler.__init__(self)\n if stream is None:\n stream=sys.stderr\n self.stream=stream\n \n def flush(self):\n ''\n\n \n self.acquire()\n try:\n if self.stream and hasattr(self.stream,\"flush\"):\n self.stream.flush()\n finally:\n self.release()\n \n def emit(self,record):\n ''\n\n\n\n\n\n\n\n\n \n try:\n msg=self.format(record)\n stream=self.stream\n \n stream.write(msg+self.terminator)\n self.flush()\n except RecursionError:\n raise\n except Exception:\n self.handleError(record)\n \n def setStream(self,stream):\n ''\n\n\n\n\n\n \n if stream is self.stream:\n result=None\n else:\n result=self.stream\n self.acquire()\n try:\n self.flush()\n self.stream=stream\n finally:\n self.release()\n return result\n \n def __repr__(self):\n level=getLevelName(self.level)\n name=getattr(self.stream,'name','')\n \n name=str(name)\n if name:\n name +=' '\n return '<%s %s(%s)>'%(self.__class__.__name__,name,level)\n \n __class_getitem__=classmethod(GenericAlias)\n \n \nclass FileHandler(StreamHandler):\n ''\n\n \n def __init__(self,filename,mode='a',encoding=None,delay=False,errors=None):\n ''\n\n \n \n filename=os.fspath(filename)\n \n \n self.baseFilename=os.path.abspath(filename)\n self.mode=mode\n self.encoding=encoding\n if \"b\"not in mode:\n self.encoding=io.text_encoding(encoding)\n self.errors=errors\n self.delay=delay\n \n \n \n self._builtin_open=open\n if delay:\n \n \n Handler.__init__(self)\n self.stream=None\n else:\n StreamHandler.__init__(self,self._open())\n \n def close(self):\n ''\n\n \n self.acquire()\n try:\n try:\n if 
self.stream:\n try:\n self.flush()\n finally:\n stream=self.stream\n self.stream=None\n if hasattr(stream,\"close\"):\n stream.close()\n finally:\n \n \n \n \n StreamHandler.close(self)\n finally:\n self.release()\n \n def _open(self):\n ''\n\n\n \n open_func=self._builtin_open\n return open_func(self.baseFilename,self.mode,\n encoding=self.encoding,errors=self.errors)\n \n def emit(self,record):\n ''\n\n\n\n\n\n\n\n \n if self.stream is None:\n if self.mode !='w'or not self._closed:\n self.stream=self._open()\n if self.stream:\n StreamHandler.emit(self,record)\n \n def __repr__(self):\n level=getLevelName(self.level)\n return '<%s %s (%s)>'%(self.__class__.__name__,self.baseFilename,level)\n \n \nclass _StderrHandler(StreamHandler):\n ''\n\n\n\n \n def __init__(self,level=NOTSET):\n ''\n\n \n Handler.__init__(self,level)\n \n @property\n def stream(self):\n return sys.stderr\n \n \n_defaultLastResort=_StderrHandler(WARNING)\nlastResort=_defaultLastResort\n\n\n\n\n\nclass PlaceHolder(object):\n ''\n\n\n\n \n def __init__(self,alogger):\n ''\n\n \n self.loggerMap={alogger:None}\n \n def append(self,alogger):\n ''\n\n \n if alogger not in self.loggerMap:\n self.loggerMap[alogger]=None\n \n \n \n \n \ndef setLoggerClass(klass):\n ''\n\n\n\n \n if klass !=Logger:\n if not issubclass(klass,Logger):\n raise TypeError(\"logger not derived from logging.Logger: \"\n +klass.__name__)\n global _loggerClass\n _loggerClass=klass\n \ndef getLoggerClass():\n ''\n\n \n return _loggerClass\n \nclass Manager(object):\n ''\n\n\n \n def __init__(self,rootnode):\n ''\n\n \n self.root=rootnode\n self.disable=0\n self.emittedNoHandlerWarning=False\n self.loggerDict={}\n self.loggerClass=None\n self.logRecordFactory=None\n \n @property\n def disable(self):\n return self._disable\n \n @disable.setter\n def disable(self,value):\n self._disable=_checkLevel(value)\n \n def getLogger(self,name):\n ''\n\n\n\n\n\n\n\n\n \n rv=None\n if not isinstance(name,str):\n raise TypeError('A logger name must be a string')\n _acquireLock()\n try:\n if name in self.loggerDict:\n rv=self.loggerDict[name]\n if isinstance(rv,PlaceHolder):\n ph=rv\n rv=(self.loggerClass or _loggerClass)(name)\n rv.manager=self\n self.loggerDict[name]=rv\n self._fixupChildren(ph,rv)\n self._fixupParents(rv)\n else:\n rv=(self.loggerClass or _loggerClass)(name)\n rv.manager=self\n self.loggerDict[name]=rv\n self._fixupParents(rv)\n finally:\n _releaseLock()\n return rv\n \n def setLoggerClass(self,klass):\n ''\n\n \n if klass !=Logger:\n if not issubclass(klass,Logger):\n raise TypeError(\"logger not derived from logging.Logger: \"\n +klass.__name__)\n self.loggerClass=klass\n \n def setLogRecordFactory(self,factory):\n ''\n\n\n \n self.logRecordFactory=factory\n \n def _fixupParents(self,alogger):\n ''\n\n\n \n name=alogger.name\n i=name.rfind(\".\")\n rv=None\n while(i >0)and not rv:\n substr=name[:i]\n if substr not in self.loggerDict:\n self.loggerDict[substr]=PlaceHolder(alogger)\n else:\n obj=self.loggerDict[substr]\n if isinstance(obj,Logger):\n rv=obj\n else:\n assert isinstance(obj,PlaceHolder)\n obj.append(alogger)\n i=name.rfind(\".\",0,i -1)\n if not rv:\n rv=self.root\n alogger.parent=rv\n \n def _fixupChildren(self,ph,alogger):\n ''\n\n\n \n name=alogger.name\n namelen=len(name)\n for c in ph.loggerMap.keys():\n \n if c.parent.name[:namelen]!=name:\n alogger.parent=c.parent\n c.parent=alogger\n \n def _clear_cache(self):\n ''\n\n\n \n \n _acquireLock()\n for logger in self.loggerDict.values():\n if isinstance(logger,Logger):\n 
logger._cache.clear()\n self.root._cache.clear()\n _releaseLock()\n \n \n \n \n \nclass Logger(Filterer):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,name,level=NOTSET):\n ''\n\n \n Filterer.__init__(self)\n self.name=name\n self.level=_checkLevel(level)\n self.parent=None\n self.propagate=True\n self.handlers=[]\n self.disabled=False\n self._cache={}\n \n def setLevel(self,level):\n ''\n\n \n self.level=_checkLevel(level)\n self.manager._clear_cache()\n \n def debug(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(DEBUG):\n self._log(DEBUG,msg,args,**kwargs)\n \n def info(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(INFO):\n self._log(INFO,msg,args,**kwargs)\n \n def warning(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(WARNING):\n self._log(WARNING,msg,args,**kwargs)\n \n def warn(self,msg,*args,**kwargs):\n warnings.warn(\"The 'warn' method is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n self.warning(msg,*args,**kwargs)\n \n def error(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(ERROR):\n self._log(ERROR,msg,args,**kwargs)\n \n def exception(self,msg,*args,exc_info=True,**kwargs):\n ''\n\n \n self.error(msg,*args,exc_info=exc_info,**kwargs)\n \n def critical(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(CRITICAL):\n self._log(CRITICAL,msg,args,**kwargs)\n \n def fatal(self,msg,*args,**kwargs):\n ''\n\n \n self.critical(msg,*args,**kwargs)\n \n def log(self,level,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if not isinstance(level,int):\n if raiseExceptions:\n raise TypeError(\"level must be an integer\")\n else:\n return\n if self.isEnabledFor(level):\n self._log(level,msg,args,**kwargs)\n \n def findCaller(self,stack_info=False,stacklevel=1):\n ''\n\n\n \n f=currentframe()\n \n \n if f is None:\n return \"(unknown file)\",0,\"(unknown function)\",None\n while stacklevel >0:\n next_f=f.f_back\n if next_f is None:\n \n \n break\n \n \n \n \n f=next_f\n if not _is_internal_frame(f):\n stacklevel -=1\n co=f.f_code\n sinfo=None\n if stack_info:\n with io.StringIO()as sio:\n sio.write(\"Stack (most recent call last):\\n\")\n traceback.print_stack(f,file=sio)\n sinfo=sio.getvalue()\n if sinfo[-1]=='\\n':\n sinfo=sinfo[:-1]\n return co.co_filename,f.f_lineno,co.co_name,sinfo\n \n def makeRecord(self,name,level,fn,lno,msg,args,exc_info,\n func=None,extra=None,sinfo=None):\n ''\n\n\n \n rv=_logRecordFactory(name,level,fn,lno,msg,args,exc_info,func,\n sinfo)\n if extra is not None:\n for key in extra:\n if(key in[\"message\",\"asctime\"])or(key in rv.__dict__):\n raise KeyError(\"Attempt to overwrite %r in LogRecord\"%key)\n rv.__dict__[key]=extra[key]\n return rv\n \n def _log(self,level,msg,args,exc_info=None,extra=None,stack_info=False,\n stacklevel=1):\n ''\n\n\n \n sinfo=None\n if _srcfile:\n \n \n \n try:\n fn,lno,func,sinfo=self.findCaller(stack_info,stacklevel)\n except ValueError:\n fn,lno,func=\"(unknown file)\",0,\"(unknown function)\"\n else:\n fn,lno,func=\"(unknown file)\",0,\"(unknown function)\"\n if exc_info:\n if isinstance(exc_info,BaseException):\n exc_info=(type(exc_info),exc_info,exc_info.__traceback__)\n elif not isinstance(exc_info,tuple):\n exc_info=sys.exc_info()\n record=self.makeRecord(self.name,level,fn,lno,msg,args,\n exc_info,func,extra,sinfo)\n self.handle(record)\n \n def handle(self,record):\n ''\n\n\n\n\n \n if self.disabled:\n return\n maybe_record=self.filter(record)\n if not maybe_record:\n return\n if 
isinstance(maybe_record,LogRecord):\n record=maybe_record\n self.callHandlers(record)\n \n def addHandler(self,hdlr):\n ''\n\n \n _acquireLock()\n try:\n if not(hdlr in self.handlers):\n self.handlers.append(hdlr)\n finally:\n _releaseLock()\n \n def removeHandler(self,hdlr):\n ''\n\n \n _acquireLock()\n try:\n if hdlr in self.handlers:\n self.handlers.remove(hdlr)\n finally:\n _releaseLock()\n \n def hasHandlers(self):\n ''\n\n\n\n\n\n\n\n \n c=self\n rv=False\n while c:\n if c.handlers:\n rv=True\n break\n if not c.propagate:\n break\n else:\n c=c.parent\n return rv\n \n def callHandlers(self,record):\n ''\n\n\n\n\n\n\n\n \n c=self\n found=0\n while c:\n for hdlr in c.handlers:\n found=found+1\n if record.levelno >=hdlr.level:\n hdlr.handle(record)\n if not c.propagate:\n c=None\n else:\n c=c.parent\n if(found ==0):\n if lastResort:\n if record.levelno >=lastResort.level:\n lastResort.handle(record)\n elif raiseExceptions and not self.manager.emittedNoHandlerWarning:\n sys.stderr.write(\"No handlers could be found for logger\"\n \" \\\"%s\\\"\\n\"%self.name)\n self.manager.emittedNoHandlerWarning=True\n \n def getEffectiveLevel(self):\n ''\n\n\n\n\n \n logger=self\n while logger:\n if logger.level:\n return logger.level\n logger=logger.parent\n return NOTSET\n \n def isEnabledFor(self,level):\n ''\n\n \n if self.disabled:\n return False\n \n try:\n return self._cache[level]\n except KeyError:\n _acquireLock()\n try:\n if self.manager.disable >=level:\n is_enabled=self._cache[level]=False\n else:\n is_enabled=self._cache[level]=(\n level >=self.getEffectiveLevel()\n )\n finally:\n _releaseLock()\n return is_enabled\n \n def getChild(self,suffix):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.root is not self:\n suffix='.'.join((self.name,suffix))\n return self.manager.getLogger(suffix)\n \n def getChildren(self):\n \n def _hierlevel(logger):\n if logger is logger.manager.root:\n return 0\n return 1+logger.name.count('.')\n \n d=self.manager.loggerDict\n _acquireLock()\n try:\n \n \n \n return set(item for item in d.values()\n if isinstance(item,Logger)and item.parent is self and\n _hierlevel(item)==1+_hierlevel(item.parent))\n finally:\n _releaseLock()\n \n def __repr__(self):\n level=getLevelName(self.getEffectiveLevel())\n return '<%s %s (%s)>'%(self.__class__.__name__,self.name,level)\n \n def __reduce__(self):\n if getLogger(self.name)is not self:\n import pickle\n raise pickle.PicklingError('logger cannot be pickled')\n return getLogger,(self.name,)\n \n \nclass RootLogger(Logger):\n ''\n\n\n\n \n def __init__(self,level):\n ''\n\n \n Logger.__init__(self,\"root\",level)\n \n def __reduce__(self):\n return getLogger,()\n \n_loggerClass=Logger\n\nclass LoggerAdapter(object):\n ''\n\n\n \n \n def __init__(self,logger,extra=None):\n ''\n\n\n\n\n\n\n\n\n \n self.logger=logger\n self.extra=extra\n \n def process(self,msg,kwargs):\n ''\n\n\n\n\n\n\n\n \n kwargs[\"extra\"]=self.extra\n return msg,kwargs\n \n \n \n \n def debug(self,msg,*args,**kwargs):\n ''\n\n \n self.log(DEBUG,msg,*args,**kwargs)\n \n def info(self,msg,*args,**kwargs):\n ''\n\n \n self.log(INFO,msg,*args,**kwargs)\n \n def warning(self,msg,*args,**kwargs):\n ''\n\n \n self.log(WARNING,msg,*args,**kwargs)\n \n def warn(self,msg,*args,**kwargs):\n warnings.warn(\"The 'warn' method is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n self.warning(msg,*args,**kwargs)\n \n def error(self,msg,*args,**kwargs):\n ''\n\n \n self.log(ERROR,msg,*args,**kwargs)\n \n def exception(self,msg,*args,exc_info=True,**kwargs):\n 
''\n\n \n self.log(ERROR,msg,*args,exc_info=exc_info,**kwargs)\n \n def critical(self,msg,*args,**kwargs):\n ''\n\n \n self.log(CRITICAL,msg,*args,**kwargs)\n \n def log(self,level,msg,*args,**kwargs):\n ''\n\n\n \n if self.isEnabledFor(level):\n msg,kwargs=self.process(msg,kwargs)\n self.logger.log(level,msg,*args,**kwargs)\n \n def isEnabledFor(self,level):\n ''\n\n \n return self.logger.isEnabledFor(level)\n \n def setLevel(self,level):\n ''\n\n \n self.logger.setLevel(level)\n \n def getEffectiveLevel(self):\n ''\n\n \n return self.logger.getEffectiveLevel()\n \n def hasHandlers(self):\n ''\n\n \n return self.logger.hasHandlers()\n \n def _log(self,level,msg,args,exc_info=None,extra=None,stack_info=False):\n ''\n\n \n return self.logger._log(\n level,\n msg,\n args,\n exc_info=exc_info,\n extra=extra,\n stack_info=stack_info,\n )\n \n @property\n def manager(self):\n return self.logger.manager\n \n @manager.setter\n def manager(self,value):\n self.logger.manager=value\n \n @property\n def name(self):\n return self.logger.name\n \n def __repr__(self):\n logger=self.logger\n level=getLevelName(logger.getEffectiveLevel())\n return '<%s %s (%s)>'%(self.__class__.__name__,logger.name,level)\n \n __class_getitem__=classmethod(GenericAlias)\n \nroot=RootLogger(WARNING)\nLogger.root=root\nLogger.manager=Manager(Logger.root)\n\n\n\n\n\ndef basicConfig(**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n _acquireLock()\n try:\n force=kwargs.pop('force',False)\n encoding=kwargs.pop('encoding',None)\n errors=kwargs.pop('errors','backslashreplace')\n if force:\n for h in root.handlers[:]:\n root.removeHandler(h)\n h.close()\n if len(root.handlers)==0:\n handlers=kwargs.pop(\"handlers\",None)\n if handlers is None:\n if \"stream\"in kwargs and \"filename\"in kwargs:\n raise ValueError(\"'stream' and 'filename' should not be \"\n \"specified together\")\n else:\n if \"stream\"in kwargs or \"filename\"in kwargs:\n raise ValueError(\"'stream' or 'filename' should not be \"\n \"specified together with 'handlers'\")\n if handlers is None:\n filename=kwargs.pop(\"filename\",None)\n mode=kwargs.pop(\"filemode\",'a')\n if filename:\n if 'b'in mode:\n errors=None\n else:\n encoding=io.text_encoding(encoding)\n h=FileHandler(filename,mode,\n encoding=encoding,errors=errors)\n else:\n stream=kwargs.pop(\"stream\",None)\n h=StreamHandler(stream)\n handlers=[h]\n dfs=kwargs.pop(\"datefmt\",None)\n style=kwargs.pop(\"style\",'%')\n if style not in _STYLES:\n raise ValueError('Style must be one of: %s'%','.join(\n _STYLES.keys()))\n fs=kwargs.pop(\"format\",_STYLES[style][1])\n fmt=Formatter(fs,dfs,style)\n for h in handlers:\n if h.formatter is None:\n h.setFormatter(fmt)\n root.addHandler(h)\n level=kwargs.pop(\"level\",None)\n if level is not None:\n root.setLevel(level)\n if kwargs:\n keys=', '.join(kwargs.keys())\n raise ValueError('Unrecognised argument(s): %s'%keys)\n finally:\n _releaseLock()\n \n \n \n \n \n \ndef getLogger(name=None):\n ''\n\n\n\n \n if not name or isinstance(name,str)and name ==root.name:\n return root\n return Logger.manager.getLogger(name)\n \ndef critical(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.critical(msg,*args,**kwargs)\n \ndef fatal(msg,*args,**kwargs):\n ''\n\n \n critical(msg,*args,**kwargs)\n \ndef error(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.error(msg,*args,**kwargs)\n \ndef 
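
For orientation, a short sketch of `basicConfig()` and `LoggerAdapter` as defined above; the format string and the `request_id` field are illustrative, not part of the bundled code:

```python
import logging

logging.basicConfig(
    level=logging.DEBUG,
    format="%(levelname)s %(name)s: %(message)s",
)
log = logging.getLogger("app")
log.info("root handler installed by basicConfig()")

# LoggerAdapter.process() injects the `extra` mapping into every record's kwargs,
# so the fields end up as attributes on the emitted LogRecord.
adapter = logging.LoggerAdapter(log, extra={"request_id": "abc123"})
adapter.warning("adapter message carries request_id on the record")
```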
exception(msg,*args,exc_info=True,**kwargs):\n ''\n\n\n\n \n error(msg,*args,exc_info=exc_info,**kwargs)\n \ndef warning(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.warning(msg,*args,**kwargs)\n \ndef warn(msg,*args,**kwargs):\n warnings.warn(\"The 'warn' function is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n warning(msg,*args,**kwargs)\n \ndef info(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.info(msg,*args,**kwargs)\n \ndef debug(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.debug(msg,*args,**kwargs)\n \ndef log(level,msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.log(level,msg,*args,**kwargs)\n \ndef disable(level=CRITICAL):\n ''\n\n \n root.manager.disable=level\n root.manager._clear_cache()\n \ndef shutdown(handlerList=_handlerList):\n ''\n\n\n\n\n \n for wr in reversed(handlerList[:]):\n \n \n try:\n h=wr()\n if h:\n try:\n h.acquire()\n \n \n \n if getattr(h,'flushOnClose',True):\n h.flush()\n h.close()\n except(OSError,ValueError):\n \n \n \n \n pass\n finally:\n h.release()\n except:\n if raiseExceptions:\n raise\n \n \n \nimport atexit\natexit.register(shutdown)\n\n\n\nclass NullHandler(Handler):\n ''\n\n\n\n\n\n\n\n \n def handle(self,record):\n ''\n \n def emit(self,record):\n ''\n \n def createLock(self):\n self.lock=None\n \n def _at_fork_reinit(self):\n pass\n \n \n \n_warnings_showwarning=None\n\ndef _showwarning(message,category,filename,lineno,file=None,line=None):\n ''\n\n\n\n\n\n \n if file is not None:\n if _warnings_showwarning is not None:\n _warnings_showwarning(message,category,filename,lineno,file,line)\n else:\n s=warnings.formatwarning(message,category,filename,lineno,line)\n logger=getLogger(\"py.warnings\")\n if not logger.handlers:\n logger.addHandler(NullHandler())\n \n \n logger.warning(str(s))\n \ndef captureWarnings(capture):\n ''\n\n\n\n \n global _warnings_showwarning\n if capture:\n if _warnings_showwarning is None:\n _warnings_showwarning=warnings.showwarning\n warnings.showwarning=_showwarning\n else:\n if _warnings_showwarning is not None:\n warnings.showwarning=_warnings_showwarning\n _warnings_showwarning=None\n", ["atexit", "collections.abc", "io", "os", "pickle", "re", "string", "sys", "threading", "time", "traceback", "types", "warnings", "weakref"], 1], "logging.brython_handlers": [".py", "import logging\n\nfrom browser.ajax import ajax\n\n\nclass XMLHTTPHandler(logging.Handler):\n ''\n\n\n \n def __init__(self,url,method=\"GET\"):\n ''\n\n\n \n logging.Handler.__init__(self)\n method=method.upper()\n if method not in[\"GET\",\"POST\"]:\n raise ValueError(\"method must be GET or POST\")\n self.url=url\n self.method=method\n \n def mapLogRecord(self,record):\n ''\n\n\n\n \n return record.__dict__\n \n def emit(self,record):\n ''\n\n\n\n \n try:\n req=ajax.open(self.method,self.url,sync=False)\n req.send(self.mapLogRecord(record))\n except:\n self.handleError(record)\n", ["browser.ajax", "logging"]], "email.contentmanager": [".py", "import binascii\nimport email.charset\nimport email.message\nimport email.errors\nfrom email import quoprimime\n\nclass ContentManager:\n\n def __init__(self):\n self.get_handlers={}\n self.set_handlers={}\n \n def add_get_handler(self,key,handler):\n self.get_handlers[key]=handler\n \n def get_content(self,msg,*args,**kw):\n content_type=msg.get_content_type()\n if content_type in self.get_handlers:\n return 
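
The Brython-specific `logging.brython_handlers.XMLHTTPHandler` above sends each record's `__dict__` to a URL via `browser.ajax`. A usage sketch (browser-only; the `/log` endpoint is hypothetical):

```python
import logging
from logging.brython_handlers import XMLHTTPHandler

handler = XMLHTTPHandler("/log", method="POST")   # only "GET" or "POST" is accepted
log = logging.getLogger("browser")
log.addHandler(handler)
log.error("sent to the server as an async ajax request")

# Optionally route warnings.warn() output to the "py.warnings" logger
logging.captureWarnings(True)
```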
self.get_handlers[content_type](msg,*args,**kw)\n maintype=msg.get_content_maintype()\n if maintype in self.get_handlers:\n return self.get_handlers[maintype](msg,*args,**kw)\n if ''in self.get_handlers:\n return self.get_handlers[''](msg,*args,**kw)\n raise KeyError(content_type)\n \n def add_set_handler(self,typekey,handler):\n self.set_handlers[typekey]=handler\n \n def set_content(self,msg,obj,*args,**kw):\n if msg.get_content_maintype()=='multipart':\n \n \n raise TypeError(\"set_content not valid on multipart\")\n handler=self._find_set_handler(msg,obj)\n msg.clear_content()\n handler(msg,obj,*args,**kw)\n \n def _find_set_handler(self,msg,obj):\n full_path_for_error=None\n for typ in type(obj).__mro__:\n if typ in self.set_handlers:\n return self.set_handlers[typ]\n qname=typ.__qualname__\n modname=getattr(typ,'__module__','')\n full_path='.'.join((modname,qname))if modname else qname\n if full_path_for_error is None:\n full_path_for_error=full_path\n if full_path in self.set_handlers:\n return self.set_handlers[full_path]\n if qname in self.set_handlers:\n return self.set_handlers[qname]\n name=typ.__name__\n if name in self.set_handlers:\n return self.set_handlers[name]\n if None in self.set_handlers:\n return self.set_handlers[None]\n raise KeyError(full_path_for_error)\n \n \nraw_data_manager=ContentManager()\n\n\ndef get_text_content(msg,errors='replace'):\n content=msg.get_payload(decode=True)\n charset=msg.get_param('charset','ASCII')\n return content.decode(charset,errors=errors)\nraw_data_manager.add_get_handler('text',get_text_content)\n\n\ndef get_non_text_content(msg):\n return msg.get_payload(decode=True)\nfor maintype in 'audio image video application'.split():\n raw_data_manager.add_get_handler(maintype,get_non_text_content)\ndel maintype\n\n\ndef get_message_content(msg):\n return msg.get_payload(0)\nfor subtype in 'rfc822 external-body'.split():\n raw_data_manager.add_get_handler('message/'+subtype,get_message_content)\ndel subtype\n\n\ndef get_and_fixup_unknown_message_content(msg):\n\n\n\n\n\n\n return bytes(msg.get_payload(0))\nraw_data_manager.add_get_handler('message',\nget_and_fixup_unknown_message_content)\n\n\ndef _prepare_set(msg,maintype,subtype,headers):\n msg['Content-Type']='/'.join((maintype,subtype))\n if headers:\n if not hasattr(headers[0],'name'):\n mp=msg.policy\n headers=[mp.header_factory(*mp.header_source_parse([header]))\n for header in headers]\n try:\n for header in headers:\n if header.defects:\n raise header.defects[0]\n msg[header.name]=header\n except email.errors.HeaderDefect as exc:\n raise ValueError(\"Invalid header: {}\".format(\n header.fold(policy=msg.policy)))from exc\n \n \ndef _finalize_set(msg,disposition,filename,cid,params):\n if disposition is None and filename is not None:\n disposition='attachment'\n if disposition is not None:\n msg['Content-Disposition']=disposition\n if filename is not None:\n msg.set_param('filename',\n filename,\n header='Content-Disposition',\n replace=True)\n if cid is not None:\n msg['Content-ID']=cid\n if params is not None:\n for key,value in params.items():\n msg.set_param(key,value)\n \n \n \n \n \n \ndef _encode_base64(data,max_line_length):\n encoded_lines=[]\n unencoded_bytes_per_line=max_line_length //4 *3\n for i in range(0,len(data),unencoded_bytes_per_line):\n thisline=data[i:i+unencoded_bytes_per_line]\n encoded_lines.append(binascii.b2a_base64(thisline).decode('ascii'))\n return ''.join(encoded_lines)\n \n \ndef _encode_text(string,charset,cte,policy):\n 
lines=string.encode(charset).splitlines()\n linesep=policy.linesep.encode('ascii')\n def embedded_body(lines):return linesep.join(lines)+linesep\n def normal_body(lines):return b'\\n'.join(lines)+b'\\n'\n if cte is None:\n \n if max((len(x)for x in lines),default=0)<=policy.max_line_length:\n try:\n return '7bit',normal_body(lines).decode('ascii')\n except UnicodeDecodeError:\n pass\n if policy.cte_type =='8bit':\n return '8bit',normal_body(lines).decode('ascii','surrogateescape')\n sniff=embedded_body(lines[:10])\n sniff_qp=quoprimime.body_encode(sniff.decode('latin-1'),\n policy.max_line_length)\n sniff_base64=binascii.b2a_base64(sniff)\n \n if len(sniff_qp)>len(sniff_base64):\n cte='base64'\n else:\n cte='quoted-printable'\n if len(lines)<=10:\n return cte,sniff_qp\n if cte =='7bit':\n data=normal_body(lines).decode('ascii')\n elif cte =='8bit':\n data=normal_body(lines).decode('ascii','surrogateescape')\n elif cte =='quoted-printable':\n data=quoprimime.body_encode(normal_body(lines).decode('latin-1'),\n policy.max_line_length)\n elif cte =='base64':\n data=_encode_base64(embedded_body(lines),policy.max_line_length)\n else:\n raise ValueError(\"Unknown content transfer encoding {}\".format(cte))\n return cte,data\n \n \ndef set_text_content(msg,string,subtype=\"plain\",charset='utf-8',cte=None,\ndisposition=None,filename=None,cid=None,\nparams=None,headers=None):\n _prepare_set(msg,'text',subtype,headers)\n cte,payload=_encode_text(string,charset,cte,msg.policy)\n msg.set_payload(payload)\n msg.set_param('charset',\n email.charset.ALIASES.get(charset,charset),\n replace=True)\n msg['Content-Transfer-Encoding']=cte\n _finalize_set(msg,disposition,filename,cid,params)\nraw_data_manager.add_set_handler(str,set_text_content)\n\n\ndef set_message_content(msg,message,subtype=\"rfc822\",cte=None,\ndisposition=None,filename=None,cid=None,\nparams=None,headers=None):\n if subtype =='partial':\n raise ValueError(\"message/partial is not supported for Message objects\")\n if subtype =='rfc822':\n if cte not in(None,'7bit','8bit','binary'):\n \n raise ValueError(\n \"message/rfc822 parts do not support cte={}\".format(cte))\n \n \n \n \n \n cte='8bit'if cte is None else cte\n elif subtype =='external-body':\n if cte not in(None,'7bit'):\n \n raise ValueError(\n \"message/external-body parts do not support cte={}\".format(cte))\n cte='7bit'\n elif cte is None:\n \n \n cte='7bit'\n _prepare_set(msg,'message',subtype,headers)\n msg.set_payload([message])\n msg['Content-Transfer-Encoding']=cte\n _finalize_set(msg,disposition,filename,cid,params)\nraw_data_manager.add_set_handler(email.message.Message,set_message_content)\n\n\ndef set_bytes_content(msg,data,maintype,subtype,cte='base64',\ndisposition=None,filename=None,cid=None,\nparams=None,headers=None):\n _prepare_set(msg,maintype,subtype,headers)\n if cte =='base64':\n data=_encode_base64(data,max_line_length=msg.policy.max_line_length)\n elif cte =='quoted-printable':\n \n \n \n data=binascii.b2a_qp(data,istext=False,header=False,quotetabs=True)\n data=data.decode('ascii')\n elif cte =='7bit':\n data=data.decode('ascii')\n elif cte in('8bit','binary'):\n data=data.decode('ascii','surrogateescape')\n msg.set_payload(data)\n msg['Content-Transfer-Encoding']=cte\n _finalize_set(msg,disposition,filename,cid,params)\nfor typ in(bytes,bytearray,memoryview):\n raw_data_manager.add_set_handler(typ,set_bytes_content)\ndel typ\n", ["binascii", "email", "email.charset", "email.errors", "email.message", "email.quoprimime"]], "email._policybase": [".py", 
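
`email.contentmanager` above dispatches `get_content`/`set_content` by MIME type and by Python type (`str`, `email.message.Message`, `bytes`). A sketch assuming the default `EmailPolicy`, whose content manager is the `raw_data_manager` defined here; the payloads and file name are illustrative:

```python
from email.message import EmailMessage

msg = EmailMessage()
msg.set_content("hello, world", subtype="plain", charset="utf-8")
print(msg["Content-Type"])                # text/plain; charset="utf-8"
print(msg["Content-Transfer-Encoding"])   # 7bit for a short ASCII body
print(msg.get_content())                  # 'hello, world\n'

img = EmailMessage()
img.set_content(b"\x89PNG fake bytes", maintype="image", subtype="png",
                filename="pixel.png")     # bytes are handled by set_bytes_content
print(img["Content-Transfer-Encoding"])   # base64
```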
"''\n\n\n\n\nimport abc\nfrom email import header\nfrom email import charset as _charset\nfrom email.utils import _has_surrogates\n\n__all__=[\n'Policy',\n'Compat32',\n'compat32',\n]\n\n\nclass _PolicyBase:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,**kw):\n ''\n\n\n\n \n for name,value in kw.items():\n if hasattr(self,name):\n super(_PolicyBase,self).__setattr__(name,value)\n else:\n raise TypeError(\n \"{!r} is an invalid keyword argument for {}\".format(\n name,self.__class__.__name__))\n \n def __repr__(self):\n args=[\"{}={!r}\".format(name,value)\n for name,value in self.__dict__.items()]\n return \"{}({})\".format(self.__class__.__name__,', '.join(args))\n \n def clone(self,**kw):\n ''\n\n\n\n\n \n newpolicy=self.__class__.__new__(self.__class__)\n for attr,value in self.__dict__.items():\n object.__setattr__(newpolicy,attr,value)\n for attr,value in kw.items():\n if not hasattr(self,attr):\n raise TypeError(\n \"{!r} is an invalid keyword argument for {}\".format(\n attr,self.__class__.__name__))\n object.__setattr__(newpolicy,attr,value)\n return newpolicy\n \n def __setattr__(self,name,value):\n if hasattr(self,name):\n msg=\"{!r} object attribute {!r} is read-only\"\n else:\n msg=\"{!r} object has no attribute {!r}\"\n raise AttributeError(msg.format(self.__class__.__name__,name))\n \n def __add__(self,other):\n ''\n\n\n\n \n return self.clone(**other.__dict__)\n \n \ndef _append_doc(doc,added_doc):\n doc=doc.rsplit('\\n',1)[0]\n added_doc=added_doc.split('\\n',1)[1]\n return doc+'\\n'+added_doc\n \ndef _extend_docstrings(cls):\n if cls.__doc__ and cls.__doc__.startswith('+'):\n cls.__doc__=_append_doc(cls.__bases__[0].__doc__,cls.__doc__)\n for name,attr in cls.__dict__.items():\n if attr.__doc__ and attr.__doc__.startswith('+'):\n for c in(c for base in cls.__bases__ for c in base.mro()):\n doc=getattr(getattr(c,name),'__doc__')\n if doc:\n attr.__doc__=_append_doc(doc,attr.__doc__)\n break\n return cls\n \n \nclass Policy(_PolicyBase,metaclass=abc.ABCMeta):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n raise_on_defect=False\n linesep='\\n'\n cte_type='8bit'\n max_line_length=78\n mangle_from_=False\n message_factory=None\n \n def handle_defect(self,obj,defect):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.raise_on_defect:\n raise defect\n self.register_defect(obj,defect)\n \n def register_defect(self,obj,defect):\n ''\n\n\n\n\n\n\n\n\n \n obj.defects.append(defect)\n \n def header_max_count(self,name):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return None\n \n @abc.abstractmethod\n def header_source_parse(self,sourcelines):\n ''\n\n\n\n\n \n raise NotImplementedError\n \n @abc.abstractmethod\n def header_store_parse(self,name,value):\n ''\n\n \n raise NotImplementedError\n \n @abc.abstractmethod\n def header_fetch_parse(self,name,value):\n ''\n\n\n\n\n\n \n raise NotImplementedError\n \n @abc.abstractmethod\n def fold(self,name,value):\n ''\n\n\n\n\n\n\n \n raise NotImplementedError\n \n @abc.abstractmethod\n def fold_binary(self,name,value):\n ''\n\n\n\n\n \n raise NotImplementedError\n \n \n@_extend_docstrings\nclass Compat32(Policy):\n\n ''\n\n\n \n \n mangle_from_=True\n \n def _sanitize_header(self,name,value):\n \n \n if not isinstance(value,str):\n \n return value\n if _has_surrogates(value):\n return header.Header(value,charset=_charset.UNKNOWN8BIT,\n header_name=name)\n else:\n return value\n \n def header_source_parse(self,sourcelines):\n ''\n\n\n\n\n\n \n 
name,value=sourcelines[0].split(':',1)\n value=value.lstrip(' \\t')+''.join(sourcelines[1:])\n return(name,value.rstrip('\\r\\n'))\n \n def header_store_parse(self,name,value):\n ''\n\n \n return(name,value)\n \n def header_fetch_parse(self,name,value):\n ''\n\n\n \n return self._sanitize_header(name,value)\n \n def fold(self,name,value):\n ''\n\n\n\n\n\n \n return self._fold(name,value,sanitize=True)\n \n def fold_binary(self,name,value):\n ''\n\n\n\n\n\n\n \n folded=self._fold(name,value,sanitize=self.cte_type =='7bit')\n return folded.encode('ascii','surrogateescape')\n \n def _fold(self,name,value,sanitize):\n parts=[]\n parts.append('%s: '%name)\n if isinstance(value,str):\n if _has_surrogates(value):\n if sanitize:\n h=header.Header(value,\n charset=_charset.UNKNOWN8BIT,\n header_name=name)\n else:\n \n \n \n \n \n \n parts.append(value)\n h=None\n else:\n h=header.Header(value,header_name=name)\n else:\n \n h=value\n if h is not None:\n \n \n maxlinelen=0\n if self.max_line_length is not None:\n maxlinelen=self.max_line_length\n parts.append(h.encode(linesep=self.linesep,maxlinelen=maxlinelen))\n parts.append(self.linesep)\n return ''.join(parts)\n \n \ncompat32=Compat32()\n", ["abc", "email", "email.charset", "email.header", "email.utils"]], "email.header": [".py", "\n\n\n\n\"\"\"Header encoding and decoding functionality.\"\"\"\n\n__all__=[\n'Header',\n'decode_header',\n'make_header',\n]\n\nimport re\nimport binascii\n\nimport email.quoprimime\nimport email.base64mime\n\nfrom email.errors import HeaderParseError\nfrom email import charset as _charset\nCharset=_charset.Charset\n\nNL='\\n'\nSPACE=' '\nBSPACE=b' '\nSPACE8=' '*8\nEMPTYSTRING=''\nMAXLINELEN=78\nFWS=' \\t'\n\nUSASCII=Charset('us-ascii')\nUTF8=Charset('utf-8')\n\n\necre=re.compile(r'''\n =\\? # literal =?\n (?P[^?]*?) # non-greedy up to the next ? is the charset\n \\? # literal ?\n (?P[qQbB]) # either a \"q\" or a \"b\", case insensitive\n \\? # literal ?\n (?P.*?) 
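
`email._policybase` above makes policy instances immutable: attributes are set once in `__init__`, and variants are derived with `clone()`. A small sketch using the `compat32` instance defined above:

```python
from email._policybase import compat32

crlf = compat32.clone(linesep="\r\n", max_line_length=100)
print(repr(crlf.fold("Subject", "Hello")))   # 'Subject: Hello\r\n'

try:
    compat32.linesep = "\r\n"                # direct assignment is rejected
except AttributeError as err:
    print(err)   # 'Compat32' object attribute 'linesep' is read-only
```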
# non-greedy up to the next ?= is the encoded string\n \\?= # literal ?=\n ''',re.VERBOSE |re.MULTILINE)\n\n\n\n\nfcre=re.compile(r'[\\041-\\176]+:$')\n\n\n\n_embedded_header=re.compile(r'\\n[^ \\t]+:')\n\n\n\n_max_append=email.quoprimime._max_append\n\n\ndef decode_header(header):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n if hasattr(header,'_chunks'):\n return[(_charset._encode(string,str(charset)),str(charset))\n for string,charset in header._chunks]\n \n if not ecre.search(header):\n return[(header,None)]\n \n \n \n words=[]\n for line in header.splitlines():\n parts=ecre.split(line)\n first=True\n while parts:\n unencoded=parts.pop(0)\n if first:\n unencoded=unencoded.lstrip()\n first=False\n if unencoded:\n words.append((unencoded,None,None))\n if parts:\n charset=parts.pop(0).lower()\n encoding=parts.pop(0).lower()\n encoded=parts.pop(0)\n words.append((encoded,encoding,charset))\n \n \n droplist=[]\n for n,w in enumerate(words):\n if n >1 and w[1]and words[n -2][1]and words[n -1][0].isspace():\n droplist.append(n -1)\n for d in reversed(droplist):\n del words[d]\n \n \n \n \n decoded_words=[]\n for encoded_string,encoding,charset in words:\n if encoding is None:\n \n decoded_words.append((encoded_string,charset))\n elif encoding =='q':\n word=email.quoprimime.header_decode(encoded_string)\n decoded_words.append((word,charset))\n elif encoding =='b':\n paderr=len(encoded_string)%4\n if paderr:\n encoded_string +='==='[:4 -paderr]\n try:\n word=email.base64mime.decode(encoded_string)\n except binascii.Error:\n raise HeaderParseError('Base64 decoding error')\n else:\n decoded_words.append((word,charset))\n else:\n raise AssertionError('Unexpected encoding: '+encoding)\n \n \n collapsed=[]\n last_word=last_charset=None\n for word,charset in decoded_words:\n if isinstance(word,str):\n word=bytes(word,'raw-unicode-escape')\n if last_word is None:\n last_word=word\n last_charset=charset\n elif charset !=last_charset:\n collapsed.append((last_word,last_charset))\n last_word=word\n last_charset=charset\n elif last_charset is None:\n last_word +=BSPACE+word\n else:\n last_word +=word\n collapsed.append((last_word,last_charset))\n return collapsed\n \n \ndef make_header(decoded_seq,maxlinelen=None,header_name=None,\ncontinuation_ws=' '):\n ''\n\n\n\n\n\n\n\n\n \n h=Header(maxlinelen=maxlinelen,header_name=header_name,\n continuation_ws=continuation_ws)\n for s,charset in decoded_seq:\n \n if charset is not None and not isinstance(charset,Charset):\n charset=Charset(charset)\n h.append(s,charset)\n return h\n \n \nclass Header:\n def __init__(self,s=None,charset=None,\n maxlinelen=None,header_name=None,\n continuation_ws=' ',errors='strict'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if charset is None:\n charset=USASCII\n elif not isinstance(charset,Charset):\n charset=Charset(charset)\n self._charset=charset\n self._continuation_ws=continuation_ws\n self._chunks=[]\n if s is not None:\n self.append(s,charset,errors)\n if maxlinelen is None:\n maxlinelen=MAXLINELEN\n self._maxlinelen=maxlinelen\n if header_name is None:\n self._headerlen=0\n else:\n \n self._headerlen=len(header_name)+2\n \n def __str__(self):\n ''\n self._normalize()\n uchunks=[]\n lastcs=None\n lastspace=None\n for string,charset in self._chunks:\n \n \n \n \n \n \n nextcs=charset\n if nextcs ==_charset.UNKNOWN8BIT:\n original_bytes=string.encode('ascii','surrogateescape')\n string=original_bytes.decode('ascii','replace')\n if uchunks:\n hasspace=string and self._nonctext(string[0])\n if lastcs not 
in(None,'us-ascii'):\n if nextcs in(None,'us-ascii')and not hasspace:\n uchunks.append(SPACE)\n nextcs=None\n elif nextcs not in(None,'us-ascii')and not lastspace:\n uchunks.append(SPACE)\n lastspace=string and self._nonctext(string[-1])\n lastcs=nextcs\n uchunks.append(string)\n return EMPTYSTRING.join(uchunks)\n \n \n \n def __eq__(self,other):\n \n \n \n return other ==str(self)\n \n def append(self,s,charset=None,errors='strict'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if charset is None:\n charset=self._charset\n elif not isinstance(charset,Charset):\n charset=Charset(charset)\n if not isinstance(s,str):\n input_charset=charset.input_codec or 'us-ascii'\n if input_charset ==_charset.UNKNOWN8BIT:\n s=s.decode('us-ascii','surrogateescape')\n else:\n s=s.decode(input_charset,errors)\n \n \n output_charset=charset.output_codec or 'us-ascii'\n if output_charset !=_charset.UNKNOWN8BIT:\n try:\n s.encode(output_charset,errors)\n except UnicodeEncodeError:\n if output_charset !='us-ascii':\n raise\n charset=UTF8\n self._chunks.append((s,charset))\n \n def _nonctext(self,s):\n ''\n \n return s.isspace()or s in('(',')','\\\\')\n \n def encode(self,splitchars=';, \\t',maxlinelen=None,linesep='\\n'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._normalize()\n if maxlinelen is None:\n maxlinelen=self._maxlinelen\n \n \n \n if maxlinelen ==0:\n maxlinelen=1000000\n formatter=_ValueFormatter(self._headerlen,maxlinelen,\n self._continuation_ws,splitchars)\n lastcs=None\n hasspace=lastspace=None\n for string,charset in self._chunks:\n if hasspace is not None:\n hasspace=string and self._nonctext(string[0])\n if lastcs not in(None,'us-ascii'):\n if not hasspace or charset not in(None,'us-ascii'):\n formatter.add_transition()\n elif charset not in(None,'us-ascii')and not lastspace:\n formatter.add_transition()\n lastspace=string and self._nonctext(string[-1])\n lastcs=charset\n hasspace=False\n lines=string.splitlines()\n if lines:\n formatter.feed('',lines[0],charset)\n else:\n formatter.feed('','',charset)\n for line in lines[1:]:\n formatter.newline()\n if charset.header_encoding is not None:\n formatter.feed(self._continuation_ws,' '+line.lstrip(),\n charset)\n else:\n sline=line.lstrip()\n fws=line[:len(line)-len(sline)]\n formatter.feed(fws,sline,charset)\n if len(lines)>1:\n formatter.newline()\n if self._chunks:\n formatter.add_transition()\n value=formatter._str(linesep)\n if _embedded_header.search(value):\n raise HeaderParseError(\"header value appears to contain \"\n \"an embedded header: {!r}\".format(value))\n return value\n \n def _normalize(self):\n \n \n chunks=[]\n last_charset=None\n last_chunk=[]\n for string,charset in self._chunks:\n if charset ==last_charset:\n last_chunk.append(string)\n else:\n if last_charset is not None:\n chunks.append((SPACE.join(last_chunk),last_charset))\n last_chunk=[string]\n last_charset=charset\n if last_chunk:\n chunks.append((SPACE.join(last_chunk),last_charset))\n self._chunks=chunks\n \n \nclass _ValueFormatter:\n def __init__(self,headerlen,maxlen,continuation_ws,splitchars):\n self._maxlen=maxlen\n self._continuation_ws=continuation_ws\n self._continuation_ws_len=len(continuation_ws)\n self._splitchars=splitchars\n self._lines=[]\n self._current_line=_Accumulator(headerlen)\n \n def _str(self,linesep):\n self.newline()\n return linesep.join(self._lines)\n \n def __str__(self):\n return self._str(NL)\n \n def newline(self):\n end_of_line=self._current_line.pop()\n if end_of_line !=(' ',''):\n 
self._current_line.push(*end_of_line)\n if len(self._current_line)>0:\n if self._current_line.is_onlyws()and self._lines:\n self._lines[-1]+=str(self._current_line)\n else:\n self._lines.append(str(self._current_line))\n self._current_line.reset()\n \n def add_transition(self):\n self._current_line.push(' ','')\n \n def feed(self,fws,string,charset):\n \n \n \n \n \n if charset.header_encoding is None:\n self._ascii_split(fws,string,self._splitchars)\n return\n \n \n \n \n \n \n \n encoded_lines=charset.header_encode_lines(string,self._maxlengths())\n \n \n try:\n first_line=encoded_lines.pop(0)\n except IndexError:\n \n return\n if first_line is not None:\n self._append_chunk(fws,first_line)\n try:\n last_line=encoded_lines.pop()\n except IndexError:\n \n return\n self.newline()\n self._current_line.push(self._continuation_ws,last_line)\n \n for line in encoded_lines:\n self._lines.append(self._continuation_ws+line)\n \n def _maxlengths(self):\n \n yield self._maxlen -len(self._current_line)\n while True:\n yield self._maxlen -self._continuation_ws_len\n \n def _ascii_split(self,fws,string,splitchars):\n \n \n \n \n \n \n \n \n \n \n \n \n \n parts=re.split(\"([\"+FWS+\"]+)\",fws+string)\n if parts[0]:\n parts[:0]=['']\n else:\n parts.pop(0)\n for fws,part in zip(*[iter(parts)]*2):\n self._append_chunk(fws,part)\n \n def _append_chunk(self,fws,string):\n self._current_line.push(fws,string)\n if len(self._current_line)>self._maxlen:\n \n \n for ch in self._splitchars:\n for i in range(self._current_line.part_count()-1,0,-1):\n if ch.isspace():\n fws=self._current_line[i][0]\n if fws and fws[0]==ch:\n break\n prevpart=self._current_line[i -1][1]\n if prevpart and prevpart[-1]==ch:\n break\n else:\n continue\n break\n else:\n fws,part=self._current_line.pop()\n if self._current_line._initial_size >0:\n \n self.newline()\n if not fws:\n \n \n fws=' '\n self._current_line.push(fws,part)\n return\n remainder=self._current_line.pop_from(i)\n self._lines.append(str(self._current_line))\n self._current_line.reset(remainder)\n \n \nclass _Accumulator(list):\n\n def __init__(self,initial_size=0):\n self._initial_size=initial_size\n super().__init__()\n \n def push(self,fws,string):\n self.append((fws,string))\n \n def pop_from(self,i=0):\n popped=self[i:]\n self[i:]=[]\n return popped\n \n def pop(self):\n if self.part_count()==0:\n return('','')\n return super().pop()\n \n def __len__(self):\n return sum((len(fws)+len(part)for fws,part in self),\n self._initial_size)\n \n def __str__(self):\n return EMPTYSTRING.join((EMPTYSTRING.join((fws,part))\n for fws,part in self))\n \n def reset(self,startval=None):\n if startval is None:\n startval=[]\n self[:]=startval\n self._initial_size=0\n \n def is_onlyws(self):\n return self._initial_size ==0 and(not self or str(self).isspace())\n \n def part_count(self):\n return super().__len__()\n", ["binascii", "email", "email.base64mime", "email.charset", "email.errors", "email.quoprimime", "re"]], "email._encoded_words": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport re\nimport base64\nimport binascii\nimport functools\nfrom string import ascii_letters,digits\nfrom email import errors\n\n__all__=['decode_q',\n'encode_q',\n'decode_b',\n'encode_b',\n'len_q',\n'len_b',\n'decode',\n'encode',\n]\n\n\n\n\n\n\n_q_byte_subber=functools.partial(re.compile(br'=([a-fA-F0-9]{2})').sub,\nlambda m:bytes.fromhex(m.group(1).decode()))\n\ndef decode_q(encoded):\n encoded=encoded.replace(b'_',b' ')\n return 
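
`email.header` above implements RFC 2047 encoded words. A round-trip sketch with `Header`, `decode_header` and `make_header`; the subject text is illustrative:

```python
from email.header import Header, decode_header, make_header

h = Header("Café réservé", charset="utf-8", header_name="Subject")
encoded = h.encode()                  # one or more '=?utf-8?...?=' encoded words
parts = decode_header(encoded)        # [(b'Caf\xc3\xa9 r\xc3\xa9serv\xc3\xa9', 'utf-8')]
print(str(make_header(parts)))        # Café réservé
```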
_q_byte_subber(encoded),[]\n \n \n \nclass _QByteMap(dict):\n\n safe=b'-!*+/'+ascii_letters.encode('ascii')+digits.encode('ascii')\n \n def __missing__(self,key):\n if key in self.safe:\n self[key]=chr(key)\n else:\n self[key]=\"={:02X}\".format(key)\n return self[key]\n \n_q_byte_map=_QByteMap()\n\n\n_q_byte_map[ord(' ')]='_'\n\ndef encode_q(bstring):\n return ''.join(_q_byte_map[x]for x in bstring)\n \ndef len_q(bstring):\n return sum(len(_q_byte_map[x])for x in bstring)\n \n \n \n \n \n \ndef decode_b(encoded):\n\n\n pad_err=len(encoded)%4\n missing_padding=b'==='[:4 -pad_err]if pad_err else b''\n try:\n return(\n base64.b64decode(encoded+missing_padding,validate=True),\n [errors.InvalidBase64PaddingDefect()]if pad_err else[],\n )\n except binascii.Error:\n \n \n \n \n \n try:\n return(\n base64.b64decode(encoded,validate=False),\n [errors.InvalidBase64CharactersDefect()],\n )\n except binascii.Error:\n \n \n try:\n return(\n base64.b64decode(encoded+b'==',validate=False),\n [errors.InvalidBase64CharactersDefect(),\n errors.InvalidBase64PaddingDefect()],\n )\n except binascii.Error:\n \n \n \n \n \n return encoded,[errors.InvalidBase64LengthDefect()]\n \ndef encode_b(bstring):\n return base64.b64encode(bstring).decode('ascii')\n \ndef len_b(bstring):\n groups_of_3,leftover=divmod(len(bstring),3)\n \n return groups_of_3 *4+(4 if leftover else 0)\n \n \n_cte_decoders={\n'q':decode_q,\n'b':decode_b,\n}\n\ndef decode(ew):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n _,charset,cte,cte_string,_=ew.split('?')\n charset,_,lang=charset.partition('*')\n cte=cte.lower()\n \n bstring=cte_string.encode('ascii','surrogateescape')\n bstring,defects=_cte_decoders[cte](bstring)\n \n try:\n string=bstring.decode(charset)\n except UnicodeDecodeError:\n defects.append(errors.UndecodableBytesDefect(\"Encoded word \"\n f\"contains bytes not decodable using {charset !r} charset\"))\n string=bstring.decode(charset,'surrogateescape')\n except(LookupError,UnicodeEncodeError):\n string=bstring.decode('ascii','surrogateescape')\n if charset.lower()!='unknown-8bit':\n defects.append(errors.CharsetError(f\"Unknown charset {charset !r} \"\n f\"in encoded word; decoded as unknown bytes\"))\n return string,charset,lang,defects\n \n \n_cte_encoders={\n'q':encode_q,\n'b':encode_b,\n}\n\n_cte_encode_length={\n'q':len_q,\n'b':len_b,\n}\n\ndef encode(string,charset='utf-8',encoding=None,lang=''):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if charset =='unknown-8bit':\n bstring=string.encode('ascii','surrogateescape')\n else:\n bstring=string.encode(charset)\n if encoding is None:\n qlen=_cte_encode_length['q'](bstring)\n blen=_cte_encode_length['b'](bstring)\n \n encoding='q'if qlen -blen <5 else 'b'\n encoded=_cte_encoders[encoding](bstring)\n if lang:\n lang='*'+lang\n return \"=?{}{}?{}?{}?=\".format(charset,lang,encoding,encoded)\n", ["base64", "binascii", "email", "email.errors", "functools", "re", "string"]], "email._header_value_parser": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport re\nimport sys\nimport urllib\nfrom string import hexdigits\nfrom operator import itemgetter\nfrom email import _encoded_words as _ew\nfrom email import errors\nfrom email import utils\n\n\n\n\n\nWSP=set(' \\t')\nCFWS_LEADER=WSP |set('(')\nSPECIALS=set(r'()<>@,:;.\\\"[]')\nATOM_ENDS=SPECIALS |WSP\nDOT_ATOM_ENDS=ATOM_ENDS -set('.')\n\nPHRASE_ENDS=SPECIALS -set('.\"(')\nTSPECIALS=(SPECIALS |set('/?='))-set('.')\nTOKEN_ENDS=TSPECIALS 
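
`email._encoded_words` above is the low-level encoded-word codec used by the header parser; `encode()` picks the shorter of the 'q' and 'b' encodings. A quick sketch:

```python
from email import _encoded_words as _ew

ew = _ew.encode("héllo", charset="utf-8")
print(ew)                                     # =?utf-8?q?h=C3=A9llo?=
text, charset, lang, defects = _ew.decode(ew)
print(text, charset, defects)                 # héllo utf-8 []
```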
|WSP\nASPECIALS=TSPECIALS |set(\"*'%\")\nATTRIBUTE_ENDS=ASPECIALS |WSP\nEXTENDED_ATTRIBUTE_ENDS=ATTRIBUTE_ENDS -set('%')\n\ndef quote_string(value):\n return '\"'+str(value).replace('\\\\','\\\\\\\\').replace('\"',r'\\\"')+'\"'\n \n \nrfc2047_matcher=re.compile(r'''\n =\\? # literal =?\n [^?]* # charset\n \\? # literal ?\n [qQbB] # literal 'q' or 'b', case insensitive\n \\? # literal ?\n .*? # encoded word\n \\?= # literal ?=\n''',re.VERBOSE |re.MULTILINE)\n\n\n\n\n\n\nclass TokenList(list):\n\n token_type=None\n syntactic_break=True\n ew_combine_allowed=True\n \n def __init__(self,*args,**kw):\n super().__init__(*args,**kw)\n self.defects=[]\n \n def __str__(self):\n return ''.join(str(x)for x in self)\n \n def __repr__(self):\n return '{}({})'.format(self.__class__.__name__,\n super().__repr__())\n \n @property\n def value(self):\n return ''.join(x.value for x in self if x.value)\n \n @property\n def all_defects(self):\n return sum((x.all_defects for x in self),self.defects)\n \n def startswith_fws(self):\n return self[0].startswith_fws()\n \n @property\n def as_ew_allowed(self):\n ''\n return all(part.as_ew_allowed for part in self)\n \n @property\n def comments(self):\n comments=[]\n for token in self:\n comments.extend(token.comments)\n return comments\n \n def fold(self,*,policy):\n return _refold_parse_tree(self,policy=policy)\n \n def pprint(self,indent=''):\n print(self.ppstr(indent=indent))\n \n def ppstr(self,indent=''):\n return '\\n'.join(self._pp(indent=indent))\n \n def _pp(self,indent=''):\n yield '{}{}/{}('.format(\n indent,\n self.__class__.__name__,\n self.token_type)\n for token in self:\n if not hasattr(token,'_pp'):\n yield(indent+' !! invalid element in token '\n 'list: {!r}'.format(token))\n else:\n yield from token._pp(indent+' ')\n if self.defects:\n extra=' Defects: {}'.format(self.defects)\n else:\n extra=''\n yield '{}){}'.format(indent,extra)\n \n \nclass WhiteSpaceTokenList(TokenList):\n\n @property\n def value(self):\n return ' '\n \n @property\n def comments(self):\n return[x.content for x in self if x.token_type =='comment']\n \n \nclass UnstructuredTokenList(TokenList):\n token_type='unstructured'\n \n \nclass Phrase(TokenList):\n token_type='phrase'\n \nclass Word(TokenList):\n token_type='word'\n \n \nclass CFWSList(WhiteSpaceTokenList):\n token_type='cfws'\n \n \nclass Atom(TokenList):\n token_type='atom'\n \n \nclass Token(TokenList):\n token_type='token'\n encode_as_ew=False\n \n \nclass EncodedWord(TokenList):\n token_type='encoded-word'\n cte=None\n charset=None\n lang=None\n \n \nclass QuotedString(TokenList):\n\n token_type='quoted-string'\n \n @property\n def content(self):\n for x in self:\n if x.token_type =='bare-quoted-string':\n return x.value\n \n @property\n def quoted_value(self):\n res=[]\n for x in self:\n if x.token_type =='bare-quoted-string':\n res.append(str(x))\n else:\n res.append(x.value)\n return ''.join(res)\n \n @property\n def stripped_value(self):\n for token in self:\n if token.token_type =='bare-quoted-string':\n return token.value\n \n \nclass BareQuotedString(QuotedString):\n\n token_type='bare-quoted-string'\n \n def __str__(self):\n return quote_string(''.join(str(x)for x in self))\n \n @property\n def value(self):\n return ''.join(str(x)for x in self)\n \n \nclass Comment(WhiteSpaceTokenList):\n\n token_type='comment'\n \n def __str__(self):\n return ''.join(sum([\n [\"(\"],\n [self.quote(x)for x in self],\n [\")\"],\n ],[]))\n \n def quote(self,value):\n if value.token_type =='comment':\n return str(value)\n return 
str(value).replace('\\\\','\\\\\\\\').replace(\n '(',r'\\(').replace(\n ')',r'\\)')\n \n @property\n def content(self):\n return ''.join(str(x)for x in self)\n \n @property\n def comments(self):\n return[self.content]\n \nclass AddressList(TokenList):\n\n token_type='address-list'\n \n @property\n def addresses(self):\n return[x for x in self if x.token_type =='address']\n \n @property\n def mailboxes(self):\n return sum((x.mailboxes\n for x in self if x.token_type =='address'),[])\n \n @property\n def all_mailboxes(self):\n return sum((x.all_mailboxes\n for x in self if x.token_type =='address'),[])\n \n \nclass Address(TokenList):\n\n token_type='address'\n \n @property\n def display_name(self):\n if self[0].token_type =='group':\n return self[0].display_name\n \n @property\n def mailboxes(self):\n if self[0].token_type =='mailbox':\n return[self[0]]\n elif self[0].token_type =='invalid-mailbox':\n return[]\n return self[0].mailboxes\n \n @property\n def all_mailboxes(self):\n if self[0].token_type =='mailbox':\n return[self[0]]\n elif self[0].token_type =='invalid-mailbox':\n return[self[0]]\n return self[0].all_mailboxes\n \nclass MailboxList(TokenList):\n\n token_type='mailbox-list'\n \n @property\n def mailboxes(self):\n return[x for x in self if x.token_type =='mailbox']\n \n @property\n def all_mailboxes(self):\n return[x for x in self\n if x.token_type in('mailbox','invalid-mailbox')]\n \n \nclass GroupList(TokenList):\n\n token_type='group-list'\n \n @property\n def mailboxes(self):\n if not self or self[0].token_type !='mailbox-list':\n return[]\n return self[0].mailboxes\n \n @property\n def all_mailboxes(self):\n if not self or self[0].token_type !='mailbox-list':\n return[]\n return self[0].all_mailboxes\n \n \nclass Group(TokenList):\n\n token_type=\"group\"\n \n @property\n def mailboxes(self):\n if self[2].token_type !='group-list':\n return[]\n return self[2].mailboxes\n \n @property\n def all_mailboxes(self):\n if self[2].token_type !='group-list':\n return[]\n return self[2].all_mailboxes\n \n @property\n def display_name(self):\n return self[0].display_name\n \n \nclass NameAddr(TokenList):\n\n token_type='name-addr'\n \n @property\n def display_name(self):\n if len(self)==1:\n return None\n return self[0].display_name\n \n @property\n def local_part(self):\n return self[-1].local_part\n \n @property\n def domain(self):\n return self[-1].domain\n \n @property\n def route(self):\n return self[-1].route\n \n @property\n def addr_spec(self):\n return self[-1].addr_spec\n \n \nclass AngleAddr(TokenList):\n\n token_type='angle-addr'\n \n @property\n def local_part(self):\n for x in self:\n if x.token_type =='addr-spec':\n return x.local_part\n \n @property\n def domain(self):\n for x in self:\n if x.token_type =='addr-spec':\n return x.domain\n \n @property\n def route(self):\n for x in self:\n if x.token_type =='obs-route':\n return x.domains\n \n @property\n def addr_spec(self):\n for x in self:\n if x.token_type =='addr-spec':\n if x.local_part:\n return x.addr_spec\n else:\n return quote_string(x.local_part)+x.addr_spec\n else:\n return '<>'\n \n \nclass ObsRoute(TokenList):\n\n token_type='obs-route'\n \n @property\n def domains(self):\n return[x.domain for x in self if x.token_type =='domain']\n \n \nclass Mailbox(TokenList):\n\n token_type='mailbox'\n \n @property\n def display_name(self):\n if self[0].token_type =='name-addr':\n return self[0].display_name\n \n @property\n def local_part(self):\n return self[0].local_part\n \n @property\n def domain(self):\n return 
self[0].domain\n \n @property\n def route(self):\n if self[0].token_type =='name-addr':\n return self[0].route\n \n @property\n def addr_spec(self):\n return self[0].addr_spec\n \n \nclass InvalidMailbox(TokenList):\n\n token_type='invalid-mailbox'\n \n @property\n def display_name(self):\n return None\n \n local_part=domain=route=addr_spec=display_name\n \n \nclass Domain(TokenList):\n\n token_type='domain'\n as_ew_allowed=False\n \n @property\n def domain(self):\n return ''.join(super().value.split())\n \n \nclass DotAtom(TokenList):\n token_type='dot-atom'\n \n \nclass DotAtomText(TokenList):\n token_type='dot-atom-text'\n as_ew_allowed=True\n \n \nclass NoFoldLiteral(TokenList):\n token_type='no-fold-literal'\n as_ew_allowed=False\n \n \nclass AddrSpec(TokenList):\n\n token_type='addr-spec'\n as_ew_allowed=False\n \n @property\n def local_part(self):\n return self[0].local_part\n \n @property\n def domain(self):\n if len(self)<3:\n return None\n return self[-1].domain\n \n @property\n def value(self):\n if len(self)<3:\n return self[0].value\n return self[0].value.rstrip()+self[1].value+self[2].value.lstrip()\n \n @property\n def addr_spec(self):\n nameset=set(self.local_part)\n if len(nameset)>len(nameset -DOT_ATOM_ENDS):\n lp=quote_string(self.local_part)\n else:\n lp=self.local_part\n if self.domain is not None:\n return lp+'@'+self.domain\n return lp\n \n \nclass ObsLocalPart(TokenList):\n\n token_type='obs-local-part'\n as_ew_allowed=False\n \n \nclass DisplayName(Phrase):\n\n token_type='display-name'\n ew_combine_allowed=False\n \n @property\n def display_name(self):\n res=TokenList(self)\n if len(res)==0:\n return res.value\n if res[0].token_type =='cfws':\n res.pop(0)\n else:\n if res[0][0].token_type =='cfws':\n res[0]=TokenList(res[0][1:])\n if res[-1].token_type =='cfws':\n res.pop()\n else:\n if res[-1][-1].token_type =='cfws':\n res[-1]=TokenList(res[-1][:-1])\n return res.value\n \n @property\n def value(self):\n quote=False\n if self.defects:\n quote=True\n else:\n for x in self:\n if x.token_type =='quoted-string':\n quote=True\n if len(self)!=0 and quote:\n pre=post=''\n if self[0].token_type =='cfws'or self[0][0].token_type =='cfws':\n pre=' '\n if self[-1].token_type =='cfws'or self[-1][-1].token_type =='cfws':\n post=' '\n return pre+quote_string(self.display_name)+post\n else:\n return super().value\n \n \nclass LocalPart(TokenList):\n\n token_type='local-part'\n as_ew_allowed=False\n \n @property\n def value(self):\n if self[0].token_type ==\"quoted-string\":\n return self[0].quoted_value\n else:\n return self[0].value\n \n @property\n def local_part(self):\n \n res=[DOT]\n last=DOT\n last_is_tl=False\n for tok in self[0]+[DOT]:\n if tok.token_type =='cfws':\n continue\n if(last_is_tl and tok.token_type =='dot'and\n last[-1].token_type =='cfws'):\n res[-1]=TokenList(last[:-1])\n is_tl=isinstance(tok,TokenList)\n if(is_tl and last.token_type =='dot'and\n tok[0].token_type =='cfws'):\n res.append(TokenList(tok[1:]))\n else:\n res.append(tok)\n last=res[-1]\n last_is_tl=is_tl\n res=TokenList(res[1:-1])\n return res.value\n \n \nclass DomainLiteral(TokenList):\n\n token_type='domain-literal'\n as_ew_allowed=False\n \n @property\n def domain(self):\n return ''.join(super().value.split())\n \n @property\n def ip(self):\n for x in self:\n if x.token_type =='ptext':\n return x.value\n \n \nclass MIMEVersion(TokenList):\n\n token_type='mime-version'\n major=None\n minor=None\n \n \nclass Parameter(TokenList):\n\n token_type='parameter'\n sectioned=False\n extended=False\n 
charset='us-ascii'\n \n @property\n def section_number(self):\n \n \n return self[1].number if self.sectioned else 0\n \n @property\n def param_value(self):\n \n for token in self:\n if token.token_type =='value':\n return token.stripped_value\n if token.token_type =='quoted-string':\n for token in token:\n if token.token_type =='bare-quoted-string':\n for token in token:\n if token.token_type =='value':\n return token.stripped_value\n return ''\n \n \nclass InvalidParameter(Parameter):\n\n token_type='invalid-parameter'\n \n \nclass Attribute(TokenList):\n\n token_type='attribute'\n \n @property\n def stripped_value(self):\n for token in self:\n if token.token_type.endswith('attrtext'):\n return token.value\n \nclass Section(TokenList):\n\n token_type='section'\n number=None\n \n \nclass Value(TokenList):\n\n token_type='value'\n \n @property\n def stripped_value(self):\n token=self[0]\n if token.token_type =='cfws':\n token=self[1]\n if token.token_type.endswith(\n ('quoted-string','attribute','extended-attribute')):\n return token.stripped_value\n return self.value\n \n \nclass MimeParameters(TokenList):\n\n token_type='mime-parameters'\n syntactic_break=False\n \n @property\n def params(self):\n \n \n \n \n \n params={}\n for token in self:\n if not token.token_type.endswith('parameter'):\n continue\n if token[0].token_type !='attribute':\n continue\n name=token[0].value.strip()\n if name not in params:\n params[name]=[]\n params[name].append((token.section_number,token))\n for name,parts in params.items():\n parts=sorted(parts,key=itemgetter(0))\n first_param=parts[0][1]\n charset=first_param.charset\n \n \n \n if not first_param.extended and len(parts)>1:\n if parts[1][0]==0:\n parts[1][1].defects.append(errors.InvalidHeaderDefect(\n 'duplicate parameter name; duplicate(s) ignored'))\n parts=parts[:1]\n \n \n value_parts=[]\n i=0\n for section_number,param in parts:\n if section_number !=i:\n \n \n \n if not param.extended:\n param.defects.append(errors.InvalidHeaderDefect(\n 'duplicate parameter name; duplicate ignored'))\n continue\n else:\n param.defects.append(errors.InvalidHeaderDefect(\n \"inconsistent RFC2231 parameter numbering\"))\n i +=1\n value=param.param_value\n if param.extended:\n try:\n value=urllib.parse.unquote_to_bytes(value)\n except UnicodeEncodeError:\n \n \n \n value=urllib.parse.unquote(value,encoding='latin-1')\n else:\n try:\n value=value.decode(charset,'surrogateescape')\n except(LookupError,UnicodeEncodeError):\n \n \n \n \n value=value.decode('us-ascii','surrogateescape')\n if utils._has_surrogates(value):\n param.defects.append(errors.UndecodableBytesDefect())\n value_parts.append(value)\n value=''.join(value_parts)\n yield name,value\n \n def __str__(self):\n params=[]\n for name,value in self.params:\n if value:\n params.append('{}={}'.format(name,quote_string(value)))\n else:\n params.append(name)\n params='; '.join(params)\n return ' '+params if params else ''\n \n \nclass ParameterizedHeaderValue(TokenList):\n\n\n\n syntactic_break=False\n \n @property\n def params(self):\n for token in reversed(self):\n if token.token_type =='mime-parameters':\n return token.params\n return{}\n \n \nclass ContentType(ParameterizedHeaderValue):\n token_type='content-type'\n as_ew_allowed=False\n maintype='text'\n subtype='plain'\n \n \nclass ContentDisposition(ParameterizedHeaderValue):\n token_type='content-disposition'\n as_ew_allowed=False\n content_disposition=None\n \n \nclass ContentTransferEncoding(TokenList):\n token_type='content-transfer-encoding'\n 
as_ew_allowed=False\n cte='7bit'\n \n \nclass HeaderLabel(TokenList):\n token_type='header-label'\n as_ew_allowed=False\n \n \nclass MsgID(TokenList):\n token_type='msg-id'\n as_ew_allowed=False\n \n def fold(self,policy):\n \n return str(self)+policy.linesep\n \n \nclass MessageID(MsgID):\n token_type='message-id'\n \n \nclass InvalidMessageID(MessageID):\n token_type='invalid-message-id'\n \n \nclass Header(TokenList):\n token_type='header'\n \n \n \n \n \n \nclass Terminal(str):\n\n as_ew_allowed=True\n ew_combine_allowed=True\n syntactic_break=True\n \n def __new__(cls,value,token_type):\n self=super().__new__(cls,value)\n self.token_type=token_type\n self.defects=[]\n return self\n \n def __repr__(self):\n return \"{}({})\".format(self.__class__.__name__,super().__repr__())\n \n def pprint(self):\n print(self.__class__.__name__+'/'+self.token_type)\n \n @property\n def all_defects(self):\n return list(self.defects)\n \n def _pp(self,indent=''):\n return[\"{}{}/{}({}){}\".format(\n indent,\n self.__class__.__name__,\n self.token_type,\n super().__repr__(),\n ''if not self.defects else ' {}'.format(self.defects),\n )]\n \n def pop_trailing_ws(self):\n \n return None\n \n @property\n def comments(self):\n return[]\n \n def __getnewargs__(self):\n return(str(self),self.token_type)\n \n \nclass WhiteSpaceTerminal(Terminal):\n\n @property\n def value(self):\n return ' '\n \n def startswith_fws(self):\n return True\n \n \nclass ValueTerminal(Terminal):\n\n @property\n def value(self):\n return self\n \n def startswith_fws(self):\n return False\n \n \nclass EWWhiteSpaceTerminal(WhiteSpaceTerminal):\n\n @property\n def value(self):\n return ''\n \n def __str__(self):\n return ''\n \n \nclass _InvalidEwError(errors.HeaderParseError):\n ''\n \n \n \n \n \nDOT=ValueTerminal('.','dot')\nListSeparator=ValueTerminal(',','list-separator')\nRouteComponentMarker=ValueTerminal('@','route-component-marker')\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_wsp_splitter=re.compile(r'([{}]+)'.format(''.join(WSP))).split\n_non_atom_end_matcher=re.compile(r\"[^{}]+\".format(\nre.escape(''.join(ATOM_ENDS)))).match\n_non_printable_finder=re.compile(r\"[\\x00-\\x20\\x7F]\").findall\n_non_token_end_matcher=re.compile(r\"[^{}]+\".format(\nre.escape(''.join(TOKEN_ENDS)))).match\n_non_attribute_end_matcher=re.compile(r\"[^{}]+\".format(\nre.escape(''.join(ATTRIBUTE_ENDS)))).match\n_non_extended_attribute_end_matcher=re.compile(r\"[^{}]+\".format(\nre.escape(''.join(EXTENDED_ATTRIBUTE_ENDS)))).match\n\ndef _validate_xtext(xtext):\n ''\n \n non_printables=_non_printable_finder(xtext)\n if non_printables:\n xtext.defects.append(errors.NonPrintableDefect(non_printables))\n if utils._has_surrogates(xtext):\n xtext.defects.append(errors.UndecodableBytesDefect(\n \"Non-ASCII characters found in header token\"))\n \ndef _get_ptext_to_endchars(value,endchars):\n ''\n\n\n\n\n\n\n \n fragment,*remainder=_wsp_splitter(value,1)\n vchars=[]\n escape=False\n had_qp=False\n for pos in range(len(fragment)):\n if fragment[pos]=='\\\\':\n if escape:\n escape=False\n had_qp=True\n else:\n escape=True\n continue\n if escape:\n escape=False\n elif fragment[pos]in endchars:\n break\n vchars.append(fragment[pos])\n else:\n pos=pos+1\n return ''.join(vchars),''.join([fragment[pos:]]+remainder),had_qp\n \ndef get_fws(value):\n ''\n\n\n\n\n\n \n newvalue=value.lstrip()\n fws=WhiteSpaceTerminal(value[:len(value)-len(newvalue)],'fws')\n return fws,newvalue\n \ndef get_encoded_word(value):\n ''\n\n \n ew=EncodedWord()\n if not 
value.startswith('=?'):\n raise errors.HeaderParseError(\n \"expected encoded word but found {}\".format(value))\n tok,*remainder=value[2:].split('?=',1)\n if tok ==value[2:]:\n raise errors.HeaderParseError(\n \"expected encoded word but found {}\".format(value))\n remstr=''.join(remainder)\n if(len(remstr)>1 and\n remstr[0]in hexdigits and\n remstr[1]in hexdigits and\n tok.count('?')<2):\n \n rest,*remainder=remstr.split('?=',1)\n tok=tok+'?='+rest\n if len(tok.split())>1:\n ew.defects.append(errors.InvalidHeaderDefect(\n \"whitespace inside encoded word\"))\n ew.cte=value\n value=''.join(remainder)\n try:\n text,charset,lang,defects=_ew.decode('=?'+tok+'?=')\n except(ValueError,KeyError):\n raise _InvalidEwError(\n \"encoded word format invalid: '{}'\".format(ew.cte))\n ew.charset=charset\n ew.lang=lang\n ew.defects.extend(defects)\n while text:\n if text[0]in WSP:\n token,text=get_fws(text)\n ew.append(token)\n continue\n chars,*remainder=_wsp_splitter(text,1)\n vtext=ValueTerminal(chars,'vtext')\n _validate_xtext(vtext)\n ew.append(vtext)\n text=''.join(remainder)\n \n if value and value[0]not in WSP:\n ew.defects.append(errors.InvalidHeaderDefect(\n \"missing trailing whitespace after encoded-word\"))\n return ew,value\n \ndef get_unstructured(value):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n unstructured=UnstructuredTokenList()\n while value:\n if value[0]in WSP:\n token,value=get_fws(value)\n unstructured.append(token)\n continue\n valid_ew=True\n if value.startswith('=?'):\n try:\n token,value=get_encoded_word(value)\n except _InvalidEwError:\n valid_ew=False\n except errors.HeaderParseError:\n \n \n pass\n else:\n have_ws=True\n if len(unstructured)>0:\n if unstructured[-1].token_type !='fws':\n unstructured.defects.append(errors.InvalidHeaderDefect(\n \"missing whitespace before encoded word\"))\n have_ws=False\n if have_ws and len(unstructured)>1:\n if unstructured[-2].token_type =='encoded-word':\n unstructured[-1]=EWWhiteSpaceTerminal(\n unstructured[-1],'fws')\n unstructured.append(token)\n continue\n tok,*remainder=_wsp_splitter(value,1)\n \n \n \n \n \n \n if valid_ew and rfc2047_matcher.search(tok):\n tok,*remainder=value.partition('=?')\n vtext=ValueTerminal(tok,'vtext')\n _validate_xtext(vtext)\n unstructured.append(vtext)\n value=''.join(remainder)\n return unstructured\n \ndef get_qp_ctext(value):\n ''\n\n\n\n\n\n\n\n\n\n \n ptext,value,_=_get_ptext_to_endchars(value,'()')\n ptext=WhiteSpaceTerminal(ptext,'ptext')\n _validate_xtext(ptext)\n return ptext,value\n \ndef get_qcontent(value):\n ''\n\n\n\n\n\n\n\n \n ptext,value,_=_get_ptext_to_endchars(value,'\"')\n ptext=ValueTerminal(ptext,'ptext')\n _validate_xtext(ptext)\n return ptext,value\n \ndef get_atext(value):\n ''\n\n\n\n \n m=_non_atom_end_matcher(value)\n if not m:\n raise errors.HeaderParseError(\n \"expected atext but found '{}'\".format(value))\n atext=m.group()\n value=value[len(atext):]\n atext=ValueTerminal(atext,'atext')\n _validate_xtext(atext)\n return atext,value\n \ndef get_bare_quoted_string(value):\n ''\n\n\n\n\n \n if value[0]!='\"':\n raise errors.HeaderParseError(\n \"expected '\\\"' but found '{}'\".format(value))\n bare_quoted_string=BareQuotedString()\n value=value[1:]\n if value and value[0]=='\"':\n token,value=get_qcontent(value)\n bare_quoted_string.append(token)\n while value and value[0]!='\"':\n if value[0]in WSP:\n token,value=get_fws(value)\n elif value[:2]=='=?':\n valid_ew=False\n try:\n token,value=get_encoded_word(value)\n 
bare_quoted_string.defects.append(errors.InvalidHeaderDefect(\n \"encoded word inside quoted string\"))\n valid_ew=True\n except errors.HeaderParseError:\n token,value=get_qcontent(value)\n \n \n if valid_ew and len(bare_quoted_string)>1:\n if(bare_quoted_string[-1].token_type =='fws'and\n bare_quoted_string[-2].token_type =='encoded-word'):\n bare_quoted_string[-1]=EWWhiteSpaceTerminal(\n bare_quoted_string[-1],'fws')\n else:\n token,value=get_qcontent(value)\n bare_quoted_string.append(token)\n if not value:\n bare_quoted_string.defects.append(errors.InvalidHeaderDefect(\n \"end of header inside quoted string\"))\n return bare_quoted_string,value\n return bare_quoted_string,value[1:]\n \ndef get_comment(value):\n ''\n\n\n\n \n if value and value[0]!='(':\n raise errors.HeaderParseError(\n \"expected '(' but found '{}'\".format(value))\n comment=Comment()\n value=value[1:]\n while value and value[0]!=\")\":\n if value[0]in WSP:\n token,value=get_fws(value)\n elif value[0]=='(':\n token,value=get_comment(value)\n else:\n token,value=get_qp_ctext(value)\n comment.append(token)\n if not value:\n comment.defects.append(errors.InvalidHeaderDefect(\n \"end of header inside comment\"))\n return comment,value\n return comment,value[1:]\n \ndef get_cfws(value):\n ''\n\n \n cfws=CFWSList()\n while value and value[0]in CFWS_LEADER:\n if value[0]in WSP:\n token,value=get_fws(value)\n else:\n token,value=get_comment(value)\n cfws.append(token)\n return cfws,value\n \ndef get_quoted_string(value):\n ''\n\n\n\n\n \n quoted_string=QuotedString()\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n quoted_string.append(token)\n token,value=get_bare_quoted_string(value)\n quoted_string.append(token)\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n quoted_string.append(token)\n return quoted_string,value\n \ndef get_atom(value):\n ''\n\n\n \n atom=Atom()\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n atom.append(token)\n if value and value[0]in ATOM_ENDS:\n raise errors.HeaderParseError(\n \"expected atom but found '{}'\".format(value))\n if value.startswith('=?'):\n try:\n token,value=get_encoded_word(value)\n except errors.HeaderParseError:\n \n \n token,value=get_atext(value)\n else:\n token,value=get_atext(value)\n atom.append(token)\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n atom.append(token)\n return atom,value\n \ndef get_dot_atom_text(value):\n ''\n\n \n dot_atom_text=DotAtomText()\n if not value or value[0]in ATOM_ENDS:\n raise errors.HeaderParseError(\"expected atom at a start of \"\n \"dot-atom-text but found '{}'\".format(value))\n while value and value[0]not in ATOM_ENDS:\n token,value=get_atext(value)\n dot_atom_text.append(token)\n if value and value[0]=='.':\n dot_atom_text.append(DOT)\n value=value[1:]\n if dot_atom_text[-1]is DOT:\n raise errors.HeaderParseError(\"expected atom at end of dot-atom-text \"\n \"but found '{}'\".format('.'+value))\n return dot_atom_text,value\n \ndef get_dot_atom(value):\n ''\n\n\n\n \n dot_atom=DotAtom()\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n dot_atom.append(token)\n if value.startswith('=?'):\n try:\n token,value=get_encoded_word(value)\n except errors.HeaderParseError:\n \n \n token,value=get_dot_atom_text(value)\n else:\n token,value=get_dot_atom_text(value)\n dot_atom.append(token)\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n dot_atom.append(token)\n return dot_atom,value\n \ndef get_word(value):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n else:\n leader=None\n if not value:\n raise errors.HeaderParseError(\n \"Expected 'atom' or 'quoted-string' but found nothing.\")\n if value[0]=='\"':\n token,value=get_quoted_string(value)\n elif value[0]in SPECIALS:\n raise errors.HeaderParseError(\"Expected 'atom' or 'quoted-string' \"\n \"but found '{}'\".format(value))\n else:\n token,value=get_atom(value)\n if leader is not None:\n token[:0]=[leader]\n return token,value\n \ndef get_phrase(value):\n ''\n\n\n\n\n\n\n\n\n\n \n phrase=Phrase()\n try:\n token,value=get_word(value)\n phrase.append(token)\n except errors.HeaderParseError:\n phrase.defects.append(errors.InvalidHeaderDefect(\n \"phrase does not start with word\"))\n while value and value[0]not in PHRASE_ENDS:\n if value[0]=='.':\n phrase.append(DOT)\n phrase.defects.append(errors.ObsoleteHeaderDefect(\n \"period in 'phrase'\"))\n value=value[1:]\n else:\n try:\n token,value=get_word(value)\n except errors.HeaderParseError:\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n phrase.defects.append(errors.ObsoleteHeaderDefect(\n \"comment found without atom\"))\n else:\n raise\n phrase.append(token)\n return phrase,value\n \ndef get_local_part(value):\n ''\n\n \n local_part=LocalPart()\n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value:\n raise errors.HeaderParseError(\n \"expected local-part but found '{}'\".format(value))\n try:\n token,value=get_dot_atom(value)\n except errors.HeaderParseError:\n try:\n token,value=get_word(value)\n except errors.HeaderParseError:\n if value[0]!='\\\\'and value[0]in PHRASE_ENDS:\n raise\n token=TokenList()\n if leader is not None:\n token[:0]=[leader]\n local_part.append(token)\n if value and(value[0]=='\\\\'or value[0]not in PHRASE_ENDS):\n obs_local_part,value=get_obs_local_part(str(local_part)+value)\n if obs_local_part.token_type =='invalid-obs-local-part':\n local_part.defects.append(errors.InvalidHeaderDefect(\n \"local-part is not dot-atom, quoted-string, or obs-local-part\"))\n else:\n local_part.defects.append(errors.ObsoleteHeaderDefect(\n \"local-part is not a dot-atom (contains CFWS)\"))\n local_part[0]=obs_local_part\n try:\n local_part.value.encode('ascii')\n except UnicodeEncodeError:\n local_part.defects.append(errors.NonASCIILocalPartDefect(\n \"local-part contains non-ASCII characters)\"))\n return local_part,value\n \ndef get_obs_local_part(value):\n ''\n \n obs_local_part=ObsLocalPart()\n last_non_ws_was_dot=False\n while value and(value[0]=='\\\\'or value[0]not in PHRASE_ENDS):\n if value[0]=='.':\n if last_non_ws_was_dot:\n obs_local_part.defects.append(errors.InvalidHeaderDefect(\n \"invalid repeated '.'\"))\n obs_local_part.append(DOT)\n last_non_ws_was_dot=True\n value=value[1:]\n continue\n elif value[0]=='\\\\':\n obs_local_part.append(ValueTerminal(value[0],\n 'misplaced-special'))\n value=value[1:]\n obs_local_part.defects.append(errors.InvalidHeaderDefect(\n \"'\\\\' character outside of quoted-string/ccontent\"))\n last_non_ws_was_dot=False\n continue\n if obs_local_part and obs_local_part[-1].token_type !='dot':\n obs_local_part.defects.append(errors.InvalidHeaderDefect(\n \"missing '.' 
between words\"))\n try:\n token,value=get_word(value)\n last_non_ws_was_dot=False\n except errors.HeaderParseError:\n if value[0]not in CFWS_LEADER:\n raise\n token,value=get_cfws(value)\n obs_local_part.append(token)\n if(obs_local_part[0].token_type =='dot'or\n obs_local_part[0].token_type =='cfws'and\n obs_local_part[1].token_type =='dot'):\n obs_local_part.defects.append(errors.InvalidHeaderDefect(\n \"Invalid leading '.' in local part\"))\n if(obs_local_part[-1].token_type =='dot'or\n obs_local_part[-1].token_type =='cfws'and\n obs_local_part[-2].token_type =='dot'):\n obs_local_part.defects.append(errors.InvalidHeaderDefect(\n \"Invalid trailing '.' in local part\"))\n if obs_local_part.defects:\n obs_local_part.token_type='invalid-obs-local-part'\n return obs_local_part,value\n \ndef get_dtext(value):\n ''\n\n\n\n\n\n\n\n\n\n \n ptext,value,had_qp=_get_ptext_to_endchars(value,'[]')\n ptext=ValueTerminal(ptext,'ptext')\n if had_qp:\n ptext.defects.append(errors.ObsoleteHeaderDefect(\n \"quoted printable found in domain-literal\"))\n _validate_xtext(ptext)\n return ptext,value\n \ndef _check_for_early_dl_end(value,domain_literal):\n if value:\n return False\n domain_literal.append(errors.InvalidHeaderDefect(\n \"end of input inside domain-literal\"))\n domain_literal.append(ValueTerminal(']','domain-literal-end'))\n return True\n \ndef get_domain_literal(value):\n ''\n\n \n domain_literal=DomainLiteral()\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n domain_literal.append(token)\n if not value:\n raise errors.HeaderParseError(\"expected domain-literal\")\n if value[0]!='[':\n raise errors.HeaderParseError(\"expected '[' at start of domain-literal \"\n \"but found '{}'\".format(value))\n value=value[1:]\n if _check_for_early_dl_end(value,domain_literal):\n return domain_literal,value\n domain_literal.append(ValueTerminal('[','domain-literal-start'))\n if value[0]in WSP:\n token,value=get_fws(value)\n domain_literal.append(token)\n token,value=get_dtext(value)\n domain_literal.append(token)\n if _check_for_early_dl_end(value,domain_literal):\n return domain_literal,value\n if value[0]in WSP:\n token,value=get_fws(value)\n domain_literal.append(token)\n if _check_for_early_dl_end(value,domain_literal):\n return domain_literal,value\n if value[0]!=']':\n raise errors.HeaderParseError(\"expected ']' at end of domain-literal \"\n \"but found '{}'\".format(value))\n domain_literal.append(ValueTerminal(']','domain-literal-end'))\n value=value[1:]\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n domain_literal.append(token)\n return domain_literal,value\n \ndef get_domain(value):\n ''\n\n\n \n domain=Domain()\n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value:\n raise errors.HeaderParseError(\n \"expected domain but found '{}'\".format(value))\n if value[0]=='[':\n token,value=get_domain_literal(value)\n if leader is not None:\n token[:0]=[leader]\n domain.append(token)\n return domain,value\n try:\n token,value=get_dot_atom(value)\n except errors.HeaderParseError:\n token,value=get_atom(value)\n if value and value[0]=='@':\n raise errors.HeaderParseError('Invalid Domain')\n if leader is not None:\n token[:0]=[leader]\n domain.append(token)\n if value and value[0]=='.':\n domain.defects.append(errors.ObsoleteHeaderDefect(\n \"domain is not a dot-atom (contains CFWS)\"))\n if domain[0].token_type =='dot-atom':\n domain[:]=domain[0]\n while value and value[0]=='.':\n domain.append(DOT)\n token,value=get_atom(value[1:])\n 
domain.append(token)\n return domain,value\n \ndef get_addr_spec(value):\n ''\n\n \n addr_spec=AddrSpec()\n token,value=get_local_part(value)\n addr_spec.append(token)\n if not value or value[0]!='@':\n addr_spec.defects.append(errors.InvalidHeaderDefect(\n \"addr-spec local part with no domain\"))\n return addr_spec,value\n addr_spec.append(ValueTerminal('@','address-at-symbol'))\n token,value=get_domain(value[1:])\n addr_spec.append(token)\n return addr_spec,value\n \ndef get_obs_route(value):\n ''\n\n\n\n\n \n obs_route=ObsRoute()\n while value and(value[0]==','or value[0]in CFWS_LEADER):\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n obs_route.append(token)\n elif value[0]==',':\n obs_route.append(ListSeparator)\n value=value[1:]\n if not value or value[0]!='@':\n raise errors.HeaderParseError(\n \"expected obs-route domain but found '{}'\".format(value))\n obs_route.append(RouteComponentMarker)\n token,value=get_domain(value[1:])\n obs_route.append(token)\n while value and value[0]==',':\n obs_route.append(ListSeparator)\n value=value[1:]\n if not value:\n break\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n obs_route.append(token)\n if value[0]=='@':\n obs_route.append(RouteComponentMarker)\n token,value=get_domain(value[1:])\n obs_route.append(token)\n if not value:\n raise errors.HeaderParseError(\"end of header while parsing obs-route\")\n if value[0]!=':':\n raise errors.HeaderParseError(\"expected ':' marking end of \"\n \"obs-route but found '{}'\".format(value))\n obs_route.append(ValueTerminal(':','end-of-obs-route-marker'))\n return obs_route,value[1:]\n \ndef get_angle_addr(value):\n ''\n\n\n \n angle_addr=AngleAddr()\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n angle_addr.append(token)\n if not value or value[0]!='<':\n raise errors.HeaderParseError(\n \"expected angle-addr but found '{}'\".format(value))\n angle_addr.append(ValueTerminal('<','angle-addr-start'))\n value=value[1:]\n \n \n if value[0]=='>':\n angle_addr.append(ValueTerminal('>','angle-addr-end'))\n angle_addr.defects.append(errors.InvalidHeaderDefect(\n \"null addr-spec in angle-addr\"))\n value=value[1:]\n return angle_addr,value\n try:\n token,value=get_addr_spec(value)\n except errors.HeaderParseError:\n try:\n token,value=get_obs_route(value)\n angle_addr.defects.append(errors.ObsoleteHeaderDefect(\n \"obsolete route specification in angle-addr\"))\n except errors.HeaderParseError:\n raise errors.HeaderParseError(\n \"expected addr-spec or obs-route but found '{}'\".format(value))\n angle_addr.append(token)\n token,value=get_addr_spec(value)\n angle_addr.append(token)\n if value and value[0]=='>':\n value=value[1:]\n else:\n angle_addr.defects.append(errors.InvalidHeaderDefect(\n \"missing trailing '>' on angle-addr\"))\n angle_addr.append(ValueTerminal('>','angle-addr-end'))\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n angle_addr.append(token)\n return angle_addr,value\n \ndef get_display_name(value):\n ''\n\n\n\n\n\n \n display_name=DisplayName()\n token,value=get_phrase(value)\n display_name.extend(token[:])\n display_name.defects=token.defects[:]\n return display_name,value\n \n \ndef get_name_addr(value):\n ''\n\n \n name_addr=NameAddr()\n \n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value:\n raise errors.HeaderParseError(\n \"expected name-addr but found '{}'\".format(leader))\n if value[0]!='<':\n if value[0]in PHRASE_ENDS:\n raise errors.HeaderParseError(\n \"expected name-addr but found 
'{}'\".format(value))\n token,value=get_display_name(value)\n if not value:\n raise errors.HeaderParseError(\n \"expected name-addr but found '{}'\".format(token))\n if leader is not None:\n token[0][:0]=[leader]\n leader=None\n name_addr.append(token)\n token,value=get_angle_addr(value)\n if leader is not None:\n token[:0]=[leader]\n name_addr.append(token)\n return name_addr,value\n \ndef get_mailbox(value):\n ''\n\n \n \n \n mailbox=Mailbox()\n try:\n token,value=get_name_addr(value)\n except errors.HeaderParseError:\n try:\n token,value=get_addr_spec(value)\n except errors.HeaderParseError:\n raise errors.HeaderParseError(\n \"expected mailbox but found '{}'\".format(value))\n if any(isinstance(x,errors.InvalidHeaderDefect)\n for x in token.all_defects):\n mailbox.token_type='invalid-mailbox'\n mailbox.append(token)\n return mailbox,value\n \ndef get_invalid_mailbox(value,endchars):\n ''\n\n\n\n\n \n invalid_mailbox=InvalidMailbox()\n while value and value[0]not in endchars:\n if value[0]in PHRASE_ENDS:\n invalid_mailbox.append(ValueTerminal(value[0],\n 'misplaced-special'))\n value=value[1:]\n else:\n token,value=get_phrase(value)\n invalid_mailbox.append(token)\n return invalid_mailbox,value\n \ndef get_mailbox_list(value):\n ''\n\n\n\n\n\n\n\n\n\n \n mailbox_list=MailboxList()\n while value and value[0]!=';':\n try:\n token,value=get_mailbox(value)\n mailbox_list.append(token)\n except errors.HeaderParseError:\n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value or value[0]in ',;':\n mailbox_list.append(leader)\n mailbox_list.defects.append(errors.ObsoleteHeaderDefect(\n \"empty element in mailbox-list\"))\n else:\n token,value=get_invalid_mailbox(value,',;')\n if leader is not None:\n token[:0]=[leader]\n mailbox_list.append(token)\n mailbox_list.defects.append(errors.InvalidHeaderDefect(\n \"invalid mailbox in mailbox-list\"))\n elif value[0]==',':\n mailbox_list.defects.append(errors.ObsoleteHeaderDefect(\n \"empty element in mailbox-list\"))\n else:\n token,value=get_invalid_mailbox(value,',;')\n if leader is not None:\n token[:0]=[leader]\n mailbox_list.append(token)\n mailbox_list.defects.append(errors.InvalidHeaderDefect(\n \"invalid mailbox in mailbox-list\"))\n if value and value[0]not in ',;':\n \n \n mailbox=mailbox_list[-1]\n mailbox.token_type='invalid-mailbox'\n token,value=get_invalid_mailbox(value,',;')\n mailbox.extend(token)\n mailbox_list.defects.append(errors.InvalidHeaderDefect(\n \"invalid mailbox in mailbox-list\"))\n if value and value[0]==',':\n mailbox_list.append(ListSeparator)\n value=value[1:]\n return mailbox_list,value\n \n \ndef get_group_list(value):\n ''\n\n\n \n group_list=GroupList()\n if not value:\n group_list.defects.append(errors.InvalidHeaderDefect(\n \"end of header before group-list\"))\n return group_list,value\n leader=None\n if value and value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value:\n \n \n \n group_list.defects.append(errors.InvalidHeaderDefect(\n \"end of header in group-list\"))\n group_list.append(leader)\n return group_list,value\n if value[0]==';':\n group_list.append(leader)\n return group_list,value\n token,value=get_mailbox_list(value)\n if len(token.all_mailboxes)==0:\n if leader is not None:\n group_list.append(leader)\n group_list.extend(token)\n group_list.defects.append(errors.ObsoleteHeaderDefect(\n \"group-list with empty entries\"))\n return group_list,value\n if leader is not None:\n token[:0]=[leader]\n group_list.append(token)\n return group_list,value\n 
\ndef get_group(value):\n ''\n\n \n group=Group()\n token,value=get_display_name(value)\n if not value or value[0]!=':':\n raise errors.HeaderParseError(\"expected ':' at end of group \"\n \"display name but found '{}'\".format(value))\n group.append(token)\n group.append(ValueTerminal(':','group-display-name-terminator'))\n value=value[1:]\n if value and value[0]==';':\n group.append(ValueTerminal(';','group-terminator'))\n return group,value[1:]\n token,value=get_group_list(value)\n group.append(token)\n if not value:\n group.defects.append(errors.InvalidHeaderDefect(\n \"end of header in group\"))\n elif value[0]!=';':\n raise errors.HeaderParseError(\n \"expected ';' at end of group but found {}\".format(value))\n group.append(ValueTerminal(';','group-terminator'))\n value=value[1:]\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n group.append(token)\n return group,value\n \ndef get_address(value):\n ''\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n address=Address()\n try:\n token,value=get_group(value)\n except errors.HeaderParseError:\n try:\n token,value=get_mailbox(value)\n except errors.HeaderParseError:\n raise errors.HeaderParseError(\n \"expected address but found '{}'\".format(value))\n address.append(token)\n return address,value\n \ndef get_address_list(value):\n ''\n\n\n\n\n\n\n\n \n address_list=AddressList()\n while value:\n try:\n token,value=get_address(value)\n address_list.append(token)\n except errors.HeaderParseError:\n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value or value[0]==',':\n address_list.append(leader)\n address_list.defects.append(errors.ObsoleteHeaderDefect(\n \"address-list entry with no content\"))\n else:\n token,value=get_invalid_mailbox(value,',')\n if leader is not None:\n token[:0]=[leader]\n address_list.append(Address([token]))\n address_list.defects.append(errors.InvalidHeaderDefect(\n \"invalid address in address-list\"))\n elif value[0]==',':\n address_list.defects.append(errors.ObsoleteHeaderDefect(\n \"empty element in address-list\"))\n else:\n token,value=get_invalid_mailbox(value,',')\n if leader is not None:\n token[:0]=[leader]\n address_list.append(Address([token]))\n address_list.defects.append(errors.InvalidHeaderDefect(\n \"invalid address in address-list\"))\n if value and value[0]!=',':\n \n \n mailbox=address_list[-1][0]\n mailbox.token_type='invalid-mailbox'\n token,value=get_invalid_mailbox(value,',')\n mailbox.extend(token)\n address_list.defects.append(errors.InvalidHeaderDefect(\n \"invalid address in address-list\"))\n if value:\n address_list.append(ValueTerminal(',','list-separator'))\n value=value[1:]\n return address_list,value\n \n \ndef get_no_fold_literal(value):\n ''\n \n no_fold_literal=NoFoldLiteral()\n if not value:\n raise errors.HeaderParseError(\n \"expected no-fold-literal but found '{}'\".format(value))\n if value[0]!='[':\n raise errors.HeaderParseError(\n \"expected '[' at the start of no-fold-literal \"\n \"but found '{}'\".format(value))\n no_fold_literal.append(ValueTerminal('[','no-fold-literal-start'))\n value=value[1:]\n token,value=get_dtext(value)\n no_fold_literal.append(token)\n if not value or value[0]!=']':\n raise errors.HeaderParseError(\n \"expected ']' at the end of no-fold-literal \"\n \"but found '{}'\".format(value))\n no_fold_literal.append(ValueTerminal(']','no-fold-literal-end'))\n return no_fold_literal,value[1:]\n \ndef get_msg_id(value):\n ''\n\n\n\n \n msg_id=MsgID()\n if value and value[0]in CFWS_LEADER:\n 
token,value=get_cfws(value)\n msg_id.append(token)\n if not value or value[0]!='<':\n raise errors.HeaderParseError(\n \"expected msg-id but found '{}'\".format(value))\n msg_id.append(ValueTerminal('<','msg-id-start'))\n value=value[1:]\n \n try:\n token,value=get_dot_atom_text(value)\n except errors.HeaderParseError:\n try:\n \n token,value=get_obs_local_part(value)\n msg_id.defects.append(errors.ObsoleteHeaderDefect(\n \"obsolete id-left in msg-id\"))\n except errors.HeaderParseError:\n raise errors.HeaderParseError(\n \"expected dot-atom-text or obs-id-left\"\n \" but found '{}'\".format(value))\n msg_id.append(token)\n if not value or value[0]!='@':\n msg_id.defects.append(errors.InvalidHeaderDefect(\n \"msg-id with no id-right\"))\n \n \n \n if value and value[0]=='>':\n msg_id.append(ValueTerminal('>','msg-id-end'))\n value=value[1:]\n return msg_id,value\n msg_id.append(ValueTerminal('@','address-at-symbol'))\n value=value[1:]\n \n try:\n token,value=get_dot_atom_text(value)\n except errors.HeaderParseError:\n try:\n token,value=get_no_fold_literal(value)\n except errors.HeaderParseError:\n try:\n token,value=get_domain(value)\n msg_id.defects.append(errors.ObsoleteHeaderDefect(\n \"obsolete id-right in msg-id\"))\n except errors.HeaderParseError:\n raise errors.HeaderParseError(\n \"expected dot-atom-text, no-fold-literal or obs-id-right\"\n \" but found '{}'\".format(value))\n msg_id.append(token)\n if value and value[0]=='>':\n value=value[1:]\n else:\n msg_id.defects.append(errors.InvalidHeaderDefect(\n \"missing trailing '>' on msg-id\"))\n msg_id.append(ValueTerminal('>','msg-id-end'))\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n msg_id.append(token)\n return msg_id,value\n \n \ndef parse_message_id(value):\n ''\n \n message_id=MessageID()\n try:\n token,value=get_msg_id(value)\n message_id.append(token)\n except errors.HeaderParseError as ex:\n token=get_unstructured(value)\n message_id=InvalidMessageID(token)\n message_id.defects.append(\n errors.InvalidHeaderDefect(\"Invalid msg-id: {!r}\".format(ex)))\n else:\n \n if value:\n message_id.defects.append(errors.InvalidHeaderDefect(\n \"Unexpected {!r}\".format(value)))\n \n return message_id\n \n \n \n \n \n \n \n \n \ndef parse_mime_version(value):\n ''\n\n \n \n \n mime_version=MIMEVersion()\n if not value:\n mime_version.defects.append(errors.HeaderMissingRequiredValue(\n \"Missing MIME version number (eg: 1.0)\"))\n return mime_version\n if value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n mime_version.append(token)\n if not value:\n mime_version.defects.append(errors.HeaderMissingRequiredValue(\n \"Expected MIME version number but found only CFWS\"))\n digits=''\n while value and value[0]!='.'and value[0]not in CFWS_LEADER:\n digits +=value[0]\n value=value[1:]\n if not digits.isdigit():\n mime_version.defects.append(errors.InvalidHeaderDefect(\n \"Expected MIME major version number but found {!r}\".format(digits)))\n mime_version.append(ValueTerminal(digits,'xtext'))\n else:\n mime_version.major=int(digits)\n mime_version.append(ValueTerminal(digits,'digits'))\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n mime_version.append(token)\n if not value or value[0]!='.':\n if mime_version.major is not None:\n mime_version.defects.append(errors.InvalidHeaderDefect(\n \"Incomplete MIME version; found only major number\"))\n if value:\n mime_version.append(ValueTerminal(value,'xtext'))\n return mime_version\n mime_version.append(ValueTerminal('.','version-separator'))\n 
value=value[1:]\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n mime_version.append(token)\n if not value:\n if mime_version.major is not None:\n mime_version.defects.append(errors.InvalidHeaderDefect(\n \"Incomplete MIME version; found only major number\"))\n return mime_version\n digits=''\n while value and value[0]not in CFWS_LEADER:\n digits +=value[0]\n value=value[1:]\n if not digits.isdigit():\n mime_version.defects.append(errors.InvalidHeaderDefect(\n \"Expected MIME minor version number but found {!r}\".format(digits)))\n mime_version.append(ValueTerminal(digits,'xtext'))\n else:\n mime_version.minor=int(digits)\n mime_version.append(ValueTerminal(digits,'digits'))\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n mime_version.append(token)\n if value:\n mime_version.defects.append(errors.InvalidHeaderDefect(\n \"Excess non-CFWS text after MIME version\"))\n mime_version.append(ValueTerminal(value,'xtext'))\n return mime_version\n \ndef get_invalid_parameter(value):\n ''\n\n\n\n\n \n invalid_parameter=InvalidParameter()\n while value and value[0]!=';':\n if value[0]in PHRASE_ENDS:\n invalid_parameter.append(ValueTerminal(value[0],\n 'misplaced-special'))\n value=value[1:]\n else:\n token,value=get_phrase(value)\n invalid_parameter.append(token)\n return invalid_parameter,value\n \ndef get_ttext(value):\n ''\n\n\n\n\n\n\n \n m=_non_token_end_matcher(value)\n if not m:\n raise errors.HeaderParseError(\n \"expected ttext but found '{}'\".format(value))\n ttext=m.group()\n value=value[len(ttext):]\n ttext=ValueTerminal(ttext,'ttext')\n _validate_xtext(ttext)\n return ttext,value\n \ndef get_token(value):\n ''\n\n\n\n\n\n\n \n mtoken=Token()\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n mtoken.append(token)\n if value and value[0]in TOKEN_ENDS:\n raise errors.HeaderParseError(\n \"expected token but found '{}'\".format(value))\n token,value=get_ttext(value)\n mtoken.append(token)\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n mtoken.append(token)\n return mtoken,value\n \ndef get_attrtext(value):\n ''\n\n\n\n\n\n\n \n m=_non_attribute_end_matcher(value)\n if not m:\n raise errors.HeaderParseError(\n \"expected attrtext but found {!r}\".format(value))\n attrtext=m.group()\n value=value[len(attrtext):]\n attrtext=ValueTerminal(attrtext,'attrtext')\n _validate_xtext(attrtext)\n return attrtext,value\n \ndef get_attribute(value):\n ''\n\n\n\n\n\n\n \n attribute=Attribute()\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n attribute.append(token)\n if value and value[0]in ATTRIBUTE_ENDS:\n raise errors.HeaderParseError(\n \"expected token but found '{}'\".format(value))\n token,value=get_attrtext(value)\n attribute.append(token)\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n attribute.append(token)\n return attribute,value\n \ndef get_extended_attrtext(value):\n ''\n\n\n\n\n\n \n m=_non_extended_attribute_end_matcher(value)\n if not m:\n raise errors.HeaderParseError(\n \"expected extended attrtext but found {!r}\".format(value))\n attrtext=m.group()\n value=value[len(attrtext):]\n attrtext=ValueTerminal(attrtext,'extended-attrtext')\n _validate_xtext(attrtext)\n return attrtext,value\n \ndef get_extended_attribute(value):\n ''\n\n\n\n\n \n \n attribute=Attribute()\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n attribute.append(token)\n if value and value[0]in EXTENDED_ATTRIBUTE_ENDS:\n raise errors.HeaderParseError(\n \"expected 
token but found '{}'\".format(value))\n token,value=get_extended_attrtext(value)\n attribute.append(token)\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n attribute.append(token)\n return attribute,value\n \ndef get_section(value):\n ''\n\n\n\n\n\n\n \n section=Section()\n if not value or value[0]!='*':\n raise errors.HeaderParseError(\"Expected section but found {}\".format(\n value))\n section.append(ValueTerminal('*','section-marker'))\n value=value[1:]\n if not value or not value[0].isdigit():\n raise errors.HeaderParseError(\"Expected section number but \"\n \"found {}\".format(value))\n digits=''\n while value and value[0].isdigit():\n digits +=value[0]\n value=value[1:]\n if digits[0]=='0'and digits !='0':\n section.defects.append(errors.InvalidHeaderDefect(\n \"section number has an invalid leading 0\"))\n section.number=int(digits)\n section.append(ValueTerminal(digits,'digits'))\n return section,value\n \n \ndef get_value(value):\n ''\n\n \n v=Value()\n if not value:\n raise errors.HeaderParseError(\"Expected value but found end of string\")\n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value:\n raise errors.HeaderParseError(\"Expected value but found \"\n \"only {}\".format(leader))\n if value[0]=='\"':\n token,value=get_quoted_string(value)\n else:\n token,value=get_extended_attribute(value)\n if leader is not None:\n token[:0]=[leader]\n v.append(token)\n return v,value\n \ndef get_parameter(value):\n ''\n\n\n\n\n\n \n \n \n \n param=Parameter()\n token,value=get_attribute(value)\n param.append(token)\n if not value or value[0]==';':\n param.defects.append(errors.InvalidHeaderDefect(\"Parameter contains \"\n \"name ({}) but no value\".format(token)))\n return param,value\n if value[0]=='*':\n try:\n token,value=get_section(value)\n param.sectioned=True\n param.append(token)\n except errors.HeaderParseError:\n pass\n if not value:\n raise errors.HeaderParseError(\"Incomplete parameter\")\n if value[0]=='*':\n param.append(ValueTerminal('*','extended-parameter-marker'))\n value=value[1:]\n param.extended=True\n if value[0]!='=':\n raise errors.HeaderParseError(\"Parameter not followed by '='\")\n param.append(ValueTerminal('=','parameter-separator'))\n value=value[1:]\n if value and value[0]in CFWS_LEADER:\n token,value=get_cfws(value)\n param.append(token)\n remainder=None\n appendto=param\n if param.extended and value and value[0]=='\"':\n \n \n \n qstring,remainder=get_quoted_string(value)\n inner_value=qstring.stripped_value\n semi_valid=False\n if param.section_number ==0:\n if inner_value and inner_value[0]==\"'\":\n semi_valid=True\n else:\n token,rest=get_attrtext(inner_value)\n if rest and rest[0]==\"'\":\n semi_valid=True\n else:\n try:\n token,rest=get_extended_attrtext(inner_value)\n except:\n pass\n else:\n if not rest:\n semi_valid=True\n if semi_valid:\n param.defects.append(errors.InvalidHeaderDefect(\n \"Quoted string value for extended parameter is invalid\"))\n param.append(qstring)\n for t in qstring:\n if t.token_type =='bare-quoted-string':\n t[:]=[]\n appendto=t\n break\n value=inner_value\n else:\n remainder=None\n param.defects.append(errors.InvalidHeaderDefect(\n \"Parameter marked as extended but appears to have a \"\n \"quoted string value that is non-encoded\"))\n if value and value[0]==\"'\":\n token=None\n else:\n token,value=get_value(value)\n if not param.extended or param.section_number >0:\n if not value or value[0]!=\"'\":\n appendto.append(token)\n if remainder is not None:\n assert 
not value,value\n value=remainder\n return param,value\n param.defects.append(errors.InvalidHeaderDefect(\n \"Apparent initial-extended-value but attribute \"\n \"was not marked as extended or was not initial section\"))\n if not value:\n \n param.defects.append(errors.InvalidHeaderDefect(\n \"Missing required charset/lang delimiters\"))\n appendto.append(token)\n if remainder is None:\n return param,value\n else:\n if token is not None:\n for t in token:\n if t.token_type =='extended-attrtext':\n break\n t.token_type =='attrtext'\n appendto.append(t)\n param.charset=t.value\n if value[0]!=\"'\":\n raise errors.HeaderParseError(\"Expected RFC2231 char/lang encoding \"\n \"delimiter, but found {!r}\".format(value))\n appendto.append(ValueTerminal(\"'\",'RFC2231-delimiter'))\n value=value[1:]\n if value and value[0]!=\"'\":\n token,value=get_attrtext(value)\n appendto.append(token)\n param.lang=token.value\n if not value or value[0]!=\"'\":\n raise errors.HeaderParseError(\"Expected RFC2231 char/lang encoding \"\n \"delimiter, but found {}\".format(value))\n appendto.append(ValueTerminal(\"'\",'RFC2231-delimiter'))\n value=value[1:]\n if remainder is not None:\n \n v=Value()\n while value:\n if value[0]in WSP:\n token,value=get_fws(value)\n elif value[0]=='\"':\n token=ValueTerminal('\"','DQUOTE')\n value=value[1:]\n else:\n token,value=get_qcontent(value)\n v.append(token)\n token=v\n else:\n token,value=get_value(value)\n appendto.append(token)\n if remainder is not None:\n assert not value,value\n value=remainder\n return param,value\n \ndef parse_mime_parameters(value):\n ''\n\n\n\n\n\n\n\n\n\n\n \n mime_parameters=MimeParameters()\n while value:\n try:\n token,value=get_parameter(value)\n mime_parameters.append(token)\n except errors.HeaderParseError:\n leader=None\n if value[0]in CFWS_LEADER:\n leader,value=get_cfws(value)\n if not value:\n mime_parameters.append(leader)\n return mime_parameters\n if value[0]==';':\n if leader is not None:\n mime_parameters.append(leader)\n mime_parameters.defects.append(errors.InvalidHeaderDefect(\n \"parameter entry with no content\"))\n else:\n token,value=get_invalid_parameter(value)\n if leader:\n token[:0]=[leader]\n mime_parameters.append(token)\n mime_parameters.defects.append(errors.InvalidHeaderDefect(\n \"invalid parameter {!r}\".format(token)))\n if value and value[0]!=';':\n \n \n param=mime_parameters[-1]\n param.token_type='invalid-parameter'\n token,value=get_invalid_parameter(value)\n param.extend(token)\n mime_parameters.defects.append(errors.InvalidHeaderDefect(\n \"parameter with invalid trailing text {!r}\".format(token)))\n if value:\n \n mime_parameters.append(ValueTerminal(';','parameter-separator'))\n value=value[1:]\n return mime_parameters\n \ndef _find_mime_parameters(tokenlist,value):\n ''\n\n \n while value and value[0]!=';':\n if value[0]in PHRASE_ENDS:\n tokenlist.append(ValueTerminal(value[0],'misplaced-special'))\n value=value[1:]\n else:\n token,value=get_phrase(value)\n tokenlist.append(token)\n if not value:\n return\n tokenlist.append(ValueTerminal(';','parameter-separator'))\n tokenlist.append(parse_mime_parameters(value[1:]))\n \ndef parse_content_type_header(value):\n ''\n\n\n\n\n \n ctype=ContentType()\n if not value:\n ctype.defects.append(errors.HeaderMissingRequiredValue(\n \"Missing content type specification\"))\n return ctype\n try:\n token,value=get_token(value)\n except errors.HeaderParseError:\n ctype.defects.append(errors.InvalidHeaderDefect(\n \"Expected content maintype but found 
{!r}\".format(value)))\n _find_mime_parameters(ctype,value)\n return ctype\n ctype.append(token)\n \n \n if not value or value[0]!='/':\n ctype.defects.append(errors.InvalidHeaderDefect(\n \"Invalid content type\"))\n if value:\n _find_mime_parameters(ctype,value)\n return ctype\n ctype.maintype=token.value.strip().lower()\n ctype.append(ValueTerminal('/','content-type-separator'))\n value=value[1:]\n try:\n token,value=get_token(value)\n except errors.HeaderParseError:\n ctype.defects.append(errors.InvalidHeaderDefect(\n \"Expected content subtype but found {!r}\".format(value)))\n _find_mime_parameters(ctype,value)\n return ctype\n ctype.append(token)\n ctype.subtype=token.value.strip().lower()\n if not value:\n return ctype\n if value[0]!=';':\n ctype.defects.append(errors.InvalidHeaderDefect(\n \"Only parameters are valid after content type, but \"\n \"found {!r}\".format(value)))\n \n \n \n del ctype.maintype,ctype.subtype\n _find_mime_parameters(ctype,value)\n return ctype\n ctype.append(ValueTerminal(';','parameter-separator'))\n ctype.append(parse_mime_parameters(value[1:]))\n return ctype\n \ndef parse_content_disposition_header(value):\n ''\n\n \n disp_header=ContentDisposition()\n if not value:\n disp_header.defects.append(errors.HeaderMissingRequiredValue(\n \"Missing content disposition\"))\n return disp_header\n try:\n token,value=get_token(value)\n except errors.HeaderParseError:\n disp_header.defects.append(errors.InvalidHeaderDefect(\n \"Expected content disposition but found {!r}\".format(value)))\n _find_mime_parameters(disp_header,value)\n return disp_header\n disp_header.append(token)\n disp_header.content_disposition=token.value.strip().lower()\n if not value:\n return disp_header\n if value[0]!=';':\n disp_header.defects.append(errors.InvalidHeaderDefect(\n \"Only parameters are valid after content disposition, but \"\n \"found {!r}\".format(value)))\n _find_mime_parameters(disp_header,value)\n return disp_header\n disp_header.append(ValueTerminal(';','parameter-separator'))\n disp_header.append(parse_mime_parameters(value[1:]))\n return disp_header\n \ndef parse_content_transfer_encoding_header(value):\n ''\n\n \n \n cte_header=ContentTransferEncoding()\n if not value:\n cte_header.defects.append(errors.HeaderMissingRequiredValue(\n \"Missing content transfer encoding\"))\n return cte_header\n try:\n token,value=get_token(value)\n except errors.HeaderParseError:\n cte_header.defects.append(errors.InvalidHeaderDefect(\n \"Expected content transfer encoding but found {!r}\".format(value)))\n else:\n cte_header.append(token)\n cte_header.cte=token.value.strip().lower()\n if not value:\n return cte_header\n while value:\n cte_header.defects.append(errors.InvalidHeaderDefect(\n \"Extra text after content transfer encoding\"))\n if value[0]in PHRASE_ENDS:\n cte_header.append(ValueTerminal(value[0],'misplaced-special'))\n value=value[1:]\n else:\n token,value=get_phrase(value)\n cte_header.append(token)\n return cte_header\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef _steal_trailing_WSP_if_exists(lines):\n wsp=''\n if lines and lines[-1]and lines[-1][-1]in WSP:\n wsp=lines[-1][-1]\n lines[-1]=lines[-1][:-1]\n return wsp\n \ndef _refold_parse_tree(parse_tree,*,policy):\n ''\n\n \n \n maxlen=policy.max_line_length or sys.maxsize\n encoding='utf-8'if policy.utf8 else 'us-ascii'\n lines=['']\n last_ew=None\n wrap_as_ew_blocked=0\n want_encoding=False\n end_ew_not_allowed=Terminal('','wrap_as_ew_blocked')\n parts=list(parse_tree)\n while parts:\n part=parts.pop(0)\n 
if part is end_ew_not_allowed:\n wrap_as_ew_blocked -=1\n continue\n tstr=str(part)\n if part.token_type =='ptext'and set(tstr)&SPECIALS:\n \n want_encoding=True\n try:\n tstr.encode(encoding)\n charset=encoding\n except UnicodeEncodeError:\n if any(isinstance(x,errors.UndecodableBytesDefect)\n for x in part.all_defects):\n charset='unknown-8bit'\n else:\n \n \n charset='utf-8'\n want_encoding=True\n if part.token_type =='mime-parameters':\n \n _fold_mime_parameters(part,lines,maxlen,encoding)\n continue\n if want_encoding and not wrap_as_ew_blocked:\n if not part.as_ew_allowed:\n want_encoding=False\n last_ew=None\n if part.syntactic_break:\n encoded_part=part.fold(policy=policy)[:-len(policy.linesep)]\n if policy.linesep not in encoded_part:\n \n if len(encoded_part)>maxlen -len(lines[-1]):\n \n newline=_steal_trailing_WSP_if_exists(lines)\n \n lines.append(newline)\n lines[-1]+=encoded_part\n continue\n \n \n \n \n if not hasattr(part,'encode'):\n \n parts=list(part)+parts\n else:\n \n \n last_ew=_fold_as_ew(tstr,lines,maxlen,last_ew,\n part.ew_combine_allowed,charset)\n want_encoding=False\n continue\n if len(tstr)<=maxlen -len(lines[-1]):\n lines[-1]+=tstr\n continue\n \n \n \n if(part.syntactic_break and\n len(tstr)+1 <=maxlen):\n newline=_steal_trailing_WSP_if_exists(lines)\n if newline or part.startswith_fws():\n lines.append(newline+tstr)\n last_ew=None\n continue\n if not hasattr(part,'encode'):\n \n newparts=list(part)\n if not part.as_ew_allowed:\n wrap_as_ew_blocked +=1\n newparts.append(end_ew_not_allowed)\n parts=newparts+parts\n continue\n if part.as_ew_allowed and not wrap_as_ew_blocked:\n \n \n parts.insert(0,part)\n want_encoding=True\n continue\n \n newline=_steal_trailing_WSP_if_exists(lines)\n if newline or part.startswith_fws():\n lines.append(newline+tstr)\n else:\n \n lines[-1]+=tstr\n return policy.linesep.join(lines)+policy.linesep\n \ndef _fold_as_ew(to_encode,lines,maxlen,last_ew,ew_combine_allowed,charset):\n ''\n\n\n\n\n\n\n\n\n \n if last_ew is not None and ew_combine_allowed:\n to_encode=str(\n get_unstructured(lines[-1][last_ew:]+to_encode))\n lines[-1]=lines[-1][:last_ew]\n if to_encode[0]in WSP:\n \n \n leading_wsp=to_encode[0]\n to_encode=to_encode[1:]\n if(len(lines[-1])==maxlen):\n lines.append(_steal_trailing_WSP_if_exists(lines))\n lines[-1]+=leading_wsp\n trailing_wsp=''\n if to_encode[-1]in WSP:\n \n trailing_wsp=to_encode[-1]\n to_encode=to_encode[:-1]\n new_last_ew=len(lines[-1])if last_ew is None else last_ew\n \n encode_as='utf-8'if charset =='us-ascii'else charset\n \n \n \n chrome_len=len(encode_as)+7\n \n if(chrome_len+1)>=maxlen:\n raise errors.HeaderParseError(\n \"max_line_length is too small to fit an encoded word\")\n \n while to_encode:\n remaining_space=maxlen -len(lines[-1])\n text_space=remaining_space -chrome_len\n if text_space <=0:\n lines.append(' ')\n continue\n \n to_encode_word=to_encode[:text_space]\n encoded_word=_ew.encode(to_encode_word,charset=encode_as)\n excess=len(encoded_word)-remaining_space\n while excess >0:\n \n \n to_encode_word=to_encode_word[:-1]\n encoded_word=_ew.encode(to_encode_word,charset=encode_as)\n excess=len(encoded_word)-remaining_space\n lines[-1]+=encoded_word\n to_encode=to_encode[len(to_encode_word):]\n \n if to_encode:\n lines.append(' ')\n new_last_ew=len(lines[-1])\n lines[-1]+=trailing_wsp\n return new_last_ew if ew_combine_allowed else None\n \ndef _fold_mime_parameters(part,lines,maxlen,encoding):\n ''\n\n\n\n\n\n\n \n \n \n \n \n \n \n for name,value in part.params:\n \n \n \n \n \n if 
not lines[-1].rstrip().endswith(';'):\n lines[-1]+=';'\n charset=encoding\n error_handler='strict'\n try:\n value.encode(encoding)\n encoding_required=False\n except UnicodeEncodeError:\n encoding_required=True\n if utils._has_surrogates(value):\n charset='unknown-8bit'\n error_handler='surrogateescape'\n else:\n charset='utf-8'\n if encoding_required:\n encoded_value=urllib.parse.quote(\n value,safe='',errors=error_handler)\n tstr=\"{}*={}''{}\".format(name,charset,encoded_value)\n else:\n tstr='{}={}'.format(name,quote_string(value))\n if len(lines[-1])+len(tstr)+1 1:\n \n \n raise ValueError(\"Header values may not contain linefeed \"\n \"or carriage return characters\")\n return(name,self.header_factory(name,value))\n \n def header_fetch_parse(self,name,value):\n ''\n\n\n\n\n\n\n \n if hasattr(value,'name'):\n return value\n \n value=''.join(linesep_splitter.split(value))\n return self.header_factory(name,value)\n \n def fold(self,name,value):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return self._fold(name,value,refold_binary=True)\n \n def fold_binary(self,name,value):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n folded=self._fold(name,value,refold_binary=self.cte_type =='7bit')\n charset='utf8'if self.utf8 else 'ascii'\n return folded.encode(charset,'surrogateescape')\n \n def _fold(self,name,value,refold_binary=False):\n if hasattr(value,'name'):\n return value.fold(policy=self)\n maxlen=self.max_line_length if self.max_line_length else sys.maxsize\n lines=value.splitlines()\n refold=(self.refold_source =='all'or\n self.refold_source =='long'and\n (lines and len(lines[0])+len(name)+2 >maxlen or\n any(len(x)>maxlen for x in lines[1:])))\n if refold or refold_binary and _has_surrogates(value):\n return self.header_factory(name,''.join(lines)).fold(policy=self)\n return name+': '+self.linesep.join(lines)+self.linesep\n \n \ndefault=EmailPolicy()\n\ndel default.header_factory\nstrict=default.clone(raise_on_defect=True)\nSMTP=default.clone(linesep='\\r\\n')\nHTTP=default.clone(linesep='\\r\\n',max_line_length=None)\nSMTPUTF8=SMTP.clone(utf8=True)\n", ["email._policybase", "email.contentmanager", "email.headerregistry", "email.message", "email.utils", "re", "sys"]], "email": [".py", "\n\n\n\n\"\"\"A package for parsing, handling, and generating email messages.\"\"\"\n\n__all__=[\n'base64mime',\n'charset',\n'encoders',\n'errors',\n'feedparser',\n'generator',\n'header',\n'iterators',\n'message',\n'message_from_file',\n'message_from_binary_file',\n'message_from_string',\n'message_from_bytes',\n'mime',\n'parser',\n'quoprimime',\n'utils',\n]\n\n\n\n\n\ndef message_from_string(s,*args,**kws):\n ''\n\n\n \n from email.parser import Parser\n return Parser(*args,**kws).parsestr(s)\n \ndef message_from_bytes(s,*args,**kws):\n ''\n\n\n \n from email.parser import BytesParser\n return BytesParser(*args,**kws).parsebytes(s)\n \ndef message_from_file(fp,*args,**kws):\n ''\n\n\n \n from email.parser import Parser\n return Parser(*args,**kws).parse(fp)\n \ndef message_from_binary_file(fp,*args,**kws):\n ''\n\n\n \n from email.parser import BytesParser\n return BytesParser(*args,**kws).parse(fp)\n", ["email.parser"], 1], "email.message": [".py", "\n\n\n\n\"\"\"Basic message object for the email package object model.\"\"\"\n\n__all__=['Message','EmailMessage']\n\nimport binascii\nimport re\nimport quopri\nfrom io import BytesIO,StringIO\n\n\nfrom email import utils\nfrom email import errors\nfrom email._policybase import compat32\nfrom email import charset as _charset\nfrom email._encoded_words import 
decode_b\nCharset=_charset.Charset\n\nSEMISPACE='; '\n\n\n\ntspecials=re.compile(r'[ \\(\\)<>@,;:\\\\\"/\\[\\]\\?=]')\n\n\ndef _splitparam(param):\n\n\n\n\n a,sep,b=str(param).partition(';')\n if not sep:\n return a.strip(),None\n return a.strip(),b.strip()\n \ndef _formatparam(param,value=None,quote=True):\n ''\n\n\n\n\n\n\n \n if value is not None and len(value)>0:\n \n \n \n if isinstance(value,tuple):\n \n param +='*'\n value=utils.encode_rfc2231(value[2],value[0],value[1])\n return '%s=%s'%(param,value)\n else:\n try:\n value.encode('ascii')\n except UnicodeEncodeError:\n param +='*'\n value=utils.encode_rfc2231(value,'utf-8','')\n return '%s=%s'%(param,value)\n \n \n if quote or tspecials.search(value):\n return '%s=\"%s\"'%(param,utils.quote(value))\n else:\n return '%s=%s'%(param,value)\n else:\n return param\n \ndef _parseparam(s):\n\n s=';'+str(s)\n plist=[]\n while s[:1]==';':\n s=s[1:]\n end=s.find(';')\n while end >0 and(s.count('\"',0,end)-s.count('\\\\\"',0,end))%2:\n end=s.find(';',end+1)\n if end <0:\n end=len(s)\n f=s[:end]\n if '='in f:\n i=f.index('=')\n f=f[:i].strip().lower()+'='+f[i+1:].strip()\n plist.append(f.strip())\n s=s[end:]\n return plist\n \n \ndef _unquotevalue(value):\n\n\n\n\n if isinstance(value,tuple):\n return value[0],value[1],utils.unquote(value[2])\n else:\n return utils.unquote(value)\n \n \ndef _decode_uu(encoded):\n ''\n decoded_lines=[]\n encoded_lines_iter=iter(encoded.splitlines())\n for line in encoded_lines_iter:\n if line.startswith(b\"begin \"):\n mode,_,path=line.removeprefix(b\"begin \").partition(b\" \")\n try:\n int(mode,base=8)\n except ValueError:\n continue\n else:\n break\n else:\n raise ValueError(\"`begin` line not found\")\n for line in encoded_lines_iter:\n if not line:\n raise ValueError(\"Truncated input\")\n elif line.strip(b' \\t\\r\\n\\f')==b'end':\n break\n try:\n decoded_line=binascii.a2b_uu(line)\n except binascii.Error:\n \n nbytes=(((line[0]-32)&63)*4+5)//3\n decoded_line=binascii.a2b_uu(line[:nbytes])\n decoded_lines.append(decoded_line)\n \n return b''.join(decoded_lines)\n \n \nclass Message:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,policy=compat32):\n self.policy=policy\n self._headers=[]\n self._unixfrom=None\n self._payload=None\n self._charset=None\n \n self.preamble=self.epilogue=None\n self.defects=[]\n \n self._default_type='text/plain'\n \n def __str__(self):\n ''\n \n return self.as_string()\n \n def as_string(self,unixfrom=False,maxheaderlen=0,policy=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n from email.generator import Generator\n policy=self.policy if policy is None else policy\n fp=StringIO()\n g=Generator(fp,\n mangle_from_=False,\n maxheaderlen=maxheaderlen,\n policy=policy)\n g.flatten(self,unixfrom=unixfrom)\n return fp.getvalue()\n \n def __bytes__(self):\n ''\n \n return self.as_bytes()\n \n def as_bytes(self,unixfrom=False,policy=None):\n ''\n\n\n\n\n\n \n from email.generator import BytesGenerator\n policy=self.policy if policy is None else policy\n fp=BytesIO()\n g=BytesGenerator(fp,mangle_from_=False,policy=policy)\n g.flatten(self,unixfrom=unixfrom)\n return fp.getvalue()\n \n def is_multipart(self):\n ''\n return isinstance(self._payload,list)\n \n \n \n \n def set_unixfrom(self,unixfrom):\n self._unixfrom=unixfrom\n \n def get_unixfrom(self):\n return self._unixfrom\n \n \n \n \n def attach(self,payload):\n ''\n\n\n\n\n \n if self._payload is None:\n self._payload=[payload]\n else:\n try:\n self._payload.append(payload)\n except AttributeError:\n raise TypeError(\"Attach is not 
valid on a message with a\"\n \" non-multipart payload\")\n \n def get_payload(self,i=None,decode=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.is_multipart():\n if decode:\n return None\n if i is None:\n return self._payload\n else:\n return self._payload[i]\n \n \n if i is not None and not isinstance(self._payload,list):\n raise TypeError('Expected list, got %s'%type(self._payload))\n payload=self._payload\n \n cte=str(self.get('content-transfer-encoding','')).lower()\n \n if isinstance(payload,str):\n if utils._has_surrogates(payload):\n bpayload=payload.encode('ascii','surrogateescape')\n if not decode:\n try:\n payload=bpayload.decode(self.get_param('charset','ascii'),'replace')\n except LookupError:\n payload=bpayload.decode('ascii','replace')\n elif decode:\n try:\n bpayload=payload.encode('ascii')\n except UnicodeError:\n \n \n \n \n bpayload=payload.encode('raw-unicode-escape')\n if not decode:\n return payload\n if cte =='quoted-printable':\n return quopri.decodestring(bpayload)\n elif cte =='base64':\n \n \n value,defects=decode_b(b''.join(bpayload.splitlines()))\n for defect in defects:\n self.policy.handle_defect(self,defect)\n return value\n elif cte in('x-uuencode','uuencode','uue','x-uue'):\n try:\n return _decode_uu(bpayload)\n except ValueError:\n \n return bpayload\n if isinstance(payload,str):\n return bpayload\n return payload\n \n def set_payload(self,payload,charset=None):\n ''\n\n\n\n \n if hasattr(payload,'encode'):\n if charset is None:\n self._payload=payload\n return\n if not isinstance(charset,Charset):\n charset=Charset(charset)\n payload=payload.encode(charset.output_charset)\n if hasattr(payload,'decode'):\n self._payload=payload.decode('ascii','surrogateescape')\n else:\n self._payload=payload\n if charset is not None:\n self.set_charset(charset)\n \n def set_charset(self,charset):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if charset is None:\n self.del_param('charset')\n self._charset=None\n return\n if not isinstance(charset,Charset):\n charset=Charset(charset)\n self._charset=charset\n if 'MIME-Version'not in self:\n self.add_header('MIME-Version','1.0')\n if 'Content-Type'not in self:\n self.add_header('Content-Type','text/plain',\n charset=charset.get_output_charset())\n else:\n self.set_param('charset',charset.get_output_charset())\n if charset !=charset.get_output_charset():\n self._payload=charset.body_encode(self._payload)\n if 'Content-Transfer-Encoding'not in self:\n cte=charset.get_body_encoding()\n try:\n cte(self)\n except TypeError:\n \n \n \n payload=self._payload\n if payload:\n try:\n payload=payload.encode('ascii','surrogateescape')\n except UnicodeError:\n payload=payload.encode(charset.output_charset)\n self._payload=charset.body_encode(payload)\n self.add_header('Content-Transfer-Encoding',cte)\n \n def get_charset(self):\n ''\n \n return self._charset\n \n \n \n \n def __len__(self):\n ''\n return len(self._headers)\n \n def __getitem__(self,name):\n ''\n\n\n\n\n\n\n \n return self.get(name)\n \n def __setitem__(self,name,val):\n ''\n\n\n\n \n max_count=self.policy.header_max_count(name)\n if max_count:\n lname=name.lower()\n found=0\n for k,v in self._headers:\n if k.lower()==lname:\n found +=1\n if found >=max_count:\n raise ValueError(\"There may be at most {} {} headers \"\n \"in a message\".format(max_count,name))\n self._headers.append(self.policy.header_store_parse(name,val))\n \n def __delitem__(self,name):\n ''\n\n\n \n name=name.lower()\n newheaders=[]\n for k,v in 
self._headers:\n if k.lower()!=name:\n newheaders.append((k,v))\n self._headers=newheaders\n \n def __contains__(self,name):\n name_lower=name.lower()\n for k,v in self._headers:\n if name_lower ==k.lower():\n return True\n return False\n \n def __iter__(self):\n for field,value in self._headers:\n yield field\n \n def keys(self):\n ''\n\n\n\n\n\n \n return[k for k,v in self._headers]\n \n def values(self):\n ''\n\n\n\n\n\n \n return[self.policy.header_fetch_parse(k,v)\n for k,v in self._headers]\n \n def items(self):\n ''\n\n\n\n\n\n \n return[(k,self.policy.header_fetch_parse(k,v))\n for k,v in self._headers]\n \n def get(self,name,failobj=None):\n ''\n\n\n\n \n name=name.lower()\n for k,v in self._headers:\n if k.lower()==name:\n return self.policy.header_fetch_parse(k,v)\n return failobj\n \n \n \n \n \n \n def set_raw(self,name,value):\n ''\n\n\n \n self._headers.append((name,value))\n \n def raw_items(self):\n ''\n\n\n \n return iter(self._headers.copy())\n \n \n \n \n \n def get_all(self,name,failobj=None):\n ''\n\n\n\n\n\n\n \n values=[]\n name=name.lower()\n for k,v in self._headers:\n if k.lower()==name:\n values.append(self.policy.header_fetch_parse(k,v))\n if not values:\n return failobj\n return values\n \n def add_header(self,_name,_value,**_params):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n parts=[]\n for k,v in _params.items():\n if v is None:\n parts.append(k.replace('_','-'))\n else:\n parts.append(_formatparam(k.replace('_','-'),v))\n if _value is not None:\n parts.insert(0,_value)\n self[_name]=SEMISPACE.join(parts)\n \n def replace_header(self,_name,_value):\n ''\n\n\n\n\n \n _name=_name.lower()\n for i,(k,v)in zip(range(len(self._headers)),self._headers):\n if k.lower()==_name:\n self._headers[i]=self.policy.header_store_parse(k,_value)\n break\n else:\n raise KeyError(_name)\n \n \n \n \n \n def get_content_type(self):\n ''\n\n\n\n\n\n\n\n\n\n\n \n missing=object()\n value=self.get('content-type',missing)\n if value is missing:\n \n return self.get_default_type()\n ctype=_splitparam(value)[0].lower()\n \n if ctype.count('/')!=1:\n return 'text/plain'\n return ctype\n \n def get_content_maintype(self):\n ''\n\n\n\n \n ctype=self.get_content_type()\n return ctype.split('/')[0]\n \n def get_content_subtype(self):\n ''\n\n\n\n \n ctype=self.get_content_type()\n return ctype.split('/')[1]\n \n def get_default_type(self):\n ''\n\n\n\n\n \n return self._default_type\n \n def set_default_type(self,ctype):\n ''\n\n\n\n\n \n self._default_type=ctype\n \n def _get_params_preserve(self,failobj,header):\n \n \n missing=object()\n value=self.get(header,missing)\n if value is missing:\n return failobj\n params=[]\n for p in _parseparam(value):\n try:\n name,val=p.split('=',1)\n name=name.strip()\n val=val.strip()\n except ValueError:\n \n name=p.strip()\n val=''\n params.append((name,val))\n params=utils.decode_params(params)\n return params\n \n def get_params(self,failobj=None,header='content-type',unquote=True):\n ''\n\n\n\n\n\n\n\n\n\n\n \n missing=object()\n params=self._get_params_preserve(missing,header)\n if params is missing:\n return failobj\n if unquote:\n return[(k,_unquotevalue(v))for k,v in params]\n else:\n return params\n \n def get_param(self,param,failobj=None,header='content-type',\n unquote=True):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if header not in self:\n return failobj\n for k,v in self._get_params_preserve(failobj,header):\n if k.lower()==param.lower():\n if unquote:\n return _unquotevalue(v)\n else:\n return v\n return failobj\n \n def 
set_param(self,param,value,header='Content-Type',requote=True,\n charset=None,language='',replace=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not isinstance(value,tuple)and charset:\n value=(charset,language,value)\n \n if header not in self and header.lower()=='content-type':\n ctype='text/plain'\n else:\n ctype=self.get(header)\n if not self.get_param(param,header=header):\n if not ctype:\n ctype=_formatparam(param,value,requote)\n else:\n ctype=SEMISPACE.join(\n [ctype,_formatparam(param,value,requote)])\n else:\n ctype=''\n for old_param,old_value in self.get_params(header=header,\n unquote=requote):\n append_param=''\n if old_param.lower()==param.lower():\n append_param=_formatparam(param,value,requote)\n else:\n append_param=_formatparam(old_param,old_value,requote)\n if not ctype:\n ctype=append_param\n else:\n ctype=SEMISPACE.join([ctype,append_param])\n if ctype !=self.get(header):\n if replace:\n self.replace_header(header,ctype)\n else:\n del self[header]\n self[header]=ctype\n \n def del_param(self,param,header='content-type',requote=True):\n ''\n\n\n\n\n\n \n if header not in self:\n return\n new_ctype=''\n for p,v in self.get_params(header=header,unquote=requote):\n if p.lower()!=param.lower():\n if not new_ctype:\n new_ctype=_formatparam(p,v,requote)\n else:\n new_ctype=SEMISPACE.join([new_ctype,\n _formatparam(p,v,requote)])\n if new_ctype !=self.get(header):\n del self[header]\n self[header]=new_ctype\n \n def set_type(self,type,header='Content-Type',requote=True):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if not type.count('/')==1:\n raise ValueError\n \n if header.lower()=='content-type':\n del self['mime-version']\n self['MIME-Version']='1.0'\n if header not in self:\n self[header]=type\n return\n params=self.get_params(header=header,unquote=requote)\n del self[header]\n self[header]=type\n \n for p,v in params[1:]:\n self.set_param(p,v,header,requote)\n \n def get_filename(self,failobj=None):\n ''\n\n\n\n\n\n \n missing=object()\n filename=self.get_param('filename',missing,'content-disposition')\n if filename is missing:\n filename=self.get_param('name',missing,'content-type')\n if filename is missing:\n return failobj\n return utils.collapse_rfc2231_value(filename).strip()\n \n def get_boundary(self,failobj=None):\n ''\n\n\n\n \n missing=object()\n boundary=self.get_param('boundary',missing)\n if boundary is missing:\n return failobj\n \n return utils.collapse_rfc2231_value(boundary).rstrip()\n \n def set_boundary(self,boundary):\n ''\n\n\n\n\n\n\n\n \n missing=object()\n params=self._get_params_preserve(missing,'content-type')\n if params is missing:\n \n \n raise errors.HeaderParseError('No Content-Type header found')\n newparams=[]\n foundp=False\n for pk,pv in params:\n if pk.lower()=='boundary':\n newparams.append(('boundary','\"%s\"'%boundary))\n foundp=True\n else:\n newparams.append((pk,pv))\n if not foundp:\n \n \n \n newparams.append(('boundary','\"%s\"'%boundary))\n \n newheaders=[]\n for h,v in self._headers:\n if h.lower()=='content-type':\n parts=[]\n for k,v in newparams:\n if v =='':\n parts.append(k)\n else:\n parts.append('%s=%s'%(k,v))\n val=SEMISPACE.join(parts)\n newheaders.append(self.policy.header_store_parse(h,val))\n \n else:\n newheaders.append((h,v))\n self._headers=newheaders\n \n def get_content_charset(self,failobj=None):\n ''\n\n\n\n\n \n missing=object()\n charset=self.get_param('charset',missing)\n if charset is missing:\n return failobj\n if isinstance(charset,tuple):\n \n pcharset=charset[0]or 'us-ascii'\n try:\n \n \n \n 
as_bytes=charset[2].encode('raw-unicode-escape')\n charset=str(as_bytes,pcharset)\n except(LookupError,UnicodeError):\n charset=charset[2]\n \n try:\n charset.encode('us-ascii')\n except UnicodeError:\n return failobj\n \n return charset.lower()\n \n def get_charsets(self,failobj=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return[part.get_content_charset(failobj)for part in self.walk()]\n \n def get_content_disposition(self):\n ''\n\n\n\n \n value=self.get('content-disposition')\n if value is None:\n return None\n c_d=_splitparam(value)[0].lower()\n return c_d\n \n \n from email.iterators import walk\n \n \nclass MIMEPart(Message):\n\n def __init__(self,policy=None):\n if policy is None:\n from email.policy import default\n policy=default\n super().__init__(policy)\n \n \n def as_string(self,unixfrom=False,maxheaderlen=None,policy=None):\n ''\n\n\n\n\n\n\n\n\n \n policy=self.policy if policy is None else policy\n if maxheaderlen is None:\n maxheaderlen=policy.max_line_length\n return super().as_string(unixfrom,maxheaderlen,policy)\n \n def __str__(self):\n return self.as_string(policy=self.policy.clone(utf8=True))\n \n def is_attachment(self):\n c_d=self.get('content-disposition')\n return False if c_d is None else c_d.content_disposition =='attachment'\n \n def _find_body(self,part,preferencelist):\n if part.is_attachment():\n return\n maintype,subtype=part.get_content_type().split('/')\n if maintype =='text':\n if subtype in preferencelist:\n yield(preferencelist.index(subtype),part)\n return\n if maintype !='multipart'or not self.is_multipart():\n return\n if subtype !='related':\n for subpart in part.iter_parts():\n yield from self._find_body(subpart,preferencelist)\n return\n if 'related'in preferencelist:\n yield(preferencelist.index('related'),part)\n candidate=None\n start=part.get_param('start')\n if start:\n for subpart in part.iter_parts():\n if subpart['content-id']==start:\n candidate=subpart\n break\n if candidate is None:\n subparts=part.get_payload()\n candidate=subparts[0]if subparts else None\n if candidate is not None:\n yield from self._find_body(candidate,preferencelist)\n \n def get_body(self,preferencelist=('related','html','plain')):\n ''\n\n\n\n\n\n\n\n \n best_prio=len(preferencelist)\n body=None\n for prio,part in self._find_body(self,preferencelist):\n if prio From ',payload)\n self._write_lines(payload)\n \n \n _writeBody=_handle_text\n \n def _handle_multipart(self,msg):\n \n \n \n msgtexts=[]\n subparts=msg.get_payload()\n if subparts is None:\n subparts=[]\n elif isinstance(subparts,str):\n \n self.write(subparts)\n return\n elif not isinstance(subparts,list):\n \n subparts=[subparts]\n for part in subparts:\n s=self._new_buffer()\n g=self.clone(s)\n g.flatten(part,unixfrom=False,linesep=self._NL)\n msgtexts.append(s.getvalue())\n \n boundary=msg.get_boundary()\n if not boundary:\n \n \n alltext=self._encoded_NL.join(msgtexts)\n boundary=self._make_boundary(alltext)\n msg.set_boundary(boundary)\n \n if msg.preamble is not None:\n if self._mangle_from_:\n preamble=fcre.sub('>From ',msg.preamble)\n else:\n preamble=msg.preamble\n self._write_lines(preamble)\n self.write(self._NL)\n \n self.write('--'+boundary+self._NL)\n \n if msgtexts:\n self._fp.write(msgtexts.pop(0))\n \n \n \n for body_part in msgtexts:\n \n self.write(self._NL+'--'+boundary+self._NL)\n \n self._fp.write(body_part)\n \n self.write(self._NL+'--'+boundary+'--'+self._NL)\n if msg.epilogue is not None:\n if self._mangle_from_:\n epilogue=fcre.sub('>From ',msg.epilogue)\n else:\n 
epilogue=msg.epilogue\n self._write_lines(epilogue)\n \n def _handle_multipart_signed(self,msg):\n \n \n \n p=self.policy\n self.policy=p.clone(max_line_length=0)\n try:\n self._handle_multipart(msg)\n finally:\n self.policy=p\n \n def _handle_message_delivery_status(self,msg):\n \n \n \n blocks=[]\n for part in msg.get_payload():\n s=self._new_buffer()\n g=self.clone(s)\n g.flatten(part,unixfrom=False,linesep=self._NL)\n text=s.getvalue()\n lines=text.split(self._encoded_NL)\n \n if lines and lines[-1]==self._encoded_EMPTY:\n blocks.append(self._encoded_NL.join(lines[:-1]))\n else:\n blocks.append(text)\n \n \n \n self._fp.write(self._encoded_NL.join(blocks))\n \n def _handle_message(self,msg):\n s=self._new_buffer()\n g=self.clone(s)\n \n \n \n \n \n \n \n \n \n payload=msg._payload\n if isinstance(payload,list):\n g.flatten(msg.get_payload(0),unixfrom=False,linesep=self._NL)\n payload=s.getvalue()\n else:\n payload=self._encode(payload)\n self._fp.write(payload)\n \n \n \n \n \n \n @classmethod\n def _make_boundary(cls,text=None):\n \n \n token=random.randrange(sys.maxsize)\n boundary=('='*15)+(_fmt %token)+'=='\n if text is None:\n return boundary\n b=boundary\n counter=0\n while True:\n cre=cls._compile_re('^--'+re.escape(b)+'(--)?$',re.MULTILINE)\n if not cre.search(text):\n break\n b=boundary+'.'+str(counter)\n counter +=1\n return b\n \n @classmethod\n def _compile_re(cls,s,flags):\n return re.compile(s,flags)\n \n \nclass BytesGenerator(Generator):\n ''\n\n\n\n\n\n\n\n\n\n \n \n def write(self,s):\n self._fp.write(s.encode('ascii','surrogateescape'))\n \n def _new_buffer(self):\n return BytesIO()\n \n def _encode(self,s):\n return s.encode('ascii')\n \n def _write_headers(self,msg):\n \n \n for h,v in msg.raw_items():\n self._fp.write(self.policy.fold_binary(h,v))\n \n self.write(self._NL)\n \n def _handle_text(self,msg):\n \n \n if msg._payload is None:\n return\n if _has_surrogates(msg._payload)and not self.policy.cte_type =='7bit':\n if self._mangle_from_:\n msg._payload=fcre.sub(\">From \",msg._payload)\n self._write_lines(msg._payload)\n else:\n super(BytesGenerator,self)._handle_text(msg)\n \n \n _writeBody=_handle_text\n \n @classmethod\n def _compile_re(cls,s,flags):\n return re.compile(s.encode('ascii'),flags)\n \n \n_FMT='[Non-text (%(type)s) part of message omitted, filename %(filename)s]'\n\nclass DecodedGenerator(Generator):\n ''\n\n\n\n \n def __init__(self,outfp,mangle_from_=None,maxheaderlen=None,fmt=None,*,\n policy=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n Generator.__init__(self,outfp,mangle_from_,maxheaderlen,\n policy=policy)\n if fmt is None:\n self._fmt=_FMT\n else:\n self._fmt=fmt\n \n def _dispatch(self,msg):\n for part in msg.walk():\n maintype=part.get_content_maintype()\n if maintype =='text':\n print(part.get_payload(decode=False),file=self)\n elif maintype =='multipart':\n \n pass\n else:\n print(self._fmt %{\n 'type':part.get_content_type(),\n 'maintype':part.get_content_maintype(),\n 'subtype':part.get_content_subtype(),\n 'filename':part.get_filename('[no filename]'),\n 'description':part.get('Content-Description',\n '[no description]'),\n 'encoding':part.get('Content-Transfer-Encoding',\n '[no encoding]'),\n },file=self)\n \n \n \n_width=len(repr(sys.maxsize -1))\n_fmt='%%0%dd'%_width\n\n\n_make_boundary=Generator._make_boundary\n", ["copy", "email.utils", "io", "random", "re", "sys", "time"]], "email.utils": [".py", "\n\n\n\n\"\"\"Miscellaneous 
utilities.\"\"\"\n\n__all__=[\n'collapse_rfc2231_value',\n'decode_params',\n'decode_rfc2231',\n'encode_rfc2231',\n'formataddr',\n'formatdate',\n'format_datetime',\n'getaddresses',\n'make_msgid',\n'mktime_tz',\n'parseaddr',\n'parsedate',\n'parsedate_tz',\n'parsedate_to_datetime',\n'unquote',\n]\n\nimport os\nimport re\nimport time\nimport random\nimport socket\nimport datetime\nimport urllib.parse\n\nfrom email._parseaddr import quote\nfrom email._parseaddr import AddressList as _AddressList\nfrom email._parseaddr import mktime_tz\n\nfrom email._parseaddr import parsedate,parsedate_tz,_parsedate_tz\n\n\nfrom email.charset import Charset\n\nCOMMASPACE=', '\nEMPTYSTRING=''\nUEMPTYSTRING=''\nCRLF='\\r\\n'\nTICK=\"'\"\n\nspecialsre=re.compile(r'[][\\\\()<>@,:;\".]')\nescapesre=re.compile(r'[\\\\\"]')\n\ndef _has_surrogates(s):\n ''\n \n \n \n try:\n s.encode()\n return False\n except UnicodeEncodeError:\n return True\n \n \n \ndef _sanitize(string):\n\n\n\n\n original_bytes=string.encode('utf-8','surrogateescape')\n return original_bytes.decode('utf-8','replace')\n \n \n \n \n \ndef formataddr(pair,charset='utf-8'):\n ''\n\n\n\n\n\n\n\n\n\n\n \n name,address=pair\n \n address.encode('ascii')\n if name:\n try:\n name.encode('ascii')\n except UnicodeEncodeError:\n if isinstance(charset,str):\n charset=Charset(charset)\n encoded_name=charset.header_encode(name)\n return \"%s <%s>\"%(encoded_name,address)\n else:\n quotes=''\n if specialsre.search(name):\n quotes='\"'\n name=escapesre.sub(r'\\\\\\g<0>',name)\n return '%s%s%s <%s>'%(quotes,name,quotes,address)\n return address\n \n \n \ndef getaddresses(fieldvalues):\n ''\n all=COMMASPACE.join(str(v)for v in fieldvalues)\n a=_AddressList(all)\n return a.addresslist\n \n \ndef _format_timetuple_and_zone(timetuple,zone):\n return '%s, %02d %s %04d %02d:%02d:%02d %s'%(\n ['Mon','Tue','Wed','Thu','Fri','Sat','Sun'][timetuple[6]],\n timetuple[2],\n ['Jan','Feb','Mar','Apr','May','Jun',\n 'Jul','Aug','Sep','Oct','Nov','Dec'][timetuple[1]-1],\n timetuple[0],timetuple[3],timetuple[4],timetuple[5],\n zone)\n \ndef formatdate(timeval=None,localtime=False,usegmt=False):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n if timeval is None:\n timeval=time.time()\n dt=datetime.datetime.fromtimestamp(timeval,datetime.timezone.utc)\n \n if localtime:\n dt=dt.astimezone()\n usegmt=False\n elif not usegmt:\n dt=dt.replace(tzinfo=None)\n return format_datetime(dt,usegmt)\n \ndef format_datetime(dt,usegmt=False):\n ''\n\n\n\n\n \n now=dt.timetuple()\n if usegmt:\n if dt.tzinfo is None or dt.tzinfo !=datetime.timezone.utc:\n raise ValueError(\"usegmt option requires a UTC datetime\")\n zone='GMT'\n elif dt.tzinfo is None:\n zone='-0000'\n else:\n zone=dt.strftime(\"%z\")\n return _format_timetuple_and_zone(now,zone)\n \n \ndef make_msgid(idstring=None,domain=None):\n ''\n\n\n\n\n\n\n\n \n timeval=int(time.time()*100)\n pid=os.getpid()\n randint=random.getrandbits(64)\n if idstring is None:\n idstring=''\n else:\n idstring='.'+idstring\n if domain is None:\n domain=socket.getfqdn()\n msgid='<%d.%d.%d%s@%s>'%(timeval,pid,randint,idstring,domain)\n return msgid\n \n \ndef parsedate_to_datetime(data):\n parsed_date_tz=_parsedate_tz(data)\n if parsed_date_tz is None:\n raise ValueError('Invalid date value or format \"%s\"'%str(data))\n *dtuple,tz=parsed_date_tz\n if tz is None:\n return datetime.datetime(*dtuple[:6])\n return datetime.datetime(*dtuple[:6],\n tzinfo=datetime.timezone(datetime.timedelta(seconds=tz)))\n \n \ndef parseaddr(addr):\n ''\n\n\n\n\n \n 
addrs=_AddressList(addr).addresslist\n if not addrs:\n return '',''\n return addrs[0]\n \n \n \ndef unquote(str):\n ''\n if len(str)>1:\n if str.startswith('\"')and str.endswith('\"'):\n return str[1:-1].replace('\\\\\\\\','\\\\').replace('\\\\\"','\"')\n if str.startswith('<')and str.endswith('>'):\n return str[1:-1]\n return str\n \n \n \n \ndef decode_rfc2231(s):\n ''\n parts=s.split(TICK,2)\n if len(parts)<=2:\n return None,None,s\n return parts\n \n \ndef encode_rfc2231(s,charset=None,language=None):\n ''\n\n\n\n\n \n s=urllib.parse.quote(s,safe='',encoding=charset or 'ascii')\n if charset is None and language is None:\n return s\n if language is None:\n language=''\n return \"%s'%s'%s\"%(charset,language,s)\n \n \nrfc2231_continuation=re.compile(r'^(?P\\w+)\\*((?P[0-9]+)\\*?)?$',\nre.ASCII)\n\ndef decode_params(params):\n ''\n\n\n \n new_params=[params[0]]\n \n \n \n rfc2231_params={}\n for name,value in params[1:]:\n encoded=name.endswith('*')\n value=unquote(value)\n mo=rfc2231_continuation.match(name)\n if mo:\n name,num=mo.group('name','num')\n if num is not None:\n num=int(num)\n rfc2231_params.setdefault(name,[]).append((num,value,encoded))\n else:\n new_params.append((name,'\"%s\"'%quote(value)))\n if rfc2231_params:\n for name,continuations in rfc2231_params.items():\n value=[]\n extended=False\n \n continuations.sort()\n \n \n \n \n \n for num,s,encoded in continuations:\n if encoded:\n \n \n \n s=urllib.parse.unquote(s,encoding=\"latin-1\")\n extended=True\n value.append(s)\n value=quote(EMPTYSTRING.join(value))\n if extended:\n charset,language,value=decode_rfc2231(value)\n new_params.append((name,(charset,language,'\"%s\"'%value)))\n else:\n new_params.append((name,'\"%s\"'%value))\n return new_params\n \ndef collapse_rfc2231_value(value,errors='replace',\nfallback_charset='us-ascii'):\n if not isinstance(value,tuple)or len(value)!=3:\n return unquote(value)\n \n \n \n charset,language,text=value\n if charset is None:\n \n \n charset=fallback_charset\n rawbytes=bytes(text,'raw-unicode-escape')\n try:\n return str(rawbytes,charset,errors)\n except LookupError:\n \n return unquote(text)\n \n \n \n \n \n \n \n \ndef localtime(dt=None,isdst=None):\n ''\n\n\n\n\n\n\n\n \n if isdst is not None:\n import warnings\n warnings._deprecated(\n \"The 'isdst' parameter to 'localtime'\",\n message='{name} is deprecated and slated for removal in Python {remove}',\n remove=(3,14),\n )\n if dt is None:\n dt=datetime.datetime.now()\n return dt.astimezone()\n", ["datetime", "email._parseaddr", "email.charset", "os", "random", "re", "socket", "time", "urllib.parse", "warnings"]], "email.charset": [".py", "\n\n\n\n__all__=[\n'Charset',\n'add_alias',\n'add_charset',\n'add_codec',\n]\n\nfrom functools import partial\n\nimport email.base64mime\nimport email.quoprimime\n\nfrom email import errors\nfrom email.encoders import 
encode_7or8bit\n\n\n\nQP=1\nBASE64=2\nSHORTEST=3\n\n\nRFC2047_CHROME_LEN=7\n\nDEFAULT_CHARSET='us-ascii'\nUNKNOWN8BIT='unknown-8bit'\nEMPTYSTRING=''\n\n\n\nCHARSETS={\n\n'iso-8859-1':(QP,QP,None),\n'iso-8859-2':(QP,QP,None),\n'iso-8859-3':(QP,QP,None),\n'iso-8859-4':(QP,QP,None),\n\n\n\n\n'iso-8859-9':(QP,QP,None),\n'iso-8859-10':(QP,QP,None),\n\n'iso-8859-13':(QP,QP,None),\n'iso-8859-14':(QP,QP,None),\n'iso-8859-15':(QP,QP,None),\n'iso-8859-16':(QP,QP,None),\n'windows-1252':(QP,QP,None),\n'viscii':(QP,QP,None),\n'us-ascii':(None,None,None),\n'big5':(BASE64,BASE64,None),\n'gb2312':(BASE64,BASE64,None),\n'euc-jp':(BASE64,None,'iso-2022-jp'),\n'shift_jis':(BASE64,None,'iso-2022-jp'),\n'iso-2022-jp':(BASE64,None,None),\n'koi8-r':(BASE64,BASE64,None),\n'utf-8':(SHORTEST,BASE64,'utf-8'),\n}\n\n\n\nALIASES={\n'latin_1':'iso-8859-1',\n'latin-1':'iso-8859-1',\n'latin_2':'iso-8859-2',\n'latin-2':'iso-8859-2',\n'latin_3':'iso-8859-3',\n'latin-3':'iso-8859-3',\n'latin_4':'iso-8859-4',\n'latin-4':'iso-8859-4',\n'latin_5':'iso-8859-9',\n'latin-5':'iso-8859-9',\n'latin_6':'iso-8859-10',\n'latin-6':'iso-8859-10',\n'latin_7':'iso-8859-13',\n'latin-7':'iso-8859-13',\n'latin_8':'iso-8859-14',\n'latin-8':'iso-8859-14',\n'latin_9':'iso-8859-15',\n'latin-9':'iso-8859-15',\n'latin_10':'iso-8859-16',\n'latin-10':'iso-8859-16',\n'cp949':'ks_c_5601-1987',\n'euc_jp':'euc-jp',\n'euc_kr':'euc-kr',\n'ascii':'us-ascii',\n}\n\n\n\nCODEC_MAP={\n'gb2312':'eucgb2312_cn',\n'big5':'big5_tw',\n\n\n\n'us-ascii':None,\n}\n\n\n\ndef add_charset(charset,header_enc=None,body_enc=None,output_charset=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if body_enc ==SHORTEST:\n raise ValueError('SHORTEST not allowed for body_enc')\n CHARSETS[charset]=(header_enc,body_enc,output_charset)\n \n \ndef add_alias(alias,canonical):\n ''\n\n\n\n \n ALIASES[alias]=canonical\n \n \ndef add_codec(charset,codecname):\n ''\n\n\n\n\n \n CODEC_MAP[charset]=codecname\n \n \n \n \ndef _encode(string,codec):\n if codec ==UNKNOWN8BIT:\n return string.encode('ascii','surrogateescape')\n else:\n return string.encode(codec)\n \n \nclass Charset:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,input_charset=DEFAULT_CHARSET):\n \n \n \n \n try:\n if isinstance(input_charset,str):\n input_charset.encode('ascii')\n else:\n input_charset=str(input_charset,'ascii')\n except UnicodeError:\n raise errors.CharsetError(input_charset)\n input_charset=input_charset.lower()\n \n self.input_charset=ALIASES.get(input_charset,input_charset)\n \n \n \n henc,benc,conv=CHARSETS.get(self.input_charset,\n (SHORTEST,BASE64,None))\n if not conv:\n conv=self.input_charset\n \n self.header_encoding=henc\n self.body_encoding=benc\n self.output_charset=ALIASES.get(conv,conv)\n \n \n self.input_codec=CODEC_MAP.get(self.input_charset,\n self.input_charset)\n self.output_codec=CODEC_MAP.get(self.output_charset,\n self.output_charset)\n \n def __repr__(self):\n return self.input_charset.lower()\n \n def __eq__(self,other):\n return str(self)==str(other).lower()\n \n def get_body_encoding(self):\n ''\n\n\n\n\n\n\n\n\n\n\n \n assert self.body_encoding !=SHORTEST\n if self.body_encoding ==QP:\n return 'quoted-printable'\n elif self.body_encoding ==BASE64:\n return 'base64'\n else:\n return encode_7or8bit\n \n def get_output_charset(self):\n ''\n\n\n\n \n return self.output_charset or self.input_charset\n \n def header_encode(self,string):\n ''\n\n\n\n\n\n\n\n\n \n codec=self.output_codec or 'us-ascii'\n 
header_bytes=_encode(string,codec)\n \n encoder_module=self._get_encoder(header_bytes)\n if encoder_module is None:\n return string\n return encoder_module.header_encode(header_bytes,codec)\n \n def header_encode_lines(self,string,maxlengths):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n codec=self.output_codec or 'us-ascii'\n header_bytes=_encode(string,codec)\n encoder_module=self._get_encoder(header_bytes)\n encoder=partial(encoder_module.header_encode,charset=codec)\n \n \n charset=self.get_output_charset()\n extra=len(charset)+RFC2047_CHROME_LEN\n \n \n \n \n \n \n \n \n \n \n \n lines=[]\n current_line=[]\n maxlen=next(maxlengths)-extra\n for character in string:\n current_line.append(character)\n this_line=EMPTYSTRING.join(current_line)\n length=encoder_module.header_length(_encode(this_line,charset))\n if length >maxlen:\n \n current_line.pop()\n \n if not lines and not current_line:\n lines.append(None)\n else:\n joined_line=EMPTYSTRING.join(current_line)\n header_bytes=_encode(joined_line,codec)\n lines.append(encoder(header_bytes))\n current_line=[character]\n maxlen=next(maxlengths)-extra\n joined_line=EMPTYSTRING.join(current_line)\n header_bytes=_encode(joined_line,codec)\n lines.append(encoder(header_bytes))\n return lines\n \n def _get_encoder(self,header_bytes):\n if self.header_encoding ==BASE64:\n return email.base64mime\n elif self.header_encoding ==QP:\n return email.quoprimime\n elif self.header_encoding ==SHORTEST:\n len64=email.base64mime.header_length(header_bytes)\n lenqp=email.quoprimime.header_length(header_bytes)\n if len64 '\nb'?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`'\nb'abcdefghijklmnopqrstuvwxyz{|}~\\t'):\n _QUOPRI_BODY_MAP[c]=chr(c)\n \n \n \n \ndef header_check(octet):\n ''\n return chr(octet)!=_QUOPRI_HEADER_MAP[octet]\n \n \ndef body_check(octet):\n ''\n return chr(octet)!=_QUOPRI_BODY_MAP[octet]\n \n \ndef header_length(bytearray):\n ''\n\n\n\n\n\n\n\n \n return sum(len(_QUOPRI_HEADER_MAP[octet])for octet in bytearray)\n \n \ndef body_length(bytearray):\n ''\n\n\n\n\n \n return sum(len(_QUOPRI_BODY_MAP[octet])for octet in bytearray)\n \n \ndef _max_append(L,s,maxlen,extra=''):\n if not isinstance(s,str):\n s=chr(s)\n if not L:\n L.append(s.lstrip())\n elif len(L[-1])+len(s)<=maxlen:\n L[-1]+=extra+s\n else:\n L.append(s.lstrip())\n \n \ndef unquote(s):\n ''\n return chr(int(s[1:3],16))\n \n \ndef quote(c):\n return _QUOPRI_MAP[ord(c)]\n \n \ndef header_encode(header_bytes,charset='iso-8859-1'):\n ''\n\n\n\n\n\n\n\n\n \n \n if not header_bytes:\n return ''\n \n encoded=header_bytes.decode('latin1').translate(_QUOPRI_HEADER_MAP)\n \n \n return '=?%s?q?%s?='%(charset,encoded)\n \n \n_QUOPRI_BODY_ENCODE_MAP=_QUOPRI_BODY_MAP[:]\nfor c in b'\\r\\n':\n _QUOPRI_BODY_ENCODE_MAP[c]=chr(c)\ndel c\n\ndef body_encode(body,maxlinelen=76,eol=NL):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if maxlinelen <4:\n raise ValueError(\"maxlinelen must be at least 4\")\n if not body:\n return body\n \n \n body=body.translate(_QUOPRI_BODY_ENCODE_MAP)\n \n soft_break='='+eol\n \n maxlinelen1=maxlinelen -1\n \n encoded_body=[]\n append=encoded_body.append\n \n for line in body.splitlines():\n \n start=0\n laststart=len(line)-1 -maxlinelen\n while start <=laststart:\n stop=start+maxlinelen1\n \n if line[stop -2]=='=':\n append(line[start:stop -1])\n start=stop -2\n elif line[stop -1]=='=':\n append(line[start:stop])\n start=stop -1\n else:\n append(line[start:stop]+'=')\n start=stop\n \n \n if line and line[-1]in ' \\t':\n room=start -laststart\n if room >=3:\n \n \n q=quote(line[-1])\n 
elif room ==2:\n \n q=line[-1]+soft_break\n else:\n \n \n q=soft_break+quote(line[-1])\n append(line[start:-1]+q)\n else:\n append(line[start:])\n \n \n if body[-1]in CRLF:\n append('')\n \n return eol.join(encoded_body)\n \n \n \n \n \ndef decode(encoded,eol=NL):\n ''\n\n\n \n if not encoded:\n return encoded\n \n \n \n decoded=''\n \n for line in encoded.splitlines():\n line=line.rstrip()\n if not line:\n decoded +=eol\n continue\n \n i=0\n n=len(line)\n while i '+re.escape(separator)+\n r')(?P--)?(?P[ \\t]*)(?P\\r\\n|\\r|\\n)?$')\n capturing_preamble=True\n preamble=[]\n linesep=False\n close_boundary_seen=False\n while True:\n line=self._input.readline()\n if line is NeedMoreData:\n yield NeedMoreData\n continue\n if line =='':\n break\n mo=boundaryre.match(line)\n if mo:\n \n \n \n \n if mo.group('end'):\n close_boundary_seen=True\n linesep=mo.group('linesep')\n break\n \n if capturing_preamble:\n if preamble:\n \n \n lastline=preamble[-1]\n eolmo=NLCRE_eol.search(lastline)\n if eolmo:\n preamble[-1]=lastline[:-len(eolmo.group(0))]\n self._cur.preamble=EMPTYSTRING.join(preamble)\n capturing_preamble=False\n self._input.unreadline(line)\n continue\n \n \n \n \n while True:\n line=self._input.readline()\n if line is NeedMoreData:\n yield NeedMoreData\n continue\n mo=boundaryre.match(line)\n if not mo:\n self._input.unreadline(line)\n break\n \n \n self._input.push_eof_matcher(boundaryre.match)\n for retval in self._parsegen():\n if retval is NeedMoreData:\n yield NeedMoreData\n continue\n break\n \n \n \n \n if self._last.get_content_maintype()=='multipart':\n epilogue=self._last.epilogue\n if epilogue =='':\n self._last.epilogue=None\n elif epilogue is not None:\n mo=NLCRE_eol.search(epilogue)\n if mo:\n end=len(mo.group(0))\n self._last.epilogue=epilogue[:-end]\n else:\n payload=self._last._payload\n if isinstance(payload,str):\n mo=NLCRE_eol.search(payload)\n if mo:\n payload=payload[:-len(mo.group(0))]\n self._last._payload=payload\n self._input.pop_eof_matcher()\n self._pop_message()\n \n \n self._last=self._cur\n else:\n \n assert capturing_preamble\n preamble.append(line)\n \n \n \n if capturing_preamble:\n defect=errors.StartBoundaryNotFoundDefect()\n self.policy.handle_defect(self._cur,defect)\n self._cur.set_payload(EMPTYSTRING.join(preamble))\n epilogue=[]\n for line in self._input:\n if line is NeedMoreData:\n yield NeedMoreData\n continue\n self._cur.epilogue=EMPTYSTRING.join(epilogue)\n return\n \n \n if not close_boundary_seen:\n defect=errors.CloseBoundaryNotFoundDefect()\n self.policy.handle_defect(self._cur,defect)\n return\n \n \n \n if linesep:\n epilogue=['']\n else:\n epilogue=[]\n for line in self._input:\n if line is NeedMoreData:\n yield NeedMoreData\n continue\n epilogue.append(line)\n \n \n \n if epilogue:\n firstline=epilogue[0]\n bolmo=NLCRE_bol.match(firstline)\n if bolmo:\n epilogue[0]=firstline[len(bolmo.group(0)):]\n self._cur.epilogue=EMPTYSTRING.join(epilogue)\n return\n \n \n lines=[]\n for line in self._input:\n if line is NeedMoreData:\n yield NeedMoreData\n continue\n lines.append(line)\n self._cur.set_payload(EMPTYSTRING.join(lines))\n \n def _parse_headers(self,lines):\n \n lastheader=''\n lastvalue=[]\n for lineno,line in enumerate(lines):\n \n if line[0]in ' \\t':\n if not lastheader:\n \n \n \n defect=errors.FirstHeaderLineIsContinuationDefect(line)\n self.policy.handle_defect(self._cur,defect)\n continue\n lastvalue.append(line)\n continue\n if lastheader:\n self._cur.set_raw(*self.policy.header_source_parse(lastvalue))\n 
lastheader,lastvalue='',[]\n \n if line.startswith('From '):\n if lineno ==0:\n \n mo=NLCRE_eol.search(line)\n if mo:\n line=line[:-len(mo.group(0))]\n self._cur.set_unixfrom(line)\n continue\n elif lineno ==len(lines)-1:\n \n \n \n self._input.unreadline(line)\n return\n else:\n \n \n defect=errors.MisplacedEnvelopeHeaderDefect(line)\n self._cur.defects.append(defect)\n continue\n \n \n \n i=line.find(':')\n \n \n \n \n if i ==0:\n defect=errors.InvalidHeaderDefect(\"Missing header name.\")\n self._cur.defects.append(defect)\n continue\n \n assert i >0,\"_parse_headers fed line with no : and no leading WS\"\n lastheader=line[:i]\n lastvalue=[line]\n \n if lastheader:\n self._cur.set_raw(*self.policy.header_source_parse(lastvalue))\n \n \nclass BytesFeedParser(FeedParser):\n ''\n \n def feed(self,data):\n super().feed(data.decode('ascii','surrogateescape'))\n", ["collections", "email", "email._policybase", "email.errors", "email.message", "io", "re"]], "email._parseaddr": [".py", "\n\n\n\"\"\"Email address parsing code.\n\nLifted directly from rfc822.py. This should eventually be rewritten.\n\"\"\"\n\n__all__=[\n'mktime_tz',\n'parsedate',\n'parsedate_tz',\n'quote',\n]\n\nimport time,calendar\n\nSPACE=' '\nEMPTYSTRING=''\nCOMMASPACE=', '\n\n\n_monthnames=['jan','feb','mar','apr','may','jun','jul',\n'aug','sep','oct','nov','dec',\n'january','february','march','april','may','june','july',\n'august','september','october','november','december']\n\n_daynames=['mon','tue','wed','thu','fri','sat','sun']\n\n\n\n\n\n\n\n_timezones={'UT':0,'UTC':0,'GMT':0,'Z':0,\n'AST':-400,'ADT':-300,\n'EST':-500,'EDT':-400,\n'CST':-600,'CDT':-500,\n'MST':-700,'MDT':-600,\n'PST':-800,'PDT':-700\n}\n\n\ndef parsedate_tz(data):\n ''\n\n\n \n res=_parsedate_tz(data)\n if not res:\n return\n if res[9]is None:\n res[9]=0\n return tuple(res)\n \ndef _parsedate_tz(data):\n ''\n\n\n\n\n\n\n\n \n if not data:\n return None\n data=data.split()\n if not data:\n return None\n \n \n if data[0].endswith(',')or data[0].lower()in _daynames:\n \n del data[0]\n else:\n i=data[0].rfind(',')\n if i >=0:\n data[0]=data[0][i+1:]\n if len(data)==3:\n stuff=data[0].split('-')\n if len(stuff)==3:\n data=stuff+data[1:]\n if len(data)==4:\n s=data[3]\n i=s.find('+')\n if i ==-1:\n i=s.find('-')\n if i >0:\n data[3:]=[s[:i],s[i:]]\n else:\n data.append('')\n if len(data)<5:\n return None\n data=data[:5]\n [dd,mm,yy,tm,tz]=data\n if not(dd and mm and yy):\n return None\n mm=mm.lower()\n if mm not in _monthnames:\n dd,mm=mm,dd.lower()\n if mm not in _monthnames:\n return None\n mm=_monthnames.index(mm)+1\n if mm >12:\n mm -=12\n if dd[-1]==',':\n dd=dd[:-1]\n i=yy.find(':')\n if i >0:\n yy,tm=tm,yy\n if yy[-1]==',':\n yy=yy[:-1]\n if not yy:\n return None\n if not yy[0].isdigit():\n yy,tz=tz,yy\n if tm[-1]==',':\n tm=tm[:-1]\n tm=tm.split(':')\n if len(tm)==2:\n [thh,tmm]=tm\n tss='0'\n elif len(tm)==3:\n [thh,tmm,tss]=tm\n elif len(tm)==1 and '.'in tm[0]:\n \n tm=tm[0].split('.')\n if len(tm)==2:\n [thh,tmm]=tm\n tss=0\n elif len(tm)==3:\n [thh,tmm,tss]=tm\n else:\n return None\n else:\n return None\n try:\n yy=int(yy)\n dd=int(dd)\n thh=int(thh)\n tmm=int(tmm)\n tss=int(tss)\n except ValueError:\n return None\n \n \n \n \n \n if yy <100:\n \n if yy >68:\n yy +=1900\n \n else:\n yy +=2000\n tzoffset=None\n tz=tz.upper()\n if tz in _timezones:\n tzoffset=_timezones[tz]\n else:\n try:\n tzoffset=int(tz)\n except ValueError:\n pass\n if tzoffset ==0 and tz.startswith('-'):\n tzoffset=None\n \n if tzoffset:\n if tzoffset <0:\n tzsign=-1\n 
tzoffset=-tzoffset\n else:\n tzsign=1\n tzoffset=tzsign *((tzoffset //100)*3600+(tzoffset %100)*60)\n \n return[yy,mm,dd,thh,tmm,tss,0,1,-1,tzoffset]\n \n \ndef parsedate(data):\n ''\n t=parsedate_tz(data)\n if isinstance(t,tuple):\n return t[:9]\n else:\n return t\n \n \ndef mktime_tz(data):\n ''\n if data[9]is None:\n \n return time.mktime(data[:8]+(-1,))\n else:\n t=calendar.timegm(data)\n return t -data[9]\n \n \ndef quote(str):\n ''\n\n\n\n\n \n return str.replace('\\\\','\\\\\\\\').replace('\"','\\\\\"')\n \n \nclass AddrlistClass:\n ''\n\n\n\n\n\n\n \n \n def __init__(self,field):\n ''\n\n\n\n \n self.specials='()<>@,:;.\\\"[]'\n self.pos=0\n self.LWS=' \\t'\n self.CR='\\r\\n'\n self.FWS=self.LWS+self.CR\n self.atomends=self.specials+self.LWS+self.CR\n \n \n \n self.phraseends=self.atomends.replace('.','')\n self.field=field\n self.commentlist=[]\n \n def gotonext(self):\n ''\n wslist=[]\n while self.pos =len(self.field):\n \n if plist:\n returnlist=[(SPACE.join(self.commentlist),plist[0])]\n \n elif self.field[self.pos]in '.@':\n \n \n self.pos=oldpos\n self.commentlist=oldcl\n addrspec=self.getaddrspec()\n returnlist=[(SPACE.join(self.commentlist),addrspec)]\n \n elif self.field[self.pos]==':':\n \n returnlist=[]\n \n fieldlen=len(self.field)\n self.pos +=1\n while self.pos ':\n self.pos +=1\n break\n elif self.field[self.pos]=='@':\n self.pos +=1\n expectroute=True\n elif self.field[self.pos]==':':\n self.pos +=1\n else:\n adlist=self.getaddrspec()\n self.pos +=1\n break\n self.gotonext()\n \n return adlist\n \n def getaddrspec(self):\n ''\n aslist=[]\n \n self.gotonext()\n while self.pos =len(self.field)or self.field[self.pos]!='@':\n return EMPTYSTRING.join(aslist)\n \n aslist.append('@')\n self.pos +=1\n self.gotonext()\n domain=self.getdomain()\n if not domain:\n \n \n return EMPTYSTRING\n return EMPTYSTRING.join(aslist)+domain\n \n def getdomain(self):\n ''\n sdlist=[]\n while self.pos '\n return lp\n \n def __repr__(self):\n return \"{}(display_name={!r}, username={!r}, domain={!r})\".format(\n self.__class__.__name__,\n self.display_name,self.username,self.domain)\n \n def __str__(self):\n disp=self.display_name\n if not parser.SPECIALS.isdisjoint(disp):\n disp=parser.quote_string(disp)\n if disp:\n addr_spec=''if self.addr_spec =='<>'else self.addr_spec\n return \"{} <{}>\".format(disp,addr_spec)\n return self.addr_spec\n \n def __eq__(self,other):\n if not isinstance(other,Address):\n return NotImplemented\n return(self.display_name ==other.display_name and\n self.username ==other.username and\n self.domain ==other.domain)\n \n \nclass Group:\n\n def __init__(self,display_name=None,addresses=None):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._display_name=display_name\n self._addresses=tuple(addresses)if addresses else tuple()\n \n @property\n def display_name(self):\n return self._display_name\n \n @property\n def addresses(self):\n return self._addresses\n \n def __repr__(self):\n return \"{}(display_name={!r}, addresses={!r}\".format(\n self.__class__.__name__,\n self.display_name,self.addresses)\n \n def __str__(self):\n if self.display_name is None and len(self.addresses)==1:\n return str(self.addresses[0])\n disp=self.display_name\n if disp is not None and not parser.SPECIALS.isdisjoint(disp):\n disp=parser.quote_string(disp)\n adrstr=\", \".join(str(x)for x in self.addresses)\n adrstr=' '+adrstr if adrstr else adrstr\n return \"{}:{};\".format(disp,adrstr)\n \n def __eq__(self,other):\n if not isinstance(other,Group):\n return NotImplemented\n return(self.display_name 
==other.display_name and\n self.addresses ==other.addresses)\n \n \n \n \nclass BaseHeader(str):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __new__(cls,name,value):\n kwds={'defects':[]}\n cls.parse(value,kwds)\n if utils._has_surrogates(kwds['decoded']):\n kwds['decoded']=utils._sanitize(kwds['decoded'])\n self=str.__new__(cls,kwds['decoded'])\n del kwds['decoded']\n self.init(name,**kwds)\n return self\n \n def init(self,name,*,parse_tree,defects):\n self._name=name\n self._parse_tree=parse_tree\n self._defects=defects\n \n @property\n def name(self):\n return self._name\n \n @property\n def defects(self):\n return tuple(self._defects)\n \n def __reduce__(self):\n return(\n _reconstruct_header,\n (\n self.__class__.__name__,\n self.__class__.__bases__,\n str(self),\n ),\n self.__getstate__())\n \n @classmethod\n def _reconstruct(cls,value):\n return str.__new__(cls,value)\n \n def fold(self,*,policy):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n header=parser.Header([\n parser.HeaderLabel([\n parser.ValueTerminal(self.name,'header-name'),\n parser.ValueTerminal(':','header-sep')]),\n ])\n if self._parse_tree:\n header.append(\n parser.CFWSList([parser.WhiteSpaceTerminal(' ','fws')]))\n header.append(self._parse_tree)\n return header.fold(policy=policy)\n \n \ndef _reconstruct_header(cls_name,bases,value):\n return type(cls_name,bases,{})._reconstruct(value)\n \n \nclass UnstructuredHeader:\n\n max_count=None\n value_parser=staticmethod(parser.get_unstructured)\n \n @classmethod\n def parse(cls,value,kwds):\n kwds['parse_tree']=cls.value_parser(value)\n kwds['decoded']=str(kwds['parse_tree'])\n \n \nclass UniqueUnstructuredHeader(UnstructuredHeader):\n\n max_count=1\n \n \nclass DateHeader:\n\n ''\n\n\n\n\n\n\n \n \n max_count=None\n \n \n value_parser=staticmethod(parser.get_unstructured)\n \n @classmethod\n def parse(cls,value,kwds):\n if not value:\n kwds['defects'].append(errors.HeaderMissingRequiredValue())\n kwds['datetime']=None\n kwds['decoded']=''\n kwds['parse_tree']=parser.TokenList()\n return\n if isinstance(value,str):\n kwds['decoded']=value\n try:\n value=utils.parsedate_to_datetime(value)\n except ValueError:\n kwds['defects'].append(errors.InvalidDateDefect('Invalid date value or format'))\n kwds['datetime']=None\n kwds['parse_tree']=parser.TokenList()\n return\n kwds['datetime']=value\n kwds['decoded']=utils.format_datetime(kwds['datetime'])\n kwds['parse_tree']=cls.value_parser(kwds['decoded'])\n \n def init(self,*args,**kw):\n self._datetime=kw.pop('datetime')\n super().init(*args,**kw)\n \n @property\n def datetime(self):\n return self._datetime\n \n \nclass UniqueDateHeader(DateHeader):\n\n max_count=1\n \n \nclass AddressHeader:\n\n max_count=None\n \n @staticmethod\n def value_parser(value):\n address_list,value=parser.get_address_list(value)\n assert not value,'this should not happen'\n return address_list\n \n @classmethod\n def parse(cls,value,kwds):\n if isinstance(value,str):\n \n \n kwds['parse_tree']=address_list=cls.value_parser(value)\n groups=[]\n for addr in address_list.addresses:\n groups.append(Group(addr.display_name,\n [Address(mb.display_name or '',\n mb.local_part or '',\n mb.domain or '')\n for mb in addr.all_mailboxes]))\n defects=list(address_list.all_defects)\n else:\n \n if not hasattr(value,'__iter__'):\n value=[value]\n groups=[Group(None,[item])if not hasattr(item,'addresses')\n else item\n for item in value]\n defects=[]\n kwds['groups']=groups\n kwds['defects']=defects\n kwds['decoded']=', '.join([str(item)for 
item in groups])\n if 'parse_tree'not in kwds:\n kwds['parse_tree']=cls.value_parser(kwds['decoded'])\n \n def init(self,*args,**kw):\n self._groups=tuple(kw.pop('groups'))\n self._addresses=None\n super().init(*args,**kw)\n \n @property\n def groups(self):\n return self._groups\n \n @property\n def addresses(self):\n if self._addresses is None:\n self._addresses=tuple(address for group in self._groups\n for address in group.addresses)\n return self._addresses\n \n \nclass UniqueAddressHeader(AddressHeader):\n\n max_count=1\n \n \nclass SingleAddressHeader(AddressHeader):\n\n @property\n def address(self):\n if len(self.addresses)!=1:\n raise ValueError((\"value of single address header {} is not \"\n \"a single address\").format(self.name))\n return self.addresses[0]\n \n \nclass UniqueSingleAddressHeader(SingleAddressHeader):\n\n max_count=1\n \n \nclass MIMEVersionHeader:\n\n max_count=1\n \n value_parser=staticmethod(parser.parse_mime_version)\n \n @classmethod\n def parse(cls,value,kwds):\n kwds['parse_tree']=parse_tree=cls.value_parser(value)\n kwds['decoded']=str(parse_tree)\n kwds['defects'].extend(parse_tree.all_defects)\n kwds['major']=None if parse_tree.minor is None else parse_tree.major\n kwds['minor']=parse_tree.minor\n if parse_tree.minor is not None:\n kwds['version']='{}.{}'.format(kwds['major'],kwds['minor'])\n else:\n kwds['version']=None\n \n def init(self,*args,**kw):\n self._version=kw.pop('version')\n self._major=kw.pop('major')\n self._minor=kw.pop('minor')\n super().init(*args,**kw)\n \n @property\n def major(self):\n return self._major\n \n @property\n def minor(self):\n return self._minor\n \n @property\n def version(self):\n return self._version\n \n \nclass ParameterizedMIMEHeader:\n\n\n\n\n max_count=1\n \n @classmethod\n def parse(cls,value,kwds):\n kwds['parse_tree']=parse_tree=cls.value_parser(value)\n kwds['decoded']=str(parse_tree)\n kwds['defects'].extend(parse_tree.all_defects)\n if parse_tree.params is None:\n kwds['params']={}\n else:\n \n kwds['params']={utils._sanitize(name).lower():\n utils._sanitize(value)\n for name,value in parse_tree.params}\n \n def init(self,*args,**kw):\n self._params=kw.pop('params')\n super().init(*args,**kw)\n \n @property\n def params(self):\n return MappingProxyType(self._params)\n \n \nclass ContentTypeHeader(ParameterizedMIMEHeader):\n\n value_parser=staticmethod(parser.parse_content_type_header)\n \n def init(self,*args,**kw):\n super().init(*args,**kw)\n self._maintype=utils._sanitize(self._parse_tree.maintype)\n self._subtype=utils._sanitize(self._parse_tree.subtype)\n \n @property\n def maintype(self):\n return self._maintype\n \n @property\n def subtype(self):\n return self._subtype\n \n @property\n def content_type(self):\n return self.maintype+'/'+self.subtype\n \n \nclass ContentDispositionHeader(ParameterizedMIMEHeader):\n\n value_parser=staticmethod(parser.parse_content_disposition_header)\n \n def init(self,*args,**kw):\n super().init(*args,**kw)\n cd=self._parse_tree.content_disposition\n self._content_disposition=cd if cd is None else utils._sanitize(cd)\n \n @property\n def content_disposition(self):\n return self._content_disposition\n \n \nclass ContentTransferEncodingHeader:\n\n max_count=1\n \n value_parser=staticmethod(parser.parse_content_transfer_encoding_header)\n \n @classmethod\n def parse(cls,value,kwds):\n kwds['parse_tree']=parse_tree=cls.value_parser(value)\n kwds['decoded']=str(parse_tree)\n kwds['defects'].extend(parse_tree.all_defects)\n \n def init(self,*args,**kw):\n 
super().init(*args,**kw)\n self._cte=utils._sanitize(self._parse_tree.cte)\n \n @property\n def cte(self):\n return self._cte\n \n \nclass MessageIDHeader:\n\n max_count=1\n value_parser=staticmethod(parser.parse_message_id)\n \n @classmethod\n def parse(cls,value,kwds):\n kwds['parse_tree']=parse_tree=cls.value_parser(value)\n kwds['decoded']=str(parse_tree)\n kwds['defects'].extend(parse_tree.all_defects)\n \n \n \n \n_default_header_map={\n'subject':UniqueUnstructuredHeader,\n'date':UniqueDateHeader,\n'resent-date':DateHeader,\n'orig-date':UniqueDateHeader,\n'sender':UniqueSingleAddressHeader,\n'resent-sender':SingleAddressHeader,\n'to':UniqueAddressHeader,\n'resent-to':AddressHeader,\n'cc':UniqueAddressHeader,\n'resent-cc':AddressHeader,\n'bcc':UniqueAddressHeader,\n'resent-bcc':AddressHeader,\n'from':UniqueAddressHeader,\n'resent-from':AddressHeader,\n'reply-to':UniqueAddressHeader,\n'mime-version':MIMEVersionHeader,\n'content-type':ContentTypeHeader,\n'content-disposition':ContentDispositionHeader,\n'content-transfer-encoding':ContentTransferEncodingHeader,\n'message-id':MessageIDHeader,\n}\n\nclass HeaderRegistry:\n\n ''\n \n def __init__(self,base_class=BaseHeader,default_class=UnstructuredHeader,\n use_default_map=True):\n ''\n\n\n\n\n\n\n\n\n \n self.registry={}\n self.base_class=base_class\n self.default_class=default_class\n if use_default_map:\n self.registry.update(_default_header_map)\n \n def map_to_type(self,name,cls):\n ''\n\n \n self.registry[name.lower()]=cls\n \n def __getitem__(self,name):\n cls=self.registry.get(name.lower(),self.default_class)\n return type('_'+cls.__name__,(cls,self.base_class),{})\n \n def __call__(self,name,value):\n ''\n\n\n\n\n\n\n\n \n return self[name](name,value)\n", ["email", "email._header_value_parser", "email.errors", "email.utils", "types"]], "email.mime.multipart": [".py", "\n\n\n\n\"\"\"Base class for MIME multipart/* type messages.\"\"\"\n\n__all__=['MIMEMultipart']\n\nfrom email.mime.base import MIMEBase\n\n\nclass MIMEMultipart(MIMEBase):\n ''\n \n def __init__(self,_subtype='mixed',boundary=None,_subparts=None,\n *,policy=None,\n **_params):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n MIMEBase.__init__(self,'multipart',_subtype,policy=policy,**_params)\n \n \n \n \n self._payload=[]\n \n if _subparts:\n for p in _subparts:\n self.attach(p)\n if boundary:\n self.set_boundary(boundary)\n", ["email.mime.base"]], "email.mime": [".py", "", [], 1], "email.mime.message": [".py", "\n\n\n\n\"\"\"Class representing message/* MIME documents.\"\"\"\n\n__all__=['MIMEMessage']\n\nfrom email import message\nfrom email.mime.nonmultipart import MIMENonMultipart\n\n\nclass MIMEMessage(MIMENonMultipart):\n ''\n \n def __init__(self,_msg,_subtype='rfc822',*,policy=None):\n ''\n\n\n\n\n\n\n\n \n MIMENonMultipart.__init__(self,'message',_subtype,policy=policy)\n if not isinstance(_msg,message.Message):\n raise TypeError('Argument is not an instance of Message')\n \n \n message.Message.attach(self,_msg)\n \n self.set_default_type('message/rfc822')\n", ["email", "email.message", "email.mime.nonmultipart"]], "email.mime.application": [".py", "\n\n\n\n\"\"\"Class representing application/* type MIME documents.\"\"\"\n\n__all__=[\"MIMEApplication\"]\n\nfrom email import encoders\nfrom email.mime.nonmultipart import MIMENonMultipart\n\n\nclass MIMEApplication(MIMENonMultipart):\n ''\n \n def __init__(self,_data,_subtype='octet-stream',\n _encoder=encoders.encode_base64,*,policy=None,**_params):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _subtype is None:\n raise 
TypeError('Invalid application MIME subtype')\n MIMENonMultipart.__init__(self,'application',_subtype,policy=policy,\n **_params)\n self.set_payload(_data)\n _encoder(self)\n", ["email", "email.encoders", "email.mime.nonmultipart"]], "email.mime.nonmultipart": [".py", "\n\n\n\n\"\"\"Base class for MIME type messages that are not multipart.\"\"\"\n\n__all__=['MIMENonMultipart']\n\nfrom email import errors\nfrom email.mime.base import MIMEBase\n\n\nclass MIMENonMultipart(MIMEBase):\n ''\n \n def attach(self,payload):\n \n \n \n raise errors.MultipartConversionError(\n 'Cannot attach additional subparts to non-multipart/*')\n", ["email", "email.errors", "email.mime.base"]], "email.mime.text": [".py", "\n\n\n\n\"\"\"Class representing text/* type MIME documents.\"\"\"\n\n__all__=['MIMEText']\n\nfrom email.mime.nonmultipart import MIMENonMultipart\n\n\nclass MIMEText(MIMENonMultipart):\n ''\n \n def __init__(self,_text,_subtype='plain',_charset=None,*,policy=None):\n ''\n\n\n\n\n\n\n\n\n \n \n \n \n \n if _charset is None:\n try:\n _text.encode('us-ascii')\n _charset='us-ascii'\n except UnicodeEncodeError:\n _charset='utf-8'\n \n MIMENonMultipart.__init__(self,'text',_subtype,policy=policy,\n charset=str(_charset))\n \n self.set_payload(_text,_charset)\n", ["email.mime.nonmultipart"]], "email.mime.audio": [".py", "\n\n\n\n\"\"\"Class representing audio/* type MIME documents.\"\"\"\n\n__all__=['MIMEAudio']\n\nfrom io import BytesIO\nfrom email import encoders\nfrom email.mime.nonmultipart import MIMENonMultipart\n\n\nclass MIMEAudio(MIMENonMultipart):\n ''\n \n def __init__(self,_audiodata,_subtype=None,\n _encoder=encoders.encode_base64,*,policy=None,**_params):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _subtype is None:\n _subtype=_what(_audiodata)\n if _subtype is None:\n raise TypeError('Could not find audio MIME subtype')\n MIMENonMultipart.__init__(self,'audio',_subtype,policy=policy,\n **_params)\n self.set_payload(_audiodata)\n _encoder(self)\n \n \n_rules=[]\n\n\n\n\n\n\ndef _what(data):\n\n\n\n\n\n hdr=data[:512]\n fakefile=BytesIO(hdr)\n for testfn in _rules:\n if res :=testfn(hdr,fakefile):\n return res\n else:\n return None\n \n \ndef rule(rulefunc):\n _rules.append(rulefunc)\n return rulefunc\n \n \n@rule\ndef _aiff(h,f):\n if not h.startswith(b'FORM'):\n return None\n if h[8:12]in{b'AIFC',b'AIFF'}:\n return 'x-aiff'\n else:\n return None\n \n \n@rule\ndef _au(h,f):\n if h.startswith(b'.snd'):\n return 'basic'\n else:\n return None\n \n \n@rule\ndef _wav(h,f):\n\n if not h.startswith(b'RIFF')or h[8:12]!=b'WAVE'or h[12:16]!=b'fmt ':\n return None\n else:\n return \"x-wav\"\n", ["email", "email.encoders", "email.mime.nonmultipart", "io"]], "email.mime.image": [".py", "\n\n\n\n\"\"\"Class representing image/* type MIME documents.\"\"\"\n\n__all__=['MIMEImage']\n\nfrom email import encoders\nfrom email.mime.nonmultipart import MIMENonMultipart\n\n\nclass MIMEImage(MIMENonMultipart):\n ''\n \n def __init__(self,_imagedata,_subtype=None,\n _encoder=encoders.encode_base64,*,policy=None,**_params):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n _subtype=_what(_imagedata)if _subtype is None else _subtype\n if _subtype is None:\n raise TypeError('Could not guess image MIME subtype')\n MIMENonMultipart.__init__(self,'image',_subtype,policy=policy,\n **_params)\n self.set_payload(_imagedata)\n _encoder(self)\n \n \n_rules=[]\n\n\n\ndef _what(data):\n for rule in _rules:\n if res :=rule(data):\n return res\n else:\n return None\n \n \ndef rule(rulefunc):\n _rules.append(rulefunc)\n return 
rulefunc\n \n \n@rule\ndef _jpeg(h):\n ''\n if h[6:10]in(b'JFIF',b'Exif'):\n return 'jpeg'\n elif h[:4]==b'\\xff\\xd8\\xff\\xdb':\n return 'jpeg'\n \n \n@rule\ndef _png(h):\n if h.startswith(b'\\211PNG\\r\\n\\032\\n'):\n return 'png'\n \n \n@rule\ndef _gif(h):\n ''\n if h[:6]in(b'GIF87a',b'GIF89a'):\n return 'gif'\n \n \n@rule\ndef _tiff(h):\n ''\n if h[:2]in(b'MM',b'II'):\n return 'tiff'\n \n \n@rule\ndef _rgb(h):\n ''\n if h.startswith(b'\\001\\332'):\n return 'rgb'\n \n \n@rule\ndef _pbm(h):\n ''\n if len(h)>=3 and\\\n h[0]==ord(b'P')and h[1]in b'14'and h[2]in b' \\t\\n\\r':\n return 'pbm'\n \n \n@rule\ndef _pgm(h):\n ''\n if len(h)>=3 and\\\n h[0]==ord(b'P')and h[1]in b'25'and h[2]in b' \\t\\n\\r':\n return 'pgm'\n \n \n@rule\ndef _ppm(h):\n ''\n if len(h)>=3 and\\\n h[0]==ord(b'P')and h[1]in b'36'and h[2]in b' \\t\\n\\r':\n return 'ppm'\n \n \n@rule\ndef _rast(h):\n ''\n if h.startswith(b'\\x59\\xA6\\x6A\\x95'):\n return 'rast'\n \n \n@rule\ndef _xbm(h):\n ''\n if h.startswith(b'#define '):\n return 'xbm'\n \n \n@rule\ndef _bmp(h):\n if h.startswith(b'BM'):\n return 'bmp'\n \n \n@rule\ndef _webp(h):\n if h.startswith(b'RIFF')and h[8:12]==b'WEBP':\n return 'webp'\n \n \n@rule\ndef _exr(h):\n if h.startswith(b'\\x76\\x2f\\x31\\x01'):\n return 'exr'\n", ["email", "email.encoders", "email.mime.nonmultipart"]], "email.mime.base": [".py", "\n\n\n\n\"\"\"Base class for MIME specializations.\"\"\"\n\n__all__=['MIMEBase']\n\nimport email.policy\n\nfrom email import message\n\n\nclass MIMEBase(message.Message):\n ''\n \n def __init__(self,_maintype,_subtype,*,policy=None,**_params):\n ''\n\n\n\n\n \n if policy is None:\n policy=email.policy.compat32\n message.Message.__init__(self,policy=policy)\n ctype='%s/%s'%(_maintype,_subtype)\n self.add_header('Content-Type',ctype,**_params)\n self['MIME-Version']='1.0'\n", ["email", "email.message", "email.policy"]]} __BRYTHON__.update_VFS(scripts) diff --git a/www/src/builtin_modules.js b/www/src/builtin_modules.js index 5a28d6620..af9677479 100644 --- a/www/src/builtin_modules.js +++ b/www/src/builtin_modules.js @@ -458,10 +458,10 @@ // load JS script at specified url // If it exposes a variable $module, use it as the namespace of imported // module named "name" - var $ = $B.args('import_js', 2, {url: null, name: null}, - ['url', 'name'], arguments, {name: _b_.None}, null, null), + var $ = $B.args('import_js', 2, {url: null, alias: null}, + ['url', 'alias'], arguments, {alias: _b_.None}, null, null), url = $.url, - name = $.name + alias = $.alias var xhr = new XMLHttpRequest(), result xhr.open('GET', url, false) @@ -473,7 +473,7 @@ console.log('f', f, f+'') var $module = f() if(typeof $module !== 'undefined'){ - result = $B.module.$factory(name) + result = $B.module.$factory(alias) for(var key in $module){ result[key] = $B.jsobj2pyobj($module[key]) } @@ -484,7 +484,7 @@ `module at ${url} doesn't define $module`) } }else{ - result = _b_.ModuleNotFoundError.$factory(name) + result = _b_.ModuleNotFoundError.$factory(url) } } } @@ -492,18 +492,18 @@ if($B.$isinstance(result, _b_.BaseException)){ $B.handle_error(result) }else{ - if(name === _b_.None){ + if(alias === _b_.None){ // set module name from url - name = url.split('.') + var name = url.split('.') if(name.length > 1){ name.pop() // remove extension } - name = name.join('.') - result.__name__ = name + alias = name.join('.') + result.__name__ = alias } - $B.imported[name] = result + $B.imported[alias] = result var frame = $B.frame_obj.frame - frame[1][name] = result + frame[1][alias] = result } }, 
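The `builtin_modules.js` hunks above rename the second parameter of `import_js` from `name` to `alias` and report a failed fetch against the URL rather than the module name. The loader's contract is otherwise unchanged: the fetched script must define a variable named `$module`, whose properties are converted with `$B.jsobj2pyobj` and become the attributes of the imported module; when `alias` is omitted, the name is still derived from the URL with its extension stripped, and the result is registered in `$B.imported` and bound in the calling frame. A minimal sketch of such a loadable script (the file name and its contents are illustrative, not part of the patch):

```javascript
// units.js -- hypothetical script loadable through import_js.
// The only requirement visible in the patched loader is that the file
// defines a variable named $module; its properties are then converted
// with $B.jsobj2pyobj and exposed as attributes of the imported module.
var $module = {
    version: "1.0",
    inches_to_cm: function(x){ return x * 2.54 }
};
```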
import_modules: function(refs, callback, loaded){ diff --git a/www/src/gen_parse.js b/www/src/gen_parse.js index 45092615b..00b46d70d 100644 --- a/www/src/gen_parse.js +++ b/www/src/gen_parse.js @@ -680,29 +680,30 @@ function file_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // statements? $ - if (p.error_indicator) { - return NULL; - } - var a; - var endmarker_var; - if ( - (a = statements_rule(p), !p.error_indicator) // statements? - && - (endmarker_var = $B._PyPegen.expect_token(p, ENDMARKER)) // token='ENDMARKER' - ) - { - _res = $B._PyPegen.make_module (p, a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // statements? $ + if (p.error_indicator) { + return NULL; + } + var a; + var endmarker_var; + if ( + (a = statements_rule(p), !p.error_indicator) // statements? + && + (endmarker_var = $B._PyPegen.expect_token(p, ENDMARKER)) // token='ENDMARKER' + ) + { + _res = $B._PyPegen.make_module (p, a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // interactive: statement_newline @@ -711,26 +712,27 @@ function interactive_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // statement_newline - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = statement_newline_rule(p)) // statement_newline - ) - { - _res = new $B._PyAST.Interactive (a, p.arena); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // statement_newline + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = statement_newline_rule(p)) // statement_newline + ) + { + _res = new $B._PyAST.Interactive (a, p.arena); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // eval: expressions NEWLINE* $ @@ -739,32 +741,33 @@ function eval_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expressions NEWLINE* $ - if (p.error_indicator) { - return NULL; - } - var _loop0_1_var; - var a; - var endmarker_var; - if ( - (a = expressions_rule(p)) // expressions - && - (_loop0_1_var = _loop0_1_rule(p)) // NEWLINE* - && - (endmarker_var = $B._PyPegen.expect_token(p, ENDMARKER)) // token='ENDMARKER' - ) - { - _res = new $B._PyAST.Expression (a, p.arena); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expressions NEWLINE* $ + if (p.error_indicator) { + return NULL; + } + var _loop0_1_var; + var a; + var endmarker_var; + if ( + (a = expressions_rule(p)) // expressions + && + (_loop0_1_var = _loop0_1_rule(p)) // NEWLINE* + && + (endmarker_var = $B._PyPegen.expect_token(p, ENDMARKER)) // token='ENDMARKER' + ) + { + _res = new $B._PyAST.Expression (a, p.arena); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // func_type: '(' type_expressions? ')' '->' expression NEWLINE* $ @@ -773,44 +776,45 @@ function func_type_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '(' type_expressions? ')' '->' expression NEWLINE* $ - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var _literal_2; - var _loop0_2_var; - var a; - var b; - var endmarker_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = type_expressions_rule(p), !p.error_indicator) // type_expressions? 
- && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - && - (_literal_2 = $B._PyPegen.expect_token(p, 51)) // token='->' - && - (b = expression_rule(p)) // expression - && - (_loop0_2_var = _loop0_2_rule(p)) // NEWLINE* - && - (endmarker_var = $B._PyPegen.expect_token(p, ENDMARKER)) // token='ENDMARKER' - ) - { - _res = new $B._PyAST.FunctionType (a, b, p.arena); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '(' type_expressions? ')' '->' expression NEWLINE* $ + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var _literal_2; + var _loop0_2_var; + var a; + var b; + var endmarker_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = type_expressions_rule(p), !p.error_indicator) // type_expressions? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + && + (_literal_2 = $B._PyPegen.expect_token(p, 51)) // token='->' + && + (b = expression_rule(p)) // expression + && + (_loop0_2_var = _loop0_2_rule(p)) // NEWLINE* + && + (endmarker_var = $B._PyPegen.expect_token(p, ENDMARKER)) // token='ENDMARKER' + ) + { + _res = new $B._PyAST.FunctionType (a, b, p.arena); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // statements: statement+ @@ -819,26 +823,27 @@ function statements_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // statement+ - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = _loop1_3_rule(p)) // statement+ - ) - { - _res = $B._PyPegen.seq_flatten (p, a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // statement+ + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = _loop1_3_rule(p)) // statement+ + ) + { + _res = $B._PyPegen.seq_flatten (p, a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // statement: compound_stmt | simple_stmts @@ -847,40 +852,41 @@ function statement_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // compound_stmt - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = compound_stmt_rule(p)) // compound_stmt - ) - { - _res = $B._PyPegen.singleton_seq (p, a); - return done(); - } - p.mark = _mark; - } - { // simple_stmts - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // compound_stmt + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = compound_stmt_rule(p)) // compound_stmt + ) + { + _res = $B._PyPegen.singleton_seq (p, a); + break; + } + p.mark = _mark; } - var a; - if ( - (a = simple_stmts_rule(p)) // simple_stmts - ) - { - _res = a; - return done(); + { // simple_stmts + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = simple_stmts_rule(p)) // simple_stmts + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // statement_newline: compound_stmt NEWLINE | simple_stmts | NEWLINE | $ @@ -889,84 +895,85 @@ function statement_newline_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // 
compound_stmt NEWLINE - if (p.error_indicator) { - return NULL; - } - var a; - var newline_var; - if ( - (a = compound_stmt_rule(p)) // compound_stmt - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B._PyPegen.singleton_seq (p, a); - return done(); - } - p.mark = _mark; - } - { // simple_stmts - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var simple_stmts_var; - if ( - (simple_stmts_var = simple_stmts_rule(p)) // simple_stmts - ) - { - _res = simple_stmts_var; - return done(); - } - p.mark = _mark; - } - { // NEWLINE - if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // compound_stmt NEWLINE + if (p.error_indicator) { + return NULL; + } + var a; + var newline_var; + if ( + (a = compound_stmt_rule(p)) // compound_stmt + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B._PyPegen.singleton_seq (p, a); + break; + } + p.mark = _mark; } - var newline_var; - if ( - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // simple_stmts + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B._PyPegen.singleton_seq (p, $B.helper_functions.CHECK ( $B.ast.stmt, new $B._PyAST.Pass ( EXTRA ) )); - return done(); + var simple_stmts_var; + if ( + (simple_stmts_var = simple_stmts_rule(p)) // simple_stmts + ) + { + _res = simple_stmts_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // $ - if (p.error_indicator) { - return NULL; + { // NEWLINE + if (p.error_indicator) { + return NULL; + } + var newline_var; + if ( + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B._PyPegen.singleton_seq (p, $B.helper_functions.CHECK ( $B.ast.stmt, new $B._PyAST.Pass ( EXTRA ) )); + break; + } + p.mark = _mark; } - var endmarker_var; - if ( - (endmarker_var = $B._PyPegen.expect_token(p, ENDMARKER)) // token='ENDMARKER' - ) - { - _res = $B._PyPegen.interactive_exit (p); - return done(); + { // $ + if (p.error_indicator) { + return NULL; + } + var endmarker_var; + if ( + (endmarker_var = $B._PyPegen.expect_token(p, ENDMARKER)) // token='ENDMARKER' + ) + { + _res = $B._PyPegen.interactive_exit (p); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // simple_stmts: simple_stmt !';' NEWLINE | ';'.simple_stmt+ ';'? 
NEWLINE @@ -975,52 +982,53 @@ function simple_stmts_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // simple_stmt !';' NEWLINE - if (p.error_indicator) { - return NULL; - } - var a; - var newline_var; - if ( - (a = simple_stmt_rule(p)) // simple_stmt - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 13) // token=';' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B._PyPegen.singleton_seq (p, a); - return done(); - } - p.mark = _mark; - } - { // ';'.simple_stmt+ ';'? NEWLINE - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // simple_stmt !';' NEWLINE + if (p.error_indicator) { + return NULL; + } + var a; + var newline_var; + if ( + (a = simple_stmt_rule(p)) // simple_stmt + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 13) // token=';' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B._PyPegen.singleton_seq (p, a); + break; + } + p.mark = _mark; } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var newline_var; - if ( - (a = _gather_4_rule(p)) // ';'.simple_stmt+ - && - (_opt_var = $B._PyPegen.expect_token(p, 13), !p.error_indicator) // ';'? - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = a; - return done(); + { // ';'.simple_stmt+ ';'? NEWLINE + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var newline_var; + if ( + (a = _gather_4_rule(p)) // ';'.simple_stmt+ + && + (_opt_var = $B._PyPegen.expect_token(p, 13), !p.error_indicator) // ';'? + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // simple_stmt: @@ -1043,262 +1051,263 @@ function simple_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, simple_stmt_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // assignment - if (p.error_indicator) { - return NULL; - } - var assignment_var; - if ( - (assignment_var = assignment_rule(p)) // assignment - ) - { - _res = assignment_var; - return done(); + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, simple_stmt_type, _res)) { + return _res.value; } - p.mark = _mark; - } - { // &"type" type_alias - if (p.error_indicator) { + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var type_alias_var; - if ( - $B._PyPegen.lookahead_with_string(1, $B._PyPegen.expect_soft_keyword, p, "type") - && - (type_alias_var = type_alias_rule(p)) // type_alias - ) - { - _res = type_alias_var; - return done(); - } - p.mark = _mark; - } - { // star_expressions - if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // assignment + if (p.error_indicator) { + return NULL; + } + var assignment_var; + if ( + (assignment_var = assignment_rule(p)) // assignment + ) + 
{ + _res = assignment_var; + break; + } + p.mark = _mark; } - var e; - if ( - (e = star_expressions_rule(p)) // star_expressions - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // &"type" type_alias + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Expr (e, EXTRA); - return done(); + var type_alias_var; + if ( + $B._PyPegen.lookahead_with_string(1, $B._PyPegen.expect_soft_keyword, p, "type") + && + (type_alias_var = type_alias_rule(p)) // type_alias + ) + { + _res = type_alias_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // &'return' return_stmt - if (p.error_indicator) { - return NULL; + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var e; + if ( + (e = star_expressions_rule(p)) // star_expressions + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Expr (e, EXTRA); + break; + } + p.mark = _mark; } - var return_stmt_var; - if ( - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 519) // token='return' - && - (return_stmt_var = return_stmt_rule(p)) // return_stmt - ) - { - _res = return_stmt_var; - return done(); + { // &'return' return_stmt + if (p.error_indicator) { + return NULL; + } + var return_stmt_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 519) // token='return' + && + (return_stmt_var = return_stmt_rule(p)) // return_stmt + ) + { + _res = return_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // &('import' | 'from') import_stmt - if (p.error_indicator) { - return NULL; + { // &('import' | 'from') import_stmt + if (p.error_indicator) { + return NULL; + } + var import_stmt_var; + if ( + $B._PyPegen.lookahead(1, _tmp_6_rule, p) + && + (import_stmt_var = import_stmt_rule(p)) // import_stmt + ) + { + _res = import_stmt_var; + break; + } + p.mark = _mark; } - var import_stmt_var; - if ( - $B._PyPegen.lookahead(1, _tmp_6_rule, p) - && - (import_stmt_var = import_stmt_rule(p)) // import_stmt - ) - { - _res = import_stmt_var; - return done(); + { // &'raise' raise_stmt + if (p.error_indicator) { + return NULL; + } + var raise_stmt_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 522) // token='raise' + && + (raise_stmt_var = raise_stmt_rule(p)) // raise_stmt + ) + { + _res = raise_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // &'raise' raise_stmt - if (p.error_indicator) { - return NULL; + { // 'pass' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 504)) // token='pass' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Pass (EXTRA); + break; + } + p.mark = _mark; } - var raise_stmt_var; - if ( - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 522) // token='raise' - && - (raise_stmt_var = raise_stmt_rule(p)) // raise_stmt - ) - { - _res = raise_stmt_var; - return done(); + { // &'del' del_stmt + if (p.error_indicator) { + return NULL; + } + var del_stmt_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 604) // token='del' + && + 
(del_stmt_var = del_stmt_rule(p)) // del_stmt + ) + { + _res = del_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'pass' - if (p.error_indicator) { - return NULL; + { // &'yield' yield_stmt + if (p.error_indicator) { + return NULL; + } + var yield_stmt_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 573) // token='yield' + && + (yield_stmt_var = yield_stmt_rule(p)) // yield_stmt + ) + { + _res = yield_stmt_var; + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 504)) // token='pass' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // &'assert' assert_stmt + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Pass (EXTRA); - return done(); + var assert_stmt_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 526) // token='assert' + && + (assert_stmt_var = assert_stmt_rule(p)) // assert_stmt + ) + { + _res = assert_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // &'del' del_stmt - if (p.error_indicator) { - return NULL; - } - var del_stmt_var; - if ( - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 604) // token='del' - && - (del_stmt_var = del_stmt_rule(p)) // del_stmt - ) - { - _res = del_stmt_var; - return done(); - } - p.mark = _mark; - } - { // &'yield' yield_stmt - if (p.error_indicator) { - return NULL; - } - var yield_stmt_var; - if ( - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 573) // token='yield' - && - (yield_stmt_var = yield_stmt_rule(p)) // yield_stmt - ) - { - _res = yield_stmt_var; - return done(); - } - p.mark = _mark; - } - { // &'assert' assert_stmt - if (p.error_indicator) { - return NULL; - } - var assert_stmt_var; - if ( - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 526) // token='assert' - && - (assert_stmt_var = assert_stmt_rule(p)) // assert_stmt - ) - { - _res = assert_stmt_var; - return done(); - } - p.mark = _mark; - } - { // 'break' - if (p.error_indicator) { - return NULL; - } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 508)) // token='break' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'break' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Break (EXTRA); - return done(); - } - p.mark = _mark; - } - { // 'continue' - if (p.error_indicator) { - return NULL; + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 508)) // token='break' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Break (EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 509)) // token='continue' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'continue' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Continue (EXTRA); - return done(); - } - p.mark = _mark; - } - { // &'global' global_stmt - if (p.error_indicator) { - return NULL; - } - var global_stmt_var; - if ( - 
$B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 523) // token='global' - && - (global_stmt_var = global_stmt_rule(p)) // global_stmt - ) - { - _res = global_stmt_var; - return done(); + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 509)) // token='continue' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Continue (EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // &'nonlocal' nonlocal_stmt - if (p.error_indicator) { - return NULL; + { // &'global' global_stmt + if (p.error_indicator) { + return NULL; + } + var global_stmt_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 523) // token='global' + && + (global_stmt_var = global_stmt_rule(p)) // global_stmt + ) + { + _res = global_stmt_var; + break; + } + p.mark = _mark; } - var nonlocal_stmt_var; - if ( - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 524) // token='nonlocal' - && - (nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt - ) - { - _res = nonlocal_stmt_var; - return done(); + { // &'nonlocal' nonlocal_stmt + if (p.error_indicator) { + return NULL; + } + var nonlocal_stmt_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 524) // token='nonlocal' + && + (nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt + ) + { + _res = nonlocal_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, simple_stmt_type, _res); return _res; - } } // compound_stmt: @@ -1315,138 +1324,139 @@ function compound_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // &('def' | '@' | ASYNC) function_def - if (p.error_indicator) { - return NULL; - } - var function_def_var; - if ( - $B._PyPegen.lookahead(1, _tmp_7_rule, p) - && - (function_def_var = function_def_rule(p)) // function_def - ) - { - _res = function_def_var; - return done(); - } - p.mark = _mark; - } - { // &'if' if_stmt - if (p.error_indicator) { - return NULL; - } - var if_stmt_var; - if ( - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 642) // token='if' - && - (if_stmt_var = if_stmt_rule(p)) // if_stmt - ) - { - _res = if_stmt_var; - return done(); - } - p.mark = _mark; - } - { // &('class' | '@') class_def - if (p.error_indicator) { - return NULL; - } - var class_def_var; - if ( - $B._PyPegen.lookahead(1, _tmp_8_rule, p) - && - (class_def_var = class_def_rule(p)) // class_def - ) - { - _res = class_def_var; - return done(); - } - p.mark = _mark; - } - { // &('with' | ASYNC) with_stmt - if (p.error_indicator) { - return NULL; - } - var with_stmt_var; - if ( - $B._PyPegen.lookahead(1, _tmp_9_rule, p) - && - (with_stmt_var = with_stmt_rule(p)) // with_stmt - ) - { - _res = with_stmt_var; - return done(); - } - p.mark = _mark; - } - { // &('for' | ASYNC) for_stmt - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // &('def' | '@' | ASYNC) function_def + if (p.error_indicator) { + return NULL; + } + var function_def_var; + if ( + $B._PyPegen.lookahead(1, _tmp_7_rule, p) + && + (function_def_var = function_def_rule(p)) // function_def + ) + { + _res = function_def_var; + break; + } + p.mark = _mark; } - var for_stmt_var; - if ( - $B._PyPegen.lookahead(1, _tmp_10_rule, p) - && - 
(for_stmt_var = for_stmt_rule(p)) // for_stmt - ) - { - _res = for_stmt_var; - return done(); + { // &'if' if_stmt + if (p.error_indicator) { + return NULL; + } + var if_stmt_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 642) // token='if' + && + (if_stmt_var = if_stmt_rule(p)) // if_stmt + ) + { + _res = if_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // &'try' try_stmt - if (p.error_indicator) { - return NULL; + { // &('class' | '@') class_def + if (p.error_indicator) { + return NULL; + } + var class_def_var; + if ( + $B._PyPegen.lookahead(1, _tmp_8_rule, p) + && + (class_def_var = class_def_rule(p)) // class_def + ) + { + _res = class_def_var; + break; + } + p.mark = _mark; } - var try_stmt_var; - if ( - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 624) // token='try' - && - (try_stmt_var = try_stmt_rule(p)) // try_stmt - ) - { - _res = try_stmt_var; - return done(); + { // &('with' | ASYNC) with_stmt + if (p.error_indicator) { + return NULL; + } + var with_stmt_var; + if ( + $B._PyPegen.lookahead(1, _tmp_9_rule, p) + && + (with_stmt_var = with_stmt_rule(p)) // with_stmt + ) + { + _res = with_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // &'while' while_stmt - if (p.error_indicator) { - return NULL; + { // &('for' | ASYNC) for_stmt + if (p.error_indicator) { + return NULL; + } + var for_stmt_var; + if ( + $B._PyPegen.lookahead(1, _tmp_10_rule, p) + && + (for_stmt_var = for_stmt_rule(p)) // for_stmt + ) + { + _res = for_stmt_var; + break; + } + p.mark = _mark; } - var while_stmt_var; - if ( - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 647) // token='while' - && - (while_stmt_var = while_stmt_rule(p)) // while_stmt - ) - { - _res = while_stmt_var; - return done(); + { // &'try' try_stmt + if (p.error_indicator) { + return NULL; + } + var try_stmt_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 624) // token='try' + && + (try_stmt_var = try_stmt_rule(p)) // try_stmt + ) + { + _res = try_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // match_stmt - if (p.error_indicator) { - return NULL; + { // &'while' while_stmt + if (p.error_indicator) { + return NULL; + } + var while_stmt_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 647) // token='while' + && + (while_stmt_var = while_stmt_rule(p)) // while_stmt + ) + { + _res = while_stmt_var; + break; + } + p.mark = _mark; } - var match_stmt_var; - if ( - (match_stmt_var = match_stmt_rule(p)) // match_stmt - ) - { - _res = match_stmt_var; - return done(); + { // match_stmt + if (p.error_indicator) { + return NULL; + } + var match_stmt_var; + if ( + (match_stmt_var = match_stmt_rule(p)) // match_stmt + ) + { + _res = match_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // assignment: @@ -1460,151 +1470,152 @@ function assignment_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // NAME ':' expression ['=' annotated_rhs] - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - 
var a; - var b; - var c; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = expression_rule(p)) // expression - && - (c = _tmp_11_rule(p), !p.error_indicator) // ['=' annotated_rhs] - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // NAME ':' expression ['=' annotated_rhs] + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 6, "Variable annotation syntax is", new $B._PyAST.AnnAssign ( $B.helper_functions.CHECK ( $B.ast.expr, $B._PyPegen.set_expr_context ( p, a, $B.parser_constants.Store ) ), b, c, 1, EXTRA )); - return done(); - } - p.mark = _mark; - } - { // ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + var c; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = expression_rule(p)) // expression + && + (c = _tmp_11_rule(p), !p.error_indicator) // ['=' annotated_rhs] + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 6, "Variable annotation syntax is", new $B._PyAST.AnnAssign ( $B.helper_functions.CHECK ( $B.ast.expr, $B._PyPegen.set_expr_context ( p, a, $B.parser_constants.Store ) ), b, c, 1, EXTRA )); + break; + } + p.mark = _mark; } - var _literal; - var a; - var b; - var c; - if ( - (a = _tmp_12_rule(p)) // '(' single_target ')' | single_subscript_attribute_target - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = expression_rule(p)) // expression - && - (c = _tmp_13_rule(p), !p.error_indicator) // ['=' annotated_rhs] - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 6, "Variable annotations syntax is", new $B._PyAST.AnnAssign ( a, b, c, 0, EXTRA )); - return done(); - } - p.mark = _mark; - } - { // ((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT? 
- if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + var c; + if ( + (a = _tmp_12_rule(p)) // '(' single_target ')' | single_subscript_attribute_target + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = expression_rule(p)) // expression + && + (c = _tmp_13_rule(p), !p.error_indicator) // ['=' annotated_rhs] + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 6, "Variable annotations syntax is", new $B._PyAST.AnnAssign ( a, b, c, 0, EXTRA )); + break; + } + p.mark = _mark; } - var a; - var b; - var tc; - if ( - (a = _loop1_14_rule(p)) // ((star_targets '='))+ - && - (b = _tmp_15_rule(p)) // yield_expr | star_expressions - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 22) // token='=' - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // ((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Assign (a, b, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), EXTRA); - return done(); - } - p.mark = _mark; - } - { // single_target augassign ~ (yield_expr | star_expressions) - if (p.error_indicator) { - return NULL; + var a; + var b; + var tc; + if ( + (a = _loop1_14_rule(p)) // ((star_targets '='))+ + && + (b = _tmp_15_rule(p)) // yield_expr | star_expressions + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 22) // token='=' + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? 
+ ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Assign (a, b, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), EXTRA); + break; + } + p.mark = _mark; } - var _cut_var = 0; - var a; - var b; - var c; - if ( - (a = single_target_rule(p)) // single_target - && - (b = augassign_rule(p)) // augassign - && - (_cut_var = 1) - && - (c = _tmp_16_rule(p)) // yield_expr | star_expressions - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // single_target augassign ~ (yield_expr | star_expressions) + if (p.error_indicator) { + return NULL; + } + var _cut_var = 0; + var a; + var b; + var c; + if ( + (a = single_target_rule(p)) // single_target + && + (b = augassign_rule(p)) // augassign + && + (_cut_var = 1) + && + (c = _tmp_16_rule(p)) // yield_expr | star_expressions + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.AugAssign (a, b.kind, c, EXTRA); + break; + } + p.mark = _mark; + if (_cut_var) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.AugAssign (a, b.kind, c, EXTRA); - return done(); - } - p.mark = _mark; - if (_cut_var) { - return NULL; - } - } - if (p.call_invalid_rules) { // invalid_assignment - if (p.error_indicator) { - return NULL; } - var invalid_assignment_var; - if ( - (invalid_assignment_var = invalid_assignment_rule(p)) // invalid_assignment - ) - { - _res = invalid_assignment_var; - return done(); + if (p.call_invalid_rules) { // invalid_assignment + if (p.error_indicator) { + return NULL; + } + var invalid_assignment_var; + if ( + (invalid_assignment_var = invalid_assignment_rule(p)) // invalid_assignment + ) + { + _res = invalid_assignment_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // annotated_rhs: yield_expr | star_expressions @@ -1613,40 +1624,41 @@ function annotated_rhs_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; - } - { // star_expressions - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - _res = star_expressions_var; - return done(); + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + _res = star_expressions_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // augassign: @@ -1668,194 +1680,195 @@ function augassign_rule(p) if (p.error_indicator) { return NULL; } - var _res = 
NULL; - var _mark = p.mark; - { // '+=' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '+=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 36)) // token='+=' + ) + { + _res = $B._PyPegen.augoperator (p, new $B.ast.Add()); + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 36)) // token='+=' - ) - { - _res = $B._PyPegen.augoperator (p, new $B.ast.Add()); - return done(); + { // '-=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 37)) // token='-=' + ) + { + _res = $B._PyPegen.augoperator (p, new $B.ast.Sub()); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '-=' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 37)) // token='-=' - ) - { - _res = $B._PyPegen.augoperator (p, new $B.ast.Sub()); - return done(); - } - p.mark = _mark; - } - { // '*=' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 38)) // token='*=' - ) - { - _res = $B._PyPegen.augoperator (p, new $B.ast.Mult()); - return done(); - } - p.mark = _mark; - } - { // '@=' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 50)) // token='@=' - ) - { - _res = $B.helper_functions.CHECK_VERSION ($B.parser_constants.AugOperator, 5, "The '@' operator is", $B._PyPegen.augoperator ( p, new $B.ast.MatMult() )); - return done(); - } - p.mark = _mark; - } - { // '/=' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 39)) // token='/=' - ) - { - _res = $B._PyPegen.augoperator (p, new $B.ast.Div()); - return done(); - } - p.mark = _mark; - } - { // '%=' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 40)) // token='%=' - ) - { - _res = $B._PyPegen.augoperator (p, new $B.ast.Mod()); - return done(); - } - p.mark = _mark; - } - { // '&=' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 41)) // token='&=' - ) - { - _res = $B._PyPegen.augoperator (p, new $B.ast.BitAnd()); - return done(); - } - p.mark = _mark; - } - { // '|=' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 42)) // token='|=' - ) - { - _res = $B._PyPegen.augoperator (p, new $B.ast.BitOr()); - return done(); + { // '*=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 38)) // token='*=' + ) + { + _res = $B._PyPegen.augoperator (p, new $B.ast.Mult()); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '^=' - if (p.error_indicator) { - return NULL; + { // '@=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 50)) // token='@=' + ) + { + _res = $B.helper_functions.CHECK_VERSION ($B.parser_constants.AugOperator, 5, "The '@' operator is", $B._PyPegen.augoperator ( p, new $B.ast.MatMult() )); + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 43)) // token='^=' - ) - { - _res = $B._PyPegen.augoperator (p, new $B.ast.BitXor()); - return done(); + { // '/=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = 
$B._PyPegen.expect_token(p, 39)) // token='/=' + ) + { + _res = $B._PyPegen.augoperator (p, new $B.ast.Div()); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '<<=' - if (p.error_indicator) { - return NULL; + { // '%=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 40)) // token='%=' + ) + { + _res = $B._PyPegen.augoperator (p, new $B.ast.Mod()); + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 44)) // token='<<=' - ) - { - _res = $B._PyPegen.augoperator (p, new $B.ast.LShift()); - return done(); + { // '&=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 41)) // token='&=' + ) + { + _res = $B._PyPegen.augoperator (p, new $B.ast.BitAnd()); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '>>=' - if (p.error_indicator) { - return NULL; + { // '|=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 42)) // token='|=' + ) + { + _res = $B._PyPegen.augoperator (p, new $B.ast.BitOr()); + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 45)) // token='>>=' - ) - { - _res = $B._PyPegen.augoperator (p, new $B.ast.RShift()); - return done(); + { // '^=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 43)) // token='^=' + ) + { + _res = $B._PyPegen.augoperator (p, new $B.ast.BitXor()); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '**=' - if (p.error_indicator) { - return NULL; + { // '<<=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 44)) // token='<<=' + ) + { + _res = $B._PyPegen.augoperator (p, new $B.ast.LShift()); + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 46)) // token='**=' - ) - { - _res = $B._PyPegen.augoperator (p, new $B.ast.Pow()); - return done(); + { // '>>=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 45)) // token='>>=' + ) + { + _res = $B._PyPegen.augoperator (p, new $B.ast.RShift()); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '//=' - if (p.error_indicator) { - return NULL; + { // '**=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 46)) // token='**=' + ) + { + _res = $B._PyPegen.augoperator (p, new $B.ast.Pow()); + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 48)) // token='//=' - ) - { - _res = $B._PyPegen.augoperator (p, new $B.ast.FloorDiv()); - return done(); + { // '//=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 48)) // token='//=' + ) + { + _res = $B._PyPegen.augoperator (p, new $B.ast.FloorDiv()); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // return_stmt: 'return' star_expressions? 
@@ -1864,42 +1877,43 @@ function return_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // 'return' star_expressions? - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 519)) // token='return' - && - (a = star_expressions_rule(p), !p.error_indicator) // star_expressions? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // 'return' star_expressions? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Return (a, EXTRA); - return done(); + var _keyword; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 519)) // token='return' + && + (a = star_expressions_rule(p), !p.error_indicator) // star_expressions? + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Return (a, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // raise_stmt: 'raise' expression ['from' expression] | 'raise' @@ -1908,65 +1922,66 @@ function raise_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // 'raise' expression ['from' expression] - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var a; - var b; - if ( - (_keyword = $B._PyPegen.expect_token(p, 522)) // token='raise' - && - (a = expression_rule(p)) // expression - && - (b = _tmp_17_rule(p), !p.error_indicator) // ['from' expression] - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // 'raise' expression ['from' expression] + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Raise (a, b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // 'raise' - if (p.error_indicator) { - return NULL; + var _keyword; + var a; + var b; + if ( + (_keyword = $B._PyPegen.expect_token(p, 522)) // token='raise' + && + (a = expression_rule(p)) // expression + && + (b = _tmp_17_rule(p), !p.error_indicator) // ['from' expression] + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Raise (a, b, EXTRA); 
+ break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 522)) // token='raise' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'raise' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Raise ($B.parser_constants.NULL, $B.parser_constants.NULL, EXTRA); - return done(); + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 522)) // token='raise' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Raise ($B.parser_constants.NULL, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // global_stmt: 'global' ','.NAME+ @@ -1975,42 +1990,43 @@ function global_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // 'global' ','.NAME+ - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 523)) // token='global' - && - (a = _gather_18_rule(p)) // ','.NAME+ - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // 'global' ','.NAME+ + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Global ($B.helper_functions.CHECK ( $B.parser_constants.asdl_identifier_seq, $B._PyPegen.map_names_to_ids ( p, a ) ), EXTRA); - return done(); + var _keyword; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 523)) // token='global' + && + (a = _gather_18_rule(p)) // ','.NAME+ + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Global ($B.helper_functions.CHECK ( $B.parser_constants.asdl_identifier_seq, $B._PyPegen.map_names_to_ids ( p, a ) ), EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // nonlocal_stmt: 'nonlocal' ','.NAME+ @@ -2019,42 +2035,43 @@ function nonlocal_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // 'nonlocal' ','.NAME+ - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 524)) // token='nonlocal' - && - (a = 
_gather_20_rule(p)) // ','.NAME+ - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // 'nonlocal' ','.NAME+ + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Nonlocal ($B.helper_functions.CHECK ( $B.parser_constants.asdl_identifier_seq, $B._PyPegen.map_names_to_ids ( p, a ) ), EXTRA); - return done(); + var _keyword; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 524)) // token='nonlocal' + && + (a = _gather_20_rule(p)) // ','.NAME+ + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Nonlocal ($B.helper_functions.CHECK ( $B.parser_constants.asdl_identifier_seq, $B._PyPegen.map_names_to_ids ( p, a ) ), EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // del_stmt: 'del' del_targets &(';' | NEWLINE) | invalid_del_stmt @@ -2063,58 +2080,59 @@ function del_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // 'del' del_targets &(';' | NEWLINE) - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 604)) // token='del' - && - (a = del_targets_rule(p)) // del_targets - && - $B._PyPegen.lookahead(1, _tmp_22_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // 'del' del_targets &(';' | NEWLINE) + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Delete (a, EXTRA); - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_del_stmt - if (p.error_indicator) { - return NULL; + var _keyword; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 604)) // token='del' + && + (a = del_targets_rule(p)) // del_targets + && + $B._PyPegen.lookahead(1, _tmp_22_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Delete (a, EXTRA); + break; + } + p.mark = _mark; } - var invalid_del_stmt_var; - if ( - (invalid_del_stmt_var = invalid_del_stmt_rule(p)) // invalid_del_stmt - ) - { - _res = invalid_del_stmt_var; - return done(); + if (p.call_invalid_rules) { // invalid_del_stmt + if (p.error_indicator) { + return NULL; + } + var invalid_del_stmt_var; + if ( + (invalid_del_stmt_var = invalid_del_stmt_rule(p)) // invalid_del_stmt + ) + { + _res = invalid_del_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - 
} } // yield_stmt: yield_expr @@ -2123,39 +2141,40 @@ function yield_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // yield_expr - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var y; - if ( - (y = yield_expr_rule(p)) // yield_expr - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // yield_expr + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Expr (y, EXTRA); - return done(); + var y; + if ( + (y = yield_expr_rule(p)) // yield_expr + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Expr (y, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // assert_stmt: 'assert' expression [',' expression] @@ -2164,45 +2183,46 @@ function assert_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // 'assert' expression [',' expression] - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var a; - var b; - if ( - (_keyword = $B._PyPegen.expect_token(p, 526)) // token='assert' - && - (a = expression_rule(p)) // expression - && - (b = _tmp_23_rule(p), !p.error_indicator) // [',' expression] - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // 'assert' expression [',' expression] + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Assert (a, b, EXTRA); - return done(); + var _keyword; + var a; + var b; + if ( + (_keyword = $B._PyPegen.expect_token(p, 526)) // token='assert' + && + (a = expression_rule(p)) // expression + && + (b = _tmp_23_rule(p), !p.error_indicator) // [',' expression] + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Assert (a, b, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // import_stmt: invalid_import | import_name | import_from @@ -2211,54 +2231,55 @@ function import_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if 
(p.call_invalid_rules) { // invalid_import - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.call_invalid_rules) { // invalid_import + if (p.error_indicator) { + return NULL; + } + var invalid_import_var; + if ( + (invalid_import_var = invalid_import_rule(p)) // invalid_import + ) + { + _res = invalid_import_var; + break; + } + p.mark = _mark; } - var invalid_import_var; - if ( - (invalid_import_var = invalid_import_rule(p)) // invalid_import - ) - { - _res = invalid_import_var; - return done(); + { // import_name + if (p.error_indicator) { + return NULL; + } + var import_name_var; + if ( + (import_name_var = import_name_rule(p)) // import_name + ) + { + _res = import_name_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // import_name - if (p.error_indicator) { - return NULL; + { // import_from + if (p.error_indicator) { + return NULL; + } + var import_from_var; + if ( + (import_from_var = import_from_rule(p)) // import_from + ) + { + _res = import_from_var; + break; + } + p.mark = _mark; } - var import_name_var; - if ( - (import_name_var = import_name_rule(p)) // import_name - ) - { - _res = import_name_var; - return done(); - } - p.mark = _mark; - } - { // import_from - if (p.error_indicator) { - return NULL; - } - var import_from_var; - if ( - (import_from_var = import_from_rule(p)) // import_from - ) - { - _res = import_from_var; - return done(); - } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // import_name: 'import' dotted_as_names @@ -2267,42 +2288,43 @@ function import_name_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // 'import' dotted_as_names - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 607)) // token='import' - && - (a = dotted_as_names_rule(p)) // dotted_as_names - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // 'import' dotted_as_names + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Import (a, EXTRA); - return done(); + var _keyword; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 607)) // token='import' + && + (a = dotted_as_names_rule(p)) // dotted_as_names + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Import (a, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // import_from: @@ -2313,80 +2335,81 @@ function import_from_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; 
- EXTRA.col_offset = p.tokens[_mark].col_offset; - { // 'from' (('.' | '...'))* dotted_name 'import' import_from_targets - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var _keyword_1; - var a; - var b; - var c; - if ( - (_keyword = $B._PyPegen.expect_token(p, 608)) // token='from' - && - (a = _loop0_24_rule(p)) // (('.' | '...'))* - && - (b = dotted_name_rule(p)) // dotted_name - && - (_keyword_1 = $B._PyPegen.expect_token(p, 607)) // token='import' - && - (c = import_from_targets_rule(p)) // import_from_targets - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // 'from' (('.' | '...'))* dotted_name 'import' import_from_targets + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.ImportFrom (b. id, c, $B._PyPegen.seq_count_dots ( a ), EXTRA); - return done(); - } - p.mark = _mark; - } - { // 'from' (('.' | '...'))+ 'import' import_from_targets - if (p.error_indicator) { - return NULL; + var _keyword; + var _keyword_1; + var a; + var b; + var c; + if ( + (_keyword = $B._PyPegen.expect_token(p, 608)) // token='from' + && + (a = _loop0_24_rule(p)) // (('.' | '...'))* + && + (b = dotted_name_rule(p)) // dotted_name + && + (_keyword_1 = $B._PyPegen.expect_token(p, 607)) // token='import' + && + (c = import_from_targets_rule(p)) // import_from_targets + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.ImportFrom (b. id, c, $B._PyPegen.seq_count_dots ( a ), EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - var _keyword_1; - var a; - var b; - if ( - (_keyword = $B._PyPegen.expect_token(p, 608)) // token='from' - && - (a = _loop1_25_rule(p)) // (('.' | '...'))+ - && - (_keyword_1 = $B._PyPegen.expect_token(p, 607)) // token='import' - && - (b = import_from_targets_rule(p)) // import_from_targets - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'from' (('.' | '...'))+ 'import' import_from_targets + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.ImportFrom ($B.parser_constants.NULL, b, $B._PyPegen.seq_count_dots ( a ), EXTRA); - return done(); + var _keyword; + var _keyword_1; + var a; + var b; + if ( + (_keyword = $B._PyPegen.expect_token(p, 608)) // token='from' + && + (a = _loop1_25_rule(p)) // (('.' 
| '...'))+ + && + (_keyword_1 = $B._PyPegen.expect_token(p, 607)) // token='import' + && + (b = import_from_targets_rule(p)) // import_from_targets + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.ImportFrom ($B.parser_constants.NULL, b, $B._PyPegen.seq_count_dots ( a ), EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // import_from_targets: @@ -2399,93 +2422,94 @@ function import_from_targets_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '(' import_from_as_names ','? ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = import_from_as_names_rule(p)) // import_from_as_names - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = a; - return done(); - } - p.mark = _mark; - } - { // import_from_as_names !',' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var import_from_as_names_var; - if ( - (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 12) // token=',' - ) - { - _res = import_from_as_names_var; - return done(); - } - p.mark = _mark; - } - { // '*' - if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '(' import_from_as_names ','? ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = import_from_as_names_rule(p)) // import_from_as_names + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? 
+ && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = a; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // import_from_as_names !',' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B._PyPegen.singleton_seq (p, $B.helper_functions.CHECK ( $B.ast.alias, $B._PyPegen.alias_for_star ( p, EXTRA ) )); - return done(); + var import_from_as_names_var; + if ( + (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 12) // token=',' + ) + { + _res = import_from_as_names_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_import_from_targets - if (p.error_indicator) { - return NULL; + { // '*' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B._PyPegen.singleton_seq (p, $B.helper_functions.CHECK ( $B.ast.alias, $B._PyPegen.alias_for_star ( p, EXTRA ) )); + break; + } + p.mark = _mark; } - var invalid_import_from_targets_var; - if ( - (invalid_import_from_targets_var = invalid_import_from_targets_rule(p)) // invalid_import_from_targets - ) - { - _res = invalid_import_from_targets_var; - return done(); + if (p.call_invalid_rules) { // invalid_import_from_targets + if (p.error_indicator) { + return NULL; + } + var invalid_import_from_targets_var; + if ( + (invalid_import_from_targets_var = invalid_import_from_targets_rule(p)) // invalid_import_from_targets + ) + { + _res = invalid_import_from_targets_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // import_from_as_names: ','.import_from_as_name+ @@ -2494,26 +2518,27 @@ function import_from_as_names_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.import_from_as_name+ - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = _gather_26_rule(p)) // ','.import_from_as_name+ - ) - { - _res = a; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.import_from_as_name+ + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = _gather_26_rule(p)) // ','.import_from_as_name+ + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // import_from_as_name: NAME ['as' NAME] @@ -2522,42 +2547,43 @@ function import_from_as_name_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // NAME ['as' NAME] - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - var b; - if ( - (a = $B._PyPegen.name_token(p)) 
// NAME - && - (b = _tmp_28_rule(p), !p.error_indicator) // ['as' NAME] - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // NAME ['as' NAME] + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.alias (a. id, ( b ) ? ( b ). id : $B.parser_constants.NULL, EXTRA); - return done(); + var a; + var b; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + && + (b = _tmp_28_rule(p), !p.error_indicator) // ['as' NAME] + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.alias (a. id, ( b ) ? ( b ). id : $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // dotted_as_names: ','.dotted_as_name+ @@ -2566,26 +2592,27 @@ function dotted_as_names_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.dotted_as_name+ - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = _gather_29_rule(p)) // ','.dotted_as_name+ - ) - { - _res = a; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.dotted_as_name+ + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = _gather_29_rule(p)) // ','.dotted_as_name+ + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // dotted_as_name: dotted_name ['as' NAME] @@ -2594,42 +2621,43 @@ function dotted_as_name_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // dotted_name ['as' NAME] - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - var b; - if ( - (a = dotted_name_rule(p)) // dotted_name - && - (b = _tmp_31_rule(p), !p.error_indicator) // ['as' NAME] - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // dotted_name ['as' NAME] + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.alias (a. id, ( b ) ? ( b ). id : $B.parser_constants.NULL, EXTRA); - return done(); + var a; + var b; + if ( + (a = dotted_name_rule(p)) // dotted_name + && + (b = _tmp_31_rule(p), !p.error_indicator) // ['as' NAME] + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.alias (a. id, ( b ) ? ( b ). 
id : $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // Left-recursive @@ -2667,46 +2695,47 @@ function dotted_name_raw(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // dotted_name '.' NAME - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var b; - if ( - (a = dotted_name_rule(p)) // dotted_name - && - (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' - && - (b = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.join_names_with_dot (p, a, b); - return done(); - } - p.mark = _mark; - } - { // NAME - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // dotted_name '.' NAME + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + if ( + (a = dotted_name_rule(p)) // dotted_name + && + (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' + && + (b = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.join_names_with_dot (p, a, b); + break; + } + p.mark = _mark; } - var name_var; - if ( - (name_var = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = name_var; - return done(); + { // NAME + if (p.error_indicator) { + return NULL; + } + var name_var; + if ( + (name_var = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = name_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // block: NEWLINE INDENT statements DEDENT | simple_stmts | invalid_block @@ -2715,68 +2744,69 @@ function block_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, block_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - { // NEWLINE INDENT statements DEDENT - if (p.error_indicator) { - return NULL; - } - var a; - var dedent_var; - var indent_var; - var newline_var; - if ( - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - (indent_var = $B._PyPegen.expect_token(p, INDENT)) // token='INDENT' - && - (a = statements_rule(p)) // statements - && - (dedent_var = $B._PyPegen.expect_token(p, DEDENT)) // token='DEDENT' - ) - { - _res = a; - return done(); - } - p.mark = _mark; - } - { // simple_stmts - if (p.error_indicator) { - return NULL; - } - var simple_stmts_var; - if ( - (simple_stmts_var = simple_stmts_rule(p)) // simple_stmts - ) - { - _res = simple_stmts_var; - return done(); + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, block_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + { // NEWLINE INDENT statements DEDENT + if (p.error_indicator) { + return NULL; + } + var a; + var dedent_var; + var indent_var; + var newline_var; + if ( + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + (indent_var = $B._PyPegen.expect_token(p, INDENT)) // token='INDENT' + && + (a = statements_rule(p)) // statements + && + (dedent_var = $B._PyPegen.expect_token(p, DEDENT)) // token='DEDENT' + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_block - if (p.error_indicator) { - return NULL; + { // simple_stmts + if (p.error_indicator) { + return NULL; + } + var simple_stmts_var; + if ( + (simple_stmts_var = simple_stmts_rule(p)) // simple_stmts + ) + { + _res = simple_stmts_var; + break; + } + p.mark = _mark; } - 
var invalid_block_var; - if ( - (invalid_block_var = invalid_block_rule(p)) // invalid_block - ) - { - _res = invalid_block_var; - return done(); + if (p.call_invalid_rules) { // invalid_block + if (p.error_indicator) { + return NULL; + } + var invalid_block_var; + if ( + (invalid_block_var = invalid_block_rule(p)) // invalid_block + ) + { + _res = invalid_block_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, block_type, _res); return _res; - } } // decorators: (('@' named_expression NEWLINE))+ @@ -2785,26 +2815,27 @@ function decorators_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // (('@' named_expression NEWLINE))+ - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = _loop1_32_rule(p)) // (('@' named_expression NEWLINE))+ - ) - { - _res = a; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // (('@' named_expression NEWLINE))+ + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = _loop1_32_rule(p)) // (('@' named_expression NEWLINE))+ + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // class_def: decorators class_def_raw | class_def_raw @@ -2813,43 +2844,44 @@ function class_def_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // decorators class_def_raw - if (p.error_indicator) { - return NULL; - } - var a; - var b; - if ( - (a = decorators_rule(p)) // decorators - && - (b = class_def_raw_rule(p)) // class_def_raw - ) - { - _res = $B._PyPegen.class_def_decorators (p, a, b); - return done(); - } - p.mark = _mark; - } - { // class_def_raw - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // decorators class_def_raw + if (p.error_indicator) { + return NULL; + } + var a; + var b; + if ( + (a = decorators_rule(p)) // decorators + && + (b = class_def_raw_rule(p)) // class_def_raw + ) + { + _res = $B._PyPegen.class_def_decorators (p, a, b); + break; + } + p.mark = _mark; } - var class_def_raw_var; - if ( - (class_def_raw_var = class_def_raw_rule(p)) // class_def_raw - ) - { - _res = class_def_raw_var; - return done(); + { // class_def_raw + if (p.error_indicator) { + return NULL; + } + var class_def_raw_var; + if ( + (class_def_raw_var = class_def_raw_rule(p)) // class_def_raw + ) + { + _res = class_def_raw_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // class_def_raw: @@ -2860,68 +2892,69 @@ function class_def_raw_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_class_def_raw - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var invalid_class_def_raw_var; - if ( - (invalid_class_def_raw_var = invalid_class_def_raw_rule(p)) // invalid_class_def_raw - ) - { - _res = invalid_class_def_raw_var; - return done(); - } - p.mark = _mark; - } - { // 'class' NAME type_params? ['(' arguments? 
')'] ':' block - if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_class_def_raw + if (p.error_indicator) { + return NULL; + } + var invalid_class_def_raw_var; + if ( + (invalid_class_def_raw_var = invalid_class_def_raw_rule(p)) // invalid_class_def_raw + ) + { + _res = invalid_class_def_raw_var; + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var a; - var b; - var c; - var t; - if ( - (_keyword = $B._PyPegen.expect_token(p, 654)) // token='class' - && - (a = $B._PyPegen.name_token(p)) // NAME - && - (t = type_params_rule(p), !p.error_indicator) // type_params? - && - (b = _tmp_33_rule(p), !p.error_indicator) // ['(' arguments? ')'] - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (c = block_rule(p)) // block - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'class' NAME type_params? ['(' arguments? ')'] ':' block + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.ClassDef (a. id, ( b ) ? ( b ). args : $B.parser_constants.NULL, ( b ) ? ( b ). keywords : $B.parser_constants.NULL, c, $B.parser_constants.NULL, t, EXTRA); - return done(); + var _keyword; + var _literal; + var a; + var b; + var c; + var t; + if ( + (_keyword = $B._PyPegen.expect_token(p, 654)) // token='class' + && + (a = $B._PyPegen.name_token(p)) // NAME + && + (t = type_params_rule(p), !p.error_indicator) // type_params? + && + (b = _tmp_33_rule(p), !p.error_indicator) // ['(' arguments? ')'] + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (c = block_rule(p)) // block + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.ClassDef (a. id, ( b ) ? ( b ). args : $B.parser_constants.NULL, ( b ) ? ( b ). 
keywords : $B.parser_constants.NULL, c, $B.parser_constants.NULL, t, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // function_def: decorators function_def_raw | function_def_raw @@ -2930,43 +2963,44 @@ function function_def_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // decorators function_def_raw - if (p.error_indicator) { - return NULL; - } - var d; - var f; - if ( - (d = decorators_rule(p)) // decorators - && - (f = function_def_raw_rule(p)) // function_def_raw - ) - { - _res = $B._PyPegen.function_def_decorators (p, d, f); - return done(); - } - p.mark = _mark; - } - { // function_def_raw - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // decorators function_def_raw + if (p.error_indicator) { + return NULL; + } + var d; + var f; + if ( + (d = decorators_rule(p)) // decorators + && + (f = function_def_raw_rule(p)) // function_def_raw + ) + { + _res = $B._PyPegen.function_def_decorators (p, d, f); + break; + } + p.mark = _mark; } - var function_def_raw_var; - if ( - (function_def_raw_var = function_def_raw_rule(p)) // function_def_raw - ) - { - _res = function_def_raw_var; - return done(); + { // function_def_raw + if (p.error_indicator) { + return NULL; + } + var function_def_raw_var; + if ( + (function_def_raw_var = function_def_raw_rule(p)) // function_def_raw + ) + { + _res = function_def_raw_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // function_def_raw: @@ -2978,130 +3012,131 @@ function function_def_raw_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_def_raw - if (p.error_indicator) { - return NULL; - } - var invalid_def_raw_var; - if ( - (invalid_def_raw_var = invalid_def_raw_rule(p)) // invalid_def_raw - ) - { - _res = invalid_def_raw_var; - return done(); - } - p.mark = _mark; - } - { // 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var _literal; - var _literal_1; - var _literal_2; - var a; - var b; - var n; - var params; - var t; - var tc; - if ( - (_keyword = $B._PyPegen.expect_token(p, 652)) // token='def' - && - (n = $B._PyPegen.name_token(p)) // NAME - && - (t = type_params_rule(p), !p.error_indicator) // type_params? - && - (_literal = $B._PyPegen.expect_forced_token(p, 7, "(")) // forced_token='(' - && - (params = params_rule(p), !p.error_indicator) // params? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - && - (a = _tmp_34_rule(p), !p.error_indicator) // ['->' expression] - && - (_literal_2 = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' - && - (tc = func_type_comment_rule(p), !p.error_indicator) // func_type_comment? 
- && - (b = block_rule(p)) // block - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_def_raw + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.FunctionDef (n. id, ( params ) ? params : $B.helper_functions.CHECK ( $B.ast.arguments, $B._PyPegen.empty_arguments ( p ) ), b, $B.parser_constants.NULL, a, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), t, EXTRA); - return done(); + var invalid_def_raw_var; + if ( + (invalid_def_raw_var = invalid_def_raw_rule(p)) // invalid_def_raw + ) + { + _res = invalid_def_raw_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // ASYNC 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block - if (p.error_indicator) { - return NULL; + { // 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var _literal_1; + var _literal_2; + var a; + var b; + var n; + var params; + var t; + var tc; + if ( + (_keyword = $B._PyPegen.expect_token(p, 652)) // token='def' + && + (n = $B._PyPegen.name_token(p)) // NAME + && + (t = type_params_rule(p), !p.error_indicator) // type_params? + && + (_literal = $B._PyPegen.expect_forced_token(p, 7, "(")) // forced_token='(' + && + (params = params_rule(p), !p.error_indicator) // params? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + && + (a = _tmp_34_rule(p), !p.error_indicator) // ['->' expression] + && + (_literal_2 = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' + && + (tc = func_type_comment_rule(p), !p.error_indicator) // func_type_comment? + && + (b = block_rule(p)) // block + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.FunctionDef (n. id, ( params ) ? params : $B.helper_functions.CHECK ( $B.ast.arguments, $B._PyPegen.empty_arguments ( p ) ), b, $B.parser_constants.NULL, a, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), t, EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var _literal_1; - var _literal_2; - var a; - var async_var; - var b; - var n; - var params; - var t; - var tc; - if ( - (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' - && - (_keyword = $B._PyPegen.expect_token(p, 652)) // token='def' - && - (n = $B._PyPegen.name_token(p)) // NAME - && - (t = type_params_rule(p), !p.error_indicator) // type_params? - && - (_literal = $B._PyPegen.expect_forced_token(p, 7, "(")) // forced_token='(' - && - (params = params_rule(p), !p.error_indicator) // params? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - && - (a = _tmp_35_rule(p), !p.error_indicator) // ['->' expression] - && - (_literal_2 = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' - && - (tc = func_type_comment_rule(p), !p.error_indicator) // func_type_comment? - && - (b = block_rule(p)) // block - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // ASYNC 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? 
block + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 5, "Async functions are", new $B._PyAST.AsyncFunctionDef ( n. id, ( params ) ? params : $B.helper_functions.CHECK ( $B.ast.arguments, $B._PyPegen.empty_arguments ( p ) ), b, $B.parser_constants.NULL, a, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), t, EXTRA )); - return done(); + var _keyword; + var _literal; + var _literal_1; + var _literal_2; + var a; + var async_var; + var b; + var n; + var params; + var t; + var tc; + if ( + (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' + && + (_keyword = $B._PyPegen.expect_token(p, 652)) // token='def' + && + (n = $B._PyPegen.name_token(p)) // NAME + && + (t = type_params_rule(p), !p.error_indicator) // type_params? + && + (_literal = $B._PyPegen.expect_forced_token(p, 7, "(")) // forced_token='(' + && + (params = params_rule(p), !p.error_indicator) // params? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + && + (a = _tmp_35_rule(p), !p.error_indicator) // ['->' expression] + && + (_literal_2 = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' + && + (tc = func_type_comment_rule(p), !p.error_indicator) // func_type_comment? + && + (b = block_rule(p)) // block + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 5, "Async functions are", new $B._PyAST.AsyncFunctionDef ( n. id, ( params ) ? params : $B.helper_functions.CHECK ( $B.ast.arguments, $B._PyPegen.empty_arguments ( p ) ), b, $B.parser_constants.NULL, a, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), t, EXTRA )); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // params: invalid_parameters | parameters @@ -3110,40 +3145,41 @@ function params_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.call_invalid_rules) { // invalid_parameters - if (p.error_indicator) { - return NULL; - } - var invalid_parameters_var; - if ( - (invalid_parameters_var = invalid_parameters_rule(p)) // invalid_parameters - ) - { - _res = invalid_parameters_var; - return done(); - } - p.mark = _mark; - } - { // parameters - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.call_invalid_rules) { // invalid_parameters + if (p.error_indicator) { + return NULL; + } + var invalid_parameters_var; + if ( + (invalid_parameters_var = invalid_parameters_rule(p)) // invalid_parameters + ) + { + _res = invalid_parameters_var; + break; + } + p.mark = _mark; } - var parameters_var; - if ( - (parameters_var = parameters_rule(p)) // parameters - ) - { - _res = parameters_var; - return done(); + { // parameters + if (p.error_indicator) { + return NULL; + } + var parameters_var; + if ( + (parameters_var = parameters_rule(p)) // parameters + ) + { + _res = parameters_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // parameters: @@ -3157,106 +3193,107 @@ function parameters_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // slash_no_default param_no_default* param_with_default* star_etc? 
- if (p.error_indicator) { - return NULL; - } - var a; - var b; - var c; - var d; - if ( - (a = slash_no_default_rule(p)) // slash_no_default - && - (b = _loop0_36_rule(p)) // param_no_default* - && - (c = _loop0_37_rule(p)) // param_with_default* - && - (d = star_etc_rule(p), !p.error_indicator) // star_etc? - ) - { - _res = $B.helper_functions.CHECK_VERSION ($B.ast.arguments, 8, "Positional-only parameters are", $B._PyPegen.make_arguments ( p, a, $B.parser_constants.NULL, b, c, d )); - return done(); - } - p.mark = _mark; - } - { // slash_with_default param_with_default* star_etc? - if (p.error_indicator) { - return NULL; - } - var a; - var b; - var c; - if ( - (a = slash_with_default_rule(p)) // slash_with_default - && - (b = _loop0_38_rule(p)) // param_with_default* - && - (c = star_etc_rule(p), !p.error_indicator) // star_etc? - ) - { - _res = $B.helper_functions.CHECK_VERSION ($B.ast.arguments, 8, "Positional-only parameters are", $B._PyPegen.make_arguments ( p, $B.parser_constants.NULL, a, $B.parser_constants.NULL, b, c )); - return done(); - } - p.mark = _mark; - } - { // param_no_default+ param_with_default* star_etc? - if (p.error_indicator) { - return NULL; - } - var a; - var b; - var c; - if ( - (a = _loop1_39_rule(p)) // param_no_default+ - && - (b = _loop0_40_rule(p)) // param_with_default* - && - (c = star_etc_rule(p), !p.error_indicator) // star_etc? - ) - { - _res = $B._PyPegen.make_arguments (p, $B.parser_constants.NULL, $B.parser_constants.NULL, a, b, c); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // slash_no_default param_no_default* param_with_default* star_etc? + if (p.error_indicator) { + return NULL; + } + var a; + var b; + var c; + var d; + if ( + (a = slash_no_default_rule(p)) // slash_no_default + && + (b = _loop0_36_rule(p)) // param_no_default* + && + (c = _loop0_37_rule(p)) // param_with_default* + && + (d = star_etc_rule(p), !p.error_indicator) // star_etc? + ) + { + _res = $B.helper_functions.CHECK_VERSION ($B.ast.arguments, 8, "Positional-only parameters are", $B._PyPegen.make_arguments ( p, a, $B.parser_constants.NULL, b, c, d )); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // param_with_default+ star_etc? - if (p.error_indicator) { - return NULL; + { // slash_with_default param_with_default* star_etc? + if (p.error_indicator) { + return NULL; + } + var a; + var b; + var c; + if ( + (a = slash_with_default_rule(p)) // slash_with_default + && + (b = _loop0_38_rule(p)) // param_with_default* + && + (c = star_etc_rule(p), !p.error_indicator) // star_etc? + ) + { + _res = $B.helper_functions.CHECK_VERSION ($B.ast.arguments, 8, "Positional-only parameters are", $B._PyPegen.make_arguments ( p, $B.parser_constants.NULL, a, $B.parser_constants.NULL, b, c )); + break; + } + p.mark = _mark; } - var a; - var b; - if ( - (a = _loop1_41_rule(p)) // param_with_default+ - && - (b = star_etc_rule(p), !p.error_indicator) // star_etc? - ) - { - _res = $B._PyPegen.make_arguments (p, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, a, b); - return done(); + { // param_no_default+ param_with_default* star_etc? + if (p.error_indicator) { + return NULL; + } + var a; + var b; + var c; + if ( + (a = _loop1_39_rule(p)) // param_no_default+ + && + (b = _loop0_40_rule(p)) // param_with_default* + && + (c = star_etc_rule(p), !p.error_indicator) // star_etc? 
+ ) + { + _res = $B._PyPegen.make_arguments (p, $B.parser_constants.NULL, $B.parser_constants.NULL, a, b, c); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // star_etc - if (p.error_indicator) { - return NULL; + { // param_with_default+ star_etc? + if (p.error_indicator) { + return NULL; + } + var a; + var b; + if ( + (a = _loop1_41_rule(p)) // param_with_default+ + && + (b = star_etc_rule(p), !p.error_indicator) // star_etc? + ) + { + _res = $B._PyPegen.make_arguments (p, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, a, b); + break; + } + p.mark = _mark; } - var a; - if ( - (a = star_etc_rule(p)) // star_etc - ) - { - _res = $B._PyPegen.make_arguments (p, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, a); - return done(); + { // star_etc + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = star_etc_rule(p)) // star_etc + ) + { + _res = $B._PyPegen.make_arguments (p, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // slash_no_default: param_no_default+ '/' ',' | param_no_default+ '/' &')' @@ -3265,51 +3302,52 @@ function slash_no_default_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // param_no_default+ '/' ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var a; - if ( - (a = _loop1_42_rule(p)) // param_no_default+ - && - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = a; - return done(); - } - p.mark = _mark; - } - { // param_no_default+ '/' &')' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // param_no_default+ '/' ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + if ( + (a = _loop1_42_rule(p)) // param_no_default+ + && + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = a; + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (a = _loop1_43_rule(p)) // param_no_default+ - && - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' - ) - { - _res = a; - return done(); + { // param_no_default+ '/' &')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (a = _loop1_43_rule(p)) // param_no_default+ + && + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // slash_with_default: @@ -3320,57 +3358,58 @@ function slash_with_default_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // param_no_default* param_with_default+ '/' ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var a; - var b; - if ( - (a = _loop0_44_rule(p)) // param_no_default* - && - (b = _loop1_45_rule(p)) // param_with_default+ - && - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - && - (_literal_1 = 
$B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = $B._PyPegen.slash_with_default (p, a, b); - return done(); - } - p.mark = _mark; - } - { // param_no_default* param_with_default+ '/' &')' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // param_no_default* param_with_default+ '/' ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + var b; + if ( + (a = _loop0_44_rule(p)) // param_no_default* + && + (b = _loop1_45_rule(p)) // param_with_default+ + && + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = $B._PyPegen.slash_with_default (p, a, b); + break; + } + p.mark = _mark; } - var _literal; - var a; - var b; - if ( - (a = _loop0_46_rule(p)) // param_no_default* - && - (b = _loop1_47_rule(p)) // param_with_default+ - && - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' - ) - { - _res = $B._PyPegen.slash_with_default (p, a, b); - return done(); + { // param_no_default* param_with_default+ '/' &')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + if ( + (a = _loop0_46_rule(p)) // param_no_default* + && + (b = _loop1_47_rule(p)) // param_with_default+ + && + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' + ) + { + _res = $B._PyPegen.slash_with_default (p, a, b); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // star_etc: @@ -3384,109 +3423,110 @@ function star_etc_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.call_invalid_rules) { // invalid_star_etc - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.call_invalid_rules) { // invalid_star_etc + if (p.error_indicator) { + return NULL; + } + var invalid_star_etc_var; + if ( + (invalid_star_etc_var = invalid_star_etc_rule(p)) // invalid_star_etc + ) + { + _res = invalid_star_etc_var; + break; + } + p.mark = _mark; } - var invalid_star_etc_var; - if ( - (invalid_star_etc_var = invalid_star_etc_rule(p)) // invalid_star_etc - ) - { - _res = invalid_star_etc_var; - return done(); + { // '*' param_no_default param_maybe_default* kwds? + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + var c; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (a = param_no_default_rule(p)) // param_no_default + && + (b = _loop0_48_rule(p)) // param_maybe_default* + && + (c = kwds_rule(p), !p.error_indicator) // kwds? + ) + { + _res = $B._PyPegen.star_etc (p, a, b, c); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '*' param_no_default param_maybe_default* kwds? - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var b; - var c; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (a = param_no_default_rule(p)) // param_no_default - && - (b = _loop0_48_rule(p)) // param_maybe_default* - && - (c = kwds_rule(p), !p.error_indicator) // kwds? - ) - { - _res = $B._PyPegen.star_etc (p, a, b, c); - return done(); - } - p.mark = _mark; - } - { // '*' param_no_default_star_annotation param_maybe_default* kwds? 
- if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var b; - var c; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (a = param_no_default_star_annotation_rule(p)) // param_no_default_star_annotation - && - (b = _loop0_49_rule(p)) // param_maybe_default* - && - (c = kwds_rule(p), !p.error_indicator) // kwds? - ) - { - _res = $B._PyPegen.star_etc (p, a, b, c); - return done(); - } - p.mark = _mark; - } - { // '*' ',' param_maybe_default+ kwds? - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var b; - var c; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (b = _loop1_50_rule(p)) // param_maybe_default+ - && - (c = kwds_rule(p), !p.error_indicator) // kwds? - ) - { - _res = $B._PyPegen.star_etc (p, $B.parser_constants.NULL, b, c); - return done(); + { // '*' param_no_default_star_annotation param_maybe_default* kwds? + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + var c; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (a = param_no_default_star_annotation_rule(p)) // param_no_default_star_annotation + && + (b = _loop0_49_rule(p)) // param_maybe_default* + && + (c = kwds_rule(p), !p.error_indicator) // kwds? + ) + { + _res = $B._PyPegen.star_etc (p, a, b, c); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // kwds - if (p.error_indicator) { - return NULL; + { // '*' ',' param_maybe_default+ kwds? + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var b; + var c; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (b = _loop1_50_rule(p)) // param_maybe_default+ + && + (c = kwds_rule(p), !p.error_indicator) // kwds? 
+ ) + { + _res = $B._PyPegen.star_etc (p, $B.parser_constants.NULL, b, c); + break; + } + p.mark = _mark; } - var a; - if ( - (a = kwds_rule(p)) // kwds - ) - { - _res = $B._PyPegen.star_etc (p, $B.parser_constants.NULL, $B.parser_constants.NULL, a); - return done(); + { // kwds + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = kwds_rule(p)) // kwds + ) + { + _res = $B._PyPegen.star_etc (p, $B.parser_constants.NULL, $B.parser_constants.NULL, a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // kwds: invalid_kwds | '**' param_no_default @@ -3495,43 +3535,44 @@ function kwds_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.call_invalid_rules) { // invalid_kwds - if (p.error_indicator) { - return NULL; - } - var invalid_kwds_var; - if ( - (invalid_kwds_var = invalid_kwds_rule(p)) // invalid_kwds - ) - { - _res = invalid_kwds_var; - return done(); - } - p.mark = _mark; - } - { // '**' param_no_default - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.call_invalid_rules) { // invalid_kwds + if (p.error_indicator) { + return NULL; + } + var invalid_kwds_var; + if ( + (invalid_kwds_var = invalid_kwds_rule(p)) // invalid_kwds + ) + { + _res = invalid_kwds_var; + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (a = param_no_default_rule(p)) // param_no_default - ) - { - _res = a; - return done(); + { // '**' param_no_default + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (a = param_no_default_rule(p)) // param_no_default + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // param_no_default: param ',' TYPE_COMMENT? | param TYPE_COMMENT? &')' @@ -3540,51 +3581,52 @@ function param_no_default_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // param ',' TYPE_COMMENT? - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var tc; - if ( - (a = param_rule(p)) // param - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - ) - { - _res = $B._PyPegen.add_type_comment_to_arg (p, a, tc); - return done(); - } - p.mark = _mark; - } - { // param TYPE_COMMENT? &')' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // param ',' TYPE_COMMENT? + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var tc; + if ( + (a = param_rule(p)) // param + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? + ) + { + _res = $B._PyPegen.add_type_comment_to_arg (p, a, tc); + break; + } + p.mark = _mark; } - var a; - var tc; - if ( - (a = param_rule(p)) // param - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' - ) - { - _res = $B._PyPegen.add_type_comment_to_arg (p, a, tc); - return done(); + { // param TYPE_COMMENT? 
&')' + if (p.error_indicator) { + return NULL; + } + var a; + var tc; + if ( + (a = param_rule(p)) // param + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? + && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' + ) + { + _res = $B._PyPegen.add_type_comment_to_arg (p, a, tc); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // param_no_default_star_annotation: @@ -3595,51 +3637,52 @@ function param_no_default_star_annotation_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // param_star_annotation ',' TYPE_COMMENT? - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var tc; - if ( - (a = param_star_annotation_rule(p)) // param_star_annotation - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - ) - { - _res = $B._PyPegen.add_type_comment_to_arg (p, a, tc); - return done(); - } - p.mark = _mark; - } - { // param_star_annotation TYPE_COMMENT? &')' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // param_star_annotation ',' TYPE_COMMENT? + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var tc; + if ( + (a = param_star_annotation_rule(p)) // param_star_annotation + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? + ) + { + _res = $B._PyPegen.add_type_comment_to_arg (p, a, tc); + break; + } + p.mark = _mark; } - var a; - var tc; - if ( - (a = param_star_annotation_rule(p)) // param_star_annotation - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' - ) - { - _res = $B._PyPegen.add_type_comment_to_arg (p, a, tc); - return done(); + { // param_star_annotation TYPE_COMMENT? &')' + if (p.error_indicator) { + return NULL; + } + var a; + var tc; + if ( + (a = param_star_annotation_rule(p)) // param_star_annotation + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? + && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' + ) + { + _res = $B._PyPegen.add_type_comment_to_arg (p, a, tc); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // param_with_default: param default ',' TYPE_COMMENT? | param default TYPE_COMMENT? &')' @@ -3648,57 +3691,58 @@ function param_with_default_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // param default ',' TYPE_COMMENT? - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var c; - var tc; - if ( - (a = param_rule(p)) // param - && - (c = default_rule(p)) // default - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - ) - { - _res = $B._PyPegen.name_default_pair (p, a, c, tc); - return done(); - } - p.mark = _mark; - } - { // param default TYPE_COMMENT? &')' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // param default ',' TYPE_COMMENT? 
+ if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var c; + var tc; + if ( + (a = param_rule(p)) // param + && + (c = default_rule(p)) // default + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? + ) + { + _res = $B._PyPegen.name_default_pair (p, a, c, tc); + break; + } + p.mark = _mark; } - var a; - var c; - var tc; - if ( - (a = param_rule(p)) // param - && - (c = default_rule(p)) // default - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' - ) - { - _res = $B._PyPegen.name_default_pair (p, a, c, tc); - return done(); + { // param default TYPE_COMMENT? &')' + if (p.error_indicator) { + return NULL; + } + var a; + var c; + var tc; + if ( + (a = param_rule(p)) // param + && + (c = default_rule(p)) // default + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? + && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' + ) + { + _res = $B._PyPegen.name_default_pair (p, a, c, tc); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // param_maybe_default: @@ -3709,57 +3753,58 @@ function param_maybe_default_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // param default? ',' TYPE_COMMENT? - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var c; - var tc; - if ( - (a = param_rule(p)) // param - && - (c = default_rule(p), !p.error_indicator) // default? - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - ) - { - _res = $B._PyPegen.name_default_pair (p, a, c, tc); - return done(); - } - p.mark = _mark; - } - { // param default? TYPE_COMMENT? &')' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // param default? ',' TYPE_COMMENT? + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var c; + var tc; + if ( + (a = param_rule(p)) // param + && + (c = default_rule(p), !p.error_indicator) // default? + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? + ) + { + _res = $B._PyPegen.name_default_pair (p, a, c, tc); + break; + } + p.mark = _mark; } - var a; - var c; - var tc; - if ( - (a = param_rule(p)) // param - && - (c = default_rule(p), !p.error_indicator) // default? - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' - ) - { - _res = $B._PyPegen.name_default_pair (p, a, c, tc); - return done(); + { // param default? TYPE_COMMENT? &')' + if (p.error_indicator) { + return NULL; + } + var a; + var c; + var tc; + if ( + (a = param_rule(p)) // param + && + (c = default_rule(p), !p.error_indicator) // default? + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? 
+ && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' + ) + { + _res = $B._PyPegen.name_default_pair (p, a, c, tc); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // param: NAME annotation? @@ -3768,42 +3813,43 @@ function param_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // NAME annotation? - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - var b; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - && - (b = annotation_rule(p), !p.error_indicator) // annotation? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // NAME annotation? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.arg (a. id, b, $B.parser_constants.NULL, EXTRA); - return done(); + var a; + var b; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + && + (b = annotation_rule(p), !p.error_indicator) // annotation? + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.arg (a. id, b, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // param_star_annotation: NAME star_annotation @@ -3812,42 +3858,43 @@ function param_star_annotation_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // NAME star_annotation - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - var b; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - && - (b = star_annotation_rule(p)) // star_annotation - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // NAME star_annotation + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.arg (a. id, b, $B.parser_constants.NULL, EXTRA); - return done(); + var a; + var b; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + && + (b = star_annotation_rule(p)) // star_annotation + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.arg (a. 
id, b, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // annotation: ':' expression @@ -3856,29 +3903,30 @@ function annotation_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ':' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (a = expression_rule(p)) // expression - ) - { - _res = a; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ':' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (a = expression_rule(p)) // expression + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // star_annotation: ':' star_expression @@ -3887,29 +3935,30 @@ function star_annotation_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ':' star_expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (a = star_expression_rule(p)) // star_expression - ) - { - _res = a; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ':' star_expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (a = star_expression_rule(p)) // star_expression + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // default: '=' expression | invalid_default @@ -3918,43 +3967,44 @@ function default_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '=' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (a = expression_rule(p)) // expression - ) - { - _res = a; - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_default - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '=' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (a = expression_rule(p)) // expression + ) + { + _res = a; + break; + } + p.mark = _mark; } - var invalid_default_var; - if ( - (invalid_default_var = invalid_default_rule(p)) // invalid_default - ) - { - _res = invalid_default_var; - return done(); + if (p.call_invalid_rules) { // invalid_default + if (p.error_indicator) { + return NULL; + } + var invalid_default_var; + if ( + (invalid_default_var = invalid_default_rule(p)) // invalid_default + ) + { + _res = invalid_default_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // if_stmt: @@ -3966,97 +4016,98 @@ function if_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = 
p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_if_stmt - if (p.error_indicator) { - return NULL; - } - var invalid_if_stmt_var; - if ( - (invalid_if_stmt_var = invalid_if_stmt_rule(p)) // invalid_if_stmt - ) - { - _res = invalid_if_stmt_var; - return done(); - } - p.mark = _mark; - } - { // 'if' named_expression ':' block elif_stmt - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var _literal; - var a; - var b; - var c; - if ( - (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' - && - (a = named_expression_rule(p)) // named_expression - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - && - (c = elif_stmt_rule(p)) // elif_stmt - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_if_stmt + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.If (a, b, $B.helper_functions.CHECK ( $B.parser_constants.asdl_stmt_seq, $B._PyPegen.singleton_seq ( p, c ) ), EXTRA); - return done(); + var invalid_if_stmt_var; + if ( + (invalid_if_stmt_var = invalid_if_stmt_rule(p)) // invalid_if_stmt + ) + { + _res = invalid_if_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'if' named_expression ':' block else_block? - if (p.error_indicator) { - return NULL; + { // 'if' named_expression ':' block elif_stmt + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var a; + var b; + var c; + if ( + (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' + && + (a = named_expression_rule(p)) // named_expression + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + && + (c = elif_stmt_rule(p)) // elif_stmt + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.If (a, b, $B.helper_functions.CHECK ( $B.parser_constants.asdl_stmt_seq, $B._PyPegen.singleton_seq ( p, c ) ), EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var a; - var b; - var c; - if ( - (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' - && - (a = named_expression_rule(p)) // named_expression - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - && - (c = else_block_rule(p), !p.error_indicator) // else_block? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'if' named_expression ':' block else_block? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.If (a, b, c, EXTRA); - return done(); + var _keyword; + var _literal; + var a; + var b; + var c; + if ( + (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' + && + (a = named_expression_rule(p)) // named_expression + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + && + (c = else_block_rule(p), !p.error_indicator) // else_block? 
+ ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.If (a, b, c, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // elif_stmt: @@ -4068,97 +4119,98 @@ function elif_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_elif_stmt - if (p.error_indicator) { - return NULL; - } - var invalid_elif_stmt_var; - if ( - (invalid_elif_stmt_var = invalid_elif_stmt_rule(p)) // invalid_elif_stmt - ) - { - _res = invalid_elif_stmt_var; - return done(); - } - p.mark = _mark; - } - { // 'elif' named_expression ':' block elif_stmt - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var _literal; - var a; - var b; - var c; - if ( - (_keyword = $B._PyPegen.expect_token(p, 644)) // token='elif' - && - (a = named_expression_rule(p)) // named_expression - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - && - (c = elif_stmt_rule(p)) // elif_stmt - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_elif_stmt + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.If (a, b, $B.helper_functions.CHECK ( $B.parser_constants.asdl_stmt_seq, $B._PyPegen.singleton_seq ( p, c ) ), EXTRA); - return done(); + var invalid_elif_stmt_var; + if ( + (invalid_elif_stmt_var = invalid_elif_stmt_rule(p)) // invalid_elif_stmt + ) + { + _res = invalid_elif_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'elif' named_expression ':' block else_block? 
- if (p.error_indicator) { - return NULL; + { // 'elif' named_expression ':' block elif_stmt + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var a; + var b; + var c; + if ( + (_keyword = $B._PyPegen.expect_token(p, 644)) // token='elif' + && + (a = named_expression_rule(p)) // named_expression + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + && + (c = elif_stmt_rule(p)) // elif_stmt + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.If (a, b, $B.helper_functions.CHECK ( $B.parser_constants.asdl_stmt_seq, $B._PyPegen.singleton_seq ( p, c ) ), EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var a; - var b; - var c; - if ( - (_keyword = $B._PyPegen.expect_token(p, 644)) // token='elif' - && - (a = named_expression_rule(p)) // named_expression - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - && - (c = else_block_rule(p), !p.error_indicator) // else_block? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'elif' named_expression ':' block else_block? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.If (a, b, c, EXTRA); - return done(); + var _keyword; + var _literal; + var a; + var b; + var c; + if ( + (_keyword = $B._PyPegen.expect_token(p, 644)) // token='elif' + && + (a = named_expression_rule(p)) // named_expression + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + && + (c = else_block_rule(p), !p.error_indicator) // else_block? 
+ ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.If (a, b, c, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // else_block: invalid_else_stmt | 'else' &&':' block @@ -4167,46 +4219,47 @@ function else_block_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.call_invalid_rules) { // invalid_else_stmt - if (p.error_indicator) { - return NULL; - } - var invalid_else_stmt_var; - if ( - (invalid_else_stmt_var = invalid_else_stmt_rule(p)) // invalid_else_stmt - ) - { - _res = invalid_else_stmt_var; - return done(); - } - p.mark = _mark; - } - { // 'else' &&':' block - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.call_invalid_rules) { // invalid_else_stmt + if (p.error_indicator) { + return NULL; + } + var invalid_else_stmt_var; + if ( + (invalid_else_stmt_var = invalid_else_stmt_rule(p)) // invalid_else_stmt + ) + { + _res = invalid_else_stmt_var; + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var b; - if ( - (_keyword = $B._PyPegen.expect_token(p, 645)) // token='else' - && - (_literal = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' - && - (b = block_rule(p)) // block - ) - { - _res = b; - return done(); + { // 'else' &&':' block + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var b; + if ( + (_keyword = $B._PyPegen.expect_token(p, 645)) // token='else' + && + (_literal = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' + && + (b = block_rule(p)) // block + ) + { + _res = b; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // while_stmt: invalid_while_stmt | 'while' named_expression ':' block else_block? @@ -4215,65 +4268,66 @@ function while_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_while_stmt - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var invalid_while_stmt_var; - if ( - (invalid_while_stmt_var = invalid_while_stmt_rule(p)) // invalid_while_stmt - ) - { - _res = invalid_while_stmt_var; - return done(); - } - p.mark = _mark; - } - { // 'while' named_expression ':' block else_block? 
- if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_while_stmt + if (p.error_indicator) { + return NULL; + } + var invalid_while_stmt_var; + if ( + (invalid_while_stmt_var = invalid_while_stmt_rule(p)) // invalid_while_stmt + ) + { + _res = invalid_while_stmt_var; + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var a; - var b; - var c; - if ( - (_keyword = $B._PyPegen.expect_token(p, 647)) // token='while' - && - (a = named_expression_rule(p)) // named_expression - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - && - (c = else_block_rule(p), !p.error_indicator) // else_block? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'while' named_expression ':' block else_block? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.While (a, b, c, EXTRA); - return done(); + var _keyword; + var _literal; + var a; + var b; + var c; + if ( + (_keyword = $B._PyPegen.expect_token(p, 647)) // token='while' + && + (a = named_expression_rule(p)) // named_expression + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + && + (c = else_block_rule(p), !p.error_indicator) // else_block? + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.While (a, b, c, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // for_stmt: @@ -4286,144 +4340,145 @@ function for_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_for_stmt - if (p.error_indicator) { - return NULL; - } - var invalid_for_stmt_var; - if ( - (invalid_for_stmt_var = invalid_for_stmt_rule(p)) // invalid_for_stmt - ) - { - _res = invalid_for_stmt_var; - return done(); - } - p.mark = _mark; - } - { // 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _cut_var = 0; - var _keyword; - var _keyword_1; - var _literal; - var b; - var el; - var ex; - var t; - var tc; - if ( - (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' - && - (t = star_targets_rule(p)) // star_targets - && - (_keyword_1 = $B._PyPegen.expect_token(p, 651)) // token='in' - && - (_cut_var = 1) - && - (ex = star_expressions_rule(p)) // star_expressions - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - && - (b = block_rule(p)) // block - && - (el = else_block_rule(p), !p.error_indicator) // else_block? 
- ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_for_stmt + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.For (t, ex, b, el, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), EXTRA); - return done(); - } - p.mark = _mark; - if (_cut_var) { - return NULL; - } - } - { // ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? - if (p.error_indicator) { - return NULL; + var invalid_for_stmt_var; + if ( + (invalid_for_stmt_var = invalid_for_stmt_rule(p)) // invalid_for_stmt + ) + { + _res = invalid_for_stmt_var; + break; + } + p.mark = _mark; } - var _cut_var = 0; - var _keyword; - var _keyword_1; - var _literal; - var async_var; - var b; - var el; - var ex; - var t; - var tc; - if ( - (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' - && - (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' - && - (t = star_targets_rule(p)) // star_targets - && - (_keyword_1 = $B._PyPegen.expect_token(p, 651)) // token='in' - && - (_cut_var = 1) - && - (ex = star_expressions_rule(p)) // star_expressions - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - && - (b = block_rule(p)) // block - && - (el = else_block_rule(p), !p.error_indicator) // else_block? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? + if (p.error_indicator) { + return NULL; + } + var _cut_var = 0; + var _keyword; + var _keyword_1; + var _literal; + var b; + var el; + var ex; + var t; + var tc; + if ( + (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' + && + (t = star_targets_rule(p)) // star_targets + && + (_keyword_1 = $B._PyPegen.expect_token(p, 651)) // token='in' + && + (_cut_var = 1) + && + (ex = star_expressions_rule(p)) // star_expressions + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? + && + (b = block_rule(p)) // block + && + (el = else_block_rule(p), !p.error_indicator) // else_block? + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.For (t, ex, b, el, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), EXTRA); + break; + } + p.mark = _mark; + if (_cut_var) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 5, "Async for loops are", new $B._PyAST.AsyncFor ( t, ex, b, el, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), EXTRA )); - return done(); - } - p.mark = _mark; - if (_cut_var) { - return NULL; } - } - if (p.call_invalid_rules) { // invalid_for_target - if (p.error_indicator) { - return NULL; + { // ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? 
+ if (p.error_indicator) { + return NULL; + } + var _cut_var = 0; + var _keyword; + var _keyword_1; + var _literal; + var async_var; + var b; + var el; + var ex; + var t; + var tc; + if ( + (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' + && + (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' + && + (t = star_targets_rule(p)) // star_targets + && + (_keyword_1 = $B._PyPegen.expect_token(p, 651)) // token='in' + && + (_cut_var = 1) + && + (ex = star_expressions_rule(p)) // star_expressions + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? + && + (b = block_rule(p)) // block + && + (el = else_block_rule(p), !p.error_indicator) // else_block? + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 5, "Async for loops are", new $B._PyAST.AsyncFor ( t, ex, b, el, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), EXTRA )); + break; + } + p.mark = _mark; + if (_cut_var) { + return NULL; + } } - var invalid_for_target_var; - if ( - (invalid_for_target_var = invalid_for_target_rule(p)) // invalid_for_target - ) - { - _res = invalid_for_target_var; - return done(); + if (p.call_invalid_rules) { // invalid_for_target + if (p.error_indicator) { + return NULL; + } + var invalid_for_target_var; + if ( + (invalid_for_target_var = invalid_for_target_rule(p)) // invalid_for_target + ) + { + _res = invalid_for_target_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // with_stmt: @@ -4438,195 +4493,196 @@ function with_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_with_stmt_indent - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var invalid_with_stmt_indent_var; - if ( - (invalid_with_stmt_indent_var = invalid_with_stmt_indent_rule(p)) // invalid_with_stmt_indent - ) - { - _res = invalid_with_stmt_indent_var; - return done(); - } - p.mark = _mark; - } - { // 'with' '(' ','.with_item+ ','? 
')' ':' block - if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_with_stmt_indent + if (p.error_indicator) { + return NULL; + } + var invalid_with_stmt_indent_var; + if ( + (invalid_with_stmt_indent_var = invalid_with_stmt_indent_rule(p)) // invalid_with_stmt_indent + ) + { + _res = invalid_with_stmt_indent_var; + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var _literal_1; - var _literal_2; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var b; - if ( - (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' - && - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = _gather_51_rule(p)) // ','.with_item+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - && - (_literal_2 = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'with' '(' ','.with_item+ ','? ')' ':' block + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 9, "Parenthesized context managers are", new $B._PyAST.With ( a, b, $B.parser_constants.NULL, EXTRA )); - return done(); + var _keyword; + var _literal; + var _literal_1; + var _literal_2; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var b; + if ( + (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' + && + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = _gather_51_rule(p)) // ','.with_item+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + && + (_literal_2 = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 9, "Parenthesized context managers are", new $B._PyAST.With ( a, b, $B.parser_constants.NULL, EXTRA )); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'with' ','.with_item+ ':' TYPE_COMMENT? block - if (p.error_indicator) { - return NULL; + { // 'with' ','.with_item+ ':' TYPE_COMMENT? block + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var a; + var b; + var tc; + if ( + (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' + && + (a = _gather_53_rule(p)) // ','.with_item+ + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? 
+ && + (b = block_rule(p)) // block + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.With (a, b, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var a; - var b; - var tc; - if ( - (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' - && - (a = _gather_53_rule(p)) // ','.with_item+ - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - && - (b = block_rule(p)) // block - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.With (a, b, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), EXTRA); - return done(); + var _keyword; + var _literal; + var _literal_1; + var _literal_2; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var async_var; + var b; + if ( + (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' + && + (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' + && + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = _gather_55_rule(p)) // ','.with_item+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + && + (_literal_2 = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 5, "Async with statements are", new $B._PyAST.AsyncWith ( a, b, $B.parser_constants.NULL, EXTRA )); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block - if (p.error_indicator) { - return NULL; - } - var _keyword; - var _literal; - var _literal_1; - var _literal_2; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var async_var; - var b; - if ( - (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' - && - (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' - && - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = _gather_55_rule(p)) // ','.with_item+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - && - (_literal_2 = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 5, "Async with statements are", new $B._PyAST.AsyncWith ( a, b, $B.parser_constants.NULL, EXTRA )); - return done(); - } - p.mark = _mark; - } - { // ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? 
block - if (p.error_indicator) { - return NULL; + var _keyword; + var _literal; + var a; + var async_var; + var b; + var tc; + if ( + (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' + && + (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' + && + (a = _gather_57_rule(p)) // ','.with_item+ + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? + && + (b = block_rule(p)) // block + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 5, "Async with statements are", new $B._PyAST.AsyncWith ( a, b, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), EXTRA )); + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var a; - var async_var; - var b; - var tc; - if ( - (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' - && - (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' - && - (a = _gather_57_rule(p)) // ','.with_item+ - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (tc = $B._PyPegen.expect_token(p, TYPE_COMMENT), !p.error_indicator) // TYPE_COMMENT? - && - (b = block_rule(p)) // block - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + if (p.call_invalid_rules) { // invalid_with_stmt + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 5, "Async with statements are", new $B._PyAST.AsyncWith ( a, b, $B.helper_functions.NEW_TYPE_COMMENT ( p, tc ), EXTRA )); - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_with_stmt - if (p.error_indicator) { - return NULL; - } - var invalid_with_stmt_var; - if ( - (invalid_with_stmt_var = invalid_with_stmt_rule(p)) // invalid_with_stmt - ) - { - _res = invalid_with_stmt_var; - return done(); + var invalid_with_stmt_var; + if ( + (invalid_with_stmt_var = invalid_with_stmt_rule(p)) // invalid_with_stmt + ) + { + _res = invalid_with_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // with_item: @@ -4638,62 +4694,63 @@ function with_item_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expression 'as' star_target &(',' | ')' | ':') - if (p.error_indicator) { - return NULL; - } - var _keyword; - var e; - var t; - if ( - (e = expression_rule(p)) // expression - && - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (t = star_target_rule(p)) // star_target - && - $B._PyPegen.lookahead(1, _tmp_59_rule, p) - ) - { - _res = new $B._PyAST.withitem (e, t, p.arena); - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_with_item - if (p.error_indicator) { - return NULL; - } - var invalid_with_item_var; - if ( - (invalid_with_item_var = invalid_with_item_rule(p)) // invalid_with_item - ) - { - _res = invalid_with_item_var; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expression 'as' star_target &(',' | ')' | ':') + if (p.error_indicator) { + return NULL; + } + var _keyword; + var e; + var t; + if ( + (e = expression_rule(p)) // expression + && + (_keyword = 
$B._PyPegen.expect_token(p, 640)) // token='as' + && + (t = star_target_rule(p)) // star_target + && + $B._PyPegen.lookahead(1, _tmp_59_rule, p) + ) + { + _res = new $B._PyAST.withitem (e, t, p.arena); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // expression - if (p.error_indicator) { - return NULL; + if (p.call_invalid_rules) { // invalid_with_item + if (p.error_indicator) { + return NULL; + } + var invalid_with_item_var; + if ( + (invalid_with_item_var = invalid_with_item_rule(p)) // invalid_with_item + ) + { + _res = invalid_with_item_var; + break; + } + p.mark = _mark; } - var e; - if ( - (e = expression_rule(p)) // expression - ) - { - _res = new $B._PyAST.withitem (e, $B.parser_constants.NULL, p.arena); - return done(); + { // expression + if (p.error_indicator) { + return NULL; + } + var e; + if ( + (e = expression_rule(p)) // expression + ) + { + _res = new $B._PyAST.withitem (e, $B.parser_constants.NULL, p.arena); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // try_stmt: @@ -4706,132 +4763,133 @@ function try_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_try_stmt - if (p.error_indicator) { - return NULL; - } - var invalid_try_stmt_var; - if ( - (invalid_try_stmt_var = invalid_try_stmt_rule(p)) // invalid_try_stmt - ) - { - _res = invalid_try_stmt_var; - return done(); - } - p.mark = _mark; - } - { // 'try' &&':' block finally_block - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var _literal; - var b; - var f; - if ( - (_keyword = $B._PyPegen.expect_token(p, 624)) // token='try' - && - (_literal = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' - && - (b = block_rule(p)) // block - && - (f = finally_block_rule(p)) // finally_block - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_try_stmt + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Try (b, $B.parser_constants.NULL, $B.parser_constants.NULL, f, EXTRA); - return done(); - } - p.mark = _mark; - } - { // 'try' &&':' block except_block+ else_block? finally_block? - if (p.error_indicator) { - return NULL; + var invalid_try_stmt_var; + if ( + (invalid_try_stmt_var = invalid_try_stmt_rule(p)) // invalid_try_stmt + ) + { + _res = invalid_try_stmt_var; + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var b; - var el; - var ex; - var f; - if ( - (_keyword = $B._PyPegen.expect_token(p, 624)) // token='try' - && - (_literal = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' - && - (b = block_rule(p)) // block - && - (ex = _loop1_60_rule(p)) // except_block+ - && - (el = else_block_rule(p), !p.error_indicator) // else_block? - && - (f = finally_block_rule(p), !p.error_indicator) // finally_block? 
- ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'try' &&':' block finally_block + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Try (b, ex, el, f, EXTRA); - return done(); + var _keyword; + var _literal; + var b; + var f; + if ( + (_keyword = $B._PyPegen.expect_token(p, 624)) // token='try' + && + (_literal = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' + && + (b = block_rule(p)) // block + && + (f = finally_block_rule(p)) // finally_block + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Try (b, $B.parser_constants.NULL, $B.parser_constants.NULL, f, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'try' &&':' block except_star_block+ else_block? finally_block? - if (p.error_indicator) { - return NULL; + { // 'try' &&':' block except_block+ else_block? finally_block? + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var b; + var el; + var ex; + var f; + if ( + (_keyword = $B._PyPegen.expect_token(p, 624)) // token='try' + && + (_literal = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' + && + (b = block_rule(p)) // block + && + (ex = _loop1_60_rule(p)) // except_block+ + && + (el = else_block_rule(p), !p.error_indicator) // else_block? + && + (f = finally_block_rule(p), !p.error_indicator) // finally_block? + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Try (b, ex, el, f, EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var b; - var el; - var ex; - var f; - if ( - (_keyword = $B._PyPegen.expect_token(p, 624)) // token='try' - && - (_literal = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' - && - (b = block_rule(p)) // block - && - (ex = _loop1_61_rule(p)) // except_star_block+ - && - (el = else_block_rule(p), !p.error_indicator) // else_block? - && - (f = finally_block_rule(p), !p.error_indicator) // finally_block? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'try' &&':' block except_star_block+ else_block? finally_block? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 11, "Exception groups are", new $B._PyAST.TryStar ( b, ex, el, f, EXTRA )); - return done(); + var _keyword; + var _literal; + var b; + var el; + var ex; + var f; + if ( + (_keyword = $B._PyPegen.expect_token(p, 624)) // token='try' + && + (_literal = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' + && + (b = block_rule(p)) // block + && + (ex = _loop1_61_rule(p)) // except_star_block+ + && + (el = else_block_rule(p), !p.error_indicator) // else_block? + && + (f = finally_block_rule(p), !p.error_indicator) // finally_block? 
+ ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 11, "Exception groups are", new $B._PyAST.TryStar ( b, ex, el, f, EXTRA )); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // except_block: @@ -4844,105 +4902,106 @@ function except_block_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_except_stmt_indent - if (p.error_indicator) { - return NULL; - } - var invalid_except_stmt_indent_var; - if ( - (invalid_except_stmt_indent_var = invalid_except_stmt_indent_rule(p)) // invalid_except_stmt_indent - ) - { - _res = invalid_except_stmt_indent_var; - return done(); - } - p.mark = _mark; - } - { // 'except' expression ['as' NAME] ':' block - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var _literal; - var b; - var e; - var t; - if ( - (_keyword = $B._PyPegen.expect_token(p, 637)) // token='except' - && - (e = expression_rule(p)) // expression - && - (t = _tmp_62_rule(p), !p.error_indicator) // ['as' NAME] - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_except_stmt_indent + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.ExceptHandler (e, ( t ) ? ( t ). 
id : $B.parser_constants.NULL, b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // 'except' ':' block - if (p.error_indicator) { - return NULL; + var invalid_except_stmt_indent_var; + if ( + (invalid_except_stmt_indent_var = invalid_except_stmt_indent_rule(p)) // invalid_except_stmt_indent + ) + { + _res = invalid_except_stmt_indent_var; + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var b; - if ( - (_keyword = $B._PyPegen.expect_token(p, 637)) // token='except' - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'except' expression ['as' NAME] ':' block + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.ExceptHandler ($B.parser_constants.NULL, $B.parser_constants.NULL, b, EXTRA); - return done(); + var _keyword; + var _literal; + var b; + var e; + var t; + if ( + (_keyword = $B._PyPegen.expect_token(p, 637)) // token='except' + && + (e = expression_rule(p)) // expression + && + (t = _tmp_62_rule(p), !p.error_indicator) // ['as' NAME] + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.ExceptHandler (e, ( t ) ? ( t ). id : $B.parser_constants.NULL, b, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_except_stmt - if (p.error_indicator) { - return NULL; + { // 'except' ':' block + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var b; + if ( + (_keyword = $B._PyPegen.expect_token(p, 637)) // token='except' + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.ExceptHandler ($B.parser_constants.NULL, $B.parser_constants.NULL, b, EXTRA); + break; + } + p.mark = _mark; } - var invalid_except_stmt_var; - if ( - (invalid_except_stmt_var = invalid_except_stmt_rule(p)) // invalid_except_stmt - ) - { - _res = invalid_except_stmt_var; - return done(); + if (p.call_invalid_rules) { // invalid_except_stmt + if (p.error_indicator) { + return NULL; + } + var invalid_except_stmt_var; + if ( + (invalid_except_stmt_var = invalid_except_stmt_rule(p)) // invalid_except_stmt + ) + { + _res = invalid_except_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // except_star_block: @@ -4954,82 +5013,83 @@ function except_star_block_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_except_star_stmt_indent - if (p.error_indicator) { - return NULL; - } - var invalid_except_star_stmt_indent_var; - if ( - (invalid_except_star_stmt_indent_var = 
invalid_except_star_stmt_indent_rule(p)) // invalid_except_star_stmt_indent - ) - { - _res = invalid_except_star_stmt_indent_var; - return done(); - } - p.mark = _mark; - } - { // 'except' '*' expression ['as' NAME] ':' block - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var _literal; - var _literal_1; - var b; - var e; - var t; - if ( - (_keyword = $B._PyPegen.expect_token(p, 637)) // token='except' - && - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (e = expression_rule(p)) // expression - && - (t = _tmp_63_rule(p), !p.error_indicator) // ['as' NAME] - && - (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_except_star_stmt_indent + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.ExceptHandler (e, ( t ) ? ( t ). id : $B.parser_constants.NULL, b, EXTRA); - return done(); + var invalid_except_star_stmt_indent_var; + if ( + (invalid_except_star_stmt_indent_var = invalid_except_star_stmt_indent_rule(p)) // invalid_except_star_stmt_indent + ) + { + _res = invalid_except_star_stmt_indent_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_except_stmt - if (p.error_indicator) { - return NULL; + { // 'except' '*' expression ['as' NAME] ':' block + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var _literal_1; + var b; + var e; + var t; + if ( + (_keyword = $B._PyPegen.expect_token(p, 637)) // token='except' + && + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (e = expression_rule(p)) // expression + && + (t = _tmp_63_rule(p), !p.error_indicator) // ['as' NAME] + && + (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.ExceptHandler (e, ( t ) ? ( t ). 
id : $B.parser_constants.NULL, b, EXTRA); + break; + } + p.mark = _mark; } - var invalid_except_stmt_var; - if ( - (invalid_except_stmt_var = invalid_except_stmt_rule(p)) // invalid_except_stmt - ) - { - _res = invalid_except_stmt_var; - return done(); + if (p.call_invalid_rules) { // invalid_except_stmt + if (p.error_indicator) { + return NULL; + } + var invalid_except_stmt_var; + if ( + (invalid_except_stmt_var = invalid_except_stmt_rule(p)) // invalid_except_stmt + ) + { + _res = invalid_except_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // finally_block: invalid_finally_stmt | 'finally' &&':' block @@ -5038,46 +5098,47 @@ function finally_block_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.call_invalid_rules) { // invalid_finally_stmt - if (p.error_indicator) { - return NULL; - } - var invalid_finally_stmt_var; - if ( - (invalid_finally_stmt_var = invalid_finally_stmt_rule(p)) // invalid_finally_stmt - ) - { - _res = invalid_finally_stmt_var; - return done(); - } - p.mark = _mark; - } - { // 'finally' &&':' block - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.call_invalid_rules) { // invalid_finally_stmt + if (p.error_indicator) { + return NULL; + } + var invalid_finally_stmt_var; + if ( + (invalid_finally_stmt_var = invalid_finally_stmt_rule(p)) // invalid_finally_stmt + ) + { + _res = invalid_finally_stmt_var; + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 633)) // token='finally' - && - (_literal = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' - && - (a = block_rule(p)) // block - ) - { - _res = a; - return done(); + { // 'finally' &&':' block + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 633)) // token='finally' + && + (_literal = $B._PyPegen.expect_forced_token(p, 11, ":")) // forced_token=':' + && + (a = block_rule(p)) // block + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // match_stmt: @@ -5088,71 +5149,72 @@ function match_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // "match" subject_expr ':' NEWLINE INDENT case_block+ DEDENT - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var _literal; - var cases; - var dedent_var; - var indent_var; - var newline_var; - var subject; - if ( - (_keyword = $B._PyPegen.expect_soft_keyword(p, "match")) // soft_keyword='"match"' - && - (subject = subject_expr_rule(p)) // subject_expr - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - (indent_var = $B._PyPegen.expect_token(p, INDENT)) // token='INDENT' - && - (cases = _loop1_64_rule(p)) // case_block+ - && - (dedent_var = $B._PyPegen.expect_token(p, DEDENT)) // token='DEDENT' - ) - { - var _token = 
$B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // "match" subject_expr ':' NEWLINE INDENT case_block+ DEDENT + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 10, "Pattern matching is", new $B._PyAST.Match ( subject, cases, EXTRA )); - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_match_stmt - if (p.error_indicator) { - return NULL; + var _keyword; + var _literal; + var cases; + var dedent_var; + var indent_var; + var newline_var; + var subject; + if ( + (_keyword = $B._PyPegen.expect_soft_keyword(p, "match")) // soft_keyword='"match"' + && + (subject = subject_expr_rule(p)) // subject_expr + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + (indent_var = $B._PyPegen.expect_token(p, INDENT)) // token='INDENT' + && + (cases = _loop1_64_rule(p)) // case_block+ + && + (dedent_var = $B._PyPegen.expect_token(p, DEDENT)) // token='DEDENT' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 10, "Pattern matching is", new $B._PyAST.Match ( subject, cases, EXTRA )); + break; + } + p.mark = _mark; } - var invalid_match_stmt_var; - if ( - (invalid_match_stmt_var = invalid_match_stmt_rule(p)) // invalid_match_stmt - ) - { - _res = invalid_match_stmt_var; - return done(); + if (p.call_invalid_rules) { // invalid_match_stmt + if (p.error_indicator) { + return NULL; + } + var invalid_match_stmt_var; + if ( + (invalid_match_stmt_var = invalid_match_stmt_rule(p)) // invalid_match_stmt + ) + { + _res = invalid_match_stmt_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // subject_expr: star_named_expression ',' star_named_expressions? | named_expression @@ -5161,59 +5223,60 @@ function subject_expr_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // star_named_expression ',' star_named_expressions? - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var value; - var values; - if ( - (value = star_named_expression_rule(p)) // star_named_expression - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (values = star_named_expressions_rule(p), !p.error_indicator) // star_named_expressions? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // star_named_expression ',' star_named_expressions? 
+ if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Tuple ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_insert_in_front ( p, value, values ) ), $B.parser_constants.Load, EXTRA); - return done(); - } - p.mark = _mark; - } - { // named_expression - if (p.error_indicator) { - return NULL; + var _literal; + var value; + var values; + if ( + (value = star_named_expression_rule(p)) // star_named_expression + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (values = star_named_expressions_rule(p), !p.error_indicator) // star_named_expressions? + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Tuple ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_insert_in_front ( p, value, values ) ), $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - var named_expression_var; - if ( - (named_expression_var = named_expression_rule(p)) // named_expression - ) - { - _res = named_expression_var; - return done(); + { // named_expression + if (p.error_indicator) { + return NULL; + } + var named_expression_var; + if ( + (named_expression_var = named_expression_rule(p)) // named_expression + ) + { + _res = named_expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // case_block: invalid_case_block | "case" patterns guard? ':' block @@ -5222,52 +5285,53 @@ function case_block_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.call_invalid_rules) { // invalid_case_block - if (p.error_indicator) { - return NULL; - } - var invalid_case_block_var; - if ( - (invalid_case_block_var = invalid_case_block_rule(p)) // invalid_case_block - ) - { - _res = invalid_case_block_var; - return done(); - } - p.mark = _mark; - } - { // "case" patterns guard? ':' block - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.call_invalid_rules) { // invalid_case_block + if (p.error_indicator) { + return NULL; + } + var invalid_case_block_var; + if ( + (invalid_case_block_var = invalid_case_block_rule(p)) // invalid_case_block + ) + { + _res = invalid_case_block_var; + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var body; - var guard; - var pattern; - if ( - (_keyword = $B._PyPegen.expect_soft_keyword(p, "case")) // soft_keyword='"case"' - && - (pattern = patterns_rule(p)) // patterns - && - (guard = guard_rule(p), !p.error_indicator) // guard? - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (body = block_rule(p)) // block - ) - { - _res = new $B._PyAST.match_case (pattern, guard, body, p.arena); - return done(); + { // "case" patterns guard? ':' block + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var body; + var guard; + var pattern; + if ( + (_keyword = $B._PyPegen.expect_soft_keyword(p, "case")) // soft_keyword='"case"' + && + (pattern = patterns_rule(p)) // patterns + && + (guard = guard_rule(p), !p.error_indicator) // guard? 
+ && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (body = block_rule(p)) // block + ) + { + _res = new $B._PyAST.match_case (pattern, guard, body, p.arena); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // guard: 'if' named_expression @@ -5276,29 +5340,30 @@ function guard_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'if' named_expression - if (p.error_indicator) { - return NULL; - } - var _keyword; - var guard; - if ( - (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' - && - (guard = named_expression_rule(p)) // named_expression - ) - { - _res = guard; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'if' named_expression + if (p.error_indicator) { + return NULL; + } + var _keyword; + var guard; + if ( + (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' + && + (guard = named_expression_rule(p)) // named_expression + ) + { + _res = guard; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // patterns: open_sequence_pattern | pattern @@ -5307,53 +5372,54 @@ function patterns_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // open_sequence_pattern - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var patterns; - if ( - (patterns = open_sequence_pattern_rule(p)) // open_sequence_pattern - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // open_sequence_pattern + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchSequence (patterns, EXTRA); - return done(); - } - p.mark = _mark; - } - { // pattern - if (p.error_indicator) { - return NULL; + var patterns; + if ( + (patterns = open_sequence_pattern_rule(p)) // open_sequence_pattern + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchSequence (patterns, EXTRA); + break; + } + p.mark = _mark; } - var pattern_var; - if ( - (pattern_var = pattern_rule(p)) // pattern - ) - { - _res = pattern_var; - return done(); + { // pattern + if (p.error_indicator) { + return NULL; + } + var pattern_var; + if ( + (pattern_var = pattern_rule(p)) // pattern + ) + { + _res = pattern_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // pattern: as_pattern | or_pattern @@ -5362,40 +5428,41 @@ function pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // as_pattern - if (p.error_indicator) { - return NULL; - } - var as_pattern_var; - if ( - (as_pattern_var = as_pattern_rule(p)) // as_pattern - ) - { - _res = as_pattern_var; - return done(); - } - 
p.mark = _mark; - } - { // or_pattern - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // as_pattern + if (p.error_indicator) { + return NULL; + } + var as_pattern_var; + if ( + (as_pattern_var = as_pattern_rule(p)) // as_pattern + ) + { + _res = as_pattern_var; + break; + } + p.mark = _mark; } - var or_pattern_var; - if ( - (or_pattern_var = or_pattern_rule(p)) // or_pattern - ) - { - _res = or_pattern_var; - return done(); + { // or_pattern + if (p.error_indicator) { + return NULL; + } + var or_pattern_var; + if ( + (or_pattern_var = or_pattern_rule(p)) // or_pattern + ) + { + _res = or_pattern_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // as_pattern: or_pattern 'as' pattern_capture_target | invalid_as_pattern @@ -5404,59 +5471,60 @@ function as_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // or_pattern 'as' pattern_capture_target - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var pattern; - var target; - if ( - (pattern = or_pattern_rule(p)) // or_pattern - && - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (target = pattern_capture_target_rule(p)) // pattern_capture_target - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // or_pattern 'as' pattern_capture_target + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchAs (pattern, target. id, EXTRA); - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_as_pattern - if (p.error_indicator) { - return NULL; + var _keyword; + var pattern; + var target; + if ( + (pattern = or_pattern_rule(p)) // or_pattern + && + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (target = pattern_capture_target_rule(p)) // pattern_capture_target + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchAs (pattern, target. 
id, EXTRA); + break; + } + p.mark = _mark; } - var invalid_as_pattern_var; - if ( - (invalid_as_pattern_var = invalid_as_pattern_rule(p)) // invalid_as_pattern - ) - { - _res = invalid_as_pattern_var; - return done(); + if (p.call_invalid_rules) { // invalid_as_pattern + if (p.error_indicator) { + return NULL; + } + var invalid_as_pattern_var; + if ( + (invalid_as_pattern_var = invalid_as_pattern_rule(p)) // invalid_as_pattern + ) + { + _res = invalid_as_pattern_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // or_pattern: '|'.closed_pattern+ @@ -5465,39 +5533,40 @@ function or_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '|'.closed_pattern+ - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var patterns; - if ( - (patterns = _gather_65_rule(p)) // '|'.closed_pattern+ - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '|'.closed_pattern+ + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.asdl_seq_LEN (patterns ) == 1 ? $B.helper_functions.asdl_seq_GET ( patterns, 0 ) : new $B._PyAST.MatchOr ( patterns, EXTRA); - return done(); + var patterns; + if ( + (patterns = _gather_65_rule(p)) // '|'.closed_pattern+ + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.asdl_seq_LEN (patterns ) == 1 ? 
$B.helper_functions.asdl_seq_GET ( patterns, 0 ) : new $B._PyAST.MatchOr ( patterns, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // closed_pattern: @@ -5514,129 +5583,130 @@ function closed_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, closed_pattern_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - { // literal_pattern - if (p.error_indicator) { - return NULL; - } - var literal_pattern_var; - if ( - (literal_pattern_var = literal_pattern_rule(p)) // literal_pattern - ) - { - _res = literal_pattern_var; - return done(); - } - p.mark = _mark; - } - { // capture_pattern - if (p.error_indicator) { - return NULL; - } - var capture_pattern_var; - if ( - (capture_pattern_var = capture_pattern_rule(p)) // capture_pattern - ) - { - _res = capture_pattern_var; - return done(); - } - p.mark = _mark; - } - { // wildcard_pattern - if (p.error_indicator) { - return NULL; - } - var wildcard_pattern_var; - if ( - (wildcard_pattern_var = wildcard_pattern_rule(p)) // wildcard_pattern - ) - { - _res = wildcard_pattern_var; - return done(); - } - p.mark = _mark; - } - { // value_pattern - if (p.error_indicator) { - return NULL; - } - var value_pattern_var; - if ( - (value_pattern_var = value_pattern_rule(p)) // value_pattern - ) - { - _res = value_pattern_var; - return done(); - } - p.mark = _mark; - } - { // group_pattern - if (p.error_indicator) { - return NULL; + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, closed_pattern_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + { // literal_pattern + if (p.error_indicator) { + return NULL; + } + var literal_pattern_var; + if ( + (literal_pattern_var = literal_pattern_rule(p)) // literal_pattern + ) + { + _res = literal_pattern_var; + break; + } + p.mark = _mark; } - var group_pattern_var; - if ( - (group_pattern_var = group_pattern_rule(p)) // group_pattern - ) - { - _res = group_pattern_var; - return done(); + { // capture_pattern + if (p.error_indicator) { + return NULL; + } + var capture_pattern_var; + if ( + (capture_pattern_var = capture_pattern_rule(p)) // capture_pattern + ) + { + _res = capture_pattern_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // sequence_pattern - if (p.error_indicator) { - return NULL; + { // wildcard_pattern + if (p.error_indicator) { + return NULL; + } + var wildcard_pattern_var; + if ( + (wildcard_pattern_var = wildcard_pattern_rule(p)) // wildcard_pattern + ) + { + _res = wildcard_pattern_var; + break; + } + p.mark = _mark; } - var sequence_pattern_var; - if ( - (sequence_pattern_var = sequence_pattern_rule(p)) // sequence_pattern - ) - { - _res = sequence_pattern_var; - return done(); + { // value_pattern + if (p.error_indicator) { + return NULL; + } + var value_pattern_var; + if ( + (value_pattern_var = value_pattern_rule(p)) // value_pattern + ) + { + _res = value_pattern_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // mapping_pattern - if (p.error_indicator) { - return NULL; + { // group_pattern + if (p.error_indicator) { + return NULL; + } + var group_pattern_var; + if ( + (group_pattern_var = group_pattern_rule(p)) // group_pattern + ) + { + _res = group_pattern_var; + break; + } + p.mark = _mark; } - var mapping_pattern_var; - if ( - (mapping_pattern_var = mapping_pattern_rule(p)) // mapping_pattern - ) - { - _res = mapping_pattern_var; - 
return done(); + { // sequence_pattern + if (p.error_indicator) { + return NULL; + } + var sequence_pattern_var; + if ( + (sequence_pattern_var = sequence_pattern_rule(p)) // sequence_pattern + ) + { + _res = sequence_pattern_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // class_pattern - if (p.error_indicator) { - return NULL; + { // mapping_pattern + if (p.error_indicator) { + return NULL; + } + var mapping_pattern_var; + if ( + (mapping_pattern_var = mapping_pattern_rule(p)) // mapping_pattern + ) + { + _res = mapping_pattern_var; + break; + } + p.mark = _mark; } - var class_pattern_var; - if ( - (class_pattern_var = class_pattern_rule(p)) // class_pattern - ) - { - _res = class_pattern_var; - return done(); + { // class_pattern + if (p.error_indicator) { + return NULL; + } + var class_pattern_var; + if ( + (class_pattern_var = class_pattern_rule(p)) // class_pattern + ) + { + _res = class_pattern_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, closed_pattern_type, _res); return _res; - } } // literal_pattern: @@ -5651,141 +5721,142 @@ function literal_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // signed_number !('+' | '-') - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var value; - if ( - (value = signed_number_rule(p)) // signed_number - && - $B._PyPegen.lookahead(0, _tmp_67_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // signed_number !('+' | '-') + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchValue (value, EXTRA); - return done(); + var value; + if ( + (value = signed_number_rule(p)) // signed_number + && + $B._PyPegen.lookahead(0, _tmp_67_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchValue (value, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // complex_number - if (p.error_indicator) { - return NULL; - } - var value; - if ( - (value = complex_number_rule(p)) // complex_number - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // complex_number + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchValue (value, EXTRA); - return done(); - } - p.mark = _mark; - } - { // strings - if (p.error_indicator) { - return NULL; + var value; + if ( + (value = complex_number_rule(p)) // complex_number + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchValue (value, EXTRA); + 
break; + } + p.mark = _mark; } - var value; - if ( - (value = strings_rule(p)) // strings - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // strings + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchValue (value, EXTRA); - return done(); - } - p.mark = _mark; - } - { // 'None' - if (p.error_indicator) { - return NULL; + var value; + if ( + (value = strings_rule(p)) // strings + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchValue (value, EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 602)) // token='None' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'None' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchSingleton ($B.parser_constants.Py_None, EXTRA); - return done(); - } - p.mark = _mark; - } - { // 'True' - if (p.error_indicator) { - return NULL; + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 602)) // token='None' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchSingleton ($B.parser_constants.Py_None, EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 601)) // token='True' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'True' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchSingleton ($B.parser_constants.Py_True, EXTRA); - return done(); - } - p.mark = _mark; - } - { // 'False' - if (p.error_indicator) { - return NULL; + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 601)) // token='True' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchSingleton ($B.parser_constants.Py_True, EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 603)) // token='False' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'False' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchSingleton ($B.parser_constants.Py_False, EXTRA); - return done(); + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 603)) // token='False' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchSingleton ($B.parser_constants.Py_False, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // literal_expr: @@ -5800,123 
+5871,124 @@ function literal_expr_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // signed_number !('+' | '-') - if (p.error_indicator) { - return NULL; - } - var signed_number_var; - if ( - (signed_number_var = signed_number_rule(p)) // signed_number - && - $B._PyPegen.lookahead(0, _tmp_68_rule, p) - ) - { - _res = signed_number_var; - return done(); - } - p.mark = _mark; - } - { // complex_number - if (p.error_indicator) { - return NULL; - } - var complex_number_var; - if ( - (complex_number_var = complex_number_rule(p)) // complex_number - ) - { - _res = complex_number_var; - return done(); - } - p.mark = _mark; - } - { // strings - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var strings_var; - if ( - (strings_var = strings_rule(p)) // strings - ) - { - _res = strings_var; - return done(); - } - p.mark = _mark; - } - { // 'None' - if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // signed_number !('+' | '-') + if (p.error_indicator) { + return NULL; + } + var signed_number_var; + if ( + (signed_number_var = signed_number_rule(p)) // signed_number + && + $B._PyPegen.lookahead(0, _tmp_68_rule, p) + ) + { + _res = signed_number_var; + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 602)) // token='None' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // complex_number + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Constant ($B.parser_constants.Py_None, $B.parser_constants.NULL, EXTRA); - return done(); + var complex_number_var; + if ( + (complex_number_var = complex_number_rule(p)) // complex_number + ) + { + _res = complex_number_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'True' - if (p.error_indicator) { - return NULL; + { // strings + if (p.error_indicator) { + return NULL; + } + var strings_var; + if ( + (strings_var = strings_rule(p)) // strings + ) + { + _res = strings_var; + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 601)) // token='True' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'None' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Constant ($B.parser_constants.Py_True, $B.parser_constants.NULL, EXTRA); - return done(); + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 602)) // token='None' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Constant ($B.parser_constants.Py_None, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'False' - if (p.error_indicator) { - return NULL; + { // 'True' + if (p.error_indicator) { + return NULL; 
+ } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 601)) // token='True' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Constant ($B.parser_constants.Py_True, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 603)) // token='False' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'False' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Constant ($B.parser_constants.Py_False, $B.parser_constants.NULL, EXTRA); - return done(); + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 603)) // token='False' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Constant ($B.parser_constants.Py_False, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // complex_number: @@ -5927,71 +5999,72 @@ function complex_number_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // signed_real_number '+' imaginary_number - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var imag; - var real; - if ( - (real = signed_real_number_rule(p)) // signed_real_number - && - (_literal = $B._PyPegen.expect_token(p, 14)) // token='+' - && - (imag = imaginary_number_rule(p)) // imaginary_number - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // signed_real_number '+' imaginary_number + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (real, new $B.ast.Add(), imag, EXTRA); - return done(); - } - p.mark = _mark; - } - { // signed_real_number '-' imaginary_number - if (p.error_indicator) { - return NULL; + var _literal; + var imag; + var real; + if ( + (real = signed_real_number_rule(p)) // signed_real_number + && + (_literal = $B._PyPegen.expect_token(p, 14)) // token='+' + && + (imag = imaginary_number_rule(p)) // imaginary_number + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (real, new $B.ast.Add(), imag, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var imag; - var real; - if ( - (real = signed_real_number_rule(p)) // signed_real_number - && - (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' - && - (imag = imaginary_number_rule(p)) // imaginary_number - ) - { 
- var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // signed_real_number '-' imaginary_number + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (real, new $B.ast.Sub(), imag, EXTRA); - return done(); + var _literal; + var imag; + var real; + if ( + (real = signed_real_number_rule(p)) // signed_real_number + && + (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' + && + (imag = imaginary_number_rule(p)) // imaginary_number + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (real, new $B.ast.Sub(), imag, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // signed_number: NUMBER | '-' NUMBER @@ -6000,56 +6073,57 @@ function signed_number_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // NUMBER - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var number_var; - if ( - (number_var = $B._PyPegen.number_token(p)) // NUMBER - ) - { - _res = number_var; - return done(); - } - p.mark = _mark; - } - { // '-' NUMBER - if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // NUMBER + if (p.error_indicator) { + return NULL; + } + var number_var; + if ( + (number_var = $B._PyPegen.number_token(p)) // NUMBER + ) + { + _res = number_var; + break; + } + p.mark = _mark; } - var _literal; - var number; - if ( - (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' - && - (number = $B._PyPegen.number_token(p)) // NUMBER - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '-' NUMBER + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.UnaryOp (new $B.ast.USub(), number, EXTRA); - return done(); + var _literal; + var number; + if ( + (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' + && + (number = $B._PyPegen.number_token(p)) // NUMBER + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.UnaryOp (new $B.ast.USub(), number, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // signed_real_number: real_number | '-' real_number @@ -6058,56 +6132,57 @@ function signed_real_number_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // real_number - if (p.error_indicator) 
{ + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var real_number_var; - if ( - (real_number_var = real_number_rule(p)) // real_number - ) - { - _res = real_number_var; - return done(); - } - p.mark = _mark; - } - { // '-' real_number - if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // real_number + if (p.error_indicator) { + return NULL; + } + var real_number_var; + if ( + (real_number_var = real_number_rule(p)) // real_number + ) + { + _res = real_number_var; + break; + } + p.mark = _mark; } - var _literal; - var real; - if ( - (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' - && - (real = real_number_rule(p)) // real_number - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '-' real_number + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.UnaryOp (new $B.ast.USub(), real, EXTRA); - return done(); + var _literal; + var real; + if ( + (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' + && + (real = real_number_rule(p)) // real_number + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.UnaryOp (new $B.ast.USub(), real, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // real_number: NUMBER @@ -6116,26 +6191,27 @@ function real_number_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // NUMBER - if (p.error_indicator) { - return NULL; - } - var real; - if ( - (real = $B._PyPegen.number_token(p)) // NUMBER - ) - { - _res = $B._PyPegen.ensure_real (p, real); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // NUMBER + if (p.error_indicator) { + return NULL; + } + var real; + if ( + (real = $B._PyPegen.number_token(p)) // NUMBER + ) + { + _res = $B._PyPegen.ensure_real (p, real); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // imaginary_number: NUMBER @@ -6144,26 +6220,27 @@ function imaginary_number_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // NUMBER - if (p.error_indicator) { - return NULL; - } - var imag; - if ( - (imag = $B._PyPegen.number_token(p)) // NUMBER - ) - { - _res = $B._PyPegen.ensure_imaginary (p, imag); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // NUMBER + if (p.error_indicator) { + return NULL; + } + var imag; + if ( + (imag = $B._PyPegen.number_token(p)) // NUMBER + ) + { + _res = $B._PyPegen.ensure_imaginary (p, imag); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // capture_pattern: pattern_capture_target @@ -6172,39 +6249,40 @@ function capture_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = 
p.tokens[_mark].col_offset; - { // pattern_capture_target - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var target; - if ( - (target = pattern_capture_target_rule(p)) // pattern_capture_target - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // pattern_capture_target + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchAs ($B.parser_constants.NULL, target. id, EXTRA); - return done(); + var target; + if ( + (target = pattern_capture_target_rule(p)) // pattern_capture_target + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchAs ($B.parser_constants.NULL, target. id, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // pattern_capture_target: !"_" NAME !('.' | '(' | '=') @@ -6213,30 +6291,31 @@ function pattern_capture_target_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // !"_" NAME !('.' | '(' | '=') - if (p.error_indicator) { - return NULL; - } - var name; - if ( - $B._PyPegen.lookahead_with_string(0, $B._PyPegen.expect_soft_keyword, p, "_") - && - (name = $B._PyPegen.name_token(p)) // NAME - && - $B._PyPegen.lookahead(0, _tmp_69_rule, p) - ) - { - _res = $B._PyPegen.set_expr_context (p, name, $B.parser_constants.Store); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // !"_" NAME !('.' 
| '(' | '=') + if (p.error_indicator) { + return NULL; + } + var name; + if ( + $B._PyPegen.lookahead_with_string(0, $B._PyPegen.expect_soft_keyword, p, "_") + && + (name = $B._PyPegen.name_token(p)) // NAME + && + $B._PyPegen.lookahead(0, _tmp_69_rule, p) + ) + { + _res = $B._PyPegen.set_expr_context (p, name, $B.parser_constants.Store); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // wildcard_pattern: "_" @@ -6245,39 +6324,40 @@ function wildcard_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // "_" - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_soft_keyword(p, "_")) // soft_keyword='"_"' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // "_" + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchAs ($B.parser_constants.NULL, $B.parser_constants.NULL, EXTRA); - return done(); + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_soft_keyword(p, "_")) // soft_keyword='"_"' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchAs ($B.parser_constants.NULL, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // value_pattern: attr !('.' | '(' | '=') @@ -6286,41 +6366,42 @@ function value_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // attr !('.' | '(' | '=') - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var attr; - if ( - (attr = attr_rule(p)) // attr - && - $B._PyPegen.lookahead(0, _tmp_70_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // attr !('.' 
| '(' | '=') + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchValue (attr, EXTRA); - return done(); + var attr; + if ( + (attr = attr_rule(p)) // attr + && + $B._PyPegen.lookahead(0, _tmp_70_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchValue (attr, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // Left-recursive @@ -6358,45 +6439,46 @@ function attr_raw(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // name_or_attr '.' NAME - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var attr; - var value; - if ( - (value = name_or_attr_rule(p)) // name_or_attr - && - (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' - && - (attr = $B._PyPegen.name_token(p)) // NAME - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // name_or_attr '.' NAME + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Attribute (value, attr. id, $B.parser_constants.Load, EXTRA); - return done(); + var _literal; + var attr; + var value; + if ( + (value = name_or_attr_rule(p)) // name_or_attr + && + (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' + && + (attr = $B._PyPegen.name_token(p)) // NAME + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Attribute (value, attr. 
id, $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // Left-recursive @@ -6406,40 +6488,41 @@ function name_or_attr_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // attr - if (p.error_indicator) { - return NULL; - } - var attr_var; - if ( - (attr_var = attr_rule(p)) // attr - ) - { - _res = attr_var; - return done(); - } - p.mark = _mark; - } - { // NAME - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // attr + if (p.error_indicator) { + return NULL; + } + var attr_var; + if ( + (attr_var = attr_rule(p)) // attr + ) + { + _res = attr_var; + break; + } + p.mark = _mark; } - var name_var; - if ( - (name_var = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = name_var; - return done(); + { // NAME + if (p.error_indicator) { + return NULL; + } + var name_var; + if ( + (name_var = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = name_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // group_pattern: '(' pattern ')' @@ -6448,32 +6531,33 @@ function group_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '(' pattern ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var pattern; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (pattern = pattern_rule(p)) // pattern - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = pattern; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '(' pattern ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var pattern; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (pattern = pattern_rule(p)) // pattern + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = pattern; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // sequence_pattern: '[' maybe_sequence_pattern? ']' | '(' open_sequence_pattern? ')' @@ -6482,71 +6566,72 @@ function sequence_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '[' maybe_sequence_pattern? ']' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - var patterns; - if ( - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - && - (patterns = maybe_sequence_pattern_rule(p), !p.error_indicator) // maybe_sequence_pattern? - && - (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '[' maybe_sequence_pattern? 
']' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchSequence (patterns, EXTRA); - return done(); - } - p.mark = _mark; - } - { // '(' open_sequence_pattern? ')' - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var patterns; + if ( + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + && + (patterns = maybe_sequence_pattern_rule(p), !p.error_indicator) // maybe_sequence_pattern? + && + (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchSequence (patterns, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var patterns; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (patterns = open_sequence_pattern_rule(p), !p.error_indicator) // open_sequence_pattern? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '(' open_sequence_pattern? ')' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchSequence (patterns, EXTRA); - return done(); + var _literal; + var _literal_1; + var patterns; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (patterns = open_sequence_pattern_rule(p), !p.error_indicator) // open_sequence_pattern? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchSequence (patterns, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // open_sequence_pattern: maybe_star_pattern ',' maybe_sequence_pattern? @@ -6555,32 +6640,33 @@ function open_sequence_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // maybe_star_pattern ',' maybe_sequence_pattern? - if (p.error_indicator) { - return NULL; - } - var _literal; - var pattern; - var patterns; - if ( - (pattern = maybe_star_pattern_rule(p)) // maybe_star_pattern - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (patterns = maybe_sequence_pattern_rule(p), !p.error_indicator) // maybe_sequence_pattern? - ) - { - _res = $B._PyPegen.seq_insert_in_front (p, pattern, patterns); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // maybe_star_pattern ',' maybe_sequence_pattern? + if (p.error_indicator) { + return NULL; + } + var _literal; + var pattern; + var patterns; + if ( + (pattern = maybe_star_pattern_rule(p)) // maybe_star_pattern + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (patterns = maybe_sequence_pattern_rule(p), !p.error_indicator) // maybe_sequence_pattern? + ) + { + _res = $B._PyPegen.seq_insert_in_front (p, pattern, patterns); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // maybe_sequence_pattern: ','.maybe_star_pattern+ ','? 
@@ -6589,30 +6675,31 @@ function maybe_sequence_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.maybe_star_pattern+ ','? - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var patterns; - if ( - (patterns = _gather_71_rule(p)) // ','.maybe_star_pattern+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - ) - { - _res = patterns; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.maybe_star_pattern+ ','? + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var patterns; + if ( + (patterns = _gather_71_rule(p)) // ','.maybe_star_pattern+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + ) + { + _res = patterns; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // maybe_star_pattern: star_pattern | pattern @@ -6621,40 +6708,41 @@ function maybe_star_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // star_pattern - if (p.error_indicator) { - return NULL; - } - var star_pattern_var; - if ( - (star_pattern_var = star_pattern_rule(p)) // star_pattern - ) - { - _res = star_pattern_var; - return done(); - } - p.mark = _mark; - } - { // pattern - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // star_pattern + if (p.error_indicator) { + return NULL; + } + var star_pattern_var; + if ( + (star_pattern_var = star_pattern_rule(p)) // star_pattern + ) + { + _res = star_pattern_var; + break; + } + p.mark = _mark; } - var pattern_var; - if ( - (pattern_var = pattern_rule(p)) // pattern - ) - { - _res = pattern_var; - return done(); + { // pattern + if (p.error_indicator) { + return NULL; + } + var pattern_var; + if ( + (pattern_var = pattern_rule(p)) // pattern + ) + { + _res = pattern_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // star_pattern: '*' pattern_capture_target | '*' wildcard_pattern @@ -6663,70 +6751,71 @@ function star_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, star_pattern_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '*' pattern_capture_target - if (p.error_indicator) { + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, star_pattern_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var target; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (target = pattern_capture_target_rule(p)) // pattern_capture_target - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '*' pattern_capture_target + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = 
_token.end_col_offset; - _res = new $B._PyAST.MatchStar (target. id, EXTRA); - return done(); - } - p.mark = _mark; - } - { // '*' wildcard_pattern - if (p.error_indicator) { - return NULL; + var _literal; + var target; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (target = pattern_capture_target_rule(p)) // pattern_capture_target + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchStar (target. id, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var wildcard_pattern_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (wildcard_pattern_var = wildcard_pattern_rule(p)) // wildcard_pattern - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '*' wildcard_pattern + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchStar ($B.parser_constants.NULL, EXTRA); - return done(); + var _literal; + var wildcard_pattern_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (wildcard_pattern_var = wildcard_pattern_rule(p)) // wildcard_pattern + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchStar ($B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, star_pattern_type, _res); return _res; - } } // mapping_pattern: @@ -6739,138 +6828,139 @@ function mapping_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '{' '}' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '{' '}' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchMapping ($B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, EXTRA); - return done(); - } - p.mark = _mark; - } - { // '{' double_star_pattern ','? 
'}' - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchMapping ($B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var rest; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (rest = double_star_pattern_rule(p)) // double_star_pattern - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '{' double_star_pattern ','? '}' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchMapping ($B.parser_constants.NULL, $B.parser_constants.NULL, rest. id, EXTRA); - return done(); - } - p.mark = _mark; - } - { // '{' items_pattern ',' double_star_pattern ','? '}' - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var rest; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (rest = double_star_pattern_rule(p)) // double_star_pattern + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + && + (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchMapping ($B.parser_constants.NULL, $B.parser_constants.NULL, rest. id, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var _literal_2; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var items; - var rest; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (items = items_pattern_rule(p)) // items_pattern - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (rest = double_star_pattern_rule(p)) // double_star_pattern - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - (_literal_2 = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '{' items_pattern ',' double_star_pattern ','? '}' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchMapping ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_pattern_keys ( p, items ) ), $B.helper_functions.CHECK ( $B.parser_constants.asdl_pattern_seq, $B._PyPegen.get_patterns ( p, items ) ), rest. id, EXTRA); - return done(); - } - p.mark = _mark; - } - { // '{' items_pattern ','? 
'}' - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var _literal_2; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var items; + var rest; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (items = items_pattern_rule(p)) // items_pattern + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (rest = double_star_pattern_rule(p)) // double_star_pattern + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + && + (_literal_2 = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchMapping ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_pattern_keys ( p, items ) ), $B.helper_functions.CHECK ( $B.parser_constants.asdl_pattern_seq, $B._PyPegen.get_patterns ( p, items ) ), rest. id, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var items; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (items = items_pattern_rule(p)) // items_pattern - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '{' items_pattern ','? '}' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchMapping ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_pattern_keys ( p, items ) ), $B.helper_functions.CHECK ( $B.parser_constants.asdl_pattern_seq, $B._PyPegen.get_patterns ( p, items ) ), $B.parser_constants.NULL, EXTRA); - return done(); + var _literal; + var _literal_1; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var items; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (items = items_pattern_rule(p)) // items_pattern + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? 
+ && + (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchMapping ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_pattern_keys ( p, items ) ), $B.helper_functions.CHECK ( $B.parser_constants.asdl_pattern_seq, $B._PyPegen.get_patterns ( p, items ) ), $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // items_pattern: ','.key_value_pattern+ @@ -6879,26 +6969,27 @@ function items_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.key_value_pattern+ - if (p.error_indicator) { - return NULL; - } - var _gather_73_var; - if ( - (_gather_73_var = _gather_73_rule(p)) // ','.key_value_pattern+ - ) - { - _res = _gather_73_var; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.key_value_pattern+ + if (p.error_indicator) { + return NULL; + } + var _gather_73_var; + if ( + (_gather_73_var = _gather_73_rule(p)) // ','.key_value_pattern+ + ) + { + _res = _gather_73_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // key_value_pattern: (literal_expr | attr) ':' pattern @@ -6907,32 +6998,33 @@ function key_value_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // (literal_expr | attr) ':' pattern - if (p.error_indicator) { - return NULL; - } - var _literal; - var key; - var pattern; - if ( - (key = _tmp_75_rule(p)) // literal_expr | attr - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (pattern = pattern_rule(p)) // pattern - ) - { - _res = $B._PyPegen.key_pattern_pair (p, key, pattern); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // (literal_expr | attr) ':' pattern + if (p.error_indicator) { + return NULL; + } + var _literal; + var key; + var pattern; + if ( + (key = _tmp_75_rule(p)) // literal_expr | attr + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (pattern = pattern_rule(p)) // pattern + ) + { + _res = $B._PyPegen.key_pattern_pair (p, key, pattern); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // double_star_pattern: '**' pattern_capture_target @@ -6941,29 +7033,30 @@ function double_star_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '**' pattern_capture_target - if (p.error_indicator) { - return NULL; - } - var _literal; - var target; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (target = pattern_capture_target_rule(p)) // pattern_capture_target - ) - { - _res = target; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '**' pattern_capture_target + if (p.error_indicator) { + return NULL; + } + var _literal; + var target; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (target = pattern_capture_target_rule(p)) // pattern_capture_target + ) + { + _res = target; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // 
class_pattern: @@ -6977,164 +7070,165 @@ function class_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // name_or_attr '(' ')' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - var cls; - if ( - (cls = name_or_attr_rule(p)) // name_or_attr - && - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // name_or_attr '(' ')' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchClass (cls, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, EXTRA); - return done(); + var _literal; + var _literal_1; + var cls; + if ( + (cls = name_or_attr_rule(p)) // name_or_attr + && + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchClass (cls, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // name_or_attr '(' positional_patterns ','? ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var cls; - var patterns; - if ( - (cls = name_or_attr_rule(p)) // name_or_attr - && - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (patterns = positional_patterns_rule(p)) // positional_patterns - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // name_or_attr '(' positional_patterns ','? ')' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchClass (cls, patterns, $B.parser_constants.NULL, $B.parser_constants.NULL, EXTRA); - return done(); - } - p.mark = _mark; - } - { // name_or_attr '(' keyword_patterns ','? ')' - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var cls; + var patterns; + if ( + (cls = name_or_attr_rule(p)) // name_or_attr + && + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (patterns = positional_patterns_rule(p)) // positional_patterns + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? 
+ && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchClass (cls, patterns, $B.parser_constants.NULL, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var cls; - var keywords; - if ( - (cls = name_or_attr_rule(p)) // name_or_attr - && - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (keywords = keyword_patterns_rule(p)) // keyword_patterns - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // name_or_attr '(' keyword_patterns ','? ')' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchClass (cls, $B.parser_constants.NULL, $B.helper_functions.CHECK ( $B.parser_constants.asdl_identifier_seq, $B._PyPegen.map_names_to_ids ( p, $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_pattern_keys ( p, keywords ) ) ) ), $B.helper_functions.CHECK ( $B.parser_constants.asdl_pattern_seq, $B._PyPegen.get_patterns ( p, keywords ) ), EXTRA); - return done(); - } - p.mark = _mark; - } - { // name_or_attr '(' positional_patterns ',' keyword_patterns ','? ')' - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var cls; + var keywords; + if ( + (cls = name_or_attr_rule(p)) // name_or_attr + && + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (keywords = keyword_patterns_rule(p)) // keyword_patterns + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchClass (cls, $B.parser_constants.NULL, $B.helper_functions.CHECK ( $B.parser_constants.asdl_identifier_seq, $B._PyPegen.map_names_to_ids ( p, $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_pattern_keys ( p, keywords ) ) ) ), $B.helper_functions.CHECK ( $B.parser_constants.asdl_pattern_seq, $B._PyPegen.get_patterns ( p, keywords ) ), EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var _literal_2; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var cls; - var keywords; - var patterns; - if ( - (cls = name_or_attr_rule(p)) // name_or_attr - && - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (patterns = positional_patterns_rule(p)) // positional_patterns - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (keywords = keyword_patterns_rule(p)) // keyword_patterns - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? 
- && - (_literal_2 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // name_or_attr '(' positional_patterns ',' keyword_patterns ','? ')' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.MatchClass (cls, patterns, $B.helper_functions.CHECK ( $B.parser_constants.asdl_identifier_seq, $B._PyPegen.map_names_to_ids ( p, $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_pattern_keys ( p, keywords ) ) ) ), $B.helper_functions.CHECK ( $B.parser_constants.asdl_pattern_seq, $B._PyPegen.get_patterns ( p, keywords ) ), EXTRA); - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_class_pattern - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var _literal_2; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var cls; + var keywords; + var patterns; + if ( + (cls = name_or_attr_rule(p)) // name_or_attr + && + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (patterns = positional_patterns_rule(p)) // positional_patterns + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (keywords = keyword_patterns_rule(p)) // keyword_patterns + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + && + (_literal_2 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.MatchClass (cls, patterns, $B.helper_functions.CHECK ( $B.parser_constants.asdl_identifier_seq, $B._PyPegen.map_names_to_ids ( p, $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_pattern_keys ( p, keywords ) ) ) ), $B.helper_functions.CHECK ( $B.parser_constants.asdl_pattern_seq, $B._PyPegen.get_patterns ( p, keywords ) ), EXTRA); + break; + } + p.mark = _mark; } - var invalid_class_pattern_var; - if ( - (invalid_class_pattern_var = invalid_class_pattern_rule(p)) // invalid_class_pattern - ) - { - _res = invalid_class_pattern_var; - return done(); + if (p.call_invalid_rules) { // invalid_class_pattern + if (p.error_indicator) { + return NULL; + } + var invalid_class_pattern_var; + if ( + (invalid_class_pattern_var = invalid_class_pattern_rule(p)) // invalid_class_pattern + ) + { + _res = invalid_class_pattern_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // positional_patterns: ','.pattern+ @@ -7143,26 +7237,27 @@ function positional_patterns_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.pattern+ - if (p.error_indicator) { - return NULL; - } - var args; - if ( - (args = _gather_76_rule(p)) // ','.pattern+ - ) - { - _res = args; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.pattern+ + if (p.error_indicator) { + return NULL; + } + var args; + if ( + (args = _gather_76_rule(p)) // ','.pattern+ + ) + { + _res = args; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // keyword_patterns: ','.keyword_pattern+ @@ -7171,26 +7266,27 @@ function keyword_patterns_rule(p) if (p.error_indicator) { return NULL; } 
- var _res = NULL; - var _mark = p.mark; - { // ','.keyword_pattern+ - if (p.error_indicator) { - return NULL; - } - var _gather_78_var; - if ( - (_gather_78_var = _gather_78_rule(p)) // ','.keyword_pattern+ - ) - { - _res = _gather_78_var; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.keyword_pattern+ + if (p.error_indicator) { + return NULL; + } + var _gather_78_var; + if ( + (_gather_78_var = _gather_78_rule(p)) // ','.keyword_pattern+ + ) + { + _res = _gather_78_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // keyword_pattern: NAME '=' pattern @@ -7199,32 +7295,33 @@ function keyword_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // NAME '=' pattern - if (p.error_indicator) { - return NULL; - } - var _literal; - var arg; - var value; - if ( - (arg = $B._PyPegen.name_token(p)) // NAME - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (value = pattern_rule(p)) // pattern - ) - { - _res = $B._PyPegen.key_pattern_pair (p, arg, value); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // NAME '=' pattern + if (p.error_indicator) { + return NULL; + } + var _literal; + var arg; + var value; + if ( + (arg = $B._PyPegen.name_token(p)) // NAME + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (value = pattern_rule(p)) // pattern + ) + { + _res = $B._PyPegen.key_pattern_pair (p, arg, value); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // type_alias: "type" NAME type_params? '=' expression @@ -7233,51 +7330,52 @@ function type_alias_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // "type" NAME type_params? '=' expression - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var _literal; - var b; - var n; - var t; - if ( - (_keyword = $B._PyPegen.expect_soft_keyword(p, "type")) // soft_keyword='"type"' - && - (n = $B._PyPegen.name_token(p)) // NAME - && - (t = type_params_rule(p), !p.error_indicator) // type_params? - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (b = expression_rule(p)) // expression - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // "type" NAME type_params? 
'=' expression + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 12, "Type statement is", new $B._PyAST.TypeAlias ( $B.helper_functions.CHECK ( $B.ast.expr, $B._PyPegen.set_expr_context ( p, n, $B.parser_constants.Store ) ), t, b, EXTRA )); - return done(); + var _keyword; + var _literal; + var b; + var n; + var t; + if ( + (_keyword = $B._PyPegen.expect_soft_keyword(p, "type")) // soft_keyword='"type"' + && + (n = $B._PyPegen.name_token(p)) // NAME + && + (t = type_params_rule(p), !p.error_indicator) // type_params? + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (b = expression_rule(p)) // expression + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.CHECK_VERSION ($B.ast.stmt, 12, "Type statement is", new $B._PyAST.TypeAlias ( $B.helper_functions.CHECK ( $B.ast.expr, $B._PyPegen.set_expr_context ( p, n, $B.parser_constants.Store ) ), t, b, EXTRA )); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // type_params: '[' type_param_seq ']' @@ -7286,32 +7384,33 @@ function type_params_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '[' type_param_seq ']' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var t; - if ( - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - && - (t = type_param_seq_rule(p)) // type_param_seq - && - (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' - ) - { - _res = $B.helper_functions.CHECK_VERSION ($B.parser_constants.asdl_type_param_seq, 12, "Type parameter lists are", t); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '[' type_param_seq ']' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var t; + if ( + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + && + (t = type_param_seq_rule(p)) // type_param_seq + && + (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' + ) + { + _res = $B.helper_functions.CHECK_VERSION ($B.parser_constants.asdl_type_param_seq, 12, "Type parameter lists are", t); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // type_param_seq: ','.type_param+ ','? @@ -7320,30 +7419,31 @@ function type_param_seq_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.type_param+ ','? - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - if ( - (a = _gather_80_rule(p)) // ','.type_param+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - ) - { - _res = a; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.type_param+ ','? + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + if ( + (a = _gather_80_rule(p)) // ','.type_param+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? 
+ ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // type_param: @@ -7357,139 +7457,140 @@ function type_param_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, type_param_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // NAME type_param_bound? - if (p.error_indicator) { + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, type_param_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - var b; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - && - (b = type_param_bound_rule(p), !p.error_indicator) // type_param_bound? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // NAME type_param_bound? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.TypeVar (a. id, b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // '*' NAME ':' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var colon; - var e; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (a = $B._PyPegen.name_token(p)) // NAME - && - (colon = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (e = expression_rule(p)) // expression - ) - { - _res = RAISE_SYNTAX_ERROR_STARTING_FROM (colon, e.kind == Tuple_kind ? "cannot use constraints with TypeVarTuple" : "cannot use bound with TypeVarTuple"); - return done(); - } - p.mark = _mark; - } - { // '*' NAME - if (p.error_indicator) { - return NULL; + var a; + var b; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + && + (b = type_param_bound_rule(p), !p.error_indicator) // type_param_bound? + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.TypeVar (a. id, b, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (a = $B._PyPegen.name_token(p)) // NAME - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '*' NAME ':' expression + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.TypeVarTuple (a. id, EXTRA); - return done(); - } - p.mark = _mark; - } - { // '**' NAME ':' expression - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var colon; + var e; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (a = $B._PyPegen.name_token(p)) // NAME + && + (colon = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (e = expression_rule(p)) // expression + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_STARTING_FROM(p, colon, e.kind == Tuple_kind ? 
"cannot use constraints with TypeVarTuple" : "cannot use bound with TypeVarTuple"); + break; + } + p.mark = _mark; } - var _literal; - var a; - var colon; - var e; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (a = $B._PyPegen.name_token(p)) // NAME - && - (colon = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (e = expression_rule(p)) // expression - ) - { - _res = RAISE_SYNTAX_ERROR_STARTING_FROM (colon, e.kind == Tuple_kind ? "cannot use constraints with ParamSpec" : "cannot use bound with ParamSpec"); - return done(); + { // '*' NAME + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (a = $B._PyPegen.name_token(p)) // NAME + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.TypeVarTuple (a. id, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '**' NAME - if (p.error_indicator) { - return NULL; + { // '**' NAME ':' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var colon; + var e; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (a = $B._PyPegen.name_token(p)) // NAME + && + (colon = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (e = expression_rule(p)) // expression + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_STARTING_FROM(p, colon, e.kind == Tuple_kind ? "cannot use constraints with ParamSpec" : "cannot use bound with ParamSpec"); + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (a = $B._PyPegen.name_token(p)) // NAME - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '**' NAME + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.ParamSpec (a. id, EXTRA); - return done(); + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (a = $B._PyPegen.name_token(p)) // NAME + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.ParamSpec (a. 
id, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, type_param_type, _res); return _res; - } } // type_param_bound: ':' expression @@ -7498,29 +7599,30 @@ function type_param_bound_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ':' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var e; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (e = expression_rule(p)) // expression - ) - { - _res = e; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ':' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var e; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (e = expression_rule(p)) // expression + ) + { + _res = e; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // expressions: expression ((',' expression))+ ','? | expression ',' | expression @@ -7529,83 +7631,84 @@ function expressions_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // expression ((',' expression))+ ','? - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var b; - if ( - (a = expression_rule(p)) // expression - && - (b = _loop1_82_rule(p)) // ((',' expression))+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // expression ((',' expression))+ ','? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Tuple ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_insert_in_front ( p, a, b ) ), $B.parser_constants.Load, EXTRA); - return done(); - } - p.mark = _mark; - } - { // expression ',' - if (p.error_indicator) { - return NULL; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var b; + if ( + (a = expression_rule(p)) // expression + && + (b = _loop1_82_rule(p)) // ((',' expression))+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? 
+ ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Tuple ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_insert_in_front ( p, a, b ) ), $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (a = expression_rule(p)) // expression - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // expression ',' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Tuple ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.singleton_seq ( p, a ) ), $B.parser_constants.Load, EXTRA); - return done(); - } - p.mark = _mark; - } - { // expression - if (p.error_indicator) { - return NULL; + var _literal; + var a; + if ( + (a = expression_rule(p)) // expression + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Tuple ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.singleton_seq ( p, a ) ), $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - ) - { - _res = expression_var; - return done(); + { // expression + if (p.error_indicator) { + return NULL; + } + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + ) + { + _res = expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // expression: @@ -7619,112 +7722,113 @@ function expression_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, expression_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_expression - if (p.error_indicator) { - return NULL; - } - var invalid_expression_var; - if ( - (invalid_expression_var = invalid_expression_rule(p)) // invalid_expression - ) - { - _res = invalid_expression_var; - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_legacy_expression - if (p.error_indicator) { - return NULL; - } - var invalid_legacy_expression_var; - if ( - (invalid_legacy_expression_var = invalid_legacy_expression_rule(p)) // invalid_legacy_expression - ) - { - _res = invalid_legacy_expression_var; - return done(); + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, expression_type, _res)) { + return _res.value; } - p.mark = _mark; - } - { // disjunction 'if' disjunction 'else' expression - if (p.error_indicator) { + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var _keyword_1; - var a; - var b; - var c; - if ( - (a = 
disjunction_rule(p)) // disjunction - && - (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' - && - (b = disjunction_rule(p)) // disjunction - && - (_keyword_1 = $B._PyPegen.expect_token(p, 645)) // token='else' - && - (c = expression_rule(p)) // expression - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_expression + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.IfExp (b, a, c, EXTRA); - return done(); + var invalid_expression_var; + if ( + (invalid_expression_var = invalid_expression_rule(p)) // invalid_expression + ) + { + _res = invalid_expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // disjunction - if (p.error_indicator) { - return NULL; + if (p.call_invalid_rules) { // invalid_legacy_expression + if (p.error_indicator) { + return NULL; + } + var invalid_legacy_expression_var; + if ( + (invalid_legacy_expression_var = invalid_legacy_expression_rule(p)) // invalid_legacy_expression + ) + { + _res = invalid_legacy_expression_var; + break; + } + p.mark = _mark; } - var disjunction_var; - if ( - (disjunction_var = disjunction_rule(p)) // disjunction - ) - { - _res = disjunction_var; - return done(); + { // disjunction 'if' disjunction 'else' expression + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _keyword_1; + var a; + var b; + var c; + if ( + (a = disjunction_rule(p)) // disjunction + && + (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' + && + (b = disjunction_rule(p)) // disjunction + && + (_keyword_1 = $B._PyPegen.expect_token(p, 645)) // token='else' + && + (c = expression_rule(p)) // expression + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.IfExp (b, a, c, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // lambdef - if (p.error_indicator) { - return NULL; + { // disjunction + if (p.error_indicator) { + return NULL; + } + var disjunction_var; + if ( + (disjunction_var = disjunction_rule(p)) // disjunction + ) + { + _res = disjunction_var; + break; + } + p.mark = _mark; } - var lambdef_var; - if ( - (lambdef_var = lambdef_rule(p)) // lambdef - ) - { - _res = lambdef_var; - return done(); + { // lambdef + if (p.error_indicator) { + return NULL; + } + var lambdef_var; + if ( + (lambdef_var = lambdef_rule(p)) // lambdef + ) + { + _res = lambdef_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, expression_type, _res); return _res; - } } // yield_expr: 'yield' 'from' expression | 'yield' star_expressions? 
@@ -7733,68 +7837,69 @@ function yield_expr_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // 'yield' 'from' expression - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var _keyword_1; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 573)) // token='yield' - && - (_keyword_1 = $B._PyPegen.expect_token(p, 608)) // token='from' - && - (a = expression_rule(p)) // expression - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // 'yield' 'from' expression + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.YieldFrom (a, EXTRA); - return done(); - } - p.mark = _mark; - } - { // 'yield' star_expressions? - if (p.error_indicator) { - return NULL; + var _keyword; + var _keyword_1; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 573)) // token='yield' + && + (_keyword_1 = $B._PyPegen.expect_token(p, 608)) // token='from' + && + (a = expression_rule(p)) // expression + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.YieldFrom (a, EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 573)) // token='yield' - && - (a = star_expressions_rule(p), !p.error_indicator) // star_expressions? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'yield' star_expressions? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Yield (a, EXTRA); - return done(); + var _keyword; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 573)) // token='yield' + && + (a = star_expressions_rule(p), !p.error_indicator) // star_expressions? + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Yield (a, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // star_expressions: @@ -7806,83 +7911,84 @@ function star_expressions_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // star_expression ((',' star_expression))+ ','? 
- if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var b; - if ( - (a = star_expression_rule(p)) // star_expression - && - (b = _loop1_83_rule(p)) // ((',' star_expression))+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // star_expression ((',' star_expression))+ ','? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Tuple ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_insert_in_front ( p, a, b ) ), $B.parser_constants.Load, EXTRA); - return done(); - } - p.mark = _mark; - } - { // star_expression ',' - if (p.error_indicator) { - return NULL; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var b; + if ( + (a = star_expression_rule(p)) // star_expression + && + (b = _loop1_83_rule(p)) // ((',' star_expression))+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Tuple ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_insert_in_front ( p, a, b ) ), $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (a = star_expression_rule(p)) // star_expression - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // star_expression ',' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Tuple ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.singleton_seq ( p, a ) ), $B.parser_constants.Load, EXTRA); - return done(); - } - p.mark = _mark; - } - { // star_expression - if (p.error_indicator) { - return NULL; + var _literal; + var a; + if ( + (a = star_expression_rule(p)) // star_expression + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Tuple ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.singleton_seq ( p, a ) ), $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - var star_expression_var; - if ( - (star_expression_var = star_expression_rule(p)) // star_expression - ) - { - _res = star_expression_var; - return done(); + { // star_expression + if (p.error_indicator) { + return NULL; + } + var star_expression_var; + if ( + (star_expression_var = star_expression_rule(p)) // star_expression + ) + { + _res = star_expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // star_expression: 
'*' bitwise_or | expression @@ -7891,61 +7997,62 @@ function star_expression_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, star_expression_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '*' bitwise_or - if (p.error_indicator) { + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, star_expression_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '*' bitwise_or + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Starred (a, $B.parser_constants.Load, EXTRA); - return done(); - } - p.mark = _mark; - } - { // expression - if (p.error_indicator) { - return NULL; + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Starred (a, $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - ) - { - _res = expression_var; - return done(); + { // expression + if (p.error_indicator) { + return NULL; + } + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + ) + { + _res = expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, star_expression_type, _res); return _res; - } } // star_named_expressions: ','.star_named_expression+ ','? @@ -7954,30 +8061,31 @@ function star_named_expressions_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.star_named_expression+ ','? - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - if ( - (a = _gather_84_rule(p)) // ','.star_named_expression+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - ) - { - _res = a; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.star_named_expression+ ','? + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + if ( + (a = _gather_84_rule(p)) // ','.star_named_expression+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? 
+ ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // star_named_expression: '*' bitwise_or | named_expression @@ -7986,56 +8094,57 @@ function star_named_expression_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '*' bitwise_or - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '*' bitwise_or + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Starred (a, $B.parser_constants.Load, EXTRA); - return done(); - } - p.mark = _mark; - } - { // named_expression - if (p.error_indicator) { - return NULL; + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Starred (a, $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - var named_expression_var; - if ( - (named_expression_var = named_expression_rule(p)) // named_expression - ) - { - _res = named_expression_var; - return done(); + { // named_expression + if (p.error_indicator) { + return NULL; + } + var named_expression_var; + if ( + (named_expression_var = named_expression_rule(p)) // named_expression + ) + { + _res = named_expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // assignment_expression: NAME ':=' ~ expression @@ -8044,51 +8153,52 @@ function assignment_expression_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // NAME ':=' ~ expression - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _cut_var = 0; - var _literal; - var a; - var b; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - && - (_literal = $B._PyPegen.expect_token(p, 53)) // token=':=' - && - (_cut_var = 1) - && - (b = expression_rule(p)) // expression - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // NAME ':=' ~ expression + if (p.error_indicator) { + return NULL; + } + var _cut_var = 0; + var _literal; + var 
a; + var b; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + && + (_literal = $B._PyPegen.expect_token(p, 53)) // token=':=' + && + (_cut_var = 1) + && + (b = expression_rule(p)) // expression + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.CHECK_VERSION ($B.ast.expr, 8, "Assignment expressions are", new $B._PyAST.NamedExpr ( $B.helper_functions.CHECK ( $B.ast.expr, $B._PyPegen.set_expr_context ( p, a, $B.parser_constants.Store ) ), b, EXTRA )); + break; + } + p.mark = _mark; + if (_cut_var) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.expr, 8, "Assignment expressions are", new $B._PyAST.NamedExpr ( $B.helper_functions.CHECK ( $B.ast.expr, $B._PyPegen.set_expr_context ( p, a, $B.parser_constants.Store ) ), b, EXTRA )); - return done(); - } - p.mark = _mark; - if (_cut_var) { - return NULL; } + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // named_expression: assignment_expression | invalid_named_expression | expression !':=' @@ -8097,56 +8207,57 @@ function named_expression_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // assignment_expression - if (p.error_indicator) { - return NULL; - } - var assignment_expression_var; - if ( - (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression - ) - { - _res = assignment_expression_var; - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_named_expression - if (p.error_indicator) { - return NULL; - } - var invalid_named_expression_var; - if ( - (invalid_named_expression_var = invalid_named_expression_rule(p)) // invalid_named_expression - ) - { - _res = invalid_named_expression_var; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // assignment_expression + if (p.error_indicator) { + return NULL; + } + var assignment_expression_var; + if ( + (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression + ) + { + _res = assignment_expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // expression !':=' - if (p.error_indicator) { - return NULL; + if (p.call_invalid_rules) { // invalid_named_expression + if (p.error_indicator) { + return NULL; + } + var invalid_named_expression_var; + if ( + (invalid_named_expression_var = invalid_named_expression_rule(p)) // invalid_named_expression + ) + { + _res = invalid_named_expression_var; + break; + } + p.mark = _mark; } - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 53) // token=':=' - ) - { - _res = expression_var; - return done(); + { // expression !':=' + if (p.error_indicator) { + return NULL; + } + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 53) // token=':=' + ) + { + _res = expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // disjunction: conjunction (('or' conjunction))+ | conjunction @@ -8155,61 +8266,62 @@ function disjunction_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; 
- if ($B._PyPegen.is_memoized(p, disjunction_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // conjunction (('or' conjunction))+ - if (p.error_indicator) { + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, disjunction_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - var b; - if ( - (a = conjunction_rule(p)) // conjunction - && - (b = _loop1_86_rule(p)) // (('or' conjunction))+ - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // conjunction (('or' conjunction))+ + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BoolOp (new $B.ast.Or(), $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_insert_in_front ( p, a, b ) ), EXTRA); - return done(); - } - p.mark = _mark; - } - { // conjunction - if (p.error_indicator) { - return NULL; + var a; + var b; + if ( + (a = conjunction_rule(p)) // conjunction + && + (b = _loop1_86_rule(p)) // (('or' conjunction))+ + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BoolOp (new $B.ast.Or(), $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_insert_in_front ( p, a, b ) ), EXTRA); + break; + } + p.mark = _mark; } - var conjunction_var; - if ( - (conjunction_var = conjunction_rule(p)) // conjunction - ) - { - _res = conjunction_var; - return done(); + { // conjunction + if (p.error_indicator) { + return NULL; + } + var conjunction_var; + if ( + (conjunction_var = conjunction_rule(p)) // conjunction + ) + { + _res = conjunction_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, disjunction_type, _res); return _res; - } } // conjunction: inversion (('and' inversion))+ | inversion @@ -8218,61 +8330,62 @@ function conjunction_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, conjunction_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // inversion (('and' inversion))+ - if (p.error_indicator) { + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, conjunction_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - var b; - if ( - (a = inversion_rule(p)) // inversion - && - (b = _loop1_87_rule(p)) // (('and' inversion))+ - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = 
p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // inversion (('and' inversion))+ + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BoolOp (new $B.ast.And(), $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_insert_in_front ( p, a, b ) ), EXTRA); - return done(); - } - p.mark = _mark; - } - { // inversion - if (p.error_indicator) { - return NULL; + var a; + var b; + if ( + (a = inversion_rule(p)) // inversion + && + (b = _loop1_87_rule(p)) // (('and' inversion))+ + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BoolOp (new $B.ast.And(), $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_insert_in_front ( p, a, b ) ), EXTRA); + break; + } + p.mark = _mark; } - var inversion_var; - if ( - (inversion_var = inversion_rule(p)) // inversion - ) - { - _res = inversion_var; - return done(); + { // inversion + if (p.error_indicator) { + return NULL; + } + var inversion_var; + if ( + (inversion_var = inversion_rule(p)) // inversion + ) + { + _res = inversion_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, conjunction_type, _res); return _res; - } } // inversion: 'not' inversion | comparison @@ -8281,61 +8394,62 @@ function inversion_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, inversion_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // 'not' inversion - if (p.error_indicator) { + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, inversion_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 581)) // token='not' - && - (a = inversion_rule(p)) // inversion - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // 'not' inversion + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.UnaryOp (new $B.ast.Not(), a, EXTRA); - return done(); - } - p.mark = _mark; - } - { // comparison - if (p.error_indicator) { - return NULL; + var _keyword; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 581)) // token='not' + && + (a = inversion_rule(p)) // inversion + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.UnaryOp (new $B.ast.Not(), a, EXTRA); + break; + } + p.mark = _mark; } - var comparison_var; - if ( - (comparison_var = comparison_rule(p)) // comparison - ) - { - _res = comparison_var; - return done(); + { // 
comparison + if (p.error_indicator) { + return NULL; + } + var comparison_var; + if ( + (comparison_var = comparison_rule(p)) // comparison + ) + { + _res = comparison_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, inversion_type, _res); return _res; - } } // comparison: bitwise_or compare_op_bitwise_or_pair+ | bitwise_or @@ -8344,56 +8458,57 @@ function comparison_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // bitwise_or compare_op_bitwise_or_pair+ - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - var b; - if ( - (a = bitwise_or_rule(p)) // bitwise_or - && - (b = _loop1_88_rule(p)) // compare_op_bitwise_or_pair+ - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // bitwise_or compare_op_bitwise_or_pair+ + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Compare (a, $B.helper_functions.CHECK ( $B.parser_constants.asdl_int_seq, $B._PyPegen.get_cmpops ( p, b ) ), $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_exprs ( p, b ) ), EXTRA); - return done(); - } - p.mark = _mark; - } - { // bitwise_or - if (p.error_indicator) { - return NULL; + var a; + var b; + if ( + (a = bitwise_or_rule(p)) // bitwise_or + && + (b = _loop1_88_rule(p)) // compare_op_bitwise_or_pair+ + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Compare (a, $B.helper_functions.CHECK ( $B.parser_constants.asdl_int_seq, $B._PyPegen.get_cmpops ( p, b ) ), $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_exprs ( p, b ) ), EXTRA); + break; + } + p.mark = _mark; } - var bitwise_or_var; - if ( - (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = bitwise_or_var; - return done(); + { // bitwise_or + if (p.error_indicator) { + return NULL; + } + var bitwise_or_var; + if ( + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = bitwise_or_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // compare_op_bitwise_or_pair: @@ -8412,152 +8527,153 @@ function compare_op_bitwise_or_pair_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // eq_bitwise_or - if (p.error_indicator) { - return NULL; - } - var eq_bitwise_or_var; - if ( - (eq_bitwise_or_var = eq_bitwise_or_rule(p)) // eq_bitwise_or - ) - { - _res = eq_bitwise_or_var; - return done(); - } - p.mark = _mark; - } - { // noteq_bitwise_or - if (p.error_indicator) { - return NULL; - } - var noteq_bitwise_or_var; - if ( - (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) // noteq_bitwise_or - ) - { - _res = noteq_bitwise_or_var; - return done(); - } - p.mark = 
_mark; - } - { // lte_bitwise_or - if (p.error_indicator) { - return NULL; - } - var lte_bitwise_or_var; - if ( - (lte_bitwise_or_var = lte_bitwise_or_rule(p)) // lte_bitwise_or - ) - { - _res = lte_bitwise_or_var; - return done(); - } - p.mark = _mark; - } - { // lt_bitwise_or - if (p.error_indicator) { - return NULL; - } - var lt_bitwise_or_var; - if ( - (lt_bitwise_or_var = lt_bitwise_or_rule(p)) // lt_bitwise_or - ) - { - _res = lt_bitwise_or_var; - return done(); - } - p.mark = _mark; - } - { // gte_bitwise_or - if (p.error_indicator) { - return NULL; - } - var gte_bitwise_or_var; - if ( - (gte_bitwise_or_var = gte_bitwise_or_rule(p)) // gte_bitwise_or - ) - { - _res = gte_bitwise_or_var; - return done(); - } - p.mark = _mark; - } - { // gt_bitwise_or - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // eq_bitwise_or + if (p.error_indicator) { + return NULL; + } + var eq_bitwise_or_var; + if ( + (eq_bitwise_or_var = eq_bitwise_or_rule(p)) // eq_bitwise_or + ) + { + _res = eq_bitwise_or_var; + break; + } + p.mark = _mark; } - var gt_bitwise_or_var; - if ( - (gt_bitwise_or_var = gt_bitwise_or_rule(p)) // gt_bitwise_or - ) - { - _res = gt_bitwise_or_var; - return done(); + { // noteq_bitwise_or + if (p.error_indicator) { + return NULL; + } + var noteq_bitwise_or_var; + if ( + (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) // noteq_bitwise_or + ) + { + _res = noteq_bitwise_or_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // notin_bitwise_or - if (p.error_indicator) { - return NULL; + { // lte_bitwise_or + if (p.error_indicator) { + return NULL; + } + var lte_bitwise_or_var; + if ( + (lte_bitwise_or_var = lte_bitwise_or_rule(p)) // lte_bitwise_or + ) + { + _res = lte_bitwise_or_var; + break; + } + p.mark = _mark; } - var notin_bitwise_or_var; - if ( - (notin_bitwise_or_var = notin_bitwise_or_rule(p)) // notin_bitwise_or - ) - { - _res = notin_bitwise_or_var; - return done(); + { // lt_bitwise_or + if (p.error_indicator) { + return NULL; + } + var lt_bitwise_or_var; + if ( + (lt_bitwise_or_var = lt_bitwise_or_rule(p)) // lt_bitwise_or + ) + { + _res = lt_bitwise_or_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // in_bitwise_or - if (p.error_indicator) { - return NULL; + { // gte_bitwise_or + if (p.error_indicator) { + return NULL; + } + var gte_bitwise_or_var; + if ( + (gte_bitwise_or_var = gte_bitwise_or_rule(p)) // gte_bitwise_or + ) + { + _res = gte_bitwise_or_var; + break; + } + p.mark = _mark; } - var in_bitwise_or_var; - if ( - (in_bitwise_or_var = in_bitwise_or_rule(p)) // in_bitwise_or - ) - { - _res = in_bitwise_or_var; - return done(); + { // gt_bitwise_or + if (p.error_indicator) { + return NULL; + } + var gt_bitwise_or_var; + if ( + (gt_bitwise_or_var = gt_bitwise_or_rule(p)) // gt_bitwise_or + ) + { + _res = gt_bitwise_or_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // isnot_bitwise_or - if (p.error_indicator) { - return NULL; + { // notin_bitwise_or + if (p.error_indicator) { + return NULL; + } + var notin_bitwise_or_var; + if ( + (notin_bitwise_or_var = notin_bitwise_or_rule(p)) // notin_bitwise_or + ) + { + _res = notin_bitwise_or_var; + break; + } + p.mark = _mark; } - var isnot_bitwise_or_var; - if ( - (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) // isnot_bitwise_or - ) - { - _res = isnot_bitwise_or_var; - return done(); + { // in_bitwise_or + if (p.error_indicator) { + return NULL; + } + var in_bitwise_or_var; + if ( + (in_bitwise_or_var = 
in_bitwise_or_rule(p)) // in_bitwise_or + ) + { + _res = in_bitwise_or_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // is_bitwise_or - if (p.error_indicator) { - return NULL; + { // isnot_bitwise_or + if (p.error_indicator) { + return NULL; + } + var isnot_bitwise_or_var; + if ( + (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) // isnot_bitwise_or + ) + { + _res = isnot_bitwise_or_var; + break; + } + p.mark = _mark; } - var is_bitwise_or_var; - if ( - (is_bitwise_or_var = is_bitwise_or_rule(p)) // is_bitwise_or - ) - { - _res = is_bitwise_or_var; - return done(); + { // is_bitwise_or + if (p.error_indicator) { + return NULL; + } + var is_bitwise_or_var; + if ( + (is_bitwise_or_var = is_bitwise_or_rule(p)) // is_bitwise_or + ) + { + _res = is_bitwise_or_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // eq_bitwise_or: '==' bitwise_or @@ -8566,29 +8682,30 @@ function eq_bitwise_or_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '==' bitwise_or - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 27)) // token='==' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.Eq(), a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '==' bitwise_or + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 27)) // token='==' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.Eq(), a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // noteq_bitwise_or: ('!=') bitwise_or @@ -8597,29 +8714,30 @@ function noteq_bitwise_or_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ('!=') bitwise_or - if (p.error_indicator) { - return NULL; - } - var _tmp_89_var; - var a; - if ( - (_tmp_89_var = _tmp_89_rule(p)) // '!=' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.NotEq(), a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ('!=') bitwise_or + if (p.error_indicator) { + return NULL; + } + var _tmp_89_var; + var a; + if ( + (_tmp_89_var = _tmp_89_rule(p)) // '!=' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.NotEq(), a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // lte_bitwise_or: '<=' bitwise_or @@ -8628,29 +8746,30 @@ function lte_bitwise_or_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '<=' bitwise_or - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 29)) // token='<=' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.LtE(), a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '<=' bitwise_or + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 29)) // token='<=' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B._PyPegen.cmpop_expr_pair (p, new 
$B.ast.LtE(), a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // lt_bitwise_or: '<' bitwise_or @@ -8659,29 +8778,30 @@ function lt_bitwise_or_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '<' bitwise_or - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 20)) // token='<' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.Lt(), a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '<' bitwise_or + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 20)) // token='<' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.Lt(), a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // gte_bitwise_or: '>=' bitwise_or @@ -8690,29 +8810,30 @@ function gte_bitwise_or_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '>=' bitwise_or - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 30)) // token='>=' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.GtE(), a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '>=' bitwise_or + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 30)) // token='>=' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.GtE(), a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // gt_bitwise_or: '>' bitwise_or @@ -8721,29 +8842,30 @@ function gt_bitwise_or_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '>' bitwise_or - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 21)) // token='>' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.Gt(), a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '>' bitwise_or + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 21)) // token='>' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.Gt(), a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // notin_bitwise_or: 'not' 'in' bitwise_or @@ -8752,32 +8874,33 @@ function notin_bitwise_or_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'not' 'in' bitwise_or - if (p.error_indicator) { - return NULL; - } - var _keyword; - var _keyword_1; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 581)) // token='not' - && - (_keyword_1 = $B._PyPegen.expect_token(p, 651)) // token='in' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.NotIn(), a); - return done(); + while (1) { + var _res = NULL; + var _mark = 
p.mark; + { // 'not' 'in' bitwise_or + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _keyword_1; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 581)) // token='not' + && + (_keyword_1 = $B._PyPegen.expect_token(p, 651)) // token='in' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.NotIn(), a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // in_bitwise_or: 'in' bitwise_or @@ -8786,29 +8909,30 @@ function in_bitwise_or_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'in' bitwise_or - if (p.error_indicator) { - return NULL; - } - var _keyword; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 651)) // token='in' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.In(), a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'in' bitwise_or + if (p.error_indicator) { + return NULL; + } + var _keyword; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 651)) // token='in' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.In(), a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // isnot_bitwise_or: 'is' 'not' bitwise_or @@ -8817,32 +8941,33 @@ function isnot_bitwise_or_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'is' 'not' bitwise_or - if (p.error_indicator) { - return NULL; - } - var _keyword; - var _keyword_1; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 582)) // token='is' - && - (_keyword_1 = $B._PyPegen.expect_token(p, 581)) // token='not' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.IsNot(), a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'is' 'not' bitwise_or + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _keyword_1; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 582)) // token='is' + && + (_keyword_1 = $B._PyPegen.expect_token(p, 581)) // token='not' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.IsNot(), a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // is_bitwise_or: 'is' bitwise_or @@ -8851,29 +8976,30 @@ function is_bitwise_or_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'is' bitwise_or - if (p.error_indicator) { - return NULL; - } - var _keyword; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 582)) // token='is' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.Is(), a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'is' bitwise_or + if (p.error_indicator) { + return NULL; + } + var _keyword; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 582)) // token='is' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B._PyPegen.cmpop_expr_pair (p, new $B.ast.Is(), a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // Left-recursive @@ -8911,59 +9037,60 @@ 
function bitwise_or_raw(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // bitwise_or '|' bitwise_xor - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = bitwise_or_rule(p)) // bitwise_or - && - (_literal = $B._PyPegen.expect_token(p, 18)) // token='|' - && - (b = bitwise_xor_rule(p)) // bitwise_xor - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // bitwise_or '|' bitwise_xor + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (a, new $B.ast.BitOr(), b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // bitwise_xor - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( + (a = bitwise_or_rule(p)) // bitwise_or + && + (_literal = $B._PyPegen.expect_token(p, 18)) // token='|' + && + (b = bitwise_xor_rule(p)) // bitwise_xor + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (a, new $B.ast.BitOr(), b, EXTRA); + break; + } + p.mark = _mark; } - var bitwise_xor_var; - if ( - (bitwise_xor_var = bitwise_xor_rule(p)) // bitwise_xor - ) - { - _res = bitwise_xor_var; - return done(); + { // bitwise_xor + if (p.error_indicator) { + return NULL; + } + var bitwise_xor_var; + if ( + (bitwise_xor_var = bitwise_xor_rule(p)) // bitwise_xor + ) + { + _res = bitwise_xor_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // Left-recursive @@ -9001,59 +9128,60 @@ function bitwise_xor_raw(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // bitwise_xor '^' bitwise_and - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = bitwise_xor_rule(p)) // bitwise_xor - && - (_literal = $B._PyPegen.expect_token(p, 32)) // token='^' - && - (b = bitwise_and_rule(p)) // bitwise_and - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // bitwise_xor '^' bitwise_and + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (a, new $B.ast.BitXor(), b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // bitwise_and - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( 
+ (a = bitwise_xor_rule(p)) // bitwise_xor + && + (_literal = $B._PyPegen.expect_token(p, 32)) // token='^' + && + (b = bitwise_and_rule(p)) // bitwise_and + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (a, new $B.ast.BitXor(), b, EXTRA); + break; + } + p.mark = _mark; } - var bitwise_and_var; - if ( - (bitwise_and_var = bitwise_and_rule(p)) // bitwise_and - ) - { - _res = bitwise_and_var; - return done(); + { // bitwise_and + if (p.error_indicator) { + return NULL; + } + var bitwise_and_var; + if ( + (bitwise_and_var = bitwise_and_rule(p)) // bitwise_and + ) + { + _res = bitwise_and_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // Left-recursive @@ -9091,59 +9219,60 @@ function bitwise_and_raw(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // bitwise_and '&' shift_expr - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = bitwise_and_rule(p)) // bitwise_and - && - (_literal = $B._PyPegen.expect_token(p, 19)) // token='&' - && - (b = shift_expr_rule(p)) // shift_expr - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // bitwise_and '&' shift_expr + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (a, new $B.ast.BitAnd(), b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // shift_expr - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( + (a = bitwise_and_rule(p)) // bitwise_and + && + (_literal = $B._PyPegen.expect_token(p, 19)) // token='&' + && + (b = shift_expr_rule(p)) // shift_expr + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (a, new $B.ast.BitAnd(), b, EXTRA); + break; + } + p.mark = _mark; } - var shift_expr_var; - if ( - (shift_expr_var = shift_expr_rule(p)) // shift_expr - ) - { - _res = shift_expr_var; - return done(); + { // shift_expr + if (p.error_indicator) { + return NULL; + } + var shift_expr_var; + if ( + (shift_expr_var = shift_expr_rule(p)) // shift_expr + ) + { + _res = shift_expr_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // Left-recursive @@ -9181,85 +9310,86 @@ function shift_expr_raw(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // shift_expr '<<' sum - if 
(p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = shift_expr_rule(p)) // shift_expr - && - (_literal = $B._PyPegen.expect_token(p, 33)) // token='<<' - && - (b = sum_rule(p)) // sum - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // shift_expr '<<' sum + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (a, new $B.ast.LShift(), b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // shift_expr '>>' sum - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( + (a = shift_expr_rule(p)) // shift_expr + && + (_literal = $B._PyPegen.expect_token(p, 33)) // token='<<' + && + (b = sum_rule(p)) // sum + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (a, new $B.ast.LShift(), b, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var a; - var b; - if ( - (a = shift_expr_rule(p)) // shift_expr - && - (_literal = $B._PyPegen.expect_token(p, 34)) // token='>>' - && - (b = sum_rule(p)) // sum - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // shift_expr '>>' sum + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (a, new $B.ast.RShift(), b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // sum - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( + (a = shift_expr_rule(p)) // shift_expr + && + (_literal = $B._PyPegen.expect_token(p, 34)) // token='>>' + && + (b = sum_rule(p)) // sum + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (a, new $B.ast.RShift(), b, EXTRA); + break; + } + p.mark = _mark; } - var sum_var; - if ( - (sum_var = sum_rule(p)) // sum - ) - { - _res = sum_var; - return done(); + { // sum + if (p.error_indicator) { + return NULL; + } + var sum_var; + if ( + (sum_var = sum_rule(p)) // sum + ) + { + _res = sum_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // Left-recursive @@ -9297,85 +9427,86 @@ function sum_raw(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // sum '+' term - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = sum_rule(p)) // sum - && - (_literal = $B._PyPegen.expect_token(p, 14)) // token='+' - && - (b = term_rule(p)) // term - ) - { - var _token = 
$B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // sum '+' term + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (a, new $B.ast.Add(), b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // sum '-' term - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( + (a = sum_rule(p)) // sum + && + (_literal = $B._PyPegen.expect_token(p, 14)) // token='+' + && + (b = term_rule(p)) // term + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (a, new $B.ast.Add(), b, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var a; - var b; - if ( - (a = sum_rule(p)) // sum - && - (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' - && - (b = term_rule(p)) // term - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // sum '-' term + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (a, new $B.ast.Sub(), b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // term - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( + (a = sum_rule(p)) // sum + && + (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' + && + (b = term_rule(p)) // term + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (a, new $B.ast.Sub(), b, EXTRA); + break; + } + p.mark = _mark; } - var term_var; - if ( - (term_var = term_rule(p)) // term - ) - { - _res = term_var; - return done(); + { // term + if (p.error_indicator) { + return NULL; + } + var term_var; + if ( + (term_var = term_rule(p)) // term + ) + { + _res = term_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // Left-recursive @@ -9419,163 +9550,164 @@ function term_raw(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // term '*' factor - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = term_rule(p)) // term - && - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (b = factor_rule(p)) // factor - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // term '*' factor + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (a, new $B.ast.Mult(), b, EXTRA); - return done(); + var _literal; + var a; 
+ var b; + if ( + (a = term_rule(p)) // term + && + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (b = factor_rule(p)) // factor + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (a, new $B.ast.Mult(), b, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // term '/' factor - if (p.error_indicator) { - return NULL; + { // term '/' factor + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + if ( + (a = term_rule(p)) // term + && + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + && + (b = factor_rule(p)) // factor + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (a, new $B.ast.Div(), b, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var a; - var b; - if ( - (a = term_rule(p)) // term - && - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - && - (b = factor_rule(p)) // factor - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // term '//' factor + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (a, new $B.ast.Div(), b, EXTRA); - return done(); + var _literal; + var a; + var b; + if ( + (a = term_rule(p)) // term + && + (_literal = $B._PyPegen.expect_token(p, 47)) // token='//' + && + (b = factor_rule(p)) // factor + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (a, new $B.ast.FloorDiv(), b, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // term '//' factor - if (p.error_indicator) { - return NULL; + { // term '%' factor + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + if ( + (a = term_rule(p)) // term + && + (_literal = $B._PyPegen.expect_token(p, 24)) // token='%' + && + (b = factor_rule(p)) // factor + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (a, new $B.ast.Mod(), b, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var a; - var b; - if ( - (a = term_rule(p)) // term - && - (_literal = $B._PyPegen.expect_token(p, 47)) // token='//' - && - (b = factor_rule(p)) // factor - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // term '@' factor + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (a, new $B.ast.FloorDiv(), b, EXTRA); - return done(); + var _literal; + var a; + var b; + if ( + (a = term_rule(p)) // term + && + (_literal = $B._PyPegen.expect_token(p, 49)) // token='@' + && + (b = factor_rule(p)) // factor + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = 
$B.helper_functions.CHECK_VERSION ($B.ast.expr, 5, "The '@' operator is", new $B._PyAST.BinOp ( a, new $B.ast.MatMult(), b, EXTRA )); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // term '%' factor - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var b; - if ( - (a = term_rule(p)) // term - && - (_literal = $B._PyPegen.expect_token(p, 24)) // token='%' - && - (b = factor_rule(p)) // factor - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // factor + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (a, new $B.ast.Mod(), b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // term '@' factor - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var b; - if ( - (a = term_rule(p)) // term - && - (_literal = $B._PyPegen.expect_token(p, 49)) // token='@' - && - (b = factor_rule(p)) // factor - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { - return NULL; + var factor_var; + if ( + (factor_var = factor_rule(p)) // factor + ) + { + _res = factor_var; + break; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.expr, 5, "The '@' operator is", new $B._PyAST.BinOp ( a, new $B.ast.MatMult(), b, EXTRA )); - return done(); - } - p.mark = _mark; - } - { // factor - if (p.error_indicator) { - return NULL; + p.mark = _mark; } - var factor_var; - if ( - (factor_var = factor_rule(p)) // factor - ) - { - _res = factor_var; - return done(); - } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // factor: '+' factor | '-' factor | '~' factor | power @@ -9584,107 +9716,108 @@ function factor_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, factor_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '+' factor - if (p.error_indicator) { + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, factor_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 14)) // token='+' - && - (a = factor_rule(p)) // factor - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '+' factor + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.UnaryOp (new $B.ast.UAdd(), a, EXTRA); - return done(); - } - p.mark = _mark; - } - { // '-' factor - if (p.error_indicator) { - return NULL; + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 14)) // token='+' + && + (a = factor_rule(p)) // factor + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = 
_token.end_col_offset; + _res = new $B._PyAST.UnaryOp (new $B.ast.UAdd(), a, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' - && - (a = factor_rule(p)) // factor - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '-' factor + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.UnaryOp (new $B.ast.USub(), a, EXTRA); - return done(); - } - p.mark = _mark; - } - { // '~' factor - if (p.error_indicator) { - return NULL; + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' + && + (a = factor_rule(p)) // factor + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.UnaryOp (new $B.ast.USub(), a, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 31)) // token='~' - && - (a = factor_rule(p)) // factor - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '~' factor + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.UnaryOp (new $B.ast.Invert(), a, EXTRA); - return done(); - } - p.mark = _mark; - } - { // power - if (p.error_indicator) { - return NULL; + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 31)) // token='~' + && + (a = factor_rule(p)) // factor + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.UnaryOp (new $B.ast.Invert(), a, EXTRA); + break; + } + p.mark = _mark; } - var power_var; - if ( - (power_var = power_rule(p)) // power - ) - { - _res = power_var; - return done(); + { // power + if (p.error_indicator) { + return NULL; + } + var power_var; + if ( + (power_var = power_rule(p)) // power + ) + { + _res = power_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, factor_type, _res); return _res; - } } // power: await_primary '**' factor | await_primary @@ -9693,59 +9826,60 @@ function power_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // await_primary '**' factor - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = await_primary_rule(p)) // await_primary - && - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (b = factor_rule(p)) // factor - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // await_primary '**' factor + if (p.error_indicator) { return NULL; } - 
EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.BinOp (a, new $B.ast.Pow(), b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // await_primary - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( + (a = await_primary_rule(p)) // await_primary + && + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (b = factor_rule(p)) // factor + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.BinOp (a, new $B.ast.Pow(), b, EXTRA); + break; + } + p.mark = _mark; } - var await_primary_var; - if ( - (await_primary_var = await_primary_rule(p)) // await_primary - ) - { - _res = await_primary_var; - return done(); + { // await_primary + if (p.error_indicator) { + return NULL; + } + var await_primary_var; + if ( + (await_primary_var = await_primary_rule(p)) // await_primary + ) + { + _res = await_primary_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // await_primary: AWAIT primary | primary @@ -9754,61 +9888,62 @@ function await_primary_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, await_primary_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // AWAIT primary - if (p.error_indicator) { + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, await_primary_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - var await_var; - if ( - (await_var = $B._PyPegen.expect_token(p, AWAIT)) // token='AWAIT' - && - (a = primary_rule(p)) // primary - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // AWAIT primary + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B.helper_functions.CHECK_VERSION ($B.ast.expr, 5, "Await expressions are", new $B._PyAST.Await ( a, EXTRA )); - return done(); - } - p.mark = _mark; - } - { // primary - if (p.error_indicator) { - return NULL; + var a; + var await_var; + if ( + (await_var = $B._PyPegen.expect_token(p, AWAIT)) // token='AWAIT' + && + (a = primary_rule(p)) // primary + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B.helper_functions.CHECK_VERSION ($B.ast.expr, 5, "Await expressions are", new $B._PyAST.Await ( a, EXTRA )); + break; + } + p.mark = _mark; } - var primary_var; - if ( - (primary_var = primary_rule(p)) // primary - ) - { - _res = primary_var; - return done(); + { // primary + if (p.error_indicator) { + return NULL; + } + var primary_var; + if ( + (primary_var = primary_rule(p)) // primary + ) + { + _res = primary_var; + break; + } + p.mark = _mark; } - 
p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, await_primary_type, _res); return _res; - } } // Left-recursive @@ -9851,140 +9986,141 @@ function primary_raw(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // primary '.' NAME - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = primary_rule(p)) // primary - && - (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' - && - (b = $B._PyPegen.name_token(p)) // NAME - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // primary '.' NAME + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Attribute (a, b. id, $B.parser_constants.Load, EXTRA); - return done(); - } - p.mark = _mark; - } - { // primary genexp - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( + (a = primary_rule(p)) // primary + && + (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' + && + (b = $B._PyPegen.name_token(p)) // NAME + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Attribute (a, b. id, $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - var a; - var b; - if ( - (a = primary_rule(p)) // primary - && - (b = genexp_rule(p)) // genexp - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // primary genexp + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Call (a, $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.singleton_seq ( p, b ) ), $B.parser_constants.NULL, EXTRA); - return done(); - } - p.mark = _mark; - } - { // primary '(' arguments? ')' - if (p.error_indicator) { - return NULL; + var a; + var b; + if ( + (a = primary_rule(p)) // primary + && + (b = genexp_rule(p)) // genexp + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Call (a, $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.singleton_seq ( p, b ) ), $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - var b; - if ( - (a = primary_rule(p)) // primary - && - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (b = arguments_rule(p), !p.error_indicator) // arguments? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // primary '(' arguments? 
')' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Call (a, ( b ) ? ( b ). args : $B.parser_constants.NULL, ( b ) ? ( b ). keywords : $B.parser_constants.NULL, EXTRA); - return done(); - } - p.mark = _mark; - } - { // primary '[' slices ']' - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var a; + var b; + if ( + (a = primary_rule(p)) // primary + && + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (b = arguments_rule(p), !p.error_indicator) // arguments? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Call (a, ( b ) ? ( b ). args : $B.parser_constants.NULL, ( b ) ? ( b ). keywords : $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - var b; - if ( - (a = primary_rule(p)) // primary - && - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - && - (b = slices_rule(p)) // slices - && - (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // primary '[' slices ']' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Subscript (a, b, $B.parser_constants.Load, EXTRA); - return done(); - } - p.mark = _mark; - } - { // atom - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var a; + var b; + if ( + (a = primary_rule(p)) // primary + && + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + && + (b = slices_rule(p)) // slices + && + (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Subscript (a, b, $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - var atom_var; - if ( - (atom_var = atom_rule(p)) // atom - ) - { - _res = atom_var; - return done(); + { // atom + if (p.error_indicator) { + return NULL; + } + var atom_var; + if ( + (atom_var = atom_rule(p)) // atom + ) + { + _res = atom_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // slices: slice !',' | ','.(slice | starred_expression)+ ','? @@ -9993,59 +10129,60 @@ function slices_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // slice !',' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - if ( - (a = slice_rule(p)) // slice - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 12) // token=',' - ) - { - _res = a; - return done(); - } - p.mark = _mark; - } - { // ','.(slice | starred_expression)+ ','? 
- if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // slice !',' + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = slice_rule(p)) // slice + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 12) // token=',' + ) + { + _res = a; + break; + } + p.mark = _mark; } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - if ( - (a = _gather_90_rule(p)) // ','.(slice | starred_expression)+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // ','.(slice | starred_expression)+ ','? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Tuple (a, $B.parser_constants.Load, EXTRA); - return done(); + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + if ( + (a = _gather_90_rule(p)) // ','.(slice | starred_expression)+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Tuple (a, $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // slice: expression? ':' expression? [':' expression?] | named_expression @@ -10054,62 +10191,63 @@ function slice_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // expression? ':' expression? [':' expression?] - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - var c; - if ( - (a = expression_rule(p), !p.error_indicator) // expression? - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = expression_rule(p), !p.error_indicator) // expression? - && - (c = _tmp_92_rule(p), !p.error_indicator) // [':' expression?] - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // expression? ':' expression? [':' expression?] + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Slice (a, b, c, EXTRA); - return done(); - } - p.mark = _mark; - } - { // named_expression - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + var c; + if ( + (a = expression_rule(p), !p.error_indicator) // expression? + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = expression_rule(p), !p.error_indicator) // expression? + && + (c = _tmp_92_rule(p), !p.error_indicator) // [':' expression?] 
+ ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Slice (a, b, c, EXTRA); + break; + } + p.mark = _mark; } - var a; - if ( - (a = named_expression_rule(p)) // named_expression - ) - { - _res = a; - return done(); + { // named_expression + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = named_expression_rule(p)) // named_expression + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // atom: @@ -10128,191 +10266,192 @@ function atom_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // NAME - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var name_var; - if ( - (name_var = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = name_var; - return done(); + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // NAME + if (p.error_indicator) { + return NULL; + } + var name_var; + if ( + (name_var = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = name_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'True' - if (p.error_indicator) { - return NULL; + { // 'True' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 601)) // token='True' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Constant ($B.parser_constants.Py_True, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 601)) // token='True' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'False' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Constant ($B.parser_constants.Py_True, $B.parser_constants.NULL, EXTRA); - return done(); + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 603)) // token='False' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Constant ($B.parser_constants.Py_False, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'False' - if (p.error_indicator) { - return NULL; - } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 603)) // token='False' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // 'None' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Constant ($B.parser_constants.Py_False, $B.parser_constants.NULL, EXTRA); - return 
done(); - } - p.mark = _mark; - } - { // 'None' - if (p.error_indicator) { - return NULL; + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 602)) // token='None' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Constant ($B.parser_constants.Py_None, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 602)) // token='None' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // &(STRING | FSTRING_START) strings + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Constant ($B.parser_constants.Py_None, $B.parser_constants.NULL, EXTRA); - return done(); - } - p.mark = _mark; - } - { // &(STRING | FSTRING_START) strings - if (p.error_indicator) { - return NULL; - } - var strings_var; - if ( - $B._PyPegen.lookahead(1, _tmp_93_rule, p) - && - (strings_var = strings_rule(p)) // strings - ) - { - _res = strings_var; - return done(); - } - p.mark = _mark; - } - { // NUMBER - if (p.error_indicator) { - return NULL; - } - var number_var; - if ( - (number_var = $B._PyPegen.number_token(p)) // NUMBER - ) - { - _res = number_var; - return done(); - } - p.mark = _mark; - } - { // &'(' (tuple | group | genexp) - if (p.error_indicator) { - return NULL; - } - var _tmp_94_var; - if ( - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 7) // token='(' - && - (_tmp_94_var = _tmp_94_rule(p)) // tuple | group | genexp - ) - { - _res = _tmp_94_var; - return done(); - } - p.mark = _mark; - } - { // &'[' (list | listcomp) - if (p.error_indicator) { - return NULL; + var strings_var; + if ( + $B._PyPegen.lookahead(1, _tmp_93_rule, p) + && + (strings_var = strings_rule(p)) // strings + ) + { + _res = strings_var; + break; + } + p.mark = _mark; } - var _tmp_95_var; - if ( - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 9) // token='[' - && - (_tmp_95_var = _tmp_95_rule(p)) // list | listcomp - ) - { - _res = _tmp_95_var; - return done(); + { // NUMBER + if (p.error_indicator) { + return NULL; + } + var number_var; + if ( + (number_var = $B._PyPegen.number_token(p)) // NUMBER + ) + { + _res = number_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // &'{' (dict | set | dictcomp | setcomp) - if (p.error_indicator) { - return NULL; + { // &'(' (tuple | group | genexp) + if (p.error_indicator) { + return NULL; + } + var _tmp_94_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 7) // token='(' + && + (_tmp_94_var = _tmp_94_rule(p)) // tuple | group | genexp + ) + { + _res = _tmp_94_var; + break; + } + p.mark = _mark; } - var _tmp_96_var; - if ( - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 25) // token='{' - && - (_tmp_96_var = _tmp_96_rule(p)) // dict | set | dictcomp | setcomp - ) - { - _res = _tmp_96_var; - return done(); + { // &'[' (list | listcomp) + if (p.error_indicator) { + return NULL; + } + var _tmp_95_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 9) // token='[' + && + (_tmp_95_var = _tmp_95_rule(p)) // list | listcomp + ) + { + _res = _tmp_95_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '...' 
- if (p.error_indicator) { - return NULL; + { // &'{' (dict | set | dictcomp | setcomp) + if (p.error_indicator) { + return NULL; + } + var _tmp_96_var; + if ( + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 25) // token='{' + && + (_tmp_96_var = _tmp_96_rule(p)) // dict | set | dictcomp | setcomp + ) + { + _res = _tmp_96_var; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 52)) // token='...' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '...' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Constant ($B.parser_constants.Py_Ellipsis, $B.parser_constants.NULL, EXTRA); - return done(); + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 52)) // token='...' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Constant ($B.parser_constants.Py_Ellipsis, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // group: '(' (yield_expr | named_expression) ')' | invalid_group @@ -10321,46 +10460,47 @@ function group_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '(' (yield_expr | named_expression) ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = _tmp_97_rule(p)) // yield_expr | named_expression - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = a; - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_group - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '(' (yield_expr | named_expression) ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = _tmp_97_rule(p)) // yield_expr | named_expression + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = a; + break; + } + p.mark = _mark; } - var invalid_group_var; - if ( - (invalid_group_var = invalid_group_rule(p)) // invalid_group - ) - { - _res = invalid_group_var; - return done(); + if (p.call_invalid_rules) { // invalid_group + if (p.error_indicator) { + return NULL; + } + var invalid_group_var; + if ( + (invalid_group_var = invalid_group_rule(p)) // invalid_group + ) + { + _res = invalid_group_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // lambdef: 'lambda' lambda_params? ':' expression @@ -10369,48 +10509,49 @@ function lambdef_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // 'lambda' lambda_params? 
':' expression - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _keyword; - var _literal; - var a; - var b; - if ( - (_keyword = $B._PyPegen.expect_token(p, 600)) // token='lambda' - && - (a = lambda_params_rule(p), !p.error_indicator) // lambda_params? - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = expression_rule(p)) // expression - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // 'lambda' lambda_params? ':' expression + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Lambda (( a ) ? a : $B.helper_functions.CHECK ( $B.ast.arguments, $B._PyPegen.empty_arguments ( p ) ), b, EXTRA); - return done(); + var _keyword; + var _literal; + var a; + var b; + if ( + (_keyword = $B._PyPegen.expect_token(p, 600)) // token='lambda' + && + (a = lambda_params_rule(p), !p.error_indicator) // lambda_params? + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = expression_rule(p)) // expression + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Lambda (( a ) ? a : $B.helper_functions.CHECK ( $B.ast.arguments, $B._PyPegen.empty_arguments ( p ) ), b, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // lambda_params: invalid_lambda_parameters | lambda_parameters @@ -10419,40 +10560,41 @@ function lambda_params_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.call_invalid_rules) { // invalid_lambda_parameters - if (p.error_indicator) { - return NULL; - } - var invalid_lambda_parameters_var; - if ( - (invalid_lambda_parameters_var = invalid_lambda_parameters_rule(p)) // invalid_lambda_parameters - ) - { - _res = invalid_lambda_parameters_var; - return done(); - } - p.mark = _mark; - } - { // lambda_parameters - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.call_invalid_rules) { // invalid_lambda_parameters + if (p.error_indicator) { + return NULL; + } + var invalid_lambda_parameters_var; + if ( + (invalid_lambda_parameters_var = invalid_lambda_parameters_rule(p)) // invalid_lambda_parameters + ) + { + _res = invalid_lambda_parameters_var; + break; + } + p.mark = _mark; } - var lambda_parameters_var; - if ( - (lambda_parameters_var = lambda_parameters_rule(p)) // lambda_parameters - ) - { - _res = lambda_parameters_var; - return done(); + { // lambda_parameters + if (p.error_indicator) { + return NULL; + } + var lambda_parameters_var; + if ( + (lambda_parameters_var = lambda_parameters_rule(p)) // lambda_parameters + ) + { + _res = lambda_parameters_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // lambda_parameters: @@ -10466,106 +10608,107 @@ function lambda_parameters_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // lambda_slash_no_default lambda_param_no_default* 
lambda_param_with_default* lambda_star_etc? - if (p.error_indicator) { - return NULL; - } - var a; - var b; - var c; - var d; - if ( - (a = lambda_slash_no_default_rule(p)) // lambda_slash_no_default - && - (b = _loop0_98_rule(p)) // lambda_param_no_default* - && - (c = _loop0_99_rule(p)) // lambda_param_with_default* - && - (d = lambda_star_etc_rule(p), !p.error_indicator) // lambda_star_etc? - ) - { - _res = $B.helper_functions.CHECK_VERSION ($B.ast.arguments, 8, "Positional-only parameters are", $B._PyPegen.make_arguments ( p, a, $B.parser_constants.NULL, b, c, d )); - return done(); - } - p.mark = _mark; - } - { // lambda_slash_with_default lambda_param_with_default* lambda_star_etc? - if (p.error_indicator) { - return NULL; - } - var a; - var b; - var c; - if ( - (a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default - && - (b = _loop0_100_rule(p)) // lambda_param_with_default* - && - (c = lambda_star_etc_rule(p), !p.error_indicator) // lambda_star_etc? - ) - { - _res = $B.helper_functions.CHECK_VERSION ($B.ast.arguments, 8, "Positional-only parameters are", $B._PyPegen.make_arguments ( p, $B.parser_constants.NULL, a, $B.parser_constants.NULL, b, c )); - return done(); - } - p.mark = _mark; - } - { // lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? - if (p.error_indicator) { - return NULL; - } - var a; - var b; - var c; - if ( - (a = _loop1_101_rule(p)) // lambda_param_no_default+ - && - (b = _loop0_102_rule(p)) // lambda_param_with_default* - && - (c = lambda_star_etc_rule(p), !p.error_indicator) // lambda_star_etc? - ) - { - _res = $B._PyPegen.make_arguments (p, $B.parser_constants.NULL, $B.parser_constants.NULL, a, b, c); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? + if (p.error_indicator) { + return NULL; + } + var a; + var b; + var c; + var d; + if ( + (a = lambda_slash_no_default_rule(p)) // lambda_slash_no_default + && + (b = _loop0_98_rule(p)) // lambda_param_no_default* + && + (c = _loop0_99_rule(p)) // lambda_param_with_default* + && + (d = lambda_star_etc_rule(p), !p.error_indicator) // lambda_star_etc? + ) + { + _res = $B.helper_functions.CHECK_VERSION ($B.ast.arguments, 8, "Positional-only parameters are", $B._PyPegen.make_arguments ( p, a, $B.parser_constants.NULL, b, c, d )); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // lambda_param_with_default+ lambda_star_etc? - if (p.error_indicator) { - return NULL; + { // lambda_slash_with_default lambda_param_with_default* lambda_star_etc? + if (p.error_indicator) { + return NULL; + } + var a; + var b; + var c; + if ( + (a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default + && + (b = _loop0_100_rule(p)) // lambda_param_with_default* + && + (c = lambda_star_etc_rule(p), !p.error_indicator) // lambda_star_etc? + ) + { + _res = $B.helper_functions.CHECK_VERSION ($B.ast.arguments, 8, "Positional-only parameters are", $B._PyPegen.make_arguments ( p, $B.parser_constants.NULL, a, $B.parser_constants.NULL, b, c )); + break; + } + p.mark = _mark; } - var a; - var b; - if ( - (a = _loop1_103_rule(p)) // lambda_param_with_default+ - && - (b = lambda_star_etc_rule(p), !p.error_indicator) // lambda_star_etc? - ) - { - _res = $B._PyPegen.make_arguments (p, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, a, b); - return done(); + { // lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? 
+ if (p.error_indicator) { + return NULL; + } + var a; + var b; + var c; + if ( + (a = _loop1_101_rule(p)) // lambda_param_no_default+ + && + (b = _loop0_102_rule(p)) // lambda_param_with_default* + && + (c = lambda_star_etc_rule(p), !p.error_indicator) // lambda_star_etc? + ) + { + _res = $B._PyPegen.make_arguments (p, $B.parser_constants.NULL, $B.parser_constants.NULL, a, b, c); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // lambda_star_etc - if (p.error_indicator) { - return NULL; + { // lambda_param_with_default+ lambda_star_etc? + if (p.error_indicator) { + return NULL; + } + var a; + var b; + if ( + (a = _loop1_103_rule(p)) // lambda_param_with_default+ + && + (b = lambda_star_etc_rule(p), !p.error_indicator) // lambda_star_etc? + ) + { + _res = $B._PyPegen.make_arguments (p, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, a, b); + break; + } + p.mark = _mark; } - var a; - if ( - (a = lambda_star_etc_rule(p)) // lambda_star_etc - ) - { - _res = $B._PyPegen.make_arguments (p, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, a); - return done(); + { // lambda_star_etc + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = lambda_star_etc_rule(p)) // lambda_star_etc + ) + { + _res = $B._PyPegen.make_arguments (p, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, $B.parser_constants.NULL, a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // lambda_slash_no_default: @@ -10576,51 +10719,52 @@ function lambda_slash_no_default_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // lambda_param_no_default+ '/' ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var a; - if ( - (a = _loop1_104_rule(p)) // lambda_param_no_default+ - && - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = a; - return done(); - } - p.mark = _mark; - } - { // lambda_param_no_default+ '/' &':' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // lambda_param_no_default+ '/' ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + if ( + (a = _loop1_104_rule(p)) // lambda_param_no_default+ + && + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = a; + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (a = _loop1_105_rule(p)) // lambda_param_no_default+ - && - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 11) // token=':' - ) - { - _res = a; - return done(); + { // lambda_param_no_default+ '/' &':' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (a = _loop1_105_rule(p)) // lambda_param_no_default+ + && + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 11) // token=':' + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // lambda_slash_with_default: @@ -10631,57 +10775,58 @@ function lambda_slash_with_default_rule(p) if (p.error_indicator) { return NULL; } 
- var _res = NULL; - var _mark = p.mark; - { // lambda_param_no_default* lambda_param_with_default+ '/' ',' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // lambda_param_no_default* lambda_param_with_default+ '/' ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + var b; + if ( + (a = _loop0_106_rule(p)) // lambda_param_no_default* + && + (b = _loop1_107_rule(p)) // lambda_param_with_default+ + && + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = $B._PyPegen.slash_with_default (p, a, b); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - var b; - if ( - (a = _loop0_106_rule(p)) // lambda_param_no_default* - && - (b = _loop1_107_rule(p)) // lambda_param_with_default+ - && - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = $B._PyPegen.slash_with_default (p, a, b); - return done(); + { // lambda_param_no_default* lambda_param_with_default+ '/' &':' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + if ( + (a = _loop0_108_rule(p)) // lambda_param_no_default* + && + (b = _loop1_109_rule(p)) // lambda_param_with_default+ + && + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 11) // token=':' + ) + { + _res = $B._PyPegen.slash_with_default (p, a, b); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - { // lambda_param_no_default* lambda_param_with_default+ '/' &':' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var b; - if ( - (a = _loop0_108_rule(p)) // lambda_param_no_default* - && - (b = _loop1_109_rule(p)) // lambda_param_with_default+ - && - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 11) // token=':' - ) - { - _res = $B._PyPegen.slash_with_default (p, a, b); - return done(); - } - p.mark = _mark; - } - _res = NULL; - function done(){ return _res; - } } // lambda_star_etc: @@ -10694,86 +10839,87 @@ function lambda_star_etc_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.call_invalid_rules) { // invalid_lambda_star_etc - if (p.error_indicator) { - return NULL; - } - var invalid_lambda_star_etc_var; - if ( - (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) // invalid_lambda_star_etc - ) - { - _res = invalid_lambda_star_etc_var; - return done(); - } - p.mark = _mark; - } - { // '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var b; - var c; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (a = lambda_param_no_default_rule(p)) // lambda_param_no_default - && - (b = _loop0_110_rule(p)) // lambda_param_maybe_default* - && - (c = lambda_kwds_rule(p), !p.error_indicator) // lambda_kwds? - ) - { - _res = $B._PyPegen.star_etc (p, a, b, c); - return done(); - } - p.mark = _mark; - } - { // '*' ',' lambda_param_maybe_default+ lambda_kwds? 
- if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.call_invalid_rules) { // invalid_lambda_star_etc + if (p.error_indicator) { + return NULL; + } + var invalid_lambda_star_etc_var; + if ( + (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) // invalid_lambda_star_etc + ) + { + _res = invalid_lambda_star_etc_var; + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var b; - var c; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (b = _loop1_111_rule(p)) // lambda_param_maybe_default+ - && - (c = lambda_kwds_rule(p), !p.error_indicator) // lambda_kwds? - ) - { - _res = $B._PyPegen.star_etc (p, $B.parser_constants.NULL, b, c); - return done(); + { // '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + var c; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (a = lambda_param_no_default_rule(p)) // lambda_param_no_default + && + (b = _loop0_110_rule(p)) // lambda_param_maybe_default* + && + (c = lambda_kwds_rule(p), !p.error_indicator) // lambda_kwds? + ) + { + _res = $B._PyPegen.star_etc (p, a, b, c); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // lambda_kwds - if (p.error_indicator) { - return NULL; + { // '*' ',' lambda_param_maybe_default+ lambda_kwds? + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var b; + var c; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (b = _loop1_111_rule(p)) // lambda_param_maybe_default+ + && + (c = lambda_kwds_rule(p), !p.error_indicator) // lambda_kwds? 
+ ) + { + _res = $B._PyPegen.star_etc (p, $B.parser_constants.NULL, b, c); + break; + } + p.mark = _mark; } - var a; - if ( - (a = lambda_kwds_rule(p)) // lambda_kwds - ) - { - _res = $B._PyPegen.star_etc (p, $B.parser_constants.NULL, $B.parser_constants.NULL, a); - return done(); + { // lambda_kwds + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = lambda_kwds_rule(p)) // lambda_kwds + ) + { + _res = $B._PyPegen.star_etc (p, $B.parser_constants.NULL, $B.parser_constants.NULL, a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // lambda_kwds: invalid_lambda_kwds | '**' lambda_param_no_default @@ -10782,43 +10928,44 @@ function lambda_kwds_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.call_invalid_rules) { // invalid_lambda_kwds - if (p.error_indicator) { - return NULL; - } - var invalid_lambda_kwds_var; - if ( - (invalid_lambda_kwds_var = invalid_lambda_kwds_rule(p)) // invalid_lambda_kwds - ) - { - _res = invalid_lambda_kwds_var; - return done(); - } - p.mark = _mark; - } - { // '**' lambda_param_no_default - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.call_invalid_rules) { // invalid_lambda_kwds + if (p.error_indicator) { + return NULL; + } + var invalid_lambda_kwds_var; + if ( + (invalid_lambda_kwds_var = invalid_lambda_kwds_rule(p)) // invalid_lambda_kwds + ) + { + _res = invalid_lambda_kwds_var; + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (a = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = a; - return done(); + { // '**' lambda_param_no_default + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (a = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // lambda_param_no_default: lambda_param ',' | lambda_param &':' @@ -10827,45 +10974,46 @@ function lambda_param_no_default_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // lambda_param ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (a = lambda_param_rule(p)) // lambda_param - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = a; - return done(); - } - p.mark = _mark; - } - { // lambda_param &':' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // lambda_param ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (a = lambda_param_rule(p)) // lambda_param + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = a; + break; + } + p.mark = _mark; } - var a; - if ( - (a = lambda_param_rule(p)) // lambda_param - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 11) // token=':' - ) - { - _res = a; - return done(); + { // lambda_param &':' + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = lambda_param_rule(p)) // lambda_param + && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 11) // token=':' + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = 
NULL; - function done(){ return _res; - } } // lambda_param_with_default: lambda_param default ',' | lambda_param default &':' @@ -10874,51 +11022,52 @@ function lambda_param_with_default_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // lambda_param default ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var c; - if ( - (a = lambda_param_rule(p)) // lambda_param - && - (c = default_rule(p)) // default - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = $B._PyPegen.name_default_pair (p, a, c, $B.parser_constants.NULL); - return done(); - } - p.mark = _mark; - } - { // lambda_param default &':' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // lambda_param default ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var c; + if ( + (a = lambda_param_rule(p)) // lambda_param + && + (c = default_rule(p)) // default + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = $B._PyPegen.name_default_pair (p, a, c, $B.parser_constants.NULL); + break; + } + p.mark = _mark; } - var a; - var c; - if ( - (a = lambda_param_rule(p)) // lambda_param - && - (c = default_rule(p)) // default - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 11) // token=':' - ) - { - _res = $B._PyPegen.name_default_pair (p, a, c, $B.parser_constants.NULL); - return done(); + { // lambda_param default &':' + if (p.error_indicator) { + return NULL; + } + var a; + var c; + if ( + (a = lambda_param_rule(p)) // lambda_param + && + (c = default_rule(p)) // default + && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 11) // token=':' + ) + { + _res = $B._PyPegen.name_default_pair (p, a, c, $B.parser_constants.NULL); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // lambda_param_maybe_default: lambda_param default? ',' | lambda_param default? &':' @@ -10927,51 +11076,52 @@ function lambda_param_maybe_default_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // lambda_param default? ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var c; - if ( - (a = lambda_param_rule(p)) // lambda_param - && - (c = default_rule(p), !p.error_indicator) // default? - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = $B._PyPegen.name_default_pair (p, a, c, $B.parser_constants.NULL); - return done(); - } - p.mark = _mark; - } - { // lambda_param default? &':' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // lambda_param default? ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var c; + if ( + (a = lambda_param_rule(p)) // lambda_param + && + (c = default_rule(p), !p.error_indicator) // default? + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = $B._PyPegen.name_default_pair (p, a, c, $B.parser_constants.NULL); + break; + } + p.mark = _mark; } - var a; - var c; - if ( - (a = lambda_param_rule(p)) // lambda_param - && - (c = default_rule(p), !p.error_indicator) // default? - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 11) // token=':' - ) - { - _res = $B._PyPegen.name_default_pair (p, a, c, $B.parser_constants.NULL); - return done(); + { // lambda_param default? 
&':' + if (p.error_indicator) { + return NULL; + } + var a; + var c; + if ( + (a = lambda_param_rule(p)) // lambda_param + && + (c = default_rule(p), !p.error_indicator) // default? + && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 11) // token=':' + ) + { + _res = $B._PyPegen.name_default_pair (p, a, c, $B.parser_constants.NULL); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // lambda_param: NAME @@ -10980,39 +11130,40 @@ function lambda_param_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // NAME - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // NAME + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.arg (a. id, $B.parser_constants.NULL, $B.parser_constants.NULL, EXTRA); - return done(); + var a; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.arg (a. 
id, $B.parser_constants.NULL, $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // fstring_middle: fstring_replacement_field | FSTRING_MIDDLE @@ -11021,40 +11172,41 @@ function fstring_middle_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // fstring_replacement_field - if (p.error_indicator) { - return NULL; - } - var fstring_replacement_field_var; - if ( - (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field - ) - { - _res = fstring_replacement_field_var; - return done(); - } - p.mark = _mark; - } - { // FSTRING_MIDDLE - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // fstring_replacement_field + if (p.error_indicator) { + return NULL; + } + var fstring_replacement_field_var; + if ( + (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field + ) + { + _res = fstring_replacement_field_var; + break; + } + p.mark = _mark; } - var t; - if ( - (t = $B._PyPegen.expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE' - ) - { - _res = $B._PyPegen.constant_from_token (p, t); - return done(); + { // FSTRING_MIDDLE + if (p.error_indicator) { + return NULL; + } + var t; + if ( + (t = $B._PyPegen.expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE' + ) + { + _res = $B._PyPegen.constant_from_token (p, t); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // fstring_replacement_field: @@ -11065,68 +11217,69 @@ function fstring_replacement_field_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '{' (yield_expr | star_expressions) '='? fstring_conversion? fstring_full_format_spec? '}' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var conversion; - var debug_expr; - var format; - var rbrace; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (a = _tmp_112_rule(p)) // yield_expr | star_expressions - && - (debug_expr = $B._PyPegen.expect_token(p, 22), !p.error_indicator) // '='? - && - (conversion = fstring_conversion_rule(p), !p.error_indicator) // fstring_conversion? - && - (format = fstring_full_format_spec_rule(p), !p.error_indicator) // fstring_full_format_spec? - && - (rbrace = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '{' (yield_expr | star_expressions) '='? fstring_conversion? fstring_full_format_spec? 
'}' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B._PyPegen.formatted_value (p, a, debug_expr, conversion, format, rbrace, EXTRA); - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_replacement_field - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var conversion; + var debug_expr; + var format; + var rbrace; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (a = _tmp_112_rule(p)) // yield_expr | star_expressions + && + (debug_expr = $B._PyPegen.expect_token(p, 22), !p.error_indicator) // '='? + && + (conversion = fstring_conversion_rule(p), !p.error_indicator) // fstring_conversion? + && + (format = fstring_full_format_spec_rule(p), !p.error_indicator) // fstring_full_format_spec? + && + (rbrace = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B._PyPegen.formatted_value (p, a, debug_expr, conversion, format, rbrace, EXTRA); + break; + } + p.mark = _mark; } - var invalid_replacement_field_var; - if ( - (invalid_replacement_field_var = invalid_replacement_field_rule(p)) // invalid_replacement_field - ) - { - _res = invalid_replacement_field_var; - return done(); + if (p.call_invalid_rules) { // invalid_replacement_field + if (p.error_indicator) { + return NULL; + } + var invalid_replacement_field_var; + if ( + (invalid_replacement_field_var = invalid_replacement_field_rule(p)) // invalid_replacement_field + ) + { + _res = invalid_replacement_field_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // fstring_conversion: "!" NAME @@ -11135,29 +11288,30 @@ function fstring_conversion_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // "!" NAME - if (p.error_indicator) { - return NULL; - } - var conv; - var conv_token; - if ( - (conv_token = $B._PyPegen.expect_token(p, 54)) // token='!' - && - (conv = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.check_fstring_conversion (p, conv_token, conv); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // "!" NAME + if (p.error_indicator) { + return NULL; + } + var conv; + var conv_token; + if ( + (conv_token = $B._PyPegen.expect_token(p, 54)) // token='!' 
+ && + (conv = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.check_fstring_conversion (p, conv_token, conv); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // fstring_full_format_spec: ':' fstring_format_spec* @@ -11166,42 +11320,43 @@ function fstring_full_format_spec_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // ':' fstring_format_spec* - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var colon; - var spec; - if ( - (colon = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (spec = _loop0_113_rule(p)) // fstring_format_spec* - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // ':' fstring_format_spec* + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B._PyPegen.setup_full_format_spec (p, colon, spec, EXTRA); - return done(); + var colon; + var spec; + if ( + (colon = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (spec = _loop0_113_rule(p)) // fstring_format_spec* + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B._PyPegen.setup_full_format_spec (p, colon, spec, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // fstring_format_spec: FSTRING_MIDDLE | fstring_replacement_field @@ -11210,40 +11365,41 @@ function fstring_format_spec_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // FSTRING_MIDDLE - if (p.error_indicator) { - return NULL; - } - var t; - if ( - (t = $B._PyPegen.expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE' - ) - { - _res = $B._PyPegen.decoded_constant_from_token (p, t); - return done(); - } - p.mark = _mark; - } - { // fstring_replacement_field - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // FSTRING_MIDDLE + if (p.error_indicator) { + return NULL; + } + var t; + if ( + (t = $B._PyPegen.expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE' + ) + { + _res = $B._PyPegen.decoded_constant_from_token (p, t); + break; + } + p.mark = _mark; } - var fstring_replacement_field_var; - if ( - (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field - ) - { - _res = fstring_replacement_field_var; - return done(); + { // fstring_replacement_field + if (p.error_indicator) { + return NULL; + } + var fstring_replacement_field_var; + if ( + (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field + ) + { + _res = fstring_replacement_field_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // fstring: FSTRING_START fstring_middle* 
FSTRING_END @@ -11252,32 +11408,33 @@ function fstring_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // FSTRING_START fstring_middle* FSTRING_END - if (p.error_indicator) { - return NULL; - } - var a; - var b; - var c; - if ( - (a = $B._PyPegen.expect_token(p, FSTRING_START)) // token='FSTRING_START' - && - (b = _loop0_114_rule(p)) // fstring_middle* - && - (c = $B._PyPegen.expect_token(p, FSTRING_END)) // token='FSTRING_END' - ) - { - _res = $B._PyPegen.joined_str (p, a, b, c); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // FSTRING_START fstring_middle* FSTRING_END + if (p.error_indicator) { + return NULL; + } + var a; + var b; + var c; + if ( + (a = $B._PyPegen.expect_token(p, FSTRING_START)) // token='FSTRING_START' + && + (b = _loop0_114_rule(p)) // fstring_middle* + && + (c = $B._PyPegen.expect_token(p, FSTRING_END)) // token='FSTRING_END' + ) + { + _res = $B._PyPegen.joined_str (p, a, b, c); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // string: STRING @@ -11286,26 +11443,27 @@ function string_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // STRING - if (p.error_indicator) { - return NULL; - } - var s; - if ( - (s = $B._PyPegen.string_token(p)) // STRING - ) - { - _res = $B._PyPegen.constant_from_string (p, s); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // STRING + if (p.error_indicator) { + return NULL; + } + var s; + if ( + (s = $B._PyPegen.string_token(p)) // STRING + ) + { + _res = $B._PyPegen.constant_from_string (p, s); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // strings: ((fstring | string))+ @@ -11314,44 +11472,45 @@ function strings_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, strings_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // ((fstring | string))+ - if (p.error_indicator) { + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, strings_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - if ( - (a = _loop1_115_rule(p)) // ((fstring | string))+ - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // ((fstring | string))+ + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B._PyPegen.concatenate_strings (p, a, EXTRA); - return done(); + var a; + if ( + (a = _loop1_115_rule(p)) // ((fstring | string))+ + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B._PyPegen.concatenate_strings (p, a, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function 
done(){ $B._PyPegen.insert_memo(p, _mark, strings_type, _res); return _res; - } } // list: '[' star_named_expressions? ']' @@ -11360,45 +11519,46 @@ function list_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '[' star_named_expressions? ']' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - && - (a = star_named_expressions_rule(p), !p.error_indicator) // star_named_expressions? - && - (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '[' star_named_expressions? ']' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.List (a, $B.parser_constants.Load, EXTRA); - return done(); + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + && + (a = star_named_expressions_rule(p), !p.error_indicator) // star_named_expressions? + && + (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.List (a, $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // tuple: '(' [star_named_expression ',' star_named_expressions?] ')' @@ -11407,45 +11567,46 @@ function tuple_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '(' [star_named_expression ',' star_named_expressions?] ')' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = _tmp_116_rule(p), !p.error_indicator) // [star_named_expression ',' star_named_expressions?] - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '(' [star_named_expression ',' star_named_expressions?] 
')' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Tuple (a, $B.parser_constants.Load, EXTRA); - return done(); + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = _tmp_116_rule(p), !p.error_indicator) // [star_named_expression ',' star_named_expressions?] + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Tuple (a, $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // set: '{' star_named_expressions '}' @@ -11454,45 +11615,46 @@ function set_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '{' star_named_expressions '}' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (a = star_named_expressions_rule(p)) // star_named_expressions - && - (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '{' star_named_expressions '}' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Set (a, EXTRA); - return done(); + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (a = star_named_expressions_rule(p)) // star_named_expressions + && + (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Set (a, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // dict: '{' double_starred_kvpairs? '}' | '{' invalid_double_starred_kvpairs '}' @@ -11501,65 +11663,66 @@ function dict_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '{' double_starred_kvpairs? 
'}' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (a = double_starred_kvpairs_rule(p), !p.error_indicator) // double_starred_kvpairs? - && - (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '{' double_starred_kvpairs? '}' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Dict ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_keys ( p, a ) ), $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_values ( p, a ) ), EXTRA); - return done(); - } - p.mark = _mark; - } - { // '{' invalid_double_starred_kvpairs '}' - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (a = double_starred_kvpairs_rule(p), !p.error_indicator) // double_starred_kvpairs? + && + (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Dict ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_keys ( p, a ) ), $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.get_values ( p, a ) ), EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var invalid_double_starred_kvpairs_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (invalid_double_starred_kvpairs_var = invalid_double_starred_kvpairs_rule(p)) // invalid_double_starred_kvpairs - && - (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - _res = $B._PyPegen.dummy_name(p, _literal, invalid_double_starred_kvpairs_var, _literal_1); - return done(); + { // '{' invalid_double_starred_kvpairs '}' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var invalid_double_starred_kvpairs_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (invalid_double_starred_kvpairs_var = invalid_double_starred_kvpairs_rule(p)) // invalid_double_starred_kvpairs + && + (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + _res = $B._PyPegen.dummy_name(p, _literal, invalid_double_starred_kvpairs_var, _literal_1); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // double_starred_kvpairs: ','.double_starred_kvpair+ ','? @@ -11568,30 +11731,31 @@ function double_starred_kvpairs_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.double_starred_kvpair+ ','? - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - if ( - (a = _gather_117_rule(p)) // ','.double_starred_kvpair+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? 
- ) - { - _res = a; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.double_starred_kvpair+ ','? + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + if ( + (a = _gather_117_rule(p)) // ','.double_starred_kvpair+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // double_starred_kvpair: '**' bitwise_or | kvpair @@ -11600,43 +11764,44 @@ function double_starred_kvpair_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '**' bitwise_or - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = $B._PyPegen.key_value_pair (p, $B.parser_constants.NULL, a); - return done(); - } - p.mark = _mark; - } - { // kvpair - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '**' bitwise_or + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B._PyPegen.key_value_pair (p, $B.parser_constants.NULL, a); + break; + } + p.mark = _mark; } - var kvpair_var; - if ( - (kvpair_var = kvpair_rule(p)) // kvpair - ) - { - _res = kvpair_var; - return done(); + { // kvpair + if (p.error_indicator) { + return NULL; + } + var kvpair_var; + if ( + (kvpair_var = kvpair_rule(p)) // kvpair + ) + { + _res = kvpair_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // kvpair: expression ':' expression @@ -11645,32 +11810,33 @@ function kvpair_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expression ':' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var b; - if ( - (a = expression_rule(p)) // expression - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (b = expression_rule(p)) // expression - ) - { - _res = $B._PyPegen.key_value_pair (p, a, b); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expression ':' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + if ( + (a = expression_rule(p)) // expression + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (b = expression_rule(p)) // expression + ) + { + _res = $B._PyPegen.key_value_pair (p, a, b); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // for_if_clauses: for_if_clause+ @@ -11679,26 +11845,27 @@ function for_if_clauses_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // for_if_clause+ - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = _loop1_119_rule(p)) // for_if_clause+ - ) - { - _res = a; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // for_if_clause+ + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = _loop1_119_rule(p)) // for_if_clause+ + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = 
NULL; - function done(){ return _res; - } } // for_if_clause: @@ -11710,93 +11877,94 @@ function for_if_clause_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))* - if (p.error_indicator) { - return NULL; - } - var _cut_var = 0; - var _keyword; - var _keyword_1; - var a; - var async_var; - var b; - var c; - if ( - (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' - && - (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' - && - (a = star_targets_rule(p)) // star_targets - && - (_keyword_1 = $B._PyPegen.expect_token(p, 651)) // token='in' - && - (_cut_var = 1) - && - (b = disjunction_rule(p)) // disjunction - && - (c = _loop0_120_rule(p)) // (('if' disjunction))* - ) - { - _res = $B.helper_functions.CHECK_VERSION ($B.ast.comprehension, 6, "Async comprehensions are", new $B._PyAST.comprehension ( a, b, c, 1, p.arena )); - return done(); - } - p.mark = _mark; - if (_cut_var) { - return NULL; - } - } - { // 'for' star_targets 'in' ~ disjunction (('if' disjunction))* - if (p.error_indicator) { - return NULL; - } - var _cut_var = 0; - var _keyword; - var _keyword_1; - var a; - var b; - var c; - if ( - (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' - && - (a = star_targets_rule(p)) // star_targets - && - (_keyword_1 = $B._PyPegen.expect_token(p, 651)) // token='in' - && - (_cut_var = 1) - && - (b = disjunction_rule(p)) // disjunction - && - (c = _loop0_121_rule(p)) // (('if' disjunction))* - ) - { - _res = new $B._PyAST.comprehension (a, b, c, 0, p.arena); - return done(); - } - p.mark = _mark; - if (_cut_var) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))* + if (p.error_indicator) { + return NULL; + } + var _cut_var = 0; + var _keyword; + var _keyword_1; + var a; + var async_var; + var b; + var c; + if ( + (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' + && + (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' + && + (a = star_targets_rule(p)) // star_targets + && + (_keyword_1 = $B._PyPegen.expect_token(p, 651)) // token='in' + && + (_cut_var = 1) + && + (b = disjunction_rule(p)) // disjunction + && + (c = _loop0_120_rule(p)) // (('if' disjunction))* + ) + { + _res = $B.helper_functions.CHECK_VERSION ($B.ast.comprehension, 6, "Async comprehensions are", new $B._PyAST.comprehension ( a, b, c, 1, p.arena )); + break; + } + p.mark = _mark; + if (_cut_var) { + return NULL; + } } - } - if (p.call_invalid_rules) { // invalid_for_target - if (p.error_indicator) { - return NULL; + { // 'for' star_targets 'in' ~ disjunction (('if' disjunction))* + if (p.error_indicator) { + return NULL; + } + var _cut_var = 0; + var _keyword; + var _keyword_1; + var a; + var b; + var c; + if ( + (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' + && + (a = star_targets_rule(p)) // star_targets + && + (_keyword_1 = $B._PyPegen.expect_token(p, 651)) // token='in' + && + (_cut_var = 1) + && + (b = disjunction_rule(p)) // disjunction + && + (c = _loop0_121_rule(p)) // (('if' disjunction))* + ) + { + _res = new $B._PyAST.comprehension (a, b, c, 0, p.arena); + break; + } + p.mark = _mark; + if (_cut_var) { + return NULL; + } } - var invalid_for_target_var; - if ( - (invalid_for_target_var = invalid_for_target_rule(p)) // invalid_for_target - ) - { - _res = invalid_for_target_var; - return done(); + if (p.call_invalid_rules) { // 
invalid_for_target + if (p.error_indicator) { + return NULL; + } + var invalid_for_target_var; + if ( + (invalid_for_target_var = invalid_for_target_rule(p)) // invalid_for_target + ) + { + _res = invalid_for_target_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // listcomp: '[' named_expression for_if_clauses ']' | invalid_comprehension @@ -11805,62 +11973,63 @@ function listcomp_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '[' named_expression for_if_clauses ']' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - var a; - var b; - if ( - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - && - (a = named_expression_rule(p)) // named_expression - && - (b = for_if_clauses_rule(p)) // for_if_clauses - && - (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '[' named_expression for_if_clauses ']' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.ListComp (a, b, EXTRA); - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_comprehension - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var a; + var b; + if ( + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + && + (a = named_expression_rule(p)) // named_expression + && + (b = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.ListComp (a, b, EXTRA); + break; + } + p.mark = _mark; } - var invalid_comprehension_var; - if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension - ) - { - _res = invalid_comprehension_var; - return done(); + if (p.call_invalid_rules) { // invalid_comprehension + if (p.error_indicator) { + return NULL; + } + var invalid_comprehension_var; + if ( + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension + ) + { + _res = invalid_comprehension_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // setcomp: '{' named_expression for_if_clauses '}' | invalid_comprehension @@ -11869,62 +12038,63 @@ function setcomp_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '{' named_expression for_if_clauses '}' - if (p.error_indicator) { + while (1) { + var _res = NULL; + 
var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - var a; - var b; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (a = named_expression_rule(p)) // named_expression - && - (b = for_if_clauses_rule(p)) // for_if_clauses - && - (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '{' named_expression for_if_clauses '}' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.SetComp (a, b, EXTRA); - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_comprehension - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var a; + var b; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (a = named_expression_rule(p)) // named_expression + && + (b = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.SetComp (a, b, EXTRA); + break; + } + p.mark = _mark; } - var invalid_comprehension_var; - if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension - ) - { - _res = invalid_comprehension_var; - return done(); + if (p.call_invalid_rules) { // invalid_comprehension + if (p.error_indicator) { + return NULL; + } + var invalid_comprehension_var; + if ( + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension + ) + { + _res = invalid_comprehension_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // genexp: @@ -11935,62 +12105,63 @@ function genexp_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '(' (assignment_expression | expression !':=') for_if_clauses ')' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - var a; - var b; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = _tmp_122_rule(p)) // assignment_expression | expression !':=' - && - (b = for_if_clauses_rule(p)) // for_if_clauses - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '(' (assignment_expression | expression !':=') for_if_clauses ')' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.GeneratorExp (a, b, EXTRA); - return done(); - } - p.mark = 
_mark; - } - if (p.call_invalid_rules) { // invalid_comprehension - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var a; + var b; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = _tmp_122_rule(p)) // assignment_expression | expression !':=' + && + (b = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.GeneratorExp (a, b, EXTRA); + break; + } + p.mark = _mark; } - var invalid_comprehension_var; - if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension - ) - { - _res = invalid_comprehension_var; - return done(); + if (p.call_invalid_rules) { // invalid_comprehension + if (p.error_indicator) { + return NULL; + } + var invalid_comprehension_var; + if ( + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension + ) + { + _res = invalid_comprehension_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // dictcomp: '{' kvpair for_if_clauses '}' | invalid_dict_comprehension @@ -11999,62 +12170,63 @@ function dictcomp_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '{' kvpair for_if_clauses '}' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - var a; - var b; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (a = kvpair_rule(p)) // kvpair - && - (b = for_if_clauses_rule(p)) // for_if_clauses - && - (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '{' kvpair for_if_clauses '}' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.DictComp (a.key, a.value, b, EXTRA); - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_dict_comprehension - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var a; + var b; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (a = kvpair_rule(p)) // kvpair + && + (b = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.DictComp (a.key, a.value, b, EXTRA); + break; + } + p.mark = _mark; } - var invalid_dict_comprehension_var; - if ( - (invalid_dict_comprehension_var = invalid_dict_comprehension_rule(p)) // invalid_dict_comprehension - ) - { - _res = invalid_dict_comprehension_var; - return 
done(); + if (p.call_invalid_rules) { // invalid_dict_comprehension + if (p.error_indicator) { + return NULL; + } + var invalid_dict_comprehension_var; + if ( + (invalid_dict_comprehension_var = invalid_dict_comprehension_rule(p)) // invalid_dict_comprehension + ) + { + _res = invalid_dict_comprehension_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // arguments: args ','? &')' | invalid_arguments @@ -12063,51 +12235,52 @@ function arguments_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, arguments_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - { // args ','? &')' - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - if ( - (a = args_rule(p)) // args - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' - ) - { - _res = a; - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_arguments - if (p.error_indicator) { - return NULL; + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, arguments_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + { // args ','? &')' + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + if ( + (a = args_rule(p)) // args + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, 8) // token=')' + ) + { + _res = a; + break; + } + p.mark = _mark; } - var invalid_arguments_var; - if ( - (invalid_arguments_var = invalid_arguments_rule(p)) // invalid_arguments - ) - { - _res = invalid_arguments_var; - return done(); + if (p.call_invalid_rules) { // invalid_arguments + if (p.error_indicator) { + return NULL; + } + var invalid_arguments_var; + if ( + (invalid_arguments_var = invalid_arguments_rule(p)) // invalid_arguments + ) + { + _res = invalid_arguments_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, arguments_type, _res); return _res; - } } // args: @@ -12118,62 +12291,63 @@ function args_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ [',' kwargs] - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - var b; - if ( - (a = _gather_123_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ - && - (b = _tmp_125_rule(p), !p.error_indicator) // [',' kwargs] - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ [',' kwargs] + if (p.error_indicator) { return NULL; } - 
EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B._PyPegen.collect_call_seqs (p, a, b, EXTRA); - return done(); - } - p.mark = _mark; - } - { // kwargs - if (p.error_indicator) { - return NULL; + var a; + var b; + if ( + (a = _gather_123_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ + && + (b = _tmp_125_rule(p), !p.error_indicator) // [',' kwargs] + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B._PyPegen.collect_call_seqs (p, a, b, EXTRA); + break; + } + p.mark = _mark; } - var a; - if ( - (a = kwargs_rule(p)) // kwargs - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // kwargs + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Call ($B._PyPegen.dummy_name ( p ), $B.helper_functions.CHECK_NULL_ALLOWED ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_extract_starred_exprs ( p, a ) ), $B.helper_functions.CHECK_NULL_ALLOWED ( $B.parser_constants.asdl_keyword_seq, $B._PyPegen.seq_delete_starred_exprs ( p, a ) ), EXTRA); - return done(); + var a; + if ( + (a = kwargs_rule(p)) // kwargs + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Call ($B._PyPegen.dummy_name ( p ), $B.helper_functions.CHECK_NULL_ALLOWED ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_extract_starred_exprs ( p, a ) ), $B.helper_functions.CHECK_NULL_ALLOWED ( $B.parser_constants.asdl_keyword_seq, $B._PyPegen.seq_delete_starred_exprs ( p, a ) ), EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // kwargs: @@ -12185,60 +12359,61 @@ function kwargs_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var b; - if ( - (a = _gather_126_rule(p)) // ','.kwarg_or_starred+ - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (b = _gather_128_rule(p)) // ','.kwarg_or_double_starred+ - ) - { - _res = $B._PyPegen.join_sequences (p, a, b); - return done(); - } - p.mark = _mark; - } - { // ','.kwarg_or_starred+ - if (p.error_indicator) { - return NULL; - } - var _gather_130_var; - if ( - (_gather_130_var = _gather_130_rule(p)) // ','.kwarg_or_starred+ - ) - { - _res = _gather_130_var; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + if ( + (a = _gather_126_rule(p)) // ','.kwarg_or_starred+ + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (b = _gather_128_rule(p)) // ','.kwarg_or_double_starred+ + ) + { + _res = $B._PyPegen.join_sequences (p, a, b); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // ','.kwarg_or_double_starred+ - if (p.error_indicator) { - return NULL; + { // ','.kwarg_or_starred+ + if (p.error_indicator) { + return NULL; + } + var _gather_130_var; + if ( + (_gather_130_var = 
_gather_130_rule(p)) // ','.kwarg_or_starred+ + ) + { + _res = _gather_130_var; + break; + } + p.mark = _mark; } - var _gather_132_var; - if ( - (_gather_132_var = _gather_132_rule(p)) // ','.kwarg_or_double_starred+ - ) - { - _res = _gather_132_var; - return done(); + { // ','.kwarg_or_double_starred+ + if (p.error_indicator) { + return NULL; + } + var _gather_132_var; + if ( + (_gather_132_var = _gather_132_rule(p)) // ','.kwarg_or_double_starred+ + ) + { + _res = _gather_132_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // starred_expression: invalid_starred_expression | '*' expression @@ -12247,56 +12422,57 @@ function starred_expression_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_starred_expression - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var invalid_starred_expression_var; - if ( - (invalid_starred_expression_var = invalid_starred_expression_rule(p)) // invalid_starred_expression - ) - { - _res = invalid_starred_expression_var; - return done(); - } - p.mark = _mark; - } - { // '*' expression - if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_starred_expression + if (p.error_indicator) { + return NULL; + } + var invalid_starred_expression_var; + if ( + (invalid_starred_expression_var = invalid_starred_expression_rule(p)) // invalid_starred_expression + ) + { + _res = invalid_starred_expression_var; + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (a = expression_rule(p)) // expression - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '*' expression + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Starred (a, $B.parser_constants.Load, EXTRA); - return done(); + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (a = expression_rule(p)) // expression + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Starred (a, $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // kwarg_or_starred: invalid_kwarg | NAME '=' expression | starred_expression @@ -12305,73 +12481,74 @@ function kwarg_or_starred_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_kwarg - if (p.error_indicator) { - return NULL; - } - 
var invalid_kwarg_var; - if ( - (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg - ) - { - _res = invalid_kwarg_var; - return done(); - } - p.mark = _mark; - } - { // NAME '=' expression - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (b = expression_rule(p)) // expression - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_kwarg + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B._PyPegen.keyword_or_starred (p, $B.helper_functions.CHECK ( $B.ast.keyword, new $B._PyAST.keyword ( a. id, b, EXTRA ) ), 1); - return done(); + var invalid_kwarg_var; + if ( + (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg + ) + { + _res = invalid_kwarg_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // starred_expression - if (p.error_indicator) { - return NULL; + { // NAME '=' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (b = expression_rule(p)) // expression + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B._PyPegen.keyword_or_starred (p, $B.helper_functions.CHECK ( $B.ast.keyword, new $B._PyAST.keyword ( a. 
id, b, EXTRA ) ), 1); + break; + } + p.mark = _mark; } - var a; - if ( - (a = starred_expression_rule(p)) // starred_expression - ) - { - _res = $B._PyPegen.keyword_or_starred (p, a, 0); - return done(); + { // starred_expression + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = starred_expression_rule(p)) // starred_expression + ) + { + _res = $B._PyPegen.keyword_or_starred (p, a, 0); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // kwarg_or_double_starred: invalid_kwarg | NAME '=' expression | '**' expression @@ -12380,82 +12557,83 @@ function kwarg_or_double_starred_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - if (p.call_invalid_rules) { // invalid_kwarg - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var invalid_kwarg_var; - if ( - (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg - ) - { - _res = invalid_kwarg_var; - return done(); + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + if (p.call_invalid_rules) { // invalid_kwarg + if (p.error_indicator) { + return NULL; + } + var invalid_kwarg_var; + if ( + (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg + ) + { + _res = invalid_kwarg_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // NAME '=' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var b; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (b = expression_rule(p)) // expression - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // NAME '=' expression + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B._PyPegen.keyword_or_starred (p, $B.helper_functions.CHECK ( $B.ast.keyword, new $B._PyAST.keyword ( a. id, b, EXTRA ) ), 1); - return done(); - } - p.mark = _mark; - } - { // '**' expression - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (b = expression_rule(p)) // expression + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B._PyPegen.keyword_or_starred (p, $B.helper_functions.CHECK ( $B.ast.keyword, new $B._PyAST.keyword ( a. 
id, b, EXTRA ) ), 1); + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (a = expression_rule(p)) // expression - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '**' expression + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = $B._PyPegen.keyword_or_starred (p, $B.helper_functions.CHECK ( $B.ast.keyword, new $B._PyAST.keyword ( $B.parser_constants.NULL, a, EXTRA ) ), 1); - return done(); + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (a = expression_rule(p)) // expression + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = $B._PyPegen.keyword_or_starred (p, $B.helper_functions.CHECK ( $B.ast.keyword, new $B._PyAST.keyword ( $B.parser_constants.NULL, a, EXTRA ) ), 1); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // star_targets: star_target !',' | star_target ((',' star_target))* ','? @@ -12464,62 +12642,63 @@ function star_targets_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // star_target !',' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var a; - if ( - (a = star_target_rule(p)) // star_target - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 12) // token=',' - ) - { - _res = a; - return done(); - } - p.mark = _mark; - } - { // star_target ((',' star_target))* ','? - if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // star_target !',' + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = star_target_rule(p)) // star_target + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 12) // token=',' + ) + { + _res = a; + break; + } + p.mark = _mark; } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var b; - if ( - (a = star_target_rule(p)) // star_target - && - (b = _loop0_134_rule(p)) // ((',' star_target))* - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // star_target ((',' star_target))* ','? + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Tuple ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_insert_in_front ( p, a, b ) ), $B.parser_constants.Store, EXTRA); - return done(); + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var b; + if ( + (a = star_target_rule(p)) // star_target + && + (b = _loop0_134_rule(p)) // ((',' star_target))* + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? 
+ ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Tuple ($B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.seq_insert_in_front ( p, a, b ) ), $B.parser_constants.Store, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // star_targets_list_seq: ','.star_target+ ','? @@ -12528,30 +12707,31 @@ function star_targets_list_seq_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.star_target+ ','? - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - if ( - (a = _gather_135_rule(p)) // ','.star_target+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - ) - { - _res = a; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.star_target+ ','? + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + if ( + (a = _gather_135_rule(p)) // ','.star_target+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // star_targets_tuple_seq: star_target ((',' star_target))+ ','? | star_target ',' @@ -12560,50 +12740,51 @@ function star_targets_tuple_seq_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // star_target ((',' star_target))+ ','? - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var b; - if ( - (a = star_target_rule(p)) // star_target - && - (b = _loop1_137_rule(p)) // ((',' star_target))+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - ) - { - _res = $B._PyPegen.seq_insert_in_front (p, a, b); - return done(); - } - p.mark = _mark; - } - { // star_target ',' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // star_target ((',' star_target))+ ','? + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var b; + if ( + (a = star_target_rule(p)) // star_target + && + (b = _loop1_137_rule(p)) // ((',' star_target))+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? 
+ ) + { + _res = $B._PyPegen.seq_insert_in_front (p, a, b); + break; + } + p.mark = _mark; } - var _literal; - var a; - if ( - (a = star_target_rule(p)) // star_target - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = $B._PyPegen.singleton_seq (p, a); - return done(); + { // star_target ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (a = star_target_rule(p)) // star_target + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = $B._PyPegen.singleton_seq (p, a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // star_target: '*' (!'*' star_target) | target_with_star_atom @@ -12612,61 +12793,62 @@ function star_target_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, star_target_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // '*' (!'*' star_target) - if (p.error_indicator) { + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, star_target_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (a = _tmp_138_rule(p)) // !'*' star_target - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // '*' (!'*' star_target) + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Starred ($B.helper_functions.CHECK ( $B.ast.expr, $B._PyPegen.set_expr_context ( p, a, $B.parser_constants.Store ) ), $B.parser_constants.Store, EXTRA); - return done(); - } - p.mark = _mark; - } - { // target_with_star_atom - if (p.error_indicator) { - return NULL; + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (a = _tmp_138_rule(p)) // !'*' star_target + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Starred ($B.helper_functions.CHECK ( $B.ast.expr, $B._PyPegen.set_expr_context ( p, a, $B.parser_constants.Store ) ), $B.parser_constants.Store, EXTRA); + break; + } + p.mark = _mark; } - var target_with_star_atom_var; - if ( - (target_with_star_atom_var = target_with_star_atom_rule(p)) // target_with_star_atom - ) - { - _res = target_with_star_atom_var; - return done(); + { // target_with_star_atom + if (p.error_indicator) { + return NULL; + } + var target_with_star_atom_var; + if ( + (target_with_star_atom_var = target_with_star_atom_rule(p)) // target_with_star_atom + ) + { + _res = target_with_star_atom_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, star_target_type, _res); return _res; - } } // target_with_star_atom: @@ -12678,97 +12860,98 @@ 
function target_with_star_atom_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, target_with_star_atom_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // t_primary '.' NAME !t_lookahead - if (p.error_indicator) { + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, target_with_star_atom_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' - && - (b = $B._PyPegen.name_token(p)) // NAME - && - $B._PyPegen.lookahead(0, t_lookahead_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // t_primary '.' NAME !t_lookahead + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Attribute (a, b. id, $B.parser_constants.Store, EXTRA); - return done(); - } - p.mark = _mark; - } - { // t_primary '[' slices ']' !t_lookahead - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' + && + (b = $B._PyPegen.name_token(p)) // NAME + && + $B._PyPegen.lookahead(0, t_lookahead_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Attribute (a, b. 
id, $B.parser_constants.Store, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - var b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - && - (b = slices_rule(p)) // slices - && - (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' - && - $B._PyPegen.lookahead(0, t_lookahead_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // t_primary '[' slices ']' !t_lookahead + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Subscript (a, b, $B.parser_constants.Store, EXTRA); - return done(); - } - p.mark = _mark; - } - { // star_atom - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var a; + var b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + && + (b = slices_rule(p)) // slices + && + (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' + && + $B._PyPegen.lookahead(0, t_lookahead_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Subscript (a, b, $B.parser_constants.Store, EXTRA); + break; + } + p.mark = _mark; } - var star_atom_var; - if ( - (star_atom_var = star_atom_rule(p)) // star_atom - ) - { - _res = star_atom_var; - return done(); + { // star_atom + if (p.error_indicator) { + return NULL; + } + var star_atom_var; + if ( + (star_atom_var = star_atom_rule(p)) // star_atom + ) + { + _res = star_atom_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, target_with_star_atom_type, _res); return _res; - } } // star_atom: @@ -12781,105 +12964,106 @@ function star_atom_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // NAME - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.set_expr_context (p, a, $B.parser_constants.Store); - return done(); - } - p.mark = _mark; - } - { // '(' target_with_star_atom ')' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = target_with_star_atom_rule(p)) // target_with_star_atom - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = $B._PyPegen.set_expr_context (p, a, $B.parser_constants.Store); - return done(); - } - p.mark = _mark; - } - { // '(' star_targets_tuple_seq? 
')' - if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // NAME + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.set_expr_context (p, a, $B.parser_constants.Store); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = star_targets_tuple_seq_rule(p), !p.error_indicator) // star_targets_tuple_seq? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '(' target_with_star_atom ')' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Tuple (a, $B.parser_constants.Store, EXTRA); - return done(); + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = target_with_star_atom_rule(p)) // target_with_star_atom + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = $B._PyPegen.set_expr_context (p, a, $B.parser_constants.Store); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '[' star_targets_list_seq? ']' - if (p.error_indicator) { - return NULL; + { // '(' star_targets_tuple_seq? ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = star_targets_tuple_seq_rule(p), !p.error_indicator) // star_targets_tuple_seq? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Tuple (a, $B.parser_constants.Store, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - && - (a = star_targets_list_seq_rule(p), !p.error_indicator) // star_targets_list_seq? - && - (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '[' star_targets_list_seq? ']' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.List (a, $B.parser_constants.Store, EXTRA); - return done(); + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + && + (a = star_targets_list_seq_rule(p), !p.error_indicator) // star_targets_list_seq? 
+ && + (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.List (a, $B.parser_constants.Store, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // single_target: single_subscript_attribute_target | NAME | '(' single_target ')' @@ -12888,60 +13072,61 @@ function single_target_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // single_subscript_attribute_target - if (p.error_indicator) { - return NULL; - } - var single_subscript_attribute_target_var; - if ( - (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target - ) - { - _res = single_subscript_attribute_target_var; - return done(); - } - p.mark = _mark; - } - { // NAME - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.set_expr_context (p, a, $B.parser_constants.Store); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // single_subscript_attribute_target + if (p.error_indicator) { + return NULL; + } + var single_subscript_attribute_target_var; + if ( + (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target + ) + { + _res = single_subscript_attribute_target_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '(' single_target ')' - if (p.error_indicator) { - return NULL; + { // NAME + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.set_expr_context (p, a, $B.parser_constants.Store); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = single_target_rule(p)) // single_target - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = a; - return done(); + { // '(' single_target ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = single_target_rule(p)) // single_target + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // single_subscript_attribute_target: @@ -12952,78 +13137,79 @@ function single_subscript_attribute_target_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // t_primary '.' NAME !t_lookahead - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' 
- && - (b = $B._PyPegen.name_token(p)) // NAME - && - $B._PyPegen.lookahead(0, t_lookahead_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // t_primary '.' NAME !t_lookahead + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Attribute (a, b. id, $B.parser_constants.Store, EXTRA); - return done(); + var _literal; + var a; + var b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' + && + (b = $B._PyPegen.name_token(p)) // NAME + && + $B._PyPegen.lookahead(0, t_lookahead_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Attribute (a, b. id, $B.parser_constants.Store, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // t_primary '[' slices ']' !t_lookahead - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var a; - var b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - && - (b = slices_rule(p)) // slices - && - (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' - && - $B._PyPegen.lookahead(0, t_lookahead_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // t_primary '[' slices ']' !t_lookahead + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Subscript (a, b, $B.parser_constants.Store, EXTRA); - return done(); + var _literal; + var _literal_1; + var a; + var b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + && + (b = slices_rule(p)) // slices + && + (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' + && + $B._PyPegen.lookahead(0, t_lookahead_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Subscript (a, b, $B.parser_constants.Store, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // Left-recursive @@ -13066,150 +13252,151 @@ function t_primary_raw(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // t_primary '.' NAME &t_lookahead - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' 
- && - (b = $B._PyPegen.name_token(p)) // NAME - && - $B._PyPegen.lookahead(1, t_lookahead_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // t_primary '.' NAME &t_lookahead + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Attribute (a, b. id, $B.parser_constants.Load, EXTRA); - return done(); - } - p.mark = _mark; - } - { // t_primary '[' slices ']' &t_lookahead - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' + && + (b = $B._PyPegen.name_token(p)) // NAME + && + $B._PyPegen.lookahead(1, t_lookahead_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Attribute (a, b. id, $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - var b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - && - (b = slices_rule(p)) // slices - && - (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' - && - $B._PyPegen.lookahead(1, t_lookahead_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // t_primary '[' slices ']' &t_lookahead + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Subscript (a, b, $B.parser_constants.Load, EXTRA); - return done(); - } - p.mark = _mark; - } - { // t_primary genexp &t_lookahead - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var a; + var b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + && + (b = slices_rule(p)) // slices + && + (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' + && + $B._PyPegen.lookahead(1, t_lookahead_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Subscript (a, b, $B.parser_constants.Load, EXTRA); + break; + } + p.mark = _mark; } - var a; - var b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (b = genexp_rule(p)) // genexp - && - $B._PyPegen.lookahead(1, t_lookahead_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // t_primary genexp &t_lookahead + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Call (a, $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.singleton_seq ( p, b ) ), $B.parser_constants.NULL, EXTRA); - return done(); - } - p.mark = _mark; - } - { // t_primary '(' arguments? 
')' &t_lookahead - if (p.error_indicator) { - return NULL; + var a; + var b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (b = genexp_rule(p)) // genexp + && + $B._PyPegen.lookahead(1, t_lookahead_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Call (a, $B.helper_functions.CHECK ( $B.parser_constants.asdl_expr_seq, $B._PyPegen.singleton_seq ( p, b ) ), $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - var b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (b = arguments_rule(p), !p.error_indicator) // arguments? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - && - $B._PyPegen.lookahead(1, t_lookahead_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // t_primary '(' arguments? ')' &t_lookahead + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Call (a, ( b ) ? ( b ). args : $B.parser_constants.NULL, ( b ) ? ( b ). keywords : $B.parser_constants.NULL, EXTRA); - return done(); - } - p.mark = _mark; - } - { // atom &t_lookahead - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var a; + var b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (b = arguments_rule(p), !p.error_indicator) // arguments? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + && + $B._PyPegen.lookahead(1, t_lookahead_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Call (a, ( b ) ? ( b ). args : $B.parser_constants.NULL, ( b ) ? ( b ). keywords : $B.parser_constants.NULL, EXTRA); + break; + } + p.mark = _mark; } - var a; - if ( - (a = atom_rule(p)) // atom - && - $B._PyPegen.lookahead(1, t_lookahead_rule, p) - ) - { - _res = a; - return done(); + { // atom &t_lookahead + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = atom_rule(p)) // atom + && + $B._PyPegen.lookahead(1, t_lookahead_rule, p) + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // t_lookahead: '(' | '[' | '.' @@ -13218,54 +13405,55 @@ function t_lookahead_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '(' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '[' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - ) - { - _res = _literal; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '(' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '.' 
- if (p.error_indicator) { - return NULL; + { // '[' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' - ) - { - _res = _literal; - return done(); + { // '.' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // del_targets: ','.del_target+ ','? @@ -13274,30 +13462,31 @@ function del_targets_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.del_target+ ','? - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - if ( - (a = _gather_139_rule(p)) // ','.del_target+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - ) - { - _res = a; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.del_target+ ','? + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + if ( + (a = _gather_139_rule(p)) // ','.del_target+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // del_target: @@ -13309,97 +13498,98 @@ function del_target_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, del_target_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // t_primary '.' NAME !t_lookahead - if (p.error_indicator) { + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, del_target_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var a; - var b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' - && - (b = $B._PyPegen.name_token(p)) // NAME - && - $B._PyPegen.lookahead(0, t_lookahead_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // t_primary '.' NAME !t_lookahead + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Attribute (a, b. id, $B.parser_constants.Del, EXTRA); - return done(); - } - p.mark = _mark; - } - { // t_primary '[' slices ']' !t_lookahead - if (p.error_indicator) { - return NULL; + var _literal; + var a; + var b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' 
+ && + (b = $B._PyPegen.name_token(p)) // NAME + && + $B._PyPegen.lookahead(0, t_lookahead_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Attribute (a, b. id, $B.parser_constants.Del, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - var b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - && - (b = slices_rule(p)) // slices - && - (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' - && - $B._PyPegen.lookahead(0, t_lookahead_rule, p) - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // t_primary '[' slices ']' !t_lookahead + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Subscript (a, b, $B.parser_constants.Del, EXTRA); - return done(); - } - p.mark = _mark; - } - { // del_t_atom - if (p.error_indicator) { - return NULL; + var _literal; + var _literal_1; + var a; + var b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + && + (b = slices_rule(p)) // slices + && + (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' + && + $B._PyPegen.lookahead(0, t_lookahead_rule, p) + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Subscript (a, b, $B.parser_constants.Del, EXTRA); + break; + } + p.mark = _mark; } - var del_t_atom_var; - if ( - (del_t_atom_var = del_t_atom_rule(p)) // del_t_atom - ) - { - _res = del_t_atom_var; - return done(); + { // del_t_atom + if (p.error_indicator) { + return NULL; + } + var del_t_atom_var; + if ( + (del_t_atom_var = del_t_atom_rule(p)) // del_t_atom + ) + { + _res = del_t_atom_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, del_target_type, _res); return _res; - } } // del_t_atom: NAME | '(' del_target ')' | '(' del_targets? ')' | '[' del_targets? ']' @@ -13408,105 +13598,106 @@ function del_t_atom_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // NAME - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.set_expr_context (p, a, $B.parser_constants.Del); - return done(); - } - p.mark = _mark; - } - { // '(' del_target ')' - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; return NULL; } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = del_target_rule(p)) // del_target - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = $B._PyPegen.set_expr_context (p, a, $B.parser_constants.Del); - return done(); - } - p.mark = _mark; - } - { // '(' del_targets? 
')' - if (p.error_indicator) { - return NULL; + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // NAME + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.set_expr_context (p, a, $B.parser_constants.Del); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = del_targets_rule(p), !p.error_indicator) // del_targets? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '(' del_target ')' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.Tuple (a, $B.parser_constants.Del, EXTRA); - return done(); + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = del_target_rule(p)) // del_target + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = $B._PyPegen.set_expr_context (p, a, $B.parser_constants.Del); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '[' del_targets? ']' - if (p.error_indicator) { - return NULL; + { // '(' del_targets? ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = del_targets_rule(p), !p.error_indicator) // del_targets? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.Tuple (a, $B.parser_constants.Del, EXTRA); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - && - (a = del_targets_rule(p), !p.error_indicator) // del_targets? - && - (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + { // '[' del_targets? ']' + if (p.error_indicator) { return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.List (a, $B.parser_constants.Del, EXTRA); - return done(); + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + && + (a = del_targets_rule(p), !p.error_indicator) // del_targets? 
+ && + (_literal_1 = $B._PyPegen.expect_token(p, 10)) // token=']' + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.List (a, $B.parser_constants.Del, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // type_expressions: @@ -13522,164 +13713,165 @@ function type_expressions_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.expression+ ',' '*' expression ',' '**' expression - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.expression+ ',' '*' expression ',' '**' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var _literal_2; + var _literal_3; + var a; + var b; + var c; + if ( + (a = _gather_141_rule(p)) // ','.expression+ + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (_literal_1 = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (b = expression_rule(p)) // expression + && + (_literal_2 = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (_literal_3 = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (c = expression_rule(p)) // expression + ) + { + _res = $B._PyPegen.seq_append_to_end (p, $B.helper_functions.CHECK ( asdl_seq, $B._PyPegen.seq_append_to_end ( p, a, b ) ), c); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var _literal_2; - var _literal_3; - var a; - var b; - var c; - if ( - (a = _gather_141_rule(p)) // ','.expression+ - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (_literal_1 = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (b = expression_rule(p)) // expression - && - (_literal_2 = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (_literal_3 = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (c = expression_rule(p)) // expression - ) - { - _res = $B._PyPegen.seq_append_to_end (p, $B.helper_functions.CHECK ( asdl_seq, $B._PyPegen.seq_append_to_end ( p, a, b ) ), c); - return done(); + { // ','.expression+ ',' '*' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + var b; + if ( + (a = _gather_143_rule(p)) // ','.expression+ + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (_literal_1 = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (b = expression_rule(p)) // expression + ) + { + _res = $B._PyPegen.seq_append_to_end (p, a, b); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // ','.expression+ ',' '*' expression - if (p.error_indicator) { - return NULL; + { // ','.expression+ ',' '**' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + var b; + if ( + (a = _gather_145_rule(p)) // ','.expression+ + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (_literal_1 = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (b = expression_rule(p)) // expression + ) + { + _res = $B._PyPegen.seq_append_to_end (p, a, b); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - var b; - if ( - (a = _gather_143_rule(p)) // ','.expression+ - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (_literal_1 = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (b = expression_rule(p)) // 
expression - ) - { - _res = $B._PyPegen.seq_append_to_end (p, a, b); - return done(); + { // '*' expression ',' '**' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var _literal_2; + var a; + var b; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (a = expression_rule(p)) // expression + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (_literal_2 = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (b = expression_rule(p)) // expression + ) + { + _res = $B._PyPegen.seq_append_to_end (p, $B.helper_functions.CHECK ( asdl_seq, $B._PyPegen.singleton_seq ( p, a ) ), b); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // ','.expression+ ',' '**' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var a; - var b; - if ( - (a = _gather_145_rule(p)) // ','.expression+ - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (_literal_1 = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (b = expression_rule(p)) // expression - ) - { - _res = $B._PyPegen.seq_append_to_end (p, a, b); - return done(); - } - p.mark = _mark; - } - { // '*' expression ',' '**' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var _literal_2; - var a; - var b; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (a = expression_rule(p)) // expression - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (_literal_2 = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (b = expression_rule(p)) // expression - ) - { - _res = $B._PyPegen.seq_append_to_end (p, $B.helper_functions.CHECK ( asdl_seq, $B._PyPegen.singleton_seq ( p, a ) ), b); - return done(); - } - p.mark = _mark; - } - { // '*' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (a = expression_rule(p)) // expression - ) - { - _res = $B._PyPegen.singleton_seq (p, a); - return done(); - } - p.mark = _mark; - } - { // '**' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (a = expression_rule(p)) // expression - ) - { - _res = $B._PyPegen.singleton_seq (p, a); - return done(); + { // '*' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (a = expression_rule(p)) // expression + ) + { + _res = $B._PyPegen.singleton_seq (p, a); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // ','.expression+ - if (p.error_indicator) { - return NULL; + { // '**' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (a = expression_rule(p)) // expression + ) + { + _res = $B._PyPegen.singleton_seq (p, a); + break; + } + p.mark = _mark; } - var a; - if ( - (a = _gather_147_rule(p)) // ','.expression+ - ) - { - _res = a; - return done(); + { // ','.expression+ + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = _gather_147_rule(p)) // ','.expression+ + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // func_type_comment: @@ -13691,59 +13883,60 @@ function func_type_comment_rule(p) if 
(p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // NEWLINE TYPE_COMMENT &(NEWLINE INDENT) - if (p.error_indicator) { - return NULL; - } - var newline_var; - var t; - if ( - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - (t = $B._PyPegen.expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' - && - $B._PyPegen.lookahead(1, _tmp_149_rule, p) - ) - { - _res = t; - return done(); - } - p.mark = _mark; - } - if (p.call_invalid_rules) { // invalid_double_type_comments - if (p.error_indicator) { - return NULL; - } - var invalid_double_type_comments_var; - if ( - (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) // invalid_double_type_comments - ) - { - _res = invalid_double_type_comments_var; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // NEWLINE TYPE_COMMENT &(NEWLINE INDENT) + if (p.error_indicator) { + return NULL; + } + var newline_var; + var t; + if ( + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + (t = $B._PyPegen.expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' + && + $B._PyPegen.lookahead(1, _tmp_149_rule, p) + ) + { + _res = t; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // TYPE_COMMENT - if (p.error_indicator) { - return NULL; + if (p.call_invalid_rules) { // invalid_double_type_comments + if (p.error_indicator) { + return NULL; + } + var invalid_double_type_comments_var; + if ( + (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) // invalid_double_type_comments + ) + { + _res = invalid_double_type_comments_var; + break; + } + p.mark = _mark; } - var type_comment_var; - if ( - (type_comment_var = $B._PyPegen.expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' - ) - { - _res = type_comment_var; - return done(); + { // TYPE_COMMENT + if (p.error_indicator) { + return NULL; + } + var type_comment_var; + if ( + (type_comment_var = $B._PyPegen.expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' + ) + { + _res = type_comment_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_arguments: @@ -13759,162 +13952,163 @@ function invalid_arguments_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ((','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) | kwargs) ',' '*' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _tmp_150_var; - var b; - if ( - (_tmp_150_var = _tmp_150_rule(p)) // (','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) | kwargs - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (b = $B._PyPegen.expect_token(p, 16)) // token='*' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (b, "iterable argument unpacking follows keyword argument unpacking"); - return done(); - } - p.mark = _mark; - } - { // expression for_if_clauses ',' [args | expression for_if_clauses] - if (p.error_indicator) { - return NULL; - } - var _literal; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var b; - if ( - (a = expression_rule(p)) // expression - && - (b = for_if_clauses_rule(p)) // for_if_clauses - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (_opt_var = _tmp_151_rule(p), !p.error_indicator) // [args | expression for_if_clauses] - ) - { - _res = 
$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, $B._PyPegen.get_last_comprehension_item ( PyPegen_last_item ( b, $B.ast.comprehension ) ), "Generator expression must be parenthesized"); - return done(); - } - p.mark = _mark; - } - { // NAME '=' expression for_if_clauses - if (p.error_indicator) { - return NULL; - } - var a; - var b; - var expression_var; - var for_if_clauses_var; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - && - (b = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (expression_var = expression_rule(p)) // expression - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "invalid syntax. Maybe you meant '==' or ':=' instead of '='?"); - return done(); - } - p.mark = _mark; - } - { // [(args ',')] NAME '=' &(',' | ')') - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var b; - if ( - (_opt_var = _tmp_152_rule(p), !p.error_indicator) // [(args ',')] - && - (a = $B._PyPegen.name_token(p)) // NAME - && - (b = $B._PyPegen.expect_token(p, 22)) // token='=' - && - $B._PyPegen.lookahead(1, _tmp_153_rule, p) - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "expected argument value expression"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ((','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) | kwargs) ',' '*' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _tmp_150_var; + var b; + if ( + (_tmp_150_var = _tmp_150_rule(p)) // (','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) | kwargs + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (b = $B._PyPegen.expect_token(p, 16)) // token='*' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, b, "iterable argument unpacking follows keyword argument unpacking"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // args for_if_clauses - if (p.error_indicator) { - return NULL; + { // expression for_if_clauses ',' [args | expression for_if_clauses] + if (p.error_indicator) { + return NULL; + } + var _literal; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var b; + if ( + (a = expression_rule(p)) // expression + && + (b = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (_opt_var = _tmp_151_rule(p), !p.error_indicator) // [args | expression for_if_clauses] + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, $B._PyPegen.get_last_comprehension_item ( $B.PyPegen.last_item ( b, $B.ast.comprehension ) ), "Generator expression must be parenthesized"); + break; + } + p.mark = _mark; } - var a; - var b; - if ( - (a = args_rule(p)) // args - && - (b = for_if_clauses_rule(p)) // for_if_clauses - ) - { - _res = $B._PyPegen.nonparen_genexp_in_call (p, a, b); - return done(); + { // NAME '=' expression for_if_clauses + if (p.error_indicator) { + return NULL; + } + var a; + var b; + var expression_var; + var for_if_clauses_var; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + && + (b = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (expression_var = expression_rule(p)) // expression + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "invalid syntax. 
Maybe you meant '==' or ':=' instead of '='?"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // args ',' expression for_if_clauses - if (p.error_indicator) { - return NULL; + { // [(args ',')] NAME '=' &(',' | ')') + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var b; + if ( + (_opt_var = _tmp_152_rule(p), !p.error_indicator) // [(args ',')] + && + (a = $B._PyPegen.name_token(p)) // NAME + && + (b = $B._PyPegen.expect_token(p, 22)) // token='=' + && + $B._PyPegen.lookahead(1, _tmp_153_rule, p) + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "expected argument value expression"); + break; + } + p.mark = _mark; } - var _literal; - var a; - var args_var; - var b; - if ( - (args_var = args_rule(p)) // args - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (a = expression_rule(p)) // expression - && - (b = for_if_clauses_rule(p)) // for_if_clauses - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, $B._PyPegen.get_last_comprehension_item ( PyPegen_last_item ( b, $B.ast.comprehension ) ), "Generator expression must be parenthesized"); - return done(); + { // args for_if_clauses + if (p.error_indicator) { + return NULL; + } + var a; + var b; + if ( + (a = args_rule(p)) // args + && + (b = for_if_clauses_rule(p)) // for_if_clauses + ) + { + _res = $B._PyPegen.nonparen_genexp_in_call (p, a, b); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // args ',' args - if (p.error_indicator) { - return NULL; + { // args ',' expression for_if_clauses + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var args_var; + var b; + if ( + (args_var = args_rule(p)) // args + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (a = expression_rule(p)) // expression + && + (b = for_if_clauses_rule(p)) // for_if_clauses + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, $B._PyPegen.get_last_comprehension_item ( $B.PyPegen.last_item ( b, $B.ast.comprehension ) ), "Generator expression must be parenthesized"); + break; + } + p.mark = _mark; } - var _literal; - var a; - var args_var; - if ( - (a = args_rule(p)) // args - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (args_var = args_rule(p)) // args - ) - { - _res = $B._PyPegen.arguments_parsing_error (p, a); - return done(); + { // args ',' args + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var args_var; + if ( + (a = args_rule(p)) // args + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (args_var = args_rule(p)) // args + ) + { + _res = $B._PyPegen.arguments_parsing_error (p, a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_kwarg: @@ -13927,94 +14121,95 @@ function invalid_kwarg_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ('True' | 'False' | 'None') '=' - if (p.error_indicator) { - return NULL; - } - var a; - var b; - if ( - (a = _tmp_154_rule(p)) // 'True' | 'False' | 'None' - && - (b = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "cannot assign to %s", PyBytes_AS_STRING ( a.bytes )); - return done(); - } - p.mark = _mark; - } - { // NAME '=' expression for_if_clauses - if (p.error_indicator) { - return NULL; - } - var a; - var b; - var 
expression_var; - var for_if_clauses_var; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - && - (b = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (expression_var = expression_rule(p)) // expression - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "invalid syntax. Maybe you meant '==' or ':=' instead of '='?"); - return done(); - } - p.mark = _mark; - } - { // !(NAME '=') expression '=' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ('True' | 'False' | 'None') '=' + if (p.error_indicator) { + return NULL; + } + var a; + var b; + if ( + (a = _tmp_154_rule(p)) // 'True' | 'False' | 'None' + && + (b = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "cannot assign to %s", $B.parser_constants.PyBytes_AS_STRING ( a.bytes )); + break; + } + p.mark = _mark; } - var a; - var b; - if ( - $B._PyPegen.lookahead(0, _tmp_155_rule, p) - && - (a = expression_rule(p)) // expression - && - (b = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "expression cannot contain assignment, perhaps you meant \"==\"?"); - return done(); + { // NAME '=' expression for_if_clauses + if (p.error_indicator) { + return NULL; + } + var a; + var b; + var expression_var; + var for_if_clauses_var; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + && + (b = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (expression_var = expression_rule(p)) // expression + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "invalid syntax. 
Maybe you meant '==' or ':=' instead of '='?"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '**' expression '=' expression - if (p.error_indicator) { - return NULL; + { // !(NAME '=') expression '=' + if (p.error_indicator) { + return NULL; + } + var a; + var b; + if ( + $B._PyPegen.lookahead(0, _tmp_155_rule, p) + && + (a = expression_rule(p)) // expression + && + (b = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "expression cannot contain assignment, perhaps you meant \"==\"?"); + break; + } + p.mark = _mark; } - var _literal; - var a; - var b; - var expression_var; - if ( - (a = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (expression_var = expression_rule(p)) // expression - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (b = expression_rule(p)) // expression - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "cannot assign to keyword argument unpacking"); - return done(); + { // '**' expression '=' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + var expression_var; + if ( + (a = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (expression_var = expression_rule(p)) // expression + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (b = expression_rule(p)) // expression + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "cannot assign to keyword argument unpacking"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // expression_without_invalid: @@ -14029,85 +14224,86 @@ function expression_without_invalid_rule(p) p.call_invalid_rules = _prev_call_invalid; return NULL; } - var _res = NULL; - var _mark = p.mark; - if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { - p.error_indicator = 1; - p.call_invalid_rules = _prev_call_invalid; - return NULL; - } - var EXTRA = {} - EXTRA.lineno = p.tokens[_mark].lineno; - EXTRA.col_offset = p.tokens[_mark].col_offset; - { // disjunction 'if' disjunction 'else' expression - if (p.error_indicator) { + while (1) { + var _res = NULL; + var _mark = p.mark; + if (p.mark == p.fill && $B._PyPegen.fill_token(p) < 0) { + p.error_indicator = 1; p.call_invalid_rules = _prev_call_invalid; return NULL; } - var _keyword; - var _keyword_1; - var a; - var b; - var c; - if ( - (a = disjunction_rule(p)) // disjunction - && - (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' - && - (b = disjunction_rule(p)) // disjunction - && - (_keyword_1 = $B._PyPegen.expect_token(p, 645)) // token='else' - && - (c = expression_rule(p)) // expression - ) - { - var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); - if (_token == NULL) { + var EXTRA = {} + EXTRA.lineno = p.tokens[_mark].lineno; + EXTRA.col_offset = p.tokens[_mark].col_offset; + { // disjunction 'if' disjunction 'else' expression + if (p.error_indicator) { p.call_invalid_rules = _prev_call_invalid; return NULL; } - EXTRA.end_lineno = _token.end_lineno; - EXTRA.end_col_offset = _token.end_col_offset; - _res = new $B._PyAST.IfExp (b, a, c, EXTRA); - return done(); - } - p.mark = _mark; - } - { // disjunction - if (p.error_indicator) { - p.call_invalid_rules = _prev_call_invalid; - return NULL; - } - var disjunction_var; - if ( - (disjunction_var = disjunction_rule(p)) // disjunction - ) - { - _res = disjunction_var; - return done(); + var _keyword; + var _keyword_1; + var a; + 
var b; + var c; + if ( + (a = disjunction_rule(p)) // disjunction + && + (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' + && + (b = disjunction_rule(p)) // disjunction + && + (_keyword_1 = $B._PyPegen.expect_token(p, 645)) // token='else' + && + (c = expression_rule(p)) // expression + ) + { + var _token = $B._PyPegen.get_last_nonnwhitespace_token(p); + if (_token == NULL) { + p.call_invalid_rules = _prev_call_invalid; + return NULL; + } + EXTRA.end_lineno = _token.end_lineno; + EXTRA.end_col_offset = _token.end_col_offset; + _res = new $B._PyAST.IfExp (b, a, c, EXTRA); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // lambdef - if (p.error_indicator) { - p.call_invalid_rules = _prev_call_invalid; - return NULL; + { // disjunction + if (p.error_indicator) { + p.call_invalid_rules = _prev_call_invalid; + return NULL; + } + var disjunction_var; + if ( + (disjunction_var = disjunction_rule(p)) // disjunction + ) + { + _res = disjunction_var; + break; + } + p.mark = _mark; } - var lambdef_var; - if ( - (lambdef_var = lambdef_rule(p)) // lambdef - ) - { - _res = lambdef_var; - return done(); + { // lambdef + if (p.error_indicator) { + p.call_invalid_rules = _prev_call_invalid; + return NULL; + } + var lambdef_var; + if ( + (lambdef_var = lambdef_rule(p)) // lambdef + ) + { + _res = lambdef_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ p.call_invalid_rules = _prev_call_invalid; return _res; - } } // invalid_legacy_expression: NAME !'(' star_expressions @@ -14116,31 +14312,32 @@ function invalid_legacy_expression_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // NAME !'(' star_expressions - if (p.error_indicator) { - return NULL; - } - var a; - var b; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 7) // token='(' - && - (b = star_expressions_rule(p)) // star_expressions - ) - { - _res = $B._PyPegen.check_legacy_stmt (p, a ) ? $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a, b, "Missing parentheses in call to '%U'. Did you mean %U(...)?", a. id, a. id) : $B.parser_constants.NULL; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // NAME !'(' star_expressions + if (p.error_indicator) { + return NULL; + } + var a; + var b; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 7) // token='(' + && + (b = star_expressions_rule(p)) // star_expressions + ) + { + _res = $B._PyPegen.check_legacy_stmt (p, a ) ? $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "Missing parentheses in call to '%U'. Did you mean %U(...)?", a. id, a. id) : $B.parser_constants.NULL; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_expression: @@ -14152,76 +14349,77 @@ function invalid_expression_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // !(NAME STRING | SOFT_KEYWORD) disjunction expression_without_invalid - if (p.error_indicator) { - return NULL; - } - var a; - var b; - if ( - $B._PyPegen.lookahead(0, _tmp_156_rule, p) - && - (a = disjunction_rule(p)) // disjunction - && - (b = expression_without_invalid_rule(p)) // expression_without_invalid - ) - { - _res = $B._PyPegen.check_legacy_stmt (p, a ) ? $B.parser_constants.NULL : p.tokens [p.mark - 1].level == 0 ? 
$B.parser_constants.NULL : $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a, b, "invalid syntax. Perhaps you forgot a comma?"); - return done(); - } - p.mark = _mark; - } - { // disjunction 'if' disjunction !('else' | ':') - if (p.error_indicator) { - return NULL; - } - var _keyword; - var a; - var b; - if ( - (a = disjunction_rule(p)) // disjunction - && - (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' - && - (b = disjunction_rule(p)) // disjunction - && - $B._PyPegen.lookahead(0, _tmp_157_rule, p) - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "expected 'else' after 'if' expression"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // !(NAME STRING | SOFT_KEYWORD) disjunction expression_without_invalid + if (p.error_indicator) { + return NULL; + } + var a; + var b; + if ( + $B._PyPegen.lookahead(0, _tmp_156_rule, p) + && + (a = disjunction_rule(p)) // disjunction + && + (b = expression_without_invalid_rule(p)) // expression_without_invalid + ) + { + _res = $B._PyPegen.check_legacy_stmt (p, a ) ? $B.parser_constants.NULL : p.tokens [p.mark - 1].level == 0 ? $B.parser_constants.NULL : $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "invalid syntax. Perhaps you forgot a comma?"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'lambda' lambda_params? ':' &FSTRING_MIDDLE - if (p.error_indicator) { - return NULL; + { // disjunction 'if' disjunction !('else' | ':') + if (p.error_indicator) { + return NULL; + } + var _keyword; + var a; + var b; + if ( + (a = disjunction_rule(p)) // disjunction + && + (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' + && + (b = disjunction_rule(p)) // disjunction + && + $B._PyPegen.lookahead(0, _tmp_157_rule, p) + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "expected 'else' after 'if' expression"); + break; + } + p.mark = _mark; } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var b; - if ( - (a = $B._PyPegen.expect_token(p, 600)) // token='lambda' - && - (_opt_var = lambda_params_rule(p), !p.error_indicator) // lambda_params? - && - (b = $B._PyPegen.expect_token(p, 11)) // token=':' - && - $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, FSTRING_MIDDLE) // token=FSTRING_MIDDLE - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "f-string: lambda expressions are not allowed without parentheses"); - return done(); + { // 'lambda' lambda_params? ':' &FSTRING_MIDDLE + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var b; + if ( + (a = $B._PyPegen.expect_token(p, 600)) // token='lambda' + && + (_opt_var = lambda_params_rule(p), !p.error_indicator) // lambda_params? 
+ && + (b = $B._PyPegen.expect_token(p, 11)) // token=':' + && + $B._PyPegen.lookahead_with_int(1, $B._PyPegen.expect_token, p, FSTRING_MIDDLE) // token=FSTRING_MIDDLE + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "f-string: lambda expressions are not allowed without parentheses"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_named_expression: @@ -14233,83 +14431,84 @@ function invalid_named_expression_rule(p) if (p.error_indicator) { return NULL; } - var _res = {value: NULL}; - if ($B._PyPegen.is_memoized(p, invalid_named_expression_type, _res)) { - return _res.value; - } - _res = NULL; - var _mark = p.mark; - { // expression ':=' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var expression_var; - if ( - (a = expression_rule(p)) // expression - && - (_literal = $B._PyPegen.expect_token(p, 53)) // token=':=' - && - (expression_var = expression_rule(p)) // expression - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "cannot use assignment expressions with %s", $B._PyPegen.get_expr_name ( a )); - return done(); - } - p.mark = _mark; - } - { // NAME '=' bitwise_or !('=' | ':=') - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var b; - if ( - (a = $B._PyPegen.name_token(p)) // NAME - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (b = bitwise_or_rule(p)) // bitwise_or - && - $B._PyPegen.lookahead(0, _tmp_158_rule, p) - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "invalid syntax. Maybe you meant '==' or ':=' instead of '='?"); - return done(); + while (1) { + var _res = {value: NULL}; + if ($B._PyPegen.is_memoized(p, invalid_named_expression_type, _res)) { + return _res.value; + } + _res = NULL; + var _mark = p.mark; + { // expression ':=' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var expression_var; + if ( + (a = expression_rule(p)) // expression + && + (_literal = $B._PyPegen.expect_token(p, 53)) // token=':=' + && + (expression_var = expression_rule(p)) // expression + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "cannot use assignment expressions with %s", $B._PyPegen.get_expr_name ( a )); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // !(list | tuple | genexp | 'True' | 'None' | 'False') bitwise_or '=' bitwise_or !('=' | ':=') - if (p.error_indicator) { - return NULL; + { // NAME '=' bitwise_or !('=' | ':=') + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + if ( + (a = $B._PyPegen.name_token(p)) // NAME + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (b = bitwise_or_rule(p)) // bitwise_or + && + $B._PyPegen.lookahead(0, _tmp_158_rule, p) + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "invalid syntax. Maybe you meant '==' or ':=' instead of '='?"); + break; + } + p.mark = _mark; } - var a; - var b; - var bitwise_or_var; - if ( - $B._PyPegen.lookahead(0, _tmp_159_rule, p) - && - (a = bitwise_or_rule(p)) // bitwise_or - && - (b = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or - && - $B._PyPegen.lookahead(0, _tmp_160_rule, p) - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "cannot assign to %s here. 
Maybe you meant '==' instead of '='?", $B._PyPegen.get_expr_name ( a )); - return done(); + { // !(list | tuple | genexp | 'True' | 'None' | 'False') bitwise_or '=' bitwise_or !('=' | ':=') + if (p.error_indicator) { + return NULL; + } + var a; + var b; + var bitwise_or_var; + if ( + $B._PyPegen.lookahead(0, _tmp_159_rule, p) + && + (a = bitwise_or_rule(p)) // bitwise_or + && + (b = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or + && + $B._PyPegen.lookahead(0, _tmp_160_rule, p) + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "cannot assign to %s here. Maybe you meant '==' instead of '='?", $B._PyPegen.get_expr_name ( a )); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ $B._PyPegen.insert_memo(p, _mark, invalid_named_expression_type, _res); return _res; - } } // invalid_assignment: @@ -14324,138 +14523,139 @@ function invalid_assignment_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // invalid_ann_assign_target ':' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var expression_var; - if ( - (a = invalid_ann_assign_target_rule(p)) // invalid_ann_assign_target - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (expression_var = expression_rule(p)) // expression - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "only single target (not %s) can be annotated", $B._PyPegen.get_expr_name ( a )); - return done(); - } - p.mark = _mark; - } - { // star_named_expression ',' star_named_expressions* ':' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var _loop0_161_var; - var a; - var expression_var; - if ( - (a = star_named_expression_rule(p)) // star_named_expression - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (_loop0_161_var = _loop0_161_rule(p)) // star_named_expressions* - && - (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (expression_var = expression_rule(p)) // expression - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "only single target (not tuple) can be annotated"); - return done(); - } - p.mark = _mark; - } - { // expression ':' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var expression_var; - if ( - (a = expression_rule(p)) // expression - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (expression_var = expression_rule(p)) // expression - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "illegal target for annotation"); - return done(); - } - p.mark = _mark; - } - { // ((star_targets '='))* star_expressions '=' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // invalid_ann_assign_target ':' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var expression_var; + if ( + (a = invalid_ann_assign_target_rule(p)) // invalid_ann_assign_target + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (expression_var = expression_rule(p)) // expression + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "only single target (not %s) can be annotated", $B._PyPegen.get_expr_name ( a )); + break; + } + p.mark = _mark; } - var _literal; - var _loop0_162_var; - var a; - if ( - (_loop0_162_var = 
_loop0_162_rule(p)) // ((star_targets '='))* - && - (a = star_expressions_rule(p)) // star_expressions - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_INVALID_TARGET ($B.parser_constants.STAR_TARGETS, a); - return done(); + { // star_named_expression ',' star_named_expressions* ':' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var _loop0_161_var; + var a; + var expression_var; + if ( + (a = star_named_expression_rule(p)) // star_named_expression + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (_loop0_161_var = _loop0_161_rule(p)) // star_named_expressions* + && + (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (expression_var = expression_rule(p)) // expression + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "only single target (not tuple) can be annotated"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // ((star_targets '='))* yield_expr '=' - if (p.error_indicator) { - return NULL; + { // expression ':' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var expression_var; + if ( + (a = expression_rule(p)) // expression + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (expression_var = expression_rule(p)) // expression + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "illegal target for annotation"); + break; + } + p.mark = _mark; } - var _literal; - var _loop0_163_var; - var a; - if ( - (_loop0_163_var = _loop0_163_rule(p)) // ((star_targets '='))* - && - (a = yield_expr_rule(p)) // yield_expr - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "assignment to yield expression not possible"); - return done(); + { // ((star_targets '='))* star_expressions '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _loop0_162_var; + var a; + if ( + (_loop0_162_var = _loop0_162_rule(p)) // ((star_targets '='))* + && + (a = star_expressions_rule(p)) // star_expressions + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_INVALID_TARGET(p, $B.parser_constants.STAR_TARGETS, a); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // star_expressions augassign (yield_expr | star_expressions) - if (p.error_indicator) { - return NULL; + { // ((star_targets '='))* yield_expr '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _loop0_163_var; + var a; + if ( + (_loop0_163_var = _loop0_163_rule(p)) // ((star_targets '='))* + && + (a = yield_expr_rule(p)) // yield_expr + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "assignment to yield expression not possible"); + break; + } + p.mark = _mark; } - var _tmp_164_var; - var a; - var augassign_var; - if ( - (a = star_expressions_rule(p)) // star_expressions - && - (augassign_var = augassign_rule(p)) // augassign - && - (_tmp_164_var = _tmp_164_rule(p)) // yield_expr | star_expressions - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "'%s' is an illegal expression for augmented assignment", $B._PyPegen.get_expr_name ( a )); - return done(); + { // star_expressions augassign (yield_expr | star_expressions) + if (p.error_indicator) { + return NULL; + } + var 
_tmp_164_var; + var a; + var augassign_var; + if ( + (a = star_expressions_rule(p)) // star_expressions + && + (augassign_var = augassign_rule(p)) // augassign + && + (_tmp_164_var = _tmp_164_rule(p)) // yield_expr | star_expressions + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "'%s' is an illegal expression for augmented assignment", $B._PyPegen.get_expr_name ( a )); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_ann_assign_target: list | tuple | '(' invalid_ann_assign_target ')' @@ -14464,60 +14664,61 @@ function invalid_ann_assign_target_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // list - if (p.error_indicator) { - return NULL; - } - var list_var; - if ( - (list_var = list_rule(p)) // list - ) - { - _res = list_var; - return done(); - } - p.mark = _mark; - } - { // tuple - if (p.error_indicator) { - return NULL; - } - var tuple_var; - if ( - (tuple_var = tuple_rule(p)) // tuple - ) - { - _res = tuple_var; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // list + if (p.error_indicator) { + return NULL; + } + var list_var; + if ( + (list_var = list_rule(p)) // list + ) + { + _res = list_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '(' invalid_ann_assign_target ')' - if (p.error_indicator) { - return NULL; + { // tuple + if (p.error_indicator) { + return NULL; + } + var tuple_var; + if ( + (tuple_var = tuple_rule(p)) // tuple + ) + { + _res = tuple_var; + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = invalid_ann_assign_target_rule(p)) // invalid_ann_assign_target - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = a; - return done(); + { // '(' invalid_ann_assign_target ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = invalid_ann_assign_target_rule(p)) // invalid_ann_assign_target + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_del_stmt: 'del' star_expressions @@ -14526,29 +14727,30 @@ function invalid_del_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'del' star_expressions - if (p.error_indicator) { - return NULL; - } - var _keyword; - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 604)) // token='del' - && - (a = star_expressions_rule(p)) // star_expressions - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_INVALID_TARGET ($B.parser_constants.DEL_TARGETS, a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'del' star_expressions + if (p.error_indicator) { + return NULL; + } + var _keyword; + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 604)) // token='del' + && + (a = star_expressions_rule(p)) // star_expressions + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_INVALID_TARGET(p, $B.parser_constants.DEL_TARGETS, a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_block: NEWLINE !INDENT @@ -14557,28 +14759,29 @@ function 
invalid_block_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // NEWLINE !INDENT - if (p.error_indicator) { - return NULL; - } - var newline_var; - if ( - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var newline_var; + if ( + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_comprehension: @@ -14590,81 +14793,82 @@ function invalid_comprehension_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ('[' | '(' | '{') starred_expression for_if_clauses - if (p.error_indicator) { - return NULL; - } - var _tmp_165_var; - var a; - var for_if_clauses_var; - if ( - (_tmp_165_var = _tmp_165_rule(p)) // '[' | '(' | '{' - && - (a = starred_expression_rule(p)) // starred_expression - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "iterable unpacking cannot be used in comprehension"); - return done(); - } - p.mark = _mark; - } - { // ('[' | '{') star_named_expression ',' star_named_expressions for_if_clauses - if (p.error_indicator) { - return NULL; - } - var _literal; - var _tmp_166_var; - var a; - var b; - var for_if_clauses_var; - if ( - (_tmp_166_var = _tmp_166_rule(p)) // '[' | '{' - && - (a = star_named_expression_rule(p)) // star_named_expression - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (b = star_named_expressions_rule(p)) // star_named_expressions - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, PyPegen_last_item ( b, $B.ast.expr ), "did you forget parentheses around the comprehension target?"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ('[' | '(' | '{') starred_expression for_if_clauses + if (p.error_indicator) { + return NULL; + } + var _tmp_165_var; + var a; + var for_if_clauses_var; + if ( + (_tmp_165_var = _tmp_165_rule(p)) // '[' | '(' | '{' + && + (a = starred_expression_rule(p)) // starred_expression + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "iterable unpacking cannot be used in comprehension"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // ('[' | '{') star_named_expression ',' for_if_clauses - if (p.error_indicator) { - return NULL; + { // ('[' | '{') star_named_expression ',' star_named_expressions for_if_clauses + if (p.error_indicator) { + return NULL; + } + var _literal; + var _tmp_166_var; + var a; + var b; + var for_if_clauses_var; + if ( + (_tmp_166_var = _tmp_166_rule(p)) // '[' | '{' + && + (a = star_named_expression_rule(p)) // star_named_expression + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + 
(b = star_named_expressions_rule(p)) // star_named_expressions + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, $B.PyPegen.last_item ( b, $B.ast.expr ), "did you forget parentheses around the comprehension target?"); + break; + } + p.mark = _mark; } - var _tmp_167_var; - var a; - var b; - var for_if_clauses_var; - if ( - (_tmp_167_var = _tmp_167_rule(p)) // '[' | '{' - && - (a = star_named_expression_rule(p)) // star_named_expression - && - (b = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "did you forget parentheses around the comprehension target?"); - return done(); + { // ('[' | '{') star_named_expression ',' for_if_clauses + if (p.error_indicator) { + return NULL; + } + var _tmp_167_var; + var a; + var b; + var for_if_clauses_var; + if ( + (_tmp_167_var = _tmp_167_rule(p)) // '[' | '{' + && + (a = star_named_expression_rule(p)) // star_named_expression + && + (b = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "did you forget parentheses around the comprehension target?"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_dict_comprehension: '{' '**' bitwise_or for_if_clauses '}' @@ -14673,38 +14877,39 @@ function invalid_dict_comprehension_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '{' '**' bitwise_or for_if_clauses '}' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var a; - var bitwise_or_var; - var for_if_clauses_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (a = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - && - (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "dict unpacking cannot be used in dict comprehension"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '{' '**' bitwise_or for_if_clauses '}' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + var bitwise_or_var; + var for_if_clauses_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (a = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal_1 = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "dict unpacking cannot be used in dict comprehension"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_parameters: @@ -14719,150 +14924,151 @@ function invalid_parameters_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // "/" ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (a = $B._PyPegen.expect_token(p, 17)) // token='/' - && - (_literal = 
$B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "at least one argument must precede /"); - return done(); - } - p.mark = _mark; - } - { // (slash_no_default | slash_with_default) param_maybe_default* '/' - if (p.error_indicator) { - return NULL; - } - var _loop0_169_var; - var _tmp_168_var; - var a; - if ( - (_tmp_168_var = _tmp_168_rule(p)) // slash_no_default | slash_with_default - && - (_loop0_169_var = _loop0_169_rule(p)) // param_maybe_default* - && - (a = $B._PyPegen.expect_token(p, 17)) // token='/' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "/ may appear only once"); - return done(); - } - p.mark = _mark; - } - { // slash_no_default? param_no_default* invalid_parameters_helper param_no_default - if (p.error_indicator) { - return NULL; - } - var _loop0_170_var; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var invalid_parameters_helper_var; - if ( - (_opt_var = slash_no_default_rule(p), !p.error_indicator) // slash_no_default? - && - (_loop0_170_var = _loop0_170_rule(p)) // param_no_default* - && - (invalid_parameters_helper_var = invalid_parameters_helper_rule(p)) // invalid_parameters_helper - && - (a = param_no_default_rule(p)) // param_no_default - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "parameter without a default follows parameter with a default"); - return done(); - } - p.mark = _mark; - } - { // param_no_default* '(' param_no_default+ ','? ')' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // "/" ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (a = $B._PyPegen.expect_token(p, 17)) // token='/' + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "at least one argument must precede /"); + break; + } + p.mark = _mark; } - var _loop0_171_var; - var _loop1_172_var; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var b; - if ( - (_loop0_171_var = _loop0_171_rule(p)) // param_no_default* - && - (a = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (_loop1_172_var = _loop1_172_rule(p)) // param_no_default+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - (b = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "Function parameters cannot be parenthesized"); - return done(); + { // (slash_no_default | slash_with_default) param_maybe_default* '/' + if (p.error_indicator) { + return NULL; + } + var _loop0_169_var; + var _tmp_168_var; + var a; + if ( + (_tmp_168_var = _tmp_168_rule(p)) // slash_no_default | slash_with_default + && + (_loop0_169_var = _loop0_169_rule(p)) // param_maybe_default* + && + (a = $B._PyPegen.expect_token(p, 17)) // token='/' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "/ may appear only once"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // [(slash_no_default | slash_with_default)] param_maybe_default* '*' (',' | param_no_default) param_maybe_default* '/' - if (p.error_indicator) { - return NULL; + { // slash_no_default? 
param_no_default* invalid_parameters_helper param_no_default + if (p.error_indicator) { + return NULL; + } + var _loop0_170_var; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var invalid_parameters_helper_var; + if ( + (_opt_var = slash_no_default_rule(p), !p.error_indicator) // slash_no_default? + && + (_loop0_170_var = _loop0_170_rule(p)) // param_no_default* + && + (invalid_parameters_helper_var = invalid_parameters_helper_rule(p)) // invalid_parameters_helper + && + (a = param_no_default_rule(p)) // param_no_default + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "parameter without a default follows parameter with a default"); + break; + } + p.mark = _mark; } - var _literal; - var _loop0_174_var; - var _loop0_176_var; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _tmp_175_var; - var a; - if ( - (_opt_var = _tmp_173_rule(p), !p.error_indicator) // [(slash_no_default | slash_with_default)] - && - (_loop0_174_var = _loop0_174_rule(p)) // param_maybe_default* - && - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (_tmp_175_var = _tmp_175_rule(p)) // ',' | param_no_default - && - (_loop0_176_var = _loop0_176_rule(p)) // param_maybe_default* - && - (a = $B._PyPegen.expect_token(p, 17)) // token='/' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "/ must be ahead of *"); - return done(); + { // param_no_default* '(' param_no_default+ ','? ')' + if (p.error_indicator) { + return NULL; + } + var _loop0_171_var; + var _loop1_172_var; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var b; + if ( + (_loop0_171_var = _loop0_171_rule(p)) // param_no_default* + && + (a = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (_loop1_172_var = _loop1_172_rule(p)) // param_no_default+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? 
+ && + (b = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "Function parameters cannot be parenthesized"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // param_maybe_default+ '/' '*' - if (p.error_indicator) { - return NULL; + { // [(slash_no_default | slash_with_default)] param_maybe_default* '*' (',' | param_no_default) param_maybe_default* '/' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _loop0_174_var; + var _loop0_176_var; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _tmp_175_var; + var a; + if ( + (_opt_var = _tmp_173_rule(p), !p.error_indicator) // [(slash_no_default | slash_with_default)] + && + (_loop0_174_var = _loop0_174_rule(p)) // param_maybe_default* + && + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (_tmp_175_var = _tmp_175_rule(p)) // ',' | param_no_default + && + (_loop0_176_var = _loop0_176_rule(p)) // param_maybe_default* + && + (a = $B._PyPegen.expect_token(p, 17)) // token='/' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "/ must be ahead of *"); + break; + } + p.mark = _mark; } - var _literal; - var _loop1_177_var; - var a; - if ( - (_loop1_177_var = _loop1_177_rule(p)) // param_maybe_default+ - && - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - && - (a = $B._PyPegen.expect_token(p, 16)) // token='*' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "expected comma between / and *"); - return done(); + { // param_maybe_default+ '/' '*' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _loop1_177_var; + var a; + if ( + (_loop1_177_var = _loop1_177_rule(p)) // param_maybe_default+ + && + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + && + (a = $B._PyPegen.expect_token(p, 16)) // token='*' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "expected comma between / and *"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_default: '=' &(')' | ',') @@ -14871,28 +15077,29 @@ function invalid_default_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '=' &(')' | ',') - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = $B._PyPegen.expect_token(p, 22)) // token='=' - && - $B._PyPegen.lookahead(1, _tmp_178_rule, p) - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "expected default value expression"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '=' &(')' | ',') + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = $B._PyPegen.expect_token(p, 22)) // token='=' + && + $B._PyPegen.lookahead(1, _tmp_178_rule, p) + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "expected default value expression"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_star_etc: @@ -14905,95 +15112,96 @@ function invalid_star_etc_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '*' (')' | ',' (')' | '**')) - if (p.error_indicator) { - return NULL; - } - var _tmp_179_var; - var a; - if ( - (a = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (_tmp_179_var = _tmp_179_rule(p)) // ')' | ',' (')' | '**') - ) - { - _res = 
$B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "named arguments must follow bare *"); - return done(); - } - p.mark = _mark; - } - { // '*' ',' TYPE_COMMENT - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var type_comment_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (type_comment_var = $B._PyPegen.expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("bare * has associated type comment"); - return done(); - } - p.mark = _mark; - } - { // '*' param '=' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var param_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (param_var = param_rule(p)) // param - && - (a = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "var-positional argument cannot have default value"); - return done(); - } - p.mark = _mark; - } - { // '*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',') - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '*' (')' | ',' (')' | '**')) + if (p.error_indicator) { + return NULL; + } + var _tmp_179_var; + var a; + if ( + (a = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (_tmp_179_var = _tmp_179_rule(p)) // ')' | ',' (')' | '**') + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "named arguments must follow bare *"); + break; + } + p.mark = _mark; } - var _literal; - var _loop0_181_var; - var _tmp_180_var; - var _tmp_182_var; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (_tmp_180_var = _tmp_180_rule(p)) // param_no_default | ',' - && - (_loop0_181_var = _loop0_181_rule(p)) // param_maybe_default* - && - (a = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (_tmp_182_var = _tmp_182_rule(p)) // param_no_default | ',' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "* argument may appear only once"); - return done(); + { // '*' ',' TYPE_COMMENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var type_comment_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (type_comment_var = $B._PyPegen.expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "bare * has associated type comment"); + break; + } + p.mark = _mark; } - p.mark = _mark; + { // '*' param '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var param_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (param_var = param_rule(p)) // param + && + (a = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "var-positional argument cannot have default value"); + break; + } + p.mark = _mark; + } + { // '*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',') + if (p.error_indicator) { + return NULL; + } + var _literal; + var _loop0_181_var; + var _tmp_180_var; + var _tmp_182_var; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (_tmp_180_var = _tmp_180_rule(p)) // param_no_default | ',' + && + (_loop0_181_var = _loop0_181_rule(p)) 
// param_maybe_default* + && + (a = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (_tmp_182_var = _tmp_182_rule(p)) // param_no_default | ',' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "* argument may appear only once"); + break; + } + p.mark = _mark; + } + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_kwds: '**' param '=' | '**' param ',' param | '**' param ',' ('*' | '**' | '/') @@ -15002,78 +15210,79 @@ function invalid_kwds_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '**' param '=' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var param_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (param_var = param_rule(p)) // param - && - (a = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "var-keyword argument cannot have default value"); - return done(); - } - p.mark = _mark; - } - { // '**' param ',' param - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var a; - var param_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (param_var = param_rule(p)) // param - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (a = param_rule(p)) // param - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "arguments cannot follow var-keyword argument"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '**' param '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var param_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (param_var = param_rule(p)) // param + && + (a = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "var-keyword argument cannot have default value"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '**' param ',' ('*' | '**' | '/') - if (p.error_indicator) { - return NULL; + { // '**' param ',' param + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + var param_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (param_var = param_rule(p)) // param + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (a = param_rule(p)) // param + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "arguments cannot follow var-keyword argument"); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - var param_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (param_var = param_rule(p)) // param - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (a = _tmp_183_rule(p)) // '*' | '**' | '/' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "arguments cannot follow var-keyword argument"); - return done(); + { // '**' param ',' ('*' | '**' | '/') + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + var param_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (param_var = param_rule(p)) // param + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (a = _tmp_183_rule(p)) // '*' | '**' | '/' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "arguments cannot 
follow var-keyword argument"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_parameters_helper: slash_with_default | param_with_default+ @@ -15082,40 +15291,41 @@ function invalid_parameters_helper_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // slash_with_default - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = slash_with_default_rule(p)) // slash_with_default - ) - { - _res = $B._PyPegen.singleton_seq (p, a); - return done(); - } - p.mark = _mark; - } - { // param_with_default+ - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // slash_with_default + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = slash_with_default_rule(p)) // slash_with_default + ) + { + _res = $B._PyPegen.singleton_seq (p, a); + break; + } + p.mark = _mark; } - var _loop1_184_var; - if ( - (_loop1_184_var = _loop1_184_rule(p)) // param_with_default+ - ) - { - _res = _loop1_184_var; - return done(); + { // param_with_default+ + if (p.error_indicator) { + return NULL; + } + var _loop1_184_var; + if ( + (_loop1_184_var = _loop1_184_rule(p)) // param_with_default+ + ) + { + _res = _loop1_184_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_lambda_parameters: @@ -15130,150 +15340,151 @@ function invalid_lambda_parameters_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // "/" ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (a = $B._PyPegen.expect_token(p, 17)) // token='/' - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "at least one argument must precede /"); - return done(); - } - p.mark = _mark; - } - { // (lambda_slash_no_default | lambda_slash_with_default) lambda_param_maybe_default* '/' - if (p.error_indicator) { - return NULL; - } - var _loop0_186_var; - var _tmp_185_var; - var a; - if ( - (_tmp_185_var = _tmp_185_rule(p)) // lambda_slash_no_default | lambda_slash_with_default - && - (_loop0_186_var = _loop0_186_rule(p)) // lambda_param_maybe_default* - && - (a = $B._PyPegen.expect_token(p, 17)) // token='/' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "/ may appear only once"); - return done(); - } - p.mark = _mark; - } - { // lambda_slash_no_default? lambda_param_no_default* invalid_lambda_parameters_helper lambda_param_no_default - if (p.error_indicator) { - return NULL; - } - var _loop0_187_var; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var invalid_lambda_parameters_helper_var; - if ( - (_opt_var = lambda_slash_no_default_rule(p), !p.error_indicator) // lambda_slash_no_default? - && - (_loop0_187_var = _loop0_187_rule(p)) // lambda_param_no_default* - && - (invalid_lambda_parameters_helper_var = invalid_lambda_parameters_helper_rule(p)) // invalid_lambda_parameters_helper - && - (a = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "parameter without a default follows parameter with a default"); - return done(); - } - p.mark = _mark; - } - { // lambda_param_no_default* '(' ','.lambda_param+ ','? 
')' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // "/" ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (a = $B._PyPegen.expect_token(p, 17)) // token='/' + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "at least one argument must precede /"); + break; + } + p.mark = _mark; } - var _gather_189_var; - var _loop0_188_var; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var b; - if ( - (_loop0_188_var = _loop0_188_rule(p)) // lambda_param_no_default* - && - (a = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (_gather_189_var = _gather_189_rule(p)) // ','.lambda_param+ - && - (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - (b = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "Lambda expression parameters cannot be parenthesized"); - return done(); + { // (lambda_slash_no_default | lambda_slash_with_default) lambda_param_maybe_default* '/' + if (p.error_indicator) { + return NULL; + } + var _loop0_186_var; + var _tmp_185_var; + var a; + if ( + (_tmp_185_var = _tmp_185_rule(p)) // lambda_slash_no_default | lambda_slash_with_default + && + (_loop0_186_var = _loop0_186_rule(p)) // lambda_param_maybe_default* + && + (a = $B._PyPegen.expect_token(p, 17)) // token='/' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "/ may appear only once"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // [(lambda_slash_no_default | lambda_slash_with_default)] lambda_param_maybe_default* '*' (',' | lambda_param_no_default) lambda_param_maybe_default* '/' - if (p.error_indicator) { - return NULL; + { // lambda_slash_no_default? lambda_param_no_default* invalid_lambda_parameters_helper lambda_param_no_default + if (p.error_indicator) { + return NULL; + } + var _loop0_187_var; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var invalid_lambda_parameters_helper_var; + if ( + (_opt_var = lambda_slash_no_default_rule(p), !p.error_indicator) // lambda_slash_no_default? + && + (_loop0_187_var = _loop0_187_rule(p)) // lambda_param_no_default* + && + (invalid_lambda_parameters_helper_var = invalid_lambda_parameters_helper_rule(p)) // invalid_lambda_parameters_helper + && + (a = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "parameter without a default follows parameter with a default"); + break; + } + p.mark = _mark; } - var _literal; - var _loop0_192_var; - var _loop0_194_var; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _tmp_193_var; - var a; - if ( - (_opt_var = _tmp_191_rule(p), !p.error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)] - && - (_loop0_192_var = _loop0_192_rule(p)) // lambda_param_maybe_default* - && - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (_tmp_193_var = _tmp_193_rule(p)) // ',' | lambda_param_no_default - && - (_loop0_194_var = _loop0_194_rule(p)) // lambda_param_maybe_default* - && - (a = $B._PyPegen.expect_token(p, 17)) // token='/' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "/ must be ahead of *"); - return done(); + { // lambda_param_no_default* '(' ','.lambda_param+ ','? 
')' + if (p.error_indicator) { + return NULL; + } + var _gather_189_var; + var _loop0_188_var; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var b; + if ( + (_loop0_188_var = _loop0_188_rule(p)) // lambda_param_no_default* + && + (a = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (_gather_189_var = _gather_189_rule(p)) // ','.lambda_param+ + && + (_opt_var = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + && + (b = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "Lambda expression parameters cannot be parenthesized"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // lambda_param_maybe_default+ '/' '*' - if (p.error_indicator) { - return NULL; + { // [(lambda_slash_no_default | lambda_slash_with_default)] lambda_param_maybe_default* '*' (',' | lambda_param_no_default) lambda_param_maybe_default* '/' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _loop0_192_var; + var _loop0_194_var; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _tmp_193_var; + var a; + if ( + (_opt_var = _tmp_191_rule(p), !p.error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)] + && + (_loop0_192_var = _loop0_192_rule(p)) // lambda_param_maybe_default* + && + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (_tmp_193_var = _tmp_193_rule(p)) // ',' | lambda_param_no_default + && + (_loop0_194_var = _loop0_194_rule(p)) // lambda_param_maybe_default* + && + (a = $B._PyPegen.expect_token(p, 17)) // token='/' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "/ must be ahead of *"); + break; + } + p.mark = _mark; } - var _literal; - var _loop1_195_var; - var a; - if ( - (_loop1_195_var = _loop1_195_rule(p)) // lambda_param_maybe_default+ - && - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - && - (a = $B._PyPegen.expect_token(p, 16)) // token='*' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "expected comma between / and *"); - return done(); + { // lambda_param_maybe_default+ '/' '*' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _loop1_195_var; + var a; + if ( + (_loop1_195_var = _loop1_195_rule(p)) // lambda_param_maybe_default+ + && + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + && + (a = $B._PyPegen.expect_token(p, 16)) // token='*' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "expected comma between / and *"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_lambda_parameters_helper: @@ -15284,40 +15495,41 @@ function invalid_lambda_parameters_helper_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // lambda_slash_with_default - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default - ) - { - _res = $B._PyPegen.singleton_seq (p, a); - return done(); - } - p.mark = _mark; - } - { // lambda_param_with_default+ - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // lambda_slash_with_default + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default + ) + { + _res = $B._PyPegen.singleton_seq (p, a); + break; + } + p.mark = _mark; } - var 
_loop1_196_var; - if ( - (_loop1_196_var = _loop1_196_rule(p)) // lambda_param_with_default+ - ) - { - _res = _loop1_196_var; - return done(); + { // lambda_param_with_default+ + if (p.error_indicator) { + return NULL; + } + var _loop1_196_var; + if ( + (_loop1_196_var = _loop1_196_rule(p)) // lambda_param_with_default+ + ) + { + _res = _loop1_196_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_lambda_star_etc: @@ -15329,75 +15541,76 @@ function invalid_lambda_star_etc_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '*' (':' | ',' (':' | '**')) - if (p.error_indicator) { - return NULL; - } - var _literal; - var _tmp_197_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (_tmp_197_var = _tmp_197_rule(p)) // ':' | ',' (':' | '**') - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("named arguments must follow bare *"); - return done(); - } - p.mark = _mark; - } - { // '*' lambda_param '=' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var lambda_param_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (lambda_param_var = lambda_param_rule(p)) // lambda_param - && - (a = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "var-positional argument cannot have default value"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '*' (':' | ',' (':' | '**')) + if (p.error_indicator) { + return NULL; + } + var _literal; + var _tmp_197_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (_tmp_197_var = _tmp_197_rule(p)) // ':' | ',' (':' | '**') + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "named arguments must follow bare *"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',') - if (p.error_indicator) { - return NULL; + { // '*' lambda_param '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var lambda_param_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (lambda_param_var = lambda_param_rule(p)) // lambda_param + && + (a = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "var-positional argument cannot have default value"); + break; + } + p.mark = _mark; } - var _literal; - var _loop0_199_var; - var _tmp_198_var; - var _tmp_200_var; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (_tmp_198_var = _tmp_198_rule(p)) // lambda_param_no_default | ',' - && - (_loop0_199_var = _loop0_199_rule(p)) // lambda_param_maybe_default* - && - (a = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (_tmp_200_var = _tmp_200_rule(p)) // lambda_param_no_default | ',' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "* argument may appear only once"); - return done(); + { // '*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',') + if (p.error_indicator) { + return NULL; + } + var _literal; + var _loop0_199_var; + var _tmp_198_var; + var _tmp_200_var; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (_tmp_198_var = _tmp_198_rule(p)) // lambda_param_no_default | ',' + && + 
(_loop0_199_var = _loop0_199_rule(p)) // lambda_param_maybe_default* + && + (a = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (_tmp_200_var = _tmp_200_rule(p)) // lambda_param_no_default | ',' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "* argument may appear only once"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_lambda_kwds: @@ -15405,82 +15618,83 @@ function invalid_lambda_star_etc_rule(p) // | '**' lambda_param ',' lambda_param // | '**' lambda_param ',' ('*' | '**' | '/') function invalid_lambda_kwds_rule(p) -{ - if (p.error_indicator) { - return NULL; - } - var _res = NULL; - var _mark = p.mark; - { // '**' lambda_param '=' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var lambda_param_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (lambda_param_var = lambda_param_rule(p)) // lambda_param - && - (a = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "var-keyword argument cannot have default value"); - return done(); - } - p.mark = _mark; - } - { // '**' lambda_param ',' lambda_param - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var a; - var lambda_param_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (lambda_param_var = lambda_param_rule(p)) // lambda_param - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (a = lambda_param_rule(p)) // lambda_param - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "arguments cannot follow var-keyword argument"); - return done(); - } - p.mark = _mark; +{ + if (p.error_indicator) { + return NULL; } - { // '**' lambda_param ',' ('*' | '**' | '/') - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '**' lambda_param '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var lambda_param_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (lambda_param_var = lambda_param_rule(p)) // lambda_param + && + (a = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "var-keyword argument cannot have default value"); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - var lambda_param_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (lambda_param_var = lambda_param_rule(p)) // lambda_param - && - (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (a = _tmp_201_rule(p)) // '*' | '**' | '/' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "arguments cannot follow var-keyword argument"); - return done(); + { // '**' lambda_param ',' lambda_param + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + var lambda_param_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (lambda_param_var = lambda_param_rule(p)) // lambda_param + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (a = lambda_param_rule(p)) // lambda_param + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "arguments cannot follow var-keyword argument"); + break; + } + p.mark = _mark; } - p.mark = _mark; + { // '**' lambda_param ',' ('*' | '**' | 
'/') + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + var lambda_param_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (lambda_param_var = lambda_param_rule(p)) // lambda_param + && + (_literal_1 = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (a = _tmp_201_rule(p)) // '*' | '**' | '/' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "arguments cannot follow var-keyword argument"); + break; + } + p.mark = _mark; + } + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_double_type_comments: TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT @@ -15489,38 +15703,39 @@ function invalid_double_type_comments_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT - if (p.error_indicator) { - return NULL; - } - var indent_var; - var newline_var; - var newline_var_1; - var type_comment_var; - var type_comment_var_1; - if ( - (type_comment_var = $B._PyPegen.expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - (type_comment_var_1 = $B._PyPegen.expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' - && - (newline_var_1 = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - (indent_var = $B._PyPegen.expect_token(p, INDENT)) // token='INDENT' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("Cannot have two type comments on def"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT + if (p.error_indicator) { + return NULL; + } + var indent_var; + var newline_var; + var newline_var_1; + var type_comment_var; + var type_comment_var_1; + if ( + (type_comment_var = $B._PyPegen.expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + (type_comment_var_1 = $B._PyPegen.expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' + && + (newline_var_1 = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + (indent_var = $B._PyPegen.expect_token(p, INDENT)) // token='INDENT' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "Cannot have two type comments on def"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_with_item: expression 'as' expression &(',' | ')' | ':') @@ -15529,34 +15744,35 @@ function invalid_with_item_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expression 'as' expression &(',' | ')' | ':') - if (p.error_indicator) { - return NULL; - } - var _keyword; - var a; - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - && - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (a = expression_rule(p)) // expression - && - $B._PyPegen.lookahead(1, _tmp_202_rule, p) - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_INVALID_TARGET ($B.parser_constants.STAR_TARGETS, a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expression 'as' expression &(',' | ')' | ':') + if (p.error_indicator) { + return NULL; + } + var _keyword; + var a; + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' 
+ && + (a = expression_rule(p)) // expression + && + $B._PyPegen.lookahead(1, _tmp_202_rule, p) + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_INVALID_TARGET(p, $B.parser_constants.STAR_TARGETS, a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_for_target: ASYNC? 'for' star_expressions @@ -15565,33 +15781,34 @@ function invalid_for_target_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ASYNC? 'for' star_expressions - if (p.error_indicator) { - return NULL; - } - var _keyword; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - if ( - (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? - && - (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' - && - (a = star_expressions_rule(p)) // star_expressions - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_INVALID_TARGET ($B.parser_constants.FOR_TARGETS, a); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ASYNC? 'for' star_expressions + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + if ( + (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? + && + (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' + && + (a = star_expressions_rule(p)) // star_expressions + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_INVALID_TARGET(p, $B.parser_constants.FOR_TARGETS, a); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_group: '(' starred_expression ')' | '(' '**' expression ')' @@ -15600,55 +15817,56 @@ function invalid_group_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '(' starred_expression ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = starred_expression_rule(p)) // starred_expression - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "cannot use starred expression here"); - return done(); - } - p.mark = _mark; - } - { // '(' '**' expression ')' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '(' starred_expression ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = starred_expression_rule(p)) // starred_expression + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "cannot use starred expression here"); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var a; - var expression_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = $B._PyPegen.expect_token(p, 35)) // token='**' - && - (expression_var = expression_rule(p)) // expression - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "cannot use double starred expression here"); - return done(); + { // '(' '**' expression ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + 
var _literal_1; + var a; + var expression_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = $B._PyPegen.expect_token(p, 35)) // token='**' + && + (expression_var = expression_rule(p)) // expression + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "cannot use double starred expression here"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_import: 'import' ','.dotted_name+ 'from' dotted_name @@ -15657,35 +15875,36 @@ function invalid_import_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'import' ','.dotted_name+ 'from' dotted_name - if (p.error_indicator) { - return NULL; - } - var _gather_203_var; - var _keyword; - var a; - var dotted_name_var; - if ( - (a = $B._PyPegen.expect_token(p, 607)) // token='import' - && - (_gather_203_var = _gather_203_rule(p)) // ','.dotted_name+ - && - (_keyword = $B._PyPegen.expect_token(p, 608)) // token='from' - && - (dotted_name_var = dotted_name_rule(p)) // dotted_name - ) - { - _res = RAISE_SYNTAX_ERROR_STARTING_FROM (a, "Did you mean to use 'from ... import ...' instead?"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'import' ','.dotted_name+ 'from' dotted_name + if (p.error_indicator) { + return NULL; + } + var _gather_203_var; + var _keyword; + var a; + var dotted_name_var; + if ( + (a = $B._PyPegen.expect_token(p, 607)) // token='import' + && + (_gather_203_var = _gather_203_rule(p)) // ','.dotted_name+ + && + (_keyword = $B._PyPegen.expect_token(p, 608)) // token='from' + && + (dotted_name_var = dotted_name_rule(p)) // dotted_name + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_STARTING_FROM(p, a, "Did you mean to use 'from ... import ...' 
instead?"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_import_from_targets: import_from_as_names ',' NEWLINE @@ -15694,32 +15913,33 @@ function invalid_import_from_targets_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // import_from_as_names ',' NEWLINE - if (p.error_indicator) { - return NULL; - } - var _literal; - var import_from_as_names_var; - var newline_var; - if ( - (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("trailing comma not allowed without surrounding parentheses"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // import_from_as_names ',' NEWLINE + if (p.error_indicator) { + return NULL; + } + var _literal; + var import_from_as_names_var; + var newline_var; + if ( + (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "trailing comma not allowed without surrounding parentheses"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_with_stmt: @@ -15730,70 +15950,71 @@ function invalid_with_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE - if (p.error_indicator) { - return NULL; - } - var _gather_205_var; - var _keyword; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var newline_var; - if ( - (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? - && - (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' - && - (_gather_205_var = _gather_205_rule(p)) // ','.(expression ['as' star_target])+ - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("expected ':'"); - return done(); - } - p.mark = _mark; - } - { // ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE + if (p.error_indicator) { + return NULL; + } + var _gather_205_var; + var _keyword; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var newline_var; + if ( + (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? 
+ && + (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' + && + (_gather_205_var = _gather_205_rule(p)) // ','.(expression ['as' star_target])+ + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected ':'"); + break; + } + p.mark = _mark; } - var _gather_207_var; - var _keyword; - var _literal; - var _literal_1; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _opt_var_1; - UNUSED(_opt_var_1); // Silence compiler warnings - var newline_var; - if ( - (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? - && - (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' - && - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (_gather_207_var = _gather_207_rule(p)) // ','.(expressions ['as' star_target])+ - && - (_opt_var_1 = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("expected ':'"); - return done(); + { // ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE + if (p.error_indicator) { + return NULL; + } + var _gather_207_var; + var _keyword; + var _literal; + var _literal_1; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + var newline_var; + if ( + (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? + && + (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' + && + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (_gather_207_var = _gather_207_rule(p)) // ','.(expressions ['as' star_target])+ + && + (_opt_var_1 = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected ':'"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_with_stmt_indent: @@ -15804,214 +16025,216 @@ function invalid_with_stmt_indent_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; - } - var _gather_209_var; - var _literal; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var newline_var; - if ( - (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? - && - (a = $B._PyPegen.expect_token(p, 615)) // token='with' - && - (_gather_209_var = _gather_209_rule(p)) // ','.(expression ['as' star_target])+ - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'with' statement on line %d", a.lineno); - return done(); - } - p.mark = _mark; - } - { // ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? 
')' ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _gather_209_var; + var _literal; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var newline_var; + if ( + (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? + && + (a = $B._PyPegen.expect_token(p, 615)) // token='with' + && + (_gather_209_var = _gather_209_rule(p)) // ','.(expression ['as' star_target])+ + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'with' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - var _gather_211_var; - var _literal; - var _literal_1; - var _literal_2; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _opt_var_1; - UNUSED(_opt_var_1); // Silence compiler warnings - var a; - var newline_var; - if ( - (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? - && - (a = $B._PyPegen.expect_token(p, 615)) // token='with' - && - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (_gather_211_var = _gather_211_rule(p)) // ','.(expressions ['as' star_target])+ - && - (_opt_var_1 = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - && - (_literal_2 = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'with' statement on line %d", a.lineno); - return done(); + { // ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _gather_211_var; + var _literal; + var _literal_1; + var _literal_2; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + var a; + var newline_var; + if ( + (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? + && + (a = $B._PyPegen.expect_token(p, 615)) // token='with' + && + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (_gather_211_var = _gather_211_rule(p)) // ','.(expressions ['as' star_target])+ + && + (_opt_var_1 = $B._PyPegen.expect_token(p, 12), !p.error_indicator) // ','? 
+ && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + && + (_literal_2 = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'with' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_try_stmt: -// | 'try' ':' NEWLINE !INDENT -// | 'try' ':' block !('except' | 'finally') -// | 'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':' -// | 'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':' -function invalid_try_stmt_rule(p) -{ - if (p.error_indicator) { - return NULL; - } - var _res = NULL; - var _mark = p.mark; - { // 'try' ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var newline_var; - if ( - (a = $B._PyPegen.expect_token(p, 624)) // token='try' - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'try' statement on line %d", a.lineno); - return done(); - } - p.mark = _mark; - } - { // 'try' ':' block !('except' | 'finally') - if (p.error_indicator) { - return NULL; - } - var _keyword; - var _literal; - var block_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 624)) // token='try' - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (block_var = block_rule(p)) // block - && - $B._PyPegen.lookahead(0, _tmp_213_rule, p) - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("expected 'except' or 'finally' block"); - return done(); - } - p.mark = _mark; +// | 'try' ':' NEWLINE !INDENT +// | 'try' ':' block !('except' | 'finally') +// | 'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':' +// | 'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':' +function invalid_try_stmt_rule(p) +{ + if (p.error_indicator) { + return NULL; } - { // 'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'try' ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var newline_var; + if ( + (a = $B._PyPegen.expect_token(p, 624)) // token='try' + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'try' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var _literal_1; - var _loop0_214_var; - var _loop1_215_var; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var b; - var expression_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 624)) // token='try' - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (_loop0_214_var = _loop0_214_rule(p)) // block* - && - 
(_loop1_215_var = _loop1_215_rule(p)) // except_block+ - && - (a = $B._PyPegen.expect_token(p, 637)) // token='except' - && - (b = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (expression_var = expression_rule(p)) // expression - && - (_opt_var = _tmp_216_rule(p), !p.error_indicator) // ['as' NAME] - && - (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "cannot have both 'except' and 'except' on the same 'try'"); - return done(); + { // 'try' ':' block !('except' | 'finally') + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var block_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 624)) // token='try' + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (block_var = block_rule(p)) // block + && + $B._PyPegen.lookahead(0, _tmp_213_rule, p) + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected 'except' or 'finally' block"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':' - if (p.error_indicator) { - return NULL; + { // 'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':' + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var _literal_1; + var _loop0_214_var; + var _loop1_215_var; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var b; + var expression_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 624)) // token='try' + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (_loop0_214_var = _loop0_214_rule(p)) // block* + && + (_loop1_215_var = _loop1_215_rule(p)) // except_block+ + && + (a = $B._PyPegen.expect_token(p, 637)) // token='except' + && + (b = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (expression_var = expression_rule(p)) // expression + && + (_opt_var = _tmp_216_rule(p), !p.error_indicator) // ['as' NAME] + && + (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "cannot have both 'except' and 'except*' on the same 'try'"); + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var _literal_1; - var _loop0_217_var; - var _loop1_218_var; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - if ( - (_keyword = $B._PyPegen.expect_token(p, 624)) // token='try' - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (_loop0_217_var = _loop0_217_rule(p)) // block* - && - (_loop1_218_var = _loop1_218_rule(p)) // except_star_block+ - && - (a = $B._PyPegen.expect_token(p, 637)) // token='except' - && - (_opt_var = _tmp_219_rule(p), !p.error_indicator) // [expression ['as' NAME]] - && - (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "cannot have both 'except' and 'except' on the same 'try'"); - return done(); + { // 'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':' + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var _literal_1; + var _loop0_217_var; + var _loop1_218_var; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + if ( + (_keyword = $B._PyPegen.expect_token(p, 624)) // token='try' + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (_loop0_217_var = _loop0_217_rule(p)) // block* + && + (_loop1_218_var = 
_loop1_218_rule(p)) // except_star_block+ + && + (a = $B._PyPegen.expect_token(p, 637)) // token='except' + && + (_opt_var = _tmp_219_rule(p), !p.error_indicator) // [expression ['as' NAME]] + && + (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "cannot have both 'except' and 'except*' on the same 'try'"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_except_stmt: @@ -16024,111 +16247,112 @@ function invalid_except_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'except' '*'? expression ',' expressions ['as' NAME] ':' - if (p.error_indicator) { - return NULL; - } - var _keyword; - var _literal; - var _literal_1; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _opt_var_1; - UNUSED(_opt_var_1); // Silence compiler warnings - var a; - var expressions_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 637)) // token='except' - && - (_opt_var = $B._PyPegen.expect_token(p, 16), !p.error_indicator) // '*'? - && - (a = expression_rule(p)) // expression - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (expressions_var = expressions_rule(p)) // expressions - && - (_opt_var_1 = _tmp_220_rule(p), !p.error_indicator) // ['as' NAME] - && - (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = RAISE_SYNTAX_ERROR_STARTING_FROM (a, "multiple exception types must be parenthesized"); - return done(); - } - p.mark = _mark; - } - { // 'except' '*'? expression ['as' NAME] NEWLINE - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _opt_var_1; - UNUSED(_opt_var_1); // Silence compiler warnings - var a; - var expression_var; - var newline_var; - if ( - (a = $B._PyPegen.expect_token(p, 637)) // token='except' - && - (_opt_var = $B._PyPegen.expect_token(p, 16), !p.error_indicator) // '*'? - && - (expression_var = expression_rule(p)) // expression - && - (_opt_var_1 = _tmp_221_rule(p), !p.error_indicator) // ['as' NAME] - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("expected ':'"); - return done(); - } - p.mark = _mark; - } - { // 'except' NEWLINE - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'except' '*'? expression ',' expressions ['as' NAME] ':' + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var _literal_1; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + var a; + var expressions_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 637)) // token='except' + && + (_opt_var = $B._PyPegen.expect_token(p, 16), !p.error_indicator) // '*'? 
+ && + (a = expression_rule(p)) // expression + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (expressions_var = expressions_rule(p)) // expressions + && + (_opt_var_1 = _tmp_220_rule(p), !p.error_indicator) // ['as' NAME] + && + (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_STARTING_FROM(p, a, "multiple exception types must be parenthesized"); + break; + } + p.mark = _mark; } - var a; - var newline_var; - if ( - (a = $B._PyPegen.expect_token(p, 637)) // token='except' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("expected ':'"); - return done(); + { // 'except' '*'? expression ['as' NAME] NEWLINE + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + var a; + var expression_var; + var newline_var; + if ( + (a = $B._PyPegen.expect_token(p, 637)) // token='except' + && + (_opt_var = $B._PyPegen.expect_token(p, 16), !p.error_indicator) // '*'? + && + (expression_var = expression_rule(p)) // expression + && + (_opt_var_1 = _tmp_221_rule(p), !p.error_indicator) // ['as' NAME] + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected ':'"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'except' '*' (NEWLINE | ':') - if (p.error_indicator) { - return NULL; + { // 'except' NEWLINE + if (p.error_indicator) { + return NULL; + } + var a; + var newline_var; + if ( + (a = $B._PyPegen.expect_token(p, 637)) // token='except' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected ':'"); + break; + } + p.mark = _mark; } - var _literal; - var _tmp_222_var; - var a; - if ( - (a = $B._PyPegen.expect_token(p, 637)) // token='except' - && - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (_tmp_222_var = _tmp_222_rule(p)) // NEWLINE | ':' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("expected one or more exception types"); - return done(); + { // 'except' '*' (NEWLINE | ':') + if (p.error_indicator) { + return NULL; + } + var _literal; + var _tmp_222_var; + var a; + if ( + (a = $B._PyPegen.expect_token(p, 637)) // token='except' + && + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (_tmp_222_var = _tmp_222_rule(p)) // NEWLINE | ':' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected one or more exception types"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_finally_stmt: 'finally' ':' NEWLINE !INDENT @@ -16137,34 +16361,35 @@ function invalid_finally_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'finally' ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var newline_var; - if ( - (a = $B._PyPegen.expect_token(p, 633)) // token='finally' - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented 
block after 'finally' statement on line %d", a.lineno); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'finally' ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var newline_var; + if ( + (a = $B._PyPegen.expect_token(p, 633)) // token='finally' + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'finally' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_except_stmt_indent: @@ -16175,63 +16400,64 @@ function invalid_except_stmt_indent_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'except' expression ['as' NAME] ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; - } - var _literal; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var expression_var; - var newline_var; - if ( - (a = $B._PyPegen.expect_token(p, 637)) // token='except' - && - (expression_var = expression_rule(p)) // expression - && - (_opt_var = _tmp_223_rule(p), !p.error_indicator) // ['as' NAME] - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'except' statement on line %d", a.lineno); - return done(); - } - p.mark = _mark; - } - { // 'except' ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'except' expression ['as' NAME] ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var expression_var; + var newline_var; + if ( + (a = $B._PyPegen.expect_token(p, 637)) // token='except' + && + (expression_var = expression_rule(p)) // expression + && + (_opt_var = _tmp_223_rule(p), !p.error_indicator) // ['as' NAME] + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'except' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - var _literal; - var a; - var newline_var; - if ( - (a = $B._PyPegen.expect_token(p, 637)) // token='except' - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'except' statement on line %d", a.lineno); - return done(); + { // 'except' ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var newline_var; + if ( + (a = $B._PyPegen.expect_token(p, 637)) // token='except' + && + 
(_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'except' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_except_star_stmt_indent: @@ -16241,44 +16467,45 @@ function invalid_except_star_stmt_indent_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'except' '*' expression ['as' NAME] ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var expression_var; - var newline_var; - if ( - (a = $B._PyPegen.expect_token(p, 637)) // token='except' - && - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (expression_var = expression_rule(p)) // expression - && - (_opt_var = _tmp_224_rule(p), !p.error_indicator) // ['as' NAME] - && - (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'except' statement on line %d", a.lineno); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'except' '*' expression ['as' NAME] ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var expression_var; + var newline_var; + if ( + (a = $B._PyPegen.expect_token(p, 637)) // token='except' + && + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (expression_var = expression_rule(p)) // expression + && + (_opt_var = _tmp_224_rule(p), !p.error_indicator) // ['as' NAME] + && + (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'except*' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_match_stmt: @@ -16289,57 +16516,58 @@ function invalid_match_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // "match" subject_expr NEWLINE - if (p.error_indicator) { - return NULL; - } - var _keyword; - var newline_var; - var subject_expr_var; - if ( - (_keyword = $B._PyPegen.expect_soft_keyword(p, "match")) // soft_keyword='"match"' - && - (subject_expr_var = subject_expr_rule(p)) // subject_expr - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B.helper_functions.CHECK_VERSION (_void, 10, "Pattern matching is", $B.helper_functions.RAISE_SYNTAX_ERROR ( "expected ':'" )); - return done(); - } - p.mark = _mark; - } - { // "match" subject_expr ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + 
var _mark = p.mark; + { // "match" subject_expr NEWLINE + if (p.error_indicator) { + return NULL; + } + var _keyword; + var newline_var; + var subject_expr_var; + if ( + (_keyword = $B._PyPegen.expect_soft_keyword(p, "match")) // soft_keyword='"match"' + && + (subject_expr_var = subject_expr_rule(p)) // subject_expr + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B.helper_functions.CHECK_VERSION (NULL, 10, "Pattern matching is", $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected ':'" )); + break; + } + p.mark = _mark; } - var _literal; - var a; - var newline_var; - var subject; - if ( - (a = $B._PyPegen.expect_soft_keyword(p, "match")) // soft_keyword='"match"' - && - (subject = subject_expr_rule(p)) // subject_expr - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'match' statement on line %d", a.lineno); - return done(); + { // "match" subject_expr ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var newline_var; + var subject; + if ( + (a = $B._PyPegen.expect_soft_keyword(p, "match")) // soft_keyword='"match"' + && + (subject = subject_expr_rule(p)) // subject_expr + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'match' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_case_block: @@ -16350,65 +16578,66 @@ function invalid_case_block_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // "case" patterns guard? NEWLINE - if (p.error_indicator) { - return NULL; - } - var _keyword; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var newline_var; - var patterns_var; - if ( - (_keyword = $B._PyPegen.expect_soft_keyword(p, "case")) // soft_keyword='"case"' - && - (patterns_var = patterns_rule(p)) // patterns - && - (_opt_var = guard_rule(p), !p.error_indicator) // guard? - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("expected ':'"); - return done(); - } - p.mark = _mark; - } - { // "case" patterns guard? ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; - } - var _literal; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var newline_var; - var patterns_var; - if ( - (a = $B._PyPegen.expect_soft_keyword(p, "case")) // soft_keyword='"case"' - && - (patterns_var = patterns_rule(p)) // patterns - && - (_opt_var = guard_rule(p), !p.error_indicator) // guard? 
- && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'case' statement on line %d", a.lineno); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // "case" patterns guard? NEWLINE + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var newline_var; + var patterns_var; + if ( + (_keyword = $B._PyPegen.expect_soft_keyword(p, "case")) // soft_keyword='"case"' + && + (patterns_var = patterns_rule(p)) // patterns + && + (_opt_var = guard_rule(p), !p.error_indicator) // guard? + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected ':'"); + break; + } + p.mark = _mark; } - p.mark = _mark; + { // "case" patterns guard? ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var newline_var; + var patterns_var; + if ( + (a = $B._PyPegen.expect_soft_keyword(p, "case")) // soft_keyword='"case"' + && + (patterns_var = patterns_rule(p)) // patterns + && + (_opt_var = guard_rule(p), !p.error_indicator) // guard? + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'case' statement on line %d", a.lineno); + break; + } + p.mark = _mark; + } + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_as_pattern: or_pattern 'as' "_" | or_pattern 'as' !NAME expression @@ -16417,54 +16646,55 @@ function invalid_as_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // or_pattern 'as' "_" - if (p.error_indicator) { - return NULL; - } - var _keyword; - var a; - var or_pattern_var; - if ( - (or_pattern_var = or_pattern_rule(p)) // or_pattern - && - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (a = $B._PyPegen.expect_soft_keyword(p, "_")) // soft_keyword='"_"' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "cannot use '_' as a target"); - return done(); - } - p.mark = _mark; - } - { // or_pattern 'as' !NAME expression - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // or_pattern 'as' "_" + if (p.error_indicator) { + return NULL; + } + var _keyword; + var a; + var or_pattern_var; + if ( + (or_pattern_var = or_pattern_rule(p)) // or_pattern + && + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (a = $B._PyPegen.expect_soft_keyword(p, "_")) // soft_keyword='"_"' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "cannot use '_' as a target"); + break; + } + p.mark = _mark; } - var _keyword; - var a; - var or_pattern_var; - if ( - (or_pattern_var = or_pattern_rule(p)) // or_pattern - && - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - $B._PyPegen.lookahead_with_name(0, $B._PyPegen.name_token, p) - && - (a = expression_rule(p)) // 
expression - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "invalid pattern target"); - return done(); + { // or_pattern 'as' !NAME expression + if (p.error_indicator) { + return NULL; + } + var _keyword; + var a; + var or_pattern_var; + if ( + (or_pattern_var = or_pattern_rule(p)) // or_pattern + && + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + $B._PyPegen.lookahead_with_name(0, $B._PyPegen.name_token, p) + && + (a = expression_rule(p)) // expression + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "invalid pattern target"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_class_pattern: name_or_attr '(' invalid_class_argument_pattern @@ -16473,32 +16703,33 @@ function invalid_class_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // name_or_attr '(' invalid_class_argument_pattern - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var name_or_attr_var; - if ( - (name_or_attr_var = name_or_attr_rule(p)) // name_or_attr - && - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (a = invalid_class_argument_pattern_rule(p)) // invalid_class_argument_pattern - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (PyPegen_first_item ( a, $B.ast.pattern ), PyPegen_last_item ( a, $B.ast.pattern ), "positional patterns follow keyword patterns"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // name_or_attr '(' invalid_class_argument_pattern + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var name_or_attr_var; + if ( + (name_or_attr_var = name_or_attr_rule(p)) // name_or_attr + && + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (a = invalid_class_argument_pattern_rule(p)) // invalid_class_argument_pattern + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, $B.PyPegen.first_item ( a, $B.ast.pattern ), $B.PyPegen.last_item ( a, $B.ast.pattern ), "positional patterns follow keyword patterns"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_class_argument_pattern: @@ -16508,36 +16739,37 @@ function invalid_class_argument_pattern_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // [positional_patterns ','] keyword_patterns ',' positional_patterns - if (p.error_indicator) { - return NULL; - } - var _literal; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var keyword_patterns_var; - if ( - (_opt_var = _tmp_225_rule(p), !p.error_indicator) // [positional_patterns ','] - && - (keyword_patterns_var = keyword_patterns_rule(p)) // keyword_patterns - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (a = positional_patterns_rule(p)) // positional_patterns - ) - { - _res = a; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // [positional_patterns ','] keyword_patterns ',' positional_patterns + if (p.error_indicator) { + return NULL; + } + var _literal; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var keyword_patterns_var; + if ( + (_opt_var = _tmp_225_rule(p), !p.error_indicator) // [positional_patterns ','] + && + (keyword_patterns_var = keyword_patterns_rule(p)) // keyword_patterns + && + (_literal = 
$B._PyPegen.expect_token(p, 12)) // token=',' + && + (a = positional_patterns_rule(p)) // positional_patterns + ) + { + _res = a; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_if_stmt: @@ -16548,57 +16780,58 @@ function invalid_if_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'if' named_expression NEWLINE - if (p.error_indicator) { - return NULL; - } - var _keyword; - var named_expression_var; - var newline_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' - && - (named_expression_var = named_expression_rule(p)) // named_expression - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("expected ':'"); - return done(); - } - p.mark = _mark; - } - { // 'if' named_expression ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'if' named_expression NEWLINE + if (p.error_indicator) { + return NULL; + } + var _keyword; + var named_expression_var; + var newline_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' + && + (named_expression_var = named_expression_rule(p)) // named_expression + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected ':'"); + break; + } + p.mark = _mark; } - var _literal; - var a; - var a_1; - var newline_var; - if ( - (a = $B._PyPegen.expect_token(p, 642)) // token='if' - && - (a_1 = named_expression_rule(p)) // named_expression - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'if' statement on line %d", a.lineno); - return done(); + { // 'if' named_expression ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var a_1; + var newline_var; + if ( + (a = $B._PyPegen.expect_token(p, 642)) // token='if' + && + (a_1 = named_expression_rule(p)) // named_expression + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'if' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_elif_stmt: @@ -16609,57 +16842,58 @@ function invalid_elif_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'elif' named_expression NEWLINE - if (p.error_indicator) { - return NULL; - } - var _keyword; - var named_expression_var; - var newline_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 644)) // token='elif' - && - (named_expression_var = named_expression_rule(p)) // named_expression - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("expected ':'"); - return done(); - } - p.mark = _mark; - } - { // 'elif' 
named_expression ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'elif' named_expression NEWLINE + if (p.error_indicator) { + return NULL; + } + var _keyword; + var named_expression_var; + var newline_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 644)) // token='elif' + && + (named_expression_var = named_expression_rule(p)) // named_expression + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected ':'"); + break; + } + p.mark = _mark; } - var _literal; - var a; - var named_expression_var; - var newline_var; - if ( - (a = $B._PyPegen.expect_token(p, 644)) // token='elif' - && - (named_expression_var = named_expression_rule(p)) // named_expression - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'elif' statement on line %d", a.lineno); - return done(); + { // 'elif' named_expression ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var named_expression_var; + var newline_var; + if ( + (a = $B._PyPegen.expect_token(p, 644)) // token='elif' + && + (named_expression_var = named_expression_rule(p)) // named_expression + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'elif' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_else_stmt: 'else' ':' NEWLINE !INDENT @@ -16668,34 +16902,35 @@ function invalid_else_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'else' ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var newline_var; - if ( - (a = $B._PyPegen.expect_token(p, 645)) // token='else' - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'else' statement on line %d", a.lineno); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'else' ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var newline_var; + if ( + (a = $B._PyPegen.expect_token(p, 645)) // token='else' + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'else' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; 
- } } // invalid_while_stmt: @@ -16706,57 +16941,58 @@ function invalid_while_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'while' named_expression NEWLINE - if (p.error_indicator) { - return NULL; - } - var _keyword; - var named_expression_var; - var newline_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 647)) // token='while' - && - (named_expression_var = named_expression_rule(p)) // named_expression - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("expected ':'"); - return done(); - } - p.mark = _mark; - } - { // 'while' named_expression ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'while' named_expression NEWLINE + if (p.error_indicator) { + return NULL; + } + var _keyword; + var named_expression_var; + var newline_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 647)) // token='while' + && + (named_expression_var = named_expression_rule(p)) // named_expression + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected ':'"); + break; + } + p.mark = _mark; } - var _literal; - var a; - var named_expression_var; - var newline_var; - if ( - (a = $B._PyPegen.expect_token(p, 647)) // token='while' - && - (named_expression_var = named_expression_rule(p)) // named_expression - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'while' statement on line %d", a.lineno); - return done(); + { // 'while' named_expression ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var named_expression_var; + var newline_var; + if ( + (a = $B._PyPegen.expect_token(p, 647)) // token='while' + && + (named_expression_var = named_expression_rule(p)) // named_expression + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'while' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_for_stmt: @@ -16767,77 +17003,78 @@ function invalid_for_stmt_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ASYNC? 'for' star_targets 'in' star_expressions NEWLINE - if (p.error_indicator) { - return NULL; - } - var _keyword; - var _keyword_1; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var newline_var; - var star_expressions_var; - var star_targets_var; - if ( - (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? 
- && - (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' - && - (star_targets_var = star_targets_rule(p)) // star_targets - && - (_keyword_1 = $B._PyPegen.expect_token(p, 651)) // token='in' - && - (star_expressions_var = star_expressions_rule(p)) // star_expressions - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("expected ':'"); - return done(); - } - p.mark = _mark; - } - { // ASYNC? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ASYNC? 'for' star_targets 'in' star_expressions NEWLINE + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _keyword_1; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var newline_var; + var star_expressions_var; + var star_targets_var; + if ( + (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? + && + (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' + && + (star_targets_var = star_targets_rule(p)) // star_targets + && + (_keyword_1 = $B._PyPegen.expect_token(p, 651)) // token='in' + && + (star_expressions_var = star_expressions_rule(p)) // star_expressions + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected ':'"); + break; + } + p.mark = _mark; } - var _keyword; - var _literal; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var a; - var newline_var; - var star_expressions_var; - var star_targets_var; - if ( - (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? - && - (a = $B._PyPegen.expect_token(p, 650)) // token='for' - && - (star_targets_var = star_targets_rule(p)) // star_targets - && - (_keyword = $B._PyPegen.expect_token(p, 651)) // token='in' - && - (star_expressions_var = star_expressions_rule(p)) // star_expressions - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after 'for' statement on line %d", a.lineno); - return done(); + { // ASYNC? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _literal; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var a; + var newline_var; + var star_expressions_var; + var star_targets_var; + if ( + (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? 
+ && + (a = $B._PyPegen.expect_token(p, 650)) // token='for' + && + (star_targets_var = star_targets_rule(p)) // star_targets + && + (_keyword = $B._PyPegen.expect_token(p, 651)) // token='in' + && + (star_expressions_var = star_expressions_rule(p)) // star_expressions + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after 'for' statement on line %d", a.lineno); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_def_raw: @@ -16847,59 +17084,60 @@ function invalid_def_raw_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ASYNC? 'def' NAME type_params? '(' params? ')' ['->' expression] ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var _literal_2; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _opt_var_1; - UNUSED(_opt_var_1); // Silence compiler warnings - var _opt_var_2; - UNUSED(_opt_var_2); // Silence compiler warnings - var _opt_var_3; - UNUSED(_opt_var_3); // Silence compiler warnings - var a; - var name_var; - var newline_var; - if ( - (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? - && - (a = $B._PyPegen.expect_token(p, 652)) // token='def' - && - (name_var = $B._PyPegen.name_token(p)) // NAME - && - (_opt_var_1 = type_params_rule(p), !p.error_indicator) // type_params? - && - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (_opt_var_2 = params_rule(p), !p.error_indicator) // params? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - && - (_opt_var_3 = _tmp_226_rule(p), !p.error_indicator) // ['->' expression] - && - (_literal_2 = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after function definition on line %d", a.lineno); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ASYNC? 'def' NAME type_params? '(' params? ')' ['->' expression] ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var _literal_2; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + var _opt_var_2; + UNUSED(_opt_var_2); // Silence compiler warnings + var _opt_var_3; + UNUSED(_opt_var_3); // Silence compiler warnings + var a; + var name_var; + var newline_var; + if ( + (_opt_var = $B._PyPegen.expect_token(p, ASYNC), !p.error_indicator) // ASYNC? + && + (a = $B._PyPegen.expect_token(p, 652)) // token='def' + && + (name_var = $B._PyPegen.name_token(p)) // NAME + && + (_opt_var_1 = type_params_rule(p), !p.error_indicator) // type_params? + && + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (_opt_var_2 = params_rule(p), !p.error_indicator) // params? 
+ && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + && + (_opt_var_3 = _tmp_226_rule(p), !p.error_indicator) // ['->' expression] + && + (_literal_2 = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after function definition on line %d", a.lineno); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_class_def_raw: @@ -16910,73 +17148,74 @@ function invalid_class_def_raw_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'class' NAME type_params? ['(' arguments? ')'] NEWLINE - if (p.error_indicator) { - return NULL; - } - var _keyword; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _opt_var_1; - UNUSED(_opt_var_1); // Silence compiler warnings - var name_var; - var newline_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 654)) // token='class' - && - (name_var = $B._PyPegen.name_token(p)) // NAME - && - (_opt_var = type_params_rule(p), !p.error_indicator) // type_params? - && - (_opt_var_1 = _tmp_227_rule(p), !p.error_indicator) // ['(' arguments? ')'] - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR ("expected ':'"); - return done(); - } - p.mark = _mark; - } - { // 'class' NAME type_params? ['(' arguments? ')'] ':' NEWLINE !INDENT - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'class' NAME type_params? ['(' arguments? ')'] NEWLINE + if (p.error_indicator) { + return NULL; + } + var _keyword; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + var name_var; + var newline_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 654)) // token='class' + && + (name_var = $B._PyPegen.name_token(p)) // NAME + && + (_opt_var = type_params_rule(p), !p.error_indicator) // type_params? + && + (_opt_var_1 = _tmp_227_rule(p), !p.error_indicator) // ['(' arguments? ')'] + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR(p, "expected ':'"); + break; + } + p.mark = _mark; } - var _literal; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _opt_var_1; - UNUSED(_opt_var_1); // Silence compiler warnings - var a; - var name_var; - var newline_var; - if ( - (a = $B._PyPegen.expect_token(p, 654)) // token='class' - && - (name_var = $B._PyPegen.name_token(p)) // NAME - && - (_opt_var = type_params_rule(p), !p.error_indicator) // type_params? - && - (_opt_var_1 = _tmp_228_rule(p), !p.error_indicator) // ['(' arguments? ')'] - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT - ) - { - _res = $B.helper_functions.RAISE_INDENTATION_ERROR ("expected an indented block after class definition on line %d", a.lineno); - return done(); + { // 'class' NAME type_params? ['(' arguments? 
')'] ':' NEWLINE !INDENT + if (p.error_indicator) { + return NULL; + } + var _literal; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + var a; + var name_var; + var newline_var; + if ( + (a = $B._PyPegen.expect_token(p, 654)) // token='class' + && + (name_var = $B._PyPegen.name_token(p)) // NAME + && + (_opt_var = type_params_rule(p), !p.error_indicator) // type_params? + && + (_opt_var_1 = _tmp_228_rule(p), !p.error_indicator) // ['(' arguments? ')'] + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, INDENT) // token=INDENT + ) + { + _res = $B.helper_functions.RAISE_INDENTATION_ERROR(p, "expected an indented block after class definition on line %d", a.lineno); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_double_starred_kvpairs: @@ -16988,74 +17227,75 @@ function invalid_double_starred_kvpairs_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.double_starred_kvpair+ ',' invalid_kvpair - if (p.error_indicator) { - return NULL; - } - var _gather_229_var; - var _literal; - var invalid_kvpair_var; - if ( - (_gather_229_var = _gather_229_rule(p)) // ','.double_starred_kvpair+ - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (invalid_kvpair_var = invalid_kvpair_rule(p)) // invalid_kvpair - ) - { - _res = $B._PyPegen.dummy_name(p, _gather_229_var, _literal, invalid_kvpair_var); - return done(); - } - p.mark = _mark; - } - { // expression ':' '*' bitwise_or - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var bitwise_or_var; - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (a = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = RAISE_SYNTAX_ERROR_STARTING_FROM (a, "cannot use a starred expression in a dictionary value"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.double_starred_kvpair+ ',' invalid_kvpair + if (p.error_indicator) { + return NULL; + } + var _gather_229_var; + var _literal; + var invalid_kvpair_var; + if ( + (_gather_229_var = _gather_229_rule(p)) // ','.double_starred_kvpair+ + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (invalid_kvpair_var = invalid_kvpair_rule(p)) // invalid_kvpair + ) + { + _res = $B._PyPegen.dummy_name(p, _gather_229_var, _literal, invalid_kvpair_var); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // expression ':' &('}' | ',') - if (p.error_indicator) { - return NULL; + { // expression ':' '*' bitwise_or + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var bitwise_or_var; + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (a = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_STARTING_FROM(p, a, "cannot use a starred expression in a dictionary value"); + break; + } + p.mark = _mark; } - var a; - var expression_var; - if ( - (expression_var = 
expression_rule(p)) // expression - && - (a = $B._PyPegen.expect_token(p, 11)) // token=':' - && - $B._PyPegen.lookahead(1, _tmp_231_rule, p) - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "expression expected after dictionary key and ':'"); - return done(); + { // expression ':' &('}' | ',') + if (p.error_indicator) { + return NULL; + } + var a; + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + (a = $B._PyPegen.expect_token(p, 11)) // token=':' + && + $B._PyPegen.lookahead(1, _tmp_231_rule, p) + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "expression expected after dictionary key and ':'"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_kvpair: @@ -17067,70 +17307,71 @@ function invalid_kvpair_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expression !(':') - if (p.error_indicator) { - return NULL; - } - var a; - if ( - (a = expression_rule(p)) // expression - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 11) // token=(':') - ) - { - _res = $B.helper_functions.RAISE_ERROR_KNOWN_LOCATION (p, $B.parser_constants.PyExc_SyntaxError, a.lineno, a.end_col_offset - 1, a.end_lineno, - 1, "':' expected after dictionary key"); - return done(); - } - p.mark = _mark; - } - { // expression ':' '*' bitwise_or - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var bitwise_or_var; - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - && - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (a = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or - ) - { - _res = RAISE_SYNTAX_ERROR_STARTING_FROM (a, "cannot use a starred expression in a dictionary value"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expression !(':') + if (p.error_indicator) { + return NULL; + } + var a; + if ( + (a = expression_rule(p)) // expression + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 11) // token=(':') + ) + { + _res = $B.helper_functions.RAISE_ERROR_KNOWN_LOCATION (p, $B.parser_constants.PyExc_SyntaxError, a.lineno, a.end_col_offset - 1, a.end_lineno, - 1, "':' expected after dictionary key"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // expression ':' &('}' | ',') - if (p.error_indicator) { - return NULL; + { // expression ':' '*' bitwise_or + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var bitwise_or_var; + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (a = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_STARTING_FROM(p, a, "cannot use a starred expression in a dictionary value"); + break; + } + p.mark = _mark; } - var a; - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - && - (a = $B._PyPegen.expect_token(p, 11)) // token=':' - && - $B._PyPegen.lookahead(1, _tmp_232_rule, p) - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "expression expected after dictionary key and ':'"); - return done(); + { // expression ':' &('}' | ',') + if (p.error_indicator) { + return NULL; + } 
+ var a; + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + (a = $B._PyPegen.expect_token(p, 11)) // token=':' + && + $B._PyPegen.lookahead(1, _tmp_232_rule, p) + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "expression expected after dictionary key and ':'"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_starred_expression: '*' expression '=' expression @@ -17139,35 +17380,36 @@ function invalid_starred_expression_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '*' expression '=' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - var b; - var expression_var; - if ( - (a = $B._PyPegen.expect_token(p, 16)) // token='*' - && - (expression_var = expression_rule(p)) // expression - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (b = expression_rule(p)) // expression - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE (a, b, "cannot assign to iterable argument unpacking"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '*' expression '=' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + var b; + var expression_var; + if ( + (a = $B._PyPegen.expect_token(p, 16)) // token='*' + && + (expression_var = expression_rule(p)) // expression + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (b = expression_rule(p)) // expression + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_RANGE(p, a, b, "cannot assign to iterable argument unpacking"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_replacement_field: @@ -17187,248 +17429,249 @@ function invalid_replacement_field_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '{' '=' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (a = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "f-string: valid expression required before '='"); - return done(); - } - p.mark = _mark; - } - { // '{' '!' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (a = $B._PyPegen.expect_token(p, 54)) // token='!' 
- ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "f-string: valid expression required before '!'"); - return done(); - } - p.mark = _mark; - } - { // '{' ':' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (a = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "f-string: valid expression required before ':'"); - return done(); - } - p.mark = _mark; - } - { // '{' '}' - if (p.error_indicator) { - return NULL; - } - var _literal; - var a; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (a = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "f-string: valid expression required before '}'"); - return done(); - } - p.mark = _mark; - } - { // '{' !(yield_expr | star_expressions) - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - $B._PyPegen.lookahead(0, _tmp_233_rule, p) - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ("f-string: expecting a valid expression after '{'"); - return done(); - } - p.mark = _mark; - } - { // '{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}') - if (p.error_indicator) { - return NULL; - } - var _literal; - var _tmp_234_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (_tmp_234_var = _tmp_234_rule(p)) // yield_expr | star_expressions - && - $B._PyPegen.lookahead(0, _tmp_235_rule, p) - ) - { - _res = PyErr_Occurred () ? $B.parser_constants.NULL : $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '=', or '!', or ':', or '}'"); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '{' '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (a = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "f-string: valid expression required before '='"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '{' (yield_expr | star_expressions) '=' !('!' | ':' | '}') - if (p.error_indicator) { - return NULL; + { // '{' '!' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (a = $B._PyPegen.expect_token(p, 54)) // token='!' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "f-string: valid expression required before '!'"); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var _tmp_236_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (_tmp_236_var = _tmp_236_rule(p)) // yield_expr | star_expressions - && - (_literal_1 = $B._PyPegen.expect_token(p, 22)) // token='=' - && - $B._PyPegen.lookahead(0, _tmp_237_rule, p) - ) - { - _res = PyErr_Occurred () ? 
$B.parser_constants.NULL : $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '!', or ':', or '}'"); - return done(); + { // '{' ':' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (a = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "f-string: valid expression required before ':'"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '{' (yield_expr | star_expressions) '='? invalid_conversion_character - if (p.error_indicator) { - return NULL; + { // '{' '}' + if (p.error_indicator) { + return NULL; + } + var _literal; + var a; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (a = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, a, "f-string: valid expression required before '}'"); + break; + } + p.mark = _mark; } - var _literal; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _tmp_238_var; - var invalid_conversion_character_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (_tmp_238_var = _tmp_238_rule(p)) // yield_expr | star_expressions - && - (_opt_var = $B._PyPegen.expect_token(p, 22), !p.error_indicator) // '='? - && - (invalid_conversion_character_var = invalid_conversion_character_rule(p)) // invalid_conversion_character - ) - { - _res = $B._PyPegen.dummy_name(p, _literal, _tmp_238_var, _opt_var, invalid_conversion_character_var); - return done(); + { // '{' !(yield_expr | star_expressions) + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + $B._PyPegen.lookahead(0, _tmp_233_rule, p) + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p, "f-string: expecting a valid expression after '{'"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}') - if (p.error_indicator) { - return NULL; + { // '{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}') + if (p.error_indicator) { + return NULL; + } + var _literal; + var _tmp_234_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (_tmp_234_var = _tmp_234_rule(p)) // yield_expr | star_expressions + && + $B._PyPegen.lookahead(0, _tmp_235_rule, p) + ) + { + _res = PyErr_Occurred () ? $B.parser_constants.NULL : $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p, "f-string: expecting '=', or '!', or ':', or '}'"); + break; + } + p.mark = _mark; } - var _literal; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _opt_var_1; - UNUSED(_opt_var_1); // Silence compiler warnings - var _tmp_239_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (_tmp_239_var = _tmp_239_rule(p)) // yield_expr | star_expressions - && - (_opt_var = $B._PyPegen.expect_token(p, 22), !p.error_indicator) // '='? - && - (_opt_var_1 = _tmp_240_rule(p), !p.error_indicator) // ['!' NAME] - && - $B._PyPegen.lookahead(0, _tmp_241_rule, p) - ) - { - _res = PyErr_Occurred () ? $B.parser_constants.NULL : $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting ':' or '}'"); - return done(); + { // '{' (yield_expr | star_expressions) '=' !('!' 
| ':' | '}') + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var _tmp_236_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (_tmp_236_var = _tmp_236_rule(p)) // yield_expr | star_expressions + && + (_literal_1 = $B._PyPegen.expect_token(p, 22)) // token='=' + && + $B._PyPegen.lookahead(0, _tmp_237_rule, p) + ) + { + _res = PyErr_Occurred () ? $B.parser_constants.NULL : $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p, "f-string: expecting '!', or ':', or '}'"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}' - if (p.error_indicator) { - return NULL; + { // '{' (yield_expr | star_expressions) '='? invalid_conversion_character + if (p.error_indicator) { + return NULL; + } + var _literal; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _tmp_238_var; + var invalid_conversion_character_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (_tmp_238_var = _tmp_238_rule(p)) // yield_expr | star_expressions + && + (_opt_var = $B._PyPegen.expect_token(p, 22), !p.error_indicator) // '='? + && + (invalid_conversion_character_var = invalid_conversion_character_rule(p)) // invalid_conversion_character + ) + { + _res = $B._PyPegen.dummy_name(p, _literal, _tmp_238_var, _opt_var, invalid_conversion_character_var); + break; + } + p.mark = _mark; } - var _literal; - var _literal_1; - var _loop0_244_var; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _opt_var_1; - UNUSED(_opt_var_1); // Silence compiler warnings - var _tmp_242_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (_tmp_242_var = _tmp_242_rule(p)) // yield_expr | star_expressions - && - (_opt_var = $B._PyPegen.expect_token(p, 22), !p.error_indicator) // '='? - && - (_opt_var_1 = _tmp_243_rule(p), !p.error_indicator) // ['!' NAME] - && - (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (_loop0_244_var = _loop0_244_rule(p)) // fstring_format_spec* - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 26) // token='}' - ) - { - _res = PyErr_Occurred () ? $B.parser_constants.NULL : $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '}', or format specs"); - return done(); + { // '{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}') + if (p.error_indicator) { + return NULL; + } + var _literal; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + var _tmp_239_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (_tmp_239_var = _tmp_239_rule(p)) // yield_expr | star_expressions + && + (_opt_var = $B._PyPegen.expect_token(p, 22), !p.error_indicator) // '='? + && + (_opt_var_1 = _tmp_240_rule(p), !p.error_indicator) // ['!' NAME] + && + $B._PyPegen.lookahead(0, _tmp_241_rule, p) + ) + { + _res = PyErr_Occurred () ? $B.parser_constants.NULL : $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p, "f-string: expecting ':' or '}'"); + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '{' (yield_expr | star_expressions) '='? ['!' NAME] !'}' - if (p.error_indicator) { - return NULL; + { // '{' (yield_expr | star_expressions) '='? ['!' 
NAME] ':' fstring_format_spec* !'}' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var _loop0_244_var; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + var _tmp_242_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (_tmp_242_var = _tmp_242_rule(p)) // yield_expr | star_expressions + && + (_opt_var = $B._PyPegen.expect_token(p, 22), !p.error_indicator) // '='? + && + (_opt_var_1 = _tmp_243_rule(p), !p.error_indicator) // ['!' NAME] + && + (_literal_1 = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (_loop0_244_var = _loop0_244_rule(p)) // fstring_format_spec* + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 26) // token='}' + ) + { + _res = PyErr_Occurred () ? $B.parser_constants.NULL : $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p, "f-string: expecting '}', or format specs"); + break; + } + p.mark = _mark; } - var _literal; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var _opt_var_1; - UNUSED(_opt_var_1); // Silence compiler warnings - var _tmp_245_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - && - (_tmp_245_var = _tmp_245_rule(p)) // yield_expr | star_expressions - && - (_opt_var = $B._PyPegen.expect_token(p, 22), !p.error_indicator) // '='? - && - (_opt_var_1 = _tmp_246_rule(p), !p.error_indicator) // ['!' NAME] - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 26) // token='}' - ) - { - _res = PyErr_Occurred () ? $B.parser_constants.NULL : $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '}'"); - return done(); + { // '{' (yield_expr | star_expressions) '='? ['!' NAME] !'}' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var _opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + var _tmp_245_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + && + (_tmp_245_var = _tmp_245_rule(p)) // yield_expr | star_expressions + && + (_opt_var = $B._PyPegen.expect_token(p, 22), !p.error_indicator) // '='? + && + (_opt_var_1 = _tmp_246_rule(p), !p.error_indicator) // ['!' NAME] + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 26) // token='}' + ) + { + _res = PyErr_Occurred () ? $B.parser_constants.NULL : $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p, "f-string: expecting '}'"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // invalid_conversion_character: '!' &(':' | '}') | '!' !NAME @@ -17437,44 +17680,45 @@ function invalid_conversion_character_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '!' &(':' | '}') - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' - && - $B._PyPegen.lookahead(1, _tmp_247_rule, p) - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ("f-string: missing conversion character"); - return done(); - } - p.mark = _mark; - } - { // '!' !NAME - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '!' &(':' | '}') + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' 
+ && + $B._PyPegen.lookahead(1, _tmp_247_rule, p) + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p, "f-string: missing conversion character"); + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' - && - $B._PyPegen.lookahead_with_name(0, $B._PyPegen.name_token, p) - ) - { - _res = $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ("f-string: invalid conversion character"); - return done(); + { // '!' !NAME + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' + && + $B._PyPegen.lookahead_with_name(0, $B._PyPegen.name_token, p) + ) + { + _res = $B.helper_functions.RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(p, "f-string: invalid conversion character"); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_1: NEWLINE @@ -17609,29 +17853,30 @@ function _gather_4_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // simple_stmt _loop0_5 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = simple_stmt_rule(p)) // simple_stmt - && - (seq = _loop0_5_rule(p)) // _loop0_5 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // simple_stmt _loop0_5 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = simple_stmt_rule(p)) // simple_stmt + && + (seq = _loop0_5_rule(p)) // _loop0_5 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_6: 'import' | 'from' @@ -17640,40 +17885,41 @@ function _tmp_6_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'import' - if (p.error_indicator) { - return NULL; - } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 607)) // token='import' - ) - { - _res = _keyword; - return done(); - } - p.mark = _mark; - } - { // 'from' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'import' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 607)) // token='import' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 608)) // token='from' - ) - { - _res = _keyword; - return done(); + { // 'from' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 608)) // token='from' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_7: 'def' | '@' | ASYNC @@ -17682,54 +17928,55 @@ function _tmp_7_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'def' - if (p.error_indicator) { - return NULL; - } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 652)) // token='def' - ) - { - _res = _keyword; - return done(); - } - p.mark = _mark; - } - { // '@' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 49)) // token='@' - ) - { - _res = _literal; - return done(); + while (1) { + var _res = NULL; + var 
_mark = p.mark; + { // 'def' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 652)) // token='def' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // ASYNC - if (p.error_indicator) { - return NULL; + { // '@' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 49)) // token='@' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var async_var; - if ( - (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' - ) - { - _res = async_var; - return done(); + { // ASYNC + if (p.error_indicator) { + return NULL; + } + var async_var; + if ( + (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' + ) + { + _res = async_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_8: 'class' | '@' @@ -17738,40 +17985,41 @@ function _tmp_8_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'class' - if (p.error_indicator) { - return NULL; - } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 654)) // token='class' - ) - { - _res = _keyword; - return done(); - } - p.mark = _mark; - } - { // '@' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'class' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 654)) // token='class' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 49)) // token='@' - ) - { - _res = _literal; - return done(); + { // '@' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 49)) // token='@' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_9: 'with' | ASYNC @@ -17780,40 +18028,41 @@ function _tmp_9_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'with' - if (p.error_indicator) { - return NULL; - } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' - ) - { - _res = _keyword; - return done(); - } - p.mark = _mark; - } - { // ASYNC - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'with' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 615)) // token='with' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - var async_var; - if ( - (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' - ) - { - _res = async_var; - return done(); + { // ASYNC + if (p.error_indicator) { + return NULL; + } + var async_var; + if ( + (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' + ) + { + _res = async_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_10: 'for' | ASYNC @@ -17822,40 +18071,41 @@ function _tmp_10_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'for' - if (p.error_indicator) { - return NULL; - } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' - ) - { - _res = _keyword; - return done(); - } - 
p.mark = _mark; - } - { // ASYNC - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'for' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 650)) // token='for' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - var async_var; - if ( - (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' - ) - { - _res = async_var; - return done(); + { // ASYNC + if (p.error_indicator) { + return NULL; + } + var async_var; + if ( + (async_var = $B._PyPegen.expect_token(p, ASYNC)) // token='ASYNC' + ) + { + _res = async_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_11: '=' annotated_rhs @@ -17864,29 +18114,30 @@ function _tmp_11_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '=' annotated_rhs - if (p.error_indicator) { - return NULL; - } - var _literal; - var d; - if ( - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (d = annotated_rhs_rule(p)) // annotated_rhs - ) - { - _res = d; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '=' annotated_rhs + if (p.error_indicator) { + return NULL; + } + var _literal; + var d; + if ( + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (d = annotated_rhs_rule(p)) // annotated_rhs + ) + { + _res = d; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_12: '(' single_target ')' | single_subscript_attribute_target @@ -17895,77 +18146,79 @@ function _tmp_12_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '(' single_target ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var b; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (b = single_target_rule(p)) // single_target - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = b; - return done(); - } - p.mark = _mark; - } - { // single_subscript_attribute_target - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '(' single_target ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var b; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (b = single_target_rule(p)) // single_target + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = b; + break; + } + p.mark = _mark; } - var single_subscript_attribute_target_var; - if ( - (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target - ) - { - _res = single_subscript_attribute_target_var; - return done(); + { // single_subscript_attribute_target + if (p.error_indicator) { + return NULL; + } + var single_subscript_attribute_target_var; + if ( + (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target + ) + { + _res = single_subscript_attribute_target_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_13: '=' annotated_rhs function _tmp_13_rule(p) -{ - if (p.error_indicator) { - return NULL; - } - var _res = NULL; - var _mark = p.mark; - { // '=' annotated_rhs - if 
(p.error_indicator) { - return NULL; - } - var _literal; - var d; - if ( - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - && - (d = annotated_rhs_rule(p)) // annotated_rhs - ) - { - _res = d; - return done(); +{ + if (p.error_indicator) { + return NULL; + } + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '=' annotated_rhs + if (p.error_indicator) { + return NULL; + } + var _literal; + var d; + if ( + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + && + (d = annotated_rhs_rule(p)) // annotated_rhs + ) + { + _res = d; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop1_14: (star_targets '=') @@ -18007,40 +18260,41 @@ function _tmp_15_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; - } - { // star_expressions - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - _res = star_expressions_var; - return done(); + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + _res = star_expressions_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_16: yield_expr | star_expressions @@ -18049,40 +18303,41 @@ function _tmp_16_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; - } - { // star_expressions - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - _res = star_expressions_var; - return done(); + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + _res = star_expressions_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_17: 'from' expression @@ -18091,29 +18346,30 @@ function _tmp_17_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'from' expression - if (p.error_indicator) { - return NULL; - } - var _keyword; - var z; - if ( - (_keyword = $B._PyPegen.expect_token(p, 608)) // token='from' - && - (z = expression_rule(p)) // expression - ) - { - _res = 
z; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'from' expression + if (p.error_indicator) { + return NULL; + } + var _keyword; + var z; + if ( + (_keyword = $B._PyPegen.expect_token(p, 608)) // token='from' + && + (z = expression_rule(p)) // expression + ) + { + _res = z; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_19: ',' NAME @@ -18155,29 +18411,30 @@ function _gather_18_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // NAME _loop0_19 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = $B._PyPegen.name_token(p)) // NAME - && - (seq = _loop0_19_rule(p)) // _loop0_19 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // NAME _loop0_19 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = $B._PyPegen.name_token(p)) // NAME + && + (seq = _loop0_19_rule(p)) // _loop0_19 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_21: ',' NAME @@ -18219,29 +18476,30 @@ function _gather_20_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // NAME _loop0_21 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = $B._PyPegen.name_token(p)) // NAME - && - (seq = _loop0_21_rule(p)) // _loop0_21 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // NAME _loop0_21 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = $B._PyPegen.name_token(p)) // NAME + && + (seq = _loop0_21_rule(p)) // _loop0_21 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_22: ';' | NEWLINE @@ -18250,40 +18508,41 @@ function _tmp_22_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ';' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 13)) // token=';' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // NEWLINE - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ';' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 13)) // token=';' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var newline_var; - if ( - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = newline_var; - return done(); + { // NEWLINE + if (p.error_indicator) { + return NULL; + } + var newline_var; + if ( + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = newline_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_23: ',' expression @@ -18292,29 +18551,30 @@ function _tmp_23_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ',' expression - if (p.error_indicator) { - return NULL; - } - var 
_literal; - var z; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (z = expression_rule(p)) // expression - ) - { - _res = z; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ',' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var z; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (z = expression_rule(p)) // expression + ) + { + _res = z; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_24: ('.' | '...') @@ -18419,29 +18679,30 @@ function _gather_26_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // import_from_as_name _loop0_27 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = import_from_as_name_rule(p)) // import_from_as_name - && - (seq = _loop0_27_rule(p)) // _loop0_27 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // import_from_as_name _loop0_27 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = import_from_as_name_rule(p)) // import_from_as_name + && + (seq = _loop0_27_rule(p)) // _loop0_27 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_28: 'as' NAME @@ -18450,29 +18711,30 @@ function _tmp_28_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' NAME - if (p.error_indicator) { - return NULL; - } - var _keyword; - var z; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (z = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = z; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' NAME + if (p.error_indicator) { + return NULL; + } + var _keyword; + var z; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (z = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = z; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_30: ',' dotted_as_name @@ -18514,29 +18776,30 @@ function _gather_29_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // dotted_as_name _loop0_30 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = dotted_as_name_rule(p)) // dotted_as_name - && - (seq = _loop0_30_rule(p)) // _loop0_30 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // dotted_as_name _loop0_30 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = dotted_as_name_rule(p)) // dotted_as_name + && + (seq = _loop0_30_rule(p)) // _loop0_30 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_31: 'as' NAME @@ -18545,29 +18808,30 @@ function _tmp_31_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' NAME - if (p.error_indicator) { - return NULL; - } - var _keyword; - var z; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (z = 
$B._PyPegen.name_token(p)) // NAME - ) - { - _res = z; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' NAME + if (p.error_indicator) { + return NULL; + } + var _keyword; + var z; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (z = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = z; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop1_32: ('@' named_expression NEWLINE) @@ -18609,32 +18873,33 @@ function _tmp_33_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '(' arguments? ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var z; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (z = arguments_rule(p), !p.error_indicator) // arguments? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = z; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '(' arguments? ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var z; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (z = arguments_rule(p), !p.error_indicator) // arguments? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = z; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_34: '->' expression @@ -18643,29 +18908,30 @@ function _tmp_34_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '->' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var z; - if ( - (_literal = $B._PyPegen.expect_token(p, 51)) // token='->' - && - (z = expression_rule(p)) // expression - ) - { - _res = z; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '->' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var z; + if ( + (_literal = $B._PyPegen.expect_token(p, 51)) // token='->' + && + (z = expression_rule(p)) // expression + ) + { + _res = z; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_35: '->' expression @@ -18674,29 +18940,30 @@ function _tmp_35_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '->' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var z; - if ( - (_literal = $B._PyPegen.expect_token(p, 51)) // token='->' - && - (z = expression_rule(p)) // expression - ) - { - _res = z; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '->' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var z; + if ( + (_literal = $B._PyPegen.expect_token(p, 51)) // token='->' + && + (z = expression_rule(p)) // expression + ) + { + _res = z; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - _res = NULL; - function done(){ - return _res; + _res = NULL; + break; } + return _res; } // _loop0_36: param_no_default @@ -19209,29 +19476,30 @@ function _gather_51_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // with_item _loop0_52 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = with_item_rule(p)) // with_item - && - (seq = _loop0_52_rule(p)) // _loop0_52 - ) - { - _res = 
$B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // with_item _loop0_52 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = with_item_rule(p)) // with_item + && + (seq = _loop0_52_rule(p)) // _loop0_52 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_54: ',' with_item @@ -19273,29 +19541,30 @@ function _gather_53_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // with_item _loop0_54 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = with_item_rule(p)) // with_item - && - (seq = _loop0_54_rule(p)) // _loop0_54 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // with_item _loop0_54 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = with_item_rule(p)) // with_item + && + (seq = _loop0_54_rule(p)) // _loop0_54 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_56: ',' with_item @@ -19337,29 +19606,30 @@ function _gather_55_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // with_item _loop0_56 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = with_item_rule(p)) // with_item - && - (seq = _loop0_56_rule(p)) // _loop0_56 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // with_item _loop0_56 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = with_item_rule(p)) // with_item + && + (seq = _loop0_56_rule(p)) // _loop0_56 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_58: ',' with_item @@ -19401,29 +19671,30 @@ function _gather_57_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // with_item _loop0_58 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = with_item_rule(p)) // with_item - && - (seq = _loop0_58_rule(p)) // _loop0_58 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // with_item _loop0_58 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = with_item_rule(p)) // with_item + && + (seq = _loop0_58_rule(p)) // _loop0_58 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_59: ',' | ')' | ':' @@ -19432,54 +19703,55 @@ function _tmp_59_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // ')' - if (p.error_indicator) { - return NULL; - 
} - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = _literal; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // ':' - if (p.error_indicator) { - return NULL; + { // ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = _literal; - return done(); + { // ':' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop1_60: except_block @@ -19554,29 +19826,30 @@ function _tmp_62_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' NAME - if (p.error_indicator) { - return NULL; - } - var _keyword; - var z; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (z = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = z; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' NAME + if (p.error_indicator) { + return NULL; + } + var _keyword; + var z; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (z = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = z; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_63: 'as' NAME @@ -19585,29 +19858,30 @@ function _tmp_63_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' NAME - if (p.error_indicator) { - return NULL; - } - var _keyword; - var z; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (z = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = z; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' NAME + if (p.error_indicator) { + return NULL; + } + var _keyword; + var z; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (z = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = z; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop1_64: case_block @@ -19682,29 +19956,30 @@ function _gather_65_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // closed_pattern _loop0_66 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = closed_pattern_rule(p)) // closed_pattern - && - (seq = _loop0_66_rule(p)) // _loop0_66 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // closed_pattern _loop0_66 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = closed_pattern_rule(p)) // closed_pattern + && + (seq = _loop0_66_rule(p)) // _loop0_66 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = 
NULL; - function done(){ return _res; - } } // _tmp_67: '+' | '-' @@ -19713,40 +19988,41 @@ function _tmp_67_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '+' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 14)) // token='+' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '-' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '+' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 14)) // token='+' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' - ) - { - _res = _literal; - return done(); + { // '-' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_68: '+' | '-' @@ -19755,40 +20031,41 @@ function _tmp_68_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '+' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 14)) // token='+' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '-' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '+' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 14)) // token='+' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' - ) - { - _res = _literal; - return done(); + { // '-' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 15)) // token='-' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_69: '.' | '(' | '=' @@ -19797,54 +20074,55 @@ function _tmp_69_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '.' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '(' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - ) - { - _res = _literal; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '.' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' 
+ ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '=' - if (p.error_indicator) { - return NULL; + { // '(' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = _literal; - return done(); + { // '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_70: '.' | '(' | '=' @@ -19853,54 +20131,55 @@ function _tmp_70_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '.' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '(' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '=' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '.' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = _literal; - return done(); + { // '(' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + { // '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = _literal; + break; + } + p.mark = _mark; + } + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_72: ',' maybe_star_pattern @@ -19942,29 +20221,30 @@ function _gather_71_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // maybe_star_pattern _loop0_72 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = maybe_star_pattern_rule(p)) // maybe_star_pattern - && - (seq = _loop0_72_rule(p)) // _loop0_72 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // maybe_star_pattern _loop0_72 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = maybe_star_pattern_rule(p)) // maybe_star_pattern + && + (seq = _loop0_72_rule(p)) // _loop0_72 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_74: ',' key_value_pattern @@ -20006,29 +20286,30 @@ function _gather_73_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // key_value_pattern _loop0_74 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = key_value_pattern_rule(p)) // key_value_pattern - && - 
(seq = _loop0_74_rule(p)) // _loop0_74 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // key_value_pattern _loop0_74 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = key_value_pattern_rule(p)) // key_value_pattern + && + (seq = _loop0_74_rule(p)) // _loop0_74 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_75: literal_expr | attr @@ -20037,40 +20318,41 @@ function _tmp_75_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // literal_expr - if (p.error_indicator) { - return NULL; - } - var literal_expr_var; - if ( - (literal_expr_var = literal_expr_rule(p)) // literal_expr - ) - { - _res = literal_expr_var; - return done(); - } - p.mark = _mark; - } - { // attr - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // literal_expr + if (p.error_indicator) { + return NULL; + } + var literal_expr_var; + if ( + (literal_expr_var = literal_expr_rule(p)) // literal_expr + ) + { + _res = literal_expr_var; + break; + } + p.mark = _mark; } - var attr_var; - if ( - (attr_var = attr_rule(p)) // attr - ) - { - _res = attr_var; - return done(); + { // attr + if (p.error_indicator) { + return NULL; + } + var attr_var; + if ( + (attr_var = attr_rule(p)) // attr + ) + { + _res = attr_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_77: ',' pattern @@ -20112,29 +20394,30 @@ function _gather_76_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // pattern _loop0_77 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = pattern_rule(p)) // pattern - && - (seq = _loop0_77_rule(p)) // _loop0_77 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // pattern _loop0_77 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = pattern_rule(p)) // pattern + && + (seq = _loop0_77_rule(p)) // _loop0_77 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_79: ',' keyword_pattern @@ -20176,29 +20459,30 @@ function _gather_78_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // keyword_pattern _loop0_79 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = keyword_pattern_rule(p)) // keyword_pattern - && - (seq = _loop0_79_rule(p)) // _loop0_79 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // keyword_pattern _loop0_79 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = keyword_pattern_rule(p)) // keyword_pattern + && + (seq = _loop0_79_rule(p)) // _loop0_79 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_81: ',' type_param @@ -20240,29 +20524,30 @@ function _gather_80_rule(p) 
if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // type_param _loop0_81 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = type_param_rule(p)) // type_param - && - (seq = _loop0_81_rule(p)) // _loop0_81 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // type_param _loop0_81 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = type_param_rule(p)) // type_param + && + (seq = _loop0_81_rule(p)) // _loop0_81 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop1_82: (',' expression) @@ -20370,29 +20655,30 @@ function _gather_84_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // star_named_expression _loop0_85 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = star_named_expression_rule(p)) // star_named_expression - && - (seq = _loop0_85_rule(p)) // _loop0_85 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // star_named_expression _loop0_85 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = star_named_expression_rule(p)) // star_named_expression + && + (seq = _loop0_85_rule(p)) // _loop0_85 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop1_86: ('or' conjunction) @@ -20500,26 +20786,27 @@ function _tmp_89_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '!=' - if (p.error_indicator) { - return NULL; - } - var tok; - if ( - (tok = $B._PyPegen.expect_token(p, 28)) // token='!=' - ) - { - _res = $B._PyPegen.check_barry_as_flufl (p, tok) ? $B.parser_constants.NULL : tok; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '!=' + if (p.error_indicator) { + return NULL; + } + var tok; + if ( + (tok = $B._PyPegen.expect_token(p, 28)) // token='!=' + ) + { + _res = $B._PyPegen.check_barry_as_flufl (p, tok) ? 
$B.parser_constants.NULL : tok; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_91: ',' (slice | starred_expression) @@ -20561,29 +20848,30 @@ function _gather_90_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // (slice | starred_expression) _loop0_91 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = _tmp_256_rule(p)) // slice | starred_expression - && - (seq = _loop0_91_rule(p)) // _loop0_91 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // (slice | starred_expression) _loop0_91 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = _tmp_256_rule(p)) // slice | starred_expression + && + (seq = _loop0_91_rule(p)) // _loop0_91 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_92: ':' expression? @@ -20592,29 +20880,30 @@ function _tmp_92_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ':' expression? - if (p.error_indicator) { - return NULL; - } - var _literal; - var d; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - && - (d = expression_rule(p), !p.error_indicator) // expression? - ) - { - _res = d; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ':' expression? + if (p.error_indicator) { + return NULL; + } + var _literal; + var d; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + && + (d = expression_rule(p), !p.error_indicator) // expression? 
+ ) + { + _res = d; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_93: STRING | FSTRING_START @@ -20623,40 +20912,41 @@ function _tmp_93_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // STRING - if (p.error_indicator) { - return NULL; - } - var string_var; - if ( - (string_var = $B._PyPegen.string_token(p)) // STRING - ) - { - _res = string_var; - return done(); - } - p.mark = _mark; - } - { // FSTRING_START - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // STRING + if (p.error_indicator) { + return NULL; + } + var string_var; + if ( + (string_var = $B._PyPegen.string_token(p)) // STRING + ) + { + _res = string_var; + break; + } + p.mark = _mark; } - var fstring_start_var; - if ( - (fstring_start_var = $B._PyPegen.expect_token(p, FSTRING_START)) // token='FSTRING_START' - ) - { - _res = fstring_start_var; - return done(); + { // FSTRING_START + if (p.error_indicator) { + return NULL; + } + var fstring_start_var; + if ( + (fstring_start_var = $B._PyPegen.expect_token(p, FSTRING_START)) // token='FSTRING_START' + ) + { + _res = fstring_start_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_94: tuple | group | genexp @@ -20665,54 +20955,55 @@ function _tmp_94_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // tuple - if (p.error_indicator) { - return NULL; - } - var tuple_var; - if ( - (tuple_var = tuple_rule(p)) // tuple - ) - { - _res = tuple_var; - return done(); - } - p.mark = _mark; - } - { // group - if (p.error_indicator) { - return NULL; - } - var group_var; - if ( - (group_var = group_rule(p)) // group - ) - { - _res = group_var; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // tuple + if (p.error_indicator) { + return NULL; + } + var tuple_var; + if ( + (tuple_var = tuple_rule(p)) // tuple + ) + { + _res = tuple_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // genexp - if (p.error_indicator) { - return NULL; + { // group + if (p.error_indicator) { + return NULL; + } + var group_var; + if ( + (group_var = group_rule(p)) // group + ) + { + _res = group_var; + break; + } + p.mark = _mark; } - var genexp_var; - if ( - (genexp_var = genexp_rule(p)) // genexp - ) - { - _res = genexp_var; - return done(); + { // genexp + if (p.error_indicator) { + return NULL; + } + var genexp_var; + if ( + (genexp_var = genexp_rule(p)) // genexp + ) + { + _res = genexp_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_95: list | listcomp @@ -20721,40 +21012,41 @@ function _tmp_95_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // list - if (p.error_indicator) { - return NULL; - } - var list_var; - if ( - (list_var = list_rule(p)) // list - ) - { - _res = list_var; - return done(); - } - p.mark = _mark; - } - { // listcomp - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // list + if (p.error_indicator) { + return NULL; + } + var list_var; + if ( + (list_var = list_rule(p)) // list + ) + { + _res = list_var; + break; + } + p.mark = _mark; } - var listcomp_var; - if ( - (listcomp_var = listcomp_rule(p)) // listcomp - ) - { - _res = listcomp_var; - 
return done(); + { // listcomp + if (p.error_indicator) { + return NULL; + } + var listcomp_var; + if ( + (listcomp_var = listcomp_rule(p)) // listcomp + ) + { + _res = listcomp_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_96: dict | set | dictcomp | setcomp @@ -20763,68 +21055,69 @@ function _tmp_96_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // dict - if (p.error_indicator) { - return NULL; - } - var dict_var; - if ( - (dict_var = dict_rule(p)) // dict - ) - { - _res = dict_var; - return done(); - } - p.mark = _mark; - } - { // set - if (p.error_indicator) { - return NULL; - } - var set_var; - if ( - (set_var = set_rule(p)) // set - ) - { - _res = set_var; - return done(); - } - p.mark = _mark; - } - { // dictcomp - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // dict + if (p.error_indicator) { + return NULL; + } + var dict_var; + if ( + (dict_var = dict_rule(p)) // dict + ) + { + _res = dict_var; + break; + } + p.mark = _mark; } - var dictcomp_var; - if ( - (dictcomp_var = dictcomp_rule(p)) // dictcomp - ) - { - _res = dictcomp_var; - return done(); + { // set + if (p.error_indicator) { + return NULL; + } + var set_var; + if ( + (set_var = set_rule(p)) // set + ) + { + _res = set_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // setcomp - if (p.error_indicator) { - return NULL; + { // dictcomp + if (p.error_indicator) { + return NULL; + } + var dictcomp_var; + if ( + (dictcomp_var = dictcomp_rule(p)) // dictcomp + ) + { + _res = dictcomp_var; + break; + } + p.mark = _mark; } - var setcomp_var; - if ( - (setcomp_var = setcomp_rule(p)) // setcomp - ) - { - _res = setcomp_var; - return done(); + { // setcomp + if (p.error_indicator) { + return NULL; + } + var setcomp_var; + if ( + (setcomp_var = setcomp_rule(p)) // setcomp + ) + { + _res = setcomp_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_97: yield_expr | named_expression @@ -20833,40 +21126,41 @@ function _tmp_97_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; - } - { // named_expression - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var named_expression_var; - if ( - (named_expression_var = named_expression_rule(p)) // named_expression - ) - { - _res = named_expression_var; - return done(); + { // named_expression + if (p.error_indicator) { + return NULL; + } + var named_expression_var; + if ( + (named_expression_var = named_expression_rule(p)) // named_expression + ) + { + _res = named_expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_98: lambda_param_no_default @@ -21316,40 +21610,41 @@ function _tmp_112_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 
yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; - } - { // star_expressions - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - _res = star_expressions_var; - return done(); + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + _res = star_expressions_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_113: fstring_format_spec @@ -21451,32 +21746,33 @@ function _tmp_116_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // star_named_expression ',' star_named_expressions? - if (p.error_indicator) { - return NULL; - } - var _literal; - var y; - var z; - if ( - (y = star_named_expression_rule(p)) // star_named_expression - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (z = star_named_expressions_rule(p), !p.error_indicator) // star_named_expressions? - ) - { - _res = $B._PyPegen.seq_insert_in_front (p, y, z); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // star_named_expression ',' star_named_expressions? + if (p.error_indicator) { + return NULL; + } + var _literal; + var y; + var z; + if ( + (y = star_named_expression_rule(p)) // star_named_expression + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (z = star_named_expressions_rule(p), !p.error_indicator) // star_named_expressions? 
+ ) + { + _res = $B._PyPegen.seq_insert_in_front (p, y, z); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_118: ',' double_starred_kvpair @@ -21518,29 +21814,30 @@ function _gather_117_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // double_starred_kvpair _loop0_118 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair - && - (seq = _loop0_118_rule(p)) // _loop0_118 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // double_starred_kvpair _loop0_118 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair + && + (seq = _loop0_118_rule(p)) // _loop0_118 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop1_119: for_if_clause @@ -21642,42 +21939,43 @@ function _tmp_122_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // assignment_expression - if (p.error_indicator) { - return NULL; - } - var assignment_expression_var; - if ( - (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression - ) - { - _res = assignment_expression_var; - return done(); - } - p.mark = _mark; - } - { // expression !':=' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // assignment_expression + if (p.error_indicator) { + return NULL; + } + var assignment_expression_var; + if ( + (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression + ) + { + _res = assignment_expression_var; + break; + } + p.mark = _mark; } - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 53) // token=':=' - ) - { - _res = expression_var; - return done(); + { // expression !':=' + if (p.error_indicator) { + return NULL; + } + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 53) // token=':=' + ) + { + _res = expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_124: ',' (starred_expression | (assignment_expression | expression !':=') !'=') @@ -21720,29 +22018,30 @@ function _gather_123_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_124 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = _tmp_260_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' - && - (seq = _loop0_124_rule(p)) // _loop0_124 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_124 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = _tmp_260_rule(p)) // starred_expression | (assignment_expression | 
expression !':=') !'=' + && + (seq = _loop0_124_rule(p)) // _loop0_124 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_125: ',' kwargs @@ -21751,29 +22050,30 @@ function _tmp_125_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ',' kwargs - if (p.error_indicator) { - return NULL; - } - var _literal; - var k; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (k = kwargs_rule(p)) // kwargs - ) - { - _res = k; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ',' kwargs + if (p.error_indicator) { + return NULL; + } + var _literal; + var k; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (k = kwargs_rule(p)) // kwargs + ) + { + _res = k; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_127: ',' kwarg_or_starred @@ -21815,29 +22115,30 @@ function _gather_126_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // kwarg_or_starred _loop0_127 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred - && - (seq = _loop0_127_rule(p)) // _loop0_127 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // kwarg_or_starred _loop0_127 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred + && + (seq = _loop0_127_rule(p)) // _loop0_127 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_129: ',' kwarg_or_double_starred @@ -21879,29 +22180,30 @@ function _gather_128_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // kwarg_or_double_starred _loop0_129 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred - && - (seq = _loop0_129_rule(p)) // _loop0_129 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // kwarg_or_double_starred _loop0_129 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred + && + (seq = _loop0_129_rule(p)) // _loop0_129 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_131: ',' kwarg_or_starred @@ -21943,29 +22245,30 @@ function _gather_130_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // kwarg_or_starred _loop0_131 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred - && - (seq = _loop0_131_rule(p)) // _loop0_131 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // kwarg_or_starred _loop0_131 + if (p.error_indicator) { + 
return NULL; + } + var elem; + var seq; + if ( + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred + && + (seq = _loop0_131_rule(p)) // _loop0_131 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_133: ',' kwarg_or_double_starred @@ -22007,29 +22310,30 @@ function _gather_132_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // kwarg_or_double_starred _loop0_133 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred - && - (seq = _loop0_133_rule(p)) // _loop0_133 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // kwarg_or_double_starred _loop0_133 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred + && + (seq = _loop0_133_rule(p)) // _loop0_133 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_134: (',' star_target) @@ -22096,34 +22400,35 @@ function _loop0_136_rule(p) } // _gather_135: star_target _loop0_136 -function _gather_135_rule(p) -{ - if (p.error_indicator) { - return NULL; - } - var _res = NULL; - var _mark = p.mark; - { // star_target _loop0_136 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = star_target_rule(p)) // star_target - && - (seq = _loop0_136_rule(p)) // _loop0_136 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); +function _gather_135_rule(p) +{ + if (p.error_indicator) { + return NULL; + } + while (1) { + var _res = NULL; + var _mark = p.mark; + { // star_target _loop0_136 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = star_target_rule(p)) // star_target + && + (seq = _loop0_136_rule(p)) // _loop0_136 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop1_137: (',' star_target) @@ -22165,28 +22470,29 @@ function _tmp_138_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // !'*' star_target - if (p.error_indicator) { - return NULL; - } - var star_target_var; - if ( - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 16) // token='*' - && - (star_target_var = star_target_rule(p)) // star_target - ) - { - _res = star_target_var; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // !'*' star_target + if (p.error_indicator) { + return NULL; + } + var star_target_var; + if ( + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 16) // token='*' + && + (star_target_var = star_target_rule(p)) // star_target + ) + { + _res = star_target_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_140: ',' del_target @@ -22228,29 +22534,30 @@ function _gather_139_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // del_target _loop0_140 - if (p.error_indicator) { - return NULL; - } - var 
elem; - var seq; - if ( - (elem = del_target_rule(p)) // del_target - && - (seq = _loop0_140_rule(p)) // _loop0_140 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // del_target _loop0_140 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = del_target_rule(p)) // del_target + && + (seq = _loop0_140_rule(p)) // _loop0_140 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_142: ',' expression @@ -22292,29 +22599,30 @@ function _gather_141_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expression _loop0_142 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = expression_rule(p)) // expression - && - (seq = _loop0_142_rule(p)) // _loop0_142 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expression _loop0_142 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = expression_rule(p)) // expression + && + (seq = _loop0_142_rule(p)) // _loop0_142 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_144: ',' expression @@ -22356,29 +22664,30 @@ function _gather_143_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expression _loop0_144 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = expression_rule(p)) // expression - && - (seq = _loop0_144_rule(p)) // _loop0_144 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expression _loop0_144 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = expression_rule(p)) // expression + && + (seq = _loop0_144_rule(p)) // _loop0_144 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_146: ',' expression @@ -22420,29 +22729,30 @@ function _gather_145_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expression _loop0_146 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = expression_rule(p)) // expression - && - (seq = _loop0_146_rule(p)) // _loop0_146 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expression _loop0_146 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = expression_rule(p)) // expression + && + (seq = _loop0_146_rule(p)) // _loop0_146 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_148: ',' expression @@ -22484,29 +22794,30 @@ function _gather_147_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expression _loop0_148 - if (p.error_indicator) { - return NULL; - } 
- var elem; - var seq; - if ( - (elem = expression_rule(p)) // expression - && - (seq = _loop0_148_rule(p)) // _loop0_148 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expression _loop0_148 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = expression_rule(p)) // expression + && + (seq = _loop0_148_rule(p)) // _loop0_148 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_149: NEWLINE INDENT @@ -22515,29 +22826,30 @@ function _tmp_149_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // NEWLINE INDENT - if (p.error_indicator) { - return NULL; - } - var indent_var; - var newline_var; - if ( - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - && - (indent_var = $B._PyPegen.expect_token(p, INDENT)) // token='INDENT' - ) - { - _res = $B._PyPegen.dummy_name(p, newline_var, indent_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // NEWLINE INDENT + if (p.error_indicator) { + return NULL; + } + var indent_var; + var newline_var; + if ( + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + && + (indent_var = $B._PyPegen.expect_token(p, INDENT)) // token='INDENT' + ) + { + _res = $B._PyPegen.dummy_name(p, newline_var, indent_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_150: @@ -22548,40 +22860,41 @@ function _tmp_150_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // (','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) - if (p.error_indicator) { - return NULL; - } - var _tmp_263_var; - if ( - (_tmp_263_var = _tmp_263_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs - ) - { - _res = _tmp_263_var; - return done(); - } - p.mark = _mark; - } - { // kwargs - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // (','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) + if (p.error_indicator) { + return NULL; + } + var _tmp_263_var; + if ( + (_tmp_263_var = _tmp_263_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs + ) + { + _res = _tmp_263_var; + break; + } + p.mark = _mark; } - var kwargs_var; - if ( - (kwargs_var = kwargs_rule(p)) // kwargs - ) - { - _res = kwargs_var; - return done(); + { // kwargs + if (p.error_indicator) { + return NULL; + } + var kwargs_var; + if ( + (kwargs_var = kwargs_rule(p)) // kwargs + ) + { + _res = kwargs_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_151: args | expression for_if_clauses @@ -22590,43 +22903,44 @@ function _tmp_151_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // args - if (p.error_indicator) { - return NULL; - } - var args_var; - if ( - (args_var = args_rule(p)) // args - ) - { - _res = args_var; - return done(); - } - p.mark = _mark; - } - { // expression for_if_clauses - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; 
+ { // args + if (p.error_indicator) { + return NULL; + } + var args_var; + if ( + (args_var = args_rule(p)) // args + ) + { + _res = args_var; + break; + } + p.mark = _mark; } - var expression_var; - var for_if_clauses_var; - if ( - (expression_var = expression_rule(p)) // expression - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - ) - { - _res = $B._PyPegen.dummy_name(p, expression_var, for_if_clauses_var); - return done(); + { // expression for_if_clauses + if (p.error_indicator) { + return NULL; + } + var expression_var; + var for_if_clauses_var; + if ( + (expression_var = expression_rule(p)) // expression + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + ) + { + _res = $B._PyPegen.dummy_name(p, expression_var, for_if_clauses_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_152: args ',' @@ -22635,29 +22949,30 @@ function _tmp_152_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // args ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - var args_var; - if ( - (args_var = args_rule(p)) // args - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = $B._PyPegen.dummy_name(p, args_var, _literal); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // args ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + var args_var; + if ( + (args_var = args_rule(p)) // args + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = $B._PyPegen.dummy_name(p, args_var, _literal); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_153: ',' | ')' @@ -22666,40 +22981,41 @@ function _tmp_153_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // ')' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = _literal; - return done(); + { // ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_154: 'True' | 'False' | 'None' @@ -22708,85 +23024,87 @@ function _tmp_154_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'True' - if (p.error_indicator) { - return NULL; - } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 601)) // token='True' - ) - { - _res = _keyword; - return done(); - } - p.mark = _mark; - } - { // 'False' - if (p.error_indicator) { - return NULL; - } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 603)) // token='False' - ) - { - _res = _keyword; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 
'True' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 601)) // token='True' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'None' - if (p.error_indicator) { - return NULL; + { // 'False' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 603)) // token='False' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 602)) // token='None' - ) - { - _res = _keyword; - return done(); + { // 'None' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 602)) // token='None' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_155: NAME '=' function _tmp_155_rule(p) -{ - if (p.error_indicator) { - return NULL; - } - var _res = NULL; - var _mark = p.mark; - { // NAME '=' - if (p.error_indicator) { - return NULL; - } - var _literal; - var name_var; - if ( - (name_var = $B._PyPegen.name_token(p)) // NAME - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = $B._PyPegen.dummy_name(p, name_var, _literal); - return done(); +{ + if (p.error_indicator) { + return NULL; + } + while (1) { + var _res = NULL; + var _mark = p.mark; + { // NAME '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + var name_var; + if ( + (name_var = $B._PyPegen.name_token(p)) // NAME + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = $B._PyPegen.dummy_name(p, name_var, _literal); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_156: NAME STRING | SOFT_KEYWORD @@ -22795,43 +23113,44 @@ function _tmp_156_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // NAME STRING - if (p.error_indicator) { - return NULL; - } - var name_var; - var string_var; - if ( - (name_var = $B._PyPegen.name_token(p)) // NAME - && - (string_var = $B._PyPegen.string_token(p)) // STRING - ) - { - _res = $B._PyPegen.dummy_name(p, name_var, string_var); - return done(); - } - p.mark = _mark; - } - { // SOFT_KEYWORD - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // NAME STRING + if (p.error_indicator) { + return NULL; + } + var name_var; + var string_var; + if ( + (name_var = $B._PyPegen.name_token(p)) // NAME + && + (string_var = $B._PyPegen.string_token(p)) // STRING + ) + { + _res = $B._PyPegen.dummy_name(p, name_var, string_var); + break; + } + p.mark = _mark; } - var soft_keyword_var; - if ( - (soft_keyword_var = $B._PyPegen.soft_keyword_token(p)) // SOFT_KEYWORD - ) - { - _res = soft_keyword_var; - return done(); + { // SOFT_KEYWORD + if (p.error_indicator) { + return NULL; + } + var soft_keyword_var; + if ( + (soft_keyword_var = $B._PyPegen.soft_keyword_token(p)) // SOFT_KEYWORD + ) + { + _res = soft_keyword_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_157: 'else' | ':' @@ -22840,40 +23159,41 @@ function _tmp_157_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'else' - if (p.error_indicator) { - return NULL; - } - var _keyword; - if ( - (_keyword = 
$B._PyPegen.expect_token(p, 645)) // token='else' - ) - { - _res = _keyword; - return done(); - } - p.mark = _mark; - } - { // ':' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'else' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 645)) // token='else' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = _literal; - return done(); + { // ':' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_158: '=' | ':=' @@ -22882,40 +23202,41 @@ function _tmp_158_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '=' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // ':=' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 53)) // token=':=' - ) - { - _res = _literal; - return done(); + { // ':=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 53)) // token=':=' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_159: list | tuple | genexp | 'True' | 'None' | 'False' @@ -22924,96 +23245,97 @@ function _tmp_159_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // list - if (p.error_indicator) { - return NULL; - } - var list_var; - if ( - (list_var = list_rule(p)) // list - ) - { - _res = list_var; - return done(); - } - p.mark = _mark; - } - { // tuple - if (p.error_indicator) { - return NULL; - } - var tuple_var; - if ( - (tuple_var = tuple_rule(p)) // tuple - ) - { - _res = tuple_var; - return done(); - } - p.mark = _mark; - } - { // genexp - if (p.error_indicator) { - return NULL; - } - var genexp_var; - if ( - (genexp_var = genexp_rule(p)) // genexp - ) - { - _res = genexp_var; - return done(); - } - p.mark = _mark; - } - { // 'True' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // list + if (p.error_indicator) { + return NULL; + } + var list_var; + if ( + (list_var = list_rule(p)) // list + ) + { + _res = list_var; + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 601)) // token='True' - ) - { - _res = _keyword; - return done(); + { // tuple + if (p.error_indicator) { + return NULL; + } + var tuple_var; + if ( + (tuple_var = tuple_rule(p)) // tuple + ) + { + _res = tuple_var; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'None' - if (p.error_indicator) { - return NULL; + { // genexp + if (p.error_indicator) { + return NULL; + } + var genexp_var; + if ( + (genexp_var = genexp_rule(p)) // genexp + ) + { + 
_res = genexp_var; + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 602)) // token='None' - ) - { - _res = _keyword; - return done(); + { // 'True' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 601)) // token='True' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // 'False' - if (p.error_indicator) { - return NULL; + { // 'None' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 602)) // token='None' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 603)) // token='False' - ) - { - _res = _keyword; - return done(); + { // 'False' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 603)) // token='False' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_160: '=' | ':=' @@ -23022,40 +23344,41 @@ function _tmp_160_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '=' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // ':=' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 53)) // token=':=' - ) - { - _res = _literal; - return done(); + { // ':=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 53)) // token=':=' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_161: star_named_expressions @@ -23154,40 +23477,41 @@ function _tmp_164_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; - } - { // star_expressions - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - _res = star_expressions_var; - return done(); + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + _res = star_expressions_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_165: '[' | '(' | '{' @@ -23196,54 +23520,55 @@ function _tmp_165_rule(p) if 
(p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '[' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '(' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - ) - { - _res = _literal; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '[' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '{' - if (p.error_indicator) { - return NULL; + { // '(' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - ) - { - _res = _literal; - return done(); + { // '{' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_166: '[' | '{' @@ -23252,40 +23577,41 @@ function _tmp_166_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '[' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '{' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '[' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - ) - { - _res = _literal; - return done(); + { // '{' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_167: '[' | '{' @@ -23294,40 +23620,41 @@ function _tmp_167_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '[' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '{' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '[' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 9)) // token='[' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' - ) - { - _res = _literal; - return done(); + { // '{' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 25)) // token='{' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + 
break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_168: slash_no_default | slash_with_default @@ -23336,40 +23663,41 @@ function _tmp_168_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // slash_no_default - if (p.error_indicator) { - return NULL; - } - var slash_no_default_var; - if ( - (slash_no_default_var = slash_no_default_rule(p)) // slash_no_default - ) - { - _res = slash_no_default_var; - return done(); - } - p.mark = _mark; - } - { // slash_with_default - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // slash_no_default + if (p.error_indicator) { + return NULL; + } + var slash_no_default_var; + if ( + (slash_no_default_var = slash_no_default_rule(p)) // slash_no_default + ) + { + _res = slash_no_default_var; + break; + } + p.mark = _mark; } - var slash_with_default_var; - if ( - (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default - ) - { - _res = slash_with_default_var; - return done(); + { // slash_with_default + if (p.error_indicator) { + return NULL; + } + var slash_with_default_var; + if ( + (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default + ) + { + _res = slash_with_default_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_169: param_maybe_default @@ -23501,40 +23829,41 @@ function _tmp_173_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // slash_no_default - if (p.error_indicator) { - return NULL; - } - var slash_no_default_var; - if ( - (slash_no_default_var = slash_no_default_rule(p)) // slash_no_default - ) - { - _res = slash_no_default_var; - return done(); - } - p.mark = _mark; - } - { // slash_with_default - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // slash_no_default + if (p.error_indicator) { + return NULL; + } + var slash_no_default_var; + if ( + (slash_no_default_var = slash_no_default_rule(p)) // slash_no_default + ) + { + _res = slash_no_default_var; + break; + } + p.mark = _mark; } - var slash_with_default_var; - if ( - (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default - ) - { - _res = slash_with_default_var; - return done(); + { // slash_with_default + if (p.error_indicator) { + return NULL; + } + var slash_with_default_var; + if ( + (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default + ) + { + _res = slash_with_default_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_174: param_maybe_default @@ -23573,40 +23902,41 @@ function _tmp_175_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // param_no_default - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var param_no_default_var; - if ( - (param_no_default_var = param_no_default_rule(p)) // param_no_default - ) - { - 
_res = param_no_default_var; - return done(); + { // param_no_default + if (p.error_indicator) { + return NULL; + } + var param_no_default_var; + if ( + (param_no_default_var = param_no_default_rule(p)) // param_no_default + ) + { + _res = param_no_default_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_176: param_maybe_default @@ -23678,40 +24008,41 @@ function _tmp_178_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // ',' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = _literal; - return done(); + { // ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_179: ')' | ',' (')' | '**') @@ -23720,43 +24051,44 @@ function _tmp_179_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // ',' (')' | '**') - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - var _tmp_266_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (_tmp_266_var = _tmp_266_rule(p)) // ')' | '**' - ) - { - _res = $B._PyPegen.dummy_name(p, _literal, _tmp_266_var); - return done(); + { // ',' (')' | '**') + if (p.error_indicator) { + return NULL; + } + var _literal; + var _tmp_266_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (_tmp_266_var = _tmp_266_rule(p)) // ')' | '**' + ) + { + _res = $B._PyPegen.dummy_name(p, _literal, _tmp_266_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_180: param_no_default | ',' @@ -23765,40 +24097,41 @@ function _tmp_180_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // param_no_default - if (p.error_indicator) { - return NULL; - } - var param_no_default_var; - if ( - (param_no_default_var = param_no_default_rule(p)) // param_no_default - ) - { - _res = param_no_default_var; - return done(); - } - p.mark = _mark; - } - { // ',' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // param_no_default + if (p.error_indicator) { + return NULL; + } + var param_no_default_var; + if ( + (param_no_default_var = 
param_no_default_rule(p)) // param_no_default + ) + { + _res = param_no_default_var; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = _literal; - return done(); + { // ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_181: param_maybe_default @@ -23837,40 +24170,41 @@ function _tmp_182_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // param_no_default - if (p.error_indicator) { - return NULL; - } - var param_no_default_var; - if ( - (param_no_default_var = param_no_default_rule(p)) // param_no_default - ) - { - _res = param_no_default_var; - return done(); - } - p.mark = _mark; - } - { // ',' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // param_no_default + if (p.error_indicator) { + return NULL; + } + var param_no_default_var; + if ( + (param_no_default_var = param_no_default_rule(p)) // param_no_default + ) + { + _res = param_no_default_var; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = _literal; - return done(); + { // ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_183: '*' | '**' | '/' @@ -23879,54 +24213,55 @@ function _tmp_183_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '*' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '**' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - ) - { - _res = _literal; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '*' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '/' - if (p.error_indicator) { - return NULL; + { // '**' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - ) - { - _res = _literal; - return done(); + { // '/' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop1_184: param_with_default @@ -23968,40 +24303,41 @@ function _tmp_185_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // lambda_slash_no_default - if (p.error_indicator) { - return NULL; - } - var lambda_slash_no_default_var; - if ( - 
(lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default - ) - { - _res = lambda_slash_no_default_var; - return done(); - } - p.mark = _mark; - } - { // lambda_slash_with_default - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // lambda_slash_no_default + if (p.error_indicator) { + return NULL; + } + var lambda_slash_no_default_var; + if ( + (lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default + ) + { + _res = lambda_slash_no_default_var; + break; + } + p.mark = _mark; } - var lambda_slash_with_default_var; - if ( - (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default - ) - { - _res = lambda_slash_with_default_var; - return done(); + { // lambda_slash_with_default + if (p.error_indicator) { + return NULL; + } + var lambda_slash_with_default_var; + if ( + (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default + ) + { + _res = lambda_slash_with_default_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_186: lambda_param_maybe_default @@ -24133,29 +24469,30 @@ function _gather_189_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // lambda_param _loop0_190 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = lambda_param_rule(p)) // lambda_param - && - (seq = _loop0_190_rule(p)) // _loop0_190 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // lambda_param _loop0_190 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = lambda_param_rule(p)) // lambda_param + && + (seq = _loop0_190_rule(p)) // _loop0_190 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_191: lambda_slash_no_default | lambda_slash_with_default @@ -24164,40 +24501,41 @@ function _tmp_191_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // lambda_slash_no_default - if (p.error_indicator) { - return NULL; - } - var lambda_slash_no_default_var; - if ( - (lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default - ) - { - _res = lambda_slash_no_default_var; - return done(); - } - p.mark = _mark; - } - { // lambda_slash_with_default - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // lambda_slash_no_default + if (p.error_indicator) { + return NULL; + } + var lambda_slash_no_default_var; + if ( + (lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default + ) + { + _res = lambda_slash_no_default_var; + break; + } + p.mark = _mark; } - var lambda_slash_with_default_var; - if ( - (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default - ) - { - _res = lambda_slash_with_default_var; - return done(); + { // lambda_slash_with_default + if (p.error_indicator) { + return NULL; + } + var lambda_slash_with_default_var; + if ( + (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default + ) + { + _res = lambda_slash_with_default_var; + break; + } + p.mark = _mark; } 
- p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_192: lambda_param_maybe_default @@ -24236,40 +24574,41 @@ function _tmp_193_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // lambda_param_no_default - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var lambda_param_no_default_var; - if ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = lambda_param_no_default_var; - return done(); + { // lambda_param_no_default + if (p.error_indicator) { + return NULL; + } + var lambda_param_no_default_var; + if ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + _res = lambda_param_no_default_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_194: lambda_param_maybe_default @@ -24374,43 +24713,44 @@ function _tmp_197_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ':' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // ',' (':' | '**') - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ':' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - var _tmp_267_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (_tmp_267_var = _tmp_267_rule(p)) // ':' | '**' - ) - { - _res = $B._PyPegen.dummy_name(p, _literal, _tmp_267_var); - return done(); + { // ',' (':' | '**') + if (p.error_indicator) { + return NULL; + } + var _literal; + var _tmp_267_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (_tmp_267_var = _tmp_267_rule(p)) // ':' | '**' + ) + { + _res = $B._PyPegen.dummy_name(p, _literal, _tmp_267_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_198: lambda_param_no_default | ',' @@ -24419,40 +24759,41 @@ function _tmp_198_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // lambda_param_no_default - if (p.error_indicator) { - return NULL; - } - var lambda_param_no_default_var; - if ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = lambda_param_no_default_var; - return done(); - } - p.mark = _mark; - } - { // ',' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // lambda_param_no_default + if (p.error_indicator) { + return NULL; + } + var lambda_param_no_default_var; + if ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // 
lambda_param_no_default + ) + { + _res = lambda_param_no_default_var; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = _literal; - return done(); + { // ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_199: lambda_param_maybe_default @@ -24491,40 +24832,41 @@ function _tmp_200_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // lambda_param_no_default - if (p.error_indicator) { - return NULL; - } - var lambda_param_no_default_var; - if ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = lambda_param_no_default_var; - return done(); - } - p.mark = _mark; - } - { // ',' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // lambda_param_no_default + if (p.error_indicator) { + return NULL; + } + var lambda_param_no_default_var; + if ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + _res = lambda_param_no_default_var; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = _literal; - return done(); + { // ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_201: '*' | '**' | '/' @@ -24533,54 +24875,55 @@ function _tmp_201_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '*' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '**' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - ) - { - _res = _literal; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '*' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 16)) // token='*' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '/' - if (p.error_indicator) { - return NULL; + { // '**' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' - ) - { - _res = _literal; - return done(); + { // '/' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 17)) // token='/' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_202: ',' | ')' | ':' @@ -24589,54 +24932,55 @@ function _tmp_202_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ',' - if (p.error_indicator) { - return 
NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = _literal; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // ':' - if (p.error_indicator) { - return NULL; + { // ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = _literal; - return done(); + { // ':' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_204: ',' dotted_name @@ -24671,36 +25015,37 @@ function _loop0_204_rule(p) } return _children; } - -// _gather_203: dotted_name _loop0_204 -function _gather_203_rule(p) -{ - if (p.error_indicator) { - return NULL; - } - var _res = NULL; - var _mark = p.mark; - { // dotted_name _loop0_204 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = dotted_name_rule(p)) // dotted_name - && - (seq = _loop0_204_rule(p)) // _loop0_204 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + +// _gather_203: dotted_name _loop0_204 +function _gather_203_rule(p) +{ + if (p.error_indicator) { + return NULL; + } + while (1) { + var _res = NULL; + var _mark = p.mark; + { // dotted_name _loop0_204 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = dotted_name_rule(p)) // dotted_name + && + (seq = _loop0_204_rule(p)) // _loop0_204 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_206: ',' (expression ['as' star_target]) @@ -24742,29 +25087,30 @@ function _gather_205_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // (expression ['as' star_target]) _loop0_206 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = _tmp_268_rule(p)) // expression ['as' star_target] - && - (seq = _loop0_206_rule(p)) // _loop0_206 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // (expression ['as' star_target]) _loop0_206 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = _tmp_268_rule(p)) // expression ['as' star_target] + && + (seq = _loop0_206_rule(p)) // _loop0_206 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_208: ',' (expressions ['as' star_target]) @@ -24806,29 +25152,30 @@ function _gather_207_rule(p) if (p.error_indicator) { return NULL; } - 
var _res = NULL; - var _mark = p.mark; - { // (expressions ['as' star_target]) _loop0_208 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = _tmp_269_rule(p)) // expressions ['as' star_target] - && - (seq = _loop0_208_rule(p)) // _loop0_208 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // (expressions ['as' star_target]) _loop0_208 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = _tmp_269_rule(p)) // expressions ['as' star_target] + && + (seq = _loop0_208_rule(p)) // _loop0_208 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_210: ',' (expression ['as' star_target]) @@ -24870,29 +25217,30 @@ function _gather_209_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // (expression ['as' star_target]) _loop0_210 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = _tmp_270_rule(p)) // expression ['as' star_target] - && - (seq = _loop0_210_rule(p)) // _loop0_210 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // (expression ['as' star_target]) _loop0_210 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = _tmp_270_rule(p)) // expression ['as' star_target] + && + (seq = _loop0_210_rule(p)) // _loop0_210 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_212: ',' (expressions ['as' star_target]) @@ -24934,29 +25282,30 @@ function _gather_211_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // (expressions ['as' star_target]) _loop0_212 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = _tmp_271_rule(p)) // expressions ['as' star_target] - && - (seq = _loop0_212_rule(p)) // _loop0_212 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // (expressions ['as' star_target]) _loop0_212 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = _tmp_271_rule(p)) // expressions ['as' star_target] + && + (seq = _loop0_212_rule(p)) // _loop0_212 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_213: 'except' | 'finally' @@ -24965,40 +25314,41 @@ function _tmp_213_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'except' - if (p.error_indicator) { - return NULL; - } - var _keyword; - if ( - (_keyword = $B._PyPegen.expect_token(p, 637)) // token='except' - ) - { - _res = _keyword; - return done(); - } - p.mark = _mark; - } - { // 'finally' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'except' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 637)) // token='except' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - var _keyword; - 
if ( - (_keyword = $B._PyPegen.expect_token(p, 633)) // token='finally' - ) - { - _res = _keyword; - return done(); + { // 'finally' + if (p.error_indicator) { + return NULL; + } + var _keyword; + if ( + (_keyword = $B._PyPegen.expect_token(p, 633)) // token='finally' + ) + { + _res = _keyword; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_214: block @@ -25070,29 +25420,30 @@ function _tmp_216_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' NAME - if (p.error_indicator) { - return NULL; - } - var _keyword; - var name_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (name_var = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.dummy_name(p, _keyword, name_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' NAME + if (p.error_indicator) { + return NULL; + } + var _keyword; + var name_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (name_var = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.dummy_name(p, _keyword, name_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_217: block @@ -25164,30 +25515,31 @@ function _tmp_219_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expression ['as' NAME] - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - && - (_opt_var = _tmp_272_rule(p), !p.error_indicator) // ['as' NAME] - ) - { - _res = $B._PyPegen.dummy_name(p, expression_var, _opt_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expression ['as' NAME] + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + (_opt_var = _tmp_272_rule(p), !p.error_indicator) // ['as' NAME] + ) + { + _res = $B._PyPegen.dummy_name(p, expression_var, _opt_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_220: 'as' NAME @@ -25196,29 +25548,30 @@ function _tmp_220_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' NAME - if (p.error_indicator) { - return NULL; - } - var _keyword; - var name_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (name_var = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.dummy_name(p, _keyword, name_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' NAME + if (p.error_indicator) { + return NULL; + } + var _keyword; + var name_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (name_var = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.dummy_name(p, _keyword, name_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_221: 'as' NAME @@ -25227,29 +25580,30 @@ function _tmp_221_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' NAME - if (p.error_indicator) 
{ - return NULL; - } - var _keyword; - var name_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (name_var = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.dummy_name(p, _keyword, name_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' NAME + if (p.error_indicator) { + return NULL; + } + var _keyword; + var name_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (name_var = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.dummy_name(p, _keyword, name_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_222: NEWLINE | ':' @@ -25258,40 +25612,41 @@ function _tmp_222_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // NEWLINE - if (p.error_indicator) { - return NULL; - } - var newline_var; - if ( - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = newline_var; - return done(); - } - p.mark = _mark; - } - { // ':' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // NEWLINE + if (p.error_indicator) { + return NULL; + } + var newline_var; + if ( + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = newline_var; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = _literal; - return done(); + { // ':' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_223: 'as' NAME @@ -25300,29 +25655,30 @@ function _tmp_223_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' NAME - if (p.error_indicator) { - return NULL; - } - var _keyword; - var name_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (name_var = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.dummy_name(p, _keyword, name_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' NAME + if (p.error_indicator) { + return NULL; + } + var _keyword; + var name_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (name_var = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.dummy_name(p, _keyword, name_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_224: 'as' NAME @@ -25331,29 +25687,30 @@ function _tmp_224_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' NAME - if (p.error_indicator) { - return NULL; - } - var _keyword; - var name_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (name_var = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.dummy_name(p, _keyword, name_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' NAME + if (p.error_indicator) { + return NULL; + } + var _keyword; + var name_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (name_var = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = 
$B._PyPegen.dummy_name(p, _keyword, name_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_225: positional_patterns ',' @@ -25362,29 +25719,30 @@ function _tmp_225_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // positional_patterns ',' - if (p.error_indicator) { - return NULL; - } - var _literal; - var positional_patterns_var; - if ( - (positional_patterns_var = positional_patterns_rule(p)) // positional_patterns - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = $B._PyPegen.dummy_name(p, positional_patterns_var, _literal); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // positional_patterns ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + var positional_patterns_var; + if ( + (positional_patterns_var = positional_patterns_rule(p)) // positional_patterns + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = $B._PyPegen.dummy_name(p, positional_patterns_var, _literal); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_226: '->' expression @@ -25393,64 +25751,66 @@ function _tmp_226_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '->' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var expression_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 51)) // token='->' - && - (expression_var = expression_rule(p)) // expression - ) - { - _res = $B._PyPegen.dummy_name(p, _literal, expression_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '->' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var expression_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 51)) // token='->' + && + (expression_var = expression_rule(p)) // expression + ) + { + _res = $B._PyPegen.dummy_name(p, _literal, expression_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_227: '(' arguments? ')' function _tmp_227_rule(p) { - if (p.error_indicator) { - return NULL; - } - var _res = NULL; - var _mark = p.mark; - { // '(' arguments? ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (_opt_var = arguments_rule(p), !p.error_indicator) // arguments? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = $B._PyPegen.dummy_name(p, _literal, _opt_var, _literal_1); - return done(); + if (p.error_indicator) { + return NULL; + } + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '(' arguments? ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (_opt_var = arguments_rule(p), !p.error_indicator) // arguments? 
+ && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = $B._PyPegen.dummy_name(p, _literal, _opt_var, _literal_1); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_228: '(' arguments? ')' @@ -25459,33 +25819,34 @@ function _tmp_228_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '(' arguments? ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - var _literal_1; - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - if ( - (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' - && - (_opt_var = arguments_rule(p), !p.error_indicator) // arguments? - && - (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = $B._PyPegen.dummy_name(p, _literal, _opt_var, _literal_1); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '(' arguments? ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + var _literal_1; + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + if ( + (_literal = $B._PyPegen.expect_token(p, 7)) // token='(' + && + (_opt_var = arguments_rule(p), !p.error_indicator) // arguments? + && + (_literal_1 = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = $B._PyPegen.dummy_name(p, _literal, _opt_var, _literal_1); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_230: ',' double_starred_kvpair @@ -25527,29 +25888,30 @@ function _gather_229_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // double_starred_kvpair _loop0_230 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair - && - (seq = _loop0_230_rule(p)) // _loop0_230 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // double_starred_kvpair _loop0_230 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair + && + (seq = _loop0_230_rule(p)) // _loop0_230 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_231: '}' | ',' @@ -25558,40 +25920,41 @@ function _tmp_231_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '}' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // ',' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '}' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = _literal; - return done(); + { // ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - 
_res = NULL; - function done(){ return _res; - } } // _tmp_232: '}' | ',' @@ -25600,40 +25963,41 @@ function _tmp_232_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '}' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // ',' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '}' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - ) - { - _res = _literal; - return done(); + { // ',' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_233: yield_expr | star_expressions @@ -25642,40 +26006,41 @@ function _tmp_233_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; - } - { // star_expressions - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - _res = star_expressions_var; - return done(); + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + _res = star_expressions_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_234: yield_expr | star_expressions @@ -25684,40 +26049,41 @@ function _tmp_234_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; - } - { // star_expressions - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - _res = star_expressions_var; - return done(); + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + _res = star_expressions_var; + break; + } + p.mark = _mark; } - p.mark = _mark; 
+ _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_235: '=' | '!' | ':' | '}' @@ -25726,68 +26092,69 @@ function _tmp_235_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '=' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '!' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // ':' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = _literal; - return done(); + { // '!' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '}' - if (p.error_indicator) { - return NULL; + { // ':' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - _res = _literal; - return done(); + { // '}' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_236: yield_expr | star_expressions @@ -25796,40 +26163,41 @@ function _tmp_236_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; - } - { // star_expressions - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - _res = star_expressions_var; - return done(); + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + _res = star_expressions_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_237: '!' | ':' | '}' @@ -25838,96 +26206,98 @@ function _tmp_237_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '!' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' 
- ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // ':' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = _literal; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '!' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; - } - { // '}' - if (p.error_indicator) { - return NULL; + { // ':' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - _res = _literal; - return done(); + { // '}' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_238: yield_expr | star_expressions function _tmp_238_rule(p) { if (p.error_indicator) { - return NULL; - } - var _res = NULL; - var _mark = p.mark; - { // yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; + return NULL; } - { // star_expressions - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - _res = star_expressions_var; - return done(); + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + _res = star_expressions_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_239: yield_expr | star_expressions @@ -25936,40 +26306,41 @@ function _tmp_239_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; - } - { // star_expressions - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - _res = star_expressions_var; - return done(); + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + _res = star_expressions_var; 
+ break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_240: '!' NAME @@ -25978,29 +26349,30 @@ function _tmp_240_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '!' NAME - if (p.error_indicator) { - return NULL; - } - var _literal; - var name_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' - && - (name_var = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.dummy_name(p, _literal, name_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '!' NAME + if (p.error_indicator) { + return NULL; + } + var _literal; + var name_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' + && + (name_var = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.dummy_name(p, _literal, name_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_241: ':' | '}' @@ -26009,40 +26381,41 @@ function _tmp_241_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ':' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '}' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ':' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - _res = _literal; - return done(); + { // '}' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_242: yield_expr | star_expressions @@ -26051,40 +26424,41 @@ function _tmp_242_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; - } - { // star_expressions - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - _res = star_expressions_var; - return done(); + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + _res = star_expressions_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_243: '!' NAME @@ -26093,29 +26467,30 @@ function _tmp_243_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '!' 
NAME - if (p.error_indicator) { - return NULL; - } - var _literal; - var name_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' - && - (name_var = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.dummy_name(p, _literal, name_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '!' NAME + if (p.error_indicator) { + return NULL; + } + var _literal; + var name_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' + && + (name_var = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.dummy_name(p, _literal, name_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_244: fstring_format_spec @@ -26154,40 +26529,41 @@ function _tmp_245_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // yield_expr - if (p.error_indicator) { - return NULL; - } - var yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - _res = yield_expr_var; - return done(); - } - p.mark = _mark; - } - { // star_expressions - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // yield_expr + if (p.error_indicator) { + return NULL; + } + var yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + _res = yield_expr_var; + break; + } + p.mark = _mark; } - var star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - _res = star_expressions_var; - return done(); + { // star_expressions + if (p.error_indicator) { + return NULL; + } + var star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + _res = star_expressions_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_246: '!' NAME @@ -26196,29 +26572,30 @@ function _tmp_246_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '!' NAME - if (p.error_indicator) { - return NULL; - } - var _literal; - var name_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' - && - (name_var = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.dummy_name(p, _literal, name_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '!' NAME + if (p.error_indicator) { + return NULL; + } + var _literal; + var name_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 54)) // token='!' 
+ && + (name_var = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.dummy_name(p, _literal, name_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_247: ':' | '}' @@ -26227,40 +26604,41 @@ function _tmp_247_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ':' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '}' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ':' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 26)) // token='}' - ) - { - _res = _literal; - return done(); + { // '}' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 26)) // token='}' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_248: star_targets '=' @@ -26269,29 +26647,30 @@ function _tmp_248_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // star_targets '=' - if (p.error_indicator) { - return NULL; - } - var _literal; - var z; - if ( - (z = star_targets_rule(p)) // star_targets - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = z; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // star_targets '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + var z; + if ( + (z = star_targets_rule(p)) // star_targets + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = z; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_249: '.' | '...' @@ -26300,40 +26679,41 @@ function _tmp_249_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '.' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '...' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '.' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 52)) // token='...' - ) - { - _res = _literal; - return done(); + { // '...' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 52)) // token='...' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_250: '.' | '...' @@ -26342,40 +26722,41 @@ function _tmp_250_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '.' 
- if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '...' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '.' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 23)) // token='.' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 52)) // token='...' - ) - { - _res = _literal; - return done(); + { // '...' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 52)) // token='...' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_251: '@' named_expression NEWLINE @@ -26384,32 +26765,33 @@ function _tmp_251_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // '@' named_expression NEWLINE - if (p.error_indicator) { - return NULL; - } - var _literal; - var f; - var newline_var; - if ( - (_literal = $B._PyPegen.expect_token(p, 49)) // token='@' - && - (f = named_expression_rule(p)) // named_expression - && - (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = f; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // '@' named_expression NEWLINE + if (p.error_indicator) { + return NULL; + } + var _literal; + var f; + var newline_var; + if ( + (_literal = $B._PyPegen.expect_token(p, 49)) // token='@' + && + (f = named_expression_rule(p)) // named_expression + && + (newline_var = $B._PyPegen.expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = f; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_252: ',' expression @@ -26418,29 +26800,30 @@ function _tmp_252_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ',' expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var c; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (c = expression_rule(p)) // expression - ) - { - _res = c; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ',' expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var c; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (c = expression_rule(p)) // expression + ) + { + _res = c; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_253: ',' star_expression @@ -26449,29 +26832,30 @@ function _tmp_253_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ',' star_expression - if (p.error_indicator) { - return NULL; - } - var _literal; - var c; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (c = star_expression_rule(p)) // star_expression - ) - { - _res = c; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ',' star_expression + if (p.error_indicator) { + return NULL; + } + var _literal; + var c; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (c = star_expression_rule(p)) // star_expression + ) + { + _res = c; + 
break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_254: 'or' conjunction @@ -26480,29 +26864,30 @@ function _tmp_254_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'or' conjunction - if (p.error_indicator) { - return NULL; - } - var _keyword; - var c; - if ( - (_keyword = $B._PyPegen.expect_token(p, 574)) // token='or' - && - (c = conjunction_rule(p)) // conjunction - ) - { - _res = c; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'or' conjunction + if (p.error_indicator) { + return NULL; + } + var _keyword; + var c; + if ( + (_keyword = $B._PyPegen.expect_token(p, 574)) // token='or' + && + (c = conjunction_rule(p)) // conjunction + ) + { + _res = c; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_255: 'and' inversion @@ -26511,29 +26896,30 @@ function _tmp_255_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'and' inversion - if (p.error_indicator) { - return NULL; - } - var _keyword; - var c; - if ( - (_keyword = $B._PyPegen.expect_token(p, 575)) // token='and' - && - (c = inversion_rule(p)) // inversion - ) - { - _res = c; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'and' inversion + if (p.error_indicator) { + return NULL; + } + var _keyword; + var c; + if ( + (_keyword = $B._PyPegen.expect_token(p, 575)) // token='and' + && + (c = inversion_rule(p)) // inversion + ) + { + _res = c; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_256: slice | starred_expression @@ -26542,40 +26928,41 @@ function _tmp_256_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // slice - if (p.error_indicator) { - return NULL; - } - var slice_var; - if ( - (slice_var = slice_rule(p)) // slice - ) - { - _res = slice_var; - return done(); - } - p.mark = _mark; - } - { // starred_expression - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // slice + if (p.error_indicator) { + return NULL; + } + var slice_var; + if ( + (slice_var = slice_rule(p)) // slice + ) + { + _res = slice_var; + break; + } + p.mark = _mark; } - var starred_expression_var; - if ( - (starred_expression_var = starred_expression_rule(p)) // starred_expression - ) - { - _res = starred_expression_var; - return done(); + { // starred_expression + if (p.error_indicator) { + return NULL; + } + var starred_expression_var; + if ( + (starred_expression_var = starred_expression_rule(p)) // starred_expression + ) + { + _res = starred_expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_257: fstring | string @@ -26584,40 +26971,41 @@ function _tmp_257_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // fstring - if (p.error_indicator) { - return NULL; - } - var fstring_var; - if ( - (fstring_var = fstring_rule(p)) // fstring - ) - { - _res = fstring_var; - return done(); - } - p.mark = _mark; - } - { // string - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // fstring + if (p.error_indicator) { + return NULL; + } + var fstring_var; + if ( + 
(fstring_var = fstring_rule(p)) // fstring + ) + { + _res = fstring_var; + break; + } + p.mark = _mark; } - var string_var; - if ( - (string_var = string_rule(p)) // string - ) - { - _res = string_var; - return done(); + { // string + if (p.error_indicator) { + return NULL; + } + var string_var; + if ( + (string_var = string_rule(p)) // string + ) + { + _res = string_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_258: 'if' disjunction @@ -26626,29 +27014,30 @@ function _tmp_258_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'if' disjunction - if (p.error_indicator) { - return NULL; - } - var _keyword; - var z; - if ( - (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' - && - (z = disjunction_rule(p)) // disjunction - ) - { - _res = z; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'if' disjunction + if (p.error_indicator) { + return NULL; + } + var _keyword; + var z; + if ( + (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' + && + (z = disjunction_rule(p)) // disjunction + ) + { + _res = z; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_259: 'if' disjunction @@ -26657,29 +27046,30 @@ function _tmp_259_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'if' disjunction - if (p.error_indicator) { - return NULL; - } - var _keyword; - var z; - if ( - (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' - && - (z = disjunction_rule(p)) // disjunction - ) - { - _res = z; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'if' disjunction + if (p.error_indicator) { + return NULL; + } + var _keyword; + var z; + if ( + (_keyword = $B._PyPegen.expect_token(p, 642)) // token='if' + && + (z = disjunction_rule(p)) // disjunction + ) + { + _res = z; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_260: starred_expression | (assignment_expression | expression !':=') !'=' @@ -26688,42 +27078,43 @@ function _tmp_260_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // starred_expression - if (p.error_indicator) { - return NULL; - } - var starred_expression_var; - if ( - (starred_expression_var = starred_expression_rule(p)) // starred_expression - ) - { - _res = starred_expression_var; - return done(); - } - p.mark = _mark; - } - { // (assignment_expression | expression !':=') !'=' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // starred_expression + if (p.error_indicator) { + return NULL; + } + var starred_expression_var; + if ( + (starred_expression_var = starred_expression_rule(p)) // starred_expression + ) + { + _res = starred_expression_var; + break; + } + p.mark = _mark; } - var _tmp_273_var; - if ( - (_tmp_273_var = _tmp_273_rule(p)) // assignment_expression | expression !':=' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 22) // token='=' - ) - { - _res = _tmp_273_var; - return done(); + { // (assignment_expression | expression !':=') !'=' + if (p.error_indicator) { + return NULL; + } + var _tmp_273_var; + if ( + (_tmp_273_var = _tmp_273_rule(p)) // assignment_expression | expression !':=' + && + $B._PyPegen.lookahead_with_int(0, 
$B._PyPegen.expect_token, p, 22) // token='=' + ) + { + _res = _tmp_273_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_261: ',' star_target @@ -26732,29 +27123,30 @@ function _tmp_261_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ',' star_target - if (p.error_indicator) { - return NULL; - } - var _literal; - var c; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (c = star_target_rule(p)) // star_target - ) - { - _res = c; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ',' star_target + if (p.error_indicator) { + return NULL; + } + var _literal; + var c; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (c = star_target_rule(p)) // star_target + ) + { + _res = c; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_262: ',' star_target @@ -26763,29 +27155,30 @@ function _tmp_262_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ',' star_target - if (p.error_indicator) { - return NULL; - } - var _literal; - var c; - if ( - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (c = star_target_rule(p)) // star_target - ) - { - _res = c; - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ',' star_target + if (p.error_indicator) { + return NULL; + } + var _literal; + var c; + if ( + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (c = star_target_rule(p)) // star_target + ) + { + _res = c; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_263: @@ -26795,32 +27188,33 @@ function _tmp_263_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs - if (p.error_indicator) { - return NULL; - } - var _gather_274_var; - var _literal; - var kwargs_var; - if ( - (_gather_274_var = _gather_274_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ - && - (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' - && - (kwargs_var = kwargs_rule(p)) // kwargs - ) - { - _res = $B._PyPegen.dummy_name(p, _gather_274_var, _literal, kwargs_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs + if (p.error_indicator) { + return NULL; + } + var _gather_274_var; + var _literal; + var kwargs_var; + if ( + (_gather_274_var = _gather_274_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ + && + (_literal = $B._PyPegen.expect_token(p, 12)) // token=',' + && + (kwargs_var = kwargs_rule(p)) // kwargs + ) + { + _res = $B._PyPegen.dummy_name(p, _gather_274_var, _literal, kwargs_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_264: star_targets '=' @@ -26829,29 +27223,30 @@ function _tmp_264_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // star_targets '=' - if (p.error_indicator) { - return NULL; - } - var _literal; - var star_targets_var; - if ( - (star_targets_var = 
star_targets_rule(p)) // star_targets - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = $B._PyPegen.dummy_name(p, star_targets_var, _literal); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // star_targets '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + var star_targets_var; + if ( + (star_targets_var = star_targets_rule(p)) // star_targets + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = $B._PyPegen.dummy_name(p, star_targets_var, _literal); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_265: star_targets '=' @@ -26860,29 +27255,30 @@ function _tmp_265_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // star_targets '=' - if (p.error_indicator) { - return NULL; - } - var _literal; - var star_targets_var; - if ( - (star_targets_var = star_targets_rule(p)) // star_targets - && - (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' - ) - { - _res = $B._PyPegen.dummy_name(p, star_targets_var, _literal); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // star_targets '=' + if (p.error_indicator) { + return NULL; + } + var _literal; + var star_targets_var; + if ( + (star_targets_var = star_targets_rule(p)) // star_targets + && + (_literal = $B._PyPegen.expect_token(p, 22)) // token='=' + ) + { + _res = $B._PyPegen.dummy_name(p, star_targets_var, _literal); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_266: ')' | '**' @@ -26891,40 +27287,41 @@ function _tmp_266_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ')' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 8)) // token=')' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '**' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ')' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 8)) // token=')' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - ) - { - _res = _literal; - return done(); + { // '**' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_267: ':' | '**' @@ -26933,40 +27330,41 @@ function _tmp_267_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // ':' - if (p.error_indicator) { - return NULL; - } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' - ) - { - _res = _literal; - return done(); - } - p.mark = _mark; - } - { // '**' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // ':' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 11)) // token=':' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - var _literal; - if ( - (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' - ) - { - 
_res = _literal; - return done(); + { // '**' + if (p.error_indicator) { + return NULL; + } + var _literal; + if ( + (_literal = $B._PyPegen.expect_token(p, 35)) // token='**' + ) + { + _res = _literal; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_268: expression ['as' star_target] @@ -26975,30 +27373,31 @@ function _tmp_268_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expression ['as' star_target] - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - && - (_opt_var = _tmp_276_rule(p), !p.error_indicator) // ['as' star_target] - ) - { - _res = $B._PyPegen.dummy_name(p, expression_var, _opt_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expression ['as' star_target] + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + (_opt_var = _tmp_276_rule(p), !p.error_indicator) // ['as' star_target] + ) + { + _res = $B._PyPegen.dummy_name(p, expression_var, _opt_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_269: expressions ['as' star_target] @@ -27007,30 +27406,31 @@ function _tmp_269_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expressions ['as' star_target] - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var expressions_var; - if ( - (expressions_var = expressions_rule(p)) // expressions - && - (_opt_var = _tmp_277_rule(p), !p.error_indicator) // ['as' star_target] - ) - { - _res = $B._PyPegen.dummy_name(p, expressions_var, _opt_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expressions ['as' star_target] + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var expressions_var; + if ( + (expressions_var = expressions_rule(p)) // expressions + && + (_opt_var = _tmp_277_rule(p), !p.error_indicator) // ['as' star_target] + ) + { + _res = $B._PyPegen.dummy_name(p, expressions_var, _opt_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_270: expression ['as' star_target] @@ -27039,30 +27439,31 @@ function _tmp_270_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expression ['as' star_target] - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - && - (_opt_var = _tmp_278_rule(p), !p.error_indicator) // ['as' star_target] - ) - { - _res = $B._PyPegen.dummy_name(p, expression_var, _opt_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expression ['as' star_target] + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + (_opt_var = _tmp_278_rule(p), !p.error_indicator) // ['as' 
star_target] + ) + { + _res = $B._PyPegen.dummy_name(p, expression_var, _opt_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_271: expressions ['as' star_target] @@ -27071,30 +27472,31 @@ function _tmp_271_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // expressions ['as' star_target] - if (p.error_indicator) { - return NULL; - } - var _opt_var; - UNUSED(_opt_var); // Silence compiler warnings - var expressions_var; - if ( - (expressions_var = expressions_rule(p)) // expressions - && - (_opt_var = _tmp_279_rule(p), !p.error_indicator) // ['as' star_target] - ) - { - _res = $B._PyPegen.dummy_name(p, expressions_var, _opt_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // expressions ['as' star_target] + if (p.error_indicator) { + return NULL; + } + var _opt_var; + UNUSED(_opt_var); // Silence compiler warnings + var expressions_var; + if ( + (expressions_var = expressions_rule(p)) // expressions + && + (_opt_var = _tmp_279_rule(p), !p.error_indicator) // ['as' star_target] + ) + { + _res = $B._PyPegen.dummy_name(p, expressions_var, _opt_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_272: 'as' NAME @@ -27103,29 +27505,30 @@ function _tmp_272_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' NAME - if (p.error_indicator) { - return NULL; - } - var _keyword; - var name_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (name_var = $B._PyPegen.name_token(p)) // NAME - ) - { - _res = $B._PyPegen.dummy_name(p, _keyword, name_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' NAME + if (p.error_indicator) { + return NULL; + } + var _keyword; + var name_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (name_var = $B._PyPegen.name_token(p)) // NAME + ) + { + _res = $B._PyPegen.dummy_name(p, _keyword, name_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_273: assignment_expression | expression !':=' @@ -27134,42 +27537,43 @@ function _tmp_273_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // assignment_expression - if (p.error_indicator) { - return NULL; - } - var assignment_expression_var; - if ( - (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression - ) - { - _res = assignment_expression_var; - return done(); - } - p.mark = _mark; - } - { // expression !':=' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // assignment_expression + if (p.error_indicator) { + return NULL; + } + var assignment_expression_var; + if ( + (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression + ) + { + _res = assignment_expression_var; + break; + } + p.mark = _mark; } - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 53) // token=':=' - ) - { - _res = expression_var; - return done(); + { // expression !':=' + if (p.error_indicator) { + return NULL; + } + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + 
$B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 53) // token=':=' + ) + { + _res = expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _loop0_275: ',' (starred_expression | (assignment_expression | expression !':=') !'=') @@ -27212,29 +27616,30 @@ function _gather_274_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_275 - if (p.error_indicator) { - return NULL; - } - var elem; - var seq; - if ( - (elem = _tmp_280_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' - && - (seq = _loop0_275_rule(p)) // _loop0_275 - ) - { - _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_275 + if (p.error_indicator) { + return NULL; + } + var elem; + var seq; + if ( + (elem = _tmp_280_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + && + (seq = _loop0_275_rule(p)) // _loop0_275 + ) + { + _res = $B._PyPegen.seq_insert_in_front(p, elem, seq); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_276: 'as' star_target @@ -27243,29 +27648,30 @@ function _tmp_276_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' star_target - if (p.error_indicator) { - return NULL; - } - var _keyword; - var star_target_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (star_target_var = star_target_rule(p)) // star_target - ) - { - _res = $B._PyPegen.dummy_name(p, _keyword, star_target_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' star_target + if (p.error_indicator) { + return NULL; + } + var _keyword; + var star_target_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (star_target_var = star_target_rule(p)) // star_target + ) + { + _res = $B._PyPegen.dummy_name(p, _keyword, star_target_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_277: 'as' star_target @@ -27274,29 +27680,30 @@ function _tmp_277_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' star_target - if (p.error_indicator) { - return NULL; - } - var _keyword; - var star_target_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (star_target_var = star_target_rule(p)) // star_target - ) - { - _res = $B._PyPegen.dummy_name(p, _keyword, star_target_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' star_target + if (p.error_indicator) { + return NULL; + } + var _keyword; + var star_target_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (star_target_var = star_target_rule(p)) // star_target + ) + { + _res = $B._PyPegen.dummy_name(p, _keyword, star_target_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_278: 'as' star_target @@ -27305,29 +27712,30 @@ function _tmp_278_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = 
p.mark; - { // 'as' star_target - if (p.error_indicator) { - return NULL; - } - var _keyword; - var star_target_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (star_target_var = star_target_rule(p)) // star_target - ) - { - _res = $B._PyPegen.dummy_name(p, _keyword, star_target_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' star_target + if (p.error_indicator) { + return NULL; + } + var _keyword; + var star_target_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (star_target_var = star_target_rule(p)) // star_target + ) + { + _res = $B._PyPegen.dummy_name(p, _keyword, star_target_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_279: 'as' star_target @@ -27336,29 +27744,30 @@ function _tmp_279_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // 'as' star_target - if (p.error_indicator) { - return NULL; - } - var _keyword; - var star_target_var; - if ( - (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' - && - (star_target_var = star_target_rule(p)) // star_target - ) - { - _res = $B._PyPegen.dummy_name(p, _keyword, star_target_var); - return done(); + while (1) { + var _res = NULL; + var _mark = p.mark; + { // 'as' star_target + if (p.error_indicator) { + return NULL; + } + var _keyword; + var star_target_var; + if ( + (_keyword = $B._PyPegen.expect_token(p, 640)) // token='as' + && + (star_target_var = star_target_rule(p)) // star_target + ) + { + _res = $B._PyPegen.dummy_name(p, _keyword, star_target_var); + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_280: starred_expression | (assignment_expression | expression !':=') !'=' @@ -27367,42 +27776,43 @@ function _tmp_280_rule(p) if (p.error_indicator) { return NULL; } - var _res = NULL; - var _mark = p.mark; - { // starred_expression - if (p.error_indicator) { - return NULL; - } - var starred_expression_var; - if ( - (starred_expression_var = starred_expression_rule(p)) // starred_expression - ) - { - _res = starred_expression_var; - return done(); - } - p.mark = _mark; - } - { // (assignment_expression | expression !':=') !'=' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // starred_expression + if (p.error_indicator) { + return NULL; + } + var starred_expression_var; + if ( + (starred_expression_var = starred_expression_rule(p)) // starred_expression + ) + { + _res = starred_expression_var; + break; + } + p.mark = _mark; } - var _tmp_281_var; - if ( - (_tmp_281_var = _tmp_281_rule(p)) // assignment_expression | expression !':=' - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 22) // token='=' - ) - { - _res = _tmp_281_var; - return done(); + { // (assignment_expression | expression !':=') !'=' + if (p.error_indicator) { + return NULL; + } + var _tmp_281_var; + if ( + (_tmp_281_var = _tmp_281_rule(p)) // assignment_expression | expression !':=' + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 22) // token='=' + ) + { + _res = _tmp_281_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } // _tmp_281: assignment_expression | expression !':=' @@ -27411,57 +27821,59 @@ function _tmp_281_rule(p) if (p.error_indicator) { return 
NULL; } - var _res = NULL; - var _mark = p.mark; - { // assignment_expression - if (p.error_indicator) { - return NULL; - } - var assignment_expression_var; - if ( - (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression - ) - { - _res = assignment_expression_var; - return done(); - } - p.mark = _mark; - } - { // expression !':=' - if (p.error_indicator) { - return NULL; + while (1) { + var _res = NULL; + var _mark = p.mark; + { // assignment_expression + if (p.error_indicator) { + return NULL; + } + var assignment_expression_var; + if ( + (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression + ) + { + _res = assignment_expression_var; + break; + } + p.mark = _mark; } - var expression_var; - if ( - (expression_var = expression_rule(p)) // expression - && - $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 53) // token=':=' - ) - { - _res = expression_var; - return done(); + { // expression !':=' + if (p.error_indicator) { + return NULL; + } + var expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + $B._PyPegen.lookahead_with_int(0, $B._PyPegen.expect_token, p, 53) // token=':=' + ) + { + _res = expression_var; + break; + } + p.mark = _mark; } - p.mark = _mark; + _res = NULL; + break; } - _res = NULL; - function done(){ return _res; - } } -$B._PyPegen_parse = function(p){ +$B._PyPegen.parse = function(p){ p.keywords = reserved_keywords; p.n_keyword_lists = n_keyword_lists; p.soft_keywords = soft_keywords; - // skip first token (ENCODING) - p.tok.next() - switch(p.mode){ case 'file': return file_rule(p) case 'eval': return eval_rule(p) + case 'single': + return interactive_rule(p) + default: + console.log('unknown mode', p.mode) + alert() } - } diff --git a/www/src/js_objects.js b/www/src/js_objects.js index 06f097a8b..ca524d871 100644 --- a/www/src/js_objects.js +++ b/www/src/js_objects.js @@ -146,7 +146,11 @@ var jsobj2pyobj = $B.jsobj2pyobj = function(jsobj, _this){ if(Array.isArray(jsobj)){ // set it as non-enumerable, prevents issues when looping on it in JS. - Object.defineProperty(jsobj, "$is_js_array", {value: true}); + try{ + Object.defineProperty(jsobj, "$is_js_array", {value: true}); + }catch(err){ + // ignore; cf. 
issue #2379 + } return jsobj } @@ -556,7 +560,7 @@ function jsclass2pyclass(js_class){ } $B.JSObj.__getattribute__ = function(_self, attr){ - var test = false // attr == "Date" + var test = attr == "line" if(test){ console.log("__ga__", _self, attr) } @@ -625,7 +629,7 @@ $B.JSObj.__getattribute__ = function(_self, attr){ } if(js_attr !== null && js_attr.toString && - typeof js_attr.toString == 'function' && + typeof js_attr == 'function' && js_attr.toString().startsWith('class ')){ // Javascript class return jsclass2pyclass(js_attr) @@ -798,6 +802,10 @@ function convert_to_python(obj){ return obj.map(convert_to_python) } if($B.$isinstance(obj, $B.JSObj)){ + if(typeof obj == 'number'){ + // float + return $B.fast_float(obj) + } var res = $B.empty_dict() for(var key in obj){ _b_.dict.$setitem_string(res, key, convert_to_python(obj[key])) diff --git a/www/src/libs/_ajax.js b/www/src/libs/_ajax.js index 2d7c80f07..19f49f6d0 100644 --- a/www/src/libs/_ajax.js +++ b/www/src/libs/_ajax.js @@ -81,19 +81,20 @@ function handle_kwargs(self, kw, method){ headers={}, cache, mode = "text", - timeout = {} + timeout = {}, + rawdata for(var item of _b_.dict.$iter_items(kw)){ var key = item.key if(key == "data"){ - var params = item.value - if(typeof params == "string" || params instanceof FormData){ - data = params - }else if(params.__class__ === _b_.dict){ - data = stringify(params) + var rawdata = item.value + if(typeof rawdata == "string" || rawdata instanceof FormData){ + data = rawdata + }else if(rawdata.__class__ === _b_.dict){ + data = stringify(rawdata) }else{ throw _b_.TypeError.$factory("wrong type for data: " + - $B.class_name(params)) + $B.class_name(rawdata)) } }else if(key == "encoding"){ encoding = item.value @@ -132,7 +133,7 @@ function handle_kwargs(self, kw, method){ "application/x-www-form-urlencoded") } - return {cache, data, encoding, headers, mode, timeout} + return {cache, data, rawdata, encoding, headers, mode, timeout} } var ajax = $B.make_class('ajax') @@ -288,6 +289,16 @@ ajax.send = function(self, params){ return _b_.None } +ajax.responseType = _b_.property.$factory( + function(_self){ + return _self.responseType + }, + function(_self, value){ + console.log('set response type', value) + _self.js.responseType = value + } +) + ajax.set_header = function(self, key, value){ self.headers[key.toLowerCase()] = value } @@ -478,7 +489,7 @@ function file_upload(){ var self = ajax.$factory() var items = handle_kwargs(self, kw, method), - data = items.data, + rawdata = items.rawdata, headers = items.headers for(var key in headers){ @@ -500,14 +511,14 @@ function file_upload(){ var formdata = new FormData() formdata.append(field_name, file, file.name) - if(data){ - if(data instanceof FormData){ + if(rawdata){ + if(rawdata instanceof FormData){ // append additional data - for(var d of data){ + for(var d of rawdata){ formdata.append(d[0], d[1]) } - }else if($B.$isinstance(data, _b_.dict)){ - for(var item of _b_.dict.$iter_items(data)){ + }else if($B.$isinstance(rawdata, _b_.dict)){ + for(var item of _b_.dict.$iter_items(rawdata)){ formdata.append(item.key, item.value) } }else{ diff --git a/www/src/libs/_tokenize.js b/www/src/libs/_tokenize.js index 2cbf33a1f..1928b347f 100644 --- a/www/src/libs/_tokenize.js +++ b/www/src/libs/_tokenize.js @@ -22,12 +22,11 @@ TokenizerIter.__iter__ = function(self){ }catch(err){ if($B.is_exc(err, [_b_.StopIteration])){ token = endmarker - token.start[0]++ - token.end[0]++ - var type_code = $B.imported.token[token.type] - yield $B.fast_tuple([type_code, 
token.string, - $B.fast_tuple(token.start), - $B.fast_tuple(token.end), + token.lineno++ + token.end_lineno++ + yield $B.fast_tuple([token.num_type, token.string, + $B.fast_tuple([token.lineno, token.col_offset]), + $B.fast_tuple([token.end_lineno, token.end_col_offset]), token.line]) } throw err @@ -35,18 +34,18 @@ TokenizerIter.__iter__ = function(self){ line_num++ var line = _b_.bytes.decode(bytes, 'utf-8') for(var token of $B.tokenizer(line, 'test')){ - if(token.type == 'ENCODING'){ // skip encoding token + if(token.num_type == $B.py_tokens.ENCODING){ // skip encoding token continue - }else if(token.type == 'ENDMARKER'){ + }else if(token.num_type == $B.py_tokens.ENDMARKER){ var endmarker = token continue } - token.start[0] = line_num - token.end[0] = line_num - var type_code = $B.imported.token[token.type] - yield $B.fast_tuple([type_code, token.string, - $B.fast_tuple(token.start), - $B.fast_tuple(token.end), + token.type = token.num_type + token.lineno = line_num + token.end_lineno = line_num + yield $B.fast_tuple([token.num_type, token.string, + $B.fast_tuple([token.lineno, token.col_offset]), + $B.fast_tuple([token.end_lineno, token.end_col_offset]), token.line]) } } diff --git a/www/src/libs/_webworker.js b/www/src/libs/_webworker.js index 4ce2db8a7..dbb1c0cbf 100644 --- a/www/src/libs/_webworker.js +++ b/www/src/libs/_webworker.js @@ -11,12 +11,13 @@ function scripts_to_load(debug_level){ if(debug_level > 2){ var brython_scripts = [ 'brython_builtins', - + 'py_ast_classes', - 'unicode_data', 'stdlib_paths', + 'unicode_data', 'version_info', - + + 'py_tokens', 'python_tokenizer', 'py_ast', 'py2js', @@ -30,7 +31,6 @@ function scripts_to_load(debug_level){ 'py_range_slice', 'py_bytes', 'py_set', - 'js_objects', 'py_import', 'py_string', 'py_int', @@ -39,6 +39,7 @@ function scripts_to_load(debug_level){ 'py_complex', 'py_dict', 'py_list', + 'js_objects', 'py_generator', 'py_dom', 'py_pattern_matching', @@ -47,8 +48,15 @@ function scripts_to_load(debug_level){ 'builtin_modules', 'ast_to_js', 'symtable', - 'builtins_docstrings' - ] + + 'action_helpers_generated_version', + 'string_parser', + 'number_parser', + 'python_parser_peg_version', + 'pegen', + 'gen_parse', + 'brython_ready' + ] }else{ var brython_scripts = ['brython'] } diff --git a/www/src/libs/encoding_cp932.js b/www/src/libs/encoding_cp932.js index a54115dce..59abe30a3 100644 --- a/www/src/libs/encoding_cp932.js +++ b/www/src/libs/encoding_cp932.js @@ -1,11 +1,14 @@ -var _table = [0x00, 0x0000,0x01, 0x0001,0x02, 0x0002,0x03, 0x0003,0x04, 0x0004,0x05, 0x0005,0x06, 0x0006,0x07, 0x0007,0x08, 0x0008,0x09, 0x0009,0x0A, 0x000A,0x0B, 0x000B,0x0C, 0x000C,0x0D, 0x000D,0x0E, 0x000E,0x0F, 0x000F,0x10, 0x0010,0x11, 0x0011,0x12, 0x0012,0x13, 0x0013,0x14, 0x0014,0x15, 0x0015,0x16, 0x0016,0x17, 0x0017,0x18, 0x0018,0x19, 0x0019,0x1A, 0x001A,0x1B, 0x001B,0x1C, 0x001C,0x1D, 0x001D,0x1E, 0x001E,0x1F, 0x001F,0x20, 0x0020,0x21, 0x0021,0x22, 0x0022,0x23, 0x0023,0x24, 0x0024,0x25, 0x0025,0x26, 0x0026,0x27, 0x0027,0x28, 0x0028,0x29, 0x0029,0x2A, 0x002A,0x2B, 0x002B,0x2C, 0x002C,0x2D, 0x002D,0x2E, 0x002E,0x2F, 0x002F,0x30, 0x0030,0x31, 0x0031,0x32, 0x0032,0x33, 0x0033,0x34, 0x0034,0x35, 0x0035,0x36, 0x0036,0x37, 0x0037,0x38, 0x0038,0x39, 0x0039,0x3A, 0x003A,0x3B, 0x003B,0x3C, 0x003C,0x3D, 0x003D,0x3E, 0x003E,0x3F, 0x003F,0x40, 0x0040,0x41, 0x0041,0x42, 0x0042,0x43, 0x0043,0x44, 0x0044,0x45, 0x0045,0x46, 0x0046,0x47, 0x0047,0x48, 0x0048,0x49, 0x0049,0x4A, 0x004A,0x4B, 0x004B,0x4C, 0x004C,0x4D, 0x004D,0x4E, 0x004E,0x4F, 0x004F,0x50, 0x0050,0x51, 
0x0051,0x52, 0x0052,0x53, 0x0053,0x54, 0x0054,0x55, 0x0055,0x56, 0x0056,0x57, 0x0057,0x58, 0x0058,0x59, 0x0059,0x5A, 0x005A,0x5B, 0x005B,0x5C, 0x005C,0x5D, 0x005D,0x5E, 0x005E,0x5F, 0x005F,0x60, 0x0060,0x61, 0x0061,0x62, 0x0062,0x63, 0x0063,0x64, 0x0064,0x65, 0x0065,0x66, 0x0066,0x67, 0x0067,0x68, 0x0068,0x69, 0x0069,0x6A, 0x006A,0x6B, 0x006B,0x6C, 0x006C,0x6D, 0x006D,0x6E, 0x006E,0x6F, 0x006F,0x70, 0x0070,0x71, 0x0071,0x72, 0x0072,0x73, 0x0073,0x74, 0x0074,0x75, 0x0075,0x76, 0x0076,0x77, 0x0077,0x78, 0x0078,0x79, 0x0079,0x7A, 0x007A,0x7B, 0x007B,0x7C, 0x007C,0x7D, 0x007D,0x7E, 0x007E,0x7F, 0x007F,0x80, -1,0x81, -1,0x82, -1,0x83, -1,0x84, -1,0x85, -1,0x86, -1,0x87, -1,0x88, -1,0x89, -1,0x8A, -1,0x8B, -1,0x8C, -1,0x8D, -1,0x8E, -1,0x8F, -1,0x90, -1,0x91, -1,0x92, -1,0x93, -1,0x94, -1,0x95, -1,0x96, -1,0x97, -1,0x98, -1,0x99, -1,0x9A, -1,0x9B, -1,0x9C, -1,0x9D, -1,0x9E, -1,0x9F, -1,0xA0, -1,0xA1, 0xFF61,0xA2, 0xFF62,0xA3, 0xFF63,0xA4, 0xFF64,0xA5, 0xFF65,0xA6, 0xFF66,0xA7, 0xFF67,0xA8, 0xFF68,0xA9, 0xFF69,0xAA, 0xFF6A,0xAB, 0xFF6B,0xAC, 0xFF6C,0xAD, 0xFF6D,0xAE, 0xFF6E,0xAF, 0xFF6F,0xB0, 0xFF70,0xB1, 0xFF71,0xB2, 0xFF72,0xB3, 0xFF73,0xB4, 0xFF74,0xB5, 0xFF75,0xB6, 0xFF76,0xB7, 0xFF77,0xB8, 0xFF78,0xB9, 0xFF79,0xBA, 0xFF7A,0xBB, 0xFF7B,0xBC, 0xFF7C,0xBD, 0xFF7D,0xBE, 0xFF7E,0xBF, 0xFF7F,0xC0, 0xFF80,0xC1, 0xFF81,0xC2, 0xFF82,0xC3, 0xFF83,0xC4, 0xFF84,0xC5, 0xFF85,0xC6, 0xFF86,0xC7, 0xFF87,0xC8, 0xFF88,0xC9, 0xFF89,0xCA, 0xFF8A,0xCB, 0xFF8B,0xCC, 0xFF8C,0xCD, 0xFF8D,0xCE, 0xFF8E,0xCF, 0xFF8F,0xD0, 0xFF90,0xD1, 0xFF91,0xD2, 0xFF92,0xD3, 0xFF93,0xD4, 0xFF94,0xD5, 0xFF95,0xD6, 0xFF96,0xD7, 0xFF97,0xD8, 0xFF98,0xD9, 0xFF99,0xDA, 0xFF9A,0xDB, 0xFF9B,0xDC, 0xFF9C,0xDD, 0xFF9D,0xDE, 0xFF9E,0xDF, 0xFF9F,0xE0, -1,0xE1, -1,0xE2, -1,0xE3, -1,0xE4, -1,0xE5, -1,0xE6, -1,0xE7, -1,0xE8, -1,0xE9, -1,0xEA, -1,0xEB, -1,0xEC, -1,0xED, -1,0xEE, -1,0xEF, -1,0xF0, -1,0xF1, -1,0xF2, -1,0xF3, -1,0xF4, -1,0xF5, -1,0xF6, -1,0xF7, -1,0xF8, -1,0xF9, -1,0xFA, -1,0xFB, -1,0xFC, -1,0xFD, -1,0xFE, -1,0xFF, -1,0x8140, 0x3000,0x8141, 0x3001,0x8142, 0x3002,0x8143, 0xFF0C,0x8144, 0xFF0E,0x8145, 0x30FB,0x8146, 0xFF1A,0x8147, 0xFF1B,0x8148, 0xFF1F,0x8149, 0xFF01,0x814A, 0x309B,0x814B, 0x309C,0x814C, 0x00B4,0x814D, 0xFF40,0x814E, 0x00A8,0x814F, 0xFF3E,0x8150, 0xFFE3,0x8151, 0xFF3F,0x8152, 0x30FD,0x8153, 0x30FE,0x8154, 0x309D,0x8155, 0x309E,0x8156, 0x3003,0x8157, 0x4EDD,0x8158, 0x3005,0x8159, 0x3006,0x815A, 0x3007,0x815B, 0x30FC,0x815C, 0x2015,0x815D, 0x2010,0x815E, 0xFF0F,0x815F, 0xFF3C,0x8160, 0xFF5E,0x8161, 0x2225,0x8162, 0xFF5C,0x8163, 0x2026,0x8164, 0x2025,0x8165, 0x2018,0x8166, 0x2019,0x8167, 0x201C,0x8168, 0x201D,0x8169, 0xFF08,0x816A, 0xFF09,0x816B, 0x3014,0x816C, 0x3015,0x816D, 0xFF3B,0x816E, 0xFF3D,0x816F, 0xFF5B,0x8170, 0xFF5D,0x8171, 0x3008,0x8172, 0x3009,0x8173, 0x300A,0x8174, 0x300B,0x8175, 0x300C,0x8176, 0x300D,0x8177, 0x300E,0x8178, 0x300F,0x8179, 0x3010,0x817A, 0x3011,0x817B, 0xFF0B,0x817C, 0xFF0D,0x817D, 0x00B1,0x817E, 0x00D7,0x8180, 0x00F7,0x8181, 0xFF1D,0x8182, 0x2260,0x8183, 0xFF1C,0x8184, 0xFF1E,0x8185, 0x2266,0x8186, 0x2267,0x8187, 0x221E,0x8188, 0x2234,0x8189, 0x2642,0x818A, 0x2640,0x818B, 0x00B0,0x818C, 0x2032,0x818D, 0x2033,0x818E, 0x2103,0x818F, 0xFFE5,0x8190, 0xFF04,0x8191, 0xFFE0,0x8192, 0xFFE1,0x8193, 0xFF05,0x8194, 0xFF03,0x8195, 0xFF06,0x8196, 0xFF0A,0x8197, 0xFF20,0x8198, 0x00A7,0x8199, 0x2606,0x819A, 0x2605,0x819B, 0x25CB,0x819C, 0x25CF,0x819D, 0x25CE,0x819E, 0x25C7,0x819F, 0x25C6,0x81A0, 0x25A1,0x81A1, 0x25A0,0x81A2, 0x25B3,0x81A3, 0x25B2,0x81A4, 0x25BD,0x81A5, 0x25BC,0x81A6, 0x203B,0x81A7, 
0x3012,0x81A8, 0x2192,0x81A9, 0x2190,0x81AA, 0x2191,0x81AB, 0x2193,0x81AC, 0x3013,0x81B8, 0x2208,0x81B9, 0x220B,0x81BA, 0x2286,0x81BB, 0x2287,0x81BC, 0x2282,0x81BD, 0x2283,0x81BE, 0x222A,0x81BF, 0x2229,0x81C8, 0x2227,0x81C9, 0x2228,0x81CA, 0xFFE2,0x81CB, 0x21D2,0x81CC, 0x21D4,0x81CD, 0x2200,0x81CE, 0x2203,0x81DA, 0x2220,0x81DB, 0x22A5,0x81DC, 0x2312,0x81DD, 0x2202,0x81DE, 0x2207,0x81DF, 0x2261,0x81E0, 0x2252,0x81E1, 0x226A,0x81E2, 0x226B,0x81E3, 0x221A,0x81E4, 0x223D,0x81E5, 0x221D,0x81E6, 0x2235,0x81E7, 0x222B,0x81E8, 0x222C,0x81F0, 0x212B,0x81F1, 0x2030,0x81F2, 0x266F,0x81F3, 0x266D,0x81F4, 0x266A,0x81F5, 0x2020,0x81F6, 0x2021,0x81F7, 0x00B6,0x81FC, 0x25EF,0x824F, 0xFF10,0x8250, 0xFF11,0x8251, 0xFF12,0x8252, 0xFF13,0x8253, 0xFF14,0x8254, 0xFF15,0x8255, 0xFF16,0x8256, 0xFF17,0x8257, 0xFF18,0x8258, 0xFF19,0x8260, 0xFF21,0x8261, 0xFF22,0x8262, 0xFF23,0x8263, 0xFF24,0x8264, 0xFF25,0x8265, 0xFF26,0x8266, 0xFF27,0x8267, 0xFF28,0x8268, 0xFF29,0x8269, 0xFF2A,0x826A, 0xFF2B,0x826B, 0xFF2C,0x826C, 0xFF2D,0x826D, 0xFF2E,0x826E, 0xFF2F,0x826F, 0xFF30,0x8270, 0xFF31,0x8271, 0xFF32,0x8272, 0xFF33,0x8273, 0xFF34,0x8274, 0xFF35,0x8275, 0xFF36,0x8276, 0xFF37,0x8277, 0xFF38,0x8278, 0xFF39,0x8279, 0xFF3A,0x8281, 0xFF41,0x8282, 0xFF42,0x8283, 0xFF43,0x8284, 0xFF44,0x8285, 0xFF45,0x8286, 0xFF46,0x8287, 0xFF47,0x8288, 0xFF48,0x8289, 0xFF49,0x828A, 0xFF4A,0x828B, 0xFF4B,0x828C, 0xFF4C,0x828D, 0xFF4D,0x828E, 0xFF4E,0x828F, 0xFF4F,0x8290, 0xFF50,0x8291, 0xFF51,0x8292, 0xFF52,0x8293, 0xFF53,0x8294, 0xFF54,0x8295, 0xFF55,0x8296, 0xFF56,0x8297, 0xFF57,0x8298, 0xFF58,0x8299, 0xFF59,0x829A, 0xFF5A,0x829F, 0x3041,0x82A0, 0x3042,0x82A1, 0x3043,0x82A2, 0x3044,0x82A3, 0x3045,0x82A4, 0x3046,0x82A5, 0x3047,0x82A6, 0x3048,0x82A7, 0x3049,0x82A8, 0x304A,0x82A9, 0x304B,0x82AA, 0x304C,0x82AB, 0x304D,0x82AC, 0x304E,0x82AD, 0x304F,0x82AE, 0x3050,0x82AF, 0x3051,0x82B0, 0x3052,0x82B1, 0x3053,0x82B2, 0x3054,0x82B3, 0x3055,0x82B4, 0x3056,0x82B5, 0x3057,0x82B6, 0x3058,0x82B7, 0x3059,0x82B8, 0x305A,0x82B9, 0x305B,0x82BA, 0x305C,0x82BB, 0x305D,0x82BC, 0x305E,0x82BD, 0x305F,0x82BE, 0x3060,0x82BF, 0x3061,0x82C0, 0x3062,0x82C1, 0x3063,0x82C2, 0x3064,0x82C3, 0x3065,0x82C4, 0x3066,0x82C5, 0x3067,0x82C6, 0x3068,0x82C7, 0x3069,0x82C8, 0x306A,0x82C9, 0x306B,0x82CA, 0x306C,0x82CB, 0x306D,0x82CC, 0x306E,0x82CD, 0x306F,0x82CE, 0x3070,0x82CF, 0x3071,0x82D0, 0x3072,0x82D1, 0x3073,0x82D2, 0x3074,0x82D3, 0x3075,0x82D4, 0x3076,0x82D5, 0x3077,0x82D6, 0x3078,0x82D7, 0x3079,0x82D8, 0x307A,0x82D9, 0x307B,0x82DA, 0x307C,0x82DB, 0x307D,0x82DC, 0x307E,0x82DD, 0x307F,0x82DE, 0x3080,0x82DF, 0x3081,0x82E0, 0x3082,0x82E1, 0x3083,0x82E2, 0x3084,0x82E3, 0x3085,0x82E4, 0x3086,0x82E5, 0x3087,0x82E6, 0x3088,0x82E7, 0x3089,0x82E8, 0x308A,0x82E9, 0x308B,0x82EA, 0x308C,0x82EB, 0x308D,0x82EC, 0x308E,0x82ED, 0x308F,0x82EE, 0x3090,0x82EF, 0x3091,0x82F0, 0x3092,0x82F1, 0x3093,0x8340, 0x30A1,0x8341, 0x30A2,0x8342, 0x30A3,0x8343, 0x30A4,0x8344, 0x30A5,0x8345, 0x30A6,0x8346, 0x30A7,0x8347, 0x30A8,0x8348, 0x30A9,0x8349, 0x30AA,0x834A, 0x30AB,0x834B, 0x30AC,0x834C, 0x30AD,0x834D, 0x30AE,0x834E, 0x30AF,0x834F, 0x30B0,0x8350, 0x30B1,0x8351, 0x30B2,0x8352, 0x30B3,0x8353, 0x30B4,0x8354, 0x30B5,0x8355, 0x30B6,0x8356, 0x30B7,0x8357, 0x30B8,0x8358, 0x30B9,0x8359, 0x30BA,0x835A, 0x30BB,0x835B, 0x30BC,0x835C, 0x30BD,0x835D, 0x30BE,0x835E, 0x30BF,0x835F, 0x30C0,0x8360, 0x30C1,0x8361, 0x30C2,0x8362, 0x30C3,0x8363, 0x30C4,0x8364, 0x30C5,0x8365, 0x30C6,0x8366, 0x30C7,0x8367, 0x30C8,0x8368, 0x30C9,0x8369, 0x30CA,0x836A, 0x30CB,0x836B, 0x30CC,0x836C, 0x30CD,0x836D, 0x30CE,0x836E, 0x30CF,0x836F, 
0x30D0,0x8370, 0x30D1,0x8371, 0x30D2,0x8372, 0x30D3,0x8373, 0x30D4,0x8374, 0x30D5,0x8375, 0x30D6,0x8376, 0x30D7,0x8377, 0x30D8,0x8378, 0x30D9,0x8379, 0x30DA,0x837A, 0x30DB,0x837B, 0x30DC,0x837C, 0x30DD,0x837D, 0x30DE,0x837E, 0x30DF,0x8380, 0x30E0,0x8381, 0x30E1,0x8382, 0x30E2,0x8383, 0x30E3,0x8384, 0x30E4,0x8385, 0x30E5,0x8386, 0x30E6,0x8387, 0x30E7,0x8388, 0x30E8,0x8389, 0x30E9,0x838A, 0x30EA,0x838B, 0x30EB,0x838C, 0x30EC,0x838D, 0x30ED,0x838E, 0x30EE,0x838F, 0x30EF,0x8390, 0x30F0,0x8391, 0x30F1,0x8392, 0x30F2,0x8393, 0x30F3,0x8394, 0x30F4,0x8395, 0x30F5,0x8396, 0x30F6,0x839F, 0x0391,0x83A0, 0x0392,0x83A1, 0x0393,0x83A2, 0x0394,0x83A3, 0x0395,0x83A4, 0x0396,0x83A5, 0x0397,0x83A6, 0x0398,0x83A7, 0x0399,0x83A8, 0x039A,0x83A9, 0x039B,0x83AA, 0x039C,0x83AB, 0x039D,0x83AC, 0x039E,0x83AD, 0x039F,0x83AE, 0x03A0,0x83AF, 0x03A1,0x83B0, 0x03A3,0x83B1, 0x03A4,0x83B2, 0x03A5,0x83B3, 0x03A6,0x83B4, 0x03A7,0x83B5, 0x03A8,0x83B6, 0x03A9,0x83BF, 0x03B1,0x83C0, 0x03B2,0x83C1, 0x03B3,0x83C2, 0x03B4,0x83C3, 0x03B5,0x83C4, 0x03B6,0x83C5, 0x03B7,0x83C6, 0x03B8,0x83C7, 0x03B9,0x83C8, 0x03BA,0x83C9, 0x03BB,0x83CA, 0x03BC,0x83CB, 0x03BD,0x83CC, 0x03BE,0x83CD, 0x03BF,0x83CE, 0x03C0,0x83CF, 0x03C1,0x83D0, 0x03C3,0x83D1, 0x03C4,0x83D2, 0x03C5,0x83D3, 0x03C6,0x83D4, 0x03C7,0x83D5, 0x03C8,0x83D6, 0x03C9,0x8440, 0x0410,0x8441, 0x0411,0x8442, 0x0412,0x8443, 0x0413,0x8444, 0x0414,0x8445, 0x0415,0x8446, 0x0401,0x8447, 0x0416,0x8448, 0x0417,0x8449, 0x0418,0x844A, 0x0419,0x844B, 0x041A,0x844C, 0x041B,0x844D, 0x041C,0x844E, 0x041D,0x844F, 0x041E,0x8450, 0x041F,0x8451, 0x0420,0x8452, 0x0421,0x8453, 0x0422,0x8454, 0x0423,0x8455, 0x0424,0x8456, 0x0425,0x8457, 0x0426,0x8458, 0x0427,0x8459, 0x0428,0x845A, 0x0429,0x845B, 0x042A,0x845C, 0x042B,0x845D, 0x042C,0x845E, 0x042D,0x845F, 0x042E,0x8460, 0x042F,0x8470, 0x0430,0x8471, 0x0431,0x8472, 0x0432,0x8473, 0x0433,0x8474, 0x0434,0x8475, 0x0435,0x8476, 0x0451,0x8477, 0x0436,0x8478, 0x0437,0x8479, 0x0438,0x847A, 0x0439,0x847B, 0x043A,0x847C, 0x043B,0x847D, 0x043C,0x847E, 0x043D,0x8480, 0x043E,0x8481, 0x043F,0x8482, 0x0440,0x8483, 0x0441,0x8484, 0x0442,0x8485, 0x0443,0x8486, 0x0444,0x8487, 0x0445,0x8488, 0x0446,0x8489, 0x0447,0x848A, 0x0448,0x848B, 0x0449,0x848C, 0x044A,0x848D, 0x044B,0x848E, 0x044C,0x848F, 0x044D,0x8490, 0x044E,0x8491, 0x044F,0x849F, 0x2500,0x84A0, 0x2502,0x84A1, 0x250C,0x84A2, 0x2510,0x84A3, 0x2518,0x84A4, 0x2514,0x84A5, 0x251C,0x84A6, 0x252C,0x84A7, 0x2524,0x84A8, 0x2534,0x84A9, 0x253C,0x84AA, 0x2501,0x84AB, 0x2503,0x84AC, 0x250F,0x84AD, 0x2513,0x84AE, 0x251B,0x84AF, 0x2517,0x84B0, 0x2523,0x84B1, 0x2533,0x84B2, 0x252B,0x84B3, 0x253B,0x84B4, 0x254B,0x84B5, 0x2520,0x84B6, 0x252F,0x84B7, 0x2528,0x84B8, 0x2537,0x84B9, 0x253F,0x84BA, 0x251D,0x84BB, 0x2530,0x84BC, 0x2525,0x84BD, 0x2538,0x84BE, 0x2542,0x8740, 0x2460,0x8741, 0x2461,0x8742, 0x2462,0x8743, 0x2463,0x8744, 0x2464,0x8745, 0x2465,0x8746, 0x2466,0x8747, 0x2467,0x8748, 0x2468,0x8749, 0x2469,0x874A, 0x246A,0x874B, 0x246B,0x874C, 0x246C,0x874D, 0x246D,0x874E, 0x246E,0x874F, 0x246F,0x8750, 0x2470,0x8751, 0x2471,0x8752, 0x2472,0x8753, 0x2473,0x8754, 0x2160,0x8755, 0x2161,0x8756, 0x2162,0x8757, 0x2163,0x8758, 0x2164,0x8759, 0x2165,0x875A, 0x2166,0x875B, 0x2167,0x875C, 0x2168,0x875D, 0x2169,0x875F, 0x3349,0x8760, 0x3314,0x8761, 0x3322,0x8762, 0x334D,0x8763, 0x3318,0x8764, 0x3327,0x8765, 0x3303,0x8766, 0x3336,0x8767, 0x3351,0x8768, 0x3357,0x8769, 0x330D,0x876A, 0x3326,0x876B, 0x3323,0x876C, 0x332B,0x876D, 0x334A,0x876E, 0x333B,0x876F, 0x339C,0x8770, 0x339D,0x8771, 0x339E,0x8772, 0x338E,0x8773, 0x338F,0x8774, 0x33C4,0x8775, 
0x33A1,0x877E, 0x337B,0x8780, 0x301D,0x8781, 0x301F,0x8782, 0x2116,0x8783, 0x33CD,0x8784, 0x2121,0x8785, 0x32A4,0x8786, 0x32A5,0x8787, 0x32A6,0x8788, 0x32A7,0x8789, 0x32A8,0x878A, 0x3231,0x878B, 0x3232,0x878C, 0x3239,0x878D, 0x337E,0x878E, 0x337D,0x878F, 0x337C,0x8790, 0x2252,0x8791, 0x2261,0x8792, 0x222B,0x8793, 0x222E,0x8794, 0x2211,0x8795, 0x221A,0x8796, 0x22A5,0x8797, 0x2220,0x8798, 0x221F,0x8799, 0x22BF,0x879A, 0x2235,0x879B, 0x2229,0x879C, 0x222A,0x889F, 0x4E9C,0x88A0, 0x5516,0x88A1, 0x5A03,0x88A2, 0x963F,0x88A3, 0x54C0,0x88A4, 0x611B,0x88A5, 0x6328,0x88A6, 0x59F6,0x88A7, 0x9022,0x88A8, 0x8475,0x88A9, 0x831C,0x88AA, 0x7A50,0x88AB, 0x60AA,0x88AC, 0x63E1,0x88AD, 0x6E25,0x88AE, 0x65ED,0x88AF, 0x8466,0x88B0, 0x82A6,0x88B1, 0x9BF5,0x88B2, 0x6893,0x88B3, 0x5727,0x88B4, 0x65A1,0x88B5, 0x6271,0x88B6, 0x5B9B,0x88B7, 0x59D0,0x88B8, 0x867B,0x88B9, 0x98F4,0x88BA, 0x7D62,0x88BB, 0x7DBE,0x88BC, 0x9B8E,0x88BD, 0x6216,0x88BE, 0x7C9F,0x88BF, 0x88B7,0x88C0, 0x5B89,0x88C1, 0x5EB5,0x88C2, 0x6309,0x88C3, 0x6697,0x88C4, 0x6848,0x88C5, 0x95C7,0x88C6, 0x978D,0x88C7, 0x674F,0x88C8, 0x4EE5,0x88C9, 0x4F0A,0x88CA, 0x4F4D,0x88CB, 0x4F9D,0x88CC, 0x5049,0x88CD, 0x56F2,0x88CE, 0x5937,0x88CF, 0x59D4,0x88D0, 0x5A01,0x88D1, 0x5C09,0x88D2, 0x60DF,0x88D3, 0x610F,0x88D4, 0x6170,0x88D5, 0x6613,0x88D6, 0x6905,0x88D7, 0x70BA,0x88D8, 0x754F,0x88D9, 0x7570,0x88DA, 0x79FB,0x88DB, 0x7DAD,0x88DC, 0x7DEF,0x88DD, 0x80C3,0x88DE, 0x840E,0x88DF, 0x8863,0x88E0, 0x8B02,0x88E1, 0x9055,0x88E2, 0x907A,0x88E3, 0x533B,0x88E4, 0x4E95,0x88E5, 0x4EA5,0x88E6, 0x57DF,0x88E7, 0x80B2,0x88E8, 0x90C1,0x88E9, 0x78EF,0x88EA, 0x4E00,0x88EB, 0x58F1,0x88EC, 0x6EA2,0x88ED, 0x9038,0x88EE, 0x7A32,0x88EF, 0x8328,0x88F0, 0x828B,0x88F1, 0x9C2F,0x88F2, 0x5141,0x88F3, 0x5370,0x88F4, 0x54BD,0x88F5, 0x54E1,0x88F6, 0x56E0,0x88F7, 0x59FB,0x88F8, 0x5F15,0x88F9, 0x98F2,0x88FA, 0x6DEB,0x88FB, 0x80E4,0x88FC, 0x852D,0x8940, 0x9662,0x8941, 0x9670,0x8942, 0x96A0,0x8943, 0x97FB,0x8944, 0x540B,0x8945, 0x53F3,0x8946, 0x5B87,0x8947, 0x70CF,0x8948, 0x7FBD,0x8949, 0x8FC2,0x894A, 0x96E8,0x894B, 0x536F,0x894C, 0x9D5C,0x894D, 0x7ABA,0x894E, 0x4E11,0x894F, 0x7893,0x8950, 0x81FC,0x8951, 0x6E26,0x8952, 0x5618,0x8953, 0x5504,0x8954, 0x6B1D,0x8955, 0x851A,0x8956, 0x9C3B,0x8957, 0x59E5,0x8958, 0x53A9,0x8959, 0x6D66,0x895A, 0x74DC,0x895B, 0x958F,0x895C, 0x5642,0x895D, 0x4E91,0x895E, 0x904B,0x895F, 0x96F2,0x8960, 0x834F,0x8961, 0x990C,0x8962, 0x53E1,0x8963, 0x55B6,0x8964, 0x5B30,0x8965, 0x5F71,0x8966, 0x6620,0x8967, 0x66F3,0x8968, 0x6804,0x8969, 0x6C38,0x896A, 0x6CF3,0x896B, 0x6D29,0x896C, 0x745B,0x896D, 0x76C8,0x896E, 0x7A4E,0x896F, 0x9834,0x8970, 0x82F1,0x8971, 0x885B,0x8972, 0x8A60,0x8973, 0x92ED,0x8974, 0x6DB2,0x8975, 0x75AB,0x8976, 0x76CA,0x8977, 0x99C5,0x8978, 0x60A6,0x8979, 0x8B01,0x897A, 0x8D8A,0x897B, 0x95B2,0x897C, 0x698E,0x897D, 0x53AD,0x897E, 0x5186,0x8980, 0x5712,0x8981, 0x5830,0x8982, 0x5944,0x8983, 0x5BB4,0x8984, 0x5EF6,0x8985, 0x6028,0x8986, 0x63A9,0x8987, 0x63F4,0x8988, 0x6CBF,0x8989, 0x6F14,0x898A, 0x708E,0x898B, 0x7114,0x898C, 0x7159,0x898D, 0x71D5,0x898E, 0x733F,0x898F, 0x7E01,0x8990, 0x8276,0x8991, 0x82D1,0x8992, 0x8597,0x8993, 0x9060,0x8994, 0x925B,0x8995, 0x9D1B,0x8996, 0x5869,0x8997, 0x65BC,0x8998, 0x6C5A,0x8999, 0x7525,0x899A, 0x51F9,0x899B, 0x592E,0x899C, 0x5965,0x899D, 0x5F80,0x899E, 0x5FDC,0x899F, 0x62BC,0x89A0, 0x65FA,0x89A1, 0x6A2A,0x89A2, 0x6B27,0x89A3, 0x6BB4,0x89A4, 0x738B,0x89A5, 0x7FC1,0x89A6, 0x8956,0x89A7, 0x9D2C,0x89A8, 0x9D0E,0x89A9, 0x9EC4,0x89AA, 0x5CA1,0x89AB, 0x6C96,0x89AC, 0x837B,0x89AD, 0x5104,0x89AE, 0x5C4B,0x89AF, 0x61B6,0x89B0, 0x81C6,0x89B1, 
0x6876,0x89B2, 0x7261,0x89B3, 0x4E59,0x89B4, 0x4FFA,0x89B5, 0x5378,0x89B6, 0x6069,0x89B7, 0x6E29,0x89B8, 0x7A4F,0x89B9, 0x97F3,0x89BA, 0x4E0B,0x89BB, 0x5316,0x89BC, 0x4EEE,0x89BD, 0x4F55,0x89BE, 0x4F3D,0x89BF, 0x4FA1,0x89C0, 0x4F73,0x89C1, 0x52A0,0x89C2, 0x53EF,0x89C3, 0x5609,0x89C4, 0x590F,0x89C5, 0x5AC1,0x89C6, 0x5BB6,0x89C7, 0x5BE1,0x89C8, 0x79D1,0x89C9, 0x6687,0x89CA, 0x679C,0x89CB, 0x67B6,0x89CC, 0x6B4C,0x89CD, 0x6CB3,0x89CE, 0x706B,0x89CF, 0x73C2,0x89D0, 0x798D,0x89D1, 0x79BE,0x89D2, 0x7A3C,0x89D3, 0x7B87,0x89D4, 0x82B1,0x89D5, 0x82DB,0x89D6, 0x8304,0x89D7, 0x8377,0x89D8, 0x83EF,0x89D9, 0x83D3,0x89DA, 0x8766,0x89DB, 0x8AB2,0x89DC, 0x5629,0x89DD, 0x8CA8,0x89DE, 0x8FE6,0x89DF, 0x904E,0x89E0, 0x971E,0x89E1, 0x868A,0x89E2, 0x4FC4,0x89E3, 0x5CE8,0x89E4, 0x6211,0x89E5, 0x7259,0x89E6, 0x753B,0x89E7, 0x81E5,0x89E8, 0x82BD,0x89E9, 0x86FE,0x89EA, 0x8CC0,0x89EB, 0x96C5,0x89EC, 0x9913,0x89ED, 0x99D5,0x89EE, 0x4ECB,0x89EF, 0x4F1A,0x89F0, 0x89E3,0x89F1, 0x56DE,0x89F2, 0x584A,0x89F3, 0x58CA,0x89F4, 0x5EFB,0x89F5, 0x5FEB,0x89F6, 0x602A,0x89F7, 0x6094,0x89F8, 0x6062,0x89F9, 0x61D0,0x89FA, 0x6212,0x89FB, 0x62D0,0x89FC, 0x6539,0x8A40, 0x9B41,0x8A41, 0x6666,0x8A42, 0x68B0,0x8A43, 0x6D77,0x8A44, 0x7070,0x8A45, 0x754C,0x8A46, 0x7686,0x8A47, 0x7D75,0x8A48, 0x82A5,0x8A49, 0x87F9,0x8A4A, 0x958B,0x8A4B, 0x968E,0x8A4C, 0x8C9D,0x8A4D, 0x51F1,0x8A4E, 0x52BE,0x8A4F, 0x5916,0x8A50, 0x54B3,0x8A51, 0x5BB3,0x8A52, 0x5D16,0x8A53, 0x6168,0x8A54, 0x6982,0x8A55, 0x6DAF,0x8A56, 0x788D,0x8A57, 0x84CB,0x8A58, 0x8857,0x8A59, 0x8A72,0x8A5A, 0x93A7,0x8A5B, 0x9AB8,0x8A5C, 0x6D6C,0x8A5D, 0x99A8,0x8A5E, 0x86D9,0x8A5F, 0x57A3,0x8A60, 0x67FF,0x8A61, 0x86CE,0x8A62, 0x920E,0x8A63, 0x5283,0x8A64, 0x5687,0x8A65, 0x5404,0x8A66, 0x5ED3,0x8A67, 0x62E1,0x8A68, 0x64B9,0x8A69, 0x683C,0x8A6A, 0x6838,0x8A6B, 0x6BBB,0x8A6C, 0x7372,0x8A6D, 0x78BA,0x8A6E, 0x7A6B,0x8A6F, 0x899A,0x8A70, 0x89D2,0x8A71, 0x8D6B,0x8A72, 0x8F03,0x8A73, 0x90ED,0x8A74, 0x95A3,0x8A75, 0x9694,0x8A76, 0x9769,0x8A77, 0x5B66,0x8A78, 0x5CB3,0x8A79, 0x697D,0x8A7A, 0x984D,0x8A7B, 0x984E,0x8A7C, 0x639B,0x8A7D, 0x7B20,0x8A7E, 0x6A2B,0x8A80, 0x6A7F,0x8A81, 0x68B6,0x8A82, 0x9C0D,0x8A83, 0x6F5F,0x8A84, 0x5272,0x8A85, 0x559D,0x8A86, 0x6070,0x8A87, 0x62EC,0x8A88, 0x6D3B,0x8A89, 0x6E07,0x8A8A, 0x6ED1,0x8A8B, 0x845B,0x8A8C, 0x8910,0x8A8D, 0x8F44,0x8A8E, 0x4E14,0x8A8F, 0x9C39,0x8A90, 0x53F6,0x8A91, 0x691B,0x8A92, 0x6A3A,0x8A93, 0x9784,0x8A94, 0x682A,0x8A95, 0x515C,0x8A96, 0x7AC3,0x8A97, 0x84B2,0x8A98, 0x91DC,0x8A99, 0x938C,0x8A9A, 0x565B,0x8A9B, 0x9D28,0x8A9C, 0x6822,0x8A9D, 0x8305,0x8A9E, 0x8431,0x8A9F, 0x7CA5,0x8AA0, 0x5208,0x8AA1, 0x82C5,0x8AA2, 0x74E6,0x8AA3, 0x4E7E,0x8AA4, 0x4F83,0x8AA5, 0x51A0,0x8AA6, 0x5BD2,0x8AA7, 0x520A,0x8AA8, 0x52D8,0x8AA9, 0x52E7,0x8AAA, 0x5DFB,0x8AAB, 0x559A,0x8AAC, 0x582A,0x8AAD, 0x59E6,0x8AAE, 0x5B8C,0x8AAF, 0x5B98,0x8AB0, 0x5BDB,0x8AB1, 0x5E72,0x8AB2, 0x5E79,0x8AB3, 0x60A3,0x8AB4, 0x611F,0x8AB5, 0x6163,0x8AB6, 0x61BE,0x8AB7, 0x63DB,0x8AB8, 0x6562,0x8AB9, 0x67D1,0x8ABA, 0x6853,0x8ABB, 0x68FA,0x8ABC, 0x6B3E,0x8ABD, 0x6B53,0x8ABE, 0x6C57,0x8ABF, 0x6F22,0x8AC0, 0x6F97,0x8AC1, 0x6F45,0x8AC2, 0x74B0,0x8AC3, 0x7518,0x8AC4, 0x76E3,0x8AC5, 0x770B,0x8AC6, 0x7AFF,0x8AC7, 0x7BA1,0x8AC8, 0x7C21,0x8AC9, 0x7DE9,0x8ACA, 0x7F36,0x8ACB, 0x7FF0,0x8ACC, 0x809D,0x8ACD, 0x8266,0x8ACE, 0x839E,0x8ACF, 0x89B3,0x8AD0, 0x8ACC,0x8AD1, 0x8CAB,0x8AD2, 0x9084,0x8AD3, 0x9451,0x8AD4, 0x9593,0x8AD5, 0x9591,0x8AD6, 0x95A2,0x8AD7, 0x9665,0x8AD8, 0x97D3,0x8AD9, 0x9928,0x8ADA, 0x8218,0x8ADB, 0x4E38,0x8ADC, 0x542B,0x8ADD, 0x5CB8,0x8ADE, 0x5DCC,0x8ADF, 0x73A9,0x8AE0, 0x764C,0x8AE1, 0x773C,0x8AE2, 
0x5CA9,0x8AE3, 0x7FEB,0x8AE4, 0x8D0B,0x8AE5, 0x96C1,0x8AE6, 0x9811,0x8AE7, 0x9854,0x8AE8, 0x9858,0x8AE9, 0x4F01,0x8AEA, 0x4F0E,0x8AEB, 0x5371,0x8AEC, 0x559C,0x8AED, 0x5668,0x8AEE, 0x57FA,0x8AEF, 0x5947,0x8AF0, 0x5B09,0x8AF1, 0x5BC4,0x8AF2, 0x5C90,0x8AF3, 0x5E0C,0x8AF4, 0x5E7E,0x8AF5, 0x5FCC,0x8AF6, 0x63EE,0x8AF7, 0x673A,0x8AF8, 0x65D7,0x8AF9, 0x65E2,0x8AFA, 0x671F,0x8AFB, 0x68CB,0x8AFC, 0x68C4,0x8B40, 0x6A5F,0x8B41, 0x5E30,0x8B42, 0x6BC5,0x8B43, 0x6C17,0x8B44, 0x6C7D,0x8B45, 0x757F,0x8B46, 0x7948,0x8B47, 0x5B63,0x8B48, 0x7A00,0x8B49, 0x7D00,0x8B4A, 0x5FBD,0x8B4B, 0x898F,0x8B4C, 0x8A18,0x8B4D, 0x8CB4,0x8B4E, 0x8D77,0x8B4F, 0x8ECC,0x8B50, 0x8F1D,0x8B51, 0x98E2,0x8B52, 0x9A0E,0x8B53, 0x9B3C,0x8B54, 0x4E80,0x8B55, 0x507D,0x8B56, 0x5100,0x8B57, 0x5993,0x8B58, 0x5B9C,0x8B59, 0x622F,0x8B5A, 0x6280,0x8B5B, 0x64EC,0x8B5C, 0x6B3A,0x8B5D, 0x72A0,0x8B5E, 0x7591,0x8B5F, 0x7947,0x8B60, 0x7FA9,0x8B61, 0x87FB,0x8B62, 0x8ABC,0x8B63, 0x8B70,0x8B64, 0x63AC,0x8B65, 0x83CA,0x8B66, 0x97A0,0x8B67, 0x5409,0x8B68, 0x5403,0x8B69, 0x55AB,0x8B6A, 0x6854,0x8B6B, 0x6A58,0x8B6C, 0x8A70,0x8B6D, 0x7827,0x8B6E, 0x6775,0x8B6F, 0x9ECD,0x8B70, 0x5374,0x8B71, 0x5BA2,0x8B72, 0x811A,0x8B73, 0x8650,0x8B74, 0x9006,0x8B75, 0x4E18,0x8B76, 0x4E45,0x8B77, 0x4EC7,0x8B78, 0x4F11,0x8B79, 0x53CA,0x8B7A, 0x5438,0x8B7B, 0x5BAE,0x8B7C, 0x5F13,0x8B7D, 0x6025,0x8B7E, 0x6551,0x8B80, 0x673D,0x8B81, 0x6C42,0x8B82, 0x6C72,0x8B83, 0x6CE3,0x8B84, 0x7078,0x8B85, 0x7403,0x8B86, 0x7A76,0x8B87, 0x7AAE,0x8B88, 0x7B08,0x8B89, 0x7D1A,0x8B8A, 0x7CFE,0x8B8B, 0x7D66,0x8B8C, 0x65E7,0x8B8D, 0x725B,0x8B8E, 0x53BB,0x8B8F, 0x5C45,0x8B90, 0x5DE8,0x8B91, 0x62D2,0x8B92, 0x62E0,0x8B93, 0x6319,0x8B94, 0x6E20,0x8B95, 0x865A,0x8B96, 0x8A31,0x8B97, 0x8DDD,0x8B98, 0x92F8,0x8B99, 0x6F01,0x8B9A, 0x79A6,0x8B9B, 0x9B5A,0x8B9C, 0x4EA8,0x8B9D, 0x4EAB,0x8B9E, 0x4EAC,0x8B9F, 0x4F9B,0x8BA0, 0x4FA0,0x8BA1, 0x50D1,0x8BA2, 0x5147,0x8BA3, 0x7AF6,0x8BA4, 0x5171,0x8BA5, 0x51F6,0x8BA6, 0x5354,0x8BA7, 0x5321,0x8BA8, 0x537F,0x8BA9, 0x53EB,0x8BAA, 0x55AC,0x8BAB, 0x5883,0x8BAC, 0x5CE1,0x8BAD, 0x5F37,0x8BAE, 0x5F4A,0x8BAF, 0x602F,0x8BB0, 0x6050,0x8BB1, 0x606D,0x8BB2, 0x631F,0x8BB3, 0x6559,0x8BB4, 0x6A4B,0x8BB5, 0x6CC1,0x8BB6, 0x72C2,0x8BB7, 0x72ED,0x8BB8, 0x77EF,0x8BB9, 0x80F8,0x8BBA, 0x8105,0x8BBB, 0x8208,0x8BBC, 0x854E,0x8BBD, 0x90F7,0x8BBE, 0x93E1,0x8BBF, 0x97FF,0x8BC0, 0x9957,0x8BC1, 0x9A5A,0x8BC2, 0x4EF0,0x8BC3, 0x51DD,0x8BC4, 0x5C2D,0x8BC5, 0x6681,0x8BC6, 0x696D,0x8BC7, 0x5C40,0x8BC8, 0x66F2,0x8BC9, 0x6975,0x8BCA, 0x7389,0x8BCB, 0x6850,0x8BCC, 0x7C81,0x8BCD, 0x50C5,0x8BCE, 0x52E4,0x8BCF, 0x5747,0x8BD0, 0x5DFE,0x8BD1, 0x9326,0x8BD2, 0x65A4,0x8BD3, 0x6B23,0x8BD4, 0x6B3D,0x8BD5, 0x7434,0x8BD6, 0x7981,0x8BD7, 0x79BD,0x8BD8, 0x7B4B,0x8BD9, 0x7DCA,0x8BDA, 0x82B9,0x8BDB, 0x83CC,0x8BDC, 0x887F,0x8BDD, 0x895F,0x8BDE, 0x8B39,0x8BDF, 0x8FD1,0x8BE0, 0x91D1,0x8BE1, 0x541F,0x8BE2, 0x9280,0x8BE3, 0x4E5D,0x8BE4, 0x5036,0x8BE5, 0x53E5,0x8BE6, 0x533A,0x8BE7, 0x72D7,0x8BE8, 0x7396,0x8BE9, 0x77E9,0x8BEA, 0x82E6,0x8BEB, 0x8EAF,0x8BEC, 0x99C6,0x8BED, 0x99C8,0x8BEE, 0x99D2,0x8BEF, 0x5177,0x8BF0, 0x611A,0x8BF1, 0x865E,0x8BF2, 0x55B0,0x8BF3, 0x7A7A,0x8BF4, 0x5076,0x8BF5, 0x5BD3,0x8BF6, 0x9047,0x8BF7, 0x9685,0x8BF8, 0x4E32,0x8BF9, 0x6ADB,0x8BFA, 0x91E7,0x8BFB, 0x5C51,0x8BFC, 0x5C48,0x8C40, 0x6398,0x8C41, 0x7A9F,0x8C42, 0x6C93,0x8C43, 0x9774,0x8C44, 0x8F61,0x8C45, 0x7AAA,0x8C46, 0x718A,0x8C47, 0x9688,0x8C48, 0x7C82,0x8C49, 0x6817,0x8C4A, 0x7E70,0x8C4B, 0x6851,0x8C4C, 0x936C,0x8C4D, 0x52F2,0x8C4E, 0x541B,0x8C4F, 0x85AB,0x8C50, 0x8A13,0x8C51, 0x7FA4,0x8C52, 0x8ECD,0x8C53, 0x90E1,0x8C54, 0x5366,0x8C55, 0x8888,0x8C56, 
0x7941,0x8C57, 0x4FC2,0x8C58, 0x50BE,0x8C59, 0x5211,0x8C5A, 0x5144,0x8C5B, 0x5553,0x8C5C, 0x572D,0x8C5D, 0x73EA,0x8C5E, 0x578B,0x8C5F, 0x5951,0x8C60, 0x5F62,0x8C61, 0x5F84,0x8C62, 0x6075,0x8C63, 0x6176,0x8C64, 0x6167,0x8C65, 0x61A9,0x8C66, 0x63B2,0x8C67, 0x643A,0x8C68, 0x656C,0x8C69, 0x666F,0x8C6A, 0x6842,0x8C6B, 0x6E13,0x8C6C, 0x7566,0x8C6D, 0x7A3D,0x8C6E, 0x7CFB,0x8C6F, 0x7D4C,0x8C70, 0x7D99,0x8C71, 0x7E4B,0x8C72, 0x7F6B,0x8C73, 0x830E,0x8C74, 0x834A,0x8C75, 0x86CD,0x8C76, 0x8A08,0x8C77, 0x8A63,0x8C78, 0x8B66,0x8C79, 0x8EFD,0x8C7A, 0x981A,0x8C7B, 0x9D8F,0x8C7C, 0x82B8,0x8C7D, 0x8FCE,0x8C7E, 0x9BE8,0x8C80, 0x5287,0x8C81, 0x621F,0x8C82, 0x6483,0x8C83, 0x6FC0,0x8C84, 0x9699,0x8C85, 0x6841,0x8C86, 0x5091,0x8C87, 0x6B20,0x8C88, 0x6C7A,0x8C89, 0x6F54,0x8C8A, 0x7A74,0x8C8B, 0x7D50,0x8C8C, 0x8840,0x8C8D, 0x8A23,0x8C8E, 0x6708,0x8C8F, 0x4EF6,0x8C90, 0x5039,0x8C91, 0x5026,0x8C92, 0x5065,0x8C93, 0x517C,0x8C94, 0x5238,0x8C95, 0x5263,0x8C96, 0x55A7,0x8C97, 0x570F,0x8C98, 0x5805,0x8C99, 0x5ACC,0x8C9A, 0x5EFA,0x8C9B, 0x61B2,0x8C9C, 0x61F8,0x8C9D, 0x62F3,0x8C9E, 0x6372,0x8C9F, 0x691C,0x8CA0, 0x6A29,0x8CA1, 0x727D,0x8CA2, 0x72AC,0x8CA3, 0x732E,0x8CA4, 0x7814,0x8CA5, 0x786F,0x8CA6, 0x7D79,0x8CA7, 0x770C,0x8CA8, 0x80A9,0x8CA9, 0x898B,0x8CAA, 0x8B19,0x8CAB, 0x8CE2,0x8CAC, 0x8ED2,0x8CAD, 0x9063,0x8CAE, 0x9375,0x8CAF, 0x967A,0x8CB0, 0x9855,0x8CB1, 0x9A13,0x8CB2, 0x9E78,0x8CB3, 0x5143,0x8CB4, 0x539F,0x8CB5, 0x53B3,0x8CB6, 0x5E7B,0x8CB7, 0x5F26,0x8CB8, 0x6E1B,0x8CB9, 0x6E90,0x8CBA, 0x7384,0x8CBB, 0x73FE,0x8CBC, 0x7D43,0x8CBD, 0x8237,0x8CBE, 0x8A00,0x8CBF, 0x8AFA,0x8CC0, 0x9650,0x8CC1, 0x4E4E,0x8CC2, 0x500B,0x8CC3, 0x53E4,0x8CC4, 0x547C,0x8CC5, 0x56FA,0x8CC6, 0x59D1,0x8CC7, 0x5B64,0x8CC8, 0x5DF1,0x8CC9, 0x5EAB,0x8CCA, 0x5F27,0x8CCB, 0x6238,0x8CCC, 0x6545,0x8CCD, 0x67AF,0x8CCE, 0x6E56,0x8CCF, 0x72D0,0x8CD0, 0x7CCA,0x8CD1, 0x88B4,0x8CD2, 0x80A1,0x8CD3, 0x80E1,0x8CD4, 0x83F0,0x8CD5, 0x864E,0x8CD6, 0x8A87,0x8CD7, 0x8DE8,0x8CD8, 0x9237,0x8CD9, 0x96C7,0x8CDA, 0x9867,0x8CDB, 0x9F13,0x8CDC, 0x4E94,0x8CDD, 0x4E92,0x8CDE, 0x4F0D,0x8CDF, 0x5348,0x8CE0, 0x5449,0x8CE1, 0x543E,0x8CE2, 0x5A2F,0x8CE3, 0x5F8C,0x8CE4, 0x5FA1,0x8CE5, 0x609F,0x8CE6, 0x68A7,0x8CE7, 0x6A8E,0x8CE8, 0x745A,0x8CE9, 0x7881,0x8CEA, 0x8A9E,0x8CEB, 0x8AA4,0x8CEC, 0x8B77,0x8CED, 0x9190,0x8CEE, 0x4E5E,0x8CEF, 0x9BC9,0x8CF0, 0x4EA4,0x8CF1, 0x4F7C,0x8CF2, 0x4FAF,0x8CF3, 0x5019,0x8CF4, 0x5016,0x8CF5, 0x5149,0x8CF6, 0x516C,0x8CF7, 0x529F,0x8CF8, 0x52B9,0x8CF9, 0x52FE,0x8CFA, 0x539A,0x8CFB, 0x53E3,0x8CFC, 0x5411,0x8D40, 0x540E,0x8D41, 0x5589,0x8D42, 0x5751,0x8D43, 0x57A2,0x8D44, 0x597D,0x8D45, 0x5B54,0x8D46, 0x5B5D,0x8D47, 0x5B8F,0x8D48, 0x5DE5,0x8D49, 0x5DE7,0x8D4A, 0x5DF7,0x8D4B, 0x5E78,0x8D4C, 0x5E83,0x8D4D, 0x5E9A,0x8D4E, 0x5EB7,0x8D4F, 0x5F18,0x8D50, 0x6052,0x8D51, 0x614C,0x8D52, 0x6297,0x8D53, 0x62D8,0x8D54, 0x63A7,0x8D55, 0x653B,0x8D56, 0x6602,0x8D57, 0x6643,0x8D58, 0x66F4,0x8D59, 0x676D,0x8D5A, 0x6821,0x8D5B, 0x6897,0x8D5C, 0x69CB,0x8D5D, 0x6C5F,0x8D5E, 0x6D2A,0x8D5F, 0x6D69,0x8D60, 0x6E2F,0x8D61, 0x6E9D,0x8D62, 0x7532,0x8D63, 0x7687,0x8D64, 0x786C,0x8D65, 0x7A3F,0x8D66, 0x7CE0,0x8D67, 0x7D05,0x8D68, 0x7D18,0x8D69, 0x7D5E,0x8D6A, 0x7DB1,0x8D6B, 0x8015,0x8D6C, 0x8003,0x8D6D, 0x80AF,0x8D6E, 0x80B1,0x8D6F, 0x8154,0x8D70, 0x818F,0x8D71, 0x822A,0x8D72, 0x8352,0x8D73, 0x884C,0x8D74, 0x8861,0x8D75, 0x8B1B,0x8D76, 0x8CA2,0x8D77, 0x8CFC,0x8D78, 0x90CA,0x8D79, 0x9175,0x8D7A, 0x9271,0x8D7B, 0x783F,0x8D7C, 0x92FC,0x8D7D, 0x95A4,0x8D7E, 0x964D,0x8D80, 0x9805,0x8D81, 0x9999,0x8D82, 0x9AD8,0x8D83, 0x9D3B,0x8D84, 0x525B,0x8D85, 0x52AB,0x8D86, 0x53F7,0x8D87, 0x5408,0x8D88, 
0x58D5,0x8D89, 0x62F7,0x8D8A, 0x6FE0,0x8D8B, 0x8C6A,0x8D8C, 0x8F5F,0x8D8D, 0x9EB9,0x8D8E, 0x514B,0x8D8F, 0x523B,0x8D90, 0x544A,0x8D91, 0x56FD,0x8D92, 0x7A40,0x8D93, 0x9177,0x8D94, 0x9D60,0x8D95, 0x9ED2,0x8D96, 0x7344,0x8D97, 0x6F09,0x8D98, 0x8170,0x8D99, 0x7511,0x8D9A, 0x5FFD,0x8D9B, 0x60DA,0x8D9C, 0x9AA8,0x8D9D, 0x72DB,0x8D9E, 0x8FBC,0x8D9F, 0x6B64,0x8DA0, 0x9803,0x8DA1, 0x4ECA,0x8DA2, 0x56F0,0x8DA3, 0x5764,0x8DA4, 0x58BE,0x8DA5, 0x5A5A,0x8DA6, 0x6068,0x8DA7, 0x61C7,0x8DA8, 0x660F,0x8DA9, 0x6606,0x8DAA, 0x6839,0x8DAB, 0x68B1,0x8DAC, 0x6DF7,0x8DAD, 0x75D5,0x8DAE, 0x7D3A,0x8DAF, 0x826E,0x8DB0, 0x9B42,0x8DB1, 0x4E9B,0x8DB2, 0x4F50,0x8DB3, 0x53C9,0x8DB4, 0x5506,0x8DB5, 0x5D6F,0x8DB6, 0x5DE6,0x8DB7, 0x5DEE,0x8DB8, 0x67FB,0x8DB9, 0x6C99,0x8DBA, 0x7473,0x8DBB, 0x7802,0x8DBC, 0x8A50,0x8DBD, 0x9396,0x8DBE, 0x88DF,0x8DBF, 0x5750,0x8DC0, 0x5EA7,0x8DC1, 0x632B,0x8DC2, 0x50B5,0x8DC3, 0x50AC,0x8DC4, 0x518D,0x8DC5, 0x6700,0x8DC6, 0x54C9,0x8DC7, 0x585E,0x8DC8, 0x59BB,0x8DC9, 0x5BB0,0x8DCA, 0x5F69,0x8DCB, 0x624D,0x8DCC, 0x63A1,0x8DCD, 0x683D,0x8DCE, 0x6B73,0x8DCF, 0x6E08,0x8DD0, 0x707D,0x8DD1, 0x91C7,0x8DD2, 0x7280,0x8DD3, 0x7815,0x8DD4, 0x7826,0x8DD5, 0x796D,0x8DD6, 0x658E,0x8DD7, 0x7D30,0x8DD8, 0x83DC,0x8DD9, 0x88C1,0x8DDA, 0x8F09,0x8DDB, 0x969B,0x8DDC, 0x5264,0x8DDD, 0x5728,0x8DDE, 0x6750,0x8DDF, 0x7F6A,0x8DE0, 0x8CA1,0x8DE1, 0x51B4,0x8DE2, 0x5742,0x8DE3, 0x962A,0x8DE4, 0x583A,0x8DE5, 0x698A,0x8DE6, 0x80B4,0x8DE7, 0x54B2,0x8DE8, 0x5D0E,0x8DE9, 0x57FC,0x8DEA, 0x7895,0x8DEB, 0x9DFA,0x8DEC, 0x4F5C,0x8DED, 0x524A,0x8DEE, 0x548B,0x8DEF, 0x643E,0x8DF0, 0x6628,0x8DF1, 0x6714,0x8DF2, 0x67F5,0x8DF3, 0x7A84,0x8DF4, 0x7B56,0x8DF5, 0x7D22,0x8DF6, 0x932F,0x8DF7, 0x685C,0x8DF8, 0x9BAD,0x8DF9, 0x7B39,0x8DFA, 0x5319,0x8DFB, 0x518A,0x8DFC, 0x5237,0x8E40, 0x5BDF,0x8E41, 0x62F6,0x8E42, 0x64AE,0x8E43, 0x64E6,0x8E44, 0x672D,0x8E45, 0x6BBA,0x8E46, 0x85A9,0x8E47, 0x96D1,0x8E48, 0x7690,0x8E49, 0x9BD6,0x8E4A, 0x634C,0x8E4B, 0x9306,0x8E4C, 0x9BAB,0x8E4D, 0x76BF,0x8E4E, 0x6652,0x8E4F, 0x4E09,0x8E50, 0x5098,0x8E51, 0x53C2,0x8E52, 0x5C71,0x8E53, 0x60E8,0x8E54, 0x6492,0x8E55, 0x6563,0x8E56, 0x685F,0x8E57, 0x71E6,0x8E58, 0x73CA,0x8E59, 0x7523,0x8E5A, 0x7B97,0x8E5B, 0x7E82,0x8E5C, 0x8695,0x8E5D, 0x8B83,0x8E5E, 0x8CDB,0x8E5F, 0x9178,0x8E60, 0x9910,0x8E61, 0x65AC,0x8E62, 0x66AB,0x8E63, 0x6B8B,0x8E64, 0x4ED5,0x8E65, 0x4ED4,0x8E66, 0x4F3A,0x8E67, 0x4F7F,0x8E68, 0x523A,0x8E69, 0x53F8,0x8E6A, 0x53F2,0x8E6B, 0x55E3,0x8E6C, 0x56DB,0x8E6D, 0x58EB,0x8E6E, 0x59CB,0x8E6F, 0x59C9,0x8E70, 0x59FF,0x8E71, 0x5B50,0x8E72, 0x5C4D,0x8E73, 0x5E02,0x8E74, 0x5E2B,0x8E75, 0x5FD7,0x8E76, 0x601D,0x8E77, 0x6307,0x8E78, 0x652F,0x8E79, 0x5B5C,0x8E7A, 0x65AF,0x8E7B, 0x65BD,0x8E7C, 0x65E8,0x8E7D, 0x679D,0x8E7E, 0x6B62,0x8E80, 0x6B7B,0x8E81, 0x6C0F,0x8E82, 0x7345,0x8E83, 0x7949,0x8E84, 0x79C1,0x8E85, 0x7CF8,0x8E86, 0x7D19,0x8E87, 0x7D2B,0x8E88, 0x80A2,0x8E89, 0x8102,0x8E8A, 0x81F3,0x8E8B, 0x8996,0x8E8C, 0x8A5E,0x8E8D, 0x8A69,0x8E8E, 0x8A66,0x8E8F, 0x8A8C,0x8E90, 0x8AEE,0x8E91, 0x8CC7,0x8E92, 0x8CDC,0x8E93, 0x96CC,0x8E94, 0x98FC,0x8E95, 0x6B6F,0x8E96, 0x4E8B,0x8E97, 0x4F3C,0x8E98, 0x4F8D,0x8E99, 0x5150,0x8E9A, 0x5B57,0x8E9B, 0x5BFA,0x8E9C, 0x6148,0x8E9D, 0x6301,0x8E9E, 0x6642,0x8E9F, 0x6B21,0x8EA0, 0x6ECB,0x8EA1, 0x6CBB,0x8EA2, 0x723E,0x8EA3, 0x74BD,0x8EA4, 0x75D4,0x8EA5, 0x78C1,0x8EA6, 0x793A,0x8EA7, 0x800C,0x8EA8, 0x8033,0x8EA9, 0x81EA,0x8EAA, 0x8494,0x8EAB, 0x8F9E,0x8EAC, 0x6C50,0x8EAD, 0x9E7F,0x8EAE, 0x5F0F,0x8EAF, 0x8B58,0x8EB0, 0x9D2B,0x8EB1, 0x7AFA,0x8EB2, 0x8EF8,0x8EB3, 0x5B8D,0x8EB4, 0x96EB,0x8EB5, 0x4E03,0x8EB6, 0x53F1,0x8EB7, 0x57F7,0x8EB8, 0x5931,0x8EB9, 
0x5AC9,0x8EBA, 0x5BA4,0x8EBB, 0x6089,0x8EBC, 0x6E7F,0x8EBD, 0x6F06,0x8EBE, 0x75BE,0x8EBF, 0x8CEA,0x8EC0, 0x5B9F,0x8EC1, 0x8500,0x8EC2, 0x7BE0,0x8EC3, 0x5072,0x8EC4, 0x67F4,0x8EC5, 0x829D,0x8EC6, 0x5C61,0x8EC7, 0x854A,0x8EC8, 0x7E1E,0x8EC9, 0x820E,0x8ECA, 0x5199,0x8ECB, 0x5C04,0x8ECC, 0x6368,0x8ECD, 0x8D66,0x8ECE, 0x659C,0x8ECF, 0x716E,0x8ED0, 0x793E,0x8ED1, 0x7D17,0x8ED2, 0x8005,0x8ED3, 0x8B1D,0x8ED4, 0x8ECA,0x8ED5, 0x906E,0x8ED6, 0x86C7,0x8ED7, 0x90AA,0x8ED8, 0x501F,0x8ED9, 0x52FA,0x8EDA, 0x5C3A,0x8EDB, 0x6753,0x8EDC, 0x707C,0x8EDD, 0x7235,0x8EDE, 0x914C,0x8EDF, 0x91C8,0x8EE0, 0x932B,0x8EE1, 0x82E5,0x8EE2, 0x5BC2,0x8EE3, 0x5F31,0x8EE4, 0x60F9,0x8EE5, 0x4E3B,0x8EE6, 0x53D6,0x8EE7, 0x5B88,0x8EE8, 0x624B,0x8EE9, 0x6731,0x8EEA, 0x6B8A,0x8EEB, 0x72E9,0x8EEC, 0x73E0,0x8EED, 0x7A2E,0x8EEE, 0x816B,0x8EEF, 0x8DA3,0x8EF0, 0x9152,0x8EF1, 0x9996,0x8EF2, 0x5112,0x8EF3, 0x53D7,0x8EF4, 0x546A,0x8EF5, 0x5BFF,0x8EF6, 0x6388,0x8EF7, 0x6A39,0x8EF8, 0x7DAC,0x8EF9, 0x9700,0x8EFA, 0x56DA,0x8EFB, 0x53CE,0x8EFC, 0x5468,0x8F40, 0x5B97,0x8F41, 0x5C31,0x8F42, 0x5DDE,0x8F43, 0x4FEE,0x8F44, 0x6101,0x8F45, 0x62FE,0x8F46, 0x6D32,0x8F47, 0x79C0,0x8F48, 0x79CB,0x8F49, 0x7D42,0x8F4A, 0x7E4D,0x8F4B, 0x7FD2,0x8F4C, 0x81ED,0x8F4D, 0x821F,0x8F4E, 0x8490,0x8F4F, 0x8846,0x8F50, 0x8972,0x8F51, 0x8B90,0x8F52, 0x8E74,0x8F53, 0x8F2F,0x8F54, 0x9031,0x8F55, 0x914B,0x8F56, 0x916C,0x8F57, 0x96C6,0x8F58, 0x919C,0x8F59, 0x4EC0,0x8F5A, 0x4F4F,0x8F5B, 0x5145,0x8F5C, 0x5341,0x8F5D, 0x5F93,0x8F5E, 0x620E,0x8F5F, 0x67D4,0x8F60, 0x6C41,0x8F61, 0x6E0B,0x8F62, 0x7363,0x8F63, 0x7E26,0x8F64, 0x91CD,0x8F65, 0x9283,0x8F66, 0x53D4,0x8F67, 0x5919,0x8F68, 0x5BBF,0x8F69, 0x6DD1,0x8F6A, 0x795D,0x8F6B, 0x7E2E,0x8F6C, 0x7C9B,0x8F6D, 0x587E,0x8F6E, 0x719F,0x8F6F, 0x51FA,0x8F70, 0x8853,0x8F71, 0x8FF0,0x8F72, 0x4FCA,0x8F73, 0x5CFB,0x8F74, 0x6625,0x8F75, 0x77AC,0x8F76, 0x7AE3,0x8F77, 0x821C,0x8F78, 0x99FF,0x8F79, 0x51C6,0x8F7A, 0x5FAA,0x8F7B, 0x65EC,0x8F7C, 0x696F,0x8F7D, 0x6B89,0x8F7E, 0x6DF3,0x8F80, 0x6E96,0x8F81, 0x6F64,0x8F82, 0x76FE,0x8F83, 0x7D14,0x8F84, 0x5DE1,0x8F85, 0x9075,0x8F86, 0x9187,0x8F87, 0x9806,0x8F88, 0x51E6,0x8F89, 0x521D,0x8F8A, 0x6240,0x8F8B, 0x6691,0x8F8C, 0x66D9,0x8F8D, 0x6E1A,0x8F8E, 0x5EB6,0x8F8F, 0x7DD2,0x8F90, 0x7F72,0x8F91, 0x66F8,0x8F92, 0x85AF,0x8F93, 0x85F7,0x8F94, 0x8AF8,0x8F95, 0x52A9,0x8F96, 0x53D9,0x8F97, 0x5973,0x8F98, 0x5E8F,0x8F99, 0x5F90,0x8F9A, 0x6055,0x8F9B, 0x92E4,0x8F9C, 0x9664,0x8F9D, 0x50B7,0x8F9E, 0x511F,0x8F9F, 0x52DD,0x8FA0, 0x5320,0x8FA1, 0x5347,0x8FA2, 0x53EC,0x8FA3, 0x54E8,0x8FA4, 0x5546,0x8FA5, 0x5531,0x8FA6, 0x5617,0x8FA7, 0x5968,0x8FA8, 0x59BE,0x8FA9, 0x5A3C,0x8FAA, 0x5BB5,0x8FAB, 0x5C06,0x8FAC, 0x5C0F,0x8FAD, 0x5C11,0x8FAE, 0x5C1A,0x8FAF, 0x5E84,0x8FB0, 0x5E8A,0x8FB1, 0x5EE0,0x8FB2, 0x5F70,0x8FB3, 0x627F,0x8FB4, 0x6284,0x8FB5, 0x62DB,0x8FB6, 0x638C,0x8FB7, 0x6377,0x8FB8, 0x6607,0x8FB9, 0x660C,0x8FBA, 0x662D,0x8FBB, 0x6676,0x8FBC, 0x677E,0x8FBD, 0x68A2,0x8FBE, 0x6A1F,0x8FBF, 0x6A35,0x8FC0, 0x6CBC,0x8FC1, 0x6D88,0x8FC2, 0x6E09,0x8FC3, 0x6E58,0x8FC4, 0x713C,0x8FC5, 0x7126,0x8FC6, 0x7167,0x8FC7, 0x75C7,0x8FC8, 0x7701,0x8FC9, 0x785D,0x8FCA, 0x7901,0x8FCB, 0x7965,0x8FCC, 0x79F0,0x8FCD, 0x7AE0,0x8FCE, 0x7B11,0x8FCF, 0x7CA7,0x8FD0, 0x7D39,0x8FD1, 0x8096,0x8FD2, 0x83D6,0x8FD3, 0x848B,0x8FD4, 0x8549,0x8FD5, 0x885D,0x8FD6, 0x88F3,0x8FD7, 0x8A1F,0x8FD8, 0x8A3C,0x8FD9, 0x8A54,0x8FDA, 0x8A73,0x8FDB, 0x8C61,0x8FDC, 0x8CDE,0x8FDD, 0x91A4,0x8FDE, 0x9266,0x8FDF, 0x937E,0x8FE0, 0x9418,0x8FE1, 0x969C,0x8FE2, 0x9798,0x8FE3, 0x4E0A,0x8FE4, 0x4E08,0x8FE5, 0x4E1E,0x8FE6, 0x4E57,0x8FE7, 0x5197,0x8FE8, 0x5270,0x8FE9, 0x57CE,0x8FEA, 
0x5834,0x8FEB, 0x58CC,0x8FEC, 0x5B22,0x8FED, 0x5E38,0x8FEE, 0x60C5,0x8FEF, 0x64FE,0x8FF0, 0x6761,0x8FF1, 0x6756,0x8FF2, 0x6D44,0x8FF3, 0x72B6,0x8FF4, 0x7573,0x8FF5, 0x7A63,0x8FF6, 0x84B8,0x8FF7, 0x8B72,0x8FF8, 0x91B8,0x8FF9, 0x9320,0x8FFA, 0x5631,0x8FFB, 0x57F4,0x8FFC, 0x98FE,0x9040, 0x62ED,0x9041, 0x690D,0x9042, 0x6B96,0x9043, 0x71ED,0x9044, 0x7E54,0x9045, 0x8077,0x9046, 0x8272,0x9047, 0x89E6,0x9048, 0x98DF,0x9049, 0x8755,0x904A, 0x8FB1,0x904B, 0x5C3B,0x904C, 0x4F38,0x904D, 0x4FE1,0x904E, 0x4FB5,0x904F, 0x5507,0x9050, 0x5A20,0x9051, 0x5BDD,0x9052, 0x5BE9,0x9053, 0x5FC3,0x9054, 0x614E,0x9055, 0x632F,0x9056, 0x65B0,0x9057, 0x664B,0x9058, 0x68EE,0x9059, 0x699B,0x905A, 0x6D78,0x905B, 0x6DF1,0x905C, 0x7533,0x905D, 0x75B9,0x905E, 0x771F,0x905F, 0x795E,0x9060, 0x79E6,0x9061, 0x7D33,0x9062, 0x81E3,0x9063, 0x82AF,0x9064, 0x85AA,0x9065, 0x89AA,0x9066, 0x8A3A,0x9067, 0x8EAB,0x9068, 0x8F9B,0x9069, 0x9032,0x906A, 0x91DD,0x906B, 0x9707,0x906C, 0x4EBA,0x906D, 0x4EC1,0x906E, 0x5203,0x906F, 0x5875,0x9070, 0x58EC,0x9071, 0x5C0B,0x9072, 0x751A,0x9073, 0x5C3D,0x9074, 0x814E,0x9075, 0x8A0A,0x9076, 0x8FC5,0x9077, 0x9663,0x9078, 0x976D,0x9079, 0x7B25,0x907A, 0x8ACF,0x907B, 0x9808,0x907C, 0x9162,0x907D, 0x56F3,0x907E, 0x53A8,0x9080, 0x9017,0x9081, 0x5439,0x9082, 0x5782,0x9083, 0x5E25,0x9084, 0x63A8,0x9085, 0x6C34,0x9086, 0x708A,0x9087, 0x7761,0x9088, 0x7C8B,0x9089, 0x7FE0,0x908A, 0x8870,0x908B, 0x9042,0x908C, 0x9154,0x908D, 0x9310,0x908E, 0x9318,0x908F, 0x968F,0x9090, 0x745E,0x9091, 0x9AC4,0x9092, 0x5D07,0x9093, 0x5D69,0x9094, 0x6570,0x9095, 0x67A2,0x9096, 0x8DA8,0x9097, 0x96DB,0x9098, 0x636E,0x9099, 0x6749,0x909A, 0x6919,0x909B, 0x83C5,0x909C, 0x9817,0x909D, 0x96C0,0x909E, 0x88FE,0x909F, 0x6F84,0x90A0, 0x647A,0x90A1, 0x5BF8,0x90A2, 0x4E16,0x90A3, 0x702C,0x90A4, 0x755D,0x90A5, 0x662F,0x90A6, 0x51C4,0x90A7, 0x5236,0x90A8, 0x52E2,0x90A9, 0x59D3,0x90AA, 0x5F81,0x90AB, 0x6027,0x90AC, 0x6210,0x90AD, 0x653F,0x90AE, 0x6574,0x90AF, 0x661F,0x90B0, 0x6674,0x90B1, 0x68F2,0x90B2, 0x6816,0x90B3, 0x6B63,0x90B4, 0x6E05,0x90B5, 0x7272,0x90B6, 0x751F,0x90B7, 0x76DB,0x90B8, 0x7CBE,0x90B9, 0x8056,0x90BA, 0x58F0,0x90BB, 0x88FD,0x90BC, 0x897F,0x90BD, 0x8AA0,0x90BE, 0x8A93,0x90BF, 0x8ACB,0x90C0, 0x901D,0x90C1, 0x9192,0x90C2, 0x9752,0x90C3, 0x9759,0x90C4, 0x6589,0x90C5, 0x7A0E,0x90C6, 0x8106,0x90C7, 0x96BB,0x90C8, 0x5E2D,0x90C9, 0x60DC,0x90CA, 0x621A,0x90CB, 0x65A5,0x90CC, 0x6614,0x90CD, 0x6790,0x90CE, 0x77F3,0x90CF, 0x7A4D,0x90D0, 0x7C4D,0x90D1, 0x7E3E,0x90D2, 0x810A,0x90D3, 0x8CAC,0x90D4, 0x8D64,0x90D5, 0x8DE1,0x90D6, 0x8E5F,0x90D7, 0x78A9,0x90D8, 0x5207,0x90D9, 0x62D9,0x90DA, 0x63A5,0x90DB, 0x6442,0x90DC, 0x6298,0x90DD, 0x8A2D,0x90DE, 0x7A83,0x90DF, 0x7BC0,0x90E0, 0x8AAC,0x90E1, 0x96EA,0x90E2, 0x7D76,0x90E3, 0x820C,0x90E4, 0x8749,0x90E5, 0x4ED9,0x90E6, 0x5148,0x90E7, 0x5343,0x90E8, 0x5360,0x90E9, 0x5BA3,0x90EA, 0x5C02,0x90EB, 0x5C16,0x90EC, 0x5DDD,0x90ED, 0x6226,0x90EE, 0x6247,0x90EF, 0x64B0,0x90F0, 0x6813,0x90F1, 0x6834,0x90F2, 0x6CC9,0x90F3, 0x6D45,0x90F4, 0x6D17,0x90F5, 0x67D3,0x90F6, 0x6F5C,0x90F7, 0x714E,0x90F8, 0x717D,0x90F9, 0x65CB,0x90FA, 0x7A7F,0x90FB, 0x7BAD,0x90FC, 0x7DDA,0x9140, 0x7E4A,0x9141, 0x7FA8,0x9142, 0x817A,0x9143, 0x821B,0x9144, 0x8239,0x9145, 0x85A6,0x9146, 0x8A6E,0x9147, 0x8CCE,0x9148, 0x8DF5,0x9149, 0x9078,0x914A, 0x9077,0x914B, 0x92AD,0x914C, 0x9291,0x914D, 0x9583,0x914E, 0x9BAE,0x914F, 0x524D,0x9150, 0x5584,0x9151, 0x6F38,0x9152, 0x7136,0x9153, 0x5168,0x9154, 0x7985,0x9155, 0x7E55,0x9156, 0x81B3,0x9157, 0x7CCE,0x9158, 0x564C,0x9159, 0x5851,0x915A, 0x5CA8,0x915B, 0x63AA,0x915C, 0x66FE,0x915D, 0x66FD,0x915E, 
0x695A,0x915F, 0x72D9,0x9160, 0x758F,0x9161, 0x758E,0x9162, 0x790E,0x9163, 0x7956,0x9164, 0x79DF,0x9165, 0x7C97,0x9166, 0x7D20,0x9167, 0x7D44,0x9168, 0x8607,0x9169, 0x8A34,0x916A, 0x963B,0x916B, 0x9061,0x916C, 0x9F20,0x916D, 0x50E7,0x916E, 0x5275,0x916F, 0x53CC,0x9170, 0x53E2,0x9171, 0x5009,0x9172, 0x55AA,0x9173, 0x58EE,0x9174, 0x594F,0x9175, 0x723D,0x9176, 0x5B8B,0x9177, 0x5C64,0x9178, 0x531D,0x9179, 0x60E3,0x917A, 0x60F3,0x917B, 0x635C,0x917C, 0x6383,0x917D, 0x633F,0x917E, 0x63BB,0x9180, 0x64CD,0x9181, 0x65E9,0x9182, 0x66F9,0x9183, 0x5DE3,0x9184, 0x69CD,0x9185, 0x69FD,0x9186, 0x6F15,0x9187, 0x71E5,0x9188, 0x4E89,0x9189, 0x75E9,0x918A, 0x76F8,0x918B, 0x7A93,0x918C, 0x7CDF,0x918D, 0x7DCF,0x918E, 0x7D9C,0x918F, 0x8061,0x9190, 0x8349,0x9191, 0x8358,0x9192, 0x846C,0x9193, 0x84BC,0x9194, 0x85FB,0x9195, 0x88C5,0x9196, 0x8D70,0x9197, 0x9001,0x9198, 0x906D,0x9199, 0x9397,0x919A, 0x971C,0x919B, 0x9A12,0x919C, 0x50CF,0x919D, 0x5897,0x919E, 0x618E,0x919F, 0x81D3,0x91A0, 0x8535,0x91A1, 0x8D08,0x91A2, 0x9020,0x91A3, 0x4FC3,0x91A4, 0x5074,0x91A5, 0x5247,0x91A6, 0x5373,0x91A7, 0x606F,0x91A8, 0x6349,0x91A9, 0x675F,0x91AA, 0x6E2C,0x91AB, 0x8DB3,0x91AC, 0x901F,0x91AD, 0x4FD7,0x91AE, 0x5C5E,0x91AF, 0x8CCA,0x91B0, 0x65CF,0x91B1, 0x7D9A,0x91B2, 0x5352,0x91B3, 0x8896,0x91B4, 0x5176,0x91B5, 0x63C3,0x91B6, 0x5B58,0x91B7, 0x5B6B,0x91B8, 0x5C0A,0x91B9, 0x640D,0x91BA, 0x6751,0x91BB, 0x905C,0x91BC, 0x4ED6,0x91BD, 0x591A,0x91BE, 0x592A,0x91BF, 0x6C70,0x91C0, 0x8A51,0x91C1, 0x553E,0x91C2, 0x5815,0x91C3, 0x59A5,0x91C4, 0x60F0,0x91C5, 0x6253,0x91C6, 0x67C1,0x91C7, 0x8235,0x91C8, 0x6955,0x91C9, 0x9640,0x91CA, 0x99C4,0x91CB, 0x9A28,0x91CC, 0x4F53,0x91CD, 0x5806,0x91CE, 0x5BFE,0x91CF, 0x8010,0x91D0, 0x5CB1,0x91D1, 0x5E2F,0x91D2, 0x5F85,0x91D3, 0x6020,0x91D4, 0x614B,0x91D5, 0x6234,0x91D6, 0x66FF,0x91D7, 0x6CF0,0x91D8, 0x6EDE,0x91D9, 0x80CE,0x91DA, 0x817F,0x91DB, 0x82D4,0x91DC, 0x888B,0x91DD, 0x8CB8,0x91DE, 0x9000,0x91DF, 0x902E,0x91E0, 0x968A,0x91E1, 0x9EDB,0x91E2, 0x9BDB,0x91E3, 0x4EE3,0x91E4, 0x53F0,0x91E5, 0x5927,0x91E6, 0x7B2C,0x91E7, 0x918D,0x91E8, 0x984C,0x91E9, 0x9DF9,0x91EA, 0x6EDD,0x91EB, 0x7027,0x91EC, 0x5353,0x91ED, 0x5544,0x91EE, 0x5B85,0x91EF, 0x6258,0x91F0, 0x629E,0x91F1, 0x62D3,0x91F2, 0x6CA2,0x91F3, 0x6FEF,0x91F4, 0x7422,0x91F5, 0x8A17,0x91F6, 0x9438,0x91F7, 0x6FC1,0x91F8, 0x8AFE,0x91F9, 0x8338,0x91FA, 0x51E7,0x91FB, 0x86F8,0x91FC, 0x53EA,0x9240, 0x53E9,0x9241, 0x4F46,0x9242, 0x9054,0x9243, 0x8FB0,0x9244, 0x596A,0x9245, 0x8131,0x9246, 0x5DFD,0x9247, 0x7AEA,0x9248, 0x8FBF,0x9249, 0x68DA,0x924A, 0x8C37,0x924B, 0x72F8,0x924C, 0x9C48,0x924D, 0x6A3D,0x924E, 0x8AB0,0x924F, 0x4E39,0x9250, 0x5358,0x9251, 0x5606,0x9252, 0x5766,0x9253, 0x62C5,0x9254, 0x63A2,0x9255, 0x65E6,0x9256, 0x6B4E,0x9257, 0x6DE1,0x9258, 0x6E5B,0x9259, 0x70AD,0x925A, 0x77ED,0x925B, 0x7AEF,0x925C, 0x7BAA,0x925D, 0x7DBB,0x925E, 0x803D,0x925F, 0x80C6,0x9260, 0x86CB,0x9261, 0x8A95,0x9262, 0x935B,0x9263, 0x56E3,0x9264, 0x58C7,0x9265, 0x5F3E,0x9266, 0x65AD,0x9267, 0x6696,0x9268, 0x6A80,0x9269, 0x6BB5,0x926A, 0x7537,0x926B, 0x8AC7,0x926C, 0x5024,0x926D, 0x77E5,0x926E, 0x5730,0x926F, 0x5F1B,0x9270, 0x6065,0x9271, 0x667A,0x9272, 0x6C60,0x9273, 0x75F4,0x9274, 0x7A1A,0x9275, 0x7F6E,0x9276, 0x81F4,0x9277, 0x8718,0x9278, 0x9045,0x9279, 0x99B3,0x927A, 0x7BC9,0x927B, 0x755C,0x927C, 0x7AF9,0x927D, 0x7B51,0x927E, 0x84C4,0x9280, 0x9010,0x9281, 0x79E9,0x9282, 0x7A92,0x9283, 0x8336,0x9284, 0x5AE1,0x9285, 0x7740,0x9286, 0x4E2D,0x9287, 0x4EF2,0x9288, 0x5B99,0x9289, 0x5FE0,0x928A, 0x62BD,0x928B, 0x663C,0x928C, 0x67F1,0x928D, 0x6CE8,0x928E, 0x866B,0x928F, 0x8877,0x9290, 
0x8A3B,0x9291, 0x914E,0x9292, 0x92F3,0x9293, 0x99D0,0x9294, 0x6A17,0x9295, 0x7026,0x9296, 0x732A,0x9297, 0x82E7,0x9298, 0x8457,0x9299, 0x8CAF,0x929A, 0x4E01,0x929B, 0x5146,0x929C, 0x51CB,0x929D, 0x558B,0x929E, 0x5BF5,0x929F, 0x5E16,0x92A0, 0x5E33,0x92A1, 0x5E81,0x92A2, 0x5F14,0x92A3, 0x5F35,0x92A4, 0x5F6B,0x92A5, 0x5FB4,0x92A6, 0x61F2,0x92A7, 0x6311,0x92A8, 0x66A2,0x92A9, 0x671D,0x92AA, 0x6F6E,0x92AB, 0x7252,0x92AC, 0x753A,0x92AD, 0x773A,0x92AE, 0x8074,0x92AF, 0x8139,0x92B0, 0x8178,0x92B1, 0x8776,0x92B2, 0x8ABF,0x92B3, 0x8ADC,0x92B4, 0x8D85,0x92B5, 0x8DF3,0x92B6, 0x929A,0x92B7, 0x9577,0x92B8, 0x9802,0x92B9, 0x9CE5,0x92BA, 0x52C5,0x92BB, 0x6357,0x92BC, 0x76F4,0x92BD, 0x6715,0x92BE, 0x6C88,0x92BF, 0x73CD,0x92C0, 0x8CC3,0x92C1, 0x93AE,0x92C2, 0x9673,0x92C3, 0x6D25,0x92C4, 0x589C,0x92C5, 0x690E,0x92C6, 0x69CC,0x92C7, 0x8FFD,0x92C8, 0x939A,0x92C9, 0x75DB,0x92CA, 0x901A,0x92CB, 0x585A,0x92CC, 0x6802,0x92CD, 0x63B4,0x92CE, 0x69FB,0x92CF, 0x4F43,0x92D0, 0x6F2C,0x92D1, 0x67D8,0x92D2, 0x8FBB,0x92D3, 0x8526,0x92D4, 0x7DB4,0x92D5, 0x9354,0x92D6, 0x693F,0x92D7, 0x6F70,0x92D8, 0x576A,0x92D9, 0x58F7,0x92DA, 0x5B2C,0x92DB, 0x7D2C,0x92DC, 0x722A,0x92DD, 0x540A,0x92DE, 0x91E3,0x92DF, 0x9DB4,0x92E0, 0x4EAD,0x92E1, 0x4F4E,0x92E2, 0x505C,0x92E3, 0x5075,0x92E4, 0x5243,0x92E5, 0x8C9E,0x92E6, 0x5448,0x92E7, 0x5824,0x92E8, 0x5B9A,0x92E9, 0x5E1D,0x92EA, 0x5E95,0x92EB, 0x5EAD,0x92EC, 0x5EF7,0x92ED, 0x5F1F,0x92EE, 0x608C,0x92EF, 0x62B5,0x92F0, 0x633A,0x92F1, 0x63D0,0x92F2, 0x68AF,0x92F3, 0x6C40,0x92F4, 0x7887,0x92F5, 0x798E,0x92F6, 0x7A0B,0x92F7, 0x7DE0,0x92F8, 0x8247,0x92F9, 0x8A02,0x92FA, 0x8AE6,0x92FB, 0x8E44,0x92FC, 0x9013,0x9340, 0x90B8,0x9341, 0x912D,0x9342, 0x91D8,0x9343, 0x9F0E,0x9344, 0x6CE5,0x9345, 0x6458,0x9346, 0x64E2,0x9347, 0x6575,0x9348, 0x6EF4,0x9349, 0x7684,0x934A, 0x7B1B,0x934B, 0x9069,0x934C, 0x93D1,0x934D, 0x6EBA,0x934E, 0x54F2,0x934F, 0x5FB9,0x9350, 0x64A4,0x9351, 0x8F4D,0x9352, 0x8FED,0x9353, 0x9244,0x9354, 0x5178,0x9355, 0x586B,0x9356, 0x5929,0x9357, 0x5C55,0x9358, 0x5E97,0x9359, 0x6DFB,0x935A, 0x7E8F,0x935B, 0x751C,0x935C, 0x8CBC,0x935D, 0x8EE2,0x935E, 0x985B,0x935F, 0x70B9,0x9360, 0x4F1D,0x9361, 0x6BBF,0x9362, 0x6FB1,0x9363, 0x7530,0x9364, 0x96FB,0x9365, 0x514E,0x9366, 0x5410,0x9367, 0x5835,0x9368, 0x5857,0x9369, 0x59AC,0x936A, 0x5C60,0x936B, 0x5F92,0x936C, 0x6597,0x936D, 0x675C,0x936E, 0x6E21,0x936F, 0x767B,0x9370, 0x83DF,0x9371, 0x8CED,0x9372, 0x9014,0x9373, 0x90FD,0x9374, 0x934D,0x9375, 0x7825,0x9376, 0x783A,0x9377, 0x52AA,0x9378, 0x5EA6,0x9379, 0x571F,0x937A, 0x5974,0x937B, 0x6012,0x937C, 0x5012,0x937D, 0x515A,0x937E, 0x51AC,0x9380, 0x51CD,0x9381, 0x5200,0x9382, 0x5510,0x9383, 0x5854,0x9384, 0x5858,0x9385, 0x5957,0x9386, 0x5B95,0x9387, 0x5CF6,0x9388, 0x5D8B,0x9389, 0x60BC,0x938A, 0x6295,0x938B, 0x642D,0x938C, 0x6771,0x938D, 0x6843,0x938E, 0x68BC,0x938F, 0x68DF,0x9390, 0x76D7,0x9391, 0x6DD8,0x9392, 0x6E6F,0x9393, 0x6D9B,0x9394, 0x706F,0x9395, 0x71C8,0x9396, 0x5F53,0x9397, 0x75D8,0x9398, 0x7977,0x9399, 0x7B49,0x939A, 0x7B54,0x939B, 0x7B52,0x939C, 0x7CD6,0x939D, 0x7D71,0x939E, 0x5230,0x939F, 0x8463,0x93A0, 0x8569,0x93A1, 0x85E4,0x93A2, 0x8A0E,0x93A3, 0x8B04,0x93A4, 0x8C46,0x93A5, 0x8E0F,0x93A6, 0x9003,0x93A7, 0x900F,0x93A8, 0x9419,0x93A9, 0x9676,0x93AA, 0x982D,0x93AB, 0x9A30,0x93AC, 0x95D8,0x93AD, 0x50CD,0x93AE, 0x52D5,0x93AF, 0x540C,0x93B0, 0x5802,0x93B1, 0x5C0E,0x93B2, 0x61A7,0x93B3, 0x649E,0x93B4, 0x6D1E,0x93B5, 0x77B3,0x93B6, 0x7AE5,0x93B7, 0x80F4,0x93B8, 0x8404,0x93B9, 0x9053,0x93BA, 0x9285,0x93BB, 0x5CE0,0x93BC, 0x9D07,0x93BD, 0x533F,0x93BE, 0x5F97,0x93BF, 0x5FB3,0x93C0, 0x6D9C,0x93C1, 
0x7279,0x93C2, 0x7763,0x93C3, 0x79BF,0x93C4, 0x7BE4,0x93C5, 0x6BD2,0x93C6, 0x72EC,0x93C7, 0x8AAD,0x93C8, 0x6803,0x93C9, 0x6A61,0x93CA, 0x51F8,0x93CB, 0x7A81,0x93CC, 0x6934,0x93CD, 0x5C4A,0x93CE, 0x9CF6,0x93CF, 0x82EB,0x93D0, 0x5BC5,0x93D1, 0x9149,0x93D2, 0x701E,0x93D3, 0x5678,0x93D4, 0x5C6F,0x93D5, 0x60C7,0x93D6, 0x6566,0x93D7, 0x6C8C,0x93D8, 0x8C5A,0x93D9, 0x9041,0x93DA, 0x9813,0x93DB, 0x5451,0x93DC, 0x66C7,0x93DD, 0x920D,0x93DE, 0x5948,0x93DF, 0x90A3,0x93E0, 0x5185,0x93E1, 0x4E4D,0x93E2, 0x51EA,0x93E3, 0x8599,0x93E4, 0x8B0E,0x93E5, 0x7058,0x93E6, 0x637A,0x93E7, 0x934B,0x93E8, 0x6962,0x93E9, 0x99B4,0x93EA, 0x7E04,0x93EB, 0x7577,0x93EC, 0x5357,0x93ED, 0x6960,0x93EE, 0x8EDF,0x93EF, 0x96E3,0x93F0, 0x6C5D,0x93F1, 0x4E8C,0x93F2, 0x5C3C,0x93F3, 0x5F10,0x93F4, 0x8FE9,0x93F5, 0x5302,0x93F6, 0x8CD1,0x93F7, 0x8089,0x93F8, 0x8679,0x93F9, 0x5EFF,0x93FA, 0x65E5,0x93FB, 0x4E73,0x93FC, 0x5165,0x9440, 0x5982,0x9441, 0x5C3F,0x9442, 0x97EE,0x9443, 0x4EFB,0x9444, 0x598A,0x9445, 0x5FCD,0x9446, 0x8A8D,0x9447, 0x6FE1,0x9448, 0x79B0,0x9449, 0x7962,0x944A, 0x5BE7,0x944B, 0x8471,0x944C, 0x732B,0x944D, 0x71B1,0x944E, 0x5E74,0x944F, 0x5FF5,0x9450, 0x637B,0x9451, 0x649A,0x9452, 0x71C3,0x9453, 0x7C98,0x9454, 0x4E43,0x9455, 0x5EFC,0x9456, 0x4E4B,0x9457, 0x57DC,0x9458, 0x56A2,0x9459, 0x60A9,0x945A, 0x6FC3,0x945B, 0x7D0D,0x945C, 0x80FD,0x945D, 0x8133,0x945E, 0x81BF,0x945F, 0x8FB2,0x9460, 0x8997,0x9461, 0x86A4,0x9462, 0x5DF4,0x9463, 0x628A,0x9464, 0x64AD,0x9465, 0x8987,0x9466, 0x6777,0x9467, 0x6CE2,0x9468, 0x6D3E,0x9469, 0x7436,0x946A, 0x7834,0x946B, 0x5A46,0x946C, 0x7F75,0x946D, 0x82AD,0x946E, 0x99AC,0x946F, 0x4FF3,0x9470, 0x5EC3,0x9471, 0x62DD,0x9472, 0x6392,0x9473, 0x6557,0x9474, 0x676F,0x9475, 0x76C3,0x9476, 0x724C,0x9477, 0x80CC,0x9478, 0x80BA,0x9479, 0x8F29,0x947A, 0x914D,0x947B, 0x500D,0x947C, 0x57F9,0x947D, 0x5A92,0x947E, 0x6885,0x9480, 0x6973,0x9481, 0x7164,0x9482, 0x72FD,0x9483, 0x8CB7,0x9484, 0x58F2,0x9485, 0x8CE0,0x9486, 0x966A,0x9487, 0x9019,0x9488, 0x877F,0x9489, 0x79E4,0x948A, 0x77E7,0x948B, 0x8429,0x948C, 0x4F2F,0x948D, 0x5265,0x948E, 0x535A,0x948F, 0x62CD,0x9490, 0x67CF,0x9491, 0x6CCA,0x9492, 0x767D,0x9493, 0x7B94,0x9494, 0x7C95,0x9495, 0x8236,0x9496, 0x8584,0x9497, 0x8FEB,0x9498, 0x66DD,0x9499, 0x6F20,0x949A, 0x7206,0x949B, 0x7E1B,0x949C, 0x83AB,0x949D, 0x99C1,0x949E, 0x9EA6,0x949F, 0x51FD,0x94A0, 0x7BB1,0x94A1, 0x7872,0x94A2, 0x7BB8,0x94A3, 0x8087,0x94A4, 0x7B48,0x94A5, 0x6AE8,0x94A6, 0x5E61,0x94A7, 0x808C,0x94A8, 0x7551,0x94A9, 0x7560,0x94AA, 0x516B,0x94AB, 0x9262,0x94AC, 0x6E8C,0x94AD, 0x767A,0x94AE, 0x9197,0x94AF, 0x9AEA,0x94B0, 0x4F10,0x94B1, 0x7F70,0x94B2, 0x629C,0x94B3, 0x7B4F,0x94B4, 0x95A5,0x94B5, 0x9CE9,0x94B6, 0x567A,0x94B7, 0x5859,0x94B8, 0x86E4,0x94B9, 0x96BC,0x94BA, 0x4F34,0x94BB, 0x5224,0x94BC, 0x534A,0x94BD, 0x53CD,0x94BE, 0x53DB,0x94BF, 0x5E06,0x94C0, 0x642C,0x94C1, 0x6591,0x94C2, 0x677F,0x94C3, 0x6C3E,0x94C4, 0x6C4E,0x94C5, 0x7248,0x94C6, 0x72AF,0x94C7, 0x73ED,0x94C8, 0x7554,0x94C9, 0x7E41,0x94CA, 0x822C,0x94CB, 0x85E9,0x94CC, 0x8CA9,0x94CD, 0x7BC4,0x94CE, 0x91C6,0x94CF, 0x7169,0x94D0, 0x9812,0x94D1, 0x98EF,0x94D2, 0x633D,0x94D3, 0x6669,0x94D4, 0x756A,0x94D5, 0x76E4,0x94D6, 0x78D0,0x94D7, 0x8543,0x94D8, 0x86EE,0x94D9, 0x532A,0x94DA, 0x5351,0x94DB, 0x5426,0x94DC, 0x5983,0x94DD, 0x5E87,0x94DE, 0x5F7C,0x94DF, 0x60B2,0x94E0, 0x6249,0x94E1, 0x6279,0x94E2, 0x62AB,0x94E3, 0x6590,0x94E4, 0x6BD4,0x94E5, 0x6CCC,0x94E6, 0x75B2,0x94E7, 0x76AE,0x94E8, 0x7891,0x94E9, 0x79D8,0x94EA, 0x7DCB,0x94EB, 0x7F77,0x94EC, 0x80A5,0x94ED, 0x88AB,0x94EE, 0x8AB9,0x94EF, 0x8CBB,0x94F0, 0x907F,0x94F1, 0x975E,0x94F2, 
0x98DB,0x94F3, 0x6A0B,0x94F4, 0x7C38,0x94F5, 0x5099,0x94F6, 0x5C3E,0x94F7, 0x5FAE,0x94F8, 0x6787,0x94F9, 0x6BD8,0x94FA, 0x7435,0x94FB, 0x7709,0x94FC, 0x7F8E,0x9540, 0x9F3B,0x9541, 0x67CA,0x9542, 0x7A17,0x9543, 0x5339,0x9544, 0x758B,0x9545, 0x9AED,0x9546, 0x5F66,0x9547, 0x819D,0x9548, 0x83F1,0x9549, 0x8098,0x954A, 0x5F3C,0x954B, 0x5FC5,0x954C, 0x7562,0x954D, 0x7B46,0x954E, 0x903C,0x954F, 0x6867,0x9550, 0x59EB,0x9551, 0x5A9B,0x9552, 0x7D10,0x9553, 0x767E,0x9554, 0x8B2C,0x9555, 0x4FF5,0x9556, 0x5F6A,0x9557, 0x6A19,0x9558, 0x6C37,0x9559, 0x6F02,0x955A, 0x74E2,0x955B, 0x7968,0x955C, 0x8868,0x955D, 0x8A55,0x955E, 0x8C79,0x955F, 0x5EDF,0x9560, 0x63CF,0x9561, 0x75C5,0x9562, 0x79D2,0x9563, 0x82D7,0x9564, 0x9328,0x9565, 0x92F2,0x9566, 0x849C,0x9567, 0x86ED,0x9568, 0x9C2D,0x9569, 0x54C1,0x956A, 0x5F6C,0x956B, 0x658C,0x956C, 0x6D5C,0x956D, 0x7015,0x956E, 0x8CA7,0x956F, 0x8CD3,0x9570, 0x983B,0x9571, 0x654F,0x9572, 0x74F6,0x9573, 0x4E0D,0x9574, 0x4ED8,0x9575, 0x57E0,0x9576, 0x592B,0x9577, 0x5A66,0x9578, 0x5BCC,0x9579, 0x51A8,0x957A, 0x5E03,0x957B, 0x5E9C,0x957C, 0x6016,0x957D, 0x6276,0x957E, 0x6577,0x9580, 0x65A7,0x9581, 0x666E,0x9582, 0x6D6E,0x9583, 0x7236,0x9584, 0x7B26,0x9585, 0x8150,0x9586, 0x819A,0x9587, 0x8299,0x9588, 0x8B5C,0x9589, 0x8CA0,0x958A, 0x8CE6,0x958B, 0x8D74,0x958C, 0x961C,0x958D, 0x9644,0x958E, 0x4FAE,0x958F, 0x64AB,0x9590, 0x6B66,0x9591, 0x821E,0x9592, 0x8461,0x9593, 0x856A,0x9594, 0x90E8,0x9595, 0x5C01,0x9596, 0x6953,0x9597, 0x98A8,0x9598, 0x847A,0x9599, 0x8557,0x959A, 0x4F0F,0x959B, 0x526F,0x959C, 0x5FA9,0x959D, 0x5E45,0x959E, 0x670D,0x959F, 0x798F,0x95A0, 0x8179,0x95A1, 0x8907,0x95A2, 0x8986,0x95A3, 0x6DF5,0x95A4, 0x5F17,0x95A5, 0x6255,0x95A6, 0x6CB8,0x95A7, 0x4ECF,0x95A8, 0x7269,0x95A9, 0x9B92,0x95AA, 0x5206,0x95AB, 0x543B,0x95AC, 0x5674,0x95AD, 0x58B3,0x95AE, 0x61A4,0x95AF, 0x626E,0x95B0, 0x711A,0x95B1, 0x596E,0x95B2, 0x7C89,0x95B3, 0x7CDE,0x95B4, 0x7D1B,0x95B5, 0x96F0,0x95B6, 0x6587,0x95B7, 0x805E,0x95B8, 0x4E19,0x95B9, 0x4F75,0x95BA, 0x5175,0x95BB, 0x5840,0x95BC, 0x5E63,0x95BD, 0x5E73,0x95BE, 0x5F0A,0x95BF, 0x67C4,0x95C0, 0x4E26,0x95C1, 0x853D,0x95C2, 0x9589,0x95C3, 0x965B,0x95C4, 0x7C73,0x95C5, 0x9801,0x95C6, 0x50FB,0x95C7, 0x58C1,0x95C8, 0x7656,0x95C9, 0x78A7,0x95CA, 0x5225,0x95CB, 0x77A5,0x95CC, 0x8511,0x95CD, 0x7B86,0x95CE, 0x504F,0x95CF, 0x5909,0x95D0, 0x7247,0x95D1, 0x7BC7,0x95D2, 0x7DE8,0x95D3, 0x8FBA,0x95D4, 0x8FD4,0x95D5, 0x904D,0x95D6, 0x4FBF,0x95D7, 0x52C9,0x95D8, 0x5A29,0x95D9, 0x5F01,0x95DA, 0x97AD,0x95DB, 0x4FDD,0x95DC, 0x8217,0x95DD, 0x92EA,0x95DE, 0x5703,0x95DF, 0x6355,0x95E0, 0x6B69,0x95E1, 0x752B,0x95E2, 0x88DC,0x95E3, 0x8F14,0x95E4, 0x7A42,0x95E5, 0x52DF,0x95E6, 0x5893,0x95E7, 0x6155,0x95E8, 0x620A,0x95E9, 0x66AE,0x95EA, 0x6BCD,0x95EB, 0x7C3F,0x95EC, 0x83E9,0x95ED, 0x5023,0x95EE, 0x4FF8,0x95EF, 0x5305,0x95F0, 0x5446,0x95F1, 0x5831,0x95F2, 0x5949,0x95F3, 0x5B9D,0x95F4, 0x5CF0,0x95F5, 0x5CEF,0x95F6, 0x5D29,0x95F7, 0x5E96,0x95F8, 0x62B1,0x95F9, 0x6367,0x95FA, 0x653E,0x95FB, 0x65B9,0x95FC, 0x670B,0x9640, 0x6CD5,0x9641, 0x6CE1,0x9642, 0x70F9,0x9643, 0x7832,0x9644, 0x7E2B,0x9645, 0x80DE,0x9646, 0x82B3,0x9647, 0x840C,0x9648, 0x84EC,0x9649, 0x8702,0x964A, 0x8912,0x964B, 0x8A2A,0x964C, 0x8C4A,0x964D, 0x90A6,0x964E, 0x92D2,0x964F, 0x98FD,0x9650, 0x9CF3,0x9651, 0x9D6C,0x9652, 0x4E4F,0x9653, 0x4EA1,0x9654, 0x508D,0x9655, 0x5256,0x9656, 0x574A,0x9657, 0x59A8,0x9658, 0x5E3D,0x9659, 0x5FD8,0x965A, 0x5FD9,0x965B, 0x623F,0x965C, 0x66B4,0x965D, 0x671B,0x965E, 0x67D0,0x965F, 0x68D2,0x9660, 0x5192,0x9661, 0x7D21,0x9662, 0x80AA,0x9663, 0x81A8,0x9664, 0x8B00,0x9665, 0x8C8C,0x9666, 
0x8CBF,0x9667, 0x927E,0x9668, 0x9632,0x9669, 0x5420,0x966A, 0x982C,0x966B, 0x5317,0x966C, 0x50D5,0x966D, 0x535C,0x966E, 0x58A8,0x966F, 0x64B2,0x9670, 0x6734,0x9671, 0x7267,0x9672, 0x7766,0x9673, 0x7A46,0x9674, 0x91E6,0x9675, 0x52C3,0x9676, 0x6CA1,0x9677, 0x6B86,0x9678, 0x5800,0x9679, 0x5E4C,0x967A, 0x5954,0x967B, 0x672C,0x967C, 0x7FFB,0x967D, 0x51E1,0x967E, 0x76C6,0x9680, 0x6469,0x9681, 0x78E8,0x9682, 0x9B54,0x9683, 0x9EBB,0x9684, 0x57CB,0x9685, 0x59B9,0x9686, 0x6627,0x9687, 0x679A,0x9688, 0x6BCE,0x9689, 0x54E9,0x968A, 0x69D9,0x968B, 0x5E55,0x968C, 0x819C,0x968D, 0x6795,0x968E, 0x9BAA,0x968F, 0x67FE,0x9690, 0x9C52,0x9691, 0x685D,0x9692, 0x4EA6,0x9693, 0x4FE3,0x9694, 0x53C8,0x9695, 0x62B9,0x9696, 0x672B,0x9697, 0x6CAB,0x9698, 0x8FC4,0x9699, 0x4FAD,0x969A, 0x7E6D,0x969B, 0x9EBF,0x969C, 0x4E07,0x969D, 0x6162,0x969E, 0x6E80,0x969F, 0x6F2B,0x96A0, 0x8513,0x96A1, 0x5473,0x96A2, 0x672A,0x96A3, 0x9B45,0x96A4, 0x5DF3,0x96A5, 0x7B95,0x96A6, 0x5CAC,0x96A7, 0x5BC6,0x96A8, 0x871C,0x96A9, 0x6E4A,0x96AA, 0x84D1,0x96AB, 0x7A14,0x96AC, 0x8108,0x96AD, 0x5999,0x96AE, 0x7C8D,0x96AF, 0x6C11,0x96B0, 0x7720,0x96B1, 0x52D9,0x96B2, 0x5922,0x96B3, 0x7121,0x96B4, 0x725F,0x96B5, 0x77DB,0x96B6, 0x9727,0x96B7, 0x9D61,0x96B8, 0x690B,0x96B9, 0x5A7F,0x96BA, 0x5A18,0x96BB, 0x51A5,0x96BC, 0x540D,0x96BD, 0x547D,0x96BE, 0x660E,0x96BF, 0x76DF,0x96C0, 0x8FF7,0x96C1, 0x9298,0x96C2, 0x9CF4,0x96C3, 0x59EA,0x96C4, 0x725D,0x96C5, 0x6EC5,0x96C6, 0x514D,0x96C7, 0x68C9,0x96C8, 0x7DBF,0x96C9, 0x7DEC,0x96CA, 0x9762,0x96CB, 0x9EBA,0x96CC, 0x6478,0x96CD, 0x6A21,0x96CE, 0x8302,0x96CF, 0x5984,0x96D0, 0x5B5F,0x96D1, 0x6BDB,0x96D2, 0x731B,0x96D3, 0x76F2,0x96D4, 0x7DB2,0x96D5, 0x8017,0x96D6, 0x8499,0x96D7, 0x5132,0x96D8, 0x6728,0x96D9, 0x9ED9,0x96DA, 0x76EE,0x96DB, 0x6762,0x96DC, 0x52FF,0x96DD, 0x9905,0x96DE, 0x5C24,0x96DF, 0x623B,0x96E0, 0x7C7E,0x96E1, 0x8CB0,0x96E2, 0x554F,0x96E3, 0x60B6,0x96E4, 0x7D0B,0x96E5, 0x9580,0x96E6, 0x5301,0x96E7, 0x4E5F,0x96E8, 0x51B6,0x96E9, 0x591C,0x96EA, 0x723A,0x96EB, 0x8036,0x96EC, 0x91CE,0x96ED, 0x5F25,0x96EE, 0x77E2,0x96EF, 0x5384,0x96F0, 0x5F79,0x96F1, 0x7D04,0x96F2, 0x85AC,0x96F3, 0x8A33,0x96F4, 0x8E8D,0x96F5, 0x9756,0x96F6, 0x67F3,0x96F7, 0x85AE,0x96F8, 0x9453,0x96F9, 0x6109,0x96FA, 0x6108,0x96FB, 0x6CB9,0x96FC, 0x7652,0x9740, 0x8AED,0x9741, 0x8F38,0x9742, 0x552F,0x9743, 0x4F51,0x9744, 0x512A,0x9745, 0x52C7,0x9746, 0x53CB,0x9747, 0x5BA5,0x9748, 0x5E7D,0x9749, 0x60A0,0x974A, 0x6182,0x974B, 0x63D6,0x974C, 0x6709,0x974D, 0x67DA,0x974E, 0x6E67,0x974F, 0x6D8C,0x9750, 0x7336,0x9751, 0x7337,0x9752, 0x7531,0x9753, 0x7950,0x9754, 0x88D5,0x9755, 0x8A98,0x9756, 0x904A,0x9757, 0x9091,0x9758, 0x90F5,0x9759, 0x96C4,0x975A, 0x878D,0x975B, 0x5915,0x975C, 0x4E88,0x975D, 0x4F59,0x975E, 0x4E0E,0x975F, 0x8A89,0x9760, 0x8F3F,0x9761, 0x9810,0x9762, 0x50AD,0x9763, 0x5E7C,0x9764, 0x5996,0x9765, 0x5BB9,0x9766, 0x5EB8,0x9767, 0x63DA,0x9768, 0x63FA,0x9769, 0x64C1,0x976A, 0x66DC,0x976B, 0x694A,0x976C, 0x69D8,0x976D, 0x6D0B,0x976E, 0x6EB6,0x976F, 0x7194,0x9770, 0x7528,0x9771, 0x7AAF,0x9772, 0x7F8A,0x9773, 0x8000,0x9774, 0x8449,0x9775, 0x84C9,0x9776, 0x8981,0x9777, 0x8B21,0x9778, 0x8E0A,0x9779, 0x9065,0x977A, 0x967D,0x977B, 0x990A,0x977C, 0x617E,0x977D, 0x6291,0x977E, 0x6B32,0x9780, 0x6C83,0x9781, 0x6D74,0x9782, 0x7FCC,0x9783, 0x7FFC,0x9784, 0x6DC0,0x9785, 0x7F85,0x9786, 0x87BA,0x9787, 0x88F8,0x9788, 0x6765,0x9789, 0x83B1,0x978A, 0x983C,0x978B, 0x96F7,0x978C, 0x6D1B,0x978D, 0x7D61,0x978E, 0x843D,0x978F, 0x916A,0x9790, 0x4E71,0x9791, 0x5375,0x9792, 0x5D50,0x9793, 0x6B04,0x9794, 0x6FEB,0x9795, 0x85CD,0x9796, 0x862D,0x9797, 0x89A7,0x9798, 
0x5229,0x9799, 0x540F,0x979A, 0x5C65,0x979B, 0x674E,0x979C, 0x68A8,0x979D, 0x7406,0x979E, 0x7483,0x979F, 0x75E2,0x97A0, 0x88CF,0x97A1, 0x88E1,0x97A2, 0x91CC,0x97A3, 0x96E2,0x97A4, 0x9678,0x97A5, 0x5F8B,0x97A6, 0x7387,0x97A7, 0x7ACB,0x97A8, 0x844E,0x97A9, 0x63A0,0x97AA, 0x7565,0x97AB, 0x5289,0x97AC, 0x6D41,0x97AD, 0x6E9C,0x97AE, 0x7409,0x97AF, 0x7559,0x97B0, 0x786B,0x97B1, 0x7C92,0x97B2, 0x9686,0x97B3, 0x7ADC,0x97B4, 0x9F8D,0x97B5, 0x4FB6,0x97B6, 0x616E,0x97B7, 0x65C5,0x97B8, 0x865C,0x97B9, 0x4E86,0x97BA, 0x4EAE,0x97BB, 0x50DA,0x97BC, 0x4E21,0x97BD, 0x51CC,0x97BE, 0x5BEE,0x97BF, 0x6599,0x97C0, 0x6881,0x97C1, 0x6DBC,0x97C2, 0x731F,0x97C3, 0x7642,0x97C4, 0x77AD,0x97C5, 0x7A1C,0x97C6, 0x7CE7,0x97C7, 0x826F,0x97C8, 0x8AD2,0x97C9, 0x907C,0x97CA, 0x91CF,0x97CB, 0x9675,0x97CC, 0x9818,0x97CD, 0x529B,0x97CE, 0x7DD1,0x97CF, 0x502B,0x97D0, 0x5398,0x97D1, 0x6797,0x97D2, 0x6DCB,0x97D3, 0x71D0,0x97D4, 0x7433,0x97D5, 0x81E8,0x97D6, 0x8F2A,0x97D7, 0x96A3,0x97D8, 0x9C57,0x97D9, 0x9E9F,0x97DA, 0x7460,0x97DB, 0x5841,0x97DC, 0x6D99,0x97DD, 0x7D2F,0x97DE, 0x985E,0x97DF, 0x4EE4,0x97E0, 0x4F36,0x97E1, 0x4F8B,0x97E2, 0x51B7,0x97E3, 0x52B1,0x97E4, 0x5DBA,0x97E5, 0x601C,0x97E6, 0x73B2,0x97E7, 0x793C,0x97E8, 0x82D3,0x97E9, 0x9234,0x97EA, 0x96B7,0x97EB, 0x96F6,0x97EC, 0x970A,0x97ED, 0x9E97,0x97EE, 0x9F62,0x97EF, 0x66A6,0x97F0, 0x6B74,0x97F1, 0x5217,0x97F2, 0x52A3,0x97F3, 0x70C8,0x97F4, 0x88C2,0x97F5, 0x5EC9,0x97F6, 0x604B,0x97F7, 0x6190,0x97F8, 0x6F23,0x97F9, 0x7149,0x97FA, 0x7C3E,0x97FB, 0x7DF4,0x97FC, 0x806F,0x9840, 0x84EE,0x9841, 0x9023,0x9842, 0x932C,0x9843, 0x5442,0x9844, 0x9B6F,0x9845, 0x6AD3,0x9846, 0x7089,0x9847, 0x8CC2,0x9848, 0x8DEF,0x9849, 0x9732,0x984A, 0x52B4,0x984B, 0x5A41,0x984C, 0x5ECA,0x984D, 0x5F04,0x984E, 0x6717,0x984F, 0x697C,0x9850, 0x6994,0x9851, 0x6D6A,0x9852, 0x6F0F,0x9853, 0x7262,0x9854, 0x72FC,0x9855, 0x7BED,0x9856, 0x8001,0x9857, 0x807E,0x9858, 0x874B,0x9859, 0x90CE,0x985A, 0x516D,0x985B, 0x9E93,0x985C, 0x7984,0x985D, 0x808B,0x985E, 0x9332,0x985F, 0x8AD6,0x9860, 0x502D,0x9861, 0x548C,0x9862, 0x8A71,0x9863, 0x6B6A,0x9864, 0x8CC4,0x9865, 0x8107,0x9866, 0x60D1,0x9867, 0x67A0,0x9868, 0x9DF2,0x9869, 0x4E99,0x986A, 0x4E98,0x986B, 0x9C10,0x986C, 0x8A6B,0x986D, 0x85C1,0x986E, 0x8568,0x986F, 0x6900,0x9870, 0x6E7E,0x9871, 0x7897,0x9872, 0x8155,0x989F, 0x5F0C,0x98A0, 0x4E10,0x98A1, 0x4E15,0x98A2, 0x4E2A,0x98A3, 0x4E31,0x98A4, 0x4E36,0x98A5, 0x4E3C,0x98A6, 0x4E3F,0x98A7, 0x4E42,0x98A8, 0x4E56,0x98A9, 0x4E58,0x98AA, 0x4E82,0x98AB, 0x4E85,0x98AC, 0x8C6B,0x98AD, 0x4E8A,0x98AE, 0x8212,0x98AF, 0x5F0D,0x98B0, 0x4E8E,0x98B1, 0x4E9E,0x98B2, 0x4E9F,0x98B3, 0x4EA0,0x98B4, 0x4EA2,0x98B5, 0x4EB0,0x98B6, 0x4EB3,0x98B7, 0x4EB6,0x98B8, 0x4ECE,0x98B9, 0x4ECD,0x98BA, 0x4EC4,0x98BB, 0x4EC6,0x98BC, 0x4EC2,0x98BD, 0x4ED7,0x98BE, 0x4EDE,0x98BF, 0x4EED,0x98C0, 0x4EDF,0x98C1, 0x4EF7,0x98C2, 0x4F09,0x98C3, 0x4F5A,0x98C4, 0x4F30,0x98C5, 0x4F5B,0x98C6, 0x4F5D,0x98C7, 0x4F57,0x98C8, 0x4F47,0x98C9, 0x4F76,0x98CA, 0x4F88,0x98CB, 0x4F8F,0x98CC, 0x4F98,0x98CD, 0x4F7B,0x98CE, 0x4F69,0x98CF, 0x4F70,0x98D0, 0x4F91,0x98D1, 0x4F6F,0x98D2, 0x4F86,0x98D3, 0x4F96,0x98D4, 0x5118,0x98D5, 0x4FD4,0x98D6, 0x4FDF,0x98D7, 0x4FCE,0x98D8, 0x4FD8,0x98D9, 0x4FDB,0x98DA, 0x4FD1,0x98DB, 0x4FDA,0x98DC, 0x4FD0,0x98DD, 0x4FE4,0x98DE, 0x4FE5,0x98DF, 0x501A,0x98E0, 0x5028,0x98E1, 0x5014,0x98E2, 0x502A,0x98E3, 0x5025,0x98E4, 0x5005,0x98E5, 0x4F1C,0x98E6, 0x4FF6,0x98E7, 0x5021,0x98E8, 0x5029,0x98E9, 0x502C,0x98EA, 0x4FFE,0x98EB, 0x4FEF,0x98EC, 0x5011,0x98ED, 0x5006,0x98EE, 0x5043,0x98EF, 0x5047,0x98F0, 0x6703,0x98F1, 0x5055,0x98F2, 0x5050,0x98F3, 0x5048,0x98F4, 
0x505A,0x98F5, 0x5056,0x98F6, 0x506C,0x98F7, 0x5078,0x98F8, 0x5080,0x98F9, 0x509A,0x98FA, 0x5085,0x98FB, 0x50B4,0x98FC, 0x50B2,0x9940, 0x50C9,0x9941, 0x50CA,0x9942, 0x50B3,0x9943, 0x50C2,0x9944, 0x50D6,0x9945, 0x50DE,0x9946, 0x50E5,0x9947, 0x50ED,0x9948, 0x50E3,0x9949, 0x50EE,0x994A, 0x50F9,0x994B, 0x50F5,0x994C, 0x5109,0x994D, 0x5101,0x994E, 0x5102,0x994F, 0x5116,0x9950, 0x5115,0x9951, 0x5114,0x9952, 0x511A,0x9953, 0x5121,0x9954, 0x513A,0x9955, 0x5137,0x9956, 0x513C,0x9957, 0x513B,0x9958, 0x513F,0x9959, 0x5140,0x995A, 0x5152,0x995B, 0x514C,0x995C, 0x5154,0x995D, 0x5162,0x995E, 0x7AF8,0x995F, 0x5169,0x9960, 0x516A,0x9961, 0x516E,0x9962, 0x5180,0x9963, 0x5182,0x9964, 0x56D8,0x9965, 0x518C,0x9966, 0x5189,0x9967, 0x518F,0x9968, 0x5191,0x9969, 0x5193,0x996A, 0x5195,0x996B, 0x5196,0x996C, 0x51A4,0x996D, 0x51A6,0x996E, 0x51A2,0x996F, 0x51A9,0x9970, 0x51AA,0x9971, 0x51AB,0x9972, 0x51B3,0x9973, 0x51B1,0x9974, 0x51B2,0x9975, 0x51B0,0x9976, 0x51B5,0x9977, 0x51BD,0x9978, 0x51C5,0x9979, 0x51C9,0x997A, 0x51DB,0x997B, 0x51E0,0x997C, 0x8655,0x997D, 0x51E9,0x997E, 0x51ED,0x9980, 0x51F0,0x9981, 0x51F5,0x9982, 0x51FE,0x9983, 0x5204,0x9984, 0x520B,0x9985, 0x5214,0x9986, 0x520E,0x9987, 0x5227,0x9988, 0x522A,0x9989, 0x522E,0x998A, 0x5233,0x998B, 0x5239,0x998C, 0x524F,0x998D, 0x5244,0x998E, 0x524B,0x998F, 0x524C,0x9990, 0x525E,0x9991, 0x5254,0x9992, 0x526A,0x9993, 0x5274,0x9994, 0x5269,0x9995, 0x5273,0x9996, 0x527F,0x9997, 0x527D,0x9998, 0x528D,0x9999, 0x5294,0x999A, 0x5292,0x999B, 0x5271,0x999C, 0x5288,0x999D, 0x5291,0x999E, 0x8FA8,0x999F, 0x8FA7,0x99A0, 0x52AC,0x99A1, 0x52AD,0x99A2, 0x52BC,0x99A3, 0x52B5,0x99A4, 0x52C1,0x99A5, 0x52CD,0x99A6, 0x52D7,0x99A7, 0x52DE,0x99A8, 0x52E3,0x99A9, 0x52E6,0x99AA, 0x98ED,0x99AB, 0x52E0,0x99AC, 0x52F3,0x99AD, 0x52F5,0x99AE, 0x52F8,0x99AF, 0x52F9,0x99B0, 0x5306,0x99B1, 0x5308,0x99B2, 0x7538,0x99B3, 0x530D,0x99B4, 0x5310,0x99B5, 0x530F,0x99B6, 0x5315,0x99B7, 0x531A,0x99B8, 0x5323,0x99B9, 0x532F,0x99BA, 0x5331,0x99BB, 0x5333,0x99BC, 0x5338,0x99BD, 0x5340,0x99BE, 0x5346,0x99BF, 0x5345,0x99C0, 0x4E17,0x99C1, 0x5349,0x99C2, 0x534D,0x99C3, 0x51D6,0x99C4, 0x535E,0x99C5, 0x5369,0x99C6, 0x536E,0x99C7, 0x5918,0x99C8, 0x537B,0x99C9, 0x5377,0x99CA, 0x5382,0x99CB, 0x5396,0x99CC, 0x53A0,0x99CD, 0x53A6,0x99CE, 0x53A5,0x99CF, 0x53AE,0x99D0, 0x53B0,0x99D1, 0x53B6,0x99D2, 0x53C3,0x99D3, 0x7C12,0x99D4, 0x96D9,0x99D5, 0x53DF,0x99D6, 0x66FC,0x99D7, 0x71EE,0x99D8, 0x53EE,0x99D9, 0x53E8,0x99DA, 0x53ED,0x99DB, 0x53FA,0x99DC, 0x5401,0x99DD, 0x543D,0x99DE, 0x5440,0x99DF, 0x542C,0x99E0, 0x542D,0x99E1, 0x543C,0x99E2, 0x542E,0x99E3, 0x5436,0x99E4, 0x5429,0x99E5, 0x541D,0x99E6, 0x544E,0x99E7, 0x548F,0x99E8, 0x5475,0x99E9, 0x548E,0x99EA, 0x545F,0x99EB, 0x5471,0x99EC, 0x5477,0x99ED, 0x5470,0x99EE, 0x5492,0x99EF, 0x547B,0x99F0, 0x5480,0x99F1, 0x5476,0x99F2, 0x5484,0x99F3, 0x5490,0x99F4, 0x5486,0x99F5, 0x54C7,0x99F6, 0x54A2,0x99F7, 0x54B8,0x99F8, 0x54A5,0x99F9, 0x54AC,0x99FA, 0x54C4,0x99FB, 0x54C8,0x99FC, 0x54A8,0x9A40, 0x54AB,0x9A41, 0x54C2,0x9A42, 0x54A4,0x9A43, 0x54BE,0x9A44, 0x54BC,0x9A45, 0x54D8,0x9A46, 0x54E5,0x9A47, 0x54E6,0x9A48, 0x550F,0x9A49, 0x5514,0x9A4A, 0x54FD,0x9A4B, 0x54EE,0x9A4C, 0x54ED,0x9A4D, 0x54FA,0x9A4E, 0x54E2,0x9A4F, 0x5539,0x9A50, 0x5540,0x9A51, 0x5563,0x9A52, 0x554C,0x9A53, 0x552E,0x9A54, 0x555C,0x9A55, 0x5545,0x9A56, 0x5556,0x9A57, 0x5557,0x9A58, 0x5538,0x9A59, 0x5533,0x9A5A, 0x555D,0x9A5B, 0x5599,0x9A5C, 0x5580,0x9A5D, 0x54AF,0x9A5E, 0x558A,0x9A5F, 0x559F,0x9A60, 0x557B,0x9A61, 0x557E,0x9A62, 0x5598,0x9A63, 0x559E,0x9A64, 0x55AE,0x9A65, 0x557C,0x9A66, 0x5583,0x9A67, 0x55A9,0x9A68, 
0x5587,0x9A69, 0x55A8,0x9A6A, 0x55DA,0x9A6B, 0x55C5,0x9A6C, 0x55DF,0x9A6D, 0x55C4,0x9A6E, 0x55DC,0x9A6F, 0x55E4,0x9A70, 0x55D4,0x9A71, 0x5614,0x9A72, 0x55F7,0x9A73, 0x5616,0x9A74, 0x55FE,0x9A75, 0x55FD,0x9A76, 0x561B,0x9A77, 0x55F9,0x9A78, 0x564E,0x9A79, 0x5650,0x9A7A, 0x71DF,0x9A7B, 0x5634,0x9A7C, 0x5636,0x9A7D, 0x5632,0x9A7E, 0x5638,0x9A80, 0x566B,0x9A81, 0x5664,0x9A82, 0x562F,0x9A83, 0x566C,0x9A84, 0x566A,0x9A85, 0x5686,0x9A86, 0x5680,0x9A87, 0x568A,0x9A88, 0x56A0,0x9A89, 0x5694,0x9A8A, 0x568F,0x9A8B, 0x56A5,0x9A8C, 0x56AE,0x9A8D, 0x56B6,0x9A8E, 0x56B4,0x9A8F, 0x56C2,0x9A90, 0x56BC,0x9A91, 0x56C1,0x9A92, 0x56C3,0x9A93, 0x56C0,0x9A94, 0x56C8,0x9A95, 0x56CE,0x9A96, 0x56D1,0x9A97, 0x56D3,0x9A98, 0x56D7,0x9A99, 0x56EE,0x9A9A, 0x56F9,0x9A9B, 0x5700,0x9A9C, 0x56FF,0x9A9D, 0x5704,0x9A9E, 0x5709,0x9A9F, 0x5708,0x9AA0, 0x570B,0x9AA1, 0x570D,0x9AA2, 0x5713,0x9AA3, 0x5718,0x9AA4, 0x5716,0x9AA5, 0x55C7,0x9AA6, 0x571C,0x9AA7, 0x5726,0x9AA8, 0x5737,0x9AA9, 0x5738,0x9AAA, 0x574E,0x9AAB, 0x573B,0x9AAC, 0x5740,0x9AAD, 0x574F,0x9AAE, 0x5769,0x9AAF, 0x57C0,0x9AB0, 0x5788,0x9AB1, 0x5761,0x9AB2, 0x577F,0x9AB3, 0x5789,0x9AB4, 0x5793,0x9AB5, 0x57A0,0x9AB6, 0x57B3,0x9AB7, 0x57A4,0x9AB8, 0x57AA,0x9AB9, 0x57B0,0x9ABA, 0x57C3,0x9ABB, 0x57C6,0x9ABC, 0x57D4,0x9ABD, 0x57D2,0x9ABE, 0x57D3,0x9ABF, 0x580A,0x9AC0, 0x57D6,0x9AC1, 0x57E3,0x9AC2, 0x580B,0x9AC3, 0x5819,0x9AC4, 0x581D,0x9AC5, 0x5872,0x9AC6, 0x5821,0x9AC7, 0x5862,0x9AC8, 0x584B,0x9AC9, 0x5870,0x9ACA, 0x6BC0,0x9ACB, 0x5852,0x9ACC, 0x583D,0x9ACD, 0x5879,0x9ACE, 0x5885,0x9ACF, 0x58B9,0x9AD0, 0x589F,0x9AD1, 0x58AB,0x9AD2, 0x58BA,0x9AD3, 0x58DE,0x9AD4, 0x58BB,0x9AD5, 0x58B8,0x9AD6, 0x58AE,0x9AD7, 0x58C5,0x9AD8, 0x58D3,0x9AD9, 0x58D1,0x9ADA, 0x58D7,0x9ADB, 0x58D9,0x9ADC, 0x58D8,0x9ADD, 0x58E5,0x9ADE, 0x58DC,0x9ADF, 0x58E4,0x9AE0, 0x58DF,0x9AE1, 0x58EF,0x9AE2, 0x58FA,0x9AE3, 0x58F9,0x9AE4, 0x58FB,0x9AE5, 0x58FC,0x9AE6, 0x58FD,0x9AE7, 0x5902,0x9AE8, 0x590A,0x9AE9, 0x5910,0x9AEA, 0x591B,0x9AEB, 0x68A6,0x9AEC, 0x5925,0x9AED, 0x592C,0x9AEE, 0x592D,0x9AEF, 0x5932,0x9AF0, 0x5938,0x9AF1, 0x593E,0x9AF2, 0x7AD2,0x9AF3, 0x5955,0x9AF4, 0x5950,0x9AF5, 0x594E,0x9AF6, 0x595A,0x9AF7, 0x5958,0x9AF8, 0x5962,0x9AF9, 0x5960,0x9AFA, 0x5967,0x9AFB, 0x596C,0x9AFC, 0x5969,0x9B40, 0x5978,0x9B41, 0x5981,0x9B42, 0x599D,0x9B43, 0x4F5E,0x9B44, 0x4FAB,0x9B45, 0x59A3,0x9B46, 0x59B2,0x9B47, 0x59C6,0x9B48, 0x59E8,0x9B49, 0x59DC,0x9B4A, 0x598D,0x9B4B, 0x59D9,0x9B4C, 0x59DA,0x9B4D, 0x5A25,0x9B4E, 0x5A1F,0x9B4F, 0x5A11,0x9B50, 0x5A1C,0x9B51, 0x5A09,0x9B52, 0x5A1A,0x9B53, 0x5A40,0x9B54, 0x5A6C,0x9B55, 0x5A49,0x9B56, 0x5A35,0x9B57, 0x5A36,0x9B58, 0x5A62,0x9B59, 0x5A6A,0x9B5A, 0x5A9A,0x9B5B, 0x5ABC,0x9B5C, 0x5ABE,0x9B5D, 0x5ACB,0x9B5E, 0x5AC2,0x9B5F, 0x5ABD,0x9B60, 0x5AE3,0x9B61, 0x5AD7,0x9B62, 0x5AE6,0x9B63, 0x5AE9,0x9B64, 0x5AD6,0x9B65, 0x5AFA,0x9B66, 0x5AFB,0x9B67, 0x5B0C,0x9B68, 0x5B0B,0x9B69, 0x5B16,0x9B6A, 0x5B32,0x9B6B, 0x5AD0,0x9B6C, 0x5B2A,0x9B6D, 0x5B36,0x9B6E, 0x5B3E,0x9B6F, 0x5B43,0x9B70, 0x5B45,0x9B71, 0x5B40,0x9B72, 0x5B51,0x9B73, 0x5B55,0x9B74, 0x5B5A,0x9B75, 0x5B5B,0x9B76, 0x5B65,0x9B77, 0x5B69,0x9B78, 0x5B70,0x9B79, 0x5B73,0x9B7A, 0x5B75,0x9B7B, 0x5B78,0x9B7C, 0x6588,0x9B7D, 0x5B7A,0x9B7E, 0x5B80,0x9B80, 0x5B83,0x9B81, 0x5BA6,0x9B82, 0x5BB8,0x9B83, 0x5BC3,0x9B84, 0x5BC7,0x9B85, 0x5BC9,0x9B86, 0x5BD4,0x9B87, 0x5BD0,0x9B88, 0x5BE4,0x9B89, 0x5BE6,0x9B8A, 0x5BE2,0x9B8B, 0x5BDE,0x9B8C, 0x5BE5,0x9B8D, 0x5BEB,0x9B8E, 0x5BF0,0x9B8F, 0x5BF6,0x9B90, 0x5BF3,0x9B91, 0x5C05,0x9B92, 0x5C07,0x9B93, 0x5C08,0x9B94, 0x5C0D,0x9B95, 0x5C13,0x9B96, 0x5C20,0x9B97, 0x5C22,0x9B98, 0x5C28,0x9B99, 0x5C38,0x9B9A, 
0x5C39,0x9B9B, 0x5C41,0x9B9C, 0x5C46,0x9B9D, 0x5C4E,0x9B9E, 0x5C53,0x9B9F, 0x5C50,0x9BA0, 0x5C4F,0x9BA1, 0x5B71,0x9BA2, 0x5C6C,0x9BA3, 0x5C6E,0x9BA4, 0x4E62,0x9BA5, 0x5C76,0x9BA6, 0x5C79,0x9BA7, 0x5C8C,0x9BA8, 0x5C91,0x9BA9, 0x5C94,0x9BAA, 0x599B,0x9BAB, 0x5CAB,0x9BAC, 0x5CBB,0x9BAD, 0x5CB6,0x9BAE, 0x5CBC,0x9BAF, 0x5CB7,0x9BB0, 0x5CC5,0x9BB1, 0x5CBE,0x9BB2, 0x5CC7,0x9BB3, 0x5CD9,0x9BB4, 0x5CE9,0x9BB5, 0x5CFD,0x9BB6, 0x5CFA,0x9BB7, 0x5CED,0x9BB8, 0x5D8C,0x9BB9, 0x5CEA,0x9BBA, 0x5D0B,0x9BBB, 0x5D15,0x9BBC, 0x5D17,0x9BBD, 0x5D5C,0x9BBE, 0x5D1F,0x9BBF, 0x5D1B,0x9BC0, 0x5D11,0x9BC1, 0x5D14,0x9BC2, 0x5D22,0x9BC3, 0x5D1A,0x9BC4, 0x5D19,0x9BC5, 0x5D18,0x9BC6, 0x5D4C,0x9BC7, 0x5D52,0x9BC8, 0x5D4E,0x9BC9, 0x5D4B,0x9BCA, 0x5D6C,0x9BCB, 0x5D73,0x9BCC, 0x5D76,0x9BCD, 0x5D87,0x9BCE, 0x5D84,0x9BCF, 0x5D82,0x9BD0, 0x5DA2,0x9BD1, 0x5D9D,0x9BD2, 0x5DAC,0x9BD3, 0x5DAE,0x9BD4, 0x5DBD,0x9BD5, 0x5D90,0x9BD6, 0x5DB7,0x9BD7, 0x5DBC,0x9BD8, 0x5DC9,0x9BD9, 0x5DCD,0x9BDA, 0x5DD3,0x9BDB, 0x5DD2,0x9BDC, 0x5DD6,0x9BDD, 0x5DDB,0x9BDE, 0x5DEB,0x9BDF, 0x5DF2,0x9BE0, 0x5DF5,0x9BE1, 0x5E0B,0x9BE2, 0x5E1A,0x9BE3, 0x5E19,0x9BE4, 0x5E11,0x9BE5, 0x5E1B,0x9BE6, 0x5E36,0x9BE7, 0x5E37,0x9BE8, 0x5E44,0x9BE9, 0x5E43,0x9BEA, 0x5E40,0x9BEB, 0x5E4E,0x9BEC, 0x5E57,0x9BED, 0x5E54,0x9BEE, 0x5E5F,0x9BEF, 0x5E62,0x9BF0, 0x5E64,0x9BF1, 0x5E47,0x9BF2, 0x5E75,0x9BF3, 0x5E76,0x9BF4, 0x5E7A,0x9BF5, 0x9EBC,0x9BF6, 0x5E7F,0x9BF7, 0x5EA0,0x9BF8, 0x5EC1,0x9BF9, 0x5EC2,0x9BFA, 0x5EC8,0x9BFB, 0x5ED0,0x9BFC, 0x5ECF,0x9C40, 0x5ED6,0x9C41, 0x5EE3,0x9C42, 0x5EDD,0x9C43, 0x5EDA,0x9C44, 0x5EDB,0x9C45, 0x5EE2,0x9C46, 0x5EE1,0x9C47, 0x5EE8,0x9C48, 0x5EE9,0x9C49, 0x5EEC,0x9C4A, 0x5EF1,0x9C4B, 0x5EF3,0x9C4C, 0x5EF0,0x9C4D, 0x5EF4,0x9C4E, 0x5EF8,0x9C4F, 0x5EFE,0x9C50, 0x5F03,0x9C51, 0x5F09,0x9C52, 0x5F5D,0x9C53, 0x5F5C,0x9C54, 0x5F0B,0x9C55, 0x5F11,0x9C56, 0x5F16,0x9C57, 0x5F29,0x9C58, 0x5F2D,0x9C59, 0x5F38,0x9C5A, 0x5F41,0x9C5B, 0x5F48,0x9C5C, 0x5F4C,0x9C5D, 0x5F4E,0x9C5E, 0x5F2F,0x9C5F, 0x5F51,0x9C60, 0x5F56,0x9C61, 0x5F57,0x9C62, 0x5F59,0x9C63, 0x5F61,0x9C64, 0x5F6D,0x9C65, 0x5F73,0x9C66, 0x5F77,0x9C67, 0x5F83,0x9C68, 0x5F82,0x9C69, 0x5F7F,0x9C6A, 0x5F8A,0x9C6B, 0x5F88,0x9C6C, 0x5F91,0x9C6D, 0x5F87,0x9C6E, 0x5F9E,0x9C6F, 0x5F99,0x9C70, 0x5F98,0x9C71, 0x5FA0,0x9C72, 0x5FA8,0x9C73, 0x5FAD,0x9C74, 0x5FBC,0x9C75, 0x5FD6,0x9C76, 0x5FFB,0x9C77, 0x5FE4,0x9C78, 0x5FF8,0x9C79, 0x5FF1,0x9C7A, 0x5FDD,0x9C7B, 0x60B3,0x9C7C, 0x5FFF,0x9C7D, 0x6021,0x9C7E, 0x6060,0x9C80, 0x6019,0x9C81, 0x6010,0x9C82, 0x6029,0x9C83, 0x600E,0x9C84, 0x6031,0x9C85, 0x601B,0x9C86, 0x6015,0x9C87, 0x602B,0x9C88, 0x6026,0x9C89, 0x600F,0x9C8A, 0x603A,0x9C8B, 0x605A,0x9C8C, 0x6041,0x9C8D, 0x606A,0x9C8E, 0x6077,0x9C8F, 0x605F,0x9C90, 0x604A,0x9C91, 0x6046,0x9C92, 0x604D,0x9C93, 0x6063,0x9C94, 0x6043,0x9C95, 0x6064,0x9C96, 0x6042,0x9C97, 0x606C,0x9C98, 0x606B,0x9C99, 0x6059,0x9C9A, 0x6081,0x9C9B, 0x608D,0x9C9C, 0x60E7,0x9C9D, 0x6083,0x9C9E, 0x609A,0x9C9F, 0x6084,0x9CA0, 0x609B,0x9CA1, 0x6096,0x9CA2, 0x6097,0x9CA3, 0x6092,0x9CA4, 0x60A7,0x9CA5, 0x608B,0x9CA6, 0x60E1,0x9CA7, 0x60B8,0x9CA8, 0x60E0,0x9CA9, 0x60D3,0x9CAA, 0x60B4,0x9CAB, 0x5FF0,0x9CAC, 0x60BD,0x9CAD, 0x60C6,0x9CAE, 0x60B5,0x9CAF, 0x60D8,0x9CB0, 0x614D,0x9CB1, 0x6115,0x9CB2, 0x6106,0x9CB3, 0x60F6,0x9CB4, 0x60F7,0x9CB5, 0x6100,0x9CB6, 0x60F4,0x9CB7, 0x60FA,0x9CB8, 0x6103,0x9CB9, 0x6121,0x9CBA, 0x60FB,0x9CBB, 0x60F1,0x9CBC, 0x610D,0x9CBD, 0x610E,0x9CBE, 0x6147,0x9CBF, 0x613E,0x9CC0, 0x6128,0x9CC1, 0x6127,0x9CC2, 0x614A,0x9CC3, 0x613F,0x9CC4, 0x613C,0x9CC5, 0x612C,0x9CC6, 0x6134,0x9CC7, 0x613D,0x9CC8, 0x6142,0x9CC9, 0x6144,0x9CCA, 0x6173,0x9CCB, 
0x6177,0x9CCC, 0x6158,0x9CCD, 0x6159,0x9CCE, 0x615A,0x9CCF, 0x616B,0x9CD0, 0x6174,0x9CD1, 0x616F,0x9CD2, 0x6165,0x9CD3, 0x6171,0x9CD4, 0x615F,0x9CD5, 0x615D,0x9CD6, 0x6153,0x9CD7, 0x6175,0x9CD8, 0x6199,0x9CD9, 0x6196,0x9CDA, 0x6187,0x9CDB, 0x61AC,0x9CDC, 0x6194,0x9CDD, 0x619A,0x9CDE, 0x618A,0x9CDF, 0x6191,0x9CE0, 0x61AB,0x9CE1, 0x61AE,0x9CE2, 0x61CC,0x9CE3, 0x61CA,0x9CE4, 0x61C9,0x9CE5, 0x61F7,0x9CE6, 0x61C8,0x9CE7, 0x61C3,0x9CE8, 0x61C6,0x9CE9, 0x61BA,0x9CEA, 0x61CB,0x9CEB, 0x7F79,0x9CEC, 0x61CD,0x9CED, 0x61E6,0x9CEE, 0x61E3,0x9CEF, 0x61F6,0x9CF0, 0x61FA,0x9CF1, 0x61F4,0x9CF2, 0x61FF,0x9CF3, 0x61FD,0x9CF4, 0x61FC,0x9CF5, 0x61FE,0x9CF6, 0x6200,0x9CF7, 0x6208,0x9CF8, 0x6209,0x9CF9, 0x620D,0x9CFA, 0x620C,0x9CFB, 0x6214,0x9CFC, 0x621B,0x9D40, 0x621E,0x9D41, 0x6221,0x9D42, 0x622A,0x9D43, 0x622E,0x9D44, 0x6230,0x9D45, 0x6232,0x9D46, 0x6233,0x9D47, 0x6241,0x9D48, 0x624E,0x9D49, 0x625E,0x9D4A, 0x6263,0x9D4B, 0x625B,0x9D4C, 0x6260,0x9D4D, 0x6268,0x9D4E, 0x627C,0x9D4F, 0x6282,0x9D50, 0x6289,0x9D51, 0x627E,0x9D52, 0x6292,0x9D53, 0x6293,0x9D54, 0x6296,0x9D55, 0x62D4,0x9D56, 0x6283,0x9D57, 0x6294,0x9D58, 0x62D7,0x9D59, 0x62D1,0x9D5A, 0x62BB,0x9D5B, 0x62CF,0x9D5C, 0x62FF,0x9D5D, 0x62C6,0x9D5E, 0x64D4,0x9D5F, 0x62C8,0x9D60, 0x62DC,0x9D61, 0x62CC,0x9D62, 0x62CA,0x9D63, 0x62C2,0x9D64, 0x62C7,0x9D65, 0x629B,0x9D66, 0x62C9,0x9D67, 0x630C,0x9D68, 0x62EE,0x9D69, 0x62F1,0x9D6A, 0x6327,0x9D6B, 0x6302,0x9D6C, 0x6308,0x9D6D, 0x62EF,0x9D6E, 0x62F5,0x9D6F, 0x6350,0x9D70, 0x633E,0x9D71, 0x634D,0x9D72, 0x641C,0x9D73, 0x634F,0x9D74, 0x6396,0x9D75, 0x638E,0x9D76, 0x6380,0x9D77, 0x63AB,0x9D78, 0x6376,0x9D79, 0x63A3,0x9D7A, 0x638F,0x9D7B, 0x6389,0x9D7C, 0x639F,0x9D7D, 0x63B5,0x9D7E, 0x636B,0x9D80, 0x6369,0x9D81, 0x63BE,0x9D82, 0x63E9,0x9D83, 0x63C0,0x9D84, 0x63C6,0x9D85, 0x63E3,0x9D86, 0x63C9,0x9D87, 0x63D2,0x9D88, 0x63F6,0x9D89, 0x63C4,0x9D8A, 0x6416,0x9D8B, 0x6434,0x9D8C, 0x6406,0x9D8D, 0x6413,0x9D8E, 0x6426,0x9D8F, 0x6436,0x9D90, 0x651D,0x9D91, 0x6417,0x9D92, 0x6428,0x9D93, 0x640F,0x9D94, 0x6467,0x9D95, 0x646F,0x9D96, 0x6476,0x9D97, 0x644E,0x9D98, 0x652A,0x9D99, 0x6495,0x9D9A, 0x6493,0x9D9B, 0x64A5,0x9D9C, 0x64A9,0x9D9D, 0x6488,0x9D9E, 0x64BC,0x9D9F, 0x64DA,0x9DA0, 0x64D2,0x9DA1, 0x64C5,0x9DA2, 0x64C7,0x9DA3, 0x64BB,0x9DA4, 0x64D8,0x9DA5, 0x64C2,0x9DA6, 0x64F1,0x9DA7, 0x64E7,0x9DA8, 0x8209,0x9DA9, 0x64E0,0x9DAA, 0x64E1,0x9DAB, 0x62AC,0x9DAC, 0x64E3,0x9DAD, 0x64EF,0x9DAE, 0x652C,0x9DAF, 0x64F6,0x9DB0, 0x64F4,0x9DB1, 0x64F2,0x9DB2, 0x64FA,0x9DB3, 0x6500,0x9DB4, 0x64FD,0x9DB5, 0x6518,0x9DB6, 0x651C,0x9DB7, 0x6505,0x9DB8, 0x6524,0x9DB9, 0x6523,0x9DBA, 0x652B,0x9DBB, 0x6534,0x9DBC, 0x6535,0x9DBD, 0x6537,0x9DBE, 0x6536,0x9DBF, 0x6538,0x9DC0, 0x754B,0x9DC1, 0x6548,0x9DC2, 0x6556,0x9DC3, 0x6555,0x9DC4, 0x654D,0x9DC5, 0x6558,0x9DC6, 0x655E,0x9DC7, 0x655D,0x9DC8, 0x6572,0x9DC9, 0x6578,0x9DCA, 0x6582,0x9DCB, 0x6583,0x9DCC, 0x8B8A,0x9DCD, 0x659B,0x9DCE, 0x659F,0x9DCF, 0x65AB,0x9DD0, 0x65B7,0x9DD1, 0x65C3,0x9DD2, 0x65C6,0x9DD3, 0x65C1,0x9DD4, 0x65C4,0x9DD5, 0x65CC,0x9DD6, 0x65D2,0x9DD7, 0x65DB,0x9DD8, 0x65D9,0x9DD9, 0x65E0,0x9DDA, 0x65E1,0x9DDB, 0x65F1,0x9DDC, 0x6772,0x9DDD, 0x660A,0x9DDE, 0x6603,0x9DDF, 0x65FB,0x9DE0, 0x6773,0x9DE1, 0x6635,0x9DE2, 0x6636,0x9DE3, 0x6634,0x9DE4, 0x661C,0x9DE5, 0x664F,0x9DE6, 0x6644,0x9DE7, 0x6649,0x9DE8, 0x6641,0x9DE9, 0x665E,0x9DEA, 0x665D,0x9DEB, 0x6664,0x9DEC, 0x6667,0x9DED, 0x6668,0x9DEE, 0x665F,0x9DEF, 0x6662,0x9DF0, 0x6670,0x9DF1, 0x6683,0x9DF2, 0x6688,0x9DF3, 0x668E,0x9DF4, 0x6689,0x9DF5, 0x6684,0x9DF6, 0x6698,0x9DF7, 0x669D,0x9DF8, 0x66C1,0x9DF9, 0x66B9,0x9DFA, 0x66C9,0x9DFB, 0x66BE,0x9DFC, 
0x66BC,0x9E40, 0x66C4,0x9E41, 0x66B8,0x9E42, 0x66D6,0x9E43, 0x66DA,0x9E44, 0x66E0,0x9E45, 0x663F,0x9E46, 0x66E6,0x9E47, 0x66E9,0x9E48, 0x66F0,0x9E49, 0x66F5,0x9E4A, 0x66F7,0x9E4B, 0x670F,0x9E4C, 0x6716,0x9E4D, 0x671E,0x9E4E, 0x6726,0x9E4F, 0x6727,0x9E50, 0x9738,0x9E51, 0x672E,0x9E52, 0x673F,0x9E53, 0x6736,0x9E54, 0x6741,0x9E55, 0x6738,0x9E56, 0x6737,0x9E57, 0x6746,0x9E58, 0x675E,0x9E59, 0x6760,0x9E5A, 0x6759,0x9E5B, 0x6763,0x9E5C, 0x6764,0x9E5D, 0x6789,0x9E5E, 0x6770,0x9E5F, 0x67A9,0x9E60, 0x677C,0x9E61, 0x676A,0x9E62, 0x678C,0x9E63, 0x678B,0x9E64, 0x67A6,0x9E65, 0x67A1,0x9E66, 0x6785,0x9E67, 0x67B7,0x9E68, 0x67EF,0x9E69, 0x67B4,0x9E6A, 0x67EC,0x9E6B, 0x67B3,0x9E6C, 0x67E9,0x9E6D, 0x67B8,0x9E6E, 0x67E4,0x9E6F, 0x67DE,0x9E70, 0x67DD,0x9E71, 0x67E2,0x9E72, 0x67EE,0x9E73, 0x67B9,0x9E74, 0x67CE,0x9E75, 0x67C6,0x9E76, 0x67E7,0x9E77, 0x6A9C,0x9E78, 0x681E,0x9E79, 0x6846,0x9E7A, 0x6829,0x9E7B, 0x6840,0x9E7C, 0x684D,0x9E7D, 0x6832,0x9E7E, 0x684E,0x9E80, 0x68B3,0x9E81, 0x682B,0x9E82, 0x6859,0x9E83, 0x6863,0x9E84, 0x6877,0x9E85, 0x687F,0x9E86, 0x689F,0x9E87, 0x688F,0x9E88, 0x68AD,0x9E89, 0x6894,0x9E8A, 0x689D,0x9E8B, 0x689B,0x9E8C, 0x6883,0x9E8D, 0x6AAE,0x9E8E, 0x68B9,0x9E8F, 0x6874,0x9E90, 0x68B5,0x9E91, 0x68A0,0x9E92, 0x68BA,0x9E93, 0x690F,0x9E94, 0x688D,0x9E95, 0x687E,0x9E96, 0x6901,0x9E97, 0x68CA,0x9E98, 0x6908,0x9E99, 0x68D8,0x9E9A, 0x6922,0x9E9B, 0x6926,0x9E9C, 0x68E1,0x9E9D, 0x690C,0x9E9E, 0x68CD,0x9E9F, 0x68D4,0x9EA0, 0x68E7,0x9EA1, 0x68D5,0x9EA2, 0x6936,0x9EA3, 0x6912,0x9EA4, 0x6904,0x9EA5, 0x68D7,0x9EA6, 0x68E3,0x9EA7, 0x6925,0x9EA8, 0x68F9,0x9EA9, 0x68E0,0x9EAA, 0x68EF,0x9EAB, 0x6928,0x9EAC, 0x692A,0x9EAD, 0x691A,0x9EAE, 0x6923,0x9EAF, 0x6921,0x9EB0, 0x68C6,0x9EB1, 0x6979,0x9EB2, 0x6977,0x9EB3, 0x695C,0x9EB4, 0x6978,0x9EB5, 0x696B,0x9EB6, 0x6954,0x9EB7, 0x697E,0x9EB8, 0x696E,0x9EB9, 0x6939,0x9EBA, 0x6974,0x9EBB, 0x693D,0x9EBC, 0x6959,0x9EBD, 0x6930,0x9EBE, 0x6961,0x9EBF, 0x695E,0x9EC0, 0x695D,0x9EC1, 0x6981,0x9EC2, 0x696A,0x9EC3, 0x69B2,0x9EC4, 0x69AE,0x9EC5, 0x69D0,0x9EC6, 0x69BF,0x9EC7, 0x69C1,0x9EC8, 0x69D3,0x9EC9, 0x69BE,0x9ECA, 0x69CE,0x9ECB, 0x5BE8,0x9ECC, 0x69CA,0x9ECD, 0x69DD,0x9ECE, 0x69BB,0x9ECF, 0x69C3,0x9ED0, 0x69A7,0x9ED1, 0x6A2E,0x9ED2, 0x6991,0x9ED3, 0x69A0,0x9ED4, 0x699C,0x9ED5, 0x6995,0x9ED6, 0x69B4,0x9ED7, 0x69DE,0x9ED8, 0x69E8,0x9ED9, 0x6A02,0x9EDA, 0x6A1B,0x9EDB, 0x69FF,0x9EDC, 0x6B0A,0x9EDD, 0x69F9,0x9EDE, 0x69F2,0x9EDF, 0x69E7,0x9EE0, 0x6A05,0x9EE1, 0x69B1,0x9EE2, 0x6A1E,0x9EE3, 0x69ED,0x9EE4, 0x6A14,0x9EE5, 0x69EB,0x9EE6, 0x6A0A,0x9EE7, 0x6A12,0x9EE8, 0x6AC1,0x9EE9, 0x6A23,0x9EEA, 0x6A13,0x9EEB, 0x6A44,0x9EEC, 0x6A0C,0x9EED, 0x6A72,0x9EEE, 0x6A36,0x9EEF, 0x6A78,0x9EF0, 0x6A47,0x9EF1, 0x6A62,0x9EF2, 0x6A59,0x9EF3, 0x6A66,0x9EF4, 0x6A48,0x9EF5, 0x6A38,0x9EF6, 0x6A22,0x9EF7, 0x6A90,0x9EF8, 0x6A8D,0x9EF9, 0x6AA0,0x9EFA, 0x6A84,0x9EFB, 0x6AA2,0x9EFC, 0x6AA3,0x9F40, 0x6A97,0x9F41, 0x8617,0x9F42, 0x6ABB,0x9F43, 0x6AC3,0x9F44, 0x6AC2,0x9F45, 0x6AB8,0x9F46, 0x6AB3,0x9F47, 0x6AAC,0x9F48, 0x6ADE,0x9F49, 0x6AD1,0x9F4A, 0x6ADF,0x9F4B, 0x6AAA,0x9F4C, 0x6ADA,0x9F4D, 0x6AEA,0x9F4E, 0x6AFB,0x9F4F, 0x6B05,0x9F50, 0x8616,0x9F51, 0x6AFA,0x9F52, 0x6B12,0x9F53, 0x6B16,0x9F54, 0x9B31,0x9F55, 0x6B1F,0x9F56, 0x6B38,0x9F57, 0x6B37,0x9F58, 0x76DC,0x9F59, 0x6B39,0x9F5A, 0x98EE,0x9F5B, 0x6B47,0x9F5C, 0x6B43,0x9F5D, 0x6B49,0x9F5E, 0x6B50,0x9F5F, 0x6B59,0x9F60, 0x6B54,0x9F61, 0x6B5B,0x9F62, 0x6B5F,0x9F63, 0x6B61,0x9F64, 0x6B78,0x9F65, 0x6B79,0x9F66, 0x6B7F,0x9F67, 0x6B80,0x9F68, 0x6B84,0x9F69, 0x6B83,0x9F6A, 0x6B8D,0x9F6B, 0x6B98,0x9F6C, 0x6B95,0x9F6D, 0x6B9E,0x9F6E, 0x6BA4,0x9F6F, 0x6BAA,0x9F70, 
0x6BAB,0x9F71, 0x6BAF,0x9F72, 0x6BB2,0x9F73, 0x6BB1,0x9F74, 0x6BB3,0x9F75, 0x6BB7,0x9F76, 0x6BBC,0x9F77, 0x6BC6,0x9F78, 0x6BCB,0x9F79, 0x6BD3,0x9F7A, 0x6BDF,0x9F7B, 0x6BEC,0x9F7C, 0x6BEB,0x9F7D, 0x6BF3,0x9F7E, 0x6BEF,0x9F80, 0x9EBE,0x9F81, 0x6C08,0x9F82, 0x6C13,0x9F83, 0x6C14,0x9F84, 0x6C1B,0x9F85, 0x6C24,0x9F86, 0x6C23,0x9F87, 0x6C5E,0x9F88, 0x6C55,0x9F89, 0x6C62,0x9F8A, 0x6C6A,0x9F8B, 0x6C82,0x9F8C, 0x6C8D,0x9F8D, 0x6C9A,0x9F8E, 0x6C81,0x9F8F, 0x6C9B,0x9F90, 0x6C7E,0x9F91, 0x6C68,0x9F92, 0x6C73,0x9F93, 0x6C92,0x9F94, 0x6C90,0x9F95, 0x6CC4,0x9F96, 0x6CF1,0x9F97, 0x6CD3,0x9F98, 0x6CBD,0x9F99, 0x6CD7,0x9F9A, 0x6CC5,0x9F9B, 0x6CDD,0x9F9C, 0x6CAE,0x9F9D, 0x6CB1,0x9F9E, 0x6CBE,0x9F9F, 0x6CBA,0x9FA0, 0x6CDB,0x9FA1, 0x6CEF,0x9FA2, 0x6CD9,0x9FA3, 0x6CEA,0x9FA4, 0x6D1F,0x9FA5, 0x884D,0x9FA6, 0x6D36,0x9FA7, 0x6D2B,0x9FA8, 0x6D3D,0x9FA9, 0x6D38,0x9FAA, 0x6D19,0x9FAB, 0x6D35,0x9FAC, 0x6D33,0x9FAD, 0x6D12,0x9FAE, 0x6D0C,0x9FAF, 0x6D63,0x9FB0, 0x6D93,0x9FB1, 0x6D64,0x9FB2, 0x6D5A,0x9FB3, 0x6D79,0x9FB4, 0x6D59,0x9FB5, 0x6D8E,0x9FB6, 0x6D95,0x9FB7, 0x6FE4,0x9FB8, 0x6D85,0x9FB9, 0x6DF9,0x9FBA, 0x6E15,0x9FBB, 0x6E0A,0x9FBC, 0x6DB5,0x9FBD, 0x6DC7,0x9FBE, 0x6DE6,0x9FBF, 0x6DB8,0x9FC0, 0x6DC6,0x9FC1, 0x6DEC,0x9FC2, 0x6DDE,0x9FC3, 0x6DCC,0x9FC4, 0x6DE8,0x9FC5, 0x6DD2,0x9FC6, 0x6DC5,0x9FC7, 0x6DFA,0x9FC8, 0x6DD9,0x9FC9, 0x6DE4,0x9FCA, 0x6DD5,0x9FCB, 0x6DEA,0x9FCC, 0x6DEE,0x9FCD, 0x6E2D,0x9FCE, 0x6E6E,0x9FCF, 0x6E2E,0x9FD0, 0x6E19,0x9FD1, 0x6E72,0x9FD2, 0x6E5F,0x9FD3, 0x6E3E,0x9FD4, 0x6E23,0x9FD5, 0x6E6B,0x9FD6, 0x6E2B,0x9FD7, 0x6E76,0x9FD8, 0x6E4D,0x9FD9, 0x6E1F,0x9FDA, 0x6E43,0x9FDB, 0x6E3A,0x9FDC, 0x6E4E,0x9FDD, 0x6E24,0x9FDE, 0x6EFF,0x9FDF, 0x6E1D,0x9FE0, 0x6E38,0x9FE1, 0x6E82,0x9FE2, 0x6EAA,0x9FE3, 0x6E98,0x9FE4, 0x6EC9,0x9FE5, 0x6EB7,0x9FE6, 0x6ED3,0x9FE7, 0x6EBD,0x9FE8, 0x6EAF,0x9FE9, 0x6EC4,0x9FEA, 0x6EB2,0x9FEB, 0x6ED4,0x9FEC, 0x6ED5,0x9FED, 0x6E8F,0x9FEE, 0x6EA5,0x9FEF, 0x6EC2,0x9FF0, 0x6E9F,0x9FF1, 0x6F41,0x9FF2, 0x6F11,0x9FF3, 0x704C,0x9FF4, 0x6EEC,0x9FF5, 0x6EF8,0x9FF6, 0x6EFE,0x9FF7, 0x6F3F,0x9FF8, 0x6EF2,0x9FF9, 0x6F31,0x9FFA, 0x6EEF,0x9FFB, 0x6F32,0x9FFC, 0x6ECC,0xE040, 0x6F3E,0xE041, 0x6F13,0xE042, 0x6EF7,0xE043, 0x6F86,0xE044, 0x6F7A,0xE045, 0x6F78,0xE046, 0x6F81,0xE047, 0x6F80,0xE048, 0x6F6F,0xE049, 0x6F5B,0xE04A, 0x6FF3,0xE04B, 0x6F6D,0xE04C, 0x6F82,0xE04D, 0x6F7C,0xE04E, 0x6F58,0xE04F, 0x6F8E,0xE050, 0x6F91,0xE051, 0x6FC2,0xE052, 0x6F66,0xE053, 0x6FB3,0xE054, 0x6FA3,0xE055, 0x6FA1,0xE056, 0x6FA4,0xE057, 0x6FB9,0xE058, 0x6FC6,0xE059, 0x6FAA,0xE05A, 0x6FDF,0xE05B, 0x6FD5,0xE05C, 0x6FEC,0xE05D, 0x6FD4,0xE05E, 0x6FD8,0xE05F, 0x6FF1,0xE060, 0x6FEE,0xE061, 0x6FDB,0xE062, 0x7009,0xE063, 0x700B,0xE064, 0x6FFA,0xE065, 0x7011,0xE066, 0x7001,0xE067, 0x700F,0xE068, 0x6FFE,0xE069, 0x701B,0xE06A, 0x701A,0xE06B, 0x6F74,0xE06C, 0x701D,0xE06D, 0x7018,0xE06E, 0x701F,0xE06F, 0x7030,0xE070, 0x703E,0xE071, 0x7032,0xE072, 0x7051,0xE073, 0x7063,0xE074, 0x7099,0xE075, 0x7092,0xE076, 0x70AF,0xE077, 0x70F1,0xE078, 0x70AC,0xE079, 0x70B8,0xE07A, 0x70B3,0xE07B, 0x70AE,0xE07C, 0x70DF,0xE07D, 0x70CB,0xE07E, 0x70DD,0xE080, 0x70D9,0xE081, 0x7109,0xE082, 0x70FD,0xE083, 0x711C,0xE084, 0x7119,0xE085, 0x7165,0xE086, 0x7155,0xE087, 0x7188,0xE088, 0x7166,0xE089, 0x7162,0xE08A, 0x714C,0xE08B, 0x7156,0xE08C, 0x716C,0xE08D, 0x718F,0xE08E, 0x71FB,0xE08F, 0x7184,0xE090, 0x7195,0xE091, 0x71A8,0xE092, 0x71AC,0xE093, 0x71D7,0xE094, 0x71B9,0xE095, 0x71BE,0xE096, 0x71D2,0xE097, 0x71C9,0xE098, 0x71D4,0xE099, 0x71CE,0xE09A, 0x71E0,0xE09B, 0x71EC,0xE09C, 0x71E7,0xE09D, 0x71F5,0xE09E, 0x71FC,0xE09F, 0x71F9,0xE0A0, 0x71FF,0xE0A1, 0x720D,0xE0A2, 
0x7210,0xE0A3, 0x721B,0xE0A4, 0x7228,0xE0A5, 0x722D,0xE0A6, 0x722C,0xE0A7, 0x7230,0xE0A8, 0x7232,0xE0A9, 0x723B,0xE0AA, 0x723C,0xE0AB, 0x723F,0xE0AC, 0x7240,0xE0AD, 0x7246,0xE0AE, 0x724B,0xE0AF, 0x7258,0xE0B0, 0x7274,0xE0B1, 0x727E,0xE0B2, 0x7282,0xE0B3, 0x7281,0xE0B4, 0x7287,0xE0B5, 0x7292,0xE0B6, 0x7296,0xE0B7, 0x72A2,0xE0B8, 0x72A7,0xE0B9, 0x72B9,0xE0BA, 0x72B2,0xE0BB, 0x72C3,0xE0BC, 0x72C6,0xE0BD, 0x72C4,0xE0BE, 0x72CE,0xE0BF, 0x72D2,0xE0C0, 0x72E2,0xE0C1, 0x72E0,0xE0C2, 0x72E1,0xE0C3, 0x72F9,0xE0C4, 0x72F7,0xE0C5, 0x500F,0xE0C6, 0x7317,0xE0C7, 0x730A,0xE0C8, 0x731C,0xE0C9, 0x7316,0xE0CA, 0x731D,0xE0CB, 0x7334,0xE0CC, 0x732F,0xE0CD, 0x7329,0xE0CE, 0x7325,0xE0CF, 0x733E,0xE0D0, 0x734E,0xE0D1, 0x734F,0xE0D2, 0x9ED8,0xE0D3, 0x7357,0xE0D4, 0x736A,0xE0D5, 0x7368,0xE0D6, 0x7370,0xE0D7, 0x7378,0xE0D8, 0x7375,0xE0D9, 0x737B,0xE0DA, 0x737A,0xE0DB, 0x73C8,0xE0DC, 0x73B3,0xE0DD, 0x73CE,0xE0DE, 0x73BB,0xE0DF, 0x73C0,0xE0E0, 0x73E5,0xE0E1, 0x73EE,0xE0E2, 0x73DE,0xE0E3, 0x74A2,0xE0E4, 0x7405,0xE0E5, 0x746F,0xE0E6, 0x7425,0xE0E7, 0x73F8,0xE0E8, 0x7432,0xE0E9, 0x743A,0xE0EA, 0x7455,0xE0EB, 0x743F,0xE0EC, 0x745F,0xE0ED, 0x7459,0xE0EE, 0x7441,0xE0EF, 0x745C,0xE0F0, 0x7469,0xE0F1, 0x7470,0xE0F2, 0x7463,0xE0F3, 0x746A,0xE0F4, 0x7476,0xE0F5, 0x747E,0xE0F6, 0x748B,0xE0F7, 0x749E,0xE0F8, 0x74A7,0xE0F9, 0x74CA,0xE0FA, 0x74CF,0xE0FB, 0x74D4,0xE0FC, 0x73F1,0xE140, 0x74E0,0xE141, 0x74E3,0xE142, 0x74E7,0xE143, 0x74E9,0xE144, 0x74EE,0xE145, 0x74F2,0xE146, 0x74F0,0xE147, 0x74F1,0xE148, 0x74F8,0xE149, 0x74F7,0xE14A, 0x7504,0xE14B, 0x7503,0xE14C, 0x7505,0xE14D, 0x750C,0xE14E, 0x750E,0xE14F, 0x750D,0xE150, 0x7515,0xE151, 0x7513,0xE152, 0x751E,0xE153, 0x7526,0xE154, 0x752C,0xE155, 0x753C,0xE156, 0x7544,0xE157, 0x754D,0xE158, 0x754A,0xE159, 0x7549,0xE15A, 0x755B,0xE15B, 0x7546,0xE15C, 0x755A,0xE15D, 0x7569,0xE15E, 0x7564,0xE15F, 0x7567,0xE160, 0x756B,0xE161, 0x756D,0xE162, 0x7578,0xE163, 0x7576,0xE164, 0x7586,0xE165, 0x7587,0xE166, 0x7574,0xE167, 0x758A,0xE168, 0x7589,0xE169, 0x7582,0xE16A, 0x7594,0xE16B, 0x759A,0xE16C, 0x759D,0xE16D, 0x75A5,0xE16E, 0x75A3,0xE16F, 0x75C2,0xE170, 0x75B3,0xE171, 0x75C3,0xE172, 0x75B5,0xE173, 0x75BD,0xE174, 0x75B8,0xE175, 0x75BC,0xE176, 0x75B1,0xE177, 0x75CD,0xE178, 0x75CA,0xE179, 0x75D2,0xE17A, 0x75D9,0xE17B, 0x75E3,0xE17C, 0x75DE,0xE17D, 0x75FE,0xE17E, 0x75FF,0xE180, 0x75FC,0xE181, 0x7601,0xE182, 0x75F0,0xE183, 0x75FA,0xE184, 0x75F2,0xE185, 0x75F3,0xE186, 0x760B,0xE187, 0x760D,0xE188, 0x7609,0xE189, 0x761F,0xE18A, 0x7627,0xE18B, 0x7620,0xE18C, 0x7621,0xE18D, 0x7622,0xE18E, 0x7624,0xE18F, 0x7634,0xE190, 0x7630,0xE191, 0x763B,0xE192, 0x7647,0xE193, 0x7648,0xE194, 0x7646,0xE195, 0x765C,0xE196, 0x7658,0xE197, 0x7661,0xE198, 0x7662,0xE199, 0x7668,0xE19A, 0x7669,0xE19B, 0x766A,0xE19C, 0x7667,0xE19D, 0x766C,0xE19E, 0x7670,0xE19F, 0x7672,0xE1A0, 0x7676,0xE1A1, 0x7678,0xE1A2, 0x767C,0xE1A3, 0x7680,0xE1A4, 0x7683,0xE1A5, 0x7688,0xE1A6, 0x768B,0xE1A7, 0x768E,0xE1A8, 0x7696,0xE1A9, 0x7693,0xE1AA, 0x7699,0xE1AB, 0x769A,0xE1AC, 0x76B0,0xE1AD, 0x76B4,0xE1AE, 0x76B8,0xE1AF, 0x76B9,0xE1B0, 0x76BA,0xE1B1, 0x76C2,0xE1B2, 0x76CD,0xE1B3, 0x76D6,0xE1B4, 0x76D2,0xE1B5, 0x76DE,0xE1B6, 0x76E1,0xE1B7, 0x76E5,0xE1B8, 0x76E7,0xE1B9, 0x76EA,0xE1BA, 0x862F,0xE1BB, 0x76FB,0xE1BC, 0x7708,0xE1BD, 0x7707,0xE1BE, 0x7704,0xE1BF, 0x7729,0xE1C0, 0x7724,0xE1C1, 0x771E,0xE1C2, 0x7725,0xE1C3, 0x7726,0xE1C4, 0x771B,0xE1C5, 0x7737,0xE1C6, 0x7738,0xE1C7, 0x7747,0xE1C8, 0x775A,0xE1C9, 0x7768,0xE1CA, 0x776B,0xE1CB, 0x775B,0xE1CC, 0x7765,0xE1CD, 0x777F,0xE1CE, 0x777E,0xE1CF, 0x7779,0xE1D0, 0x778E,0xE1D1, 0x778B,0xE1D2, 0x7791,0xE1D3, 
0x77A0,0xE1D4, 0x779E,0xE1D5, 0x77B0,0xE1D6, 0x77B6,0xE1D7, 0x77B9,0xE1D8, 0x77BF,0xE1D9, 0x77BC,0xE1DA, 0x77BD,0xE1DB, 0x77BB,0xE1DC, 0x77C7,0xE1DD, 0x77CD,0xE1DE, 0x77D7,0xE1DF, 0x77DA,0xE1E0, 0x77DC,0xE1E1, 0x77E3,0xE1E2, 0x77EE,0xE1E3, 0x77FC,0xE1E4, 0x780C,0xE1E5, 0x7812,0xE1E6, 0x7926,0xE1E7, 0x7820,0xE1E8, 0x792A,0xE1E9, 0x7845,0xE1EA, 0x788E,0xE1EB, 0x7874,0xE1EC, 0x7886,0xE1ED, 0x787C,0xE1EE, 0x789A,0xE1EF, 0x788C,0xE1F0, 0x78A3,0xE1F1, 0x78B5,0xE1F2, 0x78AA,0xE1F3, 0x78AF,0xE1F4, 0x78D1,0xE1F5, 0x78C6,0xE1F6, 0x78CB,0xE1F7, 0x78D4,0xE1F8, 0x78BE,0xE1F9, 0x78BC,0xE1FA, 0x78C5,0xE1FB, 0x78CA,0xE1FC, 0x78EC,0xE240, 0x78E7,0xE241, 0x78DA,0xE242, 0x78FD,0xE243, 0x78F4,0xE244, 0x7907,0xE245, 0x7912,0xE246, 0x7911,0xE247, 0x7919,0xE248, 0x792C,0xE249, 0x792B,0xE24A, 0x7940,0xE24B, 0x7960,0xE24C, 0x7957,0xE24D, 0x795F,0xE24E, 0x795A,0xE24F, 0x7955,0xE250, 0x7953,0xE251, 0x797A,0xE252, 0x797F,0xE253, 0x798A,0xE254, 0x799D,0xE255, 0x79A7,0xE256, 0x9F4B,0xE257, 0x79AA,0xE258, 0x79AE,0xE259, 0x79B3,0xE25A, 0x79B9,0xE25B, 0x79BA,0xE25C, 0x79C9,0xE25D, 0x79D5,0xE25E, 0x79E7,0xE25F, 0x79EC,0xE260, 0x79E1,0xE261, 0x79E3,0xE262, 0x7A08,0xE263, 0x7A0D,0xE264, 0x7A18,0xE265, 0x7A19,0xE266, 0x7A20,0xE267, 0x7A1F,0xE268, 0x7980,0xE269, 0x7A31,0xE26A, 0x7A3B,0xE26B, 0x7A3E,0xE26C, 0x7A37,0xE26D, 0x7A43,0xE26E, 0x7A57,0xE26F, 0x7A49,0xE270, 0x7A61,0xE271, 0x7A62,0xE272, 0x7A69,0xE273, 0x9F9D,0xE274, 0x7A70,0xE275, 0x7A79,0xE276, 0x7A7D,0xE277, 0x7A88,0xE278, 0x7A97,0xE279, 0x7A95,0xE27A, 0x7A98,0xE27B, 0x7A96,0xE27C, 0x7AA9,0xE27D, 0x7AC8,0xE27E, 0x7AB0,0xE280, 0x7AB6,0xE281, 0x7AC5,0xE282, 0x7AC4,0xE283, 0x7ABF,0xE284, 0x9083,0xE285, 0x7AC7,0xE286, 0x7ACA,0xE287, 0x7ACD,0xE288, 0x7ACF,0xE289, 0x7AD5,0xE28A, 0x7AD3,0xE28B, 0x7AD9,0xE28C, 0x7ADA,0xE28D, 0x7ADD,0xE28E, 0x7AE1,0xE28F, 0x7AE2,0xE290, 0x7AE6,0xE291, 0x7AED,0xE292, 0x7AF0,0xE293, 0x7B02,0xE294, 0x7B0F,0xE295, 0x7B0A,0xE296, 0x7B06,0xE297, 0x7B33,0xE298, 0x7B18,0xE299, 0x7B19,0xE29A, 0x7B1E,0xE29B, 0x7B35,0xE29C, 0x7B28,0xE29D, 0x7B36,0xE29E, 0x7B50,0xE29F, 0x7B7A,0xE2A0, 0x7B04,0xE2A1, 0x7B4D,0xE2A2, 0x7B0B,0xE2A3, 0x7B4C,0xE2A4, 0x7B45,0xE2A5, 0x7B75,0xE2A6, 0x7B65,0xE2A7, 0x7B74,0xE2A8, 0x7B67,0xE2A9, 0x7B70,0xE2AA, 0x7B71,0xE2AB, 0x7B6C,0xE2AC, 0x7B6E,0xE2AD, 0x7B9D,0xE2AE, 0x7B98,0xE2AF, 0x7B9F,0xE2B0, 0x7B8D,0xE2B1, 0x7B9C,0xE2B2, 0x7B9A,0xE2B3, 0x7B8B,0xE2B4, 0x7B92,0xE2B5, 0x7B8F,0xE2B6, 0x7B5D,0xE2B7, 0x7B99,0xE2B8, 0x7BCB,0xE2B9, 0x7BC1,0xE2BA, 0x7BCC,0xE2BB, 0x7BCF,0xE2BC, 0x7BB4,0xE2BD, 0x7BC6,0xE2BE, 0x7BDD,0xE2BF, 0x7BE9,0xE2C0, 0x7C11,0xE2C1, 0x7C14,0xE2C2, 0x7BE6,0xE2C3, 0x7BE5,0xE2C4, 0x7C60,0xE2C5, 0x7C00,0xE2C6, 0x7C07,0xE2C7, 0x7C13,0xE2C8, 0x7BF3,0xE2C9, 0x7BF7,0xE2CA, 0x7C17,0xE2CB, 0x7C0D,0xE2CC, 0x7BF6,0xE2CD, 0x7C23,0xE2CE, 0x7C27,0xE2CF, 0x7C2A,0xE2D0, 0x7C1F,0xE2D1, 0x7C37,0xE2D2, 0x7C2B,0xE2D3, 0x7C3D,0xE2D4, 0x7C4C,0xE2D5, 0x7C43,0xE2D6, 0x7C54,0xE2D7, 0x7C4F,0xE2D8, 0x7C40,0xE2D9, 0x7C50,0xE2DA, 0x7C58,0xE2DB, 0x7C5F,0xE2DC, 0x7C64,0xE2DD, 0x7C56,0xE2DE, 0x7C65,0xE2DF, 0x7C6C,0xE2E0, 0x7C75,0xE2E1, 0x7C83,0xE2E2, 0x7C90,0xE2E3, 0x7CA4,0xE2E4, 0x7CAD,0xE2E5, 0x7CA2,0xE2E6, 0x7CAB,0xE2E7, 0x7CA1,0xE2E8, 0x7CA8,0xE2E9, 0x7CB3,0xE2EA, 0x7CB2,0xE2EB, 0x7CB1,0xE2EC, 0x7CAE,0xE2ED, 0x7CB9,0xE2EE, 0x7CBD,0xE2EF, 0x7CC0,0xE2F0, 0x7CC5,0xE2F1, 0x7CC2,0xE2F2, 0x7CD8,0xE2F3, 0x7CD2,0xE2F4, 0x7CDC,0xE2F5, 0x7CE2,0xE2F6, 0x9B3B,0xE2F7, 0x7CEF,0xE2F8, 0x7CF2,0xE2F9, 0x7CF4,0xE2FA, 0x7CF6,0xE2FB, 0x7CFA,0xE2FC, 0x7D06,0xE340, 0x7D02,0xE341, 0x7D1C,0xE342, 0x7D15,0xE343, 0x7D0A,0xE344, 0x7D45,0xE345, 0x7D4B,0xE346, 0x7D2E,0xE347, 
0x7D32,0xE348, 0x7D3F,0xE349, 0x7D35,0xE34A, 0x7D46,0xE34B, 0x7D73,0xE34C, 0x7D56,0xE34D, 0x7D4E,0xE34E, 0x7D72,0xE34F, 0x7D68,0xE350, 0x7D6E,0xE351, 0x7D4F,0xE352, 0x7D63,0xE353, 0x7D93,0xE354, 0x7D89,0xE355, 0x7D5B,0xE356, 0x7D8F,0xE357, 0x7D7D,0xE358, 0x7D9B,0xE359, 0x7DBA,0xE35A, 0x7DAE,0xE35B, 0x7DA3,0xE35C, 0x7DB5,0xE35D, 0x7DC7,0xE35E, 0x7DBD,0xE35F, 0x7DAB,0xE360, 0x7E3D,0xE361, 0x7DA2,0xE362, 0x7DAF,0xE363, 0x7DDC,0xE364, 0x7DB8,0xE365, 0x7D9F,0xE366, 0x7DB0,0xE367, 0x7DD8,0xE368, 0x7DDD,0xE369, 0x7DE4,0xE36A, 0x7DDE,0xE36B, 0x7DFB,0xE36C, 0x7DF2,0xE36D, 0x7DE1,0xE36E, 0x7E05,0xE36F, 0x7E0A,0xE370, 0x7E23,0xE371, 0x7E21,0xE372, 0x7E12,0xE373, 0x7E31,0xE374, 0x7E1F,0xE375, 0x7E09,0xE376, 0x7E0B,0xE377, 0x7E22,0xE378, 0x7E46,0xE379, 0x7E66,0xE37A, 0x7E3B,0xE37B, 0x7E35,0xE37C, 0x7E39,0xE37D, 0x7E43,0xE37E, 0x7E37,0xE380, 0x7E32,0xE381, 0x7E3A,0xE382, 0x7E67,0xE383, 0x7E5D,0xE384, 0x7E56,0xE385, 0x7E5E,0xE386, 0x7E59,0xE387, 0x7E5A,0xE388, 0x7E79,0xE389, 0x7E6A,0xE38A, 0x7E69,0xE38B, 0x7E7C,0xE38C, 0x7E7B,0xE38D, 0x7E83,0xE38E, 0x7DD5,0xE38F, 0x7E7D,0xE390, 0x8FAE,0xE391, 0x7E7F,0xE392, 0x7E88,0xE393, 0x7E89,0xE394, 0x7E8C,0xE395, 0x7E92,0xE396, 0x7E90,0xE397, 0x7E93,0xE398, 0x7E94,0xE399, 0x7E96,0xE39A, 0x7E8E,0xE39B, 0x7E9B,0xE39C, 0x7E9C,0xE39D, 0x7F38,0xE39E, 0x7F3A,0xE39F, 0x7F45,0xE3A0, 0x7F4C,0xE3A1, 0x7F4D,0xE3A2, 0x7F4E,0xE3A3, 0x7F50,0xE3A4, 0x7F51,0xE3A5, 0x7F55,0xE3A6, 0x7F54,0xE3A7, 0x7F58,0xE3A8, 0x7F5F,0xE3A9, 0x7F60,0xE3AA, 0x7F68,0xE3AB, 0x7F69,0xE3AC, 0x7F67,0xE3AD, 0x7F78,0xE3AE, 0x7F82,0xE3AF, 0x7F86,0xE3B0, 0x7F83,0xE3B1, 0x7F88,0xE3B2, 0x7F87,0xE3B3, 0x7F8C,0xE3B4, 0x7F94,0xE3B5, 0x7F9E,0xE3B6, 0x7F9D,0xE3B7, 0x7F9A,0xE3B8, 0x7FA3,0xE3B9, 0x7FAF,0xE3BA, 0x7FB2,0xE3BB, 0x7FB9,0xE3BC, 0x7FAE,0xE3BD, 0x7FB6,0xE3BE, 0x7FB8,0xE3BF, 0x8B71,0xE3C0, 0x7FC5,0xE3C1, 0x7FC6,0xE3C2, 0x7FCA,0xE3C3, 0x7FD5,0xE3C4, 0x7FD4,0xE3C5, 0x7FE1,0xE3C6, 0x7FE6,0xE3C7, 0x7FE9,0xE3C8, 0x7FF3,0xE3C9, 0x7FF9,0xE3CA, 0x98DC,0xE3CB, 0x8006,0xE3CC, 0x8004,0xE3CD, 0x800B,0xE3CE, 0x8012,0xE3CF, 0x8018,0xE3D0, 0x8019,0xE3D1, 0x801C,0xE3D2, 0x8021,0xE3D3, 0x8028,0xE3D4, 0x803F,0xE3D5, 0x803B,0xE3D6, 0x804A,0xE3D7, 0x8046,0xE3D8, 0x8052,0xE3D9, 0x8058,0xE3DA, 0x805A,0xE3DB, 0x805F,0xE3DC, 0x8062,0xE3DD, 0x8068,0xE3DE, 0x8073,0xE3DF, 0x8072,0xE3E0, 0x8070,0xE3E1, 0x8076,0xE3E2, 0x8079,0xE3E3, 0x807D,0xE3E4, 0x807F,0xE3E5, 0x8084,0xE3E6, 0x8086,0xE3E7, 0x8085,0xE3E8, 0x809B,0xE3E9, 0x8093,0xE3EA, 0x809A,0xE3EB, 0x80AD,0xE3EC, 0x5190,0xE3ED, 0x80AC,0xE3EE, 0x80DB,0xE3EF, 0x80E5,0xE3F0, 0x80D9,0xE3F1, 0x80DD,0xE3F2, 0x80C4,0xE3F3, 0x80DA,0xE3F4, 0x80D6,0xE3F5, 0x8109,0xE3F6, 0x80EF,0xE3F7, 0x80F1,0xE3F8, 0x811B,0xE3F9, 0x8129,0xE3FA, 0x8123,0xE3FB, 0x812F,0xE3FC, 0x814B,0xE440, 0x968B,0xE441, 0x8146,0xE442, 0x813E,0xE443, 0x8153,0xE444, 0x8151,0xE445, 0x80FC,0xE446, 0x8171,0xE447, 0x816E,0xE448, 0x8165,0xE449, 0x8166,0xE44A, 0x8174,0xE44B, 0x8183,0xE44C, 0x8188,0xE44D, 0x818A,0xE44E, 0x8180,0xE44F, 0x8182,0xE450, 0x81A0,0xE451, 0x8195,0xE452, 0x81A4,0xE453, 0x81A3,0xE454, 0x815F,0xE455, 0x8193,0xE456, 0x81A9,0xE457, 0x81B0,0xE458, 0x81B5,0xE459, 0x81BE,0xE45A, 0x81B8,0xE45B, 0x81BD,0xE45C, 0x81C0,0xE45D, 0x81C2,0xE45E, 0x81BA,0xE45F, 0x81C9,0xE460, 0x81CD,0xE461, 0x81D1,0xE462, 0x81D9,0xE463, 0x81D8,0xE464, 0x81C8,0xE465, 0x81DA,0xE466, 0x81DF,0xE467, 0x81E0,0xE468, 0x81E7,0xE469, 0x81FA,0xE46A, 0x81FB,0xE46B, 0x81FE,0xE46C, 0x8201,0xE46D, 0x8202,0xE46E, 0x8205,0xE46F, 0x8207,0xE470, 0x820A,0xE471, 0x820D,0xE472, 0x8210,0xE473, 0x8216,0xE474, 0x8229,0xE475, 0x822B,0xE476, 0x8238,0xE477, 0x8233,0xE478, 
0x8240,0xE479, 0x8259,0xE47A, 0x8258,0xE47B, 0x825D,0xE47C, 0x825A,0xE47D, 0x825F,0xE47E, 0x8264,0xE480, 0x8262,0xE481, 0x8268,0xE482, 0x826A,0xE483, 0x826B,0xE484, 0x822E,0xE485, 0x8271,0xE486, 0x8277,0xE487, 0x8278,0xE488, 0x827E,0xE489, 0x828D,0xE48A, 0x8292,0xE48B, 0x82AB,0xE48C, 0x829F,0xE48D, 0x82BB,0xE48E, 0x82AC,0xE48F, 0x82E1,0xE490, 0x82E3,0xE491, 0x82DF,0xE492, 0x82D2,0xE493, 0x82F4,0xE494, 0x82F3,0xE495, 0x82FA,0xE496, 0x8393,0xE497, 0x8303,0xE498, 0x82FB,0xE499, 0x82F9,0xE49A, 0x82DE,0xE49B, 0x8306,0xE49C, 0x82DC,0xE49D, 0x8309,0xE49E, 0x82D9,0xE49F, 0x8335,0xE4A0, 0x8334,0xE4A1, 0x8316,0xE4A2, 0x8332,0xE4A3, 0x8331,0xE4A4, 0x8340,0xE4A5, 0x8339,0xE4A6, 0x8350,0xE4A7, 0x8345,0xE4A8, 0x832F,0xE4A9, 0x832B,0xE4AA, 0x8317,0xE4AB, 0x8318,0xE4AC, 0x8385,0xE4AD, 0x839A,0xE4AE, 0x83AA,0xE4AF, 0x839F,0xE4B0, 0x83A2,0xE4B1, 0x8396,0xE4B2, 0x8323,0xE4B3, 0x838E,0xE4B4, 0x8387,0xE4B5, 0x838A,0xE4B6, 0x837C,0xE4B7, 0x83B5,0xE4B8, 0x8373,0xE4B9, 0x8375,0xE4BA, 0x83A0,0xE4BB, 0x8389,0xE4BC, 0x83A8,0xE4BD, 0x83F4,0xE4BE, 0x8413,0xE4BF, 0x83EB,0xE4C0, 0x83CE,0xE4C1, 0x83FD,0xE4C2, 0x8403,0xE4C3, 0x83D8,0xE4C4, 0x840B,0xE4C5, 0x83C1,0xE4C6, 0x83F7,0xE4C7, 0x8407,0xE4C8, 0x83E0,0xE4C9, 0x83F2,0xE4CA, 0x840D,0xE4CB, 0x8422,0xE4CC, 0x8420,0xE4CD, 0x83BD,0xE4CE, 0x8438,0xE4CF, 0x8506,0xE4D0, 0x83FB,0xE4D1, 0x846D,0xE4D2, 0x842A,0xE4D3, 0x843C,0xE4D4, 0x855A,0xE4D5, 0x8484,0xE4D6, 0x8477,0xE4D7, 0x846B,0xE4D8, 0x84AD,0xE4D9, 0x846E,0xE4DA, 0x8482,0xE4DB, 0x8469,0xE4DC, 0x8446,0xE4DD, 0x842C,0xE4DE, 0x846F,0xE4DF, 0x8479,0xE4E0, 0x8435,0xE4E1, 0x84CA,0xE4E2, 0x8462,0xE4E3, 0x84B9,0xE4E4, 0x84BF,0xE4E5, 0x849F,0xE4E6, 0x84D9,0xE4E7, 0x84CD,0xE4E8, 0x84BB,0xE4E9, 0x84DA,0xE4EA, 0x84D0,0xE4EB, 0x84C1,0xE4EC, 0x84C6,0xE4ED, 0x84D6,0xE4EE, 0x84A1,0xE4EF, 0x8521,0xE4F0, 0x84FF,0xE4F1, 0x84F4,0xE4F2, 0x8517,0xE4F3, 0x8518,0xE4F4, 0x852C,0xE4F5, 0x851F,0xE4F6, 0x8515,0xE4F7, 0x8514,0xE4F8, 0x84FC,0xE4F9, 0x8540,0xE4FA, 0x8563,0xE4FB, 0x8558,0xE4FC, 0x8548,0xE540, 0x8541,0xE541, 0x8602,0xE542, 0x854B,0xE543, 0x8555,0xE544, 0x8580,0xE545, 0x85A4,0xE546, 0x8588,0xE547, 0x8591,0xE548, 0x858A,0xE549, 0x85A8,0xE54A, 0x856D,0xE54B, 0x8594,0xE54C, 0x859B,0xE54D, 0x85EA,0xE54E, 0x8587,0xE54F, 0x859C,0xE550, 0x8577,0xE551, 0x857E,0xE552, 0x8590,0xE553, 0x85C9,0xE554, 0x85BA,0xE555, 0x85CF,0xE556, 0x85B9,0xE557, 0x85D0,0xE558, 0x85D5,0xE559, 0x85DD,0xE55A, 0x85E5,0xE55B, 0x85DC,0xE55C, 0x85F9,0xE55D, 0x860A,0xE55E, 0x8613,0xE55F, 0x860B,0xE560, 0x85FE,0xE561, 0x85FA,0xE562, 0x8606,0xE563, 0x8622,0xE564, 0x861A,0xE565, 0x8630,0xE566, 0x863F,0xE567, 0x864D,0xE568, 0x4E55,0xE569, 0x8654,0xE56A, 0x865F,0xE56B, 0x8667,0xE56C, 0x8671,0xE56D, 0x8693,0xE56E, 0x86A3,0xE56F, 0x86A9,0xE570, 0x86AA,0xE571, 0x868B,0xE572, 0x868C,0xE573, 0x86B6,0xE574, 0x86AF,0xE575, 0x86C4,0xE576, 0x86C6,0xE577, 0x86B0,0xE578, 0x86C9,0xE579, 0x8823,0xE57A, 0x86AB,0xE57B, 0x86D4,0xE57C, 0x86DE,0xE57D, 0x86E9,0xE57E, 0x86EC,0xE580, 0x86DF,0xE581, 0x86DB,0xE582, 0x86EF,0xE583, 0x8712,0xE584, 0x8706,0xE585, 0x8708,0xE586, 0x8700,0xE587, 0x8703,0xE588, 0x86FB,0xE589, 0x8711,0xE58A, 0x8709,0xE58B, 0x870D,0xE58C, 0x86F9,0xE58D, 0x870A,0xE58E, 0x8734,0xE58F, 0x873F,0xE590, 0x8737,0xE591, 0x873B,0xE592, 0x8725,0xE593, 0x8729,0xE594, 0x871A,0xE595, 0x8760,0xE596, 0x875F,0xE597, 0x8778,0xE598, 0x874C,0xE599, 0x874E,0xE59A, 0x8774,0xE59B, 0x8757,0xE59C, 0x8768,0xE59D, 0x876E,0xE59E, 0x8759,0xE59F, 0x8753,0xE5A0, 0x8763,0xE5A1, 0x876A,0xE5A2, 0x8805,0xE5A3, 0x87A2,0xE5A4, 0x879F,0xE5A5, 0x8782,0xE5A6, 0x87AF,0xE5A7, 0x87CB,0xE5A8, 0x87BD,0xE5A9, 0x87C0,0xE5AA, 
0x87D0,0xE5AB, 0x96D6,0xE5AC, 0x87AB,0xE5AD, 0x87C4,0xE5AE, 0x87B3,0xE5AF, 0x87C7,0xE5B0, 0x87C6,0xE5B1, 0x87BB,0xE5B2, 0x87EF,0xE5B3, 0x87F2,0xE5B4, 0x87E0,0xE5B5, 0x880F,0xE5B6, 0x880D,0xE5B7, 0x87FE,0xE5B8, 0x87F6,0xE5B9, 0x87F7,0xE5BA, 0x880E,0xE5BB, 0x87D2,0xE5BC, 0x8811,0xE5BD, 0x8816,0xE5BE, 0x8815,0xE5BF, 0x8822,0xE5C0, 0x8821,0xE5C1, 0x8831,0xE5C2, 0x8836,0xE5C3, 0x8839,0xE5C4, 0x8827,0xE5C5, 0x883B,0xE5C6, 0x8844,0xE5C7, 0x8842,0xE5C8, 0x8852,0xE5C9, 0x8859,0xE5CA, 0x885E,0xE5CB, 0x8862,0xE5CC, 0x886B,0xE5CD, 0x8881,0xE5CE, 0x887E,0xE5CF, 0x889E,0xE5D0, 0x8875,0xE5D1, 0x887D,0xE5D2, 0x88B5,0xE5D3, 0x8872,0xE5D4, 0x8882,0xE5D5, 0x8897,0xE5D6, 0x8892,0xE5D7, 0x88AE,0xE5D8, 0x8899,0xE5D9, 0x88A2,0xE5DA, 0x888D,0xE5DB, 0x88A4,0xE5DC, 0x88B0,0xE5DD, 0x88BF,0xE5DE, 0x88B1,0xE5DF, 0x88C3,0xE5E0, 0x88C4,0xE5E1, 0x88D4,0xE5E2, 0x88D8,0xE5E3, 0x88D9,0xE5E4, 0x88DD,0xE5E5, 0x88F9,0xE5E6, 0x8902,0xE5E7, 0x88FC,0xE5E8, 0x88F4,0xE5E9, 0x88E8,0xE5EA, 0x88F2,0xE5EB, 0x8904,0xE5EC, 0x890C,0xE5ED, 0x890A,0xE5EE, 0x8913,0xE5EF, 0x8943,0xE5F0, 0x891E,0xE5F1, 0x8925,0xE5F2, 0x892A,0xE5F3, 0x892B,0xE5F4, 0x8941,0xE5F5, 0x8944,0xE5F6, 0x893B,0xE5F7, 0x8936,0xE5F8, 0x8938,0xE5F9, 0x894C,0xE5FA, 0x891D,0xE5FB, 0x8960,0xE5FC, 0x895E,0xE640, 0x8966,0xE641, 0x8964,0xE642, 0x896D,0xE643, 0x896A,0xE644, 0x896F,0xE645, 0x8974,0xE646, 0x8977,0xE647, 0x897E,0xE648, 0x8983,0xE649, 0x8988,0xE64A, 0x898A,0xE64B, 0x8993,0xE64C, 0x8998,0xE64D, 0x89A1,0xE64E, 0x89A9,0xE64F, 0x89A6,0xE650, 0x89AC,0xE651, 0x89AF,0xE652, 0x89B2,0xE653, 0x89BA,0xE654, 0x89BD,0xE655, 0x89BF,0xE656, 0x89C0,0xE657, 0x89DA,0xE658, 0x89DC,0xE659, 0x89DD,0xE65A, 0x89E7,0xE65B, 0x89F4,0xE65C, 0x89F8,0xE65D, 0x8A03,0xE65E, 0x8A16,0xE65F, 0x8A10,0xE660, 0x8A0C,0xE661, 0x8A1B,0xE662, 0x8A1D,0xE663, 0x8A25,0xE664, 0x8A36,0xE665, 0x8A41,0xE666, 0x8A5B,0xE667, 0x8A52,0xE668, 0x8A46,0xE669, 0x8A48,0xE66A, 0x8A7C,0xE66B, 0x8A6D,0xE66C, 0x8A6C,0xE66D, 0x8A62,0xE66E, 0x8A85,0xE66F, 0x8A82,0xE670, 0x8A84,0xE671, 0x8AA8,0xE672, 0x8AA1,0xE673, 0x8A91,0xE674, 0x8AA5,0xE675, 0x8AA6,0xE676, 0x8A9A,0xE677, 0x8AA3,0xE678, 0x8AC4,0xE679, 0x8ACD,0xE67A, 0x8AC2,0xE67B, 0x8ADA,0xE67C, 0x8AEB,0xE67D, 0x8AF3,0xE67E, 0x8AE7,0xE680, 0x8AE4,0xE681, 0x8AF1,0xE682, 0x8B14,0xE683, 0x8AE0,0xE684, 0x8AE2,0xE685, 0x8AF7,0xE686, 0x8ADE,0xE687, 0x8ADB,0xE688, 0x8B0C,0xE689, 0x8B07,0xE68A, 0x8B1A,0xE68B, 0x8AE1,0xE68C, 0x8B16,0xE68D, 0x8B10,0xE68E, 0x8B17,0xE68F, 0x8B20,0xE690, 0x8B33,0xE691, 0x97AB,0xE692, 0x8B26,0xE693, 0x8B2B,0xE694, 0x8B3E,0xE695, 0x8B28,0xE696, 0x8B41,0xE697, 0x8B4C,0xE698, 0x8B4F,0xE699, 0x8B4E,0xE69A, 0x8B49,0xE69B, 0x8B56,0xE69C, 0x8B5B,0xE69D, 0x8B5A,0xE69E, 0x8B6B,0xE69F, 0x8B5F,0xE6A0, 0x8B6C,0xE6A1, 0x8B6F,0xE6A2, 0x8B74,0xE6A3, 0x8B7D,0xE6A4, 0x8B80,0xE6A5, 0x8B8C,0xE6A6, 0x8B8E,0xE6A7, 0x8B92,0xE6A8, 0x8B93,0xE6A9, 0x8B96,0xE6AA, 0x8B99,0xE6AB, 0x8B9A,0xE6AC, 0x8C3A,0xE6AD, 0x8C41,0xE6AE, 0x8C3F,0xE6AF, 0x8C48,0xE6B0, 0x8C4C,0xE6B1, 0x8C4E,0xE6B2, 0x8C50,0xE6B3, 0x8C55,0xE6B4, 0x8C62,0xE6B5, 0x8C6C,0xE6B6, 0x8C78,0xE6B7, 0x8C7A,0xE6B8, 0x8C82,0xE6B9, 0x8C89,0xE6BA, 0x8C85,0xE6BB, 0x8C8A,0xE6BC, 0x8C8D,0xE6BD, 0x8C8E,0xE6BE, 0x8C94,0xE6BF, 0x8C7C,0xE6C0, 0x8C98,0xE6C1, 0x621D,0xE6C2, 0x8CAD,0xE6C3, 0x8CAA,0xE6C4, 0x8CBD,0xE6C5, 0x8CB2,0xE6C6, 0x8CB3,0xE6C7, 0x8CAE,0xE6C8, 0x8CB6,0xE6C9, 0x8CC8,0xE6CA, 0x8CC1,0xE6CB, 0x8CE4,0xE6CC, 0x8CE3,0xE6CD, 0x8CDA,0xE6CE, 0x8CFD,0xE6CF, 0x8CFA,0xE6D0, 0x8CFB,0xE6D1, 0x8D04,0xE6D2, 0x8D05,0xE6D3, 0x8D0A,0xE6D4, 0x8D07,0xE6D5, 0x8D0F,0xE6D6, 0x8D0D,0xE6D7, 0x8D10,0xE6D8, 0x9F4E,0xE6D9, 0x8D13,0xE6DA, 0x8CCD,0xE6DB, 
0x8D14,0xE6DC, 0x8D16,0xE6DD, 0x8D67,0xE6DE, 0x8D6D,0xE6DF, 0x8D71,0xE6E0, 0x8D73,0xE6E1, 0x8D81,0xE6E2, 0x8D99,0xE6E3, 0x8DC2,0xE6E4, 0x8DBE,0xE6E5, 0x8DBA,0xE6E6, 0x8DCF,0xE6E7, 0x8DDA,0xE6E8, 0x8DD6,0xE6E9, 0x8DCC,0xE6EA, 0x8DDB,0xE6EB, 0x8DCB,0xE6EC, 0x8DEA,0xE6ED, 0x8DEB,0xE6EE, 0x8DDF,0xE6EF, 0x8DE3,0xE6F0, 0x8DFC,0xE6F1, 0x8E08,0xE6F2, 0x8E09,0xE6F3, 0x8DFF,0xE6F4, 0x8E1D,0xE6F5, 0x8E1E,0xE6F6, 0x8E10,0xE6F7, 0x8E1F,0xE6F8, 0x8E42,0xE6F9, 0x8E35,0xE6FA, 0x8E30,0xE6FB, 0x8E34,0xE6FC, 0x8E4A,0xE740, 0x8E47,0xE741, 0x8E49,0xE742, 0x8E4C,0xE743, 0x8E50,0xE744, 0x8E48,0xE745, 0x8E59,0xE746, 0x8E64,0xE747, 0x8E60,0xE748, 0x8E2A,0xE749, 0x8E63,0xE74A, 0x8E55,0xE74B, 0x8E76,0xE74C, 0x8E72,0xE74D, 0x8E7C,0xE74E, 0x8E81,0xE74F, 0x8E87,0xE750, 0x8E85,0xE751, 0x8E84,0xE752, 0x8E8B,0xE753, 0x8E8A,0xE754, 0x8E93,0xE755, 0x8E91,0xE756, 0x8E94,0xE757, 0x8E99,0xE758, 0x8EAA,0xE759, 0x8EA1,0xE75A, 0x8EAC,0xE75B, 0x8EB0,0xE75C, 0x8EC6,0xE75D, 0x8EB1,0xE75E, 0x8EBE,0xE75F, 0x8EC5,0xE760, 0x8EC8,0xE761, 0x8ECB,0xE762, 0x8EDB,0xE763, 0x8EE3,0xE764, 0x8EFC,0xE765, 0x8EFB,0xE766, 0x8EEB,0xE767, 0x8EFE,0xE768, 0x8F0A,0xE769, 0x8F05,0xE76A, 0x8F15,0xE76B, 0x8F12,0xE76C, 0x8F19,0xE76D, 0x8F13,0xE76E, 0x8F1C,0xE76F, 0x8F1F,0xE770, 0x8F1B,0xE771, 0x8F0C,0xE772, 0x8F26,0xE773, 0x8F33,0xE774, 0x8F3B,0xE775, 0x8F39,0xE776, 0x8F45,0xE777, 0x8F42,0xE778, 0x8F3E,0xE779, 0x8F4C,0xE77A, 0x8F49,0xE77B, 0x8F46,0xE77C, 0x8F4E,0xE77D, 0x8F57,0xE77E, 0x8F5C,0xE780, 0x8F62,0xE781, 0x8F63,0xE782, 0x8F64,0xE783, 0x8F9C,0xE784, 0x8F9F,0xE785, 0x8FA3,0xE786, 0x8FAD,0xE787, 0x8FAF,0xE788, 0x8FB7,0xE789, 0x8FDA,0xE78A, 0x8FE5,0xE78B, 0x8FE2,0xE78C, 0x8FEA,0xE78D, 0x8FEF,0xE78E, 0x9087,0xE78F, 0x8FF4,0xE790, 0x9005,0xE791, 0x8FF9,0xE792, 0x8FFA,0xE793, 0x9011,0xE794, 0x9015,0xE795, 0x9021,0xE796, 0x900D,0xE797, 0x901E,0xE798, 0x9016,0xE799, 0x900B,0xE79A, 0x9027,0xE79B, 0x9036,0xE79C, 0x9035,0xE79D, 0x9039,0xE79E, 0x8FF8,0xE79F, 0x904F,0xE7A0, 0x9050,0xE7A1, 0x9051,0xE7A2, 0x9052,0xE7A3, 0x900E,0xE7A4, 0x9049,0xE7A5, 0x903E,0xE7A6, 0x9056,0xE7A7, 0x9058,0xE7A8, 0x905E,0xE7A9, 0x9068,0xE7AA, 0x906F,0xE7AB, 0x9076,0xE7AC, 0x96A8,0xE7AD, 0x9072,0xE7AE, 0x9082,0xE7AF, 0x907D,0xE7B0, 0x9081,0xE7B1, 0x9080,0xE7B2, 0x908A,0xE7B3, 0x9089,0xE7B4, 0x908F,0xE7B5, 0x90A8,0xE7B6, 0x90AF,0xE7B7, 0x90B1,0xE7B8, 0x90B5,0xE7B9, 0x90E2,0xE7BA, 0x90E4,0xE7BB, 0x6248,0xE7BC, 0x90DB,0xE7BD, 0x9102,0xE7BE, 0x9112,0xE7BF, 0x9119,0xE7C0, 0x9132,0xE7C1, 0x9130,0xE7C2, 0x914A,0xE7C3, 0x9156,0xE7C4, 0x9158,0xE7C5, 0x9163,0xE7C6, 0x9165,0xE7C7, 0x9169,0xE7C8, 0x9173,0xE7C9, 0x9172,0xE7CA, 0x918B,0xE7CB, 0x9189,0xE7CC, 0x9182,0xE7CD, 0x91A2,0xE7CE, 0x91AB,0xE7CF, 0x91AF,0xE7D0, 0x91AA,0xE7D1, 0x91B5,0xE7D2, 0x91B4,0xE7D3, 0x91BA,0xE7D4, 0x91C0,0xE7D5, 0x91C1,0xE7D6, 0x91C9,0xE7D7, 0x91CB,0xE7D8, 0x91D0,0xE7D9, 0x91D6,0xE7DA, 0x91DF,0xE7DB, 0x91E1,0xE7DC, 0x91DB,0xE7DD, 0x91FC,0xE7DE, 0x91F5,0xE7DF, 0x91F6,0xE7E0, 0x921E,0xE7E1, 0x91FF,0xE7E2, 0x9214,0xE7E3, 0x922C,0xE7E4, 0x9215,0xE7E5, 0x9211,0xE7E6, 0x925E,0xE7E7, 0x9257,0xE7E8, 0x9245,0xE7E9, 0x9249,0xE7EA, 0x9264,0xE7EB, 0x9248,0xE7EC, 0x9295,0xE7ED, 0x923F,0xE7EE, 0x924B,0xE7EF, 0x9250,0xE7F0, 0x929C,0xE7F1, 0x9296,0xE7F2, 0x9293,0xE7F3, 0x929B,0xE7F4, 0x925A,0xE7F5, 0x92CF,0xE7F6, 0x92B9,0xE7F7, 0x92B7,0xE7F8, 0x92E9,0xE7F9, 0x930F,0xE7FA, 0x92FA,0xE7FB, 0x9344,0xE7FC, 0x932E,0xE840, 0x9319,0xE841, 0x9322,0xE842, 0x931A,0xE843, 0x9323,0xE844, 0x933A,0xE845, 0x9335,0xE846, 0x933B,0xE847, 0x935C,0xE848, 0x9360,0xE849, 0x937C,0xE84A, 0x936E,0xE84B, 0x9356,0xE84C, 0x93B0,0xE84D, 0x93AC,0xE84E, 0x93AD,0xE84F, 
0x9394,0xE850, 0x93B9,0xE851, 0x93D6,0xE852, 0x93D7,0xE853, 0x93E8,0xE854, 0x93E5,0xE855, 0x93D8,0xE856, 0x93C3,0xE857, 0x93DD,0xE858, 0x93D0,0xE859, 0x93C8,0xE85A, 0x93E4,0xE85B, 0x941A,0xE85C, 0x9414,0xE85D, 0x9413,0xE85E, 0x9403,0xE85F, 0x9407,0xE860, 0x9410,0xE861, 0x9436,0xE862, 0x942B,0xE863, 0x9435,0xE864, 0x9421,0xE865, 0x943A,0xE866, 0x9441,0xE867, 0x9452,0xE868, 0x9444,0xE869, 0x945B,0xE86A, 0x9460,0xE86B, 0x9462,0xE86C, 0x945E,0xE86D, 0x946A,0xE86E, 0x9229,0xE86F, 0x9470,0xE870, 0x9475,0xE871, 0x9477,0xE872, 0x947D,0xE873, 0x945A,0xE874, 0x947C,0xE875, 0x947E,0xE876, 0x9481,0xE877, 0x947F,0xE878, 0x9582,0xE879, 0x9587,0xE87A, 0x958A,0xE87B, 0x9594,0xE87C, 0x9596,0xE87D, 0x9598,0xE87E, 0x9599,0xE880, 0x95A0,0xE881, 0x95A8,0xE882, 0x95A7,0xE883, 0x95AD,0xE884, 0x95BC,0xE885, 0x95BB,0xE886, 0x95B9,0xE887, 0x95BE,0xE888, 0x95CA,0xE889, 0x6FF6,0xE88A, 0x95C3,0xE88B, 0x95CD,0xE88C, 0x95CC,0xE88D, 0x95D5,0xE88E, 0x95D4,0xE88F, 0x95D6,0xE890, 0x95DC,0xE891, 0x95E1,0xE892, 0x95E5,0xE893, 0x95E2,0xE894, 0x9621,0xE895, 0x9628,0xE896, 0x962E,0xE897, 0x962F,0xE898, 0x9642,0xE899, 0x964C,0xE89A, 0x964F,0xE89B, 0x964B,0xE89C, 0x9677,0xE89D, 0x965C,0xE89E, 0x965E,0xE89F, 0x965D,0xE8A0, 0x965F,0xE8A1, 0x9666,0xE8A2, 0x9672,0xE8A3, 0x966C,0xE8A4, 0x968D,0xE8A5, 0x9698,0xE8A6, 0x9695,0xE8A7, 0x9697,0xE8A8, 0x96AA,0xE8A9, 0x96A7,0xE8AA, 0x96B1,0xE8AB, 0x96B2,0xE8AC, 0x96B0,0xE8AD, 0x96B4,0xE8AE, 0x96B6,0xE8AF, 0x96B8,0xE8B0, 0x96B9,0xE8B1, 0x96CE,0xE8B2, 0x96CB,0xE8B3, 0x96C9,0xE8B4, 0x96CD,0xE8B5, 0x894D,0xE8B6, 0x96DC,0xE8B7, 0x970D,0xE8B8, 0x96D5,0xE8B9, 0x96F9,0xE8BA, 0x9704,0xE8BB, 0x9706,0xE8BC, 0x9708,0xE8BD, 0x9713,0xE8BE, 0x970E,0xE8BF, 0x9711,0xE8C0, 0x970F,0xE8C1, 0x9716,0xE8C2, 0x9719,0xE8C3, 0x9724,0xE8C4, 0x972A,0xE8C5, 0x9730,0xE8C6, 0x9739,0xE8C7, 0x973D,0xE8C8, 0x973E,0xE8C9, 0x9744,0xE8CA, 0x9746,0xE8CB, 0x9748,0xE8CC, 0x9742,0xE8CD, 0x9749,0xE8CE, 0x975C,0xE8CF, 0x9760,0xE8D0, 0x9764,0xE8D1, 0x9766,0xE8D2, 0x9768,0xE8D3, 0x52D2,0xE8D4, 0x976B,0xE8D5, 0x9771,0xE8D6, 0x9779,0xE8D7, 0x9785,0xE8D8, 0x977C,0xE8D9, 0x9781,0xE8DA, 0x977A,0xE8DB, 0x9786,0xE8DC, 0x978B,0xE8DD, 0x978F,0xE8DE, 0x9790,0xE8DF, 0x979C,0xE8E0, 0x97A8,0xE8E1, 0x97A6,0xE8E2, 0x97A3,0xE8E3, 0x97B3,0xE8E4, 0x97B4,0xE8E5, 0x97C3,0xE8E6, 0x97C6,0xE8E7, 0x97C8,0xE8E8, 0x97CB,0xE8E9, 0x97DC,0xE8EA, 0x97ED,0xE8EB, 0x9F4F,0xE8EC, 0x97F2,0xE8ED, 0x7ADF,0xE8EE, 0x97F6,0xE8EF, 0x97F5,0xE8F0, 0x980F,0xE8F1, 0x980C,0xE8F2, 0x9838,0xE8F3, 0x9824,0xE8F4, 0x9821,0xE8F5, 0x9837,0xE8F6, 0x983D,0xE8F7, 0x9846,0xE8F8, 0x984F,0xE8F9, 0x984B,0xE8FA, 0x986B,0xE8FB, 0x986F,0xE8FC, 0x9870,0xE940, 0x9871,0xE941, 0x9874,0xE942, 0x9873,0xE943, 0x98AA,0xE944, 0x98AF,0xE945, 0x98B1,0xE946, 0x98B6,0xE947, 0x98C4,0xE948, 0x98C3,0xE949, 0x98C6,0xE94A, 0x98E9,0xE94B, 0x98EB,0xE94C, 0x9903,0xE94D, 0x9909,0xE94E, 0x9912,0xE94F, 0x9914,0xE950, 0x9918,0xE951, 0x9921,0xE952, 0x991D,0xE953, 0x991E,0xE954, 0x9924,0xE955, 0x9920,0xE956, 0x992C,0xE957, 0x992E,0xE958, 0x993D,0xE959, 0x993E,0xE95A, 0x9942,0xE95B, 0x9949,0xE95C, 0x9945,0xE95D, 0x9950,0xE95E, 0x994B,0xE95F, 0x9951,0xE960, 0x9952,0xE961, 0x994C,0xE962, 0x9955,0xE963, 0x9997,0xE964, 0x9998,0xE965, 0x99A5,0xE966, 0x99AD,0xE967, 0x99AE,0xE968, 0x99BC,0xE969, 0x99DF,0xE96A, 0x99DB,0xE96B, 0x99DD,0xE96C, 0x99D8,0xE96D, 0x99D1,0xE96E, 0x99ED,0xE96F, 0x99EE,0xE970, 0x99F1,0xE971, 0x99F2,0xE972, 0x99FB,0xE973, 0x99F8,0xE974, 0x9A01,0xE975, 0x9A0F,0xE976, 0x9A05,0xE977, 0x99E2,0xE978, 0x9A19,0xE979, 0x9A2B,0xE97A, 0x9A37,0xE97B, 0x9A45,0xE97C, 0x9A42,0xE97D, 0x9A40,0xE97E, 0x9A43,0xE980, 0x9A3E,0xE981, 
0x9A55,0xE982, 0x9A4D,0xE983, 0x9A5B,0xE984, 0x9A57,0xE985, 0x9A5F,0xE986, 0x9A62,0xE987, 0x9A65,0xE988, 0x9A64,0xE989, 0x9A69,0xE98A, 0x9A6B,0xE98B, 0x9A6A,0xE98C, 0x9AAD,0xE98D, 0x9AB0,0xE98E, 0x9ABC,0xE98F, 0x9AC0,0xE990, 0x9ACF,0xE991, 0x9AD1,0xE992, 0x9AD3,0xE993, 0x9AD4,0xE994, 0x9ADE,0xE995, 0x9ADF,0xE996, 0x9AE2,0xE997, 0x9AE3,0xE998, 0x9AE6,0xE999, 0x9AEF,0xE99A, 0x9AEB,0xE99B, 0x9AEE,0xE99C, 0x9AF4,0xE99D, 0x9AF1,0xE99E, 0x9AF7,0xE99F, 0x9AFB,0xE9A0, 0x9B06,0xE9A1, 0x9B18,0xE9A2, 0x9B1A,0xE9A3, 0x9B1F,0xE9A4, 0x9B22,0xE9A5, 0x9B23,0xE9A6, 0x9B25,0xE9A7, 0x9B27,0xE9A8, 0x9B28,0xE9A9, 0x9B29,0xE9AA, 0x9B2A,0xE9AB, 0x9B2E,0xE9AC, 0x9B2F,0xE9AD, 0x9B32,0xE9AE, 0x9B44,0xE9AF, 0x9B43,0xE9B0, 0x9B4F,0xE9B1, 0x9B4D,0xE9B2, 0x9B4E,0xE9B3, 0x9B51,0xE9B4, 0x9B58,0xE9B5, 0x9B74,0xE9B6, 0x9B93,0xE9B7, 0x9B83,0xE9B8, 0x9B91,0xE9B9, 0x9B96,0xE9BA, 0x9B97,0xE9BB, 0x9B9F,0xE9BC, 0x9BA0,0xE9BD, 0x9BA8,0xE9BE, 0x9BB4,0xE9BF, 0x9BC0,0xE9C0, 0x9BCA,0xE9C1, 0x9BB9,0xE9C2, 0x9BC6,0xE9C3, 0x9BCF,0xE9C4, 0x9BD1,0xE9C5, 0x9BD2,0xE9C6, 0x9BE3,0xE9C7, 0x9BE2,0xE9C8, 0x9BE4,0xE9C9, 0x9BD4,0xE9CA, 0x9BE1,0xE9CB, 0x9C3A,0xE9CC, 0x9BF2,0xE9CD, 0x9BF1,0xE9CE, 0x9BF0,0xE9CF, 0x9C15,0xE9D0, 0x9C14,0xE9D1, 0x9C09,0xE9D2, 0x9C13,0xE9D3, 0x9C0C,0xE9D4, 0x9C06,0xE9D5, 0x9C08,0xE9D6, 0x9C12,0xE9D7, 0x9C0A,0xE9D8, 0x9C04,0xE9D9, 0x9C2E,0xE9DA, 0x9C1B,0xE9DB, 0x9C25,0xE9DC, 0x9C24,0xE9DD, 0x9C21,0xE9DE, 0x9C30,0xE9DF, 0x9C47,0xE9E0, 0x9C32,0xE9E1, 0x9C46,0xE9E2, 0x9C3E,0xE9E3, 0x9C5A,0xE9E4, 0x9C60,0xE9E5, 0x9C67,0xE9E6, 0x9C76,0xE9E7, 0x9C78,0xE9E8, 0x9CE7,0xE9E9, 0x9CEC,0xE9EA, 0x9CF0,0xE9EB, 0x9D09,0xE9EC, 0x9D08,0xE9ED, 0x9CEB,0xE9EE, 0x9D03,0xE9EF, 0x9D06,0xE9F0, 0x9D2A,0xE9F1, 0x9D26,0xE9F2, 0x9DAF,0xE9F3, 0x9D23,0xE9F4, 0x9D1F,0xE9F5, 0x9D44,0xE9F6, 0x9D15,0xE9F7, 0x9D12,0xE9F8, 0x9D41,0xE9F9, 0x9D3F,0xE9FA, 0x9D3E,0xE9FB, 0x9D46,0xE9FC, 0x9D48,0xEA40, 0x9D5D,0xEA41, 0x9D5E,0xEA42, 0x9D64,0xEA43, 0x9D51,0xEA44, 0x9D50,0xEA45, 0x9D59,0xEA46, 0x9D72,0xEA47, 0x9D89,0xEA48, 0x9D87,0xEA49, 0x9DAB,0xEA4A, 0x9D6F,0xEA4B, 0x9D7A,0xEA4C, 0x9D9A,0xEA4D, 0x9DA4,0xEA4E, 0x9DA9,0xEA4F, 0x9DB2,0xEA50, 0x9DC4,0xEA51, 0x9DC1,0xEA52, 0x9DBB,0xEA53, 0x9DB8,0xEA54, 0x9DBA,0xEA55, 0x9DC6,0xEA56, 0x9DCF,0xEA57, 0x9DC2,0xEA58, 0x9DD9,0xEA59, 0x9DD3,0xEA5A, 0x9DF8,0xEA5B, 0x9DE6,0xEA5C, 0x9DED,0xEA5D, 0x9DEF,0xEA5E, 0x9DFD,0xEA5F, 0x9E1A,0xEA60, 0x9E1B,0xEA61, 0x9E1E,0xEA62, 0x9E75,0xEA63, 0x9E79,0xEA64, 0x9E7D,0xEA65, 0x9E81,0xEA66, 0x9E88,0xEA67, 0x9E8B,0xEA68, 0x9E8C,0xEA69, 0x9E92,0xEA6A, 0x9E95,0xEA6B, 0x9E91,0xEA6C, 0x9E9D,0xEA6D, 0x9EA5,0xEA6E, 0x9EA9,0xEA6F, 0x9EB8,0xEA70, 0x9EAA,0xEA71, 0x9EAD,0xEA72, 0x9761,0xEA73, 0x9ECC,0xEA74, 0x9ECE,0xEA75, 0x9ECF,0xEA76, 0x9ED0,0xEA77, 0x9ED4,0xEA78, 0x9EDC,0xEA79, 0x9EDE,0xEA7A, 0x9EDD,0xEA7B, 0x9EE0,0xEA7C, 0x9EE5,0xEA7D, 0x9EE8,0xEA7E, 0x9EEF,0xEA80, 0x9EF4,0xEA81, 0x9EF6,0xEA82, 0x9EF7,0xEA83, 0x9EF9,0xEA84, 0x9EFB,0xEA85, 0x9EFC,0xEA86, 0x9EFD,0xEA87, 0x9F07,0xEA88, 0x9F08,0xEA89, 0x76B7,0xEA8A, 0x9F15,0xEA8B, 0x9F21,0xEA8C, 0x9F2C,0xEA8D, 0x9F3E,0xEA8E, 0x9F4A,0xEA8F, 0x9F52,0xEA90, 0x9F54,0xEA91, 0x9F63,0xEA92, 0x9F5F,0xEA93, 0x9F60,0xEA94, 0x9F61,0xEA95, 0x9F66,0xEA96, 0x9F67,0xEA97, 0x9F6C,0xEA98, 0x9F6A,0xEA99, 0x9F77,0xEA9A, 0x9F72,0xEA9B, 0x9F76,0xEA9C, 0x9F95,0xEA9D, 0x9F9C,0xEA9E, 0x9FA0,0xEA9F, 0x582F,0xEAA0, 0x69C7,0xEAA1, 0x9059,0xEAA2, 0x7464,0xEAA3, 0x51DC,0xEAA4, 0x7199,0xED40, 0x7E8A,0xED41, 0x891C,0xED42, 0x9348,0xED43, 0x9288,0xED44, 0x84DC,0xED45, 0x4FC9,0xED46, 0x70BB,0xED47, 0x6631,0xED48, 0x68C8,0xED49, 0x92F9,0xED4A, 0x66FB,0xED4B, 0x5F45,0xED4C, 0x4E28,0xED4D, 
0x4EE1,0xED4E, 0x4EFC,0xED4F, 0x4F00,0xED50, 0x4F03,0xED51, 0x4F39,0xED52, 0x4F56,0xED53, 0x4F92,0xED54, 0x4F8A,0xED55, 0x4F9A,0xED56, 0x4F94,0xED57, 0x4FCD,0xED58, 0x5040,0xED59, 0x5022,0xED5A, 0x4FFF,0xED5B, 0x501E,0xED5C, 0x5046,0xED5D, 0x5070,0xED5E, 0x5042,0xED5F, 0x5094,0xED60, 0x50F4,0xED61, 0x50D8,0xED62, 0x514A,0xED63, 0x5164,0xED64, 0x519D,0xED65, 0x51BE,0xED66, 0x51EC,0xED67, 0x5215,0xED68, 0x529C,0xED69, 0x52A6,0xED6A, 0x52C0,0xED6B, 0x52DB,0xED6C, 0x5300,0xED6D, 0x5307,0xED6E, 0x5324,0xED6F, 0x5372,0xED70, 0x5393,0xED71, 0x53B2,0xED72, 0x53DD,0xED73, 0xFA0E,0xED74, 0x549C,0xED75, 0x548A,0xED76, 0x54A9,0xED77, 0x54FF,0xED78, 0x5586,0xED79, 0x5759,0xED7A, 0x5765,0xED7B, 0x57AC,0xED7C, 0x57C8,0xED7D, 0x57C7,0xED7E, 0xFA0F,0xED80, 0xFA10,0xED81, 0x589E,0xED82, 0x58B2,0xED83, 0x590B,0xED84, 0x5953,0xED85, 0x595B,0xED86, 0x595D,0xED87, 0x5963,0xED88, 0x59A4,0xED89, 0x59BA,0xED8A, 0x5B56,0xED8B, 0x5BC0,0xED8C, 0x752F,0xED8D, 0x5BD8,0xED8E, 0x5BEC,0xED8F, 0x5C1E,0xED90, 0x5CA6,0xED91, 0x5CBA,0xED92, 0x5CF5,0xED93, 0x5D27,0xED94, 0x5D53,0xED95, 0xFA11,0xED96, 0x5D42,0xED97, 0x5D6D,0xED98, 0x5DB8,0xED99, 0x5DB9,0xED9A, 0x5DD0,0xED9B, 0x5F21,0xED9C, 0x5F34,0xED9D, 0x5F67,0xED9E, 0x5FB7,0xED9F, 0x5FDE,0xEDA0, 0x605D,0xEDA1, 0x6085,0xEDA2, 0x608A,0xEDA3, 0x60DE,0xEDA4, 0x60D5,0xEDA5, 0x6120,0xEDA6, 0x60F2,0xEDA7, 0x6111,0xEDA8, 0x6137,0xEDA9, 0x6130,0xEDAA, 0x6198,0xEDAB, 0x6213,0xEDAC, 0x62A6,0xEDAD, 0x63F5,0xEDAE, 0x6460,0xEDAF, 0x649D,0xEDB0, 0x64CE,0xEDB1, 0x654E,0xEDB2, 0x6600,0xEDB3, 0x6615,0xEDB4, 0x663B,0xEDB5, 0x6609,0xEDB6, 0x662E,0xEDB7, 0x661E,0xEDB8, 0x6624,0xEDB9, 0x6665,0xEDBA, 0x6657,0xEDBB, 0x6659,0xEDBC, 0xFA12,0xEDBD, 0x6673,0xEDBE, 0x6699,0xEDBF, 0x66A0,0xEDC0, 0x66B2,0xEDC1, 0x66BF,0xEDC2, 0x66FA,0xEDC3, 0x670E,0xEDC4, 0xF929,0xEDC5, 0x6766,0xEDC6, 0x67BB,0xEDC7, 0x6852,0xEDC8, 0x67C0,0xEDC9, 0x6801,0xEDCA, 0x6844,0xEDCB, 0x68CF,0xEDCC, 0xFA13,0xEDCD, 0x6968,0xEDCE, 0xFA14,0xEDCF, 0x6998,0xEDD0, 0x69E2,0xEDD1, 0x6A30,0xEDD2, 0x6A6B,0xEDD3, 0x6A46,0xEDD4, 0x6A73,0xEDD5, 0x6A7E,0xEDD6, 0x6AE2,0xEDD7, 0x6AE4,0xEDD8, 0x6BD6,0xEDD9, 0x6C3F,0xEDDA, 0x6C5C,0xEDDB, 0x6C86,0xEDDC, 0x6C6F,0xEDDD, 0x6CDA,0xEDDE, 0x6D04,0xEDDF, 0x6D87,0xEDE0, 0x6D6F,0xEDE1, 0x6D96,0xEDE2, 0x6DAC,0xEDE3, 0x6DCF,0xEDE4, 0x6DF8,0xEDE5, 0x6DF2,0xEDE6, 0x6DFC,0xEDE7, 0x6E39,0xEDE8, 0x6E5C,0xEDE9, 0x6E27,0xEDEA, 0x6E3C,0xEDEB, 0x6EBF,0xEDEC, 0x6F88,0xEDED, 0x6FB5,0xEDEE, 0x6FF5,0xEDEF, 0x7005,0xEDF0, 0x7007,0xEDF1, 0x7028,0xEDF2, 0x7085,0xEDF3, 0x70AB,0xEDF4, 0x710F,0xEDF5, 0x7104,0xEDF6, 0x715C,0xEDF7, 0x7146,0xEDF8, 0x7147,0xEDF9, 0xFA15,0xEDFA, 0x71C1,0xEDFB, 0x71FE,0xEDFC, 0x72B1,0xEE40, 0x72BE,0xEE41, 0x7324,0xEE42, 0xFA16,0xEE43, 0x7377,0xEE44, 0x73BD,0xEE45, 0x73C9,0xEE46, 0x73D6,0xEE47, 0x73E3,0xEE48, 0x73D2,0xEE49, 0x7407,0xEE4A, 0x73F5,0xEE4B, 0x7426,0xEE4C, 0x742A,0xEE4D, 0x7429,0xEE4E, 0x742E,0xEE4F, 0x7462,0xEE50, 0x7489,0xEE51, 0x749F,0xEE52, 0x7501,0xEE53, 0x756F,0xEE54, 0x7682,0xEE55, 0x769C,0xEE56, 0x769E,0xEE57, 0x769B,0xEE58, 0x76A6,0xEE59, 0xFA17,0xEE5A, 0x7746,0xEE5B, 0x52AF,0xEE5C, 0x7821,0xEE5D, 0x784E,0xEE5E, 0x7864,0xEE5F, 0x787A,0xEE60, 0x7930,0xEE61, 0xFA18,0xEE62, 0xFA19,0xEE63, 0xFA1A,0xEE64, 0x7994,0xEE65, 0xFA1B,0xEE66, 0x799B,0xEE67, 0x7AD1,0xEE68, 0x7AE7,0xEE69, 0xFA1C,0xEE6A, 0x7AEB,0xEE6B, 0x7B9E,0xEE6C, 0xFA1D,0xEE6D, 0x7D48,0xEE6E, 0x7D5C,0xEE6F, 0x7DB7,0xEE70, 0x7DA0,0xEE71, 0x7DD6,0xEE72, 0x7E52,0xEE73, 0x7F47,0xEE74, 0x7FA1,0xEE75, 0xFA1E,0xEE76, 0x8301,0xEE77, 0x8362,0xEE78, 0x837F,0xEE79, 0x83C7,0xEE7A, 0x83F6,0xEE7B, 0x8448,0xEE7C, 0x84B4,0xEE7D, 0x8553,0xEE7E, 
0x8559,0xEE80, 0x856B,0xEE81, 0xFA1F,0xEE82, 0x85B0,0xEE83, 0xFA20,0xEE84, 0xFA21,0xEE85, 0x8807,0xEE86, 0x88F5,0xEE87, 0x8A12,0xEE88, 0x8A37,0xEE89, 0x8A79,0xEE8A, 0x8AA7,0xEE8B, 0x8ABE,0xEE8C, 0x8ADF,0xEE8D, 0xFA22,0xEE8E, 0x8AF6,0xEE8F, 0x8B53,0xEE90, 0x8B7F,0xEE91, 0x8CF0,0xEE92, 0x8CF4,0xEE93, 0x8D12,0xEE94, 0x8D76,0xEE95, 0xFA23,0xEE96, 0x8ECF,0xEE97, 0xFA24,0xEE98, 0xFA25,0xEE99, 0x9067,0xEE9A, 0x90DE,0xEE9B, 0xFA26,0xEE9C, 0x9115,0xEE9D, 0x9127,0xEE9E, 0x91DA,0xEE9F, 0x91D7,0xEEA0, 0x91DE,0xEEA1, 0x91ED,0xEEA2, 0x91EE,0xEEA3, 0x91E4,0xEEA4, 0x91E5,0xEEA5, 0x9206,0xEEA6, 0x9210,0xEEA7, 0x920A,0xEEA8, 0x923A,0xEEA9, 0x9240,0xEEAA, 0x923C,0xEEAB, 0x924E,0xEEAC, 0x9259,0xEEAD, 0x9251,0xEEAE, 0x9239,0xEEAF, 0x9267,0xEEB0, 0x92A7,0xEEB1, 0x9277,0xEEB2, 0x9278,0xEEB3, 0x92E7,0xEEB4, 0x92D7,0xEEB5, 0x92D9,0xEEB6, 0x92D0,0xEEB7, 0xFA27,0xEEB8, 0x92D5,0xEEB9, 0x92E0,0xEEBA, 0x92D3,0xEEBB, 0x9325,0xEEBC, 0x9321,0xEEBD, 0x92FB,0xEEBE, 0xFA28,0xEEBF, 0x931E,0xEEC0, 0x92FF,0xEEC1, 0x931D,0xEEC2, 0x9302,0xEEC3, 0x9370,0xEEC4, 0x9357,0xEEC5, 0x93A4,0xEEC6, 0x93C6,0xEEC7, 0x93DE,0xEEC8, 0x93F8,0xEEC9, 0x9431,0xEECA, 0x9445,0xEECB, 0x9448,0xEECC, 0x9592,0xEECD, 0xF9DC,0xEECE, 0xFA29,0xEECF, 0x969D,0xEED0, 0x96AF,0xEED1, 0x9733,0xEED2, 0x973B,0xEED3, 0x9743,0xEED4, 0x974D,0xEED5, 0x974F,0xEED6, 0x9751,0xEED7, 0x9755,0xEED8, 0x9857,0xEED9, 0x9865,0xEEDA, 0xFA2A,0xEEDB, 0xFA2B,0xEEDC, 0x9927,0xEEDD, 0xFA2C,0xEEDE, 0x999E,0xEEDF, 0x9A4E,0xEEE0, 0x9AD9,0xEEE1, 0x9ADC,0xEEE2, 0x9B75,0xEEE3, 0x9B72,0xEEE4, 0x9B8F,0xEEE5, 0x9BB1,0xEEE6, 0x9BBB,0xEEE7, 0x9C00,0xEEE8, 0x9D70,0xEEE9, 0x9D6B,0xEEEA, 0xFA2D,0xEEEB, 0x9E19,0xEEEC, 0x9ED1,0xEEEF, 0x2170,0xEEF0, 0x2171,0xEEF1, 0x2172,0xEEF2, 0x2173,0xEEF3, 0x2174,0xEEF4, 0x2175,0xEEF5, 0x2176,0xEEF6, 0x2177,0xEEF7, 0x2178,0xEEF8, 0x2179,0xEEF9, 0xFFE2,0xEEFA, 0xFFE4,0xEEFB, 0xFF07,0xEEFC, 0xFF02,0xFA40, 0x2170,0xFA41, 0x2171,0xFA42, 0x2172,0xFA43, 0x2173,0xFA44, 0x2174,0xFA45, 0x2175,0xFA46, 0x2176,0xFA47, 0x2177,0xFA48, 0x2178,0xFA49, 0x2179,0xFA4A, 0x2160,0xFA4B, 0x2161,0xFA4C, 0x2162,0xFA4D, 0x2163,0xFA4E, 0x2164,0xFA4F, 0x2165,0xFA50, 0x2166,0xFA51, 0x2167,0xFA52, 0x2168,0xFA53, 0x2169,0xFA54, 0xFFE2,0xFA55, 0xFFE4,0xFA56, 0xFF07,0xFA57, 0xFF02,0xFA58, 0x3231,0xFA59, 0x2116,0xFA5A, 0x2121,0xFA5B, 0x2235,0xFA5C, 0x7E8A,0xFA5D, 0x891C,0xFA5E, 0x9348,0xFA5F, 0x9288,0xFA60, 0x84DC,0xFA61, 0x4FC9,0xFA62, 0x70BB,0xFA63, 0x6631,0xFA64, 0x68C8,0xFA65, 0x92F9,0xFA66, 0x66FB,0xFA67, 0x5F45,0xFA68, 0x4E28,0xFA69, 0x4EE1,0xFA6A, 0x4EFC,0xFA6B, 0x4F00,0xFA6C, 0x4F03,0xFA6D, 0x4F39,0xFA6E, 0x4F56,0xFA6F, 0x4F92,0xFA70, 0x4F8A,0xFA71, 0x4F9A,0xFA72, 0x4F94,0xFA73, 0x4FCD,0xFA74, 0x5040,0xFA75, 0x5022,0xFA76, 0x4FFF,0xFA77, 0x501E,0xFA78, 0x5046,0xFA79, 0x5070,0xFA7A, 0x5042,0xFA7B, 0x5094,0xFA7C, 0x50F4,0xFA7D, 0x50D8,0xFA7E, 0x514A,0xFA80, 0x5164,0xFA81, 0x519D,0xFA82, 0x51BE,0xFA83, 0x51EC,0xFA84, 0x5215,0xFA85, 0x529C,0xFA86, 0x52A6,0xFA87, 0x52C0,0xFA88, 0x52DB,0xFA89, 0x5300,0xFA8A, 0x5307,0xFA8B, 0x5324,0xFA8C, 0x5372,0xFA8D, 0x5393,0xFA8E, 0x53B2,0xFA8F, 0x53DD,0xFA90, 0xFA0E,0xFA91, 0x549C,0xFA92, 0x548A,0xFA93, 0x54A9,0xFA94, 0x54FF,0xFA95, 0x5586,0xFA96, 0x5759,0xFA97, 0x5765,0xFA98, 0x57AC,0xFA99, 0x57C8,0xFA9A, 0x57C7,0xFA9B, 0xFA0F,0xFA9C, 0xFA10,0xFA9D, 0x589E,0xFA9E, 0x58B2,0xFA9F, 0x590B,0xFAA0, 0x5953,0xFAA1, 0x595B,0xFAA2, 0x595D,0xFAA3, 0x5963,0xFAA4, 0x59A4,0xFAA5, 0x59BA,0xFAA6, 0x5B56,0xFAA7, 0x5BC0,0xFAA8, 0x752F,0xFAA9, 0x5BD8,0xFAAA, 0x5BEC,0xFAAB, 0x5C1E,0xFAAC, 0x5CA6,0xFAAD, 0x5CBA,0xFAAE, 0x5CF5,0xFAAF, 0x5D27,0xFAB0, 0x5D53,0xFAB1, 0xFA11,0xFAB2, 
0x5D42,0xFAB3, 0x5D6D,0xFAB4, 0x5DB8,0xFAB5, 0x5DB9,0xFAB6, 0x5DD0,0xFAB7, 0x5F21,0xFAB8, 0x5F34,0xFAB9, 0x5F67,0xFABA, 0x5FB7,0xFABB, 0x5FDE,0xFABC, 0x605D,0xFABD, 0x6085,0xFABE, 0x608A,0xFABF, 0x60DE,0xFAC0, 0x60D5,0xFAC1, 0x6120,0xFAC2, 0x60F2,0xFAC3, 0x6111,0xFAC4, 0x6137,0xFAC5, 0x6130,0xFAC6, 0x6198,0xFAC7, 0x6213,0xFAC8, 0x62A6,0xFAC9, 0x63F5,0xFACA, 0x6460,0xFACB, 0x649D,0xFACC, 0x64CE,0xFACD, 0x654E,0xFACE, 0x6600,0xFACF, 0x6615,0xFAD0, 0x663B,0xFAD1, 0x6609,0xFAD2, 0x662E,0xFAD3, 0x661E,0xFAD4, 0x6624,0xFAD5, 0x6665,0xFAD6, 0x6657,0xFAD7, 0x6659,0xFAD8, 0xFA12,0xFAD9, 0x6673,0xFADA, 0x6699,0xFADB, 0x66A0,0xFADC, 0x66B2,0xFADD, 0x66BF,0xFADE, 0x66FA,0xFADF, 0x670E,0xFAE0, 0xF929,0xFAE1, 0x6766,0xFAE2, 0x67BB,0xFAE3, 0x6852,0xFAE4, 0x67C0,0xFAE5, 0x6801,0xFAE6, 0x6844,0xFAE7, 0x68CF,0xFAE8, 0xFA13,0xFAE9, 0x6968,0xFAEA, 0xFA14,0xFAEB, 0x6998,0xFAEC, 0x69E2,0xFAED, 0x6A30,0xFAEE, 0x6A6B,0xFAEF, 0x6A46,0xFAF0, 0x6A73,0xFAF1, 0x6A7E,0xFAF2, 0x6AE2,0xFAF3, 0x6AE4,0xFAF4, 0x6BD6,0xFAF5, 0x6C3F,0xFAF6, 0x6C5C,0xFAF7, 0x6C86,0xFAF8, 0x6C6F,0xFAF9, 0x6CDA,0xFAFA, 0x6D04,0xFAFB, 0x6D87,0xFAFC, 0x6D6F,0xFB40, 0x6D96,0xFB41, 0x6DAC,0xFB42, 0x6DCF,0xFB43, 0x6DF8,0xFB44, 0x6DF2,0xFB45, 0x6DFC,0xFB46, 0x6E39,0xFB47, 0x6E5C,0xFB48, 0x6E27,0xFB49, 0x6E3C,0xFB4A, 0x6EBF,0xFB4B, 0x6F88,0xFB4C, 0x6FB5,0xFB4D, 0x6FF5,0xFB4E, 0x7005,0xFB4F, 0x7007,0xFB50, 0x7028,0xFB51, 0x7085,0xFB52, 0x70AB,0xFB53, 0x710F,0xFB54, 0x7104,0xFB55, 0x715C,0xFB56, 0x7146,0xFB57, 0x7147,0xFB58, 0xFA15,0xFB59, 0x71C1,0xFB5A, 0x71FE,0xFB5B, 0x72B1,0xFB5C, 0x72BE,0xFB5D, 0x7324,0xFB5E, 0xFA16,0xFB5F, 0x7377,0xFB60, 0x73BD,0xFB61, 0x73C9,0xFB62, 0x73D6,0xFB63, 0x73E3,0xFB64, 0x73D2,0xFB65, 0x7407,0xFB66, 0x73F5,0xFB67, 0x7426,0xFB68, 0x742A,0xFB69, 0x7429,0xFB6A, 0x742E,0xFB6B, 0x7462,0xFB6C, 0x7489,0xFB6D, 0x749F,0xFB6E, 0x7501,0xFB6F, 0x756F,0xFB70, 0x7682,0xFB71, 0x769C,0xFB72, 0x769E,0xFB73, 0x769B,0xFB74, 0x76A6,0xFB75, 0xFA17,0xFB76, 0x7746,0xFB77, 0x52AF,0xFB78, 0x7821,0xFB79, 0x784E,0xFB7A, 0x7864,0xFB7B, 0x787A,0xFB7C, 0x7930,0xFB7D, 0xFA18,0xFB7E, 0xFA19,0xFB80, 0xFA1A,0xFB81, 0x7994,0xFB82, 0xFA1B,0xFB83, 0x799B,0xFB84, 0x7AD1,0xFB85, 0x7AE7,0xFB86, 0xFA1C,0xFB87, 0x7AEB,0xFB88, 0x7B9E,0xFB89, 0xFA1D,0xFB8A, 0x7D48,0xFB8B, 0x7D5C,0xFB8C, 0x7DB7,0xFB8D, 0x7DA0,0xFB8E, 0x7DD6,0xFB8F, 0x7E52,0xFB90, 0x7F47,0xFB91, 0x7FA1,0xFB92, 0xFA1E,0xFB93, 0x8301,0xFB94, 0x8362,0xFB95, 0x837F,0xFB96, 0x83C7,0xFB97, 0x83F6,0xFB98, 0x8448,0xFB99, 0x84B4,0xFB9A, 0x8553,0xFB9B, 0x8559,0xFB9C, 0x856B,0xFB9D, 0xFA1F,0xFB9E, 0x85B0,0xFB9F, 0xFA20,0xFBA0, 0xFA21,0xFBA1, 0x8807,0xFBA2, 0x88F5,0xFBA3, 0x8A12,0xFBA4, 0x8A37,0xFBA5, 0x8A79,0xFBA6, 0x8AA7,0xFBA7, 0x8ABE,0xFBA8, 0x8ADF,0xFBA9, 0xFA22,0xFBAA, 0x8AF6,0xFBAB, 0x8B53,0xFBAC, 0x8B7F,0xFBAD, 0x8CF0,0xFBAE, 0x8CF4,0xFBAF, 0x8D12,0xFBB0, 0x8D76,0xFBB1, 0xFA23,0xFBB2, 0x8ECF,0xFBB3, 0xFA24,0xFBB4, 0xFA25,0xFBB5, 0x9067,0xFBB6, 0x90DE,0xFBB7, 0xFA26,0xFBB8, 0x9115,0xFBB9, 0x9127,0xFBBA, 0x91DA,0xFBBB, 0x91D7,0xFBBC, 0x91DE,0xFBBD, 0x91ED,0xFBBE, 0x91EE,0xFBBF, 0x91E4,0xFBC0, 0x91E5,0xFBC1, 0x9206,0xFBC2, 0x9210,0xFBC3, 0x920A,0xFBC4, 0x923A,0xFBC5, 0x9240,0xFBC6, 0x923C,0xFBC7, 0x924E,0xFBC8, 0x9259,0xFBC9, 0x9251,0xFBCA, 0x9239,0xFBCB, 0x9267,0xFBCC, 0x92A7,0xFBCD, 0x9277,0xFBCE, 0x9278,0xFBCF, 0x92E7,0xFBD0, 0x92D7,0xFBD1, 0x92D9,0xFBD2, 0x92D0,0xFBD3, 0xFA27,0xFBD4, 0x92D5,0xFBD5, 0x92E0,0xFBD6, 0x92D3,0xFBD7, 0x9325,0xFBD8, 0x9321,0xFBD9, 0x92FB,0xFBDA, 0xFA28,0xFBDB, 0x931E,0xFBDC, 0x92FF,0xFBDD, 0x931D,0xFBDE, 0x9302,0xFBDF, 0x9370,0xFBE0, 0x9357,0xFBE1, 0x93A4,0xFBE2, 0x93C6,0xFBE3, 
0x93DE,0xFBE4, 0x93F8,0xFBE5, 0x9431,0xFBE6, 0x9445,0xFBE7, 0x9448,0xFBE8, 0x9592,0xFBE9, 0xF9DC,0xFBEA, 0xFA29,0xFBEB, 0x969D,0xFBEC, 0x96AF,0xFBED, 0x9733,0xFBEE, 0x973B,0xFBEF, 0x9743,0xFBF0, 0x974D,0xFBF1, 0x974F,0xFBF2, 0x9751,0xFBF3, 0x9755,0xFBF4, 0x9857,0xFBF5, 0x9865,0xFBF6, 0xFA2A,0xFBF7, 0xFA2B,0xFBF8, 0x9927,0xFBF9, 0xFA2C,0xFBFA, 0x999E,0xFBFB, 0x9A4E,0xFBFC, 0x9AD9,0xFC40, 0x9ADC,0xFC41, 0x9B75,0xFC42, 0x9B72,0xFC43, 0x9B8F,0xFC44, 0x9BB1,0xFC45, 0x9BBB,0xFC46, 0x9C00,0xFC47, 0x9D70,0xFC48, 0x9D6B,0xFC49, 0xFA2D,0xFC4A, 0x9E19,0xFC4B, 0x9ED1,] +const cps = {"0": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 65377, 65378, 65379, 65380, 65381, 65382, 65383, 65384, 65385, 65386, 65387, 65388, 65389, 65390, 65391, 65392, 65393, 65394, 65395, 65396, 65397, 65398, 65399, 65400, 65401, 65402, 65403, 65404, 65405, 65406, 65407, 65408, 65409, 65410, 65411, 65412, 65413, 65414, 65415, 65416, 65417, 65418, 65419, 65420, 65421, 65422, 65423, 65424, 65425, 65426, 65427, 65428, 65429, 65430, 65431, 65432, 65433, 65434, 65435, 65436, 65437, 65438, 65439, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null], "33088": [12288, 12289, 12290, 65292, 65294, 12539, 65306, 65307, 65311, 65281, 12443, 12444, 180, 65344, 168, 65342, 65507, 65343, 12541, 12542, 12445, 12446, 12291, 20189, 12293, 12294, 12295, 12540, 8213, 8208, 65295, 65340, 65374, 8741, 65372, 8230, 8229, 8216, 8217, 8220, 8221, 65288, 65289, 12308, 12309, 65339, 65341, 65371, 65373, 12296, 12297, 12298, 12299, 12300, 12301, 12302, 12303, 12304, 12305, 65291, 65293, 177, 215], "33152": [247, 65309, 8800, 65308, 65310, 8806, 8807, 8734, 8756, 9794, 9792, 176, 8242, 8243, 8451, 65509, 65284, 65504, 65505, 65285, 65283, 65286, 65290, 65312, 167, 9734, 9733, 9675, 9679, 9678, 9671, 9670, 9633, 9632, 9651, 9650, 9661, 9660, 8251, 12306, 8594, 8592, 8593, 8595, 12307], "33208": [8712, 8715, 8838, 8839, 8834, 8835, 8746, 8745], "33224": [8743, 8744, 65506, 8658, 8660, 8704, 8707], "33242": [8736, 8869, 8978, 8706, 8711, 8801, 8786, 8810, 8811, 8730, 8765, 8733, 8757, 8747, 8748], "33264": [8491, 8240, 9839, 9837, 9834, 8224, 8225, 182], "33276": [9711], "33359": [65296, 65297, 65298, 65299, 65300, 65301, 65302, 65303, 65304, 65305], "33376": [65313, 65314, 65315, 65316, 65317, 65318, 65319, 65320, 65321, 65322, 65323, 65324, 65325, 65326, 65327, 65328, 65329, 65330, 65331, 65332, 65333, 65334, 65335, 65336, 65337, 65338], "33409": [65345, 65346, 65347, 65348, 65349, 65350, 65351, 65352, 65353, 65354, 65355, 65356, 65357, 65358, 65359, 65360, 65361, 65362, 65363, 65364, 65365, 65366, 65367, 65368, 65369, 65370], "33439": [12353, 12354, 12355, 12356, 12357, 12358, 12359, 12360, 12361, 12362, 12363, 12364, 12365, 12366, 12367, 12368, 
12369, 12370, 12371, 12372, 12373, 12374, 12375, 12376, 12377, 12378, 12379, 12380, 12381, 12382, 12383, 12384, 12385, 12386, 12387, 12388, 12389, 12390, 12391, 12392, 12393, 12394, 12395, 12396, 12397, 12398, 12399, 12400, 12401, 12402, 12403, 12404, 12405, 12406, 12407, 12408, 12409, 12410, 12411, 12412, 12413, 12414, 12415, 12416, 12417, 12418, 12419, 12420, 12421, 12422, 12423, 12424, 12425, 12426, 12427, 12428, 12429, 12430, 12431, 12432, 12433, 12434, 12435], "33600": [12449, 12450, 12451, 12452, 12453, 12454, 12455, 12456, 12457, 12458, 12459, 12460, 12461, 12462, 12463, 12464, 12465, 12466, 12467, 12468, 12469, 12470, 12471, 12472, 12473, 12474, 12475, 12476, 12477, 12478, 12479, 12480, 12481, 12482, 12483, 12484, 12485, 12486, 12487, 12488, 12489, 12490, 12491, 12492, 12493, 12494, 12495, 12496, 12497, 12498, 12499, 12500, 12501, 12502, 12503, 12504, 12505, 12506, 12507, 12508, 12509, 12510, 12511], "33664": [12512, 12513, 12514, 12515, 12516, 12517, 12518, 12519, 12520, 12521, 12522, 12523, 12524, 12525, 12526, 12527, 12528, 12529, 12530, 12531, 12532, 12533, 12534], "33695": [913, 914, 915, 916, 917, 918, 919, 920, 921, 922, 923, 924, 925, 926, 927, 928, 929, 931, 932, 933, 934, 935, 936, 937], "33727": [945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 958, 959, 960, 961, 963, 964, 965, 966, 967, 968, 969], "33856": [1040, 1041, 1042, 1043, 1044, 1045, 1025, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071], "33904": [1072, 1073, 1074, 1075, 1076, 1077, 1105, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085], "33920": [1086, 1087, 1088, 1089, 1090, 1091, 1092, 1093, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1103], "33951": [9472, 9474, 9484, 9488, 9496, 9492, 9500, 9516, 9508, 9524, 9532, 9473, 9475, 9487, 9491, 9499, 9495, 9507, 9523, 9515, 9531, 9547, 9504, 9519, 9512, 9527, 9535, 9501, 9520, 9509, 9528, 9538], "34624": [9312, 9313, 9314, 9315, 9316, 9317, 9318, 9319, 9320, 9321, 9322, 9323, 9324, 9325, 9326, 9327, 9328, 9329, 9330, 9331, 8544, 8545, 8546, 8547, 8548, 8549, 8550, 8551, 8552, 8553], "34655": [13129, 13076, 13090, 13133, 13080, 13095, 13059, 13110, 13137, 13143, 13069, 13094, 13091, 13099, 13130, 13115, 13212, 13213, 13214, 13198, 13199, 13252, 13217], "34686": [13179], "34688": [12317, 12319, 8470, 13261, 8481, 12964, 12965, 12966, 12967, 12968, 12849, 12850, 12857, 13182, 13181, 13180, 8786, 8801, 8747, 8750, 8721, 8730, 8869, 8736, 8735, 8895, 8757, 8745, 8746], "34975": [20124, 21782, 23043, 38463, 21696, 24859, 25384, 23030, 36898, 33909, 33564, 31312, 24746, 25569, 28197, 26093, 33894, 33446, 39925, 26771, 22311, 26017, 25201, 23451, 22992, 34427, 39156, 32098, 32190, 39822, 25110, 31903, 34999, 23433, 24245, 25353, 26263, 26696, 38343, 38797, 26447, 20197, 20234, 20301, 20381, 20553, 22258, 22839, 22996, 23041, 23561, 24799, 24847, 24944, 26131, 26885, 28858, 30031, 30064, 31227, 32173, 32239, 32963, 33806, 34915, 35586, 36949, 36986, 21307, 20117, 20133, 22495, 32946, 37057, 30959, 19968, 22769, 28322, 36920, 31282, 33576, 33419, 39983, 20801, 21360, 21693, 21729, 22240, 23035, 24341, 39154, 28139, 32996, 34093], "35136": [38498, 38512, 38560, 38907, 21515, 21491, 23431, 28879, 32701, 36802, 38632, 21359, 40284, 31418, 19985, 30867, 33276, 28198, 22040, 21764, 27421, 34074, 39995, 23013, 21417, 28006, 29916, 38287, 22082, 20113, 36939, 38642, 33615, 39180, 21473, 21942, 23344, 24433, 26144, 26355, 26628, 27704, 27891, 27945, 
29787, 30408, 31310, 38964, 33521, 34907, 35424, 37613, 28082, 30123, 30410, 39365, 24742, 35585, 36234, 38322, 27022, 21421, 20870], "35200": [22290, 22576, 22852, 23476, 24310, 24616, 25513, 25588, 27839, 28436, 28814, 28948, 29017, 29141, 29503, 32257, 33398, 33489, 34199, 36960, 37467, 40219, 22633, 26044, 27738, 29989, 20985, 22830, 22885, 24448, 24540, 25276, 26106, 27178, 27431, 27572, 29579, 32705, 35158, 40236, 40206, 40644, 23713, 27798, 33659, 20740, 23627, 25014, 33222, 26742, 29281, 20057, 20474, 21368, 24681, 28201, 31311, 38899, 19979, 21270, 20206, 20309, 20285, 20385, 20339, 21152, 21487, 22025, 22799, 23233, 23478, 23521, 31185, 26247, 26524, 26550, 27468, 27827, 28779, 29634, 31117, 31166, 31292, 31623, 33457, 33499, 33540, 33655, 33775, 33747, 34662, 35506, 22057, 36008, 36838, 36942, 38686, 34442, 20420, 23784, 25105, 29273, 30011, 33253, 33469, 34558, 36032, 38597, 39187, 39381, 20171, 20250, 35299, 22238, 22602, 22730, 24315, 24555, 24618, 24724, 24674, 25040, 25106, 25296, 25913], "35392": [39745, 26214, 26800, 28023, 28784, 30028, 30342, 32117, 33445, 34809, 38283, 38542, 35997, 20977, 21182, 22806, 21683, 23475, 23830, 24936, 27010, 28079, 30861, 33995, 34903, 35442, 37799, 39608, 28012, 39336, 34521, 22435, 26623, 34510, 37390, 21123, 22151, 21508, 24275, 25313, 25785, 26684, 26680, 27579, 29554, 30906, 31339, 35226, 35282, 36203, 36611, 37101, 38307, 38548, 38761, 23398, 23731, 27005, 38989, 38990, 25499, 31520, 27179], "35456": [27263, 26806, 39949, 28511, 21106, 21917, 24688, 25324, 27963, 28167, 28369, 33883, 35088, 36676, 19988, 39993, 21494, 26907, 27194, 38788, 26666, 20828, 31427, 33970, 37340, 37772, 22107, 40232, 26658, 33541, 33841, 31909, 21000, 33477, 29926, 20094, 20355, 20896, 23506, 21002, 21208, 21223, 24059, 21914, 22570, 23014, 23436, 23448, 23515, 24178, 24185, 24739, 24863, 24931, 25022, 25563, 25954, 26577, 26707, 26874, 27454, 27475, 27735, 28450, 28567, 28485, 29872, 29976, 30435, 30475, 31487, 31649, 31777, 32233, 32566, 32752, 32925, 33382, 33694, 35251, 35532, 36011, 36996, 37969, 38291, 38289, 38306, 38501, 38867, 39208, 33304, 20024, 21547, 23736, 24012, 29609, 30284, 30524, 23721, 32747, 36107, 38593, 38929, 38996, 39000, 20225, 20238, 21361, 21916, 22120, 22522, 22855, 23305, 23492, 23696, 24076, 24190, 24524, 25582, 26426, 26071, 26082, 26399, 26827, 26820], "35648": [27231, 24112, 27589, 27671, 27773, 30079, 31048, 23395, 31232, 32000, 24509, 35215, 35352, 36020, 36215, 36556, 36637, 39138, 39438, 39740, 20096, 20605, 20736, 22931, 23452, 25135, 25216, 25836, 27450, 29344, 30097, 31047, 32681, 34811, 35516, 35696, 25516, 33738, 38816, 21513, 21507, 21931, 26708, 27224, 35440, 30759, 26485, 40653, 21364, 23458, 33050, 34384, 36870, 19992, 20037, 20167, 20241, 21450, 21560, 23470, 24339, 24613, 25937], "35712": [26429, 27714, 27762, 27875, 28792, 29699, 31350, 31406, 31496, 32026, 31998, 32102, 26087, 29275, 21435, 23621, 24040, 25298, 25312, 25369, 28192, 34394, 35377, 36317, 37624, 28417, 31142, 39770, 20136, 20139, 20140, 20379, 20384, 20689, 20807, 31478, 20849, 20982, 21332, 21281, 21375, 21483, 21932, 22659, 23777, 24375, 24394, 24623, 24656, 24685, 25375, 25945, 27211, 27841, 29378, 29421, 30703, 33016, 33029, 33288, 34126, 37111, 37857, 38911, 39255, 39514, 20208, 20957, 23597, 26241, 26989, 23616, 26354, 26997, 29577, 26704, 31873, 20677, 21220, 22343, 24062, 37670, 26020, 27427, 27453, 29748, 31105, 31165, 31563, 32202, 33465, 33740, 34943, 35167, 35641, 36817, 37329, 21535, 37504, 20061, 20534, 21477, 21306, 29399, 29590, 
30697, 33510, 36527, 39366, 39368, 39378, 20855, 24858, 34398, 21936, 31354, 20598, 23507, 36935, 38533, 20018, 27355, 37351, 23633, 23624], "35904": [25496, 31391, 27795, 38772, 36705, 31402, 29066, 38536, 31874, 26647, 32368, 26705, 37740, 21234, 21531, 34219, 35347, 32676, 36557, 37089, 21350, 34952, 31041, 20418, 20670, 21009, 20804, 21843, 22317, 29674, 22411, 22865, 24418, 24452, 24693, 24950, 24935, 25001, 25522, 25658, 25964, 26223, 26690, 28179, 30054, 31293, 31995, 32076, 32153, 32331, 32619, 33550, 33610, 34509, 35336, 35427, 35686, 36605, 38938, 40335, 33464, 36814, 39912], "35968": [21127, 25119, 25731, 28608, 38553, 26689, 20625, 27424, 27770, 28500, 31348, 32080, 34880, 35363, 26376, 20214, 20537, 20518, 20581, 20860, 21048, 21091, 21927, 22287, 22533, 23244, 24314, 25010, 25080, 25331, 25458, 26908, 27177, 29309, 29356, 29486, 30740, 30831, 32121, 30476, 32937, 35211, 35609, 36066, 36562, 36963, 37749, 38522, 38997, 39443, 40568, 20803, 21407, 21427, 24187, 24358, 28187, 28304, 29572, 29694, 32067, 33335, 35328, 35578, 38480, 20046, 20491, 21476, 21628, 22266, 22993, 23396, 24049, 24235, 24359, 25144, 25925, 26543, 28246, 29392, 31946, 34996, 32929, 32993, 33776, 34382, 35463, 36328, 37431, 38599, 39015, 40723, 20116, 20114, 20237, 21320, 21577, 21566, 23087, 24460, 24481, 24735, 26791, 27278, 29786, 30849, 35486, 35492, 35703, 37264, 20062, 39881, 20132, 20348, 20399, 20505, 20502, 20809, 20844, 21151, 21177, 21246, 21402, 21475, 21521], "36160": [21518, 21897, 22353, 22434, 22909, 23380, 23389, 23439, 24037, 24039, 24055, 24184, 24195, 24218, 24247, 24344, 24658, 24908, 25239, 25304, 25511, 25915, 26114, 26179, 26356, 26477, 26657, 26775, 27083, 27743, 27946, 28009, 28207, 28317, 30002, 30343, 30828, 31295, 31968, 32005, 32024, 32094, 32177, 32789, 32771, 32943, 32945, 33108, 33167, 33322, 33618, 34892, 34913, 35611, 36002, 36092, 37066, 37237, 37489, 30783, 37628, 38308, 38477], "36224": [38917, 39321, 39640, 40251, 21083, 21163, 21495, 21512, 22741, 25335, 28640, 35946, 36703, 40633, 20811, 21051, 21578, 22269, 31296, 37239, 40288, 40658, 29508, 28425, 33136, 29969, 24573, 24794, 39592, 29403, 36796, 27492, 38915, 20170, 22256, 22372, 22718, 23130, 24680, 25031, 26127, 26118, 26681, 26801, 28151, 30165, 32058, 33390, 39746, 20123, 20304, 21449, 21766, 23919, 24038, 24046, 26619, 27801, 29811, 30722, 35408, 37782, 35039, 22352, 24231, 25387, 20661, 20652, 20877, 26368, 21705, 22622, 22971, 23472, 24425, 25165, 25505, 26685, 27507, 28168, 28797, 37319, 29312, 30741, 30758, 31085, 25998, 32048, 33756, 35009, 36617, 38555, 21092, 22312, 26448, 32618, 36001, 20916, 22338, 38442, 22586, 27018, 32948, 21682, 23822, 22524, 30869, 40442, 20316, 21066, 21643, 25662, 26152, 26388, 26613, 31364, 31574, 32034, 37679, 26716, 39853, 31545, 21273, 20874, 21047], "36416": [23519, 25334, 25774, 25830, 26413, 27578, 34217, 38609, 30352, 39894, 25420, 37638, 39851, 30399, 26194, 19977, 20632, 21442, 23665, 24808, 25746, 25955, 26719, 29158, 29642, 29987, 31639, 32386, 34453, 35715, 36059, 37240, 39184, 26028, 26283, 27531, 20181, 20180, 20282, 20351, 21050, 21496, 21490, 21987, 22235, 22763, 22987, 22985, 23039, 23376, 23629, 24066, 24107, 24535, 24605, 25351, 25903, 23388, 26031, 26045, 26088, 26525, 27490], "36480": [27515, 27663, 29509, 31049, 31169, 31992, 32025, 32043, 32930, 33026, 33267, 35222, 35422, 35433, 35430, 35468, 35566, 36039, 36060, 38604, 39164, 27503, 20107, 20284, 20365, 20816, 23383, 23546, 24904, 25345, 26178, 27425, 28363, 27835, 29246, 29885, 30164, 30913, 31034, 
32780, 32819, 33258, 33940, 36766, 27728, 40575, 24335, 35672, 40235, 31482, 36600, 23437, 38635, 19971, 21489, 22519, 22833, 23241, 23460, 24713, 28287, 28422, 30142, 36074, 23455, 34048, 31712, 20594, 26612, 33437, 23649, 34122, 32286, 33294, 20889, 23556, 25448, 36198, 26012, 29038, 31038, 32023, 32773, 35613, 36554, 36974, 34503, 37034, 20511, 21242, 23610, 26451, 28796, 29237, 37196, 37320, 37675, 33509, 23490, 24369, 24825, 20027, 21462, 23432, 25163, 26417, 27530, 29417, 29664, 31278, 33131, 36259, 37202, 39318, 20754, 21463, 21610, 23551, 25480, 27193, 32172, 38656, 22234, 21454, 21608], "36672": [23447, 23601, 24030, 20462, 24833, 25342, 27954, 31168, 31179, 32066, 32333, 32722, 33261, 33311, 33936, 34886, 35186, 35728, 36468, 36655, 36913, 37195, 37228, 38598, 37276, 20160, 20303, 20805, 21313, 24467, 25102, 26580, 27713, 28171, 29539, 32294, 37325, 37507, 21460, 22809, 23487, 28113, 31069, 32302, 31899, 22654, 29087, 20986, 34899, 36848, 20426, 23803, 26149, 30636, 31459, 33308, 39423, 20934, 24490, 26092, 26991, 27529, 28147], "36736": [28310, 28516, 30462, 32020, 24033, 36981, 37255, 38918, 20966, 21021, 25152, 26257, 26329, 28186, 24246, 32210, 32626, 26360, 34223, 34295, 35576, 21161, 21465, 22899, 24207, 24464, 24661, 37604, 38500, 20663, 20767, 21213, 21280, 21319, 21484, 21736, 21830, 21809, 22039, 22888, 22974, 23100, 23477, 23558, 23567, 23569, 23578, 24196, 24202, 24288, 24432, 25215, 25220, 25307, 25484, 25463, 26119, 26124, 26157, 26230, 26494, 26786, 27167, 27189, 27836, 28040, 28169, 28248, 28988, 28966, 29031, 30151, 30465, 30813, 30977, 31077, 31216, 31456, 31505, 31911, 32057, 32918, 33750, 33931, 34121, 34909, 35059, 35359, 35388, 35412, 35443, 35937, 36062, 37284, 37478, 37758, 37912, 38556, 38808, 19978, 19976, 19998, 20055, 20887, 21104, 22478, 22580, 22732, 23330, 24120, 24773, 25854, 26465, 26454, 27972, 29366, 30067, 31331, 33976, 35698, 37304, 37664, 22065, 22516, 39166], "36928": [25325, 26893, 27542, 29165, 32340, 32887, 33394, 35302, 39135, 34645, 36785, 23611, 20280, 20449, 20405, 21767, 23072, 23517, 23529, 24515, 24910, 25391, 26032, 26187, 26862, 27035, 28024, 28145, 30003, 30137, 30495, 31070, 31206, 32051, 33251, 33455, 34218, 35242, 35386, 36523, 36763, 36914, 37341, 38663, 20154, 20161, 20995, 22645, 22764, 23563, 29978, 23613, 33102, 35338, 36805, 38499, 38765, 31525, 35535, 38920, 37218, 22259, 21416], "36992": [36887, 21561, 22402, 24101, 25512, 27700, 28810, 30561, 31883, 32736, 34928, 36930, 37204, 37648, 37656, 38543, 29790, 39620, 23815, 23913, 25968, 26530, 36264, 38619, 25454, 26441, 26905, 33733, 38935, 38592, 35070, 28548, 25722, 23544, 19990, 28716, 30045, 26159, 20932, 21046, 21218, 22995, 24449, 24615, 25104, 25919, 25972, 26143, 26228, 26866, 26646, 27491, 28165, 29298, 29983, 30427, 31934, 32854, 22768, 35069, 35199, 35488, 35475, 35531, 36893, 37266, 38738, 38745, 25993, 31246, 33030, 38587, 24109, 24796, 25114, 26021, 26132, 26512, 30707, 31309, 31821, 32318, 33034, 36012, 36196, 36321, 36447, 30889, 20999, 25305, 25509, 25666, 25240, 35373, 31363, 31680, 35500, 38634, 32118, 33292, 34633, 20185, 20808, 21315, 21344, 23459, 23554, 23574, 24029, 25126, 25159, 25776, 26643, 26676, 27849, 27973, 27927, 26579, 28508, 29006, 29053, 26059, 31359, 31661, 32218], "37184": [32330, 32680, 33146, 33307, 33337, 34214, 35438, 36046, 36341, 36984, 36983, 37549, 37521, 38275, 39854, 21069, 21892, 28472, 28982, 20840, 31109, 32341, 33203, 31950, 22092, 22609, 23720, 25514, 26366, 26365, 26970, 29401, 30095, 30094, 30990, 31062, 31199, 31895, 
32032, 32068, 34311, 35380, 38459, 36961, 40736, 20711, 21109, 21452, 21474, 20489, 21930, 22766, 22863, 29245, 23435, 23652, 21277, 24803, 24819, 25436, 25475, 25407, 25531], "37248": [25805, 26089, 26361, 24035, 27085, 27133, 28437, 29157, 20105, 30185, 30456, 31379, 31967, 32207, 32156, 32865, 33609, 33624, 33900, 33980, 34299, 35013, 36208, 36865, 36973, 37783, 38684, 39442, 20687, 22679, 24974, 33235, 34101, 36104, 36896, 20419, 20596, 21063, 21363, 24687, 25417, 26463, 28204, 36275, 36895, 20439, 23646, 36042, 26063, 32154, 21330, 34966, 20854, 25539, 23384, 23403, 23562, 25613, 26449, 36956, 20182, 22810, 22826, 27760, 35409, 21822, 22549, 22949, 24816, 25171, 26561, 33333, 26965, 38464, 39364, 39464, 20307, 22534, 23550, 32784, 23729, 24111, 24453, 24608, 24907, 25140, 26367, 27888, 28382, 32974, 33151, 33492, 34955, 36024, 36864, 36910, 38538, 40667, 39899, 20195, 21488, 22823, 31532, 37261, 38988, 40441, 28381, 28711, 21331, 21828, 23429, 25176, 25246, 25299, 27810, 28655, 29730, 35351, 37944, 28609, 35582, 33592, 20967, 34552, 21482], "37440": [21481, 20294, 36948, 36784, 22890, 33073, 24061, 31466, 36799, 26842, 35895, 29432, 40008, 27197, 35504, 20025, 21336, 22022, 22374, 25285, 25506, 26086, 27470, 28129, 28251, 28845, 30701, 31471, 31658, 32187, 32829, 32966, 34507, 35477, 37723, 22243, 22727, 24382, 26029, 26262, 27264, 27573, 30007, 35527, 20516, 30693, 22320, 24347, 24677, 26234, 27744, 30196, 31258, 32622, 33268, 34584, 36933, 39347, 31689, 30044, 31481, 31569, 33988], "37504": [36880, 31209, 31378, 33590, 23265, 30528, 20013, 20210, 23449, 24544, 25277, 26172, 26609, 27880, 34411, 34935, 35387, 37198, 37619, 39376, 27159, 28710, 29482, 33511, 33879, 36015, 19969, 20806, 20939, 21899, 23541, 24086, 24115, 24193, 24340, 24373, 24427, 24500, 25074, 25361, 26274, 26397, 28526, 29266, 30010, 30522, 32884, 33081, 33144, 34678, 35519, 35548, 36229, 36339, 37530, 38263, 38914, 40165, 21189, 25431, 30452, 26389, 27784, 29645, 36035, 37806, 38515, 27941, 22684, 26894, 27084, 36861, 37786, 30171, 36890, 22618, 26626, 25524, 27131, 20291, 28460, 26584, 36795, 34086, 32180, 37716, 26943, 28528, 22378, 22775, 23340, 32044, 29226, 21514, 37347, 40372, 20141, 20302, 20572, 20597, 21059, 35998, 21576, 22564, 23450, 24093, 24213, 24237, 24311, 24351, 24716, 25269, 25402, 25552, 26799, 27712, 30855, 31118, 31243, 32224, 33351, 35330, 35558, 36420, 36883], "37696": [37048, 37165, 37336, 40718, 27877, 25688, 25826, 25973, 28404, 30340, 31515, 36969, 37841, 28346, 21746, 24505, 25764, 36685, 36845, 37444, 20856, 22635, 22825, 23637, 24215, 28155, 32399, 29980, 36028, 36578, 39003, 28857, 20253, 27583, 28593, 30000, 38651, 20814, 21520, 22581, 22615, 22956, 23648, 24466, 26007, 26460, 28193, 30331, 33759, 36077, 36884, 37117, 37709, 30757, 30778, 21162, 24230, 22303, 22900, 24594, 20498, 20826, 20908], "37760": [20941, 20992, 21776, 22612, 22616, 22871, 23445, 23798, 23947, 24764, 25237, 25645, 26481, 26691, 26812, 26847, 30423, 28120, 28271, 28059, 28783, 29128, 24403, 30168, 31095, 31561, 31572, 31570, 31958, 32113, 21040, 33891, 34153, 34276, 35342, 35588, 35910, 36367, 36867, 36879, 37913, 38518, 38957, 39472, 38360, 20685, 21205, 21516, 22530, 23566, 24999, 25758, 27934, 30643, 31461, 33012, 33796, 36947, 37509, 23776, 40199, 21311, 24471, 24499, 28060, 29305, 30563, 31167, 31716, 27602, 29420, 35501, 26627, 27233, 20984, 31361, 26932, 23626, 40182, 33515, 23493, 37193, 28702, 22136, 23663, 24775, 25958, 27788, 35930, 36929, 38931, 21585, 26311, 37389, 22856, 37027, 20869, 20045, 20970, 
34201, 35598, 28760, 25466, 37707, 26978, 39348, 32260, 30071, 21335, 26976, 36575, 38627, 27741, 20108, 23612, 24336, 36841, 21250, 36049, 32905, 34425, 24319, 26085, 20083, 20837], "37952": [22914, 23615, 38894, 20219, 22922, 24525, 35469, 28641, 31152, 31074, 23527, 33905, 29483, 29105, 24180, 24565, 25467, 25754, 29123, 31896, 20035, 24316, 20043, 22492, 22178, 24745, 28611, 32013, 33021, 33075, 33215, 36786, 35223, 34468, 24052, 25226, 25773, 35207, 26487, 27874, 27966, 29750, 30772, 23110, 32629, 33453, 39340, 20467, 24259, 25309, 25490, 25943, 26479, 30403, 29260, 32972, 32954, 36649, 37197, 20493, 22521, 23186, 26757], "38016": [26995, 29028, 29437, 36023, 22770, 36064, 38506, 36889, 34687, 31204, 30695, 33833, 20271, 21093, 21338, 25293, 26575, 27850, 30333, 31636, 31893, 33334, 34180, 36843, 26333, 28448, 29190, 32283, 33707, 39361, 40614, 20989, 31665, 30834, 31672, 32903, 31560, 27368, 24161, 32908, 30033, 30048, 20843, 37474, 28300, 30330, 37271, 39658, 20240, 32624, 25244, 31567, 38309, 40169, 22138, 22617, 34532, 38588, 20276, 21028, 21322, 21453, 21467, 24070, 25644, 26001, 26495, 27710, 27726, 29256, 29359, 29677, 30036, 32321, 33324, 34281, 36009, 31684, 37318, 29033, 38930, 39151, 25405, 26217, 30058, 30436, 30928, 34115, 34542, 21290, 21329, 21542, 22915, 24199, 24444, 24754, 25161, 25209, 25259, 26000, 27604, 27852, 30130, 30382, 30865, 31192, 32203, 32631, 32933, 34987, 35513, 36027, 36991, 38750, 39131, 27147, 31800, 20633, 23614, 24494, 26503, 27608, 29749, 30473, 32654], "38208": [40763, 26570, 31255, 21305, 30091, 39661, 24422, 33181, 33777, 32920, 24380, 24517, 30050, 31558, 36924, 26727, 23019, 23195, 32016, 30334, 35628, 20469, 24426, 27161, 27703, 28418, 29922, 31080, 34920, 35413, 35961, 24287, 25551, 30149, 31186, 33495, 37672, 37618, 33948, 34541, 39981, 21697, 24428, 25996, 27996, 28693, 36007, 36051, 38971, 25935, 29942, 19981, 20184, 22496, 22827, 23142, 23500, 20904, 24067, 24220, 24598, 25206, 25975], "38272": [26023, 26222, 28014, 29238, 31526, 33104, 33178, 33433, 35676, 36000, 36070, 36212, 38428, 38468, 20398, 25771, 27494, 33310, 33889, 34154, 37096, 23553, 26963, 39080, 33914, 34135, 20239, 21103, 24489, 24133, 26381, 31119, 33145, 35079, 35206, 28149, 24343, 25173, 27832, 20175, 29289, 39826, 20998, 21563, 22132, 22707, 24996, 25198, 28954, 22894, 31881, 31966, 32027, 38640, 25991, 32862, 19993, 20341, 20853, 22592, 24163, 24179, 24330, 26564, 20006, 34109, 38281, 38491, 31859, 38913, 20731, 22721, 30294, 30887, 21029, 30629, 34065, 31622, 20559, 22793, 29255, 31687, 32232, 36794, 36820, 36941, 20415, 21193, 23081, 24321, 38829, 20445, 33303, 37610, 22275, 25429, 27497, 29995, 35036, 36628, 31298, 21215, 22675, 24917, 25098, 26286, 27597, 31807, 33769, 20515, 20472, 21253, 21574, 22577, 22857, 23453, 23792, 23791, 23849, 24214, 25265, 25447, 25918, 26041, 26379], "38464": [27861, 27873, 28921, 30770, 32299, 32990, 33459, 33804, 34028, 34562, 35090, 35370, 35914, 37030, 37586, 39165, 40179, 40300, 20047, 20129, 20621, 21078, 22346, 22952, 24125, 24536, 24537, 25151, 26292, 26395, 26576, 26834, 20882, 32033, 32938, 33192, 35584, 35980, 36031, 37502, 38450, 21536, 38956, 21271, 20693, 21340, 22696, 25778, 26420, 29287, 30566, 31302, 37350, 21187, 27809, 27526, 22528, 24140, 22868, 26412, 32763, 20961, 30406], "38528": [25705, 30952, 39764, 40635, 22475, 22969, 26151, 26522, 27598, 21737, 27097, 24149, 33180, 26517, 39850, 26622, 40018, 26717, 20134, 20451, 21448, 25273, 26411, 27819, 36804, 20397, 32365, 40639, 19975, 24930, 28288, 28459, 34067, 
21619, 26410, 39749, 24051, 31637, 23724, 23494, 34588, 28234, 34001, 31252, 33032, 22937, 31885, 27665, 30496, 21209, 22818, 28961, 29279, 30683, 38695, 40289, 26891, 23167, 23064, 20901, 21517, 21629, 26126, 30431, 36855, 37528, 40180, 23018, 29277, 28357, 20813, 26825, 32191, 32236, 38754, 40634, 25720, 27169, 33538, 22916, 23391, 27611, 29467, 30450, 32178, 32791, 33945, 20786, 26408, 40665, 30446, 26466, 21247, 39173, 23588, 25147, 31870, 36016, 21839, 24758, 32011, 38272, 21249, 20063, 20918, 22812, 29242, 32822, 37326, 24357, 30690, 21380, 24441, 32004, 34220, 35379, 36493, 38742, 26611, 34222, 37971, 24841, 24840, 27833, 30290], "38720": [35565, 36664, 21807, 20305, 20778, 21191, 21451, 23461, 24189, 24736, 24962, 25558, 26377, 26586, 28263, 28044, 29494, 29495, 30001, 31056, 35029, 35480, 36938, 37009, 37109, 38596, 34701, 22805, 20104, 20313, 19982, 35465, 36671, 38928, 20653, 24188, 22934, 23481, 24248, 25562, 25594, 25793, 26332, 26954, 27096, 27915, 28342, 29076, 29992, 31407, 32650, 32768, 33865, 33993, 35201, 35617, 36362, 36965, 38525, 39178, 24958, 25233, 27442], "38784": [27779, 28020, 32716, 32764, 28096, 32645, 34746, 35064, 26469, 33713, 38972, 38647, 27931, 32097, 33853, 37226, 20081, 21365, 23888, 27396, 28651, 34253, 34349, 35239, 21033, 21519, 23653, 26446, 26792, 29702, 29827, 30178, 35023, 35041, 37324, 38626, 38520, 24459, 29575, 31435, 33870, 25504, 30053, 21129, 27969, 28316, 29705, 30041, 30827, 31890, 38534, 31452, 40845, 20406, 24942, 26053, 34396, 20102, 20142, 20698, 20001, 20940, 23534, 26009, 26753, 28092, 29471, 30274, 30637, 31260, 31975, 33391, 35538, 36988, 37327, 38517, 38936, 21147, 32209, 20523, 21400, 26519, 28107, 29136, 29747, 33256, 36650, 38563, 40023, 40607, 29792, 22593, 28057, 32047, 39006, 20196, 20278, 20363, 20919, 21169, 23994, 24604, 29618, 31036, 33491, 37428, 38583, 38646, 38666, 40599, 40802, 26278, 27508, 21015, 21155, 28872, 35010, 24265, 24651, 24976, 28451, 29001, 31806, 32244, 32879], "38976": [34030, 36899, 37676, 21570, 39791, 27347, 28809, 36034, 36335, 38706, 21172, 23105, 24266, 24324, 26391, 27004, 27028, 28010, 28431, 29282, 29436, 31725, 32769, 32894, 34635, 37070, 20845, 40595, 31108, 32907, 37682, 35542, 20525, 21644, 35441, 27498, 36036, 33031, 24785, 26528, 40434, 20121, 20120, 39952, 35435, 34241, 34152, 26880, 28286, 30871, 33109], "39071": [24332, 19984, 19989, 20010, 20017, 20022, 20028, 20031, 20034, 20054, 20056, 20098, 20101, 35947, 20106, 33298, 24333, 20110, 20126, 20127, 20128, 20130, 20144, 20147, 20150, 20174, 20173, 20164, 20166, 20162, 20183, 20190, 20205, 20191, 20215, 20233, 20314, 20272, 20315, 20317, 20311, 20295, 20342, 20360, 20367, 20376, 20347, 20329, 20336, 20369, 20335, 20358, 20374, 20760, 20436, 20447, 20430, 20440, 20443, 20433, 20442, 20432, 20452, 20453, 20506, 20520, 20500, 20522, 20517, 20485, 20252, 20470, 20513, 20521, 20524, 20478, 20463, 20497, 20486, 20547, 20551, 26371, 20565, 20560, 20552, 20570, 20566, 20588, 20600, 20608, 20634, 20613, 20660, 20658], "39232": [20681, 20682, 20659, 20674, 20694, 20702, 20709, 20717, 20707, 20718, 20729, 20725, 20745, 20737, 20738, 20758, 20757, 20756, 20762, 20769, 20794, 20791, 20796, 20795, 20799, 20800, 20818, 20812, 20820, 20834, 31480, 20841, 20842, 20846, 20864, 20866, 22232, 20876, 20873, 20879, 20881, 20883, 20885, 20886, 20900, 20902, 20898, 20905, 20906, 20907, 20915, 20913, 20914, 20912, 20917, 20925, 20933, 20937, 20955, 20960, 34389, 20969, 20973], "39296": [20976, 20981, 20990, 20996, 21003, 21012, 21006, 21031, 21034, 21038, 
21043, 21049, 21071, 21060, 21067, 21068, 21086, 21076, 21098, 21108, 21097, 21107, 21119, 21117, 21133, 21140, 21138, 21105, 21128, 21137, 36776, 36775, 21164, 21165, 21180, 21173, 21185, 21197, 21207, 21214, 21219, 21222, 39149, 21216, 21235, 21237, 21240, 21241, 21254, 21256, 30008, 21261, 21264, 21263, 21269, 21274, 21283, 21295, 21297, 21299, 21304, 21312, 21318, 21317, 19991, 21321, 21325, 20950, 21342, 21353, 21358, 22808, 21371, 21367, 21378, 21398, 21408, 21414, 21413, 21422, 21424, 21430, 21443, 31762, 38617, 21471, 26364, 29166, 21486, 21480, 21485, 21498, 21505, 21565, 21568, 21548, 21549, 21564, 21550, 21558, 21545, 21533, 21582, 21647, 21621, 21646, 21599, 21617, 21623, 21616, 21650, 21627, 21632, 21622, 21636, 21648, 21638, 21703, 21666, 21688, 21669, 21676, 21700, 21704, 21672], "39488": [21675, 21698, 21668, 21694, 21692, 21720, 21733, 21734, 21775, 21780, 21757, 21742, 21741, 21754, 21730, 21817, 21824, 21859, 21836, 21806, 21852, 21829, 21846, 21847, 21816, 21811, 21853, 21913, 21888, 21679, 21898, 21919, 21883, 21886, 21912, 21918, 21934, 21884, 21891, 21929, 21895, 21928, 21978, 21957, 21983, 21956, 21980, 21988, 21972, 22036, 22007, 22038, 22014, 22013, 22043, 22009, 22094, 22096, 29151, 22068, 22070, 22066, 22072], "39552": [22123, 22116, 22063, 22124, 22122, 22150, 22144, 22154, 22176, 22164, 22159, 22181, 22190, 22198, 22196, 22210, 22204, 22209, 22211, 22208, 22216, 22222, 22225, 22227, 22231, 22254, 22265, 22272, 22271, 22276, 22281, 22280, 22283, 22285, 22291, 22296, 22294, 21959, 22300, 22310, 22327, 22328, 22350, 22331, 22336, 22351, 22377, 22464, 22408, 22369, 22399, 22409, 22419, 22432, 22451, 22436, 22442, 22448, 22467, 22470, 22484, 22482, 22483, 22538, 22486, 22499, 22539, 22553, 22557, 22642, 22561, 22626, 22603, 22640, 27584, 22610, 22589, 22649, 22661, 22713, 22687, 22699, 22714, 22750, 22715, 22712, 22702, 22725, 22739, 22737, 22743, 22745, 22744, 22757, 22748, 22756, 22751, 22767, 22778, 22777, 22779, 22780, 22781, 22786, 22794, 22800, 22811, 26790, 22821, 22828, 22829, 22834, 22840, 22846, 31442, 22869, 22864, 22862, 22874, 22872, 22882, 22880, 22887, 22892, 22889], "39744": [22904, 22913, 22941, 20318, 20395, 22947, 22962, 22982, 23016, 23004, 22925, 23001, 23002, 23077, 23071, 23057, 23068, 23049, 23066, 23104, 23148, 23113, 23093, 23094, 23138, 23146, 23194, 23228, 23230, 23243, 23234, 23229, 23267, 23255, 23270, 23273, 23254, 23290, 23291, 23308, 23307, 23318, 23346, 23248, 23338, 23350, 23358, 23363, 23365, 23360, 23377, 23381, 23386, 23387, 23397, 23401, 23408, 23411, 23413, 23416, 25992, 23418, 23424], "39808": [23427, 23462, 23480, 23491, 23495, 23497, 23508, 23504, 23524, 23526, 23522, 23518, 23525, 23531, 23536, 23542, 23539, 23557, 23559, 23560, 23565, 23571, 23584, 23586, 23592, 23608, 23609, 23617, 23622, 23630, 23635, 23632, 23631, 23409, 23660, 23662, 20066, 23670, 23673, 23692, 23697, 23700, 22939, 23723, 23739, 23734, 23740, 23735, 23749, 23742, 23751, 23769, 23785, 23805, 23802, 23789, 23948, 23786, 23819, 23829, 23831, 23900, 23839, 23835, 23825, 23828, 23842, 23834, 23833, 23832, 23884, 23890, 23886, 23883, 23916, 23923, 23926, 23943, 23940, 23938, 23970, 23965, 23980, 23982, 23997, 23952, 23991, 23996, 24009, 24013, 24019, 24018, 24022, 24027, 24043, 24050, 24053, 24075, 24090, 24089, 24081, 24091, 24118, 24119, 24132, 24131, 24128, 24142, 24151, 24148, 24159, 24162, 24164, 24135, 24181, 24182, 24186, 40636, 24191, 24224, 24257, 24258, 24264, 24272, 24271], "40000": [24278, 24291, 24285, 24282, 24283, 24290, 24289, 24296, 24297, 
24300, 24305, 24307, 24304, 24308, 24312, 24318, 24323, 24329, 24413, 24412, 24331, 24337, 24342, 24361, 24365, 24376, 24385, 24392, 24396, 24398, 24367, 24401, 24406, 24407, 24409, 24417, 24429, 24435, 24439, 24451, 24450, 24447, 24458, 24456, 24465, 24455, 24478, 24473, 24472, 24480, 24488, 24493, 24508, 24534, 24571, 24548, 24568, 24561, 24541, 24755, 24575, 24609, 24672], "40064": [24601, 24592, 24617, 24590, 24625, 24603, 24597, 24619, 24614, 24591, 24634, 24666, 24641, 24682, 24695, 24671, 24650, 24646, 24653, 24675, 24643, 24676, 24642, 24684, 24683, 24665, 24705, 24717, 24807, 24707, 24730, 24708, 24731, 24726, 24727, 24722, 24743, 24715, 24801, 24760, 24800, 24787, 24756, 24560, 24765, 24774, 24757, 24792, 24909, 24853, 24838, 24822, 24823, 24832, 24820, 24826, 24835, 24865, 24827, 24817, 24845, 24846, 24903, 24894, 24872, 24871, 24906, 24895, 24892, 24876, 24884, 24893, 24898, 24900, 24947, 24951, 24920, 24921, 24922, 24939, 24948, 24943, 24933, 24945, 24927, 24925, 24915, 24949, 24985, 24982, 24967, 25004, 24980, 24986, 24970, 24977, 25003, 25006, 25036, 25034, 25033, 25079, 25032, 25027, 25030, 25018, 25035, 32633, 25037, 25062, 25059, 25078, 25082, 25076, 25087, 25085, 25084, 25086, 25088, 25096, 25097, 25101, 25100, 25108, 25115], "40256": [25118, 25121, 25130, 25134, 25136, 25138, 25139, 25153, 25166, 25182, 25187, 25179, 25184, 25192, 25212, 25218, 25225, 25214, 25234, 25235, 25238, 25300, 25219, 25236, 25303, 25297, 25275, 25295, 25343, 25286, 25812, 25288, 25308, 25292, 25290, 25282, 25287, 25243, 25289, 25356, 25326, 25329, 25383, 25346, 25352, 25327, 25333, 25424, 25406, 25421, 25628, 25423, 25494, 25486, 25472, 25515, 25462, 25507, 25487, 25481, 25503, 25525, 25451], "40320": [25449, 25534, 25577, 25536, 25542, 25571, 25545, 25554, 25590, 25540, 25622, 25652, 25606, 25619, 25638, 25654, 25885, 25623, 25640, 25615, 25703, 25711, 25718, 25678, 25898, 25749, 25747, 25765, 25769, 25736, 25788, 25818, 25810, 25797, 25799, 25787, 25816, 25794, 25841, 25831, 33289, 25824, 25825, 25260, 25827, 25839, 25900, 25846, 25844, 25842, 25850, 25856, 25853, 25880, 25884, 25861, 25892, 25891, 25899, 25908, 25909, 25911, 25910, 25912, 30027, 25928, 25942, 25941, 25933, 25944, 25950, 25949, 25970, 25976, 25986, 25987, 35722, 26011, 26015, 26027, 26039, 26051, 26054, 26049, 26052, 26060, 26066, 26075, 26073, 26080, 26081, 26097, 26482, 26122, 26115, 26107, 26483, 26165, 26166, 26164, 26140, 26191, 26180, 26185, 26177, 26206, 26205, 26212, 26215, 26216, 26207, 26210, 26224, 26243, 26248, 26254, 26249, 26244, 26264, 26269, 26305, 26297, 26313, 26302, 26300], "40512": [26308, 26296, 26326, 26330, 26336, 26175, 26342, 26345, 26352, 26357, 26359, 26383, 26390, 26398, 26406, 26407, 38712, 26414, 26431, 26422, 26433, 26424, 26423, 26438, 26462, 26464, 26457, 26467, 26468, 26505, 26480, 26537, 26492, 26474, 26508, 26507, 26534, 26529, 26501, 26551, 26607, 26548, 26604, 26547, 26601, 26552, 26596, 26590, 26589, 26594, 26606, 26553, 26574, 26566, 26599, 27292, 26654, 26694, 26665, 26688, 26701, 26674, 26702], "40576": [26803, 26667, 26713, 26723, 26743, 26751, 26783, 26767, 26797, 26772, 26781, 26779, 26755, 27310, 26809, 26740, 26805, 26784, 26810, 26895, 26765, 26750, 26881, 26826, 26888, 26840, 26914, 26918, 26849, 26892, 26829, 26836, 26855, 26837, 26934, 26898, 26884, 26839, 26851, 26917, 26873, 26848, 26863, 26920, 26922, 26906, 26915, 26913, 26822, 27001, 26999, 26972, 27000, 26987, 26964, 27006, 26990, 26937, 26996, 26941, 26969, 26928, 26977, 26974, 26973, 27009, 26986, 27058, 27054, 27088, 
27071, 27073, 27091, 27070, 27086, 23528, 27082, 27101, 27067, 27075, 27047, 27182, 27025, 27040, 27036, 27029, 27060, 27102, 27112, 27138, 27163, 27135, 27402, 27129, 27122, 27111, 27141, 27057, 27166, 27117, 27156, 27115, 27146, 27154, 27329, 27171, 27155, 27204, 27148, 27250, 27190, 27256, 27207, 27234, 27225, 27238, 27208, 27192, 27170, 27280, 27277, 27296, 27268, 27298, 27299], "40768": [27287, 34327, 27323, 27331, 27330, 27320, 27315, 27308, 27358, 27345, 27359, 27306, 27354, 27370, 27387, 27397, 34326, 27386, 27410, 27414, 39729, 27423, 27448, 27447, 30428, 27449, 39150, 27463, 27459, 27465, 27472, 27481, 27476, 27483, 27487, 27489, 27512, 27513, 27519, 27520, 27524, 27523, 27533, 27544, 27541, 27550, 27556, 27562, 27563, 27567, 27570, 27569, 27571, 27575, 27580, 27590, 27595, 27603, 27615, 27628, 27627, 27635, 27631], "40832": [40638, 27656, 27667, 27668, 27675, 27684, 27683, 27742, 27733, 27746, 27754, 27778, 27789, 27802, 27777, 27803, 27774, 27752, 27763, 27794, 27792, 27844, 27889, 27859, 27837, 27863, 27845, 27869, 27822, 27825, 27838, 27834, 27867, 27887, 27865, 27882, 27935, 34893, 27958, 27947, 27965, 27960, 27929, 27957, 27955, 27922, 27916, 28003, 28051, 28004, 27994, 28025, 27993, 28046, 28053, 28644, 28037, 28153, 28181, 28170, 28085, 28103, 28134, 28088, 28102, 28140, 28126, 28108, 28136, 28114, 28101, 28154, 28121, 28132, 28117, 28138, 28142, 28205, 28270, 28206, 28185, 28274, 28255, 28222, 28195, 28267, 28203, 28278, 28237, 28191, 28227, 28218, 28238, 28196, 28415, 28189, 28216, 28290, 28330, 28312, 28361, 28343, 28371, 28349, 28335, 28356, 28338, 28372, 28373, 28303, 28325, 28354, 28319, 28481, 28433, 28748, 28396, 28408, 28414, 28479, 28402, 28465, 28399, 28466, 28364], "57408": [28478, 28435, 28407, 28550, 28538, 28536, 28545, 28544, 28527, 28507, 28659, 28525, 28546, 28540, 28504, 28558, 28561, 28610, 28518, 28595, 28579, 28577, 28580, 28601, 28614, 28586, 28639, 28629, 28652, 28628, 28632, 28657, 28654, 28635, 28681, 28683, 28666, 28689, 28673, 28687, 28670, 28699, 28698, 28532, 28701, 28696, 28703, 28720, 28734, 28722, 28753, 28771, 28825, 28818, 28847, 28913, 28844, 28856, 28851, 28846, 28895, 28875, 28893], "57472": [28889, 28937, 28925, 28956, 28953, 29029, 29013, 29064, 29030, 29026, 29004, 29014, 29036, 29071, 29179, 29060, 29077, 29096, 29100, 29143, 29113, 29118, 29138, 29129, 29140, 29134, 29152, 29164, 29159, 29173, 29180, 29177, 29183, 29197, 29200, 29211, 29224, 29229, 29228, 29232, 29234, 29243, 29244, 29247, 29248, 29254, 29259, 29272, 29300, 29310, 29314, 29313, 29319, 29330, 29334, 29346, 29351, 29369, 29362, 29379, 29382, 29380, 29390, 29394, 29410, 29408, 29409, 29433, 29431, 20495, 29463, 29450, 29468, 29462, 29469, 29492, 29487, 29481, 29477, 29502, 29518, 29519, 40664, 29527, 29546, 29544, 29552, 29560, 29557, 29563, 29562, 29640, 29619, 29646, 29627, 29632, 29669, 29678, 29662, 29858, 29701, 29807, 29733, 29688, 29746, 29754, 29781, 29759, 29791, 29785, 29761, 29788, 29801, 29808, 29795, 29802, 29814, 29822, 29835, 29854, 29863, 29898, 29903, 29908, 29681], "57664": [29920, 29923, 29927, 29929, 29934, 29938, 29936, 29937, 29944, 29943, 29956, 29955, 29957, 29964, 29966, 29965, 29973, 29971, 29982, 29990, 29996, 30012, 30020, 30029, 30026, 30025, 30043, 30022, 30042, 30057, 30052, 30055, 30059, 30061, 30072, 30070, 30086, 30087, 30068, 30090, 30089, 30082, 30100, 30106, 30109, 30117, 30115, 30146, 30131, 30147, 30133, 30141, 30136, 30140, 30129, 30157, 30154, 30162, 30169, 30179, 30174, 30206, 30207], "57728": [30204, 30209, 30192, 30202, 
30194, 30195, 30219, 30221, 30217, 30239, 30247, 30240, 30241, 30242, 30244, 30260, 30256, 30267, 30279, 30280, 30278, 30300, 30296, 30305, 30306, 30312, 30313, 30314, 30311, 30316, 30320, 30322, 30326, 30328, 30332, 30336, 30339, 30344, 30347, 30350, 30358, 30355, 30361, 30362, 30384, 30388, 30392, 30393, 30394, 30402, 30413, 30422, 30418, 30430, 30433, 30437, 30439, 30442, 34351, 30459, 30472, 30471, 30468, 30505, 30500, 30494, 30501, 30502, 30491, 30519, 30520, 30535, 30554, 30568, 30571, 30555, 30565, 30591, 30590, 30585, 30606, 30603, 30609, 30624, 30622, 30640, 30646, 30649, 30655, 30652, 30653, 30651, 30663, 30669, 30679, 30682, 30684, 30691, 30702, 30716, 30732, 30738, 31014, 30752, 31018, 30789, 30862, 30836, 30854, 30844, 30874, 30860, 30883, 30901, 30890, 30895, 30929, 30918, 30923, 30932, 30910, 30908, 30917, 30922, 30956], "57920": [30951, 30938, 30973, 30964, 30983, 30994, 30993, 31001, 31020, 31019, 31040, 31072, 31063, 31071, 31066, 31061, 31059, 31098, 31103, 31114, 31133, 31143, 40779, 31146, 31150, 31155, 31161, 31162, 31177, 31189, 31207, 31212, 31201, 31203, 31240, 31245, 31256, 31257, 31264, 31263, 31104, 31281, 31291, 31294, 31287, 31299, 31319, 31305, 31329, 31330, 31337, 40861, 31344, 31353, 31357, 31368, 31383, 31381, 31384, 31382, 31401, 31432, 31408], "57984": [31414, 31429, 31428, 31423, 36995, 31431, 31434, 31437, 31439, 31445, 31443, 31449, 31450, 31453, 31457, 31458, 31462, 31469, 31472, 31490, 31503, 31498, 31494, 31539, 31512, 31513, 31518, 31541, 31528, 31542, 31568, 31610, 31492, 31565, 31499, 31564, 31557, 31605, 31589, 31604, 31591, 31600, 31601, 31596, 31598, 31645, 31640, 31647, 31629, 31644, 31642, 31627, 31634, 31631, 31581, 31641, 31691, 31681, 31692, 31695, 31668, 31686, 31709, 31721, 31761, 31764, 31718, 31717, 31840, 31744, 31751, 31763, 31731, 31735, 31767, 31757, 31734, 31779, 31783, 31786, 31775, 31799, 31787, 31805, 31820, 31811, 31828, 31823, 31808, 31824, 31832, 31839, 31844, 31830, 31845, 31852, 31861, 31875, 31888, 31908, 31917, 31906, 31915, 31905, 31912, 31923, 31922, 31921, 31918, 31929, 31933, 31936, 31941, 31938, 31960, 31954, 31964, 31970, 39739, 31983, 31986, 31988, 31990, 31994, 32006], "58176": [32002, 32028, 32021, 32010, 32069, 32075, 32046, 32050, 32063, 32053, 32070, 32115, 32086, 32078, 32114, 32104, 32110, 32079, 32099, 32147, 32137, 32091, 32143, 32125, 32155, 32186, 32174, 32163, 32181, 32199, 32189, 32171, 32317, 32162, 32175, 32220, 32184, 32159, 32176, 32216, 32221, 32228, 32222, 32251, 32242, 32225, 32261, 32266, 32291, 32289, 32274, 32305, 32287, 32265, 32267, 32290, 32326, 32358, 32315, 32309, 32313, 32323, 32311], "58240": [32306, 32314, 32359, 32349, 32342, 32350, 32345, 32346, 32377, 32362, 32361, 32380, 32379, 32387, 32213, 32381, 36782, 32383, 32392, 32393, 32396, 32402, 32400, 32403, 32404, 32406, 32398, 32411, 32412, 32568, 32570, 32581, 32588, 32589, 32590, 32592, 32593, 32597, 32596, 32600, 32607, 32608, 32616, 32617, 32615, 32632, 32642, 32646, 32643, 32648, 32647, 32652, 32660, 32670, 32669, 32666, 32675, 32687, 32690, 32697, 32686, 32694, 32696, 35697, 32709, 32710, 32714, 32725, 32724, 32737, 32742, 32745, 32755, 32761, 39132, 32774, 32772, 32779, 32786, 32792, 32793, 32796, 32801, 32808, 32831, 32827, 32842, 32838, 32850, 32856, 32858, 32863, 32866, 32872, 32883, 32882, 32880, 32886, 32889, 32893, 32895, 32900, 32902, 32901, 32923, 32915, 32922, 32941, 20880, 32940, 32987, 32997, 32985, 32989, 32964, 32986, 32982, 33033, 33007, 33009, 33051, 33065, 33059, 33071, 33099], "58432": [38539, 33094, 33086, 
33107, 33105, 33020, 33137, 33134, 33125, 33126, 33140, 33155, 33160, 33162, 33152, 33154, 33184, 33173, 33188, 33187, 33119, 33171, 33193, 33200, 33205, 33214, 33208, 33213, 33216, 33218, 33210, 33225, 33229, 33233, 33241, 33240, 33224, 33242, 33247, 33248, 33255, 33274, 33275, 33278, 33281, 33282, 33285, 33287, 33290, 33293, 33296, 33302, 33321, 33323, 33336, 33331, 33344, 33369, 33368, 33373, 33370, 33375, 33380], "58496": [33378, 33384, 33386, 33387, 33326, 33393, 33399, 33400, 33406, 33421, 33426, 33451, 33439, 33467, 33452, 33505, 33507, 33503, 33490, 33524, 33523, 33530, 33683, 33539, 33531, 33529, 33502, 33542, 33500, 33545, 33497, 33589, 33588, 33558, 33586, 33585, 33600, 33593, 33616, 33605, 33583, 33579, 33559, 33560, 33669, 33690, 33706, 33695, 33698, 33686, 33571, 33678, 33671, 33674, 33660, 33717, 33651, 33653, 33696, 33673, 33704, 33780, 33811, 33771, 33742, 33789, 33795, 33752, 33803, 33729, 33783, 33799, 33760, 33778, 33805, 33826, 33824, 33725, 33848, 34054, 33787, 33901, 33834, 33852, 34138, 33924, 33911, 33899, 33965, 33902, 33922, 33897, 33862, 33836, 33903, 33913, 33845, 33994, 33890, 33977, 33983, 33951, 34009, 33997, 33979, 34010, 34000, 33985, 33990, 34006, 33953, 34081, 34047, 34036, 34071, 34072, 34092, 34079, 34069, 34068, 34044, 34112, 34147, 34136, 34120], "58688": [34113, 34306, 34123, 34133, 34176, 34212, 34184, 34193, 34186, 34216, 34157, 34196, 34203, 34282, 34183, 34204, 34167, 34174, 34192, 34249, 34234, 34255, 34233, 34256, 34261, 34269, 34277, 34268, 34297, 34314, 34323, 34315, 34302, 34298, 34310, 34338, 34330, 34352, 34367, 34381, 20053, 34388, 34399, 34407, 34417, 34451, 34467, 34473, 34474, 34443, 34444, 34486, 34479, 34500, 34502, 34480, 34505, 34851, 34475, 34516, 34526, 34537, 34540], "58752": [34527, 34523, 34543, 34578, 34566, 34568, 34560, 34563, 34555, 34577, 34569, 34573, 34553, 34570, 34612, 34623, 34615, 34619, 34597, 34601, 34586, 34656, 34655, 34680, 34636, 34638, 34676, 34647, 34664, 34670, 34649, 34643, 34659, 34666, 34821, 34722, 34719, 34690, 34735, 34763, 34749, 34752, 34768, 38614, 34731, 34756, 34739, 34759, 34758, 34747, 34799, 34802, 34784, 34831, 34829, 34814, 34806, 34807, 34830, 34770, 34833, 34838, 34837, 34850, 34849, 34865, 34870, 34873, 34855, 34875, 34884, 34882, 34898, 34905, 34910, 34914, 34923, 34945, 34942, 34974, 34933, 34941, 34997, 34930, 34946, 34967, 34962, 34990, 34969, 34978, 34957, 34980, 34992, 35007, 34993, 35011, 35012, 35028, 35032, 35033, 35037, 35065, 35074, 35068, 35060, 35048, 35058, 35076, 35084, 35082, 35091, 35139, 35102, 35109, 35114, 35115, 35137, 35140, 35131, 35126, 35128, 35148, 35101, 35168, 35166], "58944": [35174, 35172, 35181, 35178, 35183, 35188, 35191, 35198, 35203, 35208, 35210, 35219, 35224, 35233, 35241, 35238, 35244, 35247, 35250, 35258, 35261, 35263, 35264, 35290, 35292, 35293, 35303, 35316, 35320, 35331, 35350, 35344, 35340, 35355, 35357, 35365, 35382, 35393, 35419, 35410, 35398, 35400, 35452, 35437, 35436, 35426, 35461, 35458, 35460, 35496, 35489, 35473, 35493, 35494, 35482, 35491, 35524, 35533, 35522, 35546, 35563, 35571, 35559], "59008": [35556, 35569, 35604, 35552, 35554, 35575, 35550, 35547, 35596, 35591, 35610, 35553, 35606, 35600, 35607, 35616, 35635, 38827, 35622, 35627, 35646, 35624, 35649, 35660, 35663, 35662, 35657, 35670, 35675, 35674, 35691, 35679, 35692, 35695, 35700, 35709, 35712, 35724, 35726, 35730, 35731, 35734, 35737, 35738, 35898, 35905, 35903, 35912, 35916, 35918, 35920, 35925, 35938, 35948, 35960, 35962, 35970, 35977, 35973, 35978, 35981, 35982, 35988, 35964, 
35992, 25117, 36013, 36010, 36029, 36018, 36019, 36014, 36022, 36040, 36033, 36068, 36067, 36058, 36093, 36090, 36091, 36100, 36101, 36106, 36103, 36111, 36109, 36112, 40782, 36115, 36045, 36116, 36118, 36199, 36205, 36209, 36211, 36225, 36249, 36290, 36286, 36282, 36303, 36314, 36310, 36300, 36315, 36299, 36330, 36331, 36319, 36323, 36348, 36360, 36361, 36351, 36381, 36382, 36368, 36383, 36418, 36405, 36400, 36404, 36426], "59200": [36423, 36425, 36428, 36432, 36424, 36441, 36452, 36448, 36394, 36451, 36437, 36470, 36466, 36476, 36481, 36487, 36485, 36484, 36491, 36490, 36499, 36497, 36500, 36505, 36522, 36513, 36524, 36528, 36550, 36529, 36542, 36549, 36552, 36555, 36571, 36579, 36604, 36603, 36587, 36606, 36618, 36613, 36629, 36626, 36633, 36627, 36636, 36639, 36635, 36620, 36646, 36659, 36667, 36665, 36677, 36674, 36670, 36684, 36681, 36678, 36686, 36695, 36700], "59264": [36706, 36707, 36708, 36764, 36767, 36771, 36781, 36783, 36791, 36826, 36837, 36834, 36842, 36847, 36999, 36852, 36869, 36857, 36858, 36881, 36885, 36897, 36877, 36894, 36886, 36875, 36903, 36918, 36917, 36921, 36856, 36943, 36944, 36945, 36946, 36878, 36937, 36926, 36950, 36952, 36958, 36968, 36975, 36982, 38568, 36978, 36994, 36989, 36993, 36992, 37002, 37001, 37007, 37032, 37039, 37041, 37045, 37090, 37092, 25160, 37083, 37122, 37138, 37145, 37170, 37168, 37194, 37206, 37208, 37219, 37221, 37225, 37235, 37234, 37259, 37257, 37250, 37282, 37291, 37295, 37290, 37301, 37300, 37306, 37312, 37313, 37321, 37323, 37328, 37334, 37343, 37345, 37339, 37372, 37365, 37366, 37406, 37375, 37396, 37420, 37397, 37393, 37470, 37463, 37445, 37449, 37476, 37448, 37525, 37439, 37451, 37456, 37532, 37526, 37523, 37531, 37466, 37583, 37561, 37559, 37609, 37647, 37626, 37700, 37678], "59456": [37657, 37666, 37658, 37667, 37690, 37685, 37691, 37724, 37728, 37756, 37742, 37718, 37808, 37804, 37805, 37780, 37817, 37846, 37847, 37864, 37861, 37848, 37827, 37853, 37840, 37832, 37860, 37914, 37908, 37907, 37891, 37895, 37904, 37942, 37931, 37941, 37921, 37946, 37953, 37970, 37956, 37979, 37984, 37986, 37982, 37994, 37417, 38000, 38005, 38007, 38013, 37978, 38012, 38014, 38017, 38015, 38274, 38279, 38282, 38292, 38294, 38296, 38297], "59520": [38304, 38312, 38311, 38317, 38332, 38331, 38329, 38334, 38346, 28662, 38339, 38349, 38348, 38357, 38356, 38358, 38364, 38369, 38373, 38370, 38433, 38440, 38446, 38447, 38466, 38476, 38479, 38475, 38519, 38492, 38494, 38493, 38495, 38502, 38514, 38508, 38541, 38552, 38549, 38551, 38570, 38567, 38577, 38578, 38576, 38580, 38582, 38584, 38585, 38606, 38603, 38601, 38605, 35149, 38620, 38669, 38613, 38649, 38660, 38662, 38664, 38675, 38670, 38673, 38671, 38678, 38681, 38692, 38698, 38704, 38713, 38717, 38718, 38724, 38726, 38728, 38722, 38729, 38748, 38752, 38756, 38758, 38760, 21202, 38763, 38769, 38777, 38789, 38780, 38785, 38778, 38790, 38795, 38799, 38800, 38812, 38824, 38822, 38819, 38835, 38836, 38851, 38854, 38856, 38859, 38876, 38893, 40783, 38898, 31455, 38902, 38901, 38927, 38924, 38968, 38948, 38945, 38967, 38973, 38982, 38991, 38987, 39019, 39023, 39024], "59712": [39025, 39028, 39027, 39082, 39087, 39089, 39094, 39108, 39107, 39110, 39145, 39147, 39171, 39177, 39186, 39188, 39192, 39201, 39197, 39198, 39204, 39200, 39212, 39214, 39229, 39230, 39234, 39241, 39237, 39248, 39243, 39249, 39250, 39244, 39253, 39319, 39320, 39333, 39341, 39342, 39356, 39391, 39387, 39389, 39384, 39377, 39405, 39406, 39409, 39410, 39419, 39416, 39425, 39439, 39429, 39394, 39449, 39467, 39479, 39493, 39490, 39488, 
39491], "59776": [39486, 39509, 39501, 39515, 39511, 39519, 39522, 39525, 39524, 39529, 39531, 39530, 39597, 39600, 39612, 39616, 39631, 39633, 39635, 39636, 39646, 39647, 39650, 39651, 39654, 39663, 39659, 39662, 39668, 39665, 39671, 39675, 39686, 39704, 39706, 39711, 39714, 39715, 39717, 39719, 39720, 39721, 39722, 39726, 39727, 39730, 39748, 39747, 39759, 39757, 39758, 39761, 39768, 39796, 39827, 39811, 39825, 39830, 39831, 39839, 39840, 39848, 39860, 39872, 39882, 39865, 39878, 39887, 39889, 39890, 39907, 39906, 39908, 39892, 39905, 39994, 39922, 39921, 39920, 39957, 39956, 39945, 39955, 39948, 39942, 39944, 39954, 39946, 39940, 39982, 39963, 39973, 39972, 39969, 39984, 40007, 39986, 40006, 39998, 40026, 40032, 40039, 40054, 40056, 40167, 40172, 40176, 40201, 40200, 40171, 40195, 40198, 40234, 40230, 40367, 40227, 40223, 40260, 40213, 40210, 40257, 40255, 40254, 40262, 40264], "59968": [40285, 40286, 40292, 40273, 40272, 40281, 40306, 40329, 40327, 40363, 40303, 40314, 40346, 40356, 40361, 40370, 40388, 40385, 40379, 40376, 40378, 40390, 40399, 40386, 40409, 40403, 40440, 40422, 40429, 40431, 40445, 40474, 40475, 40478, 40565, 40569, 40573, 40577, 40584, 40587, 40588, 40594, 40597, 40593, 40605, 40613, 40617, 40632, 40618, 40621, 38753, 40652, 40654, 40655, 40656, 40660, 40668, 40670, 40669, 40672, 40677, 40680, 40687], "60032": [40692, 40694, 40695, 40697, 40699, 40700, 40701, 40711, 40712, 30391, 40725, 40737, 40748, 40766, 40778, 40786, 40788, 40803, 40799, 40800, 40801, 40806, 40807, 40812, 40810, 40823, 40818, 40822, 40853, 40860, 40864, 22575, 27079, 36953, 29796, 20956, 29081], "60736": [32394, 35100, 37704, 37512, 34012, 20425, 28859, 26161, 26824, 37625, 26363, 24389, 20008, 20193, 20220, 20224, 20227, 20281, 20310, 20370, 20362, 20378, 20372, 20429, 20544, 20514, 20479, 20510, 20550, 20592, 20546, 20628, 20724, 20696, 20810, 20836, 20893, 20926, 20972, 21013, 21148, 21158, 21184, 21211, 21248, 21255, 21284, 21362, 21395, 21426, 21469, 64014, 21660, 21642, 21673, 21759, 21894, 22361, 22373, 22444, 22472, 22471, 64015], "60800": [64016, 22686, 22706, 22795, 22867, 22875, 22877, 22883, 22948, 22970, 23382, 23488, 29999, 23512, 23532, 23582, 23718, 23738, 23797, 23847, 23891, 64017, 23874, 23917, 23992, 23993, 24016, 24353, 24372, 24423, 24503, 24542, 24669, 24709, 24714, 24798, 24789, 24864, 24818, 24849, 24887, 24880, 24984, 25107, 25254, 25589, 25696, 25757, 25806, 25934, 26112, 26133, 26171, 26121, 26158, 26142, 26148, 26213, 26199, 26201, 64018, 26227, 26265, 26272, 26290, 26303, 26362, 26382, 63785, 26470, 26555, 26706, 26560, 26625, 26692, 26831, 64019, 26984, 64020, 27032, 27106, 27184, 27243, 27206, 27251, 27262, 27362, 27364, 27606, 27711, 27740, 27782, 27759, 27866, 27908, 28039, 28015, 28054, 28076, 28111, 28152, 28146, 28156, 28217, 28252, 28199, 28220, 28351, 28552, 28597, 28661, 28677, 28679, 28712, 28805, 28843, 28943, 28932, 29020, 28998, 28999, 64021, 29121, 29182, 29361], "60992": [29374, 29476, 64022, 29559, 29629, 29641, 29654, 29667, 29650, 29703, 29685, 29734, 29738, 29737, 29742, 29794, 29833, 29855, 29953, 30063, 30338, 30364, 30366, 30363, 30374, 64023, 30534, 21167, 30753, 30798, 30820, 30842, 31024, 64024, 64025, 64026, 31124, 64027, 31131, 31441, 31463, 64028, 31467, 31646, 64029, 32072, 32092, 32183, 32160, 32214, 32338, 32583, 32673, 64030, 33537, 33634, 33663, 33735, 33782, 33864, 33972, 34131, 34137], "61056": [34155, 64031, 34224, 64032, 64033, 34823, 35061, 35346, 35383, 35449, 35495, 35518, 35551, 64034, 35574, 35667, 35711, 36080, 36084, 
36114, 36214, 64035, 36559, 64036, 64037, 36967, 37086, 64038, 37141, 37159, 37338, 37335, 37342, 37357, 37358, 37348, 37349, 37382, 37392, 37386, 37434, 37440, 37436, 37454, 37465, 37457, 37433, 37479, 37543, 37495, 37496, 37607, 37591, 37593, 37584, 64039, 37589, 37600, 37587, 37669, 37665, 37627, 64040, 37662, 37631, 37661, 37634, 37744, 37719, 37796, 37830, 37854, 37880, 37937, 37957, 37960, 38290, 63964, 64041, 38557, 38575, 38707, 38715, 38723, 38733, 38735, 38737, 38741, 38999, 39013, 64042, 64043, 39207, 64044, 39326, 39502, 39641, 39644, 39797, 39794, 39823, 39857, 39867, 39936, 40304, 40299, 64045, 40473, 40657], "61167": [8560, 8561, 8562, 8563, 8564, 8565, 8566, 8567, 8568, 8569, 65506, 65508, 65287, 65282], "64064": [8560, 8561, 8562, 8563, 8564, 8565, 8566, 8567, 8568, 8569, 8544, 8545, 8546, 8547, 8548, 8549, 8550, 8551, 8552, 8553, 65506, 65508, 65287, 65282, 12849, 8470, 8481, 8757, 32394, 35100, 37704, 37512, 34012, 20425, 28859, 26161, 26824, 37625, 26363, 24389, 20008, 20193, 20220, 20224, 20227, 20281, 20310, 20370, 20362, 20378, 20372, 20429, 20544, 20514, 20479, 20510, 20550, 20592, 20546, 20628, 20724, 20696, 20810], "64128": [20836, 20893, 20926, 20972, 21013, 21148, 21158, 21184, 21211, 21248, 21255, 21284, 21362, 21395, 21426, 21469, 64014, 21660, 21642, 21673, 21759, 21894, 22361, 22373, 22444, 22472, 22471, 64015, 64016, 22686, 22706, 22795, 22867, 22875, 22877, 22883, 22948, 22970, 23382, 23488, 29999, 23512, 23532, 23582, 23718, 23738, 23797, 23847, 23891, 64017, 23874, 23917, 23992, 23993, 24016, 24353, 24372, 24423, 24503, 24542, 24669, 24709, 24714, 24798, 24789, 24864, 24818, 24849, 24887, 24880, 24984, 25107, 25254, 25589, 25696, 25757, 25806, 25934, 26112, 26133, 26171, 26121, 26158, 26142, 26148, 26213, 26199, 26201, 64018, 26227, 26265, 26272, 26290, 26303, 26362, 26382, 63785, 26470, 26555, 26706, 26560, 26625, 26692, 26831, 64019, 26984, 64020, 27032, 27106, 27184, 27243, 27206, 27251, 27262, 27362, 27364, 27606, 27711, 27740, 27782, 27759, 27866, 27908, 28039, 28015], "64320": [28054, 28076, 28111, 28152, 28146, 28156, 28217, 28252, 28199, 28220, 28351, 28552, 28597, 28661, 28677, 28679, 28712, 28805, 28843, 28943, 28932, 29020, 28998, 28999, 64021, 29121, 29182, 29361, 29374, 29476, 64022, 29559, 29629, 29641, 29654, 29667, 29650, 29703, 29685, 29734, 29738, 29737, 29742, 29794, 29833, 29855, 29953, 30063, 30338, 30364, 30366, 30363, 30374, 64023, 30534, 21167, 30753, 30798, 30820, 30842, 31024, 64024, 64025], "64384": [64026, 31124, 64027, 31131, 31441, 31463, 64028, 31467, 31646, 64029, 32072, 32092, 32183, 32160, 32214, 32338, 32583, 32673, 64030, 33537, 33634, 33663, 33735, 33782, 33864, 33972, 34131, 34137, 34155, 64031, 34224, 64032, 64033, 34823, 35061, 35346, 35383, 35449, 35495, 35518, 35551, 64034, 35574, 35667, 35711, 36080, 36084, 36114, 36214, 64035, 36559, 64036, 64037, 36967, 37086, 64038, 37141, 37159, 37338, 37335, 37342, 37357, 37358, 37348, 37349, 37382, 37392, 37386, 37434, 37440, 37436, 37454, 37465, 37457, 37433, 37479, 37543, 37495, 37496, 37607, 37591, 37593, 37584, 64039, 37589, 37600, 37587, 37669, 37665, 37627, 64040, 37662, 37631, 37661, 37634, 37744, 37719, 37796, 37830, 37854, 37880, 37937, 37957, 37960, 38290, 63964, 64041, 38557, 38575, 38707, 38715, 38723, 38733, 38735, 38737, 38741, 38999, 39013, 64042, 64043, 39207, 64044, 39326, 39502, 39641], "64576": [39644, 39797, 39794, 39823, 39857, 39867, 39936, 40304, 40299, 64045, 40473, 40657]} var decoding_table = [], encoding_table = [] -for(var i = 0, len = 
_table.length; i < len; i += 2){ -var value = _table[i + 1] -if(value !== null){ - encoding_table[value] = _table[i] +for(let cp in cps){ + cp = parseInt(cp) + for(let i = 0, len = cps[cp].length; i < len; i++){ + let key = cp + i, + value = cps[cp][i] + decoding_table[key] = value + encoding_table[value] = key + } } -decoding_table[_table[i]] = _table[i + 1] -} -__BRYTHON__.imported.encoding_cp932 = {encoding_table, decoding_table} +var module = {encoding_table, decoding_table} +__BRYTHON__.addToImported("encoding_cp932", module) diff --git a/www/src/libs/posix.js b/www/src/libs/posix.js index 657ce230b..7bd2eb36c 100644 --- a/www/src/libs/posix.js +++ b/www/src/libs/posix.js @@ -110,6 +110,28 @@ var module = { return stat_result.$factory(filename) }, open: function(path, flags){return _b_.open(path, flags)}, + remove: function(path) { + var $ = $B.args("remove", 1, { path: null }, ["path"], arguments, {}, null, null) + console.log($) + + var path = $.path + var found_file = false + + if ($B.file_cache && $B.file_cache.hasOwnProperty(path)){ + delete $B.file_cache[path] + found_file = true + } + if ($B.files && $B.files.hasOwnProperty(path)){ + delete $B.files[path] + found_file = true + } + + if(!found_file) { + throw _b_.FileNotFoundError.$factory(`No such file or directory: '${path}'`) + } + + return _b_.None + }, stat: function(filename){return stat_result.$factory(filename)}, stat_result: function(filename){return stat_result.$factory(filename)}, urandom: function(n){ @@ -135,7 +157,7 @@ var module = { "close", "closerange", "device_encoding", "dup", "dup2", "execv", "execve", "fsat", "fsync", "get_terminal_size", "getcwdb", "getlogin", "getppid", "isatty", "kill", "link", "listdir", "lseek", - "mkdir", "pipe", "putenv", "read", "readlink", "remove", "rename", + "mkdir", "pipe", "putenv", "read", "readlink", "rename", "replace", "rmdir", "spawnv", "spawnve", "startfile", "stat_float_times", "statvfs_result", "strerror", "symlink", "system", "terminal_size", "times", "times_result", "umask", "uname_result", "unlink", "utime", diff --git a/www/src/libs/pyexpat.js b/www/src/libs/pyexpat.js index 1d3bf4453..e18c0a1c2 100644 --- a/www/src/libs/pyexpat.js +++ b/www/src/libs/pyexpat.js @@ -6,6 +6,8 @@ const XML_PARAM_ENTITY_PARSING_NEVER = 0, XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE = 1, XML_PARAM_ENTITY_PARSING_ALWAYS = 2 +const FAIL = {} + const xml_entities = { '>': '>', '<': '<', @@ -101,6 +103,7 @@ function flush_final_char_data(parser){ for(var i = 0; i < buf.length; i++){ if(! buf[i].match(/\s/)){ var pos = parser._pos - buf.length + i - 1 + console.log('rest', buf) var msg = `junk after document element: line 1, column ${pos}` raise_error(parser, msg) } @@ -230,84 +233,16 @@ xmlparser.StartElementHandler = _b_.None xmlparser.xml_tokenizer = function*(self){ // convert bytes to string + self._element = new DOCUMENT(self) while(self._pos < self._buffer_length){ - var char = self._buffer[self._pos] - if(self._state == 'data' && char == '<'){ - self._maybe_entity = null - self._state = 'element' - self._tag_state = 'tag_name' - self._element = new ELEMENT(self) - self._pos++ - }else if(self._state == 'data'){ - if(char == '\n'){ - if(! 
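
Editor's note: the rewritten cp932 module stores each contiguous run of code points under its starting CP932 code and expands both lookup tables from that, instead of walking a flat pair list. A minimal stand-alone sketch of the expansion follows, with made-up values in place of the real table data; the actual module then registers the result with $B.addToImported.

var cps = {"4000": [100, 101, 102]}           // placeholder data, not real CP932 values
var decoding_table = [], encoding_table = []
for(let key in cps){
    let cp = parseInt(key)
    for(let i = 0; i < cps[key].length; i++){
        decoding_table[cp + i] = cps[key][i]  // CP932 code -> Unicode code point
        encoding_table[cps[key][i]] = cp + i  // Unicode code point -> CP932 code
    }
}
console.log(decoding_table[4001], encoding_table[102])   // 101 4002
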
self.buffer_text){ - flush_char_data(self) - self._data_buffer = char - flush_char_data(self) - }else{ - self._data_buffer += char - } - self._maybe_entity = null - }else{ - self._data_buffer += char - if(char == '&'){ - // maybe start entity - self._maybe_entity = char - }else if(self._maybe_entity !== null){ - self._maybe_entity += char - if(char == ';'){ - var entity_pos = self._pos - self._maybe_entity.length + 1 - var replacement = check_entity(self, entity_pos) - self._data_buffer = self._data_buffer.replace( - self._maybe_entity, replacement) - self._maybe_entity = null - } - } - } - self._pos++ - }else if(self._state == 'element' && - self._element.expect == 'name_start' - && char == '!'){ - self._element = new DTD(self) - self._pos++ - }else if(self._state == 'element'){ - self._element = self._element.feed(char) - if(self._element === undefined){ - console.log('undefined after char', char, - self._buffer.substring(self._pos - 10, self._pos + 10)) - } - if(self._element.closed){ - xmlparser._handle_stack(self) - if(self._element instanceof DOCTYPE){ - if(self._element.declarations){ - var parser = xmlparser.$factory() - xmlparser.Parse(parser, - self._element.declarations.trim(), - true) - console.log('parser', parser) - } - } - yield self._element - self._state = 'data' - // self._data_buffer = '' - }else if(self._element.is_comment){ - self._state = 'comment' - self._comment = new COMMENT(self) - } - self._pos++ - }else if(self._state == 'comment'){ - self._comment.feed(char) - if(self._comment.closed){ - yield self._comment - self._state = 'data' - self._data_buffer = '' - } - self._pos++ - }else{ - self._pos++ + self._element = self._element.feed(char) + if(self._element.closed){ + yield self._element } + self._pos++ } + console.log('fini') } $B.set_func_names(xmlparser, 'expat') @@ -319,6 +254,7 @@ function raise_error_known_position(parser, message, pos){ ix-- } message += '\n' + parser._buffer.substring(ix, pos + 1) + message += '\n' + ' '.repeat(pos - ix - 1) + '^' throw error.$factory(message) } @@ -326,6 +262,19 @@ function raise_error(parser, message){ throw error.$factory(message) } +function raise_error1(element, char){ + var head = element + while(head.origin){ + head = head.origin + } + console.log(head) + var cls = element.constructor.name, + message = cls + ' expected ' + element.expect + + ', got: ' + char + var pos = head.parser._pos + raise_error_known_position(head.parser, message, pos) +} + var error = $B.make_class("error", function(message){ return { @@ -342,99 +291,496 @@ error.__mro__ = [_b_.Exception, _b_.BaseException, _b_.object] $B.set_func_names(error, "expat") -function DOCTYPE(parser){ - this.parser = parser - this.expect = 'element_start' +function expect_chars(element, char, stop){ + var res + if(! element.hasOwnProperty('expected_chars')){ + element.expected_chars = '' + } + if(is_char(char)){ + element.expected_chars += char + if(stop){ + var end_pos = element.expected_chars.length - stop.length + var tail = element.expected_chars.substr(end_pos) + if(tail == stop){ + res = {value: element.expected_chars.substr(0, end_pos)} + delete element.expected_chars + return res + } + } + }else{ + res = {value: element.expected_chars} + if(element.expected_pos == literal.length){ + delete element.expected_pos + return {value: literal} + } + } + return {value: null} } -DOCTYPE.prototype.feed = function(char){ - if(this.expect == 'element_start'){ + +function expect_name(element, char){ + if(! 
element.hasOwnProperty('expected_name')){ if(is_id_start(char)){ - this.root_element = char - this.expect = 'element_continue' + element.expected_name = char }else if(! is_whitespace(char)){ - throw Error('expected element start, got: ' + char) + raise_error(element.parser, 'expected name start, got: ' + char) } - }else if(this.expect == 'element_continue'){ - if(is_id_continue(char)){ - this.root_element += char + }else if(is_id_continue(char)){ + element.expected_name += char + }else if(is_whitespace(char)){ + var res = {value: element.expected_name} + delete element.expected_name + return res + }else{ + raise_error(element.parser, 'name expected id, got: ' + char) + } + return {} +} + +function expect_literal(element, literal, char){ + if(! element.hasOwnProperty('expected_pos')){ + element.expected_pos = 0 + } + if(literal[element.expected_pos] == char){ + element.expected_pos++ + if(element.expected_pos == literal.length){ + delete element.expected_pos + return {value: literal} }else{ - if(is_whitespace(char)){ - this.expect = 'rest' - }else{ - throw Error('expected whitespace after root element, got: ' + char) - } + return {value: null} } - }else if(this.expect == 'rest'){ - if(! is_whitespace(char)){ - if(is_id_start(char)){ - // external DTD - this.type = 'external' - this.decl = char - this.expect = 'decl_continue' - }else if(char == '['){ - this.type = 'internal' - this.expect = ']' - this.declarations = '' - }else{ - throw Error('unexpected in DOCTYPE: ' + char) - } + } + return FAIL +} + +function get_parser(element){ + while(element.origin){ + element = element.origin + } + return element.parser +} + +function get_pos(element){ + while(element.origin){ + element = element.origin + } + return element.parser._pos +} + +/* +document ::= prolog element Misc* + +prolog ::= XMLDecl? Misc* (doctypedecl Misc*)? +XMLDecl ::= '' +Misc ::= Comment | PI | S +Comment ::= '' +PI ::= '' Char*)))? 
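
Editor's note: expect_name and expect_literal are fed one character per call and only report a value once a complete token has been consumed; the caller keeps re-entering them until then. Below is a simplified, self-contained variant of expect_literal for illustration (the version above returns the FAIL sentinel on a mismatch, which is omitted here).

function expect_literal(element, literal, char){
    if(element.expected_pos === undefined){
        element.expected_pos = 0
    }
    if(literal[element.expected_pos] == char){
        element.expected_pos++
        if(element.expected_pos == literal.length){
            delete element.expected_pos
            return {value: literal}      // whole literal matched
        }
    }
    return {value: null}                 // not finished yet: keep feeding characters
}

var state = {}, res
for(var c of 'DOCTYPE'){
    res = expect_literal(state, 'DOCTYPE', c)
}
console.log(res.value)   // 'DOCTYPE'
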
'?>' +doctypedecl ::= '' +*/ +function DOCUMENT(parser){ + this.parser = parser + this.expect = 'prolog' + this.names = [] +} + +DOCUMENT.prototype.feed = function(char){ + if(this.expect == 'prolog'){ + this.expect = 'element' + return (new prolog(this)).feed(char) + if(char !== '<'){ + raise_error(this.parser, 'expected <') } - }else if(this.expect == 'decl_continue'){ - if(is_id_continue(char)){ - this.decl += char + this.expect = 'name_start_or_special' + }else if(this.expect == 'name_start_or_special'){ + if(char == '!'){ + this.expect = 'comment_or_doctype' + }else if(char == '?'){ + this.expect = 'xmldecl_or_pi' + }else if(is_id_start(char)){ + this.expect = 'prolog' + return new ELEMENT(this).feed(char) }else{ - if(is_whitespace(char)){ - this.expect = 'string_start' - this.strings = [] + raise_error1(this, char) + } + }else if(this.expect == 'comment_or_doctype'){ + if(char == '-'){ + this.expect = 'comment' + }else if(char == 'D'){ + this.expect = 'DOCTYPE' + return this.feed(char) + }else{ + raise_error('expected comment or DOCTYPE, got: ' + char) + } + }else if(this.expect == 'DOCTYPE'){ + var res = expect_literal(this, 'DOCTYPE', char) + if(res.value){ + return new DOCTYPE(this.parser, this) + } + }else if(this.expect == 'xmldecl_or_pi'){ + var res = expect_name(this, char) + if(res.value){ + if(res.value == 'xml'){ + this.expect = 'prolog' + return new XMLDECL(this.parser, this) }else{ - throw Error('unexpected after declaration: ' + char) + this.expect = 'prolog' + var pi = new PI(this.parser, this) + pi.name = res.value + pi.expect = 'content' + return pi } } - }else if(this.expect == 'string_start'){ + return this + }else if(this.expect == 'comment'){ + if(char == '-'){ + this.expect = 'prolog' + return new COMMENT(this.parser, this) + }else{ + raise_error(this.parser, 'DOCUMENT, expected -, got: ' + char) + } + }else{ + raise_error(this.parser, 'DOCUMENT, unhandled expect: ' + this.expect) + } + return this +} + +/* +prolog ::= XMLDecl? Misc* (doctypedecl Misc*)? +*/ +function prolog(origin){ + this.origin = origin + this.expect = 'XMLDecl?' +} + +prolog.prototype.feed = function(char){ + if(this.expect == 'XMLDecl?'){ + return (new XMLDecl(this)).feed(char) + } + return this +} + +/* +XMLDecl ::= '' +*/ +function XMLDecl(origin){ + this.origin = origin + this.expect = '' +intSubset ::= (markupdecl | DeclSep)* +markupdecl ::= elementdecl | AttlistDecl | EntityDecl | NotationDecl + | PI | Comment +DeclSep ::= PEReference | S +*/ + +function DOCTYPE(parser, origin){ + this.parser = parser + this.origin = origin + this.expect = 'element_start' +} + +DOCTYPE.prototype.feed = function(char){ + console.log('DOCTYPE feed', this.expect, 'char', char) + if(this.expect == 'element_start'){ + var res = expect_name(this, char) + if(res.value){ + this.name = res.value + this.expect = 'external_id_or_[_or_>' + } + }else if(this.expect == 'external_id_or_[_or_>'){ + if(char == '['){ + this.expect = '>' + return new intSubset(this) + }else if(char == '>'){ + this.expect == 'no_whitespace' + }else if(char == 'S' || char == 'P'){ + this.expect = '[_or_>' + var res = new ExternalID(this) + return res.feed(char) + }else{ + raise_error(this.parser, 'DOCTYPE expected SYSTEM, PUBLIC, [ or >, got: ' + char) + } + }else if(this.expect == '[_or_>'){ if(char == '['){ - this.type = 'mixed' - this.declarations = '' - this.expect = ']' + this.expect = '>' + return new intSubset(this) + }else if(char == '>'){ + this.expect = 'no_whitespace' }else if(! 
is_whitespace(char)){ - if(char == '"' || char == "'"){ - this.quote = char - this.string = '' - this.expect = 'string_end' - }else{ - raise_error(this.parser, 'expected quote, got: ' + char) - } + raise_error(this.parser, 'DOCTYPE expected [ or >, got: ' + char) } - }else if(this.expect == 'string_end'){ - if(char == this.quote){ - this.strings.push(this.string) - if(this.strings.length == 1){ - this.fpi = this.strings[0] - this.expect = 'string_start' - this.string = '' + }else if(this.expect == '>'){ + if(! is_whitespace(char)){ + if(char == '>'){ + this.expect = 'no_whitespace' }else{ - this.url = this.strings[1] - this.expect = '>' + raise_error(this.parser, 'DOCTYPE expected >, got: ' + char) } - }else{ - this.string += char } - }else if(this.expect == '>'){ + }else if(this.expect = 'no_whitespace'){ if(! is_whitespace(char)){ + return this.origin.feed(char) + } + } + return this +} + +/* +XMLDecl ::= '' +VersionInfo ::= S 'version' Eq ("'" VersionNum "'" | '"' VersionNum '"') +Eq ::= S? '=' S? +VersionNum ::= '1.0' +EncodingDecl ::= S 'encoding' Eq ('"' EncName '"' | "'" EncName "'" ) +EncName ::= [A-Za-z] ([A-Za-z0-9._] | '-')* +SDDecl ::= S 'standalone' Eq + (("'" ('yes' | 'no') "'") | ('"' ('yes' | 'no') '"')) +*/ +function XMLDECL(parser, origin){ + this.parser = parser + this.expect = 'version_info' + this.origin = origin +} + +XMLDECL.prototype.feed = function(char){ + switch(this.expect){ + case 'version_info': + var res = expect_literal(this, 'version', char) + if(res.value){ + this.expect = 'eq' + this.attr_name = 'version' + } + break + case 'eq': + if(char == '='){ + this.expect = 'quote' + }else if(! is_whitespace(char)){ + raise_error(this.parser, 'expect =, got: ' + char) + } + break + case 'quote': + if(is_quote(char)){ + this.expect = char + this.quoted = '' + }else if(! is_whitespace(char)){ + raise_error(this.parser, 'expected quote, got: ' + char) + } + break + case '"': + case "'": + var res = expect_literal(this, this.expect, char) + if(res.value){ + this[this.attr_name] = this.quoted + this.expect = 'encoding_or_sd_or_close' + }else{ + this.quoted += char + } + break + case 'encoding_or_sd_or_close': + switch(char){ + case 'e': + if(! this.hasOwnProperty('encoding')){ + this.expect = 'encoding' + return this.feed(char) + } + break + case 's': + if(! this.hasOwnProperty('standalone')){ + this.expect = 'standalone' + return this.feed(char) + } + break + case '?': + this.expect = '>' + break + default: + if(! is_whitespace(char)){ + raise_error(this.parser, + 'expected encoding, standalone or ?, got: ' + char) + } + } + break + case 'encoding': + case 'standalone': + var res = expect_literal(this, this.expect, char) + if(res.value){ + this.attr_name = this.expect + this.expect = 'eq' + } + break + case '>': if(char == '>'){ this.closed = true - }else{ - throw Error('expected >, ggot: ' + char) + }else if(! is_whitespace(char)){ + if(this.closed){ + return this.origin.feed(char) + } + raise_error(this.parser, 'expected >, got: ' + char) } + break + default: + raise_error(this.parser, 'unhandled case: ' + this.expect) + } + return this +} + +/* +PI ::= '' Char*)))? 
'?>' +PITarget ::= Name - (('X' | 'x') ('M' | 'm') ('L' | 'l')) +*/ +function PI(parser, origin){ + this.parser = parser + this.origin = origin + this.expect = 'pi_target' +} + +PI.prototype.feed = function(char){ + if(this.expect == 'pi_target'){ + var res = expect_name(this, char) + if(res.value){ + this.pi_target = res.value + this.expect = 'content' } - }else if(this.expect == ']'){ - if(char == ']'){ - this.expect = '>' - }else{ - this.declarations += char + }else if(this.expect == 'content'){ + var res = expect_chars(this, char, '?>') + if(res.value){ + this.content = res.value + this.closed = true + this.expect = 'no_whitespace' + } + }else if(this.expect == 'no_whitespace'){ + if(! is_whitespace(char)){ + return this.origin.feed(char) } - }else{ - throw Error('wrong expect: ' + this.expect) } return this } @@ -534,38 +880,41 @@ DTD.prototype.toString = function(){ return res + '>' } -function COMMENT(parser){ +function COMMENT(parser, origin){ this.parser = parser + this.origin = origin this.value = '' - this.expect = '-' + this.expect = '-->' } COMMENT.prototype.feed = function(char){ - if(this.expect == '-'){ - if(char == '-'){ - this.expect = '--' - }else{ - this.value += char + if(this.expect == '-->'){ + var res = expect_chars(this, char, '-->') + if(res.value){ + this.content = res.value + this.expect = 'no_whitespace' } - }else if(this.expect == '--'){ - if(char == '-'){ - this.expect = '>' - }else{ - this.value += '-' + char - this.expect = '-' - } - }else if(this.expect == '>'){ - if(char == '>'){ - this.closed = true - }else{ - throw Error('comment, expected >, got: ' + char) + }else if(this.expect == 'no_whitespace'){ + if(! is_whitespace(char)){ + return this.origin.feed(char) } } + return this } -function ELEMENT(parser) { - this.parser = parser - this.expect = 'name_start' +/* +element ::= EmptyElemTag | STag content ETag +STag ::= '<' Name (S Attribute)* S? '>' +Attribute ::= Name Eq AttValue +ETag ::= '' +content ::= CharData? + ((element | Reference | CDSect | PI | Comment) CharData?)* +EmptyElemTag ::= '<' Name (S Attribute)* S? 
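
Editor's note: every production above (DOCUMENT, prolog, XMLDecl, DOCTYPE, PI, COMMENT, and ELEMENT below) is a handler object whose feed(char) returns the handler that should receive the next character: itself, a child production, or its origin once it has closed. The toy example below shows only that hand-off pattern; the names are illustrative and not taken from the patch.

function Word(origin){ this.origin = origin; this.text = '' }
Word.prototype.feed = function(char){
    if(/\s/.test(char)){                 // word finished: hand control back
        console.log('word:', this.text)
        return this.origin
    }
    this.text += char
    return this
}
function Top(){}
Top.prototype.feed = function(char){
    if(/\s/.test(char)){ return this }
    return new Word(this).feed(char)     // delegate to a child handler
}

var node = new Top()
for(var c of 'hello brython '){
    node = node.feed(c)
}
// prints: word: hello, then word: brython
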
'/>' +*/ + +function ELEMENT(origin) { + this.origin = origin + this.expect = '?_/_or_name_start' this.attrs = $B.empty_dict() } @@ -580,25 +929,26 @@ ELEMENT.prototype.set_attribute_value = function(value){ _b_.dict.$setitem(this.attrs, this.attr_name, value) } -ELEMENT.prototype.feed = function(item){ +ELEMENT.prototype.feed = function(char){ + console.log('ELEMENT feed, expects', this.expect, 'char', char) if(this.expect == 'name_start'){ - if(item == '?'){ + if(char == '?'){ if(this.is_declaration){ throw Error('already got ?') } this.is_declaration = true - }else if(item == '/'){ + }else if(char == '/'){ if(this.is_end){ throw Error('already got /') } this.is_end = true - }else if(is_id_start(item)){ - this.name = item + }else if(is_id_start(char)){ + this.name = char this.expect = 'name_continue' } }else if(this.expect == 'name_continue'){ - if(is_id_continue(item)){ - this.name += item + if(is_id_continue(char)){ + this.name += char }else{ // end of element name if(this.is_declaration){ @@ -608,115 +958,121 @@ ELEMENT.prototype.feed = function(item){ return new PROCESSING_INSTRUCTION(this.parser, this.name) } } - if(is_whitespace(item)){ + if(is_whitespace(char)){ this.expect = 'attr_name_start' - }else if(item == '>'){ + }else if(char == '>'){ this.closed = true - }else if(item == '/'){ + }else if(char == '/'){ this.self_closing = true this.expect = '>' }else{ - throw Error('unexpected at end of element name: ' + item) + throw Error('unexpected at end of element name: ' + char) } } }else if(this.expect == 'attr_name_start'){ - if(item == '/'){ + if(char == '/'){ this.self_closing = true - }else if(item == '>'){ - this.closed = true - }else if(is_id_start(item)){ - this.attr_name = item + }else if(char == '>'){ + this.expect = 'no_whitespace' + }else if(is_id_start(char)){ + this.attr_name = char this.expect = 'attr_name_continue' - }else if(item == '?' && this.is_declaration){ + }else if(char == '?' && this.is_declaration){ this.expect = '>' - }else if(! is_whitespace(item)){ - throw Error('expected attribute name, got: ' + item) + }else if(! is_whitespace(char)){ + throw Error('expected attribute name, got: ' + char) } }else if(this.expect == 'attr_name_continue'){ - if(is_id_continue(item)){ - this.attr_name += item - }else if(item == '='){ + if(is_id_continue(char)){ + this.attr_name += char + }else if(char == '='){ this.add_attribute_name(this.attr_name) this.expect = 'attr_value_start' this.attr_value = '' - }else if(is_whitespace(item)){ + }else if(is_whitespace(char)){ this.add_attribute_name(this.attr_name) this.expect = '=' - }else if(item == '>'){ + }else if(char == '>'){ this.add_attribute_name(this.attr_name) this.closed = true }else{ - throw Error('unexpected character in attribute name: ' + item) + throw Error('unexpected character in attribute name: ' + char) } }else if(this.expect == '='){ - if(item == '='){ + if(char == '='){ this.expect = 'attr_value_start' - }else if(! is_whitespace(item)){ - throw Error('expected =, got: ' + item) + }else if(! is_whitespace(char)){ + raise_error1(this, char) } }else if(this.expect == 'attr_value'){ - if(item == '='){ + if(char == '='){ this.expect = 'attr_value_start' this.attr_value = '' - }else if(item == '>'){ + }else if(char == '>'){ this.closed = true - }else if(is_id_start(item)){ - this.attr_name = item + }else if(is_id_start(char)){ + this.attr_name = char this.expect = 'attr_name_continue' - }else if(! is_whitespace(item)){ - throw Error('expected attribute value or name, got: ' + item) + }else if(! 
is_whitespace(char)){ + throw Error('expected attribute value or name, got: ' + char) } }else if(this.expect == 'attr_value_start'){ - if(item == '"' || item == "'"){ + if(char == '"' || char == "'"){ this.expect = 'quote' - this.quote = item + this.quote = char this.attr_value = '' - }else if(! is_whitespace(item)){ - throw Error('unexpect attribute value start: ' + item) + }else if(! is_whitespace(char)){ + throw Error('unexpect attribute value start: ' + char) } }else if(this.expect == "quote"){ - if(item == this.quote){ + if(char == this.quote){ this.set_attribute_value(this.attr_value) this.expect = 'attr_name_start' }else{ - this.attr_value += item + this.attr_value += char } }else if(this.expect == '>'){ - if(item == '>'){ + if(char == '>'){ this.closed = true }else{ - throw Error('expected >, got: ' + item) + throw Error('expected >, got: ' + char) } }else if(this.expect == 'attr_name'){ - if(item instanceof Name){ - if(_b_.dict.__contains__(this.attrs, item.value)){ - throw Error('duplicate value ' + item.value) + if(char instanceof Name){ + if(_b_.dict.__contains__(this.attrs, char.value)){ + throw Error('duplicate value ' + char.value) } - _b_.dict.$setitem(this.attrs, item.value, _b_.None) - this.last_attr = item.value - }else if(item.value == '?' && this.is_declaration){ + _b_.dict.$setitem(this.attrs, char.value, _b_.None) + this.last_attr = char.value + }else if(char.value == '?' && this.is_declaration){ if(this.question_mark){ throw Error('already ?') } this.question_mark = true - }else if(item == END){ + }else if(char == END){ if(this.is_declaration && ! this.question_mark){ throw Error('missing ') } - }else if(item instanceof Punctuation && item.value == '/'){ + }else if(char instanceof Punctuation && char.value == '/'){ this.no_end = true this.expect = END }else{ - throw Error('expected attribute name, got ' + item) + throw Error('expected attribute name, got ' + char) } }else if(this.expect == 'attr_value'){ - _b_.dict.$setitem(this.attrs, this.last_attr, item) + _b_.dict.$setitem(this.attrs, this.last_attr, char) this.expect = 'attr_name' }else if(this.expect == END){ // after "/" - if(item != END){ + if(char != END){ throw Error('nothing after /') } + }else if(this.expect == 'no_whitespace'){ + if(! is_whitespace(char)){ + return this.origin.feed(char) + } + }else{ + raise_error1(this, char) } return this } @@ -740,6 +1096,301 @@ ELEMENT.prototype.toString = function() { return res + '>' } +/* +EntityDecl ::= GEDecl | PEDecl +PEDecl ::= '' +PEDef ::= EntityValue | ExternalID +*/ +function ENTITY(parser){ + this.parser = parser +} + +ENTITY.prototype.feed = function(char){ + if(! is_whitespace(char)){ + if(is_id_start(char)){ + return new GEDecl(this.parser, char) + }else if(char == "%"){ + return new PEDecl(this.parser) + } + throw Error('unexpected after ENTITY: ' + char) + } +} + +/* +GEDecl ::= '' +EntityDef ::= EntityValue | (ExternalID NDataDecl?) +ExternalID ::= 'SYSTEM' S SystemLiteral + | 'PUBLIC' S PubidLiteral S SystemLiteral +NDataDecl ::= S 'NDATA' S Name +EntityValue ::= '"' ([^%&"] | PEReference | Reference)* '"' + | "'" ([^%&'] | PEReference | Reference)* "'" + +*/ +function GEDecl(parser, char){ + this.parser = parser + this.expect = 'name_continue' + this.name = char + this.state = 'name' +} + +GEDecl.prototype.feed = function(char){ + switch(this.expect){ + case 'name_start': + if(is_id_start(char)){ + if(this.state == 'NDATA'){ + this.ndata_name = char + } + this.expect = 'name_continue' + }else if(! 
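
Editor's note: ELEMENT.feed above collects attributes by stepping through the attr_name_start, attr_name_continue, attr_value_start and quote states and stores each pair with dict.$setitem. The sketch below reaches the same end result with a flat regular-expression scan and a plain object, only to show what is being extracted; it is not how the character-by-character parser works internally.

function scan_attributes(tag){
    var attrs = {},
        re = /([A-Za-z_][\w.-]*)\s*=\s*("([^"]*)"|'([^']*)')/g,
        m
    while((m = re.exec(tag)) !== null){
        // group 3 holds a double-quoted value, group 4 a single-quoted one
        attrs[m[1]] = m[3] !== undefined ? m[3] : m[4]
    }
    return attrs
}

console.log(scan_attributes('<img src="logo.png" alt=\'brython\'/>'))
// { src: 'logo.png', alt: 'brython' }
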
is_whitespace(char)){ + throw Error('GEDecl expected name start, got: ' + char) + } + break + case 'name_continue': + if(is_id_continue(char)){ + if(this.state == 'name'){ + this.name += char + }else if(this.state == 'NDATA'){ + this.ndata_name += char + } + }else if(is_whitespace(char)){ + if(this.state == 'NDATA'){ + this.expect = '>' + }else{ + this.expect = 'entity_def' + } + }else if(char == '>' && this.state == 'NDATA'){ + this.closed = true + }else{ + throw Error('GEDecl expected name, got: ' + char) + } + break + case 'entity_def': + if(is_quote(char)){ + this.quoted = '' + this.state = this.expect + this.expect = char + }else if(char == 'S' || char == 'P'){ + this.expect = char == 'S' ? 'SYSTEM' : 'PUBLIC' + this.expect_pos = 1 + this.external_id = this.expect + }else if(! is_whitespace(char)){ + throw Error('GEDCL expect quote, SYSTEM or PUBLIC, got: ' + char) + } + break + case 'SYSTEM': + case 'PUBLIC': + if(char == this.expect[this.expect_pos]){ + this.expect_pos++ + if(this.expect_pos == this.expect.length){ + this.expect = this.expect == 'SYSTEM' ? 'system_literal' : + 'pubid_literal' + } + }else{ + throw Error(`GEDecl expected ${this.expect}, got: ${char}`) + } + break + case 'NDATA': + if(char == this.expect[this.expect_pos]){ + this.expect_pos++ + if(this.expect_pos == this.expect.length){ + this.expect = 'name_start' + this.ndata_name = '' + this.state = 'NDATA' + } + }else{ + throw Error(`GEDecl expected ${this.expect}, got: ${char}`) + } + break + case '"': + case "'": + if(this.state == 'entity_def'){ + if(char == this.expect){ + this.entity_def = this.quoted + this.expect = '>' + }else{ + this.quoted += char + } + }else if(this.state == 'system_literal'){ + if(char == this.expect){ + this.system_literal = this.quoted + this.expect = 'n_data_decl_or_close' + }else{ + this.quoted += char + } + } + break + case 'system_literal': + if(is_quote(char)){ + this.expect = char + this.state = 'system_literal' + this.quoted = '' + }else if(! is_whitespace(char)){ + throw Error('GEDecl expected SystemLiteral, got: ' + char) + } + break + case '>': + if(! is_whitespace(char)){ + if(char == '>'){ + this.closed = true + }else{ + throw Error('GEDecl expected >, got: ' + char) + } + } + break + case 'n_data_decl_or_close': + if(char == '>'){ + this.closed = true + }else if(char == 'N'){ + this.expect = 'NDATA' + this.expect_pos = 1 + }else if(! is_whitespace(char)){ + throw Error('GEDecl expected NDATA or >, got: ' + char) + } + break + default: + console.log(this.parser._buffer.substr(0, this.parser._pos)) + throw Error('pas fini...') + } + return this +} + +/* +ExternalID ::= 'SYSTEM' S SystemLiteral + | 'PUBLIC' S PubidLiteral S SystemLiteral +*/ +function ExternalID(origin){ + this.origin = origin + this.expect = 'first' +} + +ExternalID.prototype.feed = function(char){ + if(this.expect == 'first'){ + if(! is_whitespace(char)){ + if(char == 'S'){ + this.expect = 'SYSTEM' + return this.feed(char) + }else if(char == 'P'){ + this.expect = 'PUBLIC' + return this.feed(char) + }else{ + raise_error(this, 'ExternalID expected SYSTME or PUBLIC, got: ' + char) + } + } + }else if(this.expect == 'SYSTEM' || this.expect == 'PUBLIC'){ + var res = expect_literal(this, this.expect, char) + if(res.value){ + this.type = this.expect + if(this.type == 'SYSTEM'){ + this.expect = '[_or_>' + return new SystemLiteral(this) + }else{ + this.expect = 'system_after_pubid' + return new PubidLiteral(this) + } + } + }else if(this.expect == 'system_after_pubid'){ + if(! 
is_whitespace(char)){ + this.expect = '[_or_>' + return (new SystemLiteral(this)).feed(char) + } + }else if(this.expect == '[_or_>'){ + if(char == '['){ + this.expect = '>' + return new intSubset(this) + }else if(char == '>'){ + return this.origin.feed(char) + }else{ + raise_error1(this, char) + } + }else if(this.expect == '>'){ + if(char == '>'){ + this.expect = 'no_whitespace' + }else if(! is_whitespace(char)){ + raise_error1(this, char) + } + }else if(this.expect == 'no_whitespace'){ + if(! is_whitespace(char)){ + console.log('return to origin', this.origin, 'char', char) + return this.origin.feed(char) + } + } + return this +} + +/* +PubidLiteral ::= '"' PubidChar* '"' | "'" (PubidChar - "'")* "'" +PubidChar ::= #x20 | #xD | #xA | [a-zA-Z0-9] + | [-'()+,./:=?;!*#@$_%] +*/ +function PubidLiteral(origin){ + this.origin = origin + this.expect = 'quote' +} + + +function is_pubid_char(char){ + /* +#x20 | #xD | #xA | [a-zA-Z0-9] + | [-'()+,./:=?;!*#@$_%] +*/ + return char.match(new RegExp("[a-zA-Z0-9-'()+,./:=?;!*#@$_%]")) || + ' \n\r'.includes(char) +} + +PubidLiteral.prototype.feed = function(char){ + if(this.expect == 'quote'){ + if(is_quote(char)){ + this.expect = char + this.content = '' + }else if(! is_whitespace(char)){ + raise_error1(this, char) + } + }else if(this.expect == 'no_whitespace'){ + if(! is_whitespace(char)){ + return this.origin.feed(char) + } + }else{ + if(char == this.expect){ + this.expect = 'no_whitespace' + }else if(is_pubid_char(char)){ + this.content += char + }else{ + console.log('PubidLiteral expects', this.expect, 'char', char) + console.log(is_pubid_char(char)) + raise_error1(this, char) + } + } + return this +} + +function SystemLiteral(origin){ + this.origin = origin + this.expect = 'quote' +} + +SystemLiteral.prototype.feed = function(char){ + console.log('SystemLiteral expects', this.expect, 'char', char) + if(this.expect == 'quote'){ + if(is_quote(char)){ + this.expect = char + this.content = '' + }else if(! is_whitespace(char)){ + raise_error1(this, char) + } + }else if(this.expect == 'no_whitespace'){ + if(! 
is_whitespace(char)){ + return this.origin.feed(char) + } + }else{ + if(char == this.expect){ + this.expect = 'no_whitespace' + }else{ + this.content += char + } + } + return this +} + function PROCESSING_INSTRUCTION(parser, name){ this.parser = parser this.name = name @@ -906,6 +1557,19 @@ function is_whitespace(s){ return s.length > 0 } +function is_quote(char){ + return char == '"' || char == "'" +} + +function is_char(char){ + // #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF] + var cp = char.codePointAt(0) + return ([0x9, 0xa, 0xd].includes(cp)) || + (0x20 <= cp && cp <= 0xd7ff) || + (0xe000 <= cp && cp <= 0xfffd) || + (0x10000 <= cp && cp <= 0x10ffff) +} + var model = 'model', errors = 'errors' diff --git a/www/src/pegen.js b/www/src/pegen.js index 2ced61c0f..0fe6551a0 100644 --- a/www/src/pegen.js +++ b/www/src/pegen.js @@ -15,6 +15,11 @@ var _b_ = __BRYTHON__.builtins const Load = new $B.ast.Load() +const NULL = undefined; +const ENDMARKER = 0, + NAME = 1, + NUMBER = 2, + STRING = 3 function strchr(s, char){ return s.includes(char) @@ -50,24 +55,13 @@ const NSTATISTICS = 2000, ERRORTOKEN = 'ERRORTOKEN', NEWLINE = $B.py_tokens.NEWLINE, DEDENT = $B.py_tokens.DEDENT, - Py_single_input = 'py_single_input' + Py_single_input = 'py_single_input', + PyPARSE_ALLOW_INCOMPLETE_INPUT = 0x0100 function PyUnicode_IS_ASCII(char){ return char.codePointAt(0) < 128 } -function PyBytes_FromStringAndSize(s){ - var dest = new Uint8Array(s.length * 3) - var encoder = new TextEncoder() - var result = encoder.encodeInto(s, dest) - return $B.fast_bytes(Array.from(dest.slice(0, result.written))) -} - -function _PyArena_AddPyObject(arena, obj){ - // arena.a_objects.push(obj) - return 1 -} - function set_position_from_token(ast_obj, token){ for(var attr of ['lineno', 'col_offset', 'end_lineno', 'end_col_offset']){ ast_obj[attr] = token[attr] @@ -253,12 +247,9 @@ function initialize_token(p, parser_token, new_token, token_type) { console.log('keywords', p.keywords) alert() } - parser_token.bytes = PyBytes_FromStringAndSize(new_token.string) - _PyArena_AddPyObject(p.arena, parser_token.bytes) parser_token.metadata = NULL; if (new_token.metadata != NULL) { - _PyArena_AddPyObject(p.arena, new_token.metadata) parser_token.metadata = new_token.metadata; new_token.metadata = NULL; } @@ -292,12 +283,23 @@ function _PyTokenizer_Get(tok, new_token){ return token.num_type } - function get_next_token(p, new_token){ var token = p.tokens[p.fill] ?? 
p.read_token() for(var key in token){ new_token[key] = token[key] } + if(token.num_type == $B.py_tokens.ENDMARKER){ + // on 'single' mode, insert a NEWLINE before ENDMARKER + if(p.mode == 'single'){ + var end_token = p.tokens[p.tokens.length - 2] + if(end_token.num_type != $B.py_tokens.NEWLINE){ + var newline = $B.clone(end_token) + newline.num_type = $B.py_tokens.NEWLINE + p.tokens.splice(p.tokens.length - 1, 0, newline) + token = newline + } + } + } return token.num_type } @@ -466,7 +468,8 @@ $B._PyPegen.expect_forced_token = function(p, type, expected) { } var t = p.tokens[p.mark]; if (t.num_type != type) { - $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(t, "expected '%s'", expected); + $B.helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p, t, + `expected '${expected}'`); return NULL; } p.mark += 1; @@ -540,10 +543,6 @@ $B._PyPegen.new_identifier = function(p, n){ id = id2; } PyUnicode_InternInPlace(id); - if (_PyArena_AddPyObject(p.arena, id) < 0) - { - return error() - } return id; function error(){ @@ -589,7 +588,7 @@ $B._PyPegen.soft_keyword_token = function(p) { } var the_token; var size; - the_token = _b_.bytes.decode(t.bytes, 'iso-8859-1'); + the_token = t.string; // _b_.bytes.decode(t.bytes, 'iso-8859-1'); for (let keyword = p.soft_keywords; keyword != NULL; keyword++) { if (strncmp(keyword, the_token, size) == 0) { return $B._PyPegen.name_from_token(p, t); @@ -601,11 +600,24 @@ $B._PyPegen.soft_keyword_token = function(p) { function prepared_number_value(prepared){ switch(prepared.type){ case 'float': - return parseFloat(prepared.value) + return $B.fast_float(prepared.value) case 'imaginary': return $B.make_complex(0, prepared_number_value(prepared.value)) case 'int': - return parseInt(prepared.value[1], prepared.value[0]) + var res = parseInt(prepared.value[1], prepared.value[0]) + if(! Number.isSafeInteger(res)){ + var base = prepared.value[0], + num_str = prepared.value[1] + switch(base){ + case 8: + return $B.fast_long_int(BigInt('0x' + num_str)) + case 10: + return $B.fast_long_int(BigInt(num_str)) + case 16: + return $B.fast_long_int(BigInt('0x' + num_str)) + } + } + return res } } @@ -689,7 +701,7 @@ $B._PyPegen.number_token = function(p){ } var c = parsenumber(num_raw); - + if (c == NULL) { p.error_indicator = 1; var tstate = _PyThreadState_GET(); @@ -713,11 +725,6 @@ $B._PyPegen.number_token = function(p){ return NULL; } - if (_PyArena_AddPyObject(p.arena, c) < 0) { - Py_DECREF(c); - p.error_indicator = 1; - return NULL; - } var res = new $B.ast.Constant(c, NULL); set_position_from_token(res, t) return res @@ -732,7 +739,7 @@ function bad_single_statement(p){ var pos = 0 for (;;) { - while (c == ' ' || c == '\t' || c == '\n' || c == '\014') { + while (c == ' ' || c == '\t' || c == '\n' || c == '\f') { c = cur[pos++] } @@ -851,7 +858,7 @@ function reset_parser_state_for_error_pass(p){ p.call_invalid_rules = 1; // Don't try to get extra tokens in interactive mode when trying to // raise specialized errors in the second pass. 
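
Editor's note: prepared_number_value above keeps small integer literals as JavaScript numbers and switches to Brython's long integers once parseInt would lose precision. A stand-alone sketch of that fallback follows, with plain BigInt standing in for $B.fast_long_int.

function int_from_literal(base, digits){
    var res = parseInt(digits, base)
    if(Number.isSafeInteger(res)){
        return res                        // fits in a double: keep a JS number
    }
    // beyond Number.MAX_SAFE_INTEGER, re-read the digits exactly with BigInt
    // (an octal literal would need the '0o' prefix rather than '0x')
    return base == 16 ? BigInt('0x' + digits) : BigInt(digits)
}

console.log(int_from_literal(10, '12345'))             // 12345
console.log(int_from_literal(16, 'ffffffffffffffff'))  // 18446744073709551615n
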
- p.tok.interactive_underflow = IUNDERFLOW_STOP; + // p.tok.interactive_underflow = IUNDERFLOW_STOP; } function _is_end_of_source(p) { @@ -859,6 +866,79 @@ function _is_end_of_source(p) { return err == E_EOF || err == E_EOFS || err == E_EOLS; } +$B._PyPegen.tokenize_full_source_to_check_for_errors = function(p){ + var last_token = p.tokens[p.fill - 1] + var tokenizer = $B.tokenizer(p.src, p.filename, p.mode, p) + for(var token of tokenizer){ + } + if(p.braces.length > 0){ + var brace = $B.last(p.braces), + err_lineno, + msg + if('([{'.includes(brace.char)){ + err_lineno = brace.line_num + }else{ + if(p.braces.length > 1){ + err_lineno = p.braces[p.braces.length - 2].line_num + }else{ + err_lineno = brace.line_num + } + } + if(p.tokens.length == 0 || $B.last(p.tokens).lineno >= err_lineno){ + if('([{'.includes(brace.char)){ + msg = `'${brace.char}' was never closed` + }else if(p.braces.length > 1){ + var closing = brace.char, + opening = p.braces[p.braces.length - 2].char + msg = `closing parenthesis '${closing}' does not match ` + + `opening parenthesis '${opening}'` + }else{ + msg = `unmatched '${brace.char}'` + } + $B.raise_error_known_location(_b_.SyntaxError, + p.filename, + brace.line_num, brace.pos - brace.line_start, + brace.line_num, brace.pos - brace.line_start + 1, + brace.line, + msg) + } + } +} + +$B._PyPegen.set_syntax_error = function(p, last_token) { + // Initialization error + if (p.fill == 0) { + $B.helper_functions.RAISE_SYNTAX_ERROR(p, + "error at start before reading any input"); + } + $B._PyPegen.tokenize_full_source_to_check_for_errors(p); + + // Parser encountered EOF (End of File) unexpectedtly + if (last_token.num_type == ERRORTOKEN && p.tok.done == E_EOF) { + if (p.tok.level) { + raise_unclosed_parentheses_error(p); + } else { + $B.helper_functions.RAISE_SYNTAX_ERROR(p, "unexpected EOF while parsing"); + } + return; + } + // Indentation error in the tokenizer + if (last_token.num_type == INDENT || last_token.num_type == DEDENT) { + $B.helper_functions.RAISE_INDENTATION_ERROR(p, + last_token.num_type == INDENT ? "unexpected indent" : "unexpected unindent"); + return; + } + // Unknown error (generic case) + $B._PyPegen.tokenize_full_source_to_check_for_errors(p); + // Use the last token we found on the first pass to avoid reporting + // incorrect locations for generic syntax errors just because we reached + // further away when trying to find specific syntax errors in the second + // pass. + $B.raise_error_known_token(_b_.SyntaxError, p.filename, last_token, + "invalid syntax"); +} + + $B._PyPegen.run_parser = function(p){ var res = $B._PyPegen.parse(p); // assert(p->level == 0); @@ -867,20 +947,22 @@ $B._PyPegen.run_parser = function(p){ PyErr_Clear(); return RAISE_SYNTAX_ERROR("incomplete input"); } - if (PyErr_Occurred() && !PyErr_ExceptionMatches(PyExc_SyntaxError)) { - return NULL; - } - // Make a second parser pass. In this pass we activate heavier and slower checks + // Make a second parser pass. In this pass we activate heavier and slower checks // to produce better error messages and more complete diagnostics. Extra "invalid_*" // rules will be active during parsing. 
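
Editor's note: tokenize_full_source_to_check_for_errors above re-tokenizes the whole source so that an unbalanced bracket can be reported with a CPython-style message. The reduced, self-contained version below shows only how the message is chosen; the real code also decides which line to blame from the tokenizer's brace stack.

function unclosed_message(src){
    var stack = [],
        pairs = {')': '(', ']': '[', '}': '{'}
    for(var char of src){
        if('([{'.includes(char)){
            stack.push(char)
        }else if(')]}'.includes(char)){
            var opening = stack.pop()
            if(pairs[char] != opening){
                return opening === undefined
                    ? `unmatched '${char}'`
                    : `closing parenthesis '${char}' does not match ` +
                      `opening parenthesis '${opening}'`
            }
        }
    }
    if(stack.length > 0){
        return `'${stack[stack.length - 1]}' was never closed`
    }
    return null
}

console.log(unclosed_message('f(x, [1, 2)'))
// closing parenthesis ')' does not match opening parenthesis '['
console.log(unclosed_message('(1 + 2'))
// '(' was never closed
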
var last_token = p.tokens[p.fill - 1]; reset_parser_state_for_error_pass(p); - _PyPegen_parse(p); - + try{ + $B._PyPegen.parse(p); + }catch(err){ + last_token = p.tokens[p.fill - 1] + // check if a parenthesis error occurred before + $B._PyPegen.tokenize_full_source_to_check_for_errors(p) + throw err + } // Set SyntaxErrors accordingly depending on the parser/tokenizer status at the failure // point. - _Pypegen_set_syntax_error(p, last_token); - return NULL; + $B._PyPegen.set_syntax_error(p, last_token); } if (p.start_rule == Py_single_input && bad_single_statement(p)) { @@ -890,14 +972,16 @@ $B._PyPegen.run_parser = function(p){ // test_peg_generator defines _Py_TEST_PEGEN to not call PyAST_Validate() // #if defined(Py_DEBUG) && !defined(_Py_TEST_PEGEN) - if (p.start_rule == Py_single_input || - p.start_rule == Py_file_input || - p.start_rule == Py_eval_input) + /* + if (p.mode == 'single' || + p.mode == 'file' || + p.mode == 'eval') { if (!_PyAST_Validate(res)) { return NULL; } } + */ // #endif return res; } @@ -979,4 +1063,13 @@ $B._PyPegen.run_parser_from_string = function(str, start_rule, filename_ob, } } +$B.PyPegen = { + first_item: function(a, type){ + return a[0] + }, + last_item: function(a, ptype){ + return a[a.length - 1] + } +} + })(__BRYTHON__) diff --git a/www/src/py2js.js b/www/src/py2js.js index ea6c07e2c..110e1f8f3 100644 --- a/www/src/py2js.js +++ b/www/src/py2js.js @@ -38,206 +38,10 @@ $B.last = function(table){ return table[table.length - 1] } -// Convert a list to an object indexed with list values -$B.list2obj = function(list, value){ - var res = {}, - i = list.length - if(value === undefined){ - value = true - } - while(i-- > 0){ - res[list[i]] = value - } - return res -} - -/* -Internal variables -================== -*/ - -// Mapping between operators and special Python method names -$B.op2method = { - operations: { - "**": "pow", "//": "floordiv", "<<": "lshift", ">>": "rshift", - "+": "add", "-": "sub", "*": "mul", "/": "truediv", "%": "mod", - "@": "matmul" // PEP 465 - }, - augmented_assigns: { - "//=": "ifloordiv", ">>=": "irshift", "<<=": "ilshift", "**=": "ipow", - "+=": "iadd","-=": "isub", "*=": "imul", "/=": "itruediv", - "%=": "imod", "&=": "iand","|=": "ior","^=": "ixor", "@=": "imatmul" - }, - binary: { - "&": "and", "|": "or", "~": "invert", "^": "xor" - }, - comparisons: { - "<": "lt", ">": "gt", "<=": "le", ">=": "ge", "==": "eq", "!=": "ne" - }, - boolean: { - "or": "or", "and": "and", "in": "in", "not": "not", "is": "is" - }, - subset: function(){ - var res = {}, - keys = [] - if(arguments[0] == "all"){ - keys = Object.keys($B.op2method) - keys.splice(keys.indexOf("subset"), 1) - }else{ - for(var arg of arguments){ - keys.push(arg) - } - } - for(var key of keys){ - var ops = $B.op2method[key] - if(ops === undefined){ - throw Error(key) - } - for(var attr in ops){ - res[attr] = ops[attr] - } - } - return res - } -} - -var $operators = $B.op2method.subset("all") - -$B.method_to_op = {} -for(var category in $B.op2method){ - for(var op in $B.op2method[category]){ - var method = `__${$B.op2method[category][op]}__` - $B.method_to_op[method] = op - } -} - -// Mapping between augmented assignment operators and method names -var $augmented_assigns = $B.augmented_assigns = $B.op2method.augmented_assigns - -// Names that can't be assigned to -var noassign = $B.list2obj(['True', 'False', 'None', '__debug__']) - -// Operators weight for precedence -var $op_order = [['or'], ['and'], ['not'], - ['in','not_in'], - ['<', '<=', '>', '>=', '!=', '==', 'is', 
'is_not'], - ['|'], - ['^'], - ['&'], - ['>>', '<<'], - ['+', '-'], - ['*', '@', '/', '//', '%'], - ['unary_neg', 'unary_inv', 'unary_pos'], - ['**'] -] - -var $op_weight = {}, - $weight = 1 -for(var _tmp of $op_order){ - for(var item of _tmp){ - $op_weight[item] = $weight - } - $weight++ -} // $B.ast is in generated script py_ast.js -var ast = $B.ast, - op2ast_class = $B.op2ast_class - -function ast_body(block_ctx){ - // return the attribute body of nodes with a block (def, class etc.) - var body = [] - for(var child of block_ctx.node.children){ - var ctx = child.context.tree[0] - if(['single_kw', 'except', 'decorator'].indexOf(ctx.type) > -1 || - (ctx.type == 'condition' && ctx.token == 'elif')){ - continue - } - var child_ast = ctx.ast() - if(ast.expr.indexOf(child_ast.constructor) > -1){ - child_ast = new ast.Expr(child_ast) - copy_position(child_ast, child_ast.value) - } - body.push(child_ast) - } - return body -} +var ast = $B.ast -var ast_dump = $B.ast_dump = function(tree, indent){ - var attr, - value - indent = indent || 0 - if(tree === _b_.None){ - // happens in dictionary keys for **kw - return 'None' - }else if(typeof tree == 'string'){ - return `'${tree}'` - }else if(typeof tree == 'number'){ - return tree + '' - }else if(tree.imaginary){ - return tree.value + 'j' - }else if(Array.isArray(tree)){ - if(tree.length == 0){ - return '[]' - } - res = '[\n' - var items = [] - for(var x of tree){ - try{ - items.push(ast_dump(x, indent + 1)) - }catch(err){ - console.log('error', tree) - console.log('for item', x) - throw err - } - } - res += items.join(',\n') - return res + ']' - }else if(tree.$name){ - return tree.$name + '()' - }else if(tree instanceof ast.MatchSingleton){ - return `MatchSingleton(value=${$B.AST.$convert(tree.value)})` - }else if(tree instanceof ast.Constant){ - value = tree.value - // For imaginary numbers, value is an object with - // attribute "imaginary" set - if(value.imaginary){ - return `Constant(value=${_b_.repr(value.value)}j)` - } - return `Constant(value=${$B.AST.$convert(value)})` - } - var proto = Object.getPrototypeOf(tree).constructor - var res = ' ' .repeat(indent) + proto.$name + '(' - if($B.ast_classes[proto.$name] === undefined){ - console.log('no ast class', proto) - } - var attr_names = $B.ast_classes[proto.$name].split(','), - attrs = [] - // remove trailing * in attribute names - attr_names = attr_names.map(x => (x.endsWith('*') || x.endsWith('?')) ? 
- x.substr(0, x.length - 1) : x) - if([ast.Name].indexOf(proto) > -1){ - for(attr of attr_names){ - if(tree[attr] !== undefined){ - attrs.push(`${attr}=${ast_dump(tree[attr])}`) - } - } - return res + attrs.join(', ') + ')' - } - for(attr of attr_names){ - if(tree[attr] !== undefined){ - value = tree[attr] - attrs.push(attr + '=' + - ast_dump(tree[attr], indent + 1).trimStart()) - } - } - if(attrs.length > 0){ - res += '\n' - res += attrs.map(x => ' '.repeat(indent + 1) + x).join(',\n') - } - res += ')' - return res -} function get_line(filename, lineno){ var src = $B.file_cache[filename], @@ -301,345 +105,6 @@ $B.future_features = function(mod, filename){ return {features} } -// Functions used to set position attributes to AST nodes -function set_position(ast_obj, position, end_position){ - ast_obj.lineno = position.start[0] - ast_obj.col_offset = position.start[1] - position = end_position || position - ast_obj.end_lineno = position.end[0] - ast_obj.end_col_offset = position.end[1] -} - -function copy_position(target, origin){ - target.lineno = origin.lineno - target.col_offset = origin.col_offset - target.end_lineno = origin.end_lineno - target.end_col_offset = origin.end_col_offset -} - -/* -Function called in case of SyntaxError or IndentationError -========================================================== -*/ -function first_position(context){ - var ctx = context - while(ctx.tree && ctx.tree.length > 0){ - ctx = ctx.tree[0] - } - return ctx.position -} - -function last_position(context){ - var ctx = context - while(ctx.tree && ctx.tree.length > 0){ - ctx = $B.last(ctx.tree) - if(ctx.end_position){ - return ctx.end_position - } - } - return ctx.end_position || ctx.position -} - -function raise_error_known_location(type, filename, lineno, col_offset, - end_lineno, end_col_offset, line, message){ - var exc = type.$factory(message) - exc.filename = filename - exc.lineno = lineno - exc.offset = col_offset + 1 - exc.end_lineno = end_lineno - exc.end_offset = end_col_offset + 1 - exc.text = line - exc.args[1] = $B.fast_tuple([filename, exc.lineno, exc.offset, exc.text, - exc.end_lineno, exc.end_offset]) - exc.$frame_obj = $B.frame_obj - throw exc -} - -$B.raise_error_known_location = raise_error_known_location - - -function raise_syntax_error_known_range(context, a, b, msg){ - // a and b are the first and last tokens for the exception - raise_error_known_location(_b_.SyntaxError, get_module(context).filename, - a.start[0], a.start[1], b.end[0], b.end[1], a.line, msg) -} - -function raise_error(errtype, context, msg, token){ - var filename = get_module(context).filename - token = token || $token.value - msg = msg || 'invalid syntax' - if(msg.startsWith('(')){ - msg = 'invalid syntax ' + msg - } - msg = msg.trim() - raise_error_known_location(errtype, filename, - token.start[0], token.start[1], - token.end[0], token.end[1] - 1, - token.line, msg) -} - -function raise_syntax_error(context, msg, token){ - raise_error(_b_.SyntaxError, context, msg, token) -} - -function raise_indentation_error(context, msg, indented_node){ - // IndentationError - if(indented_node){ - // indent is the node that expected an indentation - var type = indented_node.context.tree[0].type, - token = indented_node.context.tree[0].token, - lineno = indented_node.line_num - - if (type == 'except' && indented_node.context.tree[0].try_node.context.is_trystar) { - type = 'except*' - } - - switch(type){ - case 'class': - type = 'class definition' - break - case 'condition': - type = `'${token}' statement` - break - case 
'def': - type = 'function definition' - break - case 'case': - case 'except': - case 'except*': - case 'for': - case 'match': - case 'try': - case 'while': - case 'with': - type = `'${type}' statement` - break - case 'single_kw': - type = `'${token}' statement` - break - } - msg += ` after ${type} on line ${lineno}` - } - raise_error(_b_.IndentationError, context, msg) -} - -/* -Function that checks that a context is not inside another incompatible -context. Used for (augmented) assignements */ -function check_assignment(context, kwargs){ - // kwargs, if provided, is a Javascript object that can have these - // attributes: - // .once : if set, only check the context; otherwise, also check - // the context's parents - // .delete: if set, the context checked is not an assignment but - // a "del"; adapt error message - - function in_left_side(context, assign_type){ - var ctx = context - while(ctx){ - if(ctx.parent && ctx.parent.type == assign_type && - ctx === ctx.parent.tree[0]){ - return true - } - ctx = ctx.parent - } - } - - var once, - action = 'assign to', - augmented = false - if(kwargs){ - once = kwargs.once - action = kwargs.action || action - augmented = kwargs.augmented === undefined ? false : kwargs.augmented - } - var ctx = context, - forbidden = ['assert', 'import', 'raise', 'return', 'decorator', - 'comprehension', 'await'] - if(action != 'delete'){ - // "del x = ..." is invalid - forbidden.push('del') - } - - function report(wrong_type, a, b){ - a = a || context.position - b = b || $token.value - if(augmented){ - raise_syntax_error_known_range( - context, a, b, - `'${wrong_type}' is an illegal expression ` + - 'for augmented assignment') - }else{ - var msg = wrong_type - if(Array.isArray(msg)){ - // eg assignment to None - msg = msg[0] - }else if($token.value.string == '=' && $token.value.type == 'OP'){ - if(parent_match(context, {type: 'augm_assign'})){ - // "x += 1, y = 2" - raise_syntax_error(context) - } - if(parent_match(context, {type: 'assign'})){ - raise_syntax_error_known_range( - context, - a, b, - `invalid syntax. Maybe you meant '==' or ':=' instead of '='?`) - } - if(! parent_match(context, {type: 'list_or_tuple'})){ - msg += " here. Maybe you meant '==' instead of '='?" 
- } - } - raise_syntax_error_known_range( - context, - a, b, - `cannot ${action} ${msg}`) - } - } - - // no assign in left side of augmented assignment - if(in_left_side(context, 'augm_assign')){ - raise_syntax_error(context) - } - - if(context.type == 'target_list'){ - for(let target of context.tree){ - check_assignment(target, {action: 'assign to'}) - } - return - } - ctx = context - while(ctx){ - if(forbidden.indexOf(ctx.type) > -1){ - raise_syntax_error(context, - `(assign to ${ctx.type})`) - }else if(ctx.type == "expr"){ - if(parent_match(ctx, {type: 'annotation'})){ - return true - } - if(ctx.parent.type == 'yield'){ - raise_syntax_error_known_range(ctx, ctx.parent.position, - last_position(ctx), - "assignment to yield expression not possible") - } - - var assigned = ctx.tree[0] - if(assigned.type == "op"){ - if($B.op2method.comparisons[ctx.tree[0].op] !== undefined){ - if(parent_match(ctx, {type: 'target_list'})){ - // "for i < (): pass" - raise_syntax_error(context) - } - report('comparison', assigned.tree[0].position, - last_position(assigned)) - }else{ - report('expression', assigned.tree[0].position, - last_position(assigned)) - } - }else if(assigned.type == 'attribute' && - parent_match(ctx, {type: 'condition'})){ - report('attribute', ctx.position, last_position(context)) - }else if(assigned.type == 'sub' && - parent_match(ctx, {type: 'condition'})){ - report('subscript', ctx.position, last_position(context)) - }else if(assigned.type == 'unary'){ - report('expression', assigned.position, last_position(assigned)) - }else if(assigned.type == 'call'){ - report('function call', assigned.position, assigned.end_position) - }else if(assigned.type == 'id'){ - var name = assigned.value - if(['None', 'True', 'False', '__debug__'].indexOf(name) > -1){ - // argument as Array to avoid adding "Maybe you meant ==" - if(name == '__debug__' && augmented){ - // special case (or inconsistency ?) - $token.value = assigned.position - raise_syntax_error(assigned, - 'cannot assign to __debug__') - } - report([name]) - } - }else if(['str', 'int', 'float', 'complex'].indexOf(assigned.type) > -1){ - if(ctx.parent.type != 'op'){ - report('literal') - } - }else if(assigned.type == "ellipsis"){ - report('ellipsis') - }else if(assigned.type == 'genexpr'){ - report(['generator expression']) - }else if(assigned.type == 'starred'){ - if(action == 'delete'){ - report('starred', assigned.position, last_position(assigned)) - } - check_assignment(assigned.tree[0], {action, once: true}) - }else if(assigned.type == 'named_expr'){ - if(! assigned.parenthesized){ - report('named expression') - }else if(ctx.parent.type == 'node'){ - raise_syntax_error_known_range( - context, - assigned.target.position, - last_position(assigned), - "cannot assign to named expression here. " + - "Maybe you meant '==' instead of '='?") - }else if(action == 'delete'){ - report('named expression', assigned.position, - last_position(assigned)) - } - }else if(assigned.type == 'list_or_tuple'){ - for(let item of ctx.tree){ - check_assignment(item, {action, once: true}) - } - }else if(assigned.type == 'dict_or_set'){ - if(assigned.closed){ - report(assigned.real == 'set' ? 
'set display' : 'dict literal', - ctx.position, - last_position(assigned)) - } - }else if(assigned.type == 'lambda'){ - report('lambda') - }else if(assigned.type == 'ternary'){ - report(['conditional expression']) - }else if(['fstring', 'JoinedStr'].indexOf(assigned.type) > -1){ - report('f-string expression', - assigned.position, - last_position(assigned)) - } - }else if(ctx.type == 'list_or_tuple'){ - for(let item of ctx.tree){ - check_assignment(item, {action, once: true}) - } - }else if(ctx.type == 'ternary'){ - report(['conditional expression'], - ctx.position, last_position(context)) - }else if(ctx.type == 'op'){ - let a = ctx.tree[0].position, - last = $B.last(ctx.tree).tree[0], - b = last.end_position || last.position - if($B.op2method.comparisons[ctx.op] !== undefined){ - if(parent_match(context, {type: 'target_list'})){ - // "for i < (): pass" - raise_syntax_error(context) - } - report('comparison', a, b) - }else{ - report('expression', a, b) - } - }else if(ctx.type == 'yield'){ - report('yield expression') - }else if(ctx.comprehension){ - break - } - if(once){ - break - } - ctx = ctx.parent - } -} - -function remove_abstract_expr(tree){ - if(tree.length > 0 && $B.last(tree).type == 'abstract_expr'){ - tree.pop() - } -} $B.format_indent = function(js, indent){ // Indent JS code based on curly braces ({ and }) @@ -671,7139 +136,46 @@ $B.format_indent = function(js, indent){ last_is_closing_brace = line.endsWith('}') if(line.startsWith('}')){ level-- - }else if(line.endsWith('}')){ - line = line.substr(0, line.length - 1) - add_closing_brace = true - } - if(level < 0){ - if($B.get_option('debug') > 2){ - console.log('wrong js indent') - console.log(res) - } - level = 0 - } - try{ - res += (add_spaces ? indentation.repeat(level) : '') + line + '\n' - }catch(err){ - console.log(res) - throw err - } - if(line.endsWith('{')){ - level++ - }else if(add_closing_brace){ - level-- - if(level < 0){ - level = 0 - } - try{ - res += indentation.repeat(level) + '}\n' - }catch(err){ - console.log(res) - throw err - } - } - last_is_backslash = line.endsWith('\\') - last_is_var_and_comma = line.endsWith(',') && - (line.startsWith('var ') || last_is_var_and_comma) - } - return res -} - - -function show_line(ctx){ - // for debugging - var lnum = get_node(ctx).line_num, - src = get_module(ctx).src - console.log('this', ctx, '\nline', lnum, src.split('\n')[lnum - 1]) -} - -/* -Class for syntax tree -===================== - -An instance is created for the whole Python program as the root of the tree. - -For each instruction in the Python source code, an instance is created -as a child of the block where it stands : the root for instructions at -module level, or a function definition, a loop, a condition, etc. 
-*/ - -var $Node = $B.parser.$Node = function(type){ - this.type = type - this.children = [] -} - -$Node.prototype.add = function(child){ - // Insert as the last child - this.children[this.children.length] = child - child.parent = this - child.module = this.module -} - -$Node.prototype.ast = function(){ - var root_ast = new ast.Module([], []) - root_ast.lineno = this.line_num - for(var node of this.children){ - var t = node.context.tree[0] - // Ignore except / elif / else / finally : they are attributes of - // try / for / if nodes - // decorator is attribute of the class / def node - if(['single_kw', 'except', 'decorator'].indexOf(t.type) > -1 || - (t.type == 'condition' && t.token == 'elif')){ - continue - } - var node_ast = node.context.tree[0].ast() - if(ast.expr.indexOf(node_ast.constructor) > -1){ - node_ast = new ast.Expr(node_ast) - copy_position(node_ast, node_ast.value) - } - root_ast.body.push(node_ast) - } - - if(this.mode == 'eval'){ - if(root_ast.body.length > 1 || - ! (root_ast.body[0] instanceof $B.ast.Expr)){ - raise_syntax_error(this.children[0].context, - 'eval() argument must be an expression') - } - root_ast = new $B.ast.Expression(root_ast.body[0].value) - copy_position(root_ast, root_ast.body) - } - return root_ast -} - -$Node.prototype.insert = function(pos, child){ - // Insert child at position pos - this.children.splice(pos, 0, child) - child.parent = this - child.module = this.module -} - -$Node.prototype.show = function(indent){ - // For debugging purposes - var res = '' - if(this.type === 'module'){ - for(let child of this.children){ - res += child.show(indent) - } - return res - } - - indent = indent || 0 - res += ' '.repeat(indent) - res += this.context - if(this.children.length > 0){ - res += '{' - } - res +='\n' - for(let child of this.children){ - res += child.show(indent + 4) - } - if(this.children.length > 0){ - res += ' '.repeat(indent) - res += '}\n' - } - return res -} - - -/* -Context classes -=============== - -In the parser, for each token found in the source code, a -new context is created by a call like : - - new_context = transition(current_context, token_type, token_value) - -For each new instruction, an instance of $Node is created ; it receives an -attribute "context" which is an initial, empty context. - -For instance, if the first token is the keyword "assert", the new context -is an instance of class AssertCtx, in a state where it expects an -expression. - -Most contexts have an attribute "tree", a list of the elements associated -with the keyword or the syntax element (eg the arguments in a function -definition). - -Context have a method .transition(token, value) called by the tokens -dispatcher. It handles the next token in the token stream, raises errors if -the token is invalid. - -Most contexts have a method ast() that returns the AST node for this context. -It is called by the method ast() of the root node. -*/ - -var AbstractExprCtx = $B.parser.AbstractExprCtx = function(context, with_commas){ - this.type = 'abstract_expr' - // allow expression with comma-separated values, or a single value ? 
- this.with_commas = with_commas - this.parent = context - this.tree = [] - this.position = $token.value - context.tree.push(this) -} - -AbstractExprCtx.prototype.transition = function(token, value){ - var context = this - var packed = context.packed, - is_await = context.is_await, - commas - - switch(token) { - case 'await': - case 'id': - case 'imaginary': - case 'int': - case 'float': - case 'str': - case 'JoinedStr': - case 'bytes': - case 'ellipsis': - case '[': - case '(': - case '{': - case '.': - case 'not': - case 'lambda': - case 'yield': - context.parent.tree.pop() // remove abstract expression - commas = context.with_commas - var star_position - if(context.packed){ - star_position = context.star_position - } - context = context.parent - context.packed = packed - context.is_await = is_await - if(context.position === undefined){ - context.position = $token.value - } - if(star_position){ - context.star_position = star_position - } - } - switch(token) { - case 'await': - return new AbstractExprCtx(new AwaitCtx( - new ExprCtx(context, 'await', false)), false) - case 'id': - return new IdCtx(new ExprCtx(context, 'id', commas), - value) - case 'str': - return new StringCtx(new ExprCtx(context, 'str', commas), - value) - case 'JoinedStr': - return new FStringCtx(new ExprCtx(context, 'str', commas), - value) - case 'bytes': - return new StringCtx(new ExprCtx(context, 'bytes', commas), - value) - case 'int': - return new NumberCtx('int', - new ExprCtx(context, 'int', commas), value) - case 'float': - return new NumberCtx('float', - new ExprCtx(context, 'float', commas), value) - case 'imaginary': - return new NumberCtx('imaginary', - new ExprCtx(context, 'imaginary', commas), value) - case '(': - return new ListOrTupleCtx( - new ExprCtx(context, 'tuple', commas), 'tuple') - case '[': - return new ListOrTupleCtx( - new ExprCtx(context, 'list', commas), 'list') - case '{': - return new AbstractExprCtx( - new DictOrSetCtx( - new ExprCtx(context, 'dict_or_set', commas)), false) - case 'ellipsis': - return new EllipsisCtx( - new ExprCtx(context, 'ellipsis', commas)) - case 'not': - if(context.type == 'op' && context.op == 'is'){ // "is not" - context.op = 'is_not' - return new AbstractExprCtx(context, false) - } - return new AbstractExprCtx( - new NotCtx(new ExprCtx(context, 'not', commas)), false) - case 'lambda': - return new LambdaCtx(new ExprCtx(context, 'lambda', commas)) - case 'op': - var tg = value - if(context.parent.type == 'op' && '+-~'.indexOf(tg) == -1){ - raise_syntax_error(context) - } - switch(tg) { - case '*': - context.parent.tree.pop() // remove abstract expression - commas = context.with_commas - context = context.parent - context.position = $token.value - return new AbstractExprCtx( - new StarredCtx( - new ExprCtx(context, 'expr', commas)), - false) - case '**': - context.parent.tree.pop() // remove abstract expression - commas = context.with_commas - context = context.parent - context.position = $token.value - - if (context.type != 'dict_or_set') { - raise_syntax_error(context) - } - - return new AbstractExprCtx( - new KwdCtx( - new ExprCtx(context, 'expr', commas)), - false) - case '-': - case '~': - case '+': - // unary op - context.parent.tree.pop() // remove abstract expr - return new AbstractExprCtx( - new UnaryCtx( - new ExprCtx(context.parent, 'unary', false), - tg), - false - ) - case 'not': - context.parent.tree.pop() // remove abstract expression - commas = context.with_commas - context = context.parent - return new NotCtx( - new ExprCtx(context, 'not', 
commas)) - case '...': - return new EllipsisCtx(new ExprCtx(context, 'ellipsis', commas)) - } - raise_syntax_error(context) - break - case 'in': - if(context.parent.type == 'op' && context.parent.op == 'not'){ - context.parent.op = 'not_in' - return context - } - raise_syntax_error(context) - break - case '=': - if(context.parent.type == "yield"){ - raise_syntax_error(context, - "assignment to yield expression not possible", - context.parent.position) - } - raise_syntax_error(context) - break - case 'yield': - return new AbstractExprCtx(new YieldCtx(context), true) - case ':': - if(context.parent.type == "sub" || - (context.parent.type == "list_or_tuple" && - context.parent.parent.type == "sub")){ - return new AbstractExprCtx(new SliceCtx(context.parent), false) - } - return transition(context.parent, token, value) - case ')': - case ',': - switch(context.parent.type) { - case 'list_or_tuple': - case 'slice': - case 'call_arg': - case 'op': - case 'yield': - break - case 'match': - if(token == ','){ - // implicit tuple - context.parent.tree.pop() - var tuple = new ListOrTupleCtx(context.parent, - 'tuple') - tuple.implicit = true - tuple.has_comma = true - tuple.tree = [context] - context.parent = tuple - return tuple - } - break - case 'func_arg_id': - raise_syntax_error(context, 'expected default value expression') - default: - raise_syntax_error(context) - - } - break - case '.': - case 'assert': - case 'break': - case 'class': - case 'continue': - case 'def': - case 'except': - case 'for': - case 'while': - case 'return': - case 'try': - raise_syntax_error(context) - break - } - return transition(context.parent, token, value) -} - -var AliasCtx = $B.parser.AliasCtx = function(context){ - // Class for context manager alias - this.type = 'ctx_manager_alias' - this.parent = context - this.tree = [] - context.tree[context.tree.length - 1].alias = this -} - -AliasCtx.prototype.transition = function(token, value){ - var context = this - switch(token){ - case ',': - case ')': - case ':': - check_assignment(context.tree[0]) - context.parent.set_alias(context.tree[0].tree[0]) - return transition(context.parent, token, value) - case 'eol': - $token.value = last_position(context) - raise_syntax_error(context, "expected ':'") - } - raise_syntax_error(context) -} - -var AnnotationCtx = $B.parser.AnnotationCtx = function(context){ - // Class for annotations, eg "def f(x:int) -> list:" - this.type = 'annotation' - this.parent = context - this.tree = [] - // annotation is stored in attribute "annotations" of parent, not "tree" - context.annotation = this - - var scope = get_scope(context) - - if(scope.ntype == "def" && context.tree && context.tree.length > 0 && - context.tree[0].type == "id"){ - var name = context.tree[0].value - scope.annotations = scope.annotations || new Set() - scope.annotations.add(name) - } -} - -AnnotationCtx.prototype.transition = function(token){ - var context = this - if(token == "eol" && context.tree.length == 1 && - context.tree[0].tree.length == 0){ - raise_syntax_error(context) - }else if(token == ':' && context.parent.type != "def"){ - raise_syntax_error(context, "more than one annotation") - }else if(token == "augm_assign"){ - raise_syntax_error(context, "augmented assign as annotation") - }else if(token == "op"){ - raise_syntax_error(context, "operator as annotation") - } - if(context.parent.type == 'expr'){ - context.parent.with_commas = false - } - return transition(context.parent, token) -} - -var AssertCtx = $B.parser.AssertCtx = function(context){ - // Context for 
keyword "assert" - this.type = 'assert' - this.parent = context - this.tree = [] - this.position = $token.value - context.tree[context.tree.length] = this -} - -AssertCtx.prototype.ast = function(){ - // Assert(expr test, expr? msg) - var msg = this.tree[1], - ast_obj = new ast.Assert(this.tree[0].ast(), - msg === undefined ? msg : msg.ast()) - set_position(ast_obj, this.position) - return ast_obj -} - -AssertCtx.prototype.transition = function(token){ - var context = this - if(token == ","){ - if(this.tree.length > 1){ - raise_syntax_error(context, - '(too many commas after assert)') - } - return new AbstractExprCtx(this, false) - } - if(token == 'eol'){ - if(this.tree.length == 1 && - this.tree[0].type == 'expr' && - this.tree[0].tree[0].type == 'list_or_tuple'){ - $B.warn(_b_.SyntaxWarning, - "assertion is always true, perhaps remove parentheses?", - get_module(context).filename, - $token.value) - } - return transition(context.parent, token) - } - raise_syntax_error(context) -} - -var AssignCtx = $B.parser.AssignCtx = function(context){ - /* - Class for the assignment operator "=" - context is the left operand of assignment - This check is done when the AssignCtx object is created, but must be - disabled if a new AssignCtx object is created afterwards by method - transform() - */ - check_assignment(context) - - this.type = 'assign' - this.position = $token.value - - // replace parent by "this" in parent tree - context.parent.tree.pop() - context.parent.tree.push(this) - - this.parent = context.parent - this.tree = [context] - - if(context.type == 'assign'){ - check_assignment(context.tree[1]) - }else{ - var assigned = context.tree[0] - if(assigned.type == "ellipsis"){ - raise_syntax_error(context, 'cannot assign to Ellipsis') - }else if(assigned.type == 'unary'){ - raise_syntax_error(context, 'cannot assign to operator') - }else if(assigned.type == 'starred'){ - if(assigned.tree[0].name == 'id'){ - var id = assigned.tree[0].tree[0].value - if(['None', 'True', 'False', '__debug__'].indexOf(id) > -1){ - raise_syntax_error(context, 'cannot assign to ' + id) - } - } - // If the packed item was in a tuple (eg "a, *b = X") the - // assignment is valid; in this case the attribute in_tuple - // is set - if(assigned.parent.in_tuple === undefined){ - raise_syntax_error(context, - "starred assignment target must be in a list or tuple") - } - } - } -} - -function set_ctx_to_store(obj){ - if(Array.isArray(obj)){ - for(let item of obj){ - set_ctx_to_store(item) - } - }else if(obj instanceof ast.List || - obj instanceof ast.Tuple){ - for(let item of obj.elts){ - set_ctx_to_store(item) - } - }else if(obj instanceof ast.Starred){ - obj.value.ctx = new ast.Store() - }else if(obj === undefined){ - // ignore - }else if(obj.ctx){ - obj.ctx = new ast.Store() - }else{ - console.log('bizarre', obj, obj.constructor.$name) - } -} - -AssignCtx.prototype.ast = function(){ - var value = this.tree[1].ast(), - targets = [], - target = this.tree[0] - if(target.type == 'expr' && target.tree[0].type == 'list_or_tuple'){ - target = target.tree[0] - } - if(target.type == 'list_or_tuple'){ - target = target.ast() - target.ctx = new ast.Store() - targets = [target] - }else{ - while(target.type == 'assign'){ - targets.splice(0, 0, target.tree[1].ast()) - target = target.tree[0] - } - targets.splice(0, 0, target.ast()) - } - value.ctx = new ast.Load() - var ast_obj - if(target.annotation){ - ast_obj = new ast.AnnAssign( - target.tree[0].ast(), - target.annotation.tree[0].ast(), - value, - target.$was_parenthesized ? 
0 : 1) - // set position of annotation to get annotation string - // in ast_to_js.js - set_position(ast_obj.annotation, target.annotation.position, - last_position(target.annotation)) - ast_obj.target.ctx = new ast.Store() - }else{ - ast_obj = new ast.Assign(targets, value) - } - set_position(ast_obj, this.position) - set_ctx_to_store(ast_obj.targets) - return ast_obj -} - -AssignCtx.prototype.transition = function(token){ - var context = this - if(token == 'eol'){ - if(context.tree[1].type == 'abstract_expr'){ - raise_syntax_error(context) - } - return transition(context.parent, 'eol') - } - raise_syntax_error(context) -} - -var AsyncCtx = $B.parser.AsyncCtx = function(context){ - // Class for async : def, while, for - this.type = 'async' - this.parent = context - context.async = true - this.position = context.position = $token.value -} - -AsyncCtx.prototype.transition = function(token, value){ - var context = this - if(token == "def"){ - return transition(context.parent, token, value) - }else if(token == "with"){ - let ctx = transition(context.parent, token, value) - ctx.async = context // set attr "async" of with context - return ctx - }else if(token == "for"){ - let ctx = transition(context.parent, token, value) - ctx.parent.async = context // set attr "async" of for context - return ctx - } - raise_syntax_error(context) -} - -var AttrCtx = $B.parser.AttrCtx = function(context){ - // Class for object attributes (eg x in obj.x) - this.type = 'attribute' - this.value = context.tree[0] - this.parent = context - this.position = $token.value - context.tree.pop() - context.tree[context.tree.length] = this - this.tree = [] - this.func = 'getattr' // becomes setattr for an assignment -} - -AttrCtx.prototype.ast = function(){ - // ast.Attribute(value, attr, ctx) - var value = this.value.ast(), - attr = this.unmangled_name, - ctx = new ast.Load() - if(this.func == 'setattr'){ - ctx = new ast.Store() - }else if(this.func == 'delattr'){ - ctx = new ast.Delete() - } - var ast_obj = new ast.Attribute(value, attr, ctx) - set_position(ast_obj, this.position, this.end_position) - return ast_obj -} - -AttrCtx.prototype.transition = function(token, value){ - var context = this - if(token === 'id'){ - var name = value - if(name == '__debug__'){ - raise_syntax_error(context, 'cannot assign to __debug__') - }else if(noassign[name] === true){ - raise_syntax_error(context) - } - context.unmangled_name = name - context.position = $token.value - context.end_position = $token.value - name = mangle_name(name, context) - context.name = name - return context.parent - } - raise_syntax_error(context) -} - -var AugmentedAssignCtx = $B.parser.AugmentedAssignCtx = function(context, op){ - // Class for augmented assignments such as "+=" - - check_assignment(context, {augmented: true}) - - this.type = 'augm_assign' - this.context = context - this.parent = context.parent - this.position = $token.value - context.parent.tree.pop() - context.parent.tree[context.parent.tree.length] = this - this.op = op - this.tree = [context] - - var scope = this.scope = get_scope(this) - this.module = scope.module -} - -AugmentedAssignCtx.prototype.ast = function(){ - // AugAssign(expr target, operator op, expr value) - var target = this.tree[0].ast(), - value = this.tree[1].ast() - target.ctx = new ast.Store() - value.ctx = new ast.Load() - var op = this.op.substr(0, this.op.length -1), - ast_type_class = op2ast_class[op], - ast_class = ast_type_class[1] - - var ast_obj = new ast.AugAssign(target, new ast_class(), value) - 
set_position(ast_obj, this.position) - return ast_obj -} - -AugmentedAssignCtx.prototype.transition = function(token){ - var context = this - if(token == 'eol'){ - if(context.tree[1].type == 'abstract_expr'){ - raise_syntax_error(context) - } - return transition(context.parent, 'eol') - } - raise_syntax_error(context) -} - -var AwaitCtx = $B.parser.AwaitCtx = function(context){ - // Class for "await" - this.type = 'await' - this.parent = context - this.tree = [] - this.position = $token.value - context.tree.push(this) - - var p = context - while(p){ - if(p.type == "list_or_tuple"){ - p.is_await = true - } - p = p.parent - } - var node = get_node(this) - node.awaits = node.awaits || [] - node.awaits.push(this) -} - -AwaitCtx.prototype.ast = function(){ - // Await(expr value) - var ast_obj = new ast.Await(this.tree[0].ast()) - set_position(ast_obj, this.position) - return ast_obj -} - -AwaitCtx.prototype.transition = function(token, value){ - var context = this - context.parent.is_await = true - return transition(context.parent, token, value) -} - -var BodyCtx = $B.parser.BodyCtx = function(context){ - // inline body for def, class, if, elif, else, try... - // creates a new node, child of context node - var ctx_node = context.parent - while(ctx_node.type !== 'node'){ - ctx_node = ctx_node.parent - } - var tree_node = ctx_node.node - var body_node = new $Node() - body_node.is_body_node = true - body_node.line_num = tree_node.line_num - tree_node.insert(0, body_node) - return new NodeCtx(body_node) -} - -var BreakCtx = $B.parser.BreakCtx = function(context){ - // Used for the keyword "break" - // A flag is associated to the enclosing "for" or "while" loop - // If the loop exits with a break, this flag is set to true - // so that the "else" clause of the loop, if present, is executed - - this.type = 'break' - this.position = $token.value - - this.parent = context - context.tree[context.tree.length] = this -} - -BreakCtx.prototype.ast = function(){ - var ast_obj = new ast.Break() - set_position(ast_obj, this.position) - return ast_obj -} - -BreakCtx.prototype.transition = function(token){ - var context = this - if(token == 'eol'){ - return transition(context.parent, 'eol') - } - raise_syntax_error(context) -} - -var CallArgCtx = $B.parser.CallArgCtx = function(context){ - // Base class for arguments in a function call - this.type = 'call_arg' - this.parent = context - this.tree = [] - this.position = $token.value - context.tree.push(this) - this.expect = 'id' -} - -CallArgCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case 'await': - case 'id': - case 'imaginary': - case 'int': - case 'float': - case 'str': - case 'JoinedStr': - case 'bytes': - case '[': - case '(': - case '{': - case '.': - case 'ellipsis': - case 'not': - case 'lambda': - if(context.expect == 'id'){ - this.position = $token.value - context.expect = ',' - var expr = new AbstractExprCtx(context, false) - return transition(expr, token, value) - } - break - case '=': - if(context.expect == ','){ - return new ExprCtx(new KwArgCtx(context), 'kw_value', - false) - } - break - case 'for': - return new TargetListCtx(new ForExpr(new GeneratorExpCtx(context))) - case 'op': - if(context.expect == 'id'){ - var op = value - context.expect = ',' - switch(op) { - case '+': - case '-': - case '~': - return transition(new AbstractExprCtx(context,false),token,op) - case '*': - context.parent.tree.pop() - return new StarArgCtx(context.parent) - case '**': - context.parent.tree.pop() - return new 
DoubleStarArgCtx(context.parent) - } - } - raise_syntax_error(context) - break - case ')': - return transition(context.parent,token) - case ':': - if(context.expect == ',' && - context.parent.parent.type == 'lambda') { - return transition(context.parent.parent, token) - } - break - case ',': - if(context.expect == ','){ - return transition(context.parent, token, value) - } - } - raise_syntax_error(context) -} - -var CallCtx = $B.parser.CallCtx = function(context){ - // Context of a call on a callable, ie what is inside the parenthesis - // in "callable(...)" - this.position = $token.value - this.type = 'call' - this.func = context.tree[0] - if(this.func !== undefined){ // undefined for lambda - this.func.parent = this - this.parenth_position = this.position - this.position = this.func.position - } - this.parent = context - if(context.type != 'class'){ - context.tree.pop() - context.tree[context.tree.length] = this - }else{ - // class parameters - context.args = this - } - this.expect = 'id' - this.tree = [] -} - -CallCtx.prototype.ast = function(){ - var res = new ast.Call(this.func.ast(), [], []), - keywords = new Set() - for(var call_arg of this.tree){ - if(call_arg.type == 'double_star_arg'){ - let value = call_arg.tree[0].tree[0].ast(), - keyword = new ast.keyword(_b_.None, value) - delete keyword.arg - res.keywords.push(keyword) - }else if(call_arg.type == 'star_arg'){ - if(res.keywords.length > 0){ - if(! res.keywords[0].arg){ - raise_syntax_error(this, - 'iterable argument unpacking follows keyword argument unpacking') - } - } - let starred = new ast.Starred(call_arg.tree[0].ast()) - set_position(starred, call_arg.position) - starred.ctx = new ast.Load() - res.args.push(starred) - }else if(call_arg.type == 'genexpr'){ - res.args.push(call_arg.ast()) - }else{ - let item = call_arg.tree[0] - if(item === undefined){ - // case when call ends with ",)" - continue - } - if(item.type == 'kwarg'){ - let key = item.tree[0].value - if(key == '__debug__'){ - raise_syntax_error_known_range(this, - this.position, - this.end_position, - "cannot assign to __debug__") - }else if(['True', 'False', 'None'].indexOf(key) > -1){ - raise_syntax_error_known_range(this, - item.position, - item.equal_sign_position, - 'expression cannot contain assignment, perhaps you meant "=="?') - } - if(keywords.has(key)){ - raise_syntax_error_known_range(item, - item.position, - last_position(item), - `keyword argument repeated: ${key}`) - } - keywords.add(key) - let keyword = new ast.keyword(item.tree[0].value, - item.tree[1].ast()) - set_position(keyword, item.position) - res.keywords.push(keyword) - }else{ - if(res.keywords.length > 0){ - if(res.keywords[0].arg){ - raise_syntax_error_known_range(this, - item.position, - last_position(item), - 'positional argument follows keyword argument') - }else{ - raise_syntax_error_known_range(this, - item.position, - last_position(item), - 'positional argument follows keyword argument unpacking') - } - } - res.args.push(item.ast()) - } - } - } - set_position(res, this.position, this.end_position) - return res -} - -CallCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case ',': - if(context.expect == 'id'){ - raise_syntax_error(context) - } - context.expect = 'id' - return context - case 'await': - case 'id': - case 'imaginary': - case 'int': - case 'float': - case 'str': - case 'JoinedStr': - case 'bytes': - case '[': - case '(': - case '{': - case '.': - case 'not': - case 'lambda': - case 'ellipsis': - context.expect = ',' - return 
transition(new CallArgCtx(context), token, - value) - case ')': - context.end_position = $token.value - return context.parent - case 'op': - context.expect = ',' - switch(value) { - case '-': - case '~': - case '+': - context.expect = ',' - return transition(new CallArgCtx(context), token, - value) - case '*': - context.has_star = true - return new StarArgCtx(context) - case '**': - context.has_dstar = true - return new DoubleStarArgCtx(context) - } - raise_syntax_error(context) - break - case 'yield': - raise_syntax_error(context) - } - - return transition(context.parent, token, value) -} - -var CaseCtx = $B.parser.CaseCtx = function(node_ctx){ - // node already has an expression with the id "match" - this.type = "case" - this.position = $token.value - node_ctx.tree = [this] - this.parent = node_ctx - this.tree = [] - this.expect = 'as' -} - -CaseCtx.prototype.ast = function(){ - // ast.match_case(pattern, guard, body) - // pattern : the match pattern that the subject will be matched against - // guard : an expression that will be evaluated if the pattern matches the subject - var ast_obj = new ast.match_case(this.tree[0].ast(), - this.has_guard ? this.tree[1].tree[0].ast() : undefined, - ast_body(this.parent)) - set_position(ast_obj, this.position) - return ast_obj -} - -CaseCtx.prototype.set_alias = function(name){ - this.alias = name -} - -// check if case is 'irrefutable' (cf. PEP 634) -function is_irrefutable(pattern){ - var cause - if(pattern.type == "capture_pattern"){ - return pattern.tree[0] - }else if(pattern.type == "or_pattern"){ - for(var subpattern of pattern.tree){ - if(cause = is_irrefutable(subpattern)){ - return cause - } - } - }else if(pattern.type == "sequence_pattern" && - pattern.token == '(' && - pattern.tree.length == 1 && - (cause = is_irrefutable(pattern.tree[0]))){ - return cause - } - return false -} - -CaseCtx.prototype.transition = function(token, value){ - var context = this - switch(token){ - case 'as': - context.expect = ':' - return new AbstractExprCtx(new AliasCtx(context)) - case ':': - var cause - if(cause = is_irrefutable(this.tree[0])){ - // mark match node as having already an irrefutable pattern, - // so that remaining patterns raise a SyntaxError - get_node(context).parent.irrefutable = cause - } - switch(context.expect) { - case 'id': - case 'as': - case ':': - var last = $B.last(context.tree) - if(last && last.type == 'sequence_pattern'){ - remove_empty_pattern(last) - } - return BodyCtx(context) - } - break - case 'op': - if(value == '|'){ - return new PatternCtx(new PatternOrCtx(context)) - } - raise_syntax_error(context, "expected ':'") - break - case ',': - if(context.expect == ':' || context.expect == 'as'){ - return new PatternCtx(new PatternSequenceCtx(context)) - } - break - case 'if': - // guard - context.has_guard = true - return new AbstractExprCtx(new ConditionCtx(context, token), - false) - default: - raise_syntax_error(context, "expected ':'") - } -} - -var ClassCtx = $B.parser.ClassCtx = function(context){ - // Class for keyword "class" - this.type = 'class' - this.parent = context - this.tree = [] - this.position = $token.value - context.tree[context.tree.length] = this - this.expect = 'id' - - var scope = this.scope = get_scope(this) - this.parent.node.parent_block = scope - this.parent.node.bound = {} // will store the names bound in the function -} - -ClassCtx.prototype.ast = function(){ - // ClassDef:'name,bases*,keywords*,body*,decorator_list*,type_params*' - var decorators = get_decorators(this.parent.node), - bases = [], - 
keywords = [], - type_params = [] - if(this.args){ - for(var arg of this.args.tree){ - if(arg.tree[0].type == 'kwarg'){ - keywords.push(new ast.keyword(arg.tree[0].tree[0].value, - arg.tree[0].tree[1].ast())) - }else{ - bases.push(arg.tree[0].ast()) - } - } - } - if(this.type_params){ - type_params = this.type_params.ast() - } - var ast_obj = new ast.ClassDef(this.name, bases, keywords, - ast_body(this.parent), decorators, - type_params) - set_position(ast_obj, this.position) - return ast_obj -} - -ClassCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case 'id': - if(context.expect == 'id'){ - context.set_name(value) - context.expect = '(:' - return context - } - break - case '(': - if(context.name === undefined){ - raise_syntax_error(context, 'missing class name') - } - context.parenthesis_position = $token.value - return new CallCtx(context) - case '[': - if(context.name === undefined){ - raise_syntax_error(context, 'missing class name') - } - return new TypeParamsCtx(context) - case ':': - if(this.args){ - for(var arg of this.args.tree){ - var param = arg.tree[0] - if(arg.type != 'call_arg'){ - $token.value = context.parenthesis_position - raise_syntax_error(context, "invalid syntax") - } - if((param.type == 'expr' && param.name == 'id') || - param.type == "kwarg"){ - continue - } - $token.value = arg.position - raise_syntax_error(arg, 'invalid class parameter') - } - } - return BodyCtx(context) - case 'eol': - raise_syntax_error(context, "expected ':'") - } - raise_syntax_error(context) -} - -ClassCtx.prototype.set_name = function(name){ - var context = this.parent - this.random = $B.UUID() - this.name = name - this.id = context.node.module + '_' + name + '_' + this.random - this.parent.node.id = this.id - - var scope = this.scope, - parent_block = scope - - // Set attribute "qualname", which includes possible parent classes - var block = scope, - parent_classes = [] - while(block.ntype == "class"){ - parent_classes.splice(0, 0, block.context.tree[0].name) - block = block.parent - } - this.qualname = parent_classes.concat([name]).join(".") - - while(parent_block.context && - parent_block.context.tree[0].type == 'class'){ - parent_block = parent_block.parent - } - while(parent_block.context && - 'def' != parent_block.context.tree[0].type && - 'generator' != parent_block.context.tree[0].type){ - parent_block = parent_block.parent - } - this.parent.node.parent_block = parent_block - -} - -var Comprehension = { - generators: function(comps){ - // Return a list of comprehensions - // ast.comprehension(target, iter, ifs, is_async) - var comprehensions = [] - for(var item of comps){ - if(item.type == 'for'){ - var target = item.tree[0].ast() - set_ctx_to_store(target) - comprehensions.push( - new ast.comprehension( - target, - item.tree[1].ast(), - [], - item.is_async ? 
1 : 0 - ) - ) - }else{ - $B.last(comprehensions).ifs.push(item.tree[0].ast()) - } - } - return comprehensions - }, - make_comp: function(comp, context){ - comp.comprehension = true - comp.parent = context.parent - comp.id = comp.type + $B.UUID() - var scope = get_scope(context) - comp.parent_block = scope - while(scope){ - if(scope.context && scope.context.tree && - scope.context.tree.length > 0 && - scope.context.tree[0].async){ - comp.async = true - break - } - scope = scope.parent_block - } - comp.module = get_module(context).module - comp.module_ref = comp.module.replace(/\./g, '_') - context.parent.tree[context.parent.tree.length - 1] = comp - Comprehension.set_parent_block(context.tree[0], comp) - }, - set_parent_block: function(ctx, parent_block){ - if(ctx.tree){ - for(var item of ctx.tree){ - if(item.comprehension){ - item.parent_block = parent_block - } - Comprehension.set_parent_block(item, parent_block) - } - } - } -} - -var ConditionCtx = $B.parser.ConditionCtx = function(context,token){ - // Class for keywords "if", "elif", "while" - this.type = 'condition' - this.token = token - this.parent = context - this.tree = [] - this.position = $token.value - this.node = get_node(this) - this.scope = get_scope(this) - if(token == 'elif'){ - // in the AST, this is the attribute 'orelse' of the previous "if" - // or "elif" - var rank = this.node.parent.children.indexOf(this.node), - previous = this.node.parent.children[rank - 1] - previous.context.tree[0].orelse = this - } - context.tree.push(this) -} - -ConditionCtx.prototype.ast = function(){ - // While(expr test, stmt* body, stmt* orelse) | - // If(expr test, stmt* body, stmt* orelse) - var types = {'if': 'If', 'while': 'While', 'elif': 'If'} - var res = new ast[types[this.token]](this.tree[0].ast()) - if(this.orelse){ - if(this.orelse.token == 'elif'){ - res.orelse = [this.orelse.ast()] - }else{ - res.orelse = this.orelse.ast() - } - }else{ - res.orelse = [] - } - res.body = ast_body(this) - set_position(res, this.position) - return res -} - -ConditionCtx.prototype.transition = function(token, value){ - var context = this - if(token == ':'){ - if(context.tree[0].type == "abstract_expr" && - context.tree[0].tree.length == 0){ // issue #965 - raise_syntax_error(context) - } - return BodyCtx(context) - }else if(context.in_comp && context.token == 'if'){ - // [x for x in A if cond1 if cond2] - if(token == ']'){ - return transition(context.parent, token, value) - }else if(token == 'if'){ - var if_exp = new ConditionCtx(context.parent, 'if') - if_exp.in_comp = context.in_comp - return new AbstractExprCtx(if_exp, false) - }else if(')]}'.indexOf(token) > -1){ - return transition(this.parent, token, value) - }else if(context.in_comp && token == 'for'){ - return new TargetListCtx(new ForExpr(context.parent)) - } - if(token == ',' && parent_match(context, {type: 'call'})){ - raise_syntax_error_known_range(context, - context.in_comp.position, - last_position(context), - 'Generator expression must be parenthesized') - } - } - raise_syntax_error(context, "expected ':'") -} - -var ContinueCtx = $B.parser.ContinueCtx = function(context){ - // Class for keyword "continue" - this.type = 'continue' - this.parent = context - this.position = $token.value - get_node(this).is_continue = true - context.tree[context.tree.length] = this -} - -ContinueCtx.prototype.ast = function(){ - var ast_obj = new ast.Continue() - set_position(ast_obj, this.position) - return ast_obj -} - -ContinueCtx.prototype.transition = function(token){ - var context = this - if(token 
== 'eol'){return context.parent} - raise_syntax_error(context) -} - -var DecoratorCtx = $B.parser.DecoratorCtx = function(context){ - // Class for decorators - this.type = 'decorator' - this.parent = context - context.tree[context.tree.length] = this - this.tree = [] - this.position = $token.value -} - -DecoratorCtx.prototype.transition = function(token){ - var context = this - if(token == 'eol') { - return transition(context.parent, token) - } - raise_syntax_error(context) -} - -function get_decorators(node){ - var decorators = [] - var parent_node = node.parent - var rank = parent_node.children.indexOf(node) - while(true){ - rank-- - if(rank < 0){ - break - }else if(parent_node.children[rank].context.tree[0].type == - 'decorator'){ - var deco = parent_node.children[rank].context.tree[0].tree[0] - decorators.splice(0, 0, deco.ast()) - }else{ - break - } - } - return decorators -} - -var DefCtx = $B.parser.DefCtx = function(context){ - this.type = 'def' - this.name = null - this.parent = context - this.tree = [] - this.async = context.async - if(this.async){ - this.position = context.position - }else{ - this.position = $token.value - } - - context.tree[context.tree.length] = this - - // store id of enclosing functions - this.enclosing = [] - var scope = this.scope = get_scope(this) - if(scope.context && scope.context.tree[0].type == "class"){ - this.class_name = scope.context.tree[0].name - } - - // For functions inside classes, the parent scope is not the class body - // but the block where the class is defined - // - // Example - // - // a = 9 - // class A: - // a = 7 - // def f(self): - // print(a) - // - // A().f() # must print 9, not 7 - - var parent_block = scope - while(parent_block.context && - parent_block.context.tree[0].type == 'class'){ - parent_block = parent_block.parent - } - while(parent_block.context && - 'def' != parent_block.context.tree[0].type){ - parent_block = parent_block.parent - } - - this.parent.node.parent_block = parent_block - - // this.inside_function : set if the function is defined inside another - // function - var pb = parent_block - this.is_comp = pb.is_comp - while(pb && pb.context){ - if(pb.context.tree[0].type == 'def'){ - this.inside_function = true - break - } - pb = pb.parent_block - } - - this.module = scope.module - this.root = get_module(this) - - // Arrays for arguments - this.positional_list = [] - this.default_list = [] - this.other_args = null - this.other_kw = null - this.after_star = [] -} - -DefCtx.prototype.ast = function(){ - var args = { - posonlyargs: [], - args: [], - kwonlyargs: [], - kw_defaults: [], - defaults: [], - type_params: [] - }, - decorators = get_decorators(this.parent.node), - func_args = this.tree[1], - res - - args = func_args.ast() - if(this.async){ - res = new ast.AsyncFunctionDef(this.name, args, [], decorators) - }else{ - res = new ast.FunctionDef(this.name, args, [], decorators) - } - if(this.annotation){ - res.returns = this.annotation.tree[0].ast() - } - if(this.type_params){ - res.type_params = this.type_params.ast() - } - res.body = ast_body(this.parent) - set_position(res, this.position) - return res -} - -DefCtx.prototype.set_name = function(name){ - if(["None", "True", "False"].indexOf(name) > -1){ - raise_syntax_error(this) // invalid function name - } - new IdCtx(this, name) - this.name = name - this.id = this.scope.id + '_' + name - this.id = this.id.replace(/\./g, '_') // for modules inside packages - this.id += '_' + $B.UUID() - this.parent.node.id = this.id - this.parent.node.module = this.module -} 
- -DefCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case 'id': - if(context.name) { - raise_syntax_error(context) - } - context.set_name(value) - return context - case '(': - if(context.name == null){ - raise_syntax_error(context, - "missing name in function definition") - } - context.has_args = true; - return new FuncArgs(context) - case '[': - if(context.name === undefined){ - raise_syntax_error(context, 'missing function name') - } - return new TypeParamsCtx(context) - case ')': - return context - case 'annotation': - return new AbstractExprCtx(new AnnotationCtx(context), true) - case ':': - if(context.has_args){ - return BodyCtx(context) - } - raise_syntax_error(context, "expected '('") - break - case 'eol': - if(context.has_args){ - raise_syntax_error(context, "expected ':'") - } - } - raise_syntax_error(context) -} - -var DelCtx = $B.parser.DelCtx = function(context){ - // Class for keyword "del" - this.type = 'del' - this.parent = context - context.tree.push(this) - this.tree = [] - this.position = $token.value -} - -DelCtx.prototype.ast = function(){ - let targets - if(this.tree[0].type == 'list_or_tuple'){ - // Syntax "del a, b, c" - targets = this.tree[0].tree.map(x => x.ast()) - }else if(this.tree[0].type == 'expr' && - this.tree[0].tree[0].type == 'list_or_tuple'){ - // del(x[0]) is the same as del x[0], cf.issue #923 - targets = this.tree[0].tree[0].ast() - targets.ctx = new ast.Del() - for(var elt of targets.elts){ - elt.ctx = new ast.Del() - } - let ast_obj = new ast.Delete([targets]) - set_position(ast_obj, this.position) - return ast_obj - }else{ - targets = [this.tree[0].tree[0].ast()] - } - for(let target of targets){ - target.ctx = new ast.Del() - } - let ast_obj = new ast.Delete(targets) - set_position(ast_obj, this.position) - return ast_obj -} - -DelCtx.prototype.transition = function(token){ - var context = this - if(token == 'eol'){ - check_assignment(this.tree[0], {action: 'delete'}) - return transition(context.parent, token) - } - raise_syntax_error(context) -} - -var DictCompCtx = function(context){ - // create a List Comprehension - // context is a DictOrSetCtx - if(context.tree[0].type == 'expr' && - context.tree[0].tree[0].comprehension){ - // If the DictComp expression is a comprehension, reset its parent - // block to this - var comp = context.tree[0].tree[0] - comp.parent_block = this - } - this.type = 'dictcomp' - this.position = $token.value - this.comprehension = true - this.parent = context.parent - this.key = context.tree[0] - this.value = context.tree[1] - this.key.parent = this - this.value.parent = this - this.tree = [] - this.id = 'dictcomp' + $B.UUID() - this.parent_block = get_scope(context) - this.module = get_module(context).module - context.parent.tree[context.parent.tree.length - 1] = this - this.type = 'dictcomp' - Comprehension.make_comp(this, context) -} - -DictCompCtx.prototype.ast = function(){ - // ast.DictComp(key, value, generators) - // key, value is the part evaluated for each item - // generators is a list of comprehensions - if(this.value.ast === undefined){ - console.log('dict comp ast, no value.ast', this) - } - var ast_obj = new ast.DictComp( - this.key.ast(), - this.value.ast(), - Comprehension.generators(this.tree) - ) - set_position(ast_obj, this.position) - return ast_obj -} - -DictCompCtx.prototype.transition = function(token){ - var context = this - if(token == '}'){ - return this.parent - } - raise_syntax_error(context) -} - -var DictOrSetCtx = $B.parser.DictOrSetCtx = 
function(context){ - // Context for literal dictionaries or sets - // The real type (dist or set) is set inside transition - // as the attribute 'real' - this.type = 'dict_or_set' - this.real = 'dict_or_set' - this.expect = ',' - this.closed = false - this.position = $token.value - - this.nb_items = 0 - - this.parent = context - this.tree = [] - context.tree[context.tree.length] = this -} - -DictOrSetCtx.prototype.ast = function(){ - // Dict(expr* keys, expr* values) | Set(expr* elts) - var ast_obj - if(this.real == 'dict'){ - let keys = [], - values = [] - for(let i = 0, len = this.items.length; i < len; i++){ - if(this.items[i].type == 'expr' && - this.items[i].tree[0].type == 'kwd'){ - keys.push(_b_.None) - values.push(this.items[i].tree[0].tree[0].ast()) - }else{ - keys.push(this.items[i].ast()) - values.push(this.items[i + 1].ast()) - i++ - } - } - ast_obj = new ast.Dict(keys, values) - }else if(this.real == 'set'){ - var items = [] - for(let item of this.items){ - if(item.packed){ - var starred = new ast.Starred(item.ast(), - new ast.Load()) - set_position(starred, item.position) - items.push(starred) - }else{ - items.push(item.ast()) - } - } - ast_obj = new ast.Set(items) - } - set_position(ast_obj, this.position) - return ast_obj -} - -DictOrSetCtx.prototype.transition = function(token, value){ - var context = this - if(context.closed){ - switch(token) { - case '[': - return new AbstractExprCtx(new SubscripCtx(context.parent),false) - case '(': - return new CallArgCtx(new CallCtx(context.parent)) - } - return transition(context.parent, token, value) - }else{ - if(context.expect == ','){ - function check_last(){ - var last = $B.last(context.tree), - err_msg - if(last && last.wrong_assignment){ - // {x=1} - err_msg = "invalid syntax. Maybe you meant '==' or ':=' instead of '='?" - }else if(context.real == 'dict' && last.type == 'expr' && - last.tree[0].type == 'starred'){ - // {x: *12} - err_msg = 'cannot use a starred expression in a dictionary value' - }else if(context.real == 'set' && last.tree[0].type == 'kwd'){ - $token.value = last.position - raise_syntax_error(context) - } - if(err_msg){ - raise_syntax_error_known_range(context, - last.position, - last_position(last), - err_msg) - } - } - switch(token) { - case '}': - var last = $B.last(context.tree) - if(last.type == "expr" && last.tree[0].type == "kwd"){ - context.nb_items += 2 - }else if(last.type == "abstract_expr"){ - context.tree.pop() - }else{ - context.nb_items++ - } - check_last() - context.end_position = $token.value - if(context.real == 'dict_or_set'){ - // {**{1:2}} should be a dictionary, even though it - // contains elements and no colons, which makes it - // look like a set - for (var item of context.tree) { - if (item.type == "expr" && item.tree[0].type == "kwd") { - context.real = 'dict' - break - } - } - } - if(context.real == 'dict_or_set'){ - // {} should be a dictionary, but {1} should be a set - context.real = context.tree.length == 0 ? 
- 'dict' : 'set' - } - switch(context.real) { - case 'set': - context.items = context.tree - context.tree = [] - context.closed = true - return context - case 'dict': - if(context.tree.length && - $B.last(context.tree).type == 'abstract_expr'){ - raise_syntax_error(context, - "expression expected after dictionary key and ':'") - }else{ - if(context.nb_items % 2 != 0){ - raise_syntax_error(context, - "':' expected after dictionary key") - } - } - context.items = context.tree - context.tree = [] - context.closed = true - return context - } - raise_syntax_error(context) - break - case ',': - check_last() - var last = $B.last(context.tree) - if(last.type == "expr" && last.tree[0].type == "kwd"){ - context.nb_items += 2 - }else{ - context.nb_items++ - } - if(context.real == 'dict_or_set'){ - var last = context.tree[0] - context.real = (last.type == 'expr' && - last.tree[0].type == 'kwd') ? 'dict' : 'set' - } - if(context.real == 'dict' && context.nb_items % 2){ - raise_syntax_error(context, - "':' expected after dictionary key") - } - return new AbstractExprCtx(context, false) - case ':': - if(context.real == 'dict_or_set'){ - context.real = 'dict' - } - if(context.real == 'dict'){ - context.expect = 'value' - this.nb_items++ - context.value_pos = $token.value - return context - }else{ - raise_syntax_error(context) - } - break - case 'for': - // comprehension - if(context.real == "set" && context.tree.length > 1){ - $token.value = context.tree[0].position - raise_syntax_error(context, "did you forget " + - "parentheses around the comprehension target?") - } - var expr = context.tree[0], - err_msg - if(expr.type == 'expr'){ - if(expr.tree[0].type == 'kwd'){ - err_msg = 'dict unpacking cannot be used in dict comprehension' - }else if(expr.tree[0].type == 'starred'){ - err_msg = 'iterable unpacking cannot be used in comprehension' - } - if(err_msg){ - raise_syntax_error_known_range(context, - expr.position, - last_position(expr), - err_msg) - } - } - if(context.real == 'dict_or_set'){ - return new TargetListCtx(new ForExpr( - new SetCompCtx(this))) - }else{ - return new TargetListCtx(new ForExpr( - new DictCompCtx(this))) - } - } - raise_syntax_error(context) - }else if(context.expect == 'value'){ - if(python_keywords.indexOf(token) > -1){ - var ae = new AbstractExprCtx(context, false) - try{ - transition(ae, token, value) - context.tree.pop() - }catch(err){ - raise_syntax_error(context) - } - } - try{ - context.expect = ',' - return transition(new AbstractExprCtx(context, false), - token, value) - }catch(err){ - $token.value = context.value_pos - raise_syntax_error(context, "expression expected after " + - "dictionary key and ':'") - } - } - return transition(context.parent, token, value) - } -} - -var DoubleStarArgCtx = $B.parser.DoubleStarArgCtx = function(context){ - // Class for syntax "**kw" in a call - this.type = 'double_star_arg' - this.parent = context - this.tree = [] - context.tree[context.tree.length] = this -} - -DoubleStarArgCtx.prototype.transition = function(token, value){ - var context = this - switch(token){ - case 'id': - case 'imaginary': - case 'int': - case 'float': - case 'str': - case 'JoinedStr': - case 'bytes': - case '[': - case '(': - case '{': - case '.': - case 'not': - case 'lambda': - return transition(new AbstractExprCtx(context, false), - token, value) - case ',': - case ')': - return transition(context.parent, token) - case ':': - if(context.parent.parent.type == 'lambda'){ - return transition(context.parent.parent, token) - } - } - raise_syntax_error(context) 
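The `DictOrSetCtx` code above decides, once the closing brace is reached, whether a brace display is a dict or a set. The rules it encodes match plain Python and can be checked directly; nothing in this sketch is Brython-specific:

```python
print(type({}))            # <class 'dict'> : empty braces are a dict
print(type({1, 2}))        # <class 'set'>  : elements without colons make a set
print(type({1: 'a'}))      # <class 'dict'> : key/value pairs make a dict
print(type({**{1: 'a'}}))  # <class 'dict'> : "**" unpacking forces a dict,
                           #                  even though there is no colon
```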
-} - -var EllipsisCtx = $B.parser.EllipsisCtx = function(context){ - // Class for "..." - this.type = 'ellipsis' - this.parent = context - this.position = $token.value - context.tree[context.tree.length] = this -} - -EllipsisCtx.prototype.ast = function(){ - var ast_obj = new ast.Constant(_b_.Ellipsis) - set_position(ast_obj, this.position) - return ast_obj -} - -EllipsisCtx.prototype.transition = function(token, value){ - var context = this - return transition(context.parent, token, value) -} - -var EndOfPositionalCtx = $B.parser.$EndOfConditionalCtx = function(context){ - // Indicates the end of positional arguments in a function definition - // PEP 570 - this.type = "end_positional" - this.parent = context - context.has_end_positional = true - context.parent.pos_only = context.tree.length - context.tree.push(this) -} - -EndOfPositionalCtx.prototype.transition = function(token, value){ - var context = this - if(token == "," || token == ")"){ - return transition(context.parent, token, value) - } - if(token == 'op' && value == '*') { - raise_syntax_error(context, "expected comma between / and *") - } - raise_syntax_error(context) -} - -var ExceptCtx = $B.parser.ExceptCtx = function(context){ - // Class for keyword "except" - this.type = 'except' - this.position = $token.value - this.parent = context - context.tree[context.tree.length] = this - this.tree = [] - this.scope = get_scope(this) - var node = context.node, - rank = node.parent.children.indexOf(node), - ix = rank - 1 - while(node.parent.children[ix].context.tree[0].type != 'try'){ - ix-- - } - this.try_node = node.parent.children[ix] - this.is_first_child = rank == ix + 1 - if(this.try_node.context.is_trystar){ - this.expect = '*' - }else{ - this.expect = 'id' - } -} - -ExceptCtx.prototype.ast = function(){ - // ast.ExceptHandler(type, name, body) - var ast_obj = new ast.ExceptHandler( - this.tree.length == 1 ? this.tree[0].ast() : undefined, - this.has_alias ? 
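`EllipsisCtx` above turns the `...` token into a constant node. In Python the token simply denotes the `Ellipsis` singleton:

```python
x = ...
print(x is Ellipsis)   # True: "..." is the Ellipsis constant
print(bool(x))         # True: it is truthy, often used as a placeholder body
```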
this.tree[0].alias : undefined, - ast_body(this.parent) - ) - set_position(ast_obj, this.position) - return ast_obj -} - -ExceptCtx.prototype.transition = function(token, value){ - var context = this - if(token == 'op' && value == '*'){ - // syntax "except*" in PEP 654 - - if(context.is_first_child){ - // first "except" of the "try" - context.try_node.context.is_trystar = true - context.expect = 'id' - return context - }else if(context.expect != '*'){ - // if "try" already has non-starred excepts, raise SyntaxError - raise_syntax_error(context, - "cannot have both 'except' and 'except*' " + - "on the same 'try'") - }else{ - context.expect = 'id' - return context - } - }else if(context.expect == '*'){ - // if "try" already has starred excepts, raise SyntaxError - raise_syntax_error(context, - "cannot have both 'except' and 'except*' " + - "on the same 'try'") - } - - switch(token) { - case 'id': - case 'imaginary': - case 'int': - case 'float': - case 'str': - case 'JoinedStr': - case 'bytes': - case '[': - case '(': - case '{': - case 'not': - case 'lambda': - if(context.expect == 'id'){ - context.expect = 'as' - return transition(new AbstractExprCtx(context, false), - token, value) - } - } - switch(token){ - case 'as': - // only one alias allowed - if(context.expect == 'as' && - context.has_alias === undefined){ - context.expect = 'alias' - context.has_alias = true - return context - } - break - case 'id': - if(context.expect == 'alias'){ - context.expect = ':' - context.set_alias(value) - return context - } - break - case ':': - if (context.tree.length == 0 && context.try_node.context.is_trystar) { - raise_syntax_error(context, "expected one or more exception types") - } - var _ce = context.expect - if(_ce == 'id' || _ce == 'as' || _ce == ':'){ - return BodyCtx(context) - } - break - case '(': - if(context.expect == 'id' && context.tree.length == 0){ - context.parenth = true - return context - } - break - case ')': - if(context.expect == ',' || context.expect == 'as'){ - context.expect = 'as' - return context - } - break - case ',': - if(context.parenth !== undefined && - context.has_alias === undefined && - (context.expect == 'as' || context.expect == ',')){ - context.expect = 'id' - return context - }else if(context.parenth === undefined){ - raise_syntax_error(context, - "multiple exception types must be parenthesized") - } - break - case 'eol': - raise_syntax_error(context, "expected ':'") - } - raise_syntax_error(context) -} - -ExceptCtx.prototype.set_alias = function(alias){ - this.tree[0].alias = mangle_name(alias, this) -} - -var ExprCtx = $B.parser.ExprCtx = function(context, name, with_commas){ - // Base class for expressions - this.type = 'expr' - this.name = name - this.position = $token.value //context.position - // allow expression with comma-separted values, or a single value ? - this.with_commas = with_commas - this.expect = ',' // can be 'expr' or ',' - this.parent = context - if(context.packed){ - this.packed = context.packed - } - this.tree = [] - context.tree[context.tree.length] = this -} - -ExprCtx.prototype.ast = function(){ - var res = this.tree[0].ast() - if(this.packed){ - // return new ast.Starred(res) - }else if(this.annotation){ - res = new ast.AnnAssign( - res, - this.annotation.tree[0].ast(), - undefined, - this.$was_parenthesized ? 
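`ExceptCtx` above implements the PEP 654 rules: a single `try` cannot mix `except` and `except*`, and `except*` always needs an exception type. A small sketch of the corresponding Python behaviour (requires Python 3.11+; the quoted message is the one used in the code above):

```python
# Mixing the two forms is rejected at compile time.
try:
    compile("try:\n pass\nexcept* ValueError:\n pass\nexcept TypeError:\n pass",
            "<test>", "exec")
except SyntaxError as exc:
    print(exc.msg)   # cannot have both 'except' and 'except*' on the same 'try'

# except* matches a subgroup of an ExceptionGroup.
try:
    raise ExceptionGroup("demo", [ValueError("boom")])
except* ValueError as eg:
    print(type(eg).__name__)   # ExceptionGroup (the matched subgroup)
```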
0 : 1) - set_position(res, this.position) - } - return res -} - -ExprCtx.prototype.transition = function(token, value){ - var context = this - if(python_keywords.indexOf(token) > -1 && - ['as', 'else', 'if', 'for', 'from', 'in'].indexOf(token) == -1){ - raise_syntax_error(context) - } - if(context.parent.expect == 'star_target'){ - if(['pass', 'in', 'not', 'op', 'augm_assign', '=', ':=', 'if', 'eol']. - indexOf(token) > -1){ - return transition(context.parent, token, value) - } - } - switch(token) { - case 'bytes': - case 'float': - case 'id': - case 'imaginary': - case 'int': - case 'lambda': - case 'pass': - var msg = 'invalid syntax. Perhaps you forgot a comma?' - raise_syntax_error_known_range(context, - this.position, $token.value, msg) - break - case '{': - // Special case : "print {...}" must raise a SyntaxError - // with "Missing parenthesis"... - if(context.tree[0].type != "id" || - ["print", "exec"].indexOf(context.tree[0].value) == -1){ - raise_syntax_error(context) - } - return new AbstractExprCtx(new DictOrSetCtx(context), false) - case '[': - case '(': - case '.': - case 'not': - if(context.expect == 'expr'){ - context.expect = ',' - return transition(new AbstractExprCtx(context, false), - token, value) - } - } - switch(token) { - case 'not': - if(context.expect == ','){ - return new ExprNot(context) - } - break - case 'in': - if(context.parent.type == 'target_list'){ - // expr used for target list - return transition(context.parent, token) - } - if(context.expect == ','){ - return transition(context, 'op', 'in') - } - break - case ',': - if(context.expect == ','){ - if(context.name == 'iterator' && - context.parent.parent.type != 'node'){ - // case "(x for x in expr, y)" : we must detect that the - // expression is the iterator of a generator expression - var for_expr = context.parent.parent - raise_syntax_error_known_range(context, - first_position(for_expr), last_position(for_expr), - 'Generator expression must be parenthesized') - } - if(context.with_commas || - ["assign", "return"].indexOf(context.parent.type) > -1){ - if(parent_match(context, {type: "yield", "from": true})){ - raise_syntax_error(context, "no implicit tuple for yield from") - } - // implicit tuple - context.parent.tree.pop() - var tuple = new ListOrTupleCtx(context.parent, - 'tuple') - tuple.implicit = true - tuple.has_comma = true - tuple.tree = [context] - context.parent = tuple - return tuple - } - } - return transition(context.parent, token) - case '.': - return new AttrCtx(context) - case '[': - if(context.tree[0].type == 'id'){ - // ids in "for" targets have attribute "bound" set - // remove it if target is a subscript - delete context.tree[0].bound - } - return new AbstractExprCtx(new SubscripCtx(context), true) - case '(': - return new CallCtx(context) - case 'op': - if($op_weight[value] === undefined){ - // case of "!" 
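Part of `ExprCtx.transition` above builds an "implicit tuple" when a comma follows an expression, and raises the "Perhaps you forgot a comma?" error when two literals sit side by side. Both behaviours mirror plain Python (a hedged sketch; exact CPython wording may vary by version):

```python
point = 1, 2              # a comma builds a tuple even without parentheses
assert point == (1, 2)

try:
    compile("x = [1 2]", "<test>", "exec")   # missing comma between elements
except SyntaxError as exc:
    print(exc.msg)        # e.g. "invalid syntax. Perhaps you forgot a comma?"
```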
- var frs = parent_match(context, {type: "fstring_replacement_field"}) - if(frs){ - return transition(frs, token, value) - } - raise_syntax_error(context) - } - if(context.parent.type == 'withitem' && context.parent.tree.length == 2){ - raise_syntax_error(context, "expected ':'") - } - if(value == '~'){ - raise_syntax_error(context) - } - // handle operator precedence ; fasten seat belt ;-) - var op_parent = context.parent, - op = value - - // conditional expressions have the lowest priority - if(op_parent.type == 'ternary' && op_parent.in_else){ - var new_op = new OpCtx(context, op) - return new AbstractExprCtx(new_op, false) - } - - // Climb up the tree until we find an operation op1. - // If it has a lower precedence than the new token op, replace it by - // an operation with op, whose left side is the operation op1. - var op1 = context.parent, - repl = null - while(1){ - if(op1.type == 'unary' && op !== '**'){ - repl = op1 - op1 = op1.parent - }else if(op1.type == 'expr'){ - op1 = op1.parent - }else if(op1.type == 'op' && - $op_weight[op1.op] >= $op_weight[op] && - ! (op1.op == '**' && op == '**')){ // cf. issue #250 - repl = op1 - op1 = op1.parent - }else if(op1.type == "not" && - $op_weight['not'] > $op_weight[op]){ - repl = op1 - op1 = op1.parent - }else{ - break - } - } - - if(repl === null){ - if(op1.type == 'op'){ - // current expr is inside an operation with lower precedence - // than op, eg (+ a b) with op == '*' - // Replace this expression by (+ a (* b ?)) - var right = op1.tree.pop(), - expr = new ExprCtx(op1, 'operand', context.with_commas) - expr.tree.push(right) - right.parent = expr - var new_op = new OpCtx(expr, op) - return new AbstractExprCtx(new_op, false) - } - var position = context.position - - while(context.parent !== op1){ - context = context.parent - op_parent = context.parent - } - context.parent.tree.pop() - var expr = new ExprCtx(op_parent, 'operand', - context.with_commas) - expr.position = position - expr.expect = ',' - context.parent = expr - var new_op = new OpCtx(context, op) - return new AbstractExprCtx(new_op, false) - }else{ - // issue #371 - if(op === 'and' || op === 'or'){ - while(repl.parent.type == 'not' || - (repl.parent.type == 'expr' && - repl.parent.parent.type == 'not')){ - // 'and' and 'or' have higher precedence than 'not' - repl = repl.parent - op_parent = repl.parent - } - } - } - if(repl.type == 'op'){ - var _flag = false - switch(repl.op){ - case '<': - case '<=': - case '==': - case '!=': - case 'is': - case '>=': - case '>': - _flag = true - } - if(_flag) { - switch(op) { - case '<': - case '<=': - case '==': - case '!=': - case 'is': - case '>=': - case '>': - case 'in': - case 'not_in': - // chained comparisons such as c1 <= c2 < c3 - repl.ops = repl.ops || [repl.op] - repl.ops.push(op) - return new AbstractExprCtx(repl, false) - } - } - } - repl.parent.tree.pop() - var expr = new ExprCtx(repl.parent, 'operand', false) - expr.tree = [op1] - expr.position = op1.position - repl.parent = expr - var new_op = new OpCtx(repl,op) // replace old operation - return new AbstractExprCtx(new_op,false) - case 'augm_assign': - check_assignment(context, {augmented: true}) - var parent = context - while(parent){ - if(parent.type == "assign" || parent.type == "augm_assign"){ - raise_syntax_error(context, - "augmented assignment inside assignment") - }else if(parent.type == "op"){ - raise_syntax_error(context, "cannot assign to operator") - }else if(parent.type == "list_or_tuple"){ - raise_syntax_error(context, `'${parent.real}' is an illegal` + - " 
expression for augmented assignment") - }else if(['list', 'tuple'].indexOf(parent.name) > -1){ - raise_syntax_error(context, `'${parent.name}' is an illegal` + - " expression for augmented assignment") - }else if(['dict_or_set'].indexOf(parent.name) > -1){ - raise_syntax_error(context, `'${parent.tree[0].real } display'` + - " is an illegal expression for augmented assignment") - } - parent = parent.parent - } - if(context.expect == ','){ - return new AbstractExprCtx( - new AugmentedAssignCtx(context, value), true) - } - return transition(context.parent, token, value) - case ":": // slice or annotation - // slice only if expr parent is a subscription, or a tuple - // inside a subscription, or a slice - if(context.parent.type == "sub" || - (context.parent.type == "list_or_tuple" && - context.parent.parent.type == "sub")){ - return new AbstractExprCtx(new SliceCtx(context.parent), false) - }else if(context.parent.type == "slice"){ - return transition(context.parent, token, value) - }else if(context.parent.type == "node"){ - // annotation - if(context.tree.length == 1){ - var child = context.tree[0] - check_assignment(child) - if(["id", "sub", "attribute"].indexOf(child.type) > -1){ - return new AbstractExprCtx(new AnnotationCtx(context), false) - }else if(child.real == "tuple" && child.expect == "," && - child.tree.length == 1){ - return new AbstractExprCtx(new AnnotationCtx(child.tree[0]), false) - } - } - var type = context.tree[0].real - raise_syntax_error_known_range(context, - context.position, - last_position(context), - `only single target (not ${type}) can be annotated`) - } - break - case '=': - var frs = parent_match(context, {type: 'fstring_replacement_field'}) - if(frs){ - return transition(frs, token, value) - } - var call_arg = parent_match(context, {type: 'call_arg'}) - // Special case for '=' inside a call - try{ - check_assignment(context) - }catch(err){ - if(call_arg){ - var ctx = context - while(ctx.parent !== call_arg){ - ctx = ctx.parent - } - raise_syntax_error_known_range(ctx, - ctx.position, - $token.value, - 'expression cannot contain assignment, perhaps you meant "=="?') - }else{ - - throw err - } - } - var annotation - if(context.expect == ','){ - if(context.parent.type == "call_arg"){ - // issue 708 - if(context.tree[0].type != "id"){ - raise_syntax_error_known_range(context, - context.position, - $token.value, - 'expression cannot contain assignment, perhaps you meant "=="?') - } - return new AbstractExprCtx(new KwArgCtx(context), true) - }else if(annotation = parent_match(context, {type: "annotation"})){ - return transition(annotation, token, value) - }else if(context.parent.type == "op"){ - // issue 811 - raise_syntax_error(context, "cannot assign to operator") - }else if(context.parent.type == "not"){ - // issue 1496 - raise_syntax_error(context, "cannot assign to operator") - }else if(context.parent.type == "with"){ - raise_syntax_error(context, "expected :") - }else if(context.parent.type == 'dict_or_set'){ - if(context.parent.expect == ','){ - // We could raise a SyntaxError here, but CPython waits - // until the right part of the assignment is finished - context.wrong_assignment = true - return transition(context, ':=') - } - }else if(context.parent.type == "list_or_tuple"){ - // issue 973 - for(var i = 0; i < context.parent.tree.length; i++){ - var item = context.parent.tree[i] - try{ - check_assignment(item, {once: true}) - }catch(err){ - console.log(context) - raise_syntax_error(context, "invalid syntax. 
" + - "Maybe you meant '==' or ':=' instead of '='?") - } - if(item.type == "expr" && item.name == "operand"){ - raise_syntax_error(context, "cannot assign to operator") - } - } - // issue 1875 - if(context.parent.real == 'list' || - (context.parent.real == 'tuple' && - ! context.parent.implicit)){ - raise_syntax_error(context, "invalid syntax. " + - "Maybe you meant '==' or ':=' instead of '='?") - } - }else if(context.parent.type == "expr" && - context.parent.name == "iterator"){ - raise_syntax_error(context, 'expected :') - }else if(context.parent.type == "lambda"){ - if(context.parent.parent.parent.type != "node"){ - raise_syntax_error(context, 'expression cannot contain' + - ' assignment, perhaps you meant "=="?') - } - }else if(context.parent.type == 'target_list'){ - raise_syntax_error(context, "(assign to target in iteration)") - } - while(context.parent !== undefined){ - context = context.parent - if(context.type == "condition"){ - raise_syntax_error(context, "invalid syntax. Maybe you" + - " meant '==' or ':=' instead of '='?") - }else if(context.type == "augm_assign"){ - raise_syntax_error(context, - "(assignment inside augmented assignment)") - } - } - context = context.tree[0] - return new AbstractExprCtx(new AssignCtx(context), true) - } - break - case ':=': - // PEP 572 : assignment expression - var ptype = context.parent.type - if(["node", "assign", "kwarg", "annotation"]. - indexOf(ptype) > -1){ - raise_syntax_error(context, - '(:= invalid, parent ' + ptype + ')') - }else if(ptype == "func_arg_id" && - context.parent.tree.length > 0){ - // def foo(answer = p := 42): - raise_syntax_error(context, - '(:= invalid, parent ' + ptype + ')') - }else if(ptype == "call_arg" && - context.parent.parent.type == "call" && - context.parent.parent.parent.type == "lambda"){ - // lambda x := 1 - raise_syntax_error(context, - '(:= invalid inside function arguments)' ) - } - if(context.tree.length == 1 && context.tree[0].type == "id"){ - var scope = get_scope(context), - name = context.tree[0].value - if(['None', 'True', 'False'].indexOf(name) > -1){ - raise_syntax_error(context, - `cannot use assignment expressions with ${name}`) - }else if(name == '__debug__'){ - raise_syntax_error(context, 'cannot assign to __debug__') - } - while(scope.comprehension){ - scope = scope.parent_block - } - return new AbstractExprCtx(new NamedExprCtx(context), false) - } - raise_syntax_error(context) - break - case 'if': - var in_comp = false, - ctx = context.parent - while(ctx){ - if(ctx.comprehension){ - in_comp = true - break - }else if(ctx.type == "list_or_tuple"){ - // In parenthised expression, eg the second "if" in - // flds=[f for f in fields if (x if y is None else z)] - break - }else if(ctx.type == 'comp_for'){ - break - }else if(ctx.type == 'comp_if'){ - // [x for x in A if condition if ...] - in_comp = true - break - }else if(ctx.type == 'call_arg' || ctx.type == 'sub'){ - // f(x if ...) - // f[x if ...] - break - }else if(ctx.type == 'expr'){ - if(ctx.parent.type == 'comp_iterable'){ - // [x for x in a + b if ...] 
- in_comp = true - break - } - } - ctx = ctx.parent - } - if(in_comp){ - break - } - // Ternary operator : "expr1 if cond else expr2" - // If the part before "if" is an operation, apply operator - // precedence - // Example : print(1+n if n else 0) - ctx = context - while(ctx.parent && - (ctx.parent.type == 'op' || - ctx.parent.type == 'not' || - ctx.parent.type == 'unary' || - (ctx.parent.type == "expr" && ctx.parent.name == "operand"))){ - ctx = ctx.parent - } - return new AbstractExprCtx(new TernaryCtx(ctx), false) - case 'JoinedStr': - if(context.tree.length == 1 && context.tree[0] instanceof FStringCtx){ - return context.tree[0] - }else{ - raise_syntax_error_known_range(context, this.position, - $token.value, 'invalid syntax. Perhaps you forgot a comma?') - } - break - case 'str': - if(context.tree.length == 1 && context.tree[0] instanceof FStringCtx){ - new StringCtx(context.tree[0], value) - return context - }else{ - raise_syntax_error_known_range(context, this.position, - $token.value, 'invalid syntax. Perhaps you forgot a comma?') - } - break - case 'eol': - // Special case for print and exec - if(context.tree.length == 2 && - context.tree[0].type == "id" && - ["print", "exec"].indexOf(context.tree[0].value) > -1){ - var func = context.tree[0].value - raise_syntax_error_known_range(context, - context.position, - $token.value, - "Missing parentheses in call " + - `to '${func}'. Did you mean ${func}(...)?`) - } - if(["dict_or_set", "list_or_tuple", "str"].indexOf(context.parent.type) == -1){ - var t = context.tree[0] - if(t.type == "starred"){ - $token.value = t.position - if(parent_match(context, {type: 'del'})){ - raise_syntax_error(context, 'cannot delete starred') - } - if (['assign', 'augm_assign', 'node'].indexOf(context.parent.type) > -1) { - raise_syntax_error_known_range(context, - t.position, - last_position(t), - "can't use starred expression here") - } - raise_syntax_error_known_range(context, - t.position, - last_position(t), - "invalid syntax") - }else if(t.type == "call" && t.func.type == "starred"){ - $token.value = t.func.position - raise_syntax_error(context, - "can't use starred expression here") - } - } - } - return transition(context.parent,token) -} - -var ExprNot = $B.parser.ExprNot = function(context){ - // Class used temporarily for 'x not', only accepts 'in' as next token - // Never remains in the final tree, so there is no need to define to_js() - this.type = 'expr_not' - this.parent = context - this.tree = [] - context.tree[context.tree.length] = this -} - -ExprNot.prototype.transition = function(token){ - var context = this - if(token == 'in'){ // expr not in : operator - context.parent.tree.pop() - // Apply operator precedence to the expression above this instance - // eg "a + b not in ?" becomes "(a + b) not in ?") - var op1 = context.parent - while(op1.type !== 'expr'){ - op1 = op1.parent - } - return op1.transition('op', 'not_in') - } - raise_syntax_error(context) -} - -var ForExpr = $B.parser.ForExpr = function(context){ - // Class for keyword "for" - if(context.node && context.node.parent.is_comp){ - // first "for" inside a comprehension - context.node.parent.first_for = this - } - this.type = 'for' - this.parent = context - this.tree = [] - this.position = $token.value - context.tree.push(this) - this.scope = get_scope(this) - this.module = this.scope.module -} - -ForExpr.prototype.ast = function(){ - // ast.For(target, iter, body, orelse, type_comment) - var target = this.tree[0].ast(), - iter = this.tree[1].ast(), - orelse = this.orelse ? 
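The `eol` branch above reproduces CPython's dedicated error for Python 2 style `print`/`exec` statements. The same message can be observed directly:

```python
try:
    compile('print "hello"', "<test>", "exec")
except SyntaxError as exc:
    print(exc.msg)
    # Missing parentheses in call to 'print'. Did you mean print(...)?
```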
this.orelse.ast() : [], - type_comment, - body = ast_body(this.parent) - set_ctx_to_store(target) - var klass = this.async ? ast.AsyncFor : ast.For - var ast_obj = new klass(target, iter, body, orelse, type_comment) - set_position(ast_obj, - this.async ? this.async.position : this.position, - last_position(this)) - return ast_obj -} - -ForExpr.prototype.transition = function(token, value){ - var context = this - switch(token) { - case 'in': - if(context.tree[0].tree.length == 0){ - // issue 1293 : "for in range(n)" - raise_syntax_error(context, - "(missing target between 'for' and 'in')") - } - check_assignment(context.tree[0]) - return new AbstractExprCtx( - new ExprCtx(context, 'iterator', true), false) - case ':': - check_assignment(context.tree[0]) - if(context.tree.length < 2 || // issue 638 - context.tree[1].tree[0].type == "abstract_expr"){ - raise_syntax_error(context) - } - return BodyCtx(context) - } - if(this.parent.comprehension){ - switch(token){ - case ']': - if(this.parent.type == 'listcomp'){ - return transition(this.parent, token, value) - } - break - case ')': - if(this.parent.type == 'genexpr'){ - return transition(this.parent, token, value) - } - break - case '}': - if(this.parent.type == 'dictcomp' || - this.parent.type == 'setcomp'){ - return transition(this.parent, token, value) - } - break - case 'for': - return new TargetListCtx(new ForExpr(this.parent)) - case 'if': - var if_ctx = new ConditionCtx(this.parent, 'if') - if_ctx.in_comp = this.parent - return new AbstractExprCtx(if_ctx, false) - - } - } - if(token == 'eol'){ - $token.value = last_position(context) - if(context.tree.length == 2){ - raise_syntax_error(context, "expected ':'") - } - } - raise_syntax_error(context) -} - -var FromCtx = $B.parser.FromCtx = function(context){ - // Class for keyword "from" for imports - this.type = 'from' - this.parent = context - this.module = '' - this.names = [] - this.names_position = [] - this.position = $token.value - - context.tree[context.tree.length] = this - this.expect = 'module' - this.scope = get_scope(this) -} - -FromCtx.prototype.ast = function(){ - // ast.ImportFrom(module, names, level) - var module = this.module, - level = 0, - alias - while(module.length > 0 && module.startsWith('.')){ - level++ - module = module.substr(1) - } - var res = { - module: module || undefined, - names: [], - level - } - for(var i=0, len=this.names.length; i < len; i++){ - var name = this.names[i], - position = this.names_position[i] - if(Array.isArray(name)){ - alias = new ast.alias(name[0], name[1]) - }else{ - alias = new ast.alias(name) - } - set_position(alias, position) - res.names.push(alias) - } - var ast_obj = new ast.ImportFrom(res.module, res.names, res.level) - set_position(ast_obj, this.position) - return ast_obj -} - -FromCtx.prototype.add_name = function(name){ - this.names.push(name) - this.names_position.push($token.value) - if(name == '*'){ - this.scope.blurred = true - } - this.end_position = $token.value -} - -FromCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case 'id': - if(context.expect == 'module'){ - context.module += value - return context - }else if(context.expect == 'id'){ - context.add_name(value) - context.expect = ',' - return context - }else if(context.expect == 'alias'){ - context.names[context.names.length - 1] = - [$B.last(context.names), value] - context.expect = ',' - return context - } - break - case '.': - if(context.expect == 'module'){ - if(token == 'id'){context.module += value} - 
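`FromCtx.ast` above counts the leading dots of the module name and stores them as the `level` of the generated `ast.ImportFrom` node, exactly as CPython's `ast` module does:

```python
import ast

tree = ast.parse("from ..pkg import mod as m")
node = tree.body[0]
print(type(node).__name__, node.module, node.level)   # ImportFrom pkg 2
print(node.names[0].name, node.names[0].asname)       # mod m
```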
else{context.module += '.'} - return context - } - break - case 'ellipsis': - if(context.expect == 'module'){ - context.module += '...' - return context - } - break - case 'import': - if(context.names.length > 0){ // issue 1850 - raise_syntax_error(context, - "only one 'import' allowed after 'from'") - } - if(context.expect == 'module'){ - context.expect = 'id' - return context - } - break - case 'op': - if(value == '*' && context.expect == 'id' && - context.names.length == 0){ - if(get_scope(context).ntype !== 'module'){ - raise_syntax_error(context, - "import * only allowed at module level") - } - context.add_name('*') - context.expect = 'eol' - return context - }else{ - raise_syntax_error(context) - } - break - case ',': - if(context.expect == ','){ - context.expect = 'id' - return context - } - break - case 'eol': - switch(context.expect) { - case ',': - case 'eol': - return transition(context.parent, token) - case 'id': - raise_syntax_error(context, - 'trailing comma not allowed without ' + - 'surrounding parentheses') - break - default: - raise_syntax_error(context) - } - break - case 'as': - if(context.expect == ',' || context.expect == 'eol'){ - context.expect = 'alias' - return context - } - break - case '(': - if(context.expect == 'id'){ - context.expect = 'id' - return context - } - break - case ')': - if(context.expect == ',' || context.expect == 'id'){ - context.expect = 'eol' - return context - } - } - raise_syntax_error(context) - -} - -function escape_quotes(s, quotes){ - if(quotes.length == 1){ - return quotes + s + quotes - }else{ - var quote = quotes[0] - return quote + s.replace(new RegExp(quote, 'g'), '\\' + quote) + quote - } -} - -var FStringCtx = $B.parser.FStringCtx = function(context, start){ - // Class for f-strings. start is prefix + quote - for(var i = 0; i < start.length; i++){ - if(start[i] == '"' || start[i] == "'"){ - this.prefix = start.substr(0, i) - this.quotes = start.substr(i) - break - } - } - this.type = 'fstring' - this.parent = context - this.tree = [] - this.position = $token.value - this.scope = get_scope(context) - context.tree.push(this) - this.raw = this.prefix.toLowerCase().indexOf('r') > -1 -} - -FStringCtx.prototype.transition = function(token, value){ - var context = this - if(token == 'middle'){ - new StringCtx(context, escape_quotes(value, this.quotes)) - return context - }else if(token == '{'){ - return new AbstractExprCtx(new FStringReplacementFieldCtx(context), false) - }else if(token == 'end'){ - return context.parent - } - raise_syntax_error(context) -} - -FStringCtx.prototype.ast = function(){ - var res = { - type: 'JoinedStr', - values: [] - } - var state - for(var item of this.tree){ - if(item instanceof StringCtx){ - if(state == 'string'){ - // eg in "'ab' f'c{x}'" - $B.last(res.values).value += item.value - }else{ - let item_ast = new ast.Constant(item.value) - set_position(item_ast, item.position) - res.values.push(item_ast) - } - state = 'string' - }else{ - let item_ast = item.ast() - set_position(item_ast, item.position) - res.values.push(item_ast) - state = 'formatted_value' - } - } - var ast_obj = new ast.JoinedStr(res.values) - set_position(ast_obj, this.position) - return ast_obj -} - -var FStringReplacementFieldCtx = - $B.parser.FStringReplacementFieldCtx = function(context){ - this.type = 'fstring_replacement_field' - this.tree = [] - this.parent = context - this.position = $token.value - context.tree.push(this) -} - -FStringReplacementFieldCtx.prototype.transition = function(token, value){ - var context = this - - 
if(token == '='){ - if(context.equal_sign_pos){ - raise_syntax_error(context) - } - var expr_text = context.position.line.substring( - context.position.start[1] + 1, $token.value.start[1]) - // introduce a string with epxression text before replacement field - var quotes = context.parent.quotes - context.formula = new StringCtx(context.parent, escape_quotes(expr_text + '=', quotes)) - var s = context.parent.tree.pop() - context.parent.tree.splice(context.parent.tree.length - 1, 0, s) - context.equal_sign_pos = $token.value.start - return context - }else if(context.equal_sign_pos){ - // retain whitespaces between "=" and the next part of the - // replacement field, eg 'f"{x = :.1f}"' - if(! context.insert_whitespace){ - var nb_ws = $token.value.start[1] - context.equal_sign_pos[1] - if(nb_ws > 1){ - context.formula.value += ' '.repeat(nb_ws - 1) - } - context.insert_whitespace = true - } - } - if(token == 'op' && value == '!'){ - context.expect = 'id' - return context - }else if(token == ':'){ - return new FStringFormatSpecCtx(context) - }else if(token == '}'){ - if(context.tree.length == 1 && - context.tree[0] instanceof AbstractExprCtx){ - raise_syntax_error(context, - "f-string: valid expression required before '}'") - } - return context.parent - }else if(token == 'id' && this.expect == 'id'){ - if('sra'.indexOf(value) > -1){ - context.conversion = value - delete this.expect - return context - } - raise_syntax_error(context, `unknown conversion type ${value}`) - } - raise_syntax_error(context) -} - -FStringReplacementFieldCtx.prototype.ast = function(){ - var value = this.tree[0].ast(), - format = this.tree[1] - var conv_num = {a: 97, r: 114, s: 115}, - conversion = conv_num[this.conversion] || -1 - if(format !== undefined){ - format = format.ast() - } - var res = new ast.FormattedValue( - value, - conversion, - format) - set_position(res, this.position) - return res -} - - -var FStringFormatSpecCtx = - $B.parser.FStringFormatSpecCtx = function(context){ - this.type = 'fstring_format_spec' - this.tree = [] - this.parent = context - this.position = $token.value - context.tree.push(this) -} - -FStringFormatSpecCtx.prototype.transition = function(token, value){ - var context = this - if(token == 'middle'){ - var quotes = this.parent.parent.quotes - new StringCtx(context, escape_quotes(value, quotes)) - return context - }else if(token == '{'){ - return new AbstractExprCtx(new FStringReplacementFieldCtx(context), false) - }else if(token == '}'){ - return transition(context.parent, token, value) - } - raise_syntax_error(context) -} - -FStringFormatSpecCtx.prototype.ast = function(){ - if(this.tree.length == 1){ - return this.tree[0].ast() - }else{ - return FStringCtx.prototype.ast.call(this) - } -} - -var FuncArgs = $B.parser.FuncArgs = function(context){ - // Class for arguments in a function definition - this.type = 'func_args' - this.parent = context - this.tree = [] - this.names = [] - context.tree[context.tree.length] = this - - this.expect = 'id' - this.has_default = false - this.has_star_arg = false - this.has_kw_arg = false -} - -FuncArgs.prototype.ast = function(){ - var args = { - posonlyargs: [], - args: [], - kwonlyargs: [], - kw_defaults: [], - defaults: [] - }, - state = 'arg', - default_value - for(var arg of this.tree){ - if(arg.type == 'end_positional'){ - args.posonlyargs = args.args - args.args = [] - }else if(arg.type == 'func_star_arg'){ - state = 'kwonly' - if(arg.op == '*' && arg.name != '*'){ - args.vararg = new ast.arg(arg.name) - if(arg.annotation){ - 
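`FStringReplacementFieldCtx` above implements the f-string `=` debugging specifier: the expression text, the `=` and any surrounding spaces are kept as literal text before the formatted value, as in the `f"{x = :.1f}"` example from the code. A quick check in Python 3.8+:

```python
x = 2.71828
print(f"{x=}")          # x=2.71828 : the expression text is kept verbatim
print(f"{x = :.2f}")    # x = 2.72  : spaces around '=' are preserved and the
                        #             format spec applies to the value
name = "brython"
print(f"{name!r:>12}")  #    'brython' : !r conversion, then the format spec
```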
args.vararg.annotation = arg.annotation.tree[0].ast() - } - set_position(args.vararg, arg.position) - }else if(arg.op == '**'){ - args.kwarg = new ast.arg(arg.name) - if(arg.annotation){ - args.kwarg.annotation = arg.annotation.tree[0].ast() - } - set_position(args.kwarg, arg.position) - } - }else{ - default_value = false - if(arg.has_default){ - default_value = arg.tree[0].ast() - } - var argument = new ast.arg(arg.name) - set_position(argument, arg.position, - last_position(arg)) - if(arg.annotation){ - argument.annotation = arg.annotation.tree[0].ast() - } - if(state == 'kwonly'){ - args.kwonlyargs.push(argument) - if(default_value){ - args.kw_defaults.push(default_value) - }else{ - args.kw_defaults.push(_b_.None) - } - }else{ - args.args.push(argument) - if(default_value){ - args.defaults.push(default_value) - } - } - } - } - // ast.arguments(posonlyargs, args, vararg, kwonlyargs, kw_defaults, kwarg, defaults) - var res = new ast.arguments(args.posonlyargs, args.args, args.vararg, - args.kwonlyargs, args.kw_defaults, args.kwarg, args.defaults) - return res -} - -FuncArgs.prototype.transition = function(token, value){ - var context = this - function check(){ - if(context.tree.length == 0){ - return - } - var last = $B.last(context.tree) - if(context.has_default && ! last.has_default){ - if(last.type == 'func_star_arg' || - last.type == 'end_positional'){ - return - } - if(context.has_star_arg){ - // non-default arg after default arg is allowed for - // keyword-only parameters, eg arg "z" in "f(x, *, y=1, z)" - return - } - raise_syntax_error(context, - 'non-default argument follows default argument') - } - if(last.has_default){ - context.has_default = true - } - } - - function check_last(){ - var last = $B.last(context.tree) - if(last && last.type == "func_star_arg"){ - if(last.name == "*"){ - // Form "def f(x, *)" is invalid - raise_syntax_error(context, - 'named arguments must follow bare *') - - } - } - } - - switch (token) { - case 'id': - if(context.has_kw_arg){ - raise_syntax_error(context, 'arguments cannot follow var-keyword argument') - } - if(context.expect == 'id'){ - context.expect = ',' - if(context.names.indexOf(value) > -1){ - raise_syntax_error(context, - 'duplicate argument ' + value + - ' in function definition') - } - } - return new FuncArgIdCtx(context, value) - case ',': - if(context.expect == ','){ - check() - context.expect = 'id' - return context - } - raise_syntax_error(context) - break - case ')': - check() - check_last() - return transition(context.parent, token, value) - case 'op': - if(context.has_kw_arg){ - raise_syntax_error(context, "arguments cannot follow var-keyword argument") - } - var op = value - context.expect = ',' - if(op == '*'){ - if(context.has_star_arg){ - raise_syntax_error(context, "* argument may appear only once") - } - return new FuncStarArgCtx(context, '*') - }else if(op == '**'){ - return new FuncStarArgCtx(context, '**') - }else if(op == '/'){ // PEP 570 - if(context.tree.length == 0){ - raise_syntax_error(context, 'at least one argument must precede /') - }else if(context.has_end_positional){ - raise_syntax_error(context, '/ may appear only once') - }else if(context.has_star_arg){ - raise_syntax_error(context, '/ must be ahead of *') - } - return new EndOfPositionalCtx(context) - } - raise_syntax_error(context) - break - case ':': - if(context.parent.type == "lambda"){ - return transition(context.parent, token) - } - case '(': - let type_name = context.parent.type == 'def' ? 
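`FuncArgs.transition` above enforces the parameter-list rules: `/` ends the positional-only section (PEP 570), a bare `*` starts keyword-only parameters, a parameter without a default may not follow one with a default (except after `*`), and duplicate names are rejected. A compact illustration (the SyntaxError wording varies between CPython versions; the comment quotes the message used in the code above):

```python
def f(a, b=1, /, c=2, *, d, e=3):
    # a, b : positional-only (before "/")
    # c    : positional or keyword
    # d, e : keyword-only (after "*"); d having no default is allowed here
    return (a, b, c, d, e)

print(f(10, c=5, d=6))        # (10, 1, 5, 6, 3)

try:
    compile("def g(x=1, y): pass", "<test>", "exec")
except SyntaxError as exc:
    print(exc.msg)            # non-default argument follows default argument
```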
'Function' : 'Lambda expression' - raise_syntax_error(context, `${type_name} parameters cannot be parenthesized`) - } - raise_syntax_error(context) -} - -var FuncArgIdCtx = $B.parser.FuncArgIdCtx = function(context, name){ - // id in function arguments - // may be followed by = for default value - this.type = 'func_arg_id' - if(["None", "True", "False"].indexOf(name) > -1){ - raise_syntax_error(context) // invalid argument name - } - if(name == '__debug__'){ - raise_syntax_error(context, 'cannot assign to __debug__') - } - - this.name = name - this.parent = context - this.position = $token.value - - if(context.has_star_arg){ - context.parent.after_star.push(name) - }else{ - context.parent.positional_list.push(name) - } - this.tree = [] - context.tree[context.tree.length] = this - this.expect = '=' -} - -FuncArgIdCtx.prototype.transition = function(token){ - var context = this - switch(token) { - case '=': - if(context.expect == '='){ - context.has_default = true - var def_ctx = context.parent.parent - if(context.parent.has_star_arg){ - def_ctx.default_list.push(def_ctx.after_star.pop()) - }else{ - def_ctx.default_list.push(def_ctx.positional_list.pop()) - } - return new AbstractExprCtx(context, false) - } - break - case ',': - case ')': - if(context.parent.has_default && context.tree.length == 0 && - context.parent.has_star_arg === undefined){ - raise_syntax_error(context, - 'non-default argument follows default argument') - }else{ - return transition(context.parent, token) - } - break - case ':': - if(context.parent.parent.type == "lambda"){ - // end of parameters - return transition(context.parent.parent, ":") - } - // annotation associated with a function parameter - if(context.has_default){ // issue 610 - raise_syntax_error(context) - } - return new AbstractExprCtx(new AnnotationCtx(context), - false) - } - raise_syntax_error(context) -} - -var FuncStarArgCtx = $B.parser.FuncStarArgCtx = function(context,op){ - // Class for "star argument" in a function definition : f(*args) - this.type = 'func_star_arg' - this.op = op - this.parent = context - this.node = get_node(this) - this.position = $token.value - - context.has_star_arg = op == '*' - context.has_kw_arg = op == '**' - context.tree[context.tree.length] = this -} - -FuncStarArgCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case 'id': - if(context.name === undefined){ - if(context.parent.names.indexOf(value) > -1){ - raise_syntax_error(context, - 'duplicate argument ' + value + - ' in function definition') - } - } - if(["None", "True", "False"].indexOf(value) > -1){ - raise_syntax_error(context) // invalid starred argument name - } - context.set_name(value) - context.parent.names.push(value) - return context - case ',': - case ')': - if(context.name === undefined){ - // anonymous star arg - found in configparser - context.set_name('*') - context.parent.names.push('*') - } - return transition(context.parent, token) - case ':': - if(context.parent.parent.type == "lambda"){ - // end of parameters - if(context.name === undefined){ - raise_syntax_error(context, - 'named arguments must follow bare *') - } - return transition(context.parent.parent, ":") - } - // annotation associated with a function parameter - if(context.name === undefined){ - raise_syntax_error(context, - '(annotation on an unnamed parameter)') - } - return new AbstractExprCtx( - new AnnotationCtx(context), false) - case '=': - if (context.op == '*') { - raise_syntax_error(context, 'var-positional argument cannot have default 
value') - } - raise_syntax_error(context, 'var-keyword argument cannot have default value') - - } - raise_syntax_error(context) -} - -FuncStarArgCtx.prototype.set_name = function(name){ - if(name == '__debug__'){ - raise_syntax_error_known_range(this, - this.position, - $token.value, - 'cannot assign to __debug__') - } - this.name = name - - var ctx = this.parent - while(ctx.parent !== undefined){ - if(ctx.type == 'def'){ - break - } - ctx = ctx.parent - } - if(this.op == '*'){ - ctx.other_args = '"' + name + '"' - }else{ - ctx.other_kw = '"' + name + '"' - } -} - -var GeneratorExpCtx = function(context){ - // create a List Comprehension - // context is a ListOrTupleCtx - this.type = 'genexpr' - this.tree = [context.tree[0]] - this.tree[0].parent = this - this.position = context.position - Comprehension.make_comp(this, context) -} - -GeneratorExpCtx.prototype.ast = function(){ - // ast.GeneratorExp(elt, generators) - // elt is the part evaluated for each item - // generators is a list of comprehensions - var res = new ast.GeneratorExp( - this.tree[0].ast(), - Comprehension.generators(this.tree.slice(1)) - ) - set_position(res, this.position) - return res -} - -GeneratorExpCtx.prototype.transition = function(token){ - var context = this - if(token == ')'){ - if(this.parent.type == 'call'){ - // If the call had a previous argument, raise syntax error - if(context.parent.tree.length > 1){ - raise_syntax_error_known_range(context, - first_position(context), - last_position(context), - 'Generator expression must be parenthesized') - } - return this.parent.parent - } - return this.parent - } - raise_syntax_error(context) -} -var GlobalCtx = $B.parser.GlobalCtx = function(context){ - // Class for keyword "global" - this.type = 'global' - this.parent = context - this.tree = [] - this.position = $token.value - context.tree[context.tree.length] = this - this.expect = 'id' - this.scope = get_scope(this) - this.module = get_module(this) - if(this.module.module !== ''){ // used by eval1 - while(this.module.module != this.module.id){ - this.module = this.module.parent_block - } - } -} - -GlobalCtx.prototype.ast = function(){ - // Global(identifier* names) - var ast_obj = new ast.Global(this.tree.map(item => item.value)) - set_position(ast_obj, this.position) - return ast_obj -} - -GlobalCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case 'id': - if(context.expect == 'id'){ - new IdCtx(context, value) - context.add(value) - context.expect = ',' - return context - } - break - case ',': - if(context.expect == ','){ - context.expect = 'id' - return context - } - break - case 'eol': - if(context.expect == ','){ - return transition(context.parent, token) - } - break - } - raise_syntax_error(context) -} - -GlobalCtx.prototype.add = function(name){ - if(this.scope.type == "module"){ - // "global x" at module level does nothing - return - } - // Remove bindings between scope and module - var mod = this.scope.parent_block - if(this.module.module.startsWith("$exec")){ - while(mod && mod.parent_block !== this.module){ - // Set attribute _globals for intermediate scopes - mod._globals = mod._globals || new Map() - mod._globals.set(name, this.module.id) - // Delete possibly existing binding below module level - mod = mod.parent_block - } - } -} - -var IdCtx = $B.parser.IdCtx = function(context, value){ - // Class for identifiers (variable names) - this.type = 'id' - this.value = value // mangle_name(value, context) - this.parent = context - this.tree = [] - 
context.tree[context.tree.length] = this - - this.position = $token.value - - var scope = this.scope = get_scope(this) - - this.blurred_scope = this.scope.blurred - - // Store variables referenced in scope - if(["def", "generator"].indexOf(scope.ntype) > -1){ - if((! (context instanceof GlobalCtx)) && - ! (context instanceof NonlocalCtx)){ - scope.referenced = scope.referenced || {} - if(! $B.builtins[this.value]){ - scope.referenced[this.value] = true - } - } - } - if(context.parent.type == 'call_arg') { - this.call_arg = true - } - -} - -IdCtx.prototype.ast = function(){ - var ast_obj - if(['True', 'False', 'None'].indexOf(this.value) > -1){ - ast_obj = new ast.Constant(_b_[this.value]) - }else{ - ast_obj = new ast.Name(this.value, - this.bound ? new ast.Store() : new ast.Load()) - } - set_position(ast_obj, this.position) - return ast_obj -} - -IdCtx.prototype.transition = function(token, value){ - var context = this, - module = get_module(this) - if(context.value == 'case' && context.parent.parent.type == "node"){ - // if `case` is at the beginning of a line and either: - // - the line ends with a colon (:) OR - // - it is immediately followed by an identifier - // it is the "soft keyword" `case` for pattern matching. - // - // NodeCtx.prototype.transition also helps handle the soft keyword vs - // identifier differentiation, by treating an occurrence of `case` - // differently if it occurs at the beginning of a line and is a direct - // child of a match. - let save_position = module.token_reader.position, - ends_with_colon = line_ends_with_colon(module.token_reader, module.filename) - module.token_reader.position = save_position - if(ends_with_colon || token == 'id'){ - var node = get_node(context) - if((! node.parent) || !(node.parent.is_match)){ - raise_syntax_error(context, "('case' not inside 'match')") - }else{ - if(node.parent.irrefutable){ - // "match" statement already has an irrefutable pattern - let name = node.parent.irrefutable, - msg = name == '_' ? 
'wildcard' : - `name capture '${name}'` - raise_syntax_error(context, - `${msg} makes remaining patterns unreachable`) - } - } - return transition(new PatternCtx( - new CaseCtx(context.parent.parent)), - token, value) - } - }else if(context.value == 'match' && context.parent.parent.type == "node"){ - // same 'soft keyword' handling as case, but for match - let save_position = module.token_reader.position, - ends_with_colon = line_ends_with_colon(module.token_reader, module.filename) - module.token_reader.position = save_position - if(ends_with_colon || token == 'id'){ - return transition(new AbstractExprCtx( - new MatchCtx(context.parent.parent), true), - token, value) - } - }else if(context.value == 'type' && context.parent.parent.type == "node"){ - if(token == 'id'){ - // test soft keyword 'type' - return new TypeAliasCtx(context, value) - } - } - switch(token) { - case '=': - if(context.parent.type == 'expr' && - context.parent.parent !== undefined && - context.parent.parent.type == 'call_arg'){ - return new AbstractExprCtx( - new KwArgCtx(context.parent), false) - } - return transition(context.parent, token, value) - case '.': - // If followed by ".", the id cannot be bound - delete this.bound - return transition(context.parent, token, value) - case 'op': - return transition(context.parent, token, value) - case 'id': - case 'str': - case 'JoinedStr': - case 'int': - case 'float': - case 'imaginary': - var msg = 'invalid syntax' - if(["print", "exec"].indexOf(context.value) > -1 ){ - var f = context.value - msg = `Missing parentheses in call to '${f}'.` + - ` Did you mean ${f}(...)?` - }else if(context.parent.parent && (['list_or_tuple', 'dict'].indexOf(context.parent.parent.type) > -1)){ - msg = 'invalid syntax. Perhaps you forgot a comma?' - } - raise_syntax_error_known_range(context, - this.position, $token.value, msg) - - } - if(this.parent.parent.type == "starred"){ - if(['.', '[', '('].indexOf(token) == -1){ - return this.parent.parent.transition(token, value) - } - } - return transition(context.parent, token, value) -} - -var ImportCtx = $B.parser.ImportCtx = function(context){ - // Class for keyword "import" - this.type = 'import' - this.parent = context - this.tree = [] - this.position = $token.value - - context.tree[context.tree.length] = this - this.expect = 'id' -} - -ImportCtx.prototype.ast = function(){ - //ast.Import(names) - var names = [] - for(var item of this.tree){ - // check if item.name is a valid identifier - var alias = new ast.alias(item.name) - if(item.alias != item.name){ - alias.asname = item.alias - } - names.push(alias) - } - var ast_obj = new ast.Import(names) - set_position(ast_obj, this.position) - return ast_obj -} - -ImportCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case 'id': - if(context.expect == 'id'){ - if(context.order_error){ - raise_syntax_error(context, - "Did you mean to use 'from ... import ...' instead?") - } - new ImportedModuleCtx(context, value) - context.expect = ',' - return context - } - if(context.expect == 'qual'){ - context.expect = ',' - context.tree[context.tree.length - 1].name += - '.' + value - context.tree[context.tree.length - 1].alias += - '.' 
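`IdCtx.transition` above treats `match`, `case` and `type` as soft keywords: they only start a statement when the surrounding tokens make that reading possible (a following colon or identifier), and otherwise stay ordinary names. A short sketch; the `match` statement needs Python 3.10+ and the `type` statement 3.12+:

```python
match = "still a valid variable name"   # soft keyword: fine as an identifier

command = ("move", 3)
match command:                          # here "match" starts a match statement
    case ("move", steps):
        print("move", steps)            # move 3
    case _:
        print("unknown")

type Vector = list[float]               # 3.12 "type" statement, also a soft keyword
print(Vector)                           # the alias object created by the statement
```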
+ value - return context - } - if(context.expect == 'alias'){ - context.expect = ',' - context.tree[context.tree.length - 1].alias = - value - return context - } - break - case '.': - if(context.expect == ','){ - context.expect = 'qual' - return context - } - break - case ',': - if(context.expect == ','){ - context.expect = 'id' - return context - } - break - case 'as': - if(context.expect == ','){ - context.expect = 'alias' - return context - } - break - case 'eol': - if(context.expect == ','){ - return transition(context.parent, token) - } - break - case 'from': - // new in 3.12 to prepare error message in case of - // "import A from B" instead of "from B import A" - if(context.expect == ','){ - context.expect = 'id' - context.order_error = true - return context - } - break - } - raise_syntax_error(context) -} - -var ImportedModuleCtx = $B.parser.ImportedModuleCtx = function(context,name){ - this.type = 'imported module' - this.parent = context - this.name = name - this.alias = name - context.tree[context.tree.length] = this -} - -var JoinedStrCtx = $B.parser.JoinedStrCtx = function(context, values){ - // Class for f-strings. values is an Array with strings or expressions - this.type = 'JoinedStr' - this.parent = context - this.tree = [] - this.position = $token.value - this.scope = get_scope(context) - var line_num = get_node(context).line_num - for(var value of values){ - if(typeof value == "string"){ - new StringCtx(this, "'" + - value.replace(new RegExp("'", "g"), "\\" + "'") + "'") - }else{ - if(value.format !== undefined){ - value.format = new JoinedStrCtx(this, value.format) - this.tree.pop() - } - var src = value.expression.trimStart(), // ignore leading whitespace - filename = get_module(this).filename, - root = create_root_node(src, - this.scope.module, this.scope.id, - this.scope.parent_block, line_num) - try{ - dispatch_tokens(root) - }catch(err){ - var fstring_lineno = this.position.start[0], - fstring_offset = this.position.start[1] - err.filename = get_module(this).filename - err.lineno += fstring_lineno - 1 - err.offset += fstring_offset - 1 - err.end_lineno += fstring_lineno - 1 - err.end_offset += fstring_offset - 1 - err.text = this.position.string - err.args[1] = $B.fast_tuple([filename, - err.lineno, err.offset, - err.text, - err.end_lineno, err.end_offset]) - throw err - } - - var expr = root.children[0].context.tree[0] - this.tree.push(expr) - expr.parent = this - expr.elt = value - } - } - context.tree.push(this) - this.raw = false -} - -JoinedStrCtx.prototype.ast = function(){ - var res = { - type: 'JoinedStr', - values: [] - } - var state - for(var item of this.tree){ - if(item instanceof StringCtx){ - if(state == 'string'){ - // eg in "'ab' f'c{x}'" - $B.last(res.values).value += item.value - }else{ - var item_ast = new ast.Constant(item.value) - set_position(item_ast, item.position) - res.values.push(item_ast) - } - state = 'string' - }else{ - var conv_num = {a: 97, r: 114, s: 115}, - format = item.elt.format - format = format === undefined ? 
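The `from` branch in `ImportCtx.transition` above exists only to give a clearer message when someone writes `import A from B` instead of `from B import A` (the comment notes this was added for 3.12). A hedged check of the same mistake:

```python
try:
    compile("import sys from os", "<test>", "exec")
except SyntaxError as exc:
    print(exc.msg)
    # Older CPythons just report "invalid syntax"; the parser above (and recent
    # CPython versions) suggest: Did you mean to use 'from ... import ...' instead?
```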
format : format.ast() - var value = new ast.FormattedValue( - item.ast(), - conv_num[item.elt.conversion] || -1, - format) - set_position(value, this.position) - format = item.format - if(format !== undefined){ - value.format = item.format.ast() - } - res.values.push(value) - state = 'formatted_value' - } - } - var ast_obj = new ast.JoinedStr(res.values) - set_position(ast_obj, this.position) - return ast_obj -} - -JoinedStrCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case '[': - return new AbstractExprCtx(new SubscripCtx(context.parent), - false) - case '(': - // Strings are not callable. We replace the string by a - // call to an object that will raise the correct exception - context.parent.tree[0] = context - return new CallCtx(context.parent) - case 'str': - if(context.tree.length > 0 && - $B.last(context.tree).type == "str"){ - context.tree[context.tree.length - 1].add_value(value) - }else{ - new StringCtx(this, value) - } - return context - case 'JoinedStr': - // create new JoinedStr - var joined_expr = new JoinedStrCtx(context.parent, value) - context.parent.tree.pop() - if(context.tree.length > 0 && - $B.last(context.tree) instanceof StringCtx && - joined_expr.tree[0] instanceof StringCtx){ - // merge last string in context and first in value - $B.last(context.tree).value += joined_expr.tree[0].value - context.tree = context.tree.concat(joined_expr.tree.slice(1)) - }else{ - context.tree = context.tree.concat(joined_expr.tree) - } - return context - } - return transition(context.parent, token, value) -} - -var KwdCtx = $B.parser.KwdCtx = function(context){ - // used for **expr in a dictionary - this.type = 'kwd' - this.position = context.position - this.parent = context - this.tree = [] - context.tree.push(this) -} - -KwdCtx.prototype.ast = function(){ - var ast_obj = new $B.ast.keyword(this.tree[0].ast(), new ast.Load()) - set_position(ast_obj, this.position) - return ast_obj -} - -KwdCtx.prototype.transition = function(token, value){ - var context = this - return transition(context.parent, token, value) -} - -var KwArgCtx = $B.parser.KwArgCtx = function(context){ - // Class for keyword argument in a call - this.type = 'kwarg' - this.parent = context.parent - this.position = first_position(context) - this.equal_sign_position = $token.value - this.tree = [context.tree[0]] - // operation replaces left operand - context.parent.tree.pop() - context.parent.tree.push(this) - - if (['None', 'True', 'False', '__debug__'].indexOf(context.tree[0].value) > -1) { - raise_syntax_error(context, 'cannot assign to ' + context.tree[0].value) - } - - // set attribute "has_kw" of CallCtx instance to true - context.parent.parent.has_kw = true -} - -KwArgCtx.prototype.transition = function(token){ - var context = this - if(token == ','){ - return new CallArgCtx(context.parent.parent) - }else if(token == 'for'){ - // generator expression is invalid - raise_syntax_error_known_range(context, - context.position, - context.equal_sign_position, - "invalid syntax. 
" + - "Maybe you meant '==' or ':=' instead of '='?") - } - return transition(context.parent, token) -} - -var LambdaCtx = $B.parser.LambdaCtx = function(context){ - // Class for keyword "lambda" - this.type = 'lambda' - this.parent = context - context.tree[context.tree.length] = this - this.tree = [] - this.position = $token.value - - // initialize object for names bound in the function - this.node = get_node(this) - - // Arrays for arguments - this.positional_list = [] - this.default_list = [] - this.other_args = null - this.other_kw = null - this.after_star = [] -} - -LambdaCtx.prototype.ast = function(){ - // ast.Lambda(args, body) - var args - if(this.args.length == 0){ - args = new ast.arguments([], [], undefined, [], [], undefined, []) - }else{ - args = this.args[0].ast() - } - var ast_obj = new ast.Lambda(args, this.tree[0].ast()) - set_position(ast_obj, this.position) - return ast_obj -} - -LambdaCtx.prototype.transition = function(token, value){ - var context = this - if(token == ':' && context.args === undefined){ - context.args = context.tree - context.tree = [] - return new AbstractExprCtx(context, false) - } - if(context.args !== undefined){ // returning from expression - return transition(context.parent, token) - } - if(context.args === undefined){ - if(token == '('){ - raise_syntax_error(context, - 'Lambda expression parameters cannot be parenthesized') - }else if(context.tree.length > 0 && - context.tree[0].type == 'func_args'){ - // cf issue #2217: infinite loop for type(lambda) - raise_syntax_error(context) - }else{ - return transition(new FuncArgs(context), token, value) - } - } - raise_syntax_error(context) -} - -var ListCompCtx = function(context){ - // create a List Comprehension - // context is a ListOrTupleCtx - this.type = 'listcomp' - this.tree = [context.tree[0]] - this.tree[0].parent = this - this.position = $token.value - Comprehension.make_comp(this, context) -} - -ListCompCtx.prototype.ast = function(){ - // ast.ListComp(elt, generators) - // elt is the part evaluated for each item - // generators is a list of comprehensions - var res = new ast.ListComp( - this.tree[0].ast(), - Comprehension.generators(this.tree.slice(1))) - set_position(res, this.position) - return res -} - -ListCompCtx.prototype.transition = function(token){ - var context = this - if(token == ']'){ - return this.parent - } - raise_syntax_error(context) -} - -var ListOrTupleCtx = $B.parser.ListOrTupleCtx = function(context, real){ - // Class for literal lists or tuples - // The real type (list or tuple) is set inside transition - // as attribute 'real' - this.type = 'list_or_tuple' - this.real = real - this.expect = 'id' - this.closed = false - this.parent = context - this.tree = [] - this.position = $token.value - context.tree[context.tree.length] = this -} - -ListOrTupleCtx.prototype.ast = function(){ - var elts = this.tree.map(x => x.ast()), - ast_obj - if(this.real == 'list'){ - ast_obj = new ast.List(elts, new ast.Load()) - }else if(this.real == 'tuple'){ - ast_obj = new ast.Tuple(elts, new ast.Load()) - } - set_position(ast_obj, this.position, this.end_position) - return ast_obj -} - -ListOrTupleCtx.prototype.transition = function(token, value){ - var context = this - if(context.closed){ - if(token == '['){ - return new AbstractExprCtx( - new SubscripCtx(context.parent),false) - } - if(token == '('){return new CallCtx(context.parent)} - return transition(context.parent, token, value) - }else{ - if(context.expect == ','){ - switch(context.real){ - case 'tuple': - if(token == ')'){ - 
if(context.implicit){ - return transition(context.parent, token, value) - } - var close = true - context.end_position = $token.value - if(context.tree.length == 1){ - if(context.tree[0].type == 'expr' && - context.tree[0].tree[0].type == 'starred'){ - raise_syntax_error_known_range(context, - context.tree[0].tree[0].position, - last_position(context.tree[0]), - 'cannot use starred expression here') - } - // make single element replace tuple as child of - // context.parent.parent - var grandparent = context.parent.parent - // remove expr tuple - grandparent.tree.pop() - grandparent.tree.push(context.tree[0]) - // note that the expression was inside () - // used in annotation, to sort "(a): int" from - // "a: int" - context.tree[0].$was_parenthesized = true - context.tree[0].parent = grandparent - return context.tree[0] - } - if(context.packed || - (context.type == 'list_or_tuple' && - context.tree.length == 1 && - context.tree[0].type == 'expr' && - context.tree[0].tree[0].type == 'starred')){ - // syntax "(*x)" - raise_syntax_error(context, - "cannot use starred expression here") - } - if(close){ - context.close() - } - if(context.parent.type == "starred"){ - return context.parent.parent - } - return context.parent - } - break - case 'list': - if(token == ']'){ - context.close() - if(context.parent.type == "starred"){ - if(context.parent.tree.length > 0){ - return context.parent.tree[0] - }else{ - return context.parent.parent - } - } - return context.parent - } - break - } - - switch(token) { - case ',': - if(context.real == 'tuple'){ - context.has_comma = true - } - context.expect = 'id' - return context - case 'for': - // comprehension - if(context.real == 'list'){ - if(this.tree.length > 1){ - // eg [x, y for x in A for y in B] - raise_syntax_error(context, "did you forget " + - "parentheses around the comprehension target?") - } - return new TargetListCtx(new ForExpr( - new ListCompCtx(context))) - } - else{ - return new TargetListCtx(new ForExpr( - new GeneratorExpCtx(context))) - } - } - return transition(context.parent,token,value) - }else if(context.expect == 'id'){ - switch(context.real) { - case 'tuple': - if(token == ')'){ - context.close() - return context.parent - } - if(token == 'eol' && - context.implicit === true){ - context.close() - return transition(context.parent, token) - } - break - case 'list': - if(token == ']'){ - context.close() - return context - } - break - } - - switch(token) { - case '=': - if(context.real == 'tuple' && - context.implicit === true){ - context.close() - context.parent.tree.pop() - var expr1 = new ExprCtx(context.parent, - 'tuple', false) - expr1.tree = [context] - context.parent = expr1 - return transition(context.parent, token) - } - raise_syntax_error(context, "(unexpected '=' inside list)") - break - case ')': - break - case ']': - if(context.real == 'tuple' && - context.implicit === true){ - // Syntax like d[1,] = 2 - return transition(context.parent, token, - value) - }else{ - break - } - raise_syntax_error(context, '(unexpected "if" inside list)') - break - case ',': - raise_syntax_error(context, '(unexpected comma inside list)') - break - case 'str': - case 'JoinedStr': - case 'int': - case 'float': - case 'imaginary': - case 'ellipsis': - case 'lambda': - case 'yield': - case 'id': - case '(': - case '[': - case '{': - case 'await': - case 'not': - case ':': - context.expect = ',' - var expr2 = new AbstractExprCtx(context, false) - return transition(expr2, token, value) - case 'op': - if('+-~*'.indexOf(value) > -1 || value == '**'){ - 
context.expect = ',' - var expr3 = new AbstractExprCtx(context, false) - return transition(expr3, token, value) - } - raise_syntax_error(context, - `(unexpected operator: ${value})`) - break - default: - raise_syntax_error(context) - } - - }else{ - return transition(context.parent, token, value) - } - } -} - -ListOrTupleCtx.prototype.close = function(){ - this.closed = true - this.end_position = $token.value - this.src = get_module(this).src - for(var i = 0, len = this.tree.length; i < len; i++){ - // Replace parenthesized expressions inside list or tuple - // by the expression itself, eg (x, (y)) by (x, y). - // Cf. issue 1333 - var elt = this.tree[i] - if(elt.type == "expr" && - elt.tree[0].type == "list_or_tuple" && - elt.tree[0].real == "tuple" && - elt.tree[0].tree.length == 1 && - elt.tree[0].expect == ","){ - this.tree[i] = elt.tree[0].tree[0] - this.tree[i].parent = this - } - } -} - -var MatchCtx = $B.parser.MatchCtx = function(node_ctx){ - // node already has an expression with the id "match" - this.type = "match" - this.position = $token.value - node_ctx.tree = [this] - node_ctx.node.is_match = true - this.parent = node_ctx - this.tree = [] - this.expect = 'as' - this.token_position = get_module(this).token_reader.position -} - -MatchCtx.prototype.ast = function(){ - // ast.Match(subject, cases) - // subject holds the subject of the match - // cases contains an iterable of match_case nodes with the different cases - var res = new ast.Match(this.tree[0].ast(), ast_body(this.parent)) - set_position(res, this.position) - res.$line_num = get_node(this).line_num - return res -} - -MatchCtx.prototype.transition = function(token){ - var context = this - switch(token){ - case ':': - if(this.tree[0].type == 'list_or_tuple'){ - remove_abstract_expr(this.tree[0].tree) - } - switch(context.expect) { - case 'id': - case 'as': - case ':': - return BodyCtx(context) - } - break - case 'eol': - raise_syntax_error(context, "expected ':'") - } - raise_syntax_error(context) -} - -var NamedExprCtx = function(context){ - // context is an expression where context.tree[0] is an id - this.type = 'named_expr' - this.position = context.position - this.target = context.tree[0] - context.tree.pop() - context.tree.push(this) - this.parent = context - this.target.parent = this - this.tree = [] - if(context.parent.type == 'list_or_tuple' && - context.parent.real == 'tuple'){ - // used to check assignments - this.parenthesized = true - } -} - -NamedExprCtx.prototype.ast = function(){ - var res = new ast.NamedExpr(this.target.ast(), - this.tree[0].ast()) - res.target.ctx = new ast.Store() - set_position(res, this.position) - return res -} - -NamedExprCtx.prototype.transition = function(token, value){ - return transition(this.parent, token, value) -} - -function get_node_ancestor(node) { - return node.parent - && node.parent.context - && node.parent.context.tree - && node.parent.context.tree.length > 0 - && node.parent.context.tree[0] -} - -var NodeCtx = $B.parser.NodeCtx = function(node){ - // Base class for the context in a node - this.node = node - node.context = this - this.tree = [] - this.type = 'node' - - var scope = null - var tree_node = node - while(tree_node.parent && tree_node.parent.type != 'module'){ - var ntype = tree_node.parent.context.tree[0].type, - _break_flag = false - switch(ntype){ - case 'def': - case 'class': - case 'generator': - scope = tree_node.parent - _break_flag = true - } - if(_break_flag){break} - - tree_node = tree_node.parent - } - if(scope === null){ - scope = tree_node.parent 
|| tree_node // module - } - - this.scope = scope -} - -NodeCtx.prototype.transition = function(token, value){ - var context = this - if(this.node.parent && this.node.parent.context){ - var pctx = this.node.parent.context - if(pctx.tree && pctx.tree.length == 1 && - pctx.tree[0].type == "match"){ - if(token != 'eol' && (token !== 'id' || value !== 'case')){ - raise_syntax_error(context) // 'line does not start with "case"' - } - } - } - if(this.tree.length == 0 && this.node.parent){ - var rank = this.node.parent.children.indexOf(this.node) - if(rank > 0){ - let previous = this.node.parent.children[rank - 1] - if(previous.context.tree[0].type == 'try' && - ['except', 'finally'].indexOf(token) == -1){ - raise_syntax_error(context, - "expected 'except' or 'finally' block") - } - } - } - switch(token) { - case ',': - if(context.tree && context.tree.length == 0){ - raise_syntax_error(context) - } - // Implicit tuple - var first = context.tree[0] - context.tree = [] - var implicit_tuple = new ListOrTupleCtx(context) - implicit_tuple.real = "tuple" - implicit_tuple.implicit = 0 - implicit_tuple.tree.push(first) - first.parent = implicit_tuple - return implicit_tuple - case '[': - case '(': - case '{': - case '.': - case 'bytes': - case 'float': - case 'id': - case 'imaginary': - case 'int': - case 'str': - case 'JoinedStr': - case 'not': - case 'lambda': - // If we're seeing a case as a direct child of a match, we can - // treat this case as a hard keyword - if (value == 'case') { - let node_ancestor = get_node_ancestor(context.node) - if (node_ancestor && node_ancestor.type == 'match') { - return new PatternCtx(new CaseCtx(context)) - } - } - var expr = new AbstractExprCtx(context,true) - return transition(expr, token, value) - case 'assert': - return new AbstractExprCtx( - new AssertCtx(context), false, true) - case 'async': - return new AsyncCtx(context) - case 'await': - return new AbstractExprCtx(new AwaitCtx(context), false) - case 'break': - return new BreakCtx(context) - case 'class': - return new ClassCtx(context) - case 'continue': - return new ContinueCtx(context) - case 'def': - return new DefCtx(context) - case 'del': - return new AbstractExprCtx(new DelCtx(context),true) - case 'elif': - try{ - var previous = get_previous(context) - }catch(err){ - raise_syntax_error(context, "('elif' does not follow 'if')") - } - if(['condition'].indexOf(previous.type) == -1 || - previous.token == 'while'){ - raise_syntax_error(context, `(elif after ${previous.type})`) - } - return new AbstractExprCtx( - new ConditionCtx(context, token), false) - case 'ellipsis': - var ell_expr = new AbstractExprCtx(context, true) - return transition(ell_expr, token, value) - case 'else': - var previous1 = get_previous(context) - if(! ['condition', 'except', 'for'].includes(previous1.type)){ - raise_syntax_error(context, `(else after ${previous.type})`) - } - return new SingleKwCtx(context,token) - case 'except': - var previous2 = get_previous(context) - if(! ['try', 'except'].includes(previous2.type)){ - raise_syntax_error(context, `(except after ${previous.type})`) - } - return new ExceptCtx(context) - case 'finally': - var previous3 = get_previous(context) - if(! 
['try', 'except'].includes(previous3.type) && - (previous3.type != 'single_kw' || - previous3.token != 'else')){ - raise_syntax_error(context, `finally after ${previous.type})`) - } - return new SingleKwCtx(context,token) - case 'for': - return new TargetListCtx(new ForExpr(context)) - case 'from': - return new FromCtx(context) - case 'global': - return new GlobalCtx(context) - case 'if': - case 'while': - return new AbstractExprCtx( - new ConditionCtx(context, token), false) - case 'import': - return new ImportCtx(context) - case 'nonlocal': - return new NonlocalCtx(context) - case 'op': - switch(value) { - case '*': - var expr1 = new AbstractExprCtx(context, true) - return transition(expr1, token, value) - case '+': - case '-': - case '~': - context.position = $token.value - var expr2 = new ExprCtx(context, 'unary', true) - return new AbstractExprCtx( - new UnaryCtx(expr2, value), false) - case '@': - return new AbstractExprCtx(new DecoratorCtx(context), false) - } - break - case 'pass': - return new PassCtx(context) - case 'raise': - return new AbstractExprCtx(new RaiseCtx(context), false) - case 'return': - return new AbstractExprCtx(new ReturnCtx(context),true) - case 'try': - return new TryCtx(context) - case 'with': - return new WithCtx(context) - case 'yield': - return new AbstractExprCtx(new YieldCtx(context),true) - case 'eol': - if(context.maybe_type){ - if(context.tree.length > 0 && context.tree[0].type == 'assign'){ - alert('type soft keyword') - }else{ - raise_syntax_error(context) - } - } - if(context.tree.length == 0){ // might be the case after a : - context.node.parent.children.pop() - return context.node.parent.context - } - return context - } - console.log('error, context', context, 'token', token, value) - raise_syntax_error(context) -} - -var NonlocalCtx = $B.parser.NonlocalCtx = function(context){ - // Class for keyword "nonlocal" - this.type = 'nonlocal' - this.parent = context - this.tree = [] - this.position = $token.value - context.tree[context.tree.length] = this - this.expect = 'id' - - this.scope = get_scope(this) - this.scope.nonlocals = this.scope.nonlocals || new Set() -} - -NonlocalCtx.prototype.ast = function(){ - // Nonlocal(identifier* names) - var ast_obj = new ast.Nonlocal(this.tree.map(item => item.value)) - set_position(ast_obj, this.position) - return ast_obj -} - -NonlocalCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case 'id': - if(context.expect == 'id'){ - new IdCtx(context, value) - context.expect = ',' - return context - } - break - case ',': - if(context.expect == ','){ - context.expect = 'id' - return context - } - break - case 'eol': - if(context.expect == ','){ - return transition(context.parent, token) - } - break - } - raise_syntax_error(context) -} - -var NotCtx = $B.parser.NotCtx = function(context){ - // Class for keyword "not" - this.type = 'not' - this.parent = context - this.tree = [] - this.position = $token.value - context.tree[context.tree.length] = this -} - -NotCtx.prototype.ast = function(){ - var ast_obj = new ast.UnaryOp(new ast.Not(), this.tree[0].ast()) - set_position(ast_obj, this.position) - return ast_obj -} - -NotCtx.prototype.transition = function(token, value){ - var context = this, - expr - switch(token) { - case 'in': - // not is always in an expression : remove it - context.parent.parent.tree.pop() // remove 'not' - return new ExprCtx(new OpCtx(context.parent, 'not_in'), - 'op', false) - case 'id': - case 'imaginary': - case 'int': - case 'float': - case 'str': - 
case 'JoinedStr': - case 'bytes': - case '[': - case '(': - case '{': - case '.': - case 'not': - case 'lambda': - expr = new AbstractExprCtx(context, false) - return transition(expr, token, value) - case 'op': - var a = value - if('+' == a || '-' == a || '~' == a){ - expr = new AbstractExprCtx(context, false) - return transition(expr, token, value) - } - } - if(this.tree.length == 0 || this.tree[0] instanceof AbstractExprCtx){ - raise_syntax_error(context) - } - return transition(context.parent, token) -} - -var NumberCtx = $B.parser.NumberCtx = function(type, context, value){ - // Class for literal integers, floats and imaginary numbers - // For integers, value is a 2-elt tuple [base, value_as_string] where - // base is one of 16 (hex literal), 8 (octal), 2 (binary) or 10 (int) - - this.type = type - this.value = value - this.parent = context - this.tree = [] - this.position = $token.value - context.tree[context.tree.length] = this -} - -NumberCtx.prototype.ast = function(){ - var value = $B.AST.$convert(this), // in builtin_modules.js - ast_obj = new $B.ast.Constant(value) - set_position(ast_obj, this.position) - return ast_obj -} - -NumberCtx.prototype.transition = function(token, value){ - var context = this - var num_type = {2: 'binary', 8: 'octal', 10: 'decimal', - 16: 'hexadecimal'}[this.value[0]] - - if(token == 'id'){ - if(value == '_'){ - raise_syntax_error(context, 'invalid decimal literal') - }else if(["and", "else", "for", "if", "in", "is", "or"].indexOf(value) == -1){ - raise_syntax_error(context, `invalid ${num_type} literal`) - }else if(num_type == 'hexadecimal' && this.value[1].length % 2 == 1){ - $B.warn(_b_.SyntaxWarning, - `invalid hexadecimal literal`, - get_module(context).filename, - $token.value) - } - }else if(token == 'op'){ - if(["and", "in", "is", "or"].indexOf(value) > -1 && - num_type == 'hexadecimal' && - this.value[1].length % 2 == 1){ - $B.warn(_b_.SyntaxWarning, - `invalid hexadecimal literal`, - get_module(context).filename, - $token.value) - } - } - return transition(context.parent, token, value) -} - -var OpCtx = $B.parser.OpCtx = function(context, op){ - // Class for operators ; context is the left operand - this.type = 'op' - this.op = op - this.parent = context.parent - this.position = $token.value - this.tree = [context] - this.scope = get_scope(this) - - // Get type of left operand - if(context.type == "expr"){ - if(['int', 'float', 'str'].indexOf(context.tree[0].type) > -1){ - this.left_type = context.tree[0].type - } - } - - // operation replaces left operand - context.parent.tree.pop() - context.parent.tree.push(this) -} - -OpCtx.prototype.ast = function(){ - //console.log('op ast', this) - var ast_type_class = op2ast_class[this.op], - op_type = ast_type_class[0], - ast_class = ast_type_class[1], - ast_obj - - if(op_type === ast.Compare){ - var left = this.tree[0].ast(), - ops = [new ast_class()] - if(this.ops){ - for(var op of this.ops.slice(1)){ - ops.push(new op2ast_class[op][1]()) - } - ast_obj = new ast.Compare(left, ops, - this.tree.slice(1).map(x => x.ast())) - }else{ - ast_obj = new ast.Compare(left, ops, - [this.tree[1].ast()]) - } - }else if(op_type === ast.UnaryOp){ - ast_obj = new op_type(new ast_class(), this.tree[1].ast()) - }else if(op_type === ast.BoolOp){ - // Consecutive operations with the same operator, such as a or b or c, - // are collapsed into one node with several values - var values = [this.tree[1]], - main_op = this.op, - ctx = this - while(ctx.tree[0].type == 'op' && ctx.tree[0].op == main_op){ - values.splice(0, 0, 
ctx.tree[0].tree[1]) - ctx = ctx.tree[0] - } - values.splice(0, 0, ctx.tree[0]) - ast_obj = new op_type(new ast_class(), values.map(x => x.ast())) - }else{ - ast_obj = new op_type( - this.tree[0].ast(), new ast_class(), this.tree[1].ast()) - } - set_position(ast_obj, this.position) - return ast_obj -} - -function is_literal(expr){ - return expr.type == 'expr' && - ['int', 'str', 'float', 'imaginary'].indexOf(expr.tree[0].type) > -1 -} - -OpCtx.prototype.transition = function(token, value){ - var context = this - if(context.op === undefined){ - console.log('context has no op', context) - raise_syntax_error(context) - } - if((context.op == 'is' || context.op == 'is_not') && - context.tree.length > 1){ - for(var operand of context.tree){ - if(is_literal(operand)){ - var head = context.op == 'is' ? 'is' : 'is not' - $B.warn(_b_.SyntaxWarning, - `"${head}" with a literal. Did you mean "=="?"`, - get_module(context).filename, - $token.value) - break - } - } - } - - switch(token) { - case 'id': - case 'imaginary': - case 'int': - case 'float': - case 'str': - case 'JoinedStr': - case 'bytes': - case '[': - case '(': - case '{': - case '.': - case 'not': - case 'lambda': - return transition(new AbstractExprCtx(context, false), - token, value) - case 'op': - switch(value){ - case '+': - case '-': - case '~': - return new UnaryCtx(context, value) - } - break - default: - if(context.tree[context.tree.length - 1].type == - 'abstract_expr'){ - raise_syntax_error(context) - } - } - return transition(context.parent, token) -} - -var PassCtx = $B.parser.PassCtx = function(context){ - // Class for keyword "pass" - this.type = 'pass' - this.parent = context - this.tree = [] - this.position = $token.value - context.tree[context.tree.length] = this -} - -PassCtx.prototype.ast = function(){ - var ast_obj = new ast.Pass() - set_position(ast_obj, this.position) - return ast_obj -} - -PassCtx.prototype.transition = function(token){ - var context = this - if(token == 'eol'){ - return context.parent - } - raise_syntax_error(context) -} - -var PatternCtx = $B.parser.PatternCtx = function(context){ - // Class for patterns in a "case" statement - this.type = "pattern" - this.parent = context - this.tree = [] - context.tree.push(this) - this.expect = 'id' -} - -PatternCtx.prototype.transition = function(token, value){ - var context = this - switch(context.expect){ - case 'id': - switch(token){ - case 'str': - case 'int': - case 'float': - case 'imaginary': - context.expect = ',' - return new PatternLiteralCtx(context, token, value) - case 'op': - switch(value){ - case '-': - case '+': - context.expect = ',' - return new PatternLiteralCtx(context, - {sign: value}) - case '*': - context.expect = 'starred_id' - return context - default: - raise_syntax_error(context) - } - break - case 'id': - context.expect = ',' - if(['None', 'True', 'False'].indexOf(value) > -1){ - return new PatternLiteralCtx(context, token, value) - }else{ - return new PatternCaptureCtx(context, value) - } - break - case '[': - return new PatternCtx( - new PatternSequenceCtx(context.parent, token)) - case '(': - return new PatternCtx( - new PatternGroupCtx(context.parent, token)) - case '{': - return new PatternMappingCtx(context.parent, token) - case 'JoinedStr': - raise_syntax_error(context, "patterns may only match " + - "literals and attribute lookups") - } - break - case 'starred_id': - if(token == 'id'){ - var capture = new PatternCaptureCtx(context, value) - capture.starred = true - return capture - } - raise_syntax_error(context, "(expected id 
after '*')") - break - case 'number': - // if pattern starts with unary - or + - switch(token){ - case 'int': - case 'float': - case 'imaginary': - context.expect = ',' - return new PatternLiteralCtx(context, token, - value, context.sign) - default: - raise_syntax_error(context) - } - break - case ',': - switch(token){ - case ',': - if(context.parent instanceof PatternSequenceCtx){ - return new PatternCtx(context.parent) - } - return new PatternCtx( - new PatternSequenceCtx(context.parent)) - case ':': - return BodyCtx(context) - } - } - return context.parent.transition(token, value) -} - -function as_pattern(context, token, value){ - // common to all patterns - if(context.expect == 'as'){ - if(token == 'as'){ - context.expect = 'alias' - return context - }else{ - return transition(context.parent, token, value) - } - }else if(context.expect == 'alias'){ - if(token == 'id'){ - if(value == '_'){ - raise_syntax_error(context, "cannot use '_' as a target") - } - if(context.bindings().indexOf(value) > -1){ - raise_syntax_error(context, - `multiple assignments to name '${value}' in pattern`) - } - context.alias = value - return context.parent - }else{ - raise_syntax_error(context, 'invalid pattern target') - } - } -} - -var PatternCaptureCtx = function(context, value){ - // Class for capture patterns in a "case" statement - // context is a PatternCtx - this.type = "capture_pattern" - this.parent = context.parent - context.parent.tree.pop() - context.parent.tree.push(this) - this.tree = [value] - this.position = $token.value - this.positions = [this.position] - this.expect = '.' -} - -PatternCaptureCtx.prototype.ast = function(){ - var ast_obj, - pattern - try{ - if(this.tree.length > 1){ - pattern = new ast.Name(this.tree[0], new ast.Load()) - set_position(pattern, this.position) - for(var i = 1; i < this.tree.length; i++){ - pattern = new ast.Attribute(pattern, this.tree[i], new ast.Load()) - copy_position(pattern, pattern.value) - } - pattern = new ast.MatchValue(pattern) - copy_position(pattern, pattern.value) - }else if(this.starred){ - var v = this.tree[0] - if(v == '_'){ - ast_obj = new ast.MatchStar() - }else{ - ast_obj = new ast.MatchStar(v) - } - set_position(ast_obj, this.position) - }else{ - pattern = this.tree[0] - if(typeof pattern == 'string'){ - // pattern is the string - }else if(pattern.type == 'group_pattern'){ - pattern = pattern.ast() - }else{ - console.log('bizarre', pattern) - pattern = NumberCtx.prototype.ast.bind(this)() - } - if(pattern == '_'){ - pattern = new ast.MatchAs() - set_position(pattern, this.position) - } - } - if(this.alias){ - if(typeof pattern == "string"){ - pattern = new ast.MatchAs(undefined, pattern) - set_position(pattern, this.position) - } - ast_obj = new ast.MatchAs(pattern, this.alias) - }else if(this.tree.length > 1 || pattern instanceof ast.MatchAs){ - ast_obj = pattern - }else if(typeof pattern == 'string'){ - ast_obj = new ast.MatchAs(undefined, pattern) - }else if(! this.starred){ - ast_obj = new ast.MatchAs(undefined, pattern) - } - set_position(ast_obj, this.position) - return ast_obj - }catch(err){ - console.log('error capture ast') - show_line(this) - throw err - } -} - -PatternCaptureCtx.prototype.bindings = function(){ - var bindings = this.tree[0] == '_' ? 
[] : this.tree.slice() - if(this.alias){ - bindings.push(this.alias) - } - return bindings -} - -PatternCaptureCtx.prototype.transition = function(token, value){ - var context = this - switch(context.expect){ - case '.': - if(token == '.'){ - context.type = "value_pattern" - context.expect = 'id' - return context - }else if(token == '('){ - // open class pattern - return new PatternCtx(new PatternClassCtx(context)) - }else if(context.parent instanceof PatternMappingCtx){ - return context.parent.transition(token, value) - }else{ - context.expect = 'as' - return context.transition(token, value) - } - break - case 'as': - case 'alias': - var res = as_pattern(context, token, value) - return res - case 'id': - if(token == 'id'){ - context.tree.push(value) - context.positions.push($token.value) - context.expect = '.' - return context - } - - } - return transition(context.parent, token, value) -} - -const PatternClassCtx = function(context){ - this.type = "class_pattern" - this.tree = [] - this.parent = context.parent - this.position = $token.value - // create an id for class name - this.class_id = context.tree.slice() - this.positions = context.positions - // remove this instance of $dCtx from tree - context.tree.pop() - // get possible attributes of id - this.attrs = context.tree.slice(2) - context.parent.tree.pop() - context.parent.tree.push(this) - this.expect = ',' - this.keywords = [] - this.positionals = [] - this.bound_names = [] -} - -PatternClassCtx.prototype.ast = function(){ - // ast.MatchClass(cls, patterns, kwd_attrs, kwd_patterns) - // `cls` is an expression giving the nominal class to be matched - // `patterns` is a sequence of pattern nodes to be matched against the - // class defined sequence of pattern matching attributes - // `kwd_attrs` is a sequence of additional attributes to be matched - // `kwd_patterns` are the corresponding patterns - var cls - if(this.class_id.length == 1){ - cls = new ast.Name(this.class_id[0]) - }else{ - // attribute, eg "case ast.Expr(expr)": class_id is - // ['ast', '.', 'Expr'] - for(let i = 0, len = this.class_id.length; i < len - 1; i++){ - var value = new ast.Name(this.class_id[i], new ast.Load()) - set_position(value, this.positions[i]) - if(i == 0){ - cls = new ast.Attribute(value, this.class_id[i + 1]) - }else{ - cls = new ast.Attribute(cls, this.class_id[i + 1]) - } - set_position(cls, this.positions[i]) - } - } - set_position(cls, this.position) - cls.ctx = new ast.Load() - var patterns = [], - kwd_attrs = [], - kwd_patterns = [] - for(var item of this.tree){ - if(item.is_keyword){ - kwd_attrs.push(item.tree[0]) - kwd_patterns.push(item.tree[1].ast()) - }else{ - try{ - patterns.push(item.ast()) - }catch(err){ - console.log('error in class pattern item') - show_line(this) - throw err - } - } - } - var ast_obj = new ast.MatchClass(cls, patterns, kwd_attrs, kwd_patterns) - set_position(ast_obj, this.position) - if(this.alias){ - ast_obj = new ast.MatchAs(ast_obj, this.alias) - set_position(ast_obj, this.position) - } - return ast_obj -} - -PatternClassCtx.prototype.bindings = function(){ - var bindings = this.bound_names - if(this.alias){ - bindings.push(this.alias) - } - return bindings -} - -PatternClassCtx.prototype.transition = function(token, value){ - var context = this - - function check_last_arg(){ - var last = $B.last(context.tree), - bound - if(last instanceof PatternCaptureCtx){ - if(! 
last.is_keyword && - context.keywords.length > 0){ - $token.value = last.position - raise_syntax_error(context, - 'positional patterns follow keyword patterns') - } - if(last.is_keyword){ - if(context.keywords.indexOf(last.tree[0]) > -1){ - raise_syntax_error(context, - `keyword argument repeated: ${last.tree[0]}`) - } - context.keywords.push(last.tree[0]) - bound = last.tree[1].bindings() - }else{ - bound = last.bindings() - } - for(var b of bound){ - if(context.bound_names.indexOf(b) > -1){ - raise_syntax_error(context, 'multiple assignments ' + - `to name '${b}' in pattern`) - } - } - context.bound_names = context.bound_names.concat(bound) - } - } - - switch(this.expect){ - case ',': - switch(token){ - case '=': - // check that current argument is a capture - var current = $B.last(this.tree) - if(current instanceof PatternCaptureCtx){ - // check duplicate - if(this.keywords.indexOf(current.tree[0]) > -1){ - raise_syntax_error(context, - 'attribute name repeated in class pattern: ' + - current.tree[0]) - } - current.is_keyword = true - return new PatternCtx(current) - } - raise_syntax_error(this, "'=' after non-capture") - break - case ',': - check_last_arg() - return new PatternCtx(this) - case ')': - check_last_arg() - if($B.last(this.tree).tree.length == 0){ - this.tree.pop() - } - context.expect = 'as' - return context - default: - raise_syntax_error(context) - } - break - case 'as': - case 'alias': - return as_pattern(context, token, value) - } - return transition(context.parent, token, value) - -} - -var PatternGroupCtx = function(context){ - // Class for group patterns, delimited by (), in a "case" statement - this.type = "group_pattern" - this.parent = context - this.position = $token.value - this.tree = [] - context.tree.pop() - this.expect = ',|' - context.tree.push(this) -} - -function remove_empty_pattern(context){ - var last = $B.last(context.tree) - if(last && last instanceof PatternCtx && - last.tree.length == 0){ - context.tree.pop() - } -} - -PatternGroupCtx.prototype.ast = function(){ - var ast_obj - if(this.tree.length == 1 && ! 
this.has_comma){ - ast_obj = this.tree[0].ast() - }else{ - ast_obj = PatternSequenceCtx.prototype.ast.bind(this)() - } - if(this.alias){ - ast_obj = new ast.MatchAs(ast_obj, this.alias) - } - set_position(ast_obj, this.position) - return ast_obj -} - -PatternGroupCtx.prototype.bindings = function(){ - var bindings = [] - for(var item of this.tree){ - bindings = bindings.concat(item.bindings()) - } - if(this.alias){ - bindings.push(this.alias) - } - return bindings -} - -PatternGroupCtx.prototype.transition = function(token, value){ - var context = this - switch(context.expect){ - case ',|': - if(token == ")"){ - // close group - remove_empty_pattern(context) - context.expect = 'as' - return context - }else if(token == ','){ - context.expect = 'id' - context.has_comma = true - return context - }else if(token == 'op' && value == '|'){ - var opctx = new PatternOrCtx(context.parent) - opctx.parenthese = true - return new PatternCtx(opctx) - }else if(this.token === undefined){ - return transition(context.parent, token, value) - } - raise_syntax_error(context) - break - case 'as': - case 'alias': - return as_pattern(context, token, value) - case 'id': - if(token == ')'){ - // case (x,) - remove_empty_pattern(context) - context.expect ='as' - return context - } - context.expect = ',|' - return transition(new PatternCtx(context), token, value) - } - raise_syntax_error(context) -} - -var PatternLiteralCtx = function(context, token, value, sign){ - // Class for literal patterns in a "case" statement - // context is a PatternCtx - this.type = "literal_pattern" - this.parent = context.parent - this.position = $token.value - context.parent.tree.pop() - context.parent.tree.push(this) - if(token.sign){ - this.tree = [{sign: token.sign}] - this.expect = 'number' - }else{ - if(token == 'str'){ - this.tree = [] - new StringCtx(this, value) - }else if(token == 'JoinedStr'){ - raise_syntax_error(this, "patterns cannot include f-strings") - }else{ - this.tree = [{type: token, value, sign}] - } - this.expect = 'op' - } -} - -PatternLiteralCtx.prototype.ast = function(){ - try{ - var first = this.tree[0], - result - if(first.type == 'str'){ - var v = StringCtx.prototype.ast.bind(first)() - result = new ast.MatchValue(v) - }else if(first.type == 'id'){ - result = new ast.MatchSingleton(_b_[first.value]) - }else{ - first.position = this.position - var num = NumberCtx.prototype.ast.bind(first)(), - res = new ast.MatchValue(num) - if(first.sign && first.sign != '+'){ - var op = {'+': ast.UAdd, '-': ast.USub, '~': ast.Invert}[first.sign] - var unary_op = new ast.UnaryOp(new op(), res.value) - set_position(unary_op, this.position) - res = new ast.MatchValue(unary_op) - set_position(res, this.position) - } - if(this.tree.length == 1){ - result = res - }else{ - this.tree[2].position = this.position - var num2 = NumberCtx.prototype.ast.bind(this.tree[2])(), - binop = new ast.BinOp(res.value, - this.tree[1] == '+' ? new ast.Add() : new ast.Sub(), - num2) - set_position(binop, this.position) - result = new ast.MatchValue(binop) - } - } - set_position(result, this.position) - if(this.tree.length == 2){ - // value = complex number - result = new ast.MatchValue(new ast.BinOp( - this.tree[0].ast(), - this.num_sign == '+' ? 
ast.Add : ast.Sub, - this.tree[1].ast())) - } - if(this.alias){ - result = new ast.MatchAs(result, this.alias) - } - set_position(result, this.position) - return result - }catch(err){ - show_line(this) - throw err - } -} - -PatternLiteralCtx.prototype.bindings = function(){ - if(this.alias){ - return [this.alias] - } - return [] -} - -PatternLiteralCtx.prototype.transition = function(token, value){ - var context = this - switch(context.expect){ - case 'op': - if(token == "op"){ - switch(value){ - case '+': - case '-': - if(['int', 'float'].indexOf(context.tree[0].type) > -1){ - context.expect = 'imaginary' - this.tree.push(value) - context.num_sign = value - return context - } - raise_syntax_error(context, - 'patterns cannot include operators') - break - default: - return transition(context.parent, token, value) - } - } - break - case 'number': - switch(token){ - case 'int': - case 'float': - case 'imaginary': - var last = $B.last(context.tree) - if(this.tree.token === undefined){ - // if pattern starts with unary - or + - last.type = token - last.value = value - context.expect = 'op' - return context - } - break - default: - raise_syntax_error(context) - } - break - case 'imaginary': - switch(token){ - case 'imaginary': - context.tree.push({type: token, value, sign: context.num_sign}) - return context.parent - default: - raise_syntax_error(context, '(expected imaginary)') - - } - break - case 'as': - case 'alias': - return as_pattern(context, token, value) - } - if(token == 'as' && context.tree.length == 1){ - context.expect = 'as' - return context.transition(token, value) - } - return transition(context.parent, token, value) -} - -var PatternMappingCtx = function(context){ - // Class for sequence patterns in a "case" statement - this.type = "mapping_pattern" - this.parent = context - this.position = $token.value - context.tree.pop() - this.tree = [] - context.tree.push(this) - this.expect = 'key_value_pattern' - // store literal key values to detect duplicates - this.literal_keys = [] - this.bound_names = [] -} - -PatternMappingCtx.prototype.ast = function(){ - // ast.MatchMapping(keys, patterns, rest) - // `keys` : sequence of expression nodes - // `patterns` : corresponding sequence of pattern nodes - // `rest` : optional name to capture the remaining mapping elements - var keys = [], - patterns = [] - for(var item of this.tree){ - keys.push(item.tree[0].ast().value) - if(item.tree[0] instanceof PatternLiteralCtx){ - patterns.push(item.tree[1].ast()) - }else{ - patterns.push(item.tree[2].ast()) - } - } - var res = new ast.MatchMapping(keys, patterns) - if(this.double_star){ - res.rest = this.double_star.tree[0] - } - set_position(res, this.position) - return res -} - -PatternMappingCtx.prototype.bindings = function(){ - var bindings = [] - for(var item of this.tree){ - bindings = bindings.concat(item.bindings()) - } - if(this.rest){ - bindings = bindings.concat(this.rest.bindings()) - } - if(this.alias){ - bindings.push(this.alias) - } - return bindings -} - -PatternMappingCtx.prototype.transition = function(token, value){ - var context = this - function check_duplicate_names(){ - var last = $B.last(context.tree), - bindings - if(last instanceof PatternKeyValueCtx){ - if(context.double_star){ - // key-value after double star is not allowed - raise_syntax_error(context, - "can't use starred name here (consider moving to end)") - } - if(last.tree[0].type == 'value_pattern'){ - bindings = last.tree[2].bindings() - }else{ - bindings = last.tree[1].bindings() - } - for(var binding of 
bindings){ - if(context.bound_names.indexOf(binding) > -1){ - raise_syntax_error(context, - `multiple assignments to name '${binding}'` + - ' in pattern') - } - } - context.bound_names = context.bound_names.concat(bindings) - } - } - switch(context.expect){ - case 'key_value_pattern': - if(token == '}' || token == ','){ - // If there are only literal values, raise SyntaxError if - // there are duplicate keys - check_duplicate_names() - if(context.double_star){ - var ix = context.tree.indexOf(context.double_star) - if(ix != context.tree.length - 1){ - raise_syntax_error(context, - "can't use starred name here (consider moving to end)") - } - context.rest = context.tree.pop() - } - return token == ',' ? context : context.parent - } - if(token == 'op' && value == '**'){ - context.expect = 'capture_pattern' - return context - } - var p = new PatternCtx(context) - try{ - var lit_or_val = p.transition(token, value) - }catch(err){ - raise_syntax_error(context, "mapping pattern keys may only " + - "match literals and attribute lookups") - } - if(context.double_star){ - // eg "case {**rest, x: y}" - raise_syntax_error(context) - } - if(lit_or_val instanceof PatternLiteralCtx){ - context.tree.pop() // remove PatternCtx - new PatternKeyValueCtx(context, lit_or_val) - return lit_or_val - }else if(lit_or_val instanceof PatternCaptureCtx){ - context.has_value_pattern_keys = true - // expect a dotted name (value pattern) - context.tree.pop() - new PatternKeyValueCtx(context, lit_or_val) - context.expect = '.' - return this - }else{ - raise_syntax_error(context, '(expected key or **)') - } - break - case 'capture_pattern': - var capture = transition(new PatternCtx(context), token, value) - if(capture instanceof PatternCaptureCtx){ - if(context.double_star){ - raise_syntax_error(context, - "only one double star pattern is accepted") - } - if(value == '_'){ - raise_syntax_error(context) // , "('**_' is not valid)") - } - if(context.bound_names.indexOf(value) > -1){ - raise_syntax_error(context, 'duplicate binding: ' + value) - } - context.bound_names.push(value) - capture.double_star = true - context.double_star = capture - context.expect = ',' - return context - }else{ - raise_syntax_error(context, '(expected identifier)') - } - break - case ',': - // after a **rest item - if(token == ','){ - context.expect = 'key_value_pattern' - return context - }else if(token == '}'){ - context.expect = 'key_value_pattern' - return context.transition(token, value) - } - raise_syntax_error(context) - break - case '.': - // value pattern - if(context.tree.length > 0){ - var last = $B.last(context.tree) - if(last instanceof PatternKeyValueCtx){ - // create an id with the first name in value pattern - new IdCtx(last, last.tree[0].tree[0]) - context.expect = 'key_value_pattern' - return transition(last.tree[0], token, value) - } - } - raise_syntax_error(context) - } - return transition(context.parent, token, value) -} - -var PatternKeyValueCtx = function(context, literal_or_value){ - this.type = "pattern_key_value" - this.parent = context - this.tree = [literal_or_value] - literal_or_value.parent = this - this.expect = ':' - context.tree.push(this) -} - -PatternKeyValueCtx.prototype.bindings = PatternMappingCtx.prototype.bindings - -PatternKeyValueCtx.prototype.transition = function(token, value){ - var context = this - switch(context.expect){ - case ':': - switch(token){ - case ':': - // check duplicate literal keys - var key_obj = this.tree[0] - if(key_obj instanceof PatternLiteralCtx){ - var key = 
$B.AST.$convert(key_obj.tree[0]) - // check if present in this.literal_keys - if(_b_.list.__contains__(this.parent.literal_keys, key)){ - raise_syntax_error(context, `mapping pattern checks ` + - `duplicate key (${_b_.repr(key)})`) - } - this.parent.literal_keys.push(key) - } - this.expect = ',' - return new PatternCtx(this) - default: - raise_syntax_error(context, '(expected :)') - } - break - case ',': - switch(token){ - case '}': - return transition(context.parent, token, value) - case ',': - context.parent.expect = 'key_value_pattern' - return transition(context.parent, token, value) - case 'op': - if(value == '|'){ - // value is an alternative - return new PatternCtx(new PatternOrCtx(context)) - } - } - raise_syntax_error(context, "(expected ',' or '}')") - } - return transition(context.parent, token, value) -} - -var PatternOrCtx = function(context){ - // Class for "or patterns" in a "case" statement - // context already has a pattern as its first child - this.type = "or_pattern" - this.parent = context - this.position = $token.value - var first_pattern = context.tree.pop() - if(first_pattern instanceof PatternGroupCtx && - first_pattern.expect != 'as'){ - // eg "case (a, ...)" - first_pattern = first_pattern.tree[0] - } - this.tree = [first_pattern] - first_pattern.parent = this - this.expect = '|' - context.tree.push(this) - this.check_reachable() -} - -PatternOrCtx.prototype.ast = function(){ - // ast.MatchOr(patterns) - var ast_obj = new ast.MatchOr(this.tree.map(x => x.ast())) - set_position(ast_obj, this.position) - if(this.alias){ - ast_obj = new ast.MatchAs(ast_obj, this.alias) - } - set_position(ast_obj, this.position) - return ast_obj -} - -PatternOrCtx.prototype.bindings = function(){ - var names - for(var subpattern of this.tree){ - if(subpattern.bindings === undefined){ - console.log('no binding', subpattern) - } - var subbindings = subpattern.bindings() - if(names === undefined){ - names = subbindings - }else{ - for(let item of names){ - if(subbindings.indexOf(item) == -1){ - raise_syntax_error(this, - "alternative patterns bind different names") - } - } - for(let item of subbindings){ - if(names.indexOf(item) == -1){ - raise_syntax_error(this, - "alternative patterns bind different names") - } - } - } - } - if(this.alias){ - return names.concat(this.alias) - } - return names -} - -PatternOrCtx.prototype.check_reachable = function(){ - // Called before accepting a new alternative. If the last one is a - // capture or wildcard, raise SyntaxError - var item = $B.last(this.tree) - var capture - if(item.type == 'capture_pattern'){ - capture = item.tree[0] - }else if(item.type == 'group_pattern' && item.tree.length == 1 && - item.tree[0].type == 'capture_pattern'){ - capture = item.tree[0].tree[0] - }else if(item instanceof PatternOrCtx){ - item.check_reachable() - } - if(capture){ - var msg = capture == '_' ? 
'wildcard' : - `name capture '${capture}'` - raise_syntax_error(this, - `${msg} makes remaining patterns unreachable`) - } -} - -PatternOrCtx.prototype.transition = function(token, value){ - function set_alias(){ - // If last item has an alias, it is the alias of the whole "or pattern" - var last = $B.last(context.tree) - if(last.alias){ - context.alias = last.alias - delete last.alias - } - } - - var context = this - - if(['as', 'alias'].indexOf(context.expect) > -1){ - return as_pattern(context, token, value) - } - if(token == 'op' && value == "|"){ - // items cannot be aliased - for(var item of context.tree){ - if(item.alias){ - raise_syntax_error(context, '(no as pattern inside or pattern)') - } - } - context.check_reachable() - return new PatternCtx(context) - }else if(token == ')' && context.parenthese){ - set_alias() - context.bindings() - delete context.parenthese - context.expect = 'as' - return context - } - set_alias() - context.bindings() - return transition(context.parent, token, value) -} - -var PatternSequenceCtx = function(context, token){ - // Class for sequence patterns in a "case" statement - this.type = "sequence_pattern" - this.parent = context - this.position = $token.value - this.tree = [] - this.bound_names = [] - var first_pattern = context.tree.pop() - if(token === undefined){ - // implicit sequence : form "case x, y:" - // context.parent already has a pattern - this.bound_names = first_pattern.bindings() - this.tree = [first_pattern] - if(first_pattern.starred){ - this.has_star = true - } - first_pattern.parent = this - }else{ - // explicit sequence with token '[' or '(' - this.token = token - } - this.expect = ',' - context.tree.push(this) -} - -PatternSequenceCtx.prototype.ast = function(){ - var ast_obj = new ast.MatchSequence(this.tree.map(x => x.ast())) - set_position(ast_obj, this.position) - if(this.alias){ - ast_obj = new ast.MatchAs(ast_obj, this.alias) - set_position(ast_obj, this.position) - } - return ast_obj -} - -PatternSequenceCtx.prototype.bindings = PatternMappingCtx.prototype.bindings - -PatternSequenceCtx.prototype.transition = function(token, value){ - function check_duplicate_names(){ - var last = $B.last(context.tree) - if(! 
(last instanceof PatternCtx)){ - // check duplicate bindings - var last_bindings = last.bindings() - for(var b of last_bindings){ - if(context.bound_names.indexOf(b) > -1){ - raise_syntax_error(context, "multiple assignments to" + - ` name '${b}' in pattern`) - } - } - if(last.starred){ - if(context.has_star){ - raise_syntax_error(context, - 'multiple starred names in sequence pattern') - } - context.has_star = true - } - context.bound_names = context.bound_names.concat(last_bindings) - } - } - - var context = this - if(context.expect == ','){ - if((context.token == '[' && token == ']') || - (context.token == '(' && token == ")")){ - // check if there are more than 1 starred subpattern - var nb_starred = 0 - for(var item of context.tree){ - if(item instanceof PatternCaptureCtx && item.starred){ - nb_starred++ - if(nb_starred > 1){ - raise_syntax_error(context, - 'multiple starred names in sequence pattern') - } - } - } - context.expect = 'as' - check_duplicate_names() - remove_empty_pattern(context) - return context - }else if(token == ','){ - check_duplicate_names() - context.expect = 'id' - return context - }else if(token == 'op' && value == '|'){ - // alternative on last element - remove_empty_pattern(context) - return new PatternCtx(new PatternOrCtx(context)) - }else if(this.token === undefined){ - // implicit tuple - check_duplicate_names() - return transition(context.parent, token, value) - } - raise_syntax_error(context) - }else if(context.expect == 'as'){ - if(token == 'as'){ - this.expect = 'alias' - return context - } - return transition(context.parent, token, value) - }else if(context.expect == 'alias'){ - if(token == 'id'){ - context.alias = value - return context.parent - } - raise_syntax_error(context, 'expected alias') - }else if(context.expect == 'id'){ - context.expect = ',' - return transition(new PatternCtx(context), token, value) - } -} - -var RaiseCtx = $B.parser.RaiseCtx = function(context){ - // Class for keyword "raise" - this.type = 'raise' - this.parent = context - this.tree = [] - this.position = $token.value - context.tree[context.tree.length] = this - this.scope_type = get_scope(this).ntype - -} - -RaiseCtx.prototype.ast = function(){ - // ast.Raise(exc, cause) - // cause is the optional part in "raise exc from cause" - var ast_obj = new ast.Raise(...this.tree.map(x => x.ast())) - set_position(ast_obj, this.position) - return ast_obj -} - -RaiseCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case 'id': - if(context.tree.length == 0){ - return new IdCtx(new ExprCtx(context, 'exc', false), - value) - } - break - case 'from': - if(context.tree.length > 0){ - return new AbstractExprCtx(context, false) - } - break - case 'eol': - remove_abstract_expr(this.tree) - return transition(context.parent, token) - } - raise_syntax_error(context) -} - -var ReturnCtx = $B.parser.ReturnCtx = function(context){ - // Class for keyword "return" - this.type = 'return' - this.parent = context - this.tree = [] - this.position = $token.value - context.tree[context.tree.length] = this - - // Check if inside a function - this.scope = get_scope(this) - if(["def", "generator"].indexOf(this.scope.ntype) == -1){ - raise_syntax_error(context, "'return' outside function") - } - - // Check if return is inside a "for" loop - // In this case, the loop will not be included inside a function - // for optimisation - var node = this.node = get_node(this) - while(node.parent){ - if(node.parent.context){ - var elt = node.parent.context.tree[0] - if(elt.type 
== 'for'){
-                elt.has_return = true
-                break
-            }else if(elt.type == 'try'){
-                elt.has_return = true
-            }else if(elt.type == 'single_kw' && elt.token == 'finally'){
-                elt.has_return = true
-            }
-        }
-        node = node.parent
-    }
-}
-
-ReturnCtx.prototype.ast = function(){
-    var res = new ast.Return()
-    if(this.tree.length > 0){
-        res.value = this.tree[0].ast()
-    }
-    set_position(res, this.position)
-    return res
-}
-
-ReturnCtx.prototype.transition = function(token, value){
-    var context = this
-    if(token == 'eol' && this.tree.length == 1 &&
-            this.tree[0].type == 'abstract_expr'){
-        // "return" must be transformed into "return None"
-        this.tree.pop()
-    }
-    return transition(new AbstractExprCtx(context.parent, false),
-        token, value)
-}
-
-var SetCompCtx = function(context){
-    // create a Set Comprehension
-    // context is a DictOrSetCtx
-    this.type = 'setcomp'
-    this.tree = [context.tree[0]]
-    this.tree[0].parent = this
-    this.position = $token.value
-    Comprehension.make_comp(this, context)
-}
-
-SetCompCtx.prototype.ast = function(){
-    // ast.SetComp(elt, generators)
-    // elt is the part evaluated for each item
-    // generators is a list of comprehensions
-    var ast_obj = new ast.SetComp(
-        this.tree[0].ast(),
-        Comprehension.generators(this.tree.slice(1))
-    )
-    set_position(ast_obj, this.position)
-    return ast_obj
-}
-
-SetCompCtx.prototype.transition = function(token){
-    var context = this
-    if(token == '}'){
-        return this.parent
-    }
-    raise_syntax_error(context)
-}
-
-var SingleKwCtx = $B.parser.SingleKwCtx = function(context,token){
-    // Class for keywords "finally", "else"
-    this.type = 'single_kw'
-    this.token = token
-    this.parent = context
-    this.tree = []
-    context.tree[context.tree.length] = this
-
-    // If token is "else" inside a "for" loop, set the flag "has_break"
-    // on the loop, to force the creation of a boolean "$no_break"
-    if(token == "else"){
-        var node = context.node,
-            rank = node.parent.children.indexOf(node),
-            pctx = node.parent.children[rank - 1].context
-        pctx.tree[0].orelse = this
-        if(pctx.tree.length > 0){
-            var elt = pctx.tree[0]
-            if(elt.type == 'for' ||
-                    elt.type == 'asyncfor' ||
-                    (elt.type == 'condition' && elt.token == 'while')){
-                elt.has_break = true
-                elt.else_node = get_node(this)
-            }
-        }
-    }
-}
-
-SingleKwCtx.prototype.ast = function(){
-    return ast_body(this.parent)
-}
-
-SingleKwCtx.prototype.transition = function(token){
-    var context = this
-    if(token == ':'){
-        return BodyCtx(context)
-    }else if(token == 'eol'){
-        raise_syntax_error(context, "expected ':'")
-    }
-    raise_syntax_error(context)
-}
-
-var SliceCtx = $B.parser.SliceCtx = function(context){
-    // Class for slices inside a subscription : t[1:2]
-    this.type = 'slice'
-    this.parent = context
-    this.position = $token.value
-    this.tree = context.tree.length > 0 ? [context.tree.pop()] : []
-    context.tree.push(this)
-}
-
-SliceCtx.prototype.ast = function(){
-    var slice = new ast.Slice()
-    var attrs = ['lower', 'upper', 'step']
-    for(var i = 0; i < this.tree.length; i++){
-        var item = this.tree[i]
-        if(item.type !== 'abstract_expr'){
-            slice[attrs[i]] = item.ast()
-        }
-    }
-    set_position(slice, this.position)
-    return slice
-}
-
-SliceCtx.prototype.transition = function(token, value){
-    var context = this
-    if(token == ":"){
-        return new AbstractExprCtx(context, false)
-    }
-    return transition(context.parent, token, value)
-}
-
-var StarArgCtx = $B.parser.StarArgCtx = function(context){
-    // Class for star args in calls, eg f(*args)
-    this.type = 'star_arg'
-    this.parent = context
-    this.tree = []
-    this.position = $token.value
-    context.tree[context.tree.length] = this
-}
-
-StarArgCtx.prototype.transition = function(token, value){
-    var context = this
-    switch(token) {
-        case 'id':
-            if(context.parent.type == "target_list"){
-                context.tree.push(value)
-                context.parent.expect = ','
-                return context.parent
-            }
-            return transition(new AbstractExprCtx(context, false),
-                token, value)
-        case 'imaginary':
-        case 'int':
-        case 'float':
-        case 'str':
-        case 'JoinedStr':
-        case 'bytes':
-        case '[':
-        case '(':
-        case '{':
-        case 'not':
-        case 'lambda':
-            return transition(new AbstractExprCtx(context, false),
-                token, value)
-        case ',':
-        case ')':
-            if(context.tree.length == 0){
-                raise_syntax_error(context, "(unnamed star argument)")
-            }
-            return transition(context.parent, token)
-        case ':':
-            if(context.parent.parent.type == 'lambda'){
-                return transition(context.parent.parent, token)
-            }
-    }
-    raise_syntax_error(context)
-}
-
-var StarredCtx = $B.parser.StarredCtx = function(context){
-    // used for packed tuples in expressions, eg
-    //     a, *b, c = [1, 2, 3, 4]
-    // and for targets in "for" loops
-    //     for a, *b in t: pass
-    this.type = 'starred'
-    this.position = context.position
-    if(context.parent.type == 'list_or_tuple' &&
-            context.parent.parent.type == "node"){
-        // SyntaxError for a, *b, *c = ...
-        for(var i = 0; i < context.parent.tree.length; i++){
-            var child = context.parent.tree[i]
-            if(child.type == 'expr' &&
-                    child.tree.length > 0 &&
-                    child.tree[0].type == 'starred'){
-                raise_syntax_error(context,
-                    "two starred expressions in assignment")
-            }
-        }
-    }
-    this.parent = context
-    this.tree = []
-    context.tree[context.tree.length] = this
-}
-
-StarredCtx.prototype.ast = function(){
-    if (this.tree[0].type == "abstract_expr") {
-        raise_syntax_error_known_range(this, this.position, last_position(this), 'invalid syntax')
-    }
-    var ast_obj = new ast.Starred(this.tree[0].ast(), new ast.Load())
-    set_position(ast_obj, this.position)
-    return ast_obj
-}
-
-StarredCtx.prototype.transition = function(token, value){
-    var context = this
-    return transition(context.parent, token, value)
-}
-
-var StringCtx = $B.parser.StringCtx = function(context, value){
-    // Class for literal strings
-    // value is the string with quotes, eg 'a', "b\"c" etc.
-    this.type = 'str'
-    this.parent = context
-    this.position = this.end_position = $token.value
-    context.tree.push(this)
-    this.is_bytes = value.startsWith('b')
-    this.value = this.is_bytes ?
[] : '' - this.add_value(value) - this.raw = false -} - -$B.string_from_ast_value = function(value){ - // remove escaped "'" in string value - return value.replace(new RegExp("\\\\'", 'g'), "'") -} - -var make_string_for_ast_value = $B.make_string_for_ast_value = function(value){ - value = value.replace(/\n/g,'\\n\\\n') - value = value.replace(/\r/g,'\\r\\\r') - if(value[0] == "'"){ - var unquoted = value.substr(1, value.length - 2) - return unquoted - } - // prepare value so that "'" + value + "'" is the correct string - if(value.indexOf("'") > -1){ - var s = '', - escaped = false - for(var char of value){ - if(char == '\\'){ - if(escaped){ - s += '\\\\' - } - escaped = !escaped - }else{ - if(char == "'" && ! escaped){ - // escape unescaped single quotes - s += '\\' - }else if(escaped){ - s += '\\' - } - s += char - escaped = false - } - } - value = s - } - return value.substr(1, value.length - 2) -} - -StringCtx.prototype.add_value = function(value){ - this.is_bytes = value.charAt(0) == 'b' - if(! this.is_bytes){ - this.value += make_string_for_ast_value(value) - }else{ - value = value.substr(2, value.length - 3) - try{ - var b = encode_bytestring(value) - }catch(err){ - raise_syntax_error(this, - 'bytes can only contain ASCII literal characters') - } - this.value = this.value.concat(b) - } -} - -var encode_bytestring = $B.encode_bytestring = function(s){ - s = s.replace(/\\t/g, '\t') - .replace(/\\n/g, '\n') - .replace(/\\r/g, '\r') - .replace(/\\f/g, '\f') - .replace(/\\v/g, '\v') - .replace(/\\\\/g, '\\') - var t = [] - for(var i = 0, len = s.length; i < len; i++){ - var cp = s.codePointAt(i) - if(cp > 255){ - throw Error() - } - t.push(cp) - } - return t -} - -StringCtx.prototype.ast = function(){ - var value = this.value - if(this.is_bytes){ - value = _b_.bytes.$factory(this.value) - } - var ast_obj = new ast.Constant(value) - set_position(ast_obj, this.position) - return ast_obj -} - -StringCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case '[': - return new AbstractExprCtx(new SubscripCtx(context.parent), - false) - case '(': - // Strings are not callable. We replace the string by a - // call to an object that will raise the correct exception - context.parent.tree[0] = context - return new CallCtx(context.parent) - case 'str': - if((this.is_bytes && ! value.startsWith('b')) || - (! 
this.is_bytes && value.startsWith('b'))){ - raise_syntax_error(context, - "cannot mix bytes and nonbytes literals") - } - context.add_value(value) - return context - case 'JoinedStr': - // replace by a new JoinedStr where the first value is this - context.parent.tree.pop() - var fstring = new FStringCtx(context.parent, value) - new StringCtx(fstring, fstring.quotes + this.value + fstring.quotes) - /* - if(typeof joined_str.tree[0].value == "string"){ - joined_str.tree[0].value = this.value + ' + ' + joined_str.tree[0].value - }else{ - joined_str.tree.splice(0, 0, this) - } - */ - return fstring - } - return transition(context.parent, token, value) -} - -var SubscripCtx = $B.parser.SubscripCtx = function(context){ - // Class for subscription or slicing, eg x in t[x] - this.type = 'sub' - this.func = 'getitem' // set to 'setitem' if assignment - this.value = context.tree[0] - this.position = $token.value // this.value.position - context.tree.pop() - context.tree[context.tree.length] = this - this.parent = context - this.tree = [] -} - -SubscripCtx.prototype.ast = function(){ - var slice - if(this.tree.length > 1){ - var slice_items = this.tree.map(x => x.ast()) - slice = new ast.Tuple(slice_items) - }else{ - slice = this.tree[0].ast() - } - set_position(slice, this.position, this.end_position) - slice.ctx = new ast.Load() - var value = this.value.ast() - if(value.ctx){ - value.ctx = new ast.Load() - } - var ast_obj = new ast.Subscript(value, slice, new ast.Load()) - ast_obj.lineno = value.lineno - ast_obj.col_offset = value.col_offset - ast_obj.end_lineno = slice.end_lineno - ast_obj.end_col_offset = slice.end_col_offset - return ast_obj -} - -SubscripCtx.prototype.transition = function(token, value){ - var context = this - // subscription x[a] or slicing x[a:b:c] - switch(token) { - case 'id': - case 'imaginary': - case 'int': - case 'float': - case 'str': - case 'JoinedStr': - case 'bytes': - case '[': - case '(': - case '{': - case '.': - case 'not': - case 'lambda': - var expr = new AbstractExprCtx(context,false) - return transition(expr, token, value) - case ']': - context.end_position = $token.value - if(context.parent.packed){ - return context.parent - } - if(context.tree[0].tree.length > 0){ - return context.parent - } - break - case ':': - return new AbstractExprCtx(new SliceCtx(context), false) - case ',': - return new AbstractExprCtx(context, false) - } - raise_syntax_error(context) -} - -var TargetListCtx = $B.parser.TargetListCtx = function(context){ - // Class for target of "for" in loops or comprehensions, - // eg x in "for target_list in A" - this.type = 'target_list' - this.parent = context - this.tree = [] - this.position = $token.value - this.expect = 'id' - this.nb_packed = 0 - context.tree[context.tree.length] = this -} - -TargetListCtx.prototype.ast = function(){ - if(this.tree.length == 1 && ! 
this.implicit_tuple){ - let item = this.tree[0].ast() - item.ctx = new ast.Store() - if(item instanceof ast.Tuple){ - for(var target of item.elts){ - target.ctx = new ast.Store() - } - } - return item - }else{ - let items = [] - for(let item of this.tree){ - item = item.ast() - if(item.hasOwnProperty('ctx')){ - item.ctx = new ast.Store() - } - items.push(item) - } - var ast_obj = new ast.Tuple(items, new ast.Store()) - set_position(ast_obj, this.position) - return ast_obj - } -} - -TargetListCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case 'id': - if(context.expect == 'id'){ - context.expect = ',' - return new IdCtx( - new ExprCtx(context, 'target', false), - value) - } - break - case 'op': - if(context.expect == 'id' && value == '*'){ - // form "for a, *b in X" - this.nb_packed++ - context.expect = ',' - return new AbstractExprCtx( - new StarredCtx(context), false) - } - break - case '(': - case '[': - if(context.expect == 'id'){ - context.expect = ',' - return new ListOrTupleCtx(context, - token == '(' ? 'tuple' : 'list') - } - break - case ')': - case ']': - if(context.expect == ','){ - return context.parent - } - break - case ',': - if(context.expect == ','){ - context.expect = 'id' - context.implicit_tuple = true - return context - } - } - - if(context.expect == ',') { - return transition(context.parent, token, value) - }else if(token == 'in'){ - // Support syntax "for x, in ..." - return transition(context.parent, token, value) - } - console.log('unexpected token for target list', token, value) - console.log(context) - raise_syntax_error(context) -} - -var TernaryCtx = $B.parser.TernaryCtx = function(context){ - // Class for the ternary operator : "x if C else y" - // "context" represents the expression "x" - // Replace it by an expression holding the ternary - this.type = 'ternary' - this.position = context.position - context.parent.tree.pop() - var expr = new ExprCtx(context.parent, 'ternary', false) - expr.tree.push(this) - this.parent = expr - this.tree = [context] - context.parent = this -} - -TernaryCtx.prototype.ast = function(){ - // ast.IfExp(test, body, orelse) - var ast_obj = new ast.IfExp(this.tree[1].ast(), - this.tree[0].ast(), - this.tree[2].ast()) - set_position(ast_obj, this.position) - return ast_obj -} - -TernaryCtx.prototype.transition = function(token, value){ - var context = this - if(token == 'else'){ - context.in_else = true - return new AbstractExprCtx(context, false) - }else if(! context.in_else){ - if(token == ':'){ - raise_syntax_error(context) - } - raise_syntax_error_known_range(context, - context.position, - last_position(context), - "expected 'else' after 'if' expression") - }else if(token == ","){ - // eg x = a if b else c, 2, 3 - if(["assign", "augm_assign", "node", "return"]. 
- indexOf(context.parent.type) > -1){ - context.parent.tree.pop() - var t = new ListOrTupleCtx(context.parent, 'tuple') - t.implicit = true - t.tree[0] = context - context.parent = t - t.expect = "id" - return t - } - } - return transition(context.parent, token, value) -} - -var TryCtx = $B.parser.TryCtx = function(context){ - // Class for the keyword "try" - this.type = 'try' - this.parent = context - this.position = $token.value - context.tree[context.tree.length] = this -} - -TryCtx.prototype.ast = function(){ - // Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) - var node = this.parent.node, - res = { - body: ast_body(this.parent), - handlers: [], - orelse: [], - finalbody: [] - } - var rank = node.parent.children.indexOf(node) - for(var child of node.parent.children.slice(rank + 1)){ - var t = child.context.tree[0], - type = t.type - if(type == 'single_kw'){ - type = t.token - } - if(type == 'except'){ - res.handlers.push(t.ast()) - }else if(type == 'else'){ - res.orelse = ast_body(child.context) - }else if(type == 'finally'){ - res.finalbody = ast_body(child.context) - }else{ - break - } - } - if(res.handlers.length == 0 && - res.finalbody.length == 0){ - raise_syntax_error(this, "expected 'except' or 'finally' block") - } - var klass = this.parent.is_trystar ? ast.TryStar : ast.Try - res = new klass(res.body, res.handlers, res.orelse, res.finalbody) - set_position(res, this.position) - return res -} - -TryCtx.prototype.transition = function(token){ - var context = this - if(token == ':'){ - return BodyCtx(context) - } - raise_syntax_error(context, "expected ':'") -} - -var TypeAliasCtx = $B.parser.TypeAlias = function(context, value){ - // used for "type " - // context is the id "type"; context.parent is an ExprCtx - context.parent.parent.tree = [this] - this.parent = context.parent.parent - this.name = value - this.expect = '=' - this.tree = [] - this.position = $token.value -} - -TypeAliasCtx.prototype.transition = function(token, value){ - var context = this - - if(context.expect == '='){ - if(token == '['){ - if(this.tree.length > 0){ - raise_syntax_error(context) - } - return new TypeParamsCtx(context) - }else if(token == '='){ - context.has_value = true - return new AbstractExprCtx(context, false) - }else if(token == 'eol'){ - if(! 
context.has_value || - this.tree.length !== 1 || - this.tree[0] instanceof AbstractExprCtx){ - raise_syntax_error(context) - } - return transition(context.parent, token, value) - } - } - raise_syntax_error(context) -} - -TypeAliasCtx.prototype.ast = function(){ - var name = new ast.Name(this.name), - params, - value = this.tree[0].ast() - if(this.type_params){ - params = this.type_params.ast() - } - var ast_obj = new ast.TypeAlias(name, params, value) - set_position(ast_obj, this.position) - return ast_obj -} - -var TypeParamsCtx = $B.parser.TypeParamsCtx = function(context){ - this.type = 'type_params' - this.parent = context - context.type_params = this - this.tree = [] - this.expect = 'param' -} - -TypeParamsCtx.prototype.check_duplicate = function(name){ - // check duplicate names - for(var item of this.tree){ - if(item.name == name){ - raise_syntax_error(this, `duplicate type parameter '${name}'`) - } - } -} - -TypeParamsCtx.prototype.transition = function(token, value){ - var context = this - if(context.expect == 'param'){ - if(token == 'id'){ - context.check_duplicate(value) - context.expect = ',' - return new TypeVarCtx(context, value) - }else if(token == 'op'){ - if(value == '*'){ - context.expect = ',' - return new TypeVarTupleCtx(context) - }else if(value == '**'){ - context.expect = ',' - return new TypeParamSpecCtx(context) - } - }else if(token == ']'){ - return context.parent - } - raise_syntax_error(context) - }else if(context.expect == ','){ - if(token == ','){ - context.expect = 'param' - return context - }else if(token == ']'){ - return context.parent - } - raise_syntax_error(context) - } - raise_syntax_error(context) -} - -TypeParamsCtx.prototype.ast = function(){ - return this.tree.map(x => x.ast()) -} - -var TypeVarCtx = $B.parser.TypeVarCtx = function(context, name){ - this.name = name - this.parent = context - context.tree.push(this) - this.tree = [] - this.position = $token.value -} - -TypeVarCtx.prototype.transition = function(token, value){ - var context = this - if(token == ':'){ - return new AbstractExprCtx(context, false) - } - return transition(this.parent, token, value) -} - -TypeVarCtx.prototype.ast = function(){ - var name = this.name, - bound - if(this.tree.length > 0){ - bound = this.tree[0].ast() - } - var ast_obj = new ast.TypeVar(name, bound) - set_position(ast_obj, this.position) - return ast_obj -} - -var TypeParamSpecCtx = $B.parser.TypeParamSpecCtx = function(context){ - this.parent = context - context.tree.push(this) - this.tree = [] - this.position = $token.value -} - -TypeParamSpecCtx.prototype.transition = function(token, value){ - var context = this - if(token == 'id'){ - if(context.name){ - raise_syntax_error(context) - } - context.parent.check_duplicate(value) - context.name = value - return context - }else if(token == ':'){ - if(! 
context.name){ - raise_syntax_error(context) - } - this.has_colon = true - return new AbstractExprCtx(context, false) - }else if(this.has_colon){ - var msg - if(this.tree[0].name == 'tuple'){ - msg = 'cannot use constraints with ParamSpec' - }else{ - msg = 'cannot use bound with ParamSpec' - } - raise_syntax_error_known_range(context, this.position, $token.value, msg) - } - return transition(this.parent, token, value) -} - -TypeParamSpecCtx.prototype.ast = function(){ - var name = new ast.Name(this.name) - var ast_obj = new ast.ParamSpec(name) - set_position(ast_obj, this.position) - return ast_obj -} - -var TypeVarTupleCtx = $B.parser.TypeVarTupleCtx = function(context){ - this.parent = context - context.tree.push(this) - this.tree = [] - this.position = $token.value -} - -TypeVarTupleCtx.prototype.transition = function(token, value){ - var context = this - if(token == 'id'){ - if(context.name){ - raise_syntax_error(context) - } - context.parent.check_duplicate(value) - context.name = value - return context - }else if(token == ':'){ - if(! context.name){ - raise_syntax_error(context) - } - this.has_colon = true - return new AbstractExprCtx(context, false) - }else if(this.has_colon){ - var msg - if(this.tree[0].name == 'tuple'){ - msg = 'cannot use constraints with TypeVarTuple' - }else{ - msg = 'cannot use bound with TypeVarTuple' - } - raise_syntax_error_known_range(context, this.position, $token.value, msg) - } - return transition(this.parent, token, value) -} - -TypeVarTupleCtx.prototype.ast = function(){ - var name = new ast.Name(this.name) - var ast_obj = new ast.TypeVarTuple(name) - set_position(ast_obj, this.position) - return ast_obj -} - -var UnaryCtx = $B.parser.UnaryCtx = function(context, op){ - // Class for unary operators : - and ~ - this.type = 'unary' - this.op = op - this.parent = context - this.tree = [] - this.position = $token.value - context.tree.push(this) -} - -UnaryCtx.prototype.ast = function(){ - var op = {'+': ast.UAdd, '-': ast.USub, '~': ast.Invert}[this.op], - ast_obj = new ast.UnaryOp(new op(), this.tree[0].ast()) - set_position(ast_obj, this.position) - return ast_obj -} - -UnaryCtx.prototype.transition = function(token, value){ - var context = this - switch(token) { - case 'op': - if('+' == value || '-' == value){ - if(context.op === value){ - context.op = '+' - }else{ - context.op = '-' - } - return context - } - break - case 'int': - case 'float': - case 'imaginary': - if(context.parent.type == "starred"){ - raise_syntax_error(context, - "can't use starred expression here") - } - var res = new NumberCtx(token, context, value) - return res - case 'id': - return transition(new AbstractExprCtx(context, false), - token, value) - } - if(this.tree.length == 0 || this.tree[0].type == 'abstract_expr'){ - raise_syntax_error(context) - } - return transition(context.parent, token, value) -} - -var WithCtx = $B.parser.WithCtx = function(context){ - // Class for keyword "with" - this.type = 'with' - this.parent = context - this.position = $token.value - context.tree[context.tree.length] = this - this.tree = [] - this.expect = 'expr' - this.scope = get_scope(this) -} - -WithCtx.prototype.ast = function(){ - // With(withitem* items, stmt* body, string? type_comment) - // items is a list of withitem nodes representing the context managers - // ast.withitem(context_expr, optional_vars) - // context_expr is the context manager, often a Call node. 
- // optional_vars is a Name, Tuple or List for the "as foo part", or None - var withitems = [] - for(var withitem of this.tree){ - withitems.push(withitem.ast()) - } - var klass = this.async ? ast.AsyncWith : ast.With - var ast_obj = new klass(withitems, ast_body(this.parent)) - set_position(ast_obj, - this.async ? this.async.position : this.position, - last_position(this)) - return ast_obj -} - -WithCtx.prototype.transition = function(token, value){ - var context = this - function check_last(){ - var last = $B.last(context.tree) - if(last.tree.length > 1){ - var alias = last.tree[1] - if(alias.tree.length == 0){ - raise_syntax_error(context, "expected ':'") - } - check_assignment(alias) - } - } - switch(token) { - case '(': - case '[': - if(this.expect == 'expr' && this.tree.length == 0){ - // start a parenthesized list of managers - context.parenth = token - return context - }else{ - raise_syntax_error(context) - } - break - case 'id': - case 'lambda': - if(context.expect == 'expr'){ - // start withitem - context.expect = ',' - return transition( - new AbstractExprCtx(new withitem(context), false), token, - value) - } - raise_syntax_error(context) - break - case ':': - if((! context.parenth) || context.parenth == 'implicit'){ - check_last() - } - return BodyCtx(context) - case ')': - case ']': - if(context.parenth == opening[token]){ - if(context.expect == ',' || context.expect == 'expr') { - check_last() - context.expect = ':' - return context - } - } - break - case ',': - if(context.expect == ','){ - if(! context.parenth){ - context.parenth = 'implicit' - } - check_last() - context.expect = 'expr' - return context - } - break - case 'eol': - raise_syntax_error(context, "expected ':'") - } - raise_syntax_error(context) -} - -WithCtx.prototype.set_alias = function(ctx){ - var ids = [] - if(ctx.type == "id"){ - ids = [ctx] - }else if(ctx.type == "list_or_tuple"){ - // Form "with manager as (x, y)" - for(var expr of ctx.tree){ - if(expr.type == "expr" && expr.tree[0].type == "id"){ - ids.push(expr.tree[0]) - } - } - } -} - -var withitem = function(context){ - this.type = 'withitem' - this.parent = context - context.tree.push(this) - this.tree = [] - this.expect = 'as' - this.position = $token.value -} - -withitem.prototype.ast = function(){ - var ast_obj = new ast.withitem(this.tree[0].ast()) - if(this.tree[1]){ - ast_obj.optional_vars = this.tree[1].tree[0].ast() - if(ast_obj.optional_vars.elts){ - for(var elt of ast_obj.optional_vars.elts){ - elt.ctx = new ast.Store() - } - }else{ - ast_obj.optional_vars.ctx = new ast.Store() - } - } - set_position(ast_obj, this.position) - return ast_obj -} - -withitem.prototype.transition = function(token, value){ - var context = this - if(token == 'as' && context.expect == 'as'){ - context.expect = 'star_target' - return new AbstractExprCtx(context, false) - }else{ - return transition(context.parent, token, value) - } -} - -var YieldCtx = $B.parser.YieldCtx = function(context, is_await){ - // Class for keyword "yield" - this.type = 'yield' - this.parent = context - this.tree = [] - this.is_await = is_await - this.position = $token.value - context.tree[context.tree.length] = this - - if(context.type == "list_or_tuple" && context.tree.length > 1){ - raise_syntax_error(context, "(non-parenthesized yield)") - } - - if(parent_match(context, {type: "annotation"})){ - raise_syntax_error(context, "'yield' outside function") - } - - /* Strangely, the control that the "yield" is inside a function is done - after parsing the whole program. 
- For instance, the code - - {(yield 1)} - a b c - - raises - - a b c - ^ - SyntaxError: invalid syntax - - and not the arguably more expected - - {(yield 1)} - ^ - SyntaxError: 'yield' outside function - - The "yield" is stored in attribute "yields_func_check" of the root node - */ - - var root = get_module(this) - - root.yields_func_check = root.yields_func_check || [] - root.yields_func_check.push(this) - - var scope = this.scope = get_scope(this, true), - node = get_node(this) - - node.has_yield = this - - // yield inside a comprehension ? - var in_comp = parent_match(this, {type: "comprehension"}) - if(get_scope(this).id.startsWith("lc" + $B.lambda_magic)){ - delete node.has_yield - } - - if(in_comp){ - var outermost_expr = in_comp.tree[0].tree[1] - // In a comprehension, "yield" is only allowed in the outermost - // expression - parent = context - while(parent){ - if(parent === outermost_expr){ - break - } - parent = parent.parent - } - if(! parent){ - raise_syntax_error(context, "'yield' inside list comprehension") - } - } - - var in_lambda = false, - parent = context - while(parent){ - if(parent.type == "lambda"){ - in_lambda = true - this.in_lambda = true - break - } - parent = parent.parent - } - - parent = node.parent - while(parent){ - if(parent.context && parent.context.tree.length > 0 && - parent.context.tree[0].type == "with"){ - scope.context.tree[0].$has_yield_in_cm = true - break - } - parent = parent.parent - } - - // Syntax control : 'yield' can start a 'yield expression' - if(! in_lambda){ - switch(context.type) { - case 'node': - case 'assign': - case 'list_or_tuple': - break - default: - // else it is a SyntaxError - raise_syntax_error(context, '(non-parenthesized yield)') - } - } - -} - -YieldCtx.prototype.ast = function(){ - // ast.Yield(value) - // ast.YieldFrom(value) - var ast_obj - if(this.from){ - ast_obj = new ast.YieldFrom(this.tree[0].ast()) - }else if(this.tree.length == 1){ - ast_obj = new ast.Yield(this.tree[0].ast()) - }else{ - ast_obj = new ast.Yield() - } - set_position(ast_obj, this.position) - return ast_obj -} - -YieldCtx.prototype.transition = function(token){ - var context = this - if(token == 'from'){ // form "yield from " - if(context.tree[0].type != 'abstract_expr'){ - // 'from' must follow 'yield' immediately - raise_syntax_error(context, "('from' must follow 'yield')") - } - - context.from = true - context.from_num = $B.UUID() - return context.tree[0] - }else{ - remove_abstract_expr(context.tree) - if(context.from && context.tree.length == 0){ - raise_syntax_error(context) - } - } - return transition(context.parent, token) -} - -YieldCtx.prototype.check_in_function = function(){ - if(this.in_lambda){ - return - } - var scope = get_scope(this), - in_func = scope.is_function, - func_scope = scope - if(! in_func && scope.comprehension){ - var parent = scope.parent_block - while(parent.comprehension){ - parent = parent.parent_block - } - in_func = parent.is_function - func_scope = parent - } - if(in_func){ - var def = func_scope.context.tree[0] - if(! 
this.is_await){ - def.type = 'generator' + }else if(line.endsWith('}')){ + line = line.substr(0, line.length - 1) + add_closing_brace = true } - } -} - -function parent_match(ctx, obj){ - // If any of context's parents has the same properties as obj, - // return this parent; else return false - var flag - while(ctx.parent){ - flag = true - for(var attr in obj){ - if(ctx.parent[attr] != obj[attr]){ - flag = false - break + if(level < 0){ + if($B.get_option('debug') > 2){ + console.log('wrong js indent') + console.log(res) } + level = 0 + } + try{ + res += (add_spaces ? indentation.repeat(level) : '') + line + '\n' + }catch(err){ + console.log(res) + throw err } - if(flag){ - return ctx.parent + if(line.endsWith('{')){ + level++ + }else if(add_closing_brace){ + level-- + if(level < 0){ + level = 0 + } + try{ + res += indentation.repeat(level) + '}\n' + }catch(err){ + console.log(res) + throw err + } } - ctx = ctx.parent + last_is_backslash = line.endsWith('\\') + last_is_var_and_comma = line.endsWith(',') && + (line.startsWith('var ') || last_is_var_and_comma) } - return false + return res } -var get_previous = $B.parser.get_previous = function(context){ - var previous = context.node.parent.children[ - context.node.parent.children.length - 2] - if(!previous || !previous.context){ - raise_syntax_error(context, '(keyword not following correct keyword)') - } - return previous.context.tree[0] -} -var get_docstring = $B.parser.get_docstring = function(node){ +function get_docstring(node){ var doc_string = _b_.None if(node.body.length > 0){ var firstchild = node.body[0] @@ -7815,109 +187,6 @@ var get_docstring = $B.parser.get_docstring = function(node){ return doc_string } -var get_scope = $B.parser.get_scope = function(context){ - // Return the instance of $Node indicating the scope of context - // Return null for the root node - var ctx_node = context.parent - while(true){ - if(ctx_node.type === 'node'){ - break - }else if(ctx_node.comprehension){ - return ctx_node - } - ctx_node = ctx_node.parent - } - var tree_node = ctx_node.node, - scope = null - while(tree_node.parent && tree_node.parent.type !== 'module'){ - var ntype = tree_node.parent.context.tree[0].type - - switch (ntype) { - case 'def': - case 'class': - case 'generator': - scope = tree_node.parent - scope.ntype = ntype - scope.is_function = ntype != 'class' - return scope - } - tree_node = tree_node.parent - } - scope = tree_node.parent || tree_node // module - scope.ntype = "module" - return scope -} - -var get_module = $B.parser.get_module = function(context){ - // Return the instance of $Node for the module where context - // is defined - var ctx_node = context instanceof NodeCtx ? 
context : context.parent - while(ctx_node.type !== 'node'){ctx_node = ctx_node.parent} - var tree_node = ctx_node.node - if(tree_node.ntype == "module"){ - return tree_node - } - var scope = null - while(tree_node.parent.type != 'module'){ - tree_node = tree_node.parent - } - scope = tree_node.parent // module - scope.ntype = "module" - return scope -} - -var get_node = $B.parser.get_node = function(context){ - var ctx = context - while(ctx.parent){ - ctx = ctx.parent - } - return ctx.node -} - -var mangle_name = $B.parser.mangle_name = function(name, context){ - // If name starts with __ and doesn't end with __, and if it is defined - // in a class, "mangle" it, ie preprend _ - if(name.substr(0, 2) == "__" && name.substr(name.length - 2) !== "__"){ - var scope = get_scope(context) - while(true){ - if(scope.ntype == "module"){ - return name - }else if(scope.ntype == "class"){ - var class_name = scope.context.tree[0].name - while(class_name.charAt(0) == '_'){ - class_name = class_name.substr(1) - } - return '_' + class_name + name - }else{ - if(scope.parent && scope.parent.context){ - scope = get_scope(scope.context.tree[0]) - }else{ - return name - } - } - } - }else{ - return name - } -} - -// Function called in function $tokenize for each token found in the -// Python source code - -$B.nb_debug_lines = 0 - -var transition = $B.parser.transition = function(context, token, value){ - if($B.nb_debug_lines > 100){ - alert('too many debug lines') - $B.nb_debug_lines = 0 - } - if($B.track_transitions){ - console.log("context", context, "token", token, value) // , '\n pos', $token.value) - $B.nb_debug_lines++ - } - return context.transition(token, value) -} - var s_escaped = 'abfnrtvxuU"0123456789' + "'" + '\\', is_escaped = {} for(var i = 0; i < s_escaped.length; i++){ @@ -7932,228 +201,6 @@ function SurrogatePair(value){ String.fromCharCode(0xDC00 | (value & 0x3FF)) } -function test_num(num_lit){ - var len = num_lit.length, - pos = 0, - char, - elt = null, - subtypes = {b: 'binary', o: 'octal', x: 'hexadecimal'}, - digits_re = /[_\d]/ - - function error(message){ - throw SyntaxError(message) - } - function check(elt){ - if(elt.value.length == 0){ - let t = subtypes[elt.subtype] || 'decimal' - error("invalid " + t + " literal") - }else if(elt.value[elt.value.length - 1].match(/[-+_]/)){ - let t = subtypes[elt.subtype] || 'decimal' - error("invalid " + t + " literal") - }else{ - // remove underscores - elt.value = elt.value.replace(/_/g, "") - // set length - elt.length = pos - return elt - } - } - - while(pos < len){ - char = num_lit[pos] - if(char.match(digits_re)){ - if(elt === null){ - elt = {value: char} - }else{ - if(char == '_' && elt.value.match(/[._+-]$/)){ - // consecutive underscores - error('consecutive _ at ' + pos) - }else if(char == '_' && elt.subtype == 'float' && - elt.value.match(/e$/i)){ - // consecutive underscores - error('syntax error') - }else if(elt.subtype == 'b' && !(char.match(/[01_]/))){ - error(`invalid digit '${char}' in binary literal`) - }else if(elt.subtype == 'o' && !(char.match(/[0-7_]/))){ - error(`invalid digit '${char}' in octal literal`) - }else if(elt.subtype === undefined && elt.value.startsWith("0") && - !char.match(/[0_]/)){ - error("leading zeros in decimal integer literals are not" + - " permitted; use an 0o prefix for octal integers") - } - elt.value += char - } - pos++ - }else if(char.match(/[oxb]/i)){ - if(elt.value == "0"){ - elt.subtype = char.toLowerCase() - if(elt.subtype == "x"){ - digits_re = /[_\da-fA-F]/ - } - elt.value = '' - pos++ - }else{ 
- error("invalid char " + char) - } - }else if(char == '.'){ - if(elt === null){ - error("invalid char in " + num_lit + " pos " + pos + ": " + char) - }else if(elt.subtype === undefined){ - elt.subtype = "float" - if(elt.value.endsWith('_')){ - error("invalid decimal literal") - } - elt.value = elt.value.replace(/_/g, "") + char - pos++ - }else{ - return check(elt) - } - }else if(char.match(/e/i)){ - if(num_lit[pos + 1] === undefined){ - error("nothing after e") - }else if(elt && subtypes[elt.subtype] !== undefined){ - // 0b01e5 is invalid - error("syntax error") - }else if(elt && elt.value.endsWith('_')){ - // 1_e2 is invalid - error("syntax error") - }else if(num_lit[pos + 1].match(/[+\-0-9_]/)){ - if(elt && elt.value){ - if(elt.exp){ - elt.length = pos - return elt - } - elt.subtype = 'float' - elt.value += char - elt.exp = true - pos++ - }else{ - error("unexpected e") - } - }else{ - return check(elt) - } - }else if(char.match(/[+-]/i)){ - if(elt === null){ - elt = {value: char} - pos++ - }else if(elt.value.search(/e$/i) > -1){ - elt.value += char - pos++ - }else{ - return check(elt) - } - }else if(char.match(/j/i)){ - if(elt && (! elt.subtype || elt.subtype == "float")){ - elt.imaginary = true - check(elt) - elt.length++ // for "j" - return elt - }else{ - error("invalid syntax") - } - }else{ - break - } - } - return check(elt) -} - -var opening = {')': '(', '}': '{', ']': '['} - -function line_ends_with_colon(token_reader){ - var braces = [] - token_reader.position-- - while(true){ - var token = token_reader.read() - if(! token){ - return false - } - if(token.type == 'OP' && token.string == ':' && braces.length == 0){ - return true - }else if(token.type == 'OP'){ - if('([{'.indexOf(token.string) > -1){ - braces.push(token) - }else if(')]}'.indexOf(token.string) > -1){ - if(braces.length == 0){ - let err = SyntaxError( - `unmatched '${token.string}'`) - err.offset = token.start[1] - throw err - }else if($B.last(braces).string != opening[token.string]){ - let err = SyntaxError("closing parenthesis " + - `'${token.string}' does not match opening ` + - `parenthesis '${$B.last(braces).string}'`) - err.offset = token.start[1] - throw err - }else{ - braces.pop() - } - } - }else if(token.type == 'NEWLINE'){ - return false - } - } - return false -} - -function prepare_number(n){ - // n is a numeric literal - // return an object {type: , value} - if(n.startsWith('.')){ - if(n.endsWith("j")){ - return {type: 'imaginary', - value: prepare_number(n.substr(0, n.length - 1))} - }else{ - return {type: 'float', value: n.replace(/_/g, '')} - } - }else if(n.startsWith('0') && n != '0'){ - // octal, hexadecimal, binary - let num = test_num(n), - base - if(num.imaginary){ - return {type: 'imaginary', value: prepare_number(num.value)} - } - if(num.subtype == 'float'){ - return {type: num.subtype, value: num.value} - } - if(num.subtype === undefined){ - base = 10 - }else{ - base = {'b': 2, 'o': 8, 'x': 16}[num.subtype] - } - if(base !== undefined){ - return{type: 'int', value: [base, num.value]} - } - }else{ - let num = test_num(n) - if(num.subtype == "float"){ - if(num.imaginary){ - return { - type: 'imaginary', - value: prepare_number(num.value) - } - }else{ - return { - type: 'float', - value: num.value - } - } - }else{ - if(num.imaginary){ - return { - type: 'imaginary', - value: prepare_number(num.value) - } - }else{ - return { - type: 'int', - value: [10, num.value] - } - } - } - } -} function test_escape(text, antislash_pos){ // Test if the escape sequence starting at position "antislah_pos" in text 
@@ -8217,231 +264,7 @@ function test_escape(text, antislash_pos){ } } -$B.test_escape = test_escape - -function prepare_string(context, s){ - var len = s.length, - pos = 0, - string_modifier, - _type = "string" - - let quote; - let inner; - - while(pos < len){ - if(s[pos] == '"' || s[pos] == "'"){ - quote = s[pos] - string_modifier = s.substr(0, pos) - if(s.substr(pos, 3) == quote.repeat(3)){ - _type = "triple_string" - inner = s.substring(pos + 3, s.length - 3) - }else{ - inner = s.substring(pos + quote.length, - len - quote.length) - } - break - } - pos++ - } - var result = {quote} - var mods = {r: 'raw', f: 'fstring', b: 'bytes'} - for(var mod of string_modifier){ - result[mods[mod]] = true - } - - var raw = context.type == 'str' && context.raw, - bytes = false, - fstring = false, - end = null - - if(string_modifier){ - switch(string_modifier) { - case 'r': // raw string - raw = true - break - case 'u': - // in string literals, '\U' and '\u' escapes in raw strings - // are not treated specially. - break - case 'b': - bytes = true - break - case 'rb': - case 'br': - bytes = true - raw = true - break - case 'f': - fstring = true - break - case 'fr': - case 'rf': - fstring = true - raw = true - break - } - string_modifier = false - } - - var escaped = false, - zone = '', - src = inner - - end = 0 - - while(end < src.length){ - if(escaped){ - if(src.charAt(end) == "a" && ! raw){ - zone = zone.substr(0, zone.length - 1) + "\u0007" - }else{ - zone += src.charAt(end) - if(raw && src.charAt(end) == '\\'){ - zone += '\\' - } - } - escaped = false - end++ - }else if(src.charAt(end) == "\\"){ - if(raw){ - if(end < src.length - 1 && - src.charAt(end + 1) == quote){ - zone += '\\\\' + quote - end += 2 - }else{ - zone += '\\\\' - end++ - } - escaped = true - }else{ - if(src.charAt(end + 1) == '\n'){ - // explicit line joining inside strings - end += 2 - }else if(src.substr(end + 1, 2) == 'N{'){ - // Unicode literal ? 
- let end_lit = end + 3, - re = new RegExp("[-a-zA-Z0-9 ]+"), - search = re.exec(src.substr(end_lit)) - if(search === null){ - raise_syntax_error(context," (unicode error) " + - "malformed \\N character escape", pos) - } - end_lit = end_lit + search[0].length - if(src.charAt(end_lit) != "}"){ - raise_syntax_error(context, " (unicode error) " + - "malformed \\N character escape") - } - var description = search[0].toUpperCase() - // Load unicode table if not already loaded - if($B.unicodedb === undefined){ - var xhr = new XMLHttpRequest() - xhr.open("GET", - $B.brython_path + "unicode.txt", false) - xhr.onreadystatechange = function(){ - if(this.readyState == 4){ - if(this.status == 200){ - $B.unicodedb = this.responseText - }else{ - console.log("Warning - could not " + - "load unicode.txt") - } - } - } - xhr.send() - } - if($B.unicodedb !== undefined){ - let re = new RegExp("^([0-9A-F]+);" + - description + ";.*$", "m") - search = re.exec($B.unicodedb) - if(search === null){ - raise_syntax_error(context, " (unicode error) " + - "unknown Unicode character name") - } - var cp = parseInt(search[1], 16) // code point - zone += String.fromCodePoint(cp) - end = end_lit + 1 - }else{ - end++ - } - }else{ - try{ - var esc = test_escape(src, end) - }catch(err){ - raise_syntax_error(context, err.message) - } - if(esc){ - if(esc[0] == '\\'){ - zone += '\\\\' - }else{ - zone += esc[0] - } - end += esc[1] - }else{ - if(end < src.length - 1 && - is_escaped[src.charAt(end + 1)] === undefined){ - zone += '\\' - } - zone += '\\' - escaped = true - end++ - } - } - } - }else if(src.charAt(end) == '\n' && _type != 'triple_string'){ - // In a string with single quotes, line feed not following - // a backslash raises SyntaxError - raise_syntax_error(context, "EOL while scanning string literal") - }else{ - zone += src.charAt(end) - end++ - } - } - - var $string = zone, - string = '' - - // Escape quotes inside string, except if they are - // already escaped. - // In raw mode, always escape. - for(var i = 0; i < $string.length; i++){ - var $car = $string.charAt(i) - if($car == quote){ - if(raw || (i == 0 || - $string.charAt(i - 1) != '\\')){ - string += '\\' - }else if(_type == "triple_string"){ - // Unescaped quotes in triple string are allowed - var j = i - 1 - while($string.charAt(j) == '\\'){ - j-- - } - if((i - j - 1) % 2 == 0){ - string += '\\' - } - } - } - string += $car - } - - if(fstring){ - try{ - let re = new RegExp("\\\\" + quote, "g"), - string_no_bs = string.replace(re, quote) - var elts = $B.parse_fstring(string_no_bs) // in py_string.js - }catch(err){ - raise_syntax_error(context, err.message) - } - } - - if(bytes){ - result.value = 'b' + quote + string + quote - }else if(fstring){ - result.value = elts - }else{ - result.value = quote + string + quote - } - context.raw = raw; - return result -} +$B.test_escape = test_escape // used in libs/_python_re.js function unindent(src){ // Brython supports scripts that don't start at column 0 @@ -8451,404 +274,37 @@ function unindent(src){ line, global_indent, indent, + first, unindented_lines = [] - for(var line_num = 0, len = lines.length; line_num < len; line_num++){ - line = lines[line_num] - indent = line.match(/^\s*/)[0] - if(indent != line){ // non whitespace-only line - if(global_indent === undefined){ - // The indentation of the first non-whitespace line sets the - // "global indentation" for the whole script. 
- if(indent.length == 0){ - // Return source code unchanged if no global indentation - return src - } - global_indent = indent - var start = global_indent.length - unindented_lines.push(line.substr(start)) - }else if(line.startsWith(global_indent)){ - unindented_lines.push(line.substr(start)) - }else{ - throw SyntaxError("first line starts at " + - `column ${start}, line ${line_num} at column ` + - line.match(/\s*/).length + '\n ' + line) - } - }else{ - unindented_lines.push('') - } - } - return unindented_lines.join('\n') -} - -// This regex should match the one in py_string.js -var unprintable_re = /\p{Cc}|\p{Cf}|\p{Co}|\p{Cs}|\p{Zl}|\p{Zp}|\p{Zs}/u - -function handle_errortoken(context, token, token_reader){ - if(token.string == "'" || token.string == '"'){ - raise_syntax_error(context, 'unterminated string literal ' + - `(detected at line ${token.start[0]})`) - }else if(token.string == '\\'){ - var nxt = token_reader.read() - if((! nxt) || nxt.type == 'NEWLINE'){ - raise_syntax_error(context, 'unexpected EOF while parsing') - }else{ - raise_syntax_error_known_range(context, - nxt, nxt, - 'unexpected character after line continuation character') + var min_indent + for(var line of lines){ + if(/^\s*$/.exec(line)){ + continue } - }else if(' `$'.indexOf(token.string) == -1){ - var u = _b_.ord(token.string).toString(16).toUpperCase() - u = 'U+' + '0'.repeat(Math.max(0, 4 - u.length)) + u - - let error_message; - if (unprintable_re.test(token.string)) { - error_message = `invalid non-printable character ${u}` - } else { - error_message = `invalid character '${token.string}' (${u})` + indent = line.match(/^\s*/)[0].length + if(indent == 0){ + return src } - raise_syntax_error(context, error_message); - - } - raise_syntax_error(context) -} - -const braces_opener = {")": "(", "]": "[", "}": "{"}, - braces_open = "([{", - braces_closer = {'(': ')', '{': '}', '[': ']'} - -function check_brace_is_closed(brace, reader){ - // check if the brace is closed - var save_reader_pos = reader.position, - closer = braces_closer[brace], - nb_braces = 1 - while(true){ - var tk = reader.read() - if(tk.type == 'OP' && tk.string == brace){ - nb_braces += 1 - }else if(tk.type == 'OP' && tk.string == closer){ - nb_braces -= 1 - if(nb_braces == 0){ - // reset reader to the position after the brace - reader.seek(save_reader_pos) - break - } + if(min_indent === undefined){ + min_indent = indent } - } -} - -var python_keywords = [ - "class", "return", "break", "for", "lambda", "try", "finally", "raise", - "def", "from", "nonlocal", "while", "del", "global", "with", "as", "elif", - "else", "if", "yield", "assert", "import", "except", "raise", "in", - "pass", "with", "continue", "async", "await" -] - -var $token = {} - -var dispatch_tokens = $B.parser.dispatch_tokens = function(root){ - var src = root.src - root.token_reader = new $B.TokenReader(src, root.filename) - var braces_stack = [] - - var unsupported = [] - - var module = root.module - - var lnum = root.line_num === undefined ? 
1 : root.line_num - - var node = new $Node() - node.line_num = lnum - root.add(node) - var context = null, - expect_indent = false, - indent = 0 - - // line2pos maps line numbers to position of first character in line - var line2pos = {0: 0, 1: 0}, - line_num = 1 - for(var pos = 0, len = src.length; pos < len; pos++){ - if(src[pos] == '\n'){ - line_num++ - line2pos[line_num] = pos + 1 + if(indent < min_indent){ + min_indent = indent } } - var token - - while(true){ - try{ - token = root.token_reader.read() - // console.log(token.type, token.string, token.start, token.end) - }catch(err){ - context = context || new NodeCtx(node) - if(err.type == 'IndentationError'){ - raise_indentation_error(context, err.message) - }else if(err instanceof SyntaxError){ - if(braces_stack.length > 0){ - let last_brace = $B.last(braces_stack) - $token.value = last_brace - raise_syntax_error(context, `'${last_brace.string}'` + - ' was never closed') - } - var err_msg = err.message - if(err_msg == 'EOF in multi-line statement'){ - err_msg = 'unexpected EOF while parsing' - } - if(err.lineno){ - raise_error_known_location(_b_.SyntaxError, - root.filename, err.lineno, err.col_offset, - err.end_lineno, err.end_col_offset, err.line, - err.message) - }else{ - raise_syntax_error(context, err_msg) - } - } - throw err - } - if(! token){ - throw Error('token done without ENDMARKER.') - } - $token.value = token - if(token[2] === undefined){ - console.log('token incomplet', token, 'module', module, root) - console.log('src', src) - } - if(token.start === undefined){ - console.log('no start', token) - } - lnum = token.start[0] - //console.log('token', token.type, token.string, 'lnum', lnum, 'context', context) - //console.log('context', context) - if(expect_indent && - ['INDENT', 'COMMENT', 'NL'].indexOf(token.type) == -1){ - context = context || new NodeCtx(node) - raise_indentation_error(context, "expected an indented block", - expect_indent) - } - - switch(token.type){ - case 'ENDMARKER': - // Check that all "yield"s are in a function - if(root.yields_func_check){ - for(const _yield of root.yields_func_check){ - $token.value = _yield.position - _yield.check_in_function() - } - } - if(indent != 0){ - raise_indentation_error(node.context, - 'expected an indented block') - } - if(node.context === undefined || node.context.tree.length == 0){ - node.parent.children.pop() - } - return - case 'ENCODING': - case 'TYPE_COMMENT': - continue - case 'NL': - if((! 
node.context) || node.context.tree.length == 0){ - node.line_num++ - } - continue - case 'COMMENT': - continue - case 'ERRORTOKEN': - context = context || new NodeCtx(node) - if(token.string != ' '){ - handle_errortoken(context, token, root.token_reader) - } - continue - } - // create context if needed - switch(token[0]){ - case 'NAME': - case 'NUMBER': - case 'OP': - case 'STRING': - case 'FSTRING_START': - context = context || new NodeCtx(node) - } - - switch(token[0]){ - case 'NAME': - var name = token[1] - if(python_keywords.indexOf(name) > -1){ - if(unsupported.indexOf(name) > -1){ - raise_syntax_error(context, - "(Unsupported Python keyword '" + name + "')") - } - context = transition(context, name) - }else if(name == 'not'){ - context = transition(context, 'not') - }else if(typeof $operators[name] == 'string'){ - // Literal operators : "and", "or", "is" - context = transition(context, 'op', name) - }else{ - context = transition(context, 'id', name) - } - continue - case 'OP': - var op = token[1] - if((op.length == 1 && '()[]{}.,='.indexOf(op) > -1) || - [':='].indexOf(op) > -1){ - if(braces_open.indexOf(op) > -1){ - braces_stack.push(token) - // check that opening brace is closed later, this - // takes precedence over syntax errors that might - // occur before the closing brace - try{ - check_brace_is_closed(op, root.token_reader) - }catch(err){ - if(err.message == 'EOF in multi-line statement'){ - raise_syntax_error(context, - `'${op}' was never closed`) - }else{ - throw err - } - } - }else if(braces_opener[op]){ - if(braces_stack.length == 0){ - raise_syntax_error(context, "(unmatched '" + op + "')") - }else{ - let last_brace = $B.last(braces_stack) - if(last_brace.string == braces_opener[op]){ - braces_stack.pop() - }else{ - raise_syntax_error(context, - `closing parenthesis '${op}' does not ` + - `match opening parenthesis '` + - `${last_brace.string}'`) - } - } - } - context = transition(context, token[1]) - }else if(op == ':'){ - context = transition(context, ':') - if(context.node && context.node.is_body_node){ - node = context.node - } - }else if(op == '...'){ - context = transition(context, 'ellipsis') - }else if(op == '->'){ - context = transition(context, 'annotation') - }else if(op == ';'){ - if(context.type == 'node' && context.tree.length == 0){ - raise_syntax_error(context, - '(statement cannot start with ;)') - } - // same as NEWLINE - transition(context, 'eol') - let new_node = new $Node() - new_node.line_num = token[2][0] + 1 - context = new NodeCtx(new_node) - node.parent.add(new_node) - node = new_node - }else if($augmented_assigns[op]){ - context = transition(context, 'augm_assign', op) - }else{ - context = transition(context, 'op', op) - } - continue - case 'STRING': - var prepared = prepare_string(context, token[1], token[2]) - if(prepared.value instanceof Array){ - context = transition(context, 'JoinedStr', prepared.value) - }else{ - context = transition(context, 'str', prepared.value) - } - continue - case 'FSTRING_START': - context = transition(context, 'JoinedStr', token[1]) - break - case 'FSTRING_MIDDLE': - context = transition(context, 'middle', token[1]) - break - case 'FSTRING_END': - context = transition(context, 'end', token[1]) - break - case 'NUMBER': - try{ - var prepared_num = prepare_number(token[1]) - }catch(err){ - raise_syntax_error(context, err.message) - } - context = transition(context, prepared_num.type, - prepared_num.value) - continue - case 'NEWLINE': - if(context && context.node && context.node.is_body_node){ - expect_indent = 
context.node.parent - } - context = context || new NodeCtx(node) - transition(context, 'eol') - // Create a new node - var new_node = new $Node() - new_node.line_num = token[2][0] + 1 - if(node.parent.children.length > 0 && - node.parent.children[0].is_body_node){ - node.parent.parent.add(new_node) - }else{ - node.parent.add(new_node) - } - context = new NodeCtx(new_node) - node = new_node - continue - case 'DEDENT': - // The last node was added after a NEWLINE. It was attached - // to the current node's parent. - // Detach it - indent-- - if(! indent_continuation){ - node.parent.children.pop() - // Attach new_node to new "current" - node.parent.parent.add(node) - // redefine context to set locals to bindings of node scope - context = new NodeCtx(node) - } - continue - case 'INDENT': - indent++ - var indent_continuation = false - // Check that it supports indentation - if(! expect_indent){ - if(token.line.trim() == '\\'){ - // Strange special case - // See test_syntax.py/test_empty_line_after_linecont - indent_continuation = true - }else{ - context = context || new NodeCtx(node) - raise_indentation_error(context, 'unexpected indent') - } - } - expect_indent = false - continue + for(var line of lines){ + if(/^\s*$/.exec(line)){ + unindented_lines.push(line) + }else{ + unindented_lines.push(line.substr(min_indent)) } } + return unindented_lines.join('\n') } -var create_root_node = $B.parser.create_root_node = function(src, module, - locals_id, parent_block, line_num){ - var root = new $Node('module') - root.module = module - root.id = locals_id - root.parent_block = parent_block - root.line_num = line_num - root.indent = -1 - root.imports = {} - - if(typeof src == "object"){ - root.is_comp = src.is_comp - root.filename = src.filename - src = src.src - } - - // Normalize line ends - src = src.replace(/\r\n/gm, "\n") - root.src = src - return root -} +var $token = {} $B.parse_time = 0 @@ -8877,36 +333,12 @@ $B.py2js = function(src, module, locals_id, parent_scope){ locals_id = locals_id[0] } - var _ast, - t0 = globalThis.performance.now() + var t0 = globalThis.performance.now() + + // generated PEG parser + var parser = new $B.Parser(src, filename, 'file'), + _ast = $B._PyPegen.run_parser(parser) - if($B.parser_to_ast){ - console.log('use standard parser') - _ast = new $B.Parser(src, filename, 'file').parse() - }else if($B.py_tokens){ - // generated PEG parser - console.log('use generated PEG parser') - var parser = new $B.Parser(src, filename, 'file') - _ast = $B._PyPegen_parse(parser) - console.log('tokens', parser.tokens) - if(_ast === undefined){ - parser = new $B.Parser(src, filename, 'file') - parser.call_invalid_rules = true - $B._PyPegen_parse(parser) - // if invalid rules didn't raise an error, fall back to - // SyntaxError - var err_token = $B.last(parser.tokens) - raise_error_known_location(_b_.SyntaxError, - filename, err_token.lineno, err_token.col_offset, - err_token.end_lineno, err_token.end_col_offset, - err_token.line, 'invalid syntax') - } - }else{ - var root = create_root_node({src, filename}, - module, locals_id, parent_scope) - dispatch_tokens(root) - _ast = root.ast() - } $B.parse_time += globalThis.performance.now() - t0 var future = $B.future_features(_ast, filename) var symtable = $B._PySymtable_Build(_ast, filename, future) @@ -9298,13 +730,13 @@ function run_scripts(_scripts){ {script: worker, name: worker.id, url: worker.src, is_ww: true}]) }else{ // Get source code inside the script element + $B.webworkers[worker.id] = worker + filename = $B.script_filename = 
$B.strip_host( + $B.script_path + "#" + worker.id) var source = (worker.innerText || worker.textContent) source = unindent(source) // remove global indentation // remove leading CR if any source = source.replace(/^\n/, '') - $B.webworkers[worker.id] = worker - filename = $B.script_filename = $B.strip_host( - $B.script_path + "#" + worker.id) $B.url2name[filename] = worker.id $B.file_cache[filename] = source $B.scripts[filename] = worker @@ -9321,6 +753,8 @@ function run_scripts(_scripts){ $B.tasks.push([$B.ajax_load_script, {script, name: module_name, url: script.src, id: script.id}]) }else{ + filename = $B.script_filename = $B.strip_host( + $B.script_path + "#" + module_name) // Get source code inside the script element src = (script.innerHTML || script.textContent) src = unindent(src) // remove global indentation @@ -9330,8 +764,6 @@ function run_scripts(_scripts){ if(src.endsWith('\n')){ src = src.substr(0, src.length - 1) } - filename = $B.script_filename = $B.strip_host( - $B.script_path + "#" + module_name) // store source code $B.file_cache[filename] = src $B.url2name[filename] = module_name @@ -9389,8 +821,6 @@ $B.run_script = function(script, src, name, url, run_loop){ } } -$B.$operators = $operators -$B.$Node = $Node // in case the name 'brython' is used in a Javascript library, // we can use $B.brython diff --git a/www/src/py_builtin_functions.js b/www/src/py_builtin_functions.js index e2cd9d65a..b933c8006 100644 --- a/www/src/py_builtin_functions.js +++ b/www/src/py_builtin_functions.js @@ -254,6 +254,7 @@ code.__getattribute__ = function(self, attr){ $B.set_func_names(code, "builtins") + //compile() (built in function) _b_.compile = function() { var $ = $B.args('compile', 7, @@ -275,7 +276,7 @@ _b_.compile = function() { $B.url2name[filename] = module_name if ($.flags & $B.PyCF_TYPE_COMMENTS) { - throw _b_.NotImplementedError.$factory('Brython does not currently support parsing of type comments') + // throw _b_.NotImplementedError.$factory('Brython does not currently support parsing of type comments') } if($B.$isinstance($.source, _b_.bytes)){ @@ -323,9 +324,19 @@ _b_.compile = function() { // This is used in codeop.py to raise SyntaxError until a block in the // interactive interpreter ends with "\n" // Cf. issue #853 - var lines = $.source.split("\n") - if($B.last(lines).startsWith(" ")){ - throw _b_.SyntaxError.$factory("unexpected EOF while parsing") + var lines = $.source.split("\n"), + last_line = $B.last(lines) + if(last_line.startsWith(" ")){ + var msg = "unexpected EOF while parsing", + exc = _b_.SyntaxError.$factory() + exc.filename = filename + exc.lineno = exc.end_lineno = lines.length - 1 + exc.offset = 0 + exc.end_offset = last_line.length - 1 + exc.text = last_line + exc.args = [msg, $B.fast_tuple([filename, exc.lineno, exc.offset, + exc.text, exc.end_lineno, exc.end_offset])] + throw exc } } @@ -340,145 +351,44 @@ _b_.compile = function() { var _ast, parser - if($B.parser_to_ast){ - try{ - var parser_mode = $.mode == 'eval' ? 
'eval' : 'file' - parser = new $B.Parser($.source, filename, parser_mode) - _ast = parser.parse() - }catch(err){ - if($.mode == 'single'){ - try{ - parser.tokens.next // throws an exception if tokenizer exhausted - }catch(err2){ - // special case - var tokens = parser.tokens, - tester = tokens[tokens.length - 2] - if((tester.type == "NEWLINE" && ($.flags & 0x4000)) || - tester.type == "DEDENT" && ($.flags & 0x200)){ - err.__class__ = _b_.SyntaxError - err.args[0] = 'incomplete input' - } - } - } - throw err - } - if($.mode == 'single' && _ast.body.length == 1 && - _ast.body[0] instanceof $B.ast.Expr){ - // If mode is 'single' and the source is a single expression, - // set _ast to an Expression and set attribute .single_expression - // to compile() result. This is used in exec() to print the - // expression if it is not None - parser = new $B.Parser($.source, filename, 'eval') - _ast = parser.parse() - $.single_expression = true - } - - if($.flags == $B.PyCF_ONLY_AST){ - delete $B.url2name[filename] - let res = $B.ast_js_to_py(_ast) - res.$js_ast = _ast - return res - } - }else if($B.py_tokens){ - // generated PEG parser - try{ - var parser_mode = $.mode == 'eval' ? 'eval' : 'file' - parser = new $B.Parser($.source, filename, parser_mode) - _ast = $B._PyPegen_parse(parser) - if(_ast === undefined){ - parser = new $B.Parser(src, filename, 'file') - parser.call_invalid_rules = true - $B._PyPegen_parse(parser) - // if invalid rules didn't raise an error, fall back to - // SyntaxError - var err_token = $B.last(parser.tokens) - $B.raise_error_known_location(_b_.SyntaxError, - filename, err_token.lineno, err_token.col_offset, - err_token.end_lineno, err_token.end_col_offset, - err_token.line, 'invalid syntax') - } - }catch(err){ - if($.mode == 'single'){ - try{ - parser.tokens.next // throws an exception if tokenizer exhausted - }catch(err2){ - // special case - var tokens = parser.tokens, - tester = tokens[tokens.length - 2] - if((tester.type == "NEWLINE" && ($.flags & 0x4000)) || - tester.type == "DEDENT" && ($.flags & 0x200)){ - err.__class__ = _b_.SyntaxError - err.args[0] = 'incomplete input' - } - } - } - throw err - } - if($.mode == 'single' && _ast.body.length == 1 && - _ast.body[0] instanceof $B.ast.Expr){ - // If mode is 'single' and the source is a single expression, - // set _ast to an Expression and set attribute .single_expression - // to compile() result. This is used in exec() to print the - // expression if it is not None - parser = new $B.Parser($.source, filename, 'eval') - _ast = parser.parse() - $.single_expression = true - } - - if($.flags == $B.PyCF_ONLY_AST){ - delete $B.url2name[filename] - let res = $B.ast_js_to_py(_ast) - res.$js_ast = _ast - return res - } - }else{ - var root = $B.parser.create_root_node( - {src: $.source, filename}, - module_name, module_name) - root.mode = $.mode - root.parent_block = $B.builtins_scope - try{ - $B.parser.dispatch_tokens(root, $.source) - _ast = root.ast() - }catch(err){ - if($.mode == 'single' && root.token_reader.read() === undefined){ - // special case - let tokens = root.token_reader.tokens, - tester = tokens[tokens.length - 2] - if((tester.type == "NEWLINE" && ($.flags & 0x4000)) || - tester.type == "DEDENT" && ($.flags & 0x200)){ - err.__class__ = _b_.SyntaxError - err.args[0] = 'incomplete input' - } + // generated PEG parser + try{ + var parser_mode = $.mode == 'eval' ? 
'eval' : 'file' + parser = new $B.Parser($.source, filename, parser_mode) + parser.flags = $.flags + _ast = $B._PyPegen.run_parser(parser) + }catch(err){ + if($.mode == 'single'){ + var tester = parser.tokens[parser.tokens.length - 2] + + if(tester && ( + (tester.type == "NEWLINE" && ($.flags & 0x4000)) || + (tester.type == "DEDENT" && ($.flags & 0x200)))){ + err.__class__ = _b_.SyntaxError + err.args[0] = 'incomplete input' } - throw err } - if($.mode == 'single' && _ast.body.length == 1 && - _ast.body[0] instanceof $B.ast.Expr){ - // If mode is 'single' and the source is a single expression, - // set _ast to an Expression and set attribute .single_expression - // to compile() result. This is used in exec() to print the - // expression if it is not None - root = $B.parser.create_root_node( - {src: $.source, filename}, - module_name, module_name) - root.mode = 'eval' - $.single_expression = true - root.parent_block = $B.builtins_scope - $B.parser.dispatch_tokens(root, $.source) - _ast = root.ast() - } - delete $B.url2name[filename] + throw err + } + if($.mode == 'single' && _ast.body.length == 1 && + _ast.body[0] instanceof $B.ast.Expr){ + // If mode is 'single' and the source is a single expression, + // set _ast to an Expression and set attribute .single_expression + // to compile() result. This is used in exec() to print the + // expression if it is not None + parser = new $B.Parser($.source, filename, 'eval') + _ast = $B._PyPegen.run_parser(parser) + $.single_expression = true + } - if($.flags == $B.PyCF_ONLY_AST){ - $B.create_python_ast_classes() // in py_ast.js - // Transform _ast (JS version) into a Python ast instance - let res = $B.ast_js_to_py(_ast) // in py_ast.js - res.$js_ast = _ast - return res - } + if($.flags == $B.PyCF_ONLY_AST){ + delete $B.url2name[filename] + let res = $B.ast_js_to_py(_ast) + res.$js_ast = _ast + return res } + delete $B.url2name[filename] // Set attribute ._ast to avoid compiling again if result is passed to // exec() @@ -790,32 +700,9 @@ var $$eval = _b_.eval = function(){ try{ if(! _ast){ - if($B.parser_to_ast){ - var _mode = mode == 'eval' ? 'eval' : 'file' - _ast = new $B.Parser(src, filename, _mode).parse() - }else if($B.py_tokens){ - // generated PEG parser - var _mode = mode == 'eval' ? 'eval' : 'file' - var parser = new $B.Parser(src, filename, _mode) - _ast = $B._PyPegen_parse(parser) - if(_ast === undefined){ - parser = new $B.Parser(src, filename, 'file') - parser.call_invalid_rules = true - $B._PyPegen_parse(parser) - var err_token = $B.last(parser.tokens) - $B.raise_error_known_location(_b_.SyntaxError, - filename, err_token.lineno, err_token.col_offset, - err_token.end_lineno, err_token.end_col_offset, - err_token.line, 'invalid syntax') - } - }else{ - var root = $B.parser.create_root_node(src, '', frame[0], frame[2], - 1) - root.mode = mode - root.filename = filename - $B.parser.dispatch_tokens(root) - _ast = root.ast() - } + var _mode = mode == 'eval' ? 
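// --- Illustrative sketch (editor's note, not part of the patch) -----------
// compile(), eval() and py2js() now all funnel source through the same
// generated-PEG-parser entry point.  A minimal standalone outline, assuming
// $B._PyPegen.run_parser raises SyntaxError itself on failure (as the
// inlined fallback code it replaces used to do):
function parse_to_ast(src, filename, mode){
    // mode is 'file' for scripts / exec() and 'eval' for eval()
    var parser = new $B.Parser(src, filename, mode)
    return $B._PyPegen.run_parser(parser)
}
// Typical follow-up, as in $B.py2js above:
//   var _ast = parse_to_ast("x = 1\n", "<test>", "file")
//   var future = $B.future_features(_ast, "<test>")
//   var symtable = $B._PySymtable_Build(_ast, "<test>", future)
// ---------------------------------------------------------------------------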
'eval' : 'file' + var parser = new $B.Parser(src, filename, _mode) + _ast = $B._PyPegen.run_parser(parser) } var future = $B.future_features(_ast, filename), symtable = $B._PySymtable_Build(_ast, filename, future), @@ -1799,7 +1686,18 @@ var len = _b_.len = function(obj){ throw _b_.TypeError.$factory("object of type '" + $B.class_name(obj) + "' has no len()") } - return $B.$call(method)(obj) + + let res = $B.$call(method)(obj) + + if (!$B.$isinstance(res, _b_.int)) { + throw _b_.TypeError.$factory(`'${$B.class_name(res)}' object cannot be interpreted as an integer`) + } + + if(!$B.rich_comp('__ge__', res, 0)) { + throw _b_.ValueError.$factory('ValueError: __len__() should return >= 0') + } + + return res } _b_.locals = function(){ @@ -2787,8 +2685,8 @@ $Reader.__enter__ = function(self){ return self } -$Reader.__exit__ = function(){ - return false +$Reader.__exit__ = function(self){ + $Reader.close(self) } $Reader.__init__ = function(_self, initial_value=''){ diff --git a/www/src/py_bytes.js b/www/src/py_bytes.js index fd2cf6a51..de89d2c82 100644 --- a/www/src/py_bytes.js +++ b/www/src/py_bytes.js @@ -1584,6 +1584,16 @@ var decode = $B.decode = function(obj, encoding, errors){ case "utf8": case "U8": case "UTF": + if(globalThis.TextDecoder){ + var decoder = new TextDecoder('utf-8', {fatal: true}), + array = new Uint8Array(b) + try{ + return decoder.decode(array) + }catch(err){ + // handled below; TextDecoder doesn't provide the same + // information as Python + } + } var pos = 0, err_info while(pos < b.length){ @@ -1795,6 +1805,16 @@ var encode = $B.encode = function(){ case "utf-8": case "utf_8": case "utf8": + if(globalThis.TextEncoder){ + var encoder = new TextEncoder('utf-8', {fatal: true}) + try{ + var array = encoder.encode(s) + return fast_bytes(Array.from(array)) + }catch(err){ + // handled below; TextDecoder doesn't provide the same + // information as Python + } + } for(let i = 0, len = s.length; i < len; i++){ let cp = s.charCodeAt(i) if(cp <= 0x7f){ diff --git a/www/src/py_exceptions.js b/www/src/py_exceptions.js index c22f1e8e6..5fb4f80b5 100644 --- a/www/src/py_exceptions.js +++ b/www/src/py_exceptions.js @@ -183,6 +183,10 @@ var frame = $B.frame = $B.make_class("frame", } ) +frame.__bool__ = function(){ + return true +} + frame.__delattr__ = function(_self, attr){ if(attr == "f_trace"){ _self.$f_trace = _b_.None @@ -222,7 +226,7 @@ frame.__getattr__ = function(_self, attr){ return _self.$f_trace } - console.log('no attr', attr, 'for frame', _self) + //console.log('no attr', attr, 'for frame', _self) throw $B.attr_error(attr, _self) } @@ -1099,7 +1103,7 @@ $B.error_trace = function(err){ var start = err.offset - indent - 1, end_offset = err.end_offset - 1 + (err.end_offset == err.offset ? 
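// --- Illustrative sketch (editor's note, not part of the patch) -----------
// The UTF-8 fast path added in py_bytes.js above relies on TextDecoder's
// {fatal: true} option: decode() throws on malformed input instead of
// inserting U+FFFD, so Brython can fall back to its byte-by-byte scan and
// build a Python-style UnicodeDecodeError.  Standalone check:
function try_utf8(byte_values){            // byte_values: array of 0..255 ints
    try{
        return new TextDecoder('utf-8', {fatal: true})
                   .decode(new Uint8Array(byte_values))
    }catch(err){
        return null                        // caller falls back to the slow path
    }
}
// try_utf8([0xe2, 0x82, 0xac])  -> "€"
// try_utf8([0xff])              -> null (invalid start byte)
// ---------------------------------------------------------------------------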
1 : 0), - marks = ' ' + ' '.repeat(start), + marks = ' ' + ' '.repeat(Math.max(0, start)), nb_marks = 1 if(err.end_lineno){ if(err.end_lineno > err.lineno){ diff --git a/www/src/py_flags.js b/www/src/py_flags.js index 395170bd1..805d82c23 100644 --- a/www/src/py_flags.js +++ b/www/src/py_flags.js @@ -2,15 +2,15 @@ (function($B){ $B.builtin_class_flags = { builtins: { - 1074287874: ['ReferenceError', 'SyntaxWarning', 'ConnectionAbortedError', 'Exception', 'OSError', 'KeyboardInterrupt', 'PermissionError', 'UnicodeTranslateError', 'InterruptedError', 'RuntimeWarning', 'Warning', 'SystemExit', 'ImportWarning', 'BaseException', 'FileNotFoundError', 'GeneratorExit', 'NotImplementedError', 'LookupError', 'WindowsError', 'IsADirectoryError', 'StopAsyncIteration', 'BlockingIOError', 'DeprecationWarning', 'StopIteration', 'BufferError', 'MemoryError', 'BaseExceptionGroup', 'FileExistsError', 'ModuleNotFoundError', 'ProcessLookupError', 'OverflowError', 'SyntaxError', 'EOFError', 'SystemError', 'RuntimeError', 'AssertionError', 'BytesWarning', 'EncodingWarning', 'RecursionError', 'ArithmeticError', 'PendingDeprecationWarning', 'TabError', 'UnboundLocalError', 'UnicodeDecodeError', 'NotADirectoryError', 'ResourceWarning', 'ChildProcessError', 'UnicodeError', 'BrokenPipeError', 'EnvironmentError', 'FloatingPointError', 'ValueError', 'UnicodeWarning', 'IndexError', 'NameError', 'IndentationError', 'ConnectionRefusedError', 'AttributeError', 'ConnectionError', 'FutureWarning', 'IOError', 'KeyError', 'TypeError', 'ConnectionResetError', 'TimeoutError', 'UnicodeEncodeError', 'ZeroDivisionError', 'ImportError', 'UserWarning'], + 1074287874: ['DeprecationWarning', 'Exception', 'RuntimeError', 'BytesWarning', 'EncodingWarning', 'FutureWarning', 'Warning', 'AttributeError', 'FileExistsError', 'ImportWarning', 'StopAsyncIteration', 'UnicodeDecodeError', 'ValueError', 'WindowsError', 'NameError', 'EnvironmentError', 'IndentationError', 'RuntimeWarning', 'LookupError', 'ChildProcessError', 'BaseException', 'OSError', 'StopIteration', 'PendingDeprecationWarning', 'InterruptedError', 'TimeoutError', 'UnboundLocalError', 'NotImplementedError', 'IndexError', 'IsADirectoryError', 'UnicodeEncodeError', 'UnicodeWarning', 'BaseExceptionGroup', 'SyntaxWarning', 'IOError', 'EOFError', 'ZeroDivisionError', 'GeneratorExit', 'ConnectionResetError', 'ImportError', 'SyntaxError', 'KeyError', 'UnicodeTranslateError', 'TypeError', 'ProcessLookupError', 'KeyboardInterrupt', 'OverflowError', 'BufferError', 'SystemExit', 'FileNotFoundError', 'NotADirectoryError', 'ConnectionError', 'RecursionError', 'PermissionError', 'UserWarning', 'ConnectionRefusedError', 'SystemError', 'AssertionError', 'ModuleNotFoundError', 'FloatingPointError', 'TabError', 'BrokenPipeError', 'ResourceWarning', 'ReferenceError', 'UnicodeError', 'ConnectionAbortedError', 'BlockingIOError', 'ArithmeticError', 'MemoryError'], 1073763848: ['ExceptionGroup'], 21500162: ['bool'], 4723970: ['bytearray', 'float'], 138941698: ['bytes'], - 546050: ['zip', 'property', 'enumerate', 'classmethod', 'map', 'staticmethod', 'reversed', 'super', 'filter'], + 546050: ['map', 'staticmethod', 'property', 'super', 'filter', 'zip', 'enumerate', 'reversed', 'classmethod'], 529666: ['object', 'complex'], 541611330: ['dict'], - 4740354: ['set', 'frozenset'], + 4740354: ['frozenset', 'set'], 21501186: ['int'], 38294818: ['list'], 545058: ['memoryview'], @@ -21,10 +21,10 @@ $B.builtin_class_flags = { 2156420354: ['type'], }, types: { - 545154: ['method-wrapper', 'async_generator', 
'classmethod_descriptor', 'member_descriptor', 'getset_descriptor', 'coroutine', 'generator', 'frame'], + 545154: ['member_descriptor', 'classmethod_descriptor', 'async_generator', 'generator', 'getset_descriptor', 'coroutine', 'frame', 'method-wrapper'], 547202: ['builtin_function_or_method'], - 545026: ['traceback', 'cell'], - 528642: ['NotImplementedType', 'ellipsis', 'code', 'NoneType'], + 545026: ['cell', 'traceback'], + 528642: ['NoneType', 'ellipsis', 'NotImplementedType', 'code'], 678146: ['function'], 545090: ['mappingproxy'], 678274: ['method_descriptor'], diff --git a/www/src/py_import.js b/www/src/py_import.js index 2add2d67e..4c686b0a8 100644 --- a/www/src/py_import.js +++ b/www/src/py_import.js @@ -73,7 +73,7 @@ $B.make_import_paths = function(filename){ // - finder_static_stlib : use the script stdlib_path.js to identify the // packages and modules in the standard distribution // - finder_path : search module at different urls - var filepath = $B.domain ? $B.domain + '/' + filename : filename + var filepath = $B.script_domain ? $B.script_domain + '/' + filename : filename var elts = filepath.split('/') elts.pop() var script_dir = elts.join('/'), @@ -233,7 +233,7 @@ function run_js(module_contents, path, _module){ function run_py(module_contents, path, module, compiled) { // set file cache for path ; used in built-in function open() - var filename = $B.strip_host(path) + var filename = module.__file__ $B.file_cache[filename] = module_contents $B.url2name[filename] = module.__name__ var root, diff --git a/www/src/py_int.js b/www/src/py_int.js index 18ed2c12d..d08dc8c23 100644 --- a/www/src/py_int.js +++ b/www/src/py_int.js @@ -337,7 +337,12 @@ int.__init__ = () => _b_.None int.__int__ = (self) => self -int.__invert__ = (self) => ~self +int.__invert__ = function(self){ + if(Math.abs(self) < 2 ** 31){ + return ~self + } + return $B.rich_op('__sub__', $B.rich_op('__mul__', self, -1), 1) +} int.__mod__ = function(self, other) { // can't use Javascript % because it works differently for negative numbers @@ -389,6 +394,9 @@ int.__new__ = function(cls, value, base){ if(cls === int){ return int.$factory(value, base) } + if(cls === bool) { + throw _b_.TypeError.$factory("int.__new__(bool) is not safe, use bool.__new__()") + } // set method .toString so that BigInt(instance) returns a bingint return { __class__: cls, @@ -937,7 +945,9 @@ $B.$bool = function(obj, bool_class){ // return true or false if(len_method === missing){ return true } - return len_method(obj) > 0 + // Call _b_.len here instead of len_method directly to use + // len's handling of non-integer and negative values + return _b_.len(obj) > 0 }else{ var res = bool_class ? $B.$call(bool_method)(obj) : @@ -1015,19 +1025,58 @@ bool.__xor__ = function(self, other) { return _b_.NotImplemented } +bool.__invert__ = function(self) { + $B.warn(_b_.DeprecationWarning, `Bitwise inversion '~' on bool is deprecated.This returns the bitwise inversion of the underlying int object and is usually not what you expect from negating a bool.Use the 'not' operator for boolean negation or ~int(x) if you really want the bitwise inversion of the underlying int.`) + return int.__invert__(self) +} + bool.$factory = function(){ // Calls $B.$bool, which is used inside the generated JS code and skips // arguments control. 
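// --- Illustrative sketch (editor's note, not part of the patch) -----------
// Both branches of the new int.__invert__ above compute the same value:
// JavaScript's 32-bit `~self` and `(self * -1) - 1` each equal -self - 1,
// which is what Python's ~ operator returns.  Quick check with BigInt:
function invert_big(x){        // x: BigInt outside the 32-bit range
    return -x - 1n
}
// invert_big(2n ** 40n) -> -1099511627777n, i.e. ~(2**40) in Python
// ---------------------------------------------------------------------------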
var $ = $B.args("bool", 1, {x: null}, ["x"], - arguments, {x: false}, null, null) + arguments, {x: false}, null, null, 1) return $B.$bool($.x, true) } +bool.__new__ = function (cls, value) { + if (cls === undefined) { + throw _b_.TypeError.$factory("bool.__new__(): not enough arguments") + } else if (!$B.$isinstance(cls, _b_.type)) { + throw _b_.TypeError.$factory(`bool.__new__(X): X is not a type object (${$B.class_name(cls) })`) + } else if (!_b_.issubclass(cls, bool)) { + let class_name = $B.class_name(cls) + throw _b_.TypeError.$factory(`bool.__new__(${class_name}): ${class_name} is not a subtype of bool`) + } + if (arguments.length > 2) { + throw _b_.TypeError.$factory(`bool expected at most 1 argument, got ${arguments.length - 1}`) + } + return bool.$factory(value) +} + +bool.from_bytes = function () { + var $ = $B.args("from_bytes", 3, + { bytes: null, byteorder: null, signed: null }, + ["bytes", "byteorder", "signed"], + arguments, { byteorder: 'big', signed: false }, null, null) + let int_result = int.from_bytes($.bytes, $.byteorder, $.signed) + return bool.$factory(int_result) +} + bool.numerator = int.numerator bool.denominator = int.denominator -bool.real = int.real +bool.real = (self) => self ? 1 : 0 bool.imag = int.imag +for (var attr of ['real']) { + bool[attr].setter = (function (x) { + return function (self) { + throw _b_.AttributeError.$factory(`attribute '${x}' of ` + + `'${$B.class_name(self)}' objects is not writable`) + } + })(attr) +} + + _b_.bool = bool $B.set_func_names(bool, "builtins") diff --git a/www/src/py_string.js b/www/src/py_string.js index 61b198784..e42f82aff 100644 --- a/www/src/py_string.js +++ b/www/src/py_string.js @@ -447,6 +447,8 @@ var num_format = function(val, flags) { val = parseInt(val.value) }else if(! $B.$isinstance(val, _b_.int)){ val = parseInt(val) + }else if ($B.$isinstance(val, _b_.bool)) { + val = val ? 1 : 0 } var s = format_int_precision(val, flags) @@ -692,6 +694,8 @@ var signed_hex_format = function(val, upper, flags){ if(! $B.$isinstance(val, _b_.int)){ throw _b_.TypeError.$factory( `%X format: an integer is required, not ${$B.class_name(val)}`) + } else if ($B.$isinstance(val, _b_.bool)) { + val = val ? 1 : 0 } if(val.__class__ === $B.long_int){ @@ -1743,21 +1747,33 @@ str.isascii = function(){ return true } +var unicode_categories_contain_character = function (categories, cp) { + for (var cat of categories) { + console.log(cat, cp); + if ($B.in_unicode_category(cat, cp)) { + return true + } + } + return false +} + +var alpha_categories = ['Ll', 'Lu', 'Lm', 'Lt', 'Lo'] +var alnum_categories = ['Ll', 'Lu', 'Lm', 'Lt', 'Lo', 'Nd'] + str.isalnum = function(){ /* Return true if all characters in the string are alphanumeric and there is at least one character, false otherwise. A character c is alphanumeric if one of the following returns True: c.isalpha(), c.isdecimal(), c.isdigit(), or c.isnumeric(). */ var $ = $B.args("isalnum", 1, {self: null}, ["self"], - arguments, {}, null, null), - cp, - _self = to_string($.self) - for(var char of _self){ - cp = _b_.ord(char) - for(var cat of ['Ll', 'Lu', 'Lm', 'Lt', 'Lo', 'Nd', 'digits', 'numeric']){ - if(! 
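// --- Illustrative sketch (editor's note, not part of the patch) -----------
// A compact standalone equivalent of unicode_categories_contain_character
// above, assuming the same $B.in_unicode_category helper: a code point
// counts as alphanumeric if it falls in any one of the listed general
// categories.
function is_alnum_cp(cp){
    return ['Ll', 'Lu', 'Lm', 'Lt', 'Lo', 'Nd'].some(
        cat => $B.in_unicode_category(cat, cp))
}
// is_alnum_cp('é'.codePointAt(0)) -> true ; is_alnum_cp(' '.codePointAt(0)) -> false
// ---------------------------------------------------------------------------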
$B.in_unicode_category(cat, cp)){ - return false - } + arguments, {}, null, null) + var _self = to_string($.self); + if (_self.length == 0) { + return false + } + for (var char of _self) { + if (!unicode_categories_contain_character(alnum_categories, _b_.ord(char))) { + return false } } return true @@ -1770,15 +1786,14 @@ str.isalpha = function(){ those with general category property being one of "Lm", "Lt", "Lu", "Ll", or "Lo". */ var $ = $B.args("isalpha", 1, {self: null}, ["self"], - arguments, {}, null, null), - cp, - _self = to_string($.self) - for(var char of _self){ - cp = _b_.ord(char) - for(var cat of ['Ll', 'Lu', 'Lm', 'Lt', 'Lo']){ - if(! $B.in_unicode_category(cat, cp)){ - return false - } + arguments, {}, null, null) + var _self = to_string($.self); + if (_self.length == 0) { + return false + } + for (var char of _self) { + if (!unicode_categories_contain_character(alpha_categories, _b_.ord(char))) { + return false } } return true diff --git a/www/src/py_type.js b/www/src/py_type.js index 59fa20793..b433d24e5 100644 --- a/www/src/py_type.js +++ b/www/src/py_type.js @@ -45,7 +45,7 @@ $B.$class_constructor = function(class_name, class_obj_proxy, metaclass, dict = class_obj_proxy.$target } var module = class_obj_proxy.__module__ - + // bool is not a valid base for(var base of bases){ if(base.__flags__ !== undefined && diff --git a/www/src/py_utils.js b/www/src/py_utils.js index c80f59a51..b8c1edd00 100644 --- a/www/src/py_utils.js +++ b/www/src/py_utils.js @@ -891,16 +891,16 @@ $B.unpacker = function(obj, nb_targets, has_starred){ var nb_after_starred = arguments[3] position_rank++ } - if($B.pep657){ - position = $B.decode_position(arguments[position_rank]) - } + position = $B.decode_position(arguments[position_rank]) var t = _b_.list.$factory(obj), right_length = t.length, left_length = nb_targets + (has_starred ? nb_after_starred - 1 : 0) - if(right_length < left_length){ + if((! has_starred && (right_length < nb_targets)) || + (has_starred && (right_length < nb_targets - 1))){ var exc = _b_.ValueError.$factory(`not enough values to unpack ` + - `(expected ${left_length}, got ${right_length})`) + `(expected ${has_starred ? 
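// --- Illustrative sketch (editor's note, not part of the patch) -----------
// The general rule behind the length check above, stated on its own:
// without a starred target the iterable must yield exactly as many values
// as there are targets; with a starred target it must yield at least as
// many values as there are non-starred targets.
function unpack_length_ok(n_values, n_targets, has_starred){
    var minimum = has_starred ? n_targets - 1 : n_targets
    return has_starred ? n_values >= minimum : n_values === minimum
}
// a, b, c = [1, 2]   -> unpack_length_ok(2, 3, false) === false (ValueError)
// a, *b, c = [1, 2]  -> unpack_length_ok(2, 3, true)  === true  (b == [])
// ---------------------------------------------------------------------------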
' at least ' : ''} ` + + `${left_length}, got ${right_length})`) if(position){ $B.set_exception_offsets(exc, position) } diff --git a/www/src/python_parser.js b/www/src/python_parser.js index 5336cbddf..5a74a5706 100644 --- a/www/src/python_parser.js +++ b/www/src/python_parser.js @@ -228,6 +228,7 @@ var helper_functions = { }, _RAISE_SYNTAX_ERROR_INVALID_TARGET(p, type, e){ + console.log('RAISE INVLAID TARGET') var invalid_target = $B.helper_functions.CHECK_NULL_ALLOWED($B.ast.expr, $B._PyPegen.get_invalid_target(e, type)); if (invalid_target != NULL) { @@ -356,6 +357,7 @@ var Parser = $B.Parser = function(src, filename, mode){ if(filename){ p.filename = filename } + } Parser.prototype.parse = function(){ @@ -429,10 +431,16 @@ Parser.prototype.set_memo = function(rule, position, value){ this.memo[rule.name][position] = value } +var rule_indent = -1 +var rule_stack = [] +$B.rule_stack = rule_stack + Parser.prototype.apply_rule = function(rule, position){ // apply rule at position - if(debug){ - console.log('apply rule', rule, 'at position', position, this.tokens[position]) + if(this.use_invalid){ //debug || rule.name.includes('invalid')){ + rule_indent++ + rule_stack.push(rule) + console.log(' '.repeat(rule_indent) + 'apply rule', rule.name, 'at position', position) //, this.tokens[position]) } var memoized = this.RECALL(rule, position), result @@ -461,6 +469,11 @@ Parser.prototype.apply_rule = function(rule, position){ result = memoized === FAIL ? memoized : memoized.match } } + if(this.use_invalid){ // debug || rule.name.includes('invalid')){ + console.log(' '.repeat(rule_indent) + 'result of apply rule', rule.name, result) + rule_stack.pop() + rule_indent-- + } return result } @@ -595,6 +608,10 @@ Parser.prototype.eval_option_once = function(rule, position){ var match = {rule, matches, start, end: position} if(this.use_invalid && rule.parent_rule && rule.parent_rule.startsWith('invalid_')){ + console.log('match invalid rule', show_rule(rule)) + for(var m of matches){ + console.log(m) + } var _ast = make_ast(match, tokens) if(_ast === undefined){ return FAIL diff --git a/www/src/python_parser_peg_version.js b/www/src/python_parser_peg_version.js index 814433514..c474b00c5 100644 --- a/www/src/python_parser_peg_version.js +++ b/www/src/python_parser_peg_version.js @@ -38,7 +38,8 @@ $B.parser_constants = { PyExc_SyntaxError: _b_.SyntaxError, STAR_TARGETS: 1, DEL_TARGETS: 2, - FOR_TARGETS: 3 + FOR_TARGETS: 3, + PyBytes_AS_STRING: (b) => b } // actions such as Add, Not, etc. @@ -54,39 +55,24 @@ var NULL = $B.parser_constants.NULL // Generate functions to create AST instances $B._PyAST = {} -var template = ` -$B._PyAST. = function(EXTRA){ - var ast_obj = new $B.ast.() - set_position_from_EXTRA(ast_obj, EXTRA) - return ast_obj -} -` - for(var ast_class in $B.ast_classes){ // in py_ast.js var args = $B.ast_classes[ast_class] if(Array.isArray(args)){ continue } args = args.replace(/\*/g, '').replace(/\?/g, '') - var sep = args.length > 0 ? 
', ' : '' - var function_code = template.replace(//g, ast_class) - .replace(//, sep) - .replace(//g, args) - eval(function_code) -} - - -var inf = Number.POSITIVE_INFINITY - -// Python keywords don't match NAME rules, so that "pass = 7" is illegal -// The list doesn't include 'case' and 'match' that are 'soft keywords' -// in PEP 634 -var keywords = ['and', 'as', 'elif', 'for', 'yield', 'while', 'assert', 'or', - 'continue', 'lambda', 'from', 'class', 'in', 'not', 'finally', 'is', - 'except', 'global', 'return', 'raise', 'break', 'with', 'def', - 'try', 'if', 'else', 'del', 'import', 'nonlocal', 'pass' - ] + var arg_names = args.split(',') + $B._PyAST[ast_class] = (function(ast_name, ast_args){ + return function(){ + var _args = Array.from(arguments).slice(0, ast_args.length + 1) + var EXTRA = _args.pop() + var ast_obj = new $B.ast[ast_name](..._args) + set_position_from_EXTRA(ast_obj, EXTRA) + return ast_obj + } + })(ast_class, arg_names) +} function get_last_token(p){ var last_token = $B.last(p.tokens) @@ -173,7 +159,7 @@ var helper_functions = { return NULL; }, - RAISE_SYNTAX_ERROR: function(msg){ + RAISE_SYNTAX_ERROR: function(p, msg){ var extra_args = [] for(var i = 1, len = arguments.length; i < len; i++){ extra_args.push(arguments[i]) @@ -183,7 +169,7 @@ var helper_functions = { }, - RAISE_INDENTATION_ERROR: function(msg, arg){ + RAISE_INDENTATION_ERROR: function(p, msg, arg){ if(arg !== undefined){ msg = _b_.str.__mod__(msg, arg) } @@ -203,7 +189,7 @@ var helper_functions = { $B._PyPegen.raise_error(p, _b_.IndentationError, msg) }, - RAISE_SYNTAX_ERROR_KNOWN_LOCATION: function(a, err_msg, arg){ + RAISE_SYNTAX_ERROR_KNOWN_LOCATION: function(p, a, err_msg, arg){ if(arg !== undefined){ err_msg = _b_.str.__mod__(err_msg, arg) } @@ -214,8 +200,11 @@ var helper_functions = { err_msg) }, - RAISE_SYNTAX_ERROR_KNOWN_RANGE: function(a, b, msg){ - var extra_args = arguments[3] + RAISE_SYNTAX_ERROR_KNOWN_RANGE: function(p, a, b, msg){ + var extra_args = arguments[4] + if(extra_args){ + msg = _b_.str.__mod__(msg, extra_args) + } helper_functions.RAISE_ERROR_KNOWN_LOCATION(p, _b_.SyntaxError, a.lineno, a.col_offset, b.end_lineno, b.end_col_offset, @@ -223,7 +212,7 @@ var helper_functions = { }, - RAISE_SYNTAX_ERROR_INVALID_TARGET: function(type, e){ + RAISE_SYNTAX_ERROR_INVALID_TARGET: function(p, type, e){ return helper_functions._RAISE_SYNTAX_ERROR_INVALID_TARGET(p, type, e) }, @@ -239,6 +228,7 @@ var helper_functions = { msg = "cannot delete %s"; } return helper_functions.RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + p, invalid_target, msg, $B._PyPegen.get_expr_name(invalid_target) @@ -247,36 +237,61 @@ var helper_functions = { return NULL; }, - RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN: function(msg){ - return helper_functions.RAISE_SYNTAX_ERROR(msg) + RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN: function(p, msg){ + return helper_functions.RAISE_SYNTAX_ERROR(p, msg) }, + RAISE_SYNTAX_ERROR_STARTING_FROM: function(p, a, msg, ...args){ + var last = p.tokens[p.tokens.length - 1] + return helper_functions.RAISE_ERROR_KNOWN_LOCATION(p, _b_.SyntaxError, + a.lineno, a.col_offset, + last.end_lineno, last.end_col_offset - 1, + msg, ...args) + }, + asdl_seq_LEN: (t) => t.length, asdl_seq_GET: (t, i) => t[i] } + $B.helper_functions = helper_functions -function handle_errortoken(token, token_reader){ - if(token.string == "'" || token.string == '"'){ - return 'unterminated string literal ' + - `(detected at line ${token.start[0]})` - }else if(token.string == '\\'){ - var nxt = token_reader.next - if((! 
nxt) || nxt.type == 'NEWLINE'){ - return 'unexpected EOF while parsing' - }else{ - return 'unexpected character after line continuation character' - } - }else if(! ' `$'.includes(token.string)){ - var u = _b_.ord(token.string).toString(16).toUpperCase() - u = 'U+' + '0'.repeat(Math.max(0, 4 - u.length)) + u - return `invalid character '${token.string}' (${u})` - } - return 'invalid syntax' -} +// XXX redundant with above functions +function raise_error_known_location(type, filename, lineno, col_offset, + end_lineno, end_col_offset, line, message){ + var exc = type.$factory(message) + exc.filename = filename + exc.lineno = lineno + exc.offset = col_offset + 1 + exc.end_lineno = end_lineno + exc.end_offset = end_col_offset + 1 + exc.text = line + exc.args[1] = $B.fast_tuple([filename, exc.lineno, exc.offset, exc.text, + exc.end_lineno, exc.end_offset]) + exc.$frame_obj = $B.frame_obj + throw exc +} + +$B.raise_error_known_location = raise_error_known_location + +function raise_error_known_token(type, filename, token, message){ + var exc = type.$factory(message) + exc.filename = filename + exc.lineno = token.lineno + exc.offset = token.col_offset + 1 + exc.end_lineno = token.end_lineno + exc.end_offset = token.end_col_offset + 1 + exc.text = token.line + exc.args[1] = $B.fast_tuple([filename, exc.lineno, exc.offset, exc.text, + exc.end_lineno, exc.end_offset]) + exc.$frame_obj = $B.frame_obj + throw exc +} + +$B.raise_error_known_token = raise_error_known_token + function set_position_from_EXTRA(ast_obj, EXTRA){ for(var key in EXTRA){ @@ -285,45 +300,11 @@ function set_position_from_EXTRA(ast_obj, EXTRA){ } -// ---- end of names used by grammar actions - - -// JS classes and functions used by the parsing algorithm - -// transform repeat string to min and max number of repetitions -var repeater = { - '?' 
: [0, 1], - '*': [0, inf], - '+': [1, inf] -} - -// Singletons for failure -var FAIL = {name: 'FAIL'}, - FROZEN_FAIL = {name: 'FROZEN_FAIL'} - -// Classes used in the algorithm -function MemoEntry(match){ - this.match = match -} - -function LR(seed, rule){ - this.seed = seed - this.rule = rule -} - -function HEAD(rule, involvedSet, evalSet){ - this.rule = rule - this.involvedSet = involvedSet - this.evalSet = evalSet -} - -// An instance of Parser is created for each script / exec / -// f-string expression var Parser = $B.Parser = function(src, filename, mode){ // mode is 'file' for a script or exec(), 'eval' for eval() // Normalize line ends src = src.replace(/\r\n/gm, "\n") - var tokenizer = $B.tokenizer(src, filename, mode) + var tokenizer = $B.tokenizer(src, filename, mode, this) this.tokenizer = tokenizer this.tok = tokenizer this.mark = 0 @@ -346,73 +327,6 @@ var Parser = $B.Parser = function(src, filename, mode){ } } -Parser.prototype.parse = function(){ - console.log('parse') - if(this.src.trim().length == 0){ - // eg empty __init__.py - return new $B.ast.Module([]) - } - var rule = $B.grammar[this.mode], - match - this.clear_memo() - this.HEADS = {} - this.LRStack = [] - // first pass skipping invalid_ rules - this.use_invalid = false - match = this.apply_rule(rule, 0) - if(match === FAIL){ - // second pass using invalid_ rules - this.use_invalid = true - this.clear_memo() - this.HEADS = {} - this.LRStack = [] - try{ - match = this.apply_rule(rule, 0) - }catch(err){ - throw err - } - } - if(match === FAIL){ - var err_token = $B.last(this.tokens) - p.filename = this.filename - p.known_err_token = err_token - var message = 'invalid syntax' - if(err_token.type == 'ERRORTOKEN'){ - message = handle_errortoken(err_token, this.tokens) - } - $B.helper_functions.RAISE_ERROR_KNOWN_LOCATION(p, _b_.SyntaxError, - err_token.start[0], - err_token.start[1], - err_token.end[0], - err_token.end[1], - message) - } - - // If parsing succeeds, return AST object - var t0 = window.performance.now() - var res = make_ast(match, this.tokens) - $B.time_make_ast += window.performance.now() - t0 - return res -} - -Parser.prototype.clear_memo = function(){ - for(var key in this.memo){ - delete this.memo[key] - } -} - -Parser.prototype.get_memo = function(rule, position){ - if(this.memo[rule.name] === undefined || - this.memo[rule.name][position] === undefined){ - return null - } - var m = this.memo[rule.name][position] - if(m.match === FAIL){ - return FAIL - } - return m -} - var ignored = [$B.py_tokens.ENCODING, $B.py_tokens.NL, $B.py_tokens.COMMENT] @@ -432,673 +346,4 @@ Parser.prototype.read_token = function(){ } } -Parser.prototype.set_memo = function(rule, position, value){ - this.memo[rule.name] = this.memo[rule.name] || {} - this.memo[rule.name][position] = value -} - -Parser.prototype.apply_rule = function(rule, position){ - // apply rule at position - if(debug){ - console.log('apply rule', rule, 'at position', position, this.tokens[position]) - } - var memoized = this.RECALL(rule, position), - result - if(memoized === null){ - var lr = new LR(FAIL, rule) - this.LRStack.push(lr) - var m = new MemoEntry(lr) - this.set_memo(rule, position, m) - // evaluate rule body - var match = this.eval_body(rule, position) - this.LRStack.pop() - m.end = match.end - - if(lr.head){ - lr.seed = match - result = this.LR_ANSWER(rule, position, m) - }else{ - m.match = match - result = match - } - }else{ - if(memoized.match instanceof LR){ - this.SETUP_LR(rule, memoized.match) - result = memoized.match.seed - }else{ - 
result = memoized === FAIL ? memoized : memoized.match - } - } - return result -} - -function set_id(rule){ - return $B.UUID() -} - -$B.nb_eval_option = 0 -$B.nb_deja_vu = 0 - -Parser.prototype.eval_option = function(rule, position){ - $B.nb_eval_option++ - var tokens = this.tokens, - result, - start = position, - join_position = false - - rule.id = rule.id ?? $B.UUID() - if(! rule.repeat){ - result = this.eval_option_once(rule, position) - }else{ - var matches = [], - start = position, - repeat = repeater[rule.repeat] - while(matches.length < repeat[1]){ - var match = this.eval_option_once(rule, position) - if(match === FAIL){ - if(join_position){ - result = {rule, matches, start, end: join_position - 1} - join_position = false - position = join_position - 1 - }else if(matches.length >= repeat[0]){ - // Enough repetitions - result = {rule, matches, start, end: position} - }else{ - result = FAIL - } - break - } - matches.push(match) - // If the rule is of the form "s.e" : - // - if the next token matches "s", increment position and remain - // in the loop. Keep track of the position that matches "s". If - // the next tokens don't match the rule, the position will be - // reset to the position of the "s" character - // - else break - if(rule.join){ - while(tokens[match.end] === undefined){ - this.read_token() - } - if(tokens[match.end][1] == rule.join){ - position = match.end + 1 - join_position = position - }else{ - position = match.end - break - } - }else{ - join_position = false - position = match.end - } - } - if(! result){ - result = {rule, start, matches, end: position} - } - } - if(rule.lookahead){ - switch(rule.lookahead){ - case 'positive': - if(result !== FAIL){ - result.end = result.start // don't consume input - } - break - case 'negative': - if(result === FAIL){ - result = {rule, start, end: start} - }else{ - result = FAIL - } - break - } - } - return result -} - -Parser.prototype.eval_option_once = function(rule, position){ - var tokens = this.tokens - tokens[position] ?? this.read_token() - if(rule.choices){ - for(var i = 0, len = rule.choices.length; i < len; i++){ - var choice = rule.choices[i], - invalid = choice.invalid ?? test_invalid(choice) - if(invalid && ! this.use_invalid){ - continue - } - var match = this.eval_option(choice, position) - if(match === FROZEN_FAIL){ - // if a choice with a ~ fails, don't try other alternatives - return FAIL - }else if(match !== FAIL){ - if(invalid){ - var _ast = make_ast(match, tokens) - if(_ast === undefined){ - continue - } - match.invalid = true - } - match.rank = i - return match - } - } - return FAIL - }else if(rule.items){ - var start = position, - matches = [], - frozen_choice = false // set to true if we reach a COMMIT_CHOICE (~) - for(var item of rule.items){ - if(item.type == 'COMMIT_CHOICE'){ - frozen_choice = true - } - var match = this.eval_option(item, position) - if(match !== FAIL){ - matches.push(match) - position = match.end - }else{ - if(frozen_choice){ - return FROZEN_FAIL - } - return FAIL - } - } - var match = {rule, matches, start, end: position} - if(this.use_invalid && rule.parent_rule && - rule.parent_rule.startsWith('invalid_')){ - var _ast = make_ast(match, tokens) - if(_ast === undefined){ - return FAIL - } - match.invalid = true - } - return match - }else if(rule.type == "rule"){ - return this.apply_rule($B.grammar[rule.name], position) - }else if(rule.type == "string"){ - return tokens[position][1] == rule.value ? 
- {rule, start: position, end: position + 1} : - FAIL - }else if(rule.type == 'COMMIT_CHOICE'){ - // mark current option as frozen - return {rule, start: position, end: position} - }else if(rule.type == 'NAME'){ - var token = tokens[position], - string = token.string, - test = token.type == rule.type && - ! keywords.includes(token.string) && - ! ['True', 'False', 'None'].includes(token.string) && - (rule.value === undefined ? true : tokens[position][1] == rule.value) - return test ? {rule, start: position, end: position + 1} : FAIL - }else if(rule.type == 'ASYNC'){ - var test = tokens[position].type == 'NAME' && tokens[position].string == 'async' - return test ? {rule, start: position, end: position + 1} : FAIL - }else if(rule.type == 'AWAIT'){ - var test = tokens[position].type == 'NAME' && tokens[position].string == 'await' - return test ? {rule, start: position, end: position + 1} : FAIL - }else{ - var test = tokens[position][0] == rule.type && - (rule.value === undefined ? true : tokens[position][1] == rule.value) - return test ? {rule, start: position, end: position + 1} : FAIL - } -} - -function test_invalid(choice){ - choice.invalid = choice.items && choice.items.length == 1 && - choice.items[0].name && - choice.items[0].name.startsWith('invalid_') - return choice.invalid -} - -Parser.prototype.eval_body = function(rule, position){ - // Only for grammar rules - if(debug){ - console.log('eval body', rule, position, this.tokens[position]) - } - var start = position - if(rule.choices){ - for(var i = 0, len = rule.choices.length; i < len; i++){ - var choice = rule.choices[i], - invalid = choice.invalid ?? test_invalid(choice) - if(invalid && ! this.use_invalid){ - continue - } - var match = this.eval_option(choice, position) - if(match === FROZEN_FAIL){ - // if a choice with a ~ fails, don't try other alternatives - return FAIL - }else if(match !== FAIL){ - if(invalid){ - var _ast = make_ast(match, this.tokens) - if(_ast === undefined){ - // ignore invalid match if its action returns NULL - continue - } - } - match.rank = i - return match - } - } - return FAIL - }else if(rule.items){ - var matches = [], - frozen_choice = false // set to true if we reach a COMMIT_CHOICE (~) - for(var item of rule.items){ - if(item.type == 'COMMIT_CHOICE'){ - frozen_choice = true - } - var match = this.eval_option(item, position) - if(match !== FAIL){ - matches.push(match) - position = match.end - }else{ - return frozen_choice ? FROZEN_FAIL : FAIL - } - } - var match = {rule, matches, start, end: position} - if(this.use_invalid && rule.parent_rule && - rule.parent_rule.startsWith('invalid_')){ - make_ast(match, this.tokens) - } - return match - } -} - -Parser.prototype.matched_string = function(match){ - var s = '' - for(var i = match.start; i < match.end; i++){ - s += this.tokens[i].string - } - return s -} - -Parser.prototype.RECALL = function(R, P){ - let m = this.get_memo(R, P) - let h = this.HEADS[P] - // If not growing a seed parse, just return what is stored - // in the memo table. - if(! h){ - return m - } - // Do not evaluate any rule that is not involved in this - // left recursion. - var set = new Set([h.head]) - for(var s of h.involvedSet){ - set.add(s) - } - if((! m) && ! set.has(R)){ - return new MemoEntry(FAIL) - } - // Allow involved rules to be evaluated, but only once, - // during a seed-growing iteration. - if(h.evalSet.has(R)){ - h.evalSet.delete(R) - let ans = this.eval_body(R, P) - m.match = ans - m.end = ans === FAIL ? 
P : ans.end - } - return m -} - -Parser.prototype.SETUP_LR = function(R, L){ - if(! L.head){ - L.head = new HEAD(R, new Set(), new Set()) - } - let ix = this.LRStack.length -1, - s = this.LRStack[ix] - while(s && s.head !== L.head){ - s.head = L.head - L.head.involvedSet.add(s.rule) - ix-- - s = this.LRStack[ix] - } -} - -Parser.prototype.LR_ANSWER = function(R, P, M){ - let h = M.match.head - if(h.rule != R){ - return M.match.seed - }else{ - M.match = M.match.seed - } - if(M.match === FAIL){ - return FAIL - }else{ - return this.grow_lr(R, P, M, h) - } -} - -Parser.prototype.grow_lr = function(rule, position, m, H){ - // Called after eval_body(rule, position) produced a match and ignored - // an option that referenced itself (recursion) because at that time, - // memo(rule, position) was a LeftReference. - // - // m is the MemoEntry for (rule, position); m.match is the latest match - // - // apply_rule(rule, position) will return this match - // - // In each iteration of the "while" loop, we try again eval_body(), - // which uses the MemoEntry m for the rule. This allows an - // expression such as "1 + 2 + 3" to set a first match for "1 + 2", - // then a second for "1 + 2 + 3" - this.HEADS[position] = H - while(true){ - if(H){ - H.evalSet = new Set(H.involvedSet) - } - var match = this.eval_body(rule, position) - if(match === FAIL || match.end <= m.end){ - break - } - m.match = match - m.end = match.end - } - delete this.HEADS[position] - return m.match -} - -function set_alias(L, name, value){ - L[name] = value -} - -// Function that generates the AST for a match -$B.time_make_ast = 0 - -function make_ast(match, tokens){ - // match.rule succeeds; make_ast() returns a value for the match, based on - // the grammar action for the rule - var rule = match.rule, - names = {}, - L = {p} // used as namespace for rule action parameters - p.tokens = tokens - p.mark = match.start - p.fill = match.start - - var test = false // show_rule(rule).indexOf('invalid_def_raw') > -1 - if(test){ - console.log('make_ast', show_rule(rule, true), '\n match', match) - } - - // name EXTRA is used in grammar actions - var token = tokens[match.start], - EXTRA = {lineno: token.start[0], - col_offset: token.start[1], - end_lineno: token.end[0], - end_col_offset: token.end[1] - } - p.arena = EXTRA - L.EXTRA = EXTRA - - if(rule.repeat){ - // If a repeated rule has an alias, it applies to the repetition list - // The number of repetitions is len(match.matches) - var res = [] - if(['STRING', 'string', 'NEWLINE'].includes(rule.type)){ - for(var m of match.matches){ - res.push(tokens[m.start]) - } - if(rule.alias){ - set_alias(L, rule.alias, res) - } - if(rule.action){ - return rule.action(L) - } - return res - }else if(rule.type == 'NAME'){ - for(var m of match.matches){ - res.push(new $B.ast.Name(tokens[m.start].string, - new $B.ast.Load())) - } - if(rule.alias){ - set_alias(L, rule.alias, res) - } - if(rule.action){ - return rule.action(L) - } - return res - } - var makes = [] - for(var one_match of match.matches){ - // Each match matches rule.items - if(one_match.rule === rule){ - var elts = [] - for(var i = 0; i < one_match.matches.length; i++){ - var m = one_match.matches[i] - var _make = make_ast(m, tokens) - if(rule.items[i].alias){ - set_alias(L, rule.items[i].alias, _make) - } - elts.push(_make) - } - if(rule.action){ - try{ - var res = rule.action(L) - }catch(err){ - console.log('error eval action of', show_rule(rule), match) - throw err - } - makes.push(res) - }else if(elts.length == 1){ - makes.push(elts[0]) - 
}else{ - makes.push(elts) - } - }else{ - makes.push(make_ast(one_match, tokens)) - } - } - if(makes.length == 0){ - return - } - if(repeater[rule.repeat][1] == 1){ - return makes[0] - } - return makes - } - - if(rule.items){ - var makes = [], - nb_consuming = 0, - ast, - _make - if(match.matches.length > 0){ - var first = match.matches[0], - last = $B.last(match.matches), - last_token - EXTRA = { - lineno: tokens[first.start].start[0], - col_offset: tokens[first.start].start[1], - end_lineno: tokens[last.end - 1].end[0], - end_col_offset: tokens[last.end - 1].end[1] - } - var pos = last.end - 1, - last_line = tokens[pos].start[0] - if(last_line > tokens[last.end - 1].start[0] + 1){ - last_token = {type: 'NL', - start: [last_line - 1, 0], - end: [last_line - 1, 0], - line: '\n'} - }else{ - last_token = tokens[last.end - 1] - } - - p.arena = { - lineno: last_token.start[0], - offset: last_token.start[1], - end_lineno: last_token.end[0], - end_col_offset: last_token.end[1] - } - if(test){ - console.log('last token', tokens[last.end]) - console.log('extra', EXTRA) - } - } - for(var i = 0; i < match.matches.length; i++){ - var m = match.matches[i] - if(test){ - console.log(' match', i, m) - } - if(m.end > m.start){ - _make = make_ast(m, tokens) - makes.push(_make) - }else{ - if(m.rule.repeat && repeater[m.rule.repeat][1] > 1){ - // If m.rule has * or + modifier, return empty list - _make = [] - }else{ - _make = undefined - } - } - if(rule.items[i].alias){ - names[rule.items[i].alias] = _make - set_alias(L, rule.items[i].alias, _make) - } - if(! rule.items[i].lookahead){ - nb_consuming++ - } - } - if(rule.action){ - try{ - ast = rule.action(L) - }catch(err){ - if(debug === null){ - var rule_str = show_rule(rule, true) - console.log('error eval action of', rule_str) - console.log('rule.action', rule.action + '') - console.log('p', p) - //console.log($B.make_frames_stack()) - console.log(err.message) - console.log(err.stack) - } - throw err - } - }else if(nb_consuming == 1){ - ast = makes[0] - }else{ - ast = makes - } - return ast - }else{ - if(rule.type == 'NAME'){ - var ast_obj = new $B.ast.Name(tokens[match.start].string, - new $B.ast.Load()) - set_position_from_EXTRA(ast_obj, EXTRA) - return ast_obj - }else if(rule.type == 'NUMBER'){ - try{ - var prepared = $B.prepare_number(token[1]) - }catch(err){ - RAISE_SYNTAX_ERROR_KNOWN_LOCATION(p.arena, - 'wrong number %s', token[1]) - } - var value = $B.AST.$convert(prepared) - var ast_obj = new $B.ast.Constant(value) - set_position_from_EXTRA(ast_obj, EXTRA) - return ast_obj - }else if(['STRING', 'string'].includes(rule.type)){ - return token - }else if(rule.type == 'FSTRING_START'){ - return token - }else if(rule.type == 'FSTRING_MIDDLE'){ - return token - }else if(rule.type == 'FSTRING_END'){ - return token - } - - // ignore other rules such as DEDENT, NEWLINE etc. - } -} - -// Functions for debugging -function show(match, tokens, level){ - level = level || 0 - var s = '', - prefix = ' '.repeat(level), - rule = match.rule - - s += prefix + show_rule(rule) - if(match.matches){ - s += ' (' + match.matches.length + ' matches' - for(var m of match.matches){ - if(m.rule === rule){ - s += ' same rule ' + show_rule(m.rule) - } - } - s += ')' - } - - s += '\n' - if(! 
match.rule.repeat){ - level += 1 - } - - if(match.matches){ - for(var m of match.matches){ - s += show(m, tokens, level) - } - }else{ - if(match.end > match.start){ - s += prefix - if(['NAME', 'STRING', 'NUMBER', 'string'].includes(match.rule.type)){ - s += match.rule.type + ' ' + tokens[match.start][1] - }else{ - s += match.rule.type + ' ' + (match.rule.value || '') + - match.start + '-' + match.end - } - s += '\n' - } - } - return s -} - -function debug_head(n){ - var signs = '|:.', - s = '' - for(var i = 0; i < n; i++){ - s += '| ' - } - return s -} - -function show_rule(rule, show_action){ - var res = rule.name || '' - if(rule.lookahead == 'positive'){ - res += '&' - }else if(rule.lookahead == 'negative'){ - res += '!' - } - if(rule.type && rule.type != 'rule'){ - if(rule.type == 'string'){ - res += "'" + rule.value + "'" - }else{ - res += rule.type - } - } - - if(rule.choices){ - res += ' (' + rule.choices.map(show_rule).join(' | ') + ')' - }else if(rule.items){ - res += ' ' + rule.items.map(show_rule).join(' ') - } - - if(rule.action && show_action){ - res += ' {' + rule.action + '}' - } - - if(rule.repeat){ - if(rule.items && rule.items.length > 1){ - res = '(' + res + ')' - } - res += rule.repeat - } - if(rule.join){ - res = `'${rule.join}'.` + res - } - if(rule.alias){ - res = (rule.alias + '=' + res) - } - if(rule.parent_rule){ - res = '<' + rule.parent_rule +' #' + rule.rank +'>' + res - } - return res -} - - })(__BRYTHON__) \ No newline at end of file diff --git a/www/src/python_tokenizer.js b/www/src/python_tokenizer.js index b6c524b39..f73d78d01 100644 --- a/www/src/python_tokenizer.js +++ b/www/src/python_tokenizer.js @@ -3,6 +3,12 @@ var _b_ = $B.builtins +function is_whitespace(char){ + return ' \n\r\t\f'.includes(char) +} + +var unprintable_re = /\p{Cc}|\p{Cf}|\p{Co}|\p{Cs}|\p{Zl}|\p{Zp}|\p{Zs}/u + const Other_ID_Start = [0x1885, 0x1886, 0x2118, 0x212E, 0x309B, 0x309C].map( x => String.fromCodePoint(x)) @@ -136,6 +142,7 @@ function ord(char){ return code } + function $last(array){ return array[array.length - 1] } @@ -145,45 +152,59 @@ var ops = '.,:;+-*/%~^|&=<>[](){}@', // ! 
is valid in f-strings augm_op = '+-*/%^|&=<>@', closing = {'}': '{', ']': '[', ')': '('} -function Token(type, string, start, end, line){ - start = start.slice(0, 2) - var res - if($B.py_tokens){ - res = {string, line} - res.num_type = $B.py_tokens[type] - if(type == 'OP'){ - res.num_type = $B.py_tokens[$B.EXACT_TOKEN_TYPES[string]] - }else if(type == 'NAME' && ['async', 'await'].includes(string)){ - res.num_type = $B.py_tokens[string.toUpperCase()] - } - res.lineno = start[0] - res.col_offset = start[1] - res.end_lineno = end[0] - res.end_col_offset = end[1] - if(res.num_type == -1){ - console.log('res', res) - alert() - } - }else{ - res = {type, string, start, end, line} - res[0] = type - res[1] = string - res[2] = start - res[3] = end - res[4] = line +function Token(type, string, lineno, col_offset, end_lineno, end_col_offset, + line){ + var res = {type, string, line, lineno, col_offset, end_lineno, end_col_offset} + res.num_type = $B.py_tokens[type] + if(type == 'OP'){ + res.num_type = $B.py_tokens[$B.EXACT_TOKEN_TYPES[string]] + }else if(type == 'NAME' && ['async', 'await'].includes(string)){ + res.num_type = $B.py_tokens[string.toUpperCase()] + }else if(type == 'ENCODING'){ + res.num_type = $B.py_tokens.ENCODING } + res.bytes = res.string // cheating return res } -function get_comment(src, pos, line_num, line_start, token_name, line){ +function get_comment(parser, src, pos, line_num, line_start, token_name, line){ var start = pos, ix var t = [] while(true){ if(pos >= src.length || (ix = '\r\n'.indexOf(src[pos])) > -1){ + if(parser && parser.flags & $B.PyCF_TYPE_COMMENTS){ + var comment = src.substring(start - 1, pos), + mo = /^#\s*type\s*:(.*)/.exec(comment) + if(mo){ + var is_type_ignore = false + if(mo[1].startsWith('ignore')){ + if(mo[1].length == 6){ + is_type_ignore = true + }else{ + var char = mo[1][6] + if(char.charCodeAt(0) <= 128 && /[a-zA-Z0-9]/.exec(char) === null){ + is_type_ignore = true + } + } + } + if(is_type_ignore){ + t.push(Token('TYPE_IGNORE', comment, + line_num, start - line_start, + line_num, pos - line_start + 1, + line)) + }else{ + t.push(Token('TYPE_COMMENT', comment, + line_num, start - line_start, + line_num, pos - line_start + 1, + line)) + } + return {t, pos} + } + } t.push(Token('COMMENT', src.substring(start - 1, pos), - [line_num, start - line_start], - [line_num, pos - line_start + 1], + line_num, start - line_start, + line_num, pos - line_start + 1, line)) if(ix !== undefined){ var nb = 1 @@ -194,13 +215,13 @@ function get_comment(src, pos, line_num, line_start, token_name, line){ nb = 0 } t.push(Token(token_name, src.substr(pos, nb), - [line_num, pos - line_start + 1], - [line_num, pos - line_start + nb + 1], + line_num, pos - line_start + 1, + line_num, pos - line_start + nb + 1, line)) if(src[pos] === undefined){ t.push(Token('NEWLINE', '\n', - [line_num, pos - line_start + 1], - [line_num, pos - line_start + 2], + line_num, pos - line_start + 1, + line_num, pos - line_start + 2, '')) } pos += nb @@ -226,33 +247,6 @@ function test_num(num_type, char){ } } -$B.TokenReader = function(src, filename){ - this.tokens = [] - this.tokenizer = $B.tokenizer(src, filename) - this.position = 0 -} - -$B.TokenReader.prototype.read = function(){ - var res - if(this.position < this.tokens.length){ - res = this.tokens[this.position] - }else{ - res = this.tokenizer.next() - if(res.done){ - this.done = true - return - } - res = res.value - this.tokens.push(res) - } - this.position++ - return res -} - -$B.TokenReader.prototype.seek = function(position){ - 
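// --- Illustrative sketch (editor's note, not part of the patch) -----------
// The Token() factory above now takes four scalar position arguments
// (lineno, col_offset, end_lineno, end_col_offset) instead of two
// [line, col] arrays, which is why every call site in this file changes.
// A hypothetical one-character helper using the new signature:
function op_token(char, line_num, col, line){
    return Token('OP', char, line_num, col, line_num, col + 1, line)
}
// op_token('+', 3, 7, line).num_type === $B.py_tokens[$B.EXACT_TOKEN_TYPES['+']]
// ---------------------------------------------------------------------------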
this.position = position -} - function nesting_level(token_modes){ var ix = token_modes.length - 1 while(ix >= 0){ @@ -264,7 +258,7 @@ function nesting_level(token_modes){ } } -$B.tokenizer = function*(src, filename, mode){ +$B.tokenizer = function*(src, filename, mode, parser){ var string_prefix = /^(r|u|R|U|f|F|fr|Fr|fR|FR|rf|rF|Rf|RF)$/, bytes_prefix = /^(b|B|br|Br|bR|BR|rb|rB|Rb|RB)$/ @@ -277,7 +271,7 @@ $B.tokenizer = function*(src, filename, mode){ linenum = 0, line_at = {} - for(var i = 0, len = src.length; i < len; i++){ + for(let i = 0, len = src.length; i < len; i++){ line_at[i] = linenum if(src[i] == '\n'){ linenum++ @@ -304,6 +298,7 @@ $B.tokenizer = function*(src, filename, mode){ num_type, comment, indent, + indent_before_continuation = 0, indents = [], braces = [], line, @@ -318,11 +313,14 @@ $B.tokenizer = function*(src, filename, mode){ fstring_escape, format_specifier - yield Token('ENCODING', 'utf-8', [0, 0], [0, 0], '') + if(parser){ + parser.braces = braces + } + + yield Token('ENCODING', 'utf-8', 0, 0, 0, 0, '') while(pos < src.length){ char = src[pos] - // console.log('char', char, 'state', state, 'token mode', token_mode) cp = src.charCodeAt(pos) if(cp >= 0xD800 && cp <= 0xDBFF){ // code point encoded by a surrogate pair @@ -361,12 +359,12 @@ $B.tokenizer = function*(src, filename, mode){ if(fstring_buffer.length > 0){ // emit FSTRING_MIDDLE token yield Token(FSTRING_MIDDLE, fstring_buffer, - [line_num, fstring_start], - [line_num, fstring_start + fstring_buffer.length], + line_num, fstring_start, + line_num, fstring_start + fstring_buffer.length, line) } - yield Token(FSTRING_END, char, [line_num, pos], - [line_num, pos], line) + yield Token(FSTRING_END, char, line_num, pos, + line_num, pos, line) // pop from token modes token_modes.pop() token_mode = $B.last(token_modes) @@ -382,8 +380,8 @@ $B.tokenizer = function*(src, filename, mode){ // emit FSTRING_MIDDLE if not empty if(fstring_buffer.length > 0){ yield Token(FSTRING_MIDDLE, fstring_buffer, - [line_num, fstring_start], - [line_num, fstring_start + fstring_buffer.length], + line_num, fstring_start, + line_num, fstring_start + fstring_buffer.length, line) } token_mode = 'regular_within_fstring' @@ -400,8 +398,8 @@ $B.tokenizer = function*(src, filename, mode){ }else{ // emit closing bracket token yield Token('OP', char, - [line_num, pos - line_start], - [line_num, pos - line_start + 1], + line_num, pos - line_start, + line_num, pos - line_start + 1, line) continue } @@ -431,8 +429,8 @@ $B.tokenizer = function*(src, filename, mode){ if(format_specifier.length > 0){ // emit FSTRING_MIDDLE token yield Token(FSTRING_MIDDLE, format_specifier, - [line_num, fstring_start], - [line_num, fstring_start + format_specifier.length], + line_num, fstring_start, + line_num, fstring_start + format_specifier.length, line) // pop from token modes token_modes.pop() @@ -442,8 +440,8 @@ $B.tokenizer = function*(src, filename, mode){ }else if(char == '{'){ // emit FSTRING_MIDDLE yield Token(FSTRING_MIDDLE, format_specifier, - [line_num, fstring_start], - [line_num, fstring_start + format_specifier.length], + line_num, fstring_start, + line_num, fstring_start + format_specifier.length, line) token_mode = 'regular_within_fstring' fstring_expr_start = pos - line_start @@ -452,15 +450,15 @@ $B.tokenizer = function*(src, filename, mode){ }else if(char == '}'){ // emit FSTRING_MIDDLE yield Token(FSTRING_MIDDLE, format_specifier, - [line_num, fstring_start], - [line_num, fstring_start + format_specifier.length], + line_num, fstring_start, + 
line_num, fstring_start + format_specifier.length, line) // emit closing bracket token yield Token('OP', char, - [line_num, pos - line_start], - [line_num, pos - line_start + 1], + line_num, pos - line_start, + line_num, pos - line_start + 1, line) - if(braces.length == 0 || $B.last(braces) !== '{'){ + if(braces.length == 0 || $B.last(braces).char !== '{'){ throw Error('wrong braces') } braces.pop() @@ -482,14 +480,14 @@ $B.tokenizer = function*(src, filename, mode){ line_num++ if(mo = /^\f?(\r\n|\r|\n)/.exec(src.substr(pos - 1))){ // line break - yield Token('NL', mo[0], [line_num, 0], - [line_num, mo[0].length], + yield Token('NL', mo[0], line_num, 0, + line_num, mo[0].length, line) pos += mo[0].length - 1 continue }else if(char == '#'){ - comment = get_comment(src, pos, line_num, line_start, - 'NL', line) + comment = get_comment(parser, src, pos, line_num, + line_start, 'NL', line) for(var item of comment.t){ yield item } @@ -505,11 +503,31 @@ $B.tokenizer = function*(src, filename, mode){ indent = 8 } if(indent){ + var broken = false while(pos < src.length){ + if(broken && indent > 0 && ' \t'.includes(src[pos])){ + console.log('indentation error 479') + $B.raise_error_known_location( + _b_.IndentationError, + filename, + line_num, pos - line_start, + line_num, pos - line_start + 1, + line, + 'unindent does not match any outer indentation level' + ) + } if(src[pos] == ' '){ indent++ }else if(src[pos] == '\t'){ indent += 8 + }else if(src[pos] == '\\' && src[pos + 1] == '\n'){ + // continuation line at the end of a + // whitespace-only line + pos++ + line_start = pos + 2 + line_num++ + line = get_line_at(pos + 2) + broken = true }else{ break } @@ -522,36 +540,49 @@ $B.tokenizer = function*(src, filename, mode){ } if(src[pos] == '#'){ // ignore leading whitespace if line is a comment - var comment = get_comment(src, pos + 1, line_num, + comment = get_comment(parser, src, pos + 1, line_num, line_start, 'NL', line) for(var item of comment.t){ yield item } pos = comment.pos continue + }else if(src[pos] == '\\'){ + if(/^\f?(\r\n|\r|\n)/.exec(src[pos + 1])){ + line_num++ + pos++ + continue + }else{ + $B.raise_error_known_location(_b_.SyntaxError, + filename, line_num, pos + 2 - line_start, + line_num, pos + 3 - line_start, + line, + 'unexpected character after line continuation character') + } }else if(mo = /^\f?(\r\n|\r|\n)/.exec(src.substr(pos))){ // whitespace-only line - yield Token('NL', '', [line_num, pos - line_start + 1], - [line_num, pos - line_start + 1 + mo[0].length], line) + yield Token('NL', '', line_num, pos - line_start + 1, + line_num, pos - line_start + 1 + mo[0].length, line) pos += mo[0].length continue } if(indents.length == 0 || indent > $last(indents)){ indents.push(indent) - yield Token('INDENT', '', [line_num, 0], - [line_num, indent], line) + yield Token('INDENT', '', line_num, 0, + line_num, indent, line) }else if(indent < $last(indents)){ var ix = indents.indexOf(indent) if(ix == -1){ - var error = Error('unindent does not match ' + - 'any outer indentation level') - error.type = 'IndentationError' - error.line_num = line_num - throw error } + var message = 'unindent does not match ' + + 'any outer indentation level' + $B.raise_error_known_location(_b_.IndentationError, + filename, line_num, 0, + line_num, 0, line, message) + } for(var i = indents.length - 1; i > ix; i--){ indents.pop() - yield Token('DEDENT', '', [line_num, indent], - [line_num, indent], line) + yield Token('DEDENT', '', line_num, indent, + line_num, indent, line) } } state = null @@ -559,8 +590,8 
@@ $B.tokenizer = function*(src, filename, mode){ // dedent all while(indents.length > 0){ indents.pop() - yield Token('DEDENT', '', [line_num, indent], - [line_num, indent], line) + yield Token('DEDENT', '', line_num, indent, + line_num, indent, line) } state = null pos-- @@ -584,7 +615,7 @@ $B.tokenizer = function*(src, filename, mode){ break case '#': var token_name = braces.length > 0 ? 'NL' : 'NEWLINE' - comment = get_comment(src, pos, line_num, line_start, + comment = get_comment(parser, src, pos, line_num, line_start, token_name, line) for(var item of comment.t){ yield item @@ -649,28 +680,25 @@ $B.tokenizer = function*(src, filename, mode){ while(op.length >= 3){ // sequences of 3 consecutive dots are sent // as a single token for Ellipsis - yield Token('OP', '...', [line_num, dot_pos], - [line_num, dot_pos + 3], line) + yield Token('OP', '...', line_num, dot_pos, + line_num, dot_pos + 3, line) op = op.substr(3) } for(var i = 0; i < op.length; i++){ - yield Token('OP', '.', [line_num, dot_pos], - [line_num, dot_pos + 1], line) + yield Token('OP', '.', line_num, dot_pos, + line_num, dot_pos + 1, line) dot_pos++ } } break case '\\': var mo = /^\f?(\r\n|\r|\n)/.exec(src.substr(pos)) - if(mo = /^\f?(\r\n|\r|\n)/.exec(src.substr(pos))){ + if(mo){ if(pos == src.length - 1){ - yield Token('ERRORTOKEN', char, - [line_num, pos - line_start], - [line_num, pos - line_start + 1], line) - var token_name = braces.length > 0 ? 'NL': 'NEWLINE' - yield Token(token_name, mo[0], - [line_num, pos - line_start], - [line_num, pos - line_start + mo[0].length], line) + var msg = 'unexpected EOF while parsing' + $B.raise_error_known_location(_b_.SyntaxError, + filename, line_num, pos - line_start, line_num, pos - line_start + 1, + line, msg) } line_num++ pos += mo[0].length @@ -690,8 +718,8 @@ $B.tokenizer = function*(src, filename, mode){ var token_name = braces.length > 0 ? 
'NL': 'NEWLINE' mo = /^\f?(\r\n|\r|\n)/.exec(src.substr(pos - 1)) yield Token(token_name, mo[0], - [line_num, pos - line_start], - [line_num, pos - line_start + mo[0].length], line) + line_num, pos - line_start, + line_num, pos - line_start + mo[0].length, line) pos += mo[0].length - 1 if(token_name == 'NEWLINE'){ state = 'line_start' @@ -720,8 +748,8 @@ $B.tokenizer = function*(src, filename, mode){ // opening '{' appended if(nesting_level(token_modes) == braces.length - 1){ let colon = Token('OP', char, - [line_num, pos - line_start - op.length + 1], - [line_num, pos - line_start + 1], + line_num, pos - line_start - op.length + 1, + line_num, pos - line_start + 1, line) // used on fstring debug mode colon.metadata = src.substr( @@ -736,15 +764,15 @@ $B.tokenizer = function*(src, filename, mode){ }else{ // closing brace let closing_brace = Token('OP', char, - [line_num, pos - line_start - op.length + 1], - [line_num, pos - line_start + 1], + line_num, pos - line_start - op.length + 1, + line_num, pos - line_start + 1, line) closing_brace.metadata = src.substring( - line_start + fstring_start + 2, pos - 1) + line_start + fstring_expr_start, pos - 1) yield closing_brace token_modes.pop() token_mode = token_modes[token_modes.length - 1] - if(braces.length == 0 || $B.last(braces) !== '{'){ + if(braces.length == 0 || $B.last(braces).char !== '{'){ throw Error('wrong braces') } braces.pop() @@ -766,30 +794,30 @@ $B.tokenizer = function*(src, filename, mode){ pos++ } if('[({'.includes(char)){ - braces.push(char) + braces.push({char, pos, line_num, line_start, line}) }else if('])}'.includes(char)){ - if(braces && $last(braces) == closing[char]){ + if(braces.length && $last(braces).char == closing[char]){ braces.pop() }else{ - braces.push(char) + braces.push({char, pos, line_num, line_start, line}) } } yield Token('OP', op, - [line_num, pos - line_start - op.length + 1], - [line_num, pos - line_start + 1], + line_num, pos - line_start - op.length + 1, + line_num, pos - line_start + 1, line) }else if(char == '!'){ if(src[pos] == '='){ yield Token('OP', '!=', - [line_num, pos - line_start], - [line_num, pos - line_start + 2], + line_num, pos - line_start, + line_num, pos - line_start + 2, line) pos++ }else{ // conversion let token = Token('OP', char, - [line_num, pos - line_start], - [line_num, pos - line_start + 1], + line_num, pos - line_start, + line_num, pos - line_start + 1, line) // used on fstring debug mode token.metadata = src.substring( @@ -800,10 +828,25 @@ $B.tokenizer = function*(src, filename, mode){ // ignore }else{ // invalid character - yield Token('ERRORTOKEN', char, - [line_num, pos - line_start], - [line_num, pos - line_start + 1], + var cp = char.codePointAt(0), + err_msg = 'invalid' + if(unprintable_re.exec(char)){ + err_msg += ' non-printable' + } + var unicode = cp.toString(16).toUpperCase() + while(unicode.length < 4){ + unicode = '0' + unicode + } + err_msg += ` character '${char}' (U+${unicode})` + if(char == '$' || char == '`'){ + err_msg = 'invalid syntax' + } + var err_token = Token('ERRORTOKEN', char, + line_num, pos - line_start, + line_num, pos - line_start + 1, line) + $B.raise_error_known_token(_b_.SyntaxError, filename, + err_token, err_msg) } } break @@ -832,8 +875,8 @@ $B.tokenizer = function*(src, filename, mode){ var s = triple_quote ? 
quote.repeat(3) : quote var end_col = fstring_start + name.length + s.length yield Token(FSTRING_START, prefix + s, - [line_num, fstring_start], - [line_num, end_col], + line_num, fstring_start, + line_num, end_col, line) continue } @@ -843,16 +886,16 @@ $B.tokenizer = function*(src, filename, mode){ string = '' }else{ yield Token('NAME', name, - [line_num, pos - line_start - name.length], - [line_num, pos - line_start], + line_num, pos - line_start - name.length, + line_num, pos - line_start, line) state = null pos-- } }else{ yield Token('NAME', name, - [line_num, pos - line_start - name.length], - [line_num, pos - line_start], + line_num, pos - line_start - name.length, + line_num, pos - line_start, line) state = null pos-- @@ -876,8 +919,8 @@ $B.tokenizer = function*(src, filename, mode){ full_string = prefix + quote + string + quote yield Token('STRING', full_string, - string_start, - [line_num, pos - line_start + 1], + string_start[0], string_start[1], + line_num, pos - line_start + 1, string_line) state = null }else if(char + src.substr(pos, 2) == @@ -888,8 +931,8 @@ $B.tokenizer = function*(src, filename, mode){ // several lines, "line" is extended until the // last quote yield Token('STRING', full_string, - string_start, - [line_num, pos - line_start + 3], + string_start[0], string_start[1], + line_num, pos - line_start + 3, string_line) pos += 2 state = null @@ -905,27 +948,15 @@ $B.tokenizer = function*(src, filename, mode){ case '\n': if(! escaped && ! triple_quote){ // unterminated string - // go back to yield whitespace as ERRORTOKEN - var quote_pos = string_start[1] + line_start - 1 - pos = quote_pos - while(src[pos - 1] == ' '){ - pos-- - } - while(pos < quote_pos){ - yield Token('ERRORTOKEN', ' ', - [line_num, pos - line_start + 1], - [line_num, pos - line_start + 2], - line) - pos++ - } - pos++ - yield Token('ERRORTOKEN', quote, - [line_num, pos - line_start], - [line_num, pos - line_start + 1], - line) - state = null - pos++ - break + var msg = `unterminated string literal ` + + `(detected at line ${line_num})`, + line_num = string_start[0], + col_offset = string_start[1] + $B.raise_error_known_location(_b_.SyntaxError, + filename, line_num, col_offset, + line_num, col_offset, + line, + msg) } string += char line_num++ @@ -959,8 +990,8 @@ $B.tokenizer = function*(src, filename, mode){ ! 
test_num(num_type, src[pos])){ // eg 12_ yield Token('NUMBER', number, - [line_num, pos - line_start - number.length], - [line_num, pos - line_start], + line_num, pos - line_start - number.length, + line_num, pos - line_start, line) state = null pos-- @@ -976,8 +1007,8 @@ $B.tokenizer = function*(src, filename, mode){ number += char }else{ yield Token('NUMBER', number, - [line_num, pos - line_start - number.length], - [line_num, pos - line_start], + line_num, pos - line_start - number.length, + line_num, pos - line_start, line) state = null pos-- @@ -988,8 +1019,8 @@ $B.tokenizer = function*(src, filename, mode){ }else if(char.toLowerCase() == 'j'){ // complex number number += char yield Token('NUMBER', number, - [line_num, pos - line_start - number.length + 1], - [line_num, pos - line_start + 1], + line_num, pos - line_start - number.length + 1, + line_num, pos - line_start + 1, line) state = null }else if(char.match(/\p{Letter}/u)){ @@ -1000,8 +1031,8 @@ $B.tokenizer = function*(src, filename, mode){ line, 'invalid decimal literal') }else{ yield Token('NUMBER', number, - [line_num, pos - line_start - number.length], - [line_num, pos - line_start], + line_num, pos - line_start - number.length, + line_num, pos - line_start, line) state = null pos-- @@ -1010,44 +1041,47 @@ $B.tokenizer = function*(src, filename, mode){ } } - if(braces.length > 0){ - throw SyntaxError('EOF in multi-line statement') - } - switch(state){ case 'line_start': line_num++ break case 'NAME': yield Token('NAME', name, - [line_num, pos - line_start - name.length + 1], - [line_num, pos - line_start + 1], + line_num, pos - line_start - name.length + 1, + line_num, pos - line_start + 1, line) break case 'NUMBER': yield Token('NUMBER', number, - [line_num, pos - line_start - number.length + 1], - [line_num, pos - line_start + 1], + line_num, pos - line_start - number.length + 1, + line_num, pos - line_start + 1, line) break case 'STRING': - throw SyntaxError( - `unterminated ${triple_quote ? 'triple-quoted ' : ''}` + - `string literal (detected at line ${line_num})`) - } + line_num = string_start[0] + line = lines[line_num - 1] + var msg = `unterminated ${triple_quote ? 'triple-quoted ' : ''}` + + `string literal (detected at line ${line_num})`, + col_offset = string_start[1] + $B.raise_error_known_location(_b_.SyntaxError, + filename, line_num, col_offset, + line_num, col_offset, + line, + msg) +} if(! 
src.endsWith('\n') && state != line_start){ - yield Token('NEWLINE', '', [line_num, pos - line_start + 1], - [line_num, pos - line_start + 1], line +'\n') + yield Token('NEWLINE', '', line_num, pos - line_start + 1, + line_num, pos - line_start + 1, line +'\n') line_num++ } while(indents.length > 0){ indents.pop() - yield Token('DEDENT', '', [line_num, 0], [line_num, 0], '') + yield Token('DEDENT', '', line_num, 0, line_num, 0, '') } - yield Token('ENDMARKER', '', [line_num, 0], [line_num, 0], '') + yield Token('ENDMARKER', '', line_num, 0, line_num, 0, '') } })(__BRYTHON__) diff --git a/www/src/stdlib_paths.js b/www/src/stdlib_paths.js index 701106088..d8f2cce22 100644 --- a/www/src/stdlib_paths.js +++ b/www/src/stdlib_paths.js @@ -7,7 +7,7 @@ for(var i = 0; i < pylist.length; i++){$B.stdlib[pylist[i]] = ['py']} var js = ['_ajax','_ast','_base64','_binascii','_io_classes','_json','_jsre','_locale','_multiprocessing','_posixsubprocess','_profile','_random','_sre','_sre_utils','_string','_strptime','_svg','_symtable','_tokenize','_webcomponent','_webworker','_zlib_utils','aes','array','builtins','dis','encoding_cp932','hashlib','hmac-md5','hmac-ripemd160','hmac-sha1','hmac-sha224','hmac-sha256','hmac-sha3','hmac-sha384','hmac-sha512','html_parser','marshal','math','md5','modulefinder','pbkdf2','posix','pyexpat','python_re','rabbit','rabbit-legacy','rc4','ripemd160','sha1','sha224','sha256','sha3','sha384','sha512','tripledes','unicodedata'] for(var i = 0; i < js.length; i++){$B.stdlib[js[i]] = ['js']} -var pkglist = ['browser','browser.widgets','collections','concurrent','concurrent.futures','email','email.mime','encodings','html','http','importlib','importlib.metadata','importlib.resources','json','logging','multiprocessing','multiprocessing.dummy','pydoc_data','site-packages.foobar','site-packages.simpleaio','site-packages.ui','test','test.encoded_modules','test.leakers','test.namespace_pkgs.not_a_namespace_pkg.foo','test.support','test.test_email','test.test_importlib','test.test_importlib.builtin','test.test_importlib.extension','test.test_importlib.frozen','test.test_importlib.import_','test.test_importlib.source','test.test_json','test.tracedmodules','unittest','unittest.test','unittest.test.testmock','urllib'] +var pkglist = ['browser','browser.widgets','collections','concurrent','concurrent.futures','email','email.mime','encodings','html','http','importlib','importlib.metadata','importlib.resources','json','logging','multiprocessing','multiprocessing.dummy','site-packages.foobar','site-packages.simpleaio','site-packages.ui','test','test.encoded_modules','test.leakers','test.namespace_pkgs.not_a_namespace_pkg.foo','test.support','test.test_email','test.test_importlib','test.test_importlib.builtin','test.test_importlib.extension','test.test_importlib.frozen','test.test_importlib.import_','test.test_importlib.source','test.test_json','test.tracedmodules','unittest','unittest.test','unittest.test.testmock','urllib'] for(var i = 0; i < pkglist.length; i++){$B.stdlib[pkglist[i]] = ['py', true]} $B.stdlib_module_names = Object.keys($B.stdlib) })(__BRYTHON__) \ No newline at end of file diff --git a/www/src/string_parser.js b/www/src/string_parser.js index eb3388fd2..1057cb797 100644 --- a/www/src/string_parser.js +++ b/www/src/string_parser.js @@ -109,6 +109,7 @@ $B.prepare_string = function(token){ pos = 0, string_modifier, _type = "string", + quote, context = {type: 'str'} // XXX while(pos < len){ diff --git a/www/src/symtable.js b/www/src/symtable.js index aa347ac04..7da7af28b 100644 
--- a/www/src/symtable.js
+++ b/www/src/symtable.js
@@ -24,10 +24,10 @@ var GLOBAL_PARAM = "name '%s' is parameter and global",
     ANNOTATION_NOT_ALLOWED = "'%s' can not be used within an annotation",
     DUPLICATE_ARGUMENT = "duplicate argument '%s' in function definition",
-    TYPEVAR_BOUND_NOT_ALLOWED = "'%s' can not be used within a TypeVar bound",
-    TYPEALIAS_NOT_ALLOWED = "'%s' can not be used within a type alias",
+    TYPEVAR_BOUND_NOT_ALLOWED = "%s cannot be used within a TypeVar bound",
+    TYPEALIAS_NOT_ALLOWED = "%s cannot be used within a type alias",
     TYPEPARAM_NOT_ALLOWED =
-        "'%s' can not be used within the definition of a generic",
+        "%s cannot be used within the definition of a generic",
     DUPLICATE_TYPE_PARAM = "duplicate type parameter '%s'"
 
 /* Flags for def-use information */
diff --git a/www/src/version_info.js b/www/src/version_info.js
index b49b3cfb5..09f9e818b 100644
--- a/www/src/version_info.js
+++ b/www/src/version_info.js
@@ -1,7 +1,7 @@
-__BRYTHON__.implementation = [3, 12, 1, 'dev', 0]
+__BRYTHON__.implementation = [3, 12, 3, 'dev', 0]
 __BRYTHON__.version_info = [3, 12, 0, 'final', 0]
-__BRYTHON__.compiled_date = "2024-02-04 02:27:37.926428"
-__BRYTHON__.timestamp = 1707031657926
+__BRYTHON__.compiled_date = "2024-03-05 02:20:55.108836"
+__BRYTHON__.timestamp = 1709623255108
 __BRYTHON__.builtin_module_names = ["_ajax",
 "_ast",
 "_base64",
diff --git a/www/tests/console.css b/www/tests/console.css
index b0572477d..4242ea0de 100644
--- a/www/tests/console.css
+++ b/www/tests/console.css
@@ -41,7 +41,7 @@ textarea {
     width:100%;
     height:100%;
     font-size: 12px;
-    font-family: Consolas,"Courier new"
+    font-family: Consolas,"Courier New", monospace;
     float:none;
     background-color:var(--dark-3);
     color:var(--clear-1);
diff --git a/www/tests/console.html b/www/tests/console.html
index 28fab1704..bba112b00 100644
--- a/www/tests/console.html
+++ b/www/tests/console.html
@@ -13,12 +13,14 @@
-
+
-
+
+
+
@@ -40,8 +42,8 @@
-
+
@@ -49,8 +51,16 @@
-
-
+
+
+
+
+
+
+
+
+
+
diff --git a/www/tests/editor.html b/www/tests/editor.html
index 3e51191bf..05a147d3a 100644
--- a/www/tests/editor.html
+++ b/www/tests/editor.html
@@ -13,6 +13,8 @@
+
+
@@ -34,8 +36,8 @@
-
+
@@ -44,6 +46,16 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/www/tests/index.html b/www/tests/index.html
index 577ef9574..7995f6a93 100644
--- a/www/tests/index.html
+++ b/www/tests/index.html
@@ -19,6 +19,8 @@
+
+
@@ -40,8 +42,8 @@
-
+
@@ -50,11 +52,18 @@
+
+
+
+
+
+
+
+
+
-
-
@@ -86,9 +95,7 @@
-
diff --git a/www/tests/run_tests.html b/www/tests/run_tests.html
index bb33a54b0..32810b597 100644
--- a/www/tests/run_tests.html
+++ b/www/tests/run_tests.html
@@ -161,7 +161,7 @@ qunit_add_tests(test_utils.load_brython_test_cases('.'))
 
-for test_name in ('test.test_syntax', 'test.test_int'):
+for test_name in ('test.test_syntax', 'test.test_int', 'test.test_bool'):
     suite = defaultTestLoader.loadTestsFromName(test_name)
     qunit_add_tests(suite, default_caption = 'CPython')
 
diff --git a/www/tests/test_ajax.py b/www/tests/test_ajax.py
index 39a4c7496..a4abc4a53 100644
--- a/www/tests/test_ajax.py
+++ b/www/tests/test_ajax.py
@@ -131,6 +131,16 @@ def check(num, req, expected):
                 data=data,
                 oncomplete=lambda req: check(2, req, expected2))
 
+expected3 = {
+    'files': {'filetosave': content},
+    'form': {'name': 'coucou3'}
+    }
+data = {'name': 'coucou3'}
+ajax.file_upload('https://httpbin.org/anything',
+                 file,
+                 data=data,
+                 oncomplete=lambda req: check(2, req, expected3))
+
 # issue 2346
 
 import json
diff --git a/www/tests/test_fstrings.py b/www/tests/test_fstrings.py
index 45f48333b..f112c840d 100644
--- a/www/tests/test_fstrings.py
+++ b/www/tests/test_fstrings.py
@@ -130,4 +130,9 @@ def fn(l, incr):
 x = 8
 assert rf'raw\n{x}' == 'raw\\n8'
 
+# issue 2376
+a = 5
+b = 6
+assert f"{a=} {b=}" == 'a=5 b=6'
+
 print("passed all tests")
\ No newline at end of file
diff --git a/www/tests/test_jsobjects.py b/www/tests/test_jsobjects.py
index 54a8eb4c8..018ba61f1 100644
--- a/www/tests/test_jsobjects.py
+++ b/www/tests/test_jsobjects.py
@@ -389,4 +389,14 @@ async def call_js_async():
 pydict = window.nestedJSObj.to_dict()
 assert pydict == {'e': {'a': 1, 'b': 2}, 'f': {'c': 3, 'd': 4}}
 
+# issue 2371
+import browser
+a = '{"num":1, "val":2.3}'
+o = browser.window.JSON.parse(a)
+assert o.num == 1
+assert o.val == 2.3
+t = o.to_dict() # prints {'num': 1, 'val': {}}
+assert t["num"] == 1
+assert t["val"] == 2.3
+
 print("all tests ok...")
\ No newline at end of file
diff --git a/www/tests/test_list.py b/www/tests/test_list.py
index 082a696cb..492a09f3e 100644
--- a/www/tests/test_list.py
+++ b/www/tests/test_list.py
@@ -483,5 +483,14 @@ def __init__(self, *args, **kw):
 assert not [1, 2] > [1, 2]
 assert [1, 2] >= [1, 2]
 
+# issue 2374
+*a, b = [1]
+assert a == []
+assert b == 1
+
+a, *b, c = [1, 2]
+assert a == 1
+assert b == []
+assert c == 2
 
 print("passed all tests..")
\ No newline at end of file
diff --git a/www/tests/test_numbers.py b/www/tests/test_numbers.py
index bb98cb762..9eaac62e9 100644
--- a/www/tests/test_numbers.py
+++ b/www/tests/test_numbers.py
@@ -775,5 +775,19 @@ def __hash__(self):
 
 # issue 2363
 assert_raises(ValueError, int, '')
 
+# issue
+ints = [1, 10, 20, 30, 40, 50, 60]
+
+expected = ["-8000000000000000",
+            "-040000000000000",
+            "-000100000000000",
+            "-000000400000000",
+            "-000000001000000",
+            "-000000000004000",
+            "-000000000000010"]
+
+for imm6, exp in zip(ints, expected):
+    assert f"{(~((1 << (64 - imm6)) - 1)):016X}" == exp
+
 print('passed all tests...')
diff --git a/www/tests/test_typing.py b/www/tests/test_typing.py
index 5ce1c7614..9349efad3 100644
--- a/www/tests/test_typing.py
+++ b/www/tests/test_typing.py
@@ -48,4 +48,12 @@ def func[T:(int, 1/0)](arg):
 assert_raises(ZeroDivisionError, getattr, T, '__constraints__')
 
 assert t == ['dec2', 'dec1']
-func(0)
\ No newline at end of file
+func(0)
+
+# issue 2375
+from typing import Protocol
+from collections.abc import Sized
+class MySized(Sized, Protocol):
+    pass
+
+print('all tests pass...')
\ No newline at end of file
diff --git a/www/tests/test_unicodedata.py b/www/tests/test_unicodedata.py
index 3dba495e8..5de27624a 100644
--- a/www/tests/test_unicodedata.py
+++ b/www/tests/test_unicodedata.py
@@ -244,1686 +244,1749 @@ "9": "0039"
     },
     "names": {
-        "EXCLAMATION MARK": "0021",
-        "DIGIT FOUR": "0034",
-        "LATIN CAPITAL LETTER X": "0058",
-        "LATIN CAPITAL LETTER Z": "005A",
-        "LATIN SMALL LETTER E": "0065",
+        "": "0008",
         "LATIN SMALL LETTER F": "0066",
-        "LATIN SMALL LETTER K": "006B",
-        "LATIN SMALL LETTER Z": "007A",
+        "LATIN SMALL LETTER J": "006A",
+        "LATIN SMALL LETTER N": "006E",
+        "LATIN SMALL LETTER O": "006F",
         "CURRENCY SIGN": "00A4",
-        "MICRO SIGN": "00B5",
-        "MIDDLE DOT": "00B7",
-        "VULGAR FRACTION ONE HALF": "00BD",
-        "LATIN CAPITAL LETTER O WITH STROKE": "00D8",
+        "LATIN CAPITAL LETTER U WITH ACUTE": "00DA",
+        "LATIN SMALL LETTER ETH": "00F0",
         "LATIN SMALL LETTER C WITH ACUTE": "0107",
-        "LATIN SMALL LETTER H WITH CIRCUMFLEX": "0125",
-        "LATIN CAPITAL LETTER R WITH ACUTE": "0154",
-        "LATIN SMALL LETTER R WITH CEDILLA": "0157",
-        "LATIN SMALL LETTER S WITH CARON": "0161",
-        "LATIN
CAPITAL LETTER C WITH HOOK": "0187", - "LATIN SMALL LETTER D WITH TOPBAR": "018C", - "LATIN CAPITAL LETTER REVERSED E": "018E", - "LATIN CAPITAL LETTER P WITH HOOK": "01A4", - "LATIN CAPITAL LETTER ESH": "01A9", - "LATIN CAPITAL LETTER UPSILON": "01B1", - "LATIN CAPITAL LETTER EZH WITH CARON": "01EE", - "LATIN CAPITAL LETTER WYNN": "01F7", - "LATIN SMALL LETTER A WITH RING ABOVE AND ACUTE": "01FB", - "LATIN SMALL LETTER I WITH DOUBLE GRAVE": "0209", - "LATIN CAPITAL LETTER U WITH INVERTED BREVE": "0216", - "LATIN CAPITAL LETTER O WITH DOT ABOVE": "022E", - "LATIN SMALL LETTER GLOTTAL STOP": "0242", - "LATIN CAPITAL LETTER TURNED V": "0245", - "LATIN SMALL LETTER D WITH HOOK": "0257", - "LATIN SMALL LETTER GAMMA": "0263", - "LATIN SMALL LETTER I WITH STROKE": "0268", - "LATIN SMALL LETTER TURNED R WITH LONG LEG": "027A", - "LATIN SMALL LETTER DOTLESS J WITH STROKE AND HOOK": "0284", - "LATIN SMALL LETTER TURNED V": "028C", - "MODIFIER LETTER DOWN ARROWHEAD": "02C5", - "DOUBLE ACUTE ACCENT": "02DD", - "MODIFIER LETTER EXTRA-LOW TONE BAR": "02E9", - "MODIFIER LETTER DOUBLE APOSTROPHE": "02EE", - "COMBINING OVERLINE": "0305", - "COMBINING MINUS SIGN BELOW": "0320", - "COMBINING OGONEK": "0328", - "COMBINING LONG STROKE OVERLAY": "0336", - "COMBINING X ABOVE": "033D", - "GREEK CAPITAL LETTER UPSILON": "03A5", - "GREEK CAPITAL LETTER IOTA WITH DIALYTIKA": "03AA", - "COPTIC CAPITAL LETTER KHEI": "03E6", - "CYRILLIC SMALL LETTER SHCHA": "0449", - "CYRILLIC CAPITAL LETTER OMEGA": "0460", - "CYRILLIC SMALL LETTER LITTLE YUS": "0467", + "LATIN CAPITAL LETTER I WITH TILDE": "0128", + "LATIN CAPITAL LETTER N WITH CEDILLA": "0145", + "LATIN SMALL LETTER U WITH DOUBLE ACUTE": "0171", + "LATIN SMALL LETTER C WITH HOOK": "0188", + "LATIN CAPITAL LETTER N WITH LEFT HOOK": "019D", + "LATIN LETTER ALVEOLAR CLICK": "01C2", + "LATIN CAPITAL LETTER N WITH SMALL LETTER J": "01CB", + "LATIN SMALL LETTER I WITH CARON": "01D0", + "LATIN SMALL LETTER TURNED E": "01DD", + "LATIN CAPITAL LETTER A WITH DIAERESIS AND MACRON": "01DE", + "LATIN SMALL LETTER G WITH ACUTE": "01F5", + "LATIN CAPITAL LETTER O WITH DOUBLE GRAVE": "020C", + "LATIN SMALL LETTER H WITH CARON": "021F", + "LATIN SMALL LETTER A WITH DOT ABOVE": "0227", + "LATIN SMALL LETTER O WITH DIAERESIS AND MACRON": "022B", + "LATIN SMALL LETTER QP DIGRAPH": "0239", + "LATIN CAPITAL LETTER A WITH STROKE": "023A", + "LATIN SMALL LETTER C WITH STROKE": "023C", + "LATIN CAPITAL LETTER Y WITH STROKE": "024E", + "LATIN SMALL LETTER LEZH": "026E", + "LATIN SMALL LETTER ESH WITH CURL": "0286", + "LATIN SMALL LETTER TURNED H WITH FISHHOOK": "02AE", + "MODIFIER LETTER SMALL TURNED R WITH HOOK": "02B5", + "MODIFIER LETTER SMALL W": "02B7", + "MODIFIER LETTER CIRCUMFLEX ACCENT": "02C6", + "CARON": "02C7", + "COMBINING RING BELOW": "0325", + "COMBINING GREEK PERISPOMENI": "0342", + "COMBINING LATIN SMALL LETTER R": "036C", + "COMBINING LATIN SMALL LETTER T": "036D", + "GREEK NUMERAL SIGN": "0374", + "GREEK CAPITAL LETTER EPSILON": "0395", + "GREEK SMALL LETTER CHI": "03C7", + "GREEK KAI SYMBOL": "03D7", + "GREEK LETTER ARCHAIC KOPPA": "03D8", + "COPTIC CAPITAL LETTER SHEI": "03E2", + "GREEK REVERSED LUNATE EPSILON SYMBOL": "03F6", + "GREEK SMALL LETTER SHO": "03F8", + "CYRILLIC CAPITAL LETTER NJE": "040A", + "CYRILLIC SMALL LETTER CHE": "0447", + "CYRILLIC SMALL LETTER YA": "044F", + "CYRILLIC SMALL LETTER LJE": "0459", "CYRILLIC CAPITAL LETTER IZHITSA": "0474", - "CYRILLIC SMALL LETTER ES WITH DESCENDER": "04AB", + "CYRILLIC CAPITAL LETTER ROUND OMEGA": "047A", + "CYRILLIC 
SMALL LETTER GHE WITH MIDDLE HOOK": "0495", + "CYRILLIC SMALL LETTER BASHKIR KA": "04A1", + "CYRILLIC CAPITAL LETTER EN WITH DESCENDER": "04A2", + "CYRILLIC SMALL LIGATURE TE TSE": "04B5", + "CYRILLIC SMALL LETTER ABKHASIAN CHE": "04BD", "CYRILLIC SMALL LETTER ABKHASIAN CHE WITH DESCENDER": "04BF", - "CYRILLIC CAPITAL LETTER KA WITH HOOK": "04C3", - "CYRILLIC CAPITAL LETTER ZHE WITH DIAERESIS": "04DC", - "CYRILLIC SMALL LETTER ZE WITH DIAERESIS": "04DF", - "CYRILLIC SMALL LETTER U WITH DIAERESIS": "04F1", - "CYRILLIC CAPITAL LETTER KOMI DJE": "0502", - "ARMENIAN SMALL LETTER DA": "0564", - "ARMENIAN SMALL LETTER MEN": "0574", - "ARMENIAN SMALL LETTER VEW": "057E", + "CYRILLIC CAPITAL LETTER EN WITH HOOK": "04C7", + "CYRILLIC SMALL LETTER EN WITH TAIL": "04CA", + "CYRILLIC SMALL LETTER A WITH DIAERESIS": "04D3", + "CYRILLIC SMALL LETTER SCHWA": "04D9", + "CYRILLIC CAPITAL LETTER ABKHASIAN DZE": "04E0", + "CYRILLIC SMALL LETTER I WITH DIAERESIS": "04E5", + "CYRILLIC SMALL LETTER LHA": "0515", + "CYRILLIC CAPITAL LETTER RHA": "0516", + "CYRILLIC SMALL LETTER RHA": "0517", + "CYRILLIC SMALL LETTER QA": "051B", + "CYRILLIC CAPITAL LETTER EN WITH MIDDLE HOOK": "0522", + "ARMENIAN CAPITAL LETTER TO": "0539", + "ARMENIAN CAPITAL LETTER VEW": "054E", + "ARMENIAN CAPITAL LETTER FEH": "0556", + "ARMENIAN SMALL LETTER REH": "0580", "ARMENIAN DRAM SIGN": "058F", - "HEBREW ACCENT ETNAHTA": "0591", - "HEBREW LETTER HET": "05D7", + "HEBREW ACCENT ZAQEF QATAN": "0594", + "HEBREW ACCENT ATNAH HAFUKH": "05A2", + "HEBREW LETTER FINAL MEM": "05DD", + "HEBREW LETTER FINAL NUN": "05DF", "HEBREW LETTER AYIN": "05E2", - "ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM": "0610", - "ARABIC SMALL HIGH TAH": "0615", - "ARABIC LETTER MARK": "061C", - "ARABIC LETTER KEHEH WITH THREE DOTS BELOW": "063C", - "ARABIC THOUSANDS SEPARATOR": "066C", - "ARABIC LETTER TEHEH": "067F", - "ARABIC LETTER TCHEH": "0686", - "ARABIC LETTER NG": "06AD", - "ARABIC LETTER GAF WITH TWO DOTS BELOW": "06B2", - "ARABIC LETTER HEH GOAL WITH HAMZA ABOVE": "06C2", - "ARABIC LETTER WAW WITH RING": "06C4", - "ARABIC SIGN SINDHI AMPERSAND": "06FD", - "SYRIAC LETTER ALAPH": "0710", - "SYRIAC LETTER DALATH": "0715", - "SYRIAC LETTER TETH": "071B", + "HEBREW LETTER TSADI": "05E6", + "ARABIC NUMBER MARK ABOVE": "0605", + "ARABIC-INDIC PER MILLE SIGN": "0609", + "ARABIC LETTER DAD": "0636", + "ARABIC DAMMATAN": "064C", + "ARABIC LETTER HAH WITH TWO DOTS VERTICAL ABOVE": "0682", + "ARABIC LETTER DAL WITH DOT BELOW AND SMALL TAH": "068B", + "ARABIC LETTER WAW WITH TWO DOTS ABOVE": "06CA", + "ARABIC SMALL HIGH SEEN": "06DC", + "ARABIC LETTER GHAIN WITH DOT BELOW": "06FC", + "SYRIAC PTHAHA DOTTED": "0732", + "SYRIAC OBLIQUE LINE BELOW": "0748", "SYRIAC MUSIC": "0749", - "ARABIC LETTER AIN WITH TWO DOTS VERTICALLY ABOVE": "075F", - "ARABIC LETTER FARSI YEH WITH EXTENDED ARABIC-INDIC DIGIT TWO ABOVE": "0775", - "THAANA LETTER GAAFU": "078E", - "THAANA LETTER KHAA": "079A", - "THAANA LETTER TO": "07A0", - "NKO DIGIT SIX": "07C6", + "ARABIC LETTER BEH WITH DOT BELOW AND THREE DOTS ABOVE": "0751", + "ARABIC LETTER FEH WITH THREE DOTS POINTING UPWARDS BELOW": "0761", + "ARABIC LETTER FARSI YEH WITH EXTENDED ARABIC-INDIC DIGIT FOUR BELOW": "0777", + "THAANA EEBEEFILI": "07A9", + "THAANA OABOAFILI": "07AF", + "NKO DIGIT ZERO": "07C0", + "NKO DIGIT THREE": "07C3", "NKO LETTER A": "07CA", - "NKO LETTER PA": "07D4", - "NKO LETTER NA WOLOSO": "07E0", - "NKO LETTER NA": "07E3", - "NKO COMBINING SHORT LOW TONE": "07EC", + "NKO COMBINING SHORT HIGH TONE": "07EB", "NKO COMBINING 
SHORT RISING TONE": "07ED", - "NKO COMBINING LONG HIGH TONE": "07EF", - "NKO LAJANYALAN": "07FA", - "SAMARITAN LETTER SINGAAT": "080E", - "SAMARITAN LETTER SHAN": "0814", - "SAMARITAN MODIFIER LETTER I": "0828", - "MANDAIC LETTER AZ": "0846", - "ARABIC SUKUN BELOW": "08D0", - "ARABIC SMALL HIGH WORD ATH-THALATHA": "08DA", - "ARABIC RIGHT ARROWHEAD ABOVE WITH DOT": "08FD", - "DEVANAGARI LETTER A": "0905", - "DEVANAGARI LETTER VOCALIC L": "090C", - "DEVANAGARI LETTER SHORT E": "090E", - "DEVANAGARI LETTER JA": "091C", - "DEVANAGARI LETTER YA": "092F", - "BENGALI LETTER GA": "0997", - "BENGALI LETTER THA": "09A5", - "BENGALI LETTER DA": "09A6", - "BENGALI VOWEL SIGN VOCALIC RR": "09C4", - "BENGALI LETTER VOCALIC LL": "09E1", - "BENGALI GANDA MARK": "09FB", - "GURMUKHI SIGN VISARGA": "0A03", - "GURMUKHI LETTER AA": "0A06", - "GURMUKHI LETTER U": "0A09", - "GURMUKHI LETTER UU": "0A0A", - "GURMUKHI LETTER EE": "0A0F", - "GURMUKHI LETTER AU": "0A14", - "GURMUKHI LETTER CHA": "0A1B", - "GURMUKHI LETTER NYA": "0A1E", - "GURMUKHI LETTER DHA": "0A27", - "GURMUKHI LETTER LLA": "0A33", - "GURMUKHI VOWEL SIGN U": "0A41", - "GUJARATI LETTER A": "0A85", - "GUJARATI LETTER NGA": "0A99", - "GUJARATI LETTER SA": "0AB8", - "GUJARATI SIGN MADDAH": "0AFC", - "ORIYA LETTER JA": "0B1C", - "ORIYA LETTER NNA": "0B23", - "ORIYA LETTER NA": "0B28", - "ORIYA VOWEL SIGN U": "0B41", - "ORIYA VOWEL SIGN UU": "0B42", - "TAMIL LETTER II": "0B88", + "NKO COMBINING NASALIZATION MARK": "07F2", + "SAMARITAN LETTER GAMAN": "0802", + "SAMARITAN VOWEL SIGN OVERLONG AA": "081E", + "MANDAIC LETTER KAD": "0857", + "MANDAIC LETTER AIN": "0858", + "MANDAIC GEMINATION MARK": "085B", + "SYRIAC LETTER MALAYALAM NNNA": "0865", + "ARABIC LETTER ALEF WITH ATTACHED ROUND DOT ABOVE": "0876", + "ARABIC LETTER LAM WITH DOUBLE BAR": "08A6", + "ARABIC LETTER REH WITH LOOP": "08AA", + "ARABIC LETTER ZAIN WITH INVERTED V ABOVE": "08B2", + "ARABIC LETTER REH WITH SMALL NOON ABOVE": "08B9", + "ARABIC SMALL LOW NOON WITH KASRA": "08D9", + "ARABIC TONE LOOP ABOVE": "08EC", + "ARABIC DOUBLE RIGHT ARROWHEAD ABOVE": "08FB", + "DEVANAGARI LETTER UU": "090A", + "DEVANAGARI LETTER TTA": "091F", + "DEVANAGARI LETTER BA": "092C", + "DEVANAGARI LETTER BHA": "092D", + "DEVANAGARI VOWEL SIGN CANDRA O": "0949", + "DEVANAGARI DIGIT ONE": "0967", + "DEVANAGARI LETTER AW": "0975", + "BENGALI LETTER E": "098F", + "BENGALI VOWEL SIGN AI": "09C8", + "BENGALI DIGIT ZERO": "09E6", + "GURMUKHI LETTER VA": "0A35", + "GURMUKHI VOWEL SIGN II": "0A40", + "GUJARATI LETTER U": "0A89", + "GUJARATI LETTER DDHA": "0AA2", + "GUJARATI LETTER THA": "0AA5", + "GUJARATI LETTER VA": "0AB5", + "ORIYA LETTER LA": "0B32", + "ORIYA LETTER RRA": "0B5C", "TAMIL LETTER KA": "0B95", - "TAMIL LETTER RA": "0BB0", - "TELUGU SIGN VISARGA": "0C03", - "TELUGU LETTER I": "0C07", - "TELUGU LETTER GHA": "0C18", - "TELUGU LETTER TTA": "0C1F", - "TELUGU LETTER DDA": "0C21", - "TELUGU LETTER TA": "0C24", - "TELUGU LETTER DHA": "0C27", - "TELUGU VOWEL SIGN II": "0C40", - "TELUGU DIGIT ONE": "0C67", - "KANNADA LETTER II": "0C88", - "KANNADA LETTER GA": "0C97", - "KANNADA LETTER GHA": "0C98", - "KANNADA LETTER LA": "0CB2", - "KANNADA VOWEL SIGN I": "0CBF", - "KANNADA LETTER VOCALIC RR": "0CE0", - "MALAYALAM LETTER TTA": "0D1F", - "MALAYALAM LETTER PHA": "0D2B", - "MALAYALAM LETTER RA": "0D30", - "MALAYALAM VOWEL SIGN VOCALIC RR": "0D44", - "MALAYALAM FRACTION ONE TENTH": "0D5C", - "SINHALA LETTER SANYAKA GAYANNA": "0D9F", - "SINHALA LETTER DANTAJA LAYANNA": "0DBD", - "SINHALA LETTER MUURDHAJA LAYANNA": "0DC5", 
- "SINHALA VOWEL SIGN DIGA GAETTA-PILLA": "0DF2", - "THAI CHARACTER DO DEK": "0E14", - "THAI CHARACTER HO NOKHUK": "0E2E", - "THAI CHARACTER MAITAIKHU": "0E47", - "THAI CHARACTER MAI TRI": "0E4A", - "THAI CHARACTER ANGKHANKHU": "0E5A", - "LAO SIGN PALI VIRAMA": "0EBA", - "LAO TONE MAI EK": "0EC8", - "LAO LETTER KHMU NYO": "0EDF", - "TIBETAN MARK BKA- SHOG YIG MGO": "0F0A", - "TIBETAN DIGIT HALF SIX": "0F2F", - "TIBETAN MARK ANG KHANG GYAS": "0F3D", - "TIBETAN LETTER GA": "0F42", - "TIBETAN LETTER GHA": "0F43", - "TIBETAN LETTER KSSA": "0F69", + "TAMIL YEAR SIGN": "0BF5", + "TELUGU LETTER TTHA": "0C20", + "KANNADA VOWEL SIGN O": "0CCA", + "KANNADA DIGIT ONE": "0CE7", + "MALAYALAM LETTER GHA": "0D18", + "MALAYALAM LETTER NA": "0D28", + "MALAYALAM LETTER VA": "0D35", + "MALAYALAM SIGN CIRCULAR VIRAMA": "0D3C", + "MALAYALAM FRACTION ONE FIFTH": "0D5E", + "MALAYALAM DATE MARK": "0D79", + "MALAYALAM LETTER CHILLU K": "0D7F", + "SINHALA LETTER AYANNA": "0D85", + "SINHALA LETTER AAYANNA": "0D86", + "SINHALA VOWEL SIGN KOMBU DEKA": "0DDB", + "SINHALA LITH DIGIT FOUR": "0DEA", + "SINHALA LITH DIGIT SIX": "0DEC", + "LAO LETTER NYO": "0E8D", + "LAO LETTER DO": "0E94", + "TIBETAN MARK YIG MGO TSHEG SHAD MA": "0F07", + "TIBETAN LETTER NYA": "0F49", + "TIBETAN LETTER DHA": "0F52", + "TIBETAN LETTER RA": "0F62", "TIBETAN LETTER RRA": "0F6C", - "TIBETAN MARK PALUTA": "0F85", - "TIBETAN SIGN GRU MED RGYINGS": "0F8B", - "TIBETAN SUBJOINED LETTER NYA": "0F99", - "TIBETAN SYMBOL PADMA GDAN": "0FC6", - "TIBETAN MARK INITIAL BRDA RNYING YIG MGO MDUN MA": "0FD3", - "MYANMAR LETTER DDHA": "100E", - "MYANMAR LETTER MON E": "1028", - "MYANMAR SYMBOL SHAN EXCLAMATION": "109F", - "GEORGIAN CAPITAL LETTER IN": "10A8", - "GEORGIAN LETTER DON": "10D3", - "GEORGIAN LETTER GHAN": "10E6", - "MODIFIER LETTER GEORGIAN NAR": "10FC", - "HANGUL CHOSEONG SSANGNIEUN": "1114", - "HANGUL CHOSEONG SIOS-CIEUC": "1136", - "HANGUL CHOSEONG CEONGCHIEUMSIOS": "113E", - "HANGUL JUNGSEONG E": "1166", - "HANGUL JUNGSEONG U": "116E", - "HANGUL JUNGSEONG EU": "1173", - "HANGUL JUNGSEONG EO-U": "117B", - "HANGUL JUNGSEONG U-A": "1189", - "HANGUL JUNGSEONG YU-YEO": "1191", - "HANGUL JUNGSEONG YU-YE": "1192", - "ETHIOPIC SYLLABLE HHA": "1210", - "ETHIOPIC SYLLABLE HHWA": "1217", - "ETHIOPIC SYLLABLE QWA": "1248", - "ETHIOPIC SYLLABLE QHA": "1250", - "ETHIOPIC SYLLABLE VAA": "126B", - "ETHIOPIC SYLLABLE TI": "1272", - "ETHIOPIC SYLLABLE CEE": "127C", - "ETHIOPIC SYLLABLE XI": "1282", - "ETHIOPIC SYLLABLE WOA": "12CF", - "ETHIOPIC SYLLABLE PHARYNGEAL AA": "12D3", - "ETHIOPIC SYLLABLE ZHU": "12E1", - "ETHIOPIC SYLLABLE DI": "12F2", - "ETHIOPIC SYLLABLE DE": "12F5", + "TIBETAN VOWEL SIGN VOCALIC RR": "0F77", + "TIBETAN SIGN SNA LDAN": "0F83", + "TIBETAN MARK HALANTA": "0F84", + "TIBETAN SUBJOINED LETTER RA": "0FB2", + "TIBETAN SUBJOINED LETTER FIXED-FORM WA": "0FBA", + "TIBETAN SUBJOINED LETTER FIXED-FORM YA": "0FBB", + "TIBETAN SIGN RDEL NAG RDEL DKAR": "0FCE", + "TIBETAN MARK TRAILING MCHAN RTAGS": "0FDA", + "MYANMAR LETTER NNYA": "100A", + "MYANMAR LETTER LA": "101C", + "MYANMAR VOWEL SIGN E": "1031", + "MYANMAR LETTER VOCALIC R": "1052", + "MYANMAR SIGN WESTERN PWO KAREN TONE-3": "106B", + "MYANMAR SIGN SHAN COUNCIL TONE-3": "108C", + "GEORGIAN CAPITAL LETTER KHAR": "10B5", + "GEORGIAN CAPITAL LETTER HAE": "10C0", + "HANGUL CHOSEONG CIEUC": "110C", + "HANGUL CHOSEONG NIEUN-PIEUP": "1116", + "HANGUL CHOSEONG SSANGIEUNG": "1147", + "HANGUL CHOSEONG CIEUC-IEUNG": "114D", + "HANGUL CHOSEONG NIEUN-HIEUH": "115D", + "HANGUL JONGSEONG NIEUN": "11AB", + 
"HANGUL JONGSEONG RIEUL": "11AF", + "HANGUL JONGSEONG NIEUN-KIYEOK": "11C5", + "ETHIOPIC SYLLABLE HU": "1201", + "ETHIOPIC SYLLABLE HAA": "1203", + "ETHIOPIC SYLLABLE HHU": "1211", + "ETHIOPIC SYLLABLE RAA": "122B", + "ETHIOPIC SYLLABLE QAA": "1243", + "ETHIOPIC SYLLABLE BEE": "1264", + "ETHIOPIC SYLLABLE NI": "1292", + "ETHIOPIC SYLLABLE KU": "12A9", + "ETHIOPIC SYLLABLE WU": "12C9", + "ETHIOPIC SYLLABLE ZHAA": "12E3", + "ETHIOPIC SYLLABLE YA": "12E8", + "ETHIOPIC SYLLABLE DDAA": "12FB", "ETHIOPIC SYLLABLE GWI": "1312", - "ETHIOPIC SYLLABLE GWAA": "1313", - "ETHIOPIC SYLLABLE THAA": "1323", - "ETHIOPIC SYLLABLE THWA": "1327", - "ETHIOPIC SYLLABLE SEBATBEIT PWA": "138C", - "ETHIOPIC TONAL MARK KURT": "1399", - "CHEROKEE LETTER QUI": "13C8", - "CANADIAN SYLLABICS CARRIER I": "1409", - "CANADIAN SYLLABICS WEST-CREE WI": "140F", - "CANADIAN SYLLABICS NASKAPI WOO": "1416", - "CANADIAN SYLLABICS FINAL ACUTE": "141F", - "CANADIAN SYLLABICS WEST-CREE PWE": "143B", - "CANADIAN SYLLABICS WEST-CREE TWA": "1462", - "CANADIAN SYLLABICS WEST-CREE TWAA": "1464", - "CANADIAN SYLLABICS KWO": "147A", - "CANADIAN SYLLABICS CWE": "1492", - "CANADIAN SYLLABICS WEST-CREE CWAA": "149F", - "CANADIAN SYLLABICS MWII": "14B0", - "CANADIAN SYLLABICS MWO": "14B2", - "CANADIAN SYLLABICS NAAI": "14C1", - "CANADIAN SYLLABICS LI": "14D5", - "CANADIAN SYLLABICS WEST-CREE LWI": "14DF", + "ETHIOPIC SYLLABLE GGI": "131A", + "ETHIOPIC SYLLABLE CHEE": "132C", + "ETHIOPIC SYLLABLE FO": "134E", + "ETHIOPIC PARAGRAPH SEPARATOR": "1368", + "ETHIOPIC SYLLABLE FWE": "138B", + "CHEROKEE LETTER U": "13A4", + "CHEROKEE LETTER HI": "13AF", + "CHEROKEE LETTER LE": "13B4", + "CHEROKEE LETTER QUA": "13C6", + "CHEROKEE LETTER SO": "13D0", + "CHEROKEE LETTER SU": "13D1", + "CANADIAN SYLLABICS AA": "140B", + "CANADIAN SYLLABICS WEST-CREE WOO": "1415", + "CANADIAN SYLLABICS CARRIER HEE": "1436", + "CANADIAN SYLLABICS PWII": "143E", + "CANADIAN SYLLABICS TWI": "1459", + "CANADIAN SYLLABICS WEST-CREE TWOO": "1460", + "CANADIAN SYLLABICS KO": "146F", + "CANADIAN SYLLABICS WEST-CREE KWO": "147B", + "CANADIAN SYLLABICS NH": "14D2", + "CANADIAN SYLLABICS WEST-CREE LWE": "14DD", + "CANADIAN SYLLABICS SWE": "14F6", + "CANADIAN SYLLABICS WEST-CREE SWA": "1501", "CANADIAN SYLLABICS S": "1505", - "CANADIAN SYLLABICS SHII": "1512", - "CANADIAN SYLLABICS YE": "1526", - "CANADIAN SYLLABICS YOO": "152B", - "CANADIAN SYLLABICS YWA": "1539", - "CANADIAN SYLLABICS YWAA": "153B", - "CANADIAN SYLLABICS WEST-CREE FWAA": "155C", - "CANADIAN SYLLABICS TTHE": "156B", - "CANADIAN SYLLABICS TYE": "1570", - "CANADIAN SYLLABICS TLHO": "1588", - "CANADIAN SYLLABICS LHO": "15A2", - "CANADIAN SYLLABICS BLACKFOOT NE": "15B8", - "CANADIAN SYLLABICS BLACKFOOT KE": "15BC", - "CANADIAN SYLLABICS CARRIER TTI": "15E6", - "CANADIAN SYLLABICS CARRIER NE": "1604", - "CANADIAN SYLLABICS CARRIER LEE": "1625", - "CANADIAN SYLLABICS CARRIER TLHEE": "1637", - "CANADIAN SYLLABICS CARRIER TLHA": "1639", - "OGHAM LETTER DAIR": "1687", - "RUNIC LETTER AC A": "16AA", - "RUNIC LETTER ENG": "16B6", - "RUNIC LETTER WUNJO WYNN W": "16B9", - "RUNIC LETTER SHORT-TWIG-AR A": "16C6", - "RUNIC LETTER OTHALAN ETHEL O": "16DF", - "RUNIC LETTER FRANKS CASKET IS": "16F5", - "TAGALOG LETTER MA": "170B", - "BUHID LETTER PA": "1749", - "BUHID LETTER RA": "174D", - "TAGBANWA LETTER TA": "1766", + "CANADIAN SYLLABICS NASKAPI SCWA": "150F", + "CANADIAN SYLLABICS WEST-CREE SHWI": "151A", + "CANADIAN SYLLABICS SHWAA": "1523", + "CANADIAN SYLLABICS YWI": "1531", + "CANADIAN SYLLABICS YWII": "1533", + "CANADIAN 
SYLLABICS THA": "1566", + "CANADIAN SYLLABICS NUNAVUT H": "157C", + "CANADIAN SYLLABICS NGI": "158F", + "CANADIAN SYLLABICS LH": "15A6", + "CANADIAN SYLLABICS BLACKFOOT WA": "15B7", + "CANADIAN SYLLABICS CARRIER GHEE": "15C7", + "CANADIAN SYLLABICS CARRIER RE": "15CC", + "CANADIAN SYLLABICS CARRIER WO": "15D1", + "CANADIAN SYLLABICS SAYISI JU": "1615", + "CANADIAN SYLLABICS CARRIER LA": "1627", + "CANADIAN SYLLABICS CARRIER Z": "1646", + "CANADIAN SYLLABICS CARRIER DZI": "164C", + "CANADIAN SYLLABICS CARRIER CHE": "1663", + "OGHAM LETTER GORT": "168C", + "OGHAM LETTER ONN": "1691", + "RUNIC LETTER YR": "16A3", + "RUNIC LETTER OPEN-P": "16D5", + "RUNIC LETTER DOTTED-L": "16DB", + "HANUNOO LETTER BA": "172A", + "BUHID LETTER GA": "1744", + "BUHID LETTER SA": "1750", + "KHMER INDEPENDENT VOWEL QAQ": "17A3", "KHMER INDEPENDENT VOWEL RY": "17AB", - "KHMER INDEPENDENT VOWEL RYY": "17AC", - "KHMER DIGIT ZERO": "17E0", - "KHMER SYMBOL LEK ATTAK MUOY": "17F1", - "KHMER SYMBOL LEK ATTAK BUON": "17F4", - "KHMER SYMBOL LEK ATTAK PRAM": "17F5", - "MONGOLIAN LETTER PA": "182B", - "MONGOLIAN LETTER SHA": "1831", - "MONGOLIAN LETTER TODO LONG VOWEL SIGN": "1843", - "MONGOLIAN LETTER TODO DZA": "185C", - "MONGOLIAN LETTER SIBE SHA": "1867", - "MONGOLIAN LETTER MANCHU RA": "1875", - "MONGOLIAN LETTER ALI GALI DAMARU": "1882", - "MONGOLIAN LETTER ALI GALI UBADAMA": "1883", - "CANADIAN SYLLABICS OJIBWAY N": "18D9", - "CANADIAN SYLLABICS BEAVER DENE L": "18F3", - "LIMBU LETTER NGA": "1905", - "LIMBU VOWEL SIGN AU": "1926", + "KHMER VOWEL SIGN AI": "17C3", + "KHMER SIGN TOANDAKHIAT": "17CD", + "KHMER DIGIT FOUR": "17E4", + "KHMER SYMBOL LEK ATTAK BEI": "17F3", + "MONGOLIAN NIRUGU": "180A", + "MONGOLIAN DIGIT EIGHT": "1818", + "MONGOLIAN LETTER ZA": "183D", + "MONGOLIAN LETTER TODO QA": "184D", + "MONGOLIAN LETTER TODO MA": "184F", + "MONGOLIAN LETTER SIBE HAA": "186D", + "MONGOLIAN LETTER ALI GALI NGA": "188A", + "MONGOLIAN LETTER ALI GALI HALF YA": "18A7", + "LIMBU LETTER KA": "1901", + "LIMBU VOWEL SIGN A": "1920", + "LIMBU VOWEL SIGN OO": "1925", + "LIMBU VOWEL SIGN O": "1928", "LIMBU SUBJOINED LETTER WA": "192B", - "LIMBU SIGN MUKPHRENG": "1939", - "TAI LE LETTER YA": "1955", - "NEW TAI LUE LETTER LOW HA": "19A3", - "NEW TAI LUE LETTER LOW BA": "19A5", - "BUGINESE LETTER NRA": "1A0B", - "TAI THAM LETTER HIGH KHA": "1A21", - "TAI THAM VOWEL SIGN THAM AI": "1A72", - "TAI THAM THAM DIGIT ONE": "1A91", - "COMBINING RIGHT PARENTHESIS ABOVE RIGHT": "1AC2", - "COMBINING NUMBER SIGN ABOVE": "1AC6", + "LIMBU SMALL LETTER NGA": "1931", + "LIMBU SIGN LOO": "1940", + "NEW TAI LUE LETTER LOW NGA": "1987", + "NEW TAI LUE LETTER HIGH THA": "198F", + "NEW TAI LUE LETTER HIGH FA": "199A", + "NEW TAI LUE VOWEL SIGN UEY": "19BF", + "NEW TAI LUE LETTER FINAL V": "19C1", + "NEW TAI LUE LETTER FINAL K": "19C5", + "KHMER SYMBOL DAP-MUOY KOET": "19EB", + "KHMER SYMBOL PII ROC": "19F2", + "KHMER SYMBOL PRAM-PII ROC": "19F7", + "BUGINESE LETTER NYCA": "1A0F", + "BUGINESE LETTER RA": "1A11", + "BUGINESE LETTER VA": "1A13", + "BUGINESE VOWEL SIGN I": "1A17", + "BUGINESE VOWEL SIGN E": "1A19", + "TAI THAM LETTER LOW SA": "1A2A", + "TAI THAM LETTER HIGH PHA": "1A39", + "TAI THAM LETTER MA": "1A3E", + "TAI THAM CONSONANT SIGN LA TANG LAI": "1A57", + "TAI THAM CONSONANT SIGN MA": "1A5C", + "TAI THAM CONSONANT SIGN SA": "1A5E", + "TAI THAM VOWEL SIGN UU": "1A6A", + "TAI THAM SIGN HOY": "1AA4", + "TAI THAM SIGN KAAN": "1AA8", + "TAI THAM SIGN SATKAANKUU": "1AAB", + "BALINESE SIGN ULU RICEM": "1B00", "BALINESE LETTER IKARA TEDUNG": "1B08", - 
"BALINESE LETTER UKARA TEDUNG": "1B0A", - "BALINESE LETTER CA LACA": "1B19", - "BALINESE DIGIT FOUR": "1B54", - "BALINESE PANTI": "1B5A", - "BALINESE PAMENENG": "1B60", - "BALINESE MUSICAL SYMBOL COMBINING KEMPLI": "1B6E", - "SUNDANESE LETTER NGA": "1B8D", - "SUNDANESE LETTER BA": "1B98", - "SUNDANESE LETTER RA": "1B9B", - "SUNDANESE CONSONANT SIGN PASANGAN WA": "1BAD", - "BATAK LETTER SIMALUNGUN HA": "1BC3", - "BATAK SYMBOL BINDU PINARBORAS": "1BFD", - "LEPCHA DIGIT SIX": "1C46", - "OL CHIKI LETTER AAM": "1C62", - "OL CHIKI LETTER LI": "1C64", - "CYRILLIC SMALL LETTER ROUNDED VE": "1C80", - "GEORGIAN MTAVRULI CAPITAL LETTER ZEN": "1C96", - "GEORGIAN MTAVRULI CAPITAL LETTER ON": "1C9D", - "GEORGIAN MTAVRULI CAPITAL LETTER KHAR": "1CA5", - "GEORGIAN MTAVRULI CAPITAL LETTER CIL": "1CAC", - "GEORGIAN MTAVRULI CAPITAL LETTER FI": "1CB6", - "VEDIC TONE PRENKHA": "1CD2", - "VEDIC SIGN ANUSVARA VAMAGOMUKHA": "1CEB", - "VEDIC SIGN DOUBLE ANUSVARA ANTARGOMUKHA": "1CFA", - "LATIN LETTER SMALL CAPITAL J": "1D0A", - "LATIN LETTER SMALL CAPITAL REVERSED N": "1D0E", - "MODIFIER LETTER CAPITAL T": "1D40", - "MODIFIER LETTER SMALL TOP HALF O": "1D54", - "LATIN SMALL LETTER D WITH MIDDLE TILDE": "1D6D", - "MODIFIER LETTER SMALL U BAR": "1DB6", - "COMBINING IS BELOW": "1DD0", - "COMBINING UR ABOVE": "1DD1", - "COMBINING LATIN LETTER SMALL CAPITAL G": "1DDB", - "COMBINING LATIN SMALL LETTER K": "1DDC", - "COMBINING LATIN SMALL LETTER B": "1DE8", - "COMBINING DELETION MARK": "1DFB", - "LATIN CAPITAL LETTER H WITH BREVE BELOW": "1E2A", + "BALINESE LETTER DA MURDA MAHAPRANA": "1B20", + "BALINESE VOWEL SIGN TEDUNG": "1B35", + "BALINESE VOWEL SIGN ULU": "1B36", + "BALINESE LETTER TZIR SASAK": "1B47", + "BALINESE DIGIT NINE": "1B59", + "BALINESE CARIK PAMUNGKAH": "1B5D", + "BALINESE MUSICAL SYMBOL DAING": "1B69", + "SUNDANESE LETTER AE": "1B86", + "SUNDANESE LETTER QA": "1B8B", + "SUNDANESE DIGIT FIVE": "1BB5", + "BATAK LETTER HA": "1BC2", + "BATAK LETTER GA": "1BCE", + "BATAK LETTER I": "1BE4", + "LEPCHA CONSONANT SIGN T": "1C33", + "LEPCHA DIGIT ZERO": "1C40", + "LEPCHA DIGIT SEVEN": "1C47", + "OL CHIKI DIGIT EIGHT": "1C58", + "CYRILLIC SMALL LETTER TALL TE": "1C84", + "GEORGIAN MTAVRULI CAPITAL LETTER RAE": "1CA0", + "GEORGIAN MTAVRULI CAPITAL LETTER JIL": "1CAB", + "GEORGIAN MTAVRULI CAPITAL LETTER HE": "1CB1", + "SUNDANESE PUNCTUATION BINDU KA SATANGA": "1CC5", + "VEDIC SIGN JIHVAMULIYA": "1CF5", + "LATIN LETTER SMALL CAPITAL D": "1D05", + "LATIN LETTER SMALL CAPITAL K": "1D0B", + "LATIN LETTER SMALL CAPITAL T": "1D1B", + "MODIFIER LETTER CAPITAL J": "1D36", + "MODIFIER LETTER SMALL OPEN O": "1D53", + "LATIN SMALL LETTER P WITH STROKE": "1D7D", + "MODIFIER LETTER SMALL TURNED ALPHA": "1D9B", + "MODIFIER LETTER SMALL DOTLESS J WITH STROKE": "1DA1", + "MODIFIER LETTER SMALL TURNED H": "1DA3", + "MODIFIER LETTER SMALL CAPITAL N": "1DB0", + "COMBINING SUSPENSION MARK": "1DC3", + "COMBINING ACUTE-MACRON": "1DC7", + "COMBINING LATIN SMALL LETTER R BELOW": "1DCA", + "COMBINING LATIN SMALL LETTER G": "1DDA", + "COMBINING LATIN SMALL LETTER Z": "1DE6", + "LATIN SMALL LETTER B WITH DOT ABOVE": "1E03", + "LATIN CAPITAL LETTER B WITH LINE BELOW": "1E06", + "LATIN CAPITAL LETTER D WITH LINE BELOW": "1E0E", + "LATIN SMALL LETTER H WITH DOT ABOVE": "1E23", + "LATIN CAPITAL LETTER N WITH LINE BELOW": "1E48", + "LATIN CAPITAL LETTER N WITH CIRCUMFLEX BELOW": "1E4A", + "LATIN CAPITAL LETTER O WITH MACRON AND GRAVE": "1E50", + "LATIN CAPITAL LETTER O WITH MACRON AND ACUTE": "1E52", + "LATIN SMALL LETTER R WITH DOT ABOVE": 
"1E59", + "LATIN CAPITAL LETTER S WITH DOT BELOW": "1E62", + "LATIN SMALL LETTER T WITH DOT ABOVE": "1E6B", + "LATIN SMALL LETTER T WITH CIRCUMFLEX BELOW": "1E71", "LATIN CAPITAL LETTER V WITH TILDE": "1E7C", - "LATIN SMALL LETTER V WITH TILDE": "1E7D", - "LATIN CAPITAL LETTER W WITH DOT ABOVE": "1E86", - "LATIN SMALL LETTER W WITH RING ABOVE": "1E98", - "LATIN SMALL LETTER A WITH CIRCUMFLEX AND ACUTE": "1EA5", - "LATIN SMALL LETTER A WITH CIRCUMFLEX AND GRAVE": "1EA7", - "LATIN SMALL LETTER E WITH CIRCUMFLEX AND DOT BELOW": "1EC7", - "LATIN CAPITAL LETTER O WITH HOOK ABOVE": "1ECE", - "LATIN SMALL LETTER Y WITH DOT BELOW": "1EF5", - "GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA": "1F2A", + "LATIN CAPITAL LETTER Y WITH DOT ABOVE": "1E8E", + "LATIN CAPITAL LETTER Z WITH CIRCUMFLEX": "1E90", + "LATIN CAPITAL LETTER A WITH BREVE AND ACUTE": "1EAE", + "LATIN SMALL LETTER A WITH BREVE AND HOOK ABOVE": "1EB3", + "LATIN SMALL LETTER E WITH HOOK ABOVE": "1EBB", + "LATIN CAPITAL LETTER O WITH HORN AND ACUTE": "1EDA", + "LATIN CAPITAL LETTER O WITH HORN AND TILDE": "1EE0", + "LATIN SMALL LETTER U WITH DOT BELOW": "1EE5", + "LATIN SMALL LETTER U WITH HORN AND DOT BELOW": "1EF1", + "GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA": "1F0A", + "GREEK SMALL LETTER EPSILON WITH DASIA": "1F11", + "GREEK CAPITAL LETTER EPSILON WITH PSILI AND VARIA": "1F1A", + "GREEK SMALL LETTER ETA WITH DASIA AND OXIA": "1F25", "GREEK CAPITAL LETTER IOTA WITH DASIA AND PERISPOMENI": "1F3F", - "GREEK CAPITAL LETTER OMICRON WITH PSILI AND VARIA": "1F4A", - "GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI": "1F97", - "GREEK SMALL LETTER OMEGA WITH DASIA AND OXIA AND YPOGEGRAMMENI": "1FA5", - "GREEK OXIA": "1FFD", - "FIGURE SPACE": "2007", - "SINGLE LOW-9 QUOTATION MARK": "201A", - "DOUBLE DAGGER": "2021", - "HYPHENATION POINT": "2027", - "PER TEN THOUSAND SIGN": "2031", - "CLOSE UP": "2050", - "MEDIUM MATHEMATICAL SPACE": "205F", - "SUBSCRIPT LEFT PARENTHESIS": "208D", - "MILL SIGN": "20A5", - "AUSTRAL SIGN": "20B3", - "INDIAN RUPEE SIGN": "20B9", - "COMBINING WIDE BRIDGE ABOVE": "20E9", - "PLANCK CONSTANT OVER TWO PI": "210F", + "GREEK SMALL LETTER OMICRON WITH DASIA AND VARIA": "1F43", + "GREEK SMALL LETTER UPSILON WITH DASIA AND OXIA": "1F55", + "GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA": "1F6D", + "GREEK SMALL LETTER ALPHA WITH DASIA AND OXIA AND YPOGEGRAMMENI": "1F85", + "GREEK SMALL LETTER ETA WITH DASIA AND YPOGEGRAMMENI": "1F91", + "GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI": "1F9F", + "GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA AND PROSGEGRAMMENI": "1FAA", + "GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI": "1FAE", + "GREEK CAPITAL LETTER ALPHA WITH OXIA": "1FBB", + "GREEK PERISPOMENI": "1FC0", + "GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA": "1FD2", + "GREEK SMALL LETTER UPSILON WITH VRACHY": "1FE0", + "GREEK SMALL LETTER OMEGA WITH PERISPOMENI AND YPOGEGRAMMENI": "1FF7", + "THREE-PER-EM SPACE": "2004", + "FOUR-PER-EM SPACE": "2005", + "DOUBLE PRIME": "2033", + "SUBSCRIPT EIGHT": "2088", + "SUBSCRIPT RIGHT PARENTHESIS": "208E", + "LATIN SUBSCRIPT SMALL LETTER N": "2099", + "CRUZEIRO SIGN": "20A2", + "COMBINING DOUBLE VERTICAL STROKE OVERLAY": "20E6", + "COMBINING ANNUITY SYMBOL": "20E7", "DOUBLE-STRUCK CAPITAL Q": "211A", - "SCRIPT CAPITAL E": "2130", - "SCRIPT CAPITAL F": "2131", - "VULGAR FRACTION ONE SEVENTH": "2150", - "ROMAN NUMERAL FIVE": "2164", - "SMALL ROMAN NUMERAL ONE": "2170", - "SMALL ROMAN NUMERAL SIX": "2175", 
- "SMALL ROMAN NUMERAL EIGHT": "2177", - "SMALL ROMAN NUMERAL TWELVE": "217B", + "RESPONSE": "211F", + "DOUBLE-STRUCK ITALIC SMALL D": "2146", + "TURNED SMALL F": "214E", + "VULGAR FRACTION ONE FIFTH": "2155", + "VULGAR FRACTION THREE FIFTHS": "2157", + "VULGAR FRACTION ONE SIXTH": "2159", + "ROMAN NUMERAL THREE": "2162", "SMALL ROMAN NUMERAL ONE HUNDRED": "217D", - "UPWARDS HARPOON WITH BARB RIGHTWARDS": "21BE", - "RIGHTWARDS DOUBLE ARROW WITH STROKE": "21CF", - "RIGHTWARDS ARROW WITH DOUBLE VERTICAL STROKE": "21FB", - "THERE EXISTS": "2203", - "VOLUME INTEGRAL": "2230", - "CLOCKWISE CONTOUR INTEGRAL": "2232", - "NOT ALMOST EQUAL TO": "2249", - "GREATER-THAN OR EQUIVALENT TO": "2273", + "SOUTH EAST ARROW": "2198", + "LEFTWARDS ARROW WITH LOOP": "21AB", + "UPWARDS ARROW WITH TIP RIGHTWARDS": "21B1", + "DOWNWARDS ARROW WITH TIP LEFTWARDS": "21B2", + "CLOCKWISE TOP SEMICIRCLE ARROW": "21B7", + "RIGHTWARDS HARPOON WITH BARB UPWARDS": "21C0", + "SOUTH WEST DOUBLE ARROW": "21D9", + "LEFTWARDS TRIPLE ARROW": "21DA", + "NORTH WEST ARROW TO CORNER": "21F1", + "BULLET OPERATOR": "2219", + "GREATER-THAN OR EQUAL TO": "2265", + "NOT GREATER-THAN": "226F", "NEITHER GREATER-THAN NOR EQUIVALENT TO": "2275", - "SUPERSET OF WITH NOT EQUAL TO": "228B", - "REVERSED TILDE EQUALS": "22CD", - "PERSPECTIVE": "2306", - "TOP HALF INTEGRAL": "2320", - "APL FUNCTIONAL SYMBOL DEL DIAERESIS": "2362", - "APL FUNCTIONAL SYMBOL IOTA": "2373", - "COMPOSITION SYMBOL": "2384", - "NEXT PAGE": "2398", - "BLACK DOWN-POINTING DOUBLE TRIANGLE": "23EC", - "BLACK RIGHT-POINTING TRIANGLE WITH DOUBLE VERTICAL BAR": "23EF", - "SYMBOL FOR ACKNOWLEDGE": "2406", - "SYMBOL FOR NEGATIVE ACKNOWLEDGE": "2415", - "SYMBOL FOR GROUP SEPARATOR": "241D", - "SYMBOL FOR DELETE FORM TWO": "2425", - "PARENTHESIZED NUMBER FOURTEEN": "2481", - "NUMBER FIFTEEN FULL STOP": "2496", - "PARENTHESIZED LATIN SMALL LETTER Z": "24B5", - "NEGATIVE CIRCLED NUMBER FOURTEEN": "24EE", - "DOUBLE CIRCLED DIGIT TWO": "24F6", + "ELEMENT OF WITH OVERBAR": "22F6", + "ELECTRIC ARROW": "2301", + "PROJECTIVE": "2305", + "LEFT FLOOR": "230A", + "CONICAL TAPER": "2332", + "APL FUNCTIONAL SYMBOL QUAD DIVIDE": "2339", + "APL FUNCTIONAL SYMBOL CIRCLE JOT": "233E", + "APL FUNCTIONAL SYMBOL QUAD BACKSLASH": "2342", + "APL FUNCTIONAL SYMBOL CIRCLE UNDERBAR": "235C", + "APL FUNCTIONAL SYMBOL QUOTE QUAD": "235E", + "APL FUNCTIONAL SYMBOL DOWN SHOE STILE": "2366", + "VERTICAL LINE WITH MIDDLE DOT": "237F", + "INSERTION SYMBOL": "2380", + "APL FUNCTIONAL SYMBOL QUAD": "2395", + "PRINT SCREEN SYMBOL": "2399", + "LEFT PARENTHESIS UPPER HOOK": "239B", + "HORIZONTAL SCAN LINE-7": "23BC", + "DOUBLE VERTICAL BAR": "23F8", + "PARENTHESIZED NUMBER TWENTY": "2487", + "DIGIT FOUR FULL STOP": "248B", + "NUMBER ELEVEN FULL STOP": "2492", + "PARENTHESIZED LATIN SMALL LETTER D": "249F", + "PARENTHESIZED LATIN SMALL LETTER R": "24AD", + "CIRCLED LATIN CAPITAL LETTER M": "24C2", + "CIRCLED LATIN CAPITAL LETTER N": "24C3", + "CIRCLED LATIN SMALL LETTER J": "24D9", + "NEGATIVE CIRCLED NUMBER THIRTEEN": "24ED", "BOX DRAWINGS HEAVY HORIZONTAL": "2501", - "BOX DRAWINGS DOWN LIGHT AND RIGHT HEAVY": "250D", - "BOX DRAWINGS DOWN LIGHT AND LEFT UP HEAVY": "2529", - "BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE": "2556", + "BOX DRAWINGS LIGHT QUADRUPLE DASH HORIZONTAL": "2508", + "BOX DRAWINGS HEAVY VERTICAL AND LEFT": "252B", + "BOX DRAWINGS LEFT LIGHT AND RIGHT DOWN HEAVY": "2532", + "BOX DRAWINGS HEAVY DOUBLE DASH VERTICAL": "254F", "BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE": "2559", - "BOX DRAWINGS LIGHT UP": 
"2575", + "BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE": "255F", + "BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE": "2565", "BOX DRAWINGS HEAVY RIGHT": "257A", - "LEFT THREE QUARTERS BLOCK": "258A", - "LEFT HALF BLOCK": "258C", - "QUADRANT UPPER LEFT AND LOWER RIGHT": "259A", - "QUADRANT UPPER RIGHT AND LOWER LEFT": "259E", - "SQUARE WITH VERTICAL FILL": "25A5", - "SQUARE WITH UPPER RIGHT TO LOWER LEFT FILL": "25A8", - "WHITE UP-POINTING SMALL TRIANGLE": "25B5", - "WHITE RIGHT-POINTING SMALL TRIANGLE": "25B9", - "LOWER HALF INVERSE WHITE CIRCLE": "25DB", - "WHITE LEFT POINTING INDEX": "261C", - "WHITE DOWN POINTING INDEX": "261F", - "CAPRICORN": "2651", - "MUSIC NATURAL SIGN": "266E", - "BLACK UNIVERSAL RECYCLING SYMBOL": "267B", - "DIE FACE-5": "2684", - "INTERLOCKED FEMALE AND MALE SIGN": "26A4", - "VERTICAL MALE WITH STROKE SIGN": "26A8", - "TURNED WHITE SHOGI PIECE": "26C9", - "ALTERNATE ONE-WAY LEFT WAY TRAFFIC": "26D5", - "ASTRONOMICAL SYMBOL FOR URANUS": "26E2", - "SQUARE FOUR CORNERS": "26F6", - "BLACK NIB": "2712", - "HEAVY OPEN CENTRE CROSS": "271C", - "OUTLINED BLACK STAR": "272D", - "HEAVY TEARDROP-SPOKED PINWHEEL ASTERISK": "2743", - "NEGATIVE SQUARED CROSS MARK": "274E", - "MEDIUM RIGHT-POINTING ANGLE BRACKET ORNAMENT": "276D", - "HEAVY RIGHT-POINTING ANGLE BRACKET ORNAMENT": "2771", - "DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT ONE": "278A", - "DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT EIGHT": "2791", - "LONG RIGHTWARDS ARROW FROM BAR": "27FC", - "BRAILLE PATTERN DOTS-45": "2818", - "BRAILLE PATTERN DOTS-1346": "282D", - "BRAILLE PATTERN DOTS-123456": "283F", - "BRAILLE PATTERN DOTS-7": "2840", - "BRAILLE PATTERN DOTS-17": "2841", - "BRAILLE PATTERN DOTS-237": "2846", - "BRAILLE PATTERN DOTS-47": "2848", - "BRAILLE PATTERN DOTS-267": "2862", - "BRAILLE PATTERN DOTS-1467": "2869", - "BRAILLE PATTERN DOTS-23467": "286E", - "BRAILLE PATTERN DOTS-1234567": "287F", - "BRAILLE PATTERN DOTS-1238": "2887", - "BRAILLE PATTERN DOTS-3458": "289C", - "BRAILLE PATTERN DOTS-4568": "28B8", - "BRAILLE PATTERN DOTS-124568": "28BB", - "BRAILLE PATTERN DOTS-1378": "28C5", - "BRAILLE PATTERN DOTS-12478": "28CB", - "BRAILLE PATTERN DOTS-678": "28E0", - "ARROW POINTING DOWNWARDS THEN CURVING RIGHTWARDS": "2937", - "LEFTWARDS ARROW THROUGH LESS-THAN": "2977", + "LEFT ONE EIGHTH BLOCK": "258F", + "WHITE SQUARE": "25A1", + "SQUARE WITH HORIZONTAL FILL": "25A4", + "WHITE RECTANGLE": "25AD", + "BLACK LEFT-POINTING POINTER": "25C4", + "CIRCLE WITH UPPER RIGHT QUADRANT BLACK": "25D4", + "WHITE CIRCLE WITH LOWER RIGHT QUADRANT": "25F6", + "LOWER RIGHT TRIANGLE": "25FF", + "CLOUD": "2601", + "THUNDERSTORM": "2608", + "BLACK LEFT POINTING INDEX": "261A", + "PEACE SYMBOL": "262E", + "TRIGRAM FOR FIRE": "2632", + "TRIGRAM FOR EARTH": "2637", + "LIBRA": "264E", + "PISCES": "2653", + "BLACK CHESS BISHOP": "265D", + "BLACK CHESS PAWN": "265F", + "DIE FACE-4": "2683", + "BLACK CIRCLE WITH TWO WHITE DOTS": "2689", + "SQUARED KEY": "26BF", + "WHITE DRAUGHTS MAN": "26C0", + "BLACK DRAUGHTS KING": "26C3", + "FALLING DIAGONAL IN WHITE CIRCLE IN BLACK SQUARE": "26DE", + "RIGHT-HANDED INTERLACED PENTAGRAM": "26E5", + "FLAG IN HOLE": "26F3", + "SKIER": "26F7", + "BLACK FOUR POINTED STAR": "2726", + "PINWHEEL STAR": "272F", + "TIGHT TRIFOLIATE SNOWFLAKE": "2745", + "CROSS MARK": "274C", + "MEDIUM RIGHT PARENTHESIS ORNAMENT": "2769", + "DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT THREE": "278C", + "BLACK-FEATHERED SOUTH EAST ARROW": "27B4", + "LEFT AND RIGHT DOUBLE TURNSTILE": "27DA", + "MATHEMATICAL RIGHT FLATTENED 
PARENTHESIS": "27EF", + "LONG LEFTWARDS ARROW FROM BAR": "27FB", + "BRAILLE PATTERN DOTS-146": "2829", + "BRAILLE PATTERN DOTS-137": "2845", + "BRAILLE PATTERN DOTS-2347": "284E", + "BRAILLE PATTERN DOTS-23567": "2876", + "BRAILLE PATTERN DOTS-124567": "287B", + "BRAILLE PATTERN DOTS-358": "2894", + "BRAILLE PATTERN DOTS-34578": "28DC", + "BRAILLE PATTERN DOTS-345678": "28FC", + "BRAILLE PATTERN DOTS-2345678": "28FE", + "UPWARDS ARROW WITH HORIZONTAL STROKE": "2909", + "FALLING DIAGONAL CROSSING NORTH EAST ARROW": "292F", + "RISING DIAGONAL CROSSING SOUTH EAST ARROW": "2930", + "NORTH EAST ARROW CROSSING NORTH WEST ARROW": "2931", + "LEFTWARDS HARPOON WITH BARB DOWN BELOW LONG DASH": "296B", + "RIGHT WHITE PARENTHESIS": "2986", "Z NOTATION RIGHT IMAGE BRACKET": "2988", - "LEFT SQUARE BRACKET WITH UNDERBAR": "298B", - "RIGHT ARC GREATER-THAN BRACKET": "2994", "DOUBLE RIGHT ARC LESS-THAN BRACKET": "2996", - "MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING UP AND RIGHT": "29A8", - "EMPTY SET WITH SMALL CIRCLE ABOVE": "29B2", - "CIRCLED REVERSE SOLIDUS": "29B8", - "SQUARE WITH CONTOURED OUTLINE": "29E0", - "EQUALS SIGN AND SLANTED PARALLEL": "29E3", - "INTEGRAL WITH LEFTWARDS ARROW WITH HOOK": "2A17", - "PLUS SIGN WITH TILDE BELOW": "2A26", - "PLUS SIGN IN TRIANGLE": "2A39", - "LOGICAL AND WITH DOT ABOVE": "2A51", + "VERTICAL ZIGZAG LINE": "299A", + "MEASURED ANGLE OPENING LEFT": "299B", + "EMPTY SET WITH RIGHT ARROW ABOVE": "29B3", + "CIRCLE WITH HORIZONTAL BAR": "29B5", + "SQUARED ASTERISK": "29C6", + "INCOMPLETE INFINITY": "29DC", + "INCREASES AS": "29E1", + "RIGHT-POINTING CURVED ANGLE BRACKET": "29FD", + "SUMMATION WITH INTEGRAL": "2A0B", + "Z NOTATION RELATIONAL COMPOSITION": "2A3E", "TWO INTERSECTING LOGICAL OR": "2A56", - "TILDE OPERATOR WITH RISING DOTS": "2A6B", - "PLUS SIGN ABOVE EQUALS SIGN": "2A72", - "DOUBLE COLON EQUAL": "2A74", - "LESS-THAN ABOVE GREATER-THAN ABOVE DOUBLE-LINE EQUAL": "2A91", - "GREATER-THAN CLOSED BY CURVE": "2AA7", - "SUPERSET WITH DOT": "2ABE", - "LONG DASH FROM LEFT MEMBER OF DOUBLE VERTICAL": "2AE6", - "DOES NOT DIVIDE WITH REVERSED NEGATION SLASH": "2AEE", - "BLACK PENTAGON": "2B1F", - "LEFTWARDS TWO-HEADED ARROW WITH TAIL WITH VERTICAL STROKE": "2B3C", - "LEFTWARDS TWO-HEADED ARROW WITH TAIL WITH DOUBLE VERTICAL STROKE": "2B3D", - "EQUALS SIGN ABOVE LEFTWARDS ARROW": "2B40", - "WHITE MEDIUM STAR": "2B50", - "LEFTWARDS TRIANGLE-HEADED DASHED ARROW": "2B6A", - "UPWARDS TRIANGLE-HEADED ARROW LEFTWARDS OF DOWNWARDS TRIANGLE-HEADED ARROW": "2B81", - "OVERLAPPING BLACK SQUARES": "2BBC", - "BLACK MEDIUM UP-POINTING TRIANGLE CENTRED": "2BC5", + "TILDE OPERATOR WITH DOT ABOVE": "2A6A", + "LESS-THAN WITH QUESTION MARK ABOVE": "2A7B", + "GREATER-THAN AND SINGLE-LINE NOT EQUAL TO": "2A88", + "GREATER-THAN AND NOT APPROXIMATE": "2A8A", + "SMALLER THAN OR EQUAL TO": "2AAC", + "PRECEDES ABOVE EQUALS SIGN": "2AB3", + "SHORT DOWN TACK WITH OVERBAR": "2AE7", + "UPWARDS BLACK ARROW": "2B06", + "DOWNWARDS BLACK ARROW": "2B07", + "NORTH WEST BLACK ARROW": "2B09", + "BLACK LARGE SQUARE": "2B1B", + "HORIZONTAL BLACK HEXAGON": "2B23", + "WHITE RIGHT-POINTING PENTAGON": "2B54", + "SLANTED NORTH ARROW WITH HOOKED HEAD": "2B5A", + "ANTICLOCKWISE TRIANGLE-HEADED OPEN CIRCLE ARROW": "2B6F", + "DOWNWARDS TRIANGLE-HEADED ARROW TO BAR": "2B73", + "VERTICAL TAB KEY": "2B7F", + "BLACK LEFTWARDS EQUILATERAL ARROWHEAD": "2B9C", + "LEFTWARDS TRIANGLE-HEADED ARROW WITH LONG TIP DOWNWARDS": "2BA6", + "BLACK CURVED UPWARDS AND RIGHTWARDS ARROW": "2BAB", + "UPWARDS WHITE ARROW FROM BAR 
WITH HORIZONTAL BAR": "2BB8", + "CIRCLED BOLD X": "2BBF", + "BLACK DIAMOND CENTRED": "2BC1", + "NEPTUNE FORM TWO": "2BC9", + "WHITE FOUR POINTED CUSP": "2BCE", + "TRUE LIGHT MOON ARTA": "2BDF", + "CUPIDO": "2BE0", "VULCANUS": "2BE6", - "STAR WITH LEFT HALF BLACK": "2BEA", - "GLAGOLITIC CAPITAL LETTER DOBRO": "2C04", - "GLAGOLITIC CAPITAL LETTER ZHIVETE": "2C06", - "GLAGOLITIC CAPITAL LETTER I": "2C0B", - "GLAGOLITIC CAPITAL LETTER PE": "2C1A", + "GLAGOLITIC CAPITAL LETTER YESTU": "2C05", "GLAGOLITIC CAPITAL LETTER YO": "2C26", - "GLAGOLITIC SMALL LETTER YATI": "2C51", - "GLAGOLITIC SMALL LETTER CAUDATE CHRIVI": "2C5F", - "LATIN CAPITAL LETTER L WITH DOUBLE BAR": "2C60", - "LATIN CAPITAL LETTER K WITH DESCENDER": "2C69", - "COPTIC SMALL LETTER EIE": "2C89", + "GLAGOLITIC SMALL LETTER ONU": "2C41", + "GLAGOLITIC SMALL LETTER TVRIDO": "2C45", + "GLAGOLITIC SMALL LETTER SPIDERY HA": "2C52", + "GLAGOLITIC SMALL LETTER FITA": "2C5A", + "LATIN CAPITAL LETTER W WITH HOOK": "2C72", + "LATIN SMALL LETTER V WITH CURL": "2C74", + "LATIN SMALL LETTER O WITH LOW RING INSIDE": "2C7A", + "COPTIC CAPITAL LETTER ZATA": "2C8C", + "COPTIC SMALL LETTER ZATA": "2C8D", + "COPTIC CAPITAL LETTER HATE": "2C8E", + "COPTIC SMALL LETTER NI": "2C9B", + "COPTIC CAPITAL LETTER O": "2C9E", "COPTIC SMALL LETTER RO": "2CA3", - "COPTIC CAPITAL LETTER TAU": "2CA6", - "COPTIC SMALL LETTER OLD COPTIC GANGIA": "2CD7", - "COPTIC OLD NUBIAN VERSE DIVIDER": "2CFC", - "GEORGIAN SMALL LETTER QAR": "2D17", - "TIFINAGH LETTER YA": "2D30", - "TIFINAGH LETTER YADD": "2D39", - "ETHIOPIC SYLLABLE DOA": "2D8C", - "ETHIOPIC SYLLABLE SSI": "2DA2", - "ETHIOPIC SYLLABLE SSE": "2DA5", - "ETHIOPIC SYLLABLE QYEE": "2DC4", - "ETHIOPIC SYLLABLE QYO": "2DC6", - "COMBINING CYRILLIC LETTER O": "2DEA", - "LEFT RAISED OMISSION BRACKET": "2E0C", + "COPTIC SMALL LETTER UA": "2CA9", + "COPTIC FULL STOP": "2CFE", + "GEORGIAN SMALL LETTER CAN": "2D1A", + "GEORGIAN SMALL LETTER HOE": "2D25", + "TIFINAGH LETTER YI": "2D49", + "TIFINAGH LETTER YASH": "2D5B", + "TIFINAGH LETTER YAY": "2D62", + "ETHIOPIC SYLLABLE SHOA": "2D84", + "ETHIOPIC SYLLABLE NOA": "2D88", + "ETHIOPIC SYLLABLE GGWE": "2D96", + "ETHIOPIC SYLLABLE ZZEE": "2DB4", + "ETHIOPIC SYLLABLE ZZE": "2DB5", + "ETHIOPIC SYLLABLE CCHU": "2DB9", + "ETHIOPIC SYLLABLE CCHAA": "2DBB", + "ETHIOPIC SYLLABLE XYI": "2DD2", + "ETHIOPIC SYLLABLE GYU": "2DD9", + "RIGHT RAISED OMISSION BRACKET": "2E0D", "PALM BRANCH": "2E19", - "RIGHT DOUBLE PARENTHESIS": "2E29", - "DOUBLE HYPHEN": "2E40", - "LOW KAVYKA WITH DOT": "2E48", - "MEDIEVAL COMMA": "2E4C", - "CJK RADICAL DIVINATION": "2E8A", - "CJK RADICAL MOON": "2E9D", - "CJK RADICAL SIMPLIFIED HALF TREE TRUNK": "2EA6", - "CJK RADICAL BRUSH TWO": "2EBB", + "FIVE DOT MARK": "2E2D", + "TURNED SEMICOLON": "2E35", + "DOUBLE STACKED COMMA": "2E49", + "CJK RADICAL LAME THREE": "2E90", + "CJK RADICAL SUN": "2E9C", + "CJK RADICAL WATER TWO": "2EA2", + "CJK RADICAL MORTAR": "2EBD", "CJK RADICAL WEST ONE": "2EC3", - "CJK RADICAL SIMPLIFIED WALK": "2ECC", - "CJK RADICAL WALK ONE": "2ECD", - "CJK RADICAL EAT TWO": "2EDE", - "KANGXI RADICAL TURBAN": "2F31", - "KANGXI RADICAL WHITE": "2F69", + "CJK RADICAL BONE": "2EE3", + "CJK RADICAL J-SIMPLIFIED EVEN": "2EEB", + "KANGXI RADICAL SPOON": "2F14", + "KANGXI RADICAL GO": "2F21", + "KANGXI RADICAL LACK": "2F4B", + "KANGXI RADICAL FIELD": "2F65", "KANGXI RADICAL SKIN": "2F6A", - "KANGXI RADICAL STAND": "2F74", - "KANGXI RADICAL WEST": "2F91", - "KANGXI RADICAL GATE": "2FA8", - "KANGXI RADICAL SLAVE": "2FAA", - "KANGXI RADICAL TOOTH": 
"2FD2", - "IDEOGRAPHIC DESCRIPTION CHARACTER SURROUND FROM LOWER RIGHT": "2FFD", - "LEFT DOUBLE ANGLE BRACKET": "300A", - "IDEOGRAPHIC ENTERING TONE MARK": "302D", - "HANGUL SINGLE DOT TONE MARK": "302E", - "VERTICAL KANA REPEAT WITH VOICED SOUND MARK": "3032", - "HIRAGANA LETTER I": "3044", - "HIRAGANA LETTER DE": "3067", - "HIRAGANA LETTER NI": "306B", - "HIRAGANA LETTER BA": "3070", - "HIRAGANA LETTER MU": "3080", - "HIRAGANA LETTER YA": "3084", - "HIRAGANA LETTER WE": "3091", - "KATAKANA LETTER E": "30A8", - "KATAKANA LETTER ZO": "30BE", + "KANGXI RADICAL STOPPING": "2F89", + "KANGXI RADICAL WALK ENCLOSURE": "2F8F", + "KANGXI RADICAL CLOTHES": "2F90", + "KANGXI RADICAL TANNED LEATHER": "2FB1", + "KANGXI RADICAL FLY": "2FB6", + "KANGXI RADICAL SACRIFICIAL WINE": "2FBF", + "RIGHT DOUBLE ANGLE BRACKET": "300B", + "RIGHT WHITE TORTOISE SHELL BRACKET": "3019", + "VERTICAL KANA REPEAT MARK LOWER HALF": "3035", + "PART ALTERNATION MARK": "303D", + "HIRAGANA LETTER DU": "3065", + "HIRAGANA LETTER RE": "308C", + "HIRAGANA LETTER SMALL WA": "308E", + "HIRAGANA LETTER WO": "3092", + "HIRAGANA LETTER VU": "3094", + "KATAKANA LETTER SE": "30BB", + "KATAKANA LETTER ZE": "30BC", "KATAKANA LETTER BI": "30D3", - "KATAKANA LETTER PU": "30D7", - "BOPOMOFO LETTER EN": "3123", - "HANGUL LETTER NIEUN": "3134", - "HANGUL LETTER EU": "3161", - "IDEOGRAPHIC ANNOTATION MIDDLE MARK": "3197", - "IDEOGRAPHIC ANNOTATION EARTH MARK": "319E", - "BOPOMOFO LETTER ENN": "31A5", - "BOPOMOFO FINAL LETTER K": "31B6", - "BOPOMOFO LETTER GW": "31BC", - "CJK STROKE XG": "31C2", - "CJK STROKE P": "31D2", - "PARENTHESIZED HANGUL KHIEUKH A": "3218", - "PARENTHESIZED HANGUL CIEUC U": "321C", - "PARENTHESIZED IDEOGRAPH SUPERVISE": "323C", - "CIRCLED IDEOGRAPH CORRECT": "32A3", - "CIRCLED KATAKANA KE": "32D8", - "CIRCLED KATAKANA HI": "32EA", - "SQUARE KYURII": "3312", - "SQUARE KIRO": "3314", - "SQUARE KIROGURAMU": "3315", - "SQUARE PENIHI": "3338", - "SQUARE HERUTU": "3339", - "SQUARE HOON": "3342", - "SQUARE MARUKU": "3346", - "SQUARE MIRIBAARU": "334A", - "SQUARE MEGA": "334B", - "SQUARE YAADO": "334E", - "SQUARE GB": "3387", - "SQUARE CAL": "3388", - "SQUARE PF": "338A", - "SQUARE FM": "3399", - "SQUARE KW": "33BE", - "SQUARE KM CAPITAL": "33CE", - "SQUARE LN": "33D1", - "HEXAGRAM FOR GREAT POSSESSION": "4DCD", - "HEXAGRAM FOR APPROACH": "4DD2", - "HEXAGRAM FOR GRACE": "4DD5", - "HEXAGRAM FOR GATHERING TOGETHER": "4DEC", - "HEXAGRAM FOR DISPERSION": "4DFA", - "YI SYLLABLE I": "A002", - "YI SYLLABLE EX": "A013", - "YI SYLLABLE BEX": "A029", - "YI SYLLABLE BUT": "A02C", - "YI SYLLABLE PIT": "A038", - "YI SYLLABLE PIEP": "A03E", + "KATAKANA LETTER SMALL WA": "30EE", + "KATAKANA LETTER SMALL KE": "30F6", + "KATAKANA LETTER VI": "30F8", + "BOPOMOFO LETTER F": "3108", + "BOPOMOFO LETTER N": "310B", + "BOPOMOFO LETTER L": "310C", + "HANGUL LETTER KIYEOK-SIOS": "3133", + "HANGUL LETTER CHIEUCH": "314A", + "HANGUL LETTER O": "3157", + "HANGUL FILLER": "3164", + "HANGUL LETTER PIEUP-SIOS-KIYEOK": "3174", + "IDEOGRAPHIC ANNOTATION FOUR MARK": "3195", + "IDEOGRAPHIC ANNOTATION BOTTOM MARK": "3198", + "BOPOMOFO LETTER IR": "31A8", + "BOPOMOFO FINAL LETTER H": "31B7", + "CJK STROKE N": "31CF", + "CJK STROKE HZZZG": "31E1", + "KATAKANA LETTER SMALL HA": "31F5", + "PARENTHESIZED HANGUL KHIEUKH": "320A", + "PARENTHESIZED HANGUL TIKEUT A": "3210", + "PARENTHESIZED KOREAN CHARACTER OJEON": "321D", + "PARENTHESIZED KOREAN CHARACTER O HU": "321E", + "PARENTHESIZED IDEOGRAPH FOUR": "3223", + "CIRCLED NUMBER TWENTY EIGHT": "3258", + "CIRCLED HANGUL 
KIYEOK": "3260", + "CIRCLED IDEOGRAPH FEMALE": "329B", + "LIMITED LIABILITY SIGN": "32CF", + "CIRCLED KATAKANA HO": "32ED", + "SQUARE ININGU": "3304", + "SQUARE CM CUBED": "33A4", + "SQUARE CC": "33C4", + "SQUARE KK": "33CD", + "IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY": "33F3", + "IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-TWO": "33F5", + "HEXAGRAM FOR TREADING": "4DC9", + "HEXAGRAM FOR CONTEMPLATION": "4DD3", + "HEXAGRAM FOR THE ABYSMAL WATER": "4DDC", + "HEXAGRAM FOR LIMITATION": "4DFB", + "YI SYLLABLE IP": "A003", + "YI SYLLABLE AT": "A008", + "YI SYLLABLE UO": "A00D", + "YI SYLLABLE WU": "A015", "YI SYLLABLE POP": "A049", - "YI SYLLABLE BBIX": "A057", - "YI SYLLABLE BBURX": "A070", - "YI SYLLABLE BBYP": "A075", - "YI SYLLABLE NBUT": "A085", - "YI SYLLABLE NBYP": "A08E", - "YI SYLLABLE MO": "A0BF", - "YI SYLLABLE FA": "A0D3", - "YI SYLLABLE FU": "A0DA", - "YI SYLLABLE FUP": "A0DB", - "YI SYLLABLE FYP": "A0E1", - "YI SYLLABLE DA": "A109", - "YI SYLLABLE TUR": "A135", - "YI SYLLABLE DDU": "A14D", - "YI SYLLABLE HNAT": "A170", - "YI SYLLABLE HNOX": "A177", - "YI SYLLABLE NEP": "A190", - "YI SYLLABLE HLIT": "A197", - "YI SYLLABLE LAP": "A1C2", - "YI SYLLABLE GIT": "A1DA", - "YI SYLLABLE GUOX": "A1E7", - "YI SYLLABLE KOT": "A206", - "YI SYLLABLE GGEX": "A227", - "YI SYLLABLE MGAP": "A235", - "YI SYLLABLE MGOX": "A23A", - "YI SYLLABLE MGUX": "A241", - "YI SYLLABLE MGUR": "A245", - "YI SYLLABLE HXIEX": "A24B", + "YI SYLLABLE BBUOP": "A064", + "YI SYLLABLE BBEP": "A06B", + "YI SYLLABLE HMA": "A09A", + "YI SYLLABLE MIP": "A0B1", + "YI SYLLABLE FAP": "A0D4", + "YI SYLLABLE VIEP": "A0E9", + "YI SYLLABLE VO": "A0F0", + "YI SYLLABLE VEX": "A0F2", + "YI SYLLABLE TIT": "A11A", + "YI SYLLABLE TAX": "A122", + "YI SYLLABLE DDAP": "A140", + "YI SYLLABLE DDEP": "A14A", + "YI SYLLABLE DDUT": "A14B", + "YI SYLLABLE NDIE": "A156", + "YI SYLLABLE NDOT": "A15B", + "YI SYLLABLE HLIEX": "A19B", + "YI SYLLABLE LIT": "A1B7", + "YI SYLLABLE LUR": "A1D3", + "YI SYLLABLE GA": "A1E4", "YI SYLLABLE HXEX": "A25A", - "YI SYLLABLE HXE": "A25B", - "YI SYLLABLE HIEX": "A26F", - "YI SYLLABLE WEX": "A28A", - "YI SYLLABLE ZAT": "A294", - "YI SYLLABLE CIX": "A2AF", - "YI SYLLABLE ZZIT": "A2D0", - "YI SYLLABLE ZZYT": "A2E7", - "YI SYLLABLE NZUX": "A2FE", - "YI SYLLABLE SSIP": "A32D", - "YI SYLLABLE SSE": "A33A", - "YI SYLLABLE ZHUO": "A34B", - "YI SYLLABLE ZHO": "A34F", - "YI SYLLABLE ZHUP": "A358", + "YI SYLLABLE NGIEP": "A25F", + "YI SYLLABLE NGUOX": "A265", + "YI SYLLABLE HAP": "A274", + "YI SYLLABLE ZZY": "A2E9", + "YI SYLLABLE NZAP": "A2F7", + "YI SYLLABLE SUX": "A31F", + "YI SYLLABLE ZHUX": "A356", + "YI SYLLABLE CHOX": "A36A", "YI SYLLABLE CHUX": "A371", - "YI SYLLABLE RRO": "A382", - "YI SYLLABLE RRET": "A384", - "YI SYLLABLE NRET": "A39B", - "YI SYLLABLE RUR": "A3D9", - "YI SYLLABLE JIET": "A3E4", - "YI SYLLABLE JUP": "A3F3", - "YI SYLLABLE JYRX": "A3FA", - "YI SYLLABLE QIX": "A3FD", - "YI SYLLABLE QURX": "A410", - "YI SYLLABLE QUR": "A411", - "YI SYLLABLE NJIET": "A435", - "YI SYLLABLE NJYRX": "A448", - "YI SYLLABLE XYRX": "A46F", - "YI RADICAL PUT": "A49E", - "YI RADICAL TAT": "A4A0", - "YI RADICAL CYT": "A4A3", - "YI RADICAL ZOT": "A4AB", - "LISU LETTER TONE NA PO": "A4F9", + "YI SYLLABLE CHYP": "A379", + "YI SYLLABLE RRUT": "A388", + "YI SYLLABLE NRA": "A396", + "YI SYLLABLE RAX": "A3C7", + "YI SYLLABLE RAP": "A3C9", + "YI SYLLABLE RU": "A3D6", + "YI SYLLABLE RURX": "A3D8", + "YI SYLLABLE RYX": "A3DB", + "YI SYLLABLE RY": "A3DC", + "YI SYLLABLE RYR": "A3DF", + "YI SYLLABLE QIE": "A402", + "YI SYLLABLE QOT": 
"A408", + "YI SYLLABLE QYT": "A412", + "YI SYLLABLE NJIT": "A431", + "YI SYLLABLE NJOP": "A43E", + "YI SYLLABLE NYI": "A44C", + "YI SYLLABLE NYOP": "A458", + "YI SYLLABLE XYP": "A46E", + "YI SYLLABLE YUO": "A47B", + "YI SYLLABLE YUT": "A481", + "YI SYLLABLE YUX": "A482", + "YI RADICAL GA": "A4A1", + "YI RADICAL ZUR": "A4C1", + "YI RADICAL SHOP": "A4C2", + "LISU LETTER JA": "A4D9", "LISU LETTER TONE MYA NA": "A4FC", - "VAI SYLLABLE WIN": "A529", - "VAI SYLLABLE MI": "A546", - "VAI SYLLABLE HA": "A54C", - "VAI SYLLABLE PA": "A550", - "VAI SYLLABLE GBA": "A557", - "VAI SYLLABLE TA": "A55A", - "VAI SYLLABLE THA": "A55B", - "VAI SYLLABLE CA": "A566", - "VAI SYLLABLE MBOO": "A579", - "VAI SYLLABLE KPOO": "A57A", - "VAI SYLLABLE GBOO": "A57C", - "VAI SYLLABLE DHOO": "A581", - "VAI SYLLABLE DHHOO": "A582", - "VAI SYLLABLE ZOO": "A589", - "VAI SYLLABLE JOO": "A58C", - "VAI SYLLABLE BHU": "A59C", - "VAI SYLLABLE FU": "A5A2", - "VAI SYLLABLE DU": "A5AA", - "VAI SYLLABLE SHU": "A5AD", - "VAI SYLLABLE JU": "A5B1", - "VAI SYLLABLE NJU": "A5B2", - "VAI SYLLABLE KU": "A5B4", - "VAI SYLLABLE ON": "A5BB", - "VAI SYLLABLE BO": "A5C3", - "VAI SYLLABLE JO": "A5D8", - "VAI SYLLABLE FE": "A5F1", - "VAI SYLLABLE LE": "A5F7", - "VAI SYLLABLE ZE": "A5FD", - "VAI SYLLABLE NGGEN": "A605", - "CYRILLIC CAPITAL LETTER BROAD OMEGA": "A64C", - "CYRILLIC SMALL LETTER DOUBLE MONOCULAR O": "A66D", - "CYRILLIC CAPITAL LETTER TCHE": "A692", + "LISU PUNCTUATION FULL STOP": "A4FF", + "VAI SYLLABLE GBEE": "A50B", + "VAI SYLLABLE FA": "A558", + "VAI SYLLABLE DHA": "A55C", + "VAI SYLLABLE OON": "A572", + "VAI SYLLABLE ZHOO": "A58A", + "VAI SYLLABLE MOO": "A592", + "VAI SYLLABLE NYU": "A5B9", + "VAI SYLLABLE GBO": "A5C7", + "VAI SYLLABLE NDO": "A5D2", + "VAI SYLLABLE ZHO": "A5D6", + "VAI SYLLABLE VE": "A5F2", + "VAI SYLLABLE GE": "A606", + "VAI SYMBOL NII": "A616", + "VAI SYMBOL BANG": "A617", + "CYRILLIC SMALL LETTER BINOCULAR O": "A66B", + "COMBINING CYRILLIC LETTER U": "A677", + "COMBINING CYRILLIC PAYEROK": "A67D", + "CYRILLIC CAPITAL LETTER ZHWE": "A684", "COMBINING CYRILLIC LETTER EF": "A69E", - "BAMUM LETTER MI": "A6CE", - "BAMUM LETTER KEN": "A6D2", + "BAMUM LETTER KYEE": "A6BA", + "BAMUM LETTER SHO": "A6D6", + "BAMUM LETTER MA": "A6E3", + "BAMUM LETTER TI": "A6E4", + "BAMUM NJAEMLI": "A6F2", "MODIFIER LETTER EXTRA-HIGH DOTTED LEFT-STEM TONE BAR": "A70D", - "MODIFIER LETTER HIGH LEFT-STEM TONE BAR": "A713", - "MODIFIER LETTER LOW LEFT-STEM TONE BAR": "A715", - "LATIN LETTER SMALL CAPITAL S": "A731", - "LATIN SMALL LETTER R ROTUNDA": "A75B", - "LATIN SMALL LETTER VEND": "A769", - "LATIN CAPITAL LETTER INSULAR D": "A779", - "MODIFIER LETTER SHORT EQUALS SIGN": "A78A", - "LATIN SMALL LETTER L WITH RETROFLEX HOOK AND BELT": "A78E", - "LATIN CAPITAL LETTER VOLAPUK AE": "A79A", - "LATIN LETTER SMALL CAPITAL Q": "A7AF", - "LATIN CAPITAL LETTER TURNED T": "A7B1", - "LATIN CAPITAL LETTER CHI": "A7B3", - "LATIN SMALL LETTER OMEGA": "A7B7", - "LATIN CAPITAL LETTER CLOSED INSULAR G": "A7D0", - "PHAGS-PA LETTER JA": "A846", - "PHAGS-PA LETTER PHA": "A84D", - "SAURASHTRA LETTER GHA": "A895", - "SAURASHTRA LETTER TA": "A8A1", - "COMBINING DEVANAGARI DIGIT SEVEN": "A8E7", - "DEVANAGARI SIGN CANDRABINDU VIRAMA": "A8F3", - "KAYAH LI DIGIT SEVEN": "A907", - "KAYAH LI LETTER SHA": "A90F", - "KAYAH LI LETTER RA": "A91A", - "KAYAH LI VOWEL E": "A927", - "KAYAH LI VOWEL U": "A928", - "REJANG LETTER GA": "A931", + "LATIN CAPITAL LETTER TZ": "A728", + "LATIN CAPITAL LETTER REVERSED C WITH DOT": "A73E", + "LATIN CAPITAL LETTER K WITH DIAGONAL 
STROKE": "A742", + "LATIN CAPITAL LETTER P WITH SQUIRREL TAIL": "A754", + "LATIN CAPITAL LETTER Q WITH DIAGONAL STROKE": "A758", + "LATIN SMALL LETTER INSULAR F": "A77C", + "LATIN CAPITAL LETTER INSULAR T": "A786", + "LATIN SMALL LETTER VOLAPUK AE": "A79B", + "SYLOTI NAGRI LETTER TO": "A814", + "SYLOTI NAGRI LETTER PO": "A819", + "SYLOTI NAGRI LETTER LO": "A81F", + "SYLOTI NAGRI VOWEL SIGN U": "A825", + "NORTH INDIC FRACTION ONE SIXTEENTH": "A833", + "NORTH INDIC QUANTITY MARK": "A839", + "PHAGS-PA DOUBLE HEAD MARK": "A875", + "SAURASHTRA LETTER AA": "A883", + "SAURASHTRA LETTER DDA": "A89E", + "SAURASHTRA VOWEL SIGN VOCALIC L": "A8BC", + "SAURASHTRA VOWEL SIGN E": "A8BE", + "SAURASHTRA DIGIT ONE": "A8D1", + "COMBINING DEVANAGARI DIGIT TWO": "A8E2", "REJANG LETTER SA": "A93C", - "REJANG CONSONANT SIGN H": "A952", - "HANGUL CHOSEONG RIEUL-SIOS": "A96C", - "HANGUL CHOSEONG SSANGSIOS-PIEUP": "A975", - "HANGUL CHOSEONG PHIEUPH-HIEUH": "A97A", - "JAVANESE SIGN CECAK": "A981", + "REJANG LETTER NGGA": "A943", + "REJANG VOWEL SIGN I": "A947", + "HANGUL CHOSEONG RIEUL-KAPYEOUNPIEUP": "A96B", + "HANGUL CHOSEONG MIEUM-KIYEOK": "A96F", + "HANGUL CHOSEONG MIEUM-SIOS": "A971", + "JAVANESE SIGN WIGNYAN": "A983", + "JAVANESE VOWEL SIGN WULU": "A9B6", "JAVANESE PADA PANGKAT": "A9C7", - "JAVANESE PANGRANGKEP": "A9CF", - "MYANMAR LETTER SHAN GHA": "A9E0", - "MYANMAR LETTER SHAN JHA": "A9E2", - "MYANMAR TAI LAING DIGIT EIGHT": "A9F8", - "CHAM LETTER JHA": "AA0F", - "CHAM LETTER TA": "AA13", - "CHAM LETTER BA": "AA1D", - "CHAM VOWEL SIGN O": "AA2F", - "CHAM LETTER FINAL SS": "AA4B", - "MYANMAR LOGOGRAM KHAMTI HM": "AA76", - "MYANMAR SYMBOL AITON ONE": "AA78", - "MYANMAR SIGN TAI LAING TONE-2": "AA7C", - "TAI VIET LETTER LOW KO": "AA80", - "TAI VIET LETTER HIGH KO": "AA81", - "TAI VIET LETTER HIGH TO": "AA95", - "TAI VIET LETTER LOW PO": "AA9C", - "TAI VIET MAI KANG": "AAB0", - "TAI VIET VOWEL O": "AAB6", - "TAI VIET VOWEL UEA": "AAB9", - "MEETEI MAYEK LETTER TTHA": "AAE5", - "ETHIOPIC SYLLABLE DDHU": "AB09", - "LATIN SMALL LETTER SCRIPT G WITH CROSSED-TAIL": "AB36", + "MYANMAR TAI LAING DIGIT SIX": "A9F6", + "CHAM LETTER NUE": "AA17", + "CHAM VOWEL SIGN II": "AA2B", + "CHAM LETTER FINAL G": "AA41", + "CHAM LETTER FINAL R": "AA49", + "MYANMAR LOGOGRAM KHAMTI OAY": "AA74", + "MYANMAR SIGN PAO KAREN TONE": "AA7B", + "TAI VIET LETTER LOW NYO": "AA90", + "TAI VIET LETTER HIGH FO": "AAA1", + "TAI VIET VOWEL U": "AAB4", + "TAI VIET VOWEL AY": "AABC", + "TAI VIET TONE MAI NUENG": "AAC0", + "TAI VIET SYMBOL KOI KOI": "AADF", + "MEETEI MAYEK LETTER DDHA": "AAE7", + "ETHIOPIC SYLLABLE DZE": "AB15", + "LATIN SMALL LETTER DOUBLE R WITH CROSSED-TAIL": "AB4A", "LATIN SMALL LETTER SCRIPT R WITH RING": "AB4C", - "LATIN SMALL LETTER UO": "AB63", - "LATIN SMALL LETTER DZ DIGRAPH WITH RETROFLEX HOOK": "AB66", - "LATIN SMALL LETTER TURNED R WITH MIDDLE TILDE": "AB68", - "CHEROKEE SMALL LETTER NO": "AB93", - "CHEROKEE SMALL LETTER TLO": "ABB0", - "MEETEI MAYEK LETTER MIT LONSUM": "ABDD", + "LATIN SMALL LETTER U BAR WITH SHORT RIGHT LEG": "AB4F", + "CHEROKEE SMALL LETTER LA": "AB83", + "CHEROKEE SMALL LETTER SE": "AB9E", + "CHEROKEE SMALL LETTER TA": "ABA4", + "CHEROKEE SMALL LETTER TSO": "ABB6", + "CHEROKEE SMALL LETTER WU": "ABBD", "MEETEI MAYEK VOWEL SIGN CHEINAP": "ABE9", - "HANGUL JUNGSEONG EU-E": "D7BB", - "HANGUL JUNGSEONG I-YEO": "D7BF", - "CJK COMPATIBILITY IDEOGRAPH-F92C": "F92C", - "CJK COMPATIBILITY IDEOGRAPH-F937": "F937", - "CJK COMPATIBILITY IDEOGRAPH-F949": "F949", - "CJK COMPATIBILITY IDEOGRAPH-F962": "F962", - "CJK 
COMPATIBILITY IDEOGRAPH-F97E": "F97E", - "CJK COMPATIBILITY IDEOGRAPH-F97F": "F97F", - "CJK COMPATIBILITY IDEOGRAPH-F995": "F995", - "CJK COMPATIBILITY IDEOGRAPH-F9A1": "F9A1", - "CJK COMPATIBILITY IDEOGRAPH-F9C4": "F9C4", - "CJK COMPATIBILITY IDEOGRAPH-F9CF": "F9CF", - "CJK COMPATIBILITY IDEOGRAPH-F9D7": "F9D7", - "CJK COMPATIBILITY IDEOGRAPH-FA05": "FA05", - "CJK COMPATIBILITY IDEOGRAPH-FA1D": "FA1D", - "CJK COMPATIBILITY IDEOGRAPH-FA45": "FA45", - "CJK COMPATIBILITY IDEOGRAPH-FA77": "FA77", - "CJK COMPATIBILITY IDEOGRAPH-FA89": "FA89", - "CJK COMPATIBILITY IDEOGRAPH-FAA2": "FAA2", - "CJK COMPATIBILITY IDEOGRAPH-FAAD": "FAAD", - "CJK COMPATIBILITY IDEOGRAPH-FAAE": "FAAE", - "CJK COMPATIBILITY IDEOGRAPH-FAB9": "FAB9", - "CJK COMPATIBILITY IDEOGRAPH-FAD2": "FAD2", - "CJK COMPATIBILITY IDEOGRAPH-FAD3": "FAD3", - "HEBREW LETTER LAMED WITH DAGESH": "FB3C", - "HEBREW LETTER MEM WITH DAGESH": "FB3E", - "HEBREW LETTER NUN WITH DAGESH": "FB40", - "HEBREW LETTER TSADI WITH DAGESH": "FB46", - "ARABIC LETTER BEHEH INITIAL FORM": "FB5C", - "ARABIC LETTER BEHEH MEDIAL FORM": "FB5D", + "HANGUL JONGSEONG SSANGTIKEUT": "D7CD", + "HANGUL JONGSEONG RIEUL-MIEUM-HIEUH": "D7D8", + "HANGUL JONGSEONG PIEUP-MIEUM": "D7E5", + "HANGUL JONGSEONG SIOS-KAPYEOUNPIEUP": "D7EB", + "": "DB80", + "CJK COMPATIBILITY IDEOGRAPH-F90F": "F90F", + "CJK COMPATIBILITY IDEOGRAPH-F952": "F952", + "CJK COMPATIBILITY IDEOGRAPH-F961": "F961", + "CJK COMPATIBILITY IDEOGRAPH-F9BB": "F9BB", + "CJK COMPATIBILITY IDEOGRAPH-F9C2": "F9C2", + "CJK COMPATIBILITY IDEOGRAPH-F9DB": "F9DB", + "CJK COMPATIBILITY IDEOGRAPH-FA0A": "FA0A", + "CJK COMPATIBILITY IDEOGRAPH-FA35": "FA35", + "CJK COMPATIBILITY IDEOGRAPH-FA37": "FA37", + "CJK COMPATIBILITY IDEOGRAPH-FA4A": "FA4A", + "CJK COMPATIBILITY IDEOGRAPH-FA5F": "FA5F", + "CJK COMPATIBILITY IDEOGRAPH-FA66": "FA66", + "CJK COMPATIBILITY IDEOGRAPH-FA8B": "FA8B", + "CJK COMPATIBILITY IDEOGRAPH-FAC5": "FAC5", + "LATIN SMALL LIGATURE FFI": "FB03", + "LATIN SMALL LIGATURE FFL": "FB04", + "HEBREW LETTER FINAL KAF WITH DAGESH": "FB3A", + "HEBREW LETTER TAV WITH DAGESH": "FB4A", + "ARABIC LETTER VEH ISOLATED FORM": "FB6A", "ARABIC LETTER DYEH ISOLATED FORM": "FB72", - "ARABIC LETTER NYEH FINAL FORM": "FB77", - "ARABIC LETTER TCHEHEH MEDIAL FORM": "FB81", - "ARABIC LETTER NOON GHUNNA FINAL FORM": "FB9F", - "ARABIC LETTER HEH DOACHASHMEE INITIAL FORM": "FBAC", - "ARABIC LETTER E FINAL FORM": "FBE5", - "ARABIC LIGATURE KAF WITH YEH ISOLATED FORM": "FC3E", - "ARABIC LIGATURE SHADDA WITH FATHA ISOLATED FORM": "FC60", - "ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH REH FINAL FORM": "FC64", + "ARABIC LETTER TCHEHEH INITIAL FORM": "FB80", + "ARABIC LETTER DAHAL ISOLATED FORM": "FB84", + "ARABIC LETTER DDAL FINAL FORM": "FB89", + "ARABIC SYMBOL THREE DOTS BELOW": "FBB7", + "ARABIC LETTER NG FINAL FORM": "FBD4", + "ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH OE ISOLATED FORM": "FBF2", + "ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF MAKSURA ISOLATED FORM": "FC03", + "ARABIC LIGATURE TEH WITH MEEM ISOLATED FORM": "FC0E", + "ARABIC LIGATURE ZAH WITH MEEM ISOLATED FORM": "FC28", + "ARABIC LIGATURE MEEM WITH JEEM ISOLATED FORM": "FC45", + "ARABIC LIGATURE BEH WITH MEEM FINAL FORM": "FC6C", + "ARABIC LIGATURE TEH WITH REH FINAL FORM": "FC70", + "ARABIC LIGATURE TEH WITH MEEM FINAL FORM": "FC72", + "ARABIC LIGATURE TEH WITH NOON FINAL FORM": "FC73", + "ARABIC LIGATURE THEH WITH ALEF MAKSURA FINAL FORM": "FC7A", "ARABIC LIGATURE QAF WITH YEH FINAL FORM": "FC7F", - "ARABIC LIGATURE YEH WITH MEEM FINAL FORM": "FC93", - "ARABIC 
LIGATURE SEEN WITH HAH INITIAL FORM": "FCAE", - "ARABIC LIGATURE THEH WITH MEEM MEDIAL FORM": "FCE5", - "ARABIC LIGATURE SEEN WITH MEEM MEDIAL FORM": "FCE7", - "ARABIC LIGATURE KAF WITH LAM MEDIAL FORM": "FCEB", - "ARABIC LIGATURE DAD WITH ALEF MAKSURA ISOLATED FORM": "FD07", - "ARABIC LIGATURE TEH WITH JEEM WITH MEEM INITIAL FORM": "FD50", - "ARABIC LIGATURE SAD WITH HAH WITH HAH INITIAL FORM": "FD65", - "ARABIC LIGATURE SHEEN WITH HAH WITH MEEM INITIAL FORM": "FD68", - "ARABIC LIGATURE TAH WITH MEEM WITH YEH FINAL FORM": "FD74", - "ARABIC LIGATURE LAM WITH HAH WITH MEEM FINAL FORM": "FD80", - "ARABIC LIGATURE LAM WITH KHAH WITH MEEM INITIAL FORM": "FD86", - "ARABIC LIGATURE SHEEN WITH HAH WITH YEH FINAL FORM": "FDAA", - "ARABIC LIGATURE ALAYHE ISOLATED FORM": "FDF7", - "VARIATION SELECTOR-3": "FE02", - "VARIATION SELECTOR-5": "FE04", - "VARIATION SELECTOR-11": "FE0A", - "ARABIC LETTER JEEM INITIAL FORM": "FE9F", - "ARABIC LETTER SEEN INITIAL FORM": "FEB3", - "ARABIC LETTER TAH INITIAL FORM": "FEC3", - "ARABIC LETTER QAF FINAL FORM": "FED6", - "ARABIC LETTER KAF ISOLATED FORM": "FED9", - "ARABIC LETTER KAF FINAL FORM": "FEDA", - "ARABIC LETTER HEH INITIAL FORM": "FEEB", - "ARABIC LIGATURE LAM WITH ALEF WITH HAMZA BELOW ISOLATED FORM": "FEF9", - "FULLWIDTH LATIN CAPITAL LETTER T": "FF34", - "FULLWIDTH LEFT SQUARE BRACKET": "FF3B", - "FULLWIDTH LATIN SMALL LETTER F": "FF46", - "HALFWIDTH KATAKANA LETTER KU": "FF78", - "HALFWIDTH KATAKANA LETTER KE": "FF79", - "HALFWIDTH KATAKANA LETTER KO": "FF7A", - "HALFWIDTH KATAKANA LETTER SI": "FF7C", - "HALFWIDTH KATAKANA LETTER HA": "FF8A", - "HALFWIDTH KATAKANA LETTER RE": "FF9A", - "HALFWIDTH HANGUL LETTER WI": "FFD6", - "FULLWIDTH NOT SIGN": "FFE2", - "LINEAR B SYMBOL B049": "10055", - "LINEAR B SYMBOL B063": "10057", - "LINEAR B IDEOGRAM B169": "100B1", - "LINEAR B IDEOGRAM B171": "100B3", - "LINEAR B IDEOGRAM B172": "100B4", - "LINEAR B IDEOGRAM B233 SWORD": "100C9", - "LINEAR B IDEOGRAM B254 DART": "100D8", - "LINEAR B IDEOGRAM VESSEL B217": "100F0", - "LINEAR B IDEOGRAM VESSEL B228": "100F7", - "AEGEAN WORD SEPARATOR LINE": "10100", - "AEGEAN NUMBER FOUR": "1010A", - "AEGEAN NUMBER TEN": "10110", - "AEGEAN NUMBER SEVEN HUNDRED": "1011F", - "AEGEAN NUMBER NINE HUNDRED": "10121", - "GREEK ACROPHONIC ATTIC ONE HUNDRED STATERS": "10152", - "GREEK ACROPHONIC HERMIONIAN ONE": "1015A", - "GREEK ACROPHONIC TROEZENIAN FIFTY ALTERNATE FORM": "10167", - "GREEK ACROPHONIC THESPIAN ONE HUNDRED": "1016A", - "GREEK THREE OBOLS SIGN": "1017E", - "GREEK TRYBLION BASE SIGN": "10189", - "PHAISTOS DISC SIGN CARPENTRY PLANE": "101E2", - "PHAISTOS DISC SIGN COMB": "101E4", - "LYCIAN LETTER B": "10282", - "CARIAN LETTER D": "102A2", - "OLD ITALIC LETTER ZE": "10306", - "OLD ITALIC LETTER THE": "10308", - "OLD ITALIC LETTER SOUTHERN TSE": "1032F", - "GOTHIC LETTER GIBA": "10332", - "GOTHIC LETTER THIUTH": "10338", - "GOTHIC LETTER PAIRTHRA": "10340", - "OLD PERMIC LETTER BUR": "10351", - "OLD PERMIC LETTER SII": "10361", - "OLD PERMIC LETTER U": "10363", - "OLD PERMIC LETTER HA": "1036C", - "OLD PERMIC LETTER IA": "10375", - "UGARITIC LETTER TET": "10389", - "OLD PERSIAN SIGN DI": "103AE", - "OLD PERSIAN SIGN BA": "103B2", - "DESERET CAPITAL LETTER DEE": "10414", - "DESERET SMALL LETTER SHORT E": "1042F", - "DESERET SMALL LETTER BEE": "1043A", - "SHAVIAN LETTER FEE": "10453", - "SHAVIAN LETTER OR": "10479", - "OSMANYA LETTER ALEF": "10480", - "OSMANYA DIGIT SIX": "104A6", - "OSAGE CAPITAL LETTER PA": "104C4", - "OSAGE CAPITAL LETTER TSHA": "104CC", - "OSAGE CAPITAL 
LETTER ZA": "104D2", - "OSAGE SMALL LETTER CHA": "104DD", - "OSAGE SMALL LETTER EHPA": "104ED", - "ELBASAN LETTER JE": "1050E", - "ELBASAN LETTER LLE": "10511", - "ELBASAN LETTER RE": "10519", - "ELBASAN LETTER KHE": "10527", - "CAUCASIAN ALBANIAN LETTER DAT": "10533", - "CAUCASIAN ALBANIAN LETTER ZHIL": "10537", - "VITHKUQI SMALL LETTER DE": "1059C", - "VITHKUQI SMALL LETTER LA": "105A9", + "ARABIC LIGATURE YEH WITH NOON FINAL FORM": "FC94", + "ARABIC LIGATURE YEH WITH ALEF MAKSURA FINAL FORM": "FC95", + "ARABIC LIGATURE JEEM WITH MEEM INITIAL FORM": "FCA8", + "ARABIC LIGATURE SEEN WITH KHAH INITIAL FORM": "FCAF", + "ARABIC LIGATURE FEH WITH KHAH INITIAL FORM": "FCC0", + "ARABIC LIGATURE BEH WITH HEH MEDIAL FORM": "FCE2", + "ARABIC LIGATURE KAF WITH MEEM MEDIAL FORM": "FCEC", + "ARABIC LIGATURE NOON WITH HEH MEDIAL FORM": "FCEF", + "ARABIC LIGATURE SHEEN WITH ALEF MAKSURA FINAL FORM": "FD19", + "ARABIC LIGATURE SHEEN WITH YEH FINAL FORM": "FD1A", + "ARABIC LIGATURE HAH WITH ALEF MAKSURA FINAL FORM": "FD1B", + "ARABIC LIGATURE SAD WITH REH FINAL FORM": "FD2B", + "ARABIC LIGATURE SEEN WITH JEEM MEDIAL FORM": "FD34", + "ORNATE LEFT PARENTHESIS": "FD3E", + "ARABIC LIGATURE SHEEN WITH MEEM WITH KHAH INITIAL FORM": "FD6B", + "ARABIC LIGATURE SHEEN WITH MEEM WITH MEEM FINAL FORM": "FD6C", + "ARABIC LIGATURE NOON WITH MEEM WITH ALEF MAKSURA FINAL FORM": "FD9B", + "ARABIC LIGATURE BISMILLAH AR-RAHMAN AR-RAHEEM": "FDFD", + "PRESENTATION FORM FOR VERTICAL LEFT CORNER BRACKET": "FE41", + "PRESENTATION FORM FOR VERTICAL LEFT WHITE CORNER BRACKET": "FE43", + "SESAME DOT": "FE45", + "DOUBLE WAVY OVERLINE": "FE4C", + "SMALL AMPERSAND": "FE60", + "SMALL HYPHEN-MINUS": "FE63", + "ARABIC LETTER KHAH MEDIAL FORM": "FEA8", + "ARABIC LETTER SEEN FINAL FORM": "FEB2", + "ARABIC LETTER SHEEN INITIAL FORM": "FEB7", + "ARABIC LETTER NOON ISOLATED FORM": "FEE5", + "ARABIC LETTER YEH FINAL FORM": "FEF2", + "ARABIC LETTER YEH MEDIAL FORM": "FEF4", + "FULLWIDTH PERCENT SIGN": "FF05", + "FULLWIDTH SEMICOLON": "FF1B", + "HALFWIDTH KATAKANA LETTER SMALL O": "FF6B", + "HALFWIDTH KATAKANA LETTER HE": "FF8D", + "HALFWIDTH KATAKANA LETTER YA": "FF94", + "HALFWIDTH KATAKANA LETTER RO": "FF9B", + "HALFWIDTH KATAKANA LETTER WA": "FF9C", + "HALFWIDTH HANGUL LETTER CIEUC": "FFB8", + "HALFWIDTH HANGUL LETTER YE": "FFCB", + "HALFWIDTH RIGHTWARDS ARROW": "FFEB", + "LINEAR B SYLLABLE B065 JU": "1000E", + "LINEAR B SYLLABLE B006 NA": "10019", + "LINEAR B SYLLABLE B050 PU": "10022", + "LINEAR B SYLLABLE B004 TE": "10033", + "LINEAR B SYLLABLE B075 WE": "10038", + "LINEAR B SYLLABLE B017 ZA": "1003C", + "LINEAR B SYLLABLE B062 PTE": "10047", + "LINEAR B IDEOGRAM B106M RAM": "10087", + "LINEAR B IDEOGRAM B150": "1009F", + "LINEAR B IDEOGRAM B190": "100C2", + "LINEAR B IDEOGRAM B231 ARROW": "100C7", + "LINEAR B IDEOGRAM VESSEL B200": "100DF", + "LINEAR B IDEOGRAM VESSEL B206": "100E5", + "AEGEAN NUMBER EIGHT": "1010E", + "AEGEAN NUMBER SEVENTY THOUSAND": "10131", + "GREEK ACROPHONIC ATTIC FIVE THOUSAND": "10146", + "GREEK ACROPHONIC ATTIC TEN MNAS": "10157", + "GREEK ACROPHONIC THESPIAN FIFTY": "10169", + "GREEK ONE HALF SIGN ALTERNATE FORM": "10176", + "PHAISTOS DISC SIGN HIDE": "101EA", + "PHAISTOS DISC SIGN TUNNY": "101F0", + "PHAISTOS DISC SIGN FLUTE": "101F8", + "CARIAN LETTER A2": "102A7", + "CARIAN LETTER Q": "102A8", + "COPTIC EPACT THOUSANDS MARK": "102E0", + "COPTIC EPACT NUMBER TWO HUNDRED": "102F4", + "COPTIC EPACT NUMBER THREE HUNDRED": "102F5", + "OLD ITALIC LETTER O": "1030F", + "OLD ITALIC NUMERAL ONE": "10320", + "GOTHIC 
LETTER OTHAL": "10349", + "COMBINING OLD PERMIC LETTER ZATA": "10378", + "UGARITIC LETTER ZU": "10391", + "OLD PERSIAN SIGN THA": "103B0", + "OLD PERSIAN SIGN MI": "103B7", + "OLD PERSIAN SIGN MU": "103B8", + "OLD PERSIAN SIGN SHA": "103C1", + "OLD PERSIAN NUMBER TWENTY": "103D4", + "DESERET CAPITAL LETTER SHORT I": "10406", + "DESERET CAPITAL LETTER H": "10410", + "DESERET SMALL LETTER ZEE": "10446", + "SHAVIAN LETTER ASH": "10468", + "SHAVIAN LETTER ERR": "1047B", + "OSMANYA LETTER XA": "10484", + "OSMANYA LETTER SHIIN": "10489", + "OSMANYA LETTER MIIN": "10491", + "OSMANYA DIGIT THREE": "104A3", + "OSAGE CAPITAL LETTER GHA": "104D1", + "OSAGE SMALL LETTER HA": "104E1", + "OSAGE SMALL LETTER EHTA": "104F1", + "OSAGE SMALL LETTER WA": "104F7", + "ELBASAN LETTER A": "10500", + "ELBASAN LETTER PE": "10517", + "ELBASAN LETTER SE": "1051B", + "CAUCASIAN ALBANIAN LETTER CHOY": "10549", + "VITHKUQI CAPITAL LETTER BE": "10572", + "VITHKUQI CAPITAL LETTER O": "10587", + "VITHKUQI SMALL LETTER A": "10597", + "VITHKUQI SMALL LETTER NE": "105AC", "VITHKUQI SMALL LETTER O": "105AE", - "VITHKUQI SMALL LETTER SE": "105B3", - "LINEAR A SIGN AB011": "1060A", - "LINEAR A SIGN AB013": "1060B", - "LINEAR A SIGN AB034": "1061F", - "LINEAR A SIGN AB057": "10631", - "LINEAR A SIGN AB070": "1063A", - "LINEAR A SIGN AB080": "10641", - "LINEAR A SIGN A309C": "1065F", - "LINEAR A SIGN A310": "10660", - "LINEAR A SIGN A311": "10661", - "LINEAR A SIGN A339": "1067F", - "LINEAR A SIGN A364": "10698", - "LINEAR A SIGN A371": "1069F", - "LINEAR A SIGN A513": "106BE", - "LINEAR A SIGN A550": "106DA", - "LINEAR A SIGN A559": "106E2", - "LINEAR A SIGN A563": "106E3", + "VITHKUQI SMALL LETTER SHE": "105B4", + "LINEAR A SIGN AB065": "10636", + "LINEAR A SIGN AB078": "1063F", + "LINEAR A SIGN AB123": "1064C", + "LINEAR A SIGN A312": "10662", + "LINEAR A SIGN A314": "10666", + "LINEAR A SIGN A501": "106B3", + "LINEAR A SIGN A527": "106C7", + "LINEAR A SIGN A542": "106D5", + "LINEAR A SIGN A570": "106E9", "LINEAR A SIGN A573": "106EC", - "LINEAR A SIGN A614": "1070F", - "LINEAR A SIGN A619": "10714", - "LINEAR A SIGN A628": "1071C", - "LINEAR A SIGN A659": "10731", - "LINEAR A SIGN A701 A": "10740", - "MODIFIER LETTER SMALL DEZH DIGRAPH": "1078A", - "MODIFIER LETTER SMALL CAPITAL G WITH HOOK": "10794", - "MODIFIER LETTER SMALL HENG WITH HOOK": "10797", - "CYPRIOT SYLLABLE SE": "10829", - "PALMYRENE NUMBER ONE": "10879", - "NABATAEAN LETTER FINAL NUN": "10894", - "HATRAN LETTER SHIN": "108F4", - "HATRAN NUMBER FIVE": "108FC", - "PHOENICIAN NUMBER TWO": "1091A", - "LYDIAN LETTER Y": "10927", - "LYDIAN LETTER AN": "10935", + "LINEAR A SIGN A643": "10723", + "LINEAR A SIGN A654": "1072C", + "LINEAR A SIGN A709-6 L6": "1074C", + "LINEAR A SIGN A714 ABB": "10751", + "MODIFIER LETTER SMALL LEZH": "1079E", + "CYPRIOT SYLLABLE KU": "1080E", + "CYPRIOT SYLLABLE LA": "1080F", + "CYPRIOT SYLLABLE TE": "1082E", + "CYPRIOT SYLLABLE ZO": "1083F", + "IMPERIAL ARAMAIC NUMBER THREE": "1085A", + "IMPERIAL ARAMAIC NUMBER TEN THOUSAND": "1085F", + "NABATAEAN LETTER RESH": "1089B", + "HATRAN LETTER ZAYN": "108E6", + "HATRAN LETTER NUN": "108ED", + "PHOENICIAN LETTER DELT": "10903", + "PHOENICIAN LETTER MEM": "1090C", + "PHOENICIAN LETTER NUN": "1090D", + "PHOENICIAN NUMBER ONE": "10916", + "PHOENICIAN NUMBER THREE": "1091B", + "LYDIAN LETTER O": "1092C", + "LYDIAN LETTER SS": "1092E", + "LYDIAN LETTER NN": "10938", + "MEROITIC HIEROGLYPHIC LETTER A": "10980", "MEROITIC HIEROGLYPHIC LETTER NA-2": "1098B", - "MEROITIC HIEROGLYPHIC LETTER NE-2": 
"1098D", - "MEROITIC HIEROGLYPHIC LETTER TE": "1099A", - "MEROITIC CURSIVE LETTER WA": "109A5", - "MEROITIC CURSIVE LETTER TA": "109B4", - "MEROITIC CURSIVE NUMBER FIVE HUNDRED THOUSAND": "109F1", - "KHAROSHTHI PUNCTUATION LINES": "10A58", - "OLD NORTH ARABIAN LETTER AIN": "10A92", - "OLD NORTH ARABIAN LETTER YEH": "10A9A", - "OLD NORTH ARABIAN NUMBER TEN": "10A9E", - "MANICHAEAN LETTER FE": "10ADC", - "MANICHAEAN LETTER XOPH": "10ADF", - "AVESTAN LETTER NE": "10B25", - "INSCRIPTIONAL PARTHIAN LETTER DALETH": "10B43", - "INSCRIPTIONAL PARTHIAN LETTER HE": "10B44", - "INSCRIPTIONAL PAHLAVI NUMBER ONE": "10B78", - "PSALTER PAHLAVI LETTER BETH": "10B81", - "PSALTER PAHLAVI LETTER HETH": "10B87", + "MEROITIC HIEROGLYPHIC LETTER TE-2": "1099B", + "MEROITIC CURSIVE LETTER YA": "109A4", + "MEROITIC CURSIVE LETTER NA": "109A9", + "MEROITIC CURSIVE NUMBER FOUR": "109C3", + "MEROITIC CURSIVE NUMBER FIFTY THOUSAND": "109E8", + "MEROITIC CURSIVE FRACTION SEVEN TWELFTHS": "109FC", + "MEROITIC CURSIVE FRACTION TEN TWELFTHS": "109FF", + "KHAROSHTHI LETTER KHA": "10A11", + "KHAROSHTHI LETTER PHA": "10A25", + "KHAROSHTHI NUMBER ONE HUNDRED": "10A46", + "KHAROSHTHI FRACTION ONE HALF": "10A48", + "KHAROSHTHI PUNCTUATION DOUBLE DANDA": "10A57", + "OLD SOUTH ARABIAN LETTER SHIN": "10A66", + "OLD SOUTH ARABIAN LETTER TAW": "10A69", + "OLD SOUTH ARABIAN LETTER SADHE": "10A6E", + "OLD NORTH ARABIAN LETTER GEEM": "10A94", + "OLD NORTH ARABIAN LETTER GHAIN": "10A96", + "MANICHAEAN LETTER HETH": "10ACD", + "AVESTAN LETTER E": "10B08", + "INSCRIPTIONAL PARTHIAN NUMBER TWO": "10B59", + "PSALTER PAHLAVI LETTER GIMEL": "10B82", + "PSALTER PAHLAVI LETTER MEM-QOPH": "10B8B", "PSALTER PAHLAVI NUMBER TWENTY": "10BAE", - "OLD TURKIC LETTER ORKHON ENT": "10C26", - "OLD TURKIC LETTER YENISEI ENY": "10C2B", - "OLD TURKIC LETTER YENISEI ANG": "10C2C", - "OLD TURKIC LETTER ORKHON AES": "10C3E", - "OLD TURKIC LETTER YENISEI AT": "10C44", - "OLD HUNGARIAN CAPITAL LETTER I": "10C90", - "OLD HUNGARIAN CAPITAL LETTER II": "10C91", - "HANIFI ROHINGYA LETTER FA": "10D09", - "HANIFI ROHINGYA SIGN TANA": "10D26", - "RUMI DIGIT ONE": "10E60", - "RUMI NUMBER TWENTY": "10E6A", - "RUMI FRACTION ONE HALF": "10E7B", - "YEZIDI LETTER CHIM": "10E87", - "YEZIDI LETTER SAD": "10E93", - "OLD SOGDIAN NUMBER ONE": "10F1D", - "OLD SOGDIAN NUMBER ONE HUNDRED": "10F25", + "OLD TURKIC LETTER ORKHON AED": "10C13", + "OLD TURKIC LETTER ORKHON ELT": "10C21", + "OLD TURKIC LETTER YENISEI OQ": "10C39", + "OLD TURKIC LETTER ORKHON ESH": "10C41", + "OLD HUNGARIAN CAPITAL LETTER E": "10C89", + "OLD HUNGARIAN CAPITAL LETTER EK": "10C93", + "OLD HUNGARIAN SMALL LETTER EK": "10CD3", + "OLD HUNGARIAN NUMBER TEN": "10CFC", + "HANIFI ROHINGYA DIGIT TWO": "10D32", + "HANIFI ROHINGYA DIGIT FIVE": "10D35", + "HANIFI ROHINGYA DIGIT EIGHT": "10D38", + "HANIFI ROHINGYA DIGIT NINE": "10D39", + "RUMI DIGIT EIGHT": "10E67", + "RUMI FRACTION ONE THIRD": "10E7D", + "YEZIDI LETTER BE": "10E81", + "YEZIDI LETTER ZA": "10E8F", + "YEZIDI LETTER EYN": "10E97", + "YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE": "10EB1", "SOGDIAN LETTER GIMEL": "10F32", - "SOGDIAN LETTER ZAYIN": "10F35", - "SOGDIAN LETTER KAPH": "10F38", - "SOGDIAN LETTER MEM": "10F3A", - "OLD UYGHUR PUNCTUATION FOUR DOTS": "10F89", - "CHORASMIAN LETTER ZAYIN": "10FB8", - "CHORASMIAN LETTER LAMEDH": "10FBC", - "ELYMAIC LETTER HETH": "10FE7", - "BRAHMI LETTER AA": "11006", - "BRAHMI LETTER U": "11009", - "BRAHMI LETTER E": "1100F", - "BRAHMI LETTER JA": "1101A", - "BRAHMI LETTER TA": "11022", - "BRAHMI LETTER OLD TAMIL NNNA": 
"11037", - "BRAHMI DIGIT FIVE": "1106B", - "KAITHI LETTER NGA": "11091", - "KAITHI LETTER TA": "1109E", + "SOGDIAN LETTER TAW": "10F42", + "SOGDIAN LETTER LESH": "10F44", + "SOGDIAN COMBINING HOOK ABOVE": "10F4C", + "OLD UYGHUR LETTER YODH": "10F76", + "BRAHMI LETTER VOCALIC LL": "1100E", + "BRAHMI LETTER KA": "11013", + "BRAHMI LETTER TTHA": "1101E", + "BRAHMI NUMBER NINETY": "11063", + "BRAHMI DIGIT EIGHT": "1106E", + "KAITHI LETTER E": "11089", + "KAITHI LETTER KA": "1108D", + "KAITHI LETTER NNA": "1109D", + "KAITHI VOWEL SIGN II": "110B2", "KAITHI VOWEL SIGN E": "110B5", - "KAITHI VOWEL SIGN VOCALIC R": "110C2", - "SORA SOMPENG LETTER VAH": "110DA", - "SORA SOMPENG DIGIT ONE": "110F1", - "CHAKMA LETTER DAA": "11118", - "CHAKMA LETTER LAA": "11123", - "SHARADA LETTER VOCALIC RR": "1118A", - "SHARADA LETTER KHA": "11192", - "SHARADA DOUBLE DANDA": "111C6", - "SHARADA SIGN SIDDHAM": "111DB", - "SINHALA ARCHAIC DIGIT ONE": "111E1", - "KHOJKI LETTER CHA": "1120F", - "KHOJKI LETTER TA": "11219", - "KHOJKI LETTER PHA": "11220", - "MULTANI LETTER TA": "11296", - "MULTANI LETTER DHA": "11299", - "KHUDAWADI LETTER GGA": "112BD", - "KHUDAWADI LETTER NNA": "112CC", - "KHUDAWADI LETTER TA": "112CD", - "KHUDAWADI DIGIT THREE": "112F3", - "KHUDAWADI DIGIT SEVEN": "112F7", - "KHUDAWADI DIGIT EIGHT": "112F8", - "COMBINING GRANTHA LETTER A": "11370", - "NEWA LETTER JA": "11416", - "NEWA VOWEL SIGN I": "11436", - "NEWA VOWEL SIGN E": "1143E", - "NEWA VOWEL SIGN AI": "1143F", - "NEWA DIGIT EIGHT": "11458", - "NEWA LETTER VEDIC ANUSVARA": "1145F", + "KAITHI DOUBLE SECTION MARK": "110BF", + "SORA SOMPENG LETTER MAE": "110E8", + "CHAKMA LETTER I": "11104", + "CHAKMA LETTER KAA": "11107", + "CHAKMA DIGIT FOUR": "1113A", + "CHAKMA QUESTION MARK": "11143", + "MAHAJANI LETTER U": "11152", + "MAHAJANI LETTER GHA": "11158", + "MAHAJANI LETTER DA": "11165", + "SHARADA LETTER U": "11187", + "SHARADA LETTER VOCALIC LL": "1118C", + "SHARADA LETTER DA": "111A2", + "SHARADA VOWEL SIGN I": "111B4", + "SHARADA SUTRA MARK": "111CD", + "SHARADA DIGIT SEVEN": "111D7", + "SINHALA ARCHAIC NUMBER NINETY": "111F2", + "KHOJKI VOWEL SIGN AI": "11231", + "MULTANI LETTER VA": "112A4", + "KHUDAWADI LETTER E": "112B6", + "KHUDAWADI LETTER SHA": "112DC", + "KHUDAWADI VOWEL SIGN O": "112E7", + "KHUDAWADI SIGN NUKTA": "112E9", + "GRANTHA SIGN ANUSVARA": "11302", + "GRANTHA LETTER TTHA": "11320", + "GRANTHA SIGN VIRAMA": "1134D", + "COMBINING GRANTHA LETTER VI": "11373", + "NEWA LETTER I": "11402", + "NEWA LETTER TTA": "1141A", + "NEWA VOWEL SIGN VOCALIC RR": "1143B", + "NEWA SIGN AVAGRAHA": "11447", + "NEWA GAP FILLER": "1144E", + "NEWA DIGIT SEVEN": "11457", + "NEWA DIGIT NINE": "11459", + "TIRHUTA LETTER GA": "11491", + "TIRHUTA LETTER NGA": "11493", "TIRHUTA LETTER DHA": "114A1", "TIRHUTA LETTER RA": "114A9", - "TIRHUTA VOWEL SIGN VOCALIC LL": "114B8", - "SIDDHAM LETTER DDA": "1159A", - "SIDDHAM VOWEL SIGN O": "115BA", - "MODI LETTER LA": "11629", - "MODI VOWEL SIGN VOCALIC RR": "11636", - "MODI VOWEL SIGN AI": "1163A", - "MODI DIGIT ONE": "11651", - "MONGOLIAN ROTATED BIRGA": "11661", - "MONGOLIAN INVERTED BIRGA": "11667", - "TAKRI VOWEL SIGN I": "116AE", - "TAKRI VOWEL SIGN UU": "116B1", - "TAKRI DIGIT TWO": "116C2", - "AHOM LETTER KA": "11700", - "AHOM LETTER ALTERNATE TA": "11705", - "AHOM LETTER PHA": "11707", - "AHOM LETTER THA": "1170C", - "AHOM LETTER ALTERNATE BA": "1171A", - "AHOM VOWEL SIGN AM": "1172A", - "AHOM DIGIT TWO": "11732", - "AHOM DIGIT FOUR": "11734", + "TIRHUTA DIGIT FIVE": "114D5", + "SIDDHAM LETTER I": 
"11582", + "SIDDHAM LETTER II": "11583", + "SIDDHAM LETTER GHA": "11591", + "SIDDHAM LETTER TA": "1159D", + "SIDDHAM LETTER NA": "115A1", + "MODI LETTER JHA": "11616", + "MODI DIGIT ZERO": "11650", + "MONGOLIAN BIRGA WITH ORNAMENT": "11660", + "TAKRI LETTER U": "11684", + "TAKRI LETTER TA": "11699", + "TAKRI LETTER NA": "1169D", + "TAKRI LETTER SA": "116A8", + "TAKRI SIGN VISARGA": "116AC", + "TAKRI VOWEL SIGN AU": "116B5", + "AHOM LETTER GHA": "11717", + "AHOM VOWEL SIGN A": "11720", "AHOM NUMBER TEN": "1173A", - "DOGRA LETTER I": "11802", - "DOGRA LETTER SA": "11829", - "DOGRA VOWEL SIGN II": "1182E", - "DOGRA VOWEL SIGN VOCALIC RR": "11832", - "WARANG CITI CAPITAL LETTER II": "118A6", - "WARANG CITI CAPITAL LETTER SII": "118BE", - "WARANG CITI SMALL LETTER E": "118C8", - "WARANG CITI SMALL LETTER KO": "118CC", - "WARANG CITI SMALL LETTER ENY": "118CD", - "WARANG CITI SMALL LETTER ENN": "118D0", - "WARANG CITI SMALL LETTER TTE": "118D2", - "WARANG CITI SMALL LETTER VIYO": "118DF", - "WARANG CITI DIGIT FOUR": "118E4", - "DIVES AKURU LETTER A": "11900", - "DIVES AKURU LETTER THA": "1191C", + "DOGRA LETTER NYA": "11813", + "DOGRA LETTER NNA": "11818", + "DOGRA VOWEL SIGN I": "1182D", + "WARANG CITI CAPITAL LETTER YA": "118A4", + "WARANG CITI SMALL LETTER ANG": "118CA", + "WARANG CITI NUMBER SIXTY": "118EF", + "DIVES AKURU LETTER NGA": "11910", + "DIVES AKURU LETTER PHA": "11921", "DIVES AKURU LETTER SHA": "1192A", - "NANDINAGARI LETTER RA": "119C8", - "NANDINAGARI LETTER SA": "119CD", - "NANDINAGARI LETTER LLA": "119CF", - "ZANABAZAR SQUARE SIGN CANDRA WITH ORNAMENT": "11A37", - "SOYOMBO LETTER KA": "11A5C", - "SOYOMBO LETTER JA": "11A63", - "SOYOMBO LETTER DDHA": "11A69", - "SOYOMBO LETTER -A": "11A7A", + "DIVES AKURU VOWEL SIGN AA": "11930", + "DIVES AKURU DIGIT EIGHT": "11958", + "ZANABAZAR SQUARE LETTER A": "11A00", + "ZANABAZAR SQUARE LETTER KA": "11A0B", + "ZANABAZAR SQUARE LETTER PA": "11A1E", + "ZANABAZAR SQUARE LETTER LA": "11A2C", + "ZANABAZAR SQUARE CLOSING DOUBLE-LINED HEAD MARK": "11A46", + "SOYOMBO VOWEL SIGN VOCALIC L": "11A5A", + "SOYOMBO VOWEL LENGTH MARK": "11A5B", + "SOYOMBO LETTER NA": "11A6F", "SOYOMBO LETTER RA": "11A7C", - "SOYOMBO FINAL CONSONANT SIGN G": "11A8A", - "SOYOMBO FINAL CONSONANT SIGN M": "11A90", - "SOYOMBO FINAL CONSONANT SIGN SH": "11A93", - "CANADIAN SYLLABICS NATTILIK HA": "11AB4", - "PAU CIN HAU LETTER BA": "11ACC", - "PAU CIN HAU LETTER FINAL W": "11AE2", - "PAU CIN HAU SANDHI TONE FINAL": "11AEE", - "PAU CIN HAU LOW-FALLING TONE LONG FINAL": "11AF6", - "BHAIKSUKI LETTER VOCALIC R": "11C06", - "BHAIKSUKI LETTER MA": "11C26", - "BHAIKSUKI VOWEL SIGN VOCALIC R": "11C34", - "BHAIKSUKI NUMBER TWENTY": "11C64", - "BHAIKSUKI NUMBER EIGHTY": "11C6A", - "MARCHEN LETTER -A": "11C88", - "MARCHEN SUBJOINED LETTER LA": "11CAB", - "MARCHEN SIGN ANUSVARA": "11CB5", - "MASARAM GONDI LETTER E": "11D06", - "MASARAM GONDI LETTER LA": "11D27", - "MAKASAR LETTER TA": "11EE6", - "MAKASAR LETTER CA": "11EE9", - "MAKASAR LETTER SA": "11EF0", - "KAWI LETTER TTA": "11F1C", - "KAWI LETTER TA": "11F21", - "KAWI LETTER NA": "11F25", - "KAWI VOWEL SIGN EU": "11F40", - "TAMIL SIGN VARAAKAN": "11FE0", - "CUNEIFORM SIGN AB TIMES GAL": "1200D", - "CUNEIFORM SIGN AB2 TIMES BALAG": "12017", + "SOYOMBO LETTER SA": "11A81", + "SOYOMBO FINAL CONSONANT SIGN N": "11A8E", + "SOYOMBO SIGN ANUSVARA": "11A96", + "SOYOMBO MARK DOUBLE SHAD": "11A9C", + "SOYOMBO MARK PLUTA": "11A9D", + "PAU CIN HAU LETTER U": "11AD9", + "DEVANAGARI SIGN WESTERN FIVE-LIKE BHALE": "11B06", + "BHAIKSUKI LETTER 
GHA": "11C11", + "BHAIKSUKI LETTER BA": "11C24", + "BHAIKSUKI SIGN VIRAMA": "11C3F", + "BHAIKSUKI DANDA": "11C41", + "BHAIKSUKI DIGIT THREE": "11C53", + "BHAIKSUKI DIGIT FIVE": "11C55", + "BHAIKSUKI NUMBER SEVENTY": "11C69", + "MARCHEN LETTER GA": "11C74", + "MASARAM GONDI LETTER NGA": "11D10", + "MASARAM GONDI LETTER CA": "11D11", + "MASARAM GONDI LETTER JHA": "11D14", + "MASARAM GONDI LETTER NA": "11D1F", + "MASARAM GONDI LETTER PHA": "11D21", + "MASARAM GONDI VOWEL SIGN VOCALIC R": "11D36", + "MASARAM GONDI REPHA": "11D46", + "GUNJALA GONDI LETTER CA": "11D7B", + "GUNJALA GONDI LETTER JA": "11D80", + "GUNJALA GONDI LETTER HA": "11D87", + "GUNJALA GONDI VOWEL SIGN AU": "11D94", + "MAKASAR LETTER GA": "11EE1", + "MAKASAR LETTER NA": "11EE8", + "MAKASAR LETTER NYA": "11EEB", + "MAKASAR VOWEL SIGN E": "11EF5", + "KAWI LETTER BA": "11F28", + "KAWI VOWEL SIGN II": "11F37", + "KAWI PUNCTUATION DOUBLE DOT": "11F4A", + "KAWI DIGIT FIVE": "11F55", + "KAWI DIGIT SIX": "11F56", + "TAMIL FRACTION ONE THREE-HUNDRED-AND-TWENTIETH": "11FC0", + "TAMIL FRACTION ONE SIXTEENTH-1": "11FC9", + "TAMIL FRACTION ONE HALF-2": "11FD2", + "CUNEIFORM SIGN AB TIMES DUN3 GUNU": "1200C", + "CUNEIFORM SIGN AB TIMES GAN2 TENU": "1200E", + "CUNEIFORM SIGN AK TIMES ERIN2": "1201E", + "CUNEIFORM SIGN AK TIMES SHITA PLUS GISH": "1201F", "CUNEIFORM SIGN AL TIMES GISH": "12023", - "CUNEIFORM SIGN AMAR": "1202B", - "CUNEIFORM SIGN DAG KISIM5 TIMES USH": "1206D", - "CUNEIFORM SIGN E TIMES PAP": "1208B", - "CUNEIFORM SIGN EN": "12097", - "CUNEIFORM SIGN EZEN TIMES A": "120A2", - "CUNEIFORM SIGN EZEN TIMES KASKAL SQUARED": "120AD", - "CUNEIFORM SIGN EZEN TIMES LAL TIMES LAL": "120B0", - "CUNEIFORM SIGN GA2 TIMES A PLUS DA PLUS HA": "120B8", - "CUNEIFORM SIGN GA2 TIMES HUB2": "120D5", - "CUNEIFORM SIGN GA2 TIMES SAR": "120E4", - "CUNEIFORM SIGN IDIM": "12142", - "CUNEIFORM SIGN IDIM OVER IDIM SQUARED": "12144", - "CUNEIFORM SIGN KA TIMES ESH2": "12161", - "CUNEIFORM SIGN KA TIMES GUR7": "1216C", - "CUNEIFORM SIGN KA TIMES LU": "12173", - "CUNEIFORM SIGN KA TIMES SIG": "12184", - "CUNEIFORM SIGN KAD2": "12190", - "CUNEIFORM SIGN KAL TIMES BAD": "12198", - "CUNEIFORM SIGN KIN": "121A5", - "CUNEIFORM SIGN KISAL": "121A6", - "CUNEIFORM SIGN KUL": "121B0", - "CUNEIFORM SIGN KWU318": "121B6", - "CUNEIFORM SIGN LAGAB TIMES A": "121B9", - "CUNEIFORM SIGN LAGAB TIMES BI": "121C1", - "CUNEIFORM SIGN LAGAB TIMES KU3": "121D1", - "CUNEIFORM SIGN LAGAB TIMES MUSH": "121DA", - "CUNEIFORM SIGN LAGAB TIMES TAK4": "121E3", - "CUNEIFORM SIGN LIMMU2": "121F9", - "CUNEIFORM SIGN MASH": "12226", - "CUNEIFORM SIGN MI": "1222A", - "CUNEIFORM SIGN MUG": "1222E", - "CUNEIFORM SIGN NAGA INVERTED": "12241", - "CUNEIFORM SIGN PI TIMES A": "12280", - "CUNEIFORM SIGN PIRIG TIMES UD": "1228C", - "CUNEIFORM SIGN SAL LAGAB TIMES ASH2": "122AA", - "CUNEIFORM SIGN SHE HU": "122BB", - "CUNEIFORM SIGN TAG TIMES TUG2": "122F8", - "CUNEIFORM SIGN TIL": "12300", - "CUNEIFORM SIGN UR CROSSING UR": "12329", - "CUNEIFORM SIGN URI3": "12336", - "CUNEIFORM SIGN URU TIMES ISH": "12344", - "CUNEIFORM SIGN URUDA": "1234F", + "CUNEIFORM SIGN ASH": "12038", + "CUNEIFORM SIGN BAL OVER BAL": "12045", + "CUNEIFORM SIGN BU CROSSING BU": "12050", + "CUNEIFORM SIGN DAG KISIM5 TIMES A PLUS MASH": "12057", + "CUNEIFORM SIGN DAG KISIM5 TIMES HA": "12060", + "CUNEIFORM SIGN DAG KISIM5 TIMES U2 PLUS GIR2": "1206C", + "CUNEIFORM SIGN DI": "12072", + "CUNEIFORM SIGN EZEN": "120A1", + "CUNEIFORM SIGN GA2": "120B7", + "CUNEIFORM SIGN GA2 TIMES GI4": "120CE", + "CUNEIFORM SIGN GA2 TIMES NUN OVER 
NUN": "120E1", + "CUNEIFORM SIGN GUD": "1211E", + "CUNEIFORM SIGN HI TIMES SHE": "12135", + "CUNEIFORM SIGN KA TIMES GA": "12162", + "CUNEIFORM SIGN KA TIMES GAN2 TENU": "12164", + "CUNEIFORM SIGN KA TIMES KAK": "1216F", + "CUNEIFORM SIGN KA TIMES SHA": "12180", + "CUNEIFORM SIGN KA TIMES ZI": "1218C", + "CUNEIFORM SIGN KAL CROSSING KAL": "12199", + "CUNEIFORM SIGN LAGAB TIMES IM PLUS LU": "121CE", + "CUNEIFORM SIGN LAGAB TIMES KI": "121CF", + "CUNEIFORM SIGN LAGAB TIMES LAGAB": "121D4", + "CUNEIFORM SIGN LAGAB TIMES U PLUS U PLUS U": "121E7", + "CUNEIFORM SIGN LU2 TIMES SIK2 PLUS BU": "1220F", + "CUNEIFORM SIGN LUGAL OPPOSING LUGAL": "12219", + "CUNEIFORM SIGN LUL": "1221C", + "CUNEIFORM SIGN MIN": "1222B", + "CUNEIFORM SIGN NA": "1223E", + "CUNEIFORM SIGN NAGA": "12240", + "CUNEIFORM SIGN SAG TIMES DU": "12297", + "CUNEIFORM SIGN SHA3 TIMES GISH": "122B1", + "CUNEIFORM SIGN SHID TIMES A": "122C4", + "CUNEIFORM SIGN SHIR": "122D3", + "CUNEIFORM SIGN SIG4": "122DE", + "CUNEIFORM SIGN TAG TIMES GUD": "122F5", + "CUNEIFORM SIGN TAG TIMES SHU": "122F7", + "CUNEIFORM SIGN TIR OVER TIR GAD OVER GAD GAR OVER GAR": "12304", + "CUNEIFORM SIGN U2": "12311", + "CUNEIFORM SIGN UMUM TIMES PA": "12325", + "CUNEIFORM SIGN URU TIMES PA": "12348", "CUNEIFORM SIGN UZU": "1235C", - "CUNEIFORM SIGN DISH PLUS SHU": "12375", - "CUNEIFORM SIGN KA TIMES GUD": "12381", - "CUNEIFORM SIGN KA TIMES SHUL": "12385", - "CUNEIFORM NUMERIC SIGN EIGHT GESH2": "1241C", - "CUNEIFORM NUMERIC SIGN ONE GESHU": "1241E", - "CUNEIFORM NUMERIC SIGN SEVEN SHAR2": "12429", - "CUNEIFORM NUMERIC SIGN TWO THIRDS VARIANT FORM A": "1245E", + "CUNEIFORM SIGN ZI3": "12365", + "CUNEIFORM NUMERIC SIGN SEVEN GESH2": "1241B", + "CUNEIFORM NUMERIC SIGN THREE GESHU": "12420", + "CUNEIFORM NUMERIC SIGN EIGHT VARIANT FORM USSU3": "12445", + "CUNEIFORM NUMERIC SIGN NINE VARIANT FORM ILIMMU4": "12448", + "CUNEIFORM NUMERIC SIGN FOUR ASH TENU": "1244C", + "CUNEIFORM NUMERIC SIGN FOUR BAN2 VARIANT FORM": "12453", "CUNEIFORM NUMERIC SIGN OLD ASSYRIAN ONE QUARTER": "12462", - "CUNEIFORM SIGN DIM TIMES IGI": "1248A", - "CUNEIFORM SIGN DUG TIMES GA": "12493", - "CUNEIFORM SIGN LAK-030": "124D8", - "CUNEIFORM SIGN SAG GUNU TIMES HA": "1252D", - "CUNEIFORM SIGN SAG TIMES SHE AT LEFT": "1252F", - "CUNEIFORM SIGN SI TIMES TAK4": "12538", - "CYPRO-MINOAN SIGN CM028": "12FA6", - "CYPRO-MINOAN SIGN CM053": "12FB9", - "CYPRO-MINOAN SIGN CM058": "12FBD", - "CYPRO-MINOAN SIGN CM085": "12FD7", - "CYPRO-MINOAN SIGN CM105": "12FEA", - "CYPRO-MINOAN SIGN CM112": "12FEF", - "EGYPTIAN HIEROGLYPH A042A": "13031", - "EGYPTIAN HIEROGLYPH A058": "13043", - "EGYPTIAN HIEROGLYPH A070": "1304F", - "EGYPTIAN HIEROGLYPH B004": "13053", - "EGYPTIAN HIEROGLYPH C002": "1305B", - "EGYPTIAN HIEROGLYPH C008": "13064", - "EGYPTIAN HIEROGLYPH C013": "1306A", + "CUNEIFORM NUMERIC SIGN SIX U VARIANT FORM": "1246B", + "CUNEIFORM NUMERIC SIGN EIGHT U VARIANT FORM": "1246D", + "CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON": "12474", + "CUNEIFORM SIGN DIM TIMES U U U": "1248B", + "CUNEIFORM SIGN DUG TIMES KUSHU2 PLUS KASKAL": "1249D", + "CUNEIFORM SIGN GA2 TIMES GAR PLUS DI": "124BC", + "CUNEIFORM SIGN LAK-080": "124DD", + "CUNEIFORM SIGN LAK-081 OVER LAK-081": "124DE", + "CUNEIFORM SIGN LAK-130": "124E0", + "CUNEIFORM SIGN LAK-266": "124E9", + "CUNEIFORM SIGN LAK-648 TIMES IGI GUNU": "12511", + "CUNEIFORM SIGN UR2 TIMES UD": "1253D", + "CUNEIFORM SIGN URU TIMES LAK-668": "1253F", + "CYPRO-MINOAN SIGN CM051": "12FB7", + "CYPRO-MINOAN SIGN CM054": "12FBA", + "EGYPTIAN HIEROGLYPH A001": 
"13000", + "EGYPTIAN HIEROGLYPH A055": "13040", + "EGYPTIAN HIEROGLYPH B007": "13057", + "EGYPTIAN HIEROGLYPH B009": "13059", + "EGYPTIAN HIEROGLYPH C011": "13068", "EGYPTIAN HIEROGLYPH C024": "13075", - "EGYPTIAN HIEROGLYPH D001": "13076", - "EGYPTIAN HIEROGLYPH D008A": "1307E", - "EGYPTIAN HIEROGLYPH D031A": "13097", - "EGYPTIAN HIEROGLYPH D039": "130A0", - "EGYPTIAN HIEROGLYPH D052": "130B8", - "EGYPTIAN HIEROGLYPH D067G": "130D0", - "EGYPTIAN HIEROGLYPH E008": "130D9", - "EGYPTIAN HIEROGLYPH F021A": "13115", - "EGYPTIAN HIEROGLYPH F023": "13117", + "EGYPTIAN HIEROGLYPH D009": "1307F", + "EGYPTIAN HIEROGLYPH D014": "13084", + "EGYPTIAN HIEROGLYPH D047": "130A9", + "EGYPTIAN HIEROGLYPH D049": "130AC", + "EGYPTIAN HIEROGLYPH D058": "130C0", + "EGYPTIAN HIEROGLYPH D061": "130C3", + "EGYPTIAN HIEROGLYPH E003": "130D4", + "EGYPTIAN HIEROGLYPH E020": "130E9", + "EGYPTIAN HIEROGLYPH E028A": "130F3", "EGYPTIAN HIEROGLYPH F026": "1311A", - "EGYPTIAN HIEROGLYPH F040": "1312B", - "EGYPTIAN HIEROGLYPH F046": "13132", - "EGYPTIAN HIEROGLYPH F046A": "13133", - "EGYPTIAN HIEROGLYPH F048": "13136", - "EGYPTIAN HIEROGLYPH G011": "1314C", - "EGYPTIAN HIEROGLYPH G038": "1316C", - "EGYPTIAN HIEROGLYPH G050": "1317A", - "EGYPTIAN HIEROGLYPH M012A": "131BD", - "EGYPTIAN HIEROGLYPH M035": "131E4", - "EGYPTIAN HIEROGLYPH N020": "13204", - "EGYPTIAN HIEROGLYPH N023": "13207", - "EGYPTIAN HIEROGLYPH NU006": "1323B", - "EGYPTIAN HIEROGLYPH NU009": "1323E", - "EGYPTIAN HIEROGLYPH NU018": "13249", - "EGYPTIAN HIEROGLYPH NU020": "1324C", - "EGYPTIAN HIEROGLYPH O001A": "13251", - "EGYPTIAN HIEROGLYPH O009": "13260", - "EGYPTIAN HIEROGLYPH O024A": "13275", - "EGYPTIAN HIEROGLYPH O044": "13291", - "EGYPTIAN HIEROGLYPH Q003": "132AA", - "EGYPTIAN HIEROGLYPH S021": "132EA", - "EGYPTIAN HIEROGLYPH U027": "13350", - "EGYPTIAN HIEROGLYPH U033": "13358", - "EGYPTIAN HIEROGLYPH V025": "13398", - "EGYPTIAN HIEROGLYPH V034": "133A6", - "EGYPTIAN HIEROGLYPH V038": "133AB", - "EGYPTIAN HIEROGLYPH V040A": "133AE", - "EGYPTIAN HIEROGLYPH Y003": "133DE", - "EGYPTIAN HIEROGLYPH Y007": "133E2", - "EGYPTIAN HIEROGLYPH Z016B": "13406", - "EGYPTIAN HIEROGLYPH Z016D": "13408", - "EGYPTIAN HIEROGLYPH AA004": "13410", - "EGYPTIAN HIEROGLYPH AA027": "13429", - "EGYPTIAN HIEROGLYPH V011D": "1342F", - "ANATOLIAN HIEROGLYPH A020": "14414", - "ANATOLIAN HIEROGLYPH A025": "14419", - "ANATOLIAN HIEROGLYPH A041A": "1442C", - "ANATOLIAN HIEROGLYPH A076": "14455", - "ANATOLIAN HIEROGLYPH A098": "1446C", - "ANATOLIAN HIEROGLYPH A102": "14473", - "ANATOLIAN HIEROGLYPH A108": "14482", - "ANATOLIAN HIEROGLYPH A217": "144FA", - "ANATOLIAN HIEROGLYPH A245": "14517", - "ANATOLIAN HIEROGLYPH A246": "14518", - "ANATOLIAN HIEROGLYPH A250": "1451C", - "ANATOLIAN HIEROGLYPH A304": "14556", - "ANATOLIAN HIEROGLYPH A306": "14558", - "ANATOLIAN HIEROGLYPH A336C": "1457D", - "ANATOLIAN HIEROGLYPH A383 RA OR RI": "145B1", - "ANATOLIAN HIEROGLYPH A404": "145C8", - "ANATOLIAN HIEROGLYPH A411": "145D0", - "ANATOLIAN HIEROGLYPH A425": "145DE", - "ANATOLIAN HIEROGLYPH A445": "145F2", - "ANATOLIAN HIEROGLYPH A518": "1463A", - "BAMUM LETTER PHASE-A PON PA NJI PIPAEMBA": "1680A", - "BAMUM LETTER PHASE-A MVEUAENGAM": "16810", - "BAMUM LETTER PHASE-A SUU": "16815", - "BAMUM LETTER PHASE-A MGBASA": "16824", - "BAMUM LETTER PHASE-A SONJAM": "16830", - "BAMUM LETTER PHASE-B LOM NTEUM": "1685B", - "BAMUM LETTER PHASE-C YUM": "16891", - "BAMUM LETTER PHASE-C NZEUM": "168A3", - "BAMUM LETTER PHASE-C MBIT": "168A6", - "BAMUM LETTER PHASE-C WUP": "168D1", - "BAMUM LETTER PHASE-C 
MBANYI": "168DF", - "BAMUM LETTER PHASE-C TET": "168F0", - "BAMUM LETTER PHASE-D NTEUM": "16938", + "EGYPTIAN HIEROGLYPH F027": "1311B", + "EGYPTIAN HIEROGLYPH G004": "13142", + "EGYPTIAN HIEROGLYPH K001": "1319B", + "EGYPTIAN HIEROGLYPH M026": "131D7", + "EGYPTIAN HIEROGLYPH N027": "1320C", + "EGYPTIAN HIEROGLYPH NL005A": "13225", + "EGYPTIAN HIEROGLYPH NU012": "13243", + "EGYPTIAN HIEROGLYPH O003": "13253", + "EGYPTIAN HIEROGLYPH O018": "1326C", + "EGYPTIAN HIEROGLYPH O020": "1326F", + "EGYPTIAN HIEROGLYPH P006": "132A2", + "EGYPTIAN HIEROGLYPH R011": "132BD", + "EGYPTIAN HIEROGLYPH R013": "132BF", + "EGYPTIAN HIEROGLYPH T010": "13314", + "EGYPTIAN HIEROGLYPH T020": "13320", + "EGYPTIAN HIEROGLYPH U019": "13347", + "EGYPTIAN HIEROGLYPH U034": "13359", + "EGYPTIAN HIEROGLYPH V020E": "1338B", + "EGYPTIAN HIEROGLYPH V033": "133A4", + "EGYPTIAN HIEROGLYPH W023": "133CB", + "EGYPTIAN HIEROGLYPH X001": "133CF", + "EGYPTIAN HIEROGLYPH Z015A": "133FB", + "EGYPTIAN HIEROGLYPH Z015D": "133FE", + "EGYPTIAN HIEROGLYPH AA006": "13412", + "EGYPTIAN HIEROGLYPH AA016": "1341E", + "EGYPTIAN HIEROGLYPH AA018": "13420", + "EGYPTIAN HIEROGLYPH AA019": "13421", + "EGYPTIAN HIEROGLYPH AA026": "13428", + "ANATOLIAN HIEROGLYPH A045": "14430", + "ANATOLIAN HIEROGLYPH A063": "14445", + "ANATOLIAN HIEROGLYPH A115A": "1448C", + "ANATOLIAN HIEROGLYPH A149": "144B0", + "ANATOLIAN HIEROGLYPH A181": "144D0", + "ANATOLIAN HIEROGLYPH A198": "144E1", + "ANATOLIAN HIEROGLYPH A215": "144F6", + "ANATOLIAN HIEROGLYPH A228": "14506", + "ANATOLIAN HIEROGLYPH A253": "1451F", + "ANATOLIAN HIEROGLYPH A254": "14520", + "ANATOLIAN HIEROGLYPH A279": "1453A", + "ANATOLIAN HIEROGLYPH A294A": "1454B", + "ANATOLIAN HIEROGLYPH A299A": "14551", + "ANATOLIAN HIEROGLYPH A319": "14566", + "ANATOLIAN HIEROGLYPH A321": "14568", + "ANATOLIAN HIEROGLYPH A343": "14584", + "ANATOLIAN HIEROGLYPH A370": "145A2", + "ANATOLIAN HIEROGLYPH A380": "145AD", + "ANATOLIAN HIEROGLYPH A402": "145C6", + "ANATOLIAN HIEROGLYPH A419": "145D8", + "ANATOLIAN HIEROGLYPH A421": "145DA", + "ANATOLIAN HIEROGLYPH A427": "145E0", + "ANATOLIAN HIEROGLYPH A458": "14601", + "ANATOLIAN HIEROGLYPH A494": "14625", + "ANATOLIAN HIEROGLYPH A501": "14629", + "ANATOLIAN HIEROGLYPH A502": "1462A", + "ANATOLIAN HIEROGLYPH A525": "14641", + "ANATOLIAN HIEROGLYPH A526": "14642", + "BAMUM LETTER PHASE-A NGKUE MFON": "16800", + "BAMUM LETTER PHASE-A SHINDA PA NJI": "16808", + "BAMUM LETTER PHASE-A NTAP MFAA": "16819", + "BAMUM LETTER PHASE-A NZUN MEUT": "1681F", + "BAMUM LETTER PHASE-A NDAANGGEUAET": "16834", + "BAMUM LETTER PHASE-A MAEM": "1684E", + "BAMUM LETTER PHASE-B PARUM": "16872", + "BAMUM LETTER PHASE-C NZA": "16890", + "BAMUM LETTER PHASE-C SETFON": "168DC", + "BAMUM LETTER PHASE-C MUAE": "168E7", + "BAMUM LETTER PHASE-D SHEE": "168FD", + "BAMUM LETTER PHASE-D NJAP": "16902", + "BAMUM LETTER PHASE-D NDEE": "16915", "BAMUM LETTER PHASE-D NJUEQ": "16947", - "BAMUM LETTER PHASE-E LOON": "1696B", - "BAMUM LETTER PHASE-E LOM": "1697B", - "BAMUM LETTER PHASE-E MAP": "1698B", - "BAMUM LETTER PHASE-E PO": "16998", - "BAMUM LETTER PHASE-E MGBEN": "169A5", - "BAMUM LETTER PHASE-E NKOM": "169A8", - "BAMUM LETTER PHASE-E KUET": "169AB", - "BAMUM LETTER PHASE-E NYI CLEAVER": "169AD", - "BAMUM LETTER PHASE-E YUEQ": "169B2", - "BAMUM LETTER PHASE-E MIEE": "169C3", - "BAMUM LETTER PHASE-E NJEE": "169D4", - "BAMUM LETTER PHASE-E TAA": "169DB", - "BAMUM LETTER PHASE-E YEUX": "169DF", - "BAMUM LETTER PHASE-E PEUX": "169E7", - "BAMUM LETTER PHASE-E TEN": "16A00", - "BAMUM LETTER PHASE-F RU": 
"16A23", - "MRO LETTER CHU": "16A4B", - "MRO LETTER THEA": "16A55", - "MRO DANDA": "16A6E", - "TANGSA LETTER OC": "16A71", - "TANGSA LETTER EX": "16A7F", - "TANGSA LETTER UQ": "16A86", - "TANGSA LETTER MC": "16A9D", - "TANGSA DIGIT SIX": "16AC6", - "BASSA VAH LETTER HWAH": "16ADA", - "PAHAWH HMONG VOWEL KEEB": "16B00", - "PAHAWH HMONG CLAN SIGN TSHEEJ": "16B7D", - "MEDEFAIDRIN CAPITAL LETTER S": "16E41", - "MEDEFAIDRIN CAPITAL LETTER I": "16E4B", - "MEDEFAIDRIN CAPITAL LETTER Y": "16E5F", - "MEDEFAIDRIN SMALL LETTER J": "16E6E", - "MEDEFAIDRIN DIGIT FIVE": "16E85", - "MIAO LETTER NHA": "16F11", - "MIAO LETTER TSA": "16F37", - "MIAO VOWEL SIGN YI": "16F72", - "TANGUT ITERATION MARK": "16FE0", - "TANGUT COMPONENT-002": "18801", - "TANGUT COMPONENT-007": "18806", - "TANGUT COMPONENT-026": "18819", - "TANGUT COMPONENT-115": "18872", - "TANGUT COMPONENT-125": "1887C", - "TANGUT COMPONENT-133": "18884", - "TANGUT COMPONENT-142": "1888D", - "TANGUT COMPONENT-151": "18896", - "TANGUT COMPONENT-159": "1889E", - "TANGUT COMPONENT-178": "188B1", - "TANGUT COMPONENT-194": "188C1", - "TANGUT COMPONENT-200": "188C7", - "TANGUT COMPONENT-223": "188DE", - "TANGUT COMPONENT-239": "188EE", - "TANGUT COMPONENT-241": "188F0", - "TANGUT COMPONENT-252": "188FB", - "TANGUT COMPONENT-254": "188FD", - "TANGUT COMPONENT-259": "18902", - "TANGUT COMPONENT-283": "1891A", + "BAMUM LETTER PHASE-D PEE": "1695A", + "BAMUM LETTER PHASE-D TI": "16961", + "BAMUM LETTER PHASE-E LAQ": "169D9", + "BAMUM LETTER PHASE-E NGA": "169F2", + "BAMUM LETTER PHASE-E GHOM": "16A02", + "BAMUM LETTER PHASE-F SI": "16A14", + "MRO LETTER MIM": "16A43", + "MRO DIGIT NINE": "16A69", + "TANGSA LETTER UZ": "16A84", + "TANGSA LETTER UX": "16A87", + "TANGSA LETTER UIUX": "16A9B", + "TANGSA LETTER NGA": "16AA3", + "TANGSA LETTER HTA": "16AAF", + "TANGSA LETTER CA": "16AB5", + "TANGSA DIGIT FOUR": "16AC4", + "BASSA VAH COMBINING LOW TONE": "16AF1", + "PAHAWH HMONG VOWEL KEEV": "16B01", + "PAHAWH HMONG VOWEL KWV": "16B19", + "PAHAWH HMONG CONSONANT NAU": "16B2C", + "PAHAWH HMONG MARK CIM HOM": "16B35", + "PAHAWH HMONG CLAN SIGN KWM": "16B8E", + "MEDEFAIDRIN CAPITAL LETTER W": "16E43", + "MEDEFAIDRIN CAPITAL LETTER HP": "16E56", + "MEDEFAIDRIN CAPITAL LETTER NY": "16E57", + "MEDEFAIDRIN SMALL LETTER P": "16E67", + "MEDEFAIDRIN SMALL LETTER A": "16E6D", + "MEDEFAIDRIN SMALL LETTER R": "16E7C", + "MEDEFAIDRIN DIGIT EIGHT": "16E88", + "MEDEFAIDRIN NUMBER SEVENTEEN": "16E91", + "MEDEFAIDRIN DIGIT THREE ALTERNATE FORM": "16E96", + "MIAO LETTER TTA": "16F0E", + "MIAO LETTER ZZSYA": "16F41", + "MIAO VOWEL SIGN ENG": "16F5F", + "KHITAN SMALL SCRIPT FILLER": "16FE4", + "TANGUT COMPONENT-012": "1880B", + "TANGUT COMPONENT-019": "18812", + "TANGUT COMPONENT-050": "18831", + "TANGUT COMPONENT-054": "18835", + "TANGUT COMPONENT-066": "18841", + "TANGUT COMPONENT-101": "18864", + "TANGUT COMPONENT-153": "18898", + "TANGUT COMPONENT-176": "188AF", + "TANGUT COMPONENT-234": "188E9", + "TANGUT COMPONENT-273": "18910", "TANGUT COMPONENT-284": "1891B", - "TANGUT COMPONENT-310": "18935", - "TANGUT COMPONENT-342": "18955", + "TANGUT COMPONENT-300": "1892B", + "TANGUT COMPONENT-331": "1894A", + "TANGUT COMPONENT-339": "18952", + "TANGUT COMPONENT-345": "18958", + "TANGUT COMPONENT-353": "18960", "TANGUT COMPONENT-370": "18971", - "TANGUT COMPONENT-443": "189BA", - "TANGUT COMPONENT-511": "189FE", - "TANGUT COMPONENT-514": "18A01", - "TANGUT COMPONENT-557": "18A2C", - "TANGUT COMPONENT-559": "18A2E", - "TANGUT COMPONENT-564": "18A33", - "TANGUT COMPONENT-601": "18A58", - 
"TANGUT COMPONENT-623": "18A6E", - "TANGUT COMPONENT-636": "18A7B", - "TANGUT COMPONENT-645": "18A84", - "TANGUT COMPONENT-649": "18A88", - "TANGUT COMPONENT-653": "18A8C", - "TANGUT COMPONENT-657": "18A90", - "TANGUT COMPONENT-664": "18A97", - "TANGUT COMPONENT-704": "18ABF", - "TANGUT COMPONENT-719": "18ACE", - "TANGUT COMPONENT-754": "18AF1", - "TANGUT COMPONENT-756": "18AF3", - "TANGUT COMPONENT-768": "18AFF", - "KHITAN SMALL SCRIPT CHARACTER-18B07": "18B07", - "KHITAN SMALL SCRIPT CHARACTER-18B0B": "18B0B", - "KHITAN SMALL SCRIPT CHARACTER-18B23": "18B23", - "KHITAN SMALL SCRIPT CHARACTER-18B24": "18B24", - "KHITAN SMALL SCRIPT CHARACTER-18B25": "18B25", - "KHITAN SMALL SCRIPT CHARACTER-18B2C": "18B2C", - "KHITAN SMALL SCRIPT CHARACTER-18B2D": "18B2D", - "KHITAN SMALL SCRIPT CHARACTER-18B2F": "18B2F", - "KHITAN SMALL SCRIPT CHARACTER-18B38": "18B38", + "TANGUT COMPONENT-403": "18992", + "TANGUT COMPONENT-405": "18994", + "TANGUT COMPONENT-430": "189AD", + "TANGUT COMPONENT-441": "189B8", + "TANGUT COMPONENT-475": "189DA", + "TANGUT COMPONENT-512": "189FF", + "TANGUT COMPONENT-556": "18A2B", + "TANGUT COMPONENT-566": "18A35", + "TANGUT COMPONENT-581": "18A44", + "TANGUT COMPONENT-592": "18A4F", + "TANGUT COMPONENT-600": "18A57", + "TANGUT COMPONENT-666": "18A99", + "TANGUT COMPONENT-673": "18AA0", + "TANGUT COMPONENT-681": "18AA8", + "TANGUT COMPONENT-689": "18AB0", + "TANGUT COMPONENT-701": "18ABC", + "TANGUT COMPONENT-720": "18ACF", + "TANGUT COMPONENT-758": "18AF5", + "TANGUT COMPONENT-766": "18AFD", + "KHITAN SMALL SCRIPT CHARACTER-18B04": "18B04", + "KHITAN SMALL SCRIPT CHARACTER-18B10": "18B10", + "KHITAN SMALL SCRIPT CHARACTER-18B17": "18B17", + "KHITAN SMALL SCRIPT CHARACTER-18B18": "18B18", + "KHITAN SMALL SCRIPT CHARACTER-18B3C": "18B3C", + "KHITAN SMALL SCRIPT CHARACTER-18B3E": "18B3E", + "KHITAN SMALL SCRIPT CHARACTER-18B41": "18B41", + "KHITAN SMALL SCRIPT CHARACTER-18B42": "18B42", "KHITAN SMALL SCRIPT CHARACTER-18B4C": "18B4C", - "KHITAN SMALL SCRIPT CHARACTER-18B64": "18B64", - "KHITAN SMALL SCRIPT CHARACTER-18B77": "18B77", - "KHITAN SMALL SCRIPT CHARACTER-18B7E": "18B7E", - "KHITAN SMALL SCRIPT CHARACTER-18B9E": "18B9E", - "KHITAN SMALL SCRIPT CHARACTER-18BA0": "18BA0", - "KHITAN SMALL SCRIPT CHARACTER-18BB8": "18BB8", - "KHITAN SMALL SCRIPT CHARACTER-18BD1": "18BD1", - "KHITAN SMALL SCRIPT CHARACTER-18BEF": "18BEF", - "KHITAN SMALL SCRIPT CHARACTER-18C07": "18C07", + "KHITAN SMALL SCRIPT CHARACTER-18B5C": "18B5C", + "KHITAN SMALL SCRIPT CHARACTER-18B6F": "18B6F", + "KHITAN SMALL SCRIPT CHARACTER-18BB4": "18BB4", + "KHITAN SMALL SCRIPT CHARACTER-18BBD": "18BBD", + "KHITAN SMALL SCRIPT CHARACTER-18BE3": "18BE3", + "KHITAN SMALL SCRIPT CHARACTER-18C00": "18C00", "KHITAN SMALL SCRIPT CHARACTER-18C19": "18C19", - "KHITAN SMALL SCRIPT CHARACTER-18C1A": "18C1A", + "KHITAN SMALL SCRIPT CHARACTER-18C1C": "18C1C", "KHITAN SMALL SCRIPT CHARACTER-18C24": "18C24", - "KHITAN SMALL SCRIPT CHARACTER-18C2F": "18C2F", - "KHITAN SMALL SCRIPT CHARACTER-18C59": "18C59", - "KHITAN SMALL SCRIPT CHARACTER-18C70": "18C70", - "KHITAN SMALL SCRIPT CHARACTER-18C7B": "18C7B", - "KHITAN SMALL SCRIPT CHARACTER-18C80": "18C80", - "KHITAN SMALL SCRIPT CHARACTER-18C84": "18C84", - "KHITAN SMALL SCRIPT CHARACTER-18CC3": "18CC3", - "KHITAN SMALL SCRIPT CHARACTER-18CC7": "18CC7", - "HIRAGANA LETTER ARCHAIC YE": "1B001", - "HENTAIGANA LETTER U-2": "1B00B", - "HENTAIGANA LETTER KI-6": "1B028", - "HENTAIGANA LETTER SE-2": "1B053", - "HENTAIGANA LETTER TE-9": "1B076", - "HENTAIGANA LETTER NA-2": "1B07F", - 
"HENTAIGANA LETTER NA-7": "1B084", - "HENTAIGANA LETTER NE-1": "1B092", - "HENTAIGANA LETTER HA-3": "1B0A0", - "HENTAIGANA LETTER HI-6": "1B0AE", - "HENTAIGANA LETTER HE-2": "1B0B4", - "HENTAIGANA LETTER HE-5": "1B0B7", - "HENTAIGANA LETTER YO-5": "1B0EB", - "HENTAIGANA LETTER RO-1": "1B102", - "HENTAIGANA LETTER RO-3": "1B104", - "NUSHU CHARACTER-1B193": "1B193", + "KHITAN SMALL SCRIPT CHARACTER-18C27": "18C27", + "KHITAN SMALL SCRIPT CHARACTER-18C30": "18C30", + "KHITAN SMALL SCRIPT CHARACTER-18C49": "18C49", + "KHITAN SMALL SCRIPT CHARACTER-18C4A": "18C4A", + "KHITAN SMALL SCRIPT CHARACTER-18C53": "18C53", + "KHITAN SMALL SCRIPT CHARACTER-18C66": "18C66", + "KHITAN SMALL SCRIPT CHARACTER-18C83": "18C83", + "KHITAN SMALL SCRIPT CHARACTER-18C9A": "18C9A", + "KHITAN SMALL SCRIPT CHARACTER-18C9E": "18C9E", + "KHITAN SMALL SCRIPT CHARACTER-18CA6": "18CA6", + "KHITAN SMALL SCRIPT CHARACTER-18CB6": "18CB6", + "KHITAN SMALL SCRIPT CHARACTER-18CD5": "18CD5", + "KATAKANA LETTER MINNAN TONE-7": "1AFF5", + "KATAKANA LETTER MINNAN NASALIZED TONE-7": "1AFFD", + "KATAKANA LETTER ARCHAIC E": "1B000", + "HENTAIGANA LETTER U-1": "1B00A", + "HENTAIGANA LETTER KO-2": "1B039", + "HENTAIGANA LETTER SI-1": "1B044", + "HENTAIGANA LETTER SE-4": "1B055", + "HENTAIGANA LETTER TU-4": "1B06C", + "HENTAIGANA LETTER TE-1": "1B06E", + "HENTAIGANA LETTER NA-1": "1B07E", + "HENTAIGANA LETTER NI-5": "1B08B", + "HENTAIGANA LETTER RE-4": "1B101", + "HENTAIGANA LETTER RO-2": "1B103", + "HENTAIGANA LETTER WE-3": "1B114", + "NUSHU CHARACTER-1B171": "1B171", + "NUSHU CHARACTER-1B179": "1B179", "NUSHU CHARACTER-1B199": "1B199", - "NUSHU CHARACTER-1B1A7": "1B1A7", - "NUSHU CHARACTER-1B1AB": "1B1AB", - "NUSHU CHARACTER-1B1AD": "1B1AD", - "NUSHU CHARACTER-1B1B8": "1B1B8", - "NUSHU CHARACTER-1B1C7": "1B1C7", - "NUSHU CHARACTER-1B1F0": "1B1F0", - "NUSHU CHARACTER-1B1F3": "1B1F3", - "NUSHU CHARACTER-1B211": "1B211", - "NUSHU CHARACTER-1B21D": "1B21D", - "NUSHU CHARACTER-1B21E": "1B21E", - "NUSHU CHARACTER-1B223": "1B223", - "NUSHU CHARACTER-1B240": "1B240", - "NUSHU CHARACTER-1B247": "1B247", - "NUSHU CHARACTER-1B284": "1B284", - "NUSHU CHARACTER-1B28A": "1B28A", - "NUSHU CHARACTER-1B29B": "1B29B", - "NUSHU CHARACTER-1B2B3": "1B2B3", - "NUSHU CHARACTER-1B2C9": "1B2C9", - "NUSHU CHARACTER-1B2E5": "1B2E5", - "NUSHU CHARACTER-1B2EF": "1B2EF", - "DUPLOYAN LETTER S S": "1BC2A", - "DUPLOYAN LETTER WH": "1BC39", - "DUPLOYAN LETTER G R S": "1BC3E", - "DUPLOYAN LETTER EE": "1BC4B", - "DUPLOYAN LETTER WI": "1BC5E", - "DUPLOYAN AFFIX ATTACHED TANGENT": "1BC78", - "ZNAMENNY COMBINING MARK VYSOKO ON LEFT": "1CF08", - "ZNAMENNY COMBINING MARK GORAZDO VYSOKO": "1CF1F", - "ZNAMENNY COMBINING MARK ON": "1CF21", - "ZNAMENNY COMBINING TONAL RANGE MARK SVETLO": "1CF31", - "ZNAMENNY COMBINING MARK KRYZH": "1CF40", - "ZNAMENNY NEUME VRAKHIYA MRACHNAYA": "1CF64", - "ZNAMENNY NEUME VRAKHIYA SVETLAYA": "1CF65", - "ZNAMENNY NEUME VRAKHIYA TRESVETLAYA": "1CF66", - "ZNAMENNY NEUME SKAMEYTSA DVOECHELNAYA KLYUCHEVAYA": "1CF7C", - "ZNAMENNY NEUME SLOZHITIE": "1CF7D", - "ZNAMENNY NEUME STRELA KRYUKOVAYA GROMOPOVODNAYA WITH SINGLE ZAPYATAYA": "1CFB8", - "BYZANTINE MUSICAL SYMBOL DASEIA": "1D001", - "BYZANTINE MUSICAL SYMBOL VAREIA EKFONITIKON": "1D005", - "BYZANTINE MUSICAL SYMBOL KREMASTI": "1D00C", - "BYZANTINE MUSICAL SYMBOL GORGON ARCHAION": "1D016", - "BYZANTINE MUSICAL SYMBOL PSILON": "1D017", - "BYZANTINE MUSICAL SYMBOL OXEIAI ARCHAION": "1D020", - "BYZANTINE MUSICAL SYMBOL ANTIKENOMA": "1D05C", - "BYZANTINE MUSICAL SYMBOL PARAKALESMA NEO": "1D05F", - 
"BYZANTINE MUSICAL SYMBOL SEISMA NEO": "1D069", - "BYZANTINE MUSICAL SYMBOL KORONIS": "1D089", + "NUSHU CHARACTER-1B1A4": "1B1A4", + "NUSHU CHARACTER-1B1D6": "1B1D6", + "NUSHU CHARACTER-1B1D7": "1B1D7", + "NUSHU CHARACTER-1B1EC": "1B1EC", + "NUSHU CHARACTER-1B20F": "1B20F", + "NUSHU CHARACTER-1B21A": "1B21A", + "NUSHU CHARACTER-1B24D": "1B24D", + "NUSHU CHARACTER-1B275": "1B275", + "NUSHU CHARACTER-1B2B7": "1B2B7", + "NUSHU CHARACTER-1B2BF": "1B2BF", + "NUSHU CHARACTER-1B2C7": "1B2C7", + "NUSHU CHARACTER-1B2E6": "1B2E6", + "NUSHU CHARACTER-1B2E7": "1B2E7", + "NUSHU CHARACTER-1B2EC": "1B2EC", + "NUSHU CHARACTER-1B2ED": "1B2ED", + "DUPLOYAN LETTER D": "1BC08", + "DUPLOYAN LETTER V": "1BC09", + "DUPLOYAN LETTER F N": "1BC0E", + "DUPLOYAN LETTER J M S": "1BC2D", + "DUPLOYAN LETTER J N S": "1BC31", + "DUPLOYAN LETTER S K": "1BC3F", + "DUPLOYAN LETTER SLOAN OW": "1BC42", + "DUPLOYAN LETTER UH": "1BC57", + "SHORTHAND FORMAT CONTINUING OVERLAP": "1BCA1", + "SHORTHAND FORMAT UP STEP": "1BCA3", + "ZNAMENNY COMBINING MARK POVYSHE ON LEFT": "1CF07", + "ZNAMENNY COMBINING MARK STRANNO MALO POVYSHE": "1CF1A", + "ZNAMENNY NEUME KOBYLA": "1CF84", + "ZNAMENNY NEUME STRELA TIKHAYA PUTNAYA": "1CF9A", + "ZNAMENNY NEUME STRELA KRYUKOVAYA GROMOKRYZHEVAYA": "1CFBC", + "BYZANTINE MUSICAL SYMBOL PARICHON": "1D01E", + "BYZANTINE MUSICAL SYMBOL APOTHEMA": "1D023", + "BYZANTINE MUSICAL SYMBOL DYO": "1D02F", + "BYZANTINE MUSICAL SYMBOL PELASTON": "1D038", + "BYZANTINE MUSICAL SYMBOL PETASTI": "1D049", + "BYZANTINE MUSICAL SYMBOL TROMIKON NEO": "1D063", + "BYZANTINE MUSICAL SYMBOL EKSTREPTON": "1D064", + "BYZANTINE MUSICAL SYMBOL TROMIKOPSIFISTON": "1D06B", + "BYZANTINE MUSICAL SYMBOL KRATIMA ARCHAION": "1D081", + "BYZANTINE MUSICAL SYMBOL AGOGI METRIA": "1D09D", "BYZANTINE MUSICAL SYMBOL AGOGI GORGI": "1D09F", - "BYZANTINE MUSICAL SYMBOL MARTYRIA VARYS ICHOS": "1D0B1", - "BYZANTINE MUSICAL SYMBOL YFESIS DIGRAMMOS EX DODEKATA": "1D0D6", - "MUSICAL SYMBOL F CLEF": "1D122", - "MUSICAL SYMBOL COMMON TIME": "1D134", - "MUSICAL SYMBOL SIXTEENTH REST": "1D13F", + "BYZANTINE MUSICAL SYMBOL FTHORA DIATONIKI KE": "1D0BF", + "BYZANTINE MUSICAL SYMBOL FTHORA DIATONIKI ZO": "1D0C0", + "MUSICAL SYMBOL WHOLE REST": "1D13B", + "MUSICAL SYMBOL ONE HUNDRED TWENTY-EIGHTH REST": "1D142", + "MUSICAL SYMBOL TRIANGLE NOTEHEAD RIGHT WHITE": "1D14C", + "MUSICAL SYMBOL NOTEHEAD BLACK": "1D158", "MUSICAL SYMBOL CLUSTER NOTEHEAD BLACK": "1D15B", - "MUSICAL SYMBOL COMBINING FLAG-2": "1D16F", - "MUSICAL SYMBOL COMBINING ACCENT-STACCATO": "1D181", - "MUSICAL SYMBOL TURN SLASH": "1D199", - "MUSICAL SYMBOL GLISSANDO UP": "1D1B1", - "MUSICAL SYMBOL MINIMA": "1D1BB", - "MUSICAL SYMBOL KIEVAN RECITATIVE MARK": "1D1E1", + "MUSICAL SYMBOL SIXTY-FOURTH NOTE": "1D163", + "MUSICAL SYMBOL COMBINING FLAG-3": "1D170", + "MUSICAL SYMBOL COMBINING DOIT": "1D185", + "MUSICAL SYMBOL COMBINING TRIPLE TONGUE": "1D18B", + "MUSICAL SYMBOL ORNAMENT STROKE-11": "1D1A5", + "MUSICAL SYMBOL KIEVAN WHOLE NOTE": "1D1E2", + "GREEK INSTRUMENTAL NOTATION SYMBOL-2": "1D21E", + "GREEK INSTRUMENTAL NOTATION SYMBOL-12": "1D224", + "GREEK INSTRUMENTAL NOTATION SYMBOL-36": "1D232", + "COMBINING GREEK MUSICAL TETRASEME": "1D243", + "KAKTOVIK NUMERAL FOUR": "1D2C4", + "MAYAN NUMERAL THREE": "1D2E3", "MAYAN NUMERAL FOURTEEN": "1D2EE", - "MAYAN NUMERAL EIGHTEEN": "1D2F2", - "TETRAGRAM FOR CONTENTION": "1D31E", - "TETRAGRAM FOR KINSHIP": "1D327", + "TETRAGRAM FOR DECISIVENESS": "1D322", + "TETRAGRAM FOR CLOSENESS": "1D326", + "TETRAGRAM FOR VASTNESS OR WASTING": "1D337", + "TETRAGRAM FOR 
UNITY": "1D33B", "TETRAGRAM FOR SEVERANCE": "1D34B", - "MATHEMATICAL BOLD SMALL Q": "1D42A", - "MATHEMATICAL ITALIC SMALL G": "1D454", - "MATHEMATICAL ITALIC SMALL Z": "1D467", - "MATHEMATICAL BOLD ITALIC CAPITAL T": "1D47B", - "MATHEMATICAL BOLD ITALIC CAPITAL X": "1D47F", - "MATHEMATICAL BOLD ITALIC SMALL T": "1D495", - "MATHEMATICAL SCRIPT CAPITAL U": "1D4B0", - "MATHEMATICAL BOLD SCRIPT SMALL G": "1D4F0", - "MATHEMATICAL FRAKTUR CAPITAL S": "1D516", + "TETRAGRAM FOR HARDNESS": "1D34D", + "TETRAGRAM FOR AGGRAVATION": "1D351", + "COUNTING ROD TENS DIGIT FIVE": "1D36D", + "MATHEMATICAL BOLD CAPITAL P": "1D40F", + "MATHEMATICAL BOLD CAPITAL X": "1D417", + "MATHEMATICAL BOLD SMALL E": "1D41E", + "MATHEMATICAL BOLD SMALL T": "1D42D", + "MATHEMATICAL ITALIC CAPITAL T": "1D447", + "MATHEMATICAL BOLD ITALIC CAPITAL P": "1D477", + "MATHEMATICAL BOLD ITALIC SMALL O": "1D490", + "MATHEMATICAL BOLD ITALIC SMALL Z": "1D49B", + "MATHEMATICAL SCRIPT SMALL R": "1D4C7", + "MATHEMATICAL BOLD SCRIPT CAPITAL V": "1D4E5", + "MATHEMATICAL BOLD SCRIPT CAPITAL Z": "1D4E9", + "MATHEMATICAL BOLD SCRIPT SMALL K": "1D4F4", "MATHEMATICAL FRAKTUR CAPITAL U": "1D518", - "MATHEMATICAL FRAKTUR CAPITAL Y": "1D51C", - "MATHEMATICAL DOUBLE-STRUCK CAPITAL M": "1D544", - "MATHEMATICAL BOLD FRAKTUR CAPITAL A": "1D56C", - "MATHEMATICAL BOLD FRAKTUR CAPITAL J": "1D575", - "MATHEMATICAL BOLD FRAKTUR CAPITAL O": "1D57A", - "MATHEMATICAL BOLD FRAKTUR CAPITAL U": "1D580", - "MATHEMATICAL BOLD FRAKTUR SMALL L": "1D591", - "MATHEMATICAL BOLD FRAKTUR SMALL O": "1D594", - "MATHEMATICAL SANS-SERIF CAPITAL J": "1D5A9", - "MATHEMATICAL SANS-SERIF BOLD CAPITAL C": "1D5D6", - "MATHEMATICAL SANS-SERIF BOLD SMALL P": "1D5FD", - "MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL A": "1D656", - "MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL M": "1D662", - "MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Y": "1D66E", - "MATHEMATICAL MONOSPACE CAPITAL T": "1D683", - "MATHEMATICAL MONOSPACE SMALL U": "1D69E", - "MATHEMATICAL BOLD CAPITAL IOTA": "1D6B0", - "MATHEMATICAL ITALIC SMALL PHI": "1D711", - "MATHEMATICAL BOLD ITALIC CAPITAL ALPHA": "1D71C", - "MATHEMATICAL BOLD ITALIC CAPITAL IOTA": "1D724", - "MATHEMATICAL BOLD ITALIC CAPITAL LAMDA": "1D726", - "MATHEMATICAL BOLD ITALIC SMALL THETA": "1D73D", - "MATHEMATICAL BOLD ITALIC PHI SYMBOL": "1D753", - "MATHEMATICAL SANS-SERIF BOLD CAPITAL EPSILON": "1D75A", - "MATHEMATICAL SANS-SERIF BOLD SMALL PSI": "1D787", - "MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL LAMDA": "1D79A", - "MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ZETA": "1D7AF", - "MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL MU": "1D7B5", - "MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PI": "1D7B9", - "MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL CHI": "1D7C0", - "MATHEMATICAL SANS-SERIF DIGIT SEVEN": "1D7E9", - "MATHEMATICAL SANS-SERIF BOLD DIGIT TWO": "1D7EE", - "MATHEMATICAL MONOSPACE DIGIT TWO": "1D7F8", - "SIGNWRITING HAND-FIST INDEX THUMB CIRCLED MIDDLE UP": "1D83C", - "SIGNWRITING HAND-FIST MIDDLE THUMB ANGLED OUT INDEX UP": "1D840", - "SIGNWRITING HAND-OVAL FIVE FINGERS SPREAD": "1D856", + "MATHEMATICAL DOUBLE-STRUCK SMALL F": "1D557", + "MATHEMATICAL BOLD FRAKTUR CAPITAL K": "1D576", + "MATHEMATICAL BOLD FRAKTUR CAPITAL L": "1D577", + "MATHEMATICAL BOLD FRAKTUR CAPITAL M": "1D578", + "MATHEMATICAL BOLD FRAKTUR CAPITAL P": "1D57B", + "MATHEMATICAL BOLD FRAKTUR CAPITAL S": "1D57E", + "MATHEMATICAL BOLD FRAKTUR SMALL G": "1D58C", + "MATHEMATICAL BOLD FRAKTUR SMALL U": "1D59A", + "MATHEMATICAL SANS-SERIF SMALL G": "1D5C0", + "MATHEMATICAL SANS-SERIF BOLD CAPITAL 
U": "1D5E8", + "MATHEMATICAL SANS-SERIF BOLD SMALL F": "1D5F3", + "MATHEMATICAL SANS-SERIF BOLD SMALL J": "1D5F7", + "MATHEMATICAL SANS-SERIF BOLD SMALL R": "1D5FF", + "MATHEMATICAL SANS-SERIF ITALIC SMALL H": "1D629", + "MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL D": "1D63F", + "MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL W": "1D652", + "MATHEMATICAL MONOSPACE CAPITAL B": "1D671", + "MATHEMATICAL MONOSPACE CAPITAL Y": "1D688", + "MATHEMATICAL MONOSPACE SMALL I": "1D692", + "MATHEMATICAL MONOSPACE SMALL J": "1D693", + "MATHEMATICAL ITALIC SMALL DOTLESS I": "1D6A4", + "MATHEMATICAL BOLD SMALL IOTA": "1D6CA", + "MATHEMATICAL ITALIC CAPITAL ALPHA": "1D6E2", + "MATHEMATICAL ITALIC SMALL ALPHA": "1D6FC", + "MATHEMATICAL BOLD ITALIC CAPITAL TAU": "1D72F", + "MATHEMATICAL BOLD ITALIC CAPITAL PSI": "1D733", + "MATHEMATICAL SANS-SERIF BOLD CAPITAL GAMMA": "1D758", + "MATHEMATICAL SANS-SERIF BOLD CAPITAL OMEGA": "1D76E", + "MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ALPHA": "1D790", + "MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL LAMDA": "1D7B4", + "MATHEMATICAL SANS-SERIF BOLD ITALIC EPSILON SYMBOL": "1D7C4", + "MATHEMATICAL BOLD SMALL DIGAMMA": "1D7CB", + "MATHEMATICAL BOLD DIGIT ZERO": "1D7CE", + "MATHEMATICAL SANS-SERIF DIGIT FOUR": "1D7E6", + "MATHEMATICAL SANS-SERIF DIGIT FIVE": "1D7E7", + "MATHEMATICAL SANS-SERIF BOLD DIGIT NINE": "1D7F5", + "MATHEMATICAL MONOSPACE DIGIT EIGHT": "1D7FE", + "SIGNWRITING HAND-FIST INDEX HINGED MIDDLE UP": "1D814", + "SIGNWRITING HAND-FIST INDEX MIDDLE CONJOINED": "1D815", + "SIGNWRITING HAND-FLAT FOUR FINGERS BENT": "1D845", "SIGNWRITING HAND-FLAT HEEL": "1D85C", - "SIGNWRITING HAND-OVAL THUMB SIDE": "1D878", - "SIGNWRITING HAND-FIST INDEX MIDDLE RING CONJOINED": "1D88C", - "SIGNWRITING HAND-OVAL LITTLE UP": "1D895", - "SIGNWRITING HAND-FIST RING UP": "1D8AE", - "SIGNWRITING HAND-CIRCLE RING LITTLE": "1D8B1", - "SIGNWRITING HAND-FIST MIDDLE RAISED KNUCKLE": "1D8C8", - "SIGNWRITING HAND-CURLICUE MIDDLE RING LITTLE ON": "1D8CF", - "SIGNWRITING HAND-FIST INDEX THUMB SIDE": "1D8DC", - "SIGNWRITING HAND-FIST THUMB UNDER FOUR FINGERS": "1D901", - "SIGNWRITING HAND-FIST HEEL": "1D904", - "SIGNWRITING SQUEEZE FLICK ALTERNATING": "1D920", - "SIGNWRITING MOVEMENT-FLOORPLANE FINGER CONTACT": "1D929", - "SIGNWRITING MOVEMENT-WALLPLANE SINGLE WRIST FLEX": "1D92E", - "SIGNWRITING MOVEMENT-DIAGONAL BETWEEN AWAY LARGE": "1D95F", - "SIGNWRITING MOVEMENT-WALLPLANE CURVE HALF-CIRCLE LARGEST": "1D98F", - "SIGNWRITING MOVEMENT-WALLPLANE LOOP SMALL DOUBLE": "1D998", - "SIGNWRITING MOVEMENT-FLOORPLANE HUMP HITTING CEILING LARGE DOUBLE": "1D9BA", - "SIGNWRITING MOVEMENT-FLOORPLANE LOOP HITTING CEILING LARGE DOUBLE": "1D9C0", - "SIGNWRITING MOVEMENT-FLOORPLANE LOOP HITTING FLOOR LARGE SINGLE": "1D9CD", - "SIGNWRITING MOVEMENT-FLOORPLANE FINGER CIRCLES HITTING WALL DOUBLE": "1D9F4", + "SIGNWRITING HAND-CUP": "1D86D", + "SIGNWRITING HAND-HOOK INDEX RING LITTLE UNDER": "1D8BF", + "SIGNWRITING HAND-CIRCLE MIDDLE UP": "1D8C7", + "SIGNWRITING HAND-HINGE INDEX THUMB OPEN": "1D8EE", + "SIGNWRITING HAND-FIST THUMB": "1D8F5", + "SIGNWRITING HAND-FIST THUMB SIDE CONJOINED": "1D8F8", + "SIGNWRITING HAND-FIST THUMB BETWEEN RING LITTLE": "1D8FD", + "SIGNWRITING STRIKE BETWEEN": "1D90D", + "SIGNWRITING MOVEMENT-WALLPLANE TRIPLE WRIST FLEX": "1D935", + "SIGNWRITING MOVEMENT-WALLPLANE BEND MEDIUM": "1D939", + "SIGNWRITING MOVEMENT-WALLPLANE BOX MEDIUM": "1D943", + "SIGNWRITING TRAVEL-WALLPLANE SHAKING": "1D951", + "SIGNWRITING MOVEMENT-DIAGONAL BETWEEN TOWARDS SMALL": "1D961", + "SIGNWRITING 
MOVEMENT-FLOORPLANE DOUBLE STRAIGHT": "1D96A", + "SIGNWRITING MOVEMENT-FLOORPLANE TRIPLE WRIST FLEX": "1D970", + "SIGNWRITING MOVEMENT-FLOORPLANE BOX LARGE": "1D97A", + "SIGNWRITING MOVEMENT-WALLPLANE WAVE CURVE TRIPLE SMALL": "1D99C", + "SIGNWRITING MOVEMENT-FLOORPLANE HUMP HITTING CEILING LARGE TRIPLE": "1D9BC", + "SIGNWRITING MOVEMENT-FLOORPLANE WAVE HITTING CEILING SMALL": "1D9C1", + "SIGNWRITING ROTATION-FLOORPLANE SINGLE HITTING CEILING": "1D9C3", + "SIGNWRITING MOVEMENT-FLOORPLANE HUMP HITTING FLOOR SMALL DOUBLE": "1D9C8", + "SIGNWRITING MOVEMENT-FLOORPLANE WAVE SNAKE": "1D9DC", + "SIGNWRITING HEAD MOVEMENT-WALLPLANE TILT": "1DA02", + "SIGNWRITING HEAD MOVEMENT-FLOORPLANE CURVE": "1DA05", + "SIGNWRITING DREAMY EYEBROWS NEUTRAL DOWN": "1DA0D", "SIGNWRITING FOREHEAD WRINKLED": "1DA13", - "SIGNWRITING EARS": "1DA30", - "SIGNWRITING SHOULDER HIP POSITIONS": "1DA6E", - "SIGNWRITING ROTATION MODIFIER-4": "1DAA3", - "SIGNWRITING ROTATION MODIFIER-7": "1DAA6", - "COMBINING GLAGOLITIC LETTER IZHE": "1E009", - "MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE": "1E06D", - "NYIAKENG PUACHUE HMONG LETTER DLA": "1E11D", - "NYIAKENG PUACHUE HMONG LETTER AA": "1E125", - "TOTO LETTER EO": "1E2A8", - "TOTO LETTER BREATHY EO": "1E2A9", - "WANCHO DIGIT TWO": "1E2F2", - "ETHIOPIC SYLLABLE GURAGE MWEE": "1E7EE", - "ETHIOPIC SYLLABLE GURAGE BWEE": "1E7F4", - "MENDE KIKAKUI SYLLABLE M118 WE": "1E80C", - "MENDE KIKAKUI SYLLABLE M157 JEE": "1E84E", - "MENDE KIKAKUI SYLLABLE M053 HIN": "1E871", - "MENDE KIKAKUI SYLLABLE M120 NGGO": "1E87D", - "MENDE KIKAKUI SYLLABLE M072 MBE": "1E898", - "ADLAM CAPITAL LETTER HA": "1E916", - "ADLAM SMALL LETTER GBE": "1E940", - "INDIC SIYAQ NUMBER FOUR HUNDRED": "1EC86", - "INDIC SIYAQ NUMBER SEVEN HUNDRED": "1EC89", - "INDIC SIYAQ NUMBER PREFIXED FOUR": "1ECA6", - "OTTOMAN SIYAQ NUMBER EIGHTY": "1ED11", - "OTTOMAN SIYAQ NUMBER EIGHT HUNDRED": "1ED1A", - "OTTOMAN SIYAQ ALTERNATE NUMBER FIVE": "1ED32", - "ARABIC MATHEMATICAL ZAH": "1EE1A", - "ARABIC MATHEMATICAL INITIAL JEEM": "1EE22", - "ARABIC MATHEMATICAL INITIAL FEH": "1EE30", - "ARABIC MATHEMATICAL INITIAL SHEEN": "1EE34", - "ARABIC MATHEMATICAL TAILED KHAH": "1EE57", - "ARABIC MATHEMATICAL STRETCHED TEH": "1EE75", - "ARABIC MATHEMATICAL LOOPED FEH": "1EE90", - "ARABIC MATHEMATICAL DOUBLE-STRUCK SEEN": "1EEAE", - "ARABIC MATHEMATICAL DOUBLE-STRUCK THEH": "1EEB6", - "ARABIC MATHEMATICAL DOUBLE-STRUCK THAL": "1EEB8", - "MAHJONG TILE EAST WIND": "1F000", - "MAHJONG TILE SEVEN OF CHARACTERS": "1F00D", - "MAHJONG TILE FOUR OF BAMBOOS": "1F013", - "MAHJONG TILE BAMBOO": "1F024", - "MAHJONG TILE JOKER": "1F02A", - "DOMINO TILE HORIZONTAL-04-01": "1F04E", - "DOMINO TILE HORIZONTAL-04-03": "1F050", - "DOMINO TILE HORIZONTAL-06-03": "1F05E", - "DOMINO TILE VERTICAL-01-01": "1F06B", - "DOMINO TILE VERTICAL-01-03": "1F06D", - "DOMINO TILE VERTICAL-04-02": "1F081", - "DOMINO TILE VERTICAL-05-04": "1F08A", - "PLAYING CARD THREE OF SPADES": "1F0A3", - "PLAYING CARD TWO OF HEARTS": "1F0B2", - "PLAYING CARD ACE OF DIAMONDS": "1F0C1", - "PLAYING CARD FOUR OF CLUBS": "1F0D4", - "PLAYING CARD TRUMP-13": "1F0ED", - "DIGIT ZERO FULL STOP": "1F100", - "SQUARED LATIN CAPITAL LETTER N": "1F13D", + "SIGNWRITING EYEGAZE-WALLPLANE CIRCLING": "1DA29", + "SIGNWRITING CHEEKS NEUTRAL": "1DA2B", + "SIGNWRITING AIR SUCK SMALL ROTATIONS": "1DA38", + "SIGNWRITING TEETH": "1DA61", + "SIGNWRITING EXCITEMENT": "1DA6C", + "SIGNWRITING SEMICOLON": "1DA89", + "SIGNWRITING FILL MODIFIER-6": "1DA9F", + "LATIN LETTER SMALL CAPITAL L WITH BELT": "1DF04", + 
"LATIN SMALL LETTER REVERSED ENG": "1DF07", + "LATIN SMALL LETTER TESH DIGRAPH WITH RETROFLEX HOOK": "1DF1C", + "COMBINING GLAGOLITIC LETTER ONU": "1E011", + "COMBINING GLAGOLITIC LETTER UKU": "1E016", + "CYRILLIC SUBSCRIPT SMALL LETTER ZE": "1E058", + "CYRILLIC SUBSCRIPT SMALL LETTER O": "1E05C", + "NYIAKENG PUACHUE HMONG LETTER TA": "1E103", + "NYIAKENG PUACHUE HMONG LETTER NA": "1E105", + "NYIAKENG PUACHUE HMONG LETTER YA": "1E118", + "NYIAKENG PUACHUE HMONG LETTER I": "1E126", + "WANCHO LETTER ONG": "1E2E2", + "NAG MUNDARI LETTER UD": "1E4E1", + "MENDE KIKAKUI SYLLABLE M001 KI": "1E800", + "MENDE KIKAKUI SYLLABLE M065 KEE": "1E803", + "MENDE KIKAKUI SYLLABLE M143 WEI": "1E810", + "MENDE KIKAKUI SYLLABLE M073 LE": "1E83A", + "MENDE KIKAKUI SYLLABLE M028 JI": "1E84B", + "MENDE KIKAKUI SYLLABLE M183 LONG NGGO": "1E882", + "MENDE KIKAKUI SYLLABLE M167 GUAN": "1E888", + "MENDE KIKAKUI SYLLABLE M184 NGEN": "1E889", + "MENDE KIKAKUI SYLLABLE M112 KPOO": "1E8A4", + "MENDE KIKAKUI SYLLABLE M067 NDOO": "1E8B3", + "MENDE KIKAKUI SYLLABLE M134 NJOO": "1E8B8", + "MENDE KIKAKUI DIGIT SIX": "1E8CC", + "ADLAM SMALL LETTER CHI": "1E937", + "ADLAM SMALL LETTER ZAL": "1E941", + "ADLAM DIGIT TWO": "1E952", + "ADLAM DIGIT NINE": "1E959", + "INDIC SIYAQ NUMBER TWO": "1EC72", + "INDIC SIYAQ NUMBER FORTY": "1EC7D", + "INDIC SIYAQ NUMBER NINETY": "1EC82", + "INDIC SIYAQ NUMBER EIGHT HUNDRED": "1EC8A", + "OTTOMAN SIYAQ NUMBER THREE": "1ED03", + "OTTOMAN SIYAQ NUMBER EIGHT": "1ED08", + "OTTOMAN SIYAQ ALTERNATE NUMBER SEVEN": "1ED34", + "ARABIC MATHEMATICAL JEEM": "1EE02", + "ARABIC MATHEMATICAL WAW": "1EE05", + "ARABIC MATHEMATICAL LAM": "1EE0B", + "ARABIC MATHEMATICAL TAILED AIN": "1EE4F", + "ARABIC MATHEMATICAL LOOPED JEEM": "1EE82", + "MAHJONG TILE ONE OF BAMBOOS": "1F010", + "MAHJONG TILE THREE OF BAMBOOS": "1F012", + "MAHJONG TILE TWO OF CIRCLES": "1F01A", + "MAHJONG TILE SPRING": "1F026", + "DOMINO TILE VERTICAL-05-03": "1F089", + "PLAYING CARD TRUMP-11": "1F0EB", + "DIGIT EIGHT COMMA": "1F109", + "PARENTHESIZED LATIN CAPITAL LETTER E": "1F114", + "SQUARED LATIN CAPITAL LETTER L": "1F13B", + "NEGATIVE CIRCLED LATIN CAPITAL LETTER M": "1F15C", "NEGATIVE CIRCLED LATIN CAPITAL LETTER N": "1F15D", - "NEGATIVE SQUARED LATIN CAPITAL LETTER K": "1F17A", - "NEGATIVE SQUARED LATIN CAPITAL LETTER Z": "1F189", - "SQUARED ONE HUNDRED TWENTY P": "1F1A4", - "SQUARED UHD": "1F1AB", - "SQUARED CJK UNIFIED IDEOGRAPH-6295": "1F227", + "NEGATIVE SQUARED LATIN CAPITAL LETTER A": "1F170", + "SQUARED HI-RES": "1F1A8", + "REGIONAL INDICATOR SYMBOL LETTER D": "1F1E9", + "REGIONAL INDICATOR SYMBOL LETTER M": "1F1F2", + "REGIONAL INDICATOR SYMBOL LETTER N": "1F1F3", + "SQUARED CJK UNIFIED IDEOGRAPH-89E3": "1F216", + "SQUARED CJK UNIFIED IDEOGRAPH-6620": "1F219", + "SQUARED CJK UNIFIED IDEOGRAPH-6307": "1F22F", "SQUARED CJK UNIFIED IDEOGRAPH-55B6": "1F23A", - "WATER WAVE": "1F30A", - "NEW MOON SYMBOL": "1F311", - "WHITE SUN BEHIND CLOUD": "1F325", - "BLOSSOM": "1F33C", - "PEAR": "1F350", - "FRIED SHRIMP": "1F364", - "LOLLIPOP": "1F36D", - "MUSICAL KEYBOARD WITH JACKS": "1F398", - "TENNIS RACQUET AND BALL": "1F3BE", - "TROPHY": "1F3C6", - "HOUSE BUILDINGS": "1F3D8", - "ANT": "1F41C", - "KOALA": "1F428", - "CHIPMUNK": "1F43F", - "WHITE LEFT POINTING BACKHAND INDEX": "1F448", - "WAVING HAND SIGN": "1F44B", - "MAN WITH GUA PI MAO": "1F472", - "CALENDAR": "1F4C5", - "STRAIGHT RULER": "1F4CF", - "TELEVISION": "1F4FA", - "ANTICLOCKWISE DOWNWARDS AND UPWARDS OPEN CIRCLE ARROWS": "1F504", - "BELL WITH CANCELLATION STROKE": "1F515", - "INPUT 
SYMBOL FOR LATIN SMALL LETTERS": "1F521", - "INPUT SYMBOL FOR NUMBERS": "1F522", - "FIRE": "1F525", - "SMALL BLUE DIAMOND": "1F539", - "CLOCK FACE TEN OCLOCK": "1F559", - "RAISED HAND WITH PART BETWEEN MIDDLE AND RING FINGERS": "1F596", - "SIDEWAYS WHITE LEFT POINTING INDEX": "1F598", - "SIDEWAYS BLACK UP POINTING INDEX": "1F5A0", - "BLACK HARD SHELL FLOPPY DISK": "1F5AA", - "EMPTY PAGES": "1F5CD", - "ROLLED-UP NEWSPAPER": "1F5DE", - "THREE RAYS LEFT": "1F5E6", - "FEARFUL FACE": "1F628", - "HAPPY PERSON RAISING ONE HAND": "1F64B", - "HEAVY NORTH WEST POINTING BUD": "1F664", - "POLICE CAR": "1F693", - "BOYS SYMBOL": "1F6C9", - "PLAYGROUND SLIDE": "1F6DD", - "ALCHEMICAL SYMBOL FOR VITRIOL-2": "1F717", - "ALCHEMICAL SYMBOL FOR IRON ORE": "1F71C", - "ALCHEMICAL SYMBOL FOR SUBLIMATE OF SALT OF COPPER": "1F727", - "ALCHEMICAL SYMBOL FOR REGULUS-2": "1F733", - "ALCHEMICAL SYMBOL FOR TUTTY": "1F74D", + "CYCLONE": "1F300", + "SUNSET OVER BUILDINGS": "1F307", + "BRIDGE AT NIGHT": "1F309", + "MILKY WAY": "1F30C", + "EARTH GLOBE AMERICAS": "1F30E", + "EAR OF RICE": "1F33E", + "COOKING": "1F373", + "ROLLER COASTER": "1F3A2", + "PERFORMING ARTS": "1F3AD", + "EMOJI MODIFIER FITZPATRICK TYPE-3": "1F3FC", + "WATER BUFFALO": "1F403", + "SNAIL": "1F40C", + "MONKEY": "1F412", + "BUG": "1F41B", + "HATCHING CHICK": "1F423", + "FROG FACE": "1F438", + "POUCH": "1F45D", + "OLDER WOMAN": "1F475", + "RING": "1F48D", + "HEART WITH RIBBON": "1F49D", + "ELECTRIC LIGHT BULB": "1F4A1", + "MONEY BAG": "1F4B0", + "MINIDISC": "1F4BD", + "DVD": "1F4C0", + "FILE FOLDER": "1F4C1", + "CLOSED BOOK": "1F4D5", + "INBOX TRAY": "1F4E5", + "INPUT SYMBOL FOR LATIN LETTERS": "1F524", + "LOWER RIGHT SHADOWED WHITE CIRCLE": "1F53E", + "NOTCHED RIGHT SEMICIRCLE WITH THREE DOTS": "1F544", + "SIDEWAYS WHITE RIGHT POINTING INDEX": "1F599", + "PRINTER": "1F5A8", + "THREE BUTTON MOUSE": "1F5B1", + "FOLDER": "1F5C0", + "OPEN FOLDER": "1F5C1", + "WASTEBASKET": "1F5D1", + "BALLOT BOLD SCRIPT X": "1F5F6", + "TOKYO TOWER": "1F5FC", + "FACE WITH LOOK OF TRIUMPH": "1F624", + "FLUSHED FACE": "1F633", + "SMILING CAT FACE WITH OPEN MOUTH": "1F63A", + "SMILING CAT FACE WITH HEART-SHAPED EYES": "1F63B", + "NORTH EAST POINTING BUD": "1F662", + "SWASH AMPERSAND ORNAMENT": "1F675", + "REVERSE CHECKER BOARD": "1F67F", + "MOUNTAIN CABLEWAY": "1F6A0", + "TRIANGULAR FLAG ON POST": "1F6A9", + "NON-POTABLE WATER SYMBOL": "1F6B1", + "PEDESTRIAN": "1F6B6", + "BABY SYMBOL": "1F6BC", + "BATH": "1F6C0", + "SLEEPING ACCOMMODATION": "1F6CC", + "SHOPPING BAGS": "1F6CD", + "PAGODA": "1F6D4", + "ALCHEMICAL SYMBOL FOR FIRE": "1F702", + "ALCHEMICAL SYMBOL FOR AQUA REGIA-2": "1F707", + "ALCHEMICAL SYMBOL FOR SALT OF COPPER ANTIMONIATE": "1F726", + "ALCHEMICAL SYMBOL FOR GUM": "1F749", + "ALCHEMICAL SYMBOL FOR WAX": "1F74A", + "ALCHEMICAL SYMBOL FOR POT ASHES": "1F758", "ALCHEMICAL SYMBOL FOR CRUCIBLE-5": "1F769", - "QUAOAR": "1F77E", - "BOLD WHITE SQUARE": "1F790", - "VERY HEAVY GREEK CROSS": "1F7A6", - "BOLD SALTIRE": "1F7AB", - "MEDIUM FIVE SPOKED ASTERISK": "1F7B0", - "RIGHTWARDS ARROW WITH MEDIUM TRIANGLE ARROWHEAD": "1F806", - "RIGHTWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD": "1F812", - "WIDE-HEADED SOUTH WEST LIGHT BARB ARROW": "1F867", - "WIDE-HEADED UPWARDS VERY HEAVY BARB ARROW": "1F881", - "HANDSHAKE": "1F91D", - "SECOND PLACE MEDAL": "1F948", - "POTATO": "1F954", - "COCONUT": "1F965", - "CANNED FOOD": "1F96B", - "MANGO": "1F96D", - "EAGLE": "1F985", + "ALCHEMICAL SYMBOL FOR BATH OF MARY": "1F76B", + "HEAVY SIX POINTED BLACK STAR": "1F7CC", + "HEAVY LEFTWARDS 
ARROW WITH LARGE EQUILATERAL ARROWHEAD": "1F81C", + "UPWARDS HEAVY COMPRESSED ARROW": "1F841", + "WIDE-HEADED NORTH EAST LIGHT BARB ARROW": "1F865", + "WIDE-HEADED SOUTH EAST HEAVY BARB ARROW": "1F87E", + "LEFT HALF CIRCLE WITH DOT": "1F906", + "PALMS UP TOGETHER": "1F932", + "MAN IN TUXEDO": "1F935", + "SMILING FACE WITH TEAR": "1F972", + "SQUID": "1F991", + "MOTORIZED WHEELCHAIR": "1F9BC", "CUPCAKE": "1F9C1", - "SALT SHAKER": "1F9C2", - "GARLIC": "1F9C4", - "ONION": "1F9C5", - "BILLED CAP": "1F9E2", - "ROLL OF PAPER": "1F9FB", - "BAR OF SOAP": "1F9FC", - "NEUTRAL CHESS ROOK ROTATED NINETY DEGREES": "1FA17", - "NEUTRAL CHESS TURNED ROOK": "1FA2C", - "BLACK CHESS KNIGHT ROTATED TWO HUNDRED TWENTY-FIVE DEGREES": "1FA31", - "NEUTRAL CHESS EQUIHOPPER": "1FA4A", - "XIANGQI RED ELEPHANT": "1FA62", - "GREY HEART": "1FA76", - "KITE": "1FA81", - "MAGIC WAND": "1FA84", - "RAZOR": "1FA92", - "PLACARD": "1FAA7", - "BEETLE": "1FAB2", - "BEANS": "1FAD8", - "RIGHTWARDS HAND": "1FAF1", - "BLOCK SEXTANT-256": "1FB2F", - "LOWER RIGHT BLOCK DIAGONAL LOWER MIDDLE LEFT TO UPPER MIDDLE RIGHT": "1FB46", - "UPPER LEFT BLOCK DIAGONAL LOWER CENTRE TO UPPER MIDDLE RIGHT": "1FB5F", - "LEFT TRIANGULAR ONE QUARTER BLOCK": "1FB6C", - "ARROWHEAD-SHAPED POINTER": "1FBB0", - "CJK COMPATIBILITY IDEOGRAPH-2F806": "2F806", - "CJK COMPATIBILITY IDEOGRAPH-2F84A": "2F84A", - "CJK COMPATIBILITY IDEOGRAPH-2F84B": "2F84B", - "CJK COMPATIBILITY IDEOGRAPH-2F84E": "2F84E", - "CJK COMPATIBILITY IDEOGRAPH-2F86B": "2F86B", - "CJK COMPATIBILITY IDEOGRAPH-2F86D": "2F86D", + "BEVERAGE BOX": "1F9C3", + "FALAFEL": "1F9C6", + "LOTION BOTTLE": "1F9F4", + "SPONGE": "1F9FD", + "BLACK CHESS PAWN ROTATED NINETY DEGREES": "1FA14", + "NEUTRAL CHESS BISHOP ROTATED NINETY DEGREES": "1FA18", + "BLACK CHESS ROOK ROTATED TWO HUNDRED SEVENTY DEGREES": "1FA3B", + "XIANGQI BLACK SOLDIER": "1FA6D", + "PREGNANT PERSON": "1FAC4", + "PALM DOWN HAND": "1FAF3", + "BLOCK SEXTANT-235": "1FB14", + "BLOCK SEXTANT-6": "1FB1E", + "LOWER LEFT BLOCK DIAGONAL LOWER MIDDLE LEFT TO LOWER CENTRE": "1FB3C", + "UPPER RIGHT BLOCK DIAGONAL LOWER MIDDLE LEFT TO LOWER CENTRE": "1FB52", + "UPPER RIGHT BLOCK DIAGONAL UPPER MIDDLE LEFT TO LOWER CENTRE": "1FB54", + "UPPER RIGHT BLOCK DIAGONAL UPPER CENTRE TO LOWER MIDDLE RIGHT": "1FB64", + "VERTICAL ONE EIGHTH BLOCK-4": "1FB72", + "LEFT AND RIGHT TRIANGULAR HALF BLOCK": "1FB9B", + "BOX DRAWINGS LIGHT DIAGONAL MIDDLE RIGHT TO LOWER CENTRE": "1FBA3", + "BOX DRAWINGS LIGHT DIAGONAL MIDDLE LEFT TO UPPER CENTRE TO MIDDLE RIGHT TO LOWER CENTRE": "1FBAC", + "RIGHT HALF FOLDER": "1FBBA", + "": "2CEB0", + "CJK COMPATIBILITY IDEOGRAPH-2F810": "2F810", + "CJK COMPATIBILITY IDEOGRAPH-2F820": "2F820", + "CJK COMPATIBILITY IDEOGRAPH-2F855": "2F855", + "CJK COMPATIBILITY IDEOGRAPH-2F859": "2F859", + "CJK COMPATIBILITY IDEOGRAPH-2F863": "2F863", "CJK COMPATIBILITY IDEOGRAPH-2F873": "2F873", - "CJK COMPATIBILITY IDEOGRAPH-2F895": "2F895", - "CJK COMPATIBILITY IDEOGRAPH-2F899": "2F899", - "CJK COMPATIBILITY IDEOGRAPH-2F89F": "2F89F", - "CJK COMPATIBILITY IDEOGRAPH-2F8A3": "2F8A3", - "CJK COMPATIBILITY IDEOGRAPH-2F8CC": "2F8CC", - "CJK COMPATIBILITY IDEOGRAPH-2F8CD": "2F8CD", - "CJK COMPATIBILITY IDEOGRAPH-2F8DE": "2F8DE", - "CJK COMPATIBILITY IDEOGRAPH-2F8F3": "2F8F3", - "CJK COMPATIBILITY IDEOGRAPH-2F8F8": "2F8F8", - "CJK COMPATIBILITY IDEOGRAPH-2F900": "2F900", - "CJK COMPATIBILITY IDEOGRAPH-2F90F": "2F90F", - "CJK COMPATIBILITY IDEOGRAPH-2F911": "2F911", - "CJK COMPATIBILITY IDEOGRAPH-2F929": "2F929", - "CJK COMPATIBILITY IDEOGRAPH-2F945": "2F945", 
- "CJK COMPATIBILITY IDEOGRAPH-2F964": "2F964", - "CJK COMPATIBILITY IDEOGRAPH-2F990": "2F990", - "CJK COMPATIBILITY IDEOGRAPH-2F991": "2F991", - "CJK COMPATIBILITY IDEOGRAPH-2F9B1": "2F9B1", - "CJK COMPATIBILITY IDEOGRAPH-2F9C9": "2F9C9", - "CJK COMPATIBILITY IDEOGRAPH-2F9E0": "2F9E0", - "CJK COMPATIBILITY IDEOGRAPH-2F9E6": "2F9E6", - "CJK COMPATIBILITY IDEOGRAPH-2F9F7": "2F9F7", + "CJK COMPATIBILITY IDEOGRAPH-2F8FA": "2F8FA", + "CJK COMPATIBILITY IDEOGRAPH-2F8FF": "2F8FF", + "CJK COMPATIBILITY IDEOGRAPH-2F930": "2F930", + "CJK COMPATIBILITY IDEOGRAPH-2F938": "2F938", + "CJK COMPATIBILITY IDEOGRAPH-2F956": "2F956", + "CJK COMPATIBILITY IDEOGRAPH-2F972": "2F972", + "CJK COMPATIBILITY IDEOGRAPH-2F977": "2F977", + "CJK COMPATIBILITY IDEOGRAPH-2F997": "2F997", + "CJK COMPATIBILITY IDEOGRAPH-2F99D": "2F99D", + "CJK COMPATIBILITY IDEOGRAPH-2F9B5": "2F9B5", + "CJK COMPATIBILITY IDEOGRAPH-2F9BA": "2F9BA", + "CJK COMPATIBILITY IDEOGRAPH-2F9BE": "2F9BE", + "CJK COMPATIBILITY IDEOGRAPH-2F9C0": "2F9C0", + "CJK COMPATIBILITY IDEOGRAPH-2F9D1": "2F9D1", + "CJK COMPATIBILITY IDEOGRAPH-2F9F8": "2F9F8", + "CJK COMPATIBILITY IDEOGRAPH-2F9FD": "2F9FD", + "CJK COMPATIBILITY IDEOGRAPH-2FA0B": "2FA0B", + "TAG DOLLAR SIGN": "E0024", + "TAG GREATER-THAN SIGN": "E003E", "TAG LATIN CAPITAL LETTER C": "E0043", - "TAG LOW LINE": "E005F", - "VARIATION SELECTOR-22": "E0105", - "VARIATION SELECTOR-41": "E0118", - "VARIATION SELECTOR-73": "E0138", - "VARIATION SELECTOR-82": "E0141", - "VARIATION SELECTOR-123": "E016A", - "VARIATION SELECTOR-127": "E016E", - "VARIATION SELECTOR-128": "E016F", - "VARIATION SELECTOR-159": "E018E", - "VARIATION SELECTOR-162": "E0191", - "VARIATION SELECTOR-219": "E01CA", - "VARIATION SELECTOR-229": "E01D4", - "VARIATION SELECTOR-235": "E01DA" + "VARIATION SELECTOR-53": "E0124", + "VARIATION SELECTOR-69": "E0134", + "VARIATION SELECTOR-88": "E0147", + "VARIATION SELECTOR-90": "E0149", + "VARIATION SELECTOR-135": "E0176", + "VARIATION SELECTOR-161": "E0190", + "VARIATION SELECTOR-174": "E019D", + "VARIATION SELECTOR-188": "E01AB", + "VARIATION SELECTOR-196": "E01B3", + "VARIATION SELECTOR-205": "E01BC", + "VARIATION SELECTOR-225": "E01D0", + "VARIATION SELECTOR-231": "E01D6" }, "numerics": { "": "0000", diff --git a/www/tests/tester.py b/www/tests/tester.py index 76fea1f71..64690b8a9 100644 --- a/www/tests/tester.py +++ b/www/tests/tester.py @@ -413,7 +413,10 @@ def assert_raises(exc_type, func, *args, msg=None, nb_args=None, **kw): f' Expected: {msg}\n' + f' Got : {exc.args[0]}') elif isinstance(msg, re.Pattern): - assert msg.match(exc.args[0]) + if not msg.match(exc.args[0]): + raise AssertionError('correct exception type, but message\n' + + f' {exc.args[0]}' + + f'\ndoes not match pattern {msg}') else: raise AssertionError(f'should have raised {exc_type.__name__}') diff --git a/www/tests/unittests/index.html b/www/tests/unittests/index.html index 2c465443e..573a58ef9 100644 --- a/www/tests/unittests/index.html +++ b/www/tests/unittests/index.html @@ -369,7 +369,7 @@ - +

[www/tests/unittests/index.html, remaining hunks in the "CPython tests" section — the changed markup lines are not reproduced here, only the hunk summary survives: @@ -426,6 +426,7 @@ adds one entry; @@ -448,9 +449,6 @@ removes three entries; @@ -458,6 +456,8 @@ adds two entries; @@ -479,8 +479,8 @@ replaces one entry; @@ -489,6 +489,16 @@ adds ten entries.]
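For context outside the diff itself, here is a minimal standalone sketch of the regex branch that the www/tests/tester.py hunk above adds to `assert_raises`: when `msg` is a compiled `re.Pattern` that does not match the exception message, the helper now raises an `AssertionError` describing the mismatch instead of failing with a bare `assert`. The full helper also handles plain-string messages and argument-count checks, which are omitted from this sketch.

```python
import re

def assert_raises(exc_type, func, *args, msg=None, **kw):
    """Simplified sketch of the tester.py helper: call func and check that it
    raises exc_type; if msg is a compiled regex, the exception message must
    match it, otherwise an AssertionError explains the mismatch."""
    try:
        func(*args, **kw)
    except exc_type as exc:
        if isinstance(msg, re.Pattern) and not msg.match(exc.args[0]):
            raise AssertionError('correct exception type, but message\n'
                                 f'  {exc.args[0]}\n'
                                 f'does not match pattern {msg}')
    else:
        raise AssertionError(f'should have raised {exc_type.__name__}')

# usage: passes, because int('xyz') raises ValueError whose message
# ("invalid literal for int() ...") matches the given pattern
assert_raises(ValueError, int, 'xyz', msg=re.compile(r"invalid literal .*"))
```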